SidraMemon committed on
Commit
bc3cd15
·
verified ·
1 Parent(s): 24fc40e

Upload email_generator_using_groq_and_langchain.py

Browse files
email_generator_using_groq_and_langchain.py ADDED
@@ -0,0 +1,98 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ """Email_Generator_Using_Groq_and_Langchain.ipynb
3
+
4
+ Automatically generated by Colab.
5
+
6
+ Original file is located at
7
+ https://colab.research.google.com/drive/1Ev-35eeTqANlgNH1LFEo6AovAOKQk7oM
8
+ """
9
+
10
+ !pip install langchain-groq langchain_community -q
11
+
12
+ !pip install chromadb -q
13
+
14
+ !pip install streamlit -q
15
+
16
+ # Commented out IPython magic to ensure Python compatibility.
17
+ # %%writefile app.py
18
+ # import streamlit as st
19
+ # import pandas as pd
20
+ # import uuid
21
+ # import chromadb
22
+ # from langchain_groq import ChatGroq
23
+ # from langchain_community.document_loaders import WebBaseLoader
24
+ # from langchain_core.prompts import PromptTemplate
25
+ # from langchain_core.output_parsers import JsonOutputParser
26
+ #
27
+ # st.title("Cold Email Generator")
28
+ #
29
+ # url = st.text_input("Enter Job Posting URL")
30
+ # submit_button = st.button("Generate Email")
31
+ #
32
+ # if submit_button and url:
33
+ # llm = ChatGroq(temperature=0, model_name="llama-3.3-70b-versatile", groq_api_key="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")
34
+ # loader = WebBaseLoader(url)
35
+ # data = loader.load().pop().page_content
36
+ # #st.success("Job posting loaded!")
37
+ #
38
+ # prompt_extract = PromptTemplate.from_template(
39
+ # """
40
+ # ### SCRAPED TEXT FROM WEBSITE:
41
+ # {data}
42
+ # ### INSTRUCTION:
43
+ # The scraped text is from the career's page of a website.
44
+ # Your job is to extract the job postings and return them in JSON format containing the
45
+ # following keys: `role`, `experience`, `skills` and `description`.
46
+ # Only return the valid JSON.
47
+ # ### VALID JSON (NO PREAMBLE):
48
+ # """
49
+ # )
50
+ # chain_extract = prompt_extract | llm
51
+ # res = chain_extract.invoke({"data": data})
52
+ # json_parser = JsonOutputParser()
53
+ # json_res = json_parser.parse(res.content)
54
+ # #st.success("Job details extracted!")
55
+ # df=pd.read_csv("/content/my_portfolio.csv")
56
+ # client = chromadb.PersistentClient('vectorstore')
57
+ # collection = client.get_or_create_collection(name="portfolio")
58
+ # if not collection.count():
59
+ # for _, row in df.iterrows():
60
+ # collection.add(documents=row["Techstack"],
61
+ # metadatas={"links": row["Links"]},
62
+ # ids=[str(uuid.uuid4())])
63
+ # job=json_res
64
+ # links = collection.query(query_texts=job['skills'], n_results=2).get('metadatas', [])
65
+ #
66
+ #
67
+ # prompt_email = PromptTemplate.from_template(
68
+ # """
69
+ # ### JOB DESCRIPTION:
70
+ # {job_description}
71
+ # Your job is to write a cold email to the client regarding the job mentioned above describing your capability
72
+ # to fulfill their needs.
73
+ # Also add the most relevant ones from the following links to showcase portfolio: {link_list}
74
+ # Remember you are Sidra, ML Engineer at XYZ company.
75
+ # Avoid generic introductions—focus on **value, relevance, and engagement**.
76
+ # ### EMAIL (NO PREAMBLE):
77
+ # """
78
+ # )
79
+ # chain_email = prompt_email | llm
80
+ # res = chain_email.invoke({"job_description": str(job), "link_list": links})
81
+ # st.text_area("Generated Email", res.content, height=300)
82
+ #
83
+ # # Sidebar with instructions
84
+ # st.sidebar.markdown("## Guide")
85
+ # st.sidebar.info(
86
+ # "It allows users to input the URL of a company's careers page. "
87
+ # "The tool then extracts job listings from that page and generates personalized cold emails. "
88
+ # "These emails include relevant portfolio links sourced from a vector database, based on the specific job descriptions."
89
+ # )
90
+ #
91
+ #
92
+
93
+ !npm install -g localtunnel
94
+
95
+ # Your public ip is the password to the localtunnel
96
+ !curl ipv4.icanhazip.com
97
+
98
+ !streamlit run app.py &>./logs.txt & npx localtunnel --port 8501