Create app.py
app.py
ADDED
@@ -0,0 +1,38 @@
+import os  # read the API key from the environment
+import streamlit as st  # deployment / UI
+from langchain.chains.llm import LLMChain  # individual chain
+from langchain.chains.sequential import SequentialChain  # combine chains
+from langchain_groq import ChatGroq  # Groq-hosted LLM
+from langchain.prompts import PromptTemplate  # prompt for each query
+
+st.title('LEADER DATABASE')  # page title
+leader_name = st.text_input('Enter the name of a famous Indian leader.')
+
+sub = st.button('SUBMIT')
+
+# Keep the key out of the source: set GROQ_API_KEY before running the app.
+LLM_model = ChatGroq(temperature=0.6,
+                     groq_api_key=os.getenv('GROQ_API_KEY'))
+
+prompt1 = PromptTemplate(input_variables=['A'],
+                         template='tell me about {A} in 20 words.')
+
+chain1 = LLMChain(llm=LLM_model, prompt=prompt1, output_key='person')
+
+prompt2 = PromptTemplate(input_variables=['person'],
+                         template='what is the date of birth of this {person}?')
+
+chain2 = LLMChain(llm=LLM_model, prompt=prompt2, output_key='dob')
+
+prompt3 = PromptTemplate(input_variables=['dob'],
+                         template='mention how many times this leader won elections: {dob}')
+
+chain3 = LLMChain(llm=LLM_model, prompt=prompt3, output_key='election_win')
+
+# combine all the chains in sequence
+parent = SequentialChain(chains=[chain1, chain2, chain3],
+                         input_variables=['A'],
+                         output_variables=['person', 'dob', 'election_win'])
+
+if sub:
+    st.write(parent.invoke({'A': leader_name}))  # run the pipeline and show every output
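A short usage note, not part of the committed file: the sketch below reuses the objects defined in app.py above (parent, leader_name, st) and shows one way to display each output key on its own instead of writing the raw result dict. The model name mentioned in the comment is an assumption about newer langchain-groq releases, not something the diff specifies.

# Usage sketch, reusing the objects defined in app.py above.
# Depending on the installed langchain-groq version, ChatGroq may also require an
# explicit model, e.g. ChatGroq(..., model='llama-3.1-8b-instant'); that model name
# is an assumption, not taken from the diff.
result = parent.invoke({'A': leader_name})  # dict holding the input plus every declared output key

st.subheader('About')
st.write(result['person'])        # 20-word summary from chain1
st.subheader('Date of birth')
st.write(result['dob'])           # answer from chain2
st.subheader('Elections won')
st.write(result['election_win'])  # answer from chain3

Set GROQ_API_KEY in the environment (or in Streamlit secrets) before starting the app with: streamlit run app.py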