Alexander Casimir Fischer committed on
Commit
3d8a869
1 Parent(s): 95330ab
Files changed (1) hide show
  1. app.py +15 -9
app.py CHANGED
@@ -1,18 +1,20 @@
1
  #importing dependencies
2
  import os
3
- from keys import apikey, token
4
 
5
  import streamlit as st
6
- from langchain.llms import OpenAI, HuggingFaceHub
 
7
  from langchain.prompts import PromptTemplate
8
  from langchain.chains import LLMChain
9
  from langchain.tools import WikipediaQueryRun
10
  from langchain.utilities import WikipediaAPIWrapper
11
 
 
12
  wikipedia = WikipediaQueryRun(api_wrapper=WikipediaAPIWrapper())
13
 
14
- os.environ["OPENAI_API_KEY"] = apikey
15
  os.environ["HUGGINGFACEHUB_API_TOKEN"] = token
 
16
 
17
  #app framework
18
  st.title("🛕Gurubot AI")
@@ -95,12 +97,16 @@ yoda_grammar = PromptTemplate(
95
  #llms
96
  #llm = OpenAI(temperature=0.9)
97
  #llm_facts = OpenAI(temperature=0)
98
- llm = HuggingFaceHub
99
- llm_facts = HuggingFaceHub
100
- main_chain = LLMChain(llm=llm, prompt=context, verbose=True)
101
- yoda_grammar_chain = LLMChain(llm=llm_facts, prompt=yoda_grammar)
102
- keyword_chain = LLMChain(llm=llm_facts, prompt=find_keyword)
103
- wiki_chain = LLMChain(llm=llm_facts, prompt=context, verbose=True)
 
 
 
 
104
 
105
 
106
  #answer on screen if prompt is entered
 
1
  #importing dependencies
2
  import os
3
+ from keys import token
4
 
5
  import streamlit as st
6
+ from transformers import pipeline
7
+ from langchain.llms import HuggingFaceHub
8
  from langchain.prompts import PromptTemplate
9
  from langchain.chains import LLMChain
10
  from langchain.tools import WikipediaQueryRun
11
  from langchain.utilities import WikipediaAPIWrapper
12
 
13
+
14
  wikipedia = WikipediaQueryRun(api_wrapper=WikipediaAPIWrapper())
15
 
 
16
  os.environ["HUGGINGFACEHUB_API_TOKEN"] = token
17
+ repo_id = "tiiuae/falcon-40b"
18
 
19
  #app framework
20
  st.title("🛕Gurubot AI")
 
97
  #llms
98
  #llm = OpenAI(temperature=0.9)
99
  #llm_facts = OpenAI(temperature=0)
100
+ llm = HuggingFaceHub(
101
+ repo_id=repo_id, model_kwargs={"temperature": 0.9, "max_length": 500}
102
+ )
103
+ llm_facts = HuggingFaceHub(
104
+ repo_id=repo_id, model_kwargs={"temperature": 0.0, "max_length": 500}
105
+ )
106
+ main_chain = LLMChain(prompt=context, llm=llm)
107
+ yoda_grammar_chain = LLMChain(prompt=yoda_grammar, llm=llm_facts)
108
+ keyword_chain = LLMChain(prompt=find_keyword, llm=llm_facts)
109
+ wiki_chain = LLMChain(prompt=context, llm=llm_facts)
110
 
111
 
112
  #answer on screen if prompt is entered