Joanes committed on
Commit
36bdc23
1 Parent(s): e5e72c9

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -4
app.py CHANGED
@@ -1,5 +1,6 @@
1
  import streamlit as st
2
- from transformers import pipeline
 
3
  from langchain import PromptTemplate, HuggingFaceHub, LLMChain
4
  import os
5
  from langchain.memory import ConversationBufferMemory
@@ -19,10 +20,10 @@ prompt = PromptTemplate(template=template, input_variables=["question"])
19
 
20
  answer_memory = ConversationBufferMemory(input_key='question', memory_key='chat_history')
21
 
 
22
 
23
- llm = HuggingFaceHub(repo_id="stabilityai/stablelm-tuned-alpha-3b" , model_kwargs={"temperature":0, "max_length":64})
24
-
25
- llm_chain = LLMChain(prompt=prompt, llm=llm, verbose=True, output_key='answer', memory=answer_memory)
26
 
27
 
28
  wiki = WikipediaAPIWrapper()
 
1
  import streamlit as st
2
+ from transformers import AutoTokenizer, AutoModelForCausalLM
3
+
4
  from langchain import PromptTemplate, HuggingFaceHub, LLMChain
5
  import os
6
  from langchain.memory import ConversationBufferMemory
 
20
 
21
  answer_memory = ConversationBufferMemory(input_key='question', memory_key='chat_history')
22
 
23
+ tokenizer = AutoTokenizer.from_pretrained("stabilityai/stablelm-base-alpha-3b")
24
 
25
+ model = AutoModelForCausalLM.from_pretrained("stabilityai/stablelm-base-alpha-3b")
26
+ llm_chain = LLMChain(prompt=prompt, llm=model, verbose=True, output_key='answer', memory=answer_memory)
 
27
 
28
 
29
  wiki = WikipediaAPIWrapper()