kshitijkumbar committed
Commit e8f0dc8 • 1 Parent(s): 3107845

Change model

Files changed (1)
  1. app.py +9 -6
app.py CHANGED
@@ -46,22 +46,25 @@ def getVectorIndex(docs):
 
 def getLLM():
 
+    model_path = "NousResearch/Llama-2-13b-chat-hf"
+    # model_path = "meta-llama/Llama-2-13b-chat-hf"
 
     llm = HuggingFaceLLM(
         context_window=3900,
         max_new_tokens=256,
         # generate_kwargs={"temperature": 0.25, "do_sample": False},
-        tokenizer_name="meta-llama/Llama-2-13b-chat-hf",
-        model_name="meta-llama/Llama-2-13b-chat-hf",
+        tokenizer_name=model_path,
+        model_name=model_path,
         device_map=0,
         tokenizer_kwargs={"max_length": 2048},
         # uncomment this if using CUDA to reduce memory usage
         model_kwargs={"torch_dtype": torch.float16,
-                      "quantization_config": default_bnb_config,
+                      # "quantization_config": default_bnb_config,
         }
     )
     return llm
 
+
 def getQueryEngine(index):
     query_engine = index.as_chat_engine(llm=getLLM())
     return query_engine
@@ -80,9 +83,9 @@ def getEmbedModel():
 
 
 
-st.set_page_config(page_title="Chat with the Streamlit docs, powered by LlamaIndex", page_icon="🦙", layout="centered", initial_sidebar_state="auto", menu_items=None)
-st.title("Chat with the Streamlit docs, powered by LlamaIndex 💬🦙")
-st.info("Check out the full tutorial to build this app in our [blog post](https://blog.streamlit.io/build-a-chatbot-with-custom-data-sources-powered-by-llamaindex/)", icon="📃")
+st.set_page_config(page_title="Project BookWorm: Your own Librarian!", page_icon="🦙", layout="centered", initial_sidebar_state="auto", menu_items=None)
+st.title("Project BookWorm: Your own Librarian!")
+st.info("Use this app to get recommendations for books that your kids will love!", icon="📃")
 
 if "messages" not in st.session_state.keys(): # Initialize the chat messages history
     st.session_state.messages = [
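
Note on the quantization line this commit comments out: `default_bnb_config` is defined elsewhere in app.py and is not part of this diff, so its exact settings are unknown here. As a rough, hypothetical sketch only, a bitsandbytes 4-bit config of the kind typically passed through `model_kwargs["quantization_config"]` could look like this:

# Hypothetical sketch; the real default_bnb_config in app.py may differ.
import torch
from transformers import BitsAndBytesConfig

default_bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,                     # quantize weights to 4 bits to cut GPU memory
    bnb_4bit_quant_type="nf4",             # NormalFloat4 quantization
    bnb_4bit_use_double_quant=True,        # also quantize the quantization constants
    bnb_4bit_compute_dtype=torch.float16,  # match the torch_dtype passed to HuggingFaceLLM
)

# Re-enabling quantization would restore the line this commit comments out:
# model_kwargs={"torch_dtype": torch.float16,
#               "quantization_config": default_bnb_config}

With the config commented out, the weights load in plain float16, which needs roughly 26 GB for a 13B-parameter model versus around 7 GB with 4-bit loading. The switch from the meta-llama repository to the NousResearch copy of Llama-2-13b-chat-hf presumably avoids the gated-access approval required by the meta-llama repo.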