adhinojosa committed on
Commit
e02d784
1 Parent(s): 77b2de7

cambio a modelo con mejor respuesta

Browse files
Files changed (3) hide show
  1. __pycache__/model.cpython-310.pyc +0 -0
  2. app.py +1 -1
  3. model.py +4 -2
__pycache__/model.cpython-310.pyc CHANGED
Binary files a/__pycache__/model.cpython-310.pyc and b/__pycache__/model.cpython-310.pyc differ
 
app.py CHANGED
@@ -8,7 +8,7 @@ import streamlit as st
8
 
9
 
10
  if "model" not in st.session_state:
11
- st.session_state["model"] = "TheBloke/Mistral-7B-OpenOrca-GGUF"
12
 
13
  # Initialize chat history
14
  if "messages" not in st.session_state:
 
8
 
9
 
10
  if "model" not in st.session_state:
11
+ st.session_state["model"] = "TheBloke/Mistral-7B-Instruct-v0.2-GGUF"
12
 
13
  # Initialize chat history
14
  if "messages" not in st.session_state:
model.py CHANGED
@@ -1,2 +1,4 @@
1
- from ctransformers import AutoModelForCausalLM
2
- model = AutoModelForCausalLM.from_pretrained("TheBloke/Mistral-7B-OpenOrca-GGUF", model_file="mistral-7b-openorca.Q2_K.gguf", model_type="mistral")
 
 
 
1
+ from ctransformers import AutoModelForCausalLM
2
+ #model = AutoModelForCausalLM.from_pretrained("TheBloke/Mistral-7B-OpenOrca-GGUF", model_file="mistral-7b-openorca.Q2_K.gguf", model_type="mistral")
3
+
4
+ model = AutoModelForCausalLM.from_pretrained("TheBloke/Mistral-7B-Instruct-v0.2-GGUF")