Spaces:
Sleeping
use fine tuned model (1st model)
Browse files
app.py
CHANGED
@@ -38,7 +38,7 @@ chainlit_state.qa_model_name = "gpt-4o-mini"
|
|
38 |
chainlit_state.qa_model = ChatOpenAI(model=chainlit_state.qa_model_name, openai_api_key=openai_api_key)
|
39 |
|
40 |
hf_username = "rchrdgwr"
|
41 |
-
hf_repo_name = "finetuned-arctic-model
|
42 |
finetuned_model_name = f"{hf_username}/{hf_repo_name}"
|
43 |
|
44 |
chainlit_state.embedding_model_name = finetuned_model_name
|
@@ -109,8 +109,12 @@ async def main(message):
|
|
109 |
chunk_string = "Sources: "
|
110 |
for doc in context_documents:
|
111 |
document_title = doc.metadata.get("source", "Unknown Document")
|
112 |
-
chunk_number = doc.metadata.get("chunk_number", "Unknown Chunk")
|
113 |
-
|
|
|
|
|
|
|
|
|
114 |
|
115 |
await cl.Message(
|
116 |
content=f"{chunk_string}",
|
|
|
38 |
chainlit_state.qa_model = ChatOpenAI(model=chainlit_state.qa_model_name, openai_api_key=openai_api_key)
|
39 |
|
40 |
hf_username = "rchrdgwr"
|
41 |
+
hf_repo_name = "finetuned-arctic-model"
|
42 |
finetuned_model_name = f"{hf_username}/{hf_repo_name}"
|
43 |
|
44 |
chainlit_state.embedding_model_name = finetuned_model_name
|
|
|
109 |
chunk_string = "Sources: "
|
110 |
for doc in context_documents:
|
111 |
document_title = doc.metadata.get("source", "Unknown Document")
|
112 |
+
chunk_number = doc.metadata.get("chunk_number", "Unknown Chunk")
|
113 |
+
if document_title == "":
|
114 |
+
doc_string = "BOR"
|
115 |
+
else:
|
116 |
+
doc_string = "RMF"
|
117 |
+
chunk_string = chunk_string + " " + doc_string + "-" + str(chunk_number)
|
118 |
|
119 |
await cl.Message(
|
120 |
content=f"{chunk_string}",
|