Upload folder using huggingface_hub
app.py CHANGED
@@ -4,9 +4,9 @@ from typing import List
 import gradio as gr
 import openai
 import pinecone
-from llama_index import VectorStoreIndex, StorageContext
+from llama_index import VectorStoreIndex, StorageContext, ServiceContext
 from llama_index.chat_engine.types import ChatMode
-from llama_index.llms import ChatMessage, MessageRole
+from llama_index.llms import ChatMessage, MessageRole, OpenAI
 from llama_index.vector_stores import PineconeVectorStore
 
 from environments import OPENAI_API_KEY, PINECONE_API_KEY, PINECONE_INDEX, PASSWORD, LOCAL
@@ -26,10 +26,13 @@ pinecone.init(
 )
 pinecone_index = pinecone.Index(PINECONE_INDEX)
 
+llm = OpenAI(temperature=0.0, model="gpt-3.5-turbo-instruct")
+service_context = ServiceContext.from_defaults(llm=llm)
+
 vector_store = PineconeVectorStore(pinecone_index=pinecone_index)
 storage_context = StorageContext.from_defaults(vector_store=vector_store)
-index = VectorStoreIndex.from_documents([], storage_context=storage_context)
-chat_engine = index.as_chat_engine(chat_mode=ChatMode.CONTEXT, similarity_top_k=
+index = VectorStoreIndex.from_documents([], storage_context=storage_context, service_context=service_context)
+chat_engine = index.as_chat_engine(chat_mode=ChatMode.CONTEXT, similarity_top_k=3)
 DENIED_ANSWER_PROMPT = '我是設計用於回答關於信義會地區中心的服務內容'
 
 SYSTEM_PROMPT = f'你是信義會地區中心的智能助理,你必須基於提供的資訊進行總結,用以回答用戶的提問。' \
@@ -54,7 +57,7 @@ CHAT_EXAMPLES = [
 
 
 def convert_to_chat_messages(history: List[List[str]]) -> List[ChatMessage]:
-    chat_messages = []
+    chat_messages = [ChatMessage(role=MessageRole.SYSTEM, content=SYSTEM_PROMPT)]
     for conversation in history[-1:]:
         for index, message in enumerate(conversation):
             if not message:
@@ -119,9 +122,9 @@ def _invoke_chatgpt(history, message, is_include_system_prompt=False):
     history_openai_format.append({"role": "user", "content": message})
 
     response = openai.ChatCompletion.create(
-        model='gpt-3.5-turbo',
+        model='gpt-3.5-turbo-instruct',
         messages=history_openai_format,
-        temperature=
+        temperature=0.0,
        stream=True
     )
     partial_message = ""
@@ -152,7 +155,6 @@ with gr.Blocks() as demo:
     gr.ChatInterface(predict_without_history, examples=CHAT_EXAMPLES)
     chatbot.like(vote, None, None)
 
-
 if LOCAL:
     demo.queue()
     demo.launch(share=False)
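
For orientation, the rewired retrieval stack now runs Pinecone → PineconeVectorStore → VectorStoreIndex (with the new ServiceContext and OpenAI LLM) → a CONTEXT-mode chat engine. Below is a minimal, hypothetical driver for that engine under the legacy llama_index 0.9-style API used in the diff; the predict wrapper and its use of convert_to_chat_messages as chat history are illustrative assumptions, not code taken from app.py.

# Sketch (assumption, not from app.py): stream an answer from the chat engine.
def predict(message, history):
    # Prior turns converted by the helper that the diff modifies.
    chat_history = convert_to_chat_messages(history)
    # stream_chat() retrieves the top-k Pinecone chunks and streams the reply.
    streaming_response = chat_engine.stream_chat(message, chat_history=chat_history)
    partial = ""
    for token in streaming_response.response_gen:
        partial += token
        yield partial

A generator like this can be handed directly to gr.ChatInterface, which renders each yielded string incrementally.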
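The convert_to_chat_messages hunk only shows the new SYSTEM seed message and the first lines of the loop. A purely hypothetical completion of that helper is sketched below (the real body is not in the diff), assuming Gradio's [user, assistant] pair format where even positions are user turns and odd positions are assistant turns.

# Hypothetical completion of the helper; only the first lines appear in the diff.
def convert_to_chat_messages(history: List[List[str]]) -> List[ChatMessage]:
    chat_messages = [ChatMessage(role=MessageRole.SYSTEM, content=SYSTEM_PROMPT)]
    for conversation in history[-1:]:
        for index, message in enumerate(conversation):
            if not message:
                continue
            # Even index = user turn, odd index = assistant turn (assumption).
            role = MessageRole.USER if index % 2 == 0 else MessageRole.ASSISTANT
            chat_messages.append(ChatMessage(role=role, content=message))
    return chat_messages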
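In _invoke_chatgpt the diff pins the model to 'gpt-3.5-turbo-instruct' and the temperature to 0.0 while keeping stream=True; note that 'gpt-3.5-turbo-instruct' is a completions-style model, so the ChatCompletion endpoint may reject it. The snippet below is a sketch of how a streamed response from the pre-1.0 openai SDK is typically consumed after the partial_message = "" line; stream_answer is a hypothetical helper name, not the code that actually follows in app.py.

# Sketch (assumption): accumulate streamed ChatCompletion deltas and yield the
# growing answer so Gradio can render it incrementally.
def stream_answer(response):
    partial_message = ""
    for chunk in response:
        delta = chunk["choices"][0]["delta"]
        if delta.get("content"):
            partial_message += delta["content"]
            yield partial_message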