Spaces:
Sleeping
Sleeping
Update chat.py
Browse files
chat.py
CHANGED
@@ -1,6 +1,6 @@
|
|
1 |
from time import time, sleep
|
2 |
import datetime
|
3 |
-
|
4 |
import os
|
5 |
import openai
|
6 |
import json
|
@@ -11,11 +11,14 @@ import re
|
|
11 |
from langchain.memory import VectorStoreRetrieverMemory
|
12 |
|
13 |
## Read the environment variables
|
14 |
-
|
15 |
openai.api_key = os.getenv('OPENAI_API_KEY')
|
16 |
embedding_model = os.getenv('EMBEDDING_ENGINE')
|
17 |
convo_length = int(os.getenv('CONVO_LENGTH_TO_FETCH'))
|
18 |
llm_model = os.getenv('LLM_MODEL')
|
|
|
|
|
|
|
19 |
|
20 |
pinecone_api_key = os.getenv('PINECONE_API_KEY')
|
21 |
pinecone_env = os.getenv('PINECONE_REGION')
|
@@ -25,6 +28,9 @@ pinecone.init(
|
|
25 |
environment=pinecone_env
|
26 |
)
|
27 |
vector_db = pinecone.Index(pinecone_index)
|
|
|
|
|
|
|
28 |
|
29 |
def timestamp_to_datetime(unix_time):
    """Format a Unix timestamp as a friendly local-time string."""
    moment = datetime.datetime.fromtimestamp(unix_time)
    return moment.strftime("%A, %B %d, %Y at %I:%M%p %Z")
|
@@ -116,7 +122,14 @@ def start_game(game_id, user_id, user_input):
|
|
116 |
vector_db.upsert(payload)
|
117 |
|
118 |
return(llm_output)
|
119 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
120 |
|
121 |
def populate_prompt(game_id, splits):
|
122 |
prompt_text = list()
|
@@ -135,8 +148,12 @@ def populate_prompt(game_id, splits):
|
|
135 |
def initialize_game(game_id, user_id, user_input):
|
136 |
game_details = get_game_details(game_id)
|
137 |
whole_prompt = populate_prompt(game_id, game_details["splits"])
|
|
|
|
|
138 |
whole_prompt = whole_prompt.replace("<<USER_INPUT_MSG>>", user_input)
|
139 |
-
|
|
|
|
|
140 |
llm_prompt_op = call_gpt(whole_prompt)
|
141 |
#print(llm_prompt_op.choices[0]["message"]["content"])
|
142 |
fname="prompt_" + game_id + "_" + user_id + ".txt"
|
|
|
1 |
from time import time, sleep
|
2 |
import datetime
|
3 |
+
import dotenv
|
4 |
import os
|
5 |
import openai
|
6 |
import json
|
|
|
11 |
from langchain.memory import VectorStoreRetrieverMemory
|
12 |
|
13 |
## Read the environment variables
|
14 |
+
dotenv.load_dotenv('.env')
|
15 |
openai.api_key = os.getenv('OPENAI_API_KEY')
|
16 |
embedding_model = os.getenv('EMBEDDING_ENGINE')
|
17 |
convo_length = int(os.getenv('CONVO_LENGTH_TO_FETCH'))
|
18 |
llm_model = os.getenv('LLM_MODEL')
|
19 |
+
# Module-level debug switch: enabled only when the DEBUG environment
# variable is exactly the string 'True' (any other value, or unset, is off).
# Replaces the verbose set-flag-then-maybe-flip pattern with a direct
# boolean assignment; the resulting value is identical.
debug = os.getenv('DEBUG') == 'True'
|
22 |
|
23 |
pinecone_api_key = os.getenv('PINECONE_API_KEY')
|
24 |
pinecone_env = os.getenv('PINECONE_REGION')
|
|
|
28 |
environment=pinecone_env
|
29 |
)
|
30 |
vector_db = pinecone.Index(pinecone_index)
|
31 |
+
# Game-documentation locations, read from the environment.
# NOTE(review): os.getenv returns None when a variable is unset — the code
# below formats these into file paths, so confirm deployment always sets them.
file_path = os.getenv('GAME_DOCS_FOLDER')   # folder containing game docs
file_name = os.getenv('GAME_DOCS_FILE')     # main game docs file name
game_index = os.getenv('GAME_ID_INDEX')     # JSON index mapping game_id -> details
|
34 |
|
35 |
def timestamp_to_datetime(unix_time):
    """Convert a Unix timestamp to a human-readable local-time string."""
    return datetime.datetime.fromtimestamp(unix_time).strftime("%A, %B %d, %Y at %I:%M%p %Z")
|
|
|
122 |
vector_db.upsert(payload)
|
123 |
|
124 |
return(llm_output)
|
125 |
+
|
126 |
+
def get_game_details(game_id):
    """Look up the details record for *game_id* in the game index file.

    Reads the JSON index at ``{file_path}/{game_index}`` and returns the
    first entry in its "game_details" list whose "game_id" matches.
    Returns the string "Not Found" when no entry matches (callers depend
    on this sentinel value).
    """
    index_json = json.loads(open_file(f"{file_path}/{game_index}"))
    return next(
        (entry for entry in index_json["game_details"] if entry["game_id"] == game_id),
        "Not Found",
    )
|
133 |
|
134 |
def populate_prompt(game_id, splits):
|
135 |
prompt_text = list()
|
|
|
148 |
def initialize_game(game_id, user_id, user_input):
|
149 |
game_details = get_game_details(game_id)
|
150 |
whole_prompt = populate_prompt(game_id, game_details["splits"])
|
151 |
+
if debug:
|
152 |
+
print(whole_prompt[:1000])
|
153 |
whole_prompt = whole_prompt.replace("<<USER_INPUT_MSG>>", user_input)
|
154 |
+
if debug:
|
155 |
+
print(whole_prompt[:1000])
|
156 |
+
|
157 |
llm_prompt_op = call_gpt(whole_prompt)
|
158 |
#print(llm_prompt_op.choices[0]["message"]["content"])
|
159 |
fname="prompt_" + game_id + "_" + user_id + ".txt"
|