Commit 9eb7af5
Parent(s): a7b4411
Upload app.py

app.py CHANGED
@@ -99,11 +99,11 @@ def predict(user_input):
             )
         }
     ]
-
+    print(prompt)
     try:
         response = client.chat.completions.create(
             # model='mistralai/Mixtral-8x7B-Instruct-v0.1',
-            model='gpt-3.5-turbo',
+            model='gpt-3.5-turbo',  # gpt-3.5-turbo
             messages=prompt,
             temperature=0
         )
@@ -113,20 +113,6 @@ def predict(user_input):
     except Exception as e:
         prediction = e

-    # While the prediction is made, log both the inputs and outputs to a local log file
-    # While writing to the log file, ensure that the commit scheduler is locked to avoid parallel
-    # access
-
-    # with scheduler.lock:
-    #     with log_file.open("a") as f:
-    #         f.write(json.dumps(
-    #             {
-    #                 'user_input': user_input,
-    #                 'retrieved_context': context_for_query,
-    #                 'model_response': prediction
-    #             }
-    #         ))
-    #         f.write("\n")

     return prediction

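The commented-out block removed here referenced a scheduler.lock and a log_file, which matches the huggingface_hub CommitScheduler pattern for persisting interaction logs from a Space to a dataset repo. Below is a minimal sketch of how that setup typically looks; the repo ID, folder and file names, and flush interval are illustrative assumptions, not values taken from this Space.

# Sketch of the logging setup the commented-out block assumed.
# Assumptions (not from this commit): repo_id, folder/file names,
# and the commit interval are placeholders.
import json
from pathlib import Path
from uuid import uuid4

from huggingface_hub import CommitScheduler

log_folder = Path("logs")
log_folder.mkdir(parents=True, exist_ok=True)
log_file = log_folder / f"data_{uuid4()}.json"

# CommitScheduler pushes the contents of folder_path to a dataset repo
# on a background thread at the given interval (in minutes).
scheduler = CommitScheduler(
    repo_id="your-username/app-logs",  # hypothetical dataset repo
    repo_type="dataset",
    folder_path=log_folder,
    path_in_repo="data",
    every=10,
)

def log_interaction(user_input, context_for_query, prediction):
    # Hold scheduler.lock so a background commit never reads a half-written line.
    with scheduler.lock:
        with log_file.open("a") as f:
            f.write(json.dumps({
                'user_input': user_input,
                'retrieved_context': context_for_query,
                'model_response': prediction,
            }))
            f.write("\n")

Calling a helper like log_interaction(...) just before return prediction would restore the behavior the commented-out block described, without blocking the request on a network push, since the actual upload happens on the scheduler's background thread.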