from utils import (
    get_embeddings,
    search_document_annoy,
    answer_with_gpt3_with_function_calls,
    transform_user_question,
    debug_print,
)


def get_response_from_model(user_input, top_k=3, annoy_metric='dot',
                            model_name="gpt-3.5-turbo", user_query_preprocess=False):
    assert top_k > 0, 'top_k must be an integer greater than 0'

    # Optionally rewrite the raw user input into a cleaner retrieval query.
    if user_query_preprocess:
        chatgpt_question = transform_user_question(user_input, model_name)
    else:
        chatgpt_question = user_input
    debug_print("chatgpt_question: ", chatgpt_question)

    try:
        # Embed the (possibly rewritten) question, retrieve the top_k closest
        # document chunks from the Annoy index, and answer with the chat model.
        user_q_embedding = get_embeddings(chatgpt_question)
        document = search_document_annoy(user_q_embedding, top_k=top_k, metric=annoy_metric)
        reply = answer_with_gpt3_with_function_calls(document, user_input, model_name)
        return reply
    except Exception as e:
        print(e)
        return "Error while answering the user query. Please try again with a shorter question."
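

# Minimal usage sketch (assumptions: the helpers imported from utils above are
# configured, an OpenAI API key is available, and an Annoy index over the
# document embeddings has already been built; the question below is made up).
if __name__ == "__main__":
    example_question = "What does the documentation say about rate limits?"  # hypothetical question
    answer = get_response_from_model(
        example_question,
        top_k=3,
        annoy_metric='dot',
        model_name="gpt-3.5-turbo",
        user_query_preprocess=True,
    )
    print(answer)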