shresthasingh
committed
Commit 570ab35 • 1 Parent(s): 93aaea3
Update app.py
app.py CHANGED
@@ -49,8 +49,9 @@ def classify_message(message: str) -> str:
                 "content": message,
             },
         ],
-        model="
+        model="meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
     )
+    print(classify.choices[0].message.content.strip())
     return classify.choices[0].message.content.strip()
 
 # Function to extract information based on classification
@@ -77,12 +78,13 @@ def extract_info(message: str, classification: str) -> dict:
                 "content": message,
             },
         ],
-        model="
+        model="meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
         response_format={
             "type": "json_object",
             "schema": schema.model_json_schema(),
         },
     )
+    print(json.loads(extract.choices[0].message.content))
     return json.loads(extract.choices[0].message.content)
 
 # Function to make API call
@@ -129,8 +131,9 @@ def interpret_response(user_message: str, classification: str, api_response: dic
                 "content": f"User message: {user_message}\nRequest type: {classification}\nAPI response: {json.dumps(api_response)}",
             },
         ],
-        model="
+        model="meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
     )
+    print(interpret.choices[0].message.content)
     return interpret.choices[0].message.content
 
 # Main function to process the user request
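For context, here is a minimal sketch of what the updated extract_info function could look like with this change applied. Only the model name, the response_format shape, and the added print/return lines come from the diff above; the Together Python client, the system prompt, and the OrderInfo Pydantic schema are assumptions for illustration (the schema-constrained "json_object" response format and the Meta-Llama-3.1-70B-Instruct-Turbo identifier follow Together's chat completions API).

import json
import os

from pydantic import BaseModel
from together import Together  # assumed client; the repo's actual imports are not shown in this diff

client = Together(api_key=os.environ.get("TOGETHER_API_KEY"))

class OrderInfo(BaseModel):
    # Placeholder schema for illustration, not taken from the repo.
    item: str
    quantity: int

def extract_info(message: str, classification: str) -> dict:
    schema = OrderInfo  # the real app presumably picks a schema based on `classification`
    extract = client.chat.completions.create(
        messages=[
            {
                "role": "system",
                "content": f"Extract the fields for a {classification} request and reply in JSON.",
            },
            {
                "role": "user",
                "content": message,
            },
        ],
        model="meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
        response_format={
            "type": "json_object",
            "schema": schema.model_json_schema(),
        },
    )
    print(json.loads(extract.choices[0].message.content))  # debug output added in this commit
    return json.loads(extract.choices[0].message.content)

The print calls added at each step presumably surface the raw model output in the Space's runtime logs, which makes it easier to debug the classification, extraction, and interpretation stages while the app runs.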