from fastapi import FastAPI
from openai import OpenAI
import json
import os
# client = OpenAI(api_key=OPENAI_API_KEY)
# org = os.getenv("org")
OPENAI_API_KEY = os.getenv("open_ai_key")
client = OpenAI(api_key=OPENAI_API_KEY)  # , organization=org
description = """
### A FastAPI endpoint that takes a string as input and returns a list of questions along with their corresponding answers. This endpoint will be used to generate questions from Job Descriptions.
Details:
Input-1: A string containing the input text. (Type: String)
Input-2: Number of questions. (Type: Integer)
--------------------------------------------
Output: A JSON response containing a list of questions and a corresponding list of answers.
"""
app = FastAPI(docs_url="/", description=description)
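# Illustrative example (added sketch; the values below are assumptions, not from the original source):
# calling the endpoint with job_description="Senior Python developer, FastAPI, SQL" and
# no_of_questions=2 is expected to yield model JSON roughly shaped like
#   {"Which Python web frameworks have you worked with?": "I have built services with FastAPI ...",
#    "How do you optimize SQL queries?": "By profiling queries and adding indexes ..."}
# The exact questions and answers depend entirely on the model's output.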
# def convert_format(input_dict):
#     output_list = []
#     for i in range(1, len(input_dict) // 2 + 1):
#         question_key = f"Question {i}"
#         answer_key = f"Answer {i}"
#         if question_key in input_dict and answer_key in input_dict:
#             output_list.append({"Question": input_dict[question_key], "Answer": input_dict[answer_key]})
#     return output_list
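# NOTE (added sketch, not part of the original snippet): the description above presents this
# function as a FastAPI endpoint, but no route decorator appears here. A minimal assumption is
# to expose it directly; the HTTP method and the path "/questions" are hypothetical.
@app.get("/questions")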
async def getQuestions(job_description: str, no_of_questions: int):
    response = client.chat.completions.create(
        model="gpt-3.5-turbo-1106",
        response_format={"type": "json_object"},  # enable JSON mode
        messages=[
            {"role": "system",
             "content": "You are a helpful assistant designed to output JSON in this format [question-text as key and its value as answer-text]"},
            {"role": "user",
             "content": f"Given the job description [{job_description}] create {no_of_questions} "
                        f"interview questions and their corresponding answers"}
        ]
    )
    result = response.choices[0].message.content
    # Parse the JSON string returned by the model
    parsed_data = json.loads(result)
    print(parsed_data)
    # parsed_data = convert_format(parsed_data)
    return parsed_data
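# Run sketch (assumptions: the module name "app" and port 7860 are not shown in the original snippet):
#   uvicorn app:app --host 0.0.0.0 --port 7860
# Example request against the hypothetical route above:
#   curl "http://localhost:7860/questions?job_description=Backend%20engineer&no_of_questions=3"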