app.py
CHANGED
@@ -7,11 +7,28 @@ _ = load_dotenv(find_dotenv())
|
|
7 |
|
8 |
openai.api_key = os.getenv('OPENAI_API_KEY')
|
9 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
10 |
def greet(input):
|
11 |
|
|
|
|
|
|
|
|
|
|
|
12 |
|
|
|
|
|
|
|
13 |
|
14 |
-
return "
|
15 |
|
16 |
#iface = gr.Interface(fn=greet, inputs="text", outputs="text")
|
17 |
#iface.launch()
|
|
|
7 |
|
8 |
# Authenticate the OpenAI SDK from the environment (the key is expected to be
# populated via the dotenv load shown earlier in this file).
openai.api_key = os.getenv('OPENAI_API_KEY')
|
9 |
|
10 |
+
def get_completion(prompt, model="gpt-3.5-turbo", temperature=0):
    """Send a single-turn prompt to the OpenAI chat API and return the reply text.

    Args:
        prompt: User message sent as the sole entry of the chat history.
        model: Chat model name (default "gpt-3.5-turbo").
        temperature: Sampling temperature; default 0 keeps output
            as deterministic as the API allows (previously hard-coded).

    Returns:
        The assistant's reply content as a string.
    """
    # NOTE(review): openai.ChatCompletion is the pre-1.0 SDK interface;
    # confirm the pinned `openai` version before migrating to the
    # client.chat.completions API.
    messages = [{"role": "user", "content": prompt}]
    response = openai.ChatCompletion.create(
        model=model,
        messages=messages,
        temperature=temperature,  # degree of randomness of the model's output
    )
    return response.choices[0].message["content"]
|
18 |
+
|
19 |
def greet(input):  # NOTE: `input` shadows the builtin; name kept for caller compatibility
    """Handler for the (currently commented-out) Gradio interface.

    Asks the model to extract the product/solution, problem, features, and
    target customer discussed in the given text.

    Args:
        input: Free-form text (e.g. a conversation transcript) to analyze.

    Returns:
        "Response: " followed by the model's comma-separated list of findings.
    """
    # Fix: the instructions tell the model the sample is "delimited by triple
    # backticks", but the text was previously wrapped in triple single-quotes
    # ('''). Delimit with actual backticks so instruction and payload agree.
    prompt = f"""
    Determine the product or solution, the problem being solved, features, target customer that are being discussed in the \
    following text, which is delimited by triple backticks.

    Format your response as a list of items separated by commas.

    Text sample: ```{input}```
    """
    response = get_completion(prompt)
    return "Response: " + response
|
32 |
|
33 |
#iface = gr.Interface(fn=greet, inputs="text", outputs="text")
|
34 |
#iface.launch()
|