|
from openai import OpenAI |
|
import panel as pn |
|
import requests |
|
import os |
|
|
|
# Initialize Panel's extension system so widgets/panes render when served.
pn.extension()




# Crossref REST API endpoint for searching scholarly works (no API key needed).
API_URL = "https://api.crossref.org/works"




# LLM prompt template; {query} and {items} are filled via str.format in
# process_inputs. Users can edit this live through prompt_template_input.
DEFAULT_PROMPT_TEMPLATE = """

Here are the papers related to {query}



Help me summarize these into bullet points, readable within 2 minutes.



{items}

"""
|
|
|
|
|
def get_relevant_papers(query, rows):
    """Search the Crossref works API for papers matching a free-text query.

    Parameters
    ----------
    query : str
        Bibliographic search string (matched against titles, authors, etc.).
    rows : int
        Maximum number of results to request.

    Returns
    -------
    dict
        The parsed JSON response; results live under ``["message"]["items"]``.

    Raises
    ------
    requests.HTTPError
        If Crossref responds with a non-2xx status code.
    requests.Timeout
        If the request exceeds the timeout.
    """
    params = {
        "query.bibliographic": query,
        "rows": rows,
    }
    # A timeout keeps the chat callback from hanging indefinitely if
    # Crossref is slow or unreachable; raise_for_status surfaces HTTP
    # errors instead of failing later on a confusing JSON decode.
    response = requests.get(API_URL, params=params, timeout=30)
    response.raise_for_status()
    return response.json()
|
|
|
|
|
def process_inputs(contents, user, instance):
    """Chat callback: fetch Crossref papers for the user's query, post the
    raw results as a "Sources" message, then stream an LLM bullet-point
    summary back to the chat.

    Parameters
    ----------
    contents : str
        The user's chat message, used as the search query.
    user : str
        Name of the sending user (unused, required by the callback signature).
    instance : pn.chat.ChatInterface
        The chat widget the callback is attached to.

    Yields
    ------
    str
        The accumulated summary text, re-yielded as each token streams in.
    """
    output = get_relevant_papers(contents, rows_input.value)
    # Show the raw Crossref payload so users can inspect the sources;
    # respond=False keeps this from re-triggering the callback.
    instance.send(pn.pane.JSON(output), respond=False, user="Sources")

    items = []
    for item in output["message"]["items"]:
        abstract = item.get("abstract", "")
        # Crossref returns "title" as a list of strings (and it may be
        # absent); join it so the prompt doesn't contain a list repr.
        title = "; ".join(item.get("title", []))
        url = item["URL"]
        items.append(f"{title}({url}): {abstract}")

    # Join entries with newlines — formatting the list object directly
    # would embed its Python repr in the prompt.
    prompt = prompt_template_input.value.format(
        query=contents, items="\n".join(items)
    )
    response = client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": prompt}],
        stream=True,
    )
    message = ""
    for chunk in response:
        part = chunk.choices[0].delta.content
        if part is not None:
            message += part
            # Yield the growing message so the chat UI updates live.
            yield message
|
|
|
|
|
# OpenAI client; reads the API key from the OPENAI_API_KEY environment variable.
client = OpenAI()




# Editable prompt template shown in the sidebar; whitespace around the
# default template is trimmed for display.
prompt_template_input = pn.widgets.TextAreaInput(

    value=DEFAULT_PROMPT_TEMPLATE.strip(), height=500

)
# How many Crossref results to fetch per query.
rows_input = pn.widgets.IntInput(name="Number of rows", value=2)
# callback_exception="verbose" surfaces full tracebacks in the chat UI.
chat_interface = pn.chat.ChatInterface(callback=process_inputs, callback_exception="verbose")




# Assemble the served page: controls in the sidebar, chat in the main area.
sidebar = pn.Column(prompt_template_input, rows_input)

main = pn.Column(chat_interface)

pn.template.FastListTemplate(

    sidebar=[sidebar],

    main=[main],

    title="Elsevier Summarizer",

).servable()