File size: 3,475 Bytes
e40d713
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
import asyncio
import os
from os import getenv

import gradio as gr
import openai

# ID of the pre-configured OpenAI Assistant to run; read from the environment.
ASSISTANT_ID = getenv("ASSISTANT_ID")
openai_client = openai.Client(
    api_key=getenv("OPENAI_API_KEY"),
)

with gr.Blocks() as demo:
    # Add a title
    gr.Label("ISy QM-Chat")
    chatbot = gr.Chatbot()
    msg = gr.Textbox()
    clear = gr.ClearButton([msg, chatbot])

    # Citations collected from the most recent assistant reply, retrievable
    # via the "/cite <n>" command. NOTE(review): despite the name this is a
    # list, not a dict, and `global` at module level is a no-op.
    global citations_dict
    citations_dict = []

    MAX_HISTORY = 4 # Maximum number of messages to keep in the history (appears unused in this file)
    async def respond(message, chat_history):
        """Gradio submit handler: fetch the assistant's reply, append the
        exchange to the chat history, and clear the input textbox."""
        answer = await chat(message)
        chat_history.append((message, answer))
        return "", chat_history

    async def chat(user_input):
        """Send *user_input* to the OpenAI assistant and return its reply text.

        Special command: a message starting with "/cite <n>" returns the n-th
        citation stored from the previous assistant reply instead of starting
        a new run.
        """
        global citations_dict
        text = user_input

        # "/cite <n>": look up a stored citation by its footnote index.
        if text.startswith("/cite"):
            try:
                index = int(text.split(" ", 1)[1])  # Footnote number from the message
                if index < 0:
                    # Reject negative indices: they would silently wrap around
                    # to the end of the list instead of matching a footnote.
                    raise IndexError
                return citations_dict[index]
            except (IndexError, ValueError):
                return "Invalid citation index."

        # One fresh thread per message (no conversation memory across turns).
        thread = openai_client.beta.threads.create(
            messages=[
                {
                    "role": "user",
                    "content": text
                }
            ]
        )

        run = openai_client.beta.threads.runs.create(
            thread_id=thread.id,
            assistant_id=ASSISTANT_ID,
        )

        # Poll until the run reaches a terminal state. Sleep between polls so
        # this async handler doesn't busy-wait and starve the event loop, and
        # stop on failure states instead of looping forever.
        while run.status not in ("completed", "failed", "cancelled", "expired"):
            await asyncio.sleep(0.5)
            run = openai_client.beta.threads.runs.retrieve(
                run_id=run.id,
                thread_id=thread.id,
            )
        if run.status != "completed":
            return f"Sorry, the assistant run ended with status: {run.status}."

        messages = openai_client.beta.threads.messages.list(
            thread_id=thread.id
        )

        # Newest message first; take the assistant's reply.
        message = messages.data[0]
        message_content = message.content[0].text
        annotations = message_content.annotations
        citations = []

        # Replace inline annotation markers with numbered footnotes and
        # collect the corresponding citations for the "/cite" command.
        for index, annotation in enumerate(annotations):
            message_content.value = message_content.value.replace(annotation.text, f' [{index}]')

            if (file_citation := getattr(annotation, 'file_citation', None)):
                cited_file = openai_client.files.retrieve(file_citation.file_id)
                citations.append(f'> Zitat: "{file_citation.quote}"\n> Quelle: {cited_file.filename}')
            # elif (file_path := getattr(annotation, 'file_path', None)):
                # File-download annotations are not implemented.

        # Store the citations so "/cite <n>" can retrieve them later.
        citations_dict = citations

        return message_content.value

    # Wire the textbox: on submit, respond() consumes (message, history) and
    # returns (cleared textbox value, updated history).
    msg.submit(respond, [msg, chatbot], [msg, chatbot])

# Start the Gradio web server (blocks until shut down).
demo.launch()