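"""Gradio demo: a chat assistant for the French labour code (Code du travail),
streaming answers from Cohere's command-r-plus model."""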
import gradio as gr
import cohere
import os
import re
import uuid
import secrets



CO = "OrLcCzgoAtSg8TkG4VMB5G33H84Znq2kj3JjNIk9"
co = cohere.Client(CO, client_name="huggingface-rp")


def trigger_example(example):
    # Only used when example caching is enabled: run the streaming generator to completion.
    chat, updated_history = [], []
    for chat, updated_history, _ in generate_response(example, str(uuid.uuid4()), secrets.token_hex(16)):
        pass
    return chat, updated_history
        
def generate_response(user_message, cid, token, history=None):

    # The per-session token is set by demo.load() when the page loads.
    if not token:
        raise gr.Error("Error loading.")

    if history is None:
        history = []
    # Start a fresh Cohere conversation when no conversation id exists yet.
    if not cid:
        cid = str(uuid.uuid4())

    print(f"cid: {cid} prompt: {user_message}")

    history.append(user_message)

    stream = co.chat_stream(
        message=user_message,
        conversation_id=cid,
        model='command-r-plus',
        connectors=[],
        temperature=0.3,
    )

    output = ""

    for idx, response in enumerate(stream):
        if response.event_type == "text-generation":
            output += response.text
        # The first event creates the assistant turn; later events update it in place.
        if idx == 0:
            history.append(" " + output)
        else:
            history[-1] = output
        # Re-pair (user, assistant) turns for the Chatbot component and stream them out.
        chat = [
            (history[i].strip(), history[i + 1].strip())
            for i in range(0, len(history) - 1, 2)
        ]
        yield chat, history, cid

    return chat, history, cid
    

def clear_chat():
    # Reset the chat display and history, and start a new conversation id.
    return [], [], str(uuid.uuid4())


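# Example prompts shown in the UI (kept in French, the app's target language).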
examples = [
    "Pouvez-vous expliquer les règles relatives aux heures supplémentaires selon le Code du travail ?",
    "Quels sont les droits et obligations concernant les congés payés selon le Code du travail ?",
    "Quelles sont les conditions requises pour un licenciement pour motif économique en vertu du Code du travail ?",
    "Quelles sont les mesures prévues par le Code du travail en cas de harcèlement au travail ?",
   
]

custom_css = """
#logo-img {
    border: none !important;
}
#chat-message {
    font-size: 14px;
    min-height: 300px;
}
"""

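# Build the Gradio UI: logo and title, chat window, input box, and controls.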
with gr.Blocks(analytics_enabled=False, css=custom_css) as demo:
    cid = gr.State("")
    token = gr.State(value=None)
    
    with gr.Row():
        with gr.Column(scale=1):
            gr.Image("logohf1.jpg", elem_id="logo-img", show_label=False, show_share_button=False, show_download_button=False)
        with gr.Column(scale=3):
            gr.Markdown("""##### Bienvenue sur votre Assistant Code du Travail Français ! """)
            
    with gr.Column():
        with gr.Row():
            chatbot = gr.Chatbot(show_label=False, show_share_button=False, show_copy_button=True)
        
        with gr.Row():
            user_message = gr.Textbox(lines=1, placeholder="Question ...", label="Input", show_label=False)

      
        with gr.Row():
            submit_button = gr.Button("Envoyer")
            clear_button = gr.Button("Supprimer la discussion")

                        
        history = gr.State([])
        
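        # Stream the model response into the chatbot on Enter or via the send button.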
        user_message.submit(fn=generate_response, inputs=[user_message, cid, token, history], outputs=[chatbot, history, cid], concurrency_limit=32)
        submit_button.click(fn=generate_response, inputs=[user_message, cid, token, history], outputs=[chatbot, history, cid], concurrency_limit=32)
        
        clear_button.click(fn=clear_chat, inputs=None, outputs=[chatbot, history, cid], concurrency_limit=32)

        # Clear the input box after sending; inputs=None, so the lambdas take no arguments.
        user_message.submit(lambda: gr.update(value=""), None, [user_message], queue=False)
        submit_button.click(lambda: gr.update(value=""), None, [user_message], queue=False)
        clear_button.click(lambda: gr.update(value=""), None, [user_message], queue=False)
        
        with gr.Row():
            gr.Examples(
                examples=examples,
                inputs=user_message,
                cache_examples=False,
                fn=trigger_example,
                outputs=[chatbot],
                examples_per_page=100
            )

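    # Issue a random per-session token when the page loads.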
    demo.load(lambda: secrets.token_hex(16), None, token)

if __name__ == "__main__":
    # demo.launch(debug=True)
    try:
        demo.queue(api_open=False, max_size=40).launch(show_api=False)
    except Exception as e:
        print(f"Error: {e}")