Tonic committed on
Commit
1aa5b50
1 Parent(s): 29c1afb

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +28 -44
app.py CHANGED
@@ -16,29 +16,18 @@ top_p=0.92
16
  repetition_penalty=1.7
17
  max_length=2048
18
 
19
- # Use model IDs as variables
20
- base_model_id = "tiiuae/falcon-7b-instruct"
21
- model_directory = "Tonic/GaiaMiniMed"
22
-
23
- # Instantiate the Tokenizer
24
- tokenizer = AutoTokenizer.from_pretrained(base_model_id, trust_remote_code=True, padding_side="left")
25
- tokenizer.pad_token = tokenizer.eos_token
26
- tokenizer.padding_side = 'left'
27
-
28
-
29
- # Load the GaiaMiniMed model with the specified configuration
30
- # Load the Peft model with a specific configuration
31
- # Specify the configuration class for the model
32
- model_config = AutoConfig.from_pretrained(base_model_id)
33
- # Load the PEFT model with the specified configuration
34
- peft_model = AutoModelForCausalLM.from_pretrained(model_directory, config=model_config)
35
- peft_model = PeftModel.from_pretrained(peft_model, model_directory)
36
-
37
 
 
 
 
 
 
 
38
 
39
  # Class to encapsulate the Falcon chatbot
40
  class FalconChatBot:
41
- def __init__(self, system_prompt="You are an expert medical analyst:"):
42
  self.system_prompt = system_prompt
43
 
44
  def process_history(self, history):
@@ -56,7 +45,7 @@ class FalconChatBot:
56
  user_message = message.get("user", "")
57
  assistant_message = message.get("assistant", "")
58
  # Check if the user_message is not a special command
59
- if not user_message.startswith("Falcon:"):
60
  filtered_history.append({"user": user_message, "assistant": assistant_message})
61
  return filtered_history
62
 
@@ -83,31 +72,26 @@ class FalconChatBot:
83
  falcon_bot = FalconChatBot()
84
 
85
  # Define the Gradio interface
86
- title = "👋🏻Welcome to Tonic's 🦅Falcon's Medical👨🏻‍⚕️Expert Chat🚀"
87
- description = "You can use this Space to test out the GaiaMiniMed model [(Tonic/GaiaMiniMed)](https://huggingface.co/Tonic/GaiaMiniMed) or duplicate this Space and use it locally or on 🤗HuggingFace. [Join me on Discord to build together](https://discord.gg/VqTxc76K3u)."
88
-
89
- # Comment out cached examples and history to avoid time out on build.
90
- #
91
- # history = [
92
- # {"user": "hi there how can you help me?", "assistant": "Hello, my name is Gaia, i'm created by Tonic, i can answer questions about medicine and public health!"},
93
- # # Add more user and assistant messages as needed
94
- # ]
95
- # examples = [
96
- # [
97
- # {
98
- # "user_message": "What is the proper treatment for buccal herpes?",
99
- # "assistant_message": "My name is Gaia, I'm a health and sanitation expert ready to answer your medical questions.",
100
- # "history": [],
101
- # "temperature": 0.4,
102
- # "max_new_tokens": 700,
103
- # "top_p": 0.90,
104
- # "repetition_penalty": 1.9,
105
- # }
106
- # ]
107
- # ]
108
 
109
  additional_inputs=[
110
- gr.Textbox("", label="Optional system prompt"),
111
  gr.Slider(
112
  label="Temperature",
113
  value=0.9,
@@ -152,7 +136,7 @@ iface = gr.Interface(
152
  description=description,
153
  # examples=examples,
154
  inputs=[
155
- gr.inputs.Textbox(label="Input Parameters", type="text", lines=5),
156
  ] + additional_inputs,
157
  outputs="text",
158
  theme="ParityError/Anime"
 
16
  repetition_penalty=1.7
17
  max_length=2048
18
 
19
+ model_name = "OpenLLM-France/Claire-7B-0.1"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
20
 
21
+ tokenizer = transformers.AutoTokenizer.from_pretrained(model_name)
22
+ model = transformers.AutoModelForCausalLM.from_pretrained(model_name,
23
+ device_map="auto",
24
+ torch_dtype=torch.bfloat16,
25
+ load_in_4bit=True # For efficient inference, if supported by the GPU card
26
+ )
27
 
28
  # Class to encapsulate the Falcon chatbot
29
  class FalconChatBot:
30
+ def __init__(self, system_prompt):
31
  self.system_prompt = system_prompt
32
 
33
  def process_history(self, history):
 
45
  user_message = message.get("user", "")
46
  assistant_message = message.get("assistant", "")
47
  # Check if the user_message is not a special command
48
+ if not user_message.startswith("Protagoniste:"):
49
  filtered_history.append({"user": user_message, "assistant": assistant_message})
50
  return filtered_history
51
 
 
72
  falcon_bot = FalconChatBot()
73
 
74
  # Define the Gradio interface
75
+ title = "👋🏻Bienvenue à Tonic's 🌜🌚Claire Chat !"
76
+ description = "Vous pouvez utiliser [🌜🌚ClaireGPT](https://huggingface.co/OpenLLM-France/Claire-7B-0.1) Ou dupliquer pour l'uiliser localement ou sur huggingface! [Join me on Discord to build together](https://discord.gg/VqTxc76K3u)."
77
+ history = [
78
+ {"user": "Le dialogue suivant est une conversation entre Emmanuel Macron et Elon Musk:", "assistant": "Emmanuel Macron: Bonjour Monsieur Musk. Je vous remercie de me recevoir aujourd'hui."},]
79
+ examples = [
80
+ [
81
+ {
82
+ "user_message": "[Elon Musk:] - Bonjour Emmanuel. Enchanté de vous revoir.",
83
+ "assistant_message": "[Emmanuel Macron:] - Je vois que vous avez effectué un voyage dans la région de la Gascogne.",
84
+ "history": [],
85
+ "temperature": 0.4,
86
+ "max_new_tokens": 700,
87
+ "top_p": 0.90,
88
+ "repetition_penalty": 1.9,
89
+ }
90
+ ]
91
+ ]
 
 
 
 
 
92
 
93
  additional_inputs=[
94
+ gr.Textbox("", label="Introduisez Un Autre Personnage Ici ou Mettez En Scene"),
95
  gr.Slider(
96
  label="Temperature",
97
  value=0.9,
 
136
  description=description,
137
  # examples=examples,
138
  inputs=[
139
+ gr.inputs.Textbox(label="Utilisez se format pour initier une conversation [Personage:]", type="text", lines=5),
140
  ] + additional_inputs,
141
  outputs="text",
142
  theme="ParityError/Anime"