Thziin committed on
Commit
6f28eab
1 Parent(s): 9a3f681

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +51 -35
app.py CHANGED
@@ -2,34 +2,45 @@ import gradio as gr
2
  from huggingface_hub import InferenceClient
3
  from datasets import load_dataset
4
 
5
- # Load the PleIAs/common_corpus dataset with error handling
6
  def load_common_corpus():
7
  try:
8
- return load_dataset("PleIAs/common_corpus")
 
 
 
9
  except Exception as e:
10
  print(f"Error loading dataset: {e}")
11
  return None
12
 
13
  common_corpus = load_common_corpus()
14
 
15
- # Retrieve an example from the dataset safely
16
  def get_example_from_corpus(dataset, index):
17
  if dataset and "train" in dataset:
18
  try:
19
  return dataset["train"][index]
20
  except IndexError:
21
- print("Index out of range for dataset")
22
  return {"text": "No example available"}
23
  else:
24
- return {"text": "Dataset not loaded correctly"}
 
25
 
26
- # Initialize the Inference Client with error handling
27
- try:
28
- client = InferenceClient("unsloth/Llama-3.2-1B-Instruct")
29
- except Exception as e:
30
- print(f"Error initializing inference client: {e}")
31
- client = None
 
 
 
 
 
 
32
 
 
33
  def respond(
34
  message,
35
  history: list[tuple[str, str]],
@@ -43,50 +54,55 @@ def respond(
43
 
44
  messages = [{"role": "system", "content": system_message}]
45
 
 
46
  for val in history:
47
  if val[0]:
48
  messages.append({"role": "user", "content": val[0]})
49
  if val[1]:
50
  messages.append({"role": "assistant", "content": val[1]})
51
 
 
52
  messages.append({"role": "user", "content": message})
53
 
54
  try:
 
55
  response = client.chat_completion(
56
  messages,
57
  max_tokens=max_tokens,
58
  temperature=temperature,
59
  top_p=top_p,
60
  ).choices[0].message.content
 
 
61
  except Exception as e:
62
  print(f"Error during inference: {e}")
63
- response = "An error occurred while generating a response."
64
-
65
- return response
66
 
67
- # Example: Retrieve an entry from the dataset to demonstrate integration
68
  example_data = get_example_from_corpus(common_corpus, 0)
69
- print("Example from PleIAs/common_corpus:", example_data)
70
 
71
- # Gradio interface with proper error handling
72
- demo = gr.ChatInterface(
73
- respond,
74
- additional_inputs=[
75
- gr.Textbox(value="You are a friendly Chatbot. Your name is Juninho.", label="System message"),
76
- gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
77
- gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
78
- gr.Slider(
79
- minimum=0.1,
80
- maximum=1.0,
81
- value=0.95,
82
- step=0.05,
83
- label="Top-p (nucleus sampling)",
84
- ),
85
- ],
86
- )
87
-
88
- if __name__ == "__main__":
89
  try:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
90
  demo.launch()
91
  except Exception as e:
92
- print(f"Error launching Gradio app: {e}")
 
 
 
 
2
  from huggingface_hub import InferenceClient
3
  from datasets import load_dataset
4
 
5
def load_common_corpus():
    """Fetch the PleIAs/common_corpus dataset, returning None on failure.

    Any exception raised by ``load_dataset`` (network error, auth failure,
    missing dataset) is reported on stdout instead of propagating, so module
    import never crashes because of a dataset problem.
    """
    print("Loading dataset...")
    try:
        corpus = load_dataset("PleIAs/common_corpus")
    except Exception as e:
        print(f"Error loading dataset: {e}")
        return None
    print("Dataset loaded successfully!")
    return corpus


# Loaded once at import time; None when loading failed.
common_corpus = load_common_corpus()
18
def get_example_from_corpus(dataset, index):
    """Return record ``index`` from the dataset's "train" split.

    Parameters
    ----------
    dataset : mapping or None
        Result of ``load_common_corpus()``; may be None if loading failed.
    index : int
        Position of the record to fetch.

    Returns
    -------
    dict
        The requested record, or a ``{"text": ...}`` placeholder when the
        dataset is unavailable or the index is invalid.
    """
    # Guard clause: dataset missing entirely or lacking the expected split.
    if not dataset or "train" not in dataset:
        print("Dataset not loaded correctly.")
        return {"text": "Dataset not available."}
    try:
        return dataset["train"][index]
    # KeyError included: some datasets versions wrap bad indices differently.
    except (IndexError, KeyError):
        print("Index out of range for dataset.")
        return {"text": "No example available"}
29
 
30
def initialize_client():
    """Create the Hugging Face inference client, returning None on failure.

    Failures (bad model id, network/auth problems) are reported on stdout so
    the app can still start and show a friendly error later.
    """
    print("Initializing inference client...")
    try:
        hf_client = InferenceClient("unsloth/Llama-3.2-1B-Instruct")
    except Exception as e:
        print(f"Error initializing inference client: {e}")
        return None
    print("Inference client initialized successfully!")
    return hf_client


# Shared client used by respond(); None when initialization failed.
client = initialize_client()
42
 
43
# Chatbot response logic
def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    """Generate a chat reply via the hosted model.

    Parameters
    ----------
    message : str
        The latest user message.
    history : list[tuple[str, str]]
        Prior (user, assistant) turns supplied by gr.ChatInterface.
    system_message : str
        System prompt prepended to the conversation.
    max_tokens, temperature, top_p
        Sampling controls forwarded to ``chat_completion``.

    Returns
    -------
    str
        The model's reply, or a human-readable error message.
    """
    # Fail fast with a clear message when initialization failed; previously a
    # None client raised AttributeError and was reported as an inference error.
    if client is None:
        return "Inference client is not available."

    messages = [{"role": "system", "content": system_message}]

    # Replay prior turns; skip empty halves of a turn.
    for user_turn, assistant_turn in history:
        if user_turn:
            messages.append({"role": "user", "content": user_turn})
        if assistant_turn:
            messages.append({"role": "assistant", "content": assistant_turn})

    messages.append({"role": "user", "content": message})

    try:
        print("Sending request to model...")
        response = client.chat_completion(
            messages,
            max_tokens=max_tokens,
            temperature=temperature,
            top_p=top_p,
        ).choices[0].message.content
        print("Response received successfully!")
        return response
    except Exception as e:
        print(f"Error during inference: {e}")
        return "An error occurred while generating a response."
 
 
80
 
81
# Smoke-check dataset integration by printing the first training record.
example_data = get_example_from_corpus(common_corpus, 0)
print("Example from dataset:", example_data)
84
 
85
# Gradio interface
def launch_demo():
    """Build the chat UI and start the Gradio server.

    Widget construction and launch stay inside one try so any Gradio failure
    is reported on stdout instead of crashing the process.
    """
    try:
        controls = [
            gr.Textbox(
                value="You are a friendly Chatbot. Your name is Juninho.",
                label="System message",
            ),
            gr.Slider(
                minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"
            ),
            gr.Slider(
                minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"
            ),
            gr.Slider(
                minimum=0.1,
                maximum=1.0,
                value=0.95,
                step=0.05,
                label="Top-p (nucleus sampling)",
            ),
        ]
        demo = gr.ChatInterface(respond, additional_inputs=controls)
        demo.launch()
    except Exception as e:
        print(f"Error launching Gradio app: {e}")


if __name__ == "__main__":
    launch_demo()