explorewithai committed on
Commit 29bf8cc
1 Parent(s): fc06f51

Update app.py

Files changed (1): app.py +2 -2
app.py CHANGED
@@ -6,11 +6,11 @@ device = 0 if torch.cuda.is_available() else -1
 
 def generate_response(user_input, history, temperature=0.5, do_sample=True):
     pipe = pipeline("text-generation", model="frameai/ChatFrame-Instruct-Persian-Small", device=device)
-    pipe.set_params(temperature=temperature, do_sample=do_sample)
+
     messages = [
         {"role": "user", "content": user_input},
     ]
-    response = pipe(messages, max_length=1024)
+    response = pipe(messages, max_length=1024, temperature=temperature, do_sample=do_sample)
     return response[0]['generated_text'][1]["content"]
 
 iface = gr.ChatInterface(
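
For context, the change removes the call to pipe.set_params(...), which is not a method the transformers text-generation pipeline provides, and instead passes the sampling arguments directly in the pipeline call. Below is a minimal runnable sketch of app.py after this commit; only generate_response and the device line are visible in the hunk, so the imports and the gr.ChatInterface(...) arguments are assumptions.

# Sketch of app.py after this commit. The imports and the ChatInterface
# wiring below are assumed; only the changed function appears in the diff.
import torch
import gradio as gr
from transformers import pipeline

device = 0 if torch.cuda.is_available() else -1

def generate_response(user_input, history, temperature=0.5, do_sample=True):
    # The pipeline is rebuilt on every call, as in the diff; a module-level
    # pipeline would avoid reloading the model for each request.
    pipe = pipeline(
        "text-generation",
        model="frameai/ChatFrame-Instruct-Persian-Small",
        device=device,
    )

    messages = [
        {"role": "user", "content": user_input},
    ]
    # Sampling arguments go directly into the pipeline call, replacing the
    # removed pipe.set_params(...), which the pipeline object does not expose.
    response = pipe(messages, max_length=1024, temperature=temperature, do_sample=do_sample)
    # With chat-style input, generated_text holds the message list; index 1
    # is the newly generated assistant reply.
    return response[0]["generated_text"][1]["content"]

# Assumed wiring: the diff only shows that "iface = gr.ChatInterface(" begins here.
iface = gr.ChatInterface(fn=generate_response)

if __name__ == "__main__":
    iface.launch()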