johann22 committed on
Commit
d02e668
1 Parent(s): d5f6a98

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +17 -12
app.py CHANGED
@@ -29,12 +29,12 @@ date_time_str = now.strftime("%Y-%m-%d %H:%M:%S")
29
  #client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
30
  history = []
31
 
32
- def gen_from_infer(purpose,history,image,model_drop,chat_drop,choice):
33
  #out_img = infer(out_prompt)
34
  history.clear()
35
  if seed == 0:
36
  seed = random.randint(1,1111111111111111)
37
- out_prompt=generate(purpose,history,chat_drop)
38
  history.append((purpose,out_prompt))
39
  yield (history,None)
40
  infer_model = models[int(model_drop)]
@@ -54,18 +54,23 @@ def gen_from_infer(purpose,history,image,model_drop,chat_drop,choice):
54
  )
55
  yield (history,out_img)
56
 
57
def format_prompt(message, history):
    """Assemble a Mixtral-style instruction prompt from prior chat turns plus the new message.

    Each (user, bot) pair in *history* is replayed as
    ``[INST] user [/INST] bot</s> ``; the new *message* is appended
    as a final ``[INST] ... [/INST]`` segment awaiting a response.
    """
    replayed = [f"[INST] {user_turn} [/INST] {bot_turn}</s> " for user_turn, bot_turn in history]
    return "<s>" + "".join(replayed) + f"[INST] {message} [/INST]"
 
 
 
 
 
64
 
65
- def run_gpt(in_prompt,history,model_drop,seed=0):
66
  client = InferenceClient(c_models[int(model_drop)])
67
  print(f'history :: {history}')
68
- prompt=format_prompt(in_prompt,history)
69
  if seed == 0:
70
  seed = random.randint(1,1111111111111111)
71
  print (seed)
@@ -88,7 +93,7 @@ def run_gpt(in_prompt,history,model_drop,seed=0):
88
  def run_idefics(in_prompt,history,model_drop,seed):
89
  client = InferenceClient("HuggingFaceM4/idefics-9b-instruct")
90
  print(f'history :: {history}')
91
- prompt=format_prompt(in_prompt,history)
92
  seed = random.randint(1,1111111111111111)
93
  print (seed)
94
  generate_kwargs = dict(
 
29
  #client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
30
  history = []
31
 
32
+ def gen_from_infer(purpose,history,image,model_drop,chat_drop,choice,seed):
33
  #out_img = infer(out_prompt)
34
  history.clear()
35
  if seed == 0:
36
  seed = random.randint(1,1111111111111111)
37
+ out_prompt=generate(purpose,history,chat_drop,seed)
38
  history.append((purpose,out_prompt))
39
  yield (history,None)
40
  infer_model = models[int(model_drop)]
 
54
  )
55
  yield (history,out_img)
56
 
57
def format_prompt(message, history, seed):
    """Assemble a Mixtral-style instruction prompt from chat history plus the new message.

    Args:
        message: the new user message.
        history: list of (user_prompt, bot_response) pairs.
        seed: when non-zero the new message is appended as a final
              ``[INST] ... [/INST]`` segment; a zero seed yields only the
              replayed history (gating kept from the original code —
              NOTE(review): the intent of seed-gating is unclear, confirm).

    Returns:
        The assembled prompt string.
    """
    prompt = "<s>"
    for user_prompt, bot_response in history:
        prompt += f"[INST] {user_prompt} [/INST]"
        prompt += f" {bot_response}</s> "
    # BUGFIX: the original did `for user_prompt, bot_response in history[d]`
    # with d = len(history) - 1, which iterates a single (user, bot) tuple and
    # unpacks each *string* into two names (ValueError for non-empty history),
    # and raised IndexError (history[-1]) on an empty history. Compare the
    # last turn's user prompt against the new message directly instead.
    if history:
        last_user_prompt, _last_bot_response = history[-1]
        if last_user_prompt == message:
            print(last_user_prompt)
    if seed != 0:
        prompt += f"[INST] {message} [/INST]"
    return prompt
69
 
70
+ def run_gpt(in_prompt,history,model_drop,seed):
71
  client = InferenceClient(c_models[int(model_drop)])
72
  print(f'history :: {history}')
73
+ prompt=format_prompt(in_prompt,history,seed)
74
  if seed == 0:
75
  seed = random.randint(1,1111111111111111)
76
  print (seed)
 
93
  def run_idefics(in_prompt,history,model_drop,seed):
94
  client = InferenceClient("HuggingFaceM4/idefics-9b-instruct")
95
  print(f'history :: {history}')
96
+ prompt=format_prompt(in_prompt,history,seed)
97
  seed = random.randint(1,1111111111111111)
98
  print (seed)
99
  generate_kwargs = dict(