broadfield committed
Update app.py
app.py
CHANGED
@@ -27,7 +27,7 @@ clients = [
     {'type':'text','name':'meta-llama/Llama-3.3-70B-Instruct','rank':'pro','max_tokens':16384,'schema':{'bos':'<|im_start|>','eos':'<|im_end|>'}},
     {'type':'text','name':'mistralai/Mixtral-8x7B-Instruct-v0.1','rank':'op','max_tokens':40000,'schema':{'bos':'<s>','eos':'</s>'}},
 ]
-def generate(prompt,history,mod=2,tok=
+def generate(prompt,history,mod=2,tok=4000,seed=1,role="ASSISTANT",data=None):
     #print("#####",history,"######")
     gen_images=False
     client=InferenceClient(clients[int(mod)]['name'])
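For context, a minimal sketch of how the updated signature might be exercised, assuming the Space drives the selected model through huggingface_hub's InferenceClient.text_generation. The text_generation call and its max_new_tokens/seed parameters are illustrative assumptions, not code from this commit, and only the two client entries visible in the hunk are reproduced.

# Sketch only: reconstructs the visible pieces of app.py around this change.
# Anything beyond what the diff shows of generate() is a guess, not the Space's code.
from huggingface_hub import InferenceClient

clients = [
    # Only the two entries visible in this hunk; the Space defines more before line 27.
    {'type': 'text', 'name': 'meta-llama/Llama-3.3-70B-Instruct', 'rank': 'pro',
     'max_tokens': 16384, 'schema': {'bos': '<|im_start|>', 'eos': '<|im_end|>'}},
    {'type': 'text', 'name': 'mistralai/Mixtral-8x7B-Instruct-v0.1', 'rank': 'op',
     'max_tokens': 40000, 'schema': {'bos': '<s>', 'eos': '</s>'}},
]

def generate(prompt, history, mod=2, tok=4000, seed=1, role="ASSISTANT", data=None):
    # Select the model entry by index, as in the diff's clients[int(mod)]['name'].
    client = InferenceClient(clients[int(mod)]['name'])
    # Hypothetical call: request up to `tok` new tokens with a fixed seed.
    return client.text_generation(prompt, max_new_tokens=tok, seed=seed)

# Example call (mod=0 so the index stays within the two entries shown here):
# print(generate("Hello!", history=[], mod=0))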