loubnabnl (HF staff) committed
Commit f7b6a4b
Parent: 41d27ac

add multiprocessing

Files changed (1)
  1. app.py +5 -3
app.py CHANGED
@@ -19,8 +19,7 @@ def generate_code(model_name, gen_prompt, max_new_tokens, temperature, seed):
     url = f'https://hf.space/embed/loubnabnl/{model_name.lower()}-subspace/+/api/predict/'
     r = requests.post(url=url, json={"data": [gen_prompt, max_new_tokens, temperature, seed]})
     generated_text = r.json()['data'][0]
-    st.markdown(model_name)
-    st.code(generated_text)
+    return generated_text

 st.set_page_config(page_icon=":laptop:", layout="wide")

@@ -89,4 +88,7 @@ elif selected_task == "Code generation":
                                  max_new_tokens=max_new_tokens,
                                  temperature=temperature,
                                  seed=seed)
-    pool.map(generate_parallel, selected_models)
+    output = pool.map(generate_parallel, selected_models)
+    for i in range(len(output)):
+        st.markdown(selected_models[i])
+        st.code(output[i])
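
For context, a minimal sketch of how the full pattern might look once this commit is applied, assuming generate_parallel is built with functools.partial and pool is a multiprocessing.pool.ThreadPool; neither appears in the diff, and the model names and generation settings below are placeholders:

# Sketch only: the imports, ThreadPool, partial binding, and the placeholder
# model names / settings are assumptions; only the lines shown in the diff
# above are confirmed by the commit.
from functools import partial
from multiprocessing.pool import ThreadPool

import requests
import streamlit as st

def generate_code(model_name, gen_prompt, max_new_tokens, temperature, seed):
    # Query the per-model inference Space and return the generated text
    # (after this commit the function returns instead of rendering directly).
    url = f'https://hf.space/embed/loubnabnl/{model_name.lower()}-subspace/+/api/predict/'
    r = requests.post(url=url, json={"data": [gen_prompt, max_new_tokens, temperature, seed]})
    generated_text = r.json()['data'][0]
    return generated_text

selected_models = ["CodeParrot", "InCoder"]   # placeholder model names
gen_prompt, max_new_tokens, temperature, seed = "def hello_world():", 64, 0.2, 42

# Bind the shared generation settings so pool.map only varies the model name.
generate_parallel = partial(generate_code,
                            gen_prompt=gen_prompt,
                            max_new_tokens=max_new_tokens,
                            temperature=temperature,
                            seed=seed)

# One HTTP request per selected model; map preserves input order,
# so output[i] corresponds to selected_models[i].
with ThreadPool(len(selected_models)) as pool:
    output = pool.map(generate_parallel, selected_models)

for i in range(len(output)):
    st.markdown(selected_models[i])
    st.code(output[i])

A thread pool (rather than a process pool) would fit here because each call is an I/O-bound HTTP request to a remote Space, though the commit itself does not show which pool type is used.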