Loubna ben allal committed
Commit b2d10c3
1 Parent(s): e6bed89

update app

Files changed (1)
  1. app.py +12 -11
app.py CHANGED
@@ -37,8 +37,8 @@ tokenizer1 = load_tokenizer("lvwerra/codeparrot")
 model1 = load_model("lvwerra/codeparrot")
 tokenizer2 = load_tokenizer("facebook/incoder-1B")
 model2 = load_model("facebook/incoder-1B")
-tokenizer3 = load_tokenizer("facebook/opt-1.3b")
-model3 = load_model("facebook/opt-1.3b")
+#tokenizer3 = load_tokenizer("facebook/opt-1.3b")
+#model3 = load_model("facebook/opt-1.3b")
 pipelines = {}
 for model in models:
     if model == "CodeParrot":
@@ -47,10 +47,10 @@ for model in models:
         tokenizer = load_tokenizer("facebook/incoder-1B")
         model = load_model("facebook/incoder-1B")
         pipelines[model] = pipeline("text-generation", model=model2, tokenizer=tokenizer2)
-    else:
-        tokenizer = load_tokenizer("facebook/opt-1.3b")
-        model = load_model("facebook/opt-1.3b")
-        pipelines[model] = pipeline("text-generation", model=model3, tokenizer=tokenizer3)
+    #else:
+    #    tokenizer = load_tokenizer("facebook/opt-1.3b")
+    #    model = load_model("facebook/opt-1.3b")
+    #    pipelines[model] = pipeline("text-generation", model=model3, tokenizer=tokenizer3)
 
 examples = load_examples()
 example_names = [example["name"] for example in examples]
@@ -94,8 +94,9 @@ elif selected_task == "Code generation":
     if st.button("Generate code!"):
         with st.spinner("Generating code..."):
             for model in selected_models:
-                st.markdown(f"{men(pipelines)} model is {model} keys {pipelines.keys()}:")
-                pipe = pipelines[model]
-                generated_text = pipe(gen_prompt, **gen_kwargs)[0]['generated_text']
-                st.markdown(f"{model}:")
-                st.code(generated_text)
+                if model != "OPT":
+                    st.markdown(f"{men(pipelines)} model is {model} keys {pipelines.keys()}:")
+                    pipe = pipelines[model]
+                    generated_text = pipe(gen_prompt, **gen_kwargs)[0]['generated_text']
+                    st.markdown(f"{model}:")
+                    st.code(generated_text)
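Two details of the loops touched by this diff are worth flagging. In the construction loop, the loop variable `model` (a model name from `models`) is reassigned by `model = load_model(...)` before being used as the dictionary key, so `pipelines` appears to end up keyed by model objects rather than by the names later looked up from `selected_models`; and `men(pipelines)` in the debug `st.markdown` looks like a typo for `len(pipelines)`. Below is a minimal sketch, not the committed code, of how the two loops could be written to avoid the shadowing. It assumes the app's existing helpers (`load_tokenizer`, `load_model`) and Streamlit-side variables (`models`, `selected_models`, `gen_prompt`, `gen_kwargs`) are defined as elsewhere in app.py; the CHECKPOINTS mapping and the "InCoder" label are illustrative assumptions.

from transformers import pipeline
import streamlit as st

# Hypothetical name -> checkpoint mapping; adjust to the checkpoints app.py actually uses.
CHECKPOINTS = {
    "CodeParrot": "lvwerra/codeparrot",
    "InCoder": "facebook/incoder-1B",
    # "OPT": "facebook/opt-1.3b",  # disabled in this commit
}

# Build one pipeline per *name*, without reusing the loop variable for the model object.
pipelines = {}
for name in models:                    # `models` comes from the app's model selection
    checkpoint = CHECKPOINTS[name]
    tok = load_tokenizer(checkpoint)   # existing app.py helper
    mdl = load_model(checkpoint)       # existing app.py helper
    pipelines[name] = pipeline("text-generation", model=mdl, tokenizer=tok)

# Generation loop: look pipelines up by the same name used as the key above.
if st.button("Generate code!"):
    with st.spinner("Generating code..."):
        for name in selected_models:
            if name not in pipelines:  # e.g. "OPT" while its pipeline is commented out
                continue
            generated_text = pipelines[name](gen_prompt, **gen_kwargs)[0]["generated_text"]
            st.markdown(f"{name}:")
            st.code(generated_text)

Keying the dictionary by name also makes the `if model != "OPT"` guard added in this commit unnecessary: a missing pipeline is simply skipped, whatever the reason it was not built.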