Not-Grim-Refer committed on
Commit cccb6e7
1 Parent(s): d3fbab9

Update app.py

Files changed (1)
  1. app.py +4 -7
app.py CHANGED
@@ -1,17 +1,14 @@
 import gradio as gr
-from transformers import AutoTokenizer, AutoModel
+from transformers import AutoTokenizer, AutoModelForCausalLM
 
 tokenizer_code2desc = AutoTokenizer.from_pretrained("microsoft/codebert-base")
-model_code2desc = AutoModel.from_pretrained("microsoft/codebert-base")
+model_code2desc = AutoModelForCausalLM.from_pretrained("microsoft/codebert-base")
 
 def code_to_description(code: str) -> str:
-    inputs = tokenizer_code2desc.encode("summarize: " + code, return_tensors="pt", max_length=512)
-    # Updated generate function
+    inputs = tokenizer_code2desc.encode("summarize: " + code, return_tensors="pt", max_length=512, truncation=True)
     outputs = model_code2desc.generate(inputs, max_length=150, num_return_sequences=1, no_repeat_ngram_size=2, do_sample=True, top_k=50, top_p=0.95, temperature=0.8)
-    # Updated decode function
     description = tokenizer_code2desc.decode(outputs[0], skip_special_tokens=True, clean_up_tokenization_spaces=True)
     return description
 
-iface = gr.Interface(fn=code_to_description, inputs="text", outputs="text")
+iface = gr.Interface(fn=code_to_description, inputs="text", outputs="text", share=True)
 iface.launch()
-
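
Note that the committed version still pairs microsoft/codebert-base, an encoder-only checkpoint that is not trained for text generation, with AutoModelForCausalLM, and it passes share=True to gr.Interface() even though share is normally a launch() option in Gradio. Below is a minimal sketch of a variant that should actually produce code summaries; it is not part of the commit and assumes the Salesforce/codet5-base-multi-sum checkpoint with an AutoModelForSeq2SeqLM head, neither of which appears in app.py as committed.

```python
# Hypothetical variant of app.py (not from the commit): swaps in a seq2seq
# checkpoint trained for code summarization. The model name below is an
# assumption; the commit itself keeps microsoft/codebert-base.
import gradio as gr
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

MODEL_NAME = "Salesforce/codet5-base-multi-sum"  # assumed checkpoint
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME)


def code_to_description(code: str) -> str:
    # Tokenize the snippet, truncating to the 512-token input limit.
    inputs = tokenizer(code, return_tensors="pt", max_length=512, truncation=True)
    # Sampling settings mirror the committed generate() call.
    outputs = model.generate(
        inputs["input_ids"],
        attention_mask=inputs["attention_mask"],
        max_length=150,
        no_repeat_ngram_size=2,
        do_sample=True,
        top_k=50,
        top_p=0.95,
        temperature=0.8,
    )
    return tokenizer.decode(
        outputs[0], skip_special_tokens=True, clean_up_tokenization_spaces=True
    )


iface = gr.Interface(fn=code_to_description, inputs="text", outputs="text")
# share is a launch() option in Gradio, so it is passed here rather than to Interface().
iface.launch(share=True)
```

For short code summaries, deterministic beam search (num_beams=4, do_sample=False) is another reasonable choice in place of the sampling settings carried over from the commit.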