Commit: "Update app.py" — diff view of app.py (changed)
@@ -3,14 +3,14 @@ from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
|
|
3 |
|
4 |
def greet(name):
|
5 |
|
6 |
-
tokenizer = AutoTokenizer.from_pretrained("zjunlp/MolGen")
|
7 |
-
model = AutoModelForSeq2SeqLM.from_pretrained("zjunlp/MolGen")
|
8 |
|
9 |
-
sf_input = tokenizer(name, return_tensors="pt")
|
10 |
# beam search
|
11 |
-
molecules = model.generate(input_ids=sf_input["input_ids"],
|
12 |
-
sf_output = [tokenizer.decode(g, skip_special_tokens=True, clean_up_tokenization_spaces=True).replace(" ","") for g in molecules]
|
13 |
-
|
14 |
|
15 |
iface = gr.Interface(fn=greet, inputs="text", outputs="text")
|
16 |
iface.launch()
|
|
|
3 |
|
4 |
def greet(name):
    """Return the input text unchanged.

    Placeholder handler for the Gradio interface. The original MolGen
    SELFIES-generation pipeline (zjunlp/MolGen tokenizer + seq2seq model,
    beam-search generate, decode) was disabled in this commit, leaving the
    app as a simple echo while the model path is worked out.

    Args:
        name: Text entered in the Gradio textbox.

    Returns:
        The same string, unmodified.
    """
    # NOTE(review): to restore molecule generation, reload
    # zjunlp/MolGen with AutoTokenizer/AutoModelForSeq2SeqLM,
    # tokenize `name`, call model.generate(...), and decode.
    return name
|
14 |
|
15 |
# Minimal Gradio text-in / text-out UI wired to `greet`; launch() starts
# the local web server (blocking) when the script is run.
iface = gr.Interface(fn=greet, inputs="text", outputs="text")
iface.launch()