akoksal committed
Commit d214fd2
1 Parent(s): ffda6f4
Files changed (1)
  1. app.py +13 -7
app.py CHANGED
@@ -2,8 +2,8 @@ import gradio as gr
 from transformers import AutoTokenizer, pipeline
 import torch
 
-tokenizer1 = AutoTokenizer.from_pretrained("notexist/ttt2")
-tdk1 = pipeline('text-generation', model='notexist/ttt2', tokenizer=tokenizer1)
+tokenizer1 = AutoTokenizer.from_pretrained("notexist/tttf")
+tdk1 = pipeline('text-generation', model='notexist/tttf', tokenizer=tokenizer1)
 tokenizer2 = AutoTokenizer.from_pretrained("notexist/ttte")
 tdk2 = pipeline('text-generation', model='notexist/ttte', tokenizer=tokenizer2)
 
@@ -23,11 +23,14 @@ def predict(name, sl, topk, topp):
             max_length=64,
             top_k=topk,
             top_p=topp,
-            num_return_sequences=1,
+            num_return_sequences=2,
             repetition_penalty=sl
         )[0]["generated_text"]
-
-        return x1[len(f"<|endoftext|>"):]+"\n\n"+x2[len(f"<|endoftext|>{new_name}\n\n"):].replace("[TEXT]", " "+new_name+" ")
+
+        if "[TEXT]" not in x2:
+            return x1[len(f"<|endoftext|>"):]
+        else:
+            return x1[len(f"<|endoftext|>"):]+"\n\n"+x2[len(f"<|endoftext|>{new_name}\n\n"):].replace("[TEXT]", " "+new_name+" ")
     else:
         x1 = tdk1(f"<|endoftext|>{name}\n\n",
                   do_sample=True,
@@ -42,11 +45,14 @@ def predict(name, sl, topk, topp):
             max_length=64,
             top_k=topk,
             top_p=topp,
-            num_return_sequences=1,
+            num_return_sequences=2,
             repetition_penalty=sl
         )[0]["generated_text"]
 
-        return x1[len(f"<|endoftext|>{name}\n\n"):]+"\n\n"+x2[len(f"<|endoftext|>{name}\n\n"):].replace("[TEXT]", " "+name+" ")
+        if "[TEXT]" not in x2:
+            return x1[len(f"<|endoftext|>{name}\n\n"):]
+        else:
+            return x1[len(f"<|endoftext|>{name}\n\n"):]+"\n\n"+x2[len(f"<|endoftext|>{name}\n\n"):].replace("[TEXT]", " "+name+" ")
 
 
 
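For reference, a minimal sketch (not part of the commit) of what the new "[TEXT]" guard does with the two pipelines' outputs. It mirrors the second branch, which slices with f"<|endoftext|>{name}\n\n"; the helper name, the name value, and the sample strings are hypothetical stand-ins.

# Minimal sketch of the post-processing added in this commit (second branch).
# `x1` and `x2` stand in for tdk1(...)[0]["generated_text"] and
# tdk2(...)[0]["generated_text"]; combine_outputs is a hypothetical helper.
def combine_outputs(name: str, x1: str, x2: str) -> str:
    prefix = f"<|endoftext|>{name}\n\n"
    first = x1[len(prefix):]
    # New guard: if the second model never emitted the "[TEXT]" placeholder,
    # fall back to the first model's text alone.
    if "[TEXT]" not in x2:
        return first
    # Otherwise splice the name into the placeholder, as before.
    return first + "\n\n" + x2[len(prefix):].replace("[TEXT]", " " + name + " ")

# Hypothetical outputs:
print(combine_outputs("Ada", "<|endoftext|>Ada\n\nfirst text", "<|endoftext|>Ada\n\nno placeholder"))      # first text only
print(combine_outputs("Ada", "<|endoftext|>Ada\n\nfirst text", "<|endoftext|>Ada\n\nhi [TEXT] there"))     # both parts, name spliced in

Note that the diff also raises num_return_sequences from 1 to 2 while still indexing [0]["generated_text"], so only the first of the two sampled sequences reaches the returned string.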