liuyizhang committed
Commit 60d92db • 1 Parent(s): 0220d50
update app.y
app.py CHANGED
@@ -55,8 +55,12 @@ extend_prompt_3 = True
 
 thanks_info = "Thanks: "
 if extend_prompt_1:
-
-
+    try:
+        extend_prompt_pipe = pipeline('text-generation', model='yizhangliu/prompt-extend', max_length=77, pad_token_id=0)
+        thanks_info += "[<a style='display:inline-block' href='https://huggingface.co/spaces/daspartho/prompt-extend' _blank><font style='color:blue;weight:bold;'>prompt-extend(1)</font></a>]"
+    except Exception as e:
+        extend_prompt_1 = False
+
 if extend_prompt_2:
     def load_prompter():
         prompter_model = AutoModelForCausalLM.from_pretrained("microsoft/Promptist")
@@ -64,7 +68,6 @@ if extend_prompt_2:
         tokenizer.pad_token = tokenizer.eos_token
         tokenizer.padding_side = "left"
         return prompter_model, tokenizer
-    prompter_model, prompter_tokenizer = load_prompter()
     def extend_prompt_microsoft(in_text):
         input_ids = prompter_tokenizer(in_text.strip()+" Rephrase:", return_tensors="pt").input_ids
         eos_id = prompter_tokenizer.eos_token_id
@@ -72,10 +75,18 @@ if extend_prompt_2:
         output_texts = prompter_tokenizer.batch_decode(outputs, skip_special_tokens=True)
         res = output_texts[0].replace(in_text+" Rephrase:", "").strip()
         return res
-
+    try:
+        prompter_model, prompter_tokenizer = load_prompter()
+        thanks_info += "[<a style='display:inline-block' href='https://huggingface.co/spaces/microsoft/Promptist' _blank><font style='color:blue;weight:bold;'>Promptist(2)</font></a>]"
+    except Exception as e:
+        extend_prompt_2 = False
+
 if extend_prompt_3:
-
-
+    try:
+        MagicPrompt = gr.Interface.load("spaces/Gustavosta/MagicPrompt-Stable-Diffusion")
+        thanks_info += "[<a style='display:inline-block' href='https://huggingface.co/spaces/Gustavosta/MagicPrompt-Stable-Diffusion' _blank><font style='color:blue;weight:bold;'>MagicPrompt(3)</font></a>]"
+    except Exception as e:
+        extend_prompt_3 = False
 
 do_dreamlike_photoreal = False
 if do_dreamlike_photoreal:
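
Note: the change is the same in all three branches of this commit: each prompt extender is now loaded inside a try/except, and on failure its flag is cleared so the rest of the Space still starts. A minimal sketch of the pattern for the first extender, condensed from the diff above (the credit-link HTML from thanks_info is shortened here; the surrounding Gradio app code is omitted):

    from transformers import pipeline

    extend_prompt_1 = True
    thanks_info = "Thanks: "

    if extend_prompt_1:
        try:
            # Load the yizhangliu/prompt-extend text-generation pipeline used to expand prompts.
            extend_prompt_pipe = pipeline('text-generation', model='yizhangliu/prompt-extend',
                                          max_length=77, pad_token_id=0)
            thanks_info += "[prompt-extend(1)]"  # credit link shortened from the original HTML
        except Exception as e:
            # If the model cannot be downloaded or loaded, disable this extender
            # instead of letting the whole app crash at startup.
            extend_prompt_1 = False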