5m4ck3r committed on
Commit
d6c4994
1 Parent(s): ce052d7

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +17 -34
app.py CHANGED
@@ -1,36 +1,19 @@
1
- from transformers import AutoModelForCausalLM, AutoTokenizer
2
- import torch
3
  import gradio as gr
4
 
5
# DialoGPT-large: a GPT-2-based conversational causal LM. Both the tokenizer
# and the weights are downloaded from the Hugging Face Hub on first run
# (then cached), so module import can take a while on a cold start.
tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-large")
model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-large")
7
-
8
def chat_with_history(message, chat_history=None):
    """Generate a DialoGPT reply to *message*, conditioned on prior turns.

    Args:
        message: The new user utterance.
        chat_history: List of ``[user, bot]`` string pairs from earlier
            turns; a fresh list is created when ``None``. Updated in place
            with the new turn.

    Returns:
        The bot's reply as a plain string.
    """
    if chat_history is None:
        chat_history = []

    # Encode the new user input, terminated by EOS as DialoGPT expects.
    new_user_input_ids = tokenizer.encode(
        message + tokenizer.eos_token, return_tensors='pt'
    )

    # Re-encode the full conversation so far. BUG FIX: the original encoded
    # only pair[0] (the user side) of each turn, silently dropping every bot
    # reply — the model never saw its own previous answers.
    past_ids = [
        tokenizer.encode(turn + tokenizer.eos_token, return_tensors='pt')
        for pair in chat_history
        for turn in pair
    ]
    bot_input_ids = torch.cat(past_ids + [new_user_input_ids], dim=-1)

    # Cap prompt + completion at 1000 tokens; pad with EOS because the
    # GPT-2 family has no dedicated pad token.
    chat_history_ids = model.generate(
        bot_input_ids, max_length=1000, pad_token_id=tokenizer.eos_token_id
    )

    # Decode only the newly generated tokens (everything after the prompt).
    response = tokenizer.decode(
        chat_history_ids[:, bot_input_ids.shape[-1]:][0],
        skip_special_tokens=True,
    )

    # Record the completed turn for the next call.
    chat_history.append([message, response])
    return response
29
-
30
# Minimal chat UI; gr.ChatInterface supplies (message, history) to the
# callback, so chat_with_history's optional second parameter is filled in.
demo = gr.ChatInterface(
    chat_with_history,
    title="Multi Chat Bot",
    examples=["hey how are you ?", "hola", "Yo!"],
)
demo.launch()
 
1
+ # Use a pipeline as a high-level helper
2
+ from transformers import pipeline
3
  import gradio as gr
4
 
5
# Text-generation pipeline; MagicPrompt expands a short seed prompt into a
# detailed Stable Diffusion prompt. The model downloads (and is cached) on
# first run, so startup can be slow on a cold container.
pipe = pipeline("text-generation", model="Gustavosta/MagicPrompt-Stable-Diffusion")
6
+
7
def genPrompt(text: str, total: int) -> str:
    """Generate `total` prompt variations for `text` and join them.

    Each variation comes from an independent pipeline call; newlines inside
    a generation are flattened to spaces, and variations are separated by a
    blank line.

    Args:
        text: Seed prompt to expand.
        total: Number of variations to produce (the Gradio Slider may pass
            a float, so it is truncated to int).

    Returns:
        The joined variations; "" when total <= 0. (The original returned
        None in that case, and its `if not ret` test also treated an empty
        generated string as "no result yet", dropping separators.)
    """
    count = int(total)
    variations = []
    for _ in range(count):
        result = pipe(text, max_length=1000, min_length=50)[0]
        # Guard against a missing "generated_text" key — the original's
        # bare .get(...) would hand None to .replace and crash.
        cleaned = (result.get("generated_text") or "").replace("\n", " ")
        variations.append(cleaned)
    return "\n\n".join(variations)
17
+
18
# Wire the generator to a simple UI: a text box for the seed prompt and a
# slider (0-10, default 4) choosing how many variations to produce.
seed_box = gr.Text()
count_slider = gr.Slider(0, 10, 4)
appli = gr.Interface(
    fn=genPrompt,
    inputs=[seed_box, count_slider],
    outputs="text",
)
appli.launch(debug=True)