lruizap committed on
Commit
2c20702
•
1 Parent(s): b7e0564

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -6
app.py CHANGED
@@ -1,5 +1,5 @@
1
  from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
2
- from transformers import pipeline
3
  import torch
4
  import gradio as gr
5
  from huggingface_hub import InferenceClient
@@ -12,9 +12,9 @@ model = AutoModelForSeq2SeqLM.from_pretrained("Kaludi/chatgpt-gpt4-prompts-bart-
12
  client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
13
  # pipe = pipeline("text-generation", model="HuggingFaceH4/zephyr-7b-alpha",torch_dtype=torch.bfloat16, device_map="auto")
14
 
15
- def generateZep(inputuno, inputdos):
16
  prompt = inputuno
17
- promptdos = inputdos
18
 
19
  generate_kwargs = dict(
20
  temperature=0.9,
@@ -39,9 +39,9 @@ def generateZep(inputuno, inputdos):
39
  # },
40
  # ]
41
 
42
- formatted_prompt = f"<s>[INST] {new_prompt} {promptdos} [/INST]"
43
 
44
- stream = client.text_generation(formatted_prompt, stream=True, details=True, return_full_text=False)
45
  output = ""
46
 
47
  for response in stream:
@@ -60,6 +60,6 @@ output_component = gr.Textbox(label="Output: ")
60
  examples = [["photographer"], ["developer"], ["teacher"], ["human resources staff"], ["recipe for ham croquettes"]]
61
  description = ""
62
 
63
- PerfectGPT = gr.Interface(fn=generateZep, inputs=[input_prompt, input_promptdos], outputs=output_component, examples=examples, title="🗿 PerfectGPT v1 🗿", description=description)
64
 
65
  PerfectGPT.launch()
 
1
  from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
2
+ # from transformers import pipeline
3
  import torch
4
  import gradio as gr
5
  from huggingface_hub import InferenceClient
 
12
  client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
13
  # pipe = pipeline("text-generation", model="HuggingFaceH4/zephyr-7b-alpha",torch_dtype=torch.bfloat16, device_map="auto")
14
 
15
+ def generateZep(inputuno):
16
  prompt = inputuno
17
+ # promptdos = inputdos
18
 
19
  generate_kwargs = dict(
20
  temperature=0.9,
 
39
  # },
40
  # ]
41
 
42
+ formatted_prompt = f"<s>[INST] {new_prompt} [/INST]"
43
 
44
+ stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
45
  output = ""
46
 
47
  for response in stream:
 
60
  examples = [["photographer"], ["developer"], ["teacher"], ["human resources staff"], ["recipe for ham croquettes"]]
61
  description = ""
62
 
63
+ PerfectGPT = gr.Interface(fn=generateZep, inputs=input_prompt, outputs=output_component, examples=examples, title="🗿 PerfectGPT v1 🗿", description=description)
64
 
65
  PerfectGPT.launch()