Cooper committed on
Commit c39f600
1 Parent(s): 8b2fe10

Add app file

Files changed (1)
  1. app.py +38 -0
app.py ADDED
@@ -0,0 +1,38 @@
+ import gradio as gr
+ from transformers import AutoModelForCausalLM, AutoTokenizer
+
+ def load_model(model_name):
+     model = AutoModelForCausalLM.from_pretrained(model_name)
+     tokenizer = AutoTokenizer.from_pretrained(model_name)
+     return model, tokenizer
+
+ def converse(prompt, model1_name, model2_name, temperature1, temperature2):
+     model1, tokenizer1 = load_model(model1_name)
+     model2, tokenizer2 = load_model(model2_name)
+
+     inputs = tokenizer1(prompt, return_tensors="pt")
+     outputs = model1.generate(inputs.input_ids, max_new_tokens=50, temperature=temperature1, do_sample=True, pad_token_id=tokenizer1.eos_token_id)
+     response1 = tokenizer1.decode(outputs[0], skip_special_tokens=True)
+
+     inputs = tokenizer2(response1, return_tensors="pt")
+     outputs = model2.generate(inputs.input_ids, max_new_tokens=50, temperature=temperature2, do_sample=True, pad_token_id=tokenizer2.eos_token_id)
+     response2 = tokenizer2.decode(outputs[0], skip_special_tokens=True)
+
+     return response1, response2
+
+ iface = gr.Interface(
+     fn=converse,
+     inputs=[
+         gr.Textbox(label="Input Prompt"),
+         gr.Dropdown(["gpt2", "distilgpt2", "EleutherAI/gpt-neo-2.7B"], label="Model 1"),
+         gr.Dropdown(["gpt2", "distilgpt2", "EleutherAI/gpt-neo-2.7B"], label="Model 2"),
+         gr.Slider(0.1, 1.0, step=0.1, value=0.7, label="Temperature for Model 1"),
+         gr.Slider(0.1, 1.0, step=0.1, value=0.7, label="Temperature for Model 2")
+     ],
+     outputs=["text", "text"],
+     title="Multi-Model Conversation",
+     description="Input a prompt to start a conversation between two models. Adjust temperatures for more diverse outputs."
+ )
+
+ if __name__ == "__main__":
+     iface.launch()