ishoboy commited on
Commit
b7b60f4
1 Parent(s): a3b6952

first commit

Browse files

First try on both Gradio and Hugging Face Spaces.

Files changed (1) hide show
  1. app.py +34 -0
app.py ADDED
@@ -0,0 +1,34 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from huggingface_hub import snapshot_download

# Run inference on CPU (no GPU assumed on the hosting Space).
device = "cpu"

# Download the model snapshot to a local directory. `token=False` replaces the
# deprecated `use_auth_token` kwarg and forces an anonymous download, which is
# fine for this public repo.
repo_id = "amgadhasan/phi-2"
model_path = snapshot_download(
    repo_id=repo_id,
    repo_type="model",
    local_dir="./phi-2",
    token=False,
)

tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)

# Force new tensors to float32 so the model runs on CPU (original comment:
# "for compatibility with CPU").
torch.set_default_dtype(torch.float32)

# NOTE(review): device_map="auto" requires the `accelerate` package; on a
# CPU-only host it should place the whole model on CPU — confirm `accelerate`
# is listed in the Space's requirements.
model = AutoModelForCausalLM.from_pretrained(
    model_path,
    device_map="auto",
    trust_remote_code=True,
)
def generate(prompt):
    """Run generation on *prompt* and return the decoded completion.

    The decoded text includes the prompt itself; ``max_length=200`` caps the
    total token count (prompt tokens + generated tokens).
    """
    encoded = tokenizer(prompt, return_tensors="pt").to(device)
    generated_ids = model.generate(**encoded, max_length=200)
    return tokenizer.decode(generated_ids[0], skip_special_tokens=True)
def ask_question(user_question):
    """Answer *user_question* with an academic-style model response.

    Typing "quit" (any casing; surrounding whitespace is ignored) ends the
    session instead of querying the model.

    Args:
        user_question: Free-form question text from the Gradio textbox.

    Returns:
        The model completion, or a goodbye message when the user quits.
    """
    # Robustness fix: strip whitespace so inputs like " Quit " also end the
    # session (the original only matched an exact lowercase-folded "quit").
    if user_question.strip().lower() == "quit":
        return "Session ended. Goodbye!"
    # Frame the request so the model answers in an academic register.
    prompt = f"Academic response to the question about basic science subjects: {user_question}"
    return generate(prompt)
# Wire a single-textbox UI to ask_question; share=True also exposes a public
# gradio.live link in addition to the local server.
iface = gr.Interface(
    fn=ask_question,
    inputs="text",
    outputs="text",
)
iface.launch(share=True)