umarmajeedofficial committed
Commit 1b9d15b · verified · 1 Parent(s): 1c63c3f

Create app.py

Files changed (1)
  1. app.py +56 -0
app.py ADDED
@@ -0,0 +1,56 @@
+import torch
+from transformers import pipeline
+import gradio as gr
+
+# Initialize the model pipeline
+pipe = pipeline("text-generation", model="umarmajeedofficial/TinyLlama-1.1B-Chat-v1.0-FineTuned-By-MixedIntelligence", torch_dtype=torch.bfloat16, device_map="auto")
+
+# Function to generate response
+def generate_response(question):
+    # Define messages with a clear prompt for a concise answer
+    messages = [
+        {"role": "system", "content": "You are an expert in emergency situations and environmental issues. Provide concise and direct answers."},
+        {"role": "user", "content": question}
+    ]
+
+    # Generate the prompt
+    prompt = pipe.tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
+
+    # Generate the output
+    outputs = pipe(prompt, max_new_tokens=150, do_sample=False)  # Reduced tokens for concise answers
+
+    # Post-process the output to clean up the response
+    generated_text = outputs[0]["generated_text"]
+
+    # Clean up the response
+    # If the response echoes the system prompt or question, strip it out
+    start_index = generated_text.find(question)
+    if start_index != -1:
+        clean_response = generated_text[start_index + len(question):].strip()
+    else:
+        clean_response = generated_text.strip()
+
+    # Remove leftover special tokens such as `</s>` and the `<|assistant|>` role tag
+    clean_response = clean_response.replace("</s>", "").replace("<|assistant|>", "").strip()
+
+    return clean_response
+
+# Gradio UI
+def qa_interface():
+    with gr.Blocks() as demo:
+        gr.Markdown("# Emergency Helper")
+        gr.Markdown("Developed by Mixed Intelligence Team")
+
+        with gr.Row():
+            with gr.Column():
+                emergency_question = gr.Textbox(label="Ask your question about emergency situations or environmental issues", placeholder="e.g., How to survive in an earthquake?")
+                submit_btn = gr.Button("Submit")
+                output = gr.Textbox(label="Response", placeholder="The answer will appear here...", lines=5)
+
+        submit_btn.click(generate_response, inputs=emergency_question, outputs=output)
+
+    return demo
+
+# Launch the Gradio UI
+demo = qa_interface()
+demo.launch()
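
Once this Space is running, the click handler wired to submit_btn can also be called programmatically. Below is a minimal client-side sketch using gradio_client; the Space id and the "/generate_response" api_name are assumptions (recent Gradio versions derive the default api_name from the handler's function name), so adjust both to match the deployed Space.

# Minimal sketch, not part of app.py; the Space id and api_name below are assumed placeholders
from gradio_client import Client

client = Client("umarmajeedofficial/emergency-helper")  # hypothetical Space id
answer = client.predict(
    "How to survive in an earthquake?",  # value for the emergency_question Textbox
    api_name="/generate_response",       # assumed default api_name for generate_response
)
print(answer)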