ML610 committed on
Commit 4550ee1
1 Parent(s): 09b32c1

Create app.py

Files changed (1)
  1. app.py +30 -0
app.py ADDED
@@ -0,0 +1,30 @@
+ import gradio as gr
+ from ctransformers import AutoModelForCausalLM
+ from huggingface_hub import hf_hub_download
+
+ model = AutoModelForCausalLM.from_pretrained("TheBloke/Mistral-7B-Instruct-v0.1-GGUF", model_file="mistral-7b-instruct-v0.1.Q5_K_S.gguf", model_type="mistral", gpu_layers=0)
+
+ basePrompt = """#YOUR ROLE: You are a helpful, respectful, and honest online forum moderator. You are designed to detect and moderate online content.
+ You must ensure that any online content shared with you does not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content.
+ Please ensure that your answers are socially unbiased and positive in nature.
+ If any online content shared with you does not make sense or is not factually coherent, convey the same.
+ If you don't know whether the online content shared with you should be moderated or not, then please convey the same instead of deciding whether to moderate or not.
+
+ #Below is the online content which has to be screened for moderation:"""
+
+ def generateResponse(prompt):
+     prompt = f"<s>[INST]\n{basePrompt}\n\n{prompt}\n[/INST]"
+     return model(prompt)
+
+ title = "Mistral-7B-Instruct-GGUF"
+ description = "This Space is an attempt to run the GGUF 5-bit quantized version (Q5_K_S) of 'Mistral-7B-Instruct'."
+
+ UI = gr.Interface(
+     fn=generateResponse,
+     inputs=gr.Textbox(label="prompt", placeholder="Ask your queries here...."),
+     outputs=gr.Textbox(label="Response"),
+     title=title,
+     description=description
+ )
+
+ UI.launch()