fatlindaramadani committed on
Commit
75de315
·
verified ·
1 Parent(s): c9cf36c

Upload app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -13
app.py CHANGED
@@ -1,23 +1,21 @@
1
  import os
2
  from transformers import pipeline
3
  from huggingface_hub import login
4
- import gradio as gr # ✅ You were missing this
5
 
6
- # Login with HF token
7
  hf_token = os.getenv("HF_TOKEN")
 
8
  if not hf_token:
9
  raise RuntimeError("Hugging Face token not found. Please set HF_TOKEN as a secret.")
10
 
 
11
  login(token=hf_token, new_session=True)
12
 
13
- # Load the model set device to 0 for GPU Space, or -1 for CPU (but Mistral is too big for CPU)
14
- llm = pipeline(
15
- "text-generation",
16
- model="mistralai/Mistral-7B-Instruct-v0.1",
17
- device=0, # ✅ set to 0 if using GPU Space
18
- )
19
 
20
- # Context for RDF
21
  ontology_context = """
22
  :Patient_001 rdf:type :Patient .
23
  :Patient_001 :hasDisease :Diabetes_Mellitus .
@@ -26,7 +24,7 @@ ontology_context = """
26
  :Metformin rdf:type :Drug .
27
  """
28
 
29
- # Define function to generate RDF triple response
30
  def ask_rdf_question(user_input):
31
  prompt = f"""
32
  Ti je një asistent mjekësor që jep përgjigje vetëm në formën e triples RDF.
@@ -36,10 +34,10 @@ Konteksti:
36
  Pyetja: {user_input}
37
  Përgjigju vetëm me triples pa ndonjë shpjegim tjetër.
38
  """
39
- result = llm(prompt, max_new_tokens=200, do_sample=False)
40
- return result[0]['generated_text'].strip()
41
 
42
- # Launch Gradio interface
43
  gr.Interface(
44
  fn=ask_rdf_question,
45
  inputs="text",
 
import os

import torch  # used only to probe for CUDA before choosing a pipeline device
from transformers import pipeline
from huggingface_hub import login
import gradio as gr

# Get the Hugging Face token from the environment; the Space must define
# HF_TOKEN as a secret, otherwise we cannot authenticate against the Hub.
hf_token = os.getenv("HF_TOKEN")

if not hf_token:
    raise RuntimeError("Hugging Face token not found. Please set HF_TOKEN as a secret.")

# Log in to the Hugging Face Hub
login(token=hf_token, new_session=True)

# Load a lightweight language model that works on CPU/GPU with small memory.
# NOTE: hardcoding device=0 crashes on CPU-only hardware; select GPU 0 only
# when CUDA is actually available, otherwise fall back to CPU (device=-1).
llm = pipeline(
    "text-generation",
    model="sshleifer/tiny-gpt2",
    device=0 if torch.cuda.is_available() else -1,
)
 
 
 
 
17
 
18
+ # RDF ontology context
19
  ontology_context = """
20
  :Patient_001 rdf:type :Patient .
21
  :Patient_001 :hasDisease :Diabetes_Mellitus .
 
24
  :Metformin rdf:type :Drug .
25
  """
26
 
27
+ # Function to format RDF-style answers
28
  def ask_rdf_question(user_input):
29
  prompt = f"""
30
  Ti je një asistent mjekësor që jep përgjigje vetëm në formën e triples RDF.
 
34
  Pyetja: {user_input}
35
  Përgjigju vetëm me triples pa ndonjë shpjegim tjetër.
36
  """
37
+ response = llm(prompt, max_new_tokens=100, do_sample=False)[0]['generated_text']
38
+ return response.strip()
39
 
40
+ # Gradio UI
41
  gr.Interface(
42
  fn=ask_rdf_question,
43
  inputs="text",