Nav772 committed on
Commit
ee8cd34
·
verified ·
1 Parent(s): 51bb11d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +16 -38
app.py CHANGED
@@ -10,52 +10,30 @@ DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
10
 
11
  # --- Basic Agent Definition ---
12
  # ----- THIS IS WHERE YOU CAN BUILD WHAT YOU WANT ------
 
 
13
  class BasicAgent:
14
  def __init__(self):
15
- print("FLAN-T5 Agent initialized.")
16
- self.api_token = os.getenv("HF_API_TOKEN")
17
- self.api_url = "https://api-inference.huggingface.co/models/google/flan-t5-base"
18
- self.headers = {
19
- "Authorization": f"Bearer {self.api_token}",
20
- "Content-Type": "application/json"
21
- }
 
22
 
23
  def __call__(self, question: str) -> str:
24
  print(f"Agent received question (first 50 chars): {question[:50]}...")
25
 
26
- if not self.api_token:
27
- return "Error: Missing Hugging Face API token."
28
-
29
- prompt = f"Answer this question:\n{question.strip()}"
30
-
31
- payload = {
32
- "inputs": prompt,
33
- "parameters": {
34
- "max_new_tokens": 256,
35
- "temperature": 0.5
36
- }
37
- }
38
-
39
  try:
40
- response = requests.post(self.api_url, headers=self.headers, json=payload, timeout=30)
41
- response.raise_for_status()
42
- output = response.json()
43
-
44
- # Extract output
45
- if isinstance(output, list) and "generated_text" in output[0]:
46
- return output[0]["generated_text"]
47
- elif isinstance(output, dict) and "generated_text" in output:
48
- return output["generated_text"]
49
- elif isinstance(output, list) and "output" in output[0]:
50
- return output[0]["output"]
51
- else:
52
- print(f"Unexpected response: {output}")
53
- return "Model returned an unexpected format."
54
-
55
  except Exception as e:
56
- print(f"❌ Exception during inference: {e}")
57
- return f"❌ API Error: {str(e)}"
58
-
59
 
60
  def run_and_submit_all( profile: gr.OAuthProfile | None):
61
  """
 
10
 
11
  # --- Basic Agent Definition ---
12
  # ----- THIS IS WHERE YOU CAN BUILD WHAT YOU WANT ------
13
+ from transformers import pipeline
14
+
15
class BasicAgent:
    """Agent that answers questions with a locally loaded FLAN-T5-small model.

    Loads a ``text2text-generation`` pipeline once in ``__init__`` (CPU,
    ``device=-1``) and reuses it for every call; no API token or network
    round-trip is needed after the model is downloaded.
    """

    def __init__(self):
        print("FLAN-T5-SMALL Local Agent initialized.")

        self.pipeline = pipeline(
            "text2text-generation",
            model="google/flan-t5-small",
            tokenizer="google/flan-t5-small",
            device=-1
        )

    def __call__(self, question: str) -> str:
        """Return the model's answer to *question*, stripped of whitespace.

        Returns an error string (never raises) when inference fails or the
        pipeline output has an unexpected shape.
        """
        print(f"Agent received question (first 50 chars): {question[:50]}...")

        try:
            prompt = f"Answer the following question:\n{question.strip()}"
            # NOTE(review): `temperature` has no effect unless `do_sample=True`
            # is also passed; generation here is greedy — confirm intended.
            result = self.pipeline(prompt, max_new_tokens=128, temperature=0.5)

            # Guard against an empty or malformed result before indexing —
            # the original `result[0]["generated_text"]` raised on [].
            if not result or "generated_text" not in result[0]:
                print(f"Unexpected pipeline output: {result}")
                return "Model returned an unexpected format."

            answer = result[0]["generated_text"]
            return answer.strip()
        except Exception as e:
            print(f"❌ Error during model inference: {e}")
            return f"❌ Model Error: {str(e)}"
 
37
 
38
  def run_and_submit_all( profile: gr.OAuthProfile | None):
39
  """