RoomSamurai commited on
Commit
00fc258
·
1 Parent(s): ef9084d

update from local

Browse files
Files changed (5) hide show
  1. app.py +1 -1
  2. local_submit.py +33 -0
  3. metadata.jsonl +0 -0
  4. models/openai_model.py +18 -0
  5. requirements.txt +8 -5
app.py CHANGED
@@ -1,4 +1,4 @@
1
- from smolagents import CodeAgent,DuckDuckGoSearchTool, HfApiModel,load_tool,tool
2
  import datetime
3
  import requests
4
  import pytz
 
1
+ from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel, load_tool, tool
2
  import datetime
3
  import requests
4
  import pytz
local_submit.py ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
"""Run the first MAX_QUESTIONS metadata questions through OpenAIModel and
write the answers to submission.jsonl in the expected submission format."""
import json

from dotenv import load_dotenv

from models.openai_model import OpenAIModel

# How many questions to evaluate locally; raise to run the full set.
MAX_QUESTIONS = 20

load_dotenv()  # pulls OPENAI_API_KEY (and friends) from a local .env

# metadata.jsonl: one JSON object per line with at least "task_id" and "Question".
with open("metadata.jsonl", "r", encoding="utf-8") as f:
    # Skip blank lines so a trailing newline doesn't crash json.loads.
    questions = [json.loads(line) for line in f if line.strip()]

model = OpenAIModel()

results = []
for q in questions[:MAX_QUESTIONS]:
    print(f"\nTask ID: {q['task_id']}")
    print(f"Question: {q['Question']}")
    try:
        answer = model.run(q["Question"])
    except Exception as e:
        # Best-effort run: record a sentinel so the submission stays aligned
        # with the question list instead of aborting the whole batch.
        print("Error:", e)
        answer = "error"
    results.append({
        "task_id": q["task_id"],
        "model_answer": str(answer).strip(),
        "reasoning_trace": None
    })

with open("submission.jsonl", "w", encoding="utf-8") as f:
    for r in results:
        f.write(json.dumps(r) + "\n")
metadata.jsonl ADDED
The diff for this file is too large to render. See raw diff
 
models/openai_model.py ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ from openai import OpenAI
3
+
4
class OpenAIModel:
    """Minimal wrapper around the OpenAI chat-completions API.

    The API key is read from the ``OPENAI_API_KEY`` environment variable.
    """

    def __init__(self, model="gpt-4", temperature=0.5, max_tokens=2048):
        """Create a client and remember the sampling settings.

        Args:
            model: Chat model name passed to the API.
            temperature: Sampling temperature for completions.
            max_tokens: Upper bound on the length of each reply.
        """
        self.client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
        self.model = model
        self.temperature = temperature
        self.max_tokens = max_tokens

    def run(self, prompt):
        """Send *prompt* as a single user message and return the stripped reply.

        Returns "" when the API yields no text content — ``message.content``
        can legitimately be None (e.g. refusals / tool calls), and calling
        ``.strip()`` on it would raise AttributeError.
        """
        response = self.client.chat.completions.create(
            model=self.model,
            messages=[{"role": "user", "content": prompt}],
            temperature=self.temperature,
            max_tokens=self.max_tokens,
        )
        content = response.choices[0].message.content
        return content.strip() if content else ""
requirements.txt CHANGED
@@ -1,6 +1,9 @@
1
- markdownify
2
- smolagents
3
- requests
4
- duckduckgo_search
5
  pandas
6
- requests
 
 
 
 
1
+ markdownify~=1.1.0
2
+ smolagents~=1.14.0
3
+ requests~=2.32.3
4
+ duckduckgo_search~=8.0.1
5
  pandas
6
+ requests  # NOTE: duplicate — already pinned as requests~=2.32.3 above; drop one of the two
7
+ pytz~=2025.2
8
+ PyYAML~=6.0.2
9
+ openai>=1.0.0