Spaces:
Sleeping
Sleeping
zasharepw77
Локально запускается нормально, но слабовата: например, время в Вашингтоне называет как в Нью-Йорке, хотя переменную называет правильно...
a33f671
| from smolagents import CodeAgent,DuckDuckGoSearchTool, HfApiModel, load_tool, tool, LiteLLMModel | |
| import datetime | |
| import requests | |
| import json | |
| import pytz | |
| import yaml | |
| import os | |
| from tools.final_answer import FinalAnswerTool | |
| from Gradio_UI import GradioUI | |
| # Start local | |
| # source .venv/bin/activate | |
| # Below is an example of a tool that does nothing. Amaze us with your creativity ! | |
def my_cutom_tool(arg1: str, arg2: int) -> str:  # it's important to specify the return type
    # Keep this format for the description / args / args description, but feel
    # free to modify the tool.  This is the course's placeholder example: it
    # accepts two arguments and ignores both.
    """A tool that does nothing yet
    Args:
        arg1: the first argument
        arg2: the second argument
    """
    placeholder_reply = "What magic will you build ?"
    return placeholder_reply
def get_current_time_in_timezone(timezone: str) -> str:
    """A tool that fetches the current local time in a specified timezone.
    Args:
        timezone: A string representing a valid timezone (e.g., 'America/New_York').
    """
    try:
        # Resolve the IANA name to a tzinfo object; pytz raises
        # UnknownTimeZoneError for names it does not recognize.
        zone = pytz.timezone(timezone)
        stamp = datetime.datetime.now(zone).strftime("%Y-%m-%d %H:%M:%S")
    except Exception as e:
        # Any failure (bad name, formatting issue) is reported as text so the
        # agent can read it instead of crashing.
        return f"Error fetching time for timezone '{timezone}': {str(e)}"
    return f"The current local time in {timezone} is: {stamp}"
def get_current_weather_by_city(city: str) -> str:
    """Retrieves the current and forecasted weather for a specified city.
    Args:
        city: The name of the city (e.g., "New York")
    Returns:
        A string with the current temperature and hourly forecast, or a
        human-readable error message on any failure.
    """
    # NOTE(review): the unused `tool` import at the top of the file suggests the
    # @tool decorators may have been lost when this file was pasted — confirm
    # that CodeAgent accepts these functions without the decorator.
    # Step 1: resolve the city name to coordinates via Open-Meteo's geocoding API.
    geocoding_url = (
        "https://geocoding-api.open-meteo.com/v1/search"
        f"?name={city}&count=1&language=en&format=json"
    )
    response_geocoding = requests.get(geocoding_url, timeout=10)
    if response_geocoding.status_code != 200:
        return f"Geocoding failed. Status code: {response_geocoding.status_code}"

    data_geocoding = response_geocoding.json()
    results = data_geocoding.get('results') or []
    if not results:
        return "No results found for the specified city."

    result = results[0]
    lat = result.get('latitude')
    lon = result.get('longitude')
    if lat is None or lon is None:
        return "Latitude and Longitude not found."

    # Step 2: fetch current weather + hourly temperatures for those coordinates.
    # BUG FIX: the query string previously contained '¤t_weather=true' — a
    # mojibake of '&current_weather=true' (the '&curren' substring was eaten as
    # the HTML entity &curren;), so the API never returned current_weather.
    weather_url = (
        f"https://api.open-meteo.com/v1/forecast?latitude={lat}&longitude={lon}"
        "&current_weather=true&hourly=temperature_2m"
    )
    response_weather = requests.get(weather_url, timeout=10)
    if response_weather.status_code != 200:
        return f"Failed to retrieve weather data. Status code: {response_weather.status_code}"

    data_weather = response_weather.json()
    # Missing keys degrade to None / empty list instead of raising KeyError.
    current_temp = data_weather.get('current_weather', {}).get('temperature')
    hourly_temps = data_weather.get('hourly', {}).get('temperature_2m', [])

    weather_info = f"Current temperature: {current_temp}°C\n"
    weather_info += "Hourly temperatures:\n"
    for temp in hourly_temps:
        weather_info += f"{temp}°C\n"
    return weather_info
# --- Manual smoke test of the weather tool (kept for reference, disabled) ---
#result = get_current_weather_by_city("New York")
#final_answer = f"The current weather in New York is:\n{result}"
#print(final_answer)

# Tool the agent must call to deliver its final reply; passed to CodeAgent below.
final_answer = FinalAnswerTool()

# Local setup reminder: export the Hugging Face token before running, e.g.:
# Local: mcedit ~/.bashrc
# export hf_token="your_hugging_face_token"
# source ~/.bashrc
# Toggle marker: uncommenting this (and its twin below) disables the hosted model.
#'''
# Hosted model served through the Hugging Face Inference API.
model = HfApiModel(
    max_tokens=2096,
    temperature=0.5,
    model_id='Qwen/Qwen2.5-72B-Instruct',
    custom_role_conversions=None,
    # NOTE(review): os.getenv returns None when `hf_token` is unset — the API
    # call will then fail at request time, not here. Confirm the env var name.
    token=os.getenv('hf_token'),
)
#'''
# Disabled alternative: a local model through Ollama via LiteLLM. The Russian
# notes inside the string say: "Local model: ollama started fine;
# qwen2.5-coder:7b copes somewhat."
'''
# Локальная модель:
# ollama = Запустился номально, qwen2.5-coder:7b немного справляется
model = LiteLLMModel(
    model_id="ollama/qwen2.5-coder:7b",
    max_tokens=2096,
    temperature=0.6,
    api_base="http://localhost:11434",
    num_ctx=8192
)
'''
# print("Model Image")
# Disabled example of importing a tool from the Hub:
# image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)

# The agent's system/planning prompt templates live in prompts.yaml.
with open("prompts.yaml", 'r') as prompts_file:
    templates = yaml.safe_load(prompts_file)

# Add your tools here — but don't remove final_answer: the agent needs it to
# terminate a run.
available_tools = [
    final_answer,
    get_current_time_in_timezone,
    get_current_weather_by_city,
]

agent = CodeAgent(
    model=model,
    tools=available_tools,
    max_steps=6,
    verbosity_level=1,
    grammar=None,
    planning_interval=None,
    name=None,
    description=None,
    prompt_templates=templates,
)

print("Start GradioUI")
GradioUI(agent).launch()