from io import StringIO

import pandas as pd
import requests
import wikipedia
from bs4 import BeautifulSoup
from smolagents import tool

@tool
def visitWikipedia(page_name: str) -> str:
    """
    Visit a Wikipedia page and return its content.

    Args:
        page_name (str): The name of the Wikipedia page to visit.

    Returns:
        str: The plain-text content of the Wikipedia page, or an error message.
    """
    try:
        page = wikipedia.page(page_name)
        return page.content
    except wikipedia.exceptions.DisambiguationError as e:
        # The title matches several articles; report the candidates instead of guessing.
        return f"Disambiguation error: {e}"
    except wikipedia.exceptions.PageError as e:
        # No article with this exact title exists.
        return f"Page error: {e}"

@tool
def visitWikipediaTable(url: str) -> list:
    """
    Visit a Wikipedia page and return all tables on the page as a list of pandas DataFrames.

    Args:
        url (str): The URL of the Wikipedia page to visit.

    Returns:
        list: A list of pandas DataFrames, one per "wikitable" on the page,
        or an error message string if the request fails.
    """
    try:
        response = requests.get(url)
        response.raise_for_status()
    except requests.exceptions.RequestException as e:
        return f"Request failed: {e}"

    soup = BeautifulSoup(response.content, "html.parser")
    tables = []
    for table in soup.find_all("table", class_="wikitable"):
        # pandas.read_html expects a file-like object, so wrap the table markup in StringIO.
        df = pd.read_html(StringIO(str(table)))[0]
        tables.append(df)
    return tables
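

# --- Illustrative usage sketch (an assumption, not part of the original Space) ---
# The `tool` import suggests these functions are meant to serve as smolagents tools,
# so a minimal agent wiring could look like this. CodeAgent and HfApiModel are
# standard smolagents entry points; the model choice and prompt are hypothetical.
if __name__ == "__main__":
    from smolagents import CodeAgent, HfApiModel

    agent = CodeAgent(
        tools=[visitWikipedia, visitWikipediaTable],
        model=HfApiModel(),  # defaults to a hosted Inference API model
    )
    agent.run("Using the Wikipedia tools, summarize the article on Ada Lovelace.")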