import aiohttp
import asyncio

async def fetch_content(url):
    """Download *url* asynchronously and return the response body as text."""
    session = aiohttp.ClientSession()
    async with session:
        response = await session.get(url)
        async with response:
            return await response.text()

from bs4 import BeautifulSoup

def parse_html(html_content):
    """Extract trending repositories from a GitHub /trending HTML page.

    Returns a list of dicts with keys: repo_name, repo_url, description,
    stars, forks, language, today_stars. Optional fields that are missing
    from the markup are returned as None instead of raising.
    """
    soup = BeautifulSoup(html_content, 'html.parser')
    repos = []
    for article in soup.find_all('article', class_='Box-row'):
        title_element = article.h2.a
        repo_name = title_element.text.strip()
        repo_url = 'https://github.com' + title_element['href'].strip()

        # Look the tag up once (the original called find() twice per row).
        description_tag = article.find('p', {'class': 'col-9'})
        description = description_tag.text.strip() if description_tag else None

        language_tag = article.find('span', {'itemprop': 'programmingLanguage'})
        language = language_tag.text.strip() if language_tag else None

        # Stars and forks share the same link class; guard the indices so a
        # markup change doesn't raise IndexError (the original indexed blindly).
        muted_links = article.find_all('a', {'class': 'Link--muted'})
        stars = muted_links[0].text.strip() if len(muted_links) > 0 else None
        forks = muted_links[1].text.strip() if len(muted_links) > 1 else None

        # Text looks like "123 stars today" -> keep the leading count.
        # The tag can be absent; the original would raise AttributeError here.
        today_tag = article.find('span', {'class': 'd-inline-block'})
        today_stars = today_tag.text.strip().split(' ')[0] if today_tag else None

        repos.append({
            'repo_name': repo_name,
            'repo_url': repo_url,
            'description': description,
            'stars': stars,
            'forks': forks,
            'language': language,
            'today_stars': today_stars
        })
    return repos

async def main():
    """Fetch GitHub's trending page and print one dict per repository."""
    trending_url = 'https://github.com/trending'
    page = await fetch_content(trending_url)
    for entry in parse_html(page):
        print(entry)

# Guard the entry point so importing this module does not kick off a scrape.
if __name__ == '__main__':
    asyncio.run(main())
#AT_zxL1oVcmGsr1kiqt5p5l9Z76kVH8KIe3
#UID_ZirBbQ55dxXPD9L2PKY17oFu8ILp