#!/usr/bin/env python
# -*- coding:utf-8 -*-
# @FileName  :dataset_info_aio.py
# @Time      :2023/10/20 
# @Author    :CL
# @email     :1037654919@qq.com
# TODO: not yet working — script has not run successfully end-to-end


# !/usr/bin/env python
# -*- coding:utf-8 -*-
# @FileName  :asyncio_1.py
# @Time      :2023/10/5
# @Author    :CL
# @email     :1037654919@qq.com
import asyncio
import datetime
from loguru import logger
import aiohttp
from  aiohttp_socks import ProxyConnector

import json
import time
import pandas as pd
import requests
from lxml import etree
from bs4 import BeautifulSoup
from utils import mongo_manager
# Mongo collection holding dataset seeds; documents are expected to carry an
# 'id' field (Hugging Face dataset id) and a 'status' field — TODO confirm
# schema against utils.mongo_manager.
hugging_face_datasets = mongo_manager('hugging_face_datasets',db = 'datasets')
# requests-style proxy map; NOTE(review): unused by the asyncio code below,
# which builds its own SOCKS5 proxy URL — presumably kept for requests calls.
proxies ={'http':'127.0.0.1:15732',
          'https':'127.0.0.1:15732'}
# Browser-like request headers captured from a real huggingface.co session.
headers = {
    "authority": "huggingface.co",
    "accept": "*/*",
    "accept-language": "zh-CN,zh;q=0.9",
    "cache-control": "no-cache",
    "pragma": "no-cache",
    "referer": "https://huggingface.co/datasets?task_categories=task_categories:feature-extraction&p=1&sort=trending",
    "sec-ch-ua": "\"Not.A/Brand\";v=\"8\", \"Chromium\";v=\"114\", \"Google Chrome\";v=\"114\"",
    "sec-ch-ua-mobile": "?0",
    "sec-ch-ua-platform": "\"Linux\"",
    "sec-fetch-dest": "empty",
    "sec-fetch-mode": "cors",
    "sec-fetch-site": "same-origin",
    "user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36"
}
# Session cookies copied from a browser; the aws-waf-token is short-lived and
# will likely expire — NOTE(review): these are not actually passed to the
# ClientSession below, verify whether they are still needed.
cookies = {
    "__stripe_mid": "e0b77284-b1ae-4576-8f32-a09cbd0c2e7447d809",
    "_ga": "GA1.1.1717933824.1694677947",
    "__stripe_sid": "36ab759b-6b2c-4221-a3f5-a69d0c25ad08757de6",
    "_ga_8Q63TH4CSL": "GS1.1.1697679921.2.1.1697681246.60.0.0",
    "aws-waf-token": "fc77edbf-7de2-49c8-b0ce-b099b6b325fd:EwoAw7IQod0AAAAA:UoftFBdySVRFz1PM31q8YOLsF8bwlaq2Vt9Gog016UC7YgeJY7S7EPaSMkldDFg4ks57Ie/Gdn5xis+FCMiU1yxvFbFi1fKSMajBFi4VgMv3wsZ3OZJcQglf6bJb0Xm+zZ1Lm/eeNSQwK+7XCWiZOqkbRU1ayIm6GMJ9az0W8oag350xT4WcEZCCkF4XiWagBb1s4c2llrWnJuti9KViuV5quGQ4mdjvTfz0+cl1eOyCihl8pNRo"
}

async def fetch_content(session, url):
    """Fetch *url* with *session* and return a human-readable status string.

    Returns one of:
      - "<url> 请求成功"                when the response status is 200
      - "<url>请求状态失败 <status>"     for any other HTTP status
      - "<url> 请求失败: <error>"        on timeout or client error

    Never raises for network/timeout problems, so a single bad URL cannot
    abort the surrounding ``asyncio.gather``.
    """
    try:
        async with session.get(url, timeout=10) as response:
            if response.status == 200:
                # Drain the body so the connection can be reused by the pool;
                # the content itself is not needed for the status message.
                await response.text()
                return f"{url} 请求成功"
            else:
                return f"{url}请求状态失败 {response.status}"
    except asyncio.TimeoutError as e:
        # Bug fix: the 10s timeout raises asyncio.TimeoutError, which is NOT
        # an aiohttp.ClientError — previously it escaped and killed gather().
        return f"{url} 请求失败: {e}"
    except aiohttp.ClientError as e:
        return f"{url} 请求失败: {e}"


async def main():
    """Crawl the Hugging Face dataset page for every seed in the module-level
    ``seeds`` cursor (set in the ``__main__`` guard), routed through a SOCKS5
    proxy, with at most ``concurrency`` requests in flight at once.

    Results (status strings from ``fetch_content``) are logged via loguru.
    """
    concurrency = 10  # maximum number of simultaneous in-flight requests

    # NOTE(review): hard-coded credentials — move to environment variables or
    # a config file before sharing this script.
    username = '1037654919@qq.com'
    password = 'CHENGLEI112358'
    proxy_url = f"socks5://{username}:{password}@127.0.0.1:15732"
    # Route every request through the SOCKS5 proxy.  (The previous
    # TCPConnector(limit=20) was created and immediately overwritten — dead
    # code and a leaked connector — so it is removed.)
    connector = ProxyConnector.from_url(url=proxy_url)

    semaphore = asyncio.Semaphore(concurrency)

    async def _bounded_fetch(session, url):
        # Hold the semaphore for the WHOLE request.  The original acquired it
        # only while *creating* the task, which never limited concurrency.
        async with semaphore:
            return await fetch_content(session, url)

    async with aiohttp.ClientSession(connector=connector) as session:
        tasks = [
            asyncio.create_task(
                _bounded_fetch(session, 'https://huggingface.co/datasets/' + seed['id'])
            )
            for seed in seeds
        ]
        results = await asyncio.gather(*tasks)

    # Plain loop: logging is a side effect, not a list to build.
    for result in results:
        logger.info(result)

if __name__ == '__main__':
    # Time the whole crawl: pull up to 1000 unprocessed seed documents from
    # Mongo, run the async fetch loop, then report the elapsed wall time.
    start = datetime.datetime.now()
    print(start)
    seeds = hugging_face_datasets.findAll({'status': None}).limit(1000)
    asyncio.run(main())
    end = datetime.datetime.now()
    print(end, '--------', end - start)
