#!/usr/bin/env python
# -*- coding:utf-8 -*-
# @FileName  :dataset_category.py
# @Time      :2023/10/19 
# @Author    :CL
# @email     :1037654919@qq.com
# Fetch detailed dataset metadata; keep the Hugging Face login session alive and throttle the request rate.
# Imports grouped per PEP 8: stdlib, third-party, then project-local.
import time

import requests
from bs4 import BeautifulSoup

from utils import mongo_manager

# MongoDB collection that stores the scraped dataset documents.
hugging_face_datasets = mongo_manager('hugging_face_datasets', db='datasets')

# Local proxy every outbound request is routed through
# (presumably a local tunnel — confirm the port is still valid).
proxies = {
    'http': '127.0.0.1:15732',
    'https': '127.0.0.1:15732',
}

# Browser-like request headers so huggingface.co treats these calls
# as normal site traffic.
headers = {
    "authority": "datasets-server.huggingface.co",
    "accept": "*/*",
    "accept-language": "zh-CN,zh;q=0.9",
    "access-control-request-headers": "authorization",
    "access-control-request-method": "GET",
    "cache-control": "no-cache",
    "origin": "https://huggingface.co",
    "pragma": "no-cache",
    "referer": "https://huggingface.co/",
    "sec-fetch-dest": "empty",
    "sec-fetch-mode": "cors",
    "sec-fetch-site": "same-site",
}

# NOTE(review): these cookie values (analytics IDs, an AWS WAF token)
# are copied from a live browser session and will expire; refresh them
# from a logged-in browser when requests start getting rejected.
cookies = {
    "__stripe_mid": "e0b77284-b1ae-4576-8f32-a09cbd0c2e7447d809",
    "_ga": "GA1.1.1717933824.1694677947",
    "__stripe_sid": "36ab759b-6b2c-4221-a3f5-a69d0c25ad08757de6",
    "_ga_8Q63TH4CSL": "GS1.1.1697679921.2.1.1697681246.60.0.0",
    "aws-waf-token": "fc77edbf-7de2-49c8-b0ce-b099b6b325fd:EwoAw7IQod0AAAAA:UoftFBdySVRFz1PM31q8YOLsF8bwlaq2Vt9Gog016UC7YgeJY7S7EPaSMkldDFg4ks57Ie/Gdn5xis+FCMiU1yxvFbFi1fKSMajBFi4VgMv3wsZ3OZJcQglf6bJb0Xm+zZ1Lm/eeNSQwK+7XCWiZOqkbRU1ayIm6GMJ9az0W8oag350xT4WcEZCCkF4XiWagBb1s4c2llrWnJuti9KViuV5quGQ4mdjvTfz0+cl1eOyCihl8pNRo"
}
def get_url():
    """Scrape the task-category links from the Hugging Face /datasets page
    and persist each one to the ``hugging_datasets_url`` collection.

    Each anchor is stored as ``{'_id': href, 'name': ..., 'href': ...}``;
    an insert failure (most likely a duplicate ``_id``) means the URL was
    scraped before and is skipped. The Mongo connection is always closed,
    even when scraping fails.
    """
    hugging_datasets_url = mongo_manager('hugging_datasets_url', db='datasets')
    try:
        url = 'https://huggingface.co/datasets'
        response = requests.get(url, headers=headers, cookies=cookies,
                                proxies=proxies)
        # Fail loudly on HTTP errors instead of crashing later on None.
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'lxml')
        container = soup.find('div', class_='mb-20 lg:mb-4')
        if container is None:
            # Page layout changed; nothing to scrape.
            return
        for anchor in container.find_all('a'):
            href = 'https://huggingface.co' + anchor['href']
            name = anchor.text.strip()
            result = {'_id': href, 'name': name, 'href': href}
            try:
                hugging_datasets_url.insertOne(result)
            except Exception:
                # Most likely a duplicate _id: this URL was stored before.
                pass
    finally:
        hugging_datasets_url.close()
# get_url()  # one-time bootstrap: uncomment to (re)build the category URL list

def get_datasets(params=None):
    """Fetch one page of dataset metadata from the datasets-json endpoint.

    Parameters
    ----------
    params : dict or None
        Query parameters for the endpoint. ``None`` selects the first
        (``p=None``) trending page of the ``feature-extraction`` category,
        matching the original default behavior.

    Returns
    -------
    dict or None
        The parsed JSON payload on HTTP 200, otherwise ``None``.
    """
    if params is None:
        # Built fresh per call: a dict used as a default argument would be
        # shared (and mutable) across all calls.
        params = {
            "task_categories": "task_categories:feature-extraction",
            "p": None,
            "sort": "trending",
        }
    url = "https://huggingface.co/datasets-json"
    response = requests.get(url, headers=headers, cookies=cookies,
                            params=params, proxies=proxies)
    # (Removed `requests.session().close()`: it created a brand-new Session
    # and closed it immediately — a no-op that only looked like cleanup.)
    print(response)  # quick visual progress indicator, e.g. <Response [200]>
    if response.status_code == 200:
        return response.json()
    return None

def _upsert_dataset(data, task_category):
    """Insert *data* into hugging_face_datasets, updating in place when the
    ``_id`` already exists (a manual upsert). Always sets the ``category``,
    ``_id`` and ``url`` fields — the original code skipped ``url`` for every
    page after the first."""
    data['category'] = task_category
    data['_id'] = data['id']
    data['url'] = 'https://huggingface.co/datasets/' + data['id']
    try:
        hugging_face_datasets.insertOne(data)
    except Exception:
        # Duplicate _id: the dataset was stored on an earlier run — refresh it.
        hugging_face_datasets.updateOne({"_id": data["_id"]}, data)


if __name__ == "__main__":
    hugging_datasets_url = mongo_manager('hugging_datasets_url', db='datasets')
    # Only process category URLs that have not been crawled yet.
    seeds = hugging_datasets_url.findAll({'status': None})
    for seed in seeds:
        # hrefs look like ...?task_categories=task_categories%3A<slug>;
        # the part after the URL-encoded colon is the category slug.
        task_category = str(seed['href']).split('%3A')[-1]

        # First request (p=None) also reports the total item count.
        response = get_datasets(params={
            "task_categories": f"task_categories:{task_category}",
            "p": None,
            "sort": "trending",
        })
        for data in response['datasets']:
            _upsert_dataset(data, task_category)

        numTotalItems = response['numTotalItems']
        print(f'begin :{task_category}: numTotalItems :{numTotalItems}')
        # 30 items per page; +2 keeps the original (slightly generous) upper
        # bound so the final partial page is never skipped.
        for page in range(1, int(numTotalItems / 30) + 2):
            try:
                page_response = get_datasets(params={
                    "task_categories": f"task_categories:{task_category}",
                    "p": page,
                    "sort": "trending",
                })
                for data in page_response['datasets']:
                    _upsert_dataset(data, task_category)
            except Exception:
                print(f"{task_category} page {page} failed")

        # Mark this category done so it is skipped on the next run.
        seed['status'] = 'success'
        hugging_datasets_url.updateOne({"_id": seed["_id"]}, seed)
        time.sleep(10)  # throttle to stay under Hugging Face rate limits