#!/usr/bin/env python
# -*- coding:utf-8 -*-
# @FileName  :cidianwang_lishi.py
# @Time      :2023/11/10 
# @Author    :CL
# @email     :1037654919@qq.com
# Scrape https://www.cidianwang.com/lishi/ (history section of cidianwang.com)
from bs4 import BeautifulSoup
import requests
from retrying import retry
from utils import mongo_manager, get_kuai_proxy

# MongoDB collection handle for scraped entries (project helper from utils;
# collection "cidainwang_lishi" in database "public_data").
cidainwang_lishi = mongo_manager("cidainwang_lishi", db='public_data')

# Browser-like request headers (copied from a real Chrome session) to avoid
# trivial bot blocking by the site.
headers = {
    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
    "Accept-Language": "zh-CN,zh;q=0.9",
    "Cache-Control": "no-cache",
    "Connection": "keep-alive",
    "Pragma": "no-cache",
    "Sec-Fetch-Dest": "document",
    "Sec-Fetch-Mode": "navigate",
    "Sec-Fetch-Site": "none",
    "Sec-Fetch-User": "?1",
    "Upgrade-Insecure-Requests": "1",
    "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36",
    "sec-ch-ua": "\"Not.A/Brand\";v=\"8\", \"Chromium\";v=\"114\", \"Google Chrome\";v=\"114\"",
    "sec-ch-ua-mobile": "?0",
    "sec-ch-ua-platform": "\"Linux\""
}
# Cookies captured from a browser visit; Hm_* are Baidu analytics cookies
# with fixed timestamp values from the capture session.
cookies = {
    "Hm_lvt_e0eec4afa8ab3236c032068920f81d60": "1699509492",
    "Hm_lpvt_e0eec4afa8ab3236c032068920f81d60": "1699509492"
}


def get_lishi(url='https://www.cidianwang.com/lishi/shijian/'):
    """Scrape one listing page of cidianwang.com/lishi/.

    Parameters
    ----------
    url : str
        Listing-page URL, e.g. ``https://www.cidianwang.com/lishi/shijian/``
        or ``.../shijian/index3.htm``.

    Returns
    -------
    list[dict]
        One dict per entry with keys ``type`` (category tag parsed from the
        URL), ``name``, ``href`` (absolute detail URL) and ``intru``
        (introduction text). Empty list on any request/parse failure
        (best-effort: errors are printed, not raised).
    """
    lists = []
    # Category tag is the first path segment after /lishi/.
    tag = url.replace('https://www.cidianwang.com/lishi/', '').split('/')[0]
    try:
        # timeout added so a dead proxy cannot hang the whole crawl
        response = requests.get(url, proxies=get_kuai_proxy(), headers=headers,
                                cookies=cookies, timeout=30)
        response.encoding = "utf-8"
        print(url, response)
        soups = BeautifulSoup(response.text, 'lxml')
        # hoist the shared container lookup instead of repeating the find
        left = soups.find('div', class_='left')
        renwulist = left.find('ul').find_all('li')
        intruduces = left.find('ul').find_all('div')
        for data, intruduce in zip(renwulist, intruduces):
            name = data.find('a').get_text()
            href = 'https://www.cidianwang.com' + data.find('a').get('href')
            intru = intruduce.get_text().strip()
            lists.append({'type': tag, 'name': name, 'href': href, 'intru': intru})
    except Exception as e:
        # was `except BaseException`, which also swallowed KeyboardInterrupt
        # and SystemExit, making the crawler hard to stop with Ctrl-C
        print(e)
    return lists


def down_lishi_list():
    """Crawl every listing page of each history category and upsert into Mongo.

    For each category, walks pages 0..999 (page 0 is the bare category URL,
    page i is ``index{i}.htm``) and stops as soon as a page yields fewer
    than 20 entries (last — or failed — page).
    """
    categories = ('renwu', 'shijian', 'diangu', 'diming', 'guanzhi', 'zhishi')
    base = 'https://www.cidianwang.com/lishi'
    for tag in categories:
        print(f'begin {tag}')
        for page in range(1000):
            # the first page has no index suffix
            suffix = '' if page == 0 else f'index{page}.htm'
            entries = get_lishi(f'{base}/{tag}/{suffix}')
            print(len(entries))
            for entry in entries:
                entry['_id'] = entry['href']
                entry['type'] = tag
                try:
                    cidainwang_lishi.insertOne(entry)
                except Exception:
                    # duplicate _id -> refresh the existing document instead
                    cidainwang_lishi.updateOne({'_id': entry['_id']}, entry)
            if len(entries) < 20:
                break
@retry(stop_max_attempt_number=3, wait_fixed=2000)
def get_lishi_info(url2):
    """Fetch a detail page through the proxy pool.

    Retries up to 3 times with a 2 s pause on any exception; the previous
    bare ``@retry`` retried forever, which could hang the crawl on a
    permanently dead URL or proxy.

    Parameters
    ----------
    url2 : str
        Absolute detail-page URL.

    Returns
    -------
    str | None
        The page HTML on HTTP 200, otherwise None (implicitly).
    """
    # timeout keeps a single stalled connection from blocking a retry slot
    response = requests.get(url2, headers=headers, cookies=cookies,
                            proxies=get_kuai_proxy(), timeout=30)
    response.encoding = "utf-8"
    if response.status_code == 200:
        return response.text

def download_lishiinfo(lishi):
    """Fetch the detail page for one entry and attach its HTML body.

    Parameters
    ----------
    lishi : dict
        Entry document; must contain ``href``. On success gains an ``info``
        key holding the raw HTML of the detail block.

    Returns
    -------
    dict
        The same entry dict, possibly enriched with ``info``.
    """
    html = get_lishi_info(lishi['href'])
    if html:
        soup = BeautifulSoup(html, 'lxml')
        # the third <div> under div.left holds the article body
        detail = soup.find('div', class_='left').find_all('div')[2]
        lishi['info'] = str(detail)
    return lishi

def main():
    """Download detail pages for all pending seeds and mark them done.

    Pending seeds are documents with no ``status`` field; each is enriched
    via download_lishiinfo and written back with ``status='success'``.
    """
    # local handle (renamed so it no longer shadows the module-level one)
    coll = mongo_manager("cidainwang_lishi", db='public_data')
    try:
        seeds = coll.findAll({'status': None})
        for seed in seeds:
            print(seed)
            try:
                seed = download_lishiinfo(seed)
                seed['status'] = 'success'
                coll.updateOne({'_id': seed['_id']}, seed)
            except Exception as e:
                # one bad seed must not abort the whole run; leave its
                # status unset so a later pass retries it
                print(f"failed {seed.get('_id')}: {e}")
    finally:
        # previously close() was skipped if the loop raised
        coll.close()

# Script entry point: step 1 (down_lishi_list) collects listing entries,
# step 2 (main) downloads the detail page for each collected entry.
if __name__ == "__main__":
    print()
    # down_lishi_list()
    main()
    # print(get_lishi())
