#!/usr/bin/env python
# -*- coding:utf-8 -*-
# @FileName  :handianguji.py
# @Time      :2023/11/23 
# @Author    :CL
# @email     :1037654919@qq.com
#https://gj.zdic.net/ 
import requests
from bs4 import BeautifulSoup

# Browser-mimicking request headers captured from a Chrome session;
# the site appears to gate on referer/UA, so these are sent verbatim.
headers = {
    "authority": "gj.zdic.net",
    "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
    "accept-language": "zh-CN,zh;q=0.9",
    "cache-control": "no-cache",
    "pragma": "no-cache",
    "referer": "https://gj.zdic.net/list.php?caid=10",
    "sec-ch-ua": "\"Not.A/Brand\";v=\"8\", \"Chromium\";v=\"114\", \"Google Chrome\";v=\"114\"",
    "sec-ch-ua-mobile": "?0",
    "sec-ch-ua-platform": "\"Linux\"",
    "sec-fetch-dest": "document",
    "sec-fetch-mode": "navigate",
    "sec-fetch-site": "same-origin",
    "sec-fetch-user": "?1",
    "upgrade-insecure-requests": "1",
    "user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36"
}
# Session cookies copied from the same browser session.
# NOTE(review): these are time-stamped session values and will expire;
# re-capture them if the site starts rejecting requests.
cookies = {
    "Hm_lvt_ed715607517bb15c35acb425d31cd10e": "1700729097",
    "fCW_08cms_regcode": "HzPPGTAnbYsQPdBPXJNt65OYn4jWhCc",
    "fCW_BR_R_0": "2129%2C1700730213%3B39637%2C1700730236",
    "Hm_lpvt_ed715607517bb15c35acb425d31cd10e": "1700730237",
    "fCW_msid": "Q9uwjl"
}
# pigcha
# Local proxy endpoint (pigcha client); only get_data() currently uses it.
proxies = {'http':'127.0.0.1:15732',
           'https':'127.0.0.1:15732'}
def get_url(url="https://gj.zdic.net/list.php"):
    """Fetch the category navigation page and return its category links.

    Args:
        url: Listing endpoint; always queried with ``caid=14``.

    Returns:
        list[dict]: One dict per category with keys ``'name'`` (link text)
        and ``'href'`` (absolute URL).
    """
    params = {"caid": "14"}
    response = requests.get(url, headers=headers, cookies=cookies, params=params)
    print(response)

    soup = BeautifulSoup(response.text, 'lxml')
    # Direct children only: each child <div> of the nav container holds one <a>.
    entries = soup.find('div', id='gj_dh_z').find_all('div', recursive=False)
    # NOTE(review): href is joined without a '/' here but with one in
    # get_book -- presumably these anchors start with '/'; confirm on the site.
    return [
        {'name': entry.find('a').get_text(),
         'href': 'https://gj.zdic.net' + entry.find('a')['href']}
        for entry in entries
    ]


def get_book(url="https://gj.zdic.net/list.php?caid=10"):
    """Fetch one category listing page and return its book links.

    Pagination follows the pattern
    ``https://gj.zdic.net/list.php?caid-10/page-2.html``.

    Args:
        url: Absolute URL of a category listing page.

    Returns:
        list[dict]: One dict per book with keys ``'name'`` (title) and
        ``'href'`` (absolute URL of the book's archive page).
    """
    response = requests.get(url, headers=headers, cookies=cookies)
    print(response)
    soup = BeautifulSoup(response.text, 'lxml')
    items = soup.find('div', id='list_d_1').find_all('li')
    books = [
        {'name': item.find('a').get_text(),
         'href': 'https://gj.zdic.net/' + item.find('a')['href']}
        for item in items
    ]
    print(books)
    return books


def main():
    """Walk every category from get_url and fetch its book list.

    Results are printed by get_book itself; nothing is returned.
    """
    url_list = get_url()
    print(url_list)
    for category in url_list:
        print(category)
        # Return value intentionally unused: get_book prints its own output.
        get_book(url=category['href'])

def book_chapter(url='https://gj.zdic.net/archive.php?aid-2129.html'):
    """Fetch a book's archive page and return its chapter links.

    Bug fix: the original function ignored *url* entirely -- it shadowed it
    with a hard-coded ``archive.php`` base plus ``aid-2129`` params, so every
    book produced the chapter list of book 2129. The default URL also had a
    typo (missing ``/`` before ``archive.php``); both are repaired here.

    Args:
        url: Absolute ``archive.php`` URL of the book (as produced by
            get_book).

    Returns:
        list[dict]: One dict per chapter with keys ``'name'`` (chapter
        title) and ``'href'`` (absolute chapter URL).
    """
    response = requests.get(url, headers=headers, cookies=cookies)
    print(response)
    soup = BeautifulSoup(response.text, 'lxml')
    items = soup.find('div', class_='mls').find_all('li')
    chapters = [
        {'name': item.find('a').get_text(),
         'href': 'https://gj.zdic.net/' + item.find('a')['href']}
        for item in items
    ]
    print(chapters)
    return chapters
def get_data(url='https://gj.zdic.net/archive.php?aid-39637.html'):
    """Download one chapter page and return its body text.

    Goes through the module-level local proxy (``proxies``) with a 10 s
    timeout, unlike the other fetchers in this file.

    Args:
        url: Absolute chapter URL (as produced by book_chapter).

    Returns:
        str: Whitespace-stripped text of the ``div#snr2`` content block.

    Raises:
        AttributeError: if the response has no ``div#snr2`` (e.g. a blocked
            or error page); callers currently catch this.
    """
    # Dropped the original's no-op ``params={}`` and ``data={}`` kwargs:
    # both were empty and a request body is meaningless on a GET.
    response = requests.get(url, headers=headers, cookies=cookies,
                            timeout=10, proxies=proxies)
    print(response.url, response)
    soup = BeautifulSoup(response.text, 'lxml')
    content = soup.find('div', id='snr2')
    return content.get_text().strip()


if __name__ == "__main__":
    print('beidi')
    # get_book()
    # book_chapter()
    # Smoke-test the proxy path with the default chapter first.
    text = get_data()
    print(text)

    for book in get_book():
        print(book)
        for chapter in book_chapter(book['href']):
            try:
                text = get_data(chapter['href'])
                print(chapter['name'], text)
            except Exception as exc:
                # Was a bare ``except: pass`` that silently hid every error
                # (including Ctrl-C). Best-effort: skip the chapter but say why.
                print('skip', chapter['name'], exc)

        # Only the first book is crawled for now, as in the original.
        break





