import json

import pymysql,requests,urllib3,datetime,re,asyncio,aiohttp,time,aiofiles
from lxml import etree
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

# Crawler: downloads a novel chapter-by-chapter from Baidu's free-reading API.

"""
1. Synchronous step: call getCatalog to obtain each chapter's cid and title.
2. Asynchronous step: call getChapterContent to download each chapter's content.
"""

async def aiodown(cid, b_id, name):
    """Fetch one chapter from the Baidu novel API and save it to disk.

    Args:
        cid: Chapter id as returned by the catalog endpoint.
        b_id: Book id the chapter belongs to.
        name: Chapter title, used verbatim as the output file name.
    """
    # Keep the serialized payload in its own name instead of rebinding
    # ``data`` from dict to str (and drop the no-op ``name = name``).
    payload = json.dumps({
        "book_id": b_id,
        "cid": f"{b_id}|{cid}",
        "need_bookinfo": 1,
    })
    url = 'https://dushu.baidu.com/api/pc/getChapterContent?data=' + payload
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            dic = await resp.json()
            # NOTE(review): assumes the response always carries
            # data.novel.content — a failed request raises KeyError here.
            # Also assumes ./demo/content/SanG/ already exists.
            async with aiofiles.open('./demo/content/SanG/' + name + '.txt',
                                     mode='w', encoding='utf-8') as f:
                await f.write(dic['data']['novel']['content'])



def down(url):
    """Synchronously GET *url* and return the raw ``requests`` Response.

    Raises ``requests.exceptions.Timeout`` if the server stalls; the
    original had no timeout, so a dead server would hang the crawl.
    """
    # The original built an empty, unused ``headers`` dict — removed.
    return requests.get(url, timeout=10)

async def getCatalog(url, book_id=None):
    """Fetch the book catalog and download every chapter concurrently.

    Args:
        url: getCatalog endpoint URL (already carrying the book_id payload).
        book_id: Book id forwarded to each chapter download. Defaults to the
            module-level ``b_id`` set by the ``__main__`` block, preserving
            the original global-based behavior.
    """
    if book_id is None:
        book_id = b_id  # legacy fallback: script sets this global in __main__
    resp = down(url).json()
    coros = [
        aiodown(item['cid'], book_id, item['title'])
        for item in resp['data']['novel']['items']
    ]
    # gather() (unlike asyncio.wait) accepts an empty list and propagates
    # the first exception instead of silently leaving it on a task. The
    # original also rebuilt an unused ``sigurl`` string every iteration —
    # dead code, removed along with the commented-out leftovers.
    await asyncio.gather(*coros)



if __name__ == '__main__':
    # Book id consumed by the catalog/chapter endpoints; getCatalog reads
    # this global when no explicit book_id is passed.
    b_id = '4356477283'
    url = ('https://dushu.baidu.com/api/pc/getCatalog?data={"book_id":"'
           + b_id + '"}')
    # asyncio.run() (Python 3.7+) creates and closes the loop for us,
    # replacing the manual new_event_loop/set_event_loop/run_until_complete
    # sequence — which also never closed the loop it created.
    asyncio.run(getCatalog(url))


