import asyncio
import re
import time
import urllib
from itertools import zip_longest

from pyppeteer import launch

async def main():
    """Launch a local Chrome via pyppeteer, load a Baidu Wenku document
    page with a session cookie, print the rendered HTML, and shut down.

    Returns:
        None. The page HTML is written to stdout as a side effect.
    """
    # Launch the browser. NOTE: the path MUST be a raw string — in a normal
    # string literal sequences like "\P", "\G", "\A" are invalid escapes
    # (DeprecationWarning now, SyntaxError in future Python versions).
    browser = await launch(
        executablePath=r'C:\Program Files\Google\Chrome\Application\chrome.exe')
    # Open a fresh tab.
    page = await browser.newPage()
    # Optionally spoof the User-Agent instead of the extra-header approach below:
    # user_agent='Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36'
    # await page.setUserAgent(user_agent)

    # Session cookie required by the target site (value is environment-specific
    # and will expire — update it before running).
    cookie = {'name': '___wk_scode_token',
              'value': '2zZ6Rm6GGDhBs/PQB1FFepifB03dxL6axAsP0sn25PU=',
              'domain': 'www.baidu.com'}
    # Present as Googlebot so the site serves the full document content.
    header = {'User-agent': 'Googlebot'}
    await page.setExtraHTTPHeaders(header)
    await page.setCookie(cookie)
    # pyppeteer timeouts are in MILLISECONDS; 60_000 = 60 s (the original
    # 600000000000 was ~19 years, clearly unintended).
    await page.goto('https://wenku.baidu.com/view/5f9409e982d049649b6648d7c1c708a1284a0aa3?bfetype=new&_wkts_=1710322073865&bdQuery=%E7%99%BE%E5%BA%A6%E6%96%87%E5%BA%93',
                    timeout=60000)
    # Dump the fully rendered page HTML.
    content = await page.content()
    print(content)
    # Release the browser process.
    await browser.close()
# Script entry point. asyncio.run() replaces the deprecated
# get_event_loop().run_until_complete() pattern; the __main__ guard keeps the
# browser from launching if this module is ever imported. main() returns None,
# so the old `resp = ...` binding carried no information and was dropped.
if __name__ == "__main__":
    asyncio.run(main())
