import requests
import re
# Hard-coded Baidu session cookies captured from a logged-in browser session.
# NOTE(review): these contain live authentication tokens (BDUSS/BDUSS_BFESS)
# and tracking values that expire — the script silently degrades once the
# session lapses. Consider loading them from a config file or environment
# variable instead of committing secrets to source control.
cookies = {
    'murmur': 'undefined--Win32',
    'PSTM': '1613449186',
    'BAIDUID': '0C9F564867C0D004E253817AE4F8F989:FG=1',
    'BIDUPSID': 'F80D438F191BBC9AA4037EEFCA1F1703',
    '__yjs_duid': '1_f58a8e2d734011ddc2fe213782e1c0cb1613449208540',
    '_click_param_reader_query_ab': '-1',
    '_click_param_pc_rec_doc_2017_testid': '3',
    'BAIDUID_BFESS': '0C9F564867C0D004E253817AE4F8F989:FG=1',
    'BDRCVFR[k2U9xfnuVt6]': 'mk3SLVN4HKm',
    'BDRCVFR[S4-dAuiWMmn]': '7Gup0zg2JVtfj6snjndnHmkg17xuAT',
    'delPer': '0',
    'PSINO': '2',
    'H_PS_PSSID': '33423_33506_33354_33273_31660_26350',
    'BDORZ': 'B490B5EBF6F3CD402E515D22BCDA1598',
    'BA_HECTOR': '01058h0g250ga421hh1g2um8o0q',
    'layer_show_times_by_day_2_a7f0ff13dc9a66e096b4544905ab4f91': '4',
    'layer_show_times_total_2_a7f0ff13dc9a66e096b4544905ab4f91': '14',
    # BDUSS is the Baidu login token — treat as a password.
    'BDUSS': 'DV3MmFIeFkxalVuU0RUc0hCU016ZmZhSXplUXJ5V0I3eFJFcjlOOFpRTC01bFpnRUFBQUFBJCQAAAAAAAAAAAEAAADcelZ7yKu2vNTavdnE0czTAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP5ZL2D-WS9gT',
    'BDUSS_BFESS': 'DV3MmFIeFkxalVuU0RUc0hCU016ZmZhSXplUXJ5V0I3eFJFcjlOOFpRTC01bFpnRUFBQUFBJCQAAAAAAAAAAAEAAADcelZ7yKu2vNTavdnE0czTAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP5ZL2D-WS9gT',
    'Hm_lvt_f06186a102b11eb5f7bcfeeab8d86b34': '1613572700,1613716392',
    'close_cashier_time_3_a7f0ff13dc9a66e096b4544905ab4f91': '1',
    'Hm_lvt_d8bfb560f8d03bbefc9bdecafc4a4bf6': '1613715767,1613715973,1613716386,1613716395',
    '___wk_scode_token': '2ZCJjPnf87IWpRpmbeGhQwS%2BOLY8B%2Fg0NEfHEMpD66k%3D',
    'Hm_lpvt_f06186a102b11eb5f7bcfeeab8d86b34': '1613716399',
    'isJiaoyuVip': '1',
    'PMS_JT': '%28%7B%22s%22%3A1613716402281%2C%22r%22%3A%22https%3A//wenku.baidu.com/search%3Fword%3D%25E9%25AB%2598%25E6%25A0%25A1%25E5%25A4%25A7%25E5%25AD%25A6%25E7%2594%259F%25E5%25B0%25B1%25E4%25B8%259A%25E6%258C%2587%25E5%25AF%25BC%26lm%3D0%26od%3D0%26fr%3Dview_rs_1%22%7D%29',
    'Hm_lpvt_d8bfb560f8d03bbefc9bdecafc4a4bf6': '1613716404',
    'ab_sr': '1.0.0_YTU0NjM4NDY2MmJiOWU5ZjNmNzJkNDFjNTM2ZDRkZWRhMjMyZDg1OTljOGZkMGQ0ZTdmYzc4MWRhNTE1ZjhkYjI2OGY4ZTNkNmZiMDRhZTQwZWU1YzRjZjc3N2Y5NGY2ZjFjYjlhOTFlOWM5MmJmMjQxYjY4YWVlMTAyNWY5N2Y=',
    'bcat': 'db8b15d17cb878eccb7d99b546f0a12b0711e3f7f536c7484dffe3d1b43c7853ab4857174d3de68e3f1a8373293ee6972c3f970e5f1435c35cb3a5338cec1f0cc7961349dd035f8e11f23def8108abc703f0d277d4cc6f7c2b70c84cd6baef99cc7b6a2725ad24bae4c6b1ca1ae3e2d677bd0146ac0e2eeae255452b4259d537',
    'LoseUserAllPage': '%7B%22type%22%3A0%2C%22status%22%3A0%2C%22expire_time%22%3A0%2C%22create_time%22%3A1613716405%2C%22cookie_time%22%3A1613802805%7D',
}

# Browser-mimicking request headers (copied from an Edge 88 session) so the
# request looks like a normal page navigation rather than a script.
headers = {
    'Connection': 'keep-alive',
    'Cache-Control': 'max-age=0',
    'Upgrade-Insecure-Requests': '1',
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.182 Safari/537.36 Edg/88.0.705.74',
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
    'Sec-Fetch-Site': 'none',
    'Sec-Fetch-Mode': 'navigate',
    'Sec-Fetch-User': '?1',
    'Sec-Fetch-Dest': 'document',
    'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
}

# Query-string parameters appended to the document URL; requests encodes a
# dict of unique keys identically to the original tuple-of-pairs form.
params = {
    'fr': 'search-1-income1-psrec1',
    'fixfr': 'Fgtkfxv3k3GVV1rxDNFe9w==',
}

# Fetch the document's HTML view page; the inline JS contains "pageLoadUrl"
# entries pointing at the per-page content JSON files extracted below.
response = requests.get('https://wenku.baidu.com/view/1096b2e4c381e53a580216fc700abb68a882ad90.html', headers=headers, params=params, cookies=cookies)
print(response)
#NB. Original query string below. It seems impossible to parse and
#reproduce query strings 100% accurately so the one below is given
#in case the reproduced version is not "correct".
# response = requests.get('https://wenku.baidu.com/view/1096b2e4c381e53a580216fc700abb68a882ad90.html?fr=search-1-income1-psrec1&fixfr=Fgtkfxv3k3GVV1rxDNFe9w%3D%3D', headers=headers, cookies=cookies)


# Pull every escaped content-JSON URL out of the page source: match from
# "pageLoadUrl" up to a literal backslash, capturing the https...0.json URL.
# NOTE(review): in "0.json?" the '.' is an unescaped any-char and the '?'
# acts as a regex quantifier (making the 'n' optional), not a literal '?'.
# It still matches in practice because the lazy '.*?' absorbs the query
# string, but the intended literal form would be r'0\.json' — confirm
# against a live page before tightening.
url_tony = re.findall(r'pageLoadUrl.*?(https.*?0.json?.*?)\\',response.text)

# Download each extracted content JSON and append its text to a local file.
# Fixes over the original: the output file is opened once for the whole run
# instead of being re-opened on every URL (same append semantics, fewer
# open/close cycles), the download has a timeout so a stalled connection
# cannot hang the script forever, and the decode is computed once per line.
with open('百度文库.txt', 'a+', encoding='utf-8') as f:
    for a in url_tony:
        # URLs in the page source are JS-escaped ("https:\\/\\/..."); strip
        # the escaping to obtain a fetchable URL.
        url_new = a.replace('\\\\/', '/')

        # timeout prevents an indefinite hang on an unresponsive server
        response_2 = requests.get(url_new, timeout=30).content.decode('utf-8')

        # Each fragment carries its text in "c" and a coordinate in "y";
        # only the "c" text is used below — "y" is captured but ignored.
        content_2 = re.findall(r'"c":"(.*?)".*?"y":(.*?),', response_2)

        for b in content_2:
            if b[0].strip() != '':
                # The text arrives as \uXXXX escape sequences; round-trip
                # through unicode_escape to turn them into real characters.
                # NOTE(review): this assumes the payload is pure \u-escaped
                # ASCII — literal non-ASCII bytes would be mangled; confirm
                # against an actual response.
                text = b[0].encode('utf-8').decode('unicode_escape')
                print(text)
                f.write(text + '\n')









