# _*_ coding:utf-8 _*_
# @Time  : 2022-03-11
# @Author: zizle
import json
import pathlib
import requests
from loguru import logger
# Mirror error records to a local file in addition to stderr.
logger.add('error.log')

# Session cookie copied from a logged-in browser; needed for member-only
# pages on paper.7h365.com when USE_COOKIE is enabled.
LOGIN_COOKIE = "ASP.NET_SessionId=jrmfo121jsnmcfghb00xzfue; FuturesDaily_Mac=2E28C9424E003194DB382B68C66CF0F6; FuturesDaily_Licence=3DBFA84844D0A476D9417D980C66ADA4; FuturesDaily_2018/*=7E00C1AEB6152D864DEA6E66A29BE9DB2701FE629066F9A5335018BF0A0098536F80CEDE8FC17647A2FDFD4599C1F86B1B20CBC5BC4C36BE8794CAD1A4BA60031D3F11C6909E290086DB99895BB432BC7D957524C59C72ACADF2472AC1857B2FDCB6F27476E13A09CBB63EACC0AD1E5ED64906C9051954AB71C9C845E926B7FE50069134107753D5F66E79D96A406DF18FC4B07D3680C76AAF2D7CAE2AC585CD99BF412FE1B0DF1F88010C2BFE21232A69B5CECBB06AC445AF84F658A3AD83226EEDF11058CFC9B4108535898C0F1EE4FBE773C186F90F5458ACD949C7EF767C1447A485361C48C0"
# Master switch: when True, the cookie above is sent with every request.
USE_COOKIE = False
# Proxy is only ever enabled together with the cookie; False disables it.
# NOTE(review): the dict maps only 'https' while the article URL below is
# plain http, so requests would not route that call through this proxy —
# confirm whether an 'http' entry was intended.
USE_PROXIES = {'https': '221.4.241.198:9091'} if USE_COOKIE else False


def request_article_content(article_id, day):
    """Fetch one article's JSON from paper.7h365.com and save it to disk.

    Parameters
    ----------
    article_id : str
        The site's ``keyValue`` identifier for the article.
    day : str
        Date string starting with the 4-digit year (e.g. '2022-04-18');
        used to build the folder ``DailyArticle/<year>/<day>`` and the
        file name ``<day>_<article_id>.json``.

    Side effects: creates the folder if missing and writes the JSON file.
    All failures are caught, printed, and logged rather than raised.
    """
    folder = 'DailyArticle/%04d/%s' % (int(day[:4]), day)
    save_folder = pathlib.Path(folder)
    # exist_ok avoids the check-then-create race of exists() + mkdir().
    save_folder.mkdir(parents=True, exist_ok=True)
    save_path = save_folder.joinpath(f'{day}_{article_id}.json')
    article_url = 'http://paper.7h365.com/Members/Article_Show'
    params = {
        'keyValue': article_id
    }
    headers = {
        'Host': 'paper.7h365.com',
        'Connection': 'keep-alive',
        'Accept': 'application/json, text/javascript, */*; q=0.01',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36',
        'X-Requested-With': 'XMLHttpRequest',
        'Referer': 'http://paper.7h365.com/Members/MemberIndex',
        'Accept-Encoding': 'gzip,deflate',
        'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8'
    }
    if USE_COOKIE:
        headers['Cookie'] = LOGIN_COOKIE
    try:
        # requests accepts proxies=None, so one call covers both branches;
        # a timeout prevents the scraper from hanging forever on a dead
        # connection (requests has no default timeout).
        r = requests.get(
            article_url,
            params=params,
            headers=headers,
            proxies=USE_PROXIES or None,
            timeout=30,
        )
        # Fail fast on HTTP errors instead of feeding an error page to .json().
        r.raise_for_status()
        content = r.json()
        with open(save_path, 'w', encoding='utf8') as fp:
            # ensure_ascii=False keeps Chinese text readable in the saved file
            # instead of \uXXXX escapes.
            json.dump(content, fp, indent=4, ensure_ascii=False)
    except Exception as e:
        print(f'获取{save_path}失败：{e}')
        logger.error(f'获取{save_path}失败：{e}')
    else:
        print(f'保存{save_path}成功！')


if __name__ == '__main__':
    # Ad-hoc single-article run; edit the (article_id, day) pair as needed.
    request_article_content('195436', '2022-04-18')