# _*_ coding:utf-8 _*_
# @File  : paser_article_html.py
# @Time  : 2022-03-15  07:42
# @Author: zizle
import requests
import json
import pathlib
import time


def read_blank_article(year: str, save_type='local', start_day=None, end_day=None, title=None):
    """Walk the per-day listing JSON files under ``<year>/`` and save each
    article's metadata (title/author only, no body) locally or to the server.

    :param year: folder name holding the ``YYYY-MM-DD_*.json`` listing files
    :param save_type: ``'local'`` -> write an HTML file per article,
        ``'server'`` -> POST each article to the API, anything else -> dry run
    :param start_day: skip days earlier than this ``YYYY-MM-DD`` string
    :param end_day: stop at the first day >= this ``YYYY-MM-DD`` string
    :param title: when given, only process the article with this exact title
    """
    folder = pathlib.Path(year)
    # sorted(): glob() order is filesystem-dependent, but the early `break`
    # on end_day relies on chronological 'YYYY-MM-DD_*' filename order
    for json_file in sorted(folder.glob('*')):
        # listings contain Chinese text: force utf8 instead of the locale default
        with open(json_file, 'r', encoding='utf8') as jf:
            article_list_json = json.load(jf)
        # the day is the filename part before the first '_'
        parser_day = json_file.name.split('_')[0]
        if start_day and parser_day < start_day:
            continue
        if end_day and end_day <= parser_day:
            break
        print(f'解析{parser_day}的文章数据...')
        for row in article_list_json['rows']:
            # Metadata-only record; strip characters illegal in filenames from the title.
            article_data = {
                'create_time': row["CreateTime"],
                'raw_time': None,
                'title': row['Subject'].strip().replace(':', '——').replace('?', '').replace('？', '').replace('*', ''),
                'authors': row['Authors'].replace('/', '-'),
                'words': 0,
                'issue_num': row["Num"],
                'layout': row["Layout"],
                'is_special': row['IsSpecial'],
                'content': ''
            }
            if title and article_data['title'] != title:
                continue
            if save_type == 'local':
                # render as a local HTML file
                save_article_to_file(article_data, parser_day)
            elif save_type == 'server':
                # listings carry no raw publish time; fall back to create_time
                if not article_data['raw_time']:
                    article_data['raw_time'] = article_data['create_time']
                save_article_to_serve(article_data, parser_day)
                time.sleep(0.5)  # throttle uploads
            else:
                pass
        print('--' * 30)
        time.sleep(0.1)


def read_article_content(year: str, save_type='local', start_day=None, end_day=None, title=None):
    """Walk the per-day listing JSON files under ``<year>/``, join each row with
    its full-text file under ``DailyArticle/<year>/<day>/``, and save the
    complete article locally as HTML or to the server.

    :param year: folder name holding the ``YYYY-MM-DD_*.json`` listing files
    :param save_type: ``'local'`` -> write an HTML file per article,
        ``'server'`` -> POST each article to the API, anything else -> dry run
    :param start_day: skip days earlier than this ``YYYY-MM-DD`` string
    :param end_day: stop at the first day >= this ``YYYY-MM-DD`` string
    :param title: when given, only process the article with this exact title
    """
    folder = pathlib.Path(year)
    # sorted(): glob() order is filesystem-dependent, but the early `break`
    # on end_day relies on chronological 'YYYY-MM-DD_*' filename order
    for json_file in sorted(folder.glob('*')):
        # files contain Chinese text: force utf8 instead of the locale default
        with open(json_file, 'r', encoding='utf8') as jf:
            article_list_json = json.load(jf)
        # the day is the filename part before the first '_'
        parser_day = json_file.name.split('_')[0]
        if start_day and parser_day < start_day:
            continue
        if end_day and end_day <= parser_day:
            break
        print(f'解析{parser_day}的文章数据...')
        file_folder = pathlib.Path('DailyArticle/').joinpath(year).joinpath(parser_day)
        for row in article_list_json['rows']:
            time.sleep(0.1)
            # per-article content file saved by the crawler: <day>_<Id>.json
            filepath = file_folder.joinpath(f'{parser_day}_{row["Id"]}.json')
            with open(filepath, 'r', encoding='utf8') as fp:
                file_content = json.load(fp)[0]
            # Merge the listing row with the content record; strip characters
            # illegal in filenames from the title.
            article_data = {
                'create_time': row["CreateTime"],
                'raw_time': file_content["createtime"].replace('/', '-'),
                'title': file_content['title'].strip().replace(':', '——').replace('?', '').replace('？', '').replace('*', ''),
                'authors': file_content['author'].replace('/', '-'),
                'words': file_content['wordnum'],
                'issue_num': row["Num"],
                'layout': row["Layout"],
                'is_special': row['IsSpecial'],
                'content': file_content['content']
            }
            if title and article_data['title'] != title:
                continue
            if save_type == 'local':
                # render as a local HTML file
                save_article_to_file(article_data, parser_day)
            elif save_type == 'server':
                # some content records lack a raw time; fall back to create_time
                if not article_data['raw_time']:
                    article_data['raw_time'] = article_data['create_time']
                save_article_to_serve(article_data, parser_day)
                time.sleep(0.4)  # throttle uploads
            else:
                pass
        print('--' * 30)
        time.sleep(0.1)


def save_article_to_serve(article_data, option_day):
    """POST one article record to the server API, appending any failure to
    ``save_article_log.txt`` so a long batch run can be audited and resumed.

    :param article_data: dict with create_time/raw_time/title/... fields
    :param option_day: ``YYYY-MM-DD`` string, used only in log messages
    """
    # base_url = 'http://127.0.0.1:8000/admin/qhrb/article/'
    base_url = 'http://210.13.218.130:9000/v1/qhrb/article/'
    try:
        # timeout prevents one stuck request from hanging the whole batch
        r = requests.post(base_url, json=article_data, timeout=30)
        resp = r.json()
    except Exception as e:
        # network / JSON-decode failure: record it and keep the batch going
        with open('save_article_log.txt', 'a+', encoding='utf8') as log:
            log.write(f'{option_day} - 发起保存{article_data["create_time"]}-{article_data["title"]}失败...{e}\n')
        print(f'发起保存{article_data["create_time"]}-{article_data["title"]}失败...{e}')
    else:
        # the API reports success via its own 'code' field, not the HTTP status
        if resp.get('code', 0) == 201:
            print(f'服务器保存{article_data["create_time"]}-{article_data["title"]}  成功!')
        else:
            print(resp)
            with open('save_article_log.txt', 'a+', encoding='utf8') as log:
                log.write(f'{option_day} - 服务器保存{article_data["create_time"]}-{article_data["title"]}失败...\n')
            print(f'服务器保存{article_data["create_time"]}-{article_data["title"]}  失败...')


def save_article_to_file(article_data, today):
    """Render one article as a standalone HTML file under
    ``DailyArticle/readable/<year>/<day>/``.

    :param article_data: dict with title/authors/raw_time/layout/content fields
    :param today: ``YYYY-MM-DD`` string; its first 4 chars select the year folder

    NOTE: mutates ``article_data['title']`` (filename sanitizing) in place.
    """
    folder = pathlib.Path(f'DailyArticle/readable/{today[:4]}/{today}/')
    # '>' is problematic in filenames; rewrite the one known offending title
    article_data['title'] = article_data['title'].replace('1+1>2', '1加1大于2')
    html_filename = f'{article_data["layout"]}_{article_data["title"].replace("/", "")}_{article_data["authors"]}.html'
    # exist_ok avoids FileExistsError on re-runs (and a check-then-create race)
    folder.mkdir(parents=True, exist_ok=True)
    html_content = '<div style=margin-left:30px;margin-right:30px>'
    html_content += f'<p style=font-size:24px;text-align:center>{article_data["title"]}</p>'
    html_content += f'<div style="text-align: right;margin-right: 10%;font-size:14px; color:#363662; margin-left:10px; margin-top:20px;"><span style="margin-right:20px;" v-show="dialogArticle.authors">作者：{ article_data["authors"] }</span><span>发布时间：{ article_data["raw_time"] }</span></div>'
    html_content += article_data["content"] + '</div>'
    with open(folder.joinpath(html_filename), 'w', encoding='utf8') as html:
        html.write(html_content)
    print(f'本地保存 {today} -- {article_data["title"]} HTML可读文件成功!')


def hand():
    """One-off helper: load a sample listing JSON, echo it, then rewrite the
    same file pretty-printed (indent=4) for easier manual inspection."""
    sample_path = '2022/2022-03-07_1.json'
    with open(sample_path, 'r', encoding='utf8') as reader:
        payload = json.load(reader)
        print(payload)
    with open(sample_path, 'w', encoding='utf8') as writer:
        json.dump(payload, writer, indent=4)


if __name__ == '__main__':
    # 2008 and earlier have no article bodies, only titles
    # 2009-01-13 -- 2009-04-28 data is unavailable; the source site is broken too
    read_article_content('2022',  start_day='2022-04-25', save_type='server')  # upload full articles
    # read_blank_article('2013', save_type='server')  # upload titles only
    # hand()
