import requests
import random
import pymysql
import time
from bs4 import BeautifulSoup

import sys
import io

# Force stdout to UTF-8 so Chinese log text prints correctly regardless of
# the console's default encoding (e.g. GBK on Windows).
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding="UTF-8")

from requests.packages.urllib3.exceptions import InsecureRequestWarning

requests.packages.urllib3.disable_warnings(InsecureRequestWarning)  # suppress SSL warnings (requests below use verify=False)

# MySQL connection settings.
# NOTE(review): credentials are hard-coded in source — move them to
# environment variables or a config file before sharing/deploying.
mysql_config = {
    'user': 'aiiread',
    'password': 'nLP4IgCu',
    'host': '127.0.0.1',
    'db': 'aiiread',
}

# Single module-level connection shared by every function in this script.
conn = pymysql.connect(**mysql_config)


def jinse_news():
    """Crawl Jinse news across all categories.

    Reads the persisted high-water mark (`top_crawler`.`top_id`, type=1),
    crawls each category via news_by_cate(), and updates the high-water
    mark whenever a category reports a newer article id.
    """
    categories = [
        'news',           # news
        'zhengce',        # policy  (BUG FIX: a missing comma here previously
                          # fused 'zhengce' + 'TOR' into the bogus key
                          # 'zhengceTOR', so neither category was crawled)
        'TOR',            # relativity
        'personage',      # people
        'fenxishishuo',   # market analysis
        'application',    # applications
        'capitalmarket',  # investment research
        'tech',           # technology
        'baike',          # encyclopedia
    ]
    random.shuffle(categories)  # vary crawl order between runs

    # Load the persisted high-water mark; create the tracking row on first run.
    sql = "SELECT `type`, `top_id`  FROM `top_crawler` WHERE `type`=1"
    with conn.cursor() as cursor:
        cursor.execute(sql)
        result = cursor.fetchone()
        if result is None:
            cursor.execute("INSERT INTO `top_crawler`(`type`, `top_id`) VALUES(1, 0);")
            conn.commit()
            crawler_top_id = 0
        else:
            crawler_top_id = result[1]

    new_top_id = crawler_top_id
    for cate in categories:
        top_id = news_by_cate(cate, crawler_top_id)
        # Persist the high-water mark as soon as it grows so a crash
        # mid-run does not re-process everything.
        if top_id is not None and top_id > new_top_id:
            new_top_id = top_id
            with conn.cursor() as cursor:
                cursor.execute(
                    "UPDATE `top_crawler` SET top_id = %s where `type` = 1",
                    (new_top_id,),  # proper one-element tuple for the parameter
                )
                conn.commit()


def news_by_cate(cate, top_id=0):
    """Fetch the latest articles for one category and store the new ones.

    Args:
        cate: Jinse category key (e.g. 'news', 'tech').
        top_id: highest article id already processed; older items are skipped.

    Returns:
        The category's newest article id, or None when nothing is newer
        than `top_id`.
    """
    url = 'https://api.jinse.com/v6/information/list'
    params = {
        'limit': 5,
        'information_id': 0,
        'flag': 'down',
        'version': '9.9.9',
        'catelogue_key': cate,
    }
    fetch_result = sq_fetch(url, params)
    articles = fetch_result.get('list')  # renamed: don't shadow builtin `list`
    fetch_cate_top_id = fetch_result.get('top_id')
    if top_id >= fetch_cate_top_id:  # no new articles in this category
        return None

    for article in articles:
        origin_id = article['id']
        if origin_id < top_id:
            # List is newest-first; everything from here on was seen before.
            break

        # Deduplicate on the source id; record it, or skip if already stored.
        with conn.cursor() as cursor:
            cursor.execute('SELECT 1 FROM jinse_crawler where  jinse_id =  %s', (origin_id,))
            conn.commit()
            if cursor.fetchone() is None:
                cursor.execute('INSERT INTO `jinse_crawler`(`jinse_id`) values (%s)', (origin_id,))
                conn.commit()
            else:
                continue

        title = article['title']
        main_image = article['extra']['thumbnail_pic']
        content_images = ','.join(article['extra']['thumbnails_pics'])
        author = article['extra']['source'] or ''  # normalize None/empty to ''

        content = fetch_document(article['extra']['topic_url'], 'js-article-detail')
        # Crude XSS mitigation: neuter inline JS event handlers before storing.
        content = content.replace('onerror', 'xxxxxxx')
        content = content.replace('onmouseover', 'xxxxxxxxxxx')
        content = content.replace('onload', 'xxxxxx')

        if len(content) > 20000:
            print('文章过长,已经过滤')
            continue

        category_id = 6
        from_way = '金色财经'
        article_type = 4  # 4 = crawled (renamed: don't shadow builtin `type`)
        user_id = 1
        status = 1  # 1 = published
        now = int(time.time())

        sql = "INSERT INTO `zzz_article` (`author`, `category_id`,`type`,`user_id`, `title`, `content`, `main_image`, " \
              "`content_images`, `createtime`, `updatetime`, `sendtime`, `status`, `from_way`) values ( %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s )"
        with conn.cursor() as cursor:
            cursor.execute(sql, (
                author, category_id, article_type, user_id, title, content, main_image,
                content_images, now, now, now, status, from_way,
            ))
            conn.commit()
    return fetch_cate_top_id


def fetch_document(url, content_class_name):
    """Download `url` and return the outer HTML of the first <div> whose
    class matches `content_class_name`, or '' when no such div exists.

    BUG FIX: the original returned str(None) — the literal string 'None' —
    when the div was missing, and that bogus text could end up stored as
    article content. A request timeout is also set so a dead server cannot
    hang the crawler indefinitely.
    """
    html = requests.get(url, timeout=30).content
    soup = BeautifulSoup(html, 'lxml')
    article = soup.find('div', {'class': content_class_name})
    return '' if article is None else str(article)


def sq_fetch(url, params):
    """GET `url` with browser-like headers and return the parsed JSON body.

    SSL verification is intentionally disabled (the corresponding warning is
    suppressed at module level). A timeout is added so an unresponsive API
    endpoint cannot block the crawler forever.
    """
    session = requests.session()
    session.headers.update({
        'Accept': 'image/webp,image/*;q=0.8',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:69.0) Gecko/20100101 Firefox/69.0',
        'Accept-Language': 'zh-cn',
    })
    response = session.get(url=url, params=params, verify=False, timeout=30)
    return response.json()


if __name__ == '__main__':
    # Ad-hoc single-page test left by the author:
    # document = fetch_document('https://www.jinse.com/blockchain/372388.html', 'js-article')
    # print(document)
    # Entry point: run one full crawl pass over all categories.
    jinse_news()
