#!/usr/bin/env python3
# -*- coding: utf-8 -*-

__author__ = 'hhuua'

import logging
import asyncio
from datetime import datetime
import db_pool as db_pool
from QQNewsApi import get_news_with_type, get_news_content, get_write_info
from models import ArticleInfoModel, WriteInfoModel
logging.basicConfig(level=logging.ERROR)

# QQ News channel ("sub service") identifiers that start_search() crawls,
# one listing feed per entry.
sub_srv_id = ['tech', 'world', 'milite', 'ent', 'finance', 'auto', 'fashion', 'video', 'games',
              'cul', 'nstock', 'house', 'visit', 'emotion', 'history', 'pet', 'health', 'sports', 'lifes']


async def init_sql_loop(loop):
    """Create the shared database connection pool bound to *loop*.

    Must complete before any model coroutine (find_all / save_with_dic)
    is awaited.
    """
    create = db_pool.create_db_pool(loop=loop)
    await create


async def get_all_write():
    """Fetch up to 20 author records and cache them in the module-global
    ``writeList``.

    NOTE(review): ``writeList`` is never initialised at module level, so
    reading it before this coroutine has run raises NameError — confirm
    whether callers guarantee ordering.
    """
    global writeList
    write_arr = await WriteInfoModel.find_all(where='', limit=20)
    # Was a bare debug print(); route through logging like the rest of
    # the module (invisible at the configured ERROR level).
    logging.debug('loaded author records: %s', write_arr)
    writeList = write_arr


async def search_content(model):
    """Fetch the full article body for *model* via the QQ News API."""
    return await get_news_content(model)


async def start_search():
    """Crawl every channel in ``sub_srv_id``.

    For each channel: page through the article listing, persist each
    article's content, and create or refresh its author record.  Paging
    stops for a channel after 180 listed articles, when the API returns
    no data, or once an article older than two days appears (the listing
    is presumably newest-first — TODO confirm against the API).
    """
    page_size = 20
    max_articles = 180  # replaces the hard-coded `page * 20 >= 180` guard

    # Read-only access to the module-level list — no `global` needed.
    for srv in sub_srv_id:
        page = 0
        while page * page_size < max_articles:
            logging.error('now is: %s , page: %d' % (srv, page))
            data = await get_news_with_type(srv, page=page, sep=page_size)
            page += 1
            if not data:
                break

            try:
                for article in data:
                    # Stop once an article falls outside the 2-day window.
                    now = datetime.now()
                    published = datetime.strptime(article['update_time'],
                                                  '%Y-%m-%d %H:%M:%S')
                    if (now - published).days > 2:
                        break

                    # Fetch and persist the article body.
                    article['content'] = await search_content(article)
                    await ArticleInfoModel.save_with_dic(article)

                    # Create the author record if unseen; otherwise just
                    # bump its last-seen timestamp.
                    writer = await WriteInfoModel.find_all(
                        where='id=?', args=[article['media_id'] or ''])
                    if not writer:
                        dic = await get_write_info(article['media_id'])
                        if dic:
                            await WriteInfoModel.save_with_dic(
                                dic, last_time=article['update_time'])
                            print('新作者: %s 数据保存成功!!' % dic['name'])
                        else:
                            print('标题: %s 的文章无法找到作者!!' % article['title'])
                    else:
                        await WriteInfoModel.update_time(article)
            except Exception as e:
                # Was `except BaseException`, which also swallowed
                # asyncio.CancelledError and KeyboardInterrupt, making
                # this task impossible to cancel cleanly.  Best-effort
                # per-page handling is preserved: log and move on.
                logging.warning(e)


def start_spider():
    """Bootstrap the crawler: create the DB pool, then run the crawl
    to completion on the current event loop."""
    loop = asyncio.get_event_loop()
    # Pool must exist before start_search() touches the models.
    loop.run_until_complete(init_sql_loop(loop))
    loop.run_until_complete(start_search())


# Guard the entry point so importing this module does not launch the
# crawler as a side effect.
if __name__ == '__main__':
    start_spider()

