#!/usr/bin/env python3
# -*- coding: utf-8 -*-

__author__ = 'hhuua'


import aiohttp
import json
from bs4 import BeautifulSoup


async def get_news_with_write(write_id):
    # TODO: unimplemented stub — presumably intended to fetch the news list
    # for a given writer/media-account id (see get_write_info below); confirm
    # intended behavior before implementing.
    pass


async def get_news_with_type(type, page, sep):
    """Fetch one page of news-list items for a channel from the QQ news API.

    Args:
        type: channel id (the API's ``sub_srv_id``). NOTE: shadows the builtin
            ``type``; kept unchanged for keyword-argument caller compatibility.
        page: zero-based page index; request offset is ``page * sep``.
        sep: page size (the API's ``limit``).

    Returns:
        The ``data.list`` payload from the JSON response, or ``None`` when the
        request/parse fails (the error is logged, not raised).
    """
    url = 'https://i.news.qq.com/trpc.qqnews_web.kv_srv.kv_srv_http_proxy/list?' \
          'sub_srv_id=%s&srv_id=pc&offset=%d&limit=%d&strategy=1' % (type, page * sep, sep)
    # Pre-encoded ext param: {"pool":["high","top"],"is_filter":10,"check_type":true}
    url = url + '&ext={%22pool%22:[%22high%22,%22top%22],%22is_filter%22:10,%22check_type%22:true}'
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            try:
                resp_text = await resp.text()
                resp_dic = json.loads(resp_text)
                return resp_dic['data']['list']
            except Exception:
                # Was `except BaseException`, which also swallowed
                # asyncio.CancelledError and KeyboardInterrupt — never do that
                # in coroutine code. Best-effort logging is preserved.
                print('type: %s page: %d error!!' % (type, page))


async def get_news_content(model):
    """Fetch and return the prettified HTML body of a news article.

    Args:
        model: dict with at least a ``'url'`` key. When the fetched page is a
            redirect stub, ``model['url']`` is rewritten in place to the
            canonical rain-article URL before retrying.

    Returns:
        Prettified HTML of the article's ``div.LEFT`` container, or ``None``
        on failure (the error is logged, not raised).
    """
    url = model['url']
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            try:
                resp_text = await resp.text()
                # Redirect stub page (marker text "文章页跳转"): rebuild the
                # canonical URL from the article id and retry via recursion.
                # Fixed `is not -1` — identity comparison against an int
                # literal is unreliable and a SyntaxWarning on CPython 3.8+.
                if resp_text.find('文章页跳转') != -1:
                    model['url'] = 'https://new.qq.com/rain/a/' + url.split('/')[-1]
                    return await get_news_content(model)

                soup = BeautifulSoup(resp_text, 'html.parser')
                content_div = soup.find('div', class_='LEFT')
                # NOTE(review): the original called
                # content_div.find('div', class_='content') and discarded the
                # result — a no-op, removed. If a narrower container was
                # intended, assign and use the return value instead.

                for img in content_div.find_all('img'):
                    # assumes img src is protocol-relative ("//…") — TODO
                    # confirm; an absolute https URL would get double-prefixed.
                    img['src'] = 'https:' + img['src']
                content = content_div.extract().prettify()

                return content
            except Exception:
                # Narrowed from BaseException so task cancellation propagates.
                print('get news content error: %s' % url)


async def get_write_info(write_id):
    """Fetch writer/media-account info from the QQ mediaInfo API.

    Args:
        write_id: media account id, interpolated into the query string.

    Returns:
        The ``data`` payload of the JSON response, or ``None`` on failure
        (the error is logged, not raised).
    """
    url = 'https://pacaio.match.qq.com/om/mediaInfo?id=%s' % write_id
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            try:
                resp_text = await resp.text()
                resp_dic = json.loads(resp_text)
                return resp_dic['data']
            except Exception:
                # Narrowed from BaseException so asyncio.CancelledError and
                # KeyboardInterrupt are not silently swallowed.
                print('get write info error: %s' % url)

