import requests
import gevent
from gevent import monkey
monkey.patch_all()
import Setting
import json
from lxml import etree
import time
import chardet


def deal_one(data):
    """Parse a JSONP-style response body into a dict.

    The feed returns something like ``var x = {count: 10, list: [...]};``.
    The slice keeps everything from the first ``{`` up to (but not
    including) the final character (the trailing ``;``).

    NOTE(security): this evaluates remote content with ``eval``.  The
    custom globals object below only neutralises bare identifiers; it is
    NOT a sandbox, so a malicious payload could still execute code.
    """
    payload = data[data.index("{"):-1]
    # Globals namespace whose __getitem__ echoes the looked-up name, so
    # unquoted JS object keys/identifiers evaluate to their own string.
    namespace = type('Dummy', (dict,), {'__getitem__': lambda self, name: name})()
    return eval(payload, namespace)

def get_news_table(item, num=600):
    """
    Fetch the rolling-news index for one date (one task).

    item -- task tuple; item[1] is the date string used in the feed URL.
    num  -- page size requested from the feed; if the feed reports more
            entries than ``num``, re-fetch once with the full count.

    Returns (True, list_of_info_dicts) on success, or
    (False, error_message) on failure.  On failure the raw response (if
    any was received) is dumped to ``error/<date>.html`` for inspection.
    """
    date = item[1]
    url = "http://roll.news.sina.com.cn/interface/rollnews_ch_out_interface.php?col=52&spec=&type=&date=%s&ch=03&k=&offset_page=0&offset_num=0&num=%d&asc=&page=1"%(date, num)

    headers = {"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
            "Accept-Encoding": "gzip, deflate",
            "Accept-Language": "zh-CN,zh;q=0.9",
            "Cache-Control": "max-age=0",
            "Connection": "keep-alive",
            "Host": "roll.news.sina.com.cn",
            "Upgrade-Insecure-Requests":"1",
            "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.181 Safari/537.36"}
    response = None          # stays None if every request attempt raised
    parsed = False
    last_error = 'no request attempted'
    for attempt in range(3):
        try:
            response = requests.get(url, headers=headers).content
            # Feed is normally GBK; fall back to UTF-8 on decode failure.
            try:
                parsed = deal_one(response.decode('gbk'))
            except Exception as decode_err:
                parsed = deal_one(response.decode('utf-8'))
                print('decode: ', decode_err)
        except Exception as err:
            # BUG FIX: the original bound the message to ``e`` inside the
            # except block; Python 3 deletes that name when the handler
            # exits, so the real error text was never returned.
            last_error = 'number of time is %d, error: %s'%(attempt, str(err))
            print(last_error)
            parsed = False
        else: break
    if not parsed:
        # BUG FIX: ``response`` was unbound (NameError) when every
        # requests.get call itself raised; only dump it if we got one.
        if response is not None:
            with open('error/%s.html'%date, 'wb') as f:
                f.write(response)
        return False, last_error
    if parsed['count'] <= num:
        call_back = list()
        for entry in parsed['list']:
            info = dict()
            info['kind'] = entry['channel']['title']
            info['title'] = entry['title']
            info['url'] = entry['url']
            info['date'] = date
            info['time'] = str(entry['time'])
            info['text'] = "NULL"
            call_back.append(info)
        print(date, '完成')
        return True, call_back
    # BUG FIX: the original recursed with the bare date string, so the
    # recursive call's ``item[1]`` picked out the date's second character.
    # Pass a tuple so ``item[1]`` is still the date.
    return get_news_table((None, date), num=parsed['count'])

 
def get_news_detail(item):
    """Fetch one article page and extract its title and body text.

    item -- task tuple; item[2] is the article URL.

    Returns (True, {'text': ..., 'title': ...}) on success, or
    (False, reason) when the page cannot be fetched/parsed, is a
    not-found page, or yields no body text.
    """
    url = item[2]
    headers = {"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.181 Safari/537.36"}
    html = False
    for attempt in range(3):
        try:
            response = requests.get(url=url, headers=headers)
            html = etree.HTML(response.content)
        except Exception as err:
            print(2, str(err))
            html = False
        else: break
    if html is False:
        return False, 'e'
    # Try the known title locations, most specific first.
    title= html.xpath('//*[@id="artibodyTitle"]/text()')
    if not title:
        title= html.xpath('//*[@class="main-title"]/text()')
    if not title:
        title= html.xpath('//title/text()')
    if title:
        title = title[0]
        if '页面没有找到' in title:
            return False, '页面没有找到'
    # Body paragraphs, minus editor bylines and promo/app-download blocks.
    texts= html.xpath('//*[@id="artibody"]//p[not(@class="article-editor" or @target="_blank")]//text()')
    minue= html.xpath('//*[@id="artibody"]/div[@data-sudaclick="suda_1028_guba"]//p//text()')
    minue.extend(html.xpath('//*[@id="artibody"]/div[@class="finance_app_zqtg"]//p//text()'))
    for fragment in minue:
        if fragment in texts:
            texts.remove(fragment)
    if not texts:
        return False, 'NULL'
    # BUG FIX: the original built ``text`` with '+= t + "\n"' and then
    # evaluated ``text[:-1]`` without assigning it, so the intended strip
    # of the trailing newline never happened.  join() gives the intended
    # result directly.
    text = '\n'.join(texts)
    callback = dict()
    callback['text'] = text
    # BUG FIX: when every xpath missed, the original stored an empty list
    # as the title; only record a real (non-empty) title.
    if title:
        callback['title'] = title
    return True, callback

def run(num):
    """Worker loop: pull tasks from the task server, execute them, and
    post the result back until the server reports no work left.

    num -- worker id, used only for log prefixes.
    """
    print(num, '号爬虫程序启动')
    # Both task kinds follow the same fetch -> callback shape; only the
    # handler differs, so dispatch through a table.
    handlers = {'date_log': get_news_table, 'news_log': get_news_detail}
    while True:
        task = json.loads(requests.get(Setting.sina_get_task).text)
        if task[0] == 'None':
            print("没任务了")
            return
        print(num, '号爬虫程序 ', time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), ' 开始任务:', task) 
        handler = handlers.get(task[0])
        if handler is not None:
            status, data = handler(task[1])
            callback = {
                'status': 'end' if status else 'error',
                'type': task[0],
                'item': task[1],
                'data': data,
            }
            requests.post(Setting.sina_call_back, data={'result': json.dumps(callback)})
        print(num, '号爬虫程序 ', time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), ' 结束:', task) 
        

def main():
    """Spawn the crawler greenlets and block until all of them finish."""
    workers = [gevent.spawn(run, worker_id) for worker_id in range(1)]
    gevent.joinall(workers)


if __name__ == '__main__':
    main()
