#!/usr/bin/env python3
# -*- coding: utf-8 -*-


import json, pymysql, time, re
from multiprocessing import Process
from urllib import request, parse


class Cnbeta(object):

    """Crawler for cnbeta.com.

    One producer process fetches directory-listing pages and stores the
    entries in ``cnbeta_post_dir``; two consumer processes claim rows
    (via the ``is_pull`` flag: 0 = new, 2 = claimed, 1 = done), download
    each article body and store it in ``cnbeta_post_info``.
    """

    def __init__(self, count):
        # Number of directory-listing pages to crawl.
        self.count = count


    def conn_db(self, config=None):
        """Open and return a new MySQL connection.

        ``config`` is kept for backward compatibility but is unused;
        connection settings are hard-coded.  (The previous default was a
        mutable ``{}``, which is shared between calls — ``None`` is the
        safe idiom.)
        """
        conn = pymysql.connect(host='127.0.0.1', port=3306, user='root',
                               passwd='', db='datas', charset='utf8')
        return conn


    def http(self, url, page=-1):
        """Fetch ``url`` and return the response body decoded as UTF-8.

        When ``page`` is not -1, the directory-listing query string
        (type/page/_csrf/_) is appended to ``url``.  On any error the
        literal string ``'{}'`` is returned so JSON callers can still
        parse the result.
        """
        headers = {
                 'User-Agent': r'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) '
                               r'Chrome/45.0.2454.85 Safari/537.36 115Browser/6.0.3',
                 'Referer': r'http://www.cnbeta.com/',
                 'Connection': 'keep-alive',
        }

        if page != -1:
            args = {
                'type': 'all',
                'page': page,
                '_csrf': 'mOwbdY4fjd5TO-Orm1QBjdBu0XRsCB2OANuhWn_hNsSI-772KflDngkSLaNbyDcwgoACSfC-ZJ5EcrqNBY3DVA%3D%3D',
                '_': '1506068551012'
            }
            url = '%s?%s' % (url, parse.urlencode(args))

        try:
            req = request.Request(url, headers=headers)
            return request.urlopen(req).read().decode('utf-8')
        except Exception as e:
            print('http error:%s \n url:%s' % (e, url))
            return '{}'


    def store(self, data):
        """Insert directory entries into ``cnbeta_post_dir``.

        ``data`` is a sequence of dicts as returned by the listing API.
        The Unix timestamp ``updatetime`` is converted to a local
        ``YYYY-MM-DD HH:MM:SS`` string.  Returns True on success,
        False on any failure.
        """
        conn = self.conn_db()
        try:
            with conn.cursor() as cursor:
                sql = 'INSERT INTO cnbeta_post_dir (\
                    sid, catid, topic, aid, user_id, title, keywords, hometext, comments, \
                    counter, mview, collectnum, good, bad, score, ratings, score_story, ratings_story,\
                    pollid, queueid, inputtime, updatetime, thumb, source, sourceid, url_show)\
                    VALUES \
                    (%s ,%s ,%s ,%s ,%s ,%s ,%s ,%s ,%s ,%s ,%s ,%s ,%s ,%s ,%s ,%s ,%s ,%s ,%s ,%s ,%s ,%s ,%s ,%s ,%s ,%s)'

                rows = []
                for val in data:
                    updatetime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(val['updatetime'])))
                    rows.append((
                        val['sid'], val['catid'], val['topic'], val['aid'], val['user_id'],
                        val['title'], val['keywords'], val['hometext'], val['comments'],
                        val['counter'], val['mview'], val['collectnum'], val['good'],
                        val['bad'], val['score'], val['ratings'], val['score_story'],
                        val['ratings_story'], val['pollid'], val['queueid'],
                        val['inputtime'], updatetime, val['thumb'], val['source'],
                        val['sourceid'], val['url_show']
                    ))
                # One round-trip batch instead of a per-row execute().
                cursor.executemany(sql, rows)
            conn.commit()
        except Exception as e:
            print('store: %s' % e)
            return False
        finally:
            # Previously the connection leaked on every call.
            conn.close()

        return True


    # Claim a batch of links for one worker.
    def find_tag(self):
        """Claim up to 20 unpulled rows and return (sid, url_show) tuples.

        The SELECT ... FOR UPDATE row lock plus the matching UPDATE
        (marking rows ``is_pull=2``) ensures that concurrent worker
        processes never claim the same rows.  Returns an empty tuple
        on failure.
        """
        conn = self.conn_db()
        res = ()
        try:
            with conn.cursor() as cursor:
                # Row lock so concurrent workers don't fetch duplicates.
                sql = 'SELECT sid, url_show FROM cnbeta_post_dir WHERE is_pull=0 ORDER BY id LIMIT 20 FOR UPDATE'
                cursor.execute(sql)
                res = cursor.fetchall()
                sql = 'UPDATE cnbeta_post_dir SET is_pull=2 WHERE is_pull=0 ORDER BY id LIMIT 20'
                cursor.execute(sql)
                conn.commit()
        except Exception as e:
            print('find_tag: %s' % e)
            res = ()
        finally:
            conn.close()

        return res


    # Fetch the article detail pages and persist them.
    def save_info(self, datas):
        """Download article bodies for claimed rows and store them.

        ``datas`` is a sequence of (sid, url_show) tuples.  Each page is
        scraped for the ``article-content`` div; rows with no match are
        stored with empty text.  Successfully processed rows are marked
        ``is_pull=1``.  Returns True on success, False on failure.
        """
        rows = []
        ids = []
        # Hoist the regex out of the loop; compiled once per call.
        pattern = re.compile(r'<div class="article-content" id="artibody">(.*?)</div>', re.I | re.S)
        for sid, url in datas:
            html = self.http(url)
            matchObj = pattern.search(html)
            if matchObj is not None:
                content = matchObj.group(1).strip()
            else:
                print('没有匹配到数据:%s' % url)
                content = ''

            rows.append((sid, content))
            ids.append(sid)

        # Nothing claimed: the old code would have built "WHERE sid in ()"
        # and crashed; succeed trivially instead.
        if not rows:
            return True

        conn = self.conn_db()
        try:
            with conn.cursor() as cursor:
                # Parameterized queries: the old code interpolated escaped
                # content directly into the SQL string, which risked
                # injection and — because conn.escape() already quotes —
                # stored spurious surrounding quote characters.
                cursor.executemany('INSERT INTO cnbeta_post_info(sid, text) VALUES (%s, %s)', rows)
                placeholders = ','.join(['%s'] * len(ids))
                cursor.execute('UPDATE cnbeta_post_dir SET is_pull=1 WHERE sid in (%s)' % placeholders, ids)
                conn.commit()
        except Exception as e:
            print('save_info error: %s' % e)
            return False
        finally:
            conn.close()

        return True


    # Producer process: crawl the directory listing.
    def run_dir(self):
        """Crawl ``self.count`` listing pages, storing each batch."""
        url = 'http://www.cnbeta.com/home/more'
        for x in range(1, self.count + 1):
            txt = self.http(url, x)
            data = json.loads(txt)
            # http() returns '{}' on failure, and a valid response might
            # still lack the expected keys — don't KeyError on either.
            items = data.get('result', {}).get('list') if data else None
            if items:
                self.store(items)
            time.sleep(1)  # throttle: be polite to the server
        print('run_dir done!')


    # Consumer process: pull article bodies.
    def run_info(self):
        """Repeatedly claim rows and fetch their article bodies.

        Gives up after several consecutive empty polls (3 s apart).
        """
        wait = 3
        while True:
            datas = self.find_tag()
            if len(datas) < 1:
                if wait > 0:
                    wait = wait - 1
                    time.sleep(3)
                    continue
                else:
                    return
            # Fix: reset the backoff budget once work is found again,
            # otherwise three early empty polls permanently exhaust it
            # even while the producer is still inserting rows.
            wait = 3
            self.save_info(datas)


    # Spawn one producer and two consumers and wait for them.
    def run(self):
        """Run the crawl end-to-end and print the elapsed seconds."""
        start = time.time()
        p = Process(target=self.run_dir)
        p.start()
        t1 = Process(target=self.run_info)
        t1.start()
        t2 = Process(target=self.run_info)
        t2.start()
        p.join()
        t1.join()
        t2.join()
        end = time.time()
        print('all Done!!! use sec: %f' % (end - start))



if __name__ == '__main__':
    # Crawl 10 directory-listing pages and their article bodies.
    crawler = Cnbeta(10)
    crawler.run()
