#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Created on 2017-08-15 09:34:17
# Project: 内涵段子
# 36379

### 内涵段子


from pyspider.libs.base_handler import *
import time
import config as cf
import pymysql
import redis
import db_action


class Handler(BaseHandler):
    """pyspider handler that harvests post data for Neihan Duanzi accounts.

    on_start reads the account list (plat_id = 3) from MySQL, extracts each
    account's numeric user id from its stored data URL, and schedules a crawl
    of the snssdk posts endpoint. page_detail parses each response page,
    persists every post record via db_action, and follows pagination while
    the API reports more data.
    """

    crawl_config = {

    }

    def __init__(self):
        # Let BaseHandler perform its own setup before we attach resources.
        super(Handler, self).__init__()
        conn = pymysql.connect(**cf.mysql_option)
        # NOTE(review): on_start indexes result rows by column name
        # (row["get_data_url"]), which requires a dict-style cursor — confirm
        # cf.mysql_option sets cursorclass=pymysql.cursors.DictCursor.
        cursor = conn.cursor()
        r = redis.StrictRedis(**cf.redis)
        self.conn = conn
        self.cursor = cursor
        self.r = r

    @every(minutes=4 * 60)
    def on_start(self):
        """Schedule the first result page for every plat_id = 3 account."""
        self.cursor.execute(
            "select account_id,account_name,plat_name,user_id,get_data_url from qp.med_plat_account WHERE plat_id = 3;")
        for account in self.cursor.fetchall():
            # get_data_url ends in "...?...=<user_id>": keep the text after
            # the last "=" of the query string.
            user_id = account["get_data_url"].split("?").pop().split("=").pop()
            self.crawl('http://lf.snssdk.com/2/essay/zone/user/posts/',
                       callback=self.page_detail,
                       validate_cert=False,
                       params={
                           "user_id": user_id
                       },
                       save={
                           "user_info": account,
                           "user_id": user_id,
                           "page": 1
                       })

    @config(age=10 * 24 * 60 * 60)
    def page_detail(self, response):
        """Store every post on this page and queue the next page if any.

        Expects the snssdk JSON shape
        {"data": {"data": [...], "has_more": ...}} where each list entry may
        carry a "group" dict with the post fields.
        """
        body = response.json['data']
        data = body['data']
        user_info = response.save['user_info']
        user_id = response.save['user_id']
        page = response.save['page']

        # Empty page: nothing to store and no further pages to request.
        # (Was `len(data) is 0` — identity comparison against an int literal
        # is fragile and a SyntaxWarning on modern CPython.)
        if not data:
            return

        for record in data:
            group = record.get('group')
            if group is None:
                # Entries without a "group" payload carry no post data.
                continue
            time_local = time.localtime(int(group['create_time']))
            rs = {
                'title_name': group['text'],
                'url': group['share_url'],  # was assigned twice in the original
                'add_time': time.strftime("%Y-%m-%d %H:%M:%S", time_local),
                'flow_count': group['play_count'],
                'third_id': group['id'],
                'account_id': user_info['account_id'],
                'account_name': user_info['account_name'],
                'plat_name': user_info['plat_name'],
                'user_id': user_info['user_id'],
                'plat_id': 3,
                'tag': 0,
                'adv_id': 0,
                'audit_status': 1,
            }
            # Persist one post record (implementation lives in db_action).
            db_action.store_info_duanzi(rs)

        # Follow pagination. Truthiness instead of `has_more is True`: some
        # APIs emit 1/0 rather than JSON booleans, which identity would miss.
        if body['has_more']:
            self.crawl('http://lf.snssdk.com/2/essay/zone/user/posts/',
                       callback=self.page_detail,
                       validate_cert=False,
                       params={
                           "user_id": user_id,
                           "page": page + 1
                       },
                       save={
                           "user_info": user_info,
                           "user_id": user_id,
                           "page": page + 1
                       })










