#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Created on 2017-08-15 09:34:17
# Project: Renren (人人 / rr.tv)
# 36379

### Renren (rr.tv) video-stats crawler


from selenium import webdriver
import time
import requests
import config as cf
from pyspider.libs.base_handler import *
import pymysql
import redis
import db_action
import json


class Handler(BaseHandler):
    """Pyspider handler that logs into rr.tv (Renren) with PhantomJS,
    reuses the browser session cookies against the ``/video/myVideo``
    JSON endpoint, and collects per-video play statistics.
    """
    crawl_config = {
    }

    def __init__(self):
        # Long-lived connections shared across crawl runs:
        # MySQL for account lookup, redis for storage, PhantomJS for login.
        self.conn = pymysql.connect(**cf.mysql_option)
        self.cursor = self.conn.cursor()
        self.r = redis.StrictRedis(**cf.redis)
        self.b = webdriver.PhantomJS()

    def login(self, user_info):
        """Log into rr.tv as ``user_info`` and fetch its video statistics.

        :param user_info: dict with at least ``account_login_name`` and
            ``account_login_password``; the record-building step also reads
            ``account_id``, ``account_name``, ``plat_name`` and ``user_id``.
        :returns: ``True`` on success, ``False`` on any failure.
            (Fix: the original success path fell through and returned
            ``None``, which is falsy and indistinguishable from failure.)
        """
        login_name = user_info['account_login_name']
        password = user_info['account_login_password']
        try:
            # Drive the PhantomJS browser through the login form.
            self.b.get("http://ucenter.rr.tv/page/login")
            time.sleep(2)  # crude wait for page render; no explicit waits here
            self.b.find_element_by_xpath('/html/body/div[1]/a').click()
            time.sleep(0.5)
            login_name_input = self.b.find_element_by_xpath('//*[@id="loginForm"]/div[1]/input')
            password_input = self.b.find_element_by_xpath('//*[@id="loginForm"]/div[2]/input')
            login_name_input.send_keys(login_name)
            password_input.send_keys(password)
            # Trigger the login submit button.
            self.b.find_element_by_xpath('//*[@id="loginForm"]/input').click()
            time.sleep(2)
            # Reuse the authenticated browser session cookies for the JSON API.
            cookie = {c['name']: c['value'] for c in self.b.get_cookies()}
            res = requests.post('http://ucenter.rr.tv/video/myVideo', data={
                "isDel": False,
                "page": 1,
                "rows": 10
            }, cookies=cookie)
            # A successful login yields JSON with 'data'/'recordsTotal';
            # a failed login yields a body that does not parse or lacks
            # those keys (KeyError is now handled here instead of leaking
            # to the broad outer handler).
            try:
                body = json.loads(res.text)  # fix: parse once, not twice
                data = body['data']
                total_num = body['recordsTotal']
                per_page = 10
                for record in data:
                    # Fix: build a fresh dict per record. The original
                    # mutated one shared dict, so re-enabling the store
                    # call below would have aliased every row.
                    time_local = time.localtime(int(record['createTime']))
                    rs = {
                        'third_id': record['id'],
                        'title_name': record['brief'],
                        'url': record['playLink'],
                        'add_time': time.strftime("%Y-%m-%d %H:%M:%S", time_local),
                        'flow_count': record['playCount'],
                        'account_id': user_info['account_id'],
                        'account_name': user_info['account_name'],
                        'plat_name': user_info['plat_name'],
                        'user_id': user_info['user_id'],
                        'plat_id': 3,
                        'tag': 0,
                        'adv_id': 0,
                        'audit_status': 1,
                    }
                    # save the data to redis
                    # db_action.store_info_renren(rs)
                # Pagination is not implemented yet; only page indices are
                # printed for now (first page was requested above).
                for page in range(total_num // per_page):
                    print(page)
                return True
            except (ValueError, KeyError):
                # Response was not the expected JSON — treat as login failure.
                return False
        except Exception as e:
            # Broad catch kept deliberately: any selenium/requests failure
            # means this account could not be crawled this round.
            print(e)
            return False

    @every(minutes=4 * 60)
    def on_start(self):
        """Scheduled entry point: fetch accounts and crawl each one."""
        self.cursor.execute(
            "select * from qp.med_plat_account WHERE plat_id = 8;")
        result = self.cursor.fetchall()
        # TODO: the fetched accounts are not used yet — wire `result`
        # rows into login() instead of the hard-coded account below.
        for user_info in result:
            pass
        # SECURITY/NOTE(review): plaintext credentials hard-coded in source;
        # move them into config or the med_plat_account table.
        user_info = {'account_login_name': '15021559272', 'account_login_password': 'feixiong208'}
        self.login(user_info)

    def page_detail(self, response):
        # Placeholder pyspider callback; not used yet.
        pass










