# -*- coding: utf-8 -*-
import scrapy
import json

from scrapy.loader import ItemLoader
from scrapy.http.request import Request
from urllib import parse
from ..items import juejinJsonItem, ArticleItemLoader
from ..entity.Account import Account
from ..utils.juejinUtil import get_infomation
from ..utils.mysqlUtil import MysqlUtil


class JuejinSpider(scrapy.Spider):
    """Spider that logs in to juejin.im first, then scrapes the account's
    collected-article information and persists each entry into MySQL."""

    name = 'juejin'
    allowed_domains = ['juejin.im']
    start_urls = ['http://juejin.im/']
    UserAgent = "Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.186 Mobile Safari/537.36"
    header = {
        "Host": "juejin.im",
        "Referer": "https://juejin.im/",
        "User-Agent": UserAgent
    }
    # Default is the Account *class* itself; replaced by an Account instance
    # in check_login once the login response arrives.
    login_account = Account

    def parse(self, response):
        """Fetch the account's article info via the API helper and insert
        every entry into the `juejin` MySQL table.

        :param response: Scrapy response for a start URL (unused beyond
            triggering the crawl; data comes from get_infomation).
        """
        result = get_infomation(self.login_account)
        mysqlutil = MysqlUtil()
        # Loop-invariant SQL hoisted out of the loop; values are passed as
        # parameters, so the query stays injection-safe.
        insert_sql = """
                                       insert into juejin(title,summaryInfo,updatedAt,tags,collectionCount,commentsCount,viewsCount,originalUrl,screenshot) 
                                       VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s) 
                                   """
        for entry in result:  # renamed from `list` (shadowed the builtin)
            params = (
                str(entry["title"]), str(entry["summaryInfo"]), entry["updatedAt"],
                tags_list_to_str(entry['tags']),
                entry["collectionCount"], entry["commentsCount"],
                entry["viewsCount"], entry["originalUrl"], entry["screenshot"])
            mysqlutil.execute_with_param(insert_sql=insert_sql, params=params)

    # Override start_requests so the crawl begins with a login POST.
    def start_requests(self):
        """Submit the phone-number login form; check_login continues the crawl."""
        post_url = "https://juejin.im/auth/type/phoneNumber"
        # SECURITY NOTE(review): credentials are hard-coded; they should be
        # moved to Scrapy settings or environment variables.
        post_data = {
            "phoneNumber": "18042261719",
            "password": "jh123456+++"
        }
        return [scrapy.FormRequest(
            url=post_url,
            formdata=post_data,
            headers=self.header,
            callback=self.check_login
        )]

    # Callback that verifies the login succeeded.
    def check_login(self, response):
        """Parse the login response, store the credentials on the spider,
        then schedule the real start URLs.

        :raises KeyError: if the login response lacks token/userId/clientId
            (i.e. the login failed).
        """
        text = json.loads(response.body.decode("utf-8"))
        token = text['token']
        userId = text['userId']
        clientId = str(text['clientId'])
        self.login_account = Account(userId, token, clientId)
        for url in self.start_urls:
            # dont_filter: the start URL may match earlier requests.
            yield scrapy.Request(url, dont_filter=True, headers=self.header)

    def tags_list_to_str(self, tags):
        """Join each tag dict's 'title' into a comma-separated string.

        NOTE(review): duplicate of the module-level helper (which parse
        actually uses); kept only for backward compatibility.
        """
        return ",".join(tag['title'] for tag in tags)


def tags_list_to_str(tags):
    """Join the 'title' value of each tag dict into one comma-separated string.

    :param tags: iterable of dicts, each with a 'title' key.
    :return: titles joined by commas; empty string for an empty iterable
        (same as the original slice-trim behavior).
    """
    # ",".join replaces the quadratic `+=` loop and the trailing-comma trim,
    # and the parameter rename stops shadowing the builtins `list`/`str`.
    return ",".join(tag['title'] for tag in tags)
