# -*- coding: utf-8 -*-
import json
import random
import re
import urllib
import urllib.parse

import pymysql
import scrapy
from scrapy.http import FormRequest, Request

from zhider.items import TopicItem


ua = ["Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 Safari/537.36"]


class DetailtopicSpider(scrapy.Spider):
    """Crawl second-level Zhihu topics under each top-level topic.

    Top-level topic ids are read from the local MySQL ``topic`` table
    (rows with ``pid=0``); for each one the spider pages through the
    ``TopicsPlazzaListV2`` endpoint, yielding one :class:`TopicItem`
    per sub-topic until an empty page is returned.
    """

    name = 'DetailTopic'
    allowed_domains = ['www.zhihu.com']
    start_urls = ['http://www.zhihu.com/']
    # AJAX endpoint returning HTML fragments listing sub-topics.
    url = "https://www.zhihu.com/node/TopicsPlazzaListV2"
    # Anti-scraping token scraped from the /topics page; required in params.
    hash_id = ''
    headers = {"User-Agent": random.choice(ua),
               "Referer": "https://www.zhihu.com"}
    # Maps top-level topic id -> title; filled in parse().
    level1 = {}

    # Patterns hoisted to class level and made raw strings: compiled once
    # instead of on every response.
    _HASH_ID_RE = re.compile(r'hash_id.*?: (.*?)},')
    _TOPIC_ID_RE = re.compile(r'topic_id":(.*?),"offset')
    _OFFSET_RE = re.compile(r'offset":(.*?),"hash_id')
    _SUB_ID_RE = re.compile(r'href=".+topic.+/(\d+)"')
    _TITLE_RE = re.compile(r'<strong>([^<]+)<.+strong>')

    def start_requests(self):
        """Bootstrap: fetch the /topics page so parse() can extract hash_id."""
        url = "https://www.zhihu.com/topics"
        return [Request(url, meta={"cookiejar": True}, headers=self.headers,
                        callback=self.parse)]

    def parse(self, response, offset=0):
        """Extract hash_id and schedule page 0 for every top-level topic."""
        html = response.body.decode("utf-8", "ignore")
        # BUG FIX: findall() returns a list; the old code interpolated the
        # whole list into the JSON params string. Use the first match and
        # strip the surrounding JS quotes the capture group includes.
        matches = self._HASH_ID_RE.findall(html)
        self.hash_id = matches[0].strip('"') if matches else ''
        for row in self.getAllTopic():
            # Row layout per the select below: (id, title, ...).
            topic_id, title = row[0], row[1]
            self.level1[topic_id] = title
            yield self._page_request(topic_id, 0)

    def _page_request(self, topic_id, offset):
        """Build the FormRequest for one page of sub-topics of *topic_id*."""
        params = '{"topic_id":%s,"offset":%s,"hash_id":"%s"}' % (
            topic_id, offset, self.hash_id)
        return FormRequest(self.url, meta={"cookiejar": True},
                           headers=self.headers,
                           formdata={"method": "next", "params": params},
                           callback=self.next)

    def next(self, response):
        """Parse one result page; yield its items, then request the next page.

        The topic id and current offset are recovered from the (url-encoded)
        request body, so no extra state needs to travel in ``meta``.
        """
        # BUG FIX: unquote lives in urllib.parse; urllib.request.unquote only
        # worked by accident when another library imported urllib.request.
        request_content = urllib.parse.unquote(
            response.request.body.decode("utf-8", "ignore"))
        topic_id = self._TOPIC_ID_RE.findall(request_content)[0]
        old_offset = self._OFFSET_RE.findall(request_content)[0]

        result = json.loads(response.body.decode("utf-8", "ignore"))
        msg = result["msg"]
        if not msg:
            # Empty page: this topic is exhausted, stop paginating.
            return

        # SECURITY FIX: the old code eval()'d the raw response body to undo
        # JSON escaping. json.loads above already decoded the escapes, so
        # just join the returned HTML fragments.
        page_html = "".join(msg)
        ids = self._SUB_ID_RE.findall(page_html)
        titles = self._TITLE_RE.findall(page_html)
        # zip() tolerates mismatched match counts instead of raising
        # IndexError as the old index-based loop did.
        for sub_id, sub_title in zip(ids, titles):
            item = TopicItem()
            item["id"] = sub_id
            item["title"] = sub_title
            item["pid"] = topic_id
            yield item

        # Endpoint pages in steps of 20.
        yield self._page_request(topic_id, int(old_offset) + 20)

    def getAllTopic(self):
        """Return all top-level topic rows (pid=0) from the local MySQL DB.

        Best-effort: any database error is printed and an empty tuple is
        returned so the spider simply crawls nothing.
        """
        db = pymysql.connect(host="localhost", user="python", password="python",
                             db="zhider", port=3306, charset="utf8")
        try:
            with db.cursor() as cur:
                cur.execute("select * from topic where pid=0")
                return cur.fetchall()
        except Exception as ex:
            print(ex)
            return ()
        finally:
            db.close()

