# -*- coding: utf-8 -*-
import random
import string

import scrapy
import time
import json
import re

from FggSpider.items import FggspiderItem


class FggSpider(scrapy.Spider):
    """Spider for the fagougou.com legal Q&A chatbot API.

    Flow:
      1. GET the chatbot bootstrap endpoint (``start_urls[0]``).
      2. Collect the recommended questions (``recommendQA``) it returns and
         POST each one back to the chat endpoint as a new message.
      3. Parse each answer (text body, cited laws, record id) and fetch the
         related-question list for that record.
    """

    name = 'fgg'
    allowed_domains = ['fagougou.com', 'api.fagougou.com']
    start_urls = ['https://api.fagougou.com/v1/chatbot/59e56ab9f8b6620cfe7c50cb?rand=0.06585272562432443']
    # POST endpoint for submitting a chat message; '{}' receives the query id.
    post_url = 'https://api.fagougou.com/v1/query/{}/chat'
    # GET endpoint for the related questions of one answered record.
    related_url = 'https://api.fagougou.com/v1/qaRelated/{qid}?rand={random_str}'
    # Browser-like headers for the initial GET (mimics a CORS request from
    # www.fagougou.com so the API accepts us).
    headers = {
        'accept': '*/*',
        'accept-encoding': 'gzip, deflate, br',
        'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8',
        'access-control-request-headers': 'content-type,xchecksum',
        'access-control-request-method': 'POST',
        'origin': 'https://www.fagougou.com',
        'referer': 'https://www.fagougou.com/pc/?mkt=eldmh0340fd45',
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36'
    }
    # Session cookies captured from a real browser session; _gid is stamped
    # with the current epoch at class-definition time.
    cookies = {
        '_ga': 'GA1.2.669677932.1534124801',
        '_gid': 'GA1.2.633320641.{date}'.format(date=int(time.time())),
        'user_info': 's%3A6nGYUnQkNjmNZMNwPtMNo8k8JNjeTgth.4xTAI0wZOw4%2BvQd5fTgYNAWfTE8JtoC3fEbCN%2BlZvMw;',
        '_gat': '1'
    }
    # Headers for the chat POST. NOTE(review): 'xchecksum' and the embedded
    # cookie string were captured from one browser session and will expire —
    # confirm they are still accepted before relying on this spider.
    post_headers = {
        'accept': 'application/json',
        'accept-encoding': 'gzip, deflate, br',
        'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8',
        'content-length': '47',
        'content-type': 'application/json',
        'cookie': '_ga=GA1.2.669677932.1534124801; _gid=GA1.2.1877746833.1550665701; user_info=s%3A6nGYUnQkNjmNZMNwPtMNo8k8JNjeTgth.4xTAI0wZOw4%2BvQd5fTgYNAWfTE8JtoC3fEbCN%2BlZvMw; _gat=1',
        'origin': 'https://www.fagougou.com',
        'referer': 'https://www.fagougou.com/pc/?mkt=eldmh0340fd45',
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36',
        'xchecksum': '%7Fv~wr*).*%7C~.%2F&t=1550727025918'
    }
    # Default seed question (kept for backward compatibility; parse() builds
    # its own payload per recommended question).
    formdata = {'message': '遇到诈骗应该如何处理？'}

    def start_requests(self):
        """Issue the single bootstrap GET that starts the chat session."""
        yield scrapy.Request(
            url=self.start_urls[0],
            headers=self.headers,
            method='GET',
            callback=self.parse
        )

    def parse(self, response):
        """Extract the first batch of recommended questions and POST each
        one back to the chat endpoint.

        :param response: bootstrap response; JSON body expected to contain
            ``data.botSays[0].recommendQA`` (list of question strings) and
            ``data.queryId``.
        """
        print(response.text)
        body = response.text
        # Cheap substring guard before trusting the JSON structure.
        if 'recommendQA' in body:
            body_dict = json.loads(body)
            recommendQA_list = body_dict['data']['botSays'][0]['recommendQA']
            queryid = body_dict['data']['queryId']
            print(recommendQA_list)
            # Hoisted out of the loop: the chat URL depends only on queryid.
            chat_url = self.post_url.format(queryid)
            for recommendQA in recommendQA_list:
                payload = json.dumps({'message': recommendQA})
                print(chat_url)
                print(payload)
                yield scrapy.Request(
                    url=chat_url,
                    headers=self.post_headers,
                    body=payload,
                    method='POST',
                    dont_filter=True,
                    callback=self.answer_parse
                )

    def answer_parse(self, response):
        """Parse one chat answer: yield an item with the answer text, cited
        laws and record id, and schedule a related-questions request.

        :param response: chat POST response; JSON body expected to contain
            ``data.botSays`` with ``type``/``content`` fields.
        """
        print('-----------')
        item = FggspiderItem()
        body = response.text
        print('--', body)
        content = json.loads(body)
        # NOTE(review): here botSays is indexed as a dict, but parse() treats
        # data.botSays as a list (botSays[0]) — confirm the chat endpoint
        # really returns a single object rather than a list.
        botSays = content['data']['botSays']
        if botSays['type'] == 'text':
            answer = botSays['content']['body']
            laws = botSays['content']['laws']
            qid = botSays['content']['queryRecordItemId']
            random_str = self.generate_random_str()
            yield scrapy.Request(
                url=self.related_url.format(qid=qid, random_str=random_str),
                callback=self.related_parse,
                dont_filter=True
            )
            item['answer'] = answer
            item['laws'] = laws
            item['qid'] = qid
            # BUGFIX: was `return item` — inside a generator that value is
            # attached to StopIteration and Scrapy never collects the item.
            yield item

    def related_parse(self, response):
        """Debug callback for the related-questions response (prints only)."""
        print('---')
        print(response.text)

    def generate_random_str(self, randomlength=16):
        """Return a fake ``Math.random()``-style string: '0.' followed by
        *randomlength* random decimal digits (digits only — no letters).

        :param randomlength: number of digits after the '0.' prefix.
        """
        digits = ''.join(random.choice(string.digits) for _ in range(randomlength))
        return '0.' + digits