# -*- coding: utf-8 -*-
# @Time    : 2019/2/21 14:18
# @Author  : yueconger
# @File    : fgg_requests.py
from lxml import etree
import random
import string
import requests
import time
import json
import re


class FggSpider(object):
    """Crawler for the fagougou.com legal-advice chatbot.

    Starting from a chatbot entry page, the spider posts questions into the
    chat session, saves each answer (plus cited laws) to a local file, and
    then follows the "related questions" the service recommends.  A shared
    ``question_set`` of already-asked questions prevents infinite loops in
    the recommendation graph.
    """

    def __init__(self):
        # Entry page: a GET here opens a chat session and returns a queryId.
        self.start_url = 'https://m.fagougou.com/chatbot/59e56ab9f8b6620cfe7c50cb'
        # POST endpoint used to send one question into an open session.
        self.post_url = 'https://m.fagougou.com/query/{queryid}/chat'
        # GET endpoint returning questions related to an answered one.
        self.related_url = 'https://m.fagougou.com/qaRelated/{qid}'
        # Headers for GET requests (captured from a real WeChat browser
        # session; the Cookie carries the authenticated session).
        self.headers = {
            'Host': 'm.fagougou.com',
            'Connection': 'keep-alive',
            'Cache-Control': 'public, max-age=31536',
            'Accept': 'application/json, text/plain, */*',
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36 MicroMessenger/6.5.2.501 NetType/WIFI WindowsWechat QBCore/3.43.901.400 QQBrowser/9.0.2524.400',
            'X-Custom-Header': 'foobar',
            'Referer': 'https://m.fagougou.com/wx/custom?mkt=eldmh0340fd45',
            'Accept-Encoding': 'gzip, deflate',
            'Accept-Language': 'zh-CN,zh;q=0.8,en-us;q=0.6,en;q=0.5;q=0.4',
            'Cookie': 'user_info=s%3A-KQKKvqDI6iS37cnKmt-KKwel4IP35UA.G%2B9QPeDNMXMRgmtQchDdJ%2BJwt4snPTpUhl4XME3tl0Y; _ga=GA1.2.1840922608.1551059337; _gid=GA1.2.1001787979.1551059337; _gat=1',
            'If-None-Match': 'W/"247-uY4oIVPUvng/siadWA9J5szUGUs"'
        }
        # Headers for POST requests.  The captured session also sent a
        # hard-coded 'Content-Length: 27', which is wrong for any payload of
        # a different size; requests computes the correct length itself, so
        # that header is deliberately omitted here.
        self.post_headers = {
            'Host': 'm.fagougou.com',
            'Connection': 'keep-alive',
            'Accept': 'application/json, text/plain, */*',
            'Cache-Control': 'public, max-age=31536',
            'Content-Type': 'application/json;charset=UTF-8',
            'Origin': 'https://m.fagougou.com',
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36 MicroMessenger/6.5.2.501 NetType/WIFI WindowsWechat QBCore/3.43.901.400 QQBrowser/9.0.2524.400',
            'X-Custom-Header': 'foobar',
            'Referer': 'https://m.fagougou.com/wx/custom?mkt=eldmh0340fd45',
            'Accept-Encoding': 'gzip, deflate',
            'Cookie': 'user_info=s%3A-KQKKvqDI6iS37cnKmt-KKwel4IP35UA.G%2B9QPeDNMXMRgmtQchDdJ%2BJwt4snPTpUhl4XME3tl0Y; _ga=GA1.2.1840922608.1551059337; _gid=GA1.2.1001787979.1551059337; _gat=1',
            'Accept-Language': 'zh-CN,zh;q=0.8,en-us;q=0.6,en;q=0.5;q=0.4'
        }

    def parse(self, get_url, question_set):
        """Fetch *get_url* and crawl every question the chatbot recommends.

        :param get_url: chatbot entry URL (normally ``self.start_url``).
        :param question_set: shared set of already-asked questions (mutated).
        """
        self._crawl_recommended(get_url, question_set, use_local_questions=False)

    def parse_first(self, get_url, question_set):
        """Like :meth:`parse`, but seed the crawl with locally stored
        questions (see :meth:`read_local_question`) instead of the
        chatbot's own recommendations.
        """
        self._crawl_recommended(get_url, question_set, use_local_questions=True)

    def _crawl_recommended(self, get_url, question_set, use_local_questions):
        """Shared body of :meth:`parse` / :meth:`parse_first`.

        Opens a chat session via *get_url*, extracts its ``queryId``, then
        posts every new question and hands the reply to :meth:`answer_parse`.
        """
        print('-----')
        response = requests.get(get_url, headers=self.headers)
        html = response.content.decode()
        # A plain substring test replaces the original re.findall() — we only
        # need to know whether any recommendations came back at all.
        if 'recommendQA' not in html:
            return
        body_dict = json.loads(html)
        print('+++')
        recommendQA_list = body_dict['data']['botSays'][0]['recommendQA']
        queryid = body_dict['data']['queryId']
        print(recommendQA_list)
        if use_local_questions:
            questions = self.read_local_question()
            skip_msg = '关键词问题已存在,跳过'
        else:
            questions = recommendQA_list
            skip_msg = '问题已存在,跳过'
        for question in questions:
            if question in question_set:
                print(skip_msg)
                continue
            question_set.add(question)
            print('当前问题:', question)
            payload = json.dumps({'message': question})
            url = self.post_url.format(queryid=queryid)
            body = self.post_info(payload, url)
            self.answer_parse(body, question, queryid, question_set)

    def post_info(self, formdata, url):
        """POST one question payload to *url* and return the raw reply body.

        Sleeps 3 s after every request to avoid hammering the service.

        :param formdata: JSON-encoded ``{"message": ...}`` string.
        :param url: fully formatted ``self.post_url``.
        :return: decoded response body (JSON text).
        """
        response = requests.post(
            url=url,
            headers=self.post_headers,
            data=formdata
        )
        body = response.content.decode()
        time.sleep(3)
        return body

    def read_local_question(self):
        """Return the locally stored seed questions, one per line, stripped."""
        with open(r'question/刑事问题描述.txt', 'r', encoding='utf-8') as f:
            return [line.strip() for line in f]

    def answer_parse(self, body, question, queryid, question_set):
        """Parse one chat reply, persist the answer, crawl related questions.

        :param body: raw JSON string returned by :meth:`post_info`.
        :param question: the question that produced this reply.
        :param queryid: chat-session id, needed for follow-up POSTs.
        :param question_set: shared set of already-asked questions (mutated).
        """
        content = json.loads(body)
        print('content', content)
        botSays = content['data']['botSays'][-1]
        print(botSays)
        if botSays['type'] != 'text':
            return
        answer = botSays['content']['body']
        # Fix: the original tested "'laws' in body" against the whole JSON
        # *string*, which can match anywhere in the payload and then crash on
        # a missing key in this particular message; .get() checks the right dict.
        laws = botSays['content'].get('laws', [])
        try:
            qid = botSays['content']['queryRecordItemId']
        except KeyError:
            # Reply window carried no record id — nothing we can persist.
            print('窗口出错')
            return
        print('qid:', qid)
        item = {
            'question': question,
            'answer': answer,
            'laws': laws,
            'qid': qid,
        }
        # Fix: serialize with json.dumps so the output file contains real
        # JSON; str(dict) writes Python repr with single quotes.
        self.save_json(json.dumps(item, ensure_ascii=False))
        url = self.related_url.format(qid=qid)
        self.related_parse(url, queryid, question_set)

    def related_parse(self, url, queryid, question_set):
        """Fetch questions related to an answered one and crawl each new one.

        :param url: fully formatted ``self.related_url``.
        :param queryid: chat-session id for the follow-up POSTs.
        :param question_set: shared set of already-asked questions (mutated).
        """
        print('======')
        response = requests.get(url=url, headers=self.headers)
        html = response.content.decode()
        body_dict = json.loads(html)
        print('body:', body_dict)
        try:
            recommendQA_list = body_dict['data']['botSays'][0]['content']['option']
        except (KeyError, IndexError, TypeError) as e:
            # Payload lacks the expected structure — log and skip.
            print('内容不全', e)
            return
        print(recommendQA_list)
        for question in recommendQA_list:
            if question in question_set:
                print('问题已存在,跳过')
                continue
            print('相似问题:', question)
            question_set.add(question)
            payload = json.dumps({'message': question})
            post_url = self.post_url.format(queryid=queryid)
            print('post_url:', post_url)
            body = self.post_info(payload, post_url)
            self.answer_parse(body, question, queryid, question_set)

    def save_json(self, item):
        """Append one serialized record (plus ',\\n') to the output file."""
        with open('刑事犯罪_4.json', 'a+', encoding='utf-8') as f:
            f.write(item + ',\n')

    def generate_random_int(self, randomlength=16, chars=string.ascii_letters):
        """Return a random string of *randomlength* characters from *chars*.

        Despite the historical name, the result is a string, not an int.
        The original docstring claimed digits were included in the alphabet;
        they were not — *chars* (default: ASCII letters only) now makes the
        alphabet explicit and overridable.
        """
        return ''.join(random.choice(chars) for _ in range(randomlength))

    def question_queue(self):
        # TODO: not implemented — placeholder for a persistent question queue.
        pass


if __name__ == '__main__':
    # Seed the de-duplication set with questions crawled in earlier runs,
    # then start the crawl from the chatbot entry page.
    fgg_spider = FggSpider()
    with open('question/已爬数据.txt', 'r', encoding='utf-8') as f:
        question_set = {line.strip() for line in f}
    fgg_spider.parse_first(fgg_spider.start_url, question_set)
