import warnings
warnings.filterwarnings('ignore')
from urllib.parse import unquote
import json,time,re
import pymysql

class CrawlDMP:
    """mitmproxy addon that captures Taobao DMP (crowd-portrait) API traffic
    and persists the parsed records into MySQL.

    mitmproxy calls :meth:`response` once per completed HTTP response; URL
    matching decides which parser handles the payload.  All parsing and DB
    work is best-effort: errors are printed and the proxy keeps running.
    """

    # tagGroupId (from the request payload) -> human-readable category label.
    # The labels are runtime data written to the DB, so they stay in Chinese.
    _TAG_GROUP_CATE = {
        "3091": "渠道特征",   # channel features
        "3092": "私域特征",   # private-domain features
        "3090": "品类特征",   # category features
        "3084": "用户特征",   # user features
    }

    def __init__(self):
        # NOTE(review): host/user/password are blank placeholders — fill in
        # real credentials (ideally from env/config) before deployment.
        self.db = pymysql.connect(host="",
                                  port=3306,
                                  database="paid_service",
                                  user="",
                                  password="",
                                  charset='utf8')
        self.cursor = self.db.cursor()

    @staticmethod
    def _now():
        """Current local time formatted for the crawl_time columns."""
        return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())

    def response(self, flow):
        """mitmproxy hook: dispatch each DMP API response to its parser."""
        url = flow.request.url

        if 'https://dmp.taobao.com/api_2/analysis/insight/tag/list' in url:
            # Mapping between a crowd's tag groups and their table ids.
            shop_name = self.get_shop_name(flow)
            crowd_id, tag_group_id, tag_group_text = self.get_crowd_id(flow)
            self.parse_analysis_insight_tag(shop_name, crowd_id, tag_group_id,
                                            tag_group_text, flow.response.text)

        if 'https://dmp.taobao.com/api_2/analysis/tag/' in url:
            # User / category / channel feature breakdowns.
            self.parse_analysis_tag(flow, self.get_shop_name(flow))

        if ('https://dmp.taobao.com/api_2/crowd/' in url
                and url.find("crowd/insight") < 0):
            # Crowd coverage (audience size) data.
            self.parse_crowd(flow.response.text, self.get_shop_name(flow))

        if 'https://dmp.taobao.com/api_2/analysis/insight/coverage' in url:
            # Coverage numbers after the crowd selection is changed.
            self.parse_crowd_change(flow)

    def parse_analysis_insight_tag(self, shop_name, crowd_id, tag_group_id,
                                   tag_group_text, content):
        """Parse an insight/tag/list response and store one row per table id.

        ``content`` is the raw JSON response body; rows are
        (shop_name, crowd_id, tag_group_id, tag_group_text, table_id).
        """
        try:
            data = json.loads(content)
            if data['info']['ok'] is True:
                rows = [(shop_name, crowd_id, tag_group_id, tag_group_text,
                         item['id'])  # table id used to join with detail data
                        for item in data['data']]
                self.insert_mysql_analysis_insight_tag(rows)
        except Exception as e:
            print("parse_analysis_insight_tag error", e)

    def parse_crowd_change(self, flow):
        """Parse an insight/coverage response (fired when the crowd selection
        changes) and store the new coverage figure."""
        try:
            data_res = json.loads(flow.response.text)
            if data_res['info']['ok'] is True:
                coverage = data_res['data']['coverage']  # audience size
                # crowd id/name come from the request payload, not the response.
                data_req = json.loads(flow.request.get_text())
                crowd_id = data_req['crowdId']
                crowd_name = data_req['selectTagOptionSet']['selectTagOptionSet'][0]['name']
                shop_name = self.get_shop_name(flow)
                self.insert_mysql_analysis_crowd(
                    [(shop_name, crowd_id, crowd_name, coverage, self._now())])
        except Exception as e:
            print("parse_crowd_change error", e)

    def get_crowd_id(self, flow):
        """Extract (crowd_id, tag_group_id, tag_group_text) from the request body.

        Returns None (after printing the error) if the payload is missing any
        expected key or the tag group id is unknown.
        """
        try:
            data = json.loads(flow.request.get_text())
            crowd_id = data['crowdId']
            tag_group_id = data['tagGroupIds'][0]
            return crowd_id, tag_group_id, self._TAG_GROUP_CATE[str(tag_group_id)]
        except Exception as e:
            print("get_crowd_id error", e)

    def get_shop_name(self, flow):
        """Resolve the shop name from the request cookies.

        Prefers the url-encoded ``sn`` cookie (``name:...``); falls back to
        ``lgc``, which arrives with percent-encoded backslashes and
        ``\\uXXXX`` escapes.  Returns None (after printing) on failure.
        """
        try:
            cookies = flow.request.cookies
            shop_name = cookies.get('sn')
            if shop_name:
                return unquote(shop_name).split(':')[0]
            # 'lgc' fallback: '%5C' -> '\' so '\uXXXX' sequences decode.
            shop_name = cookies.get('lgc').replace('%5C', '\\')
            return shop_name.encode('utf-8').decode('unicode_escape')
        except Exception as e:
            print("get_shop_name error", e)

    def parse_crowd(self, content, shop_name):
        """Parse a crowd detail response and store its coverage row."""
        try:
            crowd = json.loads(content)['data']['crowd']
            if crowd['coverage']:  # skip the DB round-trip for empty coverage
                self.insert_mysql_analysis_crowd(
                    [(shop_name, crowd['crowdId'], crowd['crowdName'],
                      crowd['coverage'], self._now())])
        except Exception as e:
            print("parse_crowd error", e)

    def parse_analysis_tag(self, flow, shop_name):
        """Parse an analysis/tag response (user/category/channel features)
        and store one row per chart entry."""
        try:
            # mx_id links this detail data back to its table id.
            mx_id_match = re.search("mx_(.*?)&csrf", flow.request.url, re.S)
            mx_id = mx_id_match.group(1) if mx_id_match else ''
            crowd_id = json.loads(flow.request.get_text()).get('crowdId', '')
            data = json.loads(flow.response.text)
            if data['info']['ok'] is True:
                crawl_time = self._now()  # one timestamp for the whole batch
                rows = []
                for item in data['data']['chartDataFull']:
                    rows.append((crowd_id, shop_name,
                                 item['tagId'],            # join key
                                 item['rate'],             # share / proportion
                                 item.get("ctrIndex", 0),
                                 item.get("ppcIndex", ""),
                                 item['optionName'], item['optionId'],
                                 item['tagName'], mx_id, crawl_time))
                self.insert_mysql_analysis_tag(rows)
        except Exception as e:
            # Fixed label (previously said "parse_analysis_content error").
            print("parse_analysis_tag error", e)

    def _insert_many(self, insert_sql, list_data, label):
        """executemany + commit with best-effort error logging under *label*."""
        try:
            self.cursor.executemany(insert_sql, list_data)
            self.db.commit()
        except Exception as e:
            print(label, e)

    def insert_mysql_analysis_crowd(self, list_data):
        """Insert (shop_name, crowd_id, crowd_name, coverage, crawl_time) rows."""
        self._insert_many(
            "insert into dmp_crowd_portrait_analysis_crowd "
            "(shop_name,crowd_id,crowd_name,coverage,crawl_time) "
            "values(%s,%s,%s,%s,%s)",
            list_data, 'insert_mysql_analysis_crowd error')

    def insert_mysql_analysis_tag(self, list_data):
        """Insert feature-breakdown rows produced by :meth:`parse_analysis_tag`."""
        self._insert_many(
            "insert into dmp_crowd_portrait_analysis_tag "
            "(crowd_id,shop_name,tag_id,rate,ctr_index,ppc_index,option_name,"
            "option_id,tag_name,mx_id,crawl_time) "
            "values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)",
            list_data, 'insert_mysql_analysis_tag error')

    def insert_mysql_analysis_insight_tag(self, list_data):
        """Insert tag-group/table-id mapping rows."""
        self._insert_many(
            "insert into dmp_crowd_portrait_analysis_group "
            "(shop_name,crowd_id, tag_group_id, tag_group_text, table_id) "
            "values(%s,%s,%s,%s,%s)",
            # Fixed label (was a copy-paste of insert_mysql_analysis_tag's).
            list_data, 'insert_mysql_analysis_insight_tag error')


# mitmproxy addon registration. Note that instantiating CrawlDMP here opens
# the MySQL connection as soon as mitmproxy loads this script.
addons = [
    CrawlDMP()
]

