import re
import utils
import insert_mysql

# 流量主表格 数据入库
def Flow_master_data_table(self,date_time, income):
    """Insert one day's traffic-owner (流量主) row unless it already exists.

    NOTE(review): this is a top-level ``def`` that takes ``self`` and reads
    ``self.account`` — it looks like a method detached from its class; confirm
    how callers bind ``self``.
    """
    try:
        self.date_time, self.income = date_time, income
        # Account profile columns pulled from the shared account dict.
        self.Main_company = self.account['gs']
        self.Person_in_charge = self.account['charge']
        self.Namee = self.account['nickname']
        self.Wechat_number = self.account['wx_id']
        self.Original_ID = self.account['gh_id']
        self.Fan_source = self.account['fans_src']
        self.typee = self.account['type']
        self.Sex = self.account['gender']
        # ' _ ' is the placeholder used for "no income data".
        self.Account_income = self.income if self.income else ' _ '
        # Only insert when no row exists yet for this (date, gh_id).
        effect_row = insert_mysql.select_Flow_master_data_table(date_time,self.Original_ID)
        if effect_row != 1:
            insert_mysql.insert_Flow_master_data_table(self.date_time,self.Main_company,self.Person_in_charge,self.Namee,self.Wechat_number,self.Original_ID,self.Fan_source,self.typee,self.Sex,self.Account_income)
    except Exception as e:
        # Previously the exception was swallowed silently; log it like the
        # sibling insert helpers do.
        print(e)
        print("流量主表格 数据入库失败")

# 广告主表格 数据入库
def Advertiser_data_form(data_date,account,Accumulative_attention,Service_provider, Amount_of_money):
    """Insert one day's advertiser (广告主) row; existing rows are left alone."""
    try:
        # Unpack the account profile in one shot.
        (Main_company, Person_in_charge, Namee, Wechat_number,
         Original_ID, Fan_source, typee, Sex) = (
            account['gs'], account['charge'], account['nickname'],
            account['wx_id'], account['gh_id'], account['fans_src'],
            account['type'], account['gender'])
        # Insert only when no row exists yet for this (date, gh_id).
        already_stored = insert_mysql.select_Advertiser_data_table(data_date,Original_ID)
        if already_stored != 1:
            insert_mysql.insert_Advertiser_data_table(data_date, Person_in_charge, Namee, Wechat_number, Original_ID, Fan_source, typee, Sex, Accumulative_attention, Service_provider, Amount_of_money, Main_company)
    except Exception as e:
        print(e)
        print("广告主表格 数据入库 失败")

# 违规表格 数据入库
def Violation_data_table(data_date, account, Type_of_violation, Irregularities, Violation_treatment, Whether_or_not_to_ban, Accumulative_attention):
    """Insert one violation (违规) record; existing (date, gh_id) rows are skipped."""
    try:
        # Account profile columns (the old x = x self-assignments were no-ops
        # and have been removed).
        Main_company = account['gs']
        Person_in_charge = account['charge']
        Namee = account['nickname']
        Wechat_number = account['wx_id']
        Original_ID = account['gh_id']
        Fan_source = account['fans_src']
        typee = account['type']
        Sex = account['gender']
        # Only insert when no row exists yet for this (date, gh_id).
        effect_row = insert_mysql.select_violation_data_table_mysql(data_date,Original_ID)
        if effect_row != 1:
            insert_mysql.insert_violation_data_table_mysql(data_date,Person_in_charge,Namee,Wechat_number,Original_ID,Fan_source,typee,Sex,Accumulative_attention,Type_of_violation,Irregularities,Violation_treatment,Whether_or_not_to_ban,Main_company)
    except Exception as e:
        # Previously swallowed silently; log like the sibling insert helpers.
        print(e)
        print("违规表格 数据入库失败")

# 数据表格 数据入库
def Data_form_table(data_date,account,Sex_rate,Accumulative_attention,Newly_added,Abolish_concern,Net_growth,Headline_readings,info,income_info):
    """Compute derived reading / monetisation metrics for one account-day and
    insert the row into the data-form table (skipped if a row already exists).

    :param info: 12-item stats list laid out as produced by get_app_msg_stat().
    :param income_info: [Exposure, Clicks, Total_income] as produced by
        get_publisher_stat(); values may be numbers or the ' _ ' / '-' placeholders.
    """
    try:
        date_time = data_date
        # Account profile columns.
        Main_company = account['gs']
        Person_in_charge = account['charge']
        Namee = account['nickname']
        Wechat_number = account['wx_id']
        Original_ID = account['gh_id']
        Fan_source = account['fans_src']
        Typee = account['type']
        Sex = account['gender']
        # Unpack the reading statistics (layout matches get_app_msg_stat()).
        Comprehensive_reading_population = info[0]
        Comprehensive_reading_frequency = info[1]
        Conversational_reading = info[2]
        Conversational_reading_frequency = info[3]
        friends_reading_number = info[4]
        Friends_circle_reading_frequency = info[5]
        Forwarding_number = info[6]
        Number_of_forwarding = info[7]
        Collection_number = info[8]
        Collection_times = info[9]
        Historical_news_readings = info[10]
        Real_reading = info[11]
        Exposure = income_info[0]
        Clicks = income_info[1]
        Total_income = income_info[2]

        # "当天未发" marks a day with no publication; otherwise compute the
        # headline read rate.  NOTE(review): Accumulative_attention == 0 would
        # raise here and abort the insert via the outer except — confirm
        # whether that is intended.
        if Headline_readings == "当天未发":
            Headline_reading_rate = 0
        else:
            Headline_reading_rate = ('{0}%'.format(round(Headline_readings / Accumulative_attention*100,2)))
        # Ad click-through metrics; guard against missing/zero exposure.
        if Exposure == 0 or Exposure == ' _ ':
            Clicking_rate = 0
            Exposure_unit_price = 0
        else:
            Exposure_unit_price = round(Total_income / Exposure,4)
            Clicking_rate = ('{0}%'.format(round(Clicks / Exposure*100,2)))

        if Clicks == 0 or Clicks == ' _ ' or Total_income == ' _ ' or Total_income == 0.0:
            Click_unit_price = 0
        else:
            Click_unit_price = round(Total_income / Clicks,4)

        # When income data is absent (' _ ') every derived ratio is zeroed.
        if Total_income == " _ ":
            Vermicelli_unit_price = 0
            Reading_unit_price = 0
            Comprehensive_reading_rate = 0
            True_reading_rate = 0
            Two_transmission_ratio = 0
            Per_capita_opening_times = 0
            Forwarding_rate = 0
            The_exposure_rate = 0
        else:
            if Accumulative_attention == 0:
                Comprehensive_reading_rate = 0
                True_reading_rate = 0
                Vermicelli_unit_price = 0
            else:
                Comprehensive_reading_rate = ('{0}%'.format(round(Comprehensive_reading_frequency / Accumulative_attention*100,2)))
                True_reading_rate = ('{0}%'.format(round((Conversational_reading_frequency + Friends_circle_reading_frequency + Number_of_forwarding + Historical_news_readings) / Accumulative_attention*100,2)))
                Vermicelli_unit_price = round(Total_income / Accumulative_attention,4)

            # Secondary-spread ratio: (moments + forwards) / (dialog + history).
            if Conversational_reading_frequency + Historical_news_readings == 0:
                Two_transmission_ratio = 0
            else:
                Two_transmission_ratio = round((Friends_circle_reading_frequency + Number_of_forwarding) / (Conversational_reading_frequency + Historical_news_readings),4)

            # Average opens per reader.
            if Conversational_reading + Historical_news_readings == 0:
                Per_capita_opening_times = 0
            else:
                Per_capita_opening_times = round((Conversational_reading_frequency + Historical_news_readings) / (Conversational_reading + Historical_news_readings),4)

            if Conversational_reading_frequency + Friends_circle_reading_frequency +Number_of_forwarding + Historical_news_readings == 0:
                Forwarding_rate = 0
            else:
                Forwarding_rate = round(Number_of_forwarding / (Conversational_reading_frequency + Friends_circle_reading_frequency + Number_of_forwarding + Historical_news_readings),4)

            if Comprehensive_reading_frequency == 0:
                The_exposure_rate = 0
                Reading_unit_price = 0
            else:
                The_exposure_rate = ('{0}%'.format(round(Exposure / Comprehensive_reading_frequency*100,2)))
                Reading_unit_price = round(Total_income / Comprehensive_reading_frequency,4)

            # NOTE(review): Sex_rate is only formatted as a percentage when
            # income data exists — confirm the raw ratio is acceptable otherwise.
            if Sex_rate != '':
                Sex_rate = ('{0}%'.format(round(Sex_rate * 100, 2)))

        # NOTE(review): "打开率" is computed as a SUM of reading uv and fan
        # count, which looks odd for a rate — confirm the intended formula.
        Opening_rate = Conversational_reading + Accumulative_attention

        # Only insert when no row exists yet for this (date, gh_id).
        # (The previous block of x = x self-assignments was a no-op and has
        # been removed.)
        effect_row = insert_mysql.select_Data_form_table(data_date,Original_ID)
        if effect_row != 1:
            insert_mysql.insert_dataForm_mysql(date_time,Person_in_charge,Namee,Wechat_number,Original_ID,Fan_source,Typee,Sex,Sex_rate,Accumulative_attention,Newly_added,Abolish_concern,Net_growth,Headline_readings,Comprehensive_reading_population,Comprehensive_reading_frequency,Conversational_reading,Conversational_reading_frequency,friends_reading_number,Friends_circle_reading_frequency,Forwarding_number,Number_of_forwarding,Collection_number,Collection_times,Historical_news_readings,Real_reading,Headline_reading_rate,Comprehensive_reading_rate,True_reading_rate,Two_transmission_ratio,Per_capita_opening_times,Forwarding_rate,Opening_rate,Exposure,Clicks,Total_income,The_exposure_rate,Clicking_rate,Exposure_unit_price,Click_unit_price,Reading_unit_price,Vermicelli_unit_price,Main_company)
    except Exception as e:
        print(e)
        print('数据表格 数据入库')

# 12小时 阅读对比 数据入库
def insert_reading_comparison_12_hour_table(diff,data_date,account,Accumulative_attention):
    """Insert the 12-hour reading-comparison row for one account-day unless
    it already exists.

    :param diff: day offset handed to utils.get_day_before_today() to select
        which publication date's articles are compared.
    """
    try:
        date_time = data_date
        # Account profile columns.
        Main_company = account['gs']
        Person_in_charge = account['charge']
        Namee = account['nickname']
        Wechat_number = account['wx_id']
        Original_ID = account['gh_id']
        Fan_source = account['fans_src']
        typee = account['type']
        Sex = account['gender']
        i = ReportGenerator.get_msg_info(data_date,Original_ID)
        msg_all = get_app_msg_detail_stat(i,utils.get_day_before_today(diff, data_date))
        # First sent message padded to 17 columns:
        # [send_time, title1, reads1, ..., title8, reads8].
        # Replaces the old write-into-locals() trick, whose effect on function
        # locals is undefined per the Python docs.
        headlines = [''] * 17
        try:
            for idx, value in enumerate(msg_all[0][:17]):
                headlines[idx] = value
        except Exception as e:
            print(e)
        (Picture_text_transmission_time,
         first_headlines, first_Reading,
         sec_headlines, sec_Reading,
         third_headlines, third_Reading,
         four_headlines, four_Reading,
         five_headlines, five_Reading,
         six_headlines, six_Reading,
         seven_headlines, seven_Reading,
         eight_headlines, eight_Reading) = headlines
        # Only insert when no row exists yet for this (date, gh_id).
        effect_row = insert_mysql.select_reading_comparison_12_hour(data_date,Original_ID)
        if effect_row != 1:
            insert_mysql.insert_reading_comparison_12_hour_table(date_time , Main_company , Person_in_charge , Namee , Wechat_number , Original_ID , Fan_source , typee , Sex , Accumulative_attention , Picture_text_transmission_time , first_headlines , first_Reading , sec_headlines , sec_Reading ,third_headlines , third_Reading , four_headlines , four_Reading , five_headlines , five_Reading , six_headlines , six_Reading , seven_headlines , seven_Reading , eight_headlines , eight_Reading)
    except Exception as e:
        print(e)
        print('12小时 阅读对比 数据入库')

# 36小时 阅读对比 数据入库
def insert_reading_comparison_36_hour_table(diff,data_date,account,Accumulative_attention):
    """Insert the 36-hour reading-comparison row for one account-day unless
    it already exists.

    :param diff: day offset handed to utils.get_day_before_today() to select
        which publication date's articles are compared.
    """
    try:
        date_time = data_date
        # Account profile columns.
        Main_company = account['gs']
        Person_in_charge = account['charge']
        Namee = account['nickname']
        Wechat_number = account['wx_id']
        Original_ID = account['gh_id']
        Fan_source = account['fans_src']
        typee = account['type']
        Sex = account['gender']
        i = ReportGenerator.get_msg_info(data_date,Original_ID)
        msg_all = get_app_msg_detail_stat(i,utils.get_day_before_today(diff, data_date))
        # First sent message padded to 17 columns:
        # [send_time, title1, reads1, ..., title8, reads8].
        # Replaces the old write-into-locals() trick, whose effect on function
        # locals is undefined per the Python docs.
        headlines = [''] * 17
        try:
            for idx, value in enumerate(msg_all[0][:17]):
                headlines[idx] = value
        except Exception as e:
            print(e)
        (Picture_text_transmission_time,
         first_headlines, first_Reading,
         sec_headlines, sec_Reading,
         third_headlines, third_Reading,
         four_headlines, four_Reading,
         five_headlines, five_Reading,
         six_headlines, six_Reading,
         seven_headlines, seven_Reading,
         eight_headlines, eight_Reading) = headlines
        # Only insert when no row exists yet for this (date, gh_id).
        effect_row = insert_mysql.select_reading_comparison_36_hour(data_date,Original_ID)
        if effect_row != 1:
            insert_mysql.insert_reading_comparison_36_hour_table(date_time , Main_company , Person_in_charge , Namee , Wechat_number , Original_ID , Fan_source , typee , Sex , Accumulative_attention , Picture_text_transmission_time , first_headlines , first_Reading , sec_headlines , sec_Reading ,third_headlines , third_Reading , four_headlines , four_Reading , five_headlines , five_Reading , six_headlines , six_Reading , seven_headlines , seven_Reading , eight_headlines , eight_Reading)
    except Exception as e:
        print(e)
        print('36小时 阅读对比 数据入库')


def get_violation_info(item):
    """Parse a WeChat violation notice into (cause, offending title, outcome).

    :param item: dict with 'Title' and HTML 'Content' of the notice.
    :return: (violation_cause, violation_title, violation_result) — any of the
        three may be '' when the corresponding pattern is absent.
    """
    title = item['Title']
    # Strip HTML tags before matching.
    violation_content = re.sub('<.*?>', '', item['Content'])
    violation_cause = ''
    violation_title = ''
    violation_result = ''
    if re.search('违规处理', title):
        # BUGFIX: the old pattern '涉[嫌及]|存在(.+?)，' made the alternation
        # span the whole expression, so a '涉嫌/涉及' match left group(1) as
        # None.  A non-capturing group scopes the alternation to the prefix
        # only (matching the '涉[嫌及](.+?)，' pattern used below).
        violation_cause = re.search(
            '(?:涉[嫌及]|存在)(.+?)，', violation_content).group(1)
        violation_title = re.search(
            '违规(.*?)消息“(.*?)”', violation_content).group(2)
    elif re.search('功能屏蔽', title):
        if re.search('涉[嫌及](.+?)，', violation_content):
            violation_cause = re.search(
                '涉[嫌及](.+?)，', violation_content).group(1)
        elif re.search('您的帐号经查(.+?)，', violation_content):
            violation_cause = re.search(
                '您的帐号经查(.+?)，', violation_content).group(1)
        if re.search('违规内容：(.+?)如有异议', violation_content):
            violation_title = re.sub('&nbsp;', '', re.search(
                '违规内容：(.+?)如有异议', violation_content).group(1))
        elif re.search('流量主通过不正当方式制造虚假或无效曝光量、点击量', violation_content):
            violation_title = re.search(
                '流量主通过不正当方式制造虚假或无效曝光量、点击量', violation_content).group(0)
        # Group the blocked features by their unblock date, one line per date.
        features_by_date = {}
        features = re.findall('已屏蔽([^功能]+?)功能至([^,，]+)[，,]', violation_content)
        for feature in features:
            feature_name = feature[0]
            feature_date = feature[1]
            if feature_date in features_by_date:
                features_by_date[feature_date].append(feature_name)
            else:
                features_by_date[feature_date] = [feature_name]
        for feature_date, forbidden_features in features_by_date.items():
            violation_result += "{}: {}\n".format(
                feature_date, ','.join(forbidden_features))
    elif re.search('粉丝删除', title):
        violation_cause = re.search('所禁止的(.+?)，', violation_content).group(1)
        violation_title = re.sub('&nbsp;', '', re.search(
            '发起的"(.*?)"', violation_content).group(1))
        violation_result = re.search('已删除.+?粉丝数。', violation_content).group(0)
    elif re.search('责令处理', title):
        violation_title = re.search('图文消息“(.*?)”', violation_content).group(1)
        violation_result = re.search(
            '已被互联网信息内容主管部门责令删除', violation_content).group(0)
    return violation_cause, violation_title, violation_result


def get_violation_list(data_date,gh_id):
    """Load the account's violation-list JSON dump for the given date."""
    violation_file = 'violation-list.json'
    return utils.get_json_file(gh_id, data_date, violation_file, data_type=list)


def get_user_gender_ratio(genders, total, needed):
    """Return user_count/total for the gender attribute named *needed*.

    Returns '' when the inputs are missing/zero, the attribute is absent,
    or the payload has an unexpected shape.
    """
    if not needed or not total:
        return ''
    target = needed.strip()
    if not genders:
        return ''
    try:
        ratios = (entry['user_count'] / total
                  for entry in genders['genders']
                  if entry['attr_name'] == target)
        return next(ratios, '')
    except (KeyError, ValueError):
        # Unexpected payload shape — dump it for debugging and degrade to ''.
        print(genders)
        return ''


def get_app_msg_topmost_read(i, ma):
    """Return the read count of the day's first article.

    Falls back to the status strings '当天未发' (nothing sent that day) or
    '非图文类型' (sent, but no readable article stats).
    """
    # An explicitly-empty 'total' list or an empty message list both mean
    # nothing was published that day.
    if ma.get('total') == [] or not len(i):
        return '当天未发'
    articles = i[0].get('appmsg_info')
    if articles and len(articles):
        extra = articles[0].get('other_info')
        if 'read_num' in extra:
            return extra['read_num']
    return '非图文类型'


def get_app_msg_stat(m):
    """Aggregate one day's article reading stats into a flat 12-item list.

    Layout: [all_uv, all_pv, dialog_uv, dialog_pv, moment_uv, moment_pv,
    share_user, share_cnt, fav_user, fav_cnt, hist, real_pv].
    """
    # (uv, pv) buckets keyed by the user_source code: 99999999=all,
    # 0=dialog, 2=moments.  Each bucket holds the LAST seen value (not a sum).
    buckets = {99999999: [0, 0], 0: [0, 0], 2: [0, 0]}
    fav_user = 0
    fav_cnt = 0
    for entry in m.get('item', []):
        fav_user += entry['add_to_fav_user']
        fav_cnt += entry['add_to_fav_count']
        bucket = buckets.get(entry['user_source'])
        if bucket is not None:
            bucket[0] = entry['int_page_read_user']
            bucket[1] = entry['int_page_read_count']
    shares = m.get('share', [])
    share_user = sum(s['share_user'] for s in shares)
    share_cnt = sum(s['share_count'] for s in shares)
    all_uv, all_pv = buckets[99999999]
    dialog_uv, dialog_pv = buckets[0]
    moment_uv, moment_pv = buckets[2]
    hist = 0  # TODO: historical-message readings are not collected yet
    return [all_uv, all_pv, dialog_uv, dialog_pv, moment_uv, moment_pv,
            share_user, share_cnt, fav_user, fav_cnt, hist,
            dialog_pv + moment_pv + share_cnt + hist]


def get_publisher_stat(p):
    """Return [exposure, clicks, income_in_yuan]; '-' placeholders when no data."""
    if p == "EMPTY":
        return ['-'] * 3
    views = p.get('view_count', 0)
    clicks = p.get('click_count', 0)
    income = p.get('cost', 0) / 100.0  # cost is stored in cents
    return [views, clicks, income]


def get_app_msg_detail_stat(messages, date_to_find):
    """Collect per-message reading rows for the given date.

    Each row is [send_datetime, title1, read_num1, title2, read_num2, ...].
    Service accounts may send several messages per day, hence a list of rows.
    Returns [['']] when nothing matched.
    """
    rows = []
    for msg in messages:
        # Only image-text messages (type 9) carry reading statistics.
        if msg['type'] != 9 or utils.get_date_from_timestamp(msg['sent_info']['time']) != date_to_find:
            continue
        row = [utils.get_datetime_from_timestamp(msg['sent_info']['time'])]
        for article in msg['appmsg_info']:
            row += [article['title'], article['other_info']['read_num']]
        rows.append(row)
    return rows or [['']]


class ReportGenerator:
    """Loads the per-account JSON dumps (basic info, counters, ad/publisher
    data, violations) needed to build a daily report."""

    def __init__(self, library, date_to_report):
        """
        :param utils.AccountLibrary library:
        :param date_to_report: the date whose report is being generated
        """
        self.date = date_to_report
        self.library = library
        self.gh_id = None

    def _stat_date(self):
        # basic/user-counter stats for the current day are not final yet,
        # so fall back to yesterday when reporting "today".
        if self.date == utils.get_today():
            return utils.get_yesterday()
        return self.date

    def get_basic(self, gh_id):
        return utils.get_json_file(gh_id, self._stat_date(), 'basic.json')

    def get_user_counter(self, gh_id):
        return utils.get_json_file(gh_id, self._stat_date(), 'user-counter.json')

    def get_ad_info(self, gh_id):
        # Ad info is always read from today's dump regardless of self.date.
        return utils.get_json_file(gh_id, utils.get_today(), 'app-ad-info.json')

    def get_user_attr(self, gh_id):
        return utils.get_json_file(gh_id, self.date, 'user-attr.json')

    def get_msg_counter(self, gh_id):
        return utils.get_json_file(gh_id, self.date, 'app-msg-counter.json')

    def get_msg_all(self, gh_id):
        return utils.get_json_file(gh_id, self.date, 'app-msg-all.json')

    def get_publisher_counter(self, gh_id):
        return utils.get_json_file(gh_id, self.date, 'publisher.json')

    def get_violation_list(self, gh_id):
        return utils.get_json_file(gh_id, self.date, 'violation-list.json', data_type=list)

    @staticmethod
    def get_msg_info(data_date, gh_id):
        # Was missing @staticmethod: it only worked because callers access it
        # through the class (ReportGenerator.get_msg_info(date, gh_id)).
        return utils.get_json_file(gh_id, data_date, 'app-msg-info.json', data_type=list)

def get_all_publisher(gh_id, date_list):
    """Collect daily publisher income (in yuan) for each date in *date_list*.

    Returns (incomes, dates): income is a string amount, or '-' when the
    day's dump is missing; dates mirrors date_list.
    """
    incomes = []
    dates = []
    for day in date_list:
        payload = utils.get_json_file(gh_id, day, 'publisher_by_day.json')
        # income is stored in cents; '-' marks a missing dump.
        incomes.append(str(payload.get('income') / 100) if payload else '-')
        dates.append(day)
    return incomes, dates