#!/usr/bin/env python3
# -*- encoding: utf-8 -*-
'''
@File    :   offlineStat.py
@Time    :   2020/09/02 16:34:48
@Author  :   陈培杞
@Version :   1.0
@Doc     :   离线构造统计特征
'''

from pyspark import sql
from pyspark.sql import functions as F
from pyspark.sql.functions import size,col,explode,array,concat_ws,udf,instr
from pyspark.sql.functions import UserDefinedFunction,monotonically_increasing_id

from myinit import spark
from util import splitToken
from logger import worklog
from config import config
from db  import into_MySQL


def stat():
    """Build per-recipient statistical profile features and persist them to Hive.

    Reads the pre-registered Spark temp view ``data`` and computes, per
    recipient (``rcpt``): mail received/sent counts, per-tag counts
    (tag1..tag7), the most frequent sender region, attachment count, and
    per-usertag counts. The joined result is written to the Hive table
    ``profile.stat`` (created on first run). Side effects only; returns None.
    """
    worklog.debug("开始进行 统计画像")
    worklog.debug("\t 正在计算 统计画像")

    # Explode the semicolon-separated rcpt field into one row per recipient,
    # dropping empty recipients.
    # NOTE(review): this 'stat' view is never referenced by the queries below —
    # they all read from 'data'. Confirm whether they were meant to use 'stat'.
    spark.sql("""select * 
                from (select explode(split(rcpt,';')) as rcpt , sender, tag, region, domainrep,authuser,attach, usertag from data) 
                where rcpt <> '' 
                """).createOrReplaceTempView('stat')
 
    # Received-mail count per recipient; RAND() row number serves as a
    # synthetic surrogate id for the final table.
    spark.sql("select ROW_NUMBER() OVER(ORDER BY RAND()) as id, rcpt, count(rcpt) as rcptCnt from data group by rcpt ").createOrReplaceTempView('rcptCntStat')

    # Sent-mail count: left join so recipients who never send get 0.
    spark.sql("""select id, rcpt, rcptCnt, if(sendCnt is null,0,sendCnt) as sendCnt
                from rcptCntStat left join 
                    (select sender, count(sender) as sendCnt from data group by sender )  as a
                    on rcpt=a.sender
            """).createOrReplaceTempView('sendCntStat')

    # Per-tag mail counts (tag values 1..7), pivoted into one row per rcpt.
    # BUGFIX: tag6 was previously computed as sum(tag7) (copy-paste error).
    spark.sql("""
            select rcpt , 
                   sum(tag1) as tag1, 
                   sum(tag2) as tag2, 
                   sum(tag3) as tag3,
                   sum(tag4) as tag4,
                   sum(tag5) as tag5,
                   sum(tag6) as tag6,
                   sum(tag7) as tag7
            from 
                (select rcpt, 
                        if(tag=1,count(tag),0) as tag1, 
                        if(tag=2,count(tag),0) as tag2,
                        if(tag=3,count(tag),0) as tag3,
                        if(tag=4,count(tag),0) as tag4,
                        if(tag=5,count(tag),0) as tag5,
                        if(tag=6,count(tag),0) as tag6,
                        if(tag=7,count(tag),0) as tag7
                from data group by rcpt,tag) as a 
            group by rcpt
            """).createOrReplaceTempView('tagStat')

    # Most frequent region per recipient.
    # BUGFIX: the old query paired max(count_region) with first(region), which
    # returns an arbitrary region, not the one with the max count. A window
    # ranked by count picks it deterministically.
    spark.sql("""select rcpt, region
                 from (select rcpt, region,
                              row_number() over (partition by rcpt order by count(region) desc) as rn
                       from data group by rcpt, region) as a
                 where rn = 1
              """).createOrReplaceTempView('regionStat')

    # Count of mails carrying an attachment, per recipient.
    spark.sql("""select rcpt, count(rcpt) as attachCnt from data where attach <> '' group by rcpt""").createOrReplaceTempView('attachStat')

    # Per-usertag counts (only usertag values 1 and 3 are profiled).
    spark.sql("""
        select rcpt , 
                sum(usertag1) as usertag1, 
                sum(usertag3) as usertag3
        from 
            (select rcpt, 
                    if(usertag=1,count(usertag),0) as usertag1, 
                    if(usertag=3,count(usertag),0) as usertag3
            from data group by rcpt,usertag) as a 
        group by rcpt
        """).createOrReplaceTempView('usertagStat')
    
    # Left-join all feature views back onto the send/receive counts; missing
    # attachment counts default to 0 (other features stay NULL when absent).
    statData = spark.sql("""
        select id, a.rcpt, rcptCnt, sendCnt,tag1,tag2,tag3,tag4,tag5,tag6,tag7,region,if(attachCnt is null, 0, attachCnt) as attachCnt ,usertag1,usertag3
        from (((sendCntStat as a
            left join tagStat on a.rcpt=tagStat.rcpt)
            left join regionStat on a.rcpt=regionStat.rcpt)
            left join attachStat on a.rcpt=attachStat.rcpt)
            left join usertagStat on a.rcpt=usertagStat.rcpt
        """)

    print('stat  profile into hive')
    #into_MySQL(statData, 'stat_profile')
    statData.createOrReplaceTempView('statData')
    #statData.write.saveAsTable('profile.stat', mode='overwrite')
    # CREATE ... LIKE on first run fixes the table schema; then fully rewrite it.
    spark.sql("CREATE TABLE IF NOT EXISTS profile.stat LIKE statData")
    spark.sql("insert overwrite table profile.stat select * from statData")
    
    # Drop every intermediate view ('stat' and 'statData' were previously leaked).
    spark.catalog.dropTempView("stat")
    spark.catalog.dropTempView("rcptCntStat")
    spark.catalog.dropTempView("sendCntStat")
    spark.catalog.dropTempView("tagStat")
    spark.catalog.dropTempView("regionStat")
    spark.catalog.dropTempView("attachStat")
    spark.catalog.dropTempView("usertagStat")
    spark.catalog.dropTempView("statData")

    worklog.debug("\t 结束计算 统计画像")

