#  网站统计大数据
#  2017-12-27 20:00启动
#  参与人：壮志凌云

from __future__ import print_function
from pyspark.sql import SparkSession
from pyspark.sql.functions import sum,row_number,desc,count,countDistinct,from_unixtime,max
from pyspark.sql import Window
import json

if __name__ == "__main__":
    # Build (or reuse) the Spark session for this traffic-analysis job.
    spark = (
        SparkSession.builder
        .appName("网站统计-流量分析")
        .getOrCreate()
    )

    # Parse the raw log ourselves with Python's json module (one JSON record
    # per line) instead of spark.read.json, then promote the resulting RDD of
    # dicts to a DataFrame. Print the inferred schema for a sanity check.
    raw_lines = spark.sparkContext.textFile("main_pv.json")
    WebData = raw_lines.map(json.loads).toDF()
    WebData.printSchema()
###################################################
#  实时访客
###################################################
#  缺失用户实时在线相关信息
#  或JS采集时有相关规定，可以通过相同用户，不同时间下采集仍然在线来判断
#  暂时搁置

###################################################
#  趋势分析
###################################################
#  趋势分析 #1 今日流量
    OverViewDay = WebData.groupBy(from_unixtime("vt", format='yyyy-MM-dd').alias("日期"),
                                  from_unixtime("vt", format='HH').alias("小时"),
                                  ).agg(
        count("_id").alias("浏览量(PV)"),
        countDistinct("u").alias("访客数(UV)"),
        countDistinct("ip").alias("IP数")
    )

#  为数据添加日期、小时
    WebDataWithDH = WebData.withColumn('day',from_unixtime("vt", format='yyyy-MM-dd')).withColumn('hour',from_unixtime("vt", format='HH'))

#  DataCube Create
    WebDataFlowCube = WebDataWithDH.select(
        WebDataWithDH["day"],
        WebDataWithDH["hour"],
        WebDataWithDH["u"],
        WebDataWithDH["ip"],
        WebDataWithDH["ref"],
        WebDataWithDH["mi"]
    ).cube(
        WebDataWithDH["day"],
        WebDataWithDH["hour"],
        WebDataWithDH["u"],
        WebDataWithDH["ip"],
        WebDataWithDH["ref"],
        WebDataWithDH["mi"]
    ).count(
    )

#  网页获取参数
    AnalysisCondition = ["day = '2017-12-26'", "hour = ''", "u = ''", "ip = ''", "ref = ''", "mi = ''"]

    WebDataFlow = WebDataFlowCube\
        .filter(AnalysisCondition[0])\
        .groupBy("day", "hour")\
        .agg(
        max("count").alias("浏览量(PV)"),
        countDistinct("u").alias("访客数(UV)"),
        countDistinct("ip").alias("IP数")
    )

    WebDataFlow.show()
    OverViewDay.show()




    spark.stop()
