from pyspark.sql.functions import *
from pyspark.sql.session import SparkSession
from pyspark.sql import Window
# - **需求描述**：验证系统记录的常住地与实际活动区域的一致性
# - **分析逻辑**：
#   - 计算用户在常住地区县的停留点占比
#   - 识别异常用户(常住地停留占比<30%)
# - **输出要求**：
#   - 常住地准确率统计表
#   - 异常用户名单及活动热区

# Spark entry point; Hive support is required so the dws.* tables resolve.
session_builder = SparkSession.builder \
    .enableHiveSupport() \
    .appName('ads_Residence_Activity_Consistency_Check_d_i')
spark = session_builder.getOrCreate()

# Daily grid-level stay records.
# NOTE(review): schema assumed from usage below (mdn, grid_id, resi_grid_id,
# in_time, prov_name, city_name) — confirm against the dws table DDL.
grid_stay = spark.table('dws.dws_grid_stay_d_i')

# Per-user totals over the WHOLE partition: Window.partitionBy without an
# orderBy uses an unbounded frame, so sum/count cover every stay record of
# the user. (The original added orderBy('in_time'), which makes the default
# frame run from the first row to the CURRENT row — a running total — and
# the later dropDuplicates(['mdn']) then kept an arbitrary intermediate
# value, making avg_resi nondeterministic.)
user_window = Window.partitionBy('mdn')

# flag = 1 when the stay grid equals the registered-residence grid.
grid_stay = grid_stay \
    .withColumn('flag', when(col('resi_grid_id') == col('grid_id'), 1).otherwise(0)) \
    .withColumn('sum_resi_num', sum('flag').over(user_window)) \
    .withColumn('total_num', count('mdn').over(user_window))

# avg_resi = share of the user's stay points inside the residence grid.
# The ratio filter alone suffices: avg_resi > 0.7 already implies
# sum_resi_num > 0, so the original pre-filter was redundant.
# NOTE(review): the requirement header defines ABNORMAL users as ratio < 30%,
# yet this job keeps ratio > 0.7 (the "consistent" users) — confirm which
# population the downstream ads table is meant to contain.
grid_stay = grid_stay \
    .withColumn('avg_resi', col('sum_resi_num') / col('total_num')) \
    .where(col('avg_resi') > 0.7) \
    .dropDuplicates(['mdn']) \
    .select('mdn', 'avg_resi', 'prov_name', 'city_name')

# Persist the per-user consistency result to the MySQL `ads` schema,
# replacing any previous run's output (overwrite mode).
# NOTE(review): credentials are hard-coded — move user/password into a
# secrets store or job configuration before shipping.
jdbc_options = {
    "url": "jdbc:mysql://master:3306/ads",
    "driver": "com.mysql.cj.jdbc.Driver",
    "dbtable": "ads_Residence_Activity_Consistency_Check_d_i",
    "user": "root",
    "password": "123456",
}
grid_stay.write \
    .format("jdbc") \
    .options(**jdbc_options) \
    .mode("overwrite") \
    .save()
