from pyspark.sql.session import SparkSession

# Create the Spark session; Hive support is enabled so the `dwd` catalog
# table referenced in the query below is resolvable.
spark = SparkSession.builder.enableHiveSupport().appName('ads_avg_user_stop_frequency_d_i').getOrCreate()

# Metric: average number of stay-point records per user (mdn) per day.
# The inner query counts stay-points per (day, mdn); the outer query
# averages those per-user counts for each day.
_AVG_STOP_FREQUENCY_SQL = '''
select day ,avg(num) as avg_num
from
(select substring(start_time,1,8) as day ,mdn,count(1) as num
from
dwd.dwd_staypoint_d_i_msk
group by substring(start_time,1,8),mdn
) as a
group by day
'''

result_df = spark.sql(_AVG_STOP_FREQUENCY_SQL)

# Persist the daily averages to MySQL over JDBC.
# NOTE(review): credentials are hard-coded in source — move them to a
# secrets store or job configuration before production use.
# NOTE(review): mode("overwrite") drops and recreates the target table on
# every run; if the table's schema/indexes must survive, consider
# .option("truncate", "true") — confirm with the DBA.
result_df.write \
    .format("jdbc") \
    .option("url", "jdbc:mysql://master:3306") \
    .option("driver", "com.mysql.cj.jdbc.Driver") \
    .option("dbtable", "ads.ads_avg_user_stop_frequency_d_i") \
    .option("user", "root") \
    .option("password", "123456") \
    .mode("overwrite") \
    .save()

# Fix: release the session's cluster resources once the job completes;
# the original script never stopped the SparkSession.
spark.stop()