import java.text.SimpleDateFormat
import java.util.Date

import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

/**
 * Batch job: builds a (user_id, college_code) mapping from visit logs and the
 * user-activity table, joined against the user-affiliation dimension, and
 * writes the result to HDFS as CSV (overwriting any previous output).
 *
 * Usage: optionally pass the output path as the first program argument;
 * defaults to the original hard-coded HDFS location.
 */
object HdfsSink {
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .appName("sink")
      // String values for both configs, consistently (Spark coerces either way).
      .config("spark.sql.parquet.writeLegacyFormat", "true")
      .config("spark.sql.hive.convertMetastoreParquet", "false")
      .getOrCreate()

    // NOTE(review): removed unused `import spark.implicits._` and the dead
    // Date/SimpleDateFormat/statisDate computation — none of them were
    // referenced anywhere in this method.

    // Select distinct active users (visit log since 2021-01-01, unioned with
    // the daily-active table) and left-join their college code from the
    // affiliation dimension; rows without a college code are dropped.
    val querySql =
      """
            select
                a.user_id
                ,b.college_code
            from
                (
                    select
                        user_id
                    from (
                        SELECT
                          user_id
                        from  ods.ods_matomo_log_visit_fun
                        where   visit_last_action_time>='2021-01-01 00:00:00'
                            and user_id is not null
                            and user_id <>'未登录用户'
                        union all
                        select
                            user_id
                        from
                            dws.dws_event_user_active
                        where dt BETWEEN date_add(current_date,-365) and '2021-01-01' ) a
                    group by user_id
                ) a
            left join
                (
                    select
                        user_id
                        ,college_code
                    from
                        dim.dim_user_affiliated_info
                    where college_code != ''
                    group by user_id,college_code
                ) b on a.user_id = b.user_id
            where b.college_code is not null

        """
    val resultDF: DataFrame = spark.sql(querySql)

    // Output path: first CLI argument if provided, otherwise the original
    // default location (backward compatible).
    val filePath: String =
      if (args.nonEmpty) args(0)
      else "hdfs://172.17.104.238:9000/tmp/demo"

    // Overwrite semantics: previous job output at this path is replaced.
    resultDF.write.mode(SaveMode.Overwrite).format("csv").save(filePath)

    spark.stop()
  }
}
