package statement

import org.apache.spark.sql.{SaveMode, SparkSession}

object StationDayIncomeSql {

    /**
     * Aggregates station-level daily income (dws.dws_station_income_day)
     * into the org-level ADS table (ads.ads_org_income_day), one partition
     * per business date.
     *
     * Usage: StationDayIncomeSql [do_date]
     *
     * @param args optional first element is the partition date (yyyy-MM-dd);
     *             defaults to yesterday when not supplied.
     */
    def main(args: Array[String]): Unit = {
        // BUG FIX: the original query was a plain (non-interpolated) string,
        // so the literal text "$do_date" was used as the partition value and
        // filter — the WHERE clause matched nothing. Interpolate a real date.
        val doDate = args.headOption.getOrElse(
            java.time.LocalDate.now().minusDays(1).toString)

        val spark = SparkSession
            .builder()
            .appName("StationDayIncome")
            .master("local[*]")
            .enableHiveSupport()
            // Write parquet in the legacy (Hive-readable) format.
            .config("spark.sql.parquet.writeLegacyFormat", true)
            .getOrCreate()

        // Ensure the session is closed even if the SQL fails.
        try {
            // INSERT OVERWRITE returns an empty DataFrame; show() is only a
            // cheap "job finished" signal in the driver log.
            val df = spark.sql(
                s"""
                  |insert overwrite table ads.ads_org_income_day
                  |partition(dt='$doDate')
                  |select
                  |sum(income) as income,
                  |org_id,
                  |org_name
                  |from dws.dws_station_income_day
                  |where dt='$doDate'
                  |group by org_id, org_name
                  |""".stripMargin)
            df.show()
        } finally {
            spark.close()
        }
    }

}
