package insurance

import insurance.udf.{CalcLxdDxdDxci, udaf_test}
import insurance.util.SparkUtil.{executeSQLFile, executeSQLFileFromHdfs, jdbc_prop, mysql_url}
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

object Main {

    /** Application entry point.
      *
      * Registers the insurance UDAFs on a local Spark session with Hive support,
      * then executes a SQL file read from HDFS.
      *
      * Usage: Main &lt;sqlFilePath&gt; [param1] [param2]
      * Between 1 and 3 arguments are accepted; absent optional arguments are
      * forwarded to `executeSQLFileFromHdfs` as `null`, matching its contract.
      */
    def main(args: Array[String]): Unit = {

        // Validate arity BEFORE building the session, so a usage error never
        // leaves a SparkSession running (the original returned without stop()).
        if (args.length < 1 || args.length > 3) {
            println("参数错误！")
            return
        }

        val spark = SparkSession.builder()
          .appName("SparkSQL")
          .master("local[*]")
          .enableHiveSupport()
          .getOrCreate()
        // Keep the console quiet: only ERROR-level Spark logs.
        spark.sparkContext.setLogLevel("ERROR")

        try {
            // Register the custom aggregate functions referenced by the SQL scripts.
            spark.udf.register("CalcLx", udaf_test)
            spark.udf.register("CalcLxdDxdDxci", CalcLxdDxdDxci)

            // args.lift(i) is None when the index is absent; orNull preserves the
            // original behavior of passing null for missing optional arguments.
            executeSQLFileFromHdfs(spark, args(0), args.lift(1).orNull, args.lift(2).orNull)
        } finally {
            // Always release the session, even when SQL execution throws.
            spark.stop()
        }
    }
}
