package cn.pengpeng

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types._
import org.apache.spark.sql._

/**
  * E-commerce transaction analysis driver.
  *
  * Loads three comma-separated text files (calendar dimension, order headers,
  * order details), registers them as Spark SQL temporary views
  * (`v_sdate`, `v_stock`, `v_detail`), and runs the analyses in `MyCalculate`.
  */
object DianShang {

    def main(args: Array[String]): Unit = {
        // Create the SparkSession (local mode with 4 worker threads).
        val spark: SparkSession = SparkSession.builder().appName("DianShang").master("local[4]").getOrCreate()

        // Load the raw comma-separated source files.
        // NOTE(review): hard-coded Windows paths — consider taking them from `args`.
        val sdateLines: RDD[String] = spark.sparkContext.textFile("D:\\小牛学堂\\40期\\hive案例二\\电商交易数据分析\\数据\\sdate.txt")
        val stockLines: RDD[String] = spark.sparkContext.textFile("D:\\小牛学堂\\40期\\hive案例二\\电商交易数据分析\\数据\\stock.txt")
        val detailLines: RDD[String] = spark.sparkContext.textFile("D:\\小牛学堂\\40期\\hive案例二\\电商交易数据分析\\数据\\stockdetail.txt")

        // Build the three DataFrames and register them as temporary views
        // so downstream SQL can reference them by name.
        val sdateDF: DataFrame = buildSdateDF(spark, sdateLines)
        sdateDF.createTempView("v_sdate")

        val stockDF: DataFrame = buildStockDF(spark, stockLines)
        stockDF.createTempView("v_stock")

        val detailDF: DataFrame = buildDetailDF(spark, detailLines)
        detailDF.createTempView("v_detail")

        // Total sales amount per year across all orders.
//        MyCalculate.eachYearAllAmount(spark, sdateDF, stockDF, detailDF)

        // Sales amount of the largest single order per year.
        MyCalculate.eachYearMaxAmount(spark, sdateDF, stockDF, detailDF)

        // Top-10 quarters by total sales amount.
//        MyCalculate.eachQuotUpperTen(spark, sdateDF, stockDF, detailDF)

        // List orders (order numbers) whose sales amount exceeds 100000.

        // Release Spark resources.
        spark.stop()
    }

    /**
      * Parse the calendar-dimension file into a DataFrame.
      * Each line has 10 comma-separated columns, all kept as strings.
      */
    private def buildSdateDF(spark: SparkSession, lines: RDD[String]): DataFrame = {
        val rows: RDD[Row] = lines.map { line =>
            val f = line.split(",")
            Row(f(0), f(1), f(2), f(3), f(4), f(5), f(6), f(7), f(8), f(9))
        }
        val schema = StructType(List(
            StructField("dateID", StringType, true),
            StructField("theyearmonth", StringType, true),
            StructField("theyear", StringType, true),
            StructField("themonth", StringType, true),
            StructField("thedate", StringType, true),
            StructField("theweek", StringType, true),
            StructField("theweeks", StringType, true),
            StructField("thequot", StringType, true),
            StructField("thetenday", StringType, true),
            StructField("thehalfmonth", StringType, true)
        ))
        spark.createDataFrame(rows, schema)
    }

    /**
      * Parse the order-header file into a DataFrame.
      * Columns: order number, location id, date id (all strings).
      */
    private def buildStockDF(spark: SparkSession, lines: RDD[String]): DataFrame = {
        val rows: RDD[Row] = lines.map { line =>
            val f = line.split(",")
            Row(f(0), f(1), f(2))
        }
        val schema = StructType(List(
            StructField("ordernumber", StringType, true),
            StructField("locationid", StringType, true),
            StructField("dateID", StringType, true)
        ))
        spark.createDataFrame(rows, schema)
    }

    /**
      * Parse the order-detail file into a DataFrame.
      * Columns: order number, row number (int), item id, quantity (int),
      * price (float), amount (decimal(19,2)).
      */
    private def buildDetailDF(spark: SparkSession, lines: RDD[String]): DataFrame = {
        val rows: RDD[Row] = lines.map { line =>
            val f = line.split(",")
            // Use scala.math.BigDecimal rather than Spark's internal `Decimal`:
            // BigDecimal is the documented external type for DecimalType columns.
            Row(f(0), f(1).toInt, f(2), f(3).toInt, f(4).toFloat, BigDecimal(f(5)))
        }
        val schema = StructType(List(
            StructField("ordernumber", StringType, true),
            StructField("rownum", IntegerType, true),
            StructField("itemid", StringType, true),
            StructField("qty", IntegerType, true),
            StructField("price", FloatType, true),
            StructField("amount", DecimalType(19, 2), true)
        ))
        spark.createDataFrame(rows, schema)
    }
}
