import org.apache.spark.sql.SparkSession

/**
 * Ad-hoc Spark SQL analytics over the Hive table
 * `zg6_stock.apple_stock_trade` (Apple stock trade data).
 *
 * Connects to the Hive metastore configured below, then runs one query per
 * business requirement and prints each result to the console via `.show()`.
 * Expected columns (inferred from the queries — confirm against the table
 * schema): date1 (string 'yyyy-MM-dd'), open, close, high, low, volume.
 */
object aa {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("aaa")
      .master("local[*]")
      .config("hive.metastore.uris", "thrift://cdh02:9083") // Hive metastore address
      .config("spark.sql.warehouse.dir", "/user/hive/warehouse") // Hive warehouse directory
      .enableHiveSupport()
      .getOrCreate()

    // Sanity checks: switch to the stock database and preview the table.
    //spark.sql("show databases").show()
    spark.sql("use zg6_stock")
    //spark.sql("show tables").show()
    spark.sql("select * from zg6_stock.apple_stock_trade").show()

    // Business 1: yearly trade volume, sorted by volume descending.
    // NOTE(review): substr(date1,0,4) relies on Spark treating position 0
    // like position 1; assumes date1 is a 'yyyy-MM-dd' string — confirm.
    // The alias "price" actually holds a volume sum; kept to preserve output.
    spark.sql(
      """select substr(date1,0,4) as year, sum(volume) as price
        |from zg6_stock.apple_stock_trade
        |group by year
        |order by price desc""".stripMargin).show()

    // Business 2: daily |open - close| gap, top 20 largest gaps.
    spark.sql(
      """select date1, abs(open - close) as price_diff
        |from zg6_stock.apple_stock_trade
        |order by price_diff desc limit 20""".stripMargin).show()

    // Business 3: date ranges where the close rose for more than 7
    // consecutive trading days. Classic gaps-and-islands: a running count
    // of non-rising days (is_rising = 0) forms a stable group id for each
    // rising streak.
    // NOTE(review): streak_length > 7 selects streaks of 8+ days; if the
    // requirement "7 天以上" means "7 or more", this should be >= 7.
    spark.sql(
      """with t1 as (
        |  select date1, close,
        |         case when close > lag(close, 1, close) over (order by date1)
        |              then 1 else 0 end as is_rising
        |  from zg6_stock.apple_stock_trade
        |), t2 as (
        |  select date1, close, is_rising,
        |         sum(case when is_rising = 0 then 1 else 0 end)
        |           over (order by date1) as group_id
        |  from t1
        |), t3 as (
        |  select min(date1) as start_date, max(date1) as end_date,
        |         count(*) as streak_length
        |  from t2
        |  where is_rising = 1
        |  group by group_id
        |) select start_date, end_date from t3 where streak_length > 7""".stripMargin).show()

    // FIX: this duplicate of the streak query referenced a non-existent
    // table "zk" (apparently a truncated paste of the table name), which
    // would fail with an AnalysisException at runtime. It now points at the
    // real table; consider deleting the duplicate entirely.
    spark.sql(
      """with t1 as (
        |  select date1, close,
        |         case when close > lag(close, 1, close) over (order by date1)
        |              then 1 else 0 end as is_rising
        |  from zg6_stock.apple_stock_trade
        |), t2 as (
        |  select date1, close, is_rising,
        |         sum(case when is_rising = 0 then 1 else 0 end)
        |           over (order by date1) as group_id
        |  from t1
        |), t3 as (
        |  select min(date1) as start_date, max(date1) as end_date,
        |         count(*) as streak_length
        |  from t2
        |  where is_rising = 1
        |  group by group_id
        |) select start_date, end_date from t3 where streak_length > 7""".stripMargin).show()

    // Business 4: daily high-low spread, top 20 widest spreads.
    spark.sql(
      """select date1, high - low as price_range
        |from zg6_stock.apple_stock_trade
        |order by price_range desc limit 20""".stripMargin).show()

    // Business 5: daily peaks (high) and troughs (low), June through
    // August 2020. The 'between' filter assumes date1 sorts lexically as
    // a 'yyyy-MM-dd' string — confirm against the schema.
    spark.sql(
      """select date1, high as high_price, low as low_price
        |from zg6_stock.apple_stock_trade
        |where date1 between '2020-06-01' and '2020-08-31'""".stripMargin).show()

    // Business 6: weekly trade volume for 2021, descending by volume.
    spark.sql(
      """select concat_ws('-', year(date1), weekofyear(date1)) as week, sum(volume)
        |from zg6_stock.apple_stock_trade
        |where year(date1) = '2021'
        |group by week
        |order by sum(volume) desc""".stripMargin).show()

    // Same weekly aggregation for 2020.
    spark.sql(
      """select concat_ws('-', year(date1), weekofyear(date1)) as week, sum(volume)
        |from zg6_stock.apple_stock_trade
        |where year(date1) = '2020'
        |group by week
        |order by sum(volume) desc""".stripMargin).show()

    spark.stop() // release the underlying SparkContext (close() delegates here)
  }
}
