import org.apache.spark.sql.SparkSession

object rk {
  /**
   * Entry point: runs a series of Spark SQL analyses over the Hive table
   * `zg6_stock.apple_stock_trade` and prints each result to the console.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("app")
      .config("hive.metastore.uris", "thrift://cdh02:9083")
      // FIX: "spark.warehouse.uri" is not a recognized Spark config key and
      // was silently ignored; the correct key is spark.sql.warehouse.dir.
      .config("spark.sql.warehouse.dir", "/user/hive/warehouse")
      .enableHiveSupport()
      .getOrCreate()

    // Sanity check: switch database and show a sample of the stock table.
    spark.sql("use zg6_stock")
    spark.sql("select * from zg6_stock.apple_stock_trade").show()

    // Business 1: total trade volume per year, descending by volume.
    spark.sql(
      """select year(date1) as every_year, sum(volume) as sum_num
        |from zg6_stock.apple_stock_trade
        |group by year(date1)
        |order by sum_num desc""".stripMargin).show()

    // Business 2: top 20 days by absolute open/close difference.
    spark.sql(
      """select date1, abs(open - close) as price
        |from zg6_stock.apple_stock_trade
        |order by price desc
        |limit 20""".stripMargin).show()

    // Business 3: date ranges where the stock rose 7 or more days in a row.
    //  t1: flag each day as rising (close > previous day's close);
    //  t2: running count of non-rising days — all rising days of one streak
    //      therefore share the same group_id;
    //  t3: collapse each streak to [start_date, end_date] plus its length.
    // FIX: the original filtered on `group_id > 7`, which is just the running
    // break counter and says nothing about streak length; the correct filter
    // is on the number of rising days in the group (count(*)).
    spark.sql(
      """with t1 as (
        |  select date1, close,
        |         case when close > lag(close, 1, close) over (order by date1)
        |              then 1 else 0 end as is_rising
        |  from zg6_stock.apple_stock_trade
        |), t2 as (
        |  select date1, close, is_rising,
        |         sum(case when is_rising = 0 then 1 else 0 end)
        |           over (order by date1) as group_id
        |  from t1
        |), t3 as (
        |  select min(date1) as start_date,
        |         max(date1) as end_date,
        |         count(*)   as streak_len
        |  from t2
        |  where is_rising = 1
        |  group by group_id
        |)
        |select start_date, end_date
        |from t3
        |where streak_len >= 7""".stripMargin).show()

    // Business 4: top 20 days by high/low spread.
    spark.sql(
      """select date1, high - low as gp_diff
        |from zg6_stock.apple_stock_trade
        |order by gp_diff desc
        |limit 20""".stripMargin).show()

    // Business 5: daily peak (high) and valley (low) prices, June-August 2020.
    // FIX: include date1 — without it the per-day peaks/valleys cannot be
    // attributed to any date.
    spark.sql(
      """select date1, high, low
        |from zg6_stock.apple_stock_trade
        |where date1 between '2020-06-01' and '2020-08-31'""".stripMargin).show()

    // Business 6: weekly trade volume for 2021, descending by volume.
    spark.sql(
      """select concat_ws('-', year(date1), weekofyear(date1)) as week,
        |       sum(volume) as week_volume
        |from zg6_stock.apple_stock_trade
        |where year(date1) = 2021
        |group by year(date1), weekofyear(date1)
        |order by week_volume desc""".stripMargin).show()

    spark.stop()
  }
}
