import org.apache.spark.SparkConf
import org.apache.spark.sql.functions
import org.apache.spark.sql.{DataFrame, DataFrameReader, Dataset, Row, SparkSession}

object Demo {

  /** Entry point: loads a stock-prices CSV and reports
    *  - task 1: total traded market value (close * volume) per day, ordered by date;
    *  - task 2: variance of the closing price per ticker, ordered by variance.
    *
    * @param args optional first argument overrides the input CSV path;
    *             defaults to the original hard-coded location when absent.
    */
  def main(args: Array[String]): Unit = {
    // Allow the CSV path to be supplied on the command line; keep the old path as default.
    val inputPath: String =
      args.headOption.getOrElse("E:\\hadoop\\spark-realtime-1229\\sparkSql-stocks\\stock_prices.csv")

    val sparkConf: SparkConf = new SparkConf().setMaster("local[4]").setAppName("stocks")
    val spark: SparkSession = SparkSession.builder().config(sparkConf).getOrCreate()
    try {
      val df: DataFrame = spark.read
        .format("csv")
        .option("sep", ",")
        .option("inferSchema", "true")
        .option("header", "true")
        .load(inputPath)

      // Inspect the inferred schema.
      df.printSchema()
      // Register a temp view for the SQL query below; createOrReplaceTempView is
      // idempotent, unlike createTempView which throws if the view already exists.
      df.createOrReplaceTempView("stocks")

      import spark.implicits._
      // marketValue = closing price * traded volume for each row.
      val df2: DataFrame = df.withColumn("marketValue", $"close" * $"volume")

      // Task 1: alias the aggregate explicitly instead of renaming the
      // auto-generated "sum(marketValue)" column, whose exact name is
      // fragile across Spark versions.
      df2.groupBy("date")
        .agg(functions.sum("marketValue").as("return"))
        .sort("date")
        .show()

      // Task 2: alias in SQL (backticks because `var` is unusual as a name)
      // rather than renaming the generated "variance(close)" column.
      spark.sql("select variance(close) as `var`, ticker from stocks group by ticker")
        .sort("var")
        .show()
    } finally {
      // Always release the SparkSession, even when one of the jobs above fails.
      spark.close()
    }
  }
}
