package com.txl.cn.spark06

import org.apache.spark.sql.{Dataset, Row, SparkSession}

/**
  * Created by txl on 2018/1/4.
  */
/**
  * Reads a JSON file, filters rows whose `sums` column exceeds a threshold,
  * and writes the result out as CSV (overwriting any previous output).
  *
  * Fixes over the original:
  *  - the SparkSession is now stopped in a `finally` block (it was leaked);
  *  - the misleadingly-named local `sql` is renamed to `filtered`;
  *  - dead commented-out code removed.
  */
object JsonDemo {
  def main(args: Array[String]): Unit = {
    val session = SparkSession.builder()
      .master("local")
      .appName(this.getClass.getName)
      .getOrCreate()
    try {
      // `$` column syntax requires the session's implicits in scope.
      import session.implicits._

      val jsDF = session.read.json("data/json/test.json")
      jsDF.printSchema()

      // Keep only rows where the `sums` column is greater than 200.
      val filtered: Dataset[Row] = jsDF.where($"sums" > 200)

      // Overwrite mode so re-runs do not fail on an existing output directory.
      filtered.write.mode("overwrite").csv("data/out")
    } finally {
      // Always release Spark resources, even if the job above throws.
      session.stop()
    }
  }
}
