scala> import org.apache.spark.sql.SparkSession
scala> val spark = SparkSession.builder().getOrCreate()  // in spark-shell this returns the existing session
scala> import spark.implicits._                          // enables $-column syntax and Dataset encoders
scala> val df = spark.read.json("file:///usr/local/spark/employee.json")  // load JSON records into a DataFrame (schema inferred)
scala> df.show()                                         // display all rows
scala> df.distinct().show()                              // remove duplicate rows
scala> df.drop("id").show()                              // project out the "id" column
scala> df.filter(df("age") > 30).show()                  // keep only rows where age > 30
scala> df.groupBy("name").count().show()                 // row count per distinct name
scala> df.sort(df("name").asc).show()                    // sort ascending by name
scala> df.take(3)                                        // first 3 rows as Array[Row]; the REPL echoes the array
scala> df.select(df("name").as("username")).show()       // output "name" under the alias "username"
scala> df.agg("age" -> "avg").show()                     // average age; .show() needed to print the value, not just the DataFrame reference
scala> df.agg("age" -> "min").show()                     // minimum age; .show() needed to print the value, not just the DataFrame reference