import org.apache.commons.codec.digest.DigestUtils
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{Dataset, SparkSession}
import org.apache.spark.sql.types.{DataTypes, StructType}

object ReadParquet {

  /**
   * Demo entry point: spins up a local SparkSession and writes a tiny
   * two-column dataset (`biaoshi_hashcode`, `guid`) out as Parquet.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {

    // Quiet Spark's verbose INFO logging for local runs.
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)

    val spark = SparkSession.builder()
      .appName(this.getClass.getSimpleName)
      .master("local")
      .getOrCreate()

    // Example of reading back previously written Parquet data:
    // val df = spark.read.parquet("/doit12/preprocessed/applog/2020-02-02")
    // df.show(50, false)

    import spark.implicits._

    // Write a one-row dataset as Parquet.
    // NOTE(review): output path is hard-coded to a Windows drive and the write
    // has no save mode, so a re-run fails if the directory already exists —
    // consider parameterizing the path and/or adding .mode(SaveMode.Overwrite).
    spark
      .createDataset(List((1L, 1L)))
      .toDF("biaoshi_hashcode", "guid")
      .write
      .parquet("d:/idmp-1-31")

    // FIX: removed `val ds: Dataset[String] = spark.read.textFile("")` — an
    // empty path throws at runtime, and the resulting Dataset was never used.

    // stop() is the documented way to shut down a SparkSession
    // (close() simply delegates to it).
    spark.stop()
  }

}
