package com.navinfo.platform.examples.carbondata

import java.io.File

import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties
import org.apache.spark.sql.{SaveMode, SparkSession}



object MyCarbondataSample {

  /**
   * Sample entry point demonstrating CarbonData's complex-type support:
   * creates a CarbonSession on a local Spark master, (re)creates a table
   * containing a struct&lt;array&lt;string&gt;, int&gt; column, loads three sample rows
   * via the DataFrame writer, and prints the resulting row count.
   */
  def main(args: Array[String]): Unit = {

    // HDFS paths for the Carbon store, Spark SQL warehouse, and metastore.
    val storeLocation = "/user/root/CarbonStore"
    val warehouse = "/user/root/data"
    val metastoredb = "/user/root/CarbonMetaStore"
    val workThreadNum = 4

    CarbonProperties.getInstance()
      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd HH:mm:ss")
      .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT, "yyyy/MM/dd")
      .addProperty(CarbonCommonConstants.ENABLE_UNSAFE_COLUMN_PAGE_LOADING, "true")
      .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC, "")

    // "local" runs a single worker thread; "local[n]" runs n of them.
    val masterUrl = if (workThreadNum <= 1) "local" else s"local[$workThreadNum]"

    import org.apache.spark.sql.CarbonSession._
    val spark = SparkSession
      .builder()
      .master(masterUrl)
      .appName("MyCarbondataSample")
      .config("spark.sql.warehouse.dir", warehouse)
      .getOrCreateCarbonSession(storeLocation, metastoredb)
    spark.sparkContext.setLogLevel("WARN")

    val complexTableName = "complex_type_table"

    import spark.implicits._

    // Ensure the SparkContext is released even if a statement fails;
    // the original leaked the session on every run (no spark.stop()).
    try {
      // Drop any table left over from a previous run, then recreate it
      // with a nested struct column and Carbon-specific table properties.
      spark.sql(s"DROP TABLE IF EXISTS $complexTableName")
      spark.sql(
        s"""
           | CREATE TABLE $complexTableName(
           | id INT,
           | name STRING,
           | city STRING,
           | salary FLOAT,
           | file struct<school:array<string>, age:int>
           | )
           | STORED BY 'carbondata'
           | TBLPROPERTIES(
           | 'sort_columns'='name',
           | 'dictionary_include'='city')
           | """.stripMargin)

      // Generate three sample rows carrying nested array/struct data.
      val df = spark.sparkContext.parallelize(Seq(
        ComplexTypeData(1, "index_1", "city_1", 10000.0f,
          StructElement(Array("struct_11", "struct_12"), 10)),
        ComplexTypeData(2, "index_2", "city_2", 20000.0f,
          StructElement(Array("struct_21", "struct_22"), 20)),
        ComplexTypeData(3, "index_3", "city_3", 30000.0f,
          StructElement(Array("struct_31", "struct_32"), 30))
      )).toDF
      df.printSchema()

      // Append the rows into the Carbon table through the DataFrame writer.
      df.write
        .format("carbondata")
        .option("tableName", complexTableName)
        .mode(SaveMode.Append)
        .save()

      // Verify the load by counting the rows just written.
      spark.sql(s"select count(*) from $complexTableName").show(100, truncate = false)
    } finally {
      spark.stop()
    }
  }

}
