import org.apache.commons.io.IOUtils
import org.apache.iceberg.catalog.TableIdentifier
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Dataset, Encoders, SparkSession, functions}
import org.apache.spark.{Partition, SparkConf, SparkContext, TaskContext}
import seaweedfs.client.FilerProto.Entry
import seaweedfs.client.SeaweedRead.ChunkView
import seaweedfs.client.{FilerClient, FilerProto, SeaweedInputStream}

import java.net.{HttpURLConnection, URL}
import java.util
import java.util.{Arrays, List}
import scala.collection.JavaConverters._
import scala.collection.convert.ImplicitConversions.`collection AsScalaIterable`

object HelloTable {

  /**
   * Minimal end-to-end example: configure a Spark session backed by SeaweedFS,
   * create an Iceberg table through the session catalog, insert two rows, and
   * print the table contents.
   */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf()
      .setAppName("SparkSeaweedExample")
      .setMaster("local[1]")

      // SeaweedFS as the Hadoop-compatible default file system.
      .set("spark.hadoop.fs.seaweedfs.impl", "seaweed.hdfs.SeaweedFileSystem")
      .set("spark.hadoop.fs.defaultFS", "seaweedfs://localhost:8888")
      // NOTE(review): event logs are written to the filer root — consider a
      // dedicated directory such as seaweedfs://localhost:8888/spark-logs/.
      .set("spark.eventLog.dir", "seaweedfs://localhost:8888/")
      .set("spark.eventLog.enabled", "true")

      // Replace the built-in session catalog with Iceberg's SparkSessionCatalog.
      // "type" and "catalog-impl" are mutually exclusive in Iceberg's
      // CatalogUtil — setting both makes catalog initialization throw
      // IllegalArgumentException, so only "type=hadoop" is kept here (it
      // already selects HadoopCatalog).
      .set("spark.sql.catalog.spark_catalog", "org.apache.iceberg.spark.SparkSessionCatalog")
      .set("spark.sql.catalog.spark_catalog.type", "hadoop")
      .set("spark.sql.catalog.spark_catalog.warehouse", "seaweedfs://localhost:8888/warehouse/")

      // ORC tuning — not exercised by this example, kept from the original setup.
      .set("spark.sql.hive.convertMetastoreOrc", "true")
      .set("spark.sql.statistics.fallBackToHdfs", "true")
      .set("spark.sql.orc.filterPushdown", "true")
      .set("spark.sql.orc.impl", "native")

      // History-server settings — only relevant when a history server is running.
      .set("spark.history.ui.port", "18081")
      .set("spark.history.fs.cleaner.interval", "7d")
      .set("spark.history.provider", "org.apache.spark.deploy.history.FsHistoryProvider")
      .set("spark.history.fs.cleaner.maxAge", "90d")
      .set("spark.history.fs.cleaner.enabled", "true")
      .set("spark.yarn.historyServer.address", "master:18081")

    val spark = SparkSession.builder.config(sparkConf).getOrCreate()

    // Namespace and table name are passed as separate components;
    // TableIdentifier.toString renders "default.test_table" for the SQL below.
    val table = TableIdentifier.of("default", "test_table")

    try {
      spark.sql(s"""
            CREATE TABLE IF NOT EXISTS ${table} (
              customer_id bigint COMMENT 'unique id',
              name string,
              age int
            ) USING iceberg
      """)
      // No trailing semicolons: spark.sql expects a single bare statement and
      // several Spark versions raise ParseException on a terminating ';'.
      spark.sql(s"INSERT INTO ${table} SELECT 1, 'customer_a', 123")
      spark.sql(s"INSERT INTO ${table} SELECT 2, 'customer_b', 124")
      spark.table(table.toString).show(false)
    } finally {
      // Release the session even if one of the statements above fails.
      spark.close()
    }
  }
}
