package cn.ekgc.shopping

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.sql.SparkSession

object CreateTables {

  /**
   * Recursively lists every file under `dataPath` on the default Hadoop
   * FileSystem and derives a table name from each file name (the name with
   * its extension stripped, e.g. `customers.csv` -> `customers`).
   *
   * @param dataPath HDFS directory to scan (e.g. `/user/root/day23`)
   * @return mutable buffer of (fullFilePath, tableName) pairs, one per file
   */
  def getSources(dataPath: String) = {
    val data = scala.collection.mutable.ListBuffer[(String, String)]()
    val fileSystem = FileSystem.get(new Configuration())
    // Second argument `true` requests a recursive listing.
    val files = fileSystem.listFiles(new Path(dataPath), true)
    while (files.hasNext) {
      val status   = files.next() // LocatedFileStatus
      val filePath = status.getPath
      val fileName = filePath.getName
      // Strip the extension. Guard against names without a dot: the original
      // substring(0, lastIndexOf(".")) threw StringIndexOutOfBoundsException
      // for extension-less files because lastIndexOf returns -1.
      val dot       = fileName.lastIndexOf(".")
      val tableName = if (dot > 0) fileName.substring(0, dot) else fileName
      data += filePath.toString -> tableName
    }
    data
  }

  /**
   * Loads the customers CSV at `path` and persists it as Hive table
   * `shopping.<tableName>`.
   *
   * NOTE(review): quotes are stripped and the line is split on "," — a field
   * containing an embedded comma would mis-split and throw a MatchError on
   * the executor. Assumes the source data never contains embedded commas;
   * confirm against the export that produced these files.
   */
  def createTableCustomers(path: String, tableName: String, spark: SparkSession) = {
    import spark.implicits._
    val rdd = spark.sparkContext.textFile(path)
    val ds = rdd.map(line => {
      // Expected columns: id, fname, lname, email, password, street, city, state, zipcode
      val Array(id, fname, lname, email, password, street, city, state, zipcode) =
        line.replaceAll("\"", "").split(",")
      Customer(id, fname, lname, email, password, street, city, state, zipcode)
    }).toDS()
    // Assumes the `shopping` database already exists — TODO confirm.
    ds.write.saveAsTable(s"shopping.${tableName}")
  }

  /**
   * Loads the orders CSV at `path` and persists it as Hive table
   * `shopping.<tableName>`. Same comma-splitting caveat as
   * [[createTableCustomers]].
   */
  def createTableOrder(path: String, tableName: String, spark: SparkSession) = {
    import spark.implicits._
    val rdd = spark.sparkContext.textFile(path)
    val ds = rdd.map(line => {
      // Expected columns: id, date, customerId, status
      val Array(id, date, customerId, status) = line.replaceAll("\"", "").split(",")
      Order(id, date, customerId, status)
    }).toDS()
    ds.write.saveAsTable(s"shopping.${tableName}")
  }

  /**
   * Loads the order-items CSV at `path` and persists it as Hive table
   * `shopping.<tableName>`.
   *
   * NOTE(review): `toInt`/`toFloat` throw NumberFormatException on malformed
   * numeric fields — assumes the data is clean; confirm before relying on it.
   */
  def createTableOrderItem(path: String, tableName: String, spark: SparkSession) = {
    import spark.implicits._
    val rdd = spark.sparkContext.textFile(path)
    val ds = rdd.map(line => {
      // Expected columns: id, orderID, productID, quantity, subtotal, productPrice
      val Array(id, orderID, productID, quantity, subtotal, productPrice) =
        line.replaceAll("\"", "").split(",")
      OrderItem(id, orderID, productID, quantity.toInt, subtotal.toFloat, productPrice.toFloat)
    }).toDS()
    ds.write.saveAsTable(s"shopping.${tableName}")
  }

  /**
   * Loads the products CSV at `path` and persists it as Hive table
   * `shopping.<tableName>`. Products are parsed with a regex (rather than a
   * plain split) because the name/description fields may contain commas.
   * Lines that do not match yield a sentinel empty Product, preserving the
   * original best-effort behavior.
   */
  def createTableProduct(path: String, tableName: String, spark: SparkSession) = {
    import spark.implicits._
    // Compile the pattern once; the original rebuilt it inside map() for
    // every single input line. scala.util.matching.Regex is serializable,
    // so capturing it in the closure is safe.
    val pattern = "\"(\\d+)\",\"(\\d+)\",\"(.+?)\",\"(.*)?\",\"([0-9\\.]+)\",\"(.*)\"".r
    val rdd = spark.sparkContext.textFile(path)
    val ds = rdd.map {
      // Match expression replaces the original mutable `var product`.
      case pattern(id, categoryID, name, description, price, image) =>
        Product(id, categoryID, name, description, price.toFloat, image)
      case _ =>
        println("no match..") // printed on the executor, not the driver
        Product("", "", "", "", 0.0F, "")
    }.toDS()
    ds.write.saveAsTable(s"shopping.${tableName}")
  }

}
