package cn.tiakon.dmp

import cn.tiakon.dmp.untils.ContextUtils
import org.apache.spark.sql.SQLContext
import org.junit.{Assert, Test}

/**
  * Exploratory test class: demonstrates `String.split` trailing-empty-token
  * behavior and reads back Parquet output produced by the DMP pipeline.
  *
  * @author Tiakon
  *         2018/3/28 20:33
  */
class Test01 extends Assert {

  /**
    * `split(",")` (without the -1 limit argument) discards trailing empty
    * strings: "1,,,,1,2,3,4,5,5,6,,,,,," yields 11 tokens, whereas
    * `split(",", -1)` would keep the trailing empties and yield 17.
    */
  @Test
  def test01(): Unit = {
    val tokens = "1,,,,1,2,3,4,5,5,6,,,,,,".split(",")
    // Pin the documented behavior instead of just printing it: the 11
    // surviving tokens are "1","","","","1","2","3","4","5","5","6".
    Assert.assertEquals(11, tokens.length)
    println(tokens.length)
  }


  /**
    * Reads a Parquet directory, registers it as a temporary table and
    * prints its rows via Spark SQL.
    */
  @Test
  def test02(): Unit = {
    val sc = ContextUtils.getSparkContext()
    val sqlc = new SQLContext(sc)

    // Read the parquet files, register them as a temp table, show the result.
    // NOTE(review): hard-coded Windows path — consider a system property or
    // test resource so this runs on other machines/CI.
    // NOTE(review): sc is not stopped here — presumably ContextUtils hands out
    // a shared context; confirm, otherwise add sc.stop() to avoid a leak.
    sqlc.read.parquet("D:\\dmp-testdata\\output\\parquet").registerTempTable("logs")
    sqlc.sql("select * from logs").show()
  }
}
