package test

import org.apache.spark.sql.SparkSession

/**
  * Created by zhoucw on 2018-08-27 at 9:46 AM.
  */
object TestSpark {

  // Default input locations, used when no command-line arguments are given.
  // NOTE(review): paths encode partition values (hid/d/h/m5) — presumably a
  // single 5-minute partition of the access-log ETL output; confirm with the
  // producing job.
  private val DefaultPath  = "/hadoop/accesslog_etl/output/data/hid=327/d=180801/h=23/m5=50/"
  private val DefaultPath2 = "/hadoop/accesslog_etl/output/data.bak/hid=327/d=180801/h=23/m5=50/"

  /**
    * Loads two ORC datasets and prints their row counts joined by "_"
    * (e.g. "123_456"), typically to compare a dataset against its backup.
    *
    * @param args optional overrides: args(0) = first ORC path,
    *             args(1) = second ORC path. Missing args fall back to the
    *             original hard-coded partition paths, so existing invocations
    *             keep working unchanged.
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("test")
      .master("local[*]")
      .enableHiveSupport()
      .getOrCreate()

    // args.lift avoids an ArrayIndexOutOfBoundsException on short arg lists.
    val path  = args.lift(0).getOrElse(DefaultPath)
    val path2 = args.lift(1).getOrElse(DefaultPath2)

    try {
      val df  = spark.read.format("orc").load(path)
      val df2 = spark.read.format("orc").load(path2)

      // Output format "count_count" preserved from the original version.
      println(s"${df.count()}_${df2.count()}")
    } finally {
      // Always release the session, even if a load/count fails; the original
      // leaked it on every run.
      spark.stop()
    }
  }
}
