package com.hiscene.sparksql

/**
 * Maven POM used to build this example (kept here for reference):
 *
 * <?xml version="1.0" encoding="UTF-8"?>
 * <project xmlns="http://maven.apache.org/POM/4.0.0"
 * xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
 * xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 * <modelVersion>4.0.0</modelVersion>
 *
 * <groupId>com.hiscene</groupId>
 * <artifactId>sparkOnHive</artifactId>
 * <version>1.0-SNAPSHOT</version>
 *
 * <dependencies>
 *
 *
 * <!-- https://mvnrepository.com/artifact/org.apache.spark/spark-core -->
 * <dependency>
 * <groupId>org.apache.spark</groupId>
 * <artifactId>spark-core_2.11</artifactId>
 * <version>2.3.0.2.6.5.0-292</version>
 * </dependency>
 *
 *
 * <!-- https://mvnrepository.com/artifact/org.apache.spark/spark-sql -->
 * <dependency>
 * <groupId>org.apache.spark</groupId>
 * <artifactId>spark-sql_2.11</artifactId>
 * <version>2.3.0.2.6.5.0-292</version>
 * </dependency>
 * <!-- https://mvnrepository.com/artifact/org.apache.spark/spark-hive -->
 * <dependency>
 * <groupId>org.apache.spark</groupId>
 * <artifactId>spark-hive_2.11</artifactId>
 * <version>2.3.0.2.6.5.0-292</version>
 * </dependency>
 * <!-- https://mvnrepository.com/artifact/org.apache.hive/hive-exec -->
 * <dependency>
 * <groupId>org.apache.hive</groupId>
 * <artifactId>hive-exec</artifactId>
 * <version>1.2.1000.2.6.5.0-292</version>
 * </dependency>
 *
 * </dependencies>
 *
 * <repositories>
 * <repository>
 * <id>repository.hortonworks</id>
 * <name>Hortonworks Repository</name>
 * <url>http://repo.hortonworks.com/content/repositories/releases/</url>
 * </repository>
 * </repositories>
 * </project>
 */
object SparkSQLOnHive {

  import java.io.File

  import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

  /**
   * Reads a small sample from a Hive table via a local Hive-enabled
   * SparkSession, prints its schema, appends three selected columns to a
   * local CSV file, and shows the sampled rows.
   *
   * @param args command-line arguments (currently unused)
   */
  def main(args: Array[String]): Unit = {
    // Location for Spark-managed tables when Hive support is enabled.
    val warehouseLocation = new File("spark-warehouse").getAbsolutePath

    val spark: SparkSession = SparkSession
      .builder()
      .master("local[*]")
      .appName("Spark Hive Example")
      .config("spark.sql.warehouse.dir", warehouseLocation)
      .enableHiveSupport()
      .getOrCreate()

    // Required for the $"columnName" column-reference syntax below.
    import spark.implicits._

    try {
      // The query below is already fully qualified; switching the current
      // database keeps any follow-up unqualified statements consistent.
      spark.sql("use ods")

      val df: DataFrame =
        spark.sql("SELECT * from ods.ods_01_a_gantry_etc_tradelist_dm limit 10 ")

      df.printSchema()

      // NOTE(review): hard-coded Windows output path — consider taking the
      // destination from args for portability.
      df.select($"computerorder", $"hourbatchno", $"stationdbtime")
        .write
        .mode(SaveMode.Append)
        .format("csv")
        .save("file:///D:/a.csv")

      // show(false) prints column values untruncated.
      df.show(false)
    } finally {
      // Always release the SparkSession and its underlying SparkContext,
      // even if the query or write fails (was missing before: resource leak).
      spark.stop()
    }
  }
}
