package com.imooc.spark

import java.sql.DriverManager

/**
 * Access the Spark Thrift Server via plain JDBC.
 *
 * Once the Thrift Server is started and reachable over the network, this
 * client runs locally like any other JDBC program — no Spark dependency
 * is needed on the client side.
 */
object SparkSQLThriftServerApp {
  def main(args: Array[String]): Unit = {
    // Register the Hive JDBC driver so DriverManager can resolve the hive2:// URL.
    Class.forName("org.apache.hive.jdbc.HiveDriver")

    // NOTE: the Spark Thrift Server must be running before this client connects.
    val conn = DriverManager.getConnection("jdbc:hive2://hadoop000:10000", "hadoop", "")
    try {
      val pstmt = conn.prepareStatement("select empno, ename, sal from emp")
      try {
        val rs = pstmt.executeQuery()
        try {
          // Print every row of the result set.
          while (rs.next()) {
            println("empno:" + rs.getInt("empno") +
              " , ename:" + rs.getString("ename") +
              " , sal:" + rs.getDouble("sal"))
          }
        } finally {
          // Close in reverse order of acquisition, even if an exception
          // is thrown mid-query — the original code leaked all three
          // resources on any failure path.
          rs.close()
        }
      } finally {
        pstmt.close()
      }
    } finally {
      conn.close()
    }
  }
}
