import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.DataFrameReader;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.SQLContext;

import java.sql.*;

/**
 * Demo that counts rows in {@code deep_creator.id_mapping} two ways:
 * first via the plain Hive JDBC driver, then via Spark SQL's JDBC data source.
 * Connection endpoints and credentials are hard-coded for a dev cluster.
 */
public class HiveMain {
    public static void main(String[] args) {
        // --- 1) Query Hive through plain JDBC ---
        // Class.forName alone is enough: JDBC drivers self-register in a static
        // initializer. The original .newInstance() call was redundant (and the
        // method is deprecated since Java 9).
        try {
            Class.forName("org.apache.hive.jdbc.HiveDriver");
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        }

        String countSql = "select count(1) from deep_creator.id_mapping";
        // try-with-resources guarantees Connection/PreparedStatement/ResultSet
        // are closed even on error — the original leaked all three.
        // NOTE(review): a ZooKeeper-quorum hive2 URL usually also needs
        // serviceDiscoveryMode/zooKeeperNamespace params — confirm against the cluster.
        try (Connection conn = DriverManager.getConnection(
                "jdbc:hive2://bfd-lugu-dev71:2181,bfd-lugu-dev72:2181,bfd-lugu-dev73:2181");
             PreparedStatement pstsm = conn.prepareStatement(countSql);
             ResultSet resultSet = pstsm.executeQuery()) {
            int rowNum = 0;
            if (resultSet.next()) {
                rowNum = resultSet.getInt(1);
            }
            System.out.println(rowNum);
        } catch (SQLException e) {
            e.printStackTrace();
        }

        // --- 2) Query through Spark SQL ---
        // SparkConf defines the job parameters; local mode for this demo.
        SparkConf conf = new SparkConf().setMaster("local").setAppName("JDBCDataSource");
        // The SparkContext is the single gateway to the Spark cluster.
        JavaSparkContext sc = new JavaSparkContext(conf);
        try {
            SQLContext sqlContext = new SQLContext(sc);

            // Load both tables as DataFrames via the JDBC data source.
            DataFrameReader reader = sqlContext.read().format("jdbc");
            reader.option("url", "jdbc:hive2://bfd-lugu-dev71:2181,bfd-lugu-dev72:2181,bfd-lugu-dev73:2181");
            // BUG FIX: the original set the MySQL driver ("com.mysql.cj.jdbc.Driver")
            // against a hive2 URL — that combination can never connect. Use the
            // Hive driver, consistent with the JDBC section above.
            reader.option("driver", "org.apache.hive.jdbc.HiveDriver");
            reader.option("user", "root");
            reader.option("password", "admin");
            reader.option("dbtable", "information");
            Dataset myinformation = reader.load();
            reader.option("dbtable", "score");
            Dataset scores = reader.load();

            // Expose both DataFrames as temp tables so SQL can reference them.
            myinformation.registerTempTable("info");
            scores.registerTempTable("score");

            String sql = "select count(1) from deep_creator.id_mapping";
            Dataset sql2 = sqlContext.sql(sql);
            sql2.show();
        } finally {
            // The original never stopped the SparkContext; close it so the
            // JVM can exit cleanly.
            sc.close();
        }
    }
}
