package org.zjt.hive;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.Test;

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.PrintStream;
import java.net.URI;
import java.sql.*;


/**
 * Start the Hive service first: hive --service hiveserver2 &
 */

public class HiveJdbcClient {

    private static final PrintStream out = System.out;

    private static final String driverName = "org.apache.hive.jdbc.HiveDriver";

    /**
     * Demonstrates basic Hive JDBC usage: (re)create a table, list and
     * describe it, insert one row, and read it back.
     *
     * <p>HiveServer2 must be running first: {@code hive --service hiveserver2 &}
     *
     * @param args unused
     * @throws SQLException if any JDBC call fails
     */
    public static void main(String args[]) throws SQLException {
        try {
            Class.forName(driverName);
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            System.exit(1);
        }

        // FIX: the original value "t_user " had a trailing space, so the
        // quoted pattern in "show tables 't_user '" never matched the table
        // actually created (Hive trims whitespace around the DDL identifier).
        String tableName = "t_user";

        // try-with-resources closes the statement and connection even when a
        // query throws; the original leaked both on any SQL error.
        try (Connection con = DriverManager.getConnection(
                     "jdbc:hive2://192.168.83.131:10000/default", "root", "123456");
             Statement stmt = con.createStatement()) {

            stmt.execute("drop table if exists " + tableName);
            stmt.execute("create table " + tableName + " (key int, value string)");
            System.out.println("Create table success!");

            // List tables matching the exact name.
            String sql = "show tables '" + tableName + "'";
            System.out.println("Running: " + sql);
            try (ResultSet res = stmt.executeQuery(sql)) {
                if (res.next()) {
                    System.out.println(res.getString(1));
                }
            }

            // Print each column name and type of the new table.
            sql = "describe " + tableName;
            System.out.println("Running: " + sql);
            try (ResultSet res = stmt.executeQuery(sql)) {
                while (res.next()) {
                    System.out.println(res.getString(1) + "\t" + res.getString(2));
                }
            }

            // Insert one row; executeUpdate returns the affected-row count.
            sql = "INSERT INTO TABLE " + tableName + "  VALUES ( 35, 'fred flintstone')\n";
            out.println(stmt.executeUpdate(sql));

            // Read everything back.
            sql = "select * from " + tableName;
            try (ResultSet res = stmt.executeQuery(sql)) {
                while (res.next()) {
                    System.out.println(res.getInt(1) + "\t" + res.getString(2));
                }
            }
        }
    }

    /**
     * Loads a file that lives on the HiveServer2 host's local filesystem into
     * the given Hive table ({@code load data local inpath ...}).
     *
     * @param stmt      an open statement against the Hive connection
     * @param tableName the target Hive table
     * @throws SQLException if the load statement fails
     */
    private static void loadLocalData(Statement stmt, String tableName) throws SQLException {
        String filepath = "/Users/zhangjuntao/IdeaProjects/myproject/hadoop-demo/hive-demo/src/main/resources/data/t_user.txt";
        String sql = "load data local inpath '" + filepath + "' into table " + tableName;
        stmt.executeUpdate(sql);
    }


    /**
     * Moves {@code /t_user.txt} from HDFS into the {@code t_user} Hive table.
     * Upload the file to HDFS first (see {@link #upload()}).
     *
     * @param stmt an open statement against the Hive connection
     * @throws SQLException if the load statement fails
     */
    public static void loadHdfsData(Statement stmt) throws SQLException {
        // Remember to upload the file to HDFS before running this.
        String sql = "load data inpath '/t_user.txt' into table t_user";
        stmt.execute(sql);
    }

    /**
     * Copies the local sample file into HDFS as {@code /t_user.txt}.
     *
     * <p>FIX: the original leaked the input stream, the HDFS output stream,
     * and the {@link FileSystem}; try-with-resources now closes all three
     * (in reverse declaration order). The output stream is also renamed so it
     * no longer shadows the static {@code out} field.
     *
     * @throws Exception on any I/O or HDFS connection failure
     */
    //@Test
    public void upload() throws Exception {
        Configuration config = new Configuration();
        String filepath = "/Users/zhangjuntao/IdeaProjects/myproject/hadoop-demo/hive-demo/src/main/resources/data/t_user.txt";
        try (InputStream input = new FileInputStream(new File(filepath));
             FileSystem fs = FileSystem.get(new URI("hdfs://192.168.83.131:9000/"), config, "root");
             FSDataOutputStream hdfsOut = fs.create(new Path("/t_user.txt"))) {
            IOUtils.copy(input, hdfsOut);
        }
    }
}