import java.io.IOException;
import java.io.InputStream;
import java.sql.*;
import java.util.Properties;

/**
 * Created by Sun on 2015/9/22.
 */
/**
 * Smoke test for a HiveServer2 JDBC connection: runs a GROUP BY aggregation
 * against Hive and opens a second relational JDBC connection intended for
 * persisting the results.
 *
 * <p>Connection settings are read from {@code /test.properties} on the
 * classpath ({@code hive.*} keys for Hive, {@code jdbc.*} keys for the
 * relational target).
 */
public class TestHive
{
    // Fallback Hive settings; getConn() actually reads the hive.* properties.
    private static String driverName = "org.apache.hive.jdbc.HiveDriver";
    private static String url = "jdbc:hive2://101.227.247.192:10000/";
    private static String user = "hadoop";
    // SECURITY: avoid hard-coding credentials in source; keep them only in
    // test.properties (or environment variables).
    private static String password = "hx@yp114";
    // Last SQL text issued and its result set, shared by the helper methods.
    private static String sql = "";
    private static ResultSet res;

    private final static Properties properties = new Properties();

    public static void main(String[] args) {
        Connection hiveConn = null;
        Statement stmt = null;
        Connection jdbcConn = null;
        try {
            InputStream in = TestHive.class.getResourceAsStream("/test.properties");
            if (in == null) {
                throw new IOException("/test.properties not found on classpath");
            }
            try {
                properties.load(in);
            } finally {
                // The original never closed this stream.
                in.close();
            }

            hiveConn = getConn();

            // Create a Hive session/statement.
            stmt = hiveConn.createStatement();

            selectData(stmt, "telecomdata");

            // Open the relational JDBC connection used to persist results.
            // (The original reassigned the same `conn` variable here, leaking
            // the Hive connection.)
            String driver = properties.getProperty("jdbc.driverClassName");
            String jdbcUrl = properties.getProperty("jdbc.url");
            String username = properties.getProperty("jdbc.username");
            String jdbcPassword = properties.getProperty("jdbc.password");

            Class.forName(driver);
            jdbcConn = DriverManager.getConnection(jdbcUrl, username, jdbcPassword);

            /*
             * New-user retention load (currently disabled).
             * RETENTION_DAY: 1 = same-day new users, 2 = day-2 retained,
             * 3 = day-3 retained, 7 = day-7 retained, 30 = day-30 retained.
             *
             * NOTE(review): the original code called executeBatch() on a null
             * PreparedStatement here, which always threw NullPointerException.
             * Re-enable by preparing the INSERT and batching one row per result:
             *
             *   String insertSql = "INSERT INTO tm_apponoff_active_rate_month_copy"
             *           + " (`TM_APPONOFF_ACTIVE_RATE_MONTH_ID`, `VERSION`,"
             *           + " `CHANNEL_NO`, `RATE`, `ETL_TIME`, `MONTH_ID`,"
             *           + " `CREATE_USER`, `NOTE`) VALUES (?, ?, ?, ?, ?, ?, ?, ?)";
             *   PreparedStatement pstmt = jdbcConn.prepareStatement(insertSql);
             *   while (res.next()) { ...set parameters...; pstmt.addBatch(); }
             *   pstmt.executeBatch();
             *   pstmt.close();
             */

            // Run a "regular hive query" count if needed:
            // countData(stmt, "telecomdata");
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            System.exit(1);
        } catch (SQLException e) {
            e.printStackTrace();
            System.exit(1);
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // Close in reverse order of acquisition: statement before its
            // connection (the original closed the connection first).
            try {
                if (stmt != null) {
                    stmt.close();
                    stmt = null;
                }
            } catch (SQLException e) {
                e.printStackTrace();
            }
            try {
                if (hiveConn != null) {
                    hiveConn.close();
                    hiveConn = null;
                }
            } catch (SQLException e) {
                e.printStackTrace();
            }
            try {
                if (jdbcConn != null) {
                    jdbcConn.close();
                    jdbcConn = null;
                }
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Counts the rows of {@code tableName} with a regular Hive query and
     * prints the count to stdout.
     */
    private static void countData(Statement stmt, String tableName)
            throws SQLException {
        sql = "select count(1) from " + tableName;
        System.out.println("Running:" + sql);
        res = stmt.executeQuery(sql);
        System.out.println("执行“regular hive query”运行结果:");
        while (res.next()) {
            System.out.println("count ------>" + res.getString(1));
        }
    }

    /**
     * Groups rows of {@code tableName} by packagename and prints each package
     * with its row count. (The original ignored {@code tableName} and always
     * queried the hard-coded telecomdata table; its sole caller passes
     * "telecomdata", so behavior is unchanged.)
     */
    private static void selectData(Statement stmt, String tableName)
            throws SQLException {
        sql = "select packagename,count(*) from " + tableName + " GROUP by packagename";
        System.out.println("Running:" + sql);
        // Fetch the aggregation result.
        res = stmt.executeQuery(sql);
        System.out.println("执行 select * query 运行结果:");
        while (res.next()) {
            System.out.println(res.getString(1) + "\t" + res.getString(2));
        }
    }

    /**
     * Loads the local file /home/hadoop01/data into {@code tableName}.
     * Uses execute() because LOAD DATA returns no ResultSet; the original
     * executeQuery() call violates the JDBC Statement contract for
     * non-query SQL.
     */
    private static void loadData(Statement stmt, String tableName)
            throws SQLException {
        String filepath = "/home/hadoop01/data";
        sql = "load data local inpath '" + filepath + "' into table "
                + tableName;
        System.out.println("Running:" + sql);
        stmt.execute(sql);
    }

    /** Prints each column name and type of {@code tableName}. */
    private static void describeTables(Statement stmt, String tableName)
            throws SQLException {
        sql = "describe " + tableName;
        System.out.println("Running:" + sql);
        res = stmt.executeQuery(sql);
        System.out.println("执行 describe table 运行结果:");
        while (res.next()) {
            System.out.println(res.getString(1) + "\t" + res.getString(2));
        }
    }

    /** Checks whether {@code tableName} exists and prints its name if so. */
    private static void showTables(Statement stmt, String tableName)
            throws SQLException {
        sql = "show tables '" + tableName + "'";
        System.out.println("Running:" + sql);
        res = stmt.executeQuery(sql);
        System.out.println("执行 show tables 运行结果:");
        if (res.next()) {
            System.out.println(res.getString(1));
        }
    }

    /**
     * Creates {@code tableName} with schema (key int, value string),
     * tab-delimited. DDL returns no ResultSet, so execute() replaces the
     * original executeQuery() call.
     */
    private static void createTable(Statement stmt, String tableName)
            throws SQLException {
        sql = "create table "
                + tableName
                + " (key int, value string)  row format delimited fields terminated by '\t'";
        stmt.execute(sql);
    }

    /**
     * Drops the test table created by createTable and returns its name.
     * DROP TABLE returns no ResultSet, so execute() replaces executeQuery().
     */
    private static String dropTable(Statement stmt) throws SQLException
    {
        // Name of the table created for this test.
        String tableName = "testHive";
        sql = "drop table " + tableName;
        stmt.execute(sql);
        return tableName;
    }

    /**
     * Opens the HiveServer2 connection using the hive.driverName, hive.url,
     * hive.user and hive.password keys from test.properties.
     *
     * @return an open Hive connection; the caller is responsible for closing it
     * @throws ClassNotFoundException if the Hive JDBC driver is not on the classpath
     * @throws SQLException           if the connection cannot be established
     */
    private static Connection getConn() throws ClassNotFoundException, SQLException
    {
        Class.forName(properties.getProperty("hive.driverName"));

        // The hive1 server currently runs without a password; the property may
        // therefore be absent/empty.
        Connection conn = DriverManager.getConnection(
                properties.getProperty("hive.url"),
                properties.getProperty("hive.user"),
                properties.getProperty("hive.password"));
        return conn;
    }

}
