package com.loong.spark;

import java.sql.*;
import java.util.ArrayList;
import java.util.List;


/**
 * Created by 梁浩峰 on 2016/9/24 21:44.
 */
/**
 * Small command-line utility that connects to HiveServer2 over JDBC,
 * runs {@code SELECT * FROM hive.t_user}, and prints the column names
 * followed by every row, tab-prefixed and three-space separated.
 *
 * <p>Created by Liang Haofeng on 2016/9/24 21:44.
 */
public class SparkSqlUtil {
    // Hive JDBC connection settings for the hivemaster host.
    private static final String JDBC_DRIVER = "org.apache.hive.jdbc.HiveDriver";
    private static final String JDBC_URL = "jdbc:hive2://hivemaster:10000/";
    private static final String USERNAME = "hive";
    private static final String PASSWORD = "hive";

    /**
     * Entry point: queries {@code hive.t_user} and dumps header + rows to stdout.
     * Connection failures and SQL errors are reported to stderr instead of
     * being silently swallowed (the original had an empty catch block).
     *
     * @param args unused
     */
    public static void main(String[] args) {
        try {
            Class.forName(JDBC_DRIVER);
        } catch (ClassNotFoundException e) {
            System.err.println("Hive JDBC driver not on classpath: " + e.getMessage());
            return;
        }

        // try-with-resources guarantees Connection/Statement/ResultSet are
        // closed even when the query throws (the original leaked all three).
        // The credentials were declared but unused before; pass them now.
        try (Connection conn = DriverManager.getConnection(JDBC_URL, USERNAME, PASSWORD);
             Statement st = conn.createStatement();
             ResultSet rs = st.executeQuery("SELECT * FROM hive.t_user")) {

            ResultSetMetaData meta = rs.getMetaData();
            int columnCount = meta.getColumnCount();

            // Header line: column names, three-space separated.
            List<String> header = new ArrayList<>(columnCount);
            for (int i = 1; i <= columnCount; i++) {
                header.add(meta.getColumnName(i));
            }
            System.out.println("\t" + joinColumns(header));

            // One output line per result row, same separator as the header.
            while (rs.next()) {
                List<String> row = new ArrayList<>(columnCount);
                for (int i = 1; i <= columnCount; i++) {
                    row.add(rs.getString(i));
                }
                System.out.println("\t" + joinColumns(row));
            }
        } catch (SQLException e) {
            // Report instead of swallowing; keep the stack trace for diagnosis.
            System.err.println("Hive query failed: " + e.getMessage());
            e.printStackTrace();
        }
    }

    /**
     * Joins values with the original output format: each value is followed
     * by exactly three spaces (so the line ends with a trailing separator,
     * matching the legacy string-concatenation behavior byte-for-byte).
     *
     * @param values column names or cell values; may be empty
     * @return the concatenated line, empty string for an empty list
     */
    static String joinColumns(List<String> values) {
        StringBuilder sb = new StringBuilder();
        for (String value : values) {
            sb.append(value).append("   ");
        }
        return sb.toString();
    }
}
