package dao.hive;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Timer;

import mysql.MysqlUtil;

import org.apache.hadoop.hive.service.ThriftHive.AsyncClient.execute_call;

/**
 * Hive工具类
 * 
 * @author
 * 
 */
/**
 * Utility class for executing HiveQL statements over the shared Hive
 * connection obtained from {@code Connect.GetHiveConn()}, and for copying
 * query results into MySQL via {@code MysqlUtil}.
 *
 * <p>NOTE(review): {@code Connect.GetHiveConn()} appears to hand out a shared
 * connection, so this class never closes the {@code Connection} itself —
 * confirm ownership with the {@code Connect} implementation.
 */
public class HiveUtil {

	/** Utility class — not meant to be instantiated. */
	private HiveUtil() {
	}

	/**
	 * Placeholder for Hive table creation; not implemented yet.
	 */
	public static void createTable() {
		// intentionally empty — table creation is not implemented
	}

	/**
	 * Runs a HiveQL query and returns its result set.
	 *
	 * <p>NOTE(review): the backing {@code Statement} is deliberately NOT
	 * closed here, because closing it would also close the returned
	 * {@code ResultSet}. The caller is responsible for closing the result
	 * set (and ideally its statement via {@code ResultSet.getStatement()})
	 * when done, or the statement leaks.
	 *
	 * @param hql the HiveQL query to execute
	 * @return the query's open result set
	 * @throws SQLException if the query fails
	 */
	public static ResultSet queryHive(String hql) throws SQLException {
		Connection conn = Connect.GetHiveConn();
		Statement stmt = conn.createStatement();
		return stmt.executeQuery(hql);
	}

	/**
	 * Executes a single SQL statement.
	 *
	 * <p>NOTE(review): despite the name, this runs the statement on the
	 * HIVE connection ({@code Connect.GetHiveConn()}), not on MySQL —
	 * verify with callers whether that is intended.
	 *
	 * @param sql the statement to execute
	 * @throws SQLException if execution fails
	 */
	public static void insertToMysql(String sql) throws SQLException {
		Connection conn = Connect.GetHiveConn();
		Statement sta = conn.createStatement();
		try {
			sta.execute(sql);
		} finally {
			// close the Statement even when execution throws
			// (the original leaked it); the shared connection stays open
			sta.close();
		}
	}

	/**
	 * Runs a HiveQL query, inserts the resulting rows into MySQL, and
	 * returns how long the query itself took.
	 *
	 * @param sql     the HiveQL query to run against Hive
	 * @param tomysql the MySQL target handed to
	 *                {@code MysqlUtil.insertToMysql}
	 * @return elapsed query time in whole seconds (integer division of
	 *         milliseconds by 1000)
	 * @throws SQLException if the query or the MySQL insert fails
	 */
	public static long execute(String sql, String tomysql) throws SQLException {
		long startTime = System.currentTimeMillis(); // query start time
		System.out.println(startTime);
		Connection conn = Connect.GetHiveConn();
		Statement sta = conn.createStatement();
		try {
			ResultSet rs = sta.executeQuery(sql);

			long endTime = System.currentTimeMillis(); // query end time
			System.out.println(endTime);

			// Copy the analysis result set into MySQL; presumably
			// MysqlUtil fully consumes rs here — confirm before the
			// statement (and with it rs) is closed below.
			MysqlUtil.insertToMysql(tomysql, rs);
			return (endTime - startTime) / 1000;
		} finally {
			// closing the Statement also closes its ResultSet
			// (the original leaked both)
			sta.close();
		}
	}
}