package com.weishe.hive;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.util.Date;
import java.util.TimerTask;

public class LoadDataToHive extends TimerTask {

	/**
	 * Scheduled task: Flume writes one data file per minute; each run loads the
	 * file produced during the previous minute into the Hive table
	 * {@code trackinfo}, partitioned by day ({@code ds=yyyyMMdd}).
	 */
	@Override
	public void run() {

		Connection conn = null;
		PreparedStatement ps = null;
		try {
			conn = Util.getConn();
			// Locate the data file generated one minute ago.
			long f = System.currentTimeMillis() - 60000L;
			Date ts = new Date(f);
			// The Flume file name is a zero-padded 24-hour stamp (see the example
			// below: '2244.*'), so the patterns must be "HH"/"mm". The previous
			// "h"/"m" patterns produced 12-hour, non-padded values (e.g. "95" at
			// 09:05) and never matched the files.
			// NOTE(review): assumes Util.getD delegates to SimpleDateFormat-style
			// patterns — confirm against Util's implementation.
			String path = "/flume/" + Util.getDay(ts, "yyyy-MM-dd") + "/"
					+ Util.getD(ts, "HH") + Util.getD(ts, "mm") + ".*";
			System.out.println(path);
			// e.g. load data  inpath '/flume/2015-10-07/2244.*' into table trackinfo partition (ds='20151007')
			// Hive does not support bind parameters in LOAD DATA, so the statement
			// is concatenated from internally generated (trusted) values only.
			String sql = "load data  inpath '" + path + "' into table trackinfo partition (ds='"
					+ Util.getDay(ts, "yyyyMMdd") + "')";
			ps = conn.prepareStatement(sql);
			ps.execute();
		} catch (Exception e) {
			e.printStackTrace();
		} finally {
			// Close resources on every tick; the previously commented-out close
			// leaked one connection per minute.
			if (ps != null) {
				try {
					ps.close();
				} catch (Exception ignored) {
					// best-effort close; connection is closed below regardless
				}
			}
			Util.close(conn);
		}
	}

	/**
	 * Manual smoke test: loads the previous minute's file from a fixed day
	 * directory into a fixed partition (ds='20150826').
	 */
	public static void main(String[] args) {
		Connection conn = null;
		PreparedStatement ps = null;
		try {
			conn = Util.getConn();
			// Locate the data file generated one minute ago.
			long f = System.currentTimeMillis() - 60000L;
			Date ts = new Date(f);
			// "HH"/"mm" for a zero-padded 24-hour file-name stamp (see run()).
			String path = "/flume/15-08-26/" + Util.getD(ts, "HH") + Util.getD(ts, "mm") + ".*";
			System.out.println(path);
			String sql = "load data  inpath '" + path + "' into table trackinfo partition (ds='20150826')";
			ps = conn.prepareStatement(sql);
			ps.execute();
		} catch (Exception e) {
			e.printStackTrace();
		} finally {
			if (ps != null) {
				try {
					ps.close();
				} catch (Exception ignored) {
					// best-effort close; connection is closed below regardless
				}
			}
			Util.close(conn);
		}
	}
}
