package com.ustcinfo.study.hbase.wangqiangqiang;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

import com.ustcinfo.ishare.bdp.hbase.HBaseMetaDao;
import com.ustcinfo.ishare.bdp.hbase.HBaseOperationDao;

public class Mysql2Hbase {

	/**
	 * Reads every row of the MySQL table {@code transfer_task_to} and converts it
	 * into an HBase {@link Put} keyed by the row's {@code plan_id}.
	 *
	 * <p>Column mapping (qualifier names are kept identical to the MySQL columns):
	 * <ul>
	 *   <li>family {@code F1}: id, task_id, create_time, start_time,
	 *       out_filter_number, from_run_param</li>
	 *   <li>family {@code F2}: current_retry_times, retry_flag, parent_task_id,
	 *       end_time, kill_time, failure_time</li>
	 * </ul>
	 *
	 * <p>Nullable columns (e.g. end_time/kill_time/failure_time on unfinished
	 * tasks) are simply skipped instead of throwing a NullPointerException, and
	 * rows without a plan_id are dropped because they have no usable row key.
	 *
	 * @return the list of Puts built from the table; empty if the query fails
	 *         (the SQLException is logged, matching the original behavior)
	 */
	public static List<Put> getPuts() {
		List<Put> list = new ArrayList<Put>();
		Connection conn = null;
		try {
			conn = DBUtils.getConnection();
			String sql = "select * from transfer_task_to";
			// try-with-resources guarantees the statement and result set are
			// closed even when an exception is thrown mid-iteration; previously
			// both leaked.
			try (PreparedStatement ps = conn.prepareStatement(sql);
					ResultSet rs = ps.executeQuery()) {
				while (rs.next()) {
					String plan_id = rs.getString("plan_id");
					if (plan_id == null) {
						// No row key available for this record; skip it.
						continue;
					}
					// Bytes.toBytes(String) always encodes UTF-8, unlike
					// String.getBytes(), which depends on the platform charset.
					Put put = new Put(Bytes.toBytes(plan_id));

					// id is a primitive int and can never be null.
					put.add(Bytes.toBytes("F1"), Bytes.toBytes("id"),
							Bytes.toBytes(rs.getInt("id")));
					addString(put, "F1", "task_id", rs.getString("task_id"));
					addTimestamp(put, "F1", "create_time", rs.getTimestamp("create_time"));
					addTimestamp(put, "F1", "start_time", rs.getTimestamp("start_time"));
					addString(put, "F1", "out_filter_number", rs.getString("out_filter_number"));
					addString(put, "F1", "from_run_param", rs.getString("from_run_param"));

					addString(put, "F2", "current_retry_times", rs.getString("current_retry_times"));
					addString(put, "F2", "retry_flag", rs.getString("retry_flag"));
					addString(put, "F2", "parent_task_id", rs.getString("parent_task_id"));
					addTimestamp(put, "F2", "end_time", rs.getTimestamp("end_time"));
					addTimestamp(put, "F2", "kill_time", rs.getTimestamp("kill_time"));
					addTimestamp(put, "F2", "failure_time", rs.getTimestamp("failure_time"));

					list.add(put);
				}
			}
		} catch (SQLException e) {
			e.printStackTrace();
		} finally {
			DBUtils.closeConnection(conn);
		}
		return list;
	}

	/**
	 * Adds a string column to {@code put}, silently skipping {@code null} values
	 * (Bytes.toBytes(null) would throw a NullPointerException).
	 */
	private static void addString(Put put, String family, String qualifier, String value) {
		if (value != null) {
			put.add(Bytes.toBytes(family), Bytes.toBytes(qualifier), Bytes.toBytes(value));
		}
	}

	/**
	 * Adds a timestamp column to {@code put} as its string form, silently
	 * skipping {@code null} values (e.g. end_time on tasks that never finished).
	 */
	private static void addTimestamp(Put put, String family, String qualifier, Timestamp value) {
		if (value != null) {
			put.add(Bytes.toBytes(family), Bytes.toBytes(qualifier), Bytes.toBytes(value.toString()));
		}
	}

	/**
	 * Recreates the HBase table {@code BD_TEST} with families F1/F2 and loads it
	 * from MySQL. If the table already exists it is dropped first so the load
	 * always starts from an empty table.
	 */
	public static void main(String[] args) {
		String tableName = "BD_TEST";
		// Only the "drop a pre-existing table" step is conditional; the
		// create/load/report sequence was duplicated in both branches before.
		if (HBaseMetaDao.tableExist(tableName)) {
			HBaseMetaDao.deleteTable(tableName);
		}
		HBaseMetaDao.createTable(tableName, "F1", "F2");
		HBaseOperationDao.putList(tableName, getPuts());
		System.out.println("导入数据完毕");
	}
}
