package com.ustcinfo.study.hbase.r1.chenzhiyu;

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

import java.util.List;
import java.util.Map;

/**
 * Transfers data from MySQL into HBase.
 * Contains a single main method that queries MySQL and inserts the
 * resulting rows into the HBase table BD_TEST.
 * @author chen.zhiyu2
 * @email chen.zhiyu2@ustcinfo.com
 *
 */
public class Transfer {

    /** Target HBase table name. */
    private static final String TABLE = "BD_TEST";

    public static void main(String[] args) {
        // Recreate BD_TEST from scratch: drop it if present, then create it
        // with two column families F1 and F2 (createTable takes varargs families).
        if (HBaseMetaDao.tableExist(TABLE)) {
            HBaseMetaDao.deleteTable(TABLE);
        }
        HBaseMetaDao.createTable(TABLE, "F1", "F2");

        // First batch: six MySQL columns stored under family F1, row keys 1..n.
        List<Map<String, String>> list1 = ExtractData.getData(
                "id", "task_id", "create_time", "start_time",
                "out_filter_number", "from_run_param");
        for (int i = 0; i < list1.size(); i++) {
            Put put = new Put(Bytes.toBytes(i + 1));
            addColumns(put, "F1", list1.get(i),
                    "id", "task_id", "create_time", "start_time",
                    "out_filter_number", "from_run_param");
            HBaseOperationDao.put(TABLE, put);
        }

        // Second batch: six more columns stored under family F2.
        // BUG FIX: the original loop iterated over list1 (not list2) and read
        // the column "start_time", which this query does not select — both
        // would have produced wrong data / NullPointerException. It now
        // iterates list2 and uses "parent_task_id" as actually queried.
        // NOTE(review): row keys continue at list1.size()+1 as in the original;
        // if both batches describe the same logical rows, reusing key (i + 1)
        // here would place F1 and F2 on the same row — confirm intent.
        List<Map<String, String>> list2 = ExtractData.getData(
                "current_retry_times", "retry_flag", "parent_task_id",
                "end_time", "kill_time", "failure_time");
        for (int i = 0; i < list2.size(); i++) {
            Put put = new Put(Bytes.toBytes(list1.size() + i + 1));
            addColumns(put, "F2", list2.get(i),
                    "current_retry_times", "retry_flag", "parent_task_id",
                    "end_time", "kill_time", "failure_time");
            HBaseOperationDao.put(TABLE, put);
        }
    }

    /**
     * Adds one cell per column name to {@code put} under the given family,
     * reading each value from {@code row}.
     *
     * Uses {@link Bytes#toBytes(String)} (always UTF-8) instead of
     * {@code String.getBytes()}, whose result depends on the platform
     * default charset.
     *
     * @param put     the Put to populate
     * @param family  column family name
     * @param row     column-name -&gt; value map for one row
     * @param columns column names to copy from {@code row}
     */
    private static void addColumns(Put put, String family,
                                   Map<String, String> row, String... columns) {
        byte[] familyBytes = Bytes.toBytes(family);
        for (String column : columns) {
            put.addColumn(familyBytes, Bytes.toBytes(column), Bytes.toBytes(row.get(column)));
        }
    }
}
