package com.xian.java.batch;


import com.mongodb.hadoop.io.BSONWritable;
import com.mongodb.hadoop.mapred.MongoInputFormat;
import com.xian.java.MyDataUtil;
import org.apache.flink.api.common.io.RichOutputFormat;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.hadoop.mapred.HadoopInputFormat;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.configuration.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapred.JobConf;
import org.bson.BSONObject;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;

/**
 * Flink batch job: read documents from MongoDB and write them into HBase.
 * Example submission (credentials redacted — never commit real passwords):
 * flink run --class com.xian.java.batch.GetMongoData /home/pubuser/szx-dev-224-144/Flink-1-11-V1.0-jar-with-dependencies.jar --mongoInputUri mongodb://USER:PASSWORD@172.18.4.25:20017/traffic.ws_traffic_operate_pay?authSource=admin --hbaseZK 172.18.224.143:2181,172.18.224.144:2181,172.18.224.145:2181
 * 创建hbase 命名空间
 * create_namespace 'traffic'
 * 创建表名
 * create 'traffic:ws_traffic_operate_pay', 'traffic'
 */
public class GetMongoData23 {

    /** Target HBase table (namespace:table); must exist before the job runs. */
    private static final String TABLE_NAME = "traffic:ws_traffic_operate_pay";

    /** Column family every qualifier is written under. */
    private static final String CF = "traffic";

    /** Sub-documents of "data" whose fields are flattened with a "key_" prefix. */
    private static final String[] POINT_KEYS = {
            "startPoint", "endPoint", "startVehPoint", "endVehPoint"};

    /** Top-level fields (siblings of "data") copied verbatim into HBase. */
    private static final String[] TOP_LEVEL_KEYS = {
            "areaCode", "cityCode", "send", "parentSend",
            "commonName", "creationTime", "lastModifiedTime"};

    /**
     * Entry point. Required arguments:
     * <ul>
     *   <li>{@code --mongoInputUri} — MongoDB connection URI for the source collection</li>
     *   <li>{@code --hbaseZK} — ZooKeeper quorum of the target HBase cluster</li>
     * </ul>
     *
     * @param args command-line arguments, parsed with {@link ParameterTool}
     * @throws Exception if the Flink job fails
     */
    public static void main(String[] args) throws Exception {

        ParameterTool parameterTool = ParameterTool.fromArgs(args);
        String mongoInputUri = parameterTool.get("mongoInputUri");
        String hbaseZK = parameterTool.get("hbaseZK");
        // Fail fast with a clear message instead of an NPE deep inside the connectors.
        if (mongoInputUri == null || hbaseZK == null) {
            throw new IllegalArgumentException(
                    "Both --mongoInputUri and --hbaseZK must be supplied");
        }
        System.out.println("传入mongoInputUri="+mongoInputUri);
        System.out.println("传入hbaseZK="+hbaseZK);

        // 获取执行环境
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // 定义输入格式 — mongo-hadoop's MapReduce InputFormat wrapped for Flink.
        HadoopInputFormat<BSONWritable, BSONWritable> hdIf = new HadoopInputFormat<>(
                new MongoInputFormat(), BSONWritable.class, BSONWritable.class, new JobConf());

        // 指定mongo 连接url
        hdIf.getJobConf().set("mongo.input.uri", mongoInputUri);

        DataSource<Tuple2<BSONWritable, BSONWritable>> dataSource = env.createInput(hdIf);

        dataSource.output(new RichOutputFormat<Tuple2<BSONWritable, BSONWritable>>() {

            // Connection and mutator are created per task in open() and must not be
            // serialized with the output format.
            private transient Connection con;
            private transient BufferedMutator mutator;

            @Override
            public void configure(Configuration parameters) {
                // No Flink-side configuration required.
            }

            @Override
            public void open(int taskNumber, int numTasks) throws IOException {
                org.apache.hadoop.conf.Configuration conf = HBaseConfiguration.create();
                conf.set("hbase.zookeeper.quorum", hbaseZK);
                con = ConnectionFactory.createConnection(conf);
                System.out.println("获取到连接:"+con);

                // One BufferedMutator per task, shared across all records: batches
                // writes up to 10 MiB and flushes to HBase automatically. (The
                // original code leaked a new mutator per field of every record.)
                BufferedMutatorParams params = new BufferedMutatorParams(
                        TableName.valueOf(TABLE_NAME)).writeBufferSize(10 * 1024 * 1024);
                mutator = con.getBufferedMutator(params);
            }

            @Override
            public void writeRecord(Tuple2<BSONWritable, BSONWritable> record) throws IOException {
                BSONObject doc = record.f1.getDoc();
                // "data" holds most payload fields; a few siblings live at the top level.
                BSONObject dataDoc = (BSONObject) doc.get("data");

                // 组装rowkey: orderTime|orderId
                String ot = MyDataUtil.timeStamp2Date(dataDoc.get("orderTime").toString());
                String oi = dataDoc.get("orderId").toString();
                Put put = new Put(Bytes.toBytes(ot + "|" + oi));

                // Flatten nested point documents with a prefix; copy scalars directly.
                for (String dataKey : dataDoc.keySet()) {
                    if (isPointKey(dataKey)) {
                        addNested(put, dataKey + "_", (BSONObject) dataDoc.get(dataKey));
                    } else {
                        addColumn(put, dataKey, dataDoc.get(dataKey));
                    }
                }

                // 还有几个节点 — top-level fields, written exactly once per record
                // (the original wrote them once per data key, inside the loop).
                for (String topKey : TOP_LEVEL_KEYS) {
                    addColumn(put, topKey, doc.get(topKey));
                }

                // Hand the Put to the shared buffered mutator; no per-record flush,
                // so the write buffer can actually batch.
                mutator.mutate(put);
            }

            @Override
            public void close() throws IOException {
                // BufferedMutator.close() flushes any buffered mutations first.
                if (null != mutator) {
                    mutator.close();
                }
                if (null != con) {
                    con.close();
                }
            }
        });

        //务必执行
        env.execute();

        System.out.println("程序运行完毕！");
    }

    /** Returns true when {@code key} names one of the nested point sub-documents. */
    private static boolean isPointKey(String key) {
        for (String p : POINT_KEYS) {
            if (p.equals(key)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Adds every field of {@code nested} to {@code put} under {@code prefix + field}.
     *
     * @param put    the HBase Put under construction
     * @param prefix qualifier prefix, e.g. {@code "startPoint_"}
     * @param nested the nested BSON sub-document (skipped entirely when null)
     */
    private static void addNested(Put put, String prefix, BSONObject nested) {
        if (nested == null) {
            return;
        }
        for (String key : nested.keySet()) {
            addColumn(put, prefix + key, nested.get(key));
        }
    }

    /**
     * Adds a single column to {@code put}, silently skipping null values
     * (the original code would NPE on a missing field).
     */
    private static void addColumn(Put put, String qualifier, Object value) {
        if (value != null) {
            put.addColumn(Bytes.toBytes(CF), Bytes.toBytes(qualifier),
                    Bytes.toBytes(value.toString()));
        }
    }
}
