package com.xian.java.batch;

import com.mongodb.hadoop.io.BSONWritable;
import com.mongodb.hadoop.mapred.MongoInputFormat;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.hadoop.mapred.HadoopInputFormat;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.hadoop.mapred.JobConf;

/**
 * flink获取mongo数据
 */
/**
 * Flink batch job that reads a MongoDB collection through the mongo-hadoop
 * connector, prints every record, and reports the total record count.
 *
 * <p>Usage: pass {@code --mongoInputUri mongodb://user:pass@host:port/db.collection?authSource=admin}
 * on the command line. Note that {@code authSource} (not {@code authenticationDatabase})
 * is the correct query parameter for selecting the authentication database.
 */
public class GetMongoData {
    public static void main(String[] args) throws Exception {

        // Read the connection URI from program arguments; fall back to the
        // historical default so existing invocations keep working.
        // SECURITY NOTE(review): the fallback embeds a plaintext credential —
        // prefer always supplying --mongoInputUri (or an external secret store)
        // and removing this default.
        ParameterTool parameterTool = ParameterTool.fromArgs(args);
        String mongoInputUri = parameterTool.get(
                "mongoInputUri",
                "mongodb://pubuser:KAov0XvKeFPVrF+4+rz-6P49B9U6Rz@172.18.4.25:20017/activity.activityAssistanceRelationPo?authSource=admin");
        System.out.println("传入mongoInputUri=" + mongoInputUri);

        // Set up the batch execution environment.
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // Wrap the mongo-hadoop MongoInputFormat in Flink's Hadoop input-format
        // adapter; records arrive as (key, value) pairs of BSONWritable.
        HadoopInputFormat<BSONWritable, BSONWritable> hdIf = new HadoopInputFormat<BSONWritable, BSONWritable>(
                new MongoInputFormat(), BSONWritable.class, BSONWritable.class, new JobConf());

        // Specify connection parameters for the input format.
        hdIf.getJobConf().set("mongo.input.uri", mongoInputUri);

        DataSource<Tuple2<BSONWritable, BSONWritable>> dataSource = env.createInput(hdIf);

        // collect() triggers a single job execution; calling print() and then
        // count() on the DataSource would run the whole job (and re-read Mongo)
        // twice, since each is a separate Flink sink/action.
        java.util.List<Tuple2<BSONWritable, BSONWritable>> records = dataSource.collect();
        for (Tuple2<BSONWritable, BSONWritable> record : records) {
            System.out.println(record);
        }
        long count = records.size();
        System.out.println("总共数据量为:" + count);

        System.out.println("程序运行完毕！");
    }
}
