package com.electron.power.reduce;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.hadoop.mapred.HadoopInputFormat;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.hadoop.mapred.JobConf;

import com.alibaba.fastjson.JSON;
import com.mongodb.hadoop.io.BSONWritable;
import com.mongodb.hadoop.mapred.MongoInputFormat;

import lombok.extern.slf4j.Slf4j;

@Slf4j
public class MongoDBDatasetHandler {

	/** MongoDB connection URI pointing at the {@code test.modStr} collection. */
	public static final String MONGO_URI = "mongodb://m.juhefupay.shop:3389/test.modStr";

	/**
	 * Reads every document from the configured MongoDB collection through the
	 * mongo-hadoop input format, converts each BSON document to its JSON string
	 * form, and logs the total number of records read.
	 *
	 * <p>CLI arguments {@code --webSource} (default {@code "baidu"}) and
	 * {@code --year} (default {@code 2016}) are parsed but only take effect once
	 * the query filter below is re-enabled.
	 *
	 * @param args Flink {@link ParameterTool}-style arguments
	 * @throws Exception if the Flink job or the MongoDB read fails
	 */
	public static void main(String[] args) throws Exception {
		// Parse optional CLI arguments.
		final ParameterTool parameterTool = ParameterTool.fromArgs(args);
		String webSource = parameterTool.get("webSource", "baidu");
		int year = parameterTool.getInt("year", 2016);
		// NOTE(review): the source/year filter is currently disabled, so ALL
		// documents are read. Re-enable with e.g.:
		//   String.format("{'source':'%s','year':{'$regex':'^%d'}}", webSource, year)
		String condition = "{}";

		// Create the batch execution environment.
		final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		// Wrap MongoDB's Hadoop input format so Flink can consume it as a DataSet source.
		HadoopInputFormat<BSONWritable, BSONWritable> hdIf = new HadoopInputFormat<>(
				new MongoInputFormat(), BSONWritable.class, BSONWritable.class, new JobConf());
		hdIf.getJobConf().set("mongo.input.split.create_input_splits", "false");
		hdIf.getJobConf().set("mongo.input.uri", MONGO_URI);
		hdIf.getJobConf().set("mongo.input.query", condition);

		// Map each (key, document) tuple to its JSON string and count the records.
		// The explicit MapFunction cast keeps Flink's type extraction working for
		// the lambda despite generic type erasure.
		long count = env.createInput(hdIf)
				.map((MapFunction<Tuple2<BSONWritable, BSONWritable>, String>) value -> {
					// f0 carries the document _id key; f1 carries the BSON document body.
					BSONWritable doc = value.getField(1);
					return JSON.parseObject(doc.getDoc().toString()).toJSONString();
				})
				.count();
		log.info("总共读取到{}条MongoDB数据", count);
	}
}
