package com.digiwin.demo.util;

import com.digiwin.demo.constant.ConfigConstant;
import com.mongodb.MongoClient;
import com.mongodb.ServerAddress;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import com.mongodb.hadoop.io.BSONWritable;
import com.mongodb.hadoop.mapred.MongoInputFormat;
import lombok.Data;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.hadoop.mapred.HadoopInputFormat;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.hadoop.mapred.JobConf;
import org.bson.Document;

import java.io.Serializable;

/**
 * @Author yanggld
 * @Date 2019/12/26-14:38
 * @Description Utility wrapping a MongoDB client for one database/collection pair,
 * also exposing the collection as a Flink {@link DataSource} through the
 * mongo-hadoop input format. Implements {@link AutoCloseable} so it can be used
 * in a try-with-resources block.
 */
@Data
public class MongoUtil implements AutoCloseable {
	// Connection target comes from project-wide configuration.
	private static final String HOST = ConfigConstant.MONGO_HOST;
	private static final int PORT = ConfigConstant.MONGO_PORT;

	private String dbName;
	private String colName;
	// One client per util instance; callers must invoke close() when done.
	private MongoClient mongoClient = new MongoClient(new ServerAddress(HOST, PORT));

	/**
	 * @param dbName  target database name
	 * @param colName target collection name
	 */
	public MongoUtil(String dbName, String colName) {
		this.dbName = dbName;
		this.colName = colName;
	}

	/** @return handle to the configured database */
	public MongoDatabase getDB() {
		return mongoClient.getDatabase(dbName);
	}

	/** @return handle to the configured collection */
	public MongoCollection<Document> getCollection() {
		return getDB().getCollection(colName);
	}

	/** Releases the underlying MongoDB client. */
	@Override
	public void close() {
		mongoClient.close();
	}

	/**
	 * Builds a Flink batch DataSource over this collection using the
	 * mongo-hadoop input format (keys and values as {@link BSONWritable}).
	 *
	 * @param env Flink batch execution environment
	 * @return data source of (key, document) tuples read from MongoDB
	 */
	public DataSource<Tuple2<BSONWritable, BSONWritable>> getInput(ExecutionEnvironment env) {
		// Adapt the MongoDB collection to Hadoop's InputFormat API so Flink can read it.
		HadoopInputFormat<BSONWritable, BSONWritable> hadoopInput =
				new HadoopInputFormat<>(new MongoInputFormat(), BSONWritable.class, BSONWritable.class, new JobConf());
		// Read the collection as a single split instead of letting the connector create splits.
		hadoopInput.getJobConf().set("mongo.input.split.create_input_splits", "false");
		String mongoUri = "mongodb://" + HOST + ":" + PORT + "/" + dbName + "." + colName;
		hadoopInput.getJobConf().set("mongo.input.uri", mongoUri);
		// Optional server-side filter, e.g.:
		// hadoopInput.getJobConf().set("mongo.input.query", condition);
		return env.createInput(hadoopInput);
	}
}
