package com.rainsoft.center.isec.stream.library.controller.impl;

import com.alibaba.fastjson.JSONObject;
import com.rainsoft.center.isec.common.entity.Constants;
import com.rainsoft.center.isec.common.entity.RedisBaseInfo;
import com.rainsoft.center.isec.common.utils.JedisUtil;
import com.rainsoft.center.isec.stream.library.controller.BaseLibDataLoad;
import com.rainsoft.center.isec.stream.library.entity.hbase.impl.RealidLoginout;
import com.rainsoft.center.isec.stream.library.service.LibService;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;

import java.util.List;
import java.util.Properties;


/**
 * @Name com.rainsoft.center.isec.stream.library.controller.impl.RealidDataLoad
 * @Description Streams realid login/logout records from Kafka, saves each batch to HBase and runs library statistics over it
 * @Author Elwyn
 * @Version 2017/11/28
 * @Copyright 上海云辰信息科技有限公司
 **/
public class RealidDataLoad extends BaseLibDataLoad<RealidLoginout> {


	public RealidDataLoad(long duration, Class<RealidLoginout> aClass) {
		super(duration, aClass);
	}

	@Override
	public void process() {
		JavaStreamingContext javaStreamingContext = getJavaStreamingContext();
		SparkSession sparkSession = SparkSession.builder().getOrCreate();

		JavaInputDStream<ConsumerRecord<String, String>> javaInputDStream = getJavaInputDStream(javaStreamingContext);
		JavaDStream<RealidLoginout> realidLoginoutDStream = javaInputDStream.map(
				(Function<ConsumerRecord<String, String>, RealidLoginout>) v1 -> JSONObject.parseObject(v1.value(), tClass))
				.filter(new Function<RealidLoginout, Boolean>() {
					@Override
					public Boolean call(RealidLoginout v1) throws Exception {
						return v1 != null;
					}
				});

		//找缓存
		List<RedisBaseInfo> listBaseInfo = JedisUtil.get("listBaseInfo");



		JavaSparkContext javaSparkContext=javaStreamingContext.sparkContext();
		JavaRDD<RedisBaseInfo> baseInfoJavaRDD =javaSparkContext.parallelize(listBaseInfo);
		Dataset<Row> baseInfoTable = sparkSession.createDataFrame(baseInfoJavaRDD, RedisBaseInfo.class);
		baseInfoTable.show();

		realidLoginoutDStream.foreachRDD(new VoidFunction<JavaRDD<RealidLoginout>>() {
			@Override
			public void call(JavaRDD<RealidLoginout> realidLoginoutJavaRDD) throws Exception {
				if (realidLoginoutJavaRDD.isEmpty()) {
					return;
				}
				realidLoginoutJavaRDD.cache();

				//存hbase
				saveToHBase(realidLoginoutJavaRDD);
				try {
					 LibService libService	=new LibService();
					libService.statLib(sparkSession, baseInfoTable, realidLoginoutJavaRDD);
				} catch (Exception e) {
					e.printStackTrace();
				}

			}
		});
		start(javaStreamingContext);
	}


	public Dataset<Row> getDataFromJdbc(SparkSession sparkSession, String tableName, String sql) {
		Properties connectionProperties = new Properties();
		connectionProperties.put("user", Constants.JDBC_USERNAME);
		connectionProperties.put("password", Constants.JDBC_PASSWORD);
		connectionProperties.put("driver", Constants.JDBC_DRIVER);
		Dataset<Row> endingImprove = sparkSession.read().jdbc(Constants.JDBC_URL, tableName, connectionProperties);
		endingImprove.createOrReplaceTempView("sys_dict");
		Dataset<Row> sql1 = sparkSession.sql(sql);
		return sql1;

	}
}
