/**
 * FileName: StructuredStreamingJoinHDFS
 * Author:   SAMSUNG-PC 孙中军
 * Date:     2019/02/22 10:25
 * Description: Reads data from HDFS, joins it with a second HDFS stream, and writes the result out.
 */
package cn.com.bonc.app;

import cn.com.bonc.process.impl.RuleActionProcessImpl;
import cn.com.bonc.process.chain.ProcessChain;
import cn.com.bonc.process.impl.SingleColum2MutiProcessImpl;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.streaming.OutputMode;
import org.apache.spark.sql.streaming.StreamingQuery;
import org.apache.spark.sql.streaming.StreamingQueryException;

import static org.apache.spark.sql.functions.col;
import static org.apache.spark.sql.functions.expr;

public class StructuredStreamingJoinHDFS {

	// HDFS path of the streaming text source.
	private static final String SOURCE_PATH = "/test_szj/source_data";

	// HDFS path of the second text stream that the source is joined against.
	private static final String HDFS_JOIN_PATH = "/test_szj/join_data";

	// Target location for persisting joined results to HDFS.
	// Not used by the console sink below; kept for the HDFS "text" sink variant
	// (set it as both "path" and "checkpointLocation" on the writeStream).
	private static final String SAVE_PATH = "hdfs://192.168.70.21:9000/test_szj/parquet7";

	/**
	 * Entry point: reads a text stream from {@link #SOURCE_PATH}, runs it through
	 * the processing chain (rule filtering + single-column-to-multi-column split),
	 * joins the result with the stream read from {@link #HDFS_JOIN_PATH} on
	 * {@code phone = value}, and writes the joined rows to the console.
	 *
	 * @param args unused command-line arguments
	 */
	public static void main(String[] args) {

		SparkSession spark = SparkSession
				.builder()
				.appName("HDFSJoinHDFSApp")
				.getOrCreate();

		// Streaming source: each file line becomes a Row with a single "value" column.
		Dataset<Row> hdfsDataset = spark
				.readStream()
				.option("latestFirst", "false")  // process oldest files first
				.option("fileNameOnly", "false") // identify new files by full path, not name only
				.text(SOURCE_PATH);

		// Second streaming input providing the join side ("value" column).
		Dataset<String> joinSideDataset = spark
				.readStream()
				.textFile(HDFS_JOIN_PATH);

		// Apply the rule-action filter, then split the single text column into
		// multiple columns (phone, userAgent, URL, ...).
		Dataset<Row> filterResultDataset = ProcessChain
				.setSourceData(hdfsDataset)
				.addProcess(new RuleActionProcessImpl())
				.addProcess(new SingleColum2MutiProcessImpl())
				.execute();

		// Join the two streams on phone = value and print results to the console.
		// NOTE(review): this is a stream-stream join with no watermark on either
		// side, so Spark must keep unbounded join state — consider adding
		// withWatermark() to both inputs; verify against the Spark version in use.
		StreamingQuery query = filterResultDataset
				.join(joinSideDataset, expr("phone = value")) // join condition
				.select(col("phone"), col("userAgent"), col("URL"))
				.writeStream()
				.format("console")
				.outputMode(OutputMode.Append())
				//.trigger(Trigger.ProcessingTime(30)) // optional fixed trigger interval
				.start();

		// Block the driver until the streaming query terminates or fails.
		try {
			query.awaitTermination();
		} catch (StreamingQueryException e) {
			e.printStackTrace();
		}
	}

}