package com.johnguo.carbondata_demo;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Set;

import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.sql.CarbonContext;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka.KafkaUtils;

import com.johnguo.carbondata_demo.LoadData.Sale;

import kafka.serializer.StringDecoder;
import scala.Tuple2;

public class StreamCarbonLoad {

	/** Number of pipe-delimited fields expected in each Kafka record. */
	private static final int FIELD_COUNT = 7;

	/**
	 * Consumes pipe-delimited sale records from the Kafka topic
	 * {@code input_topic}, parses them into {@link Sale} beans and appends
	 * each non-empty 10-second micro-batch to the CarbonData table
	 * {@code sale}.
	 */
	public static void main(String... args) {
		SparkConf conf = new SparkConf().setMaster("local[4]").setAppName("StreamCarbonLoad");
		JavaSparkContext sc = new JavaSparkContext(conf);
		// Declared final: it is captured by the foreachRDD anonymous class
		// below (required pre-Java-8). foreachRDD runs on the driver, so the
		// context itself is never shipped to executors.
		final CarbonContext cc = new CarbonContext(sc.sc(), "hdfs://master:9000/usr/cardata");

		Set<String> topicSet = new HashSet<String>();
		topicSet.add("input_topic");

		HashMap<String, String> kafkaParam = new HashMap<String, String>();
		kafkaParam.put("metadata.broker.list", "master:9092");

		// 10-second micro-batch interval.
		JavaStreamingContext jssc = new JavaStreamingContext(sc, new Duration(10000));
		JavaPairInputDStream<String, String> message = KafkaUtils.createDirectStream(jssc, String.class, String.class,
				StringDecoder.class, StringDecoder.class, kafkaParam, topicSet);

		// Parse each "name|area|brand|sale_pay|pay_num|total_pay|date" value.
		// A malformed record (too few fields, non-numeric amounts) yields null
		// and is dropped by the filter below, instead of killing the whole
		// streaming job with ArrayIndexOutOfBoundsException /
		// NumberFormatException as the unguarded version did.
		JavaDStream<Sale> valueDStream = message.map(new Function<Tuple2<String, String>, Sale>() {
			private static final long serialVersionUID = 1L;

			@Override
			public Sale call(Tuple2<String, String> tuple) throws Exception {
				String[] vals = tuple._2().split("\\|");
				if (vals.length < FIELD_COUNT) {
					return null; // dropped by the filter below
				}
				try {
					Sale sale = new Sale();
					sale.setName(vals[0]);
					sale.setArea(vals[1]);
					sale.setBrand(vals[2]);
					sale.setSale_pay(Double.valueOf(vals[3]));
					sale.setPay_num(Integer.valueOf(vals[4]));
					sale.setTotal_pay(Double.valueOf(vals[5]));
					sale.setDate(vals[6]);
					return sale;
				} catch (NumberFormatException e) {
					return null; // bad numeric field — drop the record
				}
			}
		}).filter(new Function<Sale, Boolean>() {
			private static final long serialVersionUID = 1L;

			@Override
			public Boolean call(Sale sale) throws Exception {
				return sale != null;
			}
		});

		// Append every non-empty batch to the CarbonData table "sale".
		valueDStream.foreachRDD(new VoidFunction<JavaRDD<Sale>>() {
			private static final long serialVersionUID = 1L;

			@Override
			public void call(JavaRDD<Sale> rdd) throws Exception {
				if (!rdd.isEmpty()) {
					DataFrame df = cc.createDataFrame(rdd, Sale.class);
					df.write().format("org.apache.spark.sql.CarbonSource").option("tableName", "sale")
							.mode(SaveMode.Append).save();
				}
			}
		});
		// Print the per-batch record count for quick visual monitoring.
		valueDStream.count().print();

		jssc.start();
		jssc.awaitTermination();
	}
}
