package com.navinfo.platform;


import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


public class RealtimePbProcesser {

    // SLF4J logger; made final per convention (constant reference, never reassigned).
    private static final Logger logger = LoggerFactory.getLogger(RealtimePbProcesser.class);

    /**
     * Entry point — currently a placeholder with no runtime behavior.
     *
     * <p>The original file contained a commented-out Spark prototype that (per its own
     * comments) read GPS data from MongoDB, serialized {@code RealTimeData} protobuf
     * messages to byte arrays, and saved them to HDFS as a
     * {@code <NullWritable, BytesWritable>} SequenceFile (optionally Snappy-compressed).
     * That dead code has been removed; recover it from version control if needed.
     *
     * <p>The unused {@code throws InterruptedException} clause was dropped — nothing in
     * the body can throw it, and narrowing a throws clause is backward-compatible.
     *
     * @param args command-line arguments (currently unused)
     */
    public static void main(final String[] args) {
        // TODO(review): implement the realtime protobuf pipeline
        // (MongoDB GPS source -> RealTimeData protobuf -> HDFS SequenceFile).
        logger.info("RealtimePbProcesser started; pipeline not yet implemented.");
    }
}