package SparkAPILearning;

import com.google.protobuf.GeneratedMessage;
import com.googlecode.protobuf.format.JsonFormat;
import com.sdyc.ndmp.protobuf.hadoop.ProtobufInputFormat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import scala.Tuple2;

/**
 * <pre>
 * Created with IntelliJ IDEA.
 * User: zhengzhi
 * Date: 2017/4/18
 * To change this template use File | Settings | File Templates.
 * </pre>
 */
public class protobuf2JsonStringInput {

    /** Hadoop configuration key under which the input path is stored. */
    private static final String INPUT_PATH_KEY = "INPUT_PATH";

    // transient: Spark driver-side handles must not be serialized with closures.
    private transient JavaSparkContext sc;

    private transient Configuration hadoopConf;

    /**
     * Initializes the Spark context and Hadoop configuration with an empty
     * input path. Kept for backward compatibility — prefer {@link #init(String)}
     * so the path does not have to be patched into the configuration afterwards.
     */
    public void init() {
        init("");
    }

    /**
     * Initializes the Spark context and Hadoop configuration.
     *
     * @param inputPath path of the protobuf-encoded input files to read
     */
    public void init(String inputPath) {
        sc = new JavaSparkContext();
        hadoopConf = new Configuration();

        hadoopConf.set(INPUT_PATH_KEY, inputPath);
    }

    /**
     * Builds an RDD of JSON strings, one element per protobuf message read
     * from the path stored under {@code INPUT_PATH} in the Hadoop
     * configuration.
     *
     * <p>Requires {@link #init()} or {@link #init(String)} to have been
     * called first; otherwise {@code sc} and {@code hadoopConf} are null.
     *
     * @return an RDD whose elements are the JSON renderings of the input messages
     */
    public JavaRDD<String> getProtobufferSourceRDD() {
        return sc.newAPIHadoopFile(hadoopConf.get(INPUT_PATH_KEY),
                ProtobufInputFormat.class,
                LongWritable.class, GeneratedMessage.class, hadoopConf)
                .map(new Function<Tuple2<LongWritable, GeneratedMessage>, String>() {
                    @Override
                    public String call(Tuple2<LongWritable, GeneratedMessage> v1) throws Exception {
                        // Render the protobuf message payload (the tuple value) as JSON.
                        return JsonFormat.printToString(v1._2());
                    }
                });

    }

}
