package com.song.sparkstudy;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.PairFunction;

import scala.Tuple2;

import org.apache.hadoop.io.Text;

/**
 * Demo of writing a Hadoop SequenceFile to HDFS and reading it back as a
 * Spark pair RDD. {@link #sequenceFileWrite()} produces 100
 * (IntWritable, Text) records; {@link #sequencerdd()} reads them with
 * {@code JavaSparkContext.sequenceFile} and prints each pair on the driver.
 */
public class SequenceFileTest {

	/** Sample values cycled through as the Text payload of each record. */
	private static final String[] DATA = { "One", "Two", "Three", "aaa", "bbb", "ccc" };

	public static void main(String[] args) {

//		sequenceFileWrite();
		sequencerdd();

	}

	/**
	 * Reads the SequenceFile at hdfs://song-dinfo:9000/seq/data as a pair RDD
	 * of (IntWritable, Text), converts each pair to plain Java types, collects
	 * to the driver, and prints every entry.
	 */
	private static void sequencerdd() {

		SparkConf conf = new SparkConf().setAppName("rddtest");
		// FIX: JavaSparkContext is Closeable; the original leaked it. Use
		// try-with-resources so the Spark application shuts down cleanly.
		try (JavaSparkContext context = new JavaSparkContext(conf)) {

			JavaPairRDD<Integer, String> seqrdd = context
					.sequenceFile("hdfs://song-dinfo:9000/seq/data", IntWritable.class, Text.class)
					// Hadoop's record reader reuses Writable instances, so copy
					// each key/value into immutable Java types before collect().
					.mapToPair(new PairFunction<Tuple2<IntWritable, Text>, Integer, String>() {

						@Override
						public Tuple2<Integer, String> call(Tuple2<IntWritable, Text> t) throws Exception {
							return new Tuple2<Integer, String>(t._1.get(), t._2.toString());
						}
					});

			for (Tuple2<Integer, String> t : seqrdd.collect()) {
				System.out.println(t._1() + "---------" + t._2);
			}
		}
	}

	/**
	 * Writes 100 (IntWritable, Text) records to the HDFS SequenceFile
	 * /seq/data. Keys count down from 100; values cycle through {@link #DATA}.
	 * Compression is chosen by {@code compressType}:
	 * "1" = NONE, "2" = BLOCK, "3" = RECORD, anything else = NONE.
	 */
	private static void sequenceFileWrite() {
		String hdfsurl = "hdfs://song-dinfo:9000";
		String pathstr = "/seq/data";
		String compressType = "1";

		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", hdfsurl);
		Path path = new Path(pathstr);

		// Reusable Writable instances; reset via set(...) for each record.
		IntWritable key = new IntWritable();
		Text value = new Text();

		SequenceFile.Writer.Option pathopt = SequenceFile.Writer.file(path);
		SequenceFile.Writer.Option keyclassopt = SequenceFile.Writer.keyClass(key.getClass());
		SequenceFile.Writer.Option valueClassOpt = SequenceFile.Writer.valueClass(value.getClass());

		// Resolve the compression type once instead of duplicating the
		// Writer.compression(...) call in every branch.
		CompressionType type;
		if (compressType.equals("2")) {
			type = CompressionType.BLOCK;
		} else if (compressType.equals("3")) {
			type = CompressionType.RECORD;
		} else {
			// "1" and any unrecognized value both mean no compression.
			type = CompressionType.NONE;
		}
		SequenceFile.Writer.Option compressionopt = SequenceFile.Writer.compression(type);

		// FIX: the original never closed the writer, so buffered records could
		// be lost and the HDFS lease left open. try-with-resources guarantees
		// flush + close on both success and failure.
		try (SequenceFile.Writer writer =
				SequenceFile.createWriter(conf, pathopt, keyclassopt, valueClassOpt, compressionopt)) {

			for (int i = 0; i < 100; i++) {
				key.set(100 - i);
				value.set(DATA[i % DATA.length]);
				writer.append(key, value);
			}

		} catch (IOException e) {
			// TODO: route through a real logger; stack trace kept for now.
			e.printStackTrace();
		}
	}
}
