package chen.bupt.test;

import java.io.IOException;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import chen.bupt.util.HDFSFileUtils;

public class HadoopText {
	/**
	 * Mapper that, for every input record it receives, emits each line of a
	 * fixed HDFS side file ("/user/chenlingpeng/data.txt") as the output key
	 * with a {@link NullWritable} value. The incoming key/value from the
	 * job's input split are intentionally ignored.
	 */
	public static class SynonymsMap extends
			Mapper<LongWritable, Text, Text, NullWritable> {

		// Lines of the side file, loaded once per task in setup(). The
		// original code re-read the whole file from HDFS on every map()
		// call, which is loop-invariant I/O repeated per input record.
		private List<String> sideFileLines;

		@Override
		protected void setup(Context context) throws IOException,
				InterruptedException {
			sideFileLines = HDFSFileUtils.readLines("/user/chenlingpeng/data.txt");
		}

		@Override
		public void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			// Emit every side-file line once per input record; the record's
			// own key and value are deliberately unused.
			for (String line : sideFileLines) {
				context.write(new Text(line), NullWritable.get());
			}
		}

		/**
		 * Configures and runs the "hdfsreadjob" MapReduce job: reads
		 * /user/chenlingpeng/data.txt, writes to /user/chenlingpeng/Test
		 * (deleting any stale output first), and exits non-zero if the job
		 * fails instead of silently reporting success.
		 *
		 * @param args unused
		 * @throws Exception if job setup or execution fails
		 */
		public static void main(String[] args) throws Exception {
			Configuration conf = new Configuration();
			// Job.getInstance replaces the deprecated new Job(conf, name) ctor.
			Job job = Job.getInstance(conf, "hdfsreadjob");
			job.setJarByClass(HadoopText.class);
			job.setMapperClass(SynonymsMap.class);
			job.setOutputKeyClass(Text.class);
			job.setOutputValueClass(NullWritable.class);
			Path input = new Path("/user/chenlingpeng/data.txt");
			Path output = new Path("/user/chenlingpeng/Test");
			// Remove any previous output so the job does not fail on an
			// already-existing output directory.
			HDFSFileUtils.deleteFile(output, conf);
			FileInputFormat.addInputPath(job, input);
			FileOutputFormat.setOutputPath(job, output);
			// Propagate job success/failure as the process exit code; the
			// original discarded the boolean and always exited 0.
			System.exit(job.waitForCompletion(true) ? 0 : 1);
		}
	}

}
