package com.zhangwoo.analyser.jobs.matcher;

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.log4j.Logger;

class MyMergeOneDayMapper extends MapReduceBase implements
		Mapper<LongWritable, Text, Text, Text> {
	JobConf job = null;

	// Single reusable empty key: every record is emitted under the same key so
	// the job's lone reducer (numReduceTasks = 1) sees all lines together.
	// Reusing one Text avoids allocating a new object per input record.
	private final Text emptyKey = new Text("");

	@Override
	public void configure(JobConf job) {
		this.job = job;
	}

	/**
	 * Identity-style map: forwards each input line unchanged, keyed by the
	 * empty string so all values funnel into a single reduce group.
	 */
	public void map(LongWritable key, Text value,
			OutputCollector<Text, Text> output, Reporter reporter)
			throws IOException {
		output.collect(emptyKey, value);
	}
}


class MyMergeOneDayReducer extends MapReduceBase implements
		Reducer<Text, Text, Text, Text> {
	JobConf job = null;

	private Logger logger = Logger.getLogger(MyMergeOneDayReducer.class);

	@Override
	public void configure(JobConf job) {
		this.job = job;
	}

	/**
	 * Writes every non-blank value straight into a single HDFS file at the
	 * path given by the job property "oneday", one value per line. The job
	 * runs with exactly one reduce task, so this produces one merged file.
	 */
	public void reduce(Text key, Iterator<Text> values,
			OutputCollector<Text, Text> output, Reporter reporter)
			throws IOException {
		// Use the job's own configuration rather than rebuilding one with a
		// hard-coded NameNode address ("10.0.0.70:9000" previously), so the
		// job runs against whichever cluster it was submitted to.
		FileSystem hdfs = FileSystem.get(job);

		Path hdfsFile = new Path(job.get("oneday"));
		logger.info("now hdfs path is : " + hdfsFile);

		FSDataOutputStream out = null;
		try {
			out = hdfs.create(hdfsFile);
			// Encode the separator once; Text content is UTF-8, so write the
			// payload as UTF-8 too instead of the platform default charset.
			byte[] separator = System.getProperty("line.separator").getBytes("UTF-8");
			while (values.hasNext()) {
				Text val = values.next();
				if (val.toString().trim().length() == 0) {
					continue;
				}
				out.write(val.toString().getBytes("UTF-8"));
				// Raw write: the old writeUTF() call prepends a 2-byte length
				// header (modified UTF-8 framing), which corrupted the file.
				out.write(separator);
			}
		} catch (IOException ioe) {
			logger.error("write in hdfs error!", ioe);
		} finally {
			// Always release the stream, even when the write loop fails;
			// the old code leaked it on any exception before close().
			if (out != null) {
				try {
					out.close();
				} catch (IOException closeError) {
					logger.error("close hdfs stream error!", closeError);
				}
			}
		}
	}
}

public class MergeOneDay {
	// Job jar shipped to the cluster via the distributed cache ("tmpjars").
	private static final String TMP_JAR = "/hadoop/hadoop-1.0.3/jobs-2.0.3.jar";

	/**
	 * Driver: merges one day's worth of input paths into a single HDFS file.
	 *
	 * @param args args[0] is the day token (merged output lands at
	 *             "digitalmall/" + args[0]); args[1..n] are input paths.
	 * @throws IOException on job submission failure
	 * @throws ClassNotFoundException
	 * @throws InterruptedException
	 */
	public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
		// Need the day token plus at least one input path; the old check
		// (args.length == 0) allowed a single-arg run with no inputs at all.
		if (args.length < 2) {
			System.err.println("Usage: MergeOneDay <day> <inputPath> [inputPath...]");
			System.exit(-1);
		}

		// Use this driver class consistently; the original passed
		// AllDataAnalyser.class here only to override it with setJarByClass
		// below, so the effective jar is unchanged.
		JobConf conf = new JobConf(com.zhangwoo.analyser.jobs.matcher.MergeOneDay.class);
		conf.setJobName("MergeOneDay@" + System.currentTimeMillis());
		conf.set("oneday", "digitalmall/" + args[0]);
		addTmpJar(TMP_JAR, conf);
		for (int i = 1; i < args.length; i++) {
			FileInputFormat.addInputPaths(conf, args[i]);
		}
		// Timestamped scratch directory; the reducer writes the real merged
		// file to the "oneday" path, this is only the framework output.
		FileOutputFormat.setOutputPath(conf, new Path("result/"
				+ System.currentTimeMillis()));

		conf.setJarByClass(com.zhangwoo.analyser.jobs.matcher.MergeOneDay.class);
		conf.setMapperClass(MyMergeOneDayMapper.class);
		conf.setReducerClass(MyMergeOneDayReducer.class);
		// One reducer so all mapped lines end up in a single merged file.
		conf.setNumReduceTasks(1);
		conf.setInputFormat(TextInputFormat.class);
		conf.setOutputFormat(TextOutputFormat.class);
		conf.setOutputKeyClass(Text.class);
		conf.setOutputValueClass(Text.class);
		JobClient.runJob(conf);
	}

	/**
	 * Appends a local jar to the job's "tmpjars" list so Hadoop ships it to
	 * task nodes via the distributed cache.
	 *
	 * @param jarPath local filesystem path to the jar
	 * @param conf    job configuration to update
	 * @throws IOException if the local filesystem cannot be accessed
	 */
	public static void addTmpJar(String jarPath, Configuration conf) throws IOException {
		// NOTE(review): forcing ':' as the path separator looks like a
		// workaround for submitting from a Windows client (MAPREDUCE-era
		// path.separator bug) — confirm before removing.
		System.setProperty("path.separator", ":");
		FileSystem fs = FileSystem.getLocal(conf);
		String newJarPath = new Path(jarPath).makeQualified(fs).toString();
		String tmpjars = conf.get("tmpjars");
		if (tmpjars == null || tmpjars.length() == 0) {
			conf.set("tmpjars", newJarPath);
		} else {
			conf.set("tmpjars", tmpjars + "," + newJarPath);
		}
	}
}