package mapreduceFromToDb;

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.lib.db.DBOutputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;

public class WriteMapDB {
	/**
	 * Mapper: tokenizes each input line on whitespace and emits a
	 * (word, 1) pair per token.
	 */
	private static class WMap extends Mapper<LongWritable, Text, Text, IntWritable> {
		// Reused across map() calls to avoid a per-record allocation.
		private Text word = new Text();
		private IntWritable one = new IntWritable(1);

		@Override
		protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, IntWritable>.Context context)
				throws IOException, InterruptedException {
			String line = value.toString();
			// Strip a leading byte-order mark (U+FEFF) that some editors prepend
			// to the first line of a UTF-8 file; otherwise the first word would
			// carry the BOM and be counted as a distinct token.
			// (Original used line.getBytes().length >= 1 and the magic number
			// 65279 — isEmpty() avoids allocating a byte array per record.)
			if (!line.isEmpty() && line.charAt(0) == '\uFEFF') {
				line = line.substring(1);
			}
			StringTokenizer st = new StringTokenizer(line);
			while (st.hasMoreTokens()) {
				word.set(st.nextToken());
				context.write(word, one);
			}
		}

	}

	/**
	 * Combiner: performs a map-side partial sum of the counts for each word so
	 * that less data crosses the shuffle. Input and output types both match the
	 * mapper's output types, as required of a combiner.
	 */
	private static class WCombine extends Reducer<Text, IntWritable, Text, IntWritable> {
		@Override
		protected void reduce(Text key, Iterable<IntWritable> value,
				Reducer<Text, IntWritable, Text, IntWritable>.Context context)
				throws IOException, InterruptedException {
			int total = 0;
			for (IntWritable partial : value) {
				total += partial.get();
			}
			context.write(key, new IntWritable(total));
		}
	}

	/**
	 * Final reducer: totals all counts for a word and emits a WordRecoder
	 * holding (word, count) as the output key; the Text value is an empty
	 * placeholder. NOTE(review): presumably WordRecoder implements
	 * DBWritable/Writable so DBOutputFormat can persist it — confirm against
	 * its definition (not visible in this file).
	 */
	private static class WReduce extends Reducer<Text, IntWritable, WordRecoder, Text> {
		@Override
		protected void reduce(Text key, Iterable<IntWritable> value,
				Reducer<Text, IntWritable, WordRecoder, Text>.Context context)
				throws IOException, InterruptedException {
			int total = 0;
			for (IntWritable partial : value) {
				total += partial.get();
			}
			WordRecoder record = new WordRecoder();
			record.word = key.toString();
			record.count = total;
			context.write(record, new Text());
		}
	}

	/**
	 * Configures and submits the job: reads text files from HDFS, counts words
	 * (map → combine → reduce), and writes (word, count) rows into the MySQL
	 * table "wordresult" via DBOutputFormat.
	 *
	 * @param args unused
	 */
	public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
		JobConf conf = new JobConf(WriteMapDB.class);
		// NOTE(review): JDBC credentials are hard-coded — consider externalizing.
		DBConfiguration.configureDB(conf, "com.mysql.jdbc.Driver", "jdbc:mysql://127.0.0.1:3306/test", "root", "root");
		String[] fields = new String[] { "word", "count" };
		Job job = new Job(conf, "WriteDB");
		System.out.println("schema:" + conf.get("mapred.job.tracker"));
		// BUG FIX: the rest of this job uses the new "mapreduce" API, so the
		// new API's DBOutputFormat must be used here — the old
		// org.apache.hadoop.mapred.lib.db.DBOutputFormat (imported at the top
		// of the file) neither accepts a Job nor extends mapreduce.OutputFormat,
		// so the original calls did not compile. Fully qualified to avoid a
		// simple-name clash with that existing import.
		org.apache.hadoop.mapreduce.lib.db.DBOutputFormat.setOutput(job, "wordresult", fields);
		job.setInputFormatClass(TextInputFormat.class);
		job.setOutputFormatClass(org.apache.hadoop.mapreduce.lib.db.DBOutputFormat.class);
		job.setNumReduceTasks(1);
		// BUG FIX: the map output types (Text, IntWritable) differ from the
		// final reduce output types (WordRecoder, Text), so both pairs must be
		// declared explicitly. The original declared only Text/IntWritable,
		// which mismatches WReduce's WordRecoder/Text output.
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(IntWritable.class);
		job.setOutputKeyClass(WordRecoder.class);
		job.setOutputValueClass(Text.class);

		job.setMapperClass(WMap.class);
		/** Set the Combiner class (map-side partial sums) **/
		job.setCombinerClass(WCombine.class);
		/** Set the Reducer class **/
		job.setReducerClass(WReduce.class);

		/** Set the input path on HDFS */
		FileInputFormat.setInputPaths(job, new Path("hdfs://192.168.8.70:8020/root/input"));

		System.exit(job.waitForCompletion(true) ? 0 : 1);

	}
}
