package mapreduceFromToDb;

import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * MapReduce driver that reads rows from the MySQL table {@code person}
 * (columns id, name, age) via {@link DBInputFormat} and writes each row to
 * HDFS as a (id, row-as-text) pair.
 *
 * <p>NOTE(review): the JDBC URL, DB credentials, and the HDFS output path are
 * hard-coded below — consider externalizing them to configuration.
 */
public class ReadMapDB {
	/**
	 * Identity-style mapper: emits each {@code PersonRecorder} keyed by its
	 * {@code id} field, with the record's {@code toString()} form as the value.
	 */
	private static class DBMap extends Mapper<LongWritable, PersonRecorder, LongWritable, Text> {
		@Override
		protected void map(LongWritable key, PersonRecorder value, Context context)
				throws IOException, InterruptedException {
			context.write(new LongWritable(value.id), new Text(value.toString()));
		}
	}

	public static void main(String[] args) throws Exception {
		// JobConf(Class) also sets the job jar by this class's location.
		JobConf conf = new JobConf(ReadMapDB.class);
		DBConfiguration.configureDB(conf, "com.mysql.jdbc.Driver",
				"jdbc:mysql://127.0.0.1:3306/test", "root", "root");
		// Columns fetched from the "person" table, in SELECT order.
		String[] fields = {"id", "name", "age"};

		Job job = new Job(conf, "readDB");
		// BUGFIX: key was misspelled "mapred.job.tarcker", which always
		// returned null; the correct Hadoop key is "mapred.job.tracker".
		System.out.println("schema:" + conf.get("mapred.job.tracker"));

		// Read table "person" ordered by "id"; rows are deserialized into
		// PersonRecorder (must implement Writable and DBWritable).
		DBInputFormat.setInput(job, PersonRecorder.class, "person", null, "id", fields);
		job.setInputFormatClass(DBInputFormat.class);
		job.setOutputKeyClass(LongWritable.class);
		job.setOutputValueClass(Text.class);
		job.setMapperClass(DBMap.class);

		// Remove any previous output so the job does not fail on an
		// already-existing output directory.
		String path = "hdfs://192.168.8.70:8020/root/outputdb";
		FileSystem fs = FileSystem.get(conf);
		Path p = new Path(path);
		if (fs.exists(p)) {
			fs.delete(p, true); // recursive delete
		}
		FileOutputFormat.setOutputPath(job, p);

		// Block until the job finishes; exit 0 on success, 1 on failure.
		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}
}
