package db.read;
import luculent.HdfsUtils;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.lib.IdentityReducer;
import org.apache.hadoop.mapred.lib.db.DBConfiguration;
import org.apache.hadoop.mapred.lib.db.DBInputFormat;
import org.apache.hadoop.mapreduce.Job;


/**
 * MapReduce driver that reads rows from an Oracle database (table FPB_ADDRESS)
 * via {@link DBInputFormat} and writes them to HDFS.
 *
 * <p>Uses the legacy {@code org.apache.hadoop.mapred} API throughout
 * (JobConf-based configuration), submitted through {@link Job}.
 * Connection details and paths are currently hard-coded for the
 * {@code master:9000} cluster and the {@code linshi} schema.</p>
 */
public class DBAccess {

      /**
       * Configures and runs the DB-to-HDFS import job.
       *
       * <p>Exits with status 0 on job success and 1 on job failure, so the
       * outcome is visible to calling scripts.</p>
       *
       * @param args unused
       * @throws Exception if HDFS cleanup, job configuration, or submission fails
       */
      public static void main(String[] args) throws Exception {

             JobConf conf = new JobConf(DBAccess.class);
             conf.setOutputKeyClass(LongWritable.class);
             conf.setOutputValueClass(Text.class);
             conf.setInputFormat(DBInputFormat.class);
             // Run in local mode: the job executes in-process instead of on
             // the cluster ("local" is also the framework default).
             conf.set("mapreduce.framework.name", "local");

             // Remove any previous output; FileOutputFormat fails if the
             // output directory already exists.
             HdfsUtils.deleteDir("/user/fpbaddress");
             Path path = new Path("hdfs://master:9000/user/fpbaddress");
             FileOutputFormat.setOutputPath(conf, path);

             // JDBC connection to the source Oracle instance.
             // NOTE(review): credentials are hard-coded — consider
             // externalizing to configuration.
             DBConfiguration.configureDB(conf,"oracle.jdbc.driver.OracleDriver", "jdbc:oracle:thin:@192.168.0.182:1521/orcl","linshi","linshi");

             // Columns to read; "id" is also used as the ORDER BY key so
             // input splits are deterministic.
             String[] fields = {"id", "name", "intro"};
             DBInputFormat.setInput(conf, DBRecord.class, "FPB_ADDRESS",
                        null, "id", fields);
             conf.setMapperClass(DBRecordMapper.class);
             conf.setReducerClass(IdentityReducer.class);

             Job job = Job.getInstance(conf);
             // Propagate the job outcome: previously the boolean result was
             // ignored and "Finished" was printed even on failure.
             boolean success = job.waitForCompletion(true);
             System.out.println(success ? "Finished" : "Job failed");
             System.exit(success ? 0 : 1);
      }
}