package com.gome.extract;

import java.io.IOException;
import java.net.UnknownHostException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.log4j.Logger;

import com.gome.extract.common.AppConfiguer;
import com.mongodb.DBCollection;
import com.mongodb.Mongo;
import com.mongodb.MongoOptions;
import com.mongodb.ServerAddress;

/**
 * MapReduce driver that scans an HBase table (named by the first CLI argument)
 * with {@link SpiderDataMapper} and writes per-table URL output to HDFS under
 * {@code /flowcrawl/<table>_urls}. Before each run it drops the matching
 * MongoDB collection and removes any previous HDFS output directory.
 */
public class SpiderDataDriver extends Configured implements Tool {
	private static Logger logger = Logger.getLogger(SpiderDataDriver.class);

	// Shared Mongo client built once from AppConfiguer settings.
	// NOTE(review): stays null if the static initializer fails, in which case
	// clean() will throw NPE later — consider failing fast here instead.
	public static Mongo flowCrawlConfigMongo = null;
	static {
		try {
			MongoOptions options = new MongoOptions();
			options.connectionsPerHost = 100;
			options.autoConnectRetry = true;
			options.threadsAllowedToBlockForConnectionMultiplier = 5;
			options.slaveOk = true;
			ServerAddress serverAddress = new ServerAddress(AppConfiguer.getStringByKey("mongodb_host"),
					AppConfiguer.getIntByKey("mongodb_port"));
			flowCrawlConfigMongo = new Mongo(serverAddress, options);
		} catch (UnknownHostException e) {
			logger.error(AppConfiguer.getTrace(e));
		}
	}

	/**
	 * Configures and runs the "Spider Data To Mongo" job.
	 *
	 * @param arg0 CLI arguments; arg0[0] is the source HBase table name
	 * @return 0 if the job succeeded, 1 if it failed
	 * @throws IllegalArgumentException if no table name argument is given
	 * @throws Exception on job setup/submission errors
	 */
	@Override
	public int run(String[] arg0) throws Exception {
		// FIX: fail with a clear message instead of ArrayIndexOutOfBoundsException.
		if (arg0 == null || arg0.length < 1) {
			throw new IllegalArgumentException("usage: SpiderDataDriver <source-hbase-table>");
		}
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.property.clientPort", AppConfiguer.getStringByKey("zkport"));
        conf.set("hbase.zookeeper.quorum", AppConfiguer.getStringByKey("zkhost"));
        conf.set("from.table", arg0[0]);
        // Speculative map tasks would duplicate writes to Mongo; keep disabled.
        conf.setBoolean("mapred.map.tasks.speculative.execution", false);
        conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());

        Job job = new Job(conf, "Spider Data To Mongo");
        job.setJarByClass(DBOperator.class);

        Scan scan = new Scan();
        // Full-table scan: don't pollute the region servers' block cache.
        scan.setCacheBlocks(false);

        TableMapReduceUtil.initTableMapperJob(conf.get("from.table"), scan, SpiderDataMapper.class, Text.class, IntWritable.class, job);

        // NOTE(review): namenode address is hard-coded; consider reading it from
        // configuration (fs.defaultFS) instead.
        String path = "hdfs://10.58.222.102:9000/flowcrawl/" + arg0[0] + "_urls";
        clean(path, arg0[0]);
        FileOutputFormat.setOutputPath(job, new Path(path));

        // FIX: propagate the job's outcome instead of unconditionally returning 0,
        // so ToolRunner/callers can detect failures via the exit code.
        return job.waitForCompletion(true) ? 0 : 1;
	}

	/**
	 * Prepares for a fresh run: drops the target Mongo collection and removes
	 * the previous HDFS output directory so FileOutputFormat can create it.
	 *
	 * @param filePath    fully-qualified HDFS path of the job output directory
	 * @param collectName Mongo collection (= HBase table name) to drop
	 * @throws IOException on HDFS access failure
	 */
	private void clean(String filePath, String collectName) throws IOException {

		flowCrawlConfigMongo.getDB("flowcrawl").getCollection(collectName).drop();

		Configuration conf = new Configuration();
		conf.addResource(new Path(AppConfiguer.getConfDir() + "/core-site.xml"));
		conf.addResource(new Path(AppConfiguer.getConfDir() + "/hdfs-site.xml"));
		conf.addResource(new Path(AppConfiguer.getConfDir() + "/mapred-site.xml"));
		conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
		// FIX: use an uncached FileSystem instance. FileSystem.get() returns the
		// JVM-wide cached instance, and closing that would break the subsequent
		// job's HDFS access.
		FileSystem fs = FileSystem.newInstance(conf);
		try {
			Path path = new Path(filePath);
			// FIX: deleteOnExit() only removes the path when the JVM exits — far
			// too late, since FileOutputFormat fails if the output dir already
			// exists. Delete it now, recursively.
			if (fs.exists(path)) {
				fs.delete(path, true);
			}
		} finally {
			fs.close();
		}
	}

}
