import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobConfigurable;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.lib.db.DBConfiguration;
import org.apache.hadoop.mapred.lib.db.DBInputFormat;
import org.apache.hadoop.mapred.lib.db.DBWritable;

public class DBCooccurance {

	static public class CooccurMapper extends MapReduceBase implements
			Mapper<LongWritable, DBRow, Text, LongWritable>, JobConfigurable {

		/**
		 * Emits every (key, value) co-occurrence pair carried by one
		 * database row; the reducer later sums the values per key.
		 */
		public void map(LongWritable key, DBRow val,
				OutputCollector<Text, LongWritable> output, Reporter reporter)
				throws IOException {
			for (CooccurData pair : val.getCooccurances()) {
				output.collect(pair.key, pair.value);
			}
		}

		/** Reads the column count from the job config before any map() call. */
		public void configure(JobConf job) {
			// NOTE(review): DBRow.numCols is a static field, so this assumes
			// one configuration per task JVM — confirm against DBRow.
			DBRow.numCols = job.getInt("numCols", 0);
		}
	}
	
	private static class CooccurReducer extends MapReduceBase implements
	Reducer<Text, LongWritable, Text, LongWritable> {

		/**
		 * Sums all partial counts observed for {@code key} and emits a
		 * single (key, total) pair.
		 */
		@Override
		public void reduce(Text key, Iterator<LongWritable> values,
				OutputCollector<Text, LongWritable> output, Reporter reporter)
				throws IOException {
			long total = 0;
			for (Iterator<LongWritable> it = values; it.hasNext();) {
				total += it.next().get();
			}
			output.collect(key, new LongWritable(total));
		}
	}

	/**
	 * Job driver: reads rows of {@code <table>} from MySQL through
	 * DBInputFormat, has the mapper emit co-occurrence pairs, and sums the
	 * per-key counts in the reducer.
	 *
	 * Expected arguments (in order):
	 * server database user password table num-cols output-path
	 */
	@SuppressWarnings("deprecation")
	public static void main(String[] args) throws IOException {

		if (args.length < 7) {
			System.err.println("Usage: hadoop jar <jar-file> <mysql-server-address> " +
					"<database> <user> <password> <table> <num-cols> <output-path>");
			System.err.println("Sample cmd: hadoop jar ~/Medicaid/Medicaid.jar DBCooccurance localhost:3306 test root social test_data 3 dbout");
			return;
		}

		String mysqlServerAddress = args[0];
		String mysqlDatabase = args[1];
		String mysqlUser = args[2];
		String mysqlPassword = args[3];
		String mysqlTable = args[4];
		String outputPath = args[6];

		// Fail fast with a readable message instead of an uncaught
		// NumberFormatException when <num-cols> is not a valid integer.
		int numCols;
		try {
			numCols = Integer.parseInt(args[5]);
		} catch (NumberFormatException e) {
			System.err.println("<num-cols> must be an integer, got: " + args[5]);
			return;
		}
		if (numCols <= 0) {
			System.err.println("<num-cols> must be positive, got: " + numCols);
			return;
		}

		JobConf conf = new JobConf(DBCooccurance.class);
		conf.setJobName("DBCooccurance");
		conf.setInputFormat(DBInputFormat.class);
		// Read back by CooccurMapper.configure() on each task.
		conf.setInt("numCols", numCols);

		DBConfiguration.configureDB(conf, "com.mysql.jdbc.Driver",
				"jdbc:mysql://" + mysqlServerAddress + "/" + mysqlDatabase,
				mysqlUser, mysqlPassword);

		// NOTE(review): the table name is concatenated directly into SQL.
		// Acceptable for an operator-run CLI tool, but never expose these
		// arguments to untrusted callers without validating the identifier.
		String query = "select * from " + mysqlTable;
		String countQuery = "select count(*) from " + mysqlTable;
		DBInputFormat.setInput(conf, DBRow.class, query, countQuery);

		conf.setMapperClass(CooccurMapper.class);
		// Long summation is associative and commutative, so the reducer can
		// also run as a combiner to cut shuffle traffic.
		conf.setCombinerClass(CooccurReducer.class);
		conf.setReducerClass(CooccurReducer.class);

		conf.setMapOutputKeyClass(Text.class);
		conf.setMapOutputValueClass(LongWritable.class);
		conf.setOutputKeyClass(Text.class);
		conf.setOutputValueClass(LongWritable.class);

		FileOutputFormat.setOutputPath(conf, new Path(outputPath));

		long startTime = System.currentTimeMillis();

		JobClient.runJob(conf);
		System.out.println("Job Finished in "
				+ (System.currentTimeMillis() - startTime) / 1000.0
				+ " seconds");
	}
}
