package parallel;

import java.io.IOException;
import java.net.URISyntaxException;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Mapper.Context;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import parallel.ParallelPageGenerator.NodeGenMapper;
import event.BasicConfig;
import event.EventNode;
import event.Generator;
import event.Link;
import event.PageConfig;
import event.PageGenerator;

/**
 * Map-only MapReduce job that bulk-loads link records into the HBase
 * "Node" table. Each input line (CSV or JSON, selected by
 * {@code BasicConfig.getFormat()}) becomes one or more {@link Put}s whose
 * row key is the record id, whose column family is the link name, and
 * whose column qualifier is the link value.
 */
public class ParallelPageLoader extends Configured implements Tool {
	/** Target HBase table name. */
	private static final String NODE = "Node";
	/** Path component used to build the input glob. */
	private static final String LINK = "Link";
	/** JSON field that carries the record identifier. */
	private static final String ID = "Id";

	/**
	 * Mapper that parses one input line and writes {@link Put}s directly to
	 * the {@code TableOutputFormat} (the job runs with zero reducers).
	 */
	static class NodeTableMapper
	extends Mapper<LongWritable, Text,ImmutableBytesWritable, Writable> {
		private byte[] columnFamily = null;
		private PageConfig nodeConfig = null;
		private BasicConfig basicConfig = null;

		@Override
		protected void setup(Context context)
		{
			nodeConfig = new PageConfig();
			basicConfig = new BasicConfig();
		}

		@Override
		protected void map(LongWritable row, Text line, Context context)
				throws IOException, InterruptedException {
			// Format 1 is CSV; any other value is treated as JSON.
			if (basicConfig.getFormat() == 1) {
				loadCsv(line, context);
			} else {
				loadJson(line, context);
			}
		}

		/**
		 * Parses a JSON record and emits one fully-populated {@link Put} per
		 * link type found on the record's {@code EventNode} schema.
		 * Malformed JSON is logged and skipped so one bad record does not
		 * fail the whole task.
		 */
		private void loadJson(Text line, Context context) throws IOException, InterruptedException
		{
			try {
				FileSplit fileSplit = (FileSplit) context.getInputSplit();
				String filename = fileSplit.getPath().getName();
				System.out.println("input file: " + filename);
				JSONObject record = new JSONObject(line.toString());
				String id = record.getString(ID);
				// Ids look like "<nodeName>-<suffix>"; the node name selects
				// which link schema applies to this record.
				String name = id.split("-")[0];
				EventNode eventNode = nodeConfig.getEventNode(name);
				byte[] rowKey = Bytes.toBytes(id);
				List<Link> linkList = eventNode.getLinkList();
				for (int i = 0; i < linkList.size(); i++) {
					Link link = linkList.get(i);
					String linkName = link.getLinkName();
					String linkParam = link.getParams().get(0);
					// Use the link name as the column family.
					columnFamily = Bytes.toBytes(linkName);
					Put put = new Put(rowKey);
					JSONArray attrs = record.getJSONArray(linkName);
					for (int j = 0; j < attrs.length(); j++) {
						JSONObject attr = attrs.getJSONObject(j);
						String linkValue = attr.getString(linkParam);
						// Link value becomes the column qualifier.
						put.add(columnFamily, Bytes.toBytes(linkValue), Bytes.toBytes(true));
						System.out.println(id + ": " + linkValue);
					}
					// BUG FIX: the Put was previously written once per
					// attribute while still being populated, emitting
					// duplicate partial mutations. Write it once, fully
					// populated, per link type; final HBase state is the same.
					if (attrs.length() > 0) {
						context.write(new ImmutableBytesWritable(rowKey), put);
					}
				}
			} catch (JSONException e) {
				// Best-effort load: report the bad record and continue.
				e.printStackTrace();
			}
		}

		/**
		 * Parses a CSV line of the form {@code id,linkValue}. The link name
		 * is taken from the second dash-separated segment of the input file
		 * name. Malformed lines or file names are skipped rather than
		 * crashing the task.
		 */
		private void loadCsv(Text line, Context context) throws IOException, InterruptedException
		{
			FileSplit fileSplit = (FileSplit) context.getInputSplit();
			String filename = fileSplit.getPath().getName();
			String[] fileNameSplits = filename.split("-");
			if (fileNameSplits.length < 2) {
				// File name does not carry a link name; nothing to load.
				return;
			}
			String linkName = fileNameSplits[1];
			String[] fields = line.toString().split(",");
			if (fields.length < 2) {
				// Malformed line: need both an id and a link value.
				return;
			}
			String id = fields[0];
			// Use link name as the column family.
			columnFamily = Bytes.toBytes(linkName);
			byte[] rowKey = Bytes.toBytes(id);
			Put put = new Put(rowKey);
			// Use the link value as a column name, added at runtime.
			put.add(columnFamily, Bytes.toBytes(fields[1]), Bytes.toBytes(true));
			context.write(new ImmutableBytesWritable(rowKey), put);
		}
	}

	/**
	 * Configures and runs the map-only load job.
	 *
	 * @param args {@code args[0]} is the input root; files are read from
	 *             {@code args[0]/*&#47;Link/*&#47;*}
	 * @return 0 on success, 1 on failure
	 * @throws ClassNotFoundException
	 * @throws InterruptedException
	 * @throws URISyntaxException
	 */
	public int run(String[] args) throws IOException, InterruptedException, ClassNotFoundException, URISyntaxException {
		Configuration conf = HBaseConfiguration.create(getConf());
		Job job = new Job(conf, "parallelNodeLoader");
		job.setMapperClass(NodeTableMapper.class);
		// Map-only job: Puts go straight to the table output format.
		job.setNumReduceTasks(0);
		job.setJarByClass(ParallelPageLoader.class);
		job.setInputFormatClass(TextInputFormat.class);
		job.setOutputFormatClass(TableOutputFormat.class);
		job.getConfiguration().set(TableOutputFormat.OUTPUT_TABLE, NODE);
		job.setOutputKeyClass(ImmutableBytesWritable.class);
		job.setOutputValueClass(Writable.class);
		String inputPath = args[0] + "/*/" + LINK + "/*/*";
		TextInputFormat.setInputPaths(job, new Path(inputPath));

		return job.waitForCompletion(true) ? 0 : 1;
	}

	public static void main(String[] args) throws Exception {
		int res = ToolRunner.run(new Configuration(), new ParallelPageLoader(), args);
		System.exit(res);
	}

}
