package com.cnebula.common.hadoop.oozie.client.impl;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.osgi.service.component.ComponentContext;

import com.cnebula.common.annotations.es.ESRef;
import com.cnebula.common.annotations.es.EasyService;
import com.cnebula.common.conf.IEasyServiceConfAdmin;
import com.cnebula.common.hadoop.oozie.client.IOozieRestService;
import com.cnebula.common.hadoop.oozie.client.Property;
import com.cnebula.common.log.ILog;
import com.cnebula.common.xml.IEasyObjectXMLTransformer;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.reflect.TypeToken;

//@EasyService(immediate = true, noservice = true)
/**
 * Manual integration test component: on activation, submits a sample Spark
 * workflow job to Oozie through the injected {@link IOozieRestService},
 * then looks up and logs the submitted job's status by its returned id.
 *
 * <p>Not registered as a service (the {@code @EasyService} annotation above is
 * commented out); enable it only when a live Oozie server is reachable.
 */
public class TestOozieRestService {

	@ESRef
	ILog log;

	@ESRef
	IEasyServiceConfAdmin confAdmin;

	@ESRef
	IEasyObjectXMLTransformer xtf;

	@ESRef
	IOozieRestService oozieRestService;

	/** No cleanup needed; any job submitted in activate() is left to Oozie. */
	protected void deactivate(ComponentContext ctx) {
	}

	/**
	 * Submits a hard-coded Spark workflow job to Oozie and, if the submit
	 * response contains a job id, logs that job's information.
	 *
	 * @param ctx the OSGi component context (unused)
	 * @throws Exception if the Oozie REST call fails
	 */
	protected void activate(ComponentContext ctx) throws Exception {

		Gson gson = new GsonBuilder().setPrettyPrinting().disableHtmlEscaping().create();

		// Job configuration for a Spark workflow on the local dev cluster.
		List<Property> properties = new ArrayList<Property>();
		properties.add(new Property("user.name", "calis"));
		properties.add(new Property("queueName", "default"));
		properties.add(new Property("nameNode", "hdfs://calis-OptiPlex-9020-161:9000"));
		properties.add(new Property("jobTracker", "192.168.2.161:8032"));
		properties.add(new Property("examplesRoot", "examples"));
		properties.add(new Property("oozie.use.system.libpath", "true"));
		properties.add(new Property("master", "spark://calis-OptiPlex-9020-161:7077"));
		properties.add(new Property("oozie.wf.application.path", "hdfs://calis-OptiPlex-9020-161:9000/user/oozie/examples/apps/spark/workflow.xml"));
		properties.add(new Property("oozie.libpath", "hdfs://calis-OptiPlex-9020-161:9000/user/oozie/examples/apps/spark/lib"));

		String rst = oozieRestService.submitStandardJob(properties, true);
		log.info(rst);

		Map<String, Object> map = gson.fromJson(rst, new TypeToken<Map<String, Object>>() {
		}.getType());
		Object response = (map == null) ? null : map.get("Response");
		if (response != null) {
			// Re-serialize through Gson instead of calling toString() on the
			// parsed value: LinkedTreeMap.toString() yields "{id=...}", which
			// is not valid JSON and only parses by accident in lenient mode.
			Map<String, String> res = gson.fromJson(gson.toJson(response), new TypeToken<Map<String, String>>() {
			}.getType());
			String jobId = (res == null) ? null : res.get("id");
			if (jobId != null) {
				log.info(oozieRestService.getJobInformation(jobId));
			} else {
				log.info("Submit response contained no job id: " + rst);
			}
		}
	}
}
