// NOTE(review): This entire file is commented-out dead code — a sample client
// for spark-jobserver kept for reference. Convention is to DELETE commented-out
// code and rely on version control history instead. If this is ever revived,
// address the review notes inlined below first.
//package com.bestv.datalake.task;
//
///**
// * Created by jing.zigao on 2016/6/7.
// */
//import java.io.File;
//import java.util.HashMap;
//import java.util.List;
//import java.util.Map;
//import org.khaleesi.carfield.tools.sparkjobserver.api.ISparkJobServerClient;
//import org.khaleesi.carfield.tools.sparkjobserver.api.ISparkJobServerClientConstants;
//import org.khaleesi.carfield.tools.sparkjobserver.api.SparkJobInfo;
//import org.khaleesi.carfield.tools.sparkjobserver.api.SparkJobResult;
//import org.khaleesi.carfield.tools.sparkjobserver.api.SparkJobServerClientException;
//import org.khaleesi.carfield.tools.sparkjobserver.api.SparkJobServerClientFactory;
//
///**
// * A sample shows how to use spark-job-server-client.
// *
// * @author bluebreezecf
// * @since 2014-09-16
// *
// */
//public class SparkJobServerClient {
//
//    public SparkJobResult getSparkJob() {
//        ISparkJobServerClient client = null;
//        SparkJobResult result_new = null;
//        try {
//            // NOTE(review): hard-coded server URL/IP — if revived, move to configuration.
//            client = SparkJobServerClientFactory.getInstance().createSparkJobServerClient("http://10.200.8.143:3000/");
//            Map<String, String> params = new HashMap<String, String>();
//            params.put(ISparkJobServerClientConstants.PARAM_MEM_PER_NODE, "512m");
//            params.put(ISparkJobServerClientConstants.PARAM_NUM_CPU_CORES, "10");
//
//            //GET /jobs
//            List<SparkJobInfo> jobInfos = client.getJobs();
//            System.out.println("Current jobs:");
//            for (SparkJobInfo jobInfo : jobInfos) {
//                System.out.println(jobInfo);
//            }
//
//            //Post /jobs---Create a new job
//            // NOTE(review): latent bug — params2 is created but never used; every
//            // put() below targets the original `params` map. Either the job-creation
//            // settings were meant to go into a fresh map (params2) or params2 should
//            // be removed entirely. Verify intent before reviving.
//            Map<String, String> params2 = new HashMap<String, String>();
//            params.put(ISparkJobServerClientConstants.PARAM_APP_NAME, "DataLakeTest001");
//            params.put(ISparkJobServerClientConstants.PARAM_CLASS_PATH, "com.bestv.datalake.spark.DatalakeJobServerEntry");
//            params.put(ISparkJobServerClientConstants.PARAM_CONTEXT, "sql-context1");
//            params.put(ISparkJobServerClientConstants.PARAM_SYNC, "false");
//            params.put("datalakeJobType", "onlinelog");
//            params.put("datalakeSubjectStatus", "new");
//            // NOTE(review): datalakeJobType/datalakeSubjectStatus are passed twice —
//            // once via the `input` config string below and once via `params` above.
//            // Confirm which channel the job entry class actually reads.
//            SparkJobResult result = client.startJob("input = {\"datalakeJobType\":\"onlinelog\",\"datalakeSubjectStatus\":\"new\"}", params);
//            System.out.println(result);
//            // PARAM_SYNC is "false", so the job is asynchronous; this immediate
//            // getJobResult() may return a still-RUNNING status rather than a final
//            // result — presumably callers poll, but that is not shown here.
//            result_new = client.getJobResult(result.getJobId());
//            return result_new;
//        } catch (SparkJobServerClientException e1) {
//            // NOTE(review): exceptions are effectively swallowed — printStackTrace()
//            // plus a null return. If revived, log via SLF4J and propagate or wrap.
//            e1.printStackTrace();
//        } catch (Exception e) {
//            e.printStackTrace();
//        } finally {
//            if (client != null) {
//                //client.stop();
//            }
//        }
//        // Reached only on exception: result_new is still null here.
//        return result_new;
//    }
//
//}
