package com.yiidata.example.spark;

import com.alibaba.fastjson.JSONObject;
import com.yiidata.amc.common.client.SparkJobDriverProxy;
import com.yiidata.amc.common.client.TaskInfo;
import com.yiidata.amc.job.JavaAbstractJob;
import com.yiidata.amc.job.config.ParameterConfig;
import org.apache.commons.lang.StringUtils;
import org.apache.spark.api.java.JavaRDD;

/**
 * <p>
 * </p>
 * Created by ZhenQin on 2018/1/8 0008-9:06
 */
/**
 * Example Spark job that counts the lines of a text file on HDFS.
 *
 * <p>The input path is read from the {@code inputPath} parameter; when absent or
 * blank, a hard-coded demo file is used as a fallback. After counting, the job
 * sleeps for one minute so its状态 can be observed in the cluster UI — kept as a
 * deliberate demo delay.</p>
 */
public class CountFileSparkTask extends JavaAbstractJob {

    /** HDFS path of the file whose lines are counted; resolved in {@link #run}. */
    private String inputPath;

    /**
     * Reads {@code inputPath} from the job parameters (falling back to a demo
     * file when blank), counts the lines of that text file via Spark, and
     * prints the count to stdout.
     *
     * @param parameter job parameters; the {@code inputPath} key is consulted
     */
    @Override
    public void run(ParameterConfig parameter) {
        inputPath = parameter.getParameter("inputPath");

        if (StringUtils.isBlank(inputPath)) {
            inputPath = "/tenant/Tenant_shisuo/install.log";
        }

        System.out.println("input path: " + inputPath);
        System.out.println("======================>>>>>>>>>>>>>>>>job start");

        JavaRDD<String> rdd = this.javaSparkContext().textFile(inputPath);
        System.out.println("data lines: " + rdd.count());

        try {
            // Deliberate demo delay so the job stays visible in the Spark UI.
            Thread.sleep(1000 * 60);
        } catch (InterruptedException e) {
            // Restore the interrupt status instead of swallowing it, so callers
            // higher up the stack can still observe the interruption.
            Thread.currentThread().interrupt();
        }
        System.out.println("======================>>>>>>>>>>>>>>>>job end");
    }

    /**
     * Local entry point: builds demo parameters, submits this task through
     * {@link SparkJobDriverProxy}, and blocks on stdin so the JVM stays alive
     * while the remote job runs.
     *
     * @param args unused
     * @throws Exception if proxy creation or job submission fails
     */
    public static void main(String[] args) throws Exception {
        JSONObject json = new JSONObject();
        json.put("inputPath", "/user/zhaopingxi/upload/20180802/dest_029_MOBILE_CDR__9983421.txtcomplete");
        json.put("aaa", "cccccc");
        ParameterConfig config = ParameterConfig.string2ParameterConfig(json.toJSONString());

        TaskInfo info = new TaskInfo("HelloFirstSparkExample");
        info.setExecutorMemory(2);
        info.setExecutorInstances(1);
        //info.setQueue("TEMPORARYANALYSIS");
        info.setSparkJar("hdfs:////user/spark/share/lib/spark-assembly-1.6.1-hadoop2.6.0.jar");

        CountFileSparkTask task = new SparkJobDriverProxy<>(CountFileSparkTask.class).createProxy(info);
        task.run(config);

        // Keep the driver JVM alive until the user presses Enter.
        System.in.read();
    }
}
