package com.fsql.controller;

import com.alibaba.fastjson.JSONObject;
import com.fsql.utils.JobUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.env.Environment;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import java.util.HashMap;
import java.util.Map;
import java.util.TimeZone;

@RestController
@RequestMapping("/test")
public class TestController {

    /** Base directory holding the example SQL scripts that the test endpoints submit. */
    private static final String EXAMPLES_DIR =
            "/mnt/d/work/workspace/flink-sqlweb/src/main/resources/examples/";

    @Autowired
    private Environment env;

    /** Liveness probe: returns a fixed string so the service can be ping-tested. */
    @GetMapping("/helloworld")
    public String helloworld() {
        return "helloworld";
    }

    /**
     * Submits the example "complexkafka_to_stream" SQL script as a standalone Flink job.
     *
     * @return HTTP 200 with a single-entry map: key {@code "jobId"} mapped to the id
     *         returned by {@link JobUtils#submitStandaloneJob}
     */
    @GetMapping("/standalone")
    public ResponseEntity<Map<String, Object>> testStandaloneJob() {
        Map<String, Object> conf = buildStandaloneConf("complexkafka_to_stream");
        return submit(conf);
    }

    /**
     * Submits the example "complexkafka_to_stream_udf" SQL script as a standalone Flink
     * job, additionally registering the jar that contains the user-defined functions
     * referenced by the script.
     *
     * @return HTTP 200 with a single-entry map: key {@code "jobId"} mapped to the id
     *         returned by {@link JobUtils#submitStandaloneJob}
     */
    @GetMapping("/udf")
    public ResponseEntity<Map<String, Object>> testudf() {
        Map<String, Object> conf = buildStandaloneConf("complexkafka_to_stream_udf");
        // Attach the jar holding the UDF implementations used by the SQL script.
        // conf.put("-addjar", "[\"/mnt/d/work/workspace/wiki-edits/target/wiki-edits-0.1.jar\"]");
        conf.put("-addjar", "[\"/mnt/d/work/workspace/FlinkUdfDemo/target/FlinkUdfDemo-1.0-SNAPSHOT.jar\"]");
        return submit(conf);
    }

    /**
     * Builds the job-submission configuration shared by the test endpoints.
     * The SQL script path and job name are both derived from {@code jobName};
     * plugin/flink locations come from the Spring environment ({@code flinksql.*} keys).
     *
     * @param jobName name of the example job; {@code <jobName>.sql} must exist in
     *                {@link #EXAMPLES_DIR}
     * @return mutable configuration map ready for {@link JobUtils#submitStandaloneJob}
     */
    private Map<String, Object> buildStandaloneConf(String jobName) {
        // Extra runtime properties, passed through as a JSON string via -confProp.
        JSONObject properties = new JSONObject();
//        properties.put("time.characteristic", "eventTime");
        // NOTE(review): this serializes the whole TimeZone object, not its id
        // (TimeZone.getDefault().getID()) — confirm the consumer expects that shape.
        properties.put("timezone", TimeZone.getDefault());
        properties.put("early.trigger", "1");

        // Job configuration parameters.
        Map<String, Object> conf = new HashMap<>();
        conf.put("-sql", EXAMPLES_DIR + jobName + ".sql");
        conf.put("-mode", "standalone");
        conf.put("-name", jobName);
        conf.put("-confProp", properties.toString());
        conf.put("-localSqlPluginPath", env.getProperty("flinksql.localSqlPluginPath"));
        conf.put("-remoteSqlPluginPath", env.getProperty("flinksql.remoteSqlPluginPath"));
        conf.put("-flinkconf", env.getProperty("flinksql.flinkconf"));
        conf.put("-pluginLoadMode", "classpath");
//        conf.put("-checkResource", "true");
        return conf;
    }

    /** Submits the configured job and wraps the resulting job id in a 200 response. */
    private ResponseEntity<Map<String, Object>> submit(Map<String, Object> conf) {
        Map<String, Object> body = new HashMap<>();
        body.put("jobId", JobUtils.submitStandaloneJob(conf));
        return new ResponseEntity<>(body, HttpStatus.OK);
    }
}
