package com.fsql.executor;

import com.dtstack.flink.sql.Main;
import com.dtstack.flink.sql.exception.sqlparse.PlannerNotMatchException;
import com.dtstack.flink.sql.exception.sqlparse.SqlExceptionConstant;
import com.dtstack.flink.sql.exception.sqlparse.SqlParseCodeEnum;
import com.dtstack.flink.sql.exec.ExecuteProcessHelper;
import com.dtstack.flink.sql.exec.ParamsInfo;
import org.apache.flink.api.common.JobExecutionResult;
import org.apache.flink.table.api.TableException;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class FSqlLocalExecutor {
    // Bind the logger to this class (was Main.class, which mis-attributed log output).
    private static final Logger LOG = LoggerFactory.getLogger(FSqlLocalExecutor.class);

    /**
     * Parses the given command-line arguments, builds the local stream table
     * environment and executes the resulting job.
     *
     * @param args raw command-line arguments, parsed by
     *             {@link ExecuteProcessHelper#parseParams(String[])}
     * @return the executed job's ID rendered as a string
     * @throws PlannerNotMatchException if the execution fails with a
     *         {@link TableException} whose message indicates a join that the
     *         configured Flink planner does not support
     * @throws Exception any other failure from argument parsing, environment
     *         construction or job execution
     */
    public static String exec(String[] args) throws Exception {
        ParamsInfo paramsInfo = ExecuteProcessHelper.parseParams(args);
        StreamTableEnvironment env = ExecuteProcessHelper.getStreamExecution(paramsInfo);
        try {
            JobExecutionResult result = env.execute(paramsInfo.getName());
            LOG.info("program {} execution success", paramsInfo.getName());
            return String.valueOf(result.getJobID());
        } catch (TableException e) {
            // TableException#getMessage() may be null; guard before contains()
            // so a planner-mismatch check cannot itself throw an NPE and mask
            // the original exception.
            String message = e.getMessage();
            if (message != null && message.contains(SqlExceptionConstant.JOIN_WITH_FLINK_PLANNER)) {
                throw new PlannerNotMatchException(SqlParseCodeEnum.PLANNER_NOT_MATCH);
            }
            throw e;
        }
    }
}
