package com.ds.lens.flink.monitor.datasource;

import com.ds.lens.data.common.client.LensResponseData;
import com.ds.lens.data.common.document.datasource.LensDatasource;
import com.ds.lens.data.common.document.datasource.LensDsSource;
import com.ds.lens.data.common.document.datasource.LensDsSchema;
import com.ds.lens.flink.monitor.MonitorJobContext;
import com.ds.lens.flink.monitor.common.LensElasticsearchSink;
import com.ds.lens.flink.monitor.datasource.function.DataSourceProcess;
import com.ds.lens.flink.monitor.datasource.function.DataSourceWatermarks;
import com.ds.lens.flink.monitor.datasource.function.KafkaDataSource;
import com.ds.lens.flink.monitor.metricsjob.Metrics;
import com.ds.lens.flink.monitor.metricsjob.MetricsProcessFunction;
import com.ds.lens.flink.monitor.metricsjob.MetricsSinkFunction;
import com.ds.lens.flink.monitor.metricsjob.MetricsTagsKeySelector;
import com.ds.lens.flink.monitor.metricsjob.MetricsTagsProcessFunction;
import org.apache.commons.lang.StringUtils;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.typeutils.RowTypeInfo;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.Tumble;
import org.apache.flink.table.api.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

import java.sql.Timestamp;
import java.util.List;

/**
 * Flink job building per-datasource metrics from Kafka input.
 *
 * @author WeiShaoying
 * @date 2020/3/16 3:49 PM
 */
public class LensDataSourceJob {

    /**
     * Entry point of the Flink streaming job that reads rows from a Kafka-backed
     * Lens datasource, aggregates them over tumbling event-time windows defined by
     * the datasource SQL config, and writes the resulting metrics to Elasticsearch
     * and (via a per-tag processing-time window) to MySQL.
     *
     * @param args args[0] is the Lens datasource id, used to look up the full
     *             datasource definition from the monitor service
     * @throws Exception if the datasource lookup or the job execution fails
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Resolve the full datasource definition (Kafka source, schema, SQL) by id.
        LensResponseData<LensDatasource> responseData = MonitorJobContext.getInstance().getMonitorClient().queryDatasource(Long.valueOf(args[0]));
        LensDatasource dataSource = responseData.getData();
        // One sub-task per Kafka partition so every partition gets a dedicated reader.
        env.setParallelism(dataSource.getSource().getPartition());
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        processKafkaConfig(dataSource.getSource());
        // NOTE: getRowTypeInfo also populates dsSql.fields and dsSql.schema as a
        // side effect, so it must run before the watermark assigner below and
        // before fromDataStream reads dsSql.getFields().
        RowTypeInfo rowTypeInfo = getRowTypeInfo(dataSource);
        DataStream<Row> stream = env.addSource(new FlinkKafkaConsumer<>(dataSource.getSource().getTopic(),
                new KafkaDataSource(rowTypeInfo, dataSource.getDsSchema()), dataSource.getSource().getProperties()))
                .assignTimestampsAndWatermarks(new DataSourceWatermarks(dataSource.getDsSql().getSchema()));
        Table table = tableEnv.fromDataStream(stream, dataSource.getDsSql().getFields());
        // Tumbling event-time window on the configured time field. Grouping is
        // optional: with no group columns configured we group by the window alone.
        Table query = table.where(dataSource.getDsSql().getWhere())
                .window(Tumble.over(dataSource.getDsSql().getWindowSize() + ".minutes")
                        .on(dataSource.getDsSql().getTimeField()).as("w"))
                .groupBy(StringUtils.isNotEmpty(dataSource.getDsSql().getGroup()) ? dataSource.getDsSql().getGroup() + ",w" : "w")
                .select(dataSource.getDsSql().getSelect() + ", w.start");

        DataStream<Metrics> sinkStream = tableEnv
                .toAppendStream(query, TypeInformation.of(Row.class))
                .process(new DataSourceProcess(dataSource));
        // Sink 1: metrics into Elasticsearch.
        sinkStream.process(new MetricsProcessFunction())
                .addSink(new LensElasticsearchSink<>(MonitorJobContext.getInstance().getEsTransportAddresses(),
                        new MetricsSinkFunction(), MonitorJobContext.getInstance().getEsConfig()).build());
        // Sink 2: per-tag aggregation over 1-minute processing-time windows (MySQL).
        sinkStream.keyBy(new MetricsTagsKeySelector())
                .window(TumblingProcessingTimeWindows.of(Time.minutes(1)))
                .process(new MetricsTagsProcessFunction());
        env.execute(LensDataSourceJob.class.getSimpleName() + dataSource.getName());
    }

    /**
     * Builds the {@link RowTypeInfo} for the datasource schema and, as a
     * deliberate side effect, fills in two pieces of {@code dsSql} state:
     * {@code fields} — the comma-separated field list, with the event-time column
     * suffixed {@code .rowtime} so the Table API treats it as the event-time
     * attribute — and {@code schema} — the schema entry matching the configured
     * time field, later consumed by the watermark assigner.
     *
     * @param dataSource datasource whose schema is converted; mutated in place
     * @return the row type describing one Kafka record
     * @throws IllegalStateException if the datasource declares no schema columns
     */
    private static RowTypeInfo getRowTypeInfo(LensDatasource dataSource) {
        List<LensDsSchema> schemas = dataSource.getDsSchema();
        if (schemas.isEmpty()) {
            // Previously this fell through to deleteCharAt on an empty builder and
            // died with StringIndexOutOfBoundsException; fail with a clear message.
            throw new IllegalStateException("Datasource " + dataSource.getName() + " declares no schema columns");
        }
        TypeInformation<?>[] fieldTypes = new TypeInformation<?>[schemas.size()];
        String[] fieldNames = new String[schemas.size()];
        StringBuilder fields = new StringBuilder();
        for (int i = 0; i < schemas.size(); i++) {
            LensDsSchema schema = schemas.get(i);
            schema.setIndex(i);
            fieldTypes[i] = typeInformationOf(schema.getType());
            fieldNames[i] = schema.getTarget();
            fields.append(schema.getTarget());
            if (schema.getTarget().equals(dataSource.getDsSql().getTimeField())) {
                // Remember the time-field schema for watermark generation and mark
                // the column as the Table API event-time attribute.
                dataSource.getDsSql().setSchema(schema);
                fields.append(".rowtime");
            }
            fields.append(",");
        }
        // Drop the trailing comma.
        dataSource.getDsSql().setFields(fields.deleteCharAt(fields.length() - 1).toString());
        return new RowTypeInfo(fieldTypes, fieldNames);
    }

    /**
     * Maps a schema type name to Flink type information; unknown names fall back
     * to a generic {@code Object} type.
     */
    private static TypeInformation<?> typeInformationOf(String type) {
        switch (type) {
            case "String":
                return TypeInformation.of(new TypeHint<String>() {
                });
            case "Long":
                return TypeInformation.of(new TypeHint<Long>() {
                });
            case "Timestamp":
                return TypeInformation.of(new TypeHint<Timestamp>() {
                });
            default:
                return TypeInformation.of(new TypeHint<Object>() {
                });
        }
    }

    /**
     * Copies the Kafka connection settings from the datasource definition into the
     * consumer {@code Properties} handed to {@code FlinkKafkaConsumer}.
     */
    private static void processKafkaConfig(LensDsSource source) {
        source.getProperties().setProperty("bootstrap.servers", source.getServers());
        source.getProperties().setProperty("group.id", source.getGroup());
    }
}
