package themis.flink.riskengine;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import java.io.IOException;
import java.io.Serializable;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;

public class FlinkRiskEngine {
    // Feature DSL definitions. Each entry is parsed into
    // [function, key, window], e.g. ["count", "pay_account.history", "1h"].
    private static final List<String[]> features = Arrays.asList(
            parseDSL("count(pay_account.history,1h)"),
            parseDSL("sum(amount#rcv_account.history,1h)"),
            parseDSL("count_distinct(rcv_account#pay_account.history,1h)"));

    // Distinct key names (second DSL token) that incoming events are split on.
    private static final Set<String> keys;

    /**
     * Splits a feature DSL string such as {@code "count(pay_account.history,1h)"}
     * on '(', ',' and ')' and trims each token, yielding
     * {@code [function, key, window]}.
     */
    private static String[] parseDSL(String dsl) {
        return Arrays.stream(dsl.split("[(,)]"))
                .map(String::trim)
                .toArray(String[]::new);
    }

    static {
        // Runs after 'features' is initialized (declaration order above).
        keys = features.stream().map(x -> x[1]).collect(Collectors.toSet());
    }

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        // Raw JSON strings from the Kafka topic.
        FlinkKafkaConsumer<String> consumer = createKafkaConsumer();
        DataStreamSource<String> source = env.addSource(consumer);

        // Parse -> split into per-key records -> key by feature key value ->
        // accumulate history and compute features -> key by event id ->
        // merge all feature results back onto the event -> apply the rule.
        SingleOutputStreamOperator<JSONObject> scored = source
                .map(new MapFunction<String, JSONObject>() {
                    public JSONObject map(String s) throws Exception {
                        return StringUtils.isEmpty(s) ? new JSONObject() : JSONObject.parseObject(s);
                    }
                })
                .flatMap(new EventSplitFunction())
                .keyBy(new KeySelector<JSONObject, String>() {
                    public String getKey(JSONObject value) throws Exception {
                        return value.getString("KEY_VALUE");
                    }
                })
                .map(new KeyEnrichFunction())
                .map(new FeatureEnrichFunction())
                .keyBy(new KeySelector<JSONObject, String>() {
                    public String getKey(JSONObject value) throws Exception {
                        return value.getString("EVENT_ID");
                    }
                })
                .flatMap(new FeatureReduceFunction())
                .map(new RuleBasedModeling());

        scored.print().setParallelism(1);
        env.execute("FlinkRiskEngine");
    }

    public static class EventSplitFunction implements FlatMapFunction<JSONObject, JSONObject> {
        private static final Set<String> keys = FlinkRiskEngine.keys;

        /**
         * Emits the original event (tagged with a fresh event id) plus one
         * record per configured feature key; all records share the same
         * EVENT_ID so they can be re-joined downstream.
         */
        public void flatMap(JSONObject value, Collector<JSONObject> out) throws Exception {
            final String eventId = UUID.randomUUID().toString();
            final long timestamp = value.getLongValue("timestamp");

            JSONObject event = new JSONObject();
            event.put("KEY_NAME", "event");
            event.put("KEY_VALUE", eventId);
            event.put("EVENT_ID", eventId);
            event.putAll(value);
            out.collect(event);

            for (String key : keys) {
                JSONObject keyed = new JSONObject();
                keyed.put("timestamp", timestamp);
                keyed.put("KEY_NAME", key);
                keyed.put("KEY_VALUE", genKeyValue(value, key));
                keyed.put("EVENT_ID", eventId);
                out.collect(keyed);
            }
        }

        /**
         * Builds the partition key for one feature-key definition:
         *   "a.history"   -> "a#&lt;event[a]&gt;.history"
         *   "a#b.history" -> "a#&lt;event[b]&gt;.history"
         */
        private String genKeyValue(JSONObject event, String key) {
            if (!key.endsWith(".history"))
                throw new UnsupportedOperationException("unsupported key type");
            String[] splits = key.replace(".history", "").split("#");
            if (splits.length != 1 && splits.length != 2)
                throw new UnsupportedOperationException("unsupported key type");
            String target = splits[0];
            // One segment: group on the target's own field value;
            // two segments: group on the second ("on") field's value.
            String on = splits[splits.length - 1];
            return String.format("%s#%s.history", target, event.get(on));
        }

        // NOTE(review): currently unreferenced; kept for parity with genKeyValue.
        private Set<String> genKeyFields(String key) {
            if (!key.endsWith(".history"))
                throw new UnsupportedOperationException("unsupported key type");
            String[] splits = key.replace(".history", "").split("#");
            return new HashSet<>(Arrays.asList(splits));
        }
    }

    public static class KeyEnrichFunction extends RichMapFunction<JSONObject, JSONObject> {
        // Keyed state: rolling history of the last 100 records for this key.
        private ValueState<Serializable> keyState;

        public void open(Configuration config) {
            this.keyState = getRuntimeContext()
                    .getState(new ValueStateDescriptor<>("saved keyState", Serializable.class));
        }

        // Typed read of the keyed state; returns null when no state exists yet.
        private <T> T getState(Class<T> tClass) throws IOException {
            return tClass.cast(this.keyState.value());
        }

        private void setState(Serializable v) throws IOException {
            this.keyState.update(v);
        }

        /**
         * Passes "event" records through untouched. For ".history" records,
         * appends the record to this key's bounded history (capped at 100
         * entries, oldest evicted) and returns a copy of the record carrying
         * the full history under "HISTORY".
         *
         * @throws UnsupportedOperationException for any other KEY_NAME
         */
        public JSONObject map(JSONObject event) throws Exception {
            String keyName = event.getString("KEY_NAME");
            if (keyName.equals("event"))
                return event;
            if (keyName.endsWith(".history")) {
                JSONArray history = getState(JSONArray.class);
                if (history == null)
                    history = new JSONArray();
                history.add(event);
                // Bound state growth: keep only the most recent 100 records.
                if (history.size() > 100)
                    history.remove(0);
                setState(history);
                JSONObject newEvent = new JSONObject();
                newEvent.putAll(event);
                newEvent.put("HISTORY", history);
                return newEvent;
            }
            throw new UnsupportedOperationException("unsupported key type");
        }
    }

    public static class FeatureEnrichFunction extends RichMapFunction<JSONObject, JSONObject> {
        private static final List<String[]> features = FlinkRiskEngine.features;

        /**
         * Computes every configured feature whose key matches this record's
         * KEY_NAME from the record's HISTORY array, storing each result under
         * "features" -> "&lt;function&gt;(&lt;key&gt;,&lt;window&gt;)".
         * "event" records pass through unchanged.
         *
         * @throws UnsupportedOperationException for an unknown feature function
         */
        public JSONObject map(JSONObject value) throws Exception {
            String keyName = value.getString("KEY_NAME");
            if (keyName.equals("event"))
                return value;
            for (String[] feature : features) {
                String key = feature[1];
                if (!StringUtils.equals(key, keyName))
                    continue;
                String function = feature[0];
                long window = FlinkRiskEngine.parseTimestamp(feature[2]);
                JSONArray history = value.getJSONArray("HISTORY");
                // The aggregated field is the first segment of the key.
                String target = key.replace(".history", "").split("#")[0];
                Object featureResult;
                if ("sum".equalsIgnoreCase(function)) {
                    featureResult = doSum(history, target, window);
                } else if ("count".equalsIgnoreCase(function)) {
                    featureResult = doCount(history, target, window);
                } else if ("count_distinct".equalsIgnoreCase(function)) {
                    featureResult = doCountDistinct(history, target, window);
                } else {
                    throw new UnsupportedOperationException(
                            String.format("unsupported function[%s]", function));
                }
                value.putIfAbsent("features", new JSONObject());
                String featureName = String.format("%s(%s,%s)", feature[0], feature[1], feature[2]);
                value.getJSONObject("features").put(featureName, featureResult);
            }
            return value;
        }

        /**
         * Exclusive lower bound of the window ending at the newest record's
         * timestamp. NOTE(review): with an empty history this underflows
         * (Long.MIN_VALUE - window), same as the original; in practice the
         * history always contains at least the current record.
         */
        private long windowStart(JSONArray history, long window) {
            long maxTimestamp = (Long) history.stream()
                    .map(x -> ((JSONObject) x).getLong("timestamp"))
                    .max(Long::compare)
                    .orElse(Long.MIN_VALUE);
            return maxTimestamp - window;
        }

        /** Sum of {@code target} over in-window records; 0.0 when none. */
        private double doSum(JSONArray history, String target, long window) {
            long minTimestamp = windowStart(history, window);
            return (Double) history.stream()
                    .filter(x -> ((JSONObject) x).getLong("timestamp") > minTimestamp)
                    .map(x -> ((JSONObject) x).getDouble(target))
                    .reduce(Double::sum)
                    // was Double.MIN_VALUE — that is the smallest POSITIVE double
                    // (~4.9e-324), not an empty marker; 0.0 is the intended empty sum
                    .orElse(0.0);
        }

        /** Number of in-window records. */
        private double doCount(JSONArray history, String target, long window) {
            long minTimestamp = windowStart(history, window);
            return history.stream()
                    .filter(x -> ((JSONObject) x).getLong("timestamp") > minTimestamp)
                    .count();
        }

        /** Number of distinct {@code target} values among in-window records. */
        private double doCountDistinct(JSONArray history, String target, long window) {
            long minTimestamp = windowStart(history, window);
            return history.stream()
                    .filter(x -> ((JSONObject) x).getLong("timestamp") > minTimestamp)
                    .map(x -> ((JSONObject) x).getString(target))
                    .distinct()
                    .count();
        }
    }

    /**
     * Parses a window length such as "1h" or "7d" into milliseconds.
     * Supports days (d), hours (h), minutes (m) and seconds (s).
     * Uses substring rather than replace: the old {@code replace("d", "")}
     * stripped EVERY 'd', so malformed input like "1d2d" parsed as 12 days.
     *
     * @param time duration literal: digits followed by a one-letter unit
     * @return the duration in milliseconds
     * @throws UnsupportedOperationException for a null/short value or unknown unit
     * @throws NumberFormatException if the numeric part is not a valid long
     */
    public static long parseTimestamp(String time) {
        if (time == null || time.length() < 2)
            throw new UnsupportedOperationException(String.format("unsupported time[%s]", time));
        long amount = Long.parseLong(time.substring(0, time.length() - 1));
        switch (time.charAt(time.length() - 1)) {
            case 'd':
                return TimeUnit.DAYS.toMillis(amount);
            case 'h':
                return TimeUnit.HOURS.toMillis(amount);
            case 'm':
                return TimeUnit.MINUTES.toMillis(amount);
            case 's':
                return TimeUnit.SECONDS.toMillis(amount);
            default:
                throw new UnsupportedOperationException(String.format("unsupported time[%s]", time));
        }
    }

    public static class FeatureReduceFunction extends RichFlatMapFunction<JSONObject, JSONObject> {
        private static final List<String[]> features = FlinkRiskEngine.features;

        // Keyed state (by EVENT_ID): partial merge of one event's records.
        private ValueState<JSONObject> merged;

        public void open(Configuration config) {
            this.merged = getRuntimeContext()
                    .getState(new ValueStateDescriptor<>("saved reduceJson", JSONObject.class));
        }

        /**
         * Accumulates the original event plus every per-key feature record
         * sharing its EVENT_ID. Once the event and one result per configured
         * feature have arrived, emits the merged record and clears the state;
         * otherwise buffers the partial merge for later records.
         */
        public void flatMap(JSONObject value, Collector<JSONObject> out) throws Exception {
            JSONObject mergedValue = this.merged.value();
            if (mergedValue == null)
                mergedValue = new JSONObject();
            String keyName = value.getString("KEY_NAME");
            if (keyName.equals("event")) {
                mergedValue.put("event", value);
            } else {
                mergedValue.putIfAbsent("features", new JSONObject());
                if (value.containsKey("features"))
                    mergedValue.getJSONObject("features").putAll(value.getJSONObject("features"));
            }
            // Complete when the raw event and all feature results are present.
            if (mergedValue.containsKey("event") && mergedValue.containsKey("features")
                    && mergedValue.getJSONObject("features").size() == features.size()) {
                out.collect(mergedValue);
                this.merged.clear();
            } else {
                this.merged.update(mergedValue);
            }
        }
    }

    public static class RuleBasedModeling implements MapFunction<JSONObject, JSONObject> {
        /**
         * Hard-coded anomaly rule over the 1h features: more than 5 payments
         * from the paying account, more than 5000 total received by the
         * receiving account, and the paying account paid at most 2 distinct
         * receivers. Annotates the record with "isAnomaly".
         */
        public JSONObject map(JSONObject value) throws Exception {
            JSONObject features = value.getJSONObject("features");
            // Keep the short-circuit chain: later features are only read
            // when the earlier thresholds are already exceeded.
            boolean isAnomaly =
                    features.getDouble("count(pay_account.history,1h)") > 5.0D
                            && features.getDouble("sum(amount#rcv_account.history,1h)") > 5000.0D
                            && features.getDouble("count_distinct(rcv_account#pay_account.history,1h)") <= 2.0D;
            value.put("isAnomaly", isAnomaly);
            return value;
        }
    }

    /**
     * Builds a Kafka consumer for topic "event-input" that deserializes
     * records as plain strings.
     *
     * NOTE(review): setStartFromLatest() overrides the
     * "auto.offset.reset=earliest" property below, and "zookeeper.connect"
     * is presumably ignored by the universal FlinkKafkaConsumer — both kept
     * to preserve the original runtime configuration; confirm before removal.
     */
    private static FlinkKafkaConsumer<String> createKafkaConsumer() {
        Properties properties = new Properties();
        properties.setProperty("zookeeper.connect", "localhost:2181");
        properties.setProperty("bootstrap.servers", "localhost:9092");
        properties.setProperty("group.id", "test");
        properties.setProperty("enable.auto.commit", "true");
        properties.setProperty("auto.commit.interval.ms", "1000");
        properties.setProperty("auto.offset.reset", "earliest");
        properties.setProperty("session.timeout.ms", "30000");
        FlinkKafkaConsumer<String> consumer =
                new FlinkKafkaConsumer<>("event-input", new SimpleStringSchema(), properties);
        consumer.setStartFromLatest();
        return consumer;
    }
}
