package chart ;

import java.util.* ;

import lib.Utils ;
import lib.ApiClient ;
import lib.ExtendedConfluentRegistryAvroDeserializationSchema ;

import com.google.common.collect.Sets ;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.datastax.driver.core.Cluster;

import org.apache.flink.avro.shaded.org.apache.avro.Schema;
import org.apache.flink.avro.shaded.org.apache.avro.generic.GenericData;
import org.apache.flink.avro.shaded.org.apache.avro.generic.GenericRecord;

import org.apache.flink.types.Row;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.cassandra.ClusterBuilder;
import org.apache.flink.streaming.connectors.cassandra.CassandraRowSink;

import org.apache.flink.streaming.api.datastream.DataStream;

import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
// import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

import org.apache.flink.streaming.api.functions.source.SourceFunction;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.table.data.RowData;
import org.apache.flink.util.Collector;

import org.apache.flink.api.java.tuple.Tuple2 ;
// import org.apache.flink.streaming.api.datastream.KeyedStream;

import org.apache.flink.streaming.api.functions.sink.PrintSinkFunction;

import org.apache.flink.api.common.state.MapState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.typeinfo.Types;

// import com.datastax.oss.driver.api.core.CqlSession ;
import com.datastax.driver.core.Session ;


import org.apache.flink.runtime.state.hashmap.HashMapStateBackend ;

public class Dashboard {
    /**
     * Bootstraps the per-user "first event" state from Cassandra.
     * Runs the given CQL (expected to return one JSON column per row), parses each
     * row, and stores the raw JSON under a (tenant_id, project_id, app_id, apex_id) key.
     * The pn (partition number) argument is currently unused but kept for interface parity
     * with the other load* helpers.
     */
    public static void loadUserInitState(String cql, MapState<Row, String> userInitState, int pn, Session cqlSession) throws Exception {
        ObjectMapper mapper = new ObjectMapper() ;

        for (com.datastax.driver.core.Row cqlRow : cqlSession.execute(cql)) {
            String payload = cqlRow.getObject(0).toString() ;
            Map<String, Object> fields = mapper.readValue(payload, Map.class) ;
            Row stateKey = Row.of(
                fields.get("tenant_id"),
                fields.get("project_id"),
                fields.get("app_id"),
                fields.get("apex_id")
            ) ;
            userInitState.put(stateKey, payload) ;
        }
    }
    /**
     * Records the first event ever seen for a user (keyed by tenant/project/app/apex).
     * If the user already has an init record, the event is ignored ("first event wins").
     * Otherwise the event payload is wrapped into a "&lt;event_name&gt;_record" column and
     * both the keyed state and the downstream "user_init_state" sink are updated.
     *
     * Improvement: the six copy-pasted if/else branches (cold_start, hot_start, use_end,
     * click, view_page, custom) all did the same three steps with a derived key, so they
     * are collapsed into one data-driven block. Behavior is unchanged, including the
     * original behavior for unknown event names (no event record column is written).
     */
    public static void updateUserInitState (MapState<Row, String> userInitState, Collector<Tuple2<String, Row>> out, Object jsonRaw) throws Exception {
        ObjectMapper om = new ObjectMapper() ;

        Map<String, Object> json = (Map<String, Object>) jsonRaw ;

        Map<String, Object> header = (Map<String, Object>) json.get("common") ;
        Map<String, Object> headerBasic = (Map<String, Object>) header.get("basic") ;
        String tenant_id = (String) headerBasic.get("tenant_id") ;
        String project_id = (String) headerBasic.get("project_id") ;
        String app_id = (String) headerBasic.get("app_id") ;
        String apex_id = (String) headerBasic.get("apex_id") ;

        Row userKey = Row.of(tenant_id, project_id, app_id, apex_id) ;
        // First event wins: once an init record exists, later events are no-ops.
        if (userInitState.contains(userKey)) return ;

        Map<String, Object> eventInfo = (Map<String, Object>) json.get("event") ;
        String eventName = Utils.extractEventName(eventInfo) ;

        Map<String, Object> record = new HashMap<String, Object>() ;
        record.put("tenant_id", tenant_id) ;
        record.put("project_id", project_id) ;
        record.put("app_id", app_id) ;
        record.put("apex_id", apex_id) ;
        record.put("event_name", eventName) ;

        Map<String, Object> eventRecord = new HashMap<String, Object>() ;
        eventRecord.put("common", header) ;

        // All known event types follow the same shape: drop the "is_<name>" marker flag,
        // wrap the event payload, and store it under the "<name>_record" column.
        Set<String> knownEvents = new HashSet<String>(Arrays.asList(
            "cold_start", "hot_start", "use_end", "click", "view_page", "custom")) ;
        if (knownEvents.contains(eventName)) {
            eventInfo.remove("is_" + eventName) ;
            eventRecord.put("event", eventInfo) ;
            record.put(eventName + "_record", eventRecord) ;
        }

        String recordJson = om.writeValueAsString(record) ;

        userInitState.put(userKey, recordJson) ;
        out.collect(new Tuple2<>("user_init_state", Row.of(recordJson))) ;
    }
    /**
     * Bootstraps the per-user session state from Cassandra. Each CQL row carries one
     * JSON document; its value is merged into the user's accumulated state under the
     * (channel, version, dt, hh) path via Utils.jsonSet.
     *
     * Improvements (behavior-preserving): the keyed state is read once per row instead
     * of twice, and the JSON text is parsed once instead of twice (Jackson yields the
     * same Map for both Map.class and Object.class targets on a JSON object).
     */
    public static void loadUserState(String cql, MapState<Row, String> userState, int pn, Session cqlSession) throws Exception {
        ObjectMapper om = new ObjectMapper() ;
        for (com.datastax.driver.core.Row row : cqlSession.execute(cql)) {
            String jsonText = row.getObject(0).toString() ;
            Map<String, Object> r = om.readValue(jsonText, Map.class) ;

            Row key = Row.of(r.get("tenant_id"), r.get("project_id"), r.get("app_id"), r.get("apex_id")) ;
            // Single state read; default to an empty JSON object on first sight of this user.
            String existing = userState.get(key) ;
            String accumVal = (existing == null) ? "{}" : existing ;
            String valText = Utils.jsonSet(accumVal, Arrays.asList(r.get("channel"), r.get("version"), r.get("dt"), r.get("hh")), r) ;
            userState.put(key, valText) ;
        }
    }
    /**
     * Accumulates per-user session state for one incoming event, across the four
     * (channel, version) roll-up branches: exact, per-version, per-channel, global.
     * For each branch it merges the known session ids into the user's JSON state at
     * (branchChannel, branchVersion, dt, hh) and emits a "user_state" record downstream.
     *
     * BUGFIX: the old-sessions lookup previously used the raw (channel, version) for
     * every branch, while the write below stores under (branchChannel, branchVersion).
     * That meant the "_ALL_" roll-up branches re-read the exact branch's sessions
     * instead of their own accumulated sets. The read path now matches the write path.
     */
    public static void updateUserState (MapState<Row, String> userState, Collector<Tuple2<String, Row>> out, Object jsonRaw) throws Exception {
        ObjectMapper om = new ObjectMapper() ;

        Map<String, Object> json = (Map<String, Object>) jsonRaw ;
        String platformName = Utils.extractPlatformName(json) ;
        Map<String, Object> header = (Map<String, Object>) json.get("common") ;

        Map<String, Object> headerBasic = (Map<String, Object>) header.get("basic") ;
        String tenant_id = (String) headerBasic.get("tenant_id") ;
        String project_id = (String) headerBasic.get("project_id") ;
        String app_id = (String) headerBasic.get("app_id") ;
        String apex_id = (String) headerBasic.get("apex_id") ;

        // Channel falls back from market_name to app_package_id, then to the literal "null".
        String channel = (String) headerBasic.get("market_name") ;
        if (channel == null) channel = (String) headerBasic.get("app_package_id") ;
        if (channel == null || channel.equals("")) {
            channel = "null" ;
        }
        String version = (String) headerBasic.get("app_version") ;
        if (version == null || version.equals("")) {
            version = "null" ;
        }
        String session_id = (String) headerBasic.get("session_id") ;

        Map<String, Object> event = (Map<String, Object>) json.get("event") ;
        String event_name = Utils.extractEventName(event) ; // not used below; kept for parity with updateResultState

        Map<String, Object> eventBasic = (Map<String, Object>) event.get("basic") ;
        long tsVal = (long) eventBasic.get("ts") ;
        String ts = Utils.parseChinaTs(tsVal) ;  // presumably yyyyMMddHH... in China time — TODO confirm against Utils
        String dt = ts.substring(0, 8) ;         // date bucket, yyyyMMdd
        String hh = ts.substring(8, 10) + "00" ; // hour bucket, e.g. "1400"

        // The four roll-up branches for (channel, version).
        List<List<Object>> branchInfos = Arrays.asList(
            Arrays.asList(channel, version),
            Arrays.asList("_ALL_", version),
            Arrays.asList(channel, "_ALL_"),
            Arrays.asList("_ALL_", "_ALL_")
        ) ;

        Row userKey = Row.of(tenant_id, project_id, app_id, apex_id) ;

        for (List<Object> branchInfo : branchInfos) {
            String branchChannel = (String) branchInfo.get(0) ;
            String branchVersion = (String) branchInfo.get(1) ;

            Map<String, Object> userRecord = new HashMap<String, Object>() ;
            userRecord.put("tenant_id", tenant_id) ;
            userRecord.put("project_id", project_id) ;
            userRecord.put("app_id", app_id) ;
            userRecord.put("apex_id", apex_id) ;
            userRecord.put("channel", branchChannel) ;
            userRecord.put("version", branchVersion) ;
            userRecord.put("dt", dt) ;
            userRecord.put("hh", hh) ;

            Set<String> newSessions = new HashSet<String>() ;

            // Must be re-read each iteration: the jsonSet write below mutates this entry.
            String userData = userState.get(userKey) ;

            // Read the sessions stored under THIS branch's path (see BUGFIX note above).
            List<String> oldSessions = (List<String>) Utils.jsonGet(userData, Arrays.asList(branchChannel, branchVersion, dt, hh, "sessions")) ;
            if (oldSessions != null) newSessions.addAll(oldSessions) ;

            // Platforms without a cold_start event are tracked by session id instead.
            if (!Utils.hasColdStartEvent(platformName)) {
                newSessions.add(session_id) ;
            }

            userRecord.put("sessions", newSessions) ;

            out.collect(new Tuple2<>("user_state", Row.of(om.writeValueAsString(userRecord)))) ;

            userState.put(userKey, Utils.jsonSet(userData, Arrays.asList(branchChannel, branchVersion, dt, hh), userRecord)) ;
        }
    }

    /**
     * Bootstraps the aggregated dashboard counters from Cassandra. Each CQL row is one
     * JSON document keyed by (tenant_id, project_id, app_id, channel, version, dt, hh).
     * The pn (partition number) argument is currently unused but kept for interface parity
     * with the other load* helpers.
     */
    public static void loadResultState(String cql, MapState<Row, String> resultState, int pn, Session cqlSession) throws Exception {
        ObjectMapper mapper = new ObjectMapper() ;
        for (com.datastax.driver.core.Row cqlRow : cqlSession.execute(cql)) {
            String payload = cqlRow.getObject(0).toString() ;
            Map<String, Object> fields = mapper.readValue(payload, Map.class) ;
            Row stateKey = Row.of(
                fields.get("tenant_id"), fields.get("project_id"), fields.get("app_id"),
                fields.get("channel"), fields.get("version"), fields.get("dt"), fields.get("hh")
            ) ;
            resultState.put(stateKey, payload) ;
        }
    }
    /**
     * Updates the aggregated dashboard counters for one incoming event, across the
     * four (channel, version) roll-up branches, at three time granularities:
     * per-hour (dt, hh), per-day (dt, "_ALL_") and all-time ("_ALL_", "_ALL_").
     * Emits each updated record to the "dashboard" sink and stores it back in state.
     *
     * NOTE(review): this relies on being called BEFORE updateUserState for the same
     * event (see the caller's flatMap), so userState still reflects the pre-event
     * view when deciding new/active device increments — confirm that ordering stays.
     *
     * BUGFIX: view_num_today was read from state and written out but never
     * incremented, so the daily view counter stayed frozen at its loaded value while
     * every sibling hh/today counter pair was maintained. It now increments alongside
     * view_num_hh on every event.
     */
    public static void updateResultState (int pn, MapState<Row, String> userState, MapState<Row, String> resultState, Collector<Tuple2<String, Row>> out, Object jsonRaw) throws Exception {
        ObjectMapper om = new ObjectMapper() ;
        Map<String, Object> json = (Map<String, Object>) jsonRaw ;
        String platformName = Utils.extractPlatformName(json) ;

        Map<String, Object> header = (Map<String, Object>) json.get("common") ;
        Map<String, Object> headerBasic = (Map<String, Object>) header.get("basic") ;
        String tenant_id = (String) headerBasic.get("tenant_id") ;
        String project_id = (String) headerBasic.get("project_id") ;
        String app_id = (String) headerBasic.get("app_id") ;
        String apex_id = (String) headerBasic.get("apex_id") ;

        // Channel falls back from market_name to app_package_id, then to the literal "null".
        String channel = (String) headerBasic.get("market_name") ;
        if (channel == null) channel = (String) headerBasic.get("app_package_id") ;
        if (channel == null || channel.equals("")) {
            channel = "null" ;
        }
        String version = (String) headerBasic.get("app_version") ;
        if (version == null || version.equals("")) {
            version = "null" ;
        }
        String session_id = (String) headerBasic.get("session_id") ;

        Map<String, Object> event = (Map<String, Object>) json.get("event") ;
        String eventName = Utils.extractEventName(event) ;
        Map<String, Object> eventBasic = (Map<String, Object>) event.get("basic") ;
        String ts = Utils.parseChinaTs((long)eventBasic.get("ts")) ;
        String dt = ts.substring(0, 8) ;         // date bucket, yyyyMMdd
        String hh = ts.substring(8, 10) + "00" ; // hour bucket, e.g. "1400"

        // The four roll-up branches for (channel, version).
        List<List<Object>> branchInfos = Arrays.asList(
            Arrays.asList(channel, version),
            Arrays.asList("_ALL_", version),
            Arrays.asList(channel, "_ALL_"),
            Arrays.asList("_ALL_", "_ALL_")
        ) ;

        for (List<Object> branchInfo : branchInfos) {
            String branchChannel = (String) branchInfo.get(0) ;
            String branchVersion = (String) branchInfo.get(1) ;

            // State keys for the three time granularities of this branch.
            Row allKey = Row.of(tenant_id, project_id, app_id, branchChannel, branchVersion, "_ALL_", "_ALL_") ;
            Row todayKey = Row.of(tenant_id, project_id, app_id, branchChannel, branchVersion, dt, "_ALL_") ;
            Row hhKey = Row.of(tenant_id, project_id, app_id, branchChannel, branchVersion, dt, hh) ;

            Map<String, Object> oldAll = (resultState.get(allKey) != null) ? om.readValue(resultState.get(allKey), Map.class) : null;
            Map<String, Object> oldToday = (resultState.get(todayKey) != null) ? om.readValue(resultState.get(todayKey), Map.class) : null;
            Map<String, Object> oldHh = (resultState.get(hhKey) != null) ? om.readValue(resultState.get(hhKey), Map.class) : null;

            // The user's prior activity at each granularity (null == never seen).
            String userData = userState.get(Row.of(tenant_id, project_id, app_id, apex_id)) ;
            Object userAllData = Utils.jsonGet(userData, Arrays.asList(branchChannel, branchVersion)) ;
            Object userTodayData = Utils.jsonGet(userAllData, Arrays.asList(dt)) ;
            Object userHourData = Utils.jsonGet(userTodayData, Arrays.asList(hh)) ;

            // All stored records use "_hh"-suffixed column names regardless of
            // granularity; the granularity is carried by the dt/hh key fields.
            long device_num_new_all = (oldAll == null) ? 0 : ((Number)oldAll.get("device_num_new_hh")).longValue() ;
            long device_num_new_today = (oldToday == null) ? 0 : ((Number)oldToday.get("device_num_new_hh")).longValue() ;
            long device_num_new_hh = (oldHh == null) ? 0 : ((Number)oldHh.get("device_num_new_hh")).longValue() ;
            long device_num_active_hh = (oldHh == null) ? 0 : ((Number)oldHh.get("device_num_active_hh")).longValue() ;
            long device_num_active_today = (oldToday == null) ? 0 : ((Number)oldToday.get("device_num_active_hh")).longValue() ;
            long session_num_hh = (oldHh == null) ? 0 : ((Number)oldHh.get("session_num_hh")).longValue() ;
            long session_num_today = (oldToday == null) ? 0 : ((Number)oldToday.get("session_num_hh")).longValue() ;
            long view_num_hh = (oldHh == null) ? 0 : ((Number)oldHh.get("view_num_hh")).longValue() ;
            long view_num_today = (oldToday == null) ? 0 : ((Number)oldToday.get("view_num_hh")).longValue() ;

            // Never seen this user on this branch at all: a brand-new device.
            if (userAllData == null) {
                device_num_new_all += 1 ;
                device_num_new_today += 1 ;
                device_num_new_hh += 1 ;
            }
            // First activity today on this branch.
            if (userTodayData == null) {
                device_num_active_today += 1;
                // Session counting: platforms with cold_start events count cold_starts,
                // others count distinct session ids.
                if (Utils.hasColdStartEvent(platformName)) {
                    if (eventName.equals("cold_start")) {
                        session_num_today += 1 ;
                    }
                } else {
                    // NOTE(review): userTodayData is null inside this branch, so this
                    // lookup always yields null and the increment always fires —
                    // looks like dead code; confirm intent before simplifying.
                    List<Object> sessions = (List<Object>)Utils.jsonGet(userTodayData, Arrays.asList("sessions")) ;
                    if (sessions == null || !sessions.contains(session_id)) {
                        session_num_today += 1 ;
                    }
                }
            }
            // First activity this hour on this branch.
            if (userHourData == null) {
                device_num_active_hh += 1 ;
                if (Utils.hasColdStartEvent(platformName)) {
                    if (eventName.equals("cold_start")) {
                        session_num_hh += 1 ;
                    }
                } else {
                    // NOTE(review): same dead-lookup pattern as the today branch above.
                    List<Object> sessions = (List<Object>)Utils.jsonGet(userHourData, Arrays.asList("sessions")) ;
                    if (sessions == null || !sessions.contains(session_id)) {
                        session_num_hh += 1 ;
                    }
                }
            }
            // Every event counts as one view at both granularities (see BUGFIX note).
            view_num_hh += 1 ;
            view_num_today += 1 ;

            // ---- per-hour record ----
            Map<String, Object> hhRecord = new HashMap<String, Object>() ;
            hhRecord.put("tenant_id", tenant_id) ;
            hhRecord.put("project_id", project_id) ;
            hhRecord.put("app_id", app_id) ;
            hhRecord.put("channel", branchChannel) ;
            hhRecord.put("version", branchVersion) ;
            hhRecord.put("dt", dt) ;
            hhRecord.put("hh", hh) ;
            hhRecord.put("part_no", pn) ;

            hhRecord.put("device_num_new_hh", device_num_new_hh) ;
            hhRecord.put("device_num_new_today", device_num_new_today) ;
            hhRecord.put("device_num_active_hh", device_num_active_hh) ;
            hhRecord.put("device_num_active_today", device_num_active_today) ;
            hhRecord.put("session_num_hh", session_num_hh) ;
            hhRecord.put("session_num_today", session_num_today) ;
            hhRecord.put("view_num_hh", view_num_hh) ;
            hhRecord.put("view_num_today", view_num_today) ;

            String hhJson = om.writeValueAsString(hhRecord) ;
            resultState.put(hhKey, hhJson) ;
            out.collect(new Tuple2<>("dashboard", Row.of(hhJson))) ;

            // ---- per-day record (hh = "_ALL_"); today's totals stored in the _hh columns ----
            Map<String, Object> todayRecord = new HashMap<String, Object>() ;
            todayRecord.put("tenant_id", tenant_id) ;
            todayRecord.put("project_id", project_id) ;
            todayRecord.put("app_id", app_id) ;
            todayRecord.put("channel", branchChannel) ;
            todayRecord.put("version", branchVersion) ;
            todayRecord.put("dt", dt) ;
            todayRecord.put("hh", "_ALL_") ;
            todayRecord.put("part_no",  pn) ;

            todayRecord.put("device_num_new_hh", device_num_new_today) ;
            todayRecord.put("device_num_active_hh", device_num_active_today) ;
            todayRecord.put("session_num_hh", session_num_today) ;
            todayRecord.put("view_num_hh", view_num_today) ;

            String todayJson = om.writeValueAsString(todayRecord) ;

            resultState.put(todayKey, todayJson) ;
            out.collect(new Tuple2<>("dashboard", Row.of(todayJson))) ;

            // ---- all-time record (dt = hh = "_ALL_"); only tracks new devices ----
            Map<String, Object> allRecord = new HashMap<String, Object>() ;
            allRecord.put("tenant_id", tenant_id) ;
            allRecord.put("project_id", project_id) ;
            allRecord.put("app_id", app_id) ;
            allRecord.put("channel", branchChannel) ;
            allRecord.put("version", branchVersion) ;
            allRecord.put("dt", "_ALL_") ;
            allRecord.put("hh", "_ALL_") ;
            allRecord.put("part_no", pn) ;
            allRecord.put("device_num_new_hh", device_num_new_all) ;

            String allJson = om.writeValueAsString(allRecord) ;
            resultState.put(allKey, allJson) ;
            out.collect(new Tuple2<>("dashboard", Row.of(allJson))) ;
        }
    }
                
    /**
     * Job entry point: wires Kafka (Avro, schema-registry) -> keyed stateful flatMap
     * (the three update* stages above) -> three Cassandra sinks, then executes the
     * Flink pipeline "streaming:dashboard".
     */
    public static void main(String[] args) throws Exception {
        // NOTE(review): reads args[1], not args[0] — confirm the launcher passes the
        // meta-server node list as the SECOND program argument.
        String metaServerNodes = args[1] ;
        System.out.println("metaServerNodes:" + metaServerNodes) ;
        Map<String, String> dotenv = ApiClient.getMetaEnv(metaServerNodes) ;

        // Keyspace/topic prefixes and connection endpoints, with dev defaults.
        String buffer_prefix = dotenv.getOrDefault("PIPELINE_DASHBOARD_BUFFER_PREFIX", "data_buffer_dev");
        String pipeline_prefix = dotenv.getOrDefault("PIPELINE_DASHBOARD_PIPELINE_PREFIX", "data_pipeline_dev") ;
        String channel_prefix = dotenv.getOrDefault("PIPELINE_DASHBOARD_CHANNEL_PREFIX", "data_channel_dev") ;

        String kafkas_s = dotenv.getOrDefault("PIPELINE_DASHBOARD_KAFKAS", "127.0.0.1:9092");
        String srs_s = dotenv.getOrDefault("PIPELINE_DASHBOARD_KAFKA_SRS", "127.0.0.1:8081");
        String cassandra_s = dotenv.getOrDefault("ACCESS_STREAMING_CASSANDRA_NODES", "127.0.0.1:9042");

        System.out.println("buffer_prefix:" + buffer_prefix) ;
        System.out.println("pipeline_prefix:" + pipeline_prefix) ;
        System.out.println("channel_prefix:" + channel_prefix) ;
        System.out.println("kafkas_s:" + kafkas_s) ;
        System.out.println("srs_s:" + srs_s) ;
        System.out.println("cassandra_s:" + cassandra_s) ;

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism is pinned to 1; the one-time state bootstrap in the flatMap
        // below (initFlag) loads the FULL Cassandra tables, which only partitions
        // correctly at parallelism 1 as written.
        env.setParallelism(1) ;

        // Kafka consumer over Avro-encoded event batches, decoded against the
        // common reader schema fetched from the meta server.
        String reader_schema_s = ApiClient.getMetaCommonSchema(metaServerNodes) ;
        Schema reader_schema = new Schema.Parser().parse(reader_schema_s) ;
        Properties consumer_properties = new Properties();
        consumer_properties.setProperty("bootstrap.servers", kafkas_s);
        consumer_properties.setProperty("group.id", "data_pipeline__dashboard");
        consumer_properties.setProperty("auto.offset.reset", "earliest");
                        
        SourceFunction<GenericData.Array> kafka_source = new FlinkKafkaConsumer<>(
            String.format("%s_%s", buffer_prefix, "dc_sdk_push"),
            ExtendedConfluentRegistryAvroDeserializationSchema.forGenericArray(reader_schema, srs_s),
            consumer_properties
        ) ;

        // Serializable Cassandra cluster factory; only the FIRST host:port from the
        // configured node list is used as the contact point.
        ClusterBuilder cb = new ClusterBuilder() {
                private static final long serialVersionUID = -1671641202177852775L;
                
                @Override
                protected Cluster buildCluster(Cluster.Builder builder) {
                    String[] hosts = cassandra_s.split(",");
                    String single_ip = hosts[0].split(":")[0];
                    int single_port = Integer.parseInt(hosts[0].split(":")[1]);
                    Cluster cluster = builder.addContactPoint(single_ip).withPort(single_port).build();
                    return cluster;
                }
        } ;
        // Sinks insert the single JSON column produced by the update* stages;
        // the matching SELECT JSON queries bootstrap state on first event.
        CassandraRowSink myUserInitStateSink = new CassandraRowSink(1, "INSERT INTO " + pipeline_prefix + ".dashboard_user_init JSON ?", cb) ;
        CassandraRowSink myUserStateSink = new CassandraRowSink(1, "INSERT INTO " + pipeline_prefix + ".dashboard_user JSON ?", cb) ;
        CassandraRowSink myDashboardSink = new CassandraRowSink(1, "INSERT INTO " + channel_prefix + ".dashboard JSON ?", cb) ;
        String userInitCql = "SELECT JSON * FROM " + pipeline_prefix + ".dashboard_user_init" ;
        String userCql = "SELECT JSON * FROM " + pipeline_prefix + ".dashboard_user" ;
        String resultCql = "SELECT JSON * FROM " + channel_prefix + ".dashboard" ;
            
        // Stage 1: unpack each Avro batch into (key, event-map) pairs;
        // stage 2 (after keyBy): stateful aggregation emitting tagged (sink-name, row) tuples.
        DataStream<Tuple2<String, Row>> stream = env.addSource(kafka_source)
            .flatMap(new FlatMapFunction<GenericData.Array, Tuple2<String, Object>>() {
                    @Override
                    public void flatMap(GenericData.Array xs, Collector<Tuple2<String, Object>> out) throws Exception {
                        System.out.println("[debug] xs:" + xs) ;
                        GenericData.Array<GenericRecord> bulk = xs ;
                        for (GenericRecord sdkRecord : bulk) {
                            GenericRecord header = (GenericRecord) sdkRecord.get("common") ;
                            GenericRecord headerBasic = (GenericRecord) header.get("basic") ;
                            String apex_id = headerBasic.get("apex_id").toString() ;
                            for (GenericRecord event : (GenericData.Array<GenericRecord>) sdkRecord.get("events")) {
                                Map<String, Object> eventMap = new HashMap<String, Object>() ;

                                try {
                                    ObjectMapper om = new ObjectMapper() ;
                                    eventMap.put("common", Utils.avroToJson(header)) ;
                                    eventMap.put("event", Utils.avroToJson(event)) ;
                                    // NOTE(review): keying is a placeholder — every record gets the
                                    // same key, so all state lands on one subtask. Fine at
                                    // parallelism 1; must be a real apex_id hash before scaling out.
                                    String apex_id_hash = "_unimplemented_" ;
                                    out.collect(new Tuple2<>(apex_id_hash, eventMap)) ;
                                } catch (Exception e) {
                                    e.printStackTrace();
                                    throw e ;
                                }
                            }
                        }
                    }
            }).keyBy(x -> x.f0)
            .flatMap(new RichFlatMapFunction<Tuple2<String, Object>, Tuple2<String, Row>>() {
                    private int pn = -1 ;
                    // Lazily triggers the one-time Cassandra state bootstrap on the first event.
                    private boolean initFlag = true ;
                    private String cassandras_ref = cassandra_s ;
                    private Session cqlSession = null ;
                    private transient MapState<Row, String> userInitState  ;
                    private transient MapState<Row, String> userState ;
                    private transient MapState<Row, String> resultState ;

                    @Override
                    public void open(Configuration config) {
                        this.pn = getRuntimeContext().getIndexOfThisSubtask() ;

                        // Keyed state: user init records, keyed by 4-field Row.
                        MapStateDescriptor<Row, String> userInitDescriptor = new MapStateDescriptor<Row, String> (
                             "userInitStateDesc", Types.ROW(Types.STRING, Types.STRING, Types.STRING, Types.STRING), Types.STRING
                        ) ;
                        this.userInitState = getRuntimeContext().getMapState(userInitDescriptor) ;

                        // Keyed state: per-user session accumulation, keyed by 4-field Row.
                        MapStateDescriptor<Row, String> userDescriptor = new MapStateDescriptor<Row, String> (
                            "userStateDesc", Types.ROW(Types.STRING, Types.STRING, Types.STRING, Types.STRING), Types.STRING
                        ) ;
                        this.userState = getRuntimeContext().getMapState(userDescriptor) ;

                        // Keyed state: dashboard counters, keyed by 7-field Row.
                        MapStateDescriptor<Row, String> resultDescriptor = new MapStateDescriptor<Row, String> (
                            "resultStateDesc", Types.ROW(Types.STRING, Types.STRING, Types.STRING, Types.STRING,Types.STRING, Types.STRING, Types.STRING), Types.STRING
                        ) ;
                        this.resultState = getRuntimeContext().getMapState(resultDescriptor) ;
                        this.cqlSession = Utils.mkCqlSession(cassandras_ref) ;
                    }
                    
                    @Override
                    public void flatMap(Tuple2<String, Object> xs, Collector<Tuple2<String, Row>> out) throws Exception {
                        // One-time bootstrap of all three state maps from Cassandra.
                        if (initFlag) {
                            loadUserInitState(userInitCql, userInitState, pn, cqlSession) ;
                            loadUserState(userCql, userState, pn, cqlSession) ;
                            loadResultState(resultCql, resultState, pn, cqlSession) ;
                            initFlag = false ;
                        }

                        ObjectMapper om = new ObjectMapper() ;
                        Object json = xs.f1 ;
        
                        try {
                            // Order matters: result counters are derived from the PRE-event
                            // user state, so updateResultState must run before updateUserState.
                            updateResultState(pn, userState, resultState, out, json) ;
                            updateUserState(userState, out, json) ;
                            updateUserInitState(userInitState, out, json) ;

                            out.collect(new Tuple2<>("print", Row.of(om.writeValueAsString(json)))) ;
                        } catch (Exception e) {
                            e.printStackTrace();
                            throw e ;
                        }
                        
                    }
                }) ;

/*
        stream.flatMap(new FlatMapFunction<Tuple2<String, Row>, Row>() {
                @Override
                public void flatMap(Tuple2<String, Row> in, Collector<Row> out) {
                    if (in.f0.equals("print")) out.collect(in.f1) ;
                }
        }).addSink(new PrintSinkFunction()) ;
*/

        // Demultiplex the tagged stream to its three Cassandra sinks by tag.
        stream.flatMap(new FlatMapFunction<Tuple2<String, Row>, Row>() {
            @Override
            public void flatMap(Tuple2<String, Row> in, Collector<Row> out) {
                if (in.f0.equals("user_init_state")) out.collect(in.f1) ;
            }
        }).addSink(myUserInitStateSink) ;

        stream.flatMap(new FlatMapFunction<Tuple2<String, Row>, Row>() {
            @Override
            public void flatMap(Tuple2<String, Row> in, Collector<Row> out) {
                if (in.f0.equals("user_state")) out.collect(in.f1) ;
            }
        }).addSink(myUserStateSink) ;

        stream.flatMap(new FlatMapFunction<Tuple2<String, Row>, Row>() {
            @Override
            public void flatMap(Tuple2<String, Row> in, Collector<Row> out) {
                if (in.f0.equals("dashboard")) out.collect(in.f1) ;
            }
        }).addSink(myDashboardSink) ;
        
        env.execute("streaming:dashboard");
        System.out.println("finished!") ;
    }
}
