package dwd ;

import lib.ApiClient ;
import lib.ExtendedConfluentRegistryAvroDeserializationSchema;

import java.io.IOException;
import java.util.regex.Pattern;
import java.util.regex.Matcher;

import org.apache.zookeeper.ZooKeeper;

import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.mapred.lib.MultipleOutputFormat;
import org.apache.hadoop.mapred.RecordWriter;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.Progressable;

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

import extension.ParseDcEventColdStart;
import extension.ParseDcEventHotStart;
import extension.ParseDcEventUseEnd;
import extension.ParseDcEventClick;
import extension.ParseDcEventViewPage ;
import extension.ParseDcEventCustom ;

import common.DatetimeUtils;
import common.MailAgent;
import common.Utils;

import java.util.List ;
import java.util.Map ;
import java.net.InetAddress;
import java.util.concurrent.ExecutionException;

import com.fasterxml.jackson.databind.ObjectMapper;

import com.google.protobuf.Descriptors.Descriptor;
import com.google.protobuf.DynamicMessage;

import org.apache.logging.log4j.Logger ;
import org.apache.logging.log4j.LogManager ;


/**
 * Batch backfill job for SDK data-collection events.
 *
 * <p>Reads base64-encoded SDK push records from the CSV "buffer" table on HDFS
 * ({@code dc_sdk_push}), parses them with an event-type-specific table function
 * (registered as {@code ParseDataBuffer}), and {@code INSERT OVERWRITE}s the
 * matching Avro-format {@code dc_event_*} partitioned table for the requested
 * date range.
 *
 * <p>Usage: {@code <jobName>:<targetTable> <metaServerNodes> <startDt> [endDt]}
 * — when {@code endDt} is omitted, a single day ({@code startDt}) is processed.
 * On failure (other than Flink's "Failed to wait job finish"), an alert e-mail
 * is sent before the exception is rethrown.
 */
public class DcEventBatch {

    // ---------------------------------------------------------------------
    // Shared Flink SQL ROW fragments. These nested-ROW column definitions
    // were previously copy-pasted into every per-event DDL; they must stay
    // in sync across tables, so they are defined exactly once here.
    // ---------------------------------------------------------------------

    /** Nested event-header schema shared by all dc_event_* tables. */
    private static final String HEADER_ROW =
        "header ROW<" +
            "basic ROW<" +
                "message_id STRING, project_id STRING, app_id STRING, tenant_id STRING, upload_time STRING," +
                "apex_id STRING, session_id STRING, app_version STRING, app_package_id STRING, carrier STRING, mini_program_scene STRING," +
                "market_name STRING" +
            ">," +
            "sdk_info ROW<sdk_version STRING, sdk_type STRING>," +
            "device_info ROW<" +
                "device_id STRING, device_model STRING, device_brand STRING, device_type STRING, device_uuid STRING," +
                "android_id STRING, imei STRING, mac STRING, user_agent STRING, idfa STRING," +
                "idfv STRING, oaid STRING" +
            ">," +
            "screen ROW<screen_height STRING, screen_width STRING>," +
            "os ROW<os_system STRING, os_version STRING>," +
            "browser ROW<browser_type STRING, browser_version STRING>," +
            "wechat ROW<wechat_open_id STRING, wechat_union_id STRING>," +
            "referrer_info ROW<referrer_url STRING, referrer_host STRING>," +
            "utm_info ROW<" +
                "utm_matching_type STRING, utm_source STRING, utm_medium STRING, utm_term STRING, utm_content STRING," +
                "utm_campaign STRING" +
            ">" +
        ">";

    /** "basic" ROW common to cold_start / hot_start events. */
    private static final String BASIC_ROW =
        "basic ROW<" +
            "event_code STRING, event_name STRING, event_type STRING, send_type STRING, page_type STRING," +
            "ts STRING, network_type STRING, time_since_last_operation STRING" +
        ">";

    /** "basic" ROW for use_end: adds event_duration. */
    private static final String BASIC_ROW_USE_END =
        "basic ROW<" +
            "event_code STRING, event_name STRING, event_duration STRING, event_type STRING, send_type STRING, page_type STRING," +
            "ts STRING, network_type STRING, time_since_last_operation STRING" +
        ">";

    /** "basic" ROW for click / custom: adds event_code_md5. */
    private static final String BASIC_ROW_MD5 =
        "basic ROW<" +
            "event_code STRING, event_name STRING, event_type STRING, send_type STRING, page_type STRING," +
            "ts STRING, network_type STRING, time_since_last_operation STRING, event_code_md5 STRING" +
        ">";

    /** "basic" ROW for view_page: adds event_code_md5 and browse_time. */
    private static final String BASIC_ROW_VIEW_PAGE =
        "basic ROW<" +
            "event_code STRING, event_name STRING, event_type STRING, send_type STRING, page_type STRING," +
            "ts STRING, network_type STRING, time_since_last_operation STRING, event_code_md5 STRING, browse_time STRING" +
        ">";

    private static final String BUSINESS_INFO_ROW =
        "business_info ROW<" +
            "user_id STRING," +
            "extensions MAP<STRING, STRING>" +
        ">";

    private static final String LOCATION_INFO_ROW =
        "location_info ROW<" +
            "ip STRING," +
            "latitude STRING," +
            "longitude STRING," +
            "province STRING," +
            "city STRING," +
            "country STRING," +
            "province_id STRING," +
            "city_id STRING," +
            "county STRING" +
        ">";

    private static final String COORDINATE_INFO_ROW =
        "coordinate_info ROW<" +
            "page_x STRING, page_y STRING, client_x STRING, client_y STRING" +
        ">";

    private static final String ELEMENT_INFO_ROW =
        "element_info ROW<" +
            "element_id STRING, element_name STRING, element_target_url STRING, element_type STRING, element_content STRING," +
            "element_selector STRING, element_position STRING, element_index STRING, element_tag_name STRING, element_class STRING," +
            "element_path STRING" +
        ">";

    private static final String PAGE_INFO_ROW =
        "page_info ROW<" +
            "current_page_id STRING," +
            "current_page_name STRING," +
            "current_page_url STRING," +
            "current_page_host STRING," +
            "current_page_query STRING," +
            "current_page_path STRING," +
            "source_page_id STRING," +
            "source_page_name STRING," +
            "source_page_url STRING," +
            "source_page_host STRING," +
            "source_page_query STRING," +
            "source_page_path STRING" +
        ">";

    private static final String SIZE_INFO_ROW =
        "size_info ROW<" +
            "visible_window_width STRING," +
            "visible_window_height STRING," +
            "page_height STRING," +
            "page_width STRING" +
        ">";

    // SELECT field lists per event family (must match the DDL column order).
    private static final String BASE_EVENT_FIELDS =
        "platform,dt,header,basic,business_info,location_info";
    private static final String PAGE_EVENT_FIELDS =
        "platform,dt,header,basic,business_info,location_info,page_info,size_info";
    private static final String CLICK_EVENT_FIELDS =
        "platform,dt,header,basic,coordinate_info,element_info,business_info,location_info,page_info,size_info";

    /**
     * Builds a filesystem/Avro CREATE TABLE DDL for one dc_event_* sink,
     * partitioned by (platform, dt).
     *
     * @param tableName   Flink table name (e.g. "dc_event_click")
     * @param eventPrefix HDFS path prefix up to ".../dc_event"
     * @param eventDir    the "event=<name>" partition directory suffix
     * @param rows        the non-partition ROW column fragments, in order
     * @return the complete DDL statement
     */
    private static String eventTableDdl(String tableName, String eventPrefix, String eventDir, String... rows) {
        return "CREATE TABLE " + tableName + " (\n" +
            "platform STRING, dt STRING," +
            String.join(",", rows) +
            ") PARTITIONED BY (platform, dt) WITH (\n" +
            "  'connector' = 'filesystem',\n" +
            String.format("  'path' = '%s/event=%s',\n", eventPrefix, eventDir) +
            "  'format' = 'avro'\n" +
            ")";
    }

    public static void main(String[] args) throws Exception {
        // Fail fast with a usage message instead of a bare ArrayIndexOutOfBoundsException.
        if (args.length < 3) {
            throw new IllegalArgumentException(
                "usage: DcEventBatch <jobName>:<targetTable> <metaServerNodes> <startDt> [endDt]");
        }

        String targetTable = args[0].split(":")[1];
        String metaServerNodes = args[1];
        Map<String, String> dotenv = ApiClient.getMetaEnv(metaServerNodes);
        String zks_s = dotenv.getOrDefault("PIPELINE_DC_EVENT_ZKS", "127.0.0.1:2181");
        String zk_hdfs = dotenv.getOrDefault("PIPELINE_DC_EVENT_ZK_HDFS", "cdh");
        String buffer_prefix = dotenv.getOrDefault("PIPELINE_DC_EVENT_BUFFER_PREFIX", "data_buffer_dev");
        String pipeline_prefix = dotenv.getOrDefault("PIPELINE_DC_EVENT_PIPELINE_PREFIX", "data_pipeline_dev");

        // Date range: single day unless an explicit endDt is given.
        String startDt = args[2];
        String endDt = (args.length >= 4) ? args[3] : startDt;
        System.out.println(String.format("startDt: %s -> endDt: %s", startDt, endDt));

        // Captured up front so the failure e-mail can report the launch date.
        String executeDate = DatetimeUtils.nowWithFormat("yyyyMMdd");

        System.out.println("targetTable:(" + targetTable + ")");

        try {
            System.out.println("zookeepers:" + zks_s);

            // Resolve the active HDFS NameNode via ZooKeeper. Close the
            // handle once the lookup is done — the previous version leaked
            // the connection for the lifetime of the JVM.
            String host;
            Integer port;
            ZooKeeper zk = new ZooKeeper(zks_s, 600000, null);
            try {
                Descriptor descriptor = ApiClient.getHdfsDescriptor();
                DynamicMessage activeNodeInfo = ApiClient.getHdfsMaster(zk, descriptor, zk_hdfs);
                host = (String) activeNodeInfo.getField(descriptor.findFieldByName("hostname"));
                port = (Integer) activeNodeInfo.getField(descriptor.findFieldByName("port"));
            } finally {
                zk.close();
            }

            System.out.println("host:" + host + ", port:" + port);

            String hadoopPrefix = String.format("hdfs://%s:%d/", host, port);

            EnvironmentSettings settings = EnvironmentSettings
                .newInstance()
                .useBlinkPlanner()
                .inBatchMode()
                .build();
            TableEnvironment env = TableEnvironment.create(settings);

            // Source: base64-encoded staged pushes, CSV on HDFS.
            String bufferPath = String.format("%s/user/hive/warehouse/%s.db/dc_sdk_push/origin=staging", hadoopPrefix, buffer_prefix);
            System.out.println("bufferPath: " + bufferPath);
            String hdfsSourceDdl =
                "CREATE TABLE dc_sdk_push (\n" +
                "  region STRING, dt STRING, b64 STRING\n" +
                ") PARTITIONED BY (region, dt) WITH (\n" +
                "  'connector' = 'filesystem',\n" +
                "  'path' = '" + bufferPath + "',\n" +
                "  'format' = 'csv'\n" +
                ")";

            // Sinks: one Avro table per event type, sharing the ROW fragments above.
            String eventPrefix = String.format("%s/user/hive/warehouse/%s.db/dc_event", hadoopPrefix, pipeline_prefix);
            System.out.println("eventPrefix: " + eventPrefix);

            String coldStartEventDdl = eventTableDdl("dc_event_cold_start", eventPrefix, "cold_start",
                HEADER_ROW, BASIC_ROW, BUSINESS_INFO_ROW, LOCATION_INFO_ROW);

            String hotStartEventDdl = eventTableDdl("dc_event_hot_start", eventPrefix, "hot_start",
                HEADER_ROW, BASIC_ROW, BUSINESS_INFO_ROW, LOCATION_INFO_ROW);

            String useEndEventDdl = eventTableDdl("dc_event_use_end", eventPrefix, "use_end",
                HEADER_ROW, BASIC_ROW_USE_END, BUSINESS_INFO_ROW, LOCATION_INFO_ROW);

            String clickEventDdl = eventTableDdl("dc_event_click", eventPrefix, "click",
                HEADER_ROW, BASIC_ROW_MD5, COORDINATE_INFO_ROW, ELEMENT_INFO_ROW,
                BUSINESS_INFO_ROW, LOCATION_INFO_ROW, PAGE_INFO_ROW, SIZE_INFO_ROW);

            String viewPageEventDdl = eventTableDdl("dc_event_view_page", eventPrefix, "view_page",
                HEADER_ROW, BASIC_ROW_VIEW_PAGE, BUSINESS_INFO_ROW, LOCATION_INFO_ROW,
                PAGE_INFO_ROW, SIZE_INFO_ROW);

            String customEventDdl = eventTableDdl("dc_event_custom", eventPrefix, "custom",
                HEADER_ROW, BASIC_ROW_MD5, BUSINESS_INFO_ROW, LOCATION_INFO_ROW,
                PAGE_INFO_ROW, SIZE_INFO_ROW);

            // Pick the SELECT field list and register the matching parser UDTF.
            // Every valid case maps 1:1 onto targetTable, so the sink table
            // name is just targetTable itself; default rejects anything else.
            String eventFields;
            switch (targetTable) {
                case "dc_event_cold_start":
                    eventFields = BASE_EVENT_FIELDS;
                    env.createTemporarySystemFunction("ParseDataBuffer", ParseDcEventColdStart.class);
                    break;

                case "dc_event_hot_start":
                    eventFields = BASE_EVENT_FIELDS;
                    env.createTemporarySystemFunction("ParseDataBuffer", ParseDcEventHotStart.class);
                    break;

                case "dc_event_use_end":
                    eventFields = BASE_EVENT_FIELDS;
                    env.createTemporarySystemFunction("ParseDataBuffer", ParseDcEventUseEnd.class);
                    break;

                case "dc_event_click":
                    eventFields = CLICK_EVENT_FIELDS;
                    env.createTemporarySystemFunction("ParseDataBuffer", ParseDcEventClick.class);
                    break;

                case "dc_event_view_page":
                    eventFields = PAGE_EVENT_FIELDS;
                    env.createTemporarySystemFunction("ParseDataBuffer", ParseDcEventViewPage.class);
                    break;

                case "dc_event_custom":
                    eventFields = PAGE_EVENT_FIELDS;
                    env.createTemporarySystemFunction("ParseDataBuffer", ParseDcEventCustom.class);
                    break;

                default:
                    throw new IllegalArgumentException("invalid event table name: " + targetTable);
            }
            String eventTable = targetTable;

            // BUGFIX: the endDt placeholder was '%S' (uppercase), which
            // upper-cases the formatted value — wrong for any dt containing
            // letters. Both placeholders are now '%s'.
            String syncSql =
                String.format("INSERT OVERWRITE %s\n", eventTable) +
                "SELECT\n" +
                eventFields + "\n" +
                String.format("FROM (SELECT b64 FROM dc_sdk_push WHERE dt BETWEEN '%s' AND '%s')\n", startDt, endDt) +
                "INNER JOIN LATERAL TABLE(ParseDataBuffer('" + metaServerNodes + "',b64)) ON TRUE\n";

            env.executeSql(hdfsSourceDdl);
            env.executeSql(clickEventDdl);
            env.executeSql(coldStartEventDdl);
            env.executeSql(hotStartEventDdl);
            env.executeSql(useEndEventDdl);
            env.executeSql(customEventDdl);
            env.executeSql(viewPageEventDdl);
            env.executeSql(syncSql).await();

            System.out.println("finished!");

        } catch (Exception e) {
            // This particular wait-failure is treated as benign (the job was
            // submitted; only the client-side wait failed) — log and exit 0.
            if (e instanceof ExecutionException && "org.apache.flink.table.api.TableException: Failed to wait job finish".equals(e.getMessage())) {
                e.printStackTrace();
                return;
            }
            // Best-effort alert e-mail; a mail failure must not mask the
            // original exception, so it is caught and logged separately.
            try {
                String smtpHost = dotenv.get("PIPELINE_DC_EVENT_MAIL_SMTP_HOST");
                int port = Integer.parseInt(dotenv.get("PIPELINE_DC_EVENT_MAIL_SMTP_PORT"));
                String sender = dotenv.get("PIPELINE_DC_EVENT_MAIL_SMTP_SENDER");
                String username = dotenv.get("PIPELINE_DC_EVENT_MAIL_SMTP_USERNAME");
                String password = dotenv.get("PIPELINE_DC_EVENT_MAIL_SMTP_PASSWORD");
                String recipients = dotenv.get("PIPELINE_DC_EVENT_MAIL_RECIPIENTS");
                String ccs = dotenv.get("PIPELINE_DC_EVENT_MAIL_CCS");
                String subject = "DC事件日志处理-Flink任务执行失败提醒";
                InetAddress address = InetAddress.getLocalHost();
                String content = executeDate + "的Flink任务执行失败（当前任务处理的事件日期范围：" + startDt + "-" + endDt + "）\n服务器IP：" + address.getHostAddress() + "\n报错内容：\n\n" + Utils.exceptionToString(e);
                MailAgent mailAgent = new MailAgent(smtpHost, port, sender, username, password);
                mailAgent.send(subject, content, recipients, ccs, null);
            } catch (Exception ex) {
                ex.printStackTrace();
            }
            throw e;
        }
    }
}

