package lib.avro;

import java.util.Map;
import java.util.Base64;
import java.util.List;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Collections;

import java.io.InputStream;
import java.io.FileInputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPInputStream;

import com.github.luben.zstd.Zstd;

import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;
import okhttp3.MediaType;

import com.fasterxml.jackson.databind.ObjectMapper;

import org.apache.avro.Schema;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.Decoder;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericContainer;

// import lib.avro.io.ExtendedBinaryDecoder;
import lib.avro.io.ExtendedJsonDecoder;

import io.github.cdimascio.dotenv.Dotenv;
import io.github.cdimascio.dotenv.DotenvEntry;

import java.util.regex.Pattern;
import java.util.regex.Matcher;

import org.apache.flink.table.api.TableConfig;

import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.validate.SqlConformance;
import static org.apache.calcite.jdbc.CalciteSchemaBuilder.asRootSchema;

import org.apache.flink.configuration.Configuration;

import org.apache.flink.sql.parser.validate.FlinkSqlConformance;
import org.apache.flink.table.planner.calcite.FlinkPlannerImpl;
import org.apache.flink.table.planner.parse.CalciteParser;
import org.apache.flink.table.planner.operations.SqlToOperationConverter;
import org.apache.flink.table.planner.delegation.FlinkSqlParserFactories;
import org.apache.flink.table.planner.delegation.PlannerContext;
import org.apache.flink.table.catalog.CatalogManager;
import org.apache.flink.table.types.DataType;

import org.apache.flink.table.planner.catalog.CatalogManagerCalciteSchema;
import org.apache.flink.table.catalog.GenericInMemoryCatalog;
import org.apache.flink.table.catalog.FunctionCatalog;
import org.apache.flink.table.operations.ddl.CreateTableOperation;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.formats.avro.typeutils.AvroSchemaConverter;

import org.apache.avro.compiler.idl.Idl;
import org.apache.avro.Protocol;

/**
 * Static utilities for loading schemas (.avsc / .avdl / .sql / .env), registering
 * them with a Confluent-style schema registry, and encoding/decoding Avro payloads
 * (binary or JSON, optionally gzip/zstd-compressed and base64-wrapped).
 */
public class Avro {
  /** Shared, thread-safe JSON mapper; ObjectMapper is expensive to construct. */
  private static final ObjectMapper MAPPER = new ObjectMapper();

  /** Matches ${NAME} placeholders inside dotenv values; compiled once. */
  private static final Pattern VAR_PATTERN = Pattern.compile("\\$\\{([^}]*)\\}");

  private Avro() {
    // Static utility class — no instances.
  }

  /**
   * Recursively resolves {@code ${VAR}} placeholders inside the dotenv value of
   * {@code name}.
   *
   * @return the fully-resolved value, or {@code null} if {@code name} or any
   *         transitively referenced variable is undefined
   */
  public static String _resolve(Dotenv dotenv, String name) {
    String v = dotenv.get(name);
    if (v == null)
      return null;
    Matcher p = VAR_PATTERN.matcher(v);

    String ret = v;
    while (p.find()) {
      String subVarValue = _resolve(dotenv, p.group(1));
      if (subVarValue == null)
        return null;
      // BUG FIX: accumulate substitutions into `ret`. The original assigned
      // `ret = v.replace(...)`, so when a value held several placeholders only
      // the last substitution survived.
      ret = ret.replace(p.group(0), subVarValue);
    }
    return ret;
  }

  /**
   * Resolves {@code name} via {@link #_resolve}, falling back to
   * {@code defaultValue} when the variable chain cannot be resolved.
   */
  public static String resolve(Dotenv dotenv, String name, String defaultValue) {
    String resolveValue = _resolve(dotenv, name);
    return resolveValue != null ? resolveValue : defaultValue;
  }

  /** Reads {@code in} to exhaustion and returns its bytes. Does not close {@code in}. */
  private static byte[] toByteArray(InputStream in) throws Exception {
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    int nRead;
    byte[] data = new byte[16384];
    while ((nRead = in.read(data, 0, data.length)) != -1) {
      buffer.write(data, 0, nRead);
    }
    return buffer.toByteArray();
  }

  /** Decodes the whole stream as UTF-8 text. Does not close {@code in}. */
  private static String toText(InputStream in) throws Exception {
    // BUG FIX: the original used the platform default charset, which breaks
    // non-ASCII schema files on non-UTF-8 hosts.
    return new String(toByteArray(in), StandardCharsets.UTF_8);
  }

  /** Reads an entire file from the local filesystem as UTF-8 text. */
  public static String slurpFile(String schemaPath) throws Exception {
    // try-with-resources: the original leaked the stream if reading threw.
    try (InputStream in = new FileInputStream(schemaPath)) {
      return toText(in);
    }
  }

  /** Parses .avsc (Avro JSON schema) text into a {@link Schema}. */
  public static Schema parseAvsc(String schemaText) throws Exception {
    return new Schema.Parser().parse(schemaText);
  }

  /**
   * Parses an .avdl file for validation side effects only; always returns null.
   * {@code schemaPath} may carry a trailing {@code :selector} which is ignored here.
   */
  public static Object parseAvdl(String schemaPath) throws Exception {
    String schemaPathPhysical = schemaPath.split(":")[0];
    // try-with-resources: Idl is Closeable; the original leaked it and the
    // underlying FileInputStream.
    try (Idl parser = new Idl(new FileInputStream(schemaPathPhysical))) {
      parser.CompilationUnit(); // parse for validation only; result discarded
    }
    return null;
  }

  /**
   * Extracts one named schema from an .avdl file.
   *
   * @param schemaPath {@code <path>.avdl:<selector>}; the protocol must declare a
   *                   record named MAPPING whose field {@code <selector>} carries
   *                   the wanted schema
   */
  public static Schema parseAvdlSchema(String schemaPath) throws Exception {
    String[] schemaPathParts = schemaPath.split(":");
    // Fail with a descriptive message instead of ArrayIndexOutOfBoundsException.
    if (schemaPathParts.length < 2)
      throw new IllegalArgumentException("expected <path>.avdl:<selector>, got: " + schemaPath);
    String schemaPathPhysical = schemaPathParts[0];
    String schemaSelector = schemaPathParts[1];
    try (Idl parser = new Idl(new FileInputStream(schemaPathPhysical))) {
      return parser.CompilationUnit().getType("MAPPING").getField(schemaSelector).schema();
    }
  }

  /** Parses Flink SQL text into a Calcite {@link SqlNode} using a throwaway planner. */
  public static SqlNode parseSql(String schemaText) throws Exception {
    CatalogManager catalogManager = mkCatalogManager();
    PlannerContext plannerContext = mkPlannerContext(catalogManager);
    CalciteParser parser = plannerContext.createCalciteParser();

    return parser.parse(schemaText);
  }

  /**
   * Converts a Flink {@code CREATE TABLE} statement into the equivalent Avro
   * {@link Schema} of its persisted row type.
   */
  public static Schema parseSqlSchema(String schemaText) throws Exception {
    CatalogManager catalogManager = mkCatalogManager();
    PlannerContext plannerContext = mkPlannerContext(catalogManager);

    // NOTE(review): parseSql builds its own catalog/planner pair; harmless here
    // because only the parsed AST is reused, but slightly wasteful.
    SqlNode sqlNode = parseSql(schemaText);
    FlinkPlannerImpl planner = plannerContext.createFlinkPlanner(catalogManager.getCurrentCatalog(),
        catalogManager.getCurrentDatabase());
    CreateTableOperation op = (CreateTableOperation) SqlToOperationConverter.convert(planner, catalogManager, sqlNode)
        .get();
    DataType dataType = op.getCatalogTable().getSchema().toPersistedRowDataType();
    String avroSchemaText = AvroSchemaConverter.convertToSchema(dataType.getLogicalType()).toString();
    return parseAvsc(avroSchemaText);
  }

  /** Loads a dotenv file by name (resolved by the dotenv library's own lookup rules). */
  public static Dotenv parseDotenv(String schemaPath) throws Exception {
    return Dotenv.configure().filename(schemaPath).load();
  }

  /**
   * Reads a schema file and renders it as text, dispatching on the file extension:
   * .avsc is normalized through the Avro parser, .avdl is validated and returned
   * verbatim, .sql is rendered from the parsed AST, .env is resolved and emitted
   * as a JSON object, anything else is returned as-is.
   */
  public static String readRawSchema(String schemaPath) throws Exception {
    String schemaPathPhysical = schemaPath.split(":")[0];
    String schemaText = slurpFile(schemaPathPhysical);
    if (schemaPathPhysical.endsWith(".avsc")) {
      Schema avsc = parseAvsc(schemaText);
      return avsc.toString();
    } else if (schemaPathPhysical.endsWith(".avdl")) {
      parseAvdl(schemaPath); // validation only
      return schemaText;
    } else if (schemaPathPhysical.endsWith(".sql")) {
      SqlNode sqlNode = parseSql(schemaText);
      return sqlNode.toString();
    } else if (schemaPathPhysical.endsWith(".env")) {
      Map<String, String> m = new HashMap<String, String>();
      Dotenv dotenv = parseDotenv(schemaPathPhysical);
      for (DotenvEntry entry : dotenv.entries(Dotenv.Filter.DECLARED_IN_ENV_FILE)) {
        String key = entry.getKey();
        m.put(key, resolve(dotenv, key, null));
      }
      return MAPPER.writeValueAsString(m);
    } else {
      return schemaText;
    }
  }

  /**
   * Loads a schema file as an Avro {@link Schema}, dispatching on extension.
   *
   * @return the parsed schema, or {@code null} for unsupported extensions
   */
  public static Schema readAvscSchema(String schemaPath) throws Exception {
    String schemaPathPhysical = schemaPath.split(":")[0];
    String schemaText = slurpFile(schemaPathPhysical);
    if (schemaPathPhysical.endsWith(".avsc")) {
      return parseAvsc(schemaText);
    } else if (schemaPathPhysical.endsWith(".avdl")) {
      return parseAvdlSchema(schemaPath);
    } else if (schemaPathPhysical.endsWith(".sql")) {
      return parseSqlSchema(schemaText);
    } else {
      return null;
    }
  }

  /**
   * Registers {@code schema} under {@code subject} with the first schema-registry
   * URL in the comma-separated {@code kafkaSrs} list.
   *
   * @return the registry-assigned schema id
   * @throws IllegalStateException when the registry responds with a non-2xx status
   */
  public static int registerSchema(Schema schema, String kafkaSrs, String subject) throws Exception {
    OkHttpClient http = new OkHttpClient();

    String url = String.format("%s/subjects/%s/versions", kafkaSrs.split(",")[0], subject);
    MediaType SR_TYPE = MediaType.get("application/vnd.schemaregistry.v1+json");

    Request request = new Request.Builder()
        .url(url)
        .post(RequestBody.create(SR_TYPE, schema.toString()))
        .build();
    try (Response response = http.newCall(request).execute()) {
      String responseStr = response.body().string();
      // Fail loudly on registry errors instead of choking on the error JSON.
      if (!response.isSuccessful())
        throw new IllegalStateException("schema registry error " + response.code() + ": " + responseStr);
      Map<String, Object> srJson = MAPPER.readValue(responseStr, Map.class);
      return (int) srJson.get("id");
    }
  }

  /** True when {@code bs} starts with the gzip magic bytes 0x1f 0x8b. */
  public static boolean isGzip(byte[] bs) {
    // Length guard: the original threw ArrayIndexOutOfBoundsException on
    // payloads shorter than 2 bytes.
    return bs.length >= 2 && (bs[0] == (byte) 0x1f) && (bs[1] == (byte) 0x8b);
  }

  /** True when {@code bs} starts with the zstd frame magic 0x28 0xB5 0x2F 0xFD. */
  public static boolean isZstd(byte[] bs) {
    // BUG FIX: the original unconditionally returned true, which made the
    // pass-through branch of uncompress() unreachable and sent uncompressed
    // payloads into Zstd.decompress.
    return bs.length >= 4
        && bs[0] == (byte) 0x28 && bs[1] == (byte) 0xb5
        && bs[2] == (byte) 0x2f && bs[3] == (byte) 0xfd;
  }

  /**
   * Decompresses gzip or zstd payloads by sniffing their magic bytes; other
   * payloads are returned unchanged.
   */
  public static byte[] uncompress(byte[] bs) throws Exception {
    if (isGzip(bs)) {
      // try-with-resources: the original never closed the GZIPInputStream.
      try (GZIPInputStream in = new GZIPInputStream(new ByteArrayInputStream(bs))) {
        return toByteArray(in);
      }
    } else if (isZstd(bs)) {
      // Heuristic upper bound on the decompressed size; multiplyExact guards
      // against silent int overflow on very large inputs.
      return Zstd.decompress(bs, Math.multiplyExact(bs.length, 50));
    } else {
      return bs;
    }
  }

  /**
   * Reads one datum with a writer/reader schema pair; {@code validSchema} (the
   * reader schema) defaults to {@code schema} when null.
   */
  public static GenericContainer decode(Schema schema, Schema validSchema, Decoder decoder) throws Exception {
    if (validSchema == null)
      validSchema = schema;
    GenericDatumReader<GenericContainer> reader = new GenericDatumReader<GenericContainer>(schema, validSchema);
    return reader.read(null, decoder);
  }

  /** Decodes Avro binary bytes. */
  public static GenericContainer fromBin(Schema schema, Schema validSchema, byte[] bs) throws Exception {
    return decode(schema, validSchema, DecoderFactory.get().binaryDecoder(bs, null));
  }

  /** Decodes Avro JSON text via the lenient {@link ExtendedJsonDecoder}. */
  public static GenericContainer fromJson(Schema schema, Schema validSchema, String json, boolean isDebug)
      throws Exception {
    ExtendedJsonDecoder decoder = new ExtendedJsonDecoder(schema, json);
    if (isDebug)
      decoder.enableDebug();
    return decode(schema, validSchema, decoder);
  }

  /** Convenience: JSON decode with decoder debugging enabled. */
  public static GenericContainer fromJsonDebug(Schema schema, String json) throws Exception {
    return fromJson(schema, null, json, true);
  }

  /** Convenience: JSON decode with reader schema = writer schema. */
  public static GenericContainer fromJson(Schema schema, String json) throws Exception {
    return fromJson(schema, null, json, false);
  }

  /** Decodes possibly-compressed Avro binary bytes. */
  public static GenericContainer fromCompressedBin(Schema schema, Schema validSchema, byte[] bs) throws Exception {
    return fromBin(schema, validSchema, uncompress(bs));
  }

  /** Convenience overload of {@link #fromCompressedBin(Schema, Schema, byte[])}. */
  public static GenericContainer fromCompressedBin(Schema schema, byte[] bs) throws Exception {
    return fromCompressedBin(schema, null, bs);
  }

  /** Decodes possibly-compressed Avro JSON bytes. */
  public static GenericContainer fromCompressedJson(Schema schema, Schema validSchema, byte[] bs) throws Exception {
    return fromJson(schema, validSchema, new String(uncompress(bs), StandardCharsets.UTF_8), false);
  }

  /** Convenience overload of {@link #fromCompressedJson(Schema, Schema, byte[])}. */
  public static GenericContainer fromCompressedJson(Schema schema, byte[] bs) throws Exception {
    return fromCompressedJson(schema, null, bs);
  }

  /** Serializes {@code datum} to Avro JSON text. */
  public static String toJson(Schema schema, GenericContainer datum) throws Exception {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    Encoder jsonEncoder = EncoderFactory.get().jsonEncoder(schema, out);
    GenericDatumWriter<GenericContainer> writer = new GenericDatumWriter<GenericContainer>(schema);
    writer.write(datum, jsonEncoder);
    jsonEncoder.flush();
    // BUG FIX: the JSON encoder emits UTF-8 bytes; decode them as UTF-8 rather
    // than the platform default charset.
    return new String(out.toByteArray(), StandardCharsets.UTF_8);
  }

  /** Decodes a base64-encoded, possibly-compressed Avro binary payload. */
  public static GenericContainer parseBinB64(String schemaPath, String b64) throws Exception {
    // decode(String) is equivalent here (base64 alphabet is ASCII) and avoids
    // the platform-charset getBytes() round trip.
    return fromCompressedBin(readAvscSchema(schemaPath), Base64.getDecoder().decode(b64));
  }

  /** Decodes a base64-encoded, possibly-compressed Avro JSON payload. */
  public static GenericContainer parseJsonB64(String schemaPath, String b64) throws Exception {
    return fromCompressedJson(readAvscSchema(schemaPath), Base64.getDecoder().decode(b64));
  }

  /** Decodes plain Avro JSON text against the schema at {@code schemaPath}. */
  public static GenericContainer parseJsonText(String schemaPath, String json) throws Exception {
    return fromJson(readAvscSchema(schemaPath), json);
  }

  /** Builds a fresh in-memory Flink catalog manager with a default catalog/database. */
  public static CatalogManager mkCatalogManager() {
    return CatalogManager.newBuilder()
        .classLoader(Avro.class.getClassLoader())
        .config(new Configuration())
        .defaultCatalog("default_catalog", new GenericInMemoryCatalog("default_catalog", "default_database"))
        .build();
  }

  /** Builds a planner context for SQL parsing only. */
  public static PlannerContext mkPlannerContext(CatalogManager catalogManager) {
    TableConfig tableConfig = TableConfig.getDefault();
    // NOTE(review): a null FunctionCatalog is tolerated for the parse-only
    // paths used here; registering functions would require a real one.
    FunctionCatalog functionCatalog = null;
    return new PlannerContext(
        tableConfig, functionCatalog, catalogManager,
        asRootSchema(new CatalogManagerCalciteSchema(catalogManager, true)), Collections.emptyList());
  }

  /**
   * Rewrites an iOS SDK bulk payload so that each record and each event is
   * wrapped in its Avro-union branch name ("DcSdkIos", "EventClickIos", ...),
   * keyed off the {@code is_*} marker fields. Non-list payloads and records
   * without {@code is_ios} pass through unchanged.
   */
  @SuppressWarnings("unchecked")
  public static String patchIos(String bulkText) throws Exception {
    Object bulk = MAPPER.readValue(bulkText, Object.class);
    if (!(bulk instanceof List))
      return bulkText;
    List<Object> newBulk = new ArrayList<Object>();
    for (Object one : (List<Object>) bulk) {
      Map<String, Object> oneMap = (Map<String, Object>) one;
      if (!oneMap.containsKey("is_ios")) {
        newBulk.add(oneMap);
        continue;
      }

      // assumes every is_ios record carries an "events" list — TODO confirm
      List<Object> newIosEvents = new ArrayList<Object>();
      for (Map<String, Object> iosEvent : (List<Map<String, Object>>) oneMap.get("events")) {
        Map<String, Object> eventUnion = new HashMap<String, Object>();
        if (iosEvent.containsKey("is_cold_start")) {
          eventUnion.put("EventColdStartIos", iosEvent);
        } else if (iosEvent.containsKey("is_hot_start")) {
          eventUnion.put("EventHotStartIos", iosEvent);
        } else if (iosEvent.containsKey("is_use_end")) {
          eventUnion.put("EventUseEndIos", iosEvent);
        } else if (iosEvent.containsKey("is_view_page")) {
          eventUnion.put("EventViewPageIos", iosEvent);
        } else if (iosEvent.containsKey("is_click")) {
          eventUnion.put("EventClickIos", iosEvent);
        } else if (iosEvent.containsKey("is_custom")) {
          eventUnion.put("EventCustomIos", iosEvent);
        }
        // Unknown event types become an empty union map (original behavior).
        newIosEvents.add(eventUnion);
      }
      oneMap.put("events", newIosEvents);
      Map<String, Object> oneUnion = new HashMap<String, Object>();
      oneUnion.put("DcSdkIos", oneMap);
      newBulk.add(oneUnion);
    }
    return MAPPER.writeValueAsString(newBulk);
  }

  /** Ad-hoc smoke test: wraps each sdk record in a "wechat.wechat" union branch. */
  public static void testWechat() throws Exception {
    Schema schema = readAvscSchema("meta-lib-static/avsc/dc_sdk_push.avsc");
    String line = slurpFile("larluo.json");
    List<Object> jsonCollector = new ArrayList<Object>();
    for (Object sdkRecord : MAPPER.readValue(line, List.class)) {
      Map<String, Object> wechatRecord = new HashMap<String, Object>();
      wechatRecord.put("wechat.wechat", sdkRecord);
      jsonCollector.add(wechatRecord);
    }
    String jsonNew = MAPPER.writeValueAsString(jsonCollector);
    Avro.fromJson(schema, jsonNew);
  }

  /** Manual driver: runs the iOS bulk-patching sample and prints the result. */
  public static void main(String[] args) throws Exception {
    // System.out.println(readRawSchema("meta-lib-static/env/.env")) ;
    // System.out.println(readRawSchema("meta-lib-static/sql/data_pipeline/flink.kafka.ip_dt_dim.sql"))
    // ;
    // System.out.println(readAvscSchema("meta-lib-static/avsc/dc_sdk_push.avsc")) ;
    // System.out.println(readAvscSchema("meta-lib-static/sql/data_pipeline/flink.kafka.ip_dt_dim.sql"))
    // ;

    // System.out.println(readAvscSchema("meta-lib-static/avdl/data_buffer.avdl:dcSdkGenericBulk"))
    // ;

    String iosSample = slurpFile("meta-lib-static/sample/ios.EventViewPage.pretty.json");
    String newIosSample = patchIos(iosSample);

    System.out.println(newIosSample);

    // testWechat() ;

    System.exit(0);
  }
}
