package com.airbnb.mixtape.hive.udf;

import com.airbnb.jitney.utils.parser.Json2ThriftParser;
import com.airbnb.mixtape.hive.udf.utility.MixtapeUtil;
import com.airbnb.mixtape.hive.udf.utility.Serializer;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.thrift.TBase;

@Description(
  name = "json_to_jitney",
  value =
      "_FUNC_((STRING) jitney_schema, (JSON STRING) content) - "
          + "Create Jitney event based on the given schema and json formatted fields and values.",
  extended =
      "Example:\n"
          + "> SELECT _FUNC_("
          + "'jitney_event_schema', "
          + "to_json()) FROM ticketEvent;\n"
)
public class JsonToJitney extends GenericUDF {

  /** Shared JSON parser; ObjectMapper is thread-safe and expensive to build, so cache one. */
  private static final ObjectMapper MAPPER = new ObjectMapper();

  /**
   * Validates the argument count and declares the UDF's return type.
   *
   * @param arguments inspectors for the call-site arguments (schema name, JSON string)
   * @return inspector for the Java {@code String} this UDF returns (base64-encoded event)
   * @throws UDFArgumentException if the argument count is not exactly two
   */
  @Override
  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 2) {
      throw new UDFArgumentLengthException(
          "Two arguments (i.e. schema name and json formatted field values) are expected.");
    }
    return PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  }

  /**
   * Builds a Jitney Thrift event from a schema name and JSON-formatted field values, and
   * returns it base64-encoded.
   *
   * @param args {@code args[0]} is the Jitney schema name, {@code args[1]} the JSON payload
   * @return the base64-encoded serialized event; an error message when the JSON is
   *     malformed; or {@code null} when either argument (or its column value) is NULL
   * @throws HiveException on any unexpected failure while parsing or serializing
   */
  @Override
  public Object evaluate(DeferredObject[] args) throws HiveException {
    if (args == null || args.length != 2 || args[0] == null || args[1] == null) {
      return null;
    }
    String json = "";
    String schema = "";
    try {
      // A non-null DeferredObject can still yield null for a SQL NULL column value;
      // follow the usual UDF convention and propagate NULL instead of throwing.
      Object schemaValue = args[0].get();
      Object jsonValue = args[1].get();
      if (schemaValue == null || jsonValue == null) {
        return null;
      }
      schema = schemaValue.toString();
      json = jsonValue.toString();
      JsonNode payload = MAPPER.readTree(json);
      TBase jitneyEvent = Json2ThriftParser.parse(payload, schema);
      return Serializer.toBase64(jitneyEvent);
    } catch (JsonProcessingException e) {
      MixtapeUtil.logExceptionFromUDF(e);
      // Fixed: the original used MessageFormat-style {0}/{1} placeholders with
      // String.format, which printed the braces literally and dropped both values.
      return String.format("JsonProcessingException for schema: %s, payload: %s ", schema, json);
    } catch (Exception e) {
      MixtapeUtil.logExceptionFromUDF(e);
      throw new HiveException("Error occurred with udf.", e);
    }
  }

  /**
   * Renders this call for EXPLAIN plans. The original returned {@code null}, which shows
   * as nothing in query plans; return the conventional {@code name(arg, ...)} form.
   */
  @Override
  public String getDisplayString(String[] args) {
    return "json_to_jitney(" + String.join(", ", args) + ")";
  }
}
