package com.dtdream.emr;

import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.udf.generic.AbstractGenericUDAFResolver;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Hive UDAF that concatenates the string values of a column across rows,
 * separated by single spaces. NULL input values are skipped.
 *
 * <p>Supports all aggregation modes: map-side partial results are emitted as
 * plain strings and merged reduce-side in encounter order.
 */
@Description("_FUNC_(col) - concatenate the string values of col across rows")
public class GenericUDAFConcat extends AbstractGenericUDAFResolver {

  private static final Logger LOG = LoggerFactory.getLogger(GenericUDAFConcat.class);

  /**
   * Validates the argument list and returns the evaluator.
   *
   * @param parameters type info for each UDAF argument; exactly one STRING
   *     primitive is accepted
   * @throws SemanticException (as {@link UDFArgumentTypeException}) if the
   *     argument count or type is wrong
   */
  @Override
  public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
    if (parameters.length != 1) {
      throw new UDFArgumentTypeException(parameters.length - 1,
          "Exactly one argument is expected.");
    }

    if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
      // Message corrected: only STRING is accepted, not "integral" types.
      throw new UDFArgumentTypeException(0,
          "Only string type arguments are accepted but "
              + parameters[0].getTypeName() + " is passed.");
    }

    switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
      case STRING:
        return new GenericUDAFConcatEvaluator();
      default:
        throw new UDFArgumentTypeException(0,
            "Only string type arguments are accepted but "
                + parameters[0].getTypeName() + " is passed.");
    }
  }

  /**
   * Evaluator that accumulates values into a {@link StringBuilder}, inserting
   * a single space between elements (no leading or trailing separator).
   */
  public static class GenericUDAFConcatEvaluator extends GenericUDAFEvaluator {
    // In PARTIAL1/COMPLETE this inspects the original input column; in
    // PARTIAL2/FINAL it inspects the partial-aggregate string produced by
    // terminatePartial(). Both are string-compatible primitives, so a single
    // field suffices.
    private PrimitiveObjectInspector inputOI;

    @Override
    public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
      super.init(m, parameters);
      inputOI = (PrimitiveObjectInspector) parameters[0];
      // Both the partial and the final result are plain Java strings.
      return PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    }

    /** Aggregation buffer: builds the concatenation incrementally. */
    static class ConcatAggBuffer extends AbstractAggregationBuffer {
      // StringBuilder avoids the O(n^2) cost of repeated String concatenation.
      final StringBuilder str = new StringBuilder();
    }

    @Override
    public AggregationBuffer getNewAggregationBuffer() throws HiveException {
      return new ConcatAggBuffer();
    }

    @Override
    public void reset(AggregationBuffer agg) throws HiveException {
      // Reuse the builder's backing array instead of allocating a new one.
      ((ConcatAggBuffer) agg).str.setLength(0);
    }

    @Override
    public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
      if (parameters[0] != null) {
        String value = PrimitiveObjectInspectorUtils.getString(parameters[0], inputOI);
        append((ConcatAggBuffer) agg, value);
        // debug, not info: this fires once per input row and logs the whole
        // growing buffer, which would flood the logs at info level.
        LOG.debug("Concat: {}", ((ConcatAggBuffer) agg).str);
      }
    }

    @Override
    public Object terminatePartial(AggregationBuffer agg) throws HiveException {
      // BUG FIX: previously returned null, so any plan with map-side partial
      // aggregation silently dropped all data. Emit the partial string.
      return ((ConcatAggBuffer) agg).str.toString();
    }

    @Override
    public void merge(AggregationBuffer agg, Object partial) throws HiveException {
      // BUG FIX: previously a no-op, so FINAL mode always produced "".
      // Append the partial string from an upstream task, skipping empties so
      // a partition with no non-null rows contributes no separator.
      if (partial != null) {
        String partialStr = PrimitiveObjectInspectorUtils.getString(partial, inputOI);
        if (!partialStr.isEmpty()) {
          append((ConcatAggBuffer) agg, partialStr);
        }
      }
    }

    @Override
    public Object terminate(AggregationBuffer agg) throws HiveException {
      return ((ConcatAggBuffer) agg).str.toString();
    }

    // Appends a value, inserting the separator only between elements.
    // BUG FIX: the original prefixed every value with " ", leaving a
    // spurious leading space on the result.
    private void append(ConcatAggBuffer buf, String value) {
      if (buf.str.length() > 0) {
        buf.str.append(' ');
      }
      buf.str.append(value);
    }
  }
}