package cn.lsh.hive.udtf;

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * 自定义表值函数,将struct类型转换成表格
 */
/**
 * Custom table-generating function (UDTF) that explodes a single struct-typed
 * argument into one output row, with one column per struct field.
 *
 * <p>Example: {@code SELECT splitStruct(named_struct('a', 1, 'b', 'x')) FROM t}
 * produces columns {@code a} and {@code b}.</p>
 */
public class SplitStructUDTF extends GenericUDTF {

	/** Inspector for the struct argument; set once in {@link #initialize}. */
	private transient StructObjectInspector structInputOI;

	/**
	 * Reusable buffer holding one output row; one slot per struct field.
	 * Reusing the array across rows avoids a per-row allocation.
	 */
	private transient Object[] forwardStructObj;

	/**
	 * Validates that exactly one struct-typed argument was passed and declares
	 * the output schema: one column per struct field, named after the field.
	 *
	 * @param argOI inspector describing the UDTF's argument list
	 * @return inspector describing the output rows (field names and types)
	 * @throws UDFArgumentException if the argument count is not 1 or the
	 *         argument is not a struct
	 */
	@Override
	public StructObjectInspector initialize(StructObjectInspector argOI) throws UDFArgumentException {
		// The UDTF's arguments arrive wrapped in a struct; each field is one argument.
		List<? extends StructField> inputFields = argOI.getAllStructFieldRefs();
		if (inputFields.size() != 1) {
			throw new UDFArgumentException("splitStruct() takes only one argument");
		}
		ObjectInspector udtfInputOI = inputFields.get(0).getFieldObjectInspector();

		if (!(udtfInputOI instanceof StructObjectInspector)) {
			// Use UDFArgumentException (already declared) so Hive reports this as
			// an argument error rather than an opaque runtime failure.
			throw new UDFArgumentException("splitStruct() only accepts a struct argument, got: "
					+ udtfInputOI.getTypeName());
		}
		structInputOI = (StructObjectInspector) udtfInputOI;

		// Each struct field becomes one output column.
		List<? extends StructField> structFields = structInputOI.getAllStructFieldRefs();
		forwardStructObj = new Object[structFields.size()];

		List<String> fieldNames = new ArrayList<>(structFields.size());
		List<ObjectInspector> fieldOIs = new ArrayList<>(structFields.size());
		for (StructField field : structFields) {
			// Field name becomes the output column name; the field's own
			// inspector declares the column type.
			fieldNames.add(field.getFieldName());
			fieldOIs.add(field.getFieldObjectInspector());
		}
		return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
	}

	/**
	 * Emits one output row per input row: the struct's field values become the
	 * row's column values. A null or empty argument array produces no output.
	 *
	 * @param args the UDTF arguments; {@code args[0]} is the struct value
	 * @throws HiveException if forwarding the row fails
	 */
	@Override
	public void process(Object[] args) throws HiveException {
		if (args == null || args.length == 0) {
			return;
		}
		// Extract the struct's field values in field-declaration order.
		List<Object> values = structInputOI.getStructFieldsDataAsList(args[0]);
		// Guard against a malformed row with a different field count than the
		// schema declared in initialize().
		int n = Math.min(values.size(), forwardStructObj.length);
		for (int i = 0; i < n; i++) {
			forwardStructObj[i] = values.get(i);
		}
		// Emit one row of output (analogous to context.write() in MapReduce).
		forward(forwardStructObj);
	}

	/**
	 * No per-query resources are held, so there is nothing to release.
	 *
	 * @throws HiveException never thrown by this implementation
	 */
	@Override
	public void close() throws HiveException {
		// Intentionally empty: no streams or connections to close.
	}
}
