package com.seago.iceberg.config;

import cn.hutool.core.util.StrUtil;
import jakarta.annotation.PostConstruct;
import jakarta.persistence.Column;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import org.apache.iceberg.PartitionSpec;
import org.apache.iceberg.Schema;
import org.apache.iceberg.catalog.Namespace;
import org.apache.iceberg.catalog.TableIdentifier;
import org.apache.iceberg.hive.HiveCatalog;
import org.apache.iceberg.types.Type;
import org.apache.iceberg.types.Types;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.DependsOn;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.stereotype.Component;

import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import static org.apache.iceberg.types.Types.NestedField.required;

/**
 * Initializes an Apache Iceberg table (via the Hive catalog) for every
 * registered JPA entity at application startup and caches the tables for
 * lookup by entity class.
 *
 * @Company: 中国船舶集团海舟系统技术有限公司-武汉
 * @author: yangs
 * @since: 2023-05-06 17:40
 * @version: 1.0
 **/
@Component
@DependsOn("entityScanner")
public class IcebergInit {

	private static final Logger logger = LoggerFactory.getLogger(IcebergInit.class);

	/**
	 * Cache of entity class -> Iceberg table, populated once in
	 * {@link #loadIcebergTable()} at startup. The raw-keyed {@code Map<Class, ...>}
	 * type is kept as-is for binary/source compatibility with existing callers.
	 */
	public static Map<Class, org.apache.iceberg.Table> tableMap = new ConcurrentHashMap<>();

	@Autowired
	private EntityRegistry entityRegistry;

	@Autowired
	private HiveCatalog hiveCatalog;

	@Autowired
	IcebergConfiguration icebergConfiguration;

	/**
	 * Looks up the cached Iceberg table for the given entity class.
	 *
	 * @param clazz the JPA entity class used as the cache key
	 * @return the cached table, or {@code null} if the class was never registered
	 */
	public static org.apache.iceberg.Table getIcebergTable(Class<?> clazz) {
		// Map.get already returns null for absent keys; the previous explicit
		// null-check-then-return was redundant.
		return tableMap.get(clazz);
	}

	/**
	 * Startup hook: creates (or loads) the Iceberg table for every entity known
	 * to the {@link EntityRegistry} and caches it in {@link #tableMap}.
	 */
	@PostConstruct
	public void loadIcebergTable() {
		Collection<Class<?>> entities = entityRegistry.getAllEntities();
		for (Class<?> entity : entities) {
			tableMap.put(entity, buildTable(entity));
		}
		logger.info("loadIcebergTable success");
	}

	/**
	 * Creates the Iceberg table for one entity, or loads it if it already exists
	 * in the catalog. The namespace (database) is created on demand.
	 *
	 * @param entity the JPA entity class; must carry {@code @Table(name = ...)}
	 * @return the created or loaded Iceberg table
	 * @throws IllegalStateException if the entity has no usable {@code @Table} name
	 */
	private org.apache.iceberg.Table buildTable(Class<?> entity) {
		Table tableAnnotation = AnnotationUtils.findAnnotation(entity, Table.class);
		// Fail fast with a clear message instead of an NPE when @Table is missing
		// or its name is blank.
		if (tableAnnotation == null || StrUtil.isEmpty(tableAnnotation.name())) {
			throw new IllegalStateException(
					"Entity " + entity.getName() + " must declare @Table with a non-empty name");
		}
		String tableName = tableAnnotation.name();
		// Namespace corresponds to the database in the Hive catalog.
		Namespace namespace = Namespace.of(icebergConfiguration.getNamespace());
		TableIdentifier tableIdentifier = TableIdentifier.of(namespace, tableName);
		// Derive the Iceberg schema from the entity's annotated fields.
		Schema schema = buildSchema(entity);
		if (!hiveCatalog.namespaceExists(namespace)) {
			hiveCatalog.createNamespace(namespace);
		}
		// Create the table on first run, otherwise load the existing one.
		org.apache.iceberg.Table table;
		if (!hiveCatalog.tableExists(tableIdentifier)) {
			table = hiveCatalog.createTable(tableIdentifier, schema, PartitionSpec.unpartitioned());
		} else {
			table = hiveCatalog.loadTable(tableIdentifier);
		}
		return table;
	}

	/**
	 * Builds an Iceberg schema from the entity's declared fields. Only fields
	 * annotated with {@code @Column} or {@code @Id} become columns; all others
	 * are skipped. (Previously skipped fields left {@code null} holes in the
	 * column array handed to {@link Schema}, crashing startup for any entity
	 * with an unannotated field.)
	 */
	private static Schema buildSchema(Class<?> entity) {
		Field[] fields = entity.getDeclaredFields();
		List<Types.NestedField> columns = new ArrayList<>(fields.length);
		for (Field field : fields) {
			Column column = field.getAnnotation(Column.class);
			if (column != null) {
				// @Column wins; a blank name falls back to the field name below.
				columns.add(getNestedField(field, columns.size() + 1, column.name()));
			} else if (field.getAnnotation(Id.class) != null) {
				// @Id fields always use the Java field name as the column name.
				columns.add(getNestedField(field, columns.size() + 1, field.getName()));
			}
		}
		return new Schema(columns);
	}

	/**
	 * Maps a single Java field to a required Iceberg column.
	 *
	 * @param field      the entity field to map
	 * @param fieldId    the 1-based Iceberg field id (must be unique within the schema)
	 * @param columnName the column name from the annotation; blank falls back to
	 *                   the Java field name
	 */
	private static Types.NestedField getNestedField(Field field, int fieldId, String columnName) {
		// An empty annotation value means "use the Java field name".
		if (StrUtil.isEmpty(columnName)) {
			columnName = field.getName();
		}
		// Map the Java type to an Iceberg type.
		Type nestedType;
		switch (field.getType().getName()) {
			case "java.lang.Long":
				nestedType = Types.LongType.get();
				break;
			case "java.lang.Integer":
				nestedType = Types.IntegerType.get();
				break;
			default:
				// NOTE(review): every other type (primitives, Double, Boolean, dates)
				// maps to String. Widening this mapping now would diverge from the
				// schemas of tables already created with it — confirm before changing.
				nestedType = Types.StringType.get();
		}
		return required(fieldId, columnName, nestedType);
	}
}
