package com.hlkj.pay.controller.pay;
/*
 * Hlpay-Plus aggregate payment system.
 * Copyright (c) 2024-2025 Hlpay Team. The Hlpay Team reserves the right of final interpretation.
 */

import java.util.*;
import java.util.stream.Collectors;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.alibaba.excel.context.AnalysisContext;
import com.alibaba.excel.event.AnalysisEventListener;
/**
 * @author HlpayTeam
 * @date 2024/09/25 9:54
 */
public class SecondLevelDictListener extends AnalysisEventListener<SecondLevelDict> {
	private static final Logger LOGGER = LoggerFactory.getLogger(SecondLevelDictListener.class);

	/**
	 * Flush every BATCH_COUNT rows (raise to e.g. 3000 in production) and then
	 * clear the buffer, so tens of thousands of rows never sit in memory at
	 * once and cause an OOM.
	 */
	private static final int BATCH_COUNT = 5;

	/** Rows buffered since the last flush. */
	final List<SecondLevelDict> list = new ArrayList<>();

	/**
	 * Level-1 (parent) statements already emitted. Kept at instance scope so a
	 * parent group that spans two batches is still emitted only once.
	 */
	private final Set<String> parentDictSql = new HashSet<>();
	/** Level-2 (child) statements already emitted. */
	private final Set<String> secondDictSql = new HashSet<>();

	private final String channelCode;
	private final String dictKey;

	/**
	 * @param channelCode channel code written into every generated insert
	 * @param dictKey     dictionary key written into every generated insert
	 */
	public SecondLevelDictListener(String channelCode, String dictKey) {
		this.channelCode = channelCode;
		this.dictKey = dictKey;
	}

	/**
	 * Called once per parsed row. Buffers the row and flushes once BATCH_COUNT
	 * rows have accumulated. (The original left the flush commented out, so the
	 * buffer grew without bound and the batching was dead code.)
	 *
	 * @param data    one row value. It is the same as {@link AnalysisContext#readRowHolder()}
	 * @param context EasyExcel parse context (unused)
	 */
	@Override
	public void invoke(SecondLevelDict data, AnalysisContext context) {
		list.add(data);
		if (list.size() >= BATCH_COUNT) {
			processSql();
			// Flushed: clear the buffer so the rows can be garbage collected.
			list.clear();
		}
	}

	/**
	 * Called once after the whole sheet has been parsed. Flushes whatever rows
	 * are still in the buffer so the tail of the sheet is not lost.
	 *
	 * @param context EasyExcel parse context (unused)
	 */
	@Override
	public void doAfterAllAnalysed(AnalysisContext context) {
		LOGGER.info("所有数据解析完成！");
		processSql();
		list.clear();
	}

	/**
	 * Generates and logs insert statements for the buffered rows: one level-1
	 * row per distinct parent name, then one level-2 row per buffered entry.
	 * Statements identical to ones emitted by an earlier flush are skipped.
	 *
	 * NOTE(review): {@code Collectors.groupingBy} throws a NullPointerException
	 * when a row's parent name is null — assumes the sheet always fills that
	 * column; confirm against the import template.
	 */
	public void processSql() {
		Map<String, List<SecondLevelDict>> byParent = list.stream()
				.collect(Collectors.groupingBy(SecondLevelDict::getParentDictName));
		// Level-1 rows: value and desc are both the parent name itself.
		byParent.keySet().forEach(parentName ->
				emit(parentDictSql, buildInsert(null, 1, parentName, parentName)));
		// Level-2 rows: each entry linked to its parent via parent_dict_key.
		byParent.values().forEach(rows -> rows.forEach(row ->
				emit(secondDictSql, buildInsert(row.getParentDictName(), 2, row.getValue(), row.getDesc()))));
	}

	/** Logs the statement unless an identical one was already emitted. */
	private static void emit(Set<String> seen, String sql) {
		if (seen.add(sql)) {
			LOGGER.info(sql);
		}
	}

	/**
	 * Builds one insert statement. A null {@code parentDictKey} means a level-1
	 * row, for which the parent_dict_key column is omitted.
	 */
	private String buildInsert(String parentDictKey, int level, String value, String desc) {
		StringBuilder sb = new StringBuilder("insert into t_pay_channel_dict(channel_code,dict_key,");
		if (parentDictKey != null) {
			sb.append("parent_dict_key,");
		}
		sb.append("dict_level,dict_value,dict_desc) value (");
		sb.append('\'').append(esc(channelCode)).append("','").append(esc(dictKey)).append("',");
		if (parentDictKey != null) {
			sb.append('\'').append(esc(parentDictKey)).append("',");
		}
		sb.append(level);
		sb.append(",'").append(esc(value)).append("','").append(esc(desc)).append("');");
		return sb.toString();
	}

	/**
	 * Escapes single quotes for SQL string literals; the original raw
	 * concatenation produced broken (and injectable) SQL for any cell value
	 * containing a quote. A null value renders as the text "null", matching the
	 * original StringBuilder.append(null) behavior.
	 */
	private static String esc(String v) {
		return String.valueOf(v).replace("'", "''");
	}
}
