package com.desay.pangoo.itmt.utils;

import java.io.FileInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import java.util.stream.Collectors;

import javax.persistence.EntityManager;
import javax.persistence.Query;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletResponse;
import javax.validation.constraints.NotNull;

import org.hibernate.SQLQuery;
import org.hibernate.transform.Transformers;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.jpa.domain.Specification;
import org.springframework.data.jpa.repository.JpaSpecificationExecutor;

import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.date.DatePattern;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.io.FileUtil;
import cn.hutool.core.io.IoUtil;
import cn.hutool.core.util.RandomUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import cn.hutool.poi.excel.ExcelUtil;
import cn.hutool.poi.excel.ExcelWriter;

/**
 * 分片导出数据到Excel，避免一次性数据过大，产生OOM
 *
 * @Description： 功能描述
 * @author Shouyi.Huang@desay-svautomotive.com on [2021年5月5日下午1:26:45]
 * @Modified By： [修改人] on [修改日期] for [修改说明]
 *
 */
public class ExportUtil<X> {
	private final Log logger = LogFactory.get();
	/** Non-null when the JPA-based constructor was used. */
	private JpaSpecificationExecutor<X> jpaSpecificationExecutor;
	private Specification<X> spec;
	/** Non-null when the native-SQL constructor was used. */
	private EntityManager em;
	private String sql;
	Map<String, Object> sqlParams;
	/** Rows fetched from the database and written per batch. */
	private static final int BATCH_SIZE = 2000;
	/** Maximum rows per sheet; a new sheet is started beyond this. */
	private static final int SHEET_SIZE = 20000;
	/** Total number of data rows written to the workbook so far (across all sheets). */
	private final AtomicInteger currentLine = new AtomicInteger(0);
	// NOTE(review): this pool is never shut down. Harmless for short-lived instances,
	// but consider shutting it down after export() if ExportUtil instances accumulate.
	private final ExecutorService executorService = Executors.newCachedThreadPool();
	private static final int DEFAULTTHREADNUM = 5;
	private static final int MAXTHREADNUM = 10;
	/** Auto-sizing columns is slow, so above this row count not every cell is guaranteed to auto-fit. */
	private static final int AUTOSIZETHRESHOLDNUM = SHEET_SIZE;
	/** Approximate number of rows handled per worker thread (used to size the pool). */
	private static final int PERTHREADROW = 10000;

	/**
	 * Exports the rows matched by a JPA {@link Specification}.
	 *
	 * @param jpaSpecificationExecutor repository able to execute the specification
	 * @param spec filter describing which rows to export
	 */
	public ExportUtil(@NotNull JpaSpecificationExecutor<X> jpaSpecificationExecutor, @NotNull Specification<X> spec) {
		this.jpaSpecificationExecutor = jpaSpecificationExecutor;
		this.spec = spec;
	}

	/**
	 * Exports the rows returned by a native SQL query.
	 *
	 * @param sql native query of the form "SELECT ... FROM ..." (uppercase keywords required)
	 * @param em entity manager used to run the query
	 * @param sqlParams named parameters of the query, may be null
	 */
	public ExportUtil(@NotNull String sql, @NotNull EntityManager em, Map<String, Object> sqlParams) {
		this.em = em;
		this.sqlParams = sqlParams;
		this.sql = sql;
	}

	/**
	 * Runs the full database-to-Excel export and streams the workbook to the client.
	 * Batches are queried and written concurrently; the temp file is always cleaned up,
	 * even when the export fails half-way.
	 *
	 * @param firstRowName title placed in the merged first row of every sheet
	 * @param response HTTP response receiving the workbook
	 * @param process maps one entity/row to the column map written to Excel
	 * @throws IOException if streaming the workbook to the response fails
	 */
	public void export(@NotNull String firstRowName, @NotNull HttpServletResponse response,
			@NotNull Function<? super X, Map<String, Object>> process) throws IOException {
		String tempFile = genTempFile();
		ExcelWriter writer = ExcelUtil.getWriter(tempFile);
		if (Objects.isNull(writer)) {
			return;
		}
		boolean written = false;
		try {
			long count = Optional.ofNullable(jpaSpecificationExecutor).map(x -> x.count(spec))
					.orElseGet(() -> new SqlQueryData(0, 1).count());
			int threadNum = Math.min(MAXTHREADNUM, Math.max((int) count / PERTHREADROW, DEFAULTTHREADNUM));
			Set<Future<?>> submitSet = new HashSet<>(threadNum);
			for (int i = 0; i < count; i += BATCH_SIZE) {
				// Proceed only once at least one worker slot is free.
				waiteSubmit(submitSet, threadNum - 1);
				QueryData queryData = Objects.nonNull(jpaSpecificationExecutor) ? new JpaQueryData(i, BATCH_SIZE)
						: new SqlQueryData(i, BATCH_SIZE);
				// The total row count is handed to the worker so each batch does not
				// re-run the (potentially expensive) count query.
				submitSet.add(executorService.submit(new DB2Excel(queryData, writer, process, firstRowName, count)));
			}
			// Block until every submitted batch has completed.
			waiteSubmit(submitSet, 0);
			if (count <= AUTOSIZETHRESHOLDNUM && count > 0) {
				writer.autoSizeColumnAll();
			}
			written = true;
		} finally {
			// Close first so buffered rows are flushed to the temp file; if the export
			// failed before it could be streamed, drop the orphan temp file here.
			writer.close();
			if (!written) {
				FileUtil.del(tempFile);
			}
		}
		flush(response, tempFile);
	}

	/**
	 * Blocks until at most {@code max} submitted tasks are still running.
	 *
	 * @param submits
	 *            futures of the in-flight export tasks; finished ones are removed
	 * @param max
	 *            maximum number of tasks allowed to remain in flight
	 */
	private void waiteSubmit(Collection<Future<?>> submits, int max) {
		while (submits.size() > max) {
			// Probe a snapshot so removing finished futures cannot throw a
			// ConcurrentModificationException (the original removed while iterating).
			for (Future<?> submit : new ArrayList<>(submits)) {
				try {
					// Short poll: has this task finished yet?
					submit.get(10, TimeUnit.MILLISECONDS);
					submits.remove(submit);
					break;
				} catch (TimeoutException e) {
					// Still running — probe the next task.
				} catch (ExecutionException e) {
					logger.warn("等待查询导出线程过程中出现异常，不再查询这个线程：{}", e.getMessage());
					submits.remove(submit);
				} catch (InterruptedException e) {
					logger.warn("等待查询导出线程过程中收到中断信号：{}", e.getMessage());
					// Restore the interrupt flag and stop waiting; looping on would spin
					// because get() re-throws immediately on an interrupted thread.
					Thread.currentThread().interrupt();
					return;
				}
			}
		}
	}

	/**
	 * Streams the generated workbook file to the HTTP response and deletes it afterwards.
	 *
	 * @param response HTTP response receiving the file
	 * @param outFile path of the workbook on local disk
	 * @throws IOException if copying the file into the response stream fails
	 */
	private void flush(HttpServletResponse response, String outFile) throws IOException {
		response.reset();
		response.setCharacterEncoding("utf-8");
		// Random download name so the caller never sees the local temp-file name.
		String fileName = RandomUtil.randomNumbers(15) + ".xls";
		response.setHeader("Content-Disposition", "attachment;filename=" + fileName);
		response.setContentType("application/octet-stream");
		try (FileInputStream fis = new FileInputStream(outFile); ServletOutputStream out = response.getOutputStream()) {
			IoUtil.copy(fis, out);
		} catch (IOException e) {
			logger.warn("把文件:{} 输出到HttpServletResponse失败：{}", outFile, e.getMessage());
			throw e;
		} finally {
			// The temp file is no longer needed whether or not streaming succeeded.
			FileUtil.del(outFile);
		}
	}

	/**
	 * Worker task: queries one batch window and appends it to the shared {@link ExcelWriter}.
	 * NOTE(review): batches may complete out of order, so row order in the workbook is not
	 * guaranteed to match the query order — confirm this is acceptable for callers.
	 */
	class DB2Excel implements Runnable {
		final QueryData queryData;
		final ExcelWriter excelWriter;
		final Function<? super X, Map<String, Object>> process;
		final String firstRowName;
		/** Total rows of the whole export, hoisted so every batch does not re-run the count query. */
		final long totalElements;

		/** Backward-compatible constructor: falls back to counting on demand. */
		DB2Excel(@NotNull QueryData queryData, @NotNull ExcelWriter excelWriter,
				@NotNull Function<? super X, Map<String, Object>> process, @NotNull String firstRowName) {
			this(queryData, excelWriter, process, firstRowName, queryData.count());
		}

		DB2Excel(@NotNull QueryData queryData, @NotNull ExcelWriter excelWriter,
				@NotNull Function<? super X, Map<String, Object>> process, @NotNull String firstRowName,
				long totalElements) {
			this.queryData = queryData;
			this.excelWriter = excelWriter;
			this.process = process;
			this.firstRowName = firstRowName;
			this.totalElements = totalElements;
		}

		@Override
		public void run() {
			logger.info("开始查询导出：{}", queryData);
			List<Map<String, Object>> content = queryData.query().stream().map(process).collect(Collectors.toList());
			int contentSize = content.size();
			// The writer is shared by all workers; every write must be serialized on it.
			synchronized (excelWriter) {
				if (0 == currentLine.get() % SHEET_SIZE && CollUtil.isNotEmpty(content)) {
					// Starting a new sheet: auto-size the finished one first (small exports only).
					if (currentLine.get() > 0 && totalElements <= AUTOSIZETHRESHOLDNUM) {
						excelWriter.autoSizeColumnAll();
					}
					excelWriter.setSheet("sheet" + (currentLine.get() / SHEET_SIZE + 1));
					if (content.get(0).size() > 1) {
						// Merged title row spanning all columns of the first record.
						excelWriter.merge(content.get(0).size() - 1, firstRowName);
					}
					if (totalElements > AUTOSIZETHRESHOLDNUM) {
						// Large export: auto-size using the first row only, then append the
						// remainder without headers to keep the export fast.
						List<Object> firstLine = new ArrayList<>(1);
						firstLine.add(content.get(0));
						excelWriter.write(firstLine, true);
						excelWriter.autoSizeColumnAll();
						content.remove(firstLine.get(0));
						excelWriter.write(content, false);
					} else {
						excelWriter.write(content, true);
					}
				} else {
					excelWriter.write(content, false);
				}
				currentLine.addAndGet(contentSize);
			}
		}
	}

	/**
	 * Base class for one paged query: an offset/limit window over the result set.
	 */
	abstract class QueryData {
		protected int offset;
		protected int limit;

		/**
		 * @param offset
		 *            index of the first row of this window
		 * @param limit
		 *            maximum number of rows to fetch
		 */
		QueryData(int offset, int limit) {
			this.offset = offset;
			this.limit = limit;
		}

		/**
		 * @return total number of rows matching the query (all windows)
		 */
		abstract long count();

		/**
		 * @return the rows of this window
		 */
		public abstract List<X> query();

		@Override
		public String toString() {
			return "QueryData{" + "offset=" + offset + ", limit=" + limit + '}';
		}
	}

	/**
	 * Window query backed by the JPA specification.
	 */
	class JpaQueryData extends QueryData {

		JpaQueryData(int offset, int limit) {
			super(offset, limit);
		}

		@Override
		long count() {
			return jpaSpecificationExecutor.count(spec);
		}

		@Override
		public List<X> query() {
			// NOTE(review): assumes every exported entity has a "createTime" attribute;
			// the sort keeps page windows stable across the concurrent batch queries.
			Pageable pageable = PageRequest.of(offset / BATCH_SIZE, BATCH_SIZE, Sort.by("createTime"));
			Page<X> page = jpaSpecificationExecutor.findAll(spec, pageable);
			return page.getContent();
		}
	}

	/**
	 * Window query backed by the configured native SQL statement.
	 */
	class SqlQueryData extends QueryData {
		/** Accepted shape of the configured statement (uppercase SELECT/FROM required). */
		private static final String SQL_REG = "SELECT .* FROM .*";

		/**
		 * Rewrites the configured SELECT into a COUNT query over the same FROM/WHERE part.
		 * NOTE(review): cuts at the first "FROM"; a subquery inside the select list would
		 * break this rewrite — verify against the SQL statements actually used.
		 */
		String convertCountSql() {
			if (!sql.matches(SQL_REG)) {
				logger.warn("非法的SQL查询语句:{}", sql);
				throw new IllegalArgumentException("不能查询到有效的数据！");
			}
			int fromIdx = sql.indexOf("FROM");
			return "SELECT count(1) AS total " + sql.substring(fromIdx);
		}

		SqlQueryData(int offset, int limit) {
			super(offset, limit);
		}

		@Override
		long count() {
			Query query = em.createNativeQuery(convertCountSql());
			if (Objects.nonNull(sqlParams)) {
				sqlParams.forEach(query::setParameter);
			}
			// count(1) yields a single numeric value whose concrete type varies by driver,
			// so go through the string representation.
			Object singleResult = query.getSingleResult();
			return Long.parseLong(singleResult.toString());
		}

		@SuppressWarnings("deprecation")
		@Override
		public List<X> query() {
			Query query = em.createNativeQuery(sql);
			query.setMaxResults(BATCH_SIZE);
			query.setFirstResult(offset);
			if (Objects.nonNull(sqlParams)) {
				sqlParams.forEach(query::setParameter);
			}
			// Return each row as a column-name -> value map instead of Object[].
			query.unwrap(SQLQuery.class).setResultTransformer(Transformers.ALIAS_TO_ENTITY_MAP);
			return query.getResultList();
		}

	}

	/**
	 * Builds a unique temp-file path for the workbook (millisecond timestamp + random suffix).
	 *
	 * @return path of a not-yet-existing .xls file in the platform temp directory
	 */
	private String genTempFile() {
		String tmpDir;
		if (System.getProperty("os.name").toLowerCase().contains("linux")) {
			tmpDir = "/tmp/";
		} else {
			tmpDir = System.getProperty("java.io.tmpdir");
		}
		// java.io.tmpdir is not guaranteed to end with a separator on every platform.
		if (!tmpDir.endsWith("/") && !tmpDir.endsWith("\\")) {
			tmpDir += "/";
		}
		return String.format("%s%s-%d.xls", tmpDir, DateUtil.date().toString(DatePattern.PURE_DATETIME_MS_PATTERN),
				RandomUtil.randomInt(1000));
	}
}
