package rpc.core.plugin.impl;

import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.io.File;
import java.nio.charset.StandardCharsets;
import java.time.LocalDate;
import java.util.Arrays;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import org.apache.commons.io.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationContext;
import rpc.bootstrap.spring.SpringBootstrap;
import rpc.core.InitClose;
import rpc.core.plugin.DumpPlugin;
import rpc.utils.DateUtils;
import rpc.utils.ExceptionUtils;
import rpc.utils.JsonUtil;

/**
 * Plugin that periodically collects the stack traces of every live thread in the JVM
 * (via all registered {@link DumpPlugin} beans) and appends them, JSON-encoded, to a
 * per-day log file under {@code /opt/data/<appName>/dump/}.
 *
 * <p>Lifecycle: {@link #init()} starts a single-threaded scheduler; {@link #close()}
 * shuts it down. {@link #start()} is a no-op.
 */
public class ThreadDumpPlugin implements InitClose, DumpPlugin {

  private static final Logger logger = LoggerFactory.getLogger(ThreadDumpPlugin.class);
  // Spring context used to discover every DumpPlugin bean at dump time.
  ApplicationContext context;
  // Single-threaded scheduler driving the periodic dump; created in init(), stopped in close().
  ScheduledExecutorService executorService;

  public ThreadDumpPlugin(ApplicationContext applicationContext) {
    this.context = applicationContext;
  }

  /**
   * Returns a snapshot of all live threads in the JVM.
   *
   * <p>Walks the thread-group tree up to the root group, then enumerates its threads.
   * Because {@link ThreadGroup#activeCount()} is only an estimate and
   * {@link ThreadGroup#enumerate(Thread[])} silently truncates when the buffer is too
   * small, the buffer is grown and the enumeration retried until it has slack.
   *
   * @return an array sized exactly to the number of threads captured
   */
  public static Thread[] findAllThreads() {
    ThreadGroup group = Thread.currentThread().getThreadGroup();
    ThreadGroup topGroup = group;
    // Walk up the thread-group tree to reach the root group.
    while (group != null) {
      topGroup = group;
      group = group.getParent();
    }
    // activeCount() is an estimate; start with double that (at least 2) and grow the
    // buffer whenever enumerate() fills it completely, since a full buffer means
    // threads may have been dropped.
    int estimatedSize = Math.max(topGroup.activeCount(), 1) * 2;
    Thread[] slackList = new Thread[estimatedSize];
    int actualSize;
    while ((actualSize = topGroup.enumerate(slackList)) >= slackList.length) {
      slackList = new Thread[slackList.length * 2];
    }
    // Trim to the exact number of threads captured.
    return Arrays.copyOf(slackList, actualSize);
  }

  /**
   * Captures the current stack trace of every live thread.
   *
   * @return map keyed by {@code "<id>|<name>|<state>"} with a comma-joined
   *     {@code "<class>|<method>|<line>"} trace string as the value
   */
  @Override
  public Map<String, Object> dump() {
    Thread[] threads = findAllThreads();
    Map<String, Object> dumpMapTmp = new LinkedHashMap<>();
    for (Thread t : threads) {
      String traceLine = Arrays.stream(t.getStackTrace())
          .map(c -> c.getClassName() + "|" + c.getMethodName() + "|" + c.getLineNumber())
          .collect(Collectors.joining(","));
      dumpMapTmp.put(t.getId() + "|" + t.getName() + "|" + t.getState(), traceLine);
    }
    return dumpMapTmp;
  }

  /**
   * Starts the dump scheduler: first run after 5 seconds, then every 30 minutes.
   */
  @Override
  public void init() {
    executorService = new ScheduledThreadPoolExecutor(1,
        new ThreadFactoryBuilder().setNameFormat("query-client-%d").build());
    executorService.scheduleAtFixedRate(this::readDump, 5, 60 * 30, TimeUnit.SECONDS);
  }

  /**
   * Collects the output of every {@link DumpPlugin} bean, serializes it to JSON, and
   * appends a timestamped line to today's dump file. Exceptions are logged but never
   * propagated, so a single failed run does not kill the scheduler.
   */
  private void readDump() {
    try {
      Map<String, Object> dump = new LinkedHashMap<>();
      Map<String, DumpPlugin> beans = this.context.getBeansOfType(DumpPlugin.class);
      beans.forEach((k, v) -> dump.put(k, v.dump()));
      String data = JsonUtil.toJson(dump);
      FileUtils
          .writeStringToFile(new File("/opt/data/" + SpringBootstrap.getInstance().getRpcApp().getAppName() + "/dump/" + LocalDate.now().toString() + ".log")
              , DateUtils.format(new Date()) + "\t" + data + "\r\n", StandardCharsets.UTF_8, true);
    } catch (Exception e) {
      // Pass the throwable as the last argument so SLF4J records the full stack trace.
      logger.error("thread dump failed", e);
    }
  }

  @Override
  public void start() {
    // No-op: scheduling is started in init().
  }

  /**
   * Stops the dump scheduler; safe to call even if {@link #init()} never ran.
   */
  @Override
  public void close() {
    if (executorService != null) {
      executorService.shutdown();
    }
  }
}
