package rpc.netty.tcp;

import com.google.common.util.concurrent.ThreadFactoryBuilder;
import io.netty.bootstrap.Bootstrap;
import io.netty.channel.Channel;
import io.netty.channel.ChannelDuplexHandler;
import io.netty.channel.ChannelHandler.Sharable;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOption;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.epoll.EpollEventLoopGroup;
import io.netty.channel.epoll.EpollSocketChannel;
import io.netty.channel.group.ChannelGroup;
import io.netty.channel.group.ChannelGroupFuture;
import io.netty.channel.group.DefaultChannelGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.util.concurrent.GlobalEventExecutor;
import java.net.InetSocketAddress;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import rpc.core.FlowReq;
import rpc.core.FlowRes;
import rpc.core.InitClose;
import rpc.core.StartStop;
import rpc.core.registry.RegistryManager;
import rpc.netty.tcp.entity.MessageHeader;
import rpc.netty.tcp.entity.RpcMessage;

/*********
 *
 *
 * @author 197
 * @date 2020/6/21 0:47
 *
 **/

@Sharable
public class NettyClient extends ChannelDuplexHandler implements InitClose, StartStop {

  /**
   * Maps a request sequence id to the one-slot queue the caller blocks on while waiting
   * for its response. Entries are registered by {@link #writeAndFlush} and completed by
   * {@link #channelRead}.
   *
   * <p>NOTE(review): entries are never removed in this class — presumably the consumer
   * removes its entry after taking the response; if not, this map leaks. Verify against
   * the caller.
   */
  public volatile static Map<String, BlockingQueue<RpcMessage>> messageQueueMap = new ConcurrentHashMap<>();

  private final Logger logger = LoggerFactory.getLogger(NettyClient.class);
  // Connect timeout, in milliseconds.
  private final int connectTimeout = 5000;
  // Not referenced in this class; presumably a heartbeat interval — TODO confirm usage.
  private final int pingSeconds = 60;
  // Maximum inbound frame size accepted by the length-field decoder.
  private final int maxPackageSize = 1000000;
  private final String host;
  private final int port;
  private final String id;
  Channel channel;
  private EventLoopGroup workerGroup;
  private Bootstrap bootstrap;
  // 0 means "use Runtime.availableProcessors()" (resolved in init()).
  private int workerThreads = 0;
  // Native epoll transport toggle; only honored on Linux (see init()).
  private boolean nativeNetty = false;

  /**
   * Creates a client from a {@code host:port} address string.
   *
   * @param direct remote address in {@code host:port} form, must not be null
   * @param id logical client id
   * @throws IllegalArgumentException if {@code direct} is not in {@code host:port} form
   * @throws NumberFormatException if the port part is not a valid integer
   */
  public NettyClient(String direct, String id) {
    Objects.requireNonNull(direct, "NettyClient direct is null");
    String[] split = direct.split(":");
    // BUGFIX: validate the address shape instead of failing later with an opaque
    // ArrayIndexOutOfBoundsException on malformed input.
    if (split.length != 2) {
      throw new IllegalArgumentException("NettyClient direct must be host:port, got: " + direct);
    }
    this.host = split[0];
    this.port = Integer.parseInt(split[1]);
    this.id = id;
  }

  /**
   * Creates a client from an explicit host and port.
   *
   * @param id logical client id
   * @param host remote host
   * @param port remote port
   */
  public NettyClient(String id, String host, int port) {
    this.id = id;
    this.host = host;
    this.port = port;
  }

  /** Returns the logical client id. */
  public String getId() {
    return id;
  }

  /**
   * Builds the event loop group and the {@link Bootstrap} (pipeline, socket options).
   * Must be called before {@link #start()}.
   */
  @Override
  public void init() {
    if (this.workerThreads == 0) {
      this.workerThreads = Runtime.getRuntime().availableProcessors();
    }

    // Native epoll transport is only available on Linux; downgrade the flag elsewhere.
    String osName = System.getProperty("os.name");
    this.nativeNetty = this.nativeNetty && osName != null && osName.toLowerCase().contains("linux");
    ThreadFactory workThreadFactory = new ThreadFactoryBuilder()
        .setNameFormat("rpc_nettyClient_worker-%d").build();
    if (this.nativeNetty) {
      this.workerGroup = new EpollEventLoopGroup(this.workerThreads, workThreadFactory);
    } else {
      this.workerGroup = new NioEventLoopGroup(this.workerThreads, workThreadFactory);
    }

    this.bootstrap = new Bootstrap();
    // BUGFIX: the channel class must match the event loop group — an EpollEventLoopGroup
    // cannot drive a NioSocketChannel (Netty rejects the mismatch at connect time).
    Class<? extends SocketChannel> channelClass =
        this.nativeNetty ? EpollSocketChannel.class : NioSocketChannel.class;
    this.bootstrap.group(this.workerGroup).channel(channelClass)
        .handler(new ChannelInitializer<SocketChannel>() {
          @Override
          protected void initChannel(SocketChannel ch) throws Exception {
            // Inbound: frame-decoder -> handler. Outbound: frame-encoder (tail first).
            ChannelPipeline pipeline = ch.pipeline();
            pipeline.addLast("frame-decoder",
                new RpcMessageDecoder(maxPackageSize, 4, 4, 0, 0));
            pipeline.addLast("handler", NettyClient.this);
            pipeline.addLast("frame-encoder", new RpcMessageEncoder());
          }
        });
    this.bootstrap.option(ChannelOption.TCP_NODELAY, true);
    this.bootstrap.option(ChannelOption.SO_REUSEADDR, true);
    this.bootstrap.option(ChannelOption.SO_KEEPALIVE, true);
    this.bootstrap.option(ChannelOption.CONNECT_TIMEOUT_MILLIS, this.connectTimeout);
  }

  /**
   * Connects synchronously to {@code host:port}. On failure the error is logged and
   * {@link #channel} stays null (callers fall into the disconnected path).
   */
  @Override
  public void start() {
    try {
      channel = this.bootstrap.connect(new InetSocketAddress(this.host, this.port))
          .syncUninterruptibly().channel();
    } catch (Exception e) {
      // BUGFIX: pass the throwable itself so SLF4J logs the full stack trace,
      // not just the (often null) message.
      logger.error("start error", e);
    }
  }

  /** Returns true when the channel exists and is currently active. */
  public boolean isConnect() {
    // BUGFIX: a non-null check alone reports a closed-but-not-nulled channel as
    // connected; isActive() reflects the real connection state.
    return channel != null && channel.isActive();
  }

  /** Closes the connection and shuts down the worker group. Idempotent. */
  @Override
  public void close() {
    if (this.workerGroup != null) {
      logger.info("stopping netty client");
      // BUGFIX: the original closed a freshly created, EMPTY DefaultChannelGroup —
      // a no-op. Close the actual connection instead.
      if (this.channel != null) {
        this.channel.close().awaitUninterruptibly();
        this.channel = null;
      }
      this.workerGroup.shutdownGracefully();
      this.workerGroup = null;
      logger.info("netty client stopped");
    }
  }

  /** Returns the underlying channel (may be null when disconnected). */
  public Channel getChannel() {
    return channel;
  }

  /**
   * Sends a request and registers a one-slot response queue under {@code sequence}.
   * When disconnected, the queue is completed immediately with a {@code -819} failure
   * response so the waiting caller does not block until timeout.
   *
   * @param serviceId target service id
   * @param msgId target message id
   * @param sequence unique correlation id for this request
   * @param req request payload
   */
  public void writeAndFlush(int serviceId, int msgId, String sequence, FlowReq req) {
    // Register the queue BEFORE writing so a fast response cannot arrive first.
    BlockingQueue<RpcMessage> messagesBlockingQueue = new LinkedBlockingQueue<>(1);
    NettyClient.messageQueueMap.put(sequence, messagesBlockingQueue);
    MessageHeader messageHeader = new MessageHeader();
    messageHeader.setSequence(sequence);
    if (Objects.nonNull(channel)) {
      Map<String, Object> rpcProMap = new HashMap<>();
      HashMap<String, Object> headerMap = new HashMap<>();
      messageHeader.setAcceptTime(System.currentTimeMillis())
          .setHeaderMap(headerMap);
      RpcMessage rpcMessage = new RpcMessage(0, serviceId, msgId, req.toMap(), rpcProMap,
          messageHeader);
      channel.writeAndFlush(rpcMessage);
    } else {
      // BUGFIX: reuse the queue registered above instead of building a second
      // ArrayBlockingQueue and overwriting the map entry.
      RpcMessage rpcMessage = new RpcMessage(0, serviceId, msgId,
          new FlowRes().fail(-819).toMap(), new HashMap<>(), messageHeader);
      messagesBlockingQueue.offer(rpcMessage);
    }
  }

  /**
   * Delivers an inbound {@link RpcMessage} to the waiter registered for its sequence id.
   * Responses with no registered waiter (e.g. after timeout) are dropped.
   */
  @Override
  public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
    if (msg instanceof RpcMessage) {
      RpcMessage rpcMessage = (RpcMessage) msg;
      BlockingQueue<RpcMessage> rpcMessages =
          NettyClient.messageQueueMap.get(rpcMessage.getMessageHeader().getSequence());
      if (Objects.nonNull(rpcMessages)) {
        // BUGFIX: put() on the capacity-1 queue would block the Netty event loop
        // forever on a duplicate response; offer() drops the extra instead.
        if (!rpcMessages.offer(rpcMessage)) {
          logger.warn("duplicate response dropped, sequence:{}",
              rpcMessage.getMessageHeader().getSequence());
        }
      }
    } else {
      logger.info("msg:{}", msg);
    }
  }

  /** Consistent with {@link #equals(Object)}: identity is (host, port); id is excluded. */
  @Override
  public int hashCode() {
    return Objects.hash(host, port);
  }

  /** Two clients are equal when they point at the same host:port (id is ignored). */
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof NettyClient)) {
      return false;
    }
    NettyClient that = (NettyClient) o;
    return port == that.port &&
        host.equals(that.host);
  }

  // NOTE(review): finalize() is deprecated and gives no cleanup guarantees; kept only
  // for its debug logging. Real cleanup lives in close()/stop().
  @Override
  public void finalize() {
    if (logger.isDebugEnabled()) {
      logger.debug("{},finalized", this);
    }
  }

  /** Closes the connection without shutting down the worker group. */
  @Override
  public void stop() {
    if (Objects.nonNull(channel)) {
      // BUGFIX: closeFuture() only RETURNS the close future — it never closes the
      // channel. close() actually initiates the close.
      channel.close();
    }
  }

  /**
   * Logs the failure with its stack trace, drops the dead channel, closes the context,
   * and removes this client from the registry's client lists.
   */
  @Override
  public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
    // BUGFIX: pass the throwable as the final argument so the stack trace is logged.
    logger.error("channelId {} error", ctx.channel().id().asShortText(), cause);
    this.channel = null;
    ctx.close();
    RegistryManager.nettyClientListMap.values().forEach(ncl -> ncl.remove(this));
  }
}
