﻿using Confluent.Kafka;
using Confluent.Kafka.Admin;
using Grow.Module.Service;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using static Confluent.Kafka.ConfigPropertyNames;

namespace Grow.EventBus.Core.Kafka.KafkaConnection
{
    /// <summary>
    /// Caches Kafka producers (per topic) and consumers (per group id), lazily creating
    /// topics on first use. Connection settings come from <see cref="EventBusOptions"/>
    /// resolved via the injected <see cref="IServiceContext"/>.
    /// </summary>
    internal class KafkaConnection : IKafkaConnection
    {
        // Topics already verified/created, so the admin round-trip happens once per topic.
        private readonly ConcurrentBag<string> _existsTopics;
        // One cached producer per topic.
        private readonly ConcurrentDictionary<string, IProducer<string, byte[]>> _producers;
        // One cached consumer per consumer group id (NOTE(review): keyed by groupId only —
        // the same group asking for a different topic gets the cached consumer; confirm intended).
        private readonly ConcurrentDictionary<string, IConsumer<string, byte[]>> _consumers;

        private readonly IServiceContext _serviceContext;
        private readonly EventBusOptions _eventBusOptions;

        public KafkaConnection(IServiceContext serviceContext)
        {
            _existsTopics = new();
            _producers = new();
            _consumers = new();
            _serviceContext = serviceContext;
            _eventBusOptions = serviceContext.GetOptions<EventBusOptions>();
        }

        /// <summary>
        /// Returns the cached producer for <paramref name="topic"/>, creating the topic
        /// (if needed) and building the producer on first request.
        /// </summary>
        public IProducer<string, byte[]> GetProducer(string topic)
        {
            // NOTE(review): sync-over-async blocking; kept because the interface is synchronous.
            var properties = InitTopic(topic).ConfigureAwait(false).GetAwaiter().GetResult();
            return _producers.GetOrAdd(topic, _ =>
                new ProducerBuilder<string, byte[]>(properties).Build()
            );
        }

        /// <summary>
        /// Returns the cached consumer for <paramref name="groupId"/>, creating the topic
        /// (if needed) and building the consumer on first request.
        /// </summary>
        public IConsumer<string, byte[]> CreateConsumer(string groupId, string topic)
        {
            // NOTE(review): sync-over-async blocking; kept because the interface is synchronous.
            var properties = InitTopic(topic).ConfigureAwait(false).GetAwaiter().GetResult();
            properties["group.id"] = groupId;
            return _consumers.GetOrAdd(groupId, _ =>
                new ConsumerBuilder<string, byte[]>(properties).Build()
            );
        }

        /// <summary>
        /// Builds the base client configuration and ensures <paramref name="topic"/> exists,
        /// creating it via the admin client on first sight. Returns a fresh, mutable
        /// configuration dictionary each call.
        /// </summary>
        private async Task<IDictionary<string, string>> InitTopic(string topic)
        {
            IDictionary<string, string> properties = new Dictionary<string, string>
                {
                    { "bootstrap.servers", _eventBusOptions.ConnectionString },
                    // Do not override the following settings; changing them would
                    // break message receipt acknowledgement.
                    { "enable.auto.offset.store", "false" },
                    { "enable.auto.commit", "true" },
                    { "auto.offset.reset", "earliest" },
                };
            if (_existsTopics.Contains(topic))
            {
                return properties;
            }
            try
            {
                var config = new AdminClientConfig(properties);
                using var adminClient = new AdminClientBuilder(config).Build();
                await adminClient.CreateTopicsAsync(new[] { new TopicSpecification { Name = topic } }).ConfigureAwait(false);
                _existsTopics.Add(topic);
            }
            catch (CreateTopicsException ex)
            {
                // "Already exists" is success for our purposes; use the structured error
                // code instead of matching on the (locale/format-dependent) message text.
                if (ex.Results.Any(r => r.Error.IsError && r.Error.Code != ErrorCode.TopicAlreadyExists))
                    throw;
                _existsTopics.Add(topic);
            }
            return properties;
        }
        #region Dispose
        private bool disposedValue;

        /// <summary>
        /// Disposes all cached producers and consumers. Errors during shutdown are
        /// swallowed — Dispose must not throw.
        /// </summary>
        protected virtual void Dispose(bool disposing)
        {
            if (!disposedValue)
            {
                if (disposing)
                {
                    foreach (var producer in _producers.Values)
                    {
                        try
                        {
                            // Best-effort delivery of buffered messages before shutdown.
                            producer.Flush(TimeSpan.FromSeconds(5));
                        }
                        catch { /* best effort — never throw from Dispose */ }
                        producer.Dispose();
                    }
                    _producers.Clear();

                    foreach (var consumer in _consumers.Values)
                    {
                        try
                        {
                            // Commit offsets and leave the group cleanly.
                            consumer.Close();
                        }
                        catch { /* best effort — never throw from Dispose */ }
                        consumer.Dispose();
                    }
                    _consumers.Clear();
                }
                disposedValue = true;
            }
        }

        public void Dispose()
        {
            // Do not change this code; put cleanup logic in Dispose(bool disposing).
            Dispose(disposing: true);
            GC.SuppressFinalize(this);
        }
        #endregion

    }
}
