﻿using System;
using System.Diagnostics;
using System.Net.Http;
using System.Text;
using System.Threading.Tasks;
using System.Collections.Concurrent;
using System.Linq;
using Confluent.Kafka;
using System.Threading;

namespace Message_Check
{
   public class Class1
    {
        /// <summary>
        /// Entry point for the test harness: consumes messages from Kafka topic "1003".
        /// </summary>
        private static async Task Main1()
        {
            // Bug fix: the Task returned by KafkaConsume was previously assigned to a
            // local and never awaited, so the program could exit before the consumer
            // did any work (and the async method contained no await at all, CS1998).
            await KafkaConsume("1003");
        }

        /// <summary>
        /// Subscribes to the given Kafka topic and consumes string messages in a poll
        /// loop, committing offsets manually after each received message. The loop
        /// stops after roughly 20 poll iterations (a debug/test safety limit kept from
        /// the original code). All errors are logged to the console; nothing is thrown
        /// to the caller.
        /// </summary>
        /// <param name="topics">Name of the Kafka topic to subscribe to; also used to build the consumer group id.</param>
        /// <param name="bootstrapServers">Kafka cluster address. Defaults to the original hard-coded host so existing callers are unaffected.</param>
        public static async Task KafkaConsume(string topics, string bootstrapServers = "10.159.44.171:9092")
        {
            string topic = topics;                      // topic to consume
            string groupId = "savegen2pana_" + topic;   // consumer group id, derived from topic
            try
            {
                var config = new ConsumerConfig
                {
                    BootstrapServers = bootstrapServers,
                    GroupId = groupId,
                    AutoOffsetReset = AutoOffsetReset.Earliest,
                    FetchMaxBytes = 5 * 1024 * 1024, // 5 MB
                    FetchMinBytes = 1 * 1024 * 1024, // 1 MB
                    EnableAutoCommit = false // disable auto-commit; offsets are committed manually below
                };

                using (var consumer = new ConsumerBuilder<Ignore, string>(config).Build())
                {
                    consumer.Subscribe(topic);

                    // Yield once so the blocking Consume loop below does not run
                    // synchronously on the caller's thread (the method otherwise
                    // contains no awaits and would execute entirely synchronously).
                    await Task.Yield();

                    var cancellationTokenSource = new CancellationTokenSource();
                    var cancellationToken = cancellationTokenSource.Token;

                    // Bug fix: this counter was a shared private static field, so a
                    // second call to KafkaConsume would hit the "> 20" break
                    // immediately. A local counter gives each call a fresh count.
                    int pollCount = 0;
                    try
                    {
                        while (!cancellationToken.IsCancellationRequested)
                        {
                            // Short poll timeout so the loop stays responsive.
                            var consumeResult = consumer.Consume(TimeSpan.FromMilliseconds(100));
                            if (consumeResult != null)
                            {
                                Console.WriteLine($"第{pollCount++}次循环 成功获取数据");
                                Console.WriteLine($"Received message: {consumeResult.Message.Value} at: {consumeResult.TopicPartitionOffset}");
                                // Commit only after the message has been handled.
                                consumer.Commit(consumeResult);
                            }
                            else
                            {
                                Console.WriteLine($"第{pollCount++}次循环 无数据");
                                Console.WriteLine("No message received. Doing other work...");
                                // Debug safety limit: stop after ~20 poll iterations.
                                if (pollCount > 20) { break; }
                            }
                        }
                    }
                    catch (ConsumeException e)
                    {
                        // Bug fix: Close() was previously called here AND in the
                        // finally block (double close); the finally alone suffices.
                        Console.WriteLine($"Consume error: {e.Error.Reason}");
                    }
                    finally
                    {
                        // Leave the consumer group cleanly before Dispose runs.
                        consumer.Close();
                    }
                }
            }
            catch (Exception e)
            {
                Console.WriteLine($"Consume error: {e.Message}");
            }
        }
    }
}
