using Confluent.Kafka;
using Microsoft.Extensions.Configuration;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;

namespace DG.Kafka
{
    public class KafkaClient
    {
        /// <summary>
        /// Whether to stop the service (checked by the consume loops).
        /// </summary>
        public static bool Shop { get; set; } = false;

        public void ReadFromConfiguration(IConfiguration configuration)
        {
            ConfigurationManager = configuration;
        }

        public IConfiguration ConfigurationManager { get; private set; }

        private static readonly Lazy<KafkaClient> _defaultClient = new(() => new KafkaClient());

        public static KafkaClient Default
        {
            get { return _defaultClient.Value; }
        }

        private static List<Consumer> _consumers { get; set; } = new List<Consumer>();

        // Binds the "Consumers" configuration section to the list of consumer definitions.
        public static List<Consumer> GetConsumers()
        {
            _consumers = Default.ConfigurationManager.GetSection("Consumers").Get<List<Consumer>>();
            return _consumers;
        }

        // Starts a background single-message consume loop.
        public static async Task Builder<TMessage>(Consumer consumer, Func<TMessage, Task> received, Func<Task> shoped)
        {
            await Task.Run(() =>
            {
                var tasks = new List<Task>();
                //var receivedDelegate = new ReceivedDelegate(ReceivedAsync);
                ThreadPool.QueueUserWorkItem(async task =>
                {
                    await ReceivedAsync(consumer, received, shoped);
                });
            });
        }

        // Starts a background batch consume loop.
        public static async Task BatchBuilder<TMessage>(Consumer consumer, Func<List<TMessage>, Task> received, Func<Task> shoped, int batchsize = 1000)
        {
            await Task.Run(() =>
            {
                var tasks = new List<Task>();
                //var receivedDelegate = new BatchReceivedDelegate(BatchReceivedAsync);
                ThreadPool.QueueUserWorkItem(async task =>
                {
                    await BatchReceivedAsync(consumer, received, shoped, batchsize);
                });
            });
        }

        public delegate Task ReceivedDelegate<TMessage>(Consumer consumer, Func<TMessage, Task> received);

        public delegate Task BatchReceivedDelegate<TMessage>(Consumer consumer, Func<List<TMessage>, Task> received, int batchsize = 1000);

        // Consumes messages one at a time, invoking the handler before committing each offset manually.
        public static async Task ReceivedAsync<TMessage>(Consumer consumer, Func<TMessage, Task> received, Func<Task> shoped)
        {
            try
            {
                var consumerConfig = new ConsumerConfig
                {
                    BootstrapServers = consumer.Host,
                    GroupId = consumer.GroupId,
                    AutoOffsetReset = AutoOffsetReset.Earliest,
                    EnableAutoCommit = false // offsets are committed explicitly after the handler runs
                };
                var cancel = false;
                using var consumerBuilder = new ConsumerBuilder<Ignore, string>(consumerConfig).Build();
                Console.WriteLine($"Kafka alone consumer connected, config: {JsonSerializer.Serialize(consumerConfig)}");
                var topic = consumer.Topic;
                consumerBuilder.Subscribe(topic);
                while (!cancel)
                {
                    try
                    {
                        if (Shop)
                        {
                            await shoped();
                            return;
                        }
                        var consumeResult = consumerBuilder.Consume(CancellationToken.None);
                        Console.WriteLine($"Consumer message: {consumeResult.Message.Value} topic: {consumeResult.Topic} Partition: {consumeResult.Partition}");
                        var message = JsonSerializer.Deserialize<TMessage>(consumeResult.Message.Value);
                        if (message != null)
                        {
                            await received(message);
                        }
                        try
                        {
                            consumerBuilder.Commit(consumeResult);
                        }
                        catch (KafkaException e)
                        {
                            Console.WriteLine(e.Message);
                        }
                    }
                    catch (Exception ex)
                    {
                        Console.WriteLine(ex.ToString());
                    }
                    await Task.Delay(1);
                }
                consumerBuilder.Close();
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.ToString());
            }
        }

        // Consumes messages in batches of up to batchsize, committing all offsets after the batch handler succeeds.
        public static async Task BatchReceivedAsync<TMessage>(Consumer consumer, Func<List<TMessage>, Task> received, Func<Task> shoped, int batchsize = 1000)
        {
            try
            {
                var consumerConfig = new ConsumerConfig
                {
                    BootstrapServers = consumer.Host,
                    GroupId = consumer.GroupId,
                    AutoOffsetReset = AutoOffsetReset.Earliest,
                    EnableAutoCommit = false // offsets are committed explicitly after the batch handler runs
                };
                var cancel = false;
                using var consumerBuilder = new ConsumerBuilder<Ignore, string>(consumerConfig).Build();
                Console.WriteLine($"Kafka batch consumer connected, config: {JsonSerializer.Serialize(consumerConfig)}");
                var topic = consumer.Topic;
                consumerBuilder.Subscribe(topic);
                while (!cancel)
                {
                    try
                    {
                        if (Shop)
                        {
                            await shoped();
                            return;
                        }
                        var batchRecords = new List<TMessage>();
                        var consumeResults = new List<ConsumeResult<Ignore, string>>();
                        while (batchRecords.Count < batchsize)
                        {
                            var consumeResult = consumerBuilder.Consume(CancellationToken.None);
                            Console.WriteLine($"Consumer message: {consumeResult.Message.Value} topic: {consumeResult.Topic} Partition: {consumeResult.Partition}");
                            var message = JsonSerializer.Deserialize<TMessage>(consumeResult.Message.Value);
                            if (message == null) break; // no more messages available to consume
                            consumeResults.Add(consumeResult);
                            batchRecords.Add(message);
                        }
                        await received(batchRecords);
                        foreach (var consumeResult in consumeResults)
                        {
                            consumerBuilder.Commit(consumeResult);
                        }
                    }
                    catch (KafkaException e)
                    {
                        Console.WriteLine("KafkaException:" + e.Message);
                    }
                    catch (Exception ex)
                    {
                        Console.WriteLine("KafkaException:" + ex.ToString());
                    }
                    await Task.Delay(1);
                }
                consumerBuilder.Close();
            }
            catch (Exception ex)
            {
                Console.WriteLine("KafkaException:" + ex.ToString());
            }
        }

        // Serializes the message as JSON and produces it to the consumer's topic.
        public static async Task SendMessage<TMessage>(Consumer consumer, TMessage message)
        {
            var config = new ProducerConfig
            {
                BootstrapServers = consumer.Host,
            };
            var topic = consumer.Topic;
            Action<DeliveryReport<Null, string>> handler = r =>
                Console.WriteLine(!r.Error.IsError
                    ? $"Delivered message to {r.TopicPartitionOffset}"
                    : $"Delivery Error: {r.Error.Reason}");
            using (var p = new ProducerBuilder<Null, string>(config).Build())
            {
                try
                {
                    p.Produce(topic, new Message<Null, string> { Value = JsonSerializer.Serialize(message) }, handler);
                    p.Flush(TimeSpan.FromSeconds(10));
                }
                catch (ProduceException<Null, string> e)
                {
                    Console.WriteLine($"Delivery failed: {e.Error.Reason}");
                }
            }
        }
    }
}
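namespace DG.Kafka
{
    // --- Usage sketch (illustrative only, not part of the library) ---
    // A minimal sketch of producing one message and starting a single-message
    // consume loop with KafkaClient. Assumptions not shown in this file: the
    // Consumer POCO exposes settable Host, GroupId and Topic properties, and
    // the OrderCreated type below is a hypothetical JSON-serializable payload.
    public class OrderCreated
    {
        public string Id { get; set; } = string.Empty;
        public decimal Amount { get; set; }
    }

    public static class KafkaClientUsageExample
    {
        public static async Task RunAsync()
        {
            // Hypothetical broker/group/topic values; in practice these would come
            // from the "Consumers" section bound by KafkaClient.GetConsumers().
            var consumer = new Consumer
            {
                Host = "localhost:9092",
                GroupId = "demo-group",
                Topic = "orders"
            };

            // Produce a JSON-serialized message to the topic.
            await KafkaClient.SendMessage(consumer, new OrderCreated { Id = "1", Amount = 9.99m });

            // Start a background consume loop; the "shoped" callback runs once
            // KafkaClient.Shop is set to true.
            await KafkaClient.Builder<OrderCreated>(
                consumer,
                received: msg =>
                {
                    Console.WriteLine($"Handled order {msg.Id} ({msg.Amount})");
                    return Task.CompletedTask;
                },
                shoped: () =>
                {
                    Console.WriteLine("Consumer stopped (KafkaClient.Shop was set).");
                    return Task.CompletedTask;
                });
        }
    }
}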