-
Notifications
You must be signed in to change notification settings - Fork 15
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
4 changed files
with
182 additions
and
1 deletion.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,162 @@ | ||
using System.Collections.Concurrent; | ||
using System.Reflection; | ||
using Confluent.Kafka; | ||
using KafkaFlow; | ||
using KafkaFlow.Consumers; | ||
|
||
namespace Sitko.Core.Kafka; | ||
|
||
/// <summary>
///     Ensures that committed offsets exist for newly assigned partitions of KafkaFlow
///     consumers. When a partition has no committed offset, the high watermark is
///     committed so the consumer starts from the end instead of the beginning.
///     Uses reflection to reach the underlying Confluent consumer inside KafkaFlow.
/// </summary>
internal class KafkaConsumerOffsetsEnsurer
{
    // Reflection caches for KafkaFlow internals — resolved once, reused for all consumers.
    private static FieldInfo? consumerManagerField;
    private static FieldInfo? directConsumerField;
    private static PropertyInfo? consumerProperty;

    // Partitions already claimed for processing, keyed "consumerName/partitionId".
    // A ConcurrentDictionary used as a set (via TryAdd) replaces the original
    // non-thread-safe HashSet: EnsureOffsets may be called concurrently from
    // partition-assignment callbacks of several consumers.
    private static readonly ConcurrentDictionary<string, byte> ProcessedPartitions = new();

    // Cache of resolved (KafkaFlow consumer, underlying Confluent consumer) pairs.
    private static readonly
        ConcurrentDictionary<IMessageConsumer, (IConsumer kafkaFlowConsumer, IConsumer<byte[], byte[]> confluentConsumer
        )> Consumers = new();

    private readonly IConsumerAccessor consumerAccessor;
    private readonly ILogHandler logHandler;
    private readonly ConcurrentDictionary<string, Task> tasks = new();

    // Guards lazy creation of adminClient; concurrent partition tasks could otherwise
    // build (and leak) duplicate admin clients.
    private readonly object adminClientLock = new();
    private IAdminClient? adminClient;

    public KafkaConsumerOffsetsEnsurer(IConsumerAccessor consumerAccessor, ILogHandler logHandler)
    {
        this.consumerAccessor = consumerAccessor;
        this.logHandler = logHandler;
    }

    /// <summary>
    ///     Lazily creates a shared <see cref="IAdminClient" /> for the given brokers.
    ///     Thread-safe via double-checked locking; the broker list of the first call wins.
    /// </summary>
    private IAdminClient GetAdminClient(string[] brokers)
    {
        if (adminClient is null)
        {
            lock (adminClientLock)
            {
                if (adminClient is null)
                {
                    var adminClientConfig = new AdminClientConfig
                    {
                        BootstrapServers = string.Join(",", brokers), ClientId = "AdminClient"
                    };
                    adminClient = new AdminClientBuilder(adminClientConfig)
                        .SetLogHandler((_, m) => logHandler.Info(m.Message, m))
                        .SetErrorHandler((_, error) =>
                            logHandler.Error("Kafka Consumer Error", null, new { Error = error }))
                        .Build();
                }
            }
        }

        return adminClient;
    }

    /// <summary>
    ///     Schedules a background offset check for each partition that has not been
    ///     processed yet. Safe to call concurrently; each partition is claimed atomically
    ///     and processed at most once per process lifetime.
    /// </summary>
    /// <param name="brokers">Bootstrap broker addresses.</param>
    /// <param name="name">KafkaFlow consumer name as registered in <see cref="IConsumerAccessor" />.</param>
    /// <param name="list">Partitions assigned to the consumer.</param>
    public void EnsureOffsets(
        string[] brokers,
        string name,
        List<TopicPartition> list
    )
    {
        foreach (var partition in list)
        {
            var key = $"{name}/{partition.Partition.Value}";
            // TryAdd atomically claims the partition; if another thread already claimed
            // it we skip — this replaces the racy HashSet Contains/Add pair.
            if (!ProcessedPartitions.TryAdd(key, 0))
            {
                continue;
            }

            tasks.GetOrAdd(
                key,
                _ => Task.Run(async () =>
                {
                    try
                    {
                        await ProcessPartition(brokers, name, partition);
                    }
                    catch (Exception ex)
                    {
                        // Fire-and-forget task: without this catch any failure would be
                        // an unobserved task exception and silently lost.
                        logHandler.Error(
                            "Error ensuring consumer offsets",
                            ex,
                            new { Consumer = name, Partition = partition.Partition.Value });
                    }
                })
            );
        }
    }

    /// <summary>
    ///     Checks the committed offset of a single partition; if none exists, commits the
    ///     partition's high watermark. The partition is paused for the duration of the
    ///     check and always resumed, even on failure.
    /// </summary>
    private async Task ProcessPartition(string[] brokers, string name, TopicPartition partition)
    {
        var messageConsumer = consumerAccessor.GetConsumer(name);
        messageConsumer.Pause(new[] { partition });
        try
        {
            var (kafkaFlowConsumer, confluentConsumer) = GetConsumers(messageConsumer);

            var committed = await GetAdminClient(brokers).ListConsumerGroupOffsetsAsync(new[]
            {
                new ConsumerGroupTopicPartitions(messageConsumer.GroupId, new List<TopicPartition> { partition })
            });
            if (!committed.Any())
            {
                logHandler.Warning(
                    $"Не получилось найти оффсеты для назначенных партиций консьюмера {messageConsumer.ConsumerName}",
                    null);
                return;
            }

            var currentOffset = committed.First().Partitions.FirstOrDefault(
                partitionOffset =>
                    partitionOffset.TopicPartition == partition
            );

            // No committed offset at all, or an explicit Unset: seed it with the high
            // watermark so the consumer starts at the end of the partition.
            if (currentOffset is null || currentOffset.Offset == Offset.Unset)
            {
                var partitionOffset = confluentConsumer.QueryWatermarkOffsets(partition, TimeSpan.FromSeconds(30));
                var newOffset = new TopicPartitionOffset(partition, partitionOffset.High);
                logHandler.Warning(
                    $"Сохраняем отсутствующий оффсет для партиции {partition} консьюмера {name}: {newOffset.Offset}",
                    null);
                kafkaFlowConsumer.Commit(new[] { newOffset });
            }
        }
        finally
        {
            messageConsumer.Resume(new[] { partition });
        }
    }

    /// <summary>
    ///     Digs the KafkaFlow <see cref="IConsumer" /> and the underlying Confluent
    ///     <see cref="IConsumer{TKey,TValue}" /> out of a <see cref="IMessageConsumer" />
    ///     via reflection (KafkaFlow does not expose them publicly). Results are cached
    ///     per consumer; reflection members are cached statically.
    /// </summary>
    /// <exception cref="InvalidOperationException">
    ///     Thrown when a KafkaFlow internal field/property cannot be found or read —
    ///     typically after a KafkaFlow version upgrade that renamed internals.
    /// </exception>
    private static (IConsumer kafkaFlowConsumer, IConsumer<byte[], byte[]> confluentConsumer) GetConsumers(
        IMessageConsumer consumer) =>
        Consumers.GetOrAdd(
            consumer, messageConsumer =>
            {
                consumerManagerField ??= messageConsumer.GetType().GetField(
                                             "consumerManager",
                                             BindingFlags.Instance |
                                             BindingFlags.NonPublic
                                         ) ??
                                         throw new InvalidOperationException(
                                             "Can't find field consumerManager"
                                         );
                var consumerManager =
                    consumerManagerField.GetValue(messageConsumer) ??
                    throw new InvalidOperationException(
                        "Can't get consumerManager"
                    );
                consumerProperty ??= consumerManager.GetType()
                                         .GetProperty(
                                             "Consumer",
                                             BindingFlags.Instance |
                                             BindingFlags.Public
                                         ) ??
                                     throw new InvalidOperationException(
                                         "Can't find field consumer"
                                     );
                var flowConsumer =
                    consumerProperty.GetValue(consumerManager) as IConsumer ??
                    throw new InvalidOperationException(
                        "Can't get flowConsumer"
                    );

                directConsumerField ??= flowConsumer.GetType()
                                            .GetField(
                                                "consumer",
                                                BindingFlags.Instance |
                                                BindingFlags.NonPublic
                                            ) ??
                                        throw new InvalidOperationException(
                                            "Can't find field directConsumer"
                                        );
                var confluentConsumer =
                    directConsumerField.GetValue(flowConsumer) as
                        IConsumer<byte[], byte[]> ??
                    throw new InvalidOperationException(
                        "Can't get directConsumer"
                    );

                return (flowConsumer, confluentConsumer);
            }
        );
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters