From 4c1dfecf526fe772cc566c48682b1c67534cfa16 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=A3o=20Pereira=20Rodrigues?= Date: Tue, 21 Nov 2023 16:17:57 +0000 Subject: [PATCH] refactor: adopt common .NET conventions This refactor aims at the following - Have all usings outside the namespace - Replace `this.` qualification on private variables with a `_` prefix - Move tests folder to root - Move sln file to root --- .editorconfig | 95 +++++++ .github/workflows/build.yml | 6 +- .github/workflows/deploy-website.yml | 2 +- .github/workflows/publish.yml | 2 +- .github/workflows/test-deploy-website.yml | 2 +- KafkaFlow.sln | 265 ++++++++++++++++++ Makefile | 10 +- .../PrintConsoleMiddleware.cs | 3 +- samples/KafkaFlow.Sample.Dashboard/Startup.cs | 2 +- .../MessageHandler.cs | 2 - .../PauseConsumerOnExceptionMiddleware.cs | 14 +- .../Handlers/AvroMessageHandler.cs | 11 +- .../Handlers/AvroMessageHandler2.cs | 7 +- .../Handlers/JsonMessageHandler.cs | 7 +- .../Handlers/ProtobufMessageHandler.cs | 4 +- .../Avro/SchemaRegistry/AvroLogMessage.cs | 10 +- .../Avro/SchemaRegistry/AvroLogMessage2.cs | 8 +- .../MessageTypes/Json/JsonLogMessage.cs | 24 +- .../Program.cs | 12 +- samples/KafkaFlow.Sample.WebApi/Program.cs | 1 - .../KafkaFlow.Sample/PrintConsoleHandler.cs | 1 - samples/KafkaFlow.Sample/TestMessage.cs | 2 +- .../IClusterConfigurationBuilder.cs | 6 +- .../IConsumerConfigurationBuilder.cs | 10 +- .../IKafkaConfigurationBuilder.cs | 4 +- .../IProducerConfigurationBuilder.cs | 4 +- .../Configuration/TopicPartitions.cs | 4 +- .../Configuration/WorkersCountContext.cs | 4 +- src/KafkaFlow.Abstractions/Delegates.cs | 4 +- .../DependencyConfiguratorExtensions.cs | 4 +- .../DependencyResolverExtensions.cs | 2 +- .../Extensions/DictionaryExtensions.cs | 6 +- .../Extensions/MessageHeaderExtensions.cs | 4 +- .../IConsumerContext.cs | 8 +- .../IDateTimeProvider.cs | 4 +- .../IDependencyConfigurator.cs | 4 +- .../IDependencyResolver.cs | 6 +- .../IDependencyResolverScope.cs | 4 +- 
src/KafkaFlow.Abstractions/IDeserializer.cs | 8 +- src/KafkaFlow.Abstractions/IEvent.cs | 10 +- src/KafkaFlow.Abstractions/ILogHandler.cs | 4 +- src/KafkaFlow.Abstractions/IMessageContext.cs | 4 +- src/KafkaFlow.Abstractions/IMessageHandler.cs | 4 +- src/KafkaFlow.Abstractions/IMessageHeaders.cs | 4 +- .../IMessageMiddleware.cs | 4 +- src/KafkaFlow.Abstractions/ISerializer.cs | 6 +- .../IWorkerDistributionStrategy.cs | 4 +- .../MessageErrorEventContext.cs | 6 +- src/KafkaFlow.Abstractions/NullLogHandler.cs | 4 +- .../WorkerDistributionContext.cs | 4 +- .../ApplicationBuilderExtensions.cs | 20 +- .../DashboardConfiguration.cs | 8 +- .../DashboardConfigurationBuilder.cs | 20 +- .../IDashboardConfigurationBuilder.cs | 6 +- .../KafkaFlow.Admin.Dashboard.csproj | 2 +- .../TelemetryResponse.cs | 10 +- .../TelemetryResponseAdapter.cs | 8 +- .../Adapters/ConsumerResponseAdapter.cs | 6 +- .../Adapters/TelemetryResponseAdapter.cs | 10 +- .../Contracts/ConsumerResponse.cs | 106 +++---- .../Contracts/ConsumersResponse.cs | 4 +- .../Contracts/GroupResponse.cs | 8 +- .../Contracts/GroupsResponse.cs | 4 +- .../Contracts/RewindOffsetsToDateRequest.cs | 4 +- .../Contracts/TelemetryResponse.cs | 10 +- .../Controllers/ConsumersController.cs | 60 ++-- .../Controllers/GroupsController.cs | 30 +- .../Controllers/TelemetryController.cs | 16 +- src/KafkaFlow.Admin/AdminProducer.cs | 18 +- src/KafkaFlow.Admin/ConsumerAdmin.cs | 36 +-- .../ClusterConfigurationBuilderExtensions.cs | 16 +- .../Extensions/MessageConsumerExtensions.cs | 10 +- .../ChangeConsumerWorkersCountHandler.cs | 15 +- .../ConsumerTelemetryMetricHandler.cs | 13 +- .../Handlers/PauseConsumerByNameHandler.cs | 19 +- .../Handlers/PauseConsumersByGroupHandler.cs | 19 +- .../Handlers/ResetConsumerOffsetHandler.cs | 23 +- .../Handlers/RestartConsumerByNameHandler.cs | 15 +- .../Handlers/ResumeConsumerByNameHandler.cs | 19 +- .../Handlers/ResumeConsumersByGroupHandler.cs | 19 +- .../RewindConsumerOffsetToDateTimeHandler.cs | 23 
+- .../Handlers/StartConsumerByNameHandler.cs | 15 +- .../Handlers/StopConsumerByNameHandler.cs | 15 +- src/KafkaFlow.Admin/IAdminProducer.cs | 6 +- src/KafkaFlow.Admin/IConsumerAdmin.cs | 8 +- src/KafkaFlow.Admin/ITelemetryStorage.cs | 6 +- src/KafkaFlow.Admin/MemoryTelemetryStorage.cs | 50 ++-- .../Messages/ChangeConsumerWorkersCount.cs | 4 +- .../Messages/ConsumerTelemetryMetric.cs | 10 +- .../Messages/PauseConsumerByName.cs | 6 +- .../Messages/PauseConsumersByGroup.cs | 6 +- .../Messages/ResetConsumerOffset.cs | 6 +- .../Messages/RestartConsumerByName.cs | 4 +- .../Messages/ResumeConsumerByName.cs | 6 +- .../Messages/ResumeConsumersByGroup.cs | 6 +- .../RewindConsumerOffsetToDateTime.cs | 8 +- .../Messages/StartConsumerByName.cs | 4 +- .../Messages/StopConsumerByName.cs | 4 +- src/KafkaFlow.Admin/TelemetryScheduler.cs | 46 +-- .../GzipMessageCompressor.cs | 8 +- .../GzipMessageDecompressor.cs | 8 +- .../KafkaFlowHostedService.cs | 18 +- .../ServiceCollectionExtensions.cs | 10 +- .../CompressionTest.cs | 44 --- .../ProducerTest.cs | 40 --- .../ConsoleLogHandler.cs | 8 +- .../ExtensionMethods.cs | 4 +- .../ExtensionMethods.cs | 4 +- .../MicrosoftLogHandler.cs | 22 +- .../MicrosoftDependencyConfigurator.cs | 22 +- .../MicrosoftDependencyResolver.cs | 20 +- .../MicrosoftDependencyResolverScope.cs | 10 +- .../ServiceCollectionExtensions.cs | 8 +- .../ServiceProviderExtensions.cs | 8 +- .../ActivitySourceAccessor.cs | 22 +- .../ExtensionMethods.cs | 6 +- .../OpenTelemetryConsumerEventsHandler.cs | 24 +- .../OpenTelemetryProducerEventsHandler.cs | 24 +- .../ClusterConfigurationBuilderExtensions.cs | 8 +- .../ConfluentDeserializerWrapper.cs | 32 +-- .../ConfluentSerializerWrapper.cs | 26 +- .../ISchemaRegistryTypeNameResolver.cs | 4 +- .../SchemaRegistryTypeResolver.cs | 38 +-- .../JsonCoreDeserializer.cs | 18 +- .../JsonCoreSerializer.cs | 26 +- .../NewtonsoftJsonDeserializer.cs | 25 +- .../NewtonsoftJsonSerializer.cs | 23 +- .../ProtobufNetDeserializer.cs | 13 +- 
.../ProtobufNetSerializer.cs | 11 +- .../ConfluentAvroDeserializer.cs | 20 +- .../ConfluentAvroSerializer.cs | 26 +- .../ConfluentAvroTypeNameResolver.cs | 14 +- .../ConsumerConfigurationBuilderExtensions.cs | 12 +- .../ProducerConfigurationBuilderExtensions.cs | 14 +- .../ConfluentJsonDeserializer.cs | 20 +- .../ConfluentJsonSerializer.cs | 34 +-- .../ConsumerConfigurationBuilderExtensions.cs | 10 +- .../ProducerConfigurationBuilderExtensions.cs | 12 +- .../ConfluentProtobufDeserializer.cs | 14 +- .../ConfluentProtobufSerializer.cs | 26 +- .../ConfluentProtobufTypeNameResolver.cs | 18 +- .../ConsumerConfigurationBuilderExtensions.cs | 12 +- .../ProducerConfigurationBuilderExtensions.cs | 14 +- .../Consumer/ConsumerManagerTests.cs | 167 ----------- .../SchemaRegistryTypeResolverTests.cs | 47 ---- src/KafkaFlow.UnitTests/OffsetManagerTests.cs | 88 ------ .../UnityDependencyConfigurator.cs | 37 ++- .../UnityDependencyResolver.cs | 20 +- .../UnityDependencyResolverScope.cs | 10 +- src/KafkaFlow.sln | 252 ----------------- .../Batching/BatchConsumeMessageContext.cs | 14 +- .../Batching/BatchConsumeMiddleware.cs | 92 +++--- src/KafkaFlow/Batching/BatchingExtensions.cs | 12 +- src/KafkaFlow/Clusters/ClusterManager.cs | 54 ++-- .../Clusters/ClusterManagerAccessor.cs | 14 +- src/KafkaFlow/Clusters/IClusterManager.cs | 8 +- .../Clusters/IClusterManagerAccessor.cs | 4 +- .../Configuration/ClusterConfiguration.cs | 44 +-- .../ClusterConfigurationBuilder.cs | 58 ++-- .../Configuration/ConsumerConfiguration.cs | 31 +- .../ConsumerConfigurationBuilder.cs | 183 ++++++------ .../Configuration/IConsumerConfiguration.cs | 17 +- .../IMiddlewareInstanceContainer.cs | 4 +- .../Configuration/IProducerConfiguration.cs | 9 +- .../Configuration/KafkaConfiguration.cs | 10 +- .../KafkaConfigurationBuilder.cs | 44 +-- .../Configuration/KafkaFlowConfigurator.cs | 16 +- .../Configuration/MiddlewareConfiguration.cs | 4 +- .../MiddlewareConfigurationBuilder.cs | 18 +- 
.../MiddlewareInstanceContainer.cs | 26 +- .../PendingOffsetsStatisticsHandler.cs | 11 +- .../Configuration/ProducerConfiguration.cs | 9 +- .../ProducerConfigurationBuilder.cs | 75 +++-- src/KafkaFlow/ConsumerManagerFactory.cs | 6 +- src/KafkaFlow/Consumers/Consumer.cs | 153 +++++----- src/KafkaFlow/Consumers/ConsumerAccessor.cs | 12 +- src/KafkaFlow/Consumers/ConsumerContext.cs | 55 ++-- .../Consumers/ConsumerFlowManager.cs | 82 +++--- src/KafkaFlow/Consumers/ConsumerManager.cs | 55 ++-- src/KafkaFlow/Consumers/ConsumerWorker.cs | 112 ++++---- src/KafkaFlow/Consumers/ConsumerWorkerPool.cs | 120 ++++---- .../BytesSumDistributionStrategy.cs | 14 +- .../FreeWorkerDistributionStrategy.cs | 14 +- src/KafkaFlow/Consumers/IConsumer.cs | 57 ++-- src/KafkaFlow/Consumers/IConsumerAccessor.cs | 4 +- .../Consumers/IConsumerFlowManager.cs | 6 +- src/KafkaFlow/Consumers/IConsumerManager.cs | 4 +- src/KafkaFlow/Consumers/IConsumerWorker.cs | 8 +- .../Consumers/IConsumerWorkerPool.cs | 10 +- src/KafkaFlow/Consumers/IMessageConsumer.cs | 31 +- src/KafkaFlow/Consumers/IOffsetCommitter.cs | 8 +- src/KafkaFlow/Consumers/IOffsetManager.cs | 4 +- src/KafkaFlow/Consumers/IWorkerPoolFeeder.cs | 4 +- src/KafkaFlow/Consumers/MessageConsumer.cs | 115 ++++---- .../Consumers/NullOffsetCommitter.cs | 9 +- src/KafkaFlow/Consumers/NullOffsetManager.cs | 4 +- src/KafkaFlow/Consumers/OffsetCommitter.cs | 72 ++--- src/KafkaFlow/Consumers/OffsetManager.cs | 26 +- src/KafkaFlow/Consumers/OffsetsWatermark.cs | 18 +- src/KafkaFlow/Consumers/PartitionOffsets.cs | 35 +-- src/KafkaFlow/Consumers/WorkerPoolFeeder.cs | 44 +-- .../ConsumerLagWorkerBalancer.cs | 54 ++-- src/KafkaFlow/DateTimeProvider.cs | 4 +- src/KafkaFlow/Delegates.cs | 6 +- src/KafkaFlow/Event.cs | 36 +-- src/KafkaFlow/EventSubscription.cs | 12 +- .../ConfigurationBuilderExtensions.cs | 24 +- .../Extensions/ConfigurationExtensions.cs | 14 +- src/KafkaFlow/GlobalEvents.cs | 57 ++-- src/KafkaFlow/IConsumerManagerFactory.cs | 6 +- 
src/KafkaFlow/IKafkaBus.cs | 10 +- src/KafkaFlow/IMiddlewareExecutor.cs | 6 +- src/KafkaFlow/KafkaBus.cs | 54 ++-- src/KafkaFlow/MessageContext.cs | 4 +- src/KafkaFlow/MessageHeaders.cs | 24 +- src/KafkaFlow/MiddlewareExecutor.cs | 30 +- .../CompressorProducerMiddleware.cs | 14 +- .../DecompressorConsumerMiddleware.cs | 14 +- ...merThrottlingActionConfigurationBuilder.cs | 4 +- .../ConsumerThrottlingConfiguration.cs | 6 +- .../ConsumerThrottlingConfigurationBuilder.cs | 26 +- ...hrottlingConfigurationBuilderExtensions.cs | 14 +- ...erThrottlingActionsConfigurationBuilder.cs | 4 +- ...merThrottlingMetricConfigurationBuilder.cs | 4 +- ...lingThresholdActionConfigurationBuilder.cs | 4 +- .../ConsumerThrottlingDelayAction.cs | 12 +- .../ConsumerThrottlingKafkaLagMetric.cs | 16 +- .../ConsumerThrottlingMiddleware.cs | 38 +-- .../ConsumerThrottlingThreshold.cs | 10 +- .../IConsumerThrottlingAction.cs | 4 +- .../IConsumerThrottlingMetric.cs | 4 +- .../IConsumerThrottlingThreshold.cs | 4 +- .../ConsumerMiddlewareConfigurationBuilder.cs | 12 +- .../ProducerMiddlewareConfigurationBuilder.cs | 12 +- .../DeserializerConsumerMiddleware.cs | 24 +- .../Resolvers/DefaultTypeResolver.cs | 8 +- .../Resolvers/IMessageTypeResolver.cs | 8 +- .../Resolvers/SingleMessageTypeResolver.cs | 12 +- .../SerializerProducerMiddleware.cs | 26 +- .../TypedHandlerConfiguration.cs | 4 +- .../TypedHandlerConfigurationBuilder.cs | 42 +-- .../TypedHandler/HandlerExecutor.cs | 18 +- .../TypedHandler/HandlerTypeMapping.cs | 20 +- .../TypedHandler/TypedHandlerMiddleware.cs | 22 +- .../Producers/BatchProduceException.cs | 6 +- .../Producers/BatchProduceExtension.cs | 8 +- src/KafkaFlow/Producers/BatchProduceItem.cs | 4 +- src/KafkaFlow/Producers/IMessageProducer.cs | 8 +- src/KafkaFlow/Producers/IProducerAccessor.cs | 4 +- src/KafkaFlow/Producers/MessageProducer.cs | 98 +++---- .../Producers/MessageProducerWrapper.cs | 22 +- src/KafkaFlow/Producers/ProducerAccessor.cs | 16 +- src/KafkaFlow/TopicMetadata.cs 
| 4 +- src/KafkaFlow/TopicPartitionMetadata.cs | 4 +- src/StyleCopAnalyzersDefault.ruleset | 18 +- src/stylecop.json | 4 + .../CompressionSerializationTest.cs | 36 +-- .../CompressionTest.cs | 44 +++ .../ConsumerTest.cs | 56 ++-- .../Core/Bootstrapper.cs | 39 ++- .../ErrorExecutingMiddlewareException.cs | 6 +- .../PartitionAssignmentException.cs | 6 +- .../Core/Handlers/AvroMessageHandler.cs | 7 +- .../Handlers/ConfluentJsonMessageHandler.cs | 7 +- .../ConfluentProtobufMessageHandler.cs | 7 +- .../Core/Handlers/MessageHandler.cs | 7 +- .../Core/Handlers/MessageHandler1.cs | 7 +- .../Core/Handlers/MessageHandler2.cs | 9 +- .../Core/Handlers/MessageStorage.cs | 58 ++-- .../Core/Handlers/PauseResumeHandler.cs | 7 +- .../Core/Messages/ITestMessage.cs | 6 +- .../Core/Messages/LogMessages2.cs | 6 +- .../Core/Messages/PauseResumeMessage.cs | 8 +- .../Core/Messages/TestMessage1.cs | 8 +- .../Core/Messages/TestMessage2.cs | 8 +- .../Core/Messages/TestMessage3.cs | 6 +- .../Core/Messages/TestProtoMessage.cs | 0 .../Core/Messages/logmessages2.avsc | 0 .../Core/Middlewares/GzipMiddleware.cs | 8 +- .../Core/Producers/AvroProducer.cs | 0 .../Core/Producers/ConfluentJsonProducer.cs | 0 .../Producers/ConfluentProtobufProducer.cs | 0 .../Core/Producers/GzipProducer.cs | 0 .../Core/Producers/JsonGzipProducer.cs | 0 .../Core/Producers/JsonProducer.cs | 0 .../Core/Producers/JsonProducer2.cs | 0 .../Core/Producers/ProtobufGzipProducer.cs | 0 .../Core/Producers/ProtobufGzipProducer2.cs | 0 .../Core/Producers/ProtobufProducer.cs | 0 .../Core/TraceLogHandler.cs | 8 +- .../GlobalEventsTest.cs | 74 ++--- .../KafkaFlow.IntegrationTests.csproj | 20 +- .../OpenTelemetryTests.cs | 75 +++-- .../ProducerTest.cs | 40 +++ .../SerializationTest.cs | 50 ++-- .../conf/appsettings.json | 0 .../Controllers/ConsumersControllerTests.cs | 154 +++++----- .../Controllers/GroupsControllerTests.cs | 54 ++-- .../Controllers/TelemetryControllerTests.cs | 42 +-- .../BatchConsumeMiddlewareTests.cs | 85 +++--- 
.../CompressorConsumerMiddlewareTests.cs | 58 ++-- .../CompressorProducerMiddlewareTests.cs | 52 ++-- .../ConsumerConfigurationBuilderTests.cs | 62 ++-- .../KafkaConfigurationBuilderTests.cs | 10 +- .../ProducerConfigurationBuilderTests.cs | 62 ++-- .../Consumer/ConsumerManagerTests.cs | 166 +++++++++++ .../Consumer/WorkerPoolFeederTests.cs | 94 +++---- .../DummyObjects/DummyProtobufObject.cs | 0 .../DummyObjects/DummyProtobufObject.proto | 0 .../KafkaFlow.UnitTests/EventTests.cs | 66 ++--- .../KafkaFlow.UnitTests/ExtensionHelpers.cs | 6 +- .../KafkaFlow.UnitTests.csproj | 18 +- .../LogHandlers/MicrosoftLogHandlerTests.cs | 10 +- .../MemoryTelemetryStorageTests.cs | 66 ++--- .../MessageHeadersTests.cs | 18 +- .../ConfluentAvroTypeNameResolverTests.cs | 28 +- .../ConfluentProtobufTypeNameResolverTests.cs | 31 +- .../SchemaRegistryTypeResolverTests.cs | 47 ++++ .../OffsetCommitterTests.cs | 77 +++-- .../KafkaFlow.UnitTests/OffsetManagerTests.cs | 87 ++++++ .../PartitionOffsetsTests.cs | 16 +- .../Serializers/JsonCoreSerializerTests.cs | 26 +- .../NewtonsoftJsonDeserializerTests.cs | 32 +-- .../NewtonsoftJsonSerializerTests.cs | 28 +- .../SerializerConsumerMiddlewareTests.cs | 102 +++---- .../SerializerProducerMiddlewareTests.cs | 66 ++--- .../TypedHandler/HandlerTypeMappingTests.cs | 20 +- 327 files changed, 3917 insertions(+), 3859 deletions(-) create mode 100644 .editorconfig create mode 100644 KafkaFlow.sln delete mode 100644 src/KafkaFlow.IntegrationTests/CompressionTest.cs delete mode 100644 src/KafkaFlow.IntegrationTests/ProducerTest.cs delete mode 100644 src/KafkaFlow.UnitTests/Consumer/ConsumerManagerTests.cs delete mode 100644 src/KafkaFlow.UnitTests/Middlewares/Serialization/SchemaRegistryTypeResolverTests.cs delete mode 100644 src/KafkaFlow.UnitTests/OffsetManagerTests.cs delete mode 100644 src/KafkaFlow.sln rename {src => tests}/KafkaFlow.IntegrationTests/CompressionSerializationTest.cs (52%) create mode 100644 
tests/KafkaFlow.IntegrationTests/CompressionTest.cs rename {src => tests}/KafkaFlow.IntegrationTests/ConsumerTest.cs (70%) rename {src => tests}/KafkaFlow.IntegrationTests/Core/Bootstrapper.cs (95%) rename {src => tests}/KafkaFlow.IntegrationTests/Core/Exceptions/ErrorExecutingMiddlewareException.cs (76%) rename {src => tests}/KafkaFlow.IntegrationTests/Core/Exceptions/PartitionAssignmentException.cs (78%) rename {src => tests}/KafkaFlow.IntegrationTests/Core/Handlers/AvroMessageHandler.cs (75%) rename {src => tests}/KafkaFlow.IntegrationTests/Core/Handlers/ConfluentJsonMessageHandler.cs (70%) rename {src => tests}/KafkaFlow.IntegrationTests/Core/Handlers/ConfluentProtobufMessageHandler.cs (71%) rename {src => tests}/KafkaFlow.IntegrationTests/Core/Handlers/MessageHandler.cs (70%) rename {src => tests}/KafkaFlow.IntegrationTests/Core/Handlers/MessageHandler1.cs (70%) rename {src => tests}/KafkaFlow.IntegrationTests/Core/Handlers/MessageHandler2.cs (70%) rename {src => tests}/KafkaFlow.IntegrationTests/Core/Handlers/MessageStorage.cs (64%) rename {src => tests}/KafkaFlow.IntegrationTests/Core/Handlers/PauseResumeHandler.cs (77%) rename {src => tests}/KafkaFlow.IntegrationTests/Core/Messages/ITestMessage.cs (68%) rename {src => tests}/KafkaFlow.IntegrationTests/Core/Messages/LogMessages2.cs (94%) rename {src => tests}/KafkaFlow.IntegrationTests/Core/Messages/PauseResumeMessage.cs (73%) rename {src => tests}/KafkaFlow.IntegrationTests/Core/Messages/TestMessage1.cs (72%) rename {src => tests}/KafkaFlow.IntegrationTests/Core/Messages/TestMessage2.cs (72%) rename {src => tests}/KafkaFlow.IntegrationTests/Core/Messages/TestMessage3.cs (72%) rename {src => tests}/KafkaFlow.IntegrationTests/Core/Messages/TestProtoMessage.cs (100%) rename {src => tests}/KafkaFlow.IntegrationTests/Core/Messages/logmessages2.avsc (100%) rename {src => tests}/KafkaFlow.IntegrationTests/Core/Middlewares/GzipMiddleware.cs (63%) rename {src => 
tests}/KafkaFlow.IntegrationTests/Core/Producers/AvroProducer.cs (100%) rename {src => tests}/KafkaFlow.IntegrationTests/Core/Producers/ConfluentJsonProducer.cs (100%) rename {src => tests}/KafkaFlow.IntegrationTests/Core/Producers/ConfluentProtobufProducer.cs (100%) rename {src => tests}/KafkaFlow.IntegrationTests/Core/Producers/GzipProducer.cs (100%) rename {src => tests}/KafkaFlow.IntegrationTests/Core/Producers/JsonGzipProducer.cs (100%) rename {src => tests}/KafkaFlow.IntegrationTests/Core/Producers/JsonProducer.cs (100%) rename {src => tests}/KafkaFlow.IntegrationTests/Core/Producers/JsonProducer2.cs (100%) rename {src => tests}/KafkaFlow.IntegrationTests/Core/Producers/ProtobufGzipProducer.cs (100%) rename {src => tests}/KafkaFlow.IntegrationTests/Core/Producers/ProtobufGzipProducer2.cs (100%) rename {src => tests}/KafkaFlow.IntegrationTests/Core/Producers/ProtobufProducer.cs (100%) rename {src => tests}/KafkaFlow.IntegrationTests/Core/TraceLogHandler.cs (94%) rename {src => tests}/KafkaFlow.IntegrationTests/GlobalEventsTest.cs (87%) rename {src => tests}/KafkaFlow.IntegrationTests/KafkaFlow.IntegrationTests.csproj (59%) rename {src => tests}/KafkaFlow.IntegrationTests/OpenTelemetryTests.cs (80%) create mode 100644 tests/KafkaFlow.IntegrationTests/ProducerTest.cs rename {src => tests}/KafkaFlow.IntegrationTests/SerializationTest.cs (59%) rename {src => tests}/KafkaFlow.IntegrationTests/conf/appsettings.json (100%) rename {src => tests}/KafkaFlow.UnitTests/Admin.WebApi/Controllers/ConsumersControllerTests.cs (67%) rename {src => tests}/KafkaFlow.UnitTests/Admin.WebApi/Controllers/GroupsControllerTests.cs (51%) rename {src => tests}/KafkaFlow.UnitTests/Admin.WebApi/Controllers/TelemetryControllerTests.cs (83%) rename {src => tests}/KafkaFlow.UnitTests/BatchConsume/BatchConsumeMiddlewareTests.cs (64%) rename {src => tests}/KafkaFlow.UnitTests/Compressors/CompressorConsumerMiddlewareTests.cs (54%) rename {src => 
tests}/KafkaFlow.UnitTests/Compressors/CompressorProducerMiddlewareTests.cs (62%) rename {src => tests}/KafkaFlow.UnitTests/ConfigurationBuilders/ConsumerConfigurationBuilderTests.cs (72%) rename {src => tests}/KafkaFlow.UnitTests/ConfigurationBuilders/KafkaConfigurationBuilderTests.cs (75%) rename {src => tests}/KafkaFlow.UnitTests/ConfigurationBuilders/ProducerConfigurationBuilderTests.cs (66%) create mode 100644 tests/KafkaFlow.UnitTests/Consumer/ConsumerManagerTests.cs rename {src => tests}/KafkaFlow.UnitTests/Consumer/WorkerPoolFeederTests.cs (67%) rename {src => tests}/KafkaFlow.UnitTests/DummyObjects/DummyProtobufObject.cs (100%) rename {src => tests}/KafkaFlow.UnitTests/DummyObjects/DummyProtobufObject.proto (100%) rename {src => tests}/KafkaFlow.UnitTests/EventTests.cs (73%) rename {src => tests}/KafkaFlow.UnitTests/ExtensionHelpers.cs (88%) rename {src => tests}/KafkaFlow.UnitTests/KafkaFlow.UnitTests.csproj (55%) rename {src => tests}/KafkaFlow.UnitTests/LogHandlers/MicrosoftLogHandlerTests.cs (79%) rename {src => tests}/KafkaFlow.UnitTests/MemoryTelemetryStorageTests.cs (71%) rename {src => tests}/KafkaFlow.UnitTests/MessageHeadersTests.cs (74%) rename {src => tests}/KafkaFlow.UnitTests/Middlewares/Serialization/ConfluentAvroTypeNameResolverTests.cs (52%) rename {src => tests}/KafkaFlow.UnitTests/Middlewares/Serialization/ConfluentProtobufTypeNameResolverTests.cs (50%) create mode 100644 tests/KafkaFlow.UnitTests/Middlewares/Serialization/SchemaRegistryTypeResolverTests.cs rename {src => tests}/KafkaFlow.UnitTests/OffsetCommitterTests.cs (53%) create mode 100644 tests/KafkaFlow.UnitTests/OffsetManagerTests.cs rename {src => tests}/KafkaFlow.UnitTests/PartitionOffsetsTests.cs (95%) rename {src => tests}/KafkaFlow.UnitTests/Serializers/JsonCoreSerializerTests.cs (69%) rename {src => tests}/KafkaFlow.UnitTests/Serializers/NewtonsoftJsonDeserializerTests.cs (54%) rename {src => tests}/KafkaFlow.UnitTests/Serializers/NewtonsoftJsonSerializerTests.cs (56%) 
rename {src => tests}/KafkaFlow.UnitTests/Serializers/SerializerConsumerMiddlewareTests.cs (55%) rename {src => tests}/KafkaFlow.UnitTests/Serializers/SerializerProducerMiddlewareTests.cs (53%) rename {src => tests}/KafkaFlow.UnitTests/TypedHandler/HandlerTypeMappingTests.cs (52%) diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 000000000..2e3356a89 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,95 @@ +# editorconfig.org + +[*.cs] +# Naming rules for fields +dotnet_naming_rule.private_fields_with_underscore.symbols = private_field +dotnet_naming_rule.private_fields_with_underscore.style = prefix_underscore +dotnet_naming_rule.private_fields_with_underscore.severity = warning + +dotnet_naming_symbols.private_field.applicable_kinds = field +dotnet_naming_symbols.private_field.applicable_accessibilities = private +dotnet_naming_symbols.private_field.required_modifiers = + +dotnet_naming_style.prefix_underscore.capitalization = camel_case +dotnet_naming_style.prefix_underscore.required_prefix = _ + +# Naming rules for properties +dotnet_naming_rule.private_properties_with_underscore.symbols = private_property +dotnet_naming_rule.private_properties_with_underscore.style = prefix_underscore +dotnet_naming_rule.private_properties_with_underscore.severity = warning + +dotnet_naming_symbols.private_property.applicable_kinds = property +dotnet_naming_symbols.private_property.applicable_accessibilities = private +dotnet_naming_symbols.private_property.required_modifiers = + +dotnet_naming_style.prefix_underscore.capitalization = camel_case +dotnet_naming_style.prefix_underscore.required_prefix = _ + +# Do not use 'this.' 
for private fields +dotnet_diagnostic.DOTNET_Naming_Style_DoNotUseThisForPrivateFields.severity = warning +dotnet_diagnostic.DOTNET_Naming_Style_DoNotUseThisForPrivateFields.symbols = field_like + +dotnet_naming_rule.style_dotnet_naming_rule_DotNetNamingStyle.DoNotUseThisForPrivateFields.symbols = field_like +dotnet_naming_rule.style_dotnet_naming_rule_DotNetNamingStyle.DoNotUseThisForPrivateFields.style = this_prefix +dotnet_naming_rule.style_dotnet_naming_rule_DotNetNamingStyle.DoNotUseThisForPrivateFields.severity = warning + +# name all constant fields using PascalCase +dotnet_naming_rule.constant_fields_should_be_pascal_case.severity = warning +dotnet_naming_rule.constant_fields_should_be_pascal_case.symbols = constant_fields +dotnet_naming_rule.constant_fields_should_be_pascal_case.style = pascal_case_style + +dotnet_naming_symbols.constant_fields.applicable_kinds = field +dotnet_naming_symbols.constant_fields.required_modifiers = const + +dotnet_naming_style.pascal_case_style.capitalization = pascal_case + +# static fields should have s_ prefix +dotnet_naming_rule.static_fields_should_have_prefix.severity = warning +dotnet_naming_rule.static_fields_should_have_prefix.symbols = static_fields +dotnet_naming_rule.static_fields_should_have_prefix.style = static_prefix_style + +dotnet_naming_symbols.static_fields.applicable_kinds = field +dotnet_naming_symbols.static_fields.required_modifiers = static + +dotnet_naming_symbols.static_fields.applicable_accessibilities = private, internal, private_protected +dotnet_naming_style.static_prefix_style.required_prefix = s_ + +dotnet_naming_style.static_prefix_style.capitalization = camel_case + +csharp_indent_labels = one_less_than_current +csharp_using_directive_placement = outside_namespace:silent +csharp_prefer_simple_using_statement = true:suggestion +csharp_prefer_braces = true:silent +csharp_style_namespace_declarations = block_scoped:silent +csharp_style_prefer_method_group_conversion = true:silent 
+csharp_style_prefer_top_level_statements = true:silent +csharp_style_prefer_primary_constructors = true:suggestion +csharp_style_expression_bodied_methods = false:silent +csharp_style_expression_bodied_constructors = false:silent +csharp_style_expression_bodied_operators = false:silent +csharp_style_expression_bodied_properties = true:silent +csharp_style_expression_bodied_indexers = true:silent +csharp_style_expression_bodied_accessors = true:silent +csharp_style_expression_bodied_lambdas = true:silent +csharp_style_expression_bodied_local_functions = false:silent + +[*.{cs,vb}] +dotnet_style_operator_placement_when_wrapping = beginning_of_line +tab_width = 4 +indent_size = 4 +end_of_line = crlf +dotnet_style_coalesce_expression = true:suggestion +dotnet_style_null_propagation = true:suggestion +dotnet_style_prefer_is_null_check_over_reference_equality_method = true:suggestion +dotnet_style_prefer_auto_properties = true:silent +dotnet_style_object_initializer = true:suggestion +dotnet_style_prefer_collection_expression = true:suggestion +dotnet_style_collection_initializer = true:suggestion +dotnet_style_prefer_simplified_boolean_expressions = true:suggestion +dotnet_style_prefer_conditional_expression_over_assignment = true:silent +dotnet_style_prefer_conditional_expression_over_return = true:silent +dotnet_style_explicit_tuple_names = true:suggestion +dotnet_style_prefer_inferred_tuple_names = true:suggestion +dotnet_style_prefer_inferred_anonymous_type_member_names = true:suggestion +dotnet_style_prefer_compound_assignment = true:suggestion +dotnet_style_prefer_simplified_interpolation = true:suggestion \ No newline at end of file diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index fc0b7a7fa..4d6e09f81 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -47,14 +47,14 @@ jobs: npm run build:prod - name: Build Framework - run: dotnet build ./src/KafkaFlow.sln -c Release + run: dotnet build KafkaFlow.sln -c 
Release - name: UnitTest run: | - dotnet test src/KafkaFlow.UnitTests/KafkaFlow.UnitTests.csproj -c Release --logger "console;verbosity=detailed" + dotnet test tests/KafkaFlow.UnitTests/KafkaFlow.UnitTests.csproj -c Release --logger "console;verbosity=detailed" - name: IntegrationTest run: | make init_broker - dotnet test src/KafkaFlow.IntegrationTests/KafkaFlow.IntegrationTests.csproj -c Release --logger "console;verbosity=detailed" + dotnet test tests/KafkaFlow.IntegrationTests/KafkaFlow.IntegrationTests.csproj -c Release --logger "console;verbosity=detailed" make shutdown_broker diff --git a/.github/workflows/deploy-website.yml b/.github/workflows/deploy-website.yml index 7fa32abe6..90c4a1bad 100644 --- a/.github/workflows/deploy-website.yml +++ b/.github/workflows/deploy-website.yml @@ -25,7 +25,7 @@ jobs: shell: bash - name: .NET Publish - run: dotnet publish src/KafkaFlow.sln -c Release -o ./drop + run: dotnet publish KafkaFlow.sln -c Release -o ./drop - run: dotnet tool list --global shell: bash diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index a78d54c45..e83bf5ab1 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -29,7 +29,7 @@ jobs: npm run build:prod - name: Pack - run: dotnet pack ./src/KafkaFlow.sln -c Release /p:Version=${{ github.event.release.tag_name }} -o ./drop + run: dotnet pack ./KafkaFlow.sln -c Release /p:Version=${{ github.event.release.tag_name }} -o ./drop - name: Publish run: dotnet nuget push ./drop/**/*.nupkg -k ${{ secrets.NUGET_PUBLISH_KEY }} -s https://api.nuget.org/v3/index.json --skip-duplicate diff --git a/.github/workflows/test-deploy-website.yml b/.github/workflows/test-deploy-website.yml index 04584aec9..b622bb796 100644 --- a/.github/workflows/test-deploy-website.yml +++ b/.github/workflows/test-deploy-website.yml @@ -19,7 +19,7 @@ jobs: shell: bash - name: .NET Publish - run: dotnet publish src/KafkaFlow.sln -c Release -o ./drop + run: dotnet publish KafkaFlow.sln 
-c Release -o ./drop - run: dotnet tool list --global shell: bash diff --git a/KafkaFlow.sln b/KafkaFlow.sln new file mode 100644 index 000000000..f376d8ef2 --- /dev/null +++ b/KafkaFlow.sln @@ -0,0 +1,265 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.7.34031.279 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Framework", "Framework", "{068CB250-2804-4C7E-9490-17F432B9CE21}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "tests", "tests", "{7A9B997B-DAAC-4004-94F3-32F6B88E0068}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "samples", "samples", "{303AE78F-6C96-4DF4-AC89-5C4FD53AFF0B}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Serialization", "Serialization", "{ADAAA63C-E17C-4F1B-A062-3CCA071D75C2}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Compression", "Compression", "{0A782A83-B66D-4B99-9BE2-2B18AAD2E03C}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "DependencyResolvers", "DependencyResolvers", "{292BCEDD-55B4-49BB-B8B2-24CD834FF2AA}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "LogHandlers", "LogHandlers", "{EF626895-FDAE-4B28-9110-BA85671CBBF2}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Admin", "Admin", "{58483813-0D7C-423E-8E7D-8FBF3E6CDB6D}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Middlewares", "Middlewares", "{ED24B548-6F37-4283-A35B-F6015BFB7A34}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Deploy", "Deploy", "{4A6A390C-A63A-4371-86BB-28481AD6D4C0}" + ProjectSection(SolutionItems) = preProject + .github\workflows\build.yml = .github\workflows\build.yml + .github\workflows\publish.yml = .github\workflows\publish.yml + EndProjectSection +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Telemetry", "Telemetry", 
"{96F5D441-B8DE-4ABC-BEF2-F758D1B2BA39}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Admin", "src\KafkaFlow.Admin\KafkaFlow.Admin.csproj", "{A30B89BE-418D-47CC-88C0-600AC8B0A5B4}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Admin.Dashboard", "src\KafkaFlow.Admin.Dashboard\KafkaFlow.Admin.Dashboard.csproj", "{0829B33A-5942-4DA7-BBD4-77EF2DF1C39C}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Admin.WebApi", "src\KafkaFlow.Admin.WebApi\KafkaFlow.Admin.WebApi.csproj", "{09D8AD16-45E4-4443-BCF6-7981FF69A3A3}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Microsoft.DependencyInjection", "src\KafkaFlow.Microsoft.DependencyInjection\KafkaFlow.Microsoft.DependencyInjection.csproj", "{9BF4C82B-3E20-450C-BDFF-549575DB231A}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Unity", "src\KafkaFlow.Unity\KafkaFlow.Unity.csproj", "{0F461CEC-CC0D-4557-A4E7-98262CA976AE}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow", "src\KafkaFlow\KafkaFlow.csproj", "{1FF13D3E-803F-402E-BBBE-3688ABCBE58C}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Abstractions", "src\KafkaFlow.Abstractions\KafkaFlow.Abstractions.csproj", "{966B54E9-1ADB-40C3-9D17-8987182834A7}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Extensions.Hosting", "src\KafkaFlow.Extensions.Hosting\KafkaFlow.Extensions.Hosting.csproj", "{2D5469E0-CACB-49AB-A3C2-2758C5A3303A}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.LogHandler.Console", "src\KafkaFlow.LogHandler.Console\KafkaFlow.LogHandler.Console.csproj", "{D05610E6-865A-487E-8FE3-8F64A38098CD}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.LogHandler.Microsoft", "src\KafkaFlow.LogHandler.Microsoft\KafkaFlow.LogHandler.Microsoft.csproj", "{6E1A8664-805C-4513-9D2A-3648A2C0FC0E}" 
+EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Compressor.Gzip", "src\KafkaFlow.Compressor.Gzip\KafkaFlow.Compressor.Gzip.csproj", "{21A3E0A6-94DD-44BA-8707-14D18AA0B037}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.SchemaRegistry", "src\KafkaFlow.SchemaRegistry\KafkaFlow.SchemaRegistry.csproj", "{B2955BD4-8A74-4C00-8559-2357D2C3A48F}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Serializer.JsonCore", "src\KafkaFlow.Serializer.JsonCore\KafkaFlow.Serializer.JsonCore.csproj", "{3AA3BB1D-B1BB-4B30-AF5C-FE554ECC4025}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Serializer.NewtonsoftJson", "src\KafkaFlow.Serializer.NewtonsoftJson\KafkaFlow.Serializer.NewtonsoftJson.csproj", "{CEFFB532-0F13-4BEC-B119-B61CC0C024C3}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Serializer.ProtobufNet", "src\KafkaFlow.Serializer.ProtobufNet\KafkaFlow.Serializer.ProtobufNet.csproj", "{794CDAEA-4EBE-4B39-B42F-D40B2C6C10EF}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Serializer.SchemaRegistry.ConfluentAvro", "src\KafkaFlow.Serializer.SchemaRegistry.ConfluentAvro\KafkaFlow.Serializer.SchemaRegistry.ConfluentAvro.csproj", "{28246082-79FB-468E-91D8-D400C7CBCADB}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Serializer.SchemaRegistry.ConfluentJson", "src\KafkaFlow.Serializer.SchemaRegistry.ConfluentJson\KafkaFlow.Serializer.SchemaRegistry.ConfluentJson.csproj", "{1027A158-B83B-4BB0-A8B3-D387252F5982}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Serializer.SchemaRegistry.ConfluentProtobuf", "src\KafkaFlow.Serializer.SchemaRegistry.ConfluentProtobuf\KafkaFlow.Serializer.SchemaRegistry.ConfluentProtobuf.csproj", "{B1791758-EB49-41F0-BEAB-AC83160E2BEE}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Sample", 
"samples\KafkaFlow.Sample\KafkaFlow.Sample.csproj", "{237C8A3F-F2AF-420C-873E-D3DF504DC7A2}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Sample.BatchOperations", "samples\KafkaFlow.Sample.BatchOperations\KafkaFlow.Sample.BatchOperations.csproj", "{D2A5BB81-28E8-4C72-9E11-379DFCE5C311}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Sample.ConsumerThrottling", "samples\KafkaFlow.Sample.ConsumerThrottling\KafkaFlow.Sample.ConsumerThrottling.csproj", "{FD1DD2C6-A7B5-4BF3-8BD3-DCE9BF1EE2B0}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Sample.Dashboard", "samples\KafkaFlow.Sample.Dashboard\KafkaFlow.Sample.Dashboard.csproj", "{CD3788D9-EAF3-428E-A237-FE24A7D76CC0}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Sample.FlowControl", "samples\KafkaFlow.Sample.FlowControl\KafkaFlow.Sample.FlowControl.csproj", "{BC33650E-E774-48B5-B35D-01B1E04BBC05}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Sample.PauseConsumerOnError", "samples\KafkaFlow.Sample.PauseConsumerOnError\KafkaFlow.Sample.PauseConsumerOnError.csproj", "{7FCF5FC3-F59C-4A89-A511-EF98201181F3}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Sample.SchemaRegistry", "samples\KafkaFlow.Sample.SchemaRegistry\KafkaFlow.Sample.SchemaRegistry.csproj", "{FD052A19-B9B3-4A74-A9C0-6EA0C26A8D1B}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Sample.WebApi", "samples\KafkaFlow.Sample.WebApi\KafkaFlow.Sample.WebApi.csproj", "{5E722C8E-2C81-42DF-B70E-7AC0AEBD1593}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.OpenTelemetry", "src\KafkaFlow.OpenTelemetry\KafkaFlow.OpenTelemetry.csproj", "{0C98213A-A553-40DC-BEA9-137BDE4A7398}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.UnitTests", "tests\KafkaFlow.UnitTests\KafkaFlow.UnitTests.csproj", 
"{1755E8DB-970C-4A24-8B7C-A2BEC1410BEE}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.IntegrationTests", "tests\KafkaFlow.IntegrationTests\KafkaFlow.IntegrationTests.csproj", "{80080C1D-579E-4AB2-935D-5CFFC51843D8}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{A97E7FE2-FFFD-4E23-B004-86BDB9AFAFF0}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{B4704AE9-D85C-472E-93E7-1CB0839364F4}" + ProjectSection(SolutionItems) = preProject + .editorconfig = .editorconfig + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {A30B89BE-418D-47CC-88C0-600AC8B0A5B4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A30B89BE-418D-47CC-88C0-600AC8B0A5B4}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A30B89BE-418D-47CC-88C0-600AC8B0A5B4}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A30B89BE-418D-47CC-88C0-600AC8B0A5B4}.Release|Any CPU.Build.0 = Release|Any CPU + {0829B33A-5942-4DA7-BBD4-77EF2DF1C39C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0829B33A-5942-4DA7-BBD4-77EF2DF1C39C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0829B33A-5942-4DA7-BBD4-77EF2DF1C39C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0829B33A-5942-4DA7-BBD4-77EF2DF1C39C}.Release|Any CPU.Build.0 = Release|Any CPU + {09D8AD16-45E4-4443-BCF6-7981FF69A3A3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {09D8AD16-45E4-4443-BCF6-7981FF69A3A3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {09D8AD16-45E4-4443-BCF6-7981FF69A3A3}.Release|Any CPU.ActiveCfg = Release|Any CPU + {09D8AD16-45E4-4443-BCF6-7981FF69A3A3}.Release|Any CPU.Build.0 = Release|Any CPU + {9BF4C82B-3E20-450C-BDFF-549575DB231A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {9BF4C82B-3E20-450C-BDFF-549575DB231A}.Debug|Any CPU.Build.0 = Debug|Any CPU + 
{9BF4C82B-3E20-450C-BDFF-549575DB231A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {9BF4C82B-3E20-450C-BDFF-549575DB231A}.Release|Any CPU.Build.0 = Release|Any CPU + {0F461CEC-CC0D-4557-A4E7-98262CA976AE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0F461CEC-CC0D-4557-A4E7-98262CA976AE}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0F461CEC-CC0D-4557-A4E7-98262CA976AE}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0F461CEC-CC0D-4557-A4E7-98262CA976AE}.Release|Any CPU.Build.0 = Release|Any CPU + {1FF13D3E-803F-402E-BBBE-3688ABCBE58C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {1FF13D3E-803F-402E-BBBE-3688ABCBE58C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {1FF13D3E-803F-402E-BBBE-3688ABCBE58C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1FF13D3E-803F-402E-BBBE-3688ABCBE58C}.Release|Any CPU.Build.0 = Release|Any CPU + {966B54E9-1ADB-40C3-9D17-8987182834A7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {966B54E9-1ADB-40C3-9D17-8987182834A7}.Debug|Any CPU.Build.0 = Debug|Any CPU + {966B54E9-1ADB-40C3-9D17-8987182834A7}.Release|Any CPU.ActiveCfg = Release|Any CPU + {966B54E9-1ADB-40C3-9D17-8987182834A7}.Release|Any CPU.Build.0 = Release|Any CPU + {2D5469E0-CACB-49AB-A3C2-2758C5A3303A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {2D5469E0-CACB-49AB-A3C2-2758C5A3303A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2D5469E0-CACB-49AB-A3C2-2758C5A3303A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {2D5469E0-CACB-49AB-A3C2-2758C5A3303A}.Release|Any CPU.Build.0 = Release|Any CPU + {D05610E6-865A-487E-8FE3-8F64A38098CD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D05610E6-865A-487E-8FE3-8F64A38098CD}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D05610E6-865A-487E-8FE3-8F64A38098CD}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D05610E6-865A-487E-8FE3-8F64A38098CD}.Release|Any CPU.Build.0 = Release|Any CPU + {6E1A8664-805C-4513-9D2A-3648A2C0FC0E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {6E1A8664-805C-4513-9D2A-3648A2C0FC0E}.Debug|Any CPU.Build.0 = Debug|Any CPU + 
{6E1A8664-805C-4513-9D2A-3648A2C0FC0E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6E1A8664-805C-4513-9D2A-3648A2C0FC0E}.Release|Any CPU.Build.0 = Release|Any CPU + {21A3E0A6-94DD-44BA-8707-14D18AA0B037}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {21A3E0A6-94DD-44BA-8707-14D18AA0B037}.Debug|Any CPU.Build.0 = Debug|Any CPU + {21A3E0A6-94DD-44BA-8707-14D18AA0B037}.Release|Any CPU.ActiveCfg = Release|Any CPU + {21A3E0A6-94DD-44BA-8707-14D18AA0B037}.Release|Any CPU.Build.0 = Release|Any CPU + {B2955BD4-8A74-4C00-8559-2357D2C3A48F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B2955BD4-8A74-4C00-8559-2357D2C3A48F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B2955BD4-8A74-4C00-8559-2357D2C3A48F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B2955BD4-8A74-4C00-8559-2357D2C3A48F}.Release|Any CPU.Build.0 = Release|Any CPU + {3AA3BB1D-B1BB-4B30-AF5C-FE554ECC4025}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {3AA3BB1D-B1BB-4B30-AF5C-FE554ECC4025}.Debug|Any CPU.Build.0 = Debug|Any CPU + {3AA3BB1D-B1BB-4B30-AF5C-FE554ECC4025}.Release|Any CPU.ActiveCfg = Release|Any CPU + {3AA3BB1D-B1BB-4B30-AF5C-FE554ECC4025}.Release|Any CPU.Build.0 = Release|Any CPU + {CEFFB532-0F13-4BEC-B119-B61CC0C024C3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {CEFFB532-0F13-4BEC-B119-B61CC0C024C3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {CEFFB532-0F13-4BEC-B119-B61CC0C024C3}.Release|Any CPU.ActiveCfg = Release|Any CPU + {CEFFB532-0F13-4BEC-B119-B61CC0C024C3}.Release|Any CPU.Build.0 = Release|Any CPU + {794CDAEA-4EBE-4B39-B42F-D40B2C6C10EF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {794CDAEA-4EBE-4B39-B42F-D40B2C6C10EF}.Debug|Any CPU.Build.0 = Debug|Any CPU + {794CDAEA-4EBE-4B39-B42F-D40B2C6C10EF}.Release|Any CPU.ActiveCfg = Release|Any CPU + {794CDAEA-4EBE-4B39-B42F-D40B2C6C10EF}.Release|Any CPU.Build.0 = Release|Any CPU + {28246082-79FB-468E-91D8-D400C7CBCADB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {28246082-79FB-468E-91D8-D400C7CBCADB}.Debug|Any CPU.Build.0 = Debug|Any CPU + 
{28246082-79FB-468E-91D8-D400C7CBCADB}.Release|Any CPU.ActiveCfg = Release|Any CPU + {28246082-79FB-468E-91D8-D400C7CBCADB}.Release|Any CPU.Build.0 = Release|Any CPU + {1027A158-B83B-4BB0-A8B3-D387252F5982}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {1027A158-B83B-4BB0-A8B3-D387252F5982}.Debug|Any CPU.Build.0 = Debug|Any CPU + {1027A158-B83B-4BB0-A8B3-D387252F5982}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1027A158-B83B-4BB0-A8B3-D387252F5982}.Release|Any CPU.Build.0 = Release|Any CPU + {B1791758-EB49-41F0-BEAB-AC83160E2BEE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B1791758-EB49-41F0-BEAB-AC83160E2BEE}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B1791758-EB49-41F0-BEAB-AC83160E2BEE}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B1791758-EB49-41F0-BEAB-AC83160E2BEE}.Release|Any CPU.Build.0 = Release|Any CPU + {237C8A3F-F2AF-420C-873E-D3DF504DC7A2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {237C8A3F-F2AF-420C-873E-D3DF504DC7A2}.Debug|Any CPU.Build.0 = Debug|Any CPU + {237C8A3F-F2AF-420C-873E-D3DF504DC7A2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {237C8A3F-F2AF-420C-873E-D3DF504DC7A2}.Release|Any CPU.Build.0 = Release|Any CPU + {D2A5BB81-28E8-4C72-9E11-379DFCE5C311}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D2A5BB81-28E8-4C72-9E11-379DFCE5C311}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D2A5BB81-28E8-4C72-9E11-379DFCE5C311}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D2A5BB81-28E8-4C72-9E11-379DFCE5C311}.Release|Any CPU.Build.0 = Release|Any CPU + {FD1DD2C6-A7B5-4BF3-8BD3-DCE9BF1EE2B0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {FD1DD2C6-A7B5-4BF3-8BD3-DCE9BF1EE2B0}.Debug|Any CPU.Build.0 = Debug|Any CPU + {FD1DD2C6-A7B5-4BF3-8BD3-DCE9BF1EE2B0}.Release|Any CPU.ActiveCfg = Release|Any CPU + {FD1DD2C6-A7B5-4BF3-8BD3-DCE9BF1EE2B0}.Release|Any CPU.Build.0 = Release|Any CPU + {CD3788D9-EAF3-428E-A237-FE24A7D76CC0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {CD3788D9-EAF3-428E-A237-FE24A7D76CC0}.Debug|Any CPU.Build.0 = Debug|Any CPU + 
{CD3788D9-EAF3-428E-A237-FE24A7D76CC0}.Release|Any CPU.ActiveCfg = Release|Any CPU + {CD3788D9-EAF3-428E-A237-FE24A7D76CC0}.Release|Any CPU.Build.0 = Release|Any CPU + {BC33650E-E774-48B5-B35D-01B1E04BBC05}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {BC33650E-E774-48B5-B35D-01B1E04BBC05}.Debug|Any CPU.Build.0 = Debug|Any CPU + {BC33650E-E774-48B5-B35D-01B1E04BBC05}.Release|Any CPU.ActiveCfg = Release|Any CPU + {BC33650E-E774-48B5-B35D-01B1E04BBC05}.Release|Any CPU.Build.0 = Release|Any CPU + {7FCF5FC3-F59C-4A89-A511-EF98201181F3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7FCF5FC3-F59C-4A89-A511-EF98201181F3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7FCF5FC3-F59C-4A89-A511-EF98201181F3}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7FCF5FC3-F59C-4A89-A511-EF98201181F3}.Release|Any CPU.Build.0 = Release|Any CPU + {FD052A19-B9B3-4A74-A9C0-6EA0C26A8D1B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {FD052A19-B9B3-4A74-A9C0-6EA0C26A8D1B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {FD052A19-B9B3-4A74-A9C0-6EA0C26A8D1B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {FD052A19-B9B3-4A74-A9C0-6EA0C26A8D1B}.Release|Any CPU.Build.0 = Release|Any CPU + {5E722C8E-2C81-42DF-B70E-7AC0AEBD1593}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5E722C8E-2C81-42DF-B70E-7AC0AEBD1593}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5E722C8E-2C81-42DF-B70E-7AC0AEBD1593}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5E722C8E-2C81-42DF-B70E-7AC0AEBD1593}.Release|Any CPU.Build.0 = Release|Any CPU + {0C98213A-A553-40DC-BEA9-137BDE4A7398}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0C98213A-A553-40DC-BEA9-137BDE4A7398}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0C98213A-A553-40DC-BEA9-137BDE4A7398}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0C98213A-A553-40DC-BEA9-137BDE4A7398}.Release|Any CPU.Build.0 = Release|Any CPU + {1755E8DB-970C-4A24-8B7C-A2BEC1410BEE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {1755E8DB-970C-4A24-8B7C-A2BEC1410BEE}.Debug|Any CPU.Build.0 = Debug|Any CPU + 
{1755E8DB-970C-4A24-8B7C-A2BEC1410BEE}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1755E8DB-970C-4A24-8B7C-A2BEC1410BEE}.Release|Any CPU.Build.0 = Release|Any CPU + {80080C1D-579E-4AB2-935D-5CFFC51843D8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {80080C1D-579E-4AB2-935D-5CFFC51843D8}.Debug|Any CPU.Build.0 = Debug|Any CPU + {80080C1D-579E-4AB2-935D-5CFFC51843D8}.Release|Any CPU.ActiveCfg = Release|Any CPU + {80080C1D-579E-4AB2-935D-5CFFC51843D8}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {068CB250-2804-4C7E-9490-17F432B9CE21} = {A97E7FE2-FFFD-4E23-B004-86BDB9AFAFF0} + {ADAAA63C-E17C-4F1B-A062-3CCA071D75C2} = {ED24B548-6F37-4283-A35B-F6015BFB7A34} + {0A782A83-B66D-4B99-9BE2-2B18AAD2E03C} = {ED24B548-6F37-4283-A35B-F6015BFB7A34} + {292BCEDD-55B4-49BB-B8B2-24CD834FF2AA} = {A97E7FE2-FFFD-4E23-B004-86BDB9AFAFF0} + {EF626895-FDAE-4B28-9110-BA85671CBBF2} = {A97E7FE2-FFFD-4E23-B004-86BDB9AFAFF0} + {58483813-0D7C-423E-8E7D-8FBF3E6CDB6D} = {A97E7FE2-FFFD-4E23-B004-86BDB9AFAFF0} + {ED24B548-6F37-4283-A35B-F6015BFB7A34} = {A97E7FE2-FFFD-4E23-B004-86BDB9AFAFF0} + {96F5D441-B8DE-4ABC-BEF2-F758D1B2BA39} = {A97E7FE2-FFFD-4E23-B004-86BDB9AFAFF0} + {A30B89BE-418D-47CC-88C0-600AC8B0A5B4} = {58483813-0D7C-423E-8E7D-8FBF3E6CDB6D} + {0829B33A-5942-4DA7-BBD4-77EF2DF1C39C} = {58483813-0D7C-423E-8E7D-8FBF3E6CDB6D} + {09D8AD16-45E4-4443-BCF6-7981FF69A3A3} = {58483813-0D7C-423E-8E7D-8FBF3E6CDB6D} + {9BF4C82B-3E20-450C-BDFF-549575DB231A} = {292BCEDD-55B4-49BB-B8B2-24CD834FF2AA} + {0F461CEC-CC0D-4557-A4E7-98262CA976AE} = {292BCEDD-55B4-49BB-B8B2-24CD834FF2AA} + {1FF13D3E-803F-402E-BBBE-3688ABCBE58C} = {068CB250-2804-4C7E-9490-17F432B9CE21} + {966B54E9-1ADB-40C3-9D17-8987182834A7} = {068CB250-2804-4C7E-9490-17F432B9CE21} + {2D5469E0-CACB-49AB-A3C2-2758C5A3303A} = {068CB250-2804-4C7E-9490-17F432B9CE21} + 
{D05610E6-865A-487E-8FE3-8F64A38098CD} = {EF626895-FDAE-4B28-9110-BA85671CBBF2} + {6E1A8664-805C-4513-9D2A-3648A2C0FC0E} = {EF626895-FDAE-4B28-9110-BA85671CBBF2} + {21A3E0A6-94DD-44BA-8707-14D18AA0B037} = {0A782A83-B66D-4B99-9BE2-2B18AAD2E03C} + {B2955BD4-8A74-4C00-8559-2357D2C3A48F} = {ADAAA63C-E17C-4F1B-A062-3CCA071D75C2} + {3AA3BB1D-B1BB-4B30-AF5C-FE554ECC4025} = {ADAAA63C-E17C-4F1B-A062-3CCA071D75C2} + {CEFFB532-0F13-4BEC-B119-B61CC0C024C3} = {ADAAA63C-E17C-4F1B-A062-3CCA071D75C2} + {794CDAEA-4EBE-4B39-B42F-D40B2C6C10EF} = {ADAAA63C-E17C-4F1B-A062-3CCA071D75C2} + {28246082-79FB-468E-91D8-D400C7CBCADB} = {ADAAA63C-E17C-4F1B-A062-3CCA071D75C2} + {1027A158-B83B-4BB0-A8B3-D387252F5982} = {ADAAA63C-E17C-4F1B-A062-3CCA071D75C2} + {B1791758-EB49-41F0-BEAB-AC83160E2BEE} = {ADAAA63C-E17C-4F1B-A062-3CCA071D75C2} + {237C8A3F-F2AF-420C-873E-D3DF504DC7A2} = {303AE78F-6C96-4DF4-AC89-5C4FD53AFF0B} + {D2A5BB81-28E8-4C72-9E11-379DFCE5C311} = {303AE78F-6C96-4DF4-AC89-5C4FD53AFF0B} + {FD1DD2C6-A7B5-4BF3-8BD3-DCE9BF1EE2B0} = {303AE78F-6C96-4DF4-AC89-5C4FD53AFF0B} + {CD3788D9-EAF3-428E-A237-FE24A7D76CC0} = {303AE78F-6C96-4DF4-AC89-5C4FD53AFF0B} + {BC33650E-E774-48B5-B35D-01B1E04BBC05} = {303AE78F-6C96-4DF4-AC89-5C4FD53AFF0B} + {7FCF5FC3-F59C-4A89-A511-EF98201181F3} = {303AE78F-6C96-4DF4-AC89-5C4FD53AFF0B} + {FD052A19-B9B3-4A74-A9C0-6EA0C26A8D1B} = {303AE78F-6C96-4DF4-AC89-5C4FD53AFF0B} + {5E722C8E-2C81-42DF-B70E-7AC0AEBD1593} = {303AE78F-6C96-4DF4-AC89-5C4FD53AFF0B} + {0C98213A-A553-40DC-BEA9-137BDE4A7398} = {96F5D441-B8DE-4ABC-BEF2-F758D1B2BA39} + {1755E8DB-970C-4A24-8B7C-A2BEC1410BEE} = {7A9B997B-DAAC-4004-94F3-32F6B88E0068} + {80080C1D-579E-4AB2-935D-5CFFC51843D8} = {7A9B997B-DAAC-4004-94F3-32F6B88E0068} + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {6AE955B5-16B0-41CF-9F12-66D15B3DD1AB} + EndGlobalSection +EndGlobal diff --git a/Makefile b/Makefile index 85210eef3..6f2f5a2b1 100644 --- a/Makefile +++ b/Makefile @@ -11,20 +11,20 @@ 
shutdown_broker: docker-compose -f docker-compose.yml down restore: - dotnet restore src/KafkaFlow.sln + dotnet restore KafkaFlow.sln build: - dotnet build src/KafkaFlow.sln + dotnet build KafkaFlow.sln unit_tests: @echo command | date @echo Running unit tests - dotnet test src/KafkaFlow.UnitTests/KafkaFlow.UnitTests.csproj --framework netcoreapp2.1 --logger "console;verbosity=detailed" - dotnet test src/KafkaFlow.UnitTests/KafkaFlow.UnitTests.csproj --framework netcoreapp3.1 --logger "console;verbosity=detailed" + dotnet test tests/KafkaFlow.UnitTests/KafkaFlow.UnitTests.csproj --framework netcoreapp2.1 --logger "console;verbosity=detailed" + dotnet test tests/KafkaFlow.UnitTests/KafkaFlow.UnitTests.csproj --framework netcoreapp3.1 --logger "console;verbosity=detailed" integration_tests: @echo command | date make init_broker @echo Running integration tests - dotnet test src/KafkaFlow.IntegrationTests/KafkaFlow.IntegrationTests.csproj -c Release --framework netcoreapp3.1 --logger "console;verbosity=detailed" + dotnet test tests/KafkaFlow.IntegrationTests/KafkaFlow.IntegrationTests.csproj -c Release --framework netcoreapp3.1 --logger "console;verbosity=detailed" make shutdown_broker diff --git a/samples/KafkaFlow.Sample.BatchOperations/PrintConsoleMiddleware.cs b/samples/KafkaFlow.Sample.BatchOperations/PrintConsoleMiddleware.cs index 438689b7a..ff6309d39 100644 --- a/samples/KafkaFlow.Sample.BatchOperations/PrintConsoleMiddleware.cs +++ b/samples/KafkaFlow.Sample.BatchOperations/PrintConsoleMiddleware.cs @@ -1,7 +1,6 @@ using System; using System.Linq; using System.Threading.Tasks; -using KafkaFlow.Batching; namespace KafkaFlow.Sample.BatchOperations; @@ -13,7 +12,7 @@ public Task Invoke(IMessageContext context, MiddlewareDelegate next) var text = string.Join( '\n', - batch.Select(ctx => ((SampleBatchMessage) ctx.Message.Value).Text)); + batch.Select(ctx => ((SampleBatchMessage)ctx.Message.Value).Text)); Console.WriteLine(text); diff --git 
a/samples/KafkaFlow.Sample.Dashboard/Startup.cs b/samples/KafkaFlow.Sample.Dashboard/Startup.cs index 289b2bef6..db465fdcc 100644 --- a/samples/KafkaFlow.Sample.Dashboard/Startup.cs +++ b/samples/KafkaFlow.Sample.Dashboard/Startup.cs @@ -1,4 +1,3 @@ -namespace KafkaFlow.Sample.Dashboard; using KafkaFlow.Admin.Dashboard; using Microsoft.AspNetCore.Builder; @@ -6,6 +5,7 @@ namespace KafkaFlow.Sample.Dashboard; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Hosting; +namespace KafkaFlow.Sample.Dashboard; public class Startup { // This method gets called by the runtime. Use this method to add services to the container. diff --git a/samples/KafkaFlow.Sample.PauseConsumerOnError/MessageHandler.cs b/samples/KafkaFlow.Sample.PauseConsumerOnError/MessageHandler.cs index 77b597265..a5248185a 100644 --- a/samples/KafkaFlow.Sample.PauseConsumerOnError/MessageHandler.cs +++ b/samples/KafkaFlow.Sample.PauseConsumerOnError/MessageHandler.cs @@ -1,5 +1,3 @@ -using KafkaFlow.Middlewares.TypedHandler; - namespace KafkaFlow.Sample.PauseConsumerOnError; public class MessageHandler : IMessageHandler diff --git a/samples/KafkaFlow.Sample.PauseConsumerOnError/PauseConsumerOnExceptionMiddleware.cs b/samples/KafkaFlow.Sample.PauseConsumerOnError/PauseConsumerOnExceptionMiddleware.cs index b45b0ec4b..885221775 100644 --- a/samples/KafkaFlow.Sample.PauseConsumerOnError/PauseConsumerOnExceptionMiddleware.cs +++ b/samples/KafkaFlow.Sample.PauseConsumerOnError/PauseConsumerOnExceptionMiddleware.cs @@ -4,13 +4,13 @@ namespace KafkaFlow.Sample.PauseConsumerOnError; public class PauseConsumerOnExceptionMiddleware : IMessageMiddleware { - private readonly IConsumerAccessor consumerAccessor; - private readonly ILogHandler logHandler; + private readonly IConsumerAccessor _consumerAccessor; + private readonly ILogHandler _logHandler; public PauseConsumerOnExceptionMiddleware(IConsumerAccessor consumerAccessor, ILogHandler logHandler) { - this.consumerAccessor = 
consumerAccessor; - this.logHandler = logHandler; + _consumerAccessor = consumerAccessor; + _logHandler = logHandler; } public async Task Invoke(IMessageContext context, MiddlewareDelegate next) @@ -22,7 +22,7 @@ public async Task Invoke(IMessageContext context, MiddlewareDelegate next) catch (Exception exception) { context.ConsumerContext.AutoMessageCompletion = false; - this.logHandler.Error("Error handling message", exception, + _logHandler.Error("Error handling message", exception, new { context.Message, @@ -31,10 +31,10 @@ public async Task Invoke(IMessageContext context, MiddlewareDelegate next) context.ConsumerContext.ConsumerName, }); - var consumer = this.consumerAccessor[context.ConsumerContext.ConsumerName]; + var consumer = _consumerAccessor[context.ConsumerContext.ConsumerName]; consumer.Pause(consumer.Assignment); - this.logHandler.Warning("Consumer stopped", context.ConsumerContext.ConsumerName); + _logHandler.Warning("Consumer stopped", context.ConsumerContext.ConsumerName); } } } \ No newline at end of file diff --git a/samples/KafkaFlow.Sample.SchemaRegistry/Handlers/AvroMessageHandler.cs b/samples/KafkaFlow.Sample.SchemaRegistry/Handlers/AvroMessageHandler.cs index 0893f1225..35338d6d0 100644 --- a/samples/KafkaFlow.Sample.SchemaRegistry/Handlers/AvroMessageHandler.cs +++ b/samples/KafkaFlow.Sample.SchemaRegistry/Handlers/AvroMessageHandler.cs @@ -1,10 +1,9 @@ -namespace KafkaFlow.Sample.SchemaRegistry.Handlers -{ - using System; - using System.Threading.Tasks; - using KafkaFlow.Middlewares.TypedHandler; - using global::SchemaRegistry; +using System; +using System.Threading.Tasks; +using global::SchemaRegistry; +namespace KafkaFlow.Sample.SchemaRegistry.Handlers +{ public class AvroMessageHandler : IMessageHandler { public Task Handle(IMessageContext context, AvroLogMessage message) diff --git a/samples/KafkaFlow.Sample.SchemaRegistry/Handlers/AvroMessageHandler2.cs b/samples/KafkaFlow.Sample.SchemaRegistry/Handlers/AvroMessageHandler2.cs index 
65660f5f3..0f772f0a3 100644 --- a/samples/KafkaFlow.Sample.SchemaRegistry/Handlers/AvroMessageHandler2.cs +++ b/samples/KafkaFlow.Sample.SchemaRegistry/Handlers/AvroMessageHandler2.cs @@ -1,9 +1,8 @@ -namespace KafkaFlow.Sample.SchemaRegistry.Handlers; - -using System; +using System; using System.Threading.Tasks; using global::SchemaRegistry; -using KafkaFlow.Middlewares.TypedHandler; + +namespace KafkaFlow.Sample.SchemaRegistry.Handlers; public class AvroMessageHandler2 : IMessageHandler { diff --git a/samples/KafkaFlow.Sample.SchemaRegistry/Handlers/JsonMessageHandler.cs b/samples/KafkaFlow.Sample.SchemaRegistry/Handlers/JsonMessageHandler.cs index 39b446434..774449a3c 100644 --- a/samples/KafkaFlow.Sample.SchemaRegistry/Handlers/JsonMessageHandler.cs +++ b/samples/KafkaFlow.Sample.SchemaRegistry/Handlers/JsonMessageHandler.cs @@ -1,9 +1,8 @@ -namespace KafkaFlow.Sample.SchemaRegistry.Handlers; - -using System; +using System; using System.Threading.Tasks; using global::SchemaRegistry; -using KafkaFlow.Middlewares.TypedHandler; + +namespace KafkaFlow.Sample.SchemaRegistry.Handlers; public class JsonMessageHandler : IMessageHandler { diff --git a/samples/KafkaFlow.Sample.SchemaRegistry/Handlers/ProtobufMessageHandler.cs b/samples/KafkaFlow.Sample.SchemaRegistry/Handlers/ProtobufMessageHandler.cs index e1158c9c8..768f7c1d5 100644 --- a/samples/KafkaFlow.Sample.SchemaRegistry/Handlers/ProtobufMessageHandler.cs +++ b/samples/KafkaFlow.Sample.SchemaRegistry/Handlers/ProtobufMessageHandler.cs @@ -1,7 +1,5 @@ - -using System; +using System; using System.Threading.Tasks; -using KafkaFlow.Middlewares.TypedHandler; using SchemaRegistry; namespace KafkaFlow.Sample.SchemaRegistry.Handlers; diff --git a/samples/KafkaFlow.Sample.SchemaRegistry/MessageTypes/Avro/SchemaRegistry/AvroLogMessage.cs b/samples/KafkaFlow.Sample.SchemaRegistry/MessageTypes/Avro/SchemaRegistry/AvroLogMessage.cs index 81ecc9a29..cb325ee1a 100644 --- 
a/samples/KafkaFlow.Sample.SchemaRegistry/MessageTypes/Avro/SchemaRegistry/AvroLogMessage.cs +++ b/samples/KafkaFlow.Sample.SchemaRegistry/MessageTypes/Avro/SchemaRegistry/AvroLogMessage.cs @@ -18,8 +18,10 @@ namespace SchemaRegistry /// public partial class AvroLogMessage : ISpecificRecord { - public static Schema _SCHEMA = Avro.Schema.Parse(@"{""type"":""record"",""name"":""AvroLogMessage"",""doc"":""A simple log message."",""namespace"":""SchemaRegistry"",""fields"":[{""name"":""Severity"",""type"":{""type"":""enum"",""name"":""LogLevel"",""doc"":""Enumerates the set of allowable log levels."",""namespace"":""SchemaRegistry"",""symbols"":[""None"",""Verbose"",""Info"",""Warning"",""Error""]}}]}"); - private SchemaRegistry.LogLevel _Severity; + public static Schema _SCHEMA = Avro.Schema.Parse(@"{""type"":""record"",""name"":""AvroLogMessage"",""doc"":""A simple log message."",""namespace"":""SchemaRegistry"",""fields"":[{""name"":""Severity"",""type"":{""type"":""enum"",""name"":""LogLevel"",""doc"":""Enumerates the set of allowable log levels."",""namespace"":""SchemaRegistry"",""symbols"":[""None"",""Verbose"",""Info"",""Warning"",""Error""]}}]}") + ; + private SchemaRegistry.LogLevel _severity; + public virtual Schema Schema { get @@ -31,11 +33,11 @@ public SchemaRegistry.LogLevel Severity { get { - return this._Severity; + return _severity; } set { - this._Severity = value; + _severity = value; } } public virtual object Get(int fieldPos) diff --git a/samples/KafkaFlow.Sample.SchemaRegistry/MessageTypes/Avro/SchemaRegistry/AvroLogMessage2.cs b/samples/KafkaFlow.Sample.SchemaRegistry/MessageTypes/Avro/SchemaRegistry/AvroLogMessage2.cs index 695a07862..e90b8e3f7 100644 --- a/samples/KafkaFlow.Sample.SchemaRegistry/MessageTypes/Avro/SchemaRegistry/AvroLogMessage2.cs +++ b/samples/KafkaFlow.Sample.SchemaRegistry/MessageTypes/Avro/SchemaRegistry/AvroLogMessage2.cs @@ -20,7 +20,9 @@ public partial class AvroLogMessage2 : ISpecificRecord { public static Schema 
_SCHEMA = Avro.Schema.Parse("{\"type\":\"record\",\"name\":\"AvroLogMessage2\",\"doc\":\"A simple log message.\",\"namespac" + "e\":\"SchemaRegistry\",\"fields\":[{\"name\":\"Message\",\"type\":\"string\"}]}"); - private string _Message; + + private string _message; + public virtual Schema Schema { get @@ -32,11 +34,11 @@ public string Message { get { - return this._Message; + return _message; } set { - this._Message = value; + _message = value; } } public virtual object Get(int fieldPos) diff --git a/samples/KafkaFlow.Sample.SchemaRegistry/MessageTypes/Json/JsonLogMessage.cs b/samples/KafkaFlow.Sample.SchemaRegistry/MessageTypes/Json/JsonLogMessage.cs index 886adb834..95d0c1cbe 100644 --- a/samples/KafkaFlow.Sample.SchemaRegistry/MessageTypes/Json/JsonLogMessage.cs +++ b/samples/KafkaFlow.Sample.SchemaRegistry/MessageTypes/Json/JsonLogMessage.cs @@ -1,16 +1,16 @@ +using Newtonsoft.Json; + namespace SchemaRegistry { - using Newtonsoft.Json; + /// + /// A simple log message. + /// + public class JsonLogMessage + { + [JsonProperty] + public string Message { get; set; } - /// - /// A simple log message. 
- /// - public class JsonLogMessage - { - [JsonProperty] - public string Message { get; set; } - - [JsonProperty] - public string Type { get; set; } - } + [JsonProperty] + public string Type { get; set; } + } } diff --git a/samples/KafkaFlow.Sample.SchemaRegistry/Program.cs b/samples/KafkaFlow.Sample.SchemaRegistry/Program.cs index 926879042..ba5b18d80 100644 --- a/samples/KafkaFlow.Sample.SchemaRegistry/Program.cs +++ b/samples/KafkaFlow.Sample.SchemaRegistry/Program.cs @@ -22,7 +22,7 @@ .UseConsoleLog() .AddCluster( cluster => cluster - .WithBrokers(new[] {"localhost:9092"}) + .WithBrokers(new[] { "localhost:9092" }) .WithSchemaRegistry(config => config.Url = "localhost:8081") .CreateTopicIfNotExists(avroTopic, 1, 1) .CreateTopicIfNotExists(jsonTopic, 1, 1) @@ -79,7 +79,7 @@ .AddHandler() .AddHandler()) ) - ) + ) .AddConsumer( consumer => consumer .Topic(jsonTopic) @@ -128,16 +128,16 @@ await Task.WhenAll( producers[avroProducerName].ProduceAsync( Guid.NewGuid().ToString(), - new AvroLogMessage {Severity = LogLevel.Info}), + new AvroLogMessage { Severity = LogLevel.Info }), producers[avroProducerName].ProduceAsync( Guid.NewGuid().ToString(), - new AvroLogMessage2 {Message = Guid.NewGuid().ToString()}), + new AvroLogMessage2 { Message = Guid.NewGuid().ToString() }), producers[jsonProducerName].ProduceAsync( Guid.NewGuid().ToString(), - new JsonLogMessage {Message = Guid.NewGuid().ToString()}), + new JsonLogMessage { Message = Guid.NewGuid().ToString() }), producers[protobufProducerName].ProduceAsync( Guid.NewGuid().ToString(), - new ProtobufLogMessage {Message = Guid.NewGuid().ToString()}) + new ProtobufLogMessage { Message = Guid.NewGuid().ToString() }) ); } diff --git a/samples/KafkaFlow.Sample.WebApi/Program.cs b/samples/KafkaFlow.Sample.WebApi/Program.cs index d50287655..f91ad3a4b 100644 --- a/samples/KafkaFlow.Sample.WebApi/Program.cs +++ b/samples/KafkaFlow.Sample.WebApi/Program.cs @@ -44,4 +44,3 @@ await kafkaBus.StartAsync(); await app.RunAsync(); - \ No 
newline at end of file diff --git a/samples/KafkaFlow.Sample/PrintConsoleHandler.cs b/samples/KafkaFlow.Sample/PrintConsoleHandler.cs index 57a1a48b8..a2341a744 100644 --- a/samples/KafkaFlow.Sample/PrintConsoleHandler.cs +++ b/samples/KafkaFlow.Sample/PrintConsoleHandler.cs @@ -1,6 +1,5 @@ using System; using System.Threading.Tasks; -using KafkaFlow.Middlewares.TypedHandler; namespace KafkaFlow.Sample; diff --git a/samples/KafkaFlow.Sample/TestMessage.cs b/samples/KafkaFlow.Sample/TestMessage.cs index e363575bc..a9b8c5c6d 100644 --- a/samples/KafkaFlow.Sample/TestMessage.cs +++ b/samples/KafkaFlow.Sample/TestMessage.cs @@ -5,6 +5,6 @@ namespace KafkaFlow.Sample; [DataContract] public class TestMessage { - [DataMember(Order = 1)] + [DataMember(Order = 1)] public string Text { get; set; } } \ No newline at end of file diff --git a/src/KafkaFlow.Abstractions/Configuration/IClusterConfigurationBuilder.cs b/src/KafkaFlow.Abstractions/Configuration/IClusterConfigurationBuilder.cs index 41325ef45..c216693de 100644 --- a/src/KafkaFlow.Abstractions/Configuration/IClusterConfigurationBuilder.cs +++ b/src/KafkaFlow.Abstractions/Configuration/IClusterConfigurationBuilder.cs @@ -1,8 +1,8 @@ +using System; +using System.Collections.Generic; + namespace KafkaFlow.Configuration { - using System; - using System.Collections.Generic; - /// /// A interface to build the cluster configuration /// diff --git a/src/KafkaFlow.Abstractions/Configuration/IConsumerConfigurationBuilder.cs b/src/KafkaFlow.Abstractions/Configuration/IConsumerConfigurationBuilder.cs index b7cc3c798..1c9d5c198 100644 --- a/src/KafkaFlow.Abstractions/Configuration/IConsumerConfigurationBuilder.cs +++ b/src/KafkaFlow.Abstractions/Configuration/IConsumerConfigurationBuilder.cs @@ -1,9 +1,9 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks; + namespace KafkaFlow.Configuration { - using System; - using System.Collections.Generic; - using System.Threading.Tasks; - /// /// Used to build 
the consumer configuration /// @@ -191,4 +191,4 @@ IConsumerConfigurationBuilder WithWorkerDistributionStrategy() /// IConsumerConfigurationBuilder WithStatisticsIntervalMs(int statisticsIntervalMs); } -} \ No newline at end of file +} diff --git a/src/KafkaFlow.Abstractions/Configuration/IKafkaConfigurationBuilder.cs b/src/KafkaFlow.Abstractions/Configuration/IKafkaConfigurationBuilder.cs index 1b26034b0..828e20df2 100644 --- a/src/KafkaFlow.Abstractions/Configuration/IKafkaConfigurationBuilder.cs +++ b/src/KafkaFlow.Abstractions/Configuration/IKafkaConfigurationBuilder.cs @@ -1,7 +1,7 @@ +using System; + namespace KafkaFlow.Configuration { - using System; - /// /// A builder to configure KafkaFlow /// diff --git a/src/KafkaFlow.Abstractions/Configuration/IProducerConfigurationBuilder.cs b/src/KafkaFlow.Abstractions/Configuration/IProducerConfigurationBuilder.cs index a5293b67a..7e2a8f64c 100644 --- a/src/KafkaFlow.Abstractions/Configuration/IProducerConfigurationBuilder.cs +++ b/src/KafkaFlow.Abstractions/Configuration/IProducerConfigurationBuilder.cs @@ -1,7 +1,7 @@ +using System; + namespace KafkaFlow.Configuration { - using System; - /// /// Used to build the producer configuration /// diff --git a/src/KafkaFlow.Abstractions/Configuration/TopicPartitions.cs b/src/KafkaFlow.Abstractions/Configuration/TopicPartitions.cs index 4dd58b2e2..b8c3b4939 100644 --- a/src/KafkaFlow.Abstractions/Configuration/TopicPartitions.cs +++ b/src/KafkaFlow.Abstractions/Configuration/TopicPartitions.cs @@ -1,7 +1,7 @@ +using System.Collections.Generic; + namespace KafkaFlow.Configuration { - using System.Collections.Generic; - public class TopicPartitions { public TopicPartitions(string name, IEnumerable partitions) diff --git a/src/KafkaFlow.Abstractions/Configuration/WorkersCountContext.cs b/src/KafkaFlow.Abstractions/Configuration/WorkersCountContext.cs index a54a4b82f..800587f1c 100644 --- a/src/KafkaFlow.Abstractions/Configuration/WorkersCountContext.cs +++ 
b/src/KafkaFlow.Abstractions/Configuration/WorkersCountContext.cs @@ -1,7 +1,7 @@ +using System.Collections.Generic; + namespace KafkaFlow.Configuration { - using System.Collections.Generic; - /// /// A metadata class with some context information help to calculate the number of workers /// diff --git a/src/KafkaFlow.Abstractions/Delegates.cs b/src/KafkaFlow.Abstractions/Delegates.cs index fd7edde9e..f9fb9710d 100644 --- a/src/KafkaFlow.Abstractions/Delegates.cs +++ b/src/KafkaFlow.Abstractions/Delegates.cs @@ -1,7 +1,7 @@ +using System.Threading.Tasks; + namespace KafkaFlow { - using System.Threading.Tasks; - /// /// The delegate used to call the next middleware /// diff --git a/src/KafkaFlow.Abstractions/Extensions/DependencyConfiguratorExtensions.cs b/src/KafkaFlow.Abstractions/Extensions/DependencyConfiguratorExtensions.cs index f2021fed2..c316a28b9 100644 --- a/src/KafkaFlow.Abstractions/Extensions/DependencyConfiguratorExtensions.cs +++ b/src/KafkaFlow.Abstractions/Extensions/DependencyConfiguratorExtensions.cs @@ -1,7 +1,7 @@ +using System; + namespace KafkaFlow { - using System; - /// /// Provides extension methods over /// diff --git a/src/KafkaFlow.Abstractions/Extensions/DependencyResolverExtensions.cs b/src/KafkaFlow.Abstractions/Extensions/DependencyResolverExtensions.cs index 50a1467a7..37d716ff5 100644 --- a/src/KafkaFlow.Abstractions/Extensions/DependencyResolverExtensions.cs +++ b/src/KafkaFlow.Abstractions/Extensions/DependencyResolverExtensions.cs @@ -11,6 +11,6 @@ public static class DependencyResolverExtensions /// Instance of /// The type to be resolved /// - public static T Resolve(this IDependencyResolver resolver) => (T) resolver.Resolve(typeof(T)); + public static T Resolve(this IDependencyResolver resolver) => (T)resolver.Resolve(typeof(T)); } } diff --git a/src/KafkaFlow.Abstractions/Extensions/DictionaryExtensions.cs b/src/KafkaFlow.Abstractions/Extensions/DictionaryExtensions.cs index 029649868..7dd803ed7 100644 --- 
a/src/KafkaFlow.Abstractions/Extensions/DictionaryExtensions.cs +++ b/src/KafkaFlow.Abstractions/Extensions/DictionaryExtensions.cs @@ -1,8 +1,8 @@ +using System; +using System.Collections.Generic; + namespace KafkaFlow { - using System; - using System.Collections.Generic; - /// /// No needed /// diff --git a/src/KafkaFlow.Abstractions/Extensions/MessageHeaderExtensions.cs b/src/KafkaFlow.Abstractions/Extensions/MessageHeaderExtensions.cs index 7f25f2e62..169bfa707 100644 --- a/src/KafkaFlow.Abstractions/Extensions/MessageHeaderExtensions.cs +++ b/src/KafkaFlow.Abstractions/Extensions/MessageHeaderExtensions.cs @@ -1,7 +1,7 @@ +using System.Text; + namespace KafkaFlow { - using System.Text; - /// /// Provides extension methods over /// diff --git a/src/KafkaFlow.Abstractions/IConsumerContext.cs b/src/KafkaFlow.Abstractions/IConsumerContext.cs index b185d9c2b..fbc7ed656 100644 --- a/src/KafkaFlow.Abstractions/IConsumerContext.cs +++ b/src/KafkaFlow.Abstractions/IConsumerContext.cs @@ -1,9 +1,9 @@ +using System; +using System.Threading; +using System.Threading.Tasks; + namespace KafkaFlow { - using System; - using System.Threading; - using System.Threading.Tasks; - /// /// Represents the message consumer /// diff --git a/src/KafkaFlow.Abstractions/IDateTimeProvider.cs b/src/KafkaFlow.Abstractions/IDateTimeProvider.cs index b235eeb4a..73b28d39b 100644 --- a/src/KafkaFlow.Abstractions/IDateTimeProvider.cs +++ b/src/KafkaFlow.Abstractions/IDateTimeProvider.cs @@ -1,7 +1,7 @@ +using System; + namespace KafkaFlow { - using System; - /// /// Provides access to DateTime static members /// diff --git a/src/KafkaFlow.Abstractions/IDependencyConfigurator.cs b/src/KafkaFlow.Abstractions/IDependencyConfigurator.cs index 5f8212664..47d52ce46 100644 --- a/src/KafkaFlow.Abstractions/IDependencyConfigurator.cs +++ b/src/KafkaFlow.Abstractions/IDependencyConfigurator.cs @@ -1,7 +1,7 @@ +using System; + namespace KafkaFlow { - using System; - /// /// Represents the interface to be 
implemented by custom dependency configurator /// diff --git a/src/KafkaFlow.Abstractions/IDependencyResolver.cs b/src/KafkaFlow.Abstractions/IDependencyResolver.cs index 597afff92..92457ae9c 100644 --- a/src/KafkaFlow.Abstractions/IDependencyResolver.cs +++ b/src/KafkaFlow.Abstractions/IDependencyResolver.cs @@ -1,8 +1,8 @@ +using System; +using System.Collections.Generic; + namespace KafkaFlow { - using System; - using System.Collections.Generic; - /// /// Represents the interface of a dependency injection resolver /// diff --git a/src/KafkaFlow.Abstractions/IDependencyResolverScope.cs b/src/KafkaFlow.Abstractions/IDependencyResolverScope.cs index 74c932245..08462f942 100644 --- a/src/KafkaFlow.Abstractions/IDependencyResolverScope.cs +++ b/src/KafkaFlow.Abstractions/IDependencyResolverScope.cs @@ -1,7 +1,7 @@ +using System; + namespace KafkaFlow { - using System; - /// /// Represents the interface of a dependency injection resolver scope /// diff --git a/src/KafkaFlow.Abstractions/IDeserializer.cs b/src/KafkaFlow.Abstractions/IDeserializer.cs index 0ecfd2c86..fbf10eea3 100644 --- a/src/KafkaFlow.Abstractions/IDeserializer.cs +++ b/src/KafkaFlow.Abstractions/IDeserializer.cs @@ -1,9 +1,9 @@ +using System; +using System.IO; +using System.Threading.Tasks; + namespace KafkaFlow { - using System; - using System.IO; - using System.Threading.Tasks; - /// /// Used to implement a message serializer /// diff --git a/src/KafkaFlow.Abstractions/IEvent.cs b/src/KafkaFlow.Abstractions/IEvent.cs index f9a055b4f..176c33f29 100644 --- a/src/KafkaFlow.Abstractions/IEvent.cs +++ b/src/KafkaFlow.Abstractions/IEvent.cs @@ -1,8 +1,8 @@ -namespace KafkaFlow -{ - using System; - using System.Threading.Tasks; +using System; +using System.Threading.Tasks; +namespace KafkaFlow +{ /// /// Represents an Event to be subscribed. 
/// @@ -29,4 +29,4 @@ public interface IEvent /// Event subscription reference IEventSubscription Subscribe(Func handler); } -} \ No newline at end of file +} diff --git a/src/KafkaFlow.Abstractions/ILogHandler.cs b/src/KafkaFlow.Abstractions/ILogHandler.cs index b426491ee..70afdd3c2 100644 --- a/src/KafkaFlow.Abstractions/ILogHandler.cs +++ b/src/KafkaFlow.Abstractions/ILogHandler.cs @@ -1,7 +1,7 @@ +using System; + namespace KafkaFlow { - using System; - /// /// An interface used to create a log handler /// diff --git a/src/KafkaFlow.Abstractions/IMessageContext.cs b/src/KafkaFlow.Abstractions/IMessageContext.cs index 8b39dba96..2011e1b71 100644 --- a/src/KafkaFlow.Abstractions/IMessageContext.cs +++ b/src/KafkaFlow.Abstractions/IMessageContext.cs @@ -1,7 +1,7 @@ +using System.Collections.Generic; + namespace KafkaFlow { - using System.Collections.Generic; - /// /// A context that contains the message and metadata /// diff --git a/src/KafkaFlow.Abstractions/IMessageHandler.cs b/src/KafkaFlow.Abstractions/IMessageHandler.cs index 611ba91f6..329784013 100644 --- a/src/KafkaFlow.Abstractions/IMessageHandler.cs +++ b/src/KafkaFlow.Abstractions/IMessageHandler.cs @@ -1,7 +1,7 @@ +using System.Threading.Tasks; + namespace KafkaFlow { - using System.Threading.Tasks; - /// /// Used to create a message handler /// diff --git a/src/KafkaFlow.Abstractions/IMessageHeaders.cs b/src/KafkaFlow.Abstractions/IMessageHeaders.cs index 25067ca04..8424c6f9b 100644 --- a/src/KafkaFlow.Abstractions/IMessageHeaders.cs +++ b/src/KafkaFlow.Abstractions/IMessageHeaders.cs @@ -1,7 +1,7 @@ +using System.Collections.Generic; + namespace KafkaFlow { - using System.Collections.Generic; - /// /// Represents a collection of message headers /// diff --git a/src/KafkaFlow.Abstractions/IMessageMiddleware.cs b/src/KafkaFlow.Abstractions/IMessageMiddleware.cs index 1f5891970..92633e265 100644 --- a/src/KafkaFlow.Abstractions/IMessageMiddleware.cs +++ b/src/KafkaFlow.Abstractions/IMessageMiddleware.cs 
@@ -1,7 +1,7 @@ +using System.Threading.Tasks; + namespace KafkaFlow { - using System.Threading.Tasks; - /// /// Used to create a message middleware /// diff --git a/src/KafkaFlow.Abstractions/ISerializer.cs b/src/KafkaFlow.Abstractions/ISerializer.cs index 23cd9b73a..42f026a44 100644 --- a/src/KafkaFlow.Abstractions/ISerializer.cs +++ b/src/KafkaFlow.Abstractions/ISerializer.cs @@ -1,8 +1,8 @@ +using System.IO; +using System.Threading.Tasks; + namespace KafkaFlow { - using System.IO; - using System.Threading.Tasks; - /// /// Used to implement a message serializer /// diff --git a/src/KafkaFlow.Abstractions/IWorkerDistributionStrategy.cs b/src/KafkaFlow.Abstractions/IWorkerDistributionStrategy.cs index c883b2f7a..cb4af966f 100644 --- a/src/KafkaFlow.Abstractions/IWorkerDistributionStrategy.cs +++ b/src/KafkaFlow.Abstractions/IWorkerDistributionStrategy.cs @@ -1,8 +1,8 @@ -namespace KafkaFlow; - using System.Collections.Generic; using System.Threading.Tasks; +namespace KafkaFlow; + /// /// An interface used to create a distribution strategy /// diff --git a/src/KafkaFlow.Abstractions/MessageErrorEventContext.cs b/src/KafkaFlow.Abstractions/MessageErrorEventContext.cs index fb5f38f2a..57229a862 100644 --- a/src/KafkaFlow.Abstractions/MessageErrorEventContext.cs +++ b/src/KafkaFlow.Abstractions/MessageErrorEventContext.cs @@ -1,7 +1,7 @@ -namespace KafkaFlow -{ - using System; +using System; +namespace KafkaFlow +{ /// /// Represents the errors in message context used in the events /// diff --git a/src/KafkaFlow.Abstractions/NullLogHandler.cs b/src/KafkaFlow.Abstractions/NullLogHandler.cs index 45da09d88..e7a8a496f 100644 --- a/src/KafkaFlow.Abstractions/NullLogHandler.cs +++ b/src/KafkaFlow.Abstractions/NullLogHandler.cs @@ -1,7 +1,7 @@ +using System; + namespace KafkaFlow { - using System; - /// /// A log handler that does nothing /// diff --git a/src/KafkaFlow.Abstractions/WorkerDistributionContext.cs b/src/KafkaFlow.Abstractions/WorkerDistributionContext.cs index 
43c3ce04d..d5ce00c59 100644 --- a/src/KafkaFlow.Abstractions/WorkerDistributionContext.cs +++ b/src/KafkaFlow.Abstractions/WorkerDistributionContext.cs @@ -1,8 +1,8 @@ -namespace KafkaFlow; - using System; using System.Threading; +namespace KafkaFlow; + /// /// Represents a strategy context for distributing workers based on specific message and consumer details. /// diff --git a/src/KafkaFlow.Admin.Dashboard/ApplicationBuilderExtensions.cs b/src/KafkaFlow.Admin.Dashboard/ApplicationBuilderExtensions.cs index f525b9a1a..8181c8e96 100644 --- a/src/KafkaFlow.Admin.Dashboard/ApplicationBuilderExtensions.cs +++ b/src/KafkaFlow.Admin.Dashboard/ApplicationBuilderExtensions.cs @@ -1,15 +1,15 @@ +using System; +using System.Globalization; +using System.Reflection; +using Microsoft.AspNetCore.Builder; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.FileProviders; +using Newtonsoft.Json; +using Newtonsoft.Json.Serialization; + namespace KafkaFlow.Admin.Dashboard { - using System; - using System.Globalization; - using System.Reflection; - using Microsoft.AspNetCore.Builder; - using Microsoft.AspNetCore.Http; - using Microsoft.Extensions.DependencyInjection; - using Microsoft.Extensions.FileProviders; - using Newtonsoft.Json; - using Newtonsoft.Json.Serialization; - /// /// Extension methods over IApplicationBuilder /// diff --git a/src/KafkaFlow.Admin.Dashboard/DashboardConfiguration.cs b/src/KafkaFlow.Admin.Dashboard/DashboardConfiguration.cs index 53f455329..cc6489e9a 100644 --- a/src/KafkaFlow.Admin.Dashboard/DashboardConfiguration.cs +++ b/src/KafkaFlow.Admin.Dashboard/DashboardConfiguration.cs @@ -1,9 +1,9 @@ +using System; +using Microsoft.AspNetCore.Builder; +using Microsoft.AspNetCore.Http; + namespace KafkaFlow.Admin.Dashboard { - using System; - using Microsoft.AspNetCore.Builder; - using Microsoft.AspNetCore.Http; - internal class DashboardConfiguration { public DashboardConfiguration( diff --git 
a/src/KafkaFlow.Admin.Dashboard/DashboardConfigurationBuilder.cs b/src/KafkaFlow.Admin.Dashboard/DashboardConfigurationBuilder.cs index 4c0e1e61f..f72a65624 100644 --- a/src/KafkaFlow.Admin.Dashboard/DashboardConfigurationBuilder.cs +++ b/src/KafkaFlow.Admin.Dashboard/DashboardConfigurationBuilder.cs @@ -1,31 +1,31 @@ +using System; +using Microsoft.AspNetCore.Builder; +using Microsoft.AspNetCore.Http; + namespace KafkaFlow.Admin.Dashboard { - using System; - using Microsoft.AspNetCore.Builder; - using Microsoft.AspNetCore.Http; - internal class DashboardConfigurationBuilder : IDashboardConfigurationBuilder { - private readonly PathString basePath = "/kafkaflow"; + private readonly PathString _basePath = "/kafkaflow"; - private Action requestHandler = _ => { }; - private Action endpointHandler = _ => { }; + private Action _requestHandler = _ => { }; + private Action _endpointHandler = _ => { }; public IDashboardConfigurationBuilder ConfigureRequestPipeline(Action requestHandler) { - this.requestHandler = requestHandler; + _requestHandler = requestHandler; return this; } public IDashboardConfigurationBuilder ConfigureEndpoint(Action endpointHandler) { - this.endpointHandler = endpointHandler; + _endpointHandler = endpointHandler; return this; } public DashboardConfiguration Build() { - return new(this.basePath, this.requestHandler, this.endpointHandler); + return new(_basePath, _requestHandler, _endpointHandler); } } } diff --git a/src/KafkaFlow.Admin.Dashboard/IDashboardConfigurationBuilder.cs b/src/KafkaFlow.Admin.Dashboard/IDashboardConfigurationBuilder.cs index ea0b82c7c..8ab61c8ef 100644 --- a/src/KafkaFlow.Admin.Dashboard/IDashboardConfigurationBuilder.cs +++ b/src/KafkaFlow.Admin.Dashboard/IDashboardConfigurationBuilder.cs @@ -1,8 +1,8 @@ +using System; +using Microsoft.AspNetCore.Builder; + namespace KafkaFlow.Admin.Dashboard { - using System; - using Microsoft.AspNetCore.Builder; - /// /// Used to build the dashboard configuration /// diff --git 
a/src/KafkaFlow.Admin.Dashboard/KafkaFlow.Admin.Dashboard.csproj b/src/KafkaFlow.Admin.Dashboard/KafkaFlow.Admin.Dashboard.csproj index 8c6e2431a..c7ae07915 100644 --- a/src/KafkaFlow.Admin.Dashboard/KafkaFlow.Admin.Dashboard.csproj +++ b/src/KafkaFlow.Admin.Dashboard/KafkaFlow.Admin.Dashboard.csproj @@ -39,5 +39,5 @@ - + diff --git a/src/KafkaFlow.Admin.Dashboard/TelemetryResponse.cs b/src/KafkaFlow.Admin.Dashboard/TelemetryResponse.cs index 774a88c06..75a4705bd 100644 --- a/src/KafkaFlow.Admin.Dashboard/TelemetryResponse.cs +++ b/src/KafkaFlow.Admin.Dashboard/TelemetryResponse.cs @@ -1,10 +1,10 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel.DataAnnotations; +using Newtonsoft.Json; + namespace KafkaFlow.Admin.Dashboard { - using System; - using System.Collections.Generic; - using System.ComponentModel.DataAnnotations; - using Newtonsoft.Json; - /// /// The response of telemetry data /// diff --git a/src/KafkaFlow.Admin.Dashboard/TelemetryResponseAdapter.cs b/src/KafkaFlow.Admin.Dashboard/TelemetryResponseAdapter.cs index d86045671..21c2f934f 100644 --- a/src/KafkaFlow.Admin.Dashboard/TelemetryResponseAdapter.cs +++ b/src/KafkaFlow.Admin.Dashboard/TelemetryResponseAdapter.cs @@ -1,9 +1,9 @@ +using System.Collections.Generic; +using System.Linq; +using KafkaFlow.Admin.Messages; + namespace KafkaFlow.Admin.Dashboard { - using System.Collections.Generic; - using System.Linq; - using KafkaFlow.Admin.Messages; - internal static class TelemetryResponseAdapter { internal static TelemetryResponse Adapt(this IEnumerable metrics) diff --git a/src/KafkaFlow.Admin.WebApi/Adapters/ConsumerResponseAdapter.cs b/src/KafkaFlow.Admin.WebApi/Adapters/ConsumerResponseAdapter.cs index 1bef549f9..68b798bdc 100644 --- a/src/KafkaFlow.Admin.WebApi/Adapters/ConsumerResponseAdapter.cs +++ b/src/KafkaFlow.Admin.WebApi/Adapters/ConsumerResponseAdapter.cs @@ -1,8 +1,8 @@ +using KafkaFlow.Admin.WebApi.Contracts; +using KafkaFlow.Consumers; + namespace 
KafkaFlow.Admin.WebApi.Adapters { - using KafkaFlow.Admin.WebApi.Contracts; - using KafkaFlow.Consumers; - internal static class ConsumerResponseAdapter { internal static ConsumerResponse Adapt(this IMessageConsumer consumer) diff --git a/src/KafkaFlow.Admin.WebApi/Adapters/TelemetryResponseAdapter.cs b/src/KafkaFlow.Admin.WebApi/Adapters/TelemetryResponseAdapter.cs index a4abd4720..1b6493038 100644 --- a/src/KafkaFlow.Admin.WebApi/Adapters/TelemetryResponseAdapter.cs +++ b/src/KafkaFlow.Admin.WebApi/Adapters/TelemetryResponseAdapter.cs @@ -1,10 +1,10 @@ +using System.Collections.Generic; +using System.Linq; +using KafkaFlow.Admin.Messages; +using KafkaFlow.Admin.WebApi.Contracts; + namespace KafkaFlow.Admin.WebApi.Adapters { - using System.Collections.Generic; - using System.Linq; - using KafkaFlow.Admin.Messages; - using KafkaFlow.Admin.WebApi.Contracts; - internal static class TelemetryResponseAdapter { internal static TelemetryResponse Adapt(this IEnumerable metrics) diff --git a/src/KafkaFlow.Admin.WebApi/Contracts/ConsumerResponse.cs b/src/KafkaFlow.Admin.WebApi/Contracts/ConsumerResponse.cs index c3c6cf6cb..ea7587cf8 100644 --- a/src/KafkaFlow.Admin.WebApi/Contracts/ConsumerResponse.cs +++ b/src/KafkaFlow.Admin.WebApi/Contracts/ConsumerResponse.cs @@ -1,69 +1,69 @@ +using System.Collections.Generic; +using System.ComponentModel.DataAnnotations; +using Newtonsoft.Json; + namespace KafkaFlow.Admin.WebApi.Contracts { - using System.Collections.Generic; - using System.ComponentModel.DataAnnotations; - using Newtonsoft.Json; - /// /// The response of the consumers /// public class ConsumerResponse { - /// - /// Gets or sets the consumer´s name - /// - [Required] - [JsonProperty(Required = Required.DisallowNull)] - public string ConsumerName { get; set; } + /// + /// Gets or sets the consumer´s name + /// + [Required] + [JsonProperty(Required = Required.DisallowNull)] + public string ConsumerName { get; set; } - /// - /// Gets or sets a value indicating whether 
the consumer is able to be manageable or not - /// - public bool ManagementDisabled { get; set; } + /// + /// Gets or sets a value indicating whether the consumer is able to be manageable or not + /// + public bool ManagementDisabled { get; set; } - /// - /// Gets or sets the group id - /// - [Required] - [JsonProperty(Required = Required.DisallowNull)] - public string GroupId { get; set; } + /// + /// Gets or sets the group id + /// + [Required] + [JsonProperty(Required = Required.DisallowNull)] + public string GroupId { get; set; } - /// - /// Gets or sets the current number of workers allocated by the consumer - /// - public int WorkersCount { get; set; } + /// + /// Gets or sets the current number of workers allocated by the consumer + /// + public int WorkersCount { get; set; } - /// - /// Gets or sets the current topics subscription - /// - public IEnumerable Subscription { get; set; } + /// + /// Gets or sets the current topics subscription + /// + public IEnumerable Subscription { get; set; } - /// - /// Gets or sets the (dynamic) group member id of this consumer (as set by the broker). - /// - [Required] - [JsonProperty(Required = Required.DisallowNull)] - public string MemberId { get; set; } + /// + /// Gets or sets the (dynamic) group member id of this consumer (as set by the broker). + /// + [Required] + [JsonProperty(Required = Required.DisallowNull)] + public string MemberId { get; set; } - /// - /// Gets or sets the name of this client instance. - /// Contains (but is not equal to) the client.id configuration parameter. - /// - /// - /// This name will be unique across all client - /// instances in a given application which allows - /// log messages to be associated with the - /// corresponding instance. - /// - [Required] - [JsonProperty(Required = Required.DisallowNull)] - public string ClientInstanceName { get; set; } + /// + /// Gets or sets the name of this client instance. 
+ /// Contains (but is not equal to) the client.id configuration parameter. + /// + /// + /// This name will be unique across all client + /// instances in a given application which allows + /// log messages to be associated with the + /// corresponding instance. + /// + [Required] + [JsonProperty(Required = Required.DisallowNull)] + public string ClientInstanceName { get; set; } - /// - /// Gets or sets the current consumer status - /// - [Required] - [JsonProperty(Required = Required.DisallowNull)] - public string Status { get; set; } + /// + /// Gets or sets the current consumer status + /// + [Required] + [JsonProperty(Required = Required.DisallowNull)] + public string Status { get; set; } } } diff --git a/src/KafkaFlow.Admin.WebApi/Contracts/ConsumersResponse.cs b/src/KafkaFlow.Admin.WebApi/Contracts/ConsumersResponse.cs index 2ac630bf1..ab7ef5c51 100644 --- a/src/KafkaFlow.Admin.WebApi/Contracts/ConsumersResponse.cs +++ b/src/KafkaFlow.Admin.WebApi/Contracts/ConsumersResponse.cs @@ -1,7 +1,7 @@ +using System.Collections.Generic; + namespace KafkaFlow.Admin.WebApi.Contracts { - using System.Collections.Generic; - /// /// The response of the consumers /// diff --git a/src/KafkaFlow.Admin.WebApi/Contracts/GroupResponse.cs b/src/KafkaFlow.Admin.WebApi/Contracts/GroupResponse.cs index 4669f2fc4..461d80009 100644 --- a/src/KafkaFlow.Admin.WebApi/Contracts/GroupResponse.cs +++ b/src/KafkaFlow.Admin.WebApi/Contracts/GroupResponse.cs @@ -1,9 +1,9 @@ +using System.Collections.Generic; +using System.ComponentModel.DataAnnotations; +using Newtonsoft.Json; + namespace KafkaFlow.Admin.WebApi.Contracts { - using System.Collections.Generic; - using System.ComponentModel.DataAnnotations; - using Newtonsoft.Json; - /// /// The response of the consumer group /// diff --git a/src/KafkaFlow.Admin.WebApi/Contracts/GroupsResponse.cs b/src/KafkaFlow.Admin.WebApi/Contracts/GroupsResponse.cs index 79e97c03c..875d03dfa 100644 --- a/src/KafkaFlow.Admin.WebApi/Contracts/GroupsResponse.cs 
+++ b/src/KafkaFlow.Admin.WebApi/Contracts/GroupsResponse.cs @@ -1,7 +1,7 @@ +using System.Collections.Generic; + namespace KafkaFlow.Admin.WebApi.Contracts { - using System.Collections.Generic; - /// /// The response of the consumer groups /// diff --git a/src/KafkaFlow.Admin.WebApi/Contracts/RewindOffsetsToDateRequest.cs b/src/KafkaFlow.Admin.WebApi/Contracts/RewindOffsetsToDateRequest.cs index c11cfe5cd..c34050651 100644 --- a/src/KafkaFlow.Admin.WebApi/Contracts/RewindOffsetsToDateRequest.cs +++ b/src/KafkaFlow.Admin.WebApi/Contracts/RewindOffsetsToDateRequest.cs @@ -1,7 +1,7 @@ +using System; + namespace KafkaFlow.Admin.WebApi.Contracts { - using System; - /// /// The request to rewind offsets to a point in time /// diff --git a/src/KafkaFlow.Admin.WebApi/Contracts/TelemetryResponse.cs b/src/KafkaFlow.Admin.WebApi/Contracts/TelemetryResponse.cs index 778833ce0..195d30365 100644 --- a/src/KafkaFlow.Admin.WebApi/Contracts/TelemetryResponse.cs +++ b/src/KafkaFlow.Admin.WebApi/Contracts/TelemetryResponse.cs @@ -1,10 +1,10 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel.DataAnnotations; +using Newtonsoft.Json; + namespace KafkaFlow.Admin.WebApi.Contracts { - using System; - using System.Collections.Generic; - using System.ComponentModel.DataAnnotations; - using Newtonsoft.Json; - /// /// The response of telemetry data /// diff --git a/src/KafkaFlow.Admin.WebApi/Controllers/ConsumersController.cs b/src/KafkaFlow.Admin.WebApi/Controllers/ConsumersController.cs index 7ef6f33f3..2d050b539 100644 --- a/src/KafkaFlow.Admin.WebApi/Controllers/ConsumersController.cs +++ b/src/KafkaFlow.Admin.WebApi/Controllers/ConsumersController.cs @@ -1,13 +1,13 @@ +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using KafkaFlow.Admin.WebApi.Adapters; +using KafkaFlow.Admin.WebApi.Contracts; +using KafkaFlow.Consumers; +using Microsoft.AspNetCore.Mvc; + namespace KafkaFlow.Admin.WebApi.Controllers { - using 
System.Collections.Generic; - using System.Linq; - using System.Threading.Tasks; - using KafkaFlow.Admin.WebApi.Adapters; - using KafkaFlow.Admin.WebApi.Contracts; - using KafkaFlow.Consumers; - using Microsoft.AspNetCore.Mvc; - /// /// Consumers controller /// @@ -15,8 +15,8 @@ namespace KafkaFlow.Admin.WebApi.Controllers [ApiController] public class ConsumersController : ControllerBase { - private readonly IConsumerAccessor consumers; - private readonly IConsumerAdmin consumerAdmin; + private readonly IConsumerAccessor _consumers; + private readonly IConsumerAdmin _consumerAdmin; /// /// Initializes a new instance of the class. @@ -25,8 +25,8 @@ public class ConsumersController : ControllerBase /// The admin messages consumer public ConsumersController(IConsumerAccessor consumers, IConsumerAdmin consumerAdmin) { - this.consumers = consumers; - this.consumerAdmin = consumerAdmin; + _consumers = consumers; + _consumerAdmin = consumerAdmin; } /// @@ -41,7 +41,7 @@ public IActionResult GetConsumersByGroupId([FromRoute] string groupId) return this.Ok( new ConsumersResponse { - Consumers = this.consumers + Consumers = _consumers .All .Where(x => x.GroupId == groupId) .Select(x => x.Adapt()), @@ -62,7 +62,7 @@ public IActionResult GetConsumerByGroupIdName( [FromRoute] string groupId, [FromRoute] string consumerName) { - var consumer = this.consumers.All + var consumer = _consumers.All .FirstOrDefault(x => x.GroupId == groupId && x.ConsumerName == consumerName); if (consumer is null) @@ -89,7 +89,7 @@ public async Task PauseConsumer( [FromRoute] string consumerName, [FromQuery] IList topics) { - var consumer = this.consumers.All + var consumer = _consumers.All .FirstOrDefault(x => x.GroupId == groupId && x.ConsumerName == consumerName); if (consumer is null) @@ -97,7 +97,7 @@ public async Task PauseConsumer( return this.NotFound(); } - await this.consumerAdmin.PauseConsumerAsync(consumerName, topics); + await _consumerAdmin.PauseConsumerAsync(consumerName, topics); 
return this.Accepted(); } @@ -118,7 +118,7 @@ public async Task ResumeConsumer( [FromRoute] string consumerName, [FromQuery] IList topics) { - var consumer = this.consumers.All + var consumer = _consumers.All .FirstOrDefault(x => x.GroupId == groupId && x.ConsumerName == consumerName); if (consumer is null) @@ -126,7 +126,7 @@ public async Task ResumeConsumer( return this.NotFound(); } - await this.consumerAdmin.ResumeConsumerAsync(consumerName, topics); + await _consumerAdmin.ResumeConsumerAsync(consumerName, topics); return this.Accepted(); } @@ -145,14 +145,14 @@ public async Task StartConsumer( [FromRoute] string groupId, [FromRoute] string consumerName) { - var consumer = this.consumers.All.FirstOrDefault(x => x.GroupId == groupId && x.ConsumerName == consumerName); + var consumer = _consumers.All.FirstOrDefault(x => x.GroupId == groupId && x.ConsumerName == consumerName); if (consumer is null) { return this.NotFound(); } - await this.consumerAdmin.StartConsumerAsync(consumerName); + await _consumerAdmin.StartConsumerAsync(consumerName); return this.Accepted(); } @@ -171,14 +171,14 @@ public async Task StopConsumer( [FromRoute] string groupId, [FromRoute] string consumerName) { - var consumer = this.consumers.All.FirstOrDefault(x => x.GroupId == groupId && x.ConsumerName == consumerName); + var consumer = _consumers.All.FirstOrDefault(x => x.GroupId == groupId && x.ConsumerName == consumerName); if (consumer is null) { return this.NotFound(); } - await this.consumerAdmin.StopConsumerAsync(consumerName); + await _consumerAdmin.StopConsumerAsync(consumerName); return this.Accepted(); } @@ -197,7 +197,7 @@ public async Task RestartConsumer( [FromRoute] string groupId, [FromRoute] string consumerName) { - var consumer = this.consumers.All + var consumer = _consumers.All .FirstOrDefault(x => x.GroupId == groupId && x.ConsumerName == consumerName); if (consumer is null) @@ -205,7 +205,7 @@ public async Task RestartConsumer( return this.NotFound(); } - await 
this.consumerAdmin.RestartConsumerAsync(consumerName); + await _consumerAdmin.RestartConsumerAsync(consumerName); return this.Accepted(); } @@ -234,7 +234,7 @@ public async Task ResetOffsets( return this.BadRequest(); } - var consumer = this.consumers.All + var consumer = _consumers.All .FirstOrDefault(x => x.GroupId == groupId && x.ConsumerName == consumerName); if (consumer is null) @@ -242,7 +242,7 @@ public async Task ResetOffsets( return this.NotFound(); } - await this.consumerAdmin.ResetOffsetsAsync(consumerName, topics); + await _consumerAdmin.ResetOffsetsAsync(consumerName, topics); return this.Accepted(); } @@ -271,7 +271,7 @@ public async Task RewindOffsets( return this.BadRequest(); } - var consumer = this.consumers.All + var consumer = _consumers.All .FirstOrDefault(x => x.GroupId == groupId && x.ConsumerName == consumerName); if (consumer is null) @@ -279,7 +279,7 @@ public async Task RewindOffsets( return this.NotFound(); } - await this.consumerAdmin.RewindOffsetsAsync(consumerName, request.Date, topics); + await _consumerAdmin.RewindOffsetsAsync(consumerName, request.Date, topics); return this.Accepted(); } @@ -306,7 +306,7 @@ public async Task ChangeWorkersCount( return this.BadRequest(); } - var consumer = this.consumers.All + var consumer = _consumers.All .FirstOrDefault(x => x.GroupId == groupId && x.ConsumerName == consumerName); if (consumer is null) @@ -314,7 +314,7 @@ public async Task ChangeWorkersCount( return this.NotFound(); } - await this.consumerAdmin.ChangeWorkersCountAsync(consumerName, request.WorkersCount); + await _consumerAdmin.ChangeWorkersCountAsync(consumerName, request.WorkersCount); return this.Accepted(); } diff --git a/src/KafkaFlow.Admin.WebApi/Controllers/GroupsController.cs b/src/KafkaFlow.Admin.WebApi/Controllers/GroupsController.cs index 0ed3a5b2a..9c6f1609b 100644 --- a/src/KafkaFlow.Admin.WebApi/Controllers/GroupsController.cs +++ b/src/KafkaFlow.Admin.WebApi/Controllers/GroupsController.cs @@ -1,13 +1,13 @@ +using 
System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using KafkaFlow.Admin.WebApi.Adapters; +using KafkaFlow.Admin.WebApi.Contracts; +using KafkaFlow.Consumers; +using Microsoft.AspNetCore.Mvc; + namespace KafkaFlow.Admin.WebApi.Controllers { - using System.Collections.Generic; - using System.Linq; - using System.Threading.Tasks; - using KafkaFlow.Admin.WebApi.Adapters; - using KafkaFlow.Admin.WebApi.Contracts; - using KafkaFlow.Consumers; - using Microsoft.AspNetCore.Mvc; - /// /// Groups controller /// @@ -15,8 +15,8 @@ namespace KafkaFlow.Admin.WebApi.Controllers [ApiController] public class GroupsController : ControllerBase { - private readonly IConsumerAccessor consumers; - private readonly IConsumerAdmin consumerAdmin; + private readonly IConsumerAccessor _consumers; + private readonly IConsumerAdmin _consumerAdmin; /// /// Initializes a new instance of the class. @@ -25,8 +25,8 @@ public class GroupsController : ControllerBase /// The admin messages consumer public GroupsController(IConsumerAccessor consumers, IConsumerAdmin consumerAdmin) { - this.consumers = consumers; - this.consumerAdmin = consumerAdmin; + _consumers = consumers; + _consumerAdmin = consumerAdmin; } /// @@ -40,7 +40,7 @@ public IActionResult GetAllGroups() return this.Ok( new GroupsResponse { - Groups = this.consumers.All + Groups = _consumers.All .GroupBy(x => x.GroupId) .Select( x => new GroupResponse @@ -64,7 +64,7 @@ public async Task PauseGroup( [FromRoute] string groupId, [FromQuery] IList topics) { - await this.consumerAdmin.PauseConsumerGroupAsync(groupId, topics); + await _consumerAdmin.PauseConsumerGroupAsync(groupId, topics); return this.Accepted(); } @@ -82,7 +82,7 @@ public async Task ResumeGroup( [FromRoute] string groupId, [FromQuery] IList topics) { - await this.consumerAdmin.ResumeConsumerGroupAsync(groupId, topics); + await _consumerAdmin.ResumeConsumerGroupAsync(groupId, topics); return this.Accepted(); } diff --git 
a/src/KafkaFlow.Admin.WebApi/Controllers/TelemetryController.cs b/src/KafkaFlow.Admin.WebApi/Controllers/TelemetryController.cs index 2cc39ef3c..b2a8e0362 100644 --- a/src/KafkaFlow.Admin.WebApi/Controllers/TelemetryController.cs +++ b/src/KafkaFlow.Admin.WebApi/Controllers/TelemetryController.cs @@ -1,9 +1,9 @@ +using KafkaFlow.Admin.WebApi.Adapters; +using KafkaFlow.Admin.WebApi.Contracts; +using Microsoft.AspNetCore.Mvc; + namespace KafkaFlow.Admin.WebApi.Controllers { - using KafkaFlow.Admin.WebApi.Adapters; - using KafkaFlow.Admin.WebApi.Contracts; - using Microsoft.AspNetCore.Mvc; - /// /// Telemetry controller /// @@ -11,7 +11,7 @@ namespace KafkaFlow.Admin.WebApi.Controllers [ApiController] public class TelemetryController : ControllerBase { - private readonly ITelemetryStorage storage; + private readonly ITelemetryStorage _storage; /// /// Initializes a new instance of the class. @@ -19,18 +19,18 @@ public class TelemetryController : ControllerBase /// The telemetry storage public TelemetryController(ITelemetryStorage storage) { - this.storage = storage; + _storage = storage; } /// /// Get telemetry data from all the consumer groups /// /// A telemetry response - [HttpGet(Name=nameof(GetTelemetry))] + [HttpGet(Name = nameof(GetTelemetry))] [ProducesResponseType(typeof(TelemetryResponse), 200)] public IActionResult GetTelemetry() { - var metrics = this.storage.Get(); + var metrics = _storage.Get(); return this.Ok(metrics.Adapt()); } diff --git a/src/KafkaFlow.Admin/AdminProducer.cs b/src/KafkaFlow.Admin/AdminProducer.cs index 6718240ba..da9d056d2 100644 --- a/src/KafkaFlow.Admin/AdminProducer.cs +++ b/src/KafkaFlow.Admin/AdminProducer.cs @@ -1,21 +1,21 @@ +using System; +using System.Threading.Tasks; +using KafkaFlow.Admin.Messages; + namespace KafkaFlow.Admin { - using System; - using System.Threading.Tasks; - using KafkaFlow.Admin.Messages; - internal class AdminProducer : IAdminProducer { - private readonly IMessageProducer producer; - private readonly 
int topicPartition; + private readonly IMessageProducer _producer; + private readonly int _topicPartition; public AdminProducer(IMessageProducer producer, int topicPartition) { - this.producer = producer; - this.topicPartition = topicPartition; + _producer = producer; + _topicPartition = topicPartition; } public Task ProduceAsync(IAdminMessage message) => - this.producer.ProduceAsync(Guid.NewGuid().ToString(), message, partition: this.topicPartition); + _producer.ProduceAsync(Guid.NewGuid().ToString(), message, partition: _topicPartition); } } diff --git a/src/KafkaFlow.Admin/ConsumerAdmin.cs b/src/KafkaFlow.Admin/ConsumerAdmin.cs index 408286e94..51af3bc81 100644 --- a/src/KafkaFlow.Admin/ConsumerAdmin.cs +++ b/src/KafkaFlow.Admin/ConsumerAdmin.cs @@ -1,24 +1,24 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using KafkaFlow.Admin.Messages; + namespace KafkaFlow.Admin { - using System; - using System.Collections.Generic; - using System.Linq; - using System.Threading.Tasks; - using KafkaFlow.Admin.Messages; - internal class ConsumerAdmin : IConsumerAdmin { - private readonly IAdminProducer producer; + private readonly IAdminProducer _producer; public ConsumerAdmin(IAdminProducer producer) { - this.producer = producer; + _producer = producer; } public async Task PauseConsumerGroupAsync(string groupId, IEnumerable topics) { - await this.producer.ProduceAsync( + await _producer.ProduceAsync( new PauseConsumersByGroup { GroupId = groupId, @@ -28,7 +28,7 @@ await this.producer.ProduceAsync( public async Task ResumeConsumerGroupAsync(string groupId, IEnumerable topics) { - await this.producer.ProduceAsync( + await _producer.ProduceAsync( new ResumeConsumersByGroup { GroupId = groupId, @@ -38,7 +38,7 @@ await this.producer.ProduceAsync( public async Task PauseConsumerAsync(string consumerName, IEnumerable topics) { - await this.producer.ProduceAsync( + await _producer.ProduceAsync( new PauseConsumerByName { 
ConsumerName = consumerName, @@ -48,7 +48,7 @@ await this.producer.ProduceAsync( public async Task ResumeConsumerAsync(string consumerName, IEnumerable topics) { - await this.producer.ProduceAsync( + await _producer.ProduceAsync( new ResumeConsumerByName { ConsumerName = consumerName, @@ -58,22 +58,22 @@ await this.producer.ProduceAsync( public async Task StartConsumerAsync(string consumerName) { - await this.producer.ProduceAsync(new StartConsumerByName { ConsumerName = consumerName }); + await _producer.ProduceAsync(new StartConsumerByName { ConsumerName = consumerName }); } public async Task StopConsumerAsync(string consumerName) { - await this.producer.ProduceAsync(new StopConsumerByName { ConsumerName = consumerName }); + await _producer.ProduceAsync(new StopConsumerByName { ConsumerName = consumerName }); } public async Task RestartConsumerAsync(string consumerName) { - await this.producer.ProduceAsync(new RestartConsumerByName { ConsumerName = consumerName }); + await _producer.ProduceAsync(new RestartConsumerByName { ConsumerName = consumerName }); } public async Task ResetOffsetsAsync(string consumerName, IEnumerable topics) { - await this.producer.ProduceAsync( + await _producer.ProduceAsync( new ResetConsumerOffset { ConsumerName = consumerName, @@ -83,7 +83,7 @@ await this.producer.ProduceAsync( public async Task RewindOffsetsAsync(string consumerName, DateTime pointInTime, IEnumerable topics) { - await this.producer.ProduceAsync( + await _producer.ProduceAsync( new RewindConsumerOffsetToDateTime { ConsumerName = consumerName, @@ -94,7 +94,7 @@ await this.producer.ProduceAsync( public async Task ChangeWorkersCountAsync(string consumerName, int workersCount) { - await this.producer.ProduceAsync( + await _producer.ProduceAsync( new ChangeConsumerWorkersCount { ConsumerName = consumerName, diff --git a/src/KafkaFlow.Admin/Extensions/ClusterConfigurationBuilderExtensions.cs b/src/KafkaFlow.Admin/Extensions/ClusterConfigurationBuilderExtensions.cs index 
608649536..ee3b51163 100644 --- a/src/KafkaFlow.Admin/Extensions/ClusterConfigurationBuilderExtensions.cs +++ b/src/KafkaFlow.Admin/Extensions/ClusterConfigurationBuilderExtensions.cs @@ -1,12 +1,12 @@ -namespace KafkaFlow -{ - using System; - using System.Reflection; - using KafkaFlow.Admin; - using KafkaFlow.Admin.Handlers; - using KafkaFlow.Configuration; - using KafkaFlow.Serializer; +using System; +using System.Reflection; +using KafkaFlow.Admin; +using KafkaFlow.Admin.Handlers; +using KafkaFlow.Configuration; +using KafkaFlow.Serializer; +namespace KafkaFlow +{ /// /// No needed /// diff --git a/src/KafkaFlow.Admin/Extensions/MessageConsumerExtensions.cs b/src/KafkaFlow.Admin/Extensions/MessageConsumerExtensions.cs index 8c7524298..8f4bdbab0 100644 --- a/src/KafkaFlow.Admin/Extensions/MessageConsumerExtensions.cs +++ b/src/KafkaFlow.Admin/Extensions/MessageConsumerExtensions.cs @@ -1,10 +1,10 @@ +using System.Collections.Generic; +using System.Linq; +using Confluent.Kafka; +using KafkaFlow.Consumers; + namespace KafkaFlow.Admin.Extensions { - using System.Collections.Generic; - using System.Linq; - using Confluent.Kafka; - using KafkaFlow.Consumers; - internal static class MessageConsumerExtensions { public static IReadOnlyList FilterAssigment(this IMessageConsumer consumer, IList topics) diff --git a/src/KafkaFlow.Admin/Handlers/ChangeConsumerWorkersCountHandler.cs b/src/KafkaFlow.Admin/Handlers/ChangeConsumerWorkersCountHandler.cs index 4dbf0f7a0..ef46f59ae 100644 --- a/src/KafkaFlow.Admin/Handlers/ChangeConsumerWorkersCountHandler.cs +++ b/src/KafkaFlow.Admin/Handlers/ChangeConsumerWorkersCountHandler.cs @@ -1,19 +1,18 @@ +using System.Threading.Tasks; +using KafkaFlow.Admin.Messages; +using KafkaFlow.Consumers; + namespace KafkaFlow.Admin.Handlers { - using System.Threading.Tasks; - using KafkaFlow.Admin.Messages; - using KafkaFlow.Consumers; - using KafkaFlow.Middlewares.TypedHandler; - internal class ChangeConsumerWorkersCountHandler : IMessageHandler { 
- private readonly IConsumerAccessor consumerAccessor; + private readonly IConsumerAccessor _consumerAccessor; - public ChangeConsumerWorkersCountHandler(IConsumerAccessor consumerAccessor) => this.consumerAccessor = consumerAccessor; + public ChangeConsumerWorkersCountHandler(IConsumerAccessor consumerAccessor) => _consumerAccessor = consumerAccessor; public Task Handle(IMessageContext context, ChangeConsumerWorkersCount message) { - var consumer = this.consumerAccessor[message.ConsumerName]; + var consumer = _consumerAccessor[message.ConsumerName]; return consumer?.ChangeWorkersCountAndRestartAsync(message.WorkersCount) ?? diff --git a/src/KafkaFlow.Admin/Handlers/ConsumerTelemetryMetricHandler.cs b/src/KafkaFlow.Admin/Handlers/ConsumerTelemetryMetricHandler.cs index e2b7fe36b..e57c832ec 100644 --- a/src/KafkaFlow.Admin/Handlers/ConsumerTelemetryMetricHandler.cs +++ b/src/KafkaFlow.Admin/Handlers/ConsumerTelemetryMetricHandler.cs @@ -1,18 +1,17 @@ +using System.Threading.Tasks; +using KafkaFlow.Admin.Messages; + namespace KafkaFlow.Admin.Handlers { - using System.Threading.Tasks; - using KafkaFlow.Admin.Messages; - using KafkaFlow.Middlewares.TypedHandler; - internal class ConsumerTelemetryMetricHandler : IMessageHandler { - private readonly ITelemetryStorage storage; + private readonly ITelemetryStorage _storage; - public ConsumerTelemetryMetricHandler(ITelemetryStorage storage) => this.storage = storage; + public ConsumerTelemetryMetricHandler(ITelemetryStorage storage) => _storage = storage; public Task Handle(IMessageContext context, ConsumerTelemetryMetric message) { - this.storage.Put(message); + _storage.Put(message); return Task.CompletedTask; } } diff --git a/src/KafkaFlow.Admin/Handlers/PauseConsumerByNameHandler.cs b/src/KafkaFlow.Admin/Handlers/PauseConsumerByNameHandler.cs index 9afc8efdf..b76e6da5f 100644 --- a/src/KafkaFlow.Admin/Handlers/PauseConsumerByNameHandler.cs +++ b/src/KafkaFlow.Admin/Handlers/PauseConsumerByNameHandler.cs @@ -1,21 +1,20 
@@ +using System.Linq; +using System.Threading.Tasks; +using KafkaFlow.Admin.Extensions; +using KafkaFlow.Admin.Messages; +using KafkaFlow.Consumers; + namespace KafkaFlow.Admin.Handlers { - using System.Linq; - using System.Threading.Tasks; - using KafkaFlow.Admin.Extensions; - using KafkaFlow.Admin.Messages; - using KafkaFlow.Consumers; - using KafkaFlow.Middlewares.TypedHandler; - internal class PauseConsumerByNameHandler : IMessageHandler { - private readonly IConsumerAccessor consumerAccessor; + private readonly IConsumerAccessor _consumerAccessor; - public PauseConsumerByNameHandler(IConsumerAccessor consumerAccessor) => this.consumerAccessor = consumerAccessor; + public PauseConsumerByNameHandler(IConsumerAccessor consumerAccessor) => _consumerAccessor = consumerAccessor; public Task Handle(IMessageContext context, PauseConsumerByName message) { - var consumer = this.consumerAccessor[message.ConsumerName]; + var consumer = _consumerAccessor[message.ConsumerName]; var assignment = consumer.FilterAssigment(message.Topics); diff --git a/src/KafkaFlow.Admin/Handlers/PauseConsumersByGroupHandler.cs b/src/KafkaFlow.Admin/Handlers/PauseConsumersByGroupHandler.cs index 376393842..cf92366b9 100644 --- a/src/KafkaFlow.Admin/Handlers/PauseConsumersByGroupHandler.cs +++ b/src/KafkaFlow.Admin/Handlers/PauseConsumersByGroupHandler.cs @@ -1,21 +1,20 @@ +using System.Linq; +using System.Threading.Tasks; +using KafkaFlow.Admin.Extensions; +using KafkaFlow.Admin.Messages; +using KafkaFlow.Consumers; + namespace KafkaFlow.Admin.Handlers { - using System.Linq; - using System.Threading.Tasks; - using KafkaFlow.Admin.Extensions; - using KafkaFlow.Admin.Messages; - using KafkaFlow.Consumers; - using KafkaFlow.Middlewares.TypedHandler; - internal class PauseConsumersByGroupHandler : IMessageHandler { - private readonly IConsumerAccessor consumerAccessor; + private readonly IConsumerAccessor _consumerAccessor; - public PauseConsumersByGroupHandler(IConsumerAccessor consumerAccessor) 
=> this.consumerAccessor = consumerAccessor; + public PauseConsumersByGroupHandler(IConsumerAccessor consumerAccessor) => _consumerAccessor = consumerAccessor; public Task Handle(IMessageContext context, PauseConsumersByGroup message) { - var consumers = this.consumerAccessor.All.Where(x => x.GroupId == message.GroupId); + var consumers = _consumerAccessor.All.Where(x => x.GroupId == message.GroupId); foreach (var consumer in consumers) { diff --git a/src/KafkaFlow.Admin/Handlers/ResetConsumerOffsetHandler.cs b/src/KafkaFlow.Admin/Handlers/ResetConsumerOffsetHandler.cs index 2d9c60008..1735e5c10 100644 --- a/src/KafkaFlow.Admin/Handlers/ResetConsumerOffsetHandler.cs +++ b/src/KafkaFlow.Admin/Handlers/ResetConsumerOffsetHandler.cs @@ -1,23 +1,22 @@ +using System; +using System.Linq; +using System.Threading.Tasks; +using Confluent.Kafka; +using KafkaFlow.Admin.Extensions; +using KafkaFlow.Admin.Messages; +using KafkaFlow.Consumers; + namespace KafkaFlow.Admin.Handlers { - using System; - using System.Linq; - using System.Threading.Tasks; - using Confluent.Kafka; - using KafkaFlow.Admin.Extensions; - using KafkaFlow.Admin.Messages; - using KafkaFlow.Consumers; - using KafkaFlow.Middlewares.TypedHandler; - internal class ResetConsumerOffsetHandler : IMessageHandler { - private readonly IConsumerAccessor consumerAccessor; + private readonly IConsumerAccessor _consumerAccessor; - public ResetConsumerOffsetHandler(IConsumerAccessor consumerAccessor) => this.consumerAccessor = consumerAccessor; + public ResetConsumerOffsetHandler(IConsumerAccessor consumerAccessor) => _consumerAccessor = consumerAccessor; public Task Handle(IMessageContext context, ResetConsumerOffset message) { - var consumer = this.consumerAccessor[message.ConsumerName]; + var consumer = _consumerAccessor[message.ConsumerName]; if (consumer is null) { diff --git a/src/KafkaFlow.Admin/Handlers/RestartConsumerByNameHandler.cs b/src/KafkaFlow.Admin/Handlers/RestartConsumerByNameHandler.cs index 
3525bf490..50759b6eb 100644 --- a/src/KafkaFlow.Admin/Handlers/RestartConsumerByNameHandler.cs +++ b/src/KafkaFlow.Admin/Handlers/RestartConsumerByNameHandler.cs @@ -1,19 +1,18 @@ +using System.Threading.Tasks; +using KafkaFlow.Admin.Messages; +using KafkaFlow.Consumers; + namespace KafkaFlow.Admin.Handlers { - using System.Threading.Tasks; - using KafkaFlow.Admin.Messages; - using KafkaFlow.Consumers; - using KafkaFlow.Middlewares.TypedHandler; - internal class RestartConsumerByNameHandler : IMessageHandler { - private readonly IConsumerAccessor consumerAccessor; + private readonly IConsumerAccessor _consumerAccessor; - public RestartConsumerByNameHandler(IConsumerAccessor consumerAccessor) => this.consumerAccessor = consumerAccessor; + public RestartConsumerByNameHandler(IConsumerAccessor consumerAccessor) => _consumerAccessor = consumerAccessor; public Task Handle(IMessageContext context, RestartConsumerByName message) { - var consumer = this.consumerAccessor[message.ConsumerName]; + var consumer = _consumerAccessor[message.ConsumerName]; return consumer?.RestartAsync() ?? 
Task.CompletedTask; } diff --git a/src/KafkaFlow.Admin/Handlers/ResumeConsumerByNameHandler.cs b/src/KafkaFlow.Admin/Handlers/ResumeConsumerByNameHandler.cs index e8eb336ff..730f5e711 100644 --- a/src/KafkaFlow.Admin/Handlers/ResumeConsumerByNameHandler.cs +++ b/src/KafkaFlow.Admin/Handlers/ResumeConsumerByNameHandler.cs @@ -1,21 +1,20 @@ +using System.Linq; +using System.Threading.Tasks; +using KafkaFlow.Admin.Extensions; +using KafkaFlow.Admin.Messages; +using KafkaFlow.Consumers; + namespace KafkaFlow.Admin.Handlers { - using System.Linq; - using System.Threading.Tasks; - using KafkaFlow.Admin.Extensions; - using KafkaFlow.Admin.Messages; - using KafkaFlow.Consumers; - using KafkaFlow.Middlewares.TypedHandler; - internal class ResumeConsumerByNameHandler : IMessageHandler { - private readonly IConsumerAccessor consumerAccessor; + private readonly IConsumerAccessor _consumerAccessor; - public ResumeConsumerByNameHandler(IConsumerAccessor consumerAccessor) => this.consumerAccessor = consumerAccessor; + public ResumeConsumerByNameHandler(IConsumerAccessor consumerAccessor) => _consumerAccessor = consumerAccessor; public Task Handle(IMessageContext context, ResumeConsumerByName message) { - var consumer = this.consumerAccessor[message.ConsumerName]; + var consumer = _consumerAccessor[message.ConsumerName]; var assignment = consumer.FilterAssigment(message.Topics); diff --git a/src/KafkaFlow.Admin/Handlers/ResumeConsumersByGroupHandler.cs b/src/KafkaFlow.Admin/Handlers/ResumeConsumersByGroupHandler.cs index 553d2ca23..2f82c181d 100644 --- a/src/KafkaFlow.Admin/Handlers/ResumeConsumersByGroupHandler.cs +++ b/src/KafkaFlow.Admin/Handlers/ResumeConsumersByGroupHandler.cs @@ -1,21 +1,20 @@ +using System.Linq; +using System.Threading.Tasks; +using KafkaFlow.Admin.Extensions; +using KafkaFlow.Admin.Messages; +using KafkaFlow.Consumers; + namespace KafkaFlow.Admin.Handlers { - using System.Linq; - using System.Threading.Tasks; - using KafkaFlow.Admin.Extensions; - using 
KafkaFlow.Admin.Messages; - using KafkaFlow.Consumers; - using KafkaFlow.Middlewares.TypedHandler; - internal class ResumeConsumersByGroupHandler : IMessageHandler { - private readonly IConsumerAccessor consumerAccessor; + private readonly IConsumerAccessor _consumerAccessor; - public ResumeConsumersByGroupHandler(IConsumerAccessor consumerAccessor) => this.consumerAccessor = consumerAccessor; + public ResumeConsumersByGroupHandler(IConsumerAccessor consumerAccessor) => _consumerAccessor = consumerAccessor; public Task Handle(IMessageContext context, ResumeConsumersByGroup message) { - var consumers = this.consumerAccessor.All.Where(x => x.GroupId == message.GroupId); + var consumers = _consumerAccessor.All.Where(x => x.GroupId == message.GroupId); foreach (var consumer in consumers) { diff --git a/src/KafkaFlow.Admin/Handlers/RewindConsumerOffsetToDateTimeHandler.cs b/src/KafkaFlow.Admin/Handlers/RewindConsumerOffsetToDateTimeHandler.cs index b5895ef56..8ade85e63 100644 --- a/src/KafkaFlow.Admin/Handlers/RewindConsumerOffsetToDateTimeHandler.cs +++ b/src/KafkaFlow.Admin/Handlers/RewindConsumerOffsetToDateTimeHandler.cs @@ -1,23 +1,22 @@ +using System; +using System.Linq; +using System.Threading.Tasks; +using Confluent.Kafka; +using KafkaFlow.Admin.Extensions; +using KafkaFlow.Admin.Messages; +using KafkaFlow.Consumers; + namespace KafkaFlow.Admin.Handlers { - using System; - using System.Linq; - using System.Threading.Tasks; - using Confluent.Kafka; - using KafkaFlow.Admin.Extensions; - using KafkaFlow.Admin.Messages; - using KafkaFlow.Consumers; - using KafkaFlow.Middlewares.TypedHandler; - internal class RewindConsumerOffsetToDateTimeHandler : IMessageHandler { - private readonly IConsumerAccessor consumerAccessor; + private readonly IConsumerAccessor _consumerAccessor; - public RewindConsumerOffsetToDateTimeHandler(IConsumerAccessor consumerAccessor) => this.consumerAccessor = consumerAccessor; + public RewindConsumerOffsetToDateTimeHandler(IConsumerAccessor 
consumerAccessor) => _consumerAccessor = consumerAccessor; public Task Handle(IMessageContext context, RewindConsumerOffsetToDateTime message) { - var consumer = this.consumerAccessor[message.ConsumerName]; + var consumer = _consumerAccessor[message.ConsumerName]; if (consumer is null) { diff --git a/src/KafkaFlow.Admin/Handlers/StartConsumerByNameHandler.cs b/src/KafkaFlow.Admin/Handlers/StartConsumerByNameHandler.cs index b7ad5fa08..12ab3185b 100644 --- a/src/KafkaFlow.Admin/Handlers/StartConsumerByNameHandler.cs +++ b/src/KafkaFlow.Admin/Handlers/StartConsumerByNameHandler.cs @@ -1,19 +1,18 @@ +using System.Threading.Tasks; +using KafkaFlow.Admin.Messages; +using KafkaFlow.Consumers; + namespace KafkaFlow.Admin.Handlers { - using System.Threading.Tasks; - using KafkaFlow.Admin.Messages; - using KafkaFlow.Consumers; - using KafkaFlow.Middlewares.TypedHandler; - internal class StartConsumerByNameHandler : IMessageHandler { - private readonly IConsumerAccessor consumerAccessor; + private readonly IConsumerAccessor _consumerAccessor; - public StartConsumerByNameHandler(IConsumerAccessor consumerAccessor) => this.consumerAccessor = consumerAccessor; + public StartConsumerByNameHandler(IConsumerAccessor consumerAccessor) => _consumerAccessor = consumerAccessor; public async Task Handle(IMessageContext context, StartConsumerByName message) { - var consumer = this.consumerAccessor[message.ConsumerName]; + var consumer = _consumerAccessor[message.ConsumerName]; await consumer.StartAsync(); } diff --git a/src/KafkaFlow.Admin/Handlers/StopConsumerByNameHandler.cs b/src/KafkaFlow.Admin/Handlers/StopConsumerByNameHandler.cs index 92a9fc0a0..fd83135ae 100644 --- a/src/KafkaFlow.Admin/Handlers/StopConsumerByNameHandler.cs +++ b/src/KafkaFlow.Admin/Handlers/StopConsumerByNameHandler.cs @@ -1,19 +1,18 @@ +using System.Threading.Tasks; +using KafkaFlow.Admin.Messages; +using KafkaFlow.Consumers; + namespace KafkaFlow.Admin.Handlers { - using System.Threading.Tasks; - using 
KafkaFlow.Admin.Messages; - using KafkaFlow.Consumers; - using KafkaFlow.Middlewares.TypedHandler; - internal class StopConsumerByNameHandler : IMessageHandler { - private readonly IConsumerAccessor consumerAccessor; + private readonly IConsumerAccessor _consumerAccessor; - public StopConsumerByNameHandler(IConsumerAccessor consumerAccessor) => this.consumerAccessor = consumerAccessor; + public StopConsumerByNameHandler(IConsumerAccessor consumerAccessor) => _consumerAccessor = consumerAccessor; public async Task Handle(IMessageContext context, StopConsumerByName message) { - var consumer = this.consumerAccessor[message.ConsumerName]; + var consumer = _consumerAccessor[message.ConsumerName]; await consumer.StopAsync(); } diff --git a/src/KafkaFlow.Admin/IAdminProducer.cs b/src/KafkaFlow.Admin/IAdminProducer.cs index 6149e01af..7a7ec4910 100644 --- a/src/KafkaFlow.Admin/IAdminProducer.cs +++ b/src/KafkaFlow.Admin/IAdminProducer.cs @@ -1,8 +1,8 @@ +using System.Threading.Tasks; +using KafkaFlow.Admin.Messages; + namespace KafkaFlow.Admin { - using System.Threading.Tasks; - using KafkaFlow.Admin.Messages; - /// /// A special producer to publish admin messages /// diff --git a/src/KafkaFlow.Admin/IConsumerAdmin.cs b/src/KafkaFlow.Admin/IConsumerAdmin.cs index 624b4fb95..90d3a4a3a 100644 --- a/src/KafkaFlow.Admin/IConsumerAdmin.cs +++ b/src/KafkaFlow.Admin/IConsumerAdmin.cs @@ -1,9 +1,9 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks; + namespace KafkaFlow.Admin { - using System; - using System.Collections.Generic; - using System.Threading.Tasks; - /// /// Expose Consumer administration features /// diff --git a/src/KafkaFlow.Admin/ITelemetryStorage.cs b/src/KafkaFlow.Admin/ITelemetryStorage.cs index e9c81043b..02494b589 100644 --- a/src/KafkaFlow.Admin/ITelemetryStorage.cs +++ b/src/KafkaFlow.Admin/ITelemetryStorage.cs @@ -1,8 +1,8 @@ +using System.Collections.Generic; +using KafkaFlow.Admin.Messages; + namespace KafkaFlow.Admin { - 
using System.Collections.Generic; - using KafkaFlow.Admin.Messages; - /// /// Used to implement a telemetry data storage provider /// diff --git a/src/KafkaFlow.Admin/MemoryTelemetryStorage.cs b/src/KafkaFlow.Admin/MemoryTelemetryStorage.cs index 081856304..92cef9b91 100644 --- a/src/KafkaFlow.Admin/MemoryTelemetryStorage.cs +++ b/src/KafkaFlow.Admin/MemoryTelemetryStorage.cs @@ -1,37 +1,37 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.CompilerServices; +using KafkaFlow.Admin.Messages; + namespace KafkaFlow.Admin { - using System; - using System.Collections.Concurrent; - using System.Collections.Generic; - using System.Linq; - using System.Runtime.CompilerServices; - using KafkaFlow.Admin.Messages; - internal class MemoryTelemetryStorage : ITelemetryStorage { - private readonly IDateTimeProvider dateTimeProvider; - private readonly TimeSpan cleanRunInterval; - private readonly TimeSpan expiryTime; - private readonly object cleanSyncRoot = new(); + private readonly IDateTimeProvider _dateTimeProvider; + private readonly TimeSpan _cleanRunInterval; + private readonly TimeSpan _expiryTime; + private readonly object _cleanSyncRoot = new(); - private readonly ConcurrentDictionary<(string, string, string, string), ConsumerTelemetryMetric> metrics = new(); + private readonly ConcurrentDictionary<(string, string, string, string), ConsumerTelemetryMetric> _metrics = new(); - private DateTime lastCleanDate; + private DateTime _lastCleanDate; public MemoryTelemetryStorage(TimeSpan cleanRunInterval, TimeSpan expiryTime, IDateTimeProvider dateTimeProvider) { - this.cleanRunInterval = cleanRunInterval; - this.expiryTime = expiryTime; - this.dateTimeProvider = dateTimeProvider; - this.lastCleanDate = dateTimeProvider.MinValue; + _cleanRunInterval = cleanRunInterval; + _expiryTime = expiryTime; + _dateTimeProvider = dateTimeProvider; + _lastCleanDate = dateTimeProvider.MinValue; } - public 
IEnumerable Get() => this.metrics.Values; + public IEnumerable Get() => _metrics.Values; public void Put(ConsumerTelemetryMetric telemetryMetric) { this.TryCleanItems(); - this.metrics[BuildKey(telemetryMetric)] = telemetryMetric; + _metrics[BuildKey(telemetryMetric)] = telemetryMetric; } [MethodImpl(MethodImplOptions.AggressiveInlining)] @@ -45,14 +45,14 @@ private void TryCleanItems() return; } - lock (this.cleanSyncRoot) + lock (_cleanSyncRoot) { if (!this.NeedsCleaning()) { return; } - this.lastCleanDate = this.dateTimeProvider.UtcNow; + _lastCleanDate = _dateTimeProvider.UtcNow; this.CleanExpiredItems(); } @@ -60,16 +60,16 @@ private void TryCleanItems() private void CleanExpiredItems() { - foreach (var (key, metric) in this.metrics.Select(x=> (x.Key, x.Value))) + foreach (var (key, metric) in _metrics.Select(x => (x.Key, x.Value))) { - if (this.dateTimeProvider.UtcNow - metric.SentAt > this.expiryTime) + if (_dateTimeProvider.UtcNow - metric.SentAt > _expiryTime) { - this.metrics.TryRemove(key, out _); + _metrics.TryRemove(key, out _); } } } [MethodImpl(MethodImplOptions.AggressiveInlining)] - private bool NeedsCleaning() => this.dateTimeProvider.UtcNow - this.lastCleanDate > this.cleanRunInterval; + private bool NeedsCleaning() => _dateTimeProvider.UtcNow - _lastCleanDate > _cleanRunInterval; } } diff --git a/src/KafkaFlow.Admin/Messages/ChangeConsumerWorkersCount.cs b/src/KafkaFlow.Admin/Messages/ChangeConsumerWorkersCount.cs index daccb7d61..21a95c2a9 100644 --- a/src/KafkaFlow.Admin/Messages/ChangeConsumerWorkersCount.cs +++ b/src/KafkaFlow.Admin/Messages/ChangeConsumerWorkersCount.cs @@ -1,7 +1,7 @@ +using System.Runtime.Serialization; + namespace KafkaFlow.Admin.Messages { - using System.Runtime.Serialization; - /// /// A message to change the worker count /// diff --git a/src/KafkaFlow.Admin/Messages/ConsumerTelemetryMetric.cs b/src/KafkaFlow.Admin/Messages/ConsumerTelemetryMetric.cs index 38d5f1605..adfd01965 100644 --- 
a/src/KafkaFlow.Admin/Messages/ConsumerTelemetryMetric.cs +++ b/src/KafkaFlow.Admin/Messages/ConsumerTelemetryMetric.cs @@ -1,10 +1,10 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using KafkaFlow.Consumers; + namespace KafkaFlow.Admin.Messages { - using System; - using System.Collections.Generic; - using System.Runtime.Serialization; - using KafkaFlow.Consumers; - /// /// A message that contains data related to consumers partition assigment /// diff --git a/src/KafkaFlow.Admin/Messages/PauseConsumerByName.cs b/src/KafkaFlow.Admin/Messages/PauseConsumerByName.cs index 14ea13ea6..3a3b32748 100644 --- a/src/KafkaFlow.Admin/Messages/PauseConsumerByName.cs +++ b/src/KafkaFlow.Admin/Messages/PauseConsumerByName.cs @@ -1,8 +1,8 @@ +using System.Collections.Generic; +using System.Runtime.Serialization; + namespace KafkaFlow.Admin.Messages { - using System.Collections.Generic; - using System.Runtime.Serialization; - /// /// A message that pauses a consumer /// diff --git a/src/KafkaFlow.Admin/Messages/PauseConsumersByGroup.cs b/src/KafkaFlow.Admin/Messages/PauseConsumersByGroup.cs index 982e34c12..a484d15dd 100644 --- a/src/KafkaFlow.Admin/Messages/PauseConsumersByGroup.cs +++ b/src/KafkaFlow.Admin/Messages/PauseConsumersByGroup.cs @@ -1,8 +1,8 @@ +using System.Collections.Generic; +using System.Runtime.Serialization; + namespace KafkaFlow.Admin.Messages { - using System.Collections.Generic; - using System.Runtime.Serialization; - /// /// The message that pauses an entire consumer group /// diff --git a/src/KafkaFlow.Admin/Messages/ResetConsumerOffset.cs b/src/KafkaFlow.Admin/Messages/ResetConsumerOffset.cs index c46e1edf1..e0116a982 100644 --- a/src/KafkaFlow.Admin/Messages/ResetConsumerOffset.cs +++ b/src/KafkaFlow.Admin/Messages/ResetConsumerOffset.cs @@ -1,8 +1,8 @@ +using System.Collections.Generic; +using System.Runtime.Serialization; + namespace KafkaFlow.Admin.Messages { - using System.Collections.Generic; - using 
System.Runtime.Serialization; - /// /// The message that rewind the offset of all partitions/topics of a consumer to the beginning /// diff --git a/src/KafkaFlow.Admin/Messages/RestartConsumerByName.cs b/src/KafkaFlow.Admin/Messages/RestartConsumerByName.cs index 777c4cbec..cfefa0cea 100644 --- a/src/KafkaFlow.Admin/Messages/RestartConsumerByName.cs +++ b/src/KafkaFlow.Admin/Messages/RestartConsumerByName.cs @@ -1,7 +1,7 @@ +using System.Runtime.Serialization; + namespace KafkaFlow.Admin.Messages { - using System.Runtime.Serialization; - /// /// The message that destroy and recreates the internal consumer /// diff --git a/src/KafkaFlow.Admin/Messages/ResumeConsumerByName.cs b/src/KafkaFlow.Admin/Messages/ResumeConsumerByName.cs index 220a34e59..da723ea5e 100644 --- a/src/KafkaFlow.Admin/Messages/ResumeConsumerByName.cs +++ b/src/KafkaFlow.Admin/Messages/ResumeConsumerByName.cs @@ -1,8 +1,8 @@ +using System.Collections.Generic; +using System.Runtime.Serialization; + namespace KafkaFlow.Admin.Messages { - using System.Collections.Generic; - using System.Runtime.Serialization; - /// /// The message that resume a paused consumer /// diff --git a/src/KafkaFlow.Admin/Messages/ResumeConsumersByGroup.cs b/src/KafkaFlow.Admin/Messages/ResumeConsumersByGroup.cs index c11467f7f..71f9b5f00 100644 --- a/src/KafkaFlow.Admin/Messages/ResumeConsumersByGroup.cs +++ b/src/KafkaFlow.Admin/Messages/ResumeConsumersByGroup.cs @@ -1,8 +1,8 @@ +using System.Collections.Generic; +using System.Runtime.Serialization; + namespace KafkaFlow.Admin.Messages { - using System.Collections.Generic; - using System.Runtime.Serialization; - /// /// The message that resume a paused consumer group /// diff --git a/src/KafkaFlow.Admin/Messages/RewindConsumerOffsetToDateTime.cs b/src/KafkaFlow.Admin/Messages/RewindConsumerOffsetToDateTime.cs index d675ea05b..cb0417d4e 100644 --- a/src/KafkaFlow.Admin/Messages/RewindConsumerOffsetToDateTime.cs +++ 
b/src/KafkaFlow.Admin/Messages/RewindConsumerOffsetToDateTime.cs @@ -1,9 +1,9 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + namespace KafkaFlow.Admin.Messages { - using System; - using System.Collections.Generic; - using System.Runtime.Serialization; - /// /// The message that rewind a consumer to a point in time /// diff --git a/src/KafkaFlow.Admin/Messages/StartConsumerByName.cs b/src/KafkaFlow.Admin/Messages/StartConsumerByName.cs index af7c519ee..d8ebf3640 100644 --- a/src/KafkaFlow.Admin/Messages/StartConsumerByName.cs +++ b/src/KafkaFlow.Admin/Messages/StartConsumerByName.cs @@ -1,7 +1,7 @@ +using System.Runtime.Serialization; + namespace KafkaFlow.Admin.Messages { - using System.Runtime.Serialization; - /// /// The message that starts a consumer /// diff --git a/src/KafkaFlow.Admin/Messages/StopConsumerByName.cs b/src/KafkaFlow.Admin/Messages/StopConsumerByName.cs index 68fbe1ce0..aea85c140 100644 --- a/src/KafkaFlow.Admin/Messages/StopConsumerByName.cs +++ b/src/KafkaFlow.Admin/Messages/StopConsumerByName.cs @@ -1,7 +1,7 @@ +using System.Runtime.Serialization; + namespace KafkaFlow.Admin.Messages { - using System.Runtime.Serialization; - /// /// The message that stops a consumer /// diff --git a/src/KafkaFlow.Admin/TelemetryScheduler.cs b/src/KafkaFlow.Admin/TelemetryScheduler.cs index b3aaaec69..c59ec2c7c 100644 --- a/src/KafkaFlow.Admin/TelemetryScheduler.cs +++ b/src/KafkaFlow.Admin/TelemetryScheduler.cs @@ -1,45 +1,45 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Linq; +using System.Threading; +using KafkaFlow.Admin.Messages; +using KafkaFlow.Consumers; +using KafkaFlow.Producers; + namespace KafkaFlow.Admin { - using System; - using System.Collections.Generic; - using System.Diagnostics; - using System.Linq; - using System.Threading; - using KafkaFlow.Admin.Messages; - using KafkaFlow.Consumers; - using KafkaFlow.Producers; - internal class TelemetryScheduler 
: ITelemetryScheduler { - private static readonly int ProcessId = Process.GetCurrentProcess().Id; - private readonly Dictionary timers = new(); - private readonly IDependencyResolver dependencyResolver; - private readonly ILogHandler logHandler; + private static readonly int s_processId = Process.GetCurrentProcess().Id; + private readonly Dictionary _timers = new(); + private readonly IDependencyResolver _dependencyResolver; + private readonly ILogHandler _logHandler; public TelemetryScheduler(IDependencyResolver dependencyResolver) { - this.dependencyResolver = dependencyResolver; - this.logHandler = dependencyResolver.Resolve(); + _dependencyResolver = dependencyResolver; + _logHandler = dependencyResolver.Resolve(); } public void Start(string telemetryId, string topicName, int topicPartition) { this.Stop(telemetryId); - var consumers = this.dependencyResolver + var consumers = _dependencyResolver .Resolve() .All .Where( c => !c.ManagementDisabled && c.ClusterName.Equals( - this.dependencyResolver + _dependencyResolver .Resolve()[telemetryId] .ClusterName)) .ToList(); - var producer = this.dependencyResolver.Resolve().GetProducer(telemetryId); + var producer = _dependencyResolver.Resolve().GetProducer(telemetryId); - this.timers[telemetryId] = new Timer( + _timers[telemetryId] = new Timer( _ => ProduceTelemetry(topicName, topicPartition, consumers, producer), null, TimeSpan.Zero, @@ -48,10 +48,10 @@ public void Start(string telemetryId, string topicName, int topicPartition) public void Stop(string telemetryId) { - if (this.timers.TryGetValue(telemetryId, out var timer)) + if (_timers.TryGetValue(telemetryId, out var timer)) { timer.Dispose(); - this.timers.Remove(telemetryId); + _timers.Remove(telemetryId); } } @@ -75,7 +75,7 @@ private void ProduceTelemetry( ConsumerName = c.ConsumerName, Topic = topic, GroupId = c.GroupId, - InstanceName = $"{Environment.MachineName}-{ProcessId}", + InstanceName = $"{Environment.MachineName}-{s_processId}", PausedPartitions = 
c.PausedPartitions .Where(p => p.Topic == topic) .Select(p => p.Partition.Value), @@ -96,7 +96,7 @@ private void ProduceTelemetry( } catch (Exception e) { - this.logHandler.Warning("Error producing telemetry data", new { Exception = e }); + _logHandler.Warning("Error producing telemetry data", new { Exception = e }); } } } diff --git a/src/KafkaFlow.Compressor.Gzip/GzipMessageCompressor.cs b/src/KafkaFlow.Compressor.Gzip/GzipMessageCompressor.cs index 3f795c6af..3e430d094 100644 --- a/src/KafkaFlow.Compressor.Gzip/GzipMessageCompressor.cs +++ b/src/KafkaFlow.Compressor.Gzip/GzipMessageCompressor.cs @@ -1,8 +1,8 @@ -namespace KafkaFlow.Compressor.Gzip -{ - using System.IO; - using System.IO.Compression; +using System.IO; +using System.IO.Compression; +namespace KafkaFlow.Compressor.Gzip +{ /// /// A GZIP message compressor /// diff --git a/src/KafkaFlow.Compressor.Gzip/GzipMessageDecompressor.cs b/src/KafkaFlow.Compressor.Gzip/GzipMessageDecompressor.cs index 19baffaf0..0d04f998c 100644 --- a/src/KafkaFlow.Compressor.Gzip/GzipMessageDecompressor.cs +++ b/src/KafkaFlow.Compressor.Gzip/GzipMessageDecompressor.cs @@ -1,8 +1,8 @@ -namespace KafkaFlow.Compressor.Gzip -{ - using System.IO; - using System.IO.Compression; +using System.IO; +using System.IO.Compression; +namespace KafkaFlow.Compressor.Gzip +{ /// /// A GZIP message decompressor /// diff --git a/src/KafkaFlow.Extensions.Hosting/KafkaFlowHostedService.cs b/src/KafkaFlow.Extensions.Hosting/KafkaFlowHostedService.cs index 7a08e6b68..ac265ee8e 100644 --- a/src/KafkaFlow.Extensions.Hosting/KafkaFlowHostedService.cs +++ b/src/KafkaFlow.Extensions.Hosting/KafkaFlowHostedService.cs @@ -1,18 +1,18 @@ +using System; +using System.Threading; +using System.Threading.Tasks; +using global::Microsoft.Extensions.Hosting; + namespace KafkaFlow { - using System; - using System.Threading; - using System.Threading.Tasks; - using global::Microsoft.Extensions.Hosting; - internal class KafkaFlowHostedService : IHostedService { - 
private readonly IKafkaBus kafkaBus; + private readonly IKafkaBus _kafkaBus; - public KafkaFlowHostedService(IServiceProvider serviceProvider) => this.kafkaBus = serviceProvider.CreateKafkaBus(); + public KafkaFlowHostedService(IServiceProvider serviceProvider) => _kafkaBus = serviceProvider.CreateKafkaBus(); - public Task StartAsync(CancellationToken cancellationToken) => this.kafkaBus.StartAsync(cancellationToken); + public Task StartAsync(CancellationToken cancellationToken) => _kafkaBus.StartAsync(cancellationToken); - public Task StopAsync(CancellationToken cancellationToken) => this.kafkaBus.StopAsync(); + public Task StopAsync(CancellationToken cancellationToken) => _kafkaBus.StopAsync(); } } diff --git a/src/KafkaFlow.Extensions.Hosting/ServiceCollectionExtensions.cs b/src/KafkaFlow.Extensions.Hosting/ServiceCollectionExtensions.cs index 89c81754b..fab8199e1 100644 --- a/src/KafkaFlow.Extensions.Hosting/ServiceCollectionExtensions.cs +++ b/src/KafkaFlow.Extensions.Hosting/ServiceCollectionExtensions.cs @@ -1,9 +1,9 @@ -namespace KafkaFlow -{ - using System; - using global::Microsoft.Extensions.DependencyInjection; - using KafkaFlow.Configuration; +using System; +using global::Microsoft.Extensions.DependencyInjection; +using KafkaFlow.Configuration; +namespace KafkaFlow +{ /// /// No needed /// diff --git a/src/KafkaFlow.IntegrationTests/CompressionTest.cs b/src/KafkaFlow.IntegrationTests/CompressionTest.cs deleted file mode 100644 index 86b45d4a1..000000000 --- a/src/KafkaFlow.IntegrationTests/CompressionTest.cs +++ /dev/null @@ -1,44 +0,0 @@ -namespace KafkaFlow.IntegrationTests -{ - using System; - using System.Linq; - using System.Threading.Tasks; - using AutoFixture; - using global::Microsoft.Extensions.DependencyInjection; - using global::Microsoft.VisualStudio.TestTools.UnitTesting; - using KafkaFlow.IntegrationTests.Core; - using KafkaFlow.IntegrationTests.Core.Handlers; - using KafkaFlow.IntegrationTests.Core.Producers; - - [TestClass] - public 
class CompressionTest - { - private readonly Fixture fixture = new(); - - private IServiceProvider provider; - - [TestInitialize] - public void Setup() - { - this.provider = Bootstrapper.GetServiceProvider(); - MessageStorage.Clear(); - } - - [TestMethod] - public async Task GzipTest() - { - // Arrange - var producer = this.provider.GetRequiredService>(); - var messages = this.fixture.CreateMany(10).ToList(); - - // Act - await Task.WhenAll(messages.Select(m => producer.ProduceAsync(Guid.NewGuid().ToString(), m))); - - // Assert - foreach (var message in messages) - { - await MessageStorage.AssertMessageAsync(message); - } - } - } -} diff --git a/src/KafkaFlow.IntegrationTests/ProducerTest.cs b/src/KafkaFlow.IntegrationTests/ProducerTest.cs deleted file mode 100644 index 00f4e58db..000000000 --- a/src/KafkaFlow.IntegrationTests/ProducerTest.cs +++ /dev/null @@ -1,40 +0,0 @@ -namespace KafkaFlow.IntegrationTests -{ - using System; - using System.Threading.Tasks; - using AutoFixture; - using KafkaFlow.IntegrationTests.Core; - using KafkaFlow.IntegrationTests.Core.Handlers; - using KafkaFlow.IntegrationTests.Core.Producers; - using Microsoft.Extensions.DependencyInjection; - using Microsoft.VisualStudio.TestTools.UnitTesting; - - [TestClass] - public class ProducerTest - { - private readonly Fixture fixture = new(); - - private IServiceProvider provider; - - [TestInitialize] - public void Setup() - { - this.provider = Bootstrapper.GetServiceProvider(); - MessageStorage.Clear(); - } - - [TestMethod] - public async Task ProduceNullKeyTest() - { - // Arrange - var producer = this.provider.GetRequiredService>(); - var message = this.fixture.Create(); - - // Act - await producer.ProduceAsync(null, message); - - // Assert - await MessageStorage.AssertMessageAsync(message); - } - } -} diff --git a/src/KafkaFlow.LogHandler.Console/ConsoleLogHandler.cs b/src/KafkaFlow.LogHandler.Console/ConsoleLogHandler.cs index a22ec1660..7f85f2b5a 100644 --- 
a/src/KafkaFlow.LogHandler.Console/ConsoleLogHandler.cs +++ b/src/KafkaFlow.LogHandler.Console/ConsoleLogHandler.cs @@ -1,8 +1,8 @@ -namespace KafkaFlow -{ - using System; - using System.Text.Json; +using System; +using System.Text.Json; +namespace KafkaFlow +{ internal class ConsoleLogHandler : ILogHandler { public void Error(string message, Exception ex, object data) diff --git a/src/KafkaFlow.LogHandler.Console/ExtensionMethods.cs b/src/KafkaFlow.LogHandler.Console/ExtensionMethods.cs index 1b6b36b00..435144b9d 100644 --- a/src/KafkaFlow.LogHandler.Console/ExtensionMethods.cs +++ b/src/KafkaFlow.LogHandler.Console/ExtensionMethods.cs @@ -1,7 +1,7 @@ +using KafkaFlow.Configuration; + namespace KafkaFlow { - using KafkaFlow.Configuration; - /// /// No needed /// diff --git a/src/KafkaFlow.LogHandler.Microsoft/ExtensionMethods.cs b/src/KafkaFlow.LogHandler.Microsoft/ExtensionMethods.cs index 6a0814444..762fda7ad 100644 --- a/src/KafkaFlow.LogHandler.Microsoft/ExtensionMethods.cs +++ b/src/KafkaFlow.LogHandler.Microsoft/ExtensionMethods.cs @@ -1,7 +1,7 @@ +using KafkaFlow.Configuration; + namespace KafkaFlow { - using KafkaFlow.Configuration; - /// /// No needed /// diff --git a/src/KafkaFlow.LogHandler.Microsoft/MicrosoftLogHandler.cs b/src/KafkaFlow.LogHandler.Microsoft/MicrosoftLogHandler.cs index aa399d2ce..e2468608e 100644 --- a/src/KafkaFlow.LogHandler.Microsoft/MicrosoftLogHandler.cs +++ b/src/KafkaFlow.LogHandler.Microsoft/MicrosoftLogHandler.cs @@ -1,36 +1,36 @@ -namespace KafkaFlow -{ - using System; - using System.Text.Json; - using Microsoft.Extensions.Logging; +using System; +using System.Text.Json; +using Microsoft.Extensions.Logging; +namespace KafkaFlow +{ internal class MicrosoftLogHandler : ILogHandler { - private readonly ILogger logger; + private readonly ILogger _logger; public MicrosoftLogHandler(ILoggerFactory loggerFactory) { - this.logger = loggerFactory.CreateLogger("KafkaFlow"); + _logger = loggerFactory.CreateLogger("KafkaFlow"); } public 
void Error(string message, Exception ex, object data) { - this.logger.LogError(ex, "{Message} | Data: {Data}", message, JsonSerializer.Serialize(data)); + _logger.LogError(ex, "{Message} | Data: {Data}", message, JsonSerializer.Serialize(data)); } public void Warning(string message, object data) { - this.logger.LogWarning("{Message} | Data: {Data}", message, JsonSerializer.Serialize(data)); + _logger.LogWarning("{Message} | Data: {Data}", message, JsonSerializer.Serialize(data)); } public void Info(string message, object data) { - this.logger.LogInformation("{Message} | Data: {Data}", message, JsonSerializer.Serialize(data)); + _logger.LogInformation("{Message} | Data: {Data}", message, JsonSerializer.Serialize(data)); } public void Verbose(string message, object data) { - this.logger.LogDebug("{Message} | Data: {Data}", message, JsonSerializer.Serialize(data)); + _logger.LogDebug("{Message} | Data: {Data}", message, JsonSerializer.Serialize(data)); } } } diff --git a/src/KafkaFlow.Microsoft.DependencyInjection/MicrosoftDependencyConfigurator.cs b/src/KafkaFlow.Microsoft.DependencyInjection/MicrosoftDependencyConfigurator.cs index 7d0b993d3..d7cec29e3 100644 --- a/src/KafkaFlow.Microsoft.DependencyInjection/MicrosoftDependencyConfigurator.cs +++ b/src/KafkaFlow.Microsoft.DependencyInjection/MicrosoftDependencyConfigurator.cs @@ -1,16 +1,16 @@ +using System; +using Microsoft.Extensions.DependencyInjection; + namespace KafkaFlow { - using System; - using Microsoft.Extensions.DependencyInjection; - internal class MicrosoftDependencyConfigurator : IDependencyConfigurator { - private readonly IServiceCollection services; + private readonly IServiceCollection _services; public MicrosoftDependencyConfigurator(IServiceCollection services) { - this.services = services; - this.services.AddSingleton(provider => new MicrosoftDependencyResolver(provider)); + _services = services; + _services.AddSingleton(provider => new MicrosoftDependencyResolver(provider)); } public 
IDependencyConfigurator Add( @@ -18,7 +18,7 @@ public IDependencyConfigurator Add( Type implementationType, InstanceLifetime lifetime) { - this.services.Add( + _services.Add( ServiceDescriptor.Describe( serviceType, implementationType, @@ -31,7 +31,7 @@ public IDependencyConfigurator Add(InstanceLifetime l where TService : class where TImplementation : class, TService { - this.services.Add( + _services.Add( ServiceDescriptor.Describe( typeof(TService), typeof(TImplementation), @@ -43,7 +43,7 @@ public IDependencyConfigurator Add(InstanceLifetime l public IDependencyConfigurator Add(InstanceLifetime lifetime) where TService : class { - this.services.Add( + _services.Add( ServiceDescriptor.Describe( typeof(TService), typeof(TService), @@ -55,7 +55,7 @@ public IDependencyConfigurator Add(InstanceLifetime lifetime) public IDependencyConfigurator Add(TImplementation service) where TImplementation : class { - this.services.AddSingleton(service); + _services.AddSingleton(service); return this; } @@ -64,7 +64,7 @@ public IDependencyConfigurator Add( Func factory, InstanceLifetime lifetime) { - this.services.Add( + _services.Add( ServiceDescriptor.Describe( serviceType, provider => factory(new MicrosoftDependencyResolver(provider)), diff --git a/src/KafkaFlow.Microsoft.DependencyInjection/MicrosoftDependencyResolver.cs b/src/KafkaFlow.Microsoft.DependencyInjection/MicrosoftDependencyResolver.cs index e0f3033dd..39a186681 100644 --- a/src/KafkaFlow.Microsoft.DependencyInjection/MicrosoftDependencyResolver.cs +++ b/src/KafkaFlow.Microsoft.DependencyInjection/MicrosoftDependencyResolver.cs @@ -1,31 +1,31 @@ -namespace KafkaFlow -{ - using System; - using System.Collections.Generic; - using global::Microsoft.Extensions.DependencyInjection; +using System; +using System.Collections.Generic; +using global::Microsoft.Extensions.DependencyInjection; +namespace KafkaFlow +{ internal class MicrosoftDependencyResolver : IDependencyResolver { - private readonly IServiceProvider 
serviceProvider; + private readonly IServiceProvider _serviceProvider; public MicrosoftDependencyResolver(IServiceProvider serviceProvider) { - this.serviceProvider = serviceProvider; + _serviceProvider = serviceProvider; } public object Resolve(Type type) { - return this.serviceProvider.GetService(type); + return _serviceProvider.GetService(type); } public IEnumerable ResolveAll(Type type) { - return this.serviceProvider.GetServices(type); + return _serviceProvider.GetServices(type); } public IDependencyResolverScope CreateScope() { - return new MicrosoftDependencyResolverScope(this.serviceProvider.CreateScope()); + return new MicrosoftDependencyResolverScope(_serviceProvider.CreateScope()); } } } diff --git a/src/KafkaFlow.Microsoft.DependencyInjection/MicrosoftDependencyResolverScope.cs b/src/KafkaFlow.Microsoft.DependencyInjection/MicrosoftDependencyResolverScope.cs index 20adbd8f8..6cd745d47 100644 --- a/src/KafkaFlow.Microsoft.DependencyInjection/MicrosoftDependencyResolverScope.cs +++ b/src/KafkaFlow.Microsoft.DependencyInjection/MicrosoftDependencyResolverScope.cs @@ -1,14 +1,14 @@ +using global::Microsoft.Extensions.DependencyInjection; + namespace KafkaFlow { - using global::Microsoft.Extensions.DependencyInjection; - internal class MicrosoftDependencyResolverScope : IDependencyResolverScope { - private readonly IServiceScope scope; + private readonly IServiceScope _scope; public MicrosoftDependencyResolverScope(IServiceScope scope) { - this.scope = scope; + _scope = scope; this.Resolver = new MicrosoftDependencyResolver(scope.ServiceProvider); } @@ -16,7 +16,7 @@ public MicrosoftDependencyResolverScope(IServiceScope scope) public void Dispose() { - this.scope.Dispose(); + _scope.Dispose(); } } } diff --git a/src/KafkaFlow.Microsoft.DependencyInjection/ServiceCollectionExtensions.cs b/src/KafkaFlow.Microsoft.DependencyInjection/ServiceCollectionExtensions.cs index 905fce15a..296bc6c6d 100644 --- 
a/src/KafkaFlow.Microsoft.DependencyInjection/ServiceCollectionExtensions.cs +++ b/src/KafkaFlow.Microsoft.DependencyInjection/ServiceCollectionExtensions.cs @@ -1,9 +1,9 @@ +using System; +using global::Microsoft.Extensions.DependencyInjection; +using KafkaFlow.Configuration; + namespace KafkaFlow { - using System; - using global::Microsoft.Extensions.DependencyInjection; - using KafkaFlow.Configuration; - /// /// Extension methods over IServiceCollection /// diff --git a/src/KafkaFlow.Microsoft.DependencyInjection/ServiceProviderExtensions.cs b/src/KafkaFlow.Microsoft.DependencyInjection/ServiceProviderExtensions.cs index dc872e585..098084edc 100644 --- a/src/KafkaFlow.Microsoft.DependencyInjection/ServiceProviderExtensions.cs +++ b/src/KafkaFlow.Microsoft.DependencyInjection/ServiceProviderExtensions.cs @@ -1,9 +1,9 @@ +using System; +using global::Microsoft.Extensions.DependencyInjection; +using KafkaFlow.Configuration; + namespace KafkaFlow { - using System; - using global::Microsoft.Extensions.DependencyInjection; - using KafkaFlow.Configuration; - /// /// Extension methods over IServiceProvider /// diff --git a/src/KafkaFlow.OpenTelemetry/ActivitySourceAccessor.cs b/src/KafkaFlow.OpenTelemetry/ActivitySourceAccessor.cs index b41b3065f..e62d6bf70 100644 --- a/src/KafkaFlow.OpenTelemetry/ActivitySourceAccessor.cs +++ b/src/KafkaFlow.OpenTelemetry/ActivitySourceAccessor.cs @@ -1,14 +1,14 @@ extern alias SemanticConventions; +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Linq; +using System.Reflection; +using Conventions = SemanticConventions::OpenTelemetry.Trace.TraceSemanticConventions; + namespace KafkaFlow.OpenTelemetry { - using System; - using System.Collections.Generic; - using System.Diagnostics; - using System.Linq; - using System.Reflection; - using Conventions = SemanticConventions::OpenTelemetry.Trace.TraceSemanticConventions; - internal static class ActivitySourceAccessor { internal const string 
ActivityString = "otel_activity"; @@ -17,10 +17,10 @@ internal static class ActivitySourceAccessor internal const string AttributeMessagingOperation = "messaging.operation"; internal const string AttributeMessagingKafkaMessageKey = "messaging.kafka.message.key"; internal const string AttributeMessagingKafkaMessageOffset = "messaging.kafka.message.offset"; - internal static readonly AssemblyName AssemblyName = typeof(ActivitySourceAccessor).Assembly.GetName(); - internal static readonly string ActivitySourceName = AssemblyName.Name; - internal static readonly string Version = Assembly.GetExecutingAssembly().GetName().Version.ToString(); - internal static readonly ActivitySource ActivitySource = new(ActivitySourceName, Version); + internal static readonly AssemblyName s_assemblyName = typeof(ActivitySourceAccessor).Assembly.GetName(); + internal static readonly string s_activitySourceName = s_assemblyName.Name; + internal static readonly string s_version = Assembly.GetExecutingAssembly().GetName().Version.ToString(); + internal static readonly ActivitySource s_activitySource = new(s_activitySourceName, s_version); public static void SetGenericTags(Activity activity, IEnumerable bootstrapServers) { diff --git a/src/KafkaFlow.OpenTelemetry/ExtensionMethods.cs b/src/KafkaFlow.OpenTelemetry/ExtensionMethods.cs index 9b169c949..ef72ef941 100644 --- a/src/KafkaFlow.OpenTelemetry/ExtensionMethods.cs +++ b/src/KafkaFlow.OpenTelemetry/ExtensionMethods.cs @@ -1,7 +1,7 @@ -namespace KafkaFlow.Configuration -{ - using KafkaFlow.OpenTelemetry; +using KafkaFlow.OpenTelemetry; +namespace KafkaFlow.Configuration +{ /// /// Adds OpenTelemetry instrumentation /// diff --git a/src/KafkaFlow.OpenTelemetry/OpenTelemetryConsumerEventsHandler.cs b/src/KafkaFlow.OpenTelemetry/OpenTelemetryConsumerEventsHandler.cs index b3c3244c4..48592a896 100644 --- a/src/KafkaFlow.OpenTelemetry/OpenTelemetryConsumerEventsHandler.cs +++ b/src/KafkaFlow.OpenTelemetry/OpenTelemetryConsumerEventsHandler.cs @@ 
-1,20 +1,20 @@ -namespace KafkaFlow.OpenTelemetry +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Text; +using System.Threading.Tasks; +using global::OpenTelemetry; +using global::OpenTelemetry.Context.Propagation; + +namespace KafkaFlow.OpenTelemetry { - using System; - using System.Collections.Generic; - using System.Diagnostics; - using System.Text; - using System.Threading.Tasks; - using global::OpenTelemetry; - using global::OpenTelemetry.Context.Propagation; - internal static class OpenTelemetryConsumerEventsHandler { private const string ProcessString = "process"; private const string AttributeMessagingSourceName = "messaging.source.name"; private const string AttributeMessagingKafkaConsumerGroup = "messaging.kafka.consumer.group"; private const string AttributeMessagingKafkaSourcePartition = "messaging.kafka.source.partition"; - private static readonly TextMapPropagator Propagator = Propagators.DefaultTextMapPropagator; + private static readonly TextMapPropagator s_propagator = Propagators.DefaultTextMapPropagator; public static Task OnConsumeStarted(IMessageContext context) { @@ -23,13 +23,13 @@ public static Task OnConsumeStarted(IMessageContext context) var activityName = !string.IsNullOrEmpty(context?.ConsumerContext.Topic) ? $"{context?.ConsumerContext.Topic} {ProcessString}" : ProcessString; // Extract the PropagationContext of the upstream parent from the message headers. - var parentContext = Propagator.Extract(new PropagationContext(default, Baggage.Current), context, ExtractTraceContextIntoBasicProperties); + var parentContext = s_propagator.Extract(new PropagationContext(default, Baggage.Current), context, ExtractTraceContextIntoBasicProperties); Baggage.Current = parentContext.Baggage; // Start an activity with a name following the semantic convention of the OpenTelemetry messaging specification. 
// The convention also defines a set of attributes (in .NET they are mapped as `tags`) to be populated in the activity. // https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/messaging.md - var activity = ActivitySourceAccessor.ActivitySource.StartActivity(activityName, ActivityKind.Consumer, parentContext.ActivityContext); + var activity = ActivitySourceAccessor.s_activitySource.StartActivity(activityName, ActivityKind.Consumer, parentContext.ActivityContext); foreach (var item in Baggage.Current) { diff --git a/src/KafkaFlow.OpenTelemetry/OpenTelemetryProducerEventsHandler.cs b/src/KafkaFlow.OpenTelemetry/OpenTelemetryProducerEventsHandler.cs index 41745aa23..83c8025fa 100644 --- a/src/KafkaFlow.OpenTelemetry/OpenTelemetryProducerEventsHandler.cs +++ b/src/KafkaFlow.OpenTelemetry/OpenTelemetryProducerEventsHandler.cs @@ -1,19 +1,19 @@ -namespace KafkaFlow.OpenTelemetry +using System; +using System.Diagnostics; +using System.Linq; +using System.Text; +using System.Threading.Tasks; +using global::OpenTelemetry; +using global::OpenTelemetry.Context.Propagation; + +namespace KafkaFlow.OpenTelemetry { - using System; - using System.Diagnostics; - using System.Linq; - using System.Text; - using System.Threading.Tasks; - using global::OpenTelemetry; - using global::OpenTelemetry.Context.Propagation; - internal static class OpenTelemetryProducerEventsHandler { private const string PublishString = "publish"; private const string AttributeMessagingDestinationName = "messaging.destination.name"; private const string AttributeMessagingKafkaDestinationPartition = "messaging.kafka.destination.partition"; - private static readonly TextMapPropagator Propagator = Propagators.DefaultTextMapPropagator; + private static readonly TextMapPropagator s_propagator = Propagators.DefaultTextMapPropagator; public static Task OnProducerStarted(IMessageContext context) { @@ -24,7 +24,7 @@ public static Task 
OnProducerStarted(IMessageContext context) // Start an activity with a name following the semantic convention of the OpenTelemetry messaging specification. // The convention also defines a set of attributes (in .NET they are mapped as `tags`) to be populated in the activity. // https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/messaging.md - var activity = ActivitySourceAccessor.ActivitySource.StartActivity(activityName, ActivityKind.Producer); + var activity = ActivitySourceAccessor.s_activitySource.StartActivity(activityName, ActivityKind.Producer); // Depending on Sampling (and whether a listener is registered or not), the // activity above may not be created. @@ -46,7 +46,7 @@ public static Task OnProducerStarted(IMessageContext context) Baggage.Current = Baggage.Create(activity?.Baggage.ToDictionary(item => item.Key, item => item.Value)); // Inject the ActivityContext into the message headers to propagate trace context to the receiving service. 
- Propagator.Inject(new PropagationContext(contextToInject, Baggage.Current), context, InjectTraceContextIntoBasicProperties); + s_propagator.Inject(new PropagationContext(contextToInject, Baggage.Current), context, InjectTraceContextIntoBasicProperties); ActivitySourceAccessor.SetGenericTags(activity, context?.Brokers); diff --git a/src/KafkaFlow.SchemaRegistry/ClusterConfigurationBuilderExtensions.cs b/src/KafkaFlow.SchemaRegistry/ClusterConfigurationBuilderExtensions.cs index 83c4123a8..95abcaec0 100644 --- a/src/KafkaFlow.SchemaRegistry/ClusterConfigurationBuilderExtensions.cs +++ b/src/KafkaFlow.SchemaRegistry/ClusterConfigurationBuilderExtensions.cs @@ -1,9 +1,9 @@ +using System; +using Confluent.SchemaRegistry; +using KafkaFlow.Configuration; + namespace KafkaFlow { - using System; - using Confluent.SchemaRegistry; - using KafkaFlow.Configuration; - /// /// No needed /// diff --git a/src/KafkaFlow.SchemaRegistry/ConfluentDeserializerWrapper.cs b/src/KafkaFlow.SchemaRegistry/ConfluentDeserializerWrapper.cs index ace5aa9c5..c908eb5ea 100644 --- a/src/KafkaFlow.SchemaRegistry/ConfluentDeserializerWrapper.cs +++ b/src/KafkaFlow.SchemaRegistry/ConfluentDeserializerWrapper.cs @@ -1,20 +1,20 @@ +using System; +using System.Collections.Concurrent; +using System.IO; +using System.Threading.Tasks; +using Confluent.Kafka; +using Microsoft.IO; + namespace KafkaFlow { - using System; - using System.Collections.Concurrent; - using System.IO; - using System.Threading.Tasks; - using Confluent.Kafka; - using Microsoft.IO; - /// /// A wrapper to call the typed Confluent deserializers /// public abstract class ConfluentDeserializerWrapper { - private static readonly RecyclableMemoryStreamManager MemoryStreamManager = new(); + private static readonly RecyclableMemoryStreamManager s_memoryStreamManager = new(); - private static readonly ConcurrentDictionary Deserializers = new(); + private static readonly ConcurrentDictionary s_deserializers = new(); /// /// Get the deserializer 
based on the target message type @@ -26,9 +26,9 @@ public static ConfluentDeserializerWrapper GetOrCreateDeserializer( Type messageType, Func deserializerFactory) { - return Deserializers.SafeGetOrAdd( + return s_deserializers.SafeGetOrAdd( messageType, - _ => (ConfluentDeserializerWrapper) Activator.CreateInstance( + _ => (ConfluentDeserializerWrapper)Activator.CreateInstance( typeof(InnerConfluentDeserializerWrapper<>).MakeGenericType(messageType), deserializerFactory)); } @@ -43,22 +43,22 @@ public static ConfluentDeserializerWrapper GetOrCreateDeserializer( private class InnerConfluentDeserializerWrapper : ConfluentDeserializerWrapper { - private readonly IAsyncDeserializer deserializer; + private readonly IAsyncDeserializer _deserializer; public InnerConfluentDeserializerWrapper(Func deserializerFactory) { - this.deserializer = (IAsyncDeserializer) deserializerFactory(); + _deserializer = (IAsyncDeserializer)deserializerFactory(); } public override async Task DeserializeAsync(Stream input, ISerializerContext context) { - using var buffer = MemoryStreamManager.GetStream(); + using var buffer = s_memoryStreamManager.GetStream(); await input.CopyToAsync(buffer).ConfigureAwait(false); - return await this.deserializer + return await _deserializer .DeserializeAsync( - new ReadOnlyMemory(buffer.GetBuffer(), 0, (int) buffer.Length), + new ReadOnlyMemory(buffer.GetBuffer(), 0, (int)buffer.Length), false, new SerializationContext(MessageComponentType.Value, context.Topic)) .ConfigureAwait(false); diff --git a/src/KafkaFlow.SchemaRegistry/ConfluentSerializerWrapper.cs b/src/KafkaFlow.SchemaRegistry/ConfluentSerializerWrapper.cs index d705cdfda..6d506f0a8 100644 --- a/src/KafkaFlow.SchemaRegistry/ConfluentSerializerWrapper.cs +++ b/src/KafkaFlow.SchemaRegistry/ConfluentSerializerWrapper.cs @@ -1,17 +1,17 @@ +using System; +using System.Collections.Concurrent; +using System.IO; +using System.Threading.Tasks; +using Confluent.Kafka; + namespace KafkaFlow { - using System; - 
using System.Collections.Concurrent; - using System.IO; - using System.Threading.Tasks; - using Confluent.Kafka; - /// /// A wrapper to call the typed Confluent serializers and deserializers /// public abstract class ConfluentSerializerWrapper { - private static readonly ConcurrentDictionary Serializers = new(); + private static readonly ConcurrentDictionary s_serializers = new(); /// /// Get the serializer based on the target message type @@ -23,9 +23,9 @@ public static ConfluentSerializerWrapper GetOrCreateSerializer( Type messageType, Func serializerFactory) { - return Serializers.SafeGetOrAdd( + return s_serializers.SafeGetOrAdd( messageType, - _ => (ConfluentSerializerWrapper) Activator.CreateInstance( + _ => (ConfluentSerializerWrapper)Activator.CreateInstance( typeof(InnerConfluentSerializerWrapper<>).MakeGenericType(messageType), serializerFactory)); } @@ -41,17 +41,17 @@ public static ConfluentSerializerWrapper GetOrCreateSerializer( private class InnerConfluentSerializerWrapper : ConfluentSerializerWrapper { - private readonly IAsyncSerializer serializer; + private readonly IAsyncSerializer _serializer; public InnerConfluentSerializerWrapper(Func serializerFactory) { - this.serializer = (IAsyncSerializer) serializerFactory(); + _serializer = (IAsyncSerializer)serializerFactory(); } public override async Task SerializeAsync(object message, Stream output, ISerializerContext context) { - var data = await this.serializer - .SerializeAsync((T) message, new SerializationContext(MessageComponentType.Value, context.Topic)) + var data = await _serializer + .SerializeAsync((T)message, new SerializationContext(MessageComponentType.Value, context.Topic)) .ConfigureAwait(false); await output diff --git a/src/KafkaFlow.SchemaRegistry/ISchemaRegistryTypeNameResolver.cs b/src/KafkaFlow.SchemaRegistry/ISchemaRegistryTypeNameResolver.cs index 17cc95c82..eda8ed04f 100644 --- a/src/KafkaFlow.SchemaRegistry/ISchemaRegistryTypeNameResolver.cs +++ 
b/src/KafkaFlow.SchemaRegistry/ISchemaRegistryTypeNameResolver.cs @@ -1,7 +1,7 @@ +using System.Threading.Tasks; + namespace KafkaFlow { - using System.Threading.Tasks; - /// /// An interface to implement a type name resolver to messages serialized with schema registry serializers /// diff --git a/src/KafkaFlow.SchemaRegistry/SchemaRegistryTypeResolver.cs b/src/KafkaFlow.SchemaRegistry/SchemaRegistryTypeResolver.cs index 3861d1ef6..803f4199b 100644 --- a/src/KafkaFlow.SchemaRegistry/SchemaRegistryTypeResolver.cs +++ b/src/KafkaFlow.SchemaRegistry/SchemaRegistryTypeResolver.cs @@ -1,23 +1,23 @@ +using System; +using System.Buffers.Binary; +using System.Collections.Concurrent; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using KafkaFlow.Middlewares.Serializer.Resolvers; + namespace KafkaFlow { - using System; - using System.Buffers.Binary; - using System.Collections.Concurrent; - using System.Linq; - using System.Threading; - using System.Threading.Tasks; - using KafkaFlow.Middlewares.Serializer.Resolvers; - /// /// The message type resolver to be used with schema registry serializers /// public class SchemaRegistryTypeResolver : IMessageTypeResolver { - private static readonly ConcurrentDictionary Types = new(); + private static readonly ConcurrentDictionary s_types = new(); - private static readonly SemaphoreSlim Semaphore = new(1, 1); + private static readonly SemaphoreSlim s_semaphore = new(1, 1); - private readonly ISchemaRegistryTypeNameResolver typeNameResolver; + private readonly ISchemaRegistryTypeNameResolver _typeNameResolver; /// /// Initializes a new instance of the class. @@ -25,39 +25,39 @@ public class SchemaRegistryTypeResolver : IMessageTypeResolver /// A instance of the interface. 
public SchemaRegistryTypeResolver(ISchemaRegistryTypeNameResolver typeNameResolver) { - this.typeNameResolver = typeNameResolver; + _typeNameResolver = typeNameResolver; } /// public async ValueTask OnConsumeAsync(IMessageContext context) { var schemaId = BinaryPrimitives.ReadInt32BigEndian( - ((byte[]) context.Message.Value).AsSpan().Slice(1, 4)); + ((byte[])context.Message.Value).AsSpan().Slice(1, 4)); - if (Types.TryGetValue(schemaId, out var type)) + if (s_types.TryGetValue(schemaId, out var type)) { return type; } - await Semaphore.WaitAsync(); + await s_semaphore.WaitAsync(); try { - if (Types.TryGetValue(schemaId, out type)) + if (s_types.TryGetValue(schemaId, out type)) { return type; } - var typeName = await this.typeNameResolver.ResolveAsync(schemaId); + var typeName = await _typeNameResolver.ResolveAsync(schemaId); - return Types[schemaId] = AppDomain.CurrentDomain + return s_types[schemaId] = AppDomain.CurrentDomain .GetAssemblies() .Select(a => a.GetType(typeName)) .FirstOrDefault(x => x != null); } finally { - Semaphore.Release(); + s_semaphore.Release(); } } diff --git a/src/KafkaFlow.Serializer.JsonCore/JsonCoreDeserializer.cs b/src/KafkaFlow.Serializer.JsonCore/JsonCoreDeserializer.cs index 521b3be16..6a9a18d30 100644 --- a/src/KafkaFlow.Serializer.JsonCore/JsonCoreDeserializer.cs +++ b/src/KafkaFlow.Serializer.JsonCore/JsonCoreDeserializer.cs @@ -1,16 +1,16 @@ -namespace KafkaFlow.Serializer -{ - using System; - using System.IO; - using System.Text.Json; - using System.Threading.Tasks; +using System; +using System.IO; +using System.Text.Json; +using System.Threading.Tasks; +namespace KafkaFlow.Serializer +{ /// /// A message deserializer using System.Text.Json library /// public class JsonCoreDeserializer : IDeserializer { - private readonly JsonSerializerOptions serializerOptions; + private readonly JsonSerializerOptions _serializerOptions; /// /// Initializes a new instance of the class. 
@@ -18,7 +18,7 @@ public class JsonCoreDeserializer : IDeserializer /// Json serializer options public JsonCoreDeserializer(JsonSerializerOptions options) { - this.serializerOptions = options; + _serializerOptions = options; } /// @@ -33,7 +33,7 @@ public JsonCoreDeserializer() public async Task DeserializeAsync(Stream input, Type type, ISerializerContext context) { return await JsonSerializer - .DeserializeAsync(input, type, this.serializerOptions) + .DeserializeAsync(input, type, _serializerOptions) .ConfigureAwait(false); } } diff --git a/src/KafkaFlow.Serializer.JsonCore/JsonCoreSerializer.cs b/src/KafkaFlow.Serializer.JsonCore/JsonCoreSerializer.cs index e0069dd7e..22eb2b4e8 100644 --- a/src/KafkaFlow.Serializer.JsonCore/JsonCoreSerializer.cs +++ b/src/KafkaFlow.Serializer.JsonCore/JsonCoreSerializer.cs @@ -1,16 +1,16 @@ -namespace KafkaFlow.Serializer -{ - using System.IO; - using System.Text.Json; - using System.Threading.Tasks; +using System.IO; +using System.Text.Json; +using System.Threading.Tasks; +namespace KafkaFlow.Serializer +{ /// /// A message serializer using System.Text.Json library /// public class JsonCoreSerializer : ISerializer { - private readonly JsonSerializerOptions serializerOptions; - private readonly JsonWriterOptions writerOptions; + private readonly JsonSerializerOptions _serializerOptions; + private readonly JsonWriterOptions _writerOptions; /// /// Initializes a new instance of the class. 
@@ -18,7 +18,7 @@ public class JsonCoreSerializer : ISerializer /// Json serializer options public JsonCoreSerializer(JsonSerializerOptions options) { - this.serializerOptions = options; + _serializerOptions = options; } /// @@ -27,7 +27,7 @@ public JsonCoreSerializer(JsonSerializerOptions options) /// Json writer options public JsonCoreSerializer(JsonWriterOptions writerOptions) { - this.writerOptions = writerOptions; + _writerOptions = writerOptions; } /// @@ -37,8 +37,8 @@ public JsonCoreSerializer(JsonWriterOptions writerOptions) /// Json writer options public JsonCoreSerializer(JsonSerializerOptions serializerOptions, JsonWriterOptions writerOptions) { - this.serializerOptions = serializerOptions; - this.writerOptions = writerOptions; + _serializerOptions = serializerOptions; + _writerOptions = writerOptions; } /// @@ -52,9 +52,9 @@ public JsonCoreSerializer() /// public Task SerializeAsync(object message, Stream output, ISerializerContext context) { - using var writer = new Utf8JsonWriter(output, this.writerOptions); + using var writer = new Utf8JsonWriter(output, _writerOptions); - JsonSerializer.Serialize(writer, message, this.serializerOptions); + JsonSerializer.Serialize(writer, message, _serializerOptions); return Task.CompletedTask; } diff --git a/src/KafkaFlow.Serializer.NewtonsoftJson/NewtonsoftJsonDeserializer.cs b/src/KafkaFlow.Serializer.NewtonsoftJson/NewtonsoftJsonDeserializer.cs index f0b05dcff..170a87259 100644 --- a/src/KafkaFlow.Serializer.NewtonsoftJson/NewtonsoftJsonDeserializer.cs +++ b/src/KafkaFlow.Serializer.NewtonsoftJson/NewtonsoftJsonDeserializer.cs @@ -1,11 +1,11 @@ -namespace KafkaFlow.Serializer -{ - using System; - using System.IO; - using System.Text; - using System.Threading.Tasks; - using Newtonsoft.Json; +using System; +using System.IO; +using System.Text; +using System.Threading.Tasks; +using Newtonsoft.Json; +namespace KafkaFlow.Serializer +{ /// /// A message deserializer using NewtonsoftJson library /// @@ -13,8 +13,9 @@ 
public class NewtonsoftJsonDeserializer : IDeserializer { private const int DefaultBufferSize = 1024; - private static readonly UTF8Encoding UTF8NoBom = new (false); - private readonly JsonSerializerSettings settings; + private static readonly UTF8Encoding s_uTF8NoBom = new(false); + + private readonly JsonSerializerSettings _settings; /// /// Initializes a new instance of the class. @@ -22,7 +23,7 @@ public class NewtonsoftJsonDeserializer : IDeserializer /// Json serializer settings public NewtonsoftJsonDeserializer(JsonSerializerSettings settings) { - this.settings = settings; + _settings = settings; } /// @@ -38,12 +39,12 @@ public Task DeserializeAsync(Stream input, Type type, ISerializerContext { using var sr = new StreamReader( input, - UTF8NoBom, + s_uTF8NoBom, true, DefaultBufferSize, true); - var serializer = JsonSerializer.CreateDefault(this.settings); + var serializer = JsonSerializer.CreateDefault(_settings); return Task.FromResult(serializer.Deserialize(sr, type)); } diff --git a/src/KafkaFlow.Serializer.NewtonsoftJson/NewtonsoftJsonSerializer.cs b/src/KafkaFlow.Serializer.NewtonsoftJson/NewtonsoftJsonSerializer.cs index 8ca731d2c..2a312ca4f 100644 --- a/src/KafkaFlow.Serializer.NewtonsoftJson/NewtonsoftJsonSerializer.cs +++ b/src/KafkaFlow.Serializer.NewtonsoftJson/NewtonsoftJsonSerializer.cs @@ -1,10 +1,10 @@ -namespace KafkaFlow.Serializer -{ - using System.IO; - using System.Text; - using System.Threading.Tasks; - using Newtonsoft.Json; +using System.IO; +using System.Text; +using System.Threading.Tasks; +using Newtonsoft.Json; +namespace KafkaFlow.Serializer +{ /// /// A message serializer using NewtonsoftJson library /// @@ -12,8 +12,9 @@ public class NewtonsoftJsonSerializer : ISerializer { private const int DefaultBufferSize = 1024; - private static readonly UTF8Encoding UTF8NoBom = new (false); - private readonly JsonSerializerSettings settings; + private static readonly UTF8Encoding s_uTF8NoBom = new(false); + + private readonly 
JsonSerializerSettings _settings; /// /// Initializes a new instance of the class. @@ -21,7 +22,7 @@ public class NewtonsoftJsonSerializer : ISerializer /// Json serializer settings public NewtonsoftJsonSerializer(JsonSerializerSettings settings) { - this.settings = settings; + _settings = settings; } /// @@ -35,8 +36,8 @@ public NewtonsoftJsonSerializer() /// public Task SerializeAsync(object message, Stream output, ISerializerContext context) { - using var sw = new StreamWriter(output, UTF8NoBom, DefaultBufferSize, true); - var serializer = JsonSerializer.CreateDefault(this.settings); + using var sw = new StreamWriter(output, s_uTF8NoBom, DefaultBufferSize, true); + var serializer = JsonSerializer.CreateDefault(_settings); serializer.Serialize(sw, message); diff --git a/src/KafkaFlow.Serializer.ProtobufNet/ProtobufNetDeserializer.cs b/src/KafkaFlow.Serializer.ProtobufNet/ProtobufNetDeserializer.cs index c4838c35a..e2f6822a1 100644 --- a/src/KafkaFlow.Serializer.ProtobufNet/ProtobufNetDeserializer.cs +++ b/src/KafkaFlow.Serializer.ProtobufNet/ProtobufNetDeserializer.cs @@ -1,10 +1,9 @@ -namespace KafkaFlow.Serializer -{ - using System; - using System.IO; - using System.Threading.Tasks; - using ProtoBuf; +using System; +using System.IO; +using System.Threading.Tasks; +namespace KafkaFlow.Serializer +{ /// /// A message deserializer using protobuf-net library /// @@ -13,7 +12,7 @@ public class ProtobufNetDeserializer : IDeserializer /// public Task DeserializeAsync(Stream input, Type type, ISerializerContext context) { - return Task.FromResult(Serializer.Deserialize(type, input)); + return Task.FromResult(ProtoBuf.Serializer.Deserialize(type, input)); } } } diff --git a/src/KafkaFlow.Serializer.ProtobufNet/ProtobufNetSerializer.cs b/src/KafkaFlow.Serializer.ProtobufNet/ProtobufNetSerializer.cs index 12d8a706d..9aa879163 100644 --- a/src/KafkaFlow.Serializer.ProtobufNet/ProtobufNetSerializer.cs +++ b/src/KafkaFlow.Serializer.ProtobufNet/ProtobufNetSerializer.cs @@ 
-1,9 +1,8 @@ -namespace KafkaFlow.Serializer -{ - using System.IO; - using System.Threading.Tasks; - using ProtoBuf; +using System.IO; +using System.Threading.Tasks; +namespace KafkaFlow.Serializer +{ /// /// A message serializer using protobuf-net library /// @@ -12,7 +11,7 @@ public class ProtobufNetSerializer : ISerializer /// public Task SerializeAsync(object message, Stream output, ISerializerContext context) { - Serializer.Serialize(output, message); + ProtoBuf.Serializer.Serialize(output, message); return Task.CompletedTask; } diff --git a/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentAvro/ConfluentAvroDeserializer.cs b/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentAvro/ConfluentAvroDeserializer.cs index 3f29adc7d..b59bdd5b8 100644 --- a/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentAvro/ConfluentAvroDeserializer.cs +++ b/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentAvro/ConfluentAvroDeserializer.cs @@ -1,17 +1,17 @@ -namespace KafkaFlow.Serializer.SchemaRegistry -{ - using System; - using System.IO; - using System.Threading.Tasks; - using Confluent.SchemaRegistry; - using Confluent.SchemaRegistry.Serdes; +using System; +using System.IO; +using System.Threading.Tasks; +using Confluent.SchemaRegistry; +using Confluent.SchemaRegistry.Serdes; +namespace KafkaFlow.Serializer.SchemaRegistry +{ /// /// A message serializer using Apache.Avro library /// public class ConfluentAvroDeserializer : IDeserializer { - private readonly ISchemaRegistryClient schemaRegistryClient; + private readonly ISchemaRegistryClient _schemaRegistryClient; /// /// Initializes a new instance of the class. @@ -19,7 +19,7 @@ public class ConfluentAvroDeserializer : IDeserializer /// The to be used by the framework public ConfluentAvroDeserializer(IDependencyResolver resolver) { - this.schemaRegistryClient = + _schemaRegistryClient = resolver.Resolve() ?? throw new InvalidOperationException( $"No schema registry configuration was found. 
Set it using {nameof(ClusterConfigurationBuilderExtensions.WithSchemaRegistry)} on cluster configuration"); @@ -34,7 +34,7 @@ public Task DeserializeAsync(Stream input, Type type, ISerializerContext () => Activator .CreateInstance( typeof(AvroDeserializer<>).MakeGenericType(type), - this.schemaRegistryClient, + _schemaRegistryClient, null)) .DeserializeAsync(input, context); } diff --git a/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentAvro/ConfluentAvroSerializer.cs b/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentAvro/ConfluentAvroSerializer.cs index e0e2eb49e..6118f5932 100644 --- a/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentAvro/ConfluentAvroSerializer.cs +++ b/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentAvro/ConfluentAvroSerializer.cs @@ -1,18 +1,18 @@ -namespace KafkaFlow.Serializer.SchemaRegistry -{ - using System; - using System.IO; - using System.Threading.Tasks; - using Confluent.SchemaRegistry; - using Confluent.SchemaRegistry.Serdes; +using System; +using System.IO; +using System.Threading.Tasks; +using Confluent.SchemaRegistry; +using Confluent.SchemaRegistry.Serdes; +namespace KafkaFlow.Serializer.SchemaRegistry +{ /// /// A message serializer using Apache.Avro library /// public class ConfluentAvroSerializer : ISerializer { - private readonly ISchemaRegistryClient schemaRegistryClient; - private readonly AvroSerializerConfig serializerConfig; + private readonly ISchemaRegistryClient _schemaRegistryClient; + private readonly AvroSerializerConfig _serializerConfig; /// /// Initializes a new instance of the class. @@ -23,12 +23,12 @@ public ConfluentAvroSerializer( IDependencyResolver resolver, AvroSerializerConfig serializerConfig = null) { - this.schemaRegistryClient = + _schemaRegistryClient = resolver.Resolve() ?? throw new InvalidOperationException( $"No schema registry configuration was found. 
Set it using {nameof(ClusterConfigurationBuilderExtensions.WithSchemaRegistry)} on cluster configuration"); - this.serializerConfig = serializerConfig; + _serializerConfig = serializerConfig; } /// @@ -39,8 +39,8 @@ public Task SerializeAsync(object message, Stream output, ISerializerContext con message.GetType(), () => Activator.CreateInstance( typeof(AvroSerializer<>).MakeGenericType(message.GetType()), - this.schemaRegistryClient, - this.serializerConfig)) + _schemaRegistryClient, + _serializerConfig)) .SerializeAsync(message, output, context); } } diff --git a/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentAvro/ConfluentAvroTypeNameResolver.cs b/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentAvro/ConfluentAvroTypeNameResolver.cs index c0fc1c9a2..79be740ff 100644 --- a/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentAvro/ConfluentAvroTypeNameResolver.cs +++ b/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentAvro/ConfluentAvroTypeNameResolver.cs @@ -1,21 +1,21 @@ +using System.Threading.Tasks; +using Confluent.SchemaRegistry; +using Newtonsoft.Json; + namespace KafkaFlow.Serializer.SchemaRegistry { - using System.Threading.Tasks; - using Confluent.SchemaRegistry; - using Newtonsoft.Json; - internal class ConfluentAvroTypeNameResolver : ISchemaRegistryTypeNameResolver { - private readonly ISchemaRegistryClient client; + private readonly ISchemaRegistryClient _client; public ConfluentAvroTypeNameResolver(ISchemaRegistryClient client) { - this.client = client; + _client = client; } public async Task ResolveAsync(int id) { - var schema = await this.client.GetSchemaAsync(id); + var schema = await _client.GetSchemaAsync(id); var avroFields = JsonConvert.DeserializeObject(schema.SchemaString); return $"{avroFields.Namespace}.{avroFields.Name}"; diff --git a/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentAvro/ConsumerConfigurationBuilderExtensions.cs b/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentAvro/ConsumerConfigurationBuilderExtensions.cs index 
859e852f5..e0e44fda5 100644 --- a/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentAvro/ConsumerConfigurationBuilderExtensions.cs +++ b/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentAvro/ConsumerConfigurationBuilderExtensions.cs @@ -1,10 +1,10 @@ -namespace KafkaFlow -{ - using Confluent.SchemaRegistry; - using KafkaFlow.Configuration; - using KafkaFlow.Middlewares.Serializer; - using KafkaFlow.Serializer.SchemaRegistry; +using Confluent.SchemaRegistry; +using KafkaFlow.Configuration; +using KafkaFlow.Middlewares.Serializer; +using KafkaFlow.Serializer.SchemaRegistry; +namespace KafkaFlow +{ /// /// No needed /// diff --git a/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentAvro/ProducerConfigurationBuilderExtensions.cs b/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentAvro/ProducerConfigurationBuilderExtensions.cs index 54bf6bac7..5c4e98536 100644 --- a/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentAvro/ProducerConfigurationBuilderExtensions.cs +++ b/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentAvro/ProducerConfigurationBuilderExtensions.cs @@ -1,11 +1,11 @@ -namespace KafkaFlow -{ - using Confluent.SchemaRegistry; - using Confluent.SchemaRegistry.Serdes; - using KafkaFlow.Configuration; - using KafkaFlow.Middlewares.Serializer; - using KafkaFlow.Serializer.SchemaRegistry; +using Confluent.SchemaRegistry; +using Confluent.SchemaRegistry.Serdes; +using KafkaFlow.Configuration; +using KafkaFlow.Middlewares.Serializer; +using KafkaFlow.Serializer.SchemaRegistry; +namespace KafkaFlow +{ /// /// No needed /// diff --git a/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentJson/ConfluentJsonDeserializer.cs b/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentJson/ConfluentJsonDeserializer.cs index 0ec931348..f86e20238 100644 --- a/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentJson/ConfluentJsonDeserializer.cs +++ b/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentJson/ConfluentJsonDeserializer.cs @@ -1,17 +1,17 @@ -namespace KafkaFlow.Serializer.SchemaRegistry 
-{ - using System; - using System.IO; - using System.Threading.Tasks; - using Confluent.SchemaRegistry.Serdes; - using NJsonSchema.Generation; +using System; +using System.IO; +using System.Threading.Tasks; +using Confluent.SchemaRegistry.Serdes; +using NJsonSchema.Generation; +namespace KafkaFlow.Serializer.SchemaRegistry +{ /// /// A json message serializer integrated with the confluent schema registry /// public class ConfluentJsonDeserializer : IDeserializer { - private readonly JsonSchemaGeneratorSettings schemaGeneratorSettings; + private readonly JsonSchemaGeneratorSettings _schemaGeneratorSettings; /// /// Initializes a new instance of the class. @@ -19,7 +19,7 @@ public class ConfluentJsonDeserializer : IDeserializer /// An instance of public ConfluentJsonDeserializer(JsonSchemaGeneratorSettings schemaGeneratorSettings = null) { - this.schemaGeneratorSettings = schemaGeneratorSettings; + _schemaGeneratorSettings = schemaGeneratorSettings; } /// @@ -32,7 +32,7 @@ public Task DeserializeAsync(Stream input, Type type, ISerializerContext .CreateInstance( typeof(JsonDeserializer<>).MakeGenericType(type), null, - this.schemaGeneratorSettings)) + _schemaGeneratorSettings)) .DeserializeAsync(input, context); } } diff --git a/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentJson/ConfluentJsonSerializer.cs b/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentJson/ConfluentJsonSerializer.cs index fa2763903..2da3b3d01 100644 --- a/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentJson/ConfluentJsonSerializer.cs +++ b/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentJson/ConfluentJsonSerializer.cs @@ -1,20 +1,20 @@ -namespace KafkaFlow.Serializer.SchemaRegistry -{ - using System; - using System.IO; - using System.Threading.Tasks; - using Confluent.SchemaRegistry; - using Confluent.SchemaRegistry.Serdes; - using NJsonSchema.Generation; +using System; +using System.IO; +using System.Threading.Tasks; +using Confluent.SchemaRegistry; +using Confluent.SchemaRegistry.Serdes; 
+using NJsonSchema.Generation; +namespace KafkaFlow.Serializer.SchemaRegistry +{ /// /// A json message serializer integrated with the confluent schema registry /// public class ConfluentJsonSerializer : ISerializer { - private readonly ISchemaRegistryClient schemaRegistryClient; - private readonly JsonSerializerConfig serializerConfig; - private readonly JsonSchemaGeneratorSettings schemaGeneratorSettings; + private readonly ISchemaRegistryClient _schemaRegistryClient; + private readonly JsonSerializerConfig _serializerConfig; + private readonly JsonSchemaGeneratorSettings _schemaGeneratorSettings; /// /// Initializes a new instance of the class. @@ -40,13 +40,13 @@ public ConfluentJsonSerializer( JsonSerializerConfig serializerConfig, JsonSchemaGeneratorSettings schemaGeneratorSettings = null) { - this.schemaRegistryClient = + _schemaRegistryClient = resolver.Resolve() ?? throw new InvalidOperationException( $"No schema registry configuration was found. Set it using {nameof(ClusterConfigurationBuilderExtensions.WithSchemaRegistry)} on cluster configuration"); - this.serializerConfig = serializerConfig; - this.schemaGeneratorSettings = schemaGeneratorSettings; + _serializerConfig = serializerConfig; + _schemaGeneratorSettings = schemaGeneratorSettings; } /// @@ -57,9 +57,9 @@ public Task SerializeAsync(object message, Stream output, ISerializerContext con message.GetType(), () => Activator.CreateInstance( typeof(JsonSerializer<>).MakeGenericType(message.GetType()), - this.schemaRegistryClient, - this.serializerConfig, - this.schemaGeneratorSettings)) + _schemaRegistryClient, + _serializerConfig, + _schemaGeneratorSettings)) .SerializeAsync(message, output, context); } } diff --git a/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentJson/ConsumerConfigurationBuilderExtensions.cs b/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentJson/ConsumerConfigurationBuilderExtensions.cs index 4700b797b..c73033f03 100644 --- 
a/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentJson/ConsumerConfigurationBuilderExtensions.cs +++ b/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentJson/ConsumerConfigurationBuilderExtensions.cs @@ -1,9 +1,9 @@ -namespace KafkaFlow -{ - using KafkaFlow.Configuration; - using KafkaFlow.Middlewares.Serializer.Resolvers; - using KafkaFlow.Serializer.SchemaRegistry; +using KafkaFlow.Configuration; +using KafkaFlow.Middlewares.Serializer.Resolvers; +using KafkaFlow.Serializer.SchemaRegistry; +namespace KafkaFlow +{ /// /// No needed /// diff --git a/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentJson/ProducerConfigurationBuilderExtensions.cs b/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentJson/ProducerConfigurationBuilderExtensions.cs index 208595450..e8ec141cb 100644 --- a/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentJson/ProducerConfigurationBuilderExtensions.cs +++ b/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentJson/ProducerConfigurationBuilderExtensions.cs @@ -1,10 +1,10 @@ -namespace KafkaFlow -{ - using Confluent.SchemaRegistry.Serdes; - using KafkaFlow.Configuration; - using KafkaFlow.Middlewares.Serializer.Resolvers; - using KafkaFlow.Serializer.SchemaRegistry; +using Confluent.SchemaRegistry.Serdes; +using KafkaFlow.Configuration; +using KafkaFlow.Middlewares.Serializer.Resolvers; +using KafkaFlow.Serializer.SchemaRegistry; +namespace KafkaFlow +{ /// /// No needed /// diff --git a/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentProtobuf/ConfluentProtobufDeserializer.cs b/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentProtobuf/ConfluentProtobufDeserializer.cs index 3722dc35d..9e18232a1 100644 --- a/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentProtobuf/ConfluentProtobufDeserializer.cs +++ b/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentProtobuf/ConfluentProtobufDeserializer.cs @@ -1,11 +1,11 @@ -namespace KafkaFlow.Serializer.SchemaRegistry -{ - using System; - using System.Collections.Generic; - using System.IO; - using 
System.Threading.Tasks; - using Confluent.SchemaRegistry.Serdes; +using System; +using System.Collections.Generic; +using System.IO; +using System.Threading.Tasks; +using Confluent.SchemaRegistry.Serdes; +namespace KafkaFlow.Serializer.SchemaRegistry +{ /// /// A protobuf message serializer integrated with the confluent schema registry /// diff --git a/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentProtobuf/ConfluentProtobufSerializer.cs b/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentProtobuf/ConfluentProtobufSerializer.cs index d29699d71..3f5dc5c4b 100644 --- a/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentProtobuf/ConfluentProtobufSerializer.cs +++ b/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentProtobuf/ConfluentProtobufSerializer.cs @@ -1,18 +1,18 @@ -namespace KafkaFlow.Serializer.SchemaRegistry -{ - using System; - using System.IO; - using System.Threading.Tasks; - using Confluent.SchemaRegistry; - using Confluent.SchemaRegistry.Serdes; +using System; +using System.IO; +using System.Threading.Tasks; +using Confluent.SchemaRegistry; +using Confluent.SchemaRegistry.Serdes; +namespace KafkaFlow.Serializer.SchemaRegistry +{ /// /// A protobuf message serializer integrated with the confluent schema registry /// public class ConfluentProtobufSerializer : ISerializer { - private readonly ISchemaRegistryClient schemaRegistryClient; - private readonly ProtobufSerializerConfig serializerConfig; + private readonly ISchemaRegistryClient _schemaRegistryClient; + private readonly ProtobufSerializerConfig _serializerConfig; /// /// Initializes a new instance of the class. @@ -21,12 +21,12 @@ public class ConfluentProtobufSerializer : ISerializer /// An instance of public ConfluentProtobufSerializer(IDependencyResolver resolver, ProtobufSerializerConfig serializerConfig = null) { - this.schemaRegistryClient = + _schemaRegistryClient = resolver.Resolve() ?? throw new InvalidOperationException( $"No schema registry configuration was found. 
Set it using {nameof(ClusterConfigurationBuilderExtensions.WithSchemaRegistry)} on cluster configuration"); - this.serializerConfig = serializerConfig; + _serializerConfig = serializerConfig; } /// @@ -37,8 +37,8 @@ public Task SerializeAsync(object message, Stream output, ISerializerContext con message.GetType(), () => Activator.CreateInstance( typeof(ProtobufSerializer<>).MakeGenericType(message.GetType()), - this.schemaRegistryClient, - this.serializerConfig)) + _schemaRegistryClient, + _serializerConfig)) .SerializeAsync(message, output, context); } } diff --git a/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentProtobuf/ConfluentProtobufTypeNameResolver.cs b/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentProtobuf/ConfluentProtobufTypeNameResolver.cs index 8ae17ee63..ff754b45a 100644 --- a/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentProtobuf/ConfluentProtobufTypeNameResolver.cs +++ b/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentProtobuf/ConfluentProtobufTypeNameResolver.cs @@ -1,23 +1,23 @@ +using System.Linq; +using System.Threading.Tasks; +using Confluent.SchemaRegistry; +using Google.Protobuf; +using Google.Protobuf.Reflection; + namespace KafkaFlow { - using System.Linq; - using System.Threading.Tasks; - using Confluent.SchemaRegistry; - using Google.Protobuf; - using Google.Protobuf.Reflection; - internal class ConfluentProtobufTypeNameResolver : ISchemaRegistryTypeNameResolver { - private readonly ISchemaRegistryClient client; + private readonly ISchemaRegistryClient _client; public ConfluentProtobufTypeNameResolver(ISchemaRegistryClient client) { - this.client = client; + _client = client; } public async Task ResolveAsync(int id) { - var schemaString = (await this.client.GetSchemaAsync(id, "serialized")).SchemaString; + var schemaString = (await _client.GetSchemaAsync(id, "serialized")).SchemaString; var protoFields = FileDescriptorProto.Parser.ParseFrom(ByteString.FromBase64(schemaString)); diff --git 
a/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentProtobuf/ConsumerConfigurationBuilderExtensions.cs b/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentProtobuf/ConsumerConfigurationBuilderExtensions.cs index 59012f3d8..558eb8056 100644 --- a/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentProtobuf/ConsumerConfigurationBuilderExtensions.cs +++ b/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentProtobuf/ConsumerConfigurationBuilderExtensions.cs @@ -1,10 +1,10 @@ -namespace KafkaFlow -{ - using Confluent.SchemaRegistry; - using KafkaFlow.Configuration; - using KafkaFlow.Middlewares.Serializer; - using KafkaFlow.Serializer.SchemaRegistry; +using Confluent.SchemaRegistry; +using KafkaFlow.Configuration; +using KafkaFlow.Middlewares.Serializer; +using KafkaFlow.Serializer.SchemaRegistry; +namespace KafkaFlow +{ /// /// No needed /// diff --git a/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentProtobuf/ProducerConfigurationBuilderExtensions.cs b/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentProtobuf/ProducerConfigurationBuilderExtensions.cs index af83cd19d..180073f59 100644 --- a/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentProtobuf/ProducerConfigurationBuilderExtensions.cs +++ b/src/KafkaFlow.Serializer.SchemaRegistry.ConfluentProtobuf/ProducerConfigurationBuilderExtensions.cs @@ -1,11 +1,11 @@ -namespace KafkaFlow -{ - using Confluent.SchemaRegistry; - using Confluent.SchemaRegistry.Serdes; - using KafkaFlow.Configuration; - using KafkaFlow.Middlewares.Serializer; - using KafkaFlow.Serializer.SchemaRegistry; +using Confluent.SchemaRegistry; +using Confluent.SchemaRegistry.Serdes; +using KafkaFlow.Configuration; +using KafkaFlow.Middlewares.Serializer; +using KafkaFlow.Serializer.SchemaRegistry; +namespace KafkaFlow +{ /// /// No needed /// diff --git a/src/KafkaFlow.UnitTests/Consumer/ConsumerManagerTests.cs b/src/KafkaFlow.UnitTests/Consumer/ConsumerManagerTests.cs deleted file mode 100644 index d8492f775..000000000 --- 
a/src/KafkaFlow.UnitTests/Consumer/ConsumerManagerTests.cs +++ /dev/null @@ -1,167 +0,0 @@ -namespace KafkaFlow.UnitTests.Consumer -{ - using System; - using System.Collections.Generic; - using System.Threading.Tasks; - using AutoFixture; - using Confluent.Kafka; - using FluentAssertions; - using KafkaFlow.Configuration; - using KafkaFlow.Consumers; - using Microsoft.VisualStudio.TestTools.UnitTesting; - using Moq; - - [TestClass] - public class ConsumerManagerTests - { - private readonly Fixture fixture = new(); - - private ConsumerManager target; - - private Mock consumerMock; - private Mock workerPoolMock; - private Mock feederMock; - private Mock logHandlerMock; - private Mock dependencyResolver; - - private Action, List> onPartitionAssignedHandler; - private Action, List> onPartitionRevokedHandler; - - [TestInitialize] - public void Setup() - { - this.consumerMock = new Mock(); - this.workerPoolMock = new Mock(); - this.feederMock = new Mock(); - this.logHandlerMock = new Mock(); - this.dependencyResolver = new Mock(); - - this.consumerMock - .Setup( - x => x.OnPartitionsAssigned(It.IsAny, List>>())) - .Callback( - (Action, List> value) => - this.onPartitionAssignedHandler = value); - - this.consumerMock - .Setup( - x => x.OnPartitionsRevoked( - It.IsAny, List>>())) - .Callback( - (Action, List> value) => - this.onPartitionRevokedHandler = value); - - var configurationMock = new Mock(); - - configurationMock - .SetupGet(x => x.WorkersCountCalculator) - .Returns((_, _) => Task.FromResult(10)); - - configurationMock - .SetupGet(x => x.WorkersCountEvaluationInterval) - .Returns(TimeSpan.FromMinutes(5)); - - this.consumerMock - .SetupGet(x => x.Configuration) - .Returns(configurationMock.Object); - - this.target = new ConsumerManager( - this.consumerMock.Object, - this.workerPoolMock.Object, - this.feederMock.Object, - this.dependencyResolver.Object, - this.logHandlerMock.Object); - } - - [TestMethod] - public void ConstructorCalled_InitializeProperties() - { - // 
Assert - this.target.Consumer.Should().Be(this.consumerMock.Object); - this.target.WorkerPool.Should().Be(this.workerPoolMock.Object); - this.target.Feeder.Should().Be(this.feederMock.Object); - } - - [TestMethod] - public async Task StartAsync_StartDependencies() - { - // Arrange - this.feederMock - .Setup(x => x.Start()); - - // Act - await this.target.StartAsync(); - - // Assert - this.feederMock.VerifyAll(); - } - - [TestMethod] - public async Task StopAsync_StopDependencies() - { - // Arrange - this.feederMock - .Setup(x => x.StopAsync()) - .Returns(Task.CompletedTask); - - this.workerPoolMock - .Setup(x => x.StopAsync()) - .Returns(Task.CompletedTask); - - // Act - await this.target.StopAsync(); - - // Assert - this.feederMock.VerifyAll(); - this.workerPoolMock.VerifyAll(); - this.consumerMock.Verify(x => x.Dispose(), Times.Once()); - } - - [TestMethod] - public void OnPartitionsAssigned_StartWorkerPool() - { - // Arrange - var partitions = this.fixture.Create>(); - - this.workerPoolMock - .Setup(x => x.StartAsync(partitions, It.IsAny())) - .Returns(Task.CompletedTask); - - this.logHandlerMock - .Setup(x => x.Info(It.IsAny(), It.IsAny())); - - // Act - this.onPartitionAssignedHandler(this.dependencyResolver.Object, Mock.Of>(), partitions); - - // Assert - this.workerPoolMock.VerifyAll(); - this.logHandlerMock.VerifyAll(); - } - - [TestMethod] - public void OnPartitionsRevoked_StopWorkerPool() - { - // Arrange - IConsumer consumer = null; - var partitions = this.fixture.Create>(); - - this.workerPoolMock - .Setup(x => x.StopAsync()) - .Returns(Task.CompletedTask); - - this.consumerMock - .SetupGet(x => x.Configuration) - .Returns(new Mock().Object); - - this.logHandlerMock - .Setup(x => x.Warning(It.IsAny(), It.IsAny())); - - // Act - this.onPartitionRevokedHandler(this.dependencyResolver.Object, consumer, partitions); - - // Assert - this.workerPoolMock.VerifyAll(); - this.logHandlerMock.VerifyAll(); - } - } -} diff --git 
a/src/KafkaFlow.UnitTests/Middlewares/Serialization/SchemaRegistryTypeResolverTests.cs b/src/KafkaFlow.UnitTests/Middlewares/Serialization/SchemaRegistryTypeResolverTests.cs deleted file mode 100644 index 38f720526..000000000 --- a/src/KafkaFlow.UnitTests/Middlewares/Serialization/SchemaRegistryTypeResolverTests.cs +++ /dev/null @@ -1,47 +0,0 @@ -namespace KafkaFlow.UnitTests.Middlewares.Serialization -{ - using System; - using System.Buffers.Binary; - using System.Threading.Tasks; - using FluentAssertions; - using Microsoft.VisualStudio.TestTools.UnitTesting; - using Moq; - - [TestClass] - public class SchemaRegistryTypeResolverTests - { - private readonly Mock messageContextMock; - private readonly Mock schemaRegistryTypeNameResolverMock; - private readonly SchemaRegistryTypeResolver schemaRegistryTypeResolver; - private readonly byte[] messageKey = new byte[] { 0x18, 0x19 }; - private readonly byte[] messageValue = new byte[] { 0x20, 0x21, 0x22, 0x23, 0x24, 0x25 }; - - public SchemaRegistryTypeResolverTests() - { - this.messageContextMock = new Mock(); - this.messageContextMock.Setup(context => context.Message).Returns(new Message(messageKey, messageValue)); - this.schemaRegistryTypeNameResolverMock = new Mock(); - this.schemaRegistryTypeResolver = new SchemaRegistryTypeResolver(this.schemaRegistryTypeNameResolverMock.Object); - } - - [TestMethod] - public async Task OnConsumeAsync_WhenCalledTwice_TypeIsResolvedOnceThenTypeIsLoadedFromCache() - { - // Arrange - var expectedSchemaId = BinaryPrimitives.ReadInt32BigEndian( - this.messageValue.AsSpan().Slice(1, 4)); - - this.schemaRegistryTypeNameResolverMock.Setup( - resolver => resolver.ResolveAsync(expectedSchemaId)).ReturnsAsync(typeof(SchemaRegistryTypeResolverTests).FullName); - - // Act - await this.schemaRegistryTypeResolver.OnConsumeAsync(messageContextMock.Object); - var type = await this.schemaRegistryTypeResolver.OnConsumeAsync(messageContextMock.Object); - - // Assert - 
this.schemaRegistryTypeNameResolverMock.Verify(resolver => resolver.ResolveAsync(expectedSchemaId), Times.Once); - var expectedObject = (SchemaRegistryTypeResolverTests)Activator.CreateInstance(type); - expectedObject.Should().NotBeNull(); - } - } -} diff --git a/src/KafkaFlow.UnitTests/OffsetManagerTests.cs b/src/KafkaFlow.UnitTests/OffsetManagerTests.cs deleted file mode 100644 index 631de1336..000000000 --- a/src/KafkaFlow.UnitTests/OffsetManagerTests.cs +++ /dev/null @@ -1,88 +0,0 @@ -namespace KafkaFlow.UnitTests -{ - using System; - using System.Collections.Generic; - using Confluent.Kafka; - using FluentAssertions; - using KafkaFlow.Consumers; - using Microsoft.VisualStudio.TestTools.UnitTesting; - using Moq; - using TopicPartitionOffset = KafkaFlow.TopicPartitionOffset; - - [TestClass] - public class OffsetManagerTests - { - private Mock committerMock; - private OffsetManager target; - private TopicPartition topicPartition; - - [TestInitialize] - public void Setup() - { - this.committerMock = new Mock(); - this.topicPartition = new TopicPartition("topic-A", new Partition(1)); - - this.target = new OffsetManager( - this.committerMock.Object, - new List { this.topicPartition }); - } - - [TestMethod] - public void MarkAsProcessed_WithNotQueuedContext_ShouldThrowInvalidOperation() - { - // Act - Action act = () => this.target.MarkAsProcessed(this.MockConsumerContext(1)); - - // Assert - act.Should().Throw(); - } - - [TestMethod] - public void MarkAsProcessed_WithGaps_ShouldStoreOffsetJustOnce() - { - // Arrange - this.target.Enqueue(this.MockConsumerContext(1)); - this.target.Enqueue(this.MockConsumerContext(2)); - this.target.Enqueue(this.MockConsumerContext(3)); - - // Act - this.target.MarkAsProcessed(this.MockConsumerContext(3)); - this.target.MarkAsProcessed(this.MockConsumerContext(2)); - this.target.MarkAsProcessed(this.MockConsumerContext(1)); - - // Assert - this.committerMock.Verify( - c => - c.MarkAsProcessed( - It.Is( - p => - p.Partition == 
this.topicPartition.Partition && - p.Offset == 3)), - Times.Once); - } - - private IConsumerContext MockConsumerContext(int offset) - { - var mock = new Mock(); - var tpo = new TopicPartitionOffset(this.topicPartition.Topic, this.topicPartition.Partition, offset); - - mock - .SetupGet(x => x.Offset) - .Returns(tpo.Offset); - - mock - .SetupGet(x => x.Partition) - .Returns(tpo.Partition); - - mock - .SetupGet(x => x.Topic) - .Returns(tpo.Topic); - - mock - .SetupGet(x => x.TopicPartitionOffset) - .Returns(tpo); - - return mock.Object; - } - } -} diff --git a/src/KafkaFlow.Unity/UnityDependencyConfigurator.cs b/src/KafkaFlow.Unity/UnityDependencyConfigurator.cs index 8273095bf..452e0d19e 100644 --- a/src/KafkaFlow.Unity/UnityDependencyConfigurator.cs +++ b/src/KafkaFlow.Unity/UnityDependencyConfigurator.cs @@ -1,17 +1,16 @@ -namespace KafkaFlow.Unity -{ - using System; - using System.Linq; - using global::Unity; - using global::Unity.Lifetime; - using InstanceLifetime = KafkaFlow.InstanceLifetime; +using System; +using System.Linq; +using global::Unity; +using global::Unity.Lifetime; +namespace KafkaFlow.Unity +{ /// /// The Unity implementation of /// public class UnityDependencyConfigurator : IDependencyConfigurator { - private readonly IUnityContainer container; + private readonly IUnityContainer _container; /// /// Initializes a new instance of the class. 
@@ -19,7 +18,7 @@ public class UnityDependencyConfigurator : IDependencyConfigurator /// The Unity container instance public UnityDependencyConfigurator(IUnityContainer container) { - this.container = container; + _container = container; } /// @@ -28,7 +27,7 @@ public IDependencyConfigurator Add( Type implementationType, InstanceLifetime lifetime) { - this.container.RegisterType( + _container.RegisterType( serviceType, implementationType, (ITypeLifetimeManager)ParseLifetime(lifetime)); @@ -40,7 +39,7 @@ public IDependencyConfigurator Add(InstanceLifetime l where TService : class where TImplementation : class, TService { - this.container.RegisterType((ITypeLifetimeManager)ParseLifetime(lifetime)); + _container.RegisterType((ITypeLifetimeManager)ParseLifetime(lifetime)); return this; } @@ -48,7 +47,7 @@ public IDependencyConfigurator Add(InstanceLifetime l public IDependencyConfigurator Add(InstanceLifetime lifetime) where TService : class { - this.container.RegisterType((ITypeLifetimeManager)ParseLifetime(lifetime)); + _container.RegisterType((ITypeLifetimeManager)ParseLifetime(lifetime)); return this; } @@ -56,7 +55,7 @@ public IDependencyConfigurator Add(InstanceLifetime lifetime) public IDependencyConfigurator Add(TImplementation service) where TImplementation : class { - this.container.RegisterInstance(service); + _container.RegisterInstance(service); return this; } @@ -73,7 +72,7 @@ public IDependencyConfigurator Add( name = Guid.NewGuid().ToString(); } - this.container.RegisterFactory( + _container.RegisterFactory( serviceType, name, c => factory(new UnityDependencyResolver(c)), @@ -82,11 +81,6 @@ public IDependencyConfigurator Add( return this; } - private bool AlreadyRegistered(Type registeredType) - { - return this.container.Registrations.Any(x => x.RegisteredType == registeredType); - } - private static object ParseLifetime(InstanceLifetime lifetime) => lifetime switch { @@ -95,5 +89,10 @@ private static object ParseLifetime(InstanceLifetime lifetime) => 
InstanceLifetime.Transient => new TransientLifetimeManager(), _ => throw new InvalidCastException($"There is not mapping defined to {lifetime}") }; + + private bool AlreadyRegistered(Type registeredType) + { + return _container.Registrations.Any(x => x.RegisteredType == registeredType); + } } } diff --git a/src/KafkaFlow.Unity/UnityDependencyResolver.cs b/src/KafkaFlow.Unity/UnityDependencyResolver.cs index 35b06c43f..b98099735 100644 --- a/src/KafkaFlow.Unity/UnityDependencyResolver.cs +++ b/src/KafkaFlow.Unity/UnityDependencyResolver.cs @@ -1,35 +1,35 @@ +using System; +using System.Collections.Generic; +using global::Unity; + namespace KafkaFlow.Unity { - using System; - using System.Collections.Generic; - using global::Unity; - /// /// Unity implementation of /// public class UnityDependencyResolver : IDependencyResolver { - private readonly IUnityContainer container; + private readonly IUnityContainer _container; /// /// Initializes a new instance of the class. /// /// A Unity container instance - public UnityDependencyResolver(IUnityContainer container) => this.container = container; + public UnityDependencyResolver(IUnityContainer container) => _container = container; /// public IDependencyResolverScope CreateScope() => - new UnityDependencyResolverScope(this.container.CreateChildContainer()); + new UnityDependencyResolverScope(_container.CreateChildContainer()); /// - public object Resolve(Type type) => this.container.Resolve(type); + public object Resolve(Type type) => _container.Resolve(type); /// public IEnumerable ResolveAll(Type type) { - yield return this.container.Resolve(type); + yield return _container.Resolve(type); - foreach (var instance in this.container.ResolveAll(type)) + foreach (var instance in _container.ResolveAll(type)) { yield return instance; } diff --git a/src/KafkaFlow.Unity/UnityDependencyResolverScope.cs b/src/KafkaFlow.Unity/UnityDependencyResolverScope.cs index 5a5bf27ba..c0aa31e8b 100644 --- 
a/src/KafkaFlow.Unity/UnityDependencyResolverScope.cs +++ b/src/KafkaFlow.Unity/UnityDependencyResolverScope.cs @@ -1,19 +1,19 @@ +using global::Unity; + namespace KafkaFlow.Unity { - using global::Unity; - internal class UnityDependencyResolverScope : IDependencyResolverScope { - private readonly IUnityContainer container; + private readonly IUnityContainer _container; public UnityDependencyResolverScope(IUnityContainer container) { - this.container = container; + _container = container; this.Resolver = new UnityDependencyResolver(container); } public IDependencyResolver Resolver { get; } - public void Dispose() => this.container.Dispose(); + public void Dispose() => _container.Dispose(); } } diff --git a/src/KafkaFlow.sln b/src/KafkaFlow.sln deleted file mode 100644 index 1d55c3c82..000000000 --- a/src/KafkaFlow.sln +++ /dev/null @@ -1,252 +0,0 @@ - -Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio Version 17 -VisualStudioVersion = 17.7.34031.279 -MinimumVisualStudioVersion = 10.0.40219.1 -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow", "KafkaFlow\KafkaFlow.csproj", "{E1055352-9F5B-4980-80A3-50C335B79A16}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Compressor.Gzip", "KafkaFlow.Compressor.Gzip\KafkaFlow.Compressor.Gzip.csproj", "{B668ACDA-5E45-4985-9D33-6EDFF9059D9A}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Serializer.JsonCore", "KafkaFlow.Serializer.JsonCore\KafkaFlow.Serializer.JsonCore.csproj", "{42CFFA0D-3BF2-42F0-A1B0-BDCD30CBB470}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Serializer.ProtobufNet", "KafkaFlow.Serializer.ProtobufNet\KafkaFlow.Serializer.ProtobufNet.csproj", "{8917E0B9-A943-46FB-8E91-6E9B91F976F6}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.UnitTests", "KafkaFlow.UnitTests\KafkaFlow.UnitTests.csproj", "{CE82B50F-6C03-4875-ADF5-63FD5B7F1AF8}" -EndProject 
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Framework", "Framework", "{068CB250-2804-4C7E-9490-17F432B9CE21}" -EndProject -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Tests", "Tests", "{7A9B997B-DAAC-4004-94F3-32F6B88E0068}" -EndProject -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Samples", "Samples", "{303AE78F-6C96-4DF4-AC89-5C4FD53AFF0B}" -EndProject -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Serialization", "Serialization", "{ADAAA63C-E17C-4F1B-A062-3CCA071D75C2}" -EndProject -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Compression", "Compression", "{0A782A83-B66D-4B99-9BE2-2B18AAD2E03C}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Abstractions", "KafkaFlow.Abstractions\KafkaFlow.Abstractions.csproj", "{88808771-56BE-422B-94DC-7AB070F64E98}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.IntegrationTests", "KafkaFlow.IntegrationTests\KafkaFlow.IntegrationTests.csproj", "{36F459F4-8323-472A-A8C5-8C9D89F92012}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Serializer.NewtonsoftJson", "KafkaFlow.Serializer.NewtonsoftJson\KafkaFlow.Serializer.NewtonsoftJson.csproj", "{FC622AB0-6481-4249-8D83-27BC39912103}" -EndProject -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "DependencyResolvers", "DependencyResolvers", "{292BCEDD-55B4-49BB-B8B2-24CD834FF2AA}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Microsoft.DependencyInjection", "KafkaFlow.Microsoft.DependencyInjection\KafkaFlow.Microsoft.DependencyInjection.csproj", "{B86A51E3-7AC9-4EF8-BD2A-1ACC9EF0F5AE}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Unity", "KafkaFlow.Unity\KafkaFlow.Unity.csproj", "{22DB5734-E792-4309-B565-77F02D27D88C}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Sample", "..\samples\KafkaFlow.Sample\KafkaFlow.Sample.csproj", 
"{E59C13AF-41E7-4DFA-B118-A65EA87F0605}" -EndProject -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "LogHandlers", "LogHandlers", "{EF626895-FDAE-4B28-9110-BA85671CBBF2}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.LogHandler.Console", "KafkaFlow.LogHandler.Console\KafkaFlow.LogHandler.Console.csproj", "{6E9F7CDC-E935-4A51-9358-2E6DBB901A34}" -EndProject -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Admin", "Admin", "{58483813-0D7C-423E-8E7D-8FBF3E6CDB6D}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Admin", "KafkaFlow.Admin\KafkaFlow.Admin.csproj", "{46B6F104-D46E-4DC2-ADC2-18E76F5BBCD9}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Admin.WebApi", "KafkaFlow.Admin.WebApi\KafkaFlow.Admin.WebApi.csproj", "{827620D3-2258-410E-A79E-E782ED42284C}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Sample.WebApi", "..\samples\KafkaFlow.Sample.WebApi\KafkaFlow.Sample.WebApi.csproj", "{15C12D0C-FE8A-41F9-BBCF-5A963F05D5C7}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Serializer.SchemaRegistry.ConfluentAvro", "KafkaFlow.Serializer.SchemaRegistry.ConfluentAvro\KafkaFlow.Serializer.SchemaRegistry.ConfluentAvro.csproj", "{2E63A019-F8AD-4EC3-A80A-F560DEC7C5B4}" -EndProject -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Middlewares", "Middlewares", "{ED24B548-6F37-4283-A35B-F6015BFB7A34}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Sample.BatchOperations", "..\samples\KafkaFlow.Sample.BatchOperations\KafkaFlow.Sample.BatchOperations.csproj", "{DE8A8871-B19E-489D-8292-386A06A4CDFA}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Extensions.Hosting", "KafkaFlow.Extensions.Hosting\KafkaFlow.Extensions.Hosting.csproj", "{7913342E-80FD-4094-B892-18DAA2E6948F}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.SchemaRegistry", 
"KafkaFlow.SchemaRegistry\KafkaFlow.SchemaRegistry.csproj", "{98C9826C-76F6-4C21-8A32-D55C2647905B}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Serializer.SchemaRegistry.ConfluentJson", "KafkaFlow.Serializer.SchemaRegistry.ConfluentJson\KafkaFlow.Serializer.SchemaRegistry.ConfluentJson.csproj", "{582D0B0C-F331-45A0-B4D8-CE9DC24F5389}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Serializer.SchemaRegistry.ConfluentProtobuf", "KafkaFlow.Serializer.SchemaRegistry.ConfluentProtobuf\KafkaFlow.Serializer.SchemaRegistry.ConfluentProtobuf.csproj", "{F212D09B-9AD6-428F-9008-38BF1F470B3A}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Admin.Dashboard", "KafkaFlow.Admin.Dashboard\KafkaFlow.Admin.Dashboard.csproj", "{4072F646-9393-4BF3-A479-0550AC1BB6C4}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Sample.Dashboard", "..\samples\KafkaFlow.Sample.Dashboard\KafkaFlow.Sample.Dashboard.csproj", "{F32DC7DA-36EA-4199-91F5-81960FD9C650}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Sample.SchemaRegistry", "..\samples\KafkaFlow.Sample.SchemaRegistry\KafkaFlow.Sample.SchemaRegistry.csproj", "{2BD49C06-7A88-4B98-91B0-659282D2A45E}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Sample.FlowControl", "..\samples\KafkaFlow.Sample.FlowControl\KafkaFlow.Sample.FlowControl.csproj", "{7B61C99E-3AEB-4497-8A38-F780CB309130}" -EndProject -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Deploy", "Deploy", "{4A6A390C-A63A-4371-86BB-28481AD6D4C0}" - ProjectSection(SolutionItems) = preProject - ..\.github\workflows\build.yml = ..\.github\workflows\build.yml - ..\.github\workflows\publish.yml = ..\.github\workflows\publish.yml - EndProjectSection -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.LogHandler.Microsoft", "KafkaFlow.LogHandler.Microsoft\KafkaFlow.LogHandler.Microsoft.csproj", 
"{8EAF0D96-F760-4FEF-9237-92779F66482D}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Sample.PauseConsumerOnError", "..\samples\KafkaFlow.Sample.PauseConsumerOnError\KafkaFlow.Sample.PauseConsumerOnError.csproj", "{B4A9E7CE-7A37-411E-967E-D9B5FD1A3992}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Sample.ConsumerThrottling", "..\samples\KafkaFlow.Sample.ConsumerThrottling\KafkaFlow.Sample.ConsumerThrottling.csproj", "{4A16F519-FAF8-432C-AD0A-CC44F7BD392D}" -EndProject -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Telemetry", "Telemetry", "{96F5D441-B8DE-4ABC-BEF2-F758D1B2BA39}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.OpenTelemetry", "KafkaFlow.OpenTelemetry\KafkaFlow.OpenTelemetry.csproj", "{1557B135-4925-4FA2-80DA-8AD13155F3BD}" -EndProject -Global - GlobalSection(SolutionConfigurationPlatforms) = preSolution - Debug|Any CPU = Debug|Any CPU - Release|Any CPU = Release|Any CPU - EndGlobalSection - GlobalSection(ProjectConfigurationPlatforms) = postSolution - {E1055352-9F5B-4980-80A3-50C335B79A16}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {E1055352-9F5B-4980-80A3-50C335B79A16}.Debug|Any CPU.Build.0 = Debug|Any CPU - {E1055352-9F5B-4980-80A3-50C335B79A16}.Release|Any CPU.ActiveCfg = Release|Any CPU - {E1055352-9F5B-4980-80A3-50C335B79A16}.Release|Any CPU.Build.0 = Release|Any CPU - {B668ACDA-5E45-4985-9D33-6EDFF9059D9A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {B668ACDA-5E45-4985-9D33-6EDFF9059D9A}.Debug|Any CPU.Build.0 = Debug|Any CPU - {B668ACDA-5E45-4985-9D33-6EDFF9059D9A}.Release|Any CPU.ActiveCfg = Release|Any CPU - {B668ACDA-5E45-4985-9D33-6EDFF9059D9A}.Release|Any CPU.Build.0 = Release|Any CPU - {42CFFA0D-3BF2-42F0-A1B0-BDCD30CBB470}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {42CFFA0D-3BF2-42F0-A1B0-BDCD30CBB470}.Debug|Any CPU.Build.0 = Debug|Any CPU - {42CFFA0D-3BF2-42F0-A1B0-BDCD30CBB470}.Release|Any CPU.ActiveCfg = Release|Any CPU - 
{42CFFA0D-3BF2-42F0-A1B0-BDCD30CBB470}.Release|Any CPU.Build.0 = Release|Any CPU - {8917E0B9-A943-46FB-8E91-6E9B91F976F6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {8917E0B9-A943-46FB-8E91-6E9B91F976F6}.Debug|Any CPU.Build.0 = Debug|Any CPU - {8917E0B9-A943-46FB-8E91-6E9B91F976F6}.Release|Any CPU.ActiveCfg = Release|Any CPU - {8917E0B9-A943-46FB-8E91-6E9B91F976F6}.Release|Any CPU.Build.0 = Release|Any CPU - {CE82B50F-6C03-4875-ADF5-63FD5B7F1AF8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {CE82B50F-6C03-4875-ADF5-63FD5B7F1AF8}.Debug|Any CPU.Build.0 = Debug|Any CPU - {CE82B50F-6C03-4875-ADF5-63FD5B7F1AF8}.Release|Any CPU.ActiveCfg = Release|Any CPU - {CE82B50F-6C03-4875-ADF5-63FD5B7F1AF8}.Release|Any CPU.Build.0 = Release|Any CPU - {88808771-56BE-422B-94DC-7AB070F64E98}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {88808771-56BE-422B-94DC-7AB070F64E98}.Debug|Any CPU.Build.0 = Debug|Any CPU - {88808771-56BE-422B-94DC-7AB070F64E98}.Release|Any CPU.ActiveCfg = Release|Any CPU - {88808771-56BE-422B-94DC-7AB070F64E98}.Release|Any CPU.Build.0 = Release|Any CPU - {36F459F4-8323-472A-A8C5-8C9D89F92012}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {36F459F4-8323-472A-A8C5-8C9D89F92012}.Debug|Any CPU.Build.0 = Debug|Any CPU - {36F459F4-8323-472A-A8C5-8C9D89F92012}.Release|Any CPU.ActiveCfg = Release|Any CPU - {36F459F4-8323-472A-A8C5-8C9D89F92012}.Release|Any CPU.Build.0 = Release|Any CPU - {FC622AB0-6481-4249-8D83-27BC39912103}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {FC622AB0-6481-4249-8D83-27BC39912103}.Debug|Any CPU.Build.0 = Debug|Any CPU - {FC622AB0-6481-4249-8D83-27BC39912103}.Release|Any CPU.ActiveCfg = Release|Any CPU - {FC622AB0-6481-4249-8D83-27BC39912103}.Release|Any CPU.Build.0 = Release|Any CPU - {B86A51E3-7AC9-4EF8-BD2A-1ACC9EF0F5AE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {B86A51E3-7AC9-4EF8-BD2A-1ACC9EF0F5AE}.Debug|Any CPU.Build.0 = Debug|Any CPU - {B86A51E3-7AC9-4EF8-BD2A-1ACC9EF0F5AE}.Release|Any CPU.ActiveCfg = Release|Any CPU - 
{B86A51E3-7AC9-4EF8-BD2A-1ACC9EF0F5AE}.Release|Any CPU.Build.0 = Release|Any CPU - {22DB5734-E792-4309-B565-77F02D27D88C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {22DB5734-E792-4309-B565-77F02D27D88C}.Debug|Any CPU.Build.0 = Debug|Any CPU - {22DB5734-E792-4309-B565-77F02D27D88C}.Release|Any CPU.ActiveCfg = Release|Any CPU - {22DB5734-E792-4309-B565-77F02D27D88C}.Release|Any CPU.Build.0 = Release|Any CPU - {E59C13AF-41E7-4DFA-B118-A65EA87F0605}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {E59C13AF-41E7-4DFA-B118-A65EA87F0605}.Debug|Any CPU.Build.0 = Debug|Any CPU - {E59C13AF-41E7-4DFA-B118-A65EA87F0605}.Release|Any CPU.ActiveCfg = Release|Any CPU - {E59C13AF-41E7-4DFA-B118-A65EA87F0605}.Release|Any CPU.Build.0 = Release|Any CPU - {6E9F7CDC-E935-4A51-9358-2E6DBB901A34}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {6E9F7CDC-E935-4A51-9358-2E6DBB901A34}.Debug|Any CPU.Build.0 = Debug|Any CPU - {6E9F7CDC-E935-4A51-9358-2E6DBB901A34}.Release|Any CPU.ActiveCfg = Release|Any CPU - {6E9F7CDC-E935-4A51-9358-2E6DBB901A34}.Release|Any CPU.Build.0 = Release|Any CPU - {46B6F104-D46E-4DC2-ADC2-18E76F5BBCD9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {46B6F104-D46E-4DC2-ADC2-18E76F5BBCD9}.Debug|Any CPU.Build.0 = Debug|Any CPU - {46B6F104-D46E-4DC2-ADC2-18E76F5BBCD9}.Release|Any CPU.ActiveCfg = Release|Any CPU - {46B6F104-D46E-4DC2-ADC2-18E76F5BBCD9}.Release|Any CPU.Build.0 = Release|Any CPU - {827620D3-2258-410E-A79E-E782ED42284C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {827620D3-2258-410E-A79E-E782ED42284C}.Debug|Any CPU.Build.0 = Debug|Any CPU - {827620D3-2258-410E-A79E-E782ED42284C}.Release|Any CPU.ActiveCfg = Release|Any CPU - {827620D3-2258-410E-A79E-E782ED42284C}.Release|Any CPU.Build.0 = Release|Any CPU - {15C12D0C-FE8A-41F9-BBCF-5A963F05D5C7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {15C12D0C-FE8A-41F9-BBCF-5A963F05D5C7}.Debug|Any CPU.Build.0 = Debug|Any CPU - {15C12D0C-FE8A-41F9-BBCF-5A963F05D5C7}.Release|Any CPU.ActiveCfg = Release|Any CPU - 
{15C12D0C-FE8A-41F9-BBCF-5A963F05D5C7}.Release|Any CPU.Build.0 = Release|Any CPU - {2E63A019-F8AD-4EC3-A80A-F560DEC7C5B4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {2E63A019-F8AD-4EC3-A80A-F560DEC7C5B4}.Debug|Any CPU.Build.0 = Debug|Any CPU - {2E63A019-F8AD-4EC3-A80A-F560DEC7C5B4}.Release|Any CPU.ActiveCfg = Release|Any CPU - {2E63A019-F8AD-4EC3-A80A-F560DEC7C5B4}.Release|Any CPU.Build.0 = Release|Any CPU - {DE8A8871-B19E-489D-8292-386A06A4CDFA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {DE8A8871-B19E-489D-8292-386A06A4CDFA}.Debug|Any CPU.Build.0 = Debug|Any CPU - {DE8A8871-B19E-489D-8292-386A06A4CDFA}.Release|Any CPU.ActiveCfg = Release|Any CPU - {DE8A8871-B19E-489D-8292-386A06A4CDFA}.Release|Any CPU.Build.0 = Release|Any CPU - {7913342E-80FD-4094-B892-18DAA2E6948F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {7913342E-80FD-4094-B892-18DAA2E6948F}.Debug|Any CPU.Build.0 = Debug|Any CPU - {7913342E-80FD-4094-B892-18DAA2E6948F}.Release|Any CPU.ActiveCfg = Release|Any CPU - {7913342E-80FD-4094-B892-18DAA2E6948F}.Release|Any CPU.Build.0 = Release|Any CPU - {98C9826C-76F6-4C21-8A32-D55C2647905B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {98C9826C-76F6-4C21-8A32-D55C2647905B}.Debug|Any CPU.Build.0 = Debug|Any CPU - {98C9826C-76F6-4C21-8A32-D55C2647905B}.Release|Any CPU.ActiveCfg = Release|Any CPU - {98C9826C-76F6-4C21-8A32-D55C2647905B}.Release|Any CPU.Build.0 = Release|Any CPU - {582D0B0C-F331-45A0-B4D8-CE9DC24F5389}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {582D0B0C-F331-45A0-B4D8-CE9DC24F5389}.Debug|Any CPU.Build.0 = Debug|Any CPU - {582D0B0C-F331-45A0-B4D8-CE9DC24F5389}.Release|Any CPU.ActiveCfg = Release|Any CPU - {582D0B0C-F331-45A0-B4D8-CE9DC24F5389}.Release|Any CPU.Build.0 = Release|Any CPU - {F212D09B-9AD6-428F-9008-38BF1F470B3A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {F212D09B-9AD6-428F-9008-38BF1F470B3A}.Debug|Any CPU.Build.0 = Debug|Any CPU - {F212D09B-9AD6-428F-9008-38BF1F470B3A}.Release|Any CPU.ActiveCfg = Release|Any CPU - 
{F212D09B-9AD6-428F-9008-38BF1F470B3A}.Release|Any CPU.Build.0 = Release|Any CPU - {4072F646-9393-4BF3-A479-0550AC1BB6C4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {4072F646-9393-4BF3-A479-0550AC1BB6C4}.Debug|Any CPU.Build.0 = Debug|Any CPU - {4072F646-9393-4BF3-A479-0550AC1BB6C4}.Release|Any CPU.ActiveCfg = Release|Any CPU - {4072F646-9393-4BF3-A479-0550AC1BB6C4}.Release|Any CPU.Build.0 = Release|Any CPU - {F32DC7DA-36EA-4199-91F5-81960FD9C650}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {F32DC7DA-36EA-4199-91F5-81960FD9C650}.Debug|Any CPU.Build.0 = Debug|Any CPU - {F32DC7DA-36EA-4199-91F5-81960FD9C650}.Release|Any CPU.ActiveCfg = Release|Any CPU - {F32DC7DA-36EA-4199-91F5-81960FD9C650}.Release|Any CPU.Build.0 = Release|Any CPU - {2BD49C06-7A88-4B98-91B0-659282D2A45E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {2BD49C06-7A88-4B98-91B0-659282D2A45E}.Debug|Any CPU.Build.0 = Debug|Any CPU - {2BD49C06-7A88-4B98-91B0-659282D2A45E}.Release|Any CPU.ActiveCfg = Release|Any CPU - {2BD49C06-7A88-4B98-91B0-659282D2A45E}.Release|Any CPU.Build.0 = Release|Any CPU - {7B61C99E-3AEB-4497-8A38-F780CB309130}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {7B61C99E-3AEB-4497-8A38-F780CB309130}.Debug|Any CPU.Build.0 = Debug|Any CPU - {7B61C99E-3AEB-4497-8A38-F780CB309130}.Release|Any CPU.ActiveCfg = Release|Any CPU - {7B61C99E-3AEB-4497-8A38-F780CB309130}.Release|Any CPU.Build.0 = Release|Any CPU - {8EAF0D96-F760-4FEF-9237-92779F66482D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {8EAF0D96-F760-4FEF-9237-92779F66482D}.Debug|Any CPU.Build.0 = Debug|Any CPU - {8EAF0D96-F760-4FEF-9237-92779F66482D}.Release|Any CPU.ActiveCfg = Release|Any CPU - {8EAF0D96-F760-4FEF-9237-92779F66482D}.Release|Any CPU.Build.0 = Release|Any CPU - {B4A9E7CE-7A37-411E-967E-D9B5FD1A3992}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {B4A9E7CE-7A37-411E-967E-D9B5FD1A3992}.Debug|Any CPU.Build.0 = Debug|Any CPU - {B4A9E7CE-7A37-411E-967E-D9B5FD1A3992}.Release|Any CPU.ActiveCfg = Release|Any CPU - 
{B4A9E7CE-7A37-411E-967E-D9B5FD1A3992}.Release|Any CPU.Build.0 = Release|Any CPU - {4A16F519-FAF8-432C-AD0A-CC44F7BD392D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {4A16F519-FAF8-432C-AD0A-CC44F7BD392D}.Debug|Any CPU.Build.0 = Debug|Any CPU - {4A16F519-FAF8-432C-AD0A-CC44F7BD392D}.Release|Any CPU.ActiveCfg = Release|Any CPU - {4A16F519-FAF8-432C-AD0A-CC44F7BD392D}.Release|Any CPU.Build.0 = Release|Any CPU - {1557B135-4925-4FA2-80DA-8AD13155F3BD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {1557B135-4925-4FA2-80DA-8AD13155F3BD}.Debug|Any CPU.Build.0 = Debug|Any CPU - {1557B135-4925-4FA2-80DA-8AD13155F3BD}.Release|Any CPU.ActiveCfg = Release|Any CPU - {1557B135-4925-4FA2-80DA-8AD13155F3BD}.Release|Any CPU.Build.0 = Release|Any CPU - EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection - GlobalSection(NestedProjects) = preSolution - {E1055352-9F5B-4980-80A3-50C335B79A16} = {068CB250-2804-4C7E-9490-17F432B9CE21} - {B668ACDA-5E45-4985-9D33-6EDFF9059D9A} = {0A782A83-B66D-4B99-9BE2-2B18AAD2E03C} - {42CFFA0D-3BF2-42F0-A1B0-BDCD30CBB470} = {ADAAA63C-E17C-4F1B-A062-3CCA071D75C2} - {8917E0B9-A943-46FB-8E91-6E9B91F976F6} = {ADAAA63C-E17C-4F1B-A062-3CCA071D75C2} - {CE82B50F-6C03-4875-ADF5-63FD5B7F1AF8} = {7A9B997B-DAAC-4004-94F3-32F6B88E0068} - {ADAAA63C-E17C-4F1B-A062-3CCA071D75C2} = {ED24B548-6F37-4283-A35B-F6015BFB7A34} - {0A782A83-B66D-4B99-9BE2-2B18AAD2E03C} = {ED24B548-6F37-4283-A35B-F6015BFB7A34} - {88808771-56BE-422B-94DC-7AB070F64E98} = {068CB250-2804-4C7E-9490-17F432B9CE21} - {36F459F4-8323-472A-A8C5-8C9D89F92012} = {7A9B997B-DAAC-4004-94F3-32F6B88E0068} - {FC622AB0-6481-4249-8D83-27BC39912103} = {ADAAA63C-E17C-4F1B-A062-3CCA071D75C2} - {B86A51E3-7AC9-4EF8-BD2A-1ACC9EF0F5AE} = {292BCEDD-55B4-49BB-B8B2-24CD834FF2AA} - {22DB5734-E792-4309-B565-77F02D27D88C} = {292BCEDD-55B4-49BB-B8B2-24CD834FF2AA} - {E59C13AF-41E7-4DFA-B118-A65EA87F0605} = {303AE78F-6C96-4DF4-AC89-5C4FD53AFF0B} - 
{6E9F7CDC-E935-4A51-9358-2E6DBB901A34} = {EF626895-FDAE-4B28-9110-BA85671CBBF2} - {46B6F104-D46E-4DC2-ADC2-18E76F5BBCD9} = {58483813-0D7C-423E-8E7D-8FBF3E6CDB6D} - {827620D3-2258-410E-A79E-E782ED42284C} = {58483813-0D7C-423E-8E7D-8FBF3E6CDB6D} - {15C12D0C-FE8A-41F9-BBCF-5A963F05D5C7} = {303AE78F-6C96-4DF4-AC89-5C4FD53AFF0B} - {2E63A019-F8AD-4EC3-A80A-F560DEC7C5B4} = {ADAAA63C-E17C-4F1B-A062-3CCA071D75C2} - {DE8A8871-B19E-489D-8292-386A06A4CDFA} = {303AE78F-6C96-4DF4-AC89-5C4FD53AFF0B} - {7913342E-80FD-4094-B892-18DAA2E6948F} = {068CB250-2804-4C7E-9490-17F432B9CE21} - {98C9826C-76F6-4C21-8A32-D55C2647905B} = {ADAAA63C-E17C-4F1B-A062-3CCA071D75C2} - {582D0B0C-F331-45A0-B4D8-CE9DC24F5389} = {ADAAA63C-E17C-4F1B-A062-3CCA071D75C2} - {F212D09B-9AD6-428F-9008-38BF1F470B3A} = {ADAAA63C-E17C-4F1B-A062-3CCA071D75C2} - {4072F646-9393-4BF3-A479-0550AC1BB6C4} = {58483813-0D7C-423E-8E7D-8FBF3E6CDB6D} - {F32DC7DA-36EA-4199-91F5-81960FD9C650} = {303AE78F-6C96-4DF4-AC89-5C4FD53AFF0B} - {2BD49C06-7A88-4B98-91B0-659282D2A45E} = {303AE78F-6C96-4DF4-AC89-5C4FD53AFF0B} - {7B61C99E-3AEB-4497-8A38-F780CB309130} = {303AE78F-6C96-4DF4-AC89-5C4FD53AFF0B} - {8EAF0D96-F760-4FEF-9237-92779F66482D} = {EF626895-FDAE-4B28-9110-BA85671CBBF2} - {B4A9E7CE-7A37-411E-967E-D9B5FD1A3992} = {303AE78F-6C96-4DF4-AC89-5C4FD53AFF0B} - {4A16F519-FAF8-432C-AD0A-CC44F7BD392D} = {303AE78F-6C96-4DF4-AC89-5C4FD53AFF0B} - {1557B135-4925-4FA2-80DA-8AD13155F3BD} = {96F5D441-B8DE-4ABC-BEF2-F758D1B2BA39} - EndGlobalSection - GlobalSection(ExtensibilityGlobals) = postSolution - SolutionGuid = {6AE955B5-16B0-41CF-9F12-66D15B3DD1AB} - EndGlobalSection -EndGlobal diff --git a/src/KafkaFlow/Batching/BatchConsumeMessageContext.cs b/src/KafkaFlow/Batching/BatchConsumeMessageContext.cs index b398876ae..8fdbf72e3 100644 --- a/src/KafkaFlow/Batching/BatchConsumeMessageContext.cs +++ b/src/KafkaFlow/Batching/BatchConsumeMessageContext.cs @@ -1,11 +1,11 @@ +using System; +using System.Collections.Generic; + namespace 
KafkaFlow.Batching { - using System; - using System.Collections.Generic; - internal class BatchConsumeMessageContext : IMessageContext, IDisposable { - private readonly IDependencyResolverScope batchDependencyScope; + private readonly IDependencyResolverScope _batchDependencyScope; public BatchConsumeMessageContext( IConsumerContext consumer, @@ -14,7 +14,7 @@ public BatchConsumeMessageContext( { this.ConsumerContext = consumer; this.Message = new Message(null, batchMessage); - this.batchDependencyScope = consumer.WorkerDependencyResolver.CreateScope(); + _batchDependencyScope = consumer.WorkerDependencyResolver.CreateScope(); this.Items = new Dictionary(); this.Brokers = brokers; } @@ -27,7 +27,7 @@ public BatchConsumeMessageContext( public IProducerContext ProducerContext => null; - public IDependencyResolver DependencyResolver => this.batchDependencyScope.Resolver; + public IDependencyResolver DependencyResolver => _batchDependencyScope.Resolver; public IDictionary Items { get; } @@ -36,6 +36,6 @@ public BatchConsumeMessageContext( public IMessageContext SetMessage(object key, object value) => throw new NotSupportedException($"{nameof(BatchConsumeMessageContext)} does not allow to change the message"); - public void Dispose() => this.batchDependencyScope.Dispose(); + public void Dispose() => _batchDependencyScope.Dispose(); } } diff --git a/src/KafkaFlow/Batching/BatchConsumeMiddleware.cs b/src/KafkaFlow/Batching/BatchConsumeMiddleware.cs index c371bb80f..b4819de79 100644 --- a/src/KafkaFlow/Batching/BatchConsumeMiddleware.cs +++ b/src/KafkaFlow/Batching/BatchConsumeMiddleware.cs @@ -1,25 +1,25 @@ -namespace KafkaFlow.Batching +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using KafkaFlow.Configuration; +using KafkaFlow.Consumers; + +namespace KafkaFlow.Batching { - using System; - using System.Collections.Generic; - using System.Linq; - using System.Threading; - using 
System.Threading.Tasks; - using KafkaFlow.Configuration; - using KafkaFlow.Consumers; - internal class BatchConsumeMiddleware : IMessageMiddleware, IDisposable { - private readonly SemaphoreSlim dispatchSemaphore = new(1, 1); + private readonly SemaphoreSlim _dispatchSemaphore = new(1, 1); - private readonly int batchSize; - private readonly TimeSpan batchTimeout; - private readonly ILogHandler logHandler; - private readonly IConsumerConfiguration consumerConfiguration; + private readonly int _batchSize; + private readonly TimeSpan _batchTimeout; + private readonly ILogHandler _logHandler; + private readonly IConsumerConfiguration _consumerConfiguration; - private readonly List batch; - private CancellationTokenSource dispatchTokenSource; - private Task dispatchTask; + private readonly List _batch; + private CancellationTokenSource _dispatchTokenSource; + private Task _dispatchTask; public BatchConsumeMiddleware( IConsumerMiddlewareContext middlewareContext, @@ -27,26 +27,26 @@ public BatchConsumeMiddleware( TimeSpan batchTimeout, ILogHandler logHandler) { - this.batchSize = batchSize; - this.batchTimeout = batchTimeout; - this.logHandler = logHandler; - this.batch = new(batchSize); - this.consumerConfiguration = middlewareContext.Consumer.Configuration; + _batchSize = batchSize; + _batchTimeout = batchTimeout; + _logHandler = logHandler; + _batch = new(batchSize); + _consumerConfiguration = middlewareContext.Consumer.Configuration; middlewareContext.Worker.WorkerStopped.Subscribe(() => this.TriggerDispatchAndWaitAsync()); } public async Task Invoke(IMessageContext context, MiddlewareDelegate next) { - await this.dispatchSemaphore.WaitAsync(); + await _dispatchSemaphore.WaitAsync(); try { context.ConsumerContext.AutoMessageCompletion = false; - this.batch.Add(context); + _batch.Add(context); - if (this.batch.Count == 1) + if (_batch.Count == 1) { this.ScheduleExecution(context, next); return; @@ -54,10 +54,10 @@ public async Task Invoke(IMessageContext context, 
MiddlewareDelegate next) } finally { - this.dispatchSemaphore.Release(); + _dispatchSemaphore.Release(); } - if (this.batch.Count >= this.batchSize) + if (_batch.Count >= _batchSize) { await this.TriggerDispatchAndWaitAsync(); } @@ -65,26 +65,26 @@ public async Task Invoke(IMessageContext context, MiddlewareDelegate next) public void Dispose() { - this.dispatchTask?.Dispose(); - this.dispatchTokenSource?.Dispose(); - this.dispatchSemaphore.Dispose(); + _dispatchTask?.Dispose(); + _dispatchTokenSource?.Dispose(); + _dispatchSemaphore.Dispose(); } private async Task TriggerDispatchAndWaitAsync() { - await this.dispatchSemaphore.WaitAsync(); - this.dispatchTokenSource?.Cancel(); - this.dispatchSemaphore.Release(); + await _dispatchSemaphore.WaitAsync(); + _dispatchTokenSource?.Cancel(); + _dispatchSemaphore.Release(); - await (this.dispatchTask ?? Task.CompletedTask); + await (_dispatchTask ?? Task.CompletedTask); } private void ScheduleExecution(IMessageContext context, MiddlewareDelegate next) { - this.dispatchTokenSource = CancellationTokenSource.CreateLinkedTokenSource(context.ConsumerContext.WorkerStopped); + _dispatchTokenSource = CancellationTokenSource.CreateLinkedTokenSource(context.ConsumerContext.WorkerStopped); - this.dispatchTask = Task - .Delay(this.batchTimeout, this.dispatchTokenSource.Token) + _dispatchTask = Task + .Delay(_batchTimeout, _dispatchTokenSource.Token) .ContinueWith( _ => this.DispatchAsync(context, next), CancellationToken.None); @@ -92,12 +92,12 @@ private void ScheduleExecution(IMessageContext context, MiddlewareDelegate next) private async Task DispatchAsync(IMessageContext context, MiddlewareDelegate next) { - await this.dispatchSemaphore.WaitAsync(); + await _dispatchSemaphore.WaitAsync(); - this.dispatchTokenSource.Dispose(); - this.dispatchTokenSource = null; + _dispatchTokenSource.Dispose(); + _dispatchTokenSource = null; - var localBatch = this.batch.ToList(); + var localBatch = _batch.ToList(); try { @@ -106,7 +106,7 @@ private 
async Task DispatchAsync(IMessageContext context, MiddlewareDelegate nex return; } - var batchContext = new BatchConsumeMessageContext(context.ConsumerContext, localBatch, this.consumerConfiguration.ClusterConfiguration.Brokers); + var batchContext = new BatchConsumeMessageContext(context.ConsumerContext, localBatch, _consumerConfiguration.ClusterConfiguration.Brokers); await next(batchContext).ConfigureAwait(false); } @@ -119,7 +119,7 @@ private async Task DispatchAsync(IMessageContext context, MiddlewareDelegate nex } catch (Exception ex) { - this.logHandler.Error( + _logHandler.Error( "Error executing a message batch", ex, new @@ -131,10 +131,10 @@ private async Task DispatchAsync(IMessageContext context, MiddlewareDelegate nex } finally { - this.batch.Clear(); - this.dispatchSemaphore.Release(); + _batch.Clear(); + _dispatchSemaphore.Release(); - if (this.consumerConfiguration.AutoMessageCompletion) + if (_consumerConfiguration.AutoMessageCompletion) { foreach (var messageContext in localBatch) { diff --git a/src/KafkaFlow/Batching/BatchingExtensions.cs b/src/KafkaFlow/Batching/BatchingExtensions.cs index 00b100aa4..d79253dc7 100644 --- a/src/KafkaFlow/Batching/BatchingExtensions.cs +++ b/src/KafkaFlow/Batching/BatchingExtensions.cs @@ -1,11 +1,11 @@ +using System; +using System.Collections.Generic; +using KafkaFlow.Batching; +using KafkaFlow.Configuration; +using KafkaFlow.Consumers; + namespace KafkaFlow { - using System; - using System.Collections.Generic; - using KafkaFlow.Batching; - using KafkaFlow.Configuration; - using KafkaFlow.Consumers; - /// /// no needed /// diff --git a/src/KafkaFlow/Clusters/ClusterManager.cs b/src/KafkaFlow/Clusters/ClusterManager.cs index aeca278af..5b6f3d1c2 100644 --- a/src/KafkaFlow/Clusters/ClusterManager.cs +++ b/src/KafkaFlow/Clusters/ClusterManager.cs @@ -1,30 +1,28 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using 
Confluent.Kafka; +using Confluent.Kafka.Admin; +using KafkaFlow.Configuration; + namespace KafkaFlow.Clusters { - using System; - using System.Collections.Concurrent; - using System.Collections.Generic; - using System.Linq; - using System.Threading.Tasks; - using Confluent.Kafka; - using Confluent.Kafka.Admin; - using KafkaFlow.Configuration; - using TopicMetadata = KafkaFlow.TopicMetadata; - using TopicPartitionOffset = KafkaFlow.TopicPartitionOffset; - internal class ClusterManager : IClusterManager, IDisposable { - private readonly ILogHandler logHandler; - private readonly Lazy lazyAdminClient; - private readonly ClusterConfiguration configuration; + private readonly ILogHandler _logHandler; + private readonly Lazy _lazyAdminClient; + private readonly ClusterConfiguration _configuration; - private readonly ConcurrentDictionary topicMetadataCache = new(); + private readonly ConcurrentDictionary _topicMetadataCache = new(); public ClusterManager(ILogHandler logHandler, ClusterConfiguration configuration) { - this.logHandler = logHandler; - this.configuration = configuration; + _logHandler = logHandler; + _configuration = configuration; - this.lazyAdminClient = new Lazy( + _lazyAdminClient = new Lazy( () => { var config = new AdminClientConfig @@ -39,16 +37,16 @@ public ClusterManager(ILogHandler logHandler, ClusterConfiguration configuration }); } - public string ClusterName => this.configuration.Name; + public string ClusterName => _configuration.Name; public ValueTask GetTopicMetadataAsync(string topicName) { return new ValueTask( - this.topicMetadataCache.GetOrAdd( + _topicMetadataCache.GetOrAdd( topicName, _ => { - var metadata = this.lazyAdminClient.Value.GetMetadata(topicName, TimeSpan.FromSeconds(30)); + var metadata = _lazyAdminClient.Value.GetMetadata(topicName, TimeSpan.FromSeconds(30)); if (!metadata.Topics.Any()) { @@ -84,7 +82,7 @@ public async Task> GetConsumerGroupOffsetsAsyn new Partition(partition.Id)))) .ToList(); - var result = await 
this.lazyAdminClient.Value.ListConsumerGroupOffsetsAsync( + var result = await _lazyAdminClient.Value.ListConsumerGroupOffsetsAsync( new[] { new ConsumerGroupTopicPartitions(consumerGroup, topics) }); if (!result.Any()) @@ -112,7 +110,7 @@ public async Task CreateIfNotExistsAsync(IEnumerable configu }) .ToArray(); - await this.lazyAdminClient.Value.CreateTopicsAsync(topics); + await _lazyAdminClient.Value.CreateTopicsAsync(topics); } catch (CreateTopicsException exception) { @@ -121,7 +119,7 @@ public async Task CreateIfNotExistsAsync(IEnumerable configu { if (exceptionResult.Error.Code == ErrorCode.TopicAlreadyExists) { - this.logHandler.Warning( + _logHandler.Warning( "An error occurred creating topic {Topic}: {Reason}", new { @@ -136,12 +134,12 @@ public async Task CreateIfNotExistsAsync(IEnumerable configu if (hasNonExpectedErrors) { - this.logHandler.Error( + _logHandler.Error( "An error occurred creating topics", exception, new { - Servers = this.configuration.Brokers, + Servers = _configuration.Brokers, }); throw; } @@ -150,9 +148,9 @@ public async Task CreateIfNotExistsAsync(IEnumerable configu public void Dispose() { - if (this.lazyAdminClient.IsValueCreated) + if (_lazyAdminClient.IsValueCreated) { - this.lazyAdminClient.Value.Dispose(); + _lazyAdminClient.Value.Dispose(); } } } diff --git a/src/KafkaFlow/Clusters/ClusterManagerAccessor.cs b/src/KafkaFlow/Clusters/ClusterManagerAccessor.cs index 89abe2751..8e61d681a 100644 --- a/src/KafkaFlow/Clusters/ClusterManagerAccessor.cs +++ b/src/KafkaFlow/Clusters/ClusterManagerAccessor.cs @@ -1,22 +1,22 @@ +using System.Collections.Generic; +using System.Linq; + namespace KafkaFlow.Clusters { - using System.Collections.Generic; - using System.Linq; - internal class ClusterManagerAccessor : IClusterManagerAccessor { - private readonly Dictionary managers; + private readonly Dictionary _managers; public ClusterManagerAccessor(IEnumerable managers) { - this.managers = managers.ToDictionary(manager => 
manager.ClusterName); + _managers = managers.ToDictionary(manager => manager.ClusterName); } - public IEnumerable All => this.managers.Values; + public IEnumerable All => _managers.Values; public IClusterManager this[string name] => this.GetManager(name); public IClusterManager GetManager(string name) => - this.managers.TryGetValue(name, out var manager) ? manager : null; + _managers.TryGetValue(name, out var manager) ? manager : null; } } diff --git a/src/KafkaFlow/Clusters/IClusterManager.cs b/src/KafkaFlow/Clusters/IClusterManager.cs index 6d2f149b4..d34c2365a 100644 --- a/src/KafkaFlow/Clusters/IClusterManager.cs +++ b/src/KafkaFlow/Clusters/IClusterManager.cs @@ -1,9 +1,9 @@ +using System.Collections.Generic; +using System.Threading.Tasks; +using KafkaFlow.Configuration; + namespace KafkaFlow.Clusters { - using System.Collections.Generic; - using System.Threading.Tasks; - using KafkaFlow.Configuration; - /// /// Provides access to Cluster administration /// diff --git a/src/KafkaFlow/Clusters/IClusterManagerAccessor.cs b/src/KafkaFlow/Clusters/IClusterManagerAccessor.cs index 854ccc3e1..509091631 100644 --- a/src/KafkaFlow/Clusters/IClusterManagerAccessor.cs +++ b/src/KafkaFlow/Clusters/IClusterManagerAccessor.cs @@ -1,7 +1,7 @@ +using System.Collections.Generic; + namespace KafkaFlow.Clusters { - using System.Collections.Generic; - /// /// Provides access to the configured cluster manager /// diff --git a/src/KafkaFlow/Configuration/ClusterConfiguration.cs b/src/KafkaFlow/Configuration/ClusterConfiguration.cs index e6299e5da..de20ad6fa 100644 --- a/src/KafkaFlow/Configuration/ClusterConfiguration.cs +++ b/src/KafkaFlow/Configuration/ClusterConfiguration.cs @@ -1,19 +1,19 @@ +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Linq; + namespace KafkaFlow.Configuration { - using System; - using System.Collections.Generic; - using System.Collections.ObjectModel; - using System.Linq; - /// /// Represents the 
cluster configuration values /// public class ClusterConfiguration { - private readonly Func securityInformationHandler; - private readonly List producers = new(); - private readonly List consumers = new(); - private readonly ReadOnlyCollection topicsToCreateIfNotExist; + private readonly Func _securityInformationHandler; + private readonly List _producers = new(); + private readonly List _consumers = new(); + private readonly ReadOnlyCollection _topicsToCreateIfNotExist; /// /// Initializes a new instance of the class. @@ -34,13 +34,13 @@ public ClusterConfiguration( Action onStoppingHandler, IEnumerable topicsToCreateIfNotExist = null) { - this.securityInformationHandler = securityInformationHandler; - this.Name = name ?? Guid.NewGuid().ToString(); - this.Kafka = kafka; - this.Brokers = brokers.ToList(); - this.OnStoppingHandler = onStoppingHandler; - this.OnStartedHandler = onStartedHandler; - this.topicsToCreateIfNotExist = topicsToCreateIfNotExist?.ToList().AsReadOnly() ?? + _securityInformationHandler = securityInformationHandler; + Name = name ?? Guid.NewGuid().ToString(); + Kafka = kafka; + Brokers = brokers.ToList(); + OnStoppingHandler = onStoppingHandler; + OnStartedHandler = onStartedHandler; + _topicsToCreateIfNotExist = topicsToCreateIfNotExist?.ToList().AsReadOnly() ?? 
new List().AsReadOnly(); } @@ -62,18 +62,18 @@ public ClusterConfiguration( /// /// Gets the list of producers /// - public IReadOnlyCollection Producers => this.producers.AsReadOnly(); + public IReadOnlyCollection Producers => _producers.AsReadOnly(); /// /// Gets the list of consumers /// - public IReadOnlyCollection Consumers => this.consumers.AsReadOnly(); + public IReadOnlyCollection Consumers => _consumers.AsReadOnly(); /// /// Gets the list of topics to create if they do not exist /// public IReadOnlyCollection TopicsToCreateIfNotExist => - this.topicsToCreateIfNotExist; + _topicsToCreateIfNotExist; /// /// Gets the handler to be executed when the cluster started @@ -90,19 +90,19 @@ public ClusterConfiguration( /// /// A list of consumer configurations public void AddConsumers(IEnumerable configurations) => - this.consumers.AddRange(configurations); + _consumers.AddRange(configurations); /// /// Adds a list of producer configurations /// /// A list of producer configurations public void AddProducers(IEnumerable configurations) => - this.producers.AddRange(configurations); + _producers.AddRange(configurations); /// /// Gets the kafka security information /// /// - public SecurityInformation GetSecurityInformation() => this.securityInformationHandler?.Invoke(); + public SecurityInformation GetSecurityInformation() => _securityInformationHandler?.Invoke(); } } diff --git a/src/KafkaFlow/Configuration/ClusterConfigurationBuilder.cs b/src/KafkaFlow/Configuration/ClusterConfigurationBuilder.cs index ac8c34585..d4c15607a 100644 --- a/src/KafkaFlow/Configuration/ClusterConfigurationBuilder.cs +++ b/src/KafkaFlow/Configuration/ClusterConfigurationBuilder.cs @@ -1,20 +1,20 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using KafkaFlow.Producers; + namespace KafkaFlow.Configuration { - using System; - using System.Collections.Generic; - using System.Linq; - using KafkaFlow.Producers; - internal class ClusterConfigurationBuilder : 
IClusterConfigurationBuilder { - private readonly List producers = new(); - private readonly List consumers = new(); - private readonly List topicsToCreateIfNotExist = new(); - private Action onStartedHandler = _ => { }; - private Action onStoppingHandler = _ => { }; - private IEnumerable brokers; - private string name; - private Func securityInformationHandler; + private readonly List _producers = new(); + private readonly List _consumers = new(); + private readonly List _topicsToCreateIfNotExist = new(); + private Action _onStartedHandler = _ => { }; + private Action _onStoppingHandler = _ => { }; + private IEnumerable _brokers; + private string _name; + private Func _securityInformationHandler; public ClusterConfigurationBuilder(IDependencyConfigurator dependencyConfigurator) { @@ -27,35 +27,35 @@ public ClusterConfiguration Build(KafkaConfiguration kafkaConfiguration) { var configuration = new ClusterConfiguration( kafkaConfiguration, - this.name, - this.brokers.ToList(), - this.securityInformationHandler, - this.onStartedHandler, - this.onStoppingHandler, - this.topicsToCreateIfNotExist); + _name, + _brokers.ToList(), + _securityInformationHandler, + _onStartedHandler, + _onStoppingHandler, + _topicsToCreateIfNotExist); - configuration.AddProducers(this.producers.Select(x => x.Build(configuration))); - configuration.AddConsumers(this.consumers.Select(x => x.Build(configuration))); + configuration.AddProducers(_producers.Select(x => x.Build(configuration))); + configuration.AddConsumers(_consumers.Select(x => x.Build(configuration))); return configuration; } public IClusterConfigurationBuilder WithBrokers(IEnumerable brokers) { - this.brokers = brokers; + _brokers = brokers; return this; } public IClusterConfigurationBuilder WithName(string name) { - this.name = name; + _name = name; return this; } public IClusterConfigurationBuilder WithSecurityInformation(Action handler) { // Uses a handler to avoid in-memory stored passwords for long periods - 
this.securityInformationHandler = () => + _securityInformationHandler = () => { var config = new SecurityInformation(); handler(config); @@ -80,7 +80,7 @@ public IClusterConfigurationBuilder AddProducer(string name, Action handler) { - this.onStoppingHandler = handler; + _onStoppingHandler = handler; return this; } public IClusterConfigurationBuilder OnStarted(Action handler) { - this.onStartedHandler = handler; + _onStartedHandler = handler; return this; } @@ -113,7 +113,7 @@ public IClusterConfigurationBuilder CreateTopicIfNotExists( int numberOfPartitions, short replicationFactor) { - this.topicsToCreateIfNotExist.Add(new TopicConfiguration(topicName, numberOfPartitions, replicationFactor)); + _topicsToCreateIfNotExist.Add(new TopicConfiguration(topicName, numberOfPartitions, replicationFactor)); return this; } } diff --git a/src/KafkaFlow/Configuration/ConsumerConfiguration.cs b/src/KafkaFlow/Configuration/ConsumerConfiguration.cs index 01eca3090..7b59709e4 100644 --- a/src/KafkaFlow/Configuration/ConsumerConfiguration.cs +++ b/src/KafkaFlow/Configuration/ConsumerConfiguration.cs @@ -1,16 +1,15 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks; + namespace KafkaFlow.Configuration { - using System; - using System.Collections.Generic; - using System.Threading.Tasks; - using Confluent.Kafka; - internal class ConsumerConfiguration : IConsumerConfiguration { - private readonly ConsumerConfig consumerConfig; + private readonly Confluent.Kafka.ConsumerConfig _consumerConfig; public ConsumerConfiguration( - ConsumerConfig consumerConfig, + Confluent.Kafka.ConsumerConfig consumerConfig, IReadOnlyList topics, IReadOnlyList manualAssignPartitions, string consumerName, @@ -27,14 +26,14 @@ public ConsumerConfiguration( ConsumerInitialState initialState, TimeSpan autoCommitInterval, IReadOnlyList> statisticsHandlers, - IReadOnlyList>> partitionsAssignedHandlers, - IReadOnlyList>> partitionsRevokedHandlers, + IReadOnlyList>> 
partitionsAssignedHandlers, + IReadOnlyList>> partitionsRevokedHandlers, IReadOnlyList pendingOffsetsStatisticsHandlers, ConsumerCustomFactory customFactory) { - this.consumerConfig = consumerConfig ?? throw new ArgumentNullException(nameof(consumerConfig)); + _consumerConfig = consumerConfig ?? throw new ArgumentNullException(nameof(consumerConfig)); - if (string.IsNullOrEmpty(this.consumerConfig.GroupId)) + if (string.IsNullOrEmpty(_consumerConfig.GroupId)) { throw new ArgumentNullException(nameof(consumerConfig.GroupId)); } @@ -87,7 +86,7 @@ public ConsumerConfiguration( public TimeSpan WorkersCountEvaluationInterval { get; } - public string GroupId => this.consumerConfig.GroupId; + public string GroupId => _consumerConfig.GroupId; public int BufferSize { get; } @@ -103,17 +102,17 @@ public ConsumerConfiguration( public IReadOnlyList> StatisticsHandlers { get; } - public IReadOnlyList>> PartitionsAssignedHandlers { get; } + public IReadOnlyList>> PartitionsAssignedHandlers { get; } - public IReadOnlyList>> PartitionsRevokedHandlers { get; } + public IReadOnlyList>> PartitionsRevokedHandlers { get; } public IReadOnlyList PendingOffsetsStatisticsHandlers { get; } public ConsumerCustomFactory CustomFactory { get; } - public ConsumerConfig GetKafkaConfig() + public Confluent.Kafka.ConsumerConfig GetKafkaConfig() { - return this.consumerConfig; + return _consumerConfig; } } } diff --git a/src/KafkaFlow/Configuration/ConsumerConfigurationBuilder.cs b/src/KafkaFlow/Configuration/ConsumerConfigurationBuilder.cs index b0544080b..ba952b983 100644 --- a/src/KafkaFlow/Configuration/ConsumerConfigurationBuilder.cs +++ b/src/KafkaFlow/Configuration/ConsumerConfigurationBuilder.cs @@ -1,75 +1,74 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Threading.Tasks; +using KafkaFlow.Consumers.DistributionStrategies; + namespace KafkaFlow.Configuration { - using System; - using System.Collections.Generic; - using 
System.ComponentModel; - using System.Linq; - using System.Threading.Tasks; - using Confluent.Kafka; - using KafkaFlow.Consumers.DistributionStrategies; - internal sealed class ConsumerConfigurationBuilder : IConsumerConfigurationBuilder { - private readonly List topics = new(); - private readonly List topicsPartitions = new(); - private readonly List> statisticsHandlers = new(); + private readonly List _topics = new(); + private readonly List _topicsPartitions = new(); + private readonly List> _statisticsHandlers = new(); - private readonly List pendingOffsetsStatisticsHandlers = new(); + private readonly List _pendingOffsetsStatisticsHandlers = new(); - private readonly List>> partitionAssignedHandlers = new(); - private readonly List>> partitionRevokedHandlers = new(); - private readonly ConsumerMiddlewareConfigurationBuilder middlewareConfigurationBuilder; + private readonly List>> _partitionAssignedHandlers = new(); + private readonly List>> _partitionRevokedHandlers = new(); + private readonly ConsumerMiddlewareConfigurationBuilder _middlewareConfigurationBuilder; - private ConsumerConfig consumerConfig; + private Confluent.Kafka.ConsumerConfig _consumerConfig; - private string name; - private bool disableManagement; - private string groupId = string.Empty; - private AutoOffsetReset? autoOffsetReset; - private int? maxPollIntervalMs; - private Func> workersCountCalculator; - private TimeSpan workersCountEvaluationInterval = TimeSpan.FromMinutes(5); - private int bufferSize; - private TimeSpan workerStopTimeout = TimeSpan.FromSeconds(30); - private bool autoMessageCompletion = true; - private bool noStoreOffsets; - private ConsumerInitialState initialState = ConsumerInitialState.Running; - private int statisticsInterval; + private string _name; + private bool _disableManagement; + private string _groupId = string.Empty; + private Confluent.Kafka.AutoOffsetReset? _autoOffsetReset; + private int? 
_maxPollIntervalMs; + private Func> _workersCountCalculator; + private TimeSpan _workersCountEvaluationInterval = TimeSpan.FromMinutes(5); + private int _bufferSize; + private TimeSpan _workerStopTimeout = TimeSpan.FromSeconds(30); + private bool _autoMessageCompletion = true; + private bool _noStoreOffsets; + private ConsumerInitialState _initialState = ConsumerInitialState.Running; + private int _statisticsInterval; - private Factory distributionStrategyFactory = _ => new BytesSumDistributionStrategy(); - private TimeSpan autoCommitInterval = TimeSpan.FromSeconds(5); + private Factory _distributionStrategyFactory = _ => new BytesSumDistributionStrategy(); + private TimeSpan _autoCommitInterval = TimeSpan.FromSeconds(5); - private ConsumerCustomFactory customFactory = (consumer, _) => consumer; + private ConsumerCustomFactory _customFactory = (consumer, _) => consumer; public ConsumerConfigurationBuilder(IDependencyConfigurator dependencyConfigurator) { this.DependencyConfigurator = dependencyConfigurator; - this.middlewareConfigurationBuilder = new ConsumerMiddlewareConfigurationBuilder(dependencyConfigurator); + _middlewareConfigurationBuilder = new ConsumerMiddlewareConfigurationBuilder(dependencyConfigurator); } public IDependencyConfigurator DependencyConfigurator { get; } public IConsumerConfigurationBuilder Topic(string topicName) { - this.topics.Add(topicName); + _topics.Add(topicName); return this; } public IConsumerConfigurationBuilder ManualAssignPartitions(string topicName, IEnumerable partitions) { - this.topicsPartitions.Add(new TopicPartitions(topicName, partitions)); + _topicsPartitions.Add(new TopicPartitions(topicName, partitions)); return this; } - public IConsumerConfigurationBuilder WithConsumerConfig(ConsumerConfig config) + public IConsumerConfigurationBuilder WithConsumerConfig(Confluent.Kafka.ConsumerConfig config) { - this.consumerConfig = config; + _consumerConfig = config; return this; } public IConsumerConfigurationBuilder 
Topics(IEnumerable topicNames) { - this.topics.AddRange(topicNames); + _topics.AddRange(topicNames); return this; } @@ -77,28 +76,28 @@ public IConsumerConfigurationBuilder Topics(IEnumerable topicNames) public IConsumerConfigurationBuilder WithName(string name) { - this.name = name; + _name = name; return this; } public IConsumerConfigurationBuilder DisableManagement() { - this.disableManagement = true; + _disableManagement = true; return this; } public IConsumerConfigurationBuilder WithGroupId(string groupId) { - this.groupId = groupId; + _groupId = groupId; return this; } public IConsumerConfigurationBuilder WithAutoOffsetReset(KafkaFlow.AutoOffsetReset autoOffsetReset) { - this.autoOffsetReset = autoOffsetReset switch + _autoOffsetReset = autoOffsetReset switch { - KafkaFlow.AutoOffsetReset.Earliest => AutoOffsetReset.Earliest, - KafkaFlow.AutoOffsetReset.Latest => AutoOffsetReset.Latest, + KafkaFlow.AutoOffsetReset.Earliest => Confluent.Kafka.AutoOffsetReset.Earliest, + KafkaFlow.AutoOffsetReset.Latest => Confluent.Kafka.AutoOffsetReset.Latest, _ => throw new InvalidEnumArgumentException( nameof(autoOffsetReset), (int)autoOffsetReset, @@ -110,13 +109,13 @@ public IConsumerConfigurationBuilder WithAutoOffsetReset(KafkaFlow.AutoOffsetRes public IConsumerConfigurationBuilder WithAutoCommitIntervalMs(int autoCommitIntervalMs) { - this.autoCommitInterval = TimeSpan.FromMilliseconds(autoCommitIntervalMs); + _autoCommitInterval = TimeSpan.FromMilliseconds(autoCommitIntervalMs); return this; } public IConsumerConfigurationBuilder WithMaxPollIntervalMs(int maxPollIntervalMs) { - this.maxPollIntervalMs = maxPollIntervalMs; + _maxPollIntervalMs = maxPollIntervalMs; return this; } @@ -124,8 +123,8 @@ public IConsumerConfigurationBuilder WithWorkersCount( Func> calculator, TimeSpan evaluationInterval) { - this.workersCountCalculator = calculator; - this.workersCountEvaluationInterval = evaluationInterval; + _workersCountCalculator = calculator; + 
_workersCountEvaluationInterval = evaluationInterval; return this; } @@ -141,26 +140,26 @@ public IConsumerConfigurationBuilder WithWorkersCount(int workersCount) public IConsumerConfigurationBuilder WithBufferSize(int size) { - this.bufferSize = size; + _bufferSize = size; return this; } public IConsumerConfigurationBuilder WithWorkerStopTimeout(int seconds) { - this.workerStopTimeout = TimeSpan.FromSeconds(seconds); + _workerStopTimeout = TimeSpan.FromSeconds(seconds); return this; } public IConsumerConfigurationBuilder WithWorkerStopTimeout(TimeSpan timeout) { - this.workerStopTimeout = timeout; + _workerStopTimeout = timeout; return this; } public IConsumerConfigurationBuilder WithWorkerDistributionStrategy(Factory factory) where T : class, IWorkerDistributionStrategy { - this.distributionStrategyFactory = factory; + _distributionStrategyFactory = factory; return this; } @@ -168,88 +167,88 @@ public IConsumerConfigurationBuilder WithWorkerDistributionStrategy() where T : class, IWorkerDistributionStrategy { this.DependencyConfigurator.AddTransient(); - this.distributionStrategyFactory = resolver => resolver.Resolve(); + _distributionStrategyFactory = resolver => resolver.Resolve(); return this; } public IConsumerConfigurationBuilder WithManualMessageCompletion() { - this.autoMessageCompletion = false; + _autoMessageCompletion = false; return this; } public IConsumerConfigurationBuilder WithoutStoringOffsets() { - this.noStoreOffsets = true; + _noStoreOffsets = true; return this; } public IConsumerConfigurationBuilder WithInitialState(ConsumerInitialState state) { - this.initialState = state; + _initialState = state; return this; } public IConsumerConfigurationBuilder AddMiddlewares(Action middlewares) { - middlewares(this.middlewareConfigurationBuilder); + middlewares(_middlewareConfigurationBuilder); return this; } public IConsumerConfigurationBuilder WithPartitionsAssignedHandler( - Action> partitionsAssignedHandler) + Action> partitionsAssignedHandler) { - 
this.partitionAssignedHandlers.Add(partitionsAssignedHandler); + _partitionAssignedHandlers.Add(partitionsAssignedHandler); return this; } public IConsumerConfigurationBuilder WithPartitionsRevokedHandler( - Action> partitionsRevokedHandler) + Action> partitionsRevokedHandler) { - this.partitionRevokedHandlers.Add(partitionsRevokedHandler); + _partitionRevokedHandlers.Add(partitionsRevokedHandler); return this; } public IConsumerConfigurationBuilder WithStatisticsHandler(Action statisticsHandler) { - this.statisticsHandlers.Add(statisticsHandler); + _statisticsHandlers.Add(statisticsHandler); return this; } public IConsumerConfigurationBuilder WithStatisticsIntervalMs(int statisticsIntervalMs) { - this.statisticsInterval = statisticsIntervalMs; + _statisticsInterval = statisticsIntervalMs; return this; } public IConsumerConfigurationBuilder WithPendingOffsetsStatisticsHandler( - Action> pendingOffsetsHandler, + Action> pendingOffsetsHandler, TimeSpan interval) { - this.pendingOffsetsStatisticsHandlers.Add(new(pendingOffsetsHandler, interval)); + _pendingOffsetsStatisticsHandlers.Add(new(pendingOffsetsHandler, interval)); return this; } public IConsumerConfigurationBuilder WithCustomFactory(ConsumerCustomFactory customFactory) { - this.customFactory = customFactory; + _customFactory = customFactory; return this; } public IConsumerConfiguration Build(ClusterConfiguration clusterConfiguration) { - var middlewareConfiguration = this.middlewareConfigurationBuilder.Build(); + var middlewareConfiguration = _middlewareConfigurationBuilder.Build(); - this.consumerConfig ??= new ConsumerConfig(); + _consumerConfig ??= new Confluent.Kafka.ConsumerConfig(); - var consumerConfigCopy = new ConsumerConfig(this.consumerConfig.ToDictionary(x => x.Key, x => x.Value)); + var consumerConfigCopy = new Confluent.Kafka.ConsumerConfig(_consumerConfig.ToDictionary(x => x.Key, x => x.Value)); - consumerConfigCopy.BootstrapServers = this.consumerConfig.BootstrapServers ?? 
string.Join(",", clusterConfiguration.Brokers); - consumerConfigCopy.GroupId = this.consumerConfig.GroupId ?? this.groupId; - consumerConfigCopy.AutoOffsetReset = this.consumerConfig.AutoOffsetReset ?? this.autoOffsetReset; - consumerConfigCopy.MaxPollIntervalMs = this.consumerConfig.MaxPollIntervalMs ?? this.maxPollIntervalMs; - consumerConfigCopy.StatisticsIntervalMs = this.consumerConfig.StatisticsIntervalMs ?? this.statisticsInterval; + consumerConfigCopy.BootstrapServers = _consumerConfig.BootstrapServers ?? string.Join(",", clusterConfiguration.Brokers); + consumerConfigCopy.GroupId = _consumerConfig.GroupId ?? _groupId; + consumerConfigCopy.AutoOffsetReset = _consumerConfig.AutoOffsetReset ?? _autoOffsetReset; + consumerConfigCopy.MaxPollIntervalMs = _consumerConfig.MaxPollIntervalMs ?? _maxPollIntervalMs; + consumerConfigCopy.StatisticsIntervalMs = _consumerConfig.StatisticsIntervalMs ?? _statisticsInterval; consumerConfigCopy.EnableAutoOffsetStore = false; consumerConfigCopy.EnableAutoCommit = false; @@ -258,26 +257,26 @@ public IConsumerConfiguration Build(ClusterConfiguration clusterConfiguration) return new ConsumerConfiguration( consumerConfigCopy, - this.topics, - this.topicsPartitions, - this.name, + _topics, + _topicsPartitions, + _name, clusterConfiguration, - this.disableManagement, - this.workersCountCalculator, - this.workersCountEvaluationInterval, - this.bufferSize, - this.workerStopTimeout, - this.distributionStrategyFactory, + _disableManagement, + _workersCountCalculator, + _workersCountEvaluationInterval, + _bufferSize, + _workerStopTimeout, + _distributionStrategyFactory, middlewareConfiguration, - this.autoMessageCompletion, - this.noStoreOffsets, - this.initialState, - this.autoCommitInterval, - this.statisticsHandlers, - this.partitionAssignedHandlers, - this.partitionRevokedHandlers, - this.pendingOffsetsStatisticsHandlers, - this.customFactory); + _autoMessageCompletion, + _noStoreOffsets, + _initialState, + _autoCommitInterval, + 
_statisticsHandlers, + _partitionAssignedHandlers, + _partitionRevokedHandlers, + _pendingOffsetsStatisticsHandlers, + _customFactory); } } } diff --git a/src/KafkaFlow/Configuration/IConsumerConfiguration.cs b/src/KafkaFlow/Configuration/IConsumerConfiguration.cs index 21f5b6896..980fa5e0e 100644 --- a/src/KafkaFlow/Configuration/IConsumerConfiguration.cs +++ b/src/KafkaFlow/Configuration/IConsumerConfiguration.cs @@ -1,10 +1,9 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks; + namespace KafkaFlow.Configuration { - using System; - using System.Collections.Generic; - using System.Threading.Tasks; - using Confluent.Kafka; - /// /// Represents the Consumer configuration values /// @@ -77,7 +76,7 @@ public interface IConsumerConfiguration bool AutoMessageCompletion { get; } /// - /// Gets a value indicating that no offsets will be stored on Kafka + /// Gets a value indicating whether gets a value indicating that no offsets will be stored on Kafka /// bool NoStoreOffsets { get; } @@ -94,12 +93,12 @@ public interface IConsumerConfiguration /// /// Gets the handlers that will be called when the partitions are assigned /// - IReadOnlyList>> PartitionsAssignedHandlers { get; } + IReadOnlyList>> PartitionsAssignedHandlers { get; } /// /// Gets the handlers that will be called when the partitions are revoked /// - IReadOnlyList>> PartitionsRevokedHandlers { get; } + IReadOnlyList>> PartitionsRevokedHandlers { get; } /// /// Gets the handlers that will be called when there are pending offsets @@ -120,6 +119,6 @@ public interface IConsumerConfiguration /// Parses KafkaFlow configuration to Confluent configuration /// /// - ConsumerConfig GetKafkaConfig(); + Confluent.Kafka.ConsumerConfig GetKafkaConfig(); } } diff --git a/src/KafkaFlow/Configuration/IMiddlewareInstanceContainer.cs b/src/KafkaFlow/Configuration/IMiddlewareInstanceContainer.cs index 81152c04e..fcf45c976 100644 --- a/src/KafkaFlow/Configuration/IMiddlewareInstanceContainer.cs 
+++ b/src/KafkaFlow/Configuration/IMiddlewareInstanceContainer.cs @@ -1,7 +1,7 @@ +using System; + namespace KafkaFlow.Configuration { - using System; - internal interface IMiddlewareInstanceContainer { Guid Id { get; } diff --git a/src/KafkaFlow/Configuration/IProducerConfiguration.cs b/src/KafkaFlow/Configuration/IProducerConfiguration.cs index 6481698f2..536331387 100644 --- a/src/KafkaFlow/Configuration/IProducerConfiguration.cs +++ b/src/KafkaFlow/Configuration/IProducerConfiguration.cs @@ -1,10 +1,9 @@ +using System; +using System.Collections.Generic; +using Confluent.Kafka; + namespace KafkaFlow.Configuration { - using System; - using System.Collections.Generic; - using Confluent.Kafka; - using Acks = KafkaFlow.Acks; - /// /// Represents the producer configuration values /// diff --git a/src/KafkaFlow/Configuration/KafkaConfiguration.cs b/src/KafkaFlow/Configuration/KafkaConfiguration.cs index 3b7adf3d4..6b636d87e 100644 --- a/src/KafkaFlow/Configuration/KafkaConfiguration.cs +++ b/src/KafkaFlow/Configuration/KafkaConfiguration.cs @@ -1,24 +1,24 @@ +using System.Collections.Generic; + namespace KafkaFlow.Configuration { - using System.Collections.Generic; - /// /// Represents the kafka configuration values /// public class KafkaConfiguration { - private readonly List clusters = new(); + private readonly List _clusters = new(); /// /// Gets the cluster configuration list /// - public IReadOnlyCollection Clusters => this.clusters; + public IReadOnlyCollection Clusters => _clusters; /// /// Adds a list of cluster configurations /// /// A list of cluster configurations public void AddClusters(IEnumerable configurations) => - this.clusters.AddRange(configurations); + _clusters.AddRange(configurations); } } diff --git a/src/KafkaFlow/Configuration/KafkaConfigurationBuilder.cs b/src/KafkaFlow/Configuration/KafkaConfigurationBuilder.cs index c194cc501..8953a2935 100644 --- a/src/KafkaFlow/Configuration/KafkaConfigurationBuilder.cs +++ 
b/src/KafkaFlow/Configuration/KafkaConfigurationBuilder.cs @@ -1,31 +1,31 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using KafkaFlow.Clusters; +using KafkaFlow.Consumers; +using KafkaFlow.Producers; + namespace KafkaFlow.Configuration { - using System; - using System.Collections.Generic; - using System.Linq; - using KafkaFlow.Clusters; - using KafkaFlow.Consumers; - using KafkaFlow.Producers; - internal class KafkaConfigurationBuilder : IKafkaConfigurationBuilder { - private readonly IDependencyConfigurator dependencyConfigurator; - private readonly List clusters = new(); - private readonly IList> globalEventsConfigurators = new List>(); - private Type logHandlerType = typeof(NullLogHandler); + private readonly IDependencyConfigurator _dependencyConfigurator; + private readonly List _clusters = new(); + private readonly IList> _globalEventsConfigurators = new List>(); + private Type _logHandlerType = typeof(NullLogHandler); public KafkaConfigurationBuilder(IDependencyConfigurator dependencyConfigurator) { - this.dependencyConfigurator = dependencyConfigurator; + _dependencyConfigurator = dependencyConfigurator; } public KafkaConfiguration Build() { var configuration = new KafkaConfiguration(); - configuration.AddClusters(this.clusters.Select(x => x.Build(configuration))); + configuration.AddClusters(_clusters.Select(x => x.Build(configuration))); - this.dependencyConfigurator.AddSingleton( + _dependencyConfigurator.AddSingleton( resolver => new ProducerAccessor( configuration.Clusters .SelectMany(x => x.Producers) @@ -36,13 +36,13 @@ public KafkaConfiguration Build() foreach (var cluster in configuration.Clusters) { - this.dependencyConfigurator.AddSingleton( + _dependencyConfigurator.AddSingleton( resolver => new ClusterManager(resolver.Resolve(), cluster)); } - this.dependencyConfigurator - .AddTransient(typeof(ILogHandler), this.logHandlerType) + _dependencyConfigurator + .AddTransient(typeof(ILogHandler), _logHandlerType) 
.AddSingleton() .AddSingleton(new ConsumerAccessor()) .AddSingleton(new ConsumerManagerFactory()) @@ -57,7 +57,7 @@ public KafkaConfiguration Build() var globalEvents = new GlobalEvents(logHandler); - foreach (var del in this.globalEventsConfigurators) + foreach (var del in _globalEventsConfigurators) { del.Invoke(globalEvents); } @@ -70,11 +70,11 @@ public KafkaConfiguration Build() public IKafkaConfigurationBuilder AddCluster(Action cluster) { - var builder = new ClusterConfigurationBuilder(this.dependencyConfigurator); + var builder = new ClusterConfigurationBuilder(_dependencyConfigurator); cluster(builder); - this.clusters.Add(builder); + _clusters.Add(builder); return this; } @@ -82,13 +82,13 @@ public IKafkaConfigurationBuilder AddCluster(Action() where TLogHandler : ILogHandler { - this.logHandlerType = typeof(TLogHandler); + _logHandlerType = typeof(TLogHandler); return this; } public IKafkaConfigurationBuilder SubscribeGlobalEvents(Action observers) { - this.globalEventsConfigurators.Add(observers); + _globalEventsConfigurators.Add(observers); return this; } diff --git a/src/KafkaFlow/Configuration/KafkaFlowConfigurator.cs b/src/KafkaFlow/Configuration/KafkaFlowConfigurator.cs index 25d503461..099c5d22c 100644 --- a/src/KafkaFlow/Configuration/KafkaFlowConfigurator.cs +++ b/src/KafkaFlow/Configuration/KafkaFlowConfigurator.cs @@ -1,16 +1,16 @@ +using System; +using KafkaFlow.Clusters; +using KafkaFlow.Consumers; +using KafkaFlow.Producers; + namespace KafkaFlow.Configuration { - using System; - using KafkaFlow.Clusters; - using KafkaFlow.Consumers; - using KafkaFlow.Producers; - /// /// A class to configure KafkaFlow /// public class KafkaFlowConfigurator { - private readonly KafkaConfiguration configuration; + private readonly KafkaConfiguration _configuration; /// /// Initializes a new instance of the class. 
@@ -25,7 +25,7 @@ public KafkaFlowConfigurator( kafka(builder); - this.configuration = builder.Build(); + _configuration = builder.Build(); } /// @@ -39,7 +39,7 @@ public IKafkaBus CreateBus(IDependencyResolver resolver) return new KafkaBus( scope.Resolver, - this.configuration, + _configuration, scope.Resolver.Resolve(), scope.Resolver.Resolve(), scope.Resolver.Resolve(), diff --git a/src/KafkaFlow/Configuration/MiddlewareConfiguration.cs b/src/KafkaFlow/Configuration/MiddlewareConfiguration.cs index 4eea54b91..32aa4832a 100644 --- a/src/KafkaFlow/Configuration/MiddlewareConfiguration.cs +++ b/src/KafkaFlow/Configuration/MiddlewareConfiguration.cs @@ -1,7 +1,7 @@ +using System; + namespace KafkaFlow.Configuration { - using System; - /// /// Represents a middleware configuration /// diff --git a/src/KafkaFlow/Configuration/MiddlewareConfigurationBuilder.cs b/src/KafkaFlow/Configuration/MiddlewareConfigurationBuilder.cs index 95cf0a893..5213b2d4f 100644 --- a/src/KafkaFlow/Configuration/MiddlewareConfigurationBuilder.cs +++ b/src/KafkaFlow/Configuration/MiddlewareConfigurationBuilder.cs @@ -1,13 +1,13 @@ +using System; +using System.Collections.Generic; + namespace KafkaFlow.Configuration { - using System; - using System.Collections.Generic; - internal class MiddlewareConfigurationBuilder : IMiddlewareConfigurationBuilder where TBuilder : class, IMiddlewareConfigurationBuilder { - private readonly List middlewaresConfigurations = new(); + private readonly List _middlewaresConfigurations = new(); protected MiddlewareConfigurationBuilder(IDependencyConfigurator dependencyConfigurator) { @@ -21,7 +21,7 @@ public TBuilder Add( MiddlewareLifetime lifetime = MiddlewareLifetime.ConsumerOrProducer) where T : class, IMessageMiddleware { - return this.AddAt(this.middlewaresConfigurations.Count, factory, lifetime); + return this.AddAt(_middlewaresConfigurations.Count, factory, lifetime); } public TBuilder AddAtBeginning( @@ -35,7 +35,7 @@ public TBuilder AddAtBeginning( public 
TBuilder Add(MiddlewareLifetime lifetime = MiddlewareLifetime.ConsumerOrProducer) where T : class, IMessageMiddleware { - return this.AddAt(this.middlewaresConfigurations.Count, lifetime); + return this.AddAt(_middlewaresConfigurations.Count, lifetime); } public TBuilder AddAtBeginning(MiddlewareLifetime lifetime = MiddlewareLifetime.ConsumerOrProducer) @@ -44,7 +44,7 @@ public TBuilder AddAtBeginning(MiddlewareLifetime lifetime = MiddlewareLifeti return this.AddAt(0, lifetime); } - public IReadOnlyList Build() => this.middlewaresConfigurations; + public IReadOnlyList Build() => _middlewaresConfigurations; private static InstanceLifetime ParseLifetime(MiddlewareLifetime lifetime) { @@ -72,7 +72,7 @@ private TBuilder AddAt( _ => new MiddlewareInstanceContainer(containerId, factory), ParseLifetime(lifetime)); - this.middlewaresConfigurations.Insert( + _middlewaresConfigurations.Insert( position, new MiddlewareConfiguration(typeof(T), lifetime, containerId)); @@ -86,7 +86,7 @@ private TBuilder AddAt( { this.DependencyConfigurator.Add(ParseLifetime(lifetime)); - this.middlewaresConfigurations.Insert( + _middlewaresConfigurations.Insert( position, new MiddlewareConfiguration(typeof(T), lifetime)); diff --git a/src/KafkaFlow/Configuration/MiddlewareInstanceContainer.cs b/src/KafkaFlow/Configuration/MiddlewareInstanceContainer.cs index a81d2b9e6..5a06c8bce 100644 --- a/src/KafkaFlow/Configuration/MiddlewareInstanceContainer.cs +++ b/src/KafkaFlow/Configuration/MiddlewareInstanceContainer.cs @@ -1,40 +1,40 @@ +using System; + namespace KafkaFlow.Configuration { - using System; - internal class MiddlewareInstanceContainer : IMiddlewareInstanceContainer { - private readonly object sync = new(); - private readonly Factory factory; + private readonly object _sync = new(); + private readonly Factory _factory; - private IMessageMiddleware instance; + private IMessageMiddleware _instance; public MiddlewareInstanceContainer(Guid id, Factory factory) { this.Id = id; - this.factory 
= factory; + _factory = factory; } public Guid Id { get; } public IMessageMiddleware GetInstance(IDependencyResolver resolver) { - if (this.instance is not null) + if (_instance is not null) { - return this.instance; + return _instance; } - lock (this.sync) + lock (_sync) { - if (this.instance is not null) + if (_instance is not null) { - return this.instance; + return _instance; } - this.instance = this.factory(resolver); + _instance = _factory(resolver); } - return this.instance; + return _instance; } } } diff --git a/src/KafkaFlow/Configuration/PendingOffsetsStatisticsHandler.cs b/src/KafkaFlow/Configuration/PendingOffsetsStatisticsHandler.cs index 6df2d96dc..04d0ddf11 100644 --- a/src/KafkaFlow/Configuration/PendingOffsetsStatisticsHandler.cs +++ b/src/KafkaFlow/Configuration/PendingOffsetsStatisticsHandler.cs @@ -1,9 +1,8 @@ +using System; +using System.Collections.Generic; + namespace KafkaFlow.Configuration { - using System; - using System.Collections.Generic; - using Confluent.Kafka; - /// /// Represents a handler for pending offsets statistics. /// @@ -14,7 +13,7 @@ public class PendingOffsetsStatisticsHandler /// /// The action to handle pending offsets statistics. /// The interval at which the handler should be executed. - public PendingOffsetsStatisticsHandler(Action> handler, TimeSpan interval) + public PendingOffsetsStatisticsHandler(Action> handler, TimeSpan interval) { this.Handler = handler; this.Interval = interval; @@ -23,7 +22,7 @@ public PendingOffsetsStatisticsHandler(Action /// Gets the action that handles pending offsets statistics. /// - public Action> Handler { get; } + public Action> Handler { get; } /// /// Gets the interval at which the handler should be executed. 
diff --git a/src/KafkaFlow/Configuration/ProducerConfiguration.cs b/src/KafkaFlow/Configuration/ProducerConfiguration.cs index 124931791..98c27943f 100644 --- a/src/KafkaFlow/Configuration/ProducerConfiguration.cs +++ b/src/KafkaFlow/Configuration/ProducerConfiguration.cs @@ -1,10 +1,9 @@ +using System; +using System.Collections.Generic; +using Confluent.Kafka; + namespace KafkaFlow.Configuration { - using System; - using System.Collections.Generic; - using Confluent.Kafka; - using Acks = KafkaFlow.Acks; - internal class ProducerConfiguration : IProducerConfiguration { public ProducerConfiguration( diff --git a/src/KafkaFlow/Configuration/ProducerConfigurationBuilder.cs b/src/KafkaFlow/Configuration/ProducerConfigurationBuilder.cs index a02a4c827..930750809 100644 --- a/src/KafkaFlow/Configuration/ProducerConfigurationBuilder.cs +++ b/src/KafkaFlow/Configuration/ProducerConfigurationBuilder.cs @@ -1,106 +1,105 @@ +using System; +using System.Collections.Generic; +using Confluent.Kafka; + namespace KafkaFlow.Configuration { - using System; - using System.Collections.Generic; - using Confluent.Kafka; - using Acks = KafkaFlow.Acks; - internal class ProducerConfigurationBuilder : IProducerConfigurationBuilder { - private readonly string name; - private readonly ProducerMiddlewareConfigurationBuilder middlewareConfigurationBuilder; - private readonly List> statisticsHandlers = new(); + private readonly string _name; + private readonly ProducerMiddlewareConfigurationBuilder _middlewareConfigurationBuilder; + private readonly List> _statisticsHandlers = new(); - private string topic; - private ProducerConfig producerConfig; - private Acks? acks; - private int statisticsInterval; - private double? lingerMs; - private ProducerCustomFactory customFactory = (producer, _) => producer; + private string _topic; + private ProducerConfig _producerConfig; + private Acks? _acks; + private int _statisticsInterval; + private double? 
_lingerMs; + private ProducerCustomFactory _customFactory = (producer, _) => producer; public ProducerConfigurationBuilder(IDependencyConfigurator dependencyConfigurator, string name) { - this.name = name; + _name = name; this.DependencyConfigurator = dependencyConfigurator; - this.middlewareConfigurationBuilder = new ProducerMiddlewareConfigurationBuilder(dependencyConfigurator); + _middlewareConfigurationBuilder = new ProducerMiddlewareConfigurationBuilder(dependencyConfigurator); } public IDependencyConfigurator DependencyConfigurator { get; } public IProducerConfigurationBuilder AddMiddlewares(Action middlewares) { - middlewares(this.middlewareConfigurationBuilder); + middlewares(_middlewareConfigurationBuilder); return this; } public IProducerConfigurationBuilder DefaultTopic(string topic) { - this.topic = topic; + _topic = topic; return this; } public IProducerConfigurationBuilder WithProducerConfig(ProducerConfig config) { - this.producerConfig = config; + _producerConfig = config; return this; } public IProducerConfigurationBuilder WithCompression(CompressionType compressionType, int? 
compressionLevel) { - this.producerConfig ??= new ProducerConfig(); - this.producerConfig.CompressionType = compressionType; - this.producerConfig.CompressionLevel = compressionLevel; + _producerConfig ??= new ProducerConfig(); + _producerConfig.CompressionType = compressionType; + _producerConfig.CompressionLevel = compressionLevel; return this; } public IProducerConfigurationBuilder WithAcks(Acks acks) { - this.acks = acks; + _acks = acks; return this; } public IProducerConfigurationBuilder WithLingerMs(double lingerMs) { - this.lingerMs = lingerMs; + _lingerMs = lingerMs; return this; } public IProducerConfigurationBuilder WithStatisticsHandler(Action statisticsHandler) { - this.statisticsHandlers.Add(statisticsHandler); + _statisticsHandlers.Add(statisticsHandler); return this; } public IProducerConfigurationBuilder WithStatisticsIntervalMs(int statisticsIntervalMs) { - this.statisticsInterval = statisticsIntervalMs; + _statisticsInterval = statisticsIntervalMs; return this; } public IProducerConfigurationBuilder WithCustomFactory(ProducerCustomFactory customFactory) { - this.customFactory = customFactory; + _customFactory = customFactory; return this; } public IProducerConfiguration Build(ClusterConfiguration clusterConfiguration) { - this.producerConfig ??= new ProducerConfig(); + _producerConfig ??= new ProducerConfig(); - this.producerConfig.StatisticsIntervalMs = this.statisticsInterval; - this.producerConfig.LingerMs = this.lingerMs; + _producerConfig.StatisticsIntervalMs = _statisticsInterval; + _producerConfig.LingerMs = _lingerMs; - this.producerConfig.ReadSecurityInformationFrom(clusterConfiguration); + _producerConfig.ReadSecurityInformationFrom(clusterConfiguration); var configuration = new ProducerConfiguration( clusterConfiguration, - this.name, - this.topic, - this.acks, - this.middlewareConfigurationBuilder.Build(), - this.producerConfig, - this.statisticsHandlers, - this.customFactory); + _name, + _topic, + _acks, + 
_middlewareConfigurationBuilder.Build(), + _producerConfig, + _statisticsHandlers, + _customFactory); return configuration; } diff --git a/src/KafkaFlow/ConsumerManagerFactory.cs b/src/KafkaFlow/ConsumerManagerFactory.cs index 33bce7adb..c8856b833 100644 --- a/src/KafkaFlow/ConsumerManagerFactory.cs +++ b/src/KafkaFlow/ConsumerManagerFactory.cs @@ -1,8 +1,8 @@ +using KafkaFlow.Configuration; +using KafkaFlow.Consumers; + namespace KafkaFlow { - using KafkaFlow.Configuration; - using KafkaFlow.Consumers; - internal class ConsumerManagerFactory : IConsumerManagerFactory { public IConsumerManager Create(IConsumerConfiguration configuration, IDependencyResolver consumerDependencyResolver) diff --git a/src/KafkaFlow/Consumers/Consumer.cs b/src/KafkaFlow/Consumers/Consumer.cs index e19583156..5682cecf1 100644 --- a/src/KafkaFlow/Consumers/Consumer.cs +++ b/src/KafkaFlow/Consumers/Consumer.cs @@ -1,43 +1,42 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using KafkaFlow.Configuration; + namespace KafkaFlow.Consumers { - using System; - using System.Collections.Concurrent; - using System.Collections.Generic; - using System.Linq; - using System.Threading; - using System.Threading.Tasks; - using Confluent.Kafka; - using KafkaFlow.Configuration; - internal class Consumer : IConsumer { - private readonly IDependencyResolver dependencyResolver; - private readonly ILogHandler logHandler; + private readonly IDependencyResolver _dependencyResolver; + private readonly ILogHandler _logHandler; - private readonly List, List>> - partitionsAssignedHandlers = new(); + private readonly List, List>> + _partitionsAssignedHandlers = new(); - private readonly List, List>> - partitionsRevokedHandlers = new(); + private readonly List, List>> + _partitionsRevokedHandlers = new(); - private readonly List, Error>> errorsHandlers = new(); - private readonly List, string>> 
statisticsHandlers = new(); - private readonly ConcurrentDictionary currentPartitionsOffsets = new(); - private readonly ConsumerFlowManager flowManager; + private readonly List, Confluent.Kafka.Error>> _errorsHandlers = new(); + private readonly List, string>> _statisticsHandlers = new(); + private readonly ConcurrentDictionary _currentPartitionsOffsets = new(); + private readonly ConsumerFlowManager _flowManager; - private IConsumer consumer; + private Confluent.Kafka.IConsumer _consumer; public Consumer( IConsumerConfiguration configuration, IDependencyResolver dependencyResolver, ILogHandler logHandler) { - this.dependencyResolver = dependencyResolver; - this.logHandler = logHandler; + _dependencyResolver = dependencyResolver; + _logHandler = logHandler; this.Configuration = configuration; - this.flowManager = new ConsumerFlowManager( + _flowManager = new ConsumerFlowManager( this, - this.logHandler); + _logHandler); foreach (var handler in this.Configuration.StatisticsHandlers) { @@ -54,7 +53,7 @@ public Consumer( this.OnPartitionsRevoked((resolver, _, topicPartitions) => handler(resolver, topicPartitions)); } - var middlewareContext = this.dependencyResolver.Resolve(); + var middlewareContext = _dependencyResolver.Resolve(); middlewareContext.Worker = null; middlewareContext.Consumer = this; @@ -66,13 +65,13 @@ public Consumer( public IReadOnlyList Subscription { get; private set; } = new List(); - public IReadOnlyList Assignment { get; private set; } = new List(); + public IReadOnlyList Assignment { get; private set; } = new List(); - public IConsumerFlowManager FlowManager => this.flowManager; + public IConsumerFlowManager FlowManager => _flowManager; - public string MemberId => this.consumer?.MemberId; + public string MemberId => _consumer?.MemberId; - public string ClientInstanceName => this.consumer?.Name; + public string ClientInstanceName => _consumer?.Name; public ConsumerStatus Status { @@ -94,45 +93,45 @@ public ConsumerStatus Status } } - public 
void OnPartitionsAssigned(Action, List> handler) => - this.partitionsAssignedHandlers.Add(handler); + public void OnPartitionsAssigned(Action, List> handler) => + _partitionsAssignedHandlers.Add(handler); - public void OnPartitionsRevoked(Action, List> handler) => - this.partitionsRevokedHandlers.Add(handler); + public void OnPartitionsRevoked(Action, List> handler) => + _partitionsRevokedHandlers.Add(handler); - public void OnError(Action, Error> handler) => - this.errorsHandlers.Add(handler); + public void OnError(Action, Confluent.Kafka.Error> handler) => + _errorsHandlers.Add(handler); - public void OnStatistics(Action, string> handler) => - this.statisticsHandlers.Add(handler); + public void OnStatistics(Action, string> handler) => + _statisticsHandlers.Add(handler); - public Offset GetPosition(TopicPartition topicPartition) => - this.consumer.Position(topicPartition); + public Confluent.Kafka.Offset GetPosition(Confluent.Kafka.TopicPartition topicPartition) => + _consumer.Position(topicPartition); - public WatermarkOffsets GetWatermarkOffsets(TopicPartition topicPartition) => - this.consumer.GetWatermarkOffsets(topicPartition); + public Confluent.Kafka.WatermarkOffsets GetWatermarkOffsets(Confluent.Kafka.TopicPartition topicPartition) => + _consumer.GetWatermarkOffsets(topicPartition); - public WatermarkOffsets QueryWatermarkOffsets(TopicPartition topicPartition, TimeSpan timeout) => - this.consumer.QueryWatermarkOffsets(topicPartition, timeout); + public Confluent.Kafka.WatermarkOffsets QueryWatermarkOffsets(Confluent.Kafka.TopicPartition topicPartition, TimeSpan timeout) => + _consumer.QueryWatermarkOffsets(topicPartition, timeout); - public List OffsetsForTimes( - IEnumerable topicPartitions, + public List OffsetsForTimes( + IEnumerable topicPartitions, TimeSpan timeout) => - this.consumer.OffsetsForTimes(topicPartitions, timeout); + _consumer.OffsetsForTimes(topicPartitions, timeout); public IEnumerable GetTopicPartitionsLag() { return 
this.Assignment.Select( tp => { - var offset = Math.Max(0, this.currentPartitionsOffsets.GetOrAdd(tp, _ => this.GetPosition(tp))); + var offset = Math.Max(0, _currentPartitionsOffsets.GetOrAdd(tp, _ => this.GetPosition(tp))); var offsetEnd = Math.Max(0, this.GetWatermarkOffsets(tp).High.Value); return new TopicPartitionLag(tp.Topic, tp.Partition.Value, offset == 0 ? 0 : offsetEnd - offset); }); } - public void Commit(IReadOnlyCollection offsets) + public void Commit(IReadOnlyCollection offsets) { var validOffsets = offsets .Where(x => x.Offset.Value >= 0) @@ -143,31 +142,31 @@ public void Commit(IReadOnlyCollection offsets) return; } - this.consumer.Commit(validOffsets); + _consumer.Commit(validOffsets); foreach (var offset in validOffsets) { - this.currentPartitionsOffsets[offset.TopicPartition] = offset.Offset.Value; + _currentPartitionsOffsets[offset.TopicPartition] = offset.Offset.Value; } } - public async ValueTask> ConsumeAsync(CancellationToken cancellationToken) + public async ValueTask> ConsumeAsync(CancellationToken cancellationToken) { while (true) { try { this.EnsureConsumer(); - await this.flowManager.BlockHeartbeat(cancellationToken); - return this.consumer.Consume(cancellationToken); + await _flowManager.BlockHeartbeat(cancellationToken); + return _consumer.Consume(cancellationToken); } catch (OperationCanceledException) { throw; } - catch (KafkaException ex) when (ex.Error.IsFatal) + catch (Confluent.Kafka.KafkaException ex) when (ex.Error.IsFatal) { - this.logHandler.Error( + _logHandler.Error( "Kafka Consumer fatal error occurred. 
Recreating consumer in 5 seconds", ex, null); @@ -178,11 +177,11 @@ public async ValueTask> ConsumeAsync(CancellationT } catch (Exception ex) { - this.logHandler.Error("Kafka Consumer Error", ex, null); + _logHandler.Error("Kafka Consumer Error", ex, null); } finally { - this.flowManager.ReleaseHeartbeat(); + _flowManager.ReleaseHeartbeat(); } } } @@ -205,47 +204,47 @@ private void RegisterLogErrorHandler() if (error.IsFatal) { - this.logHandler.Error("Kafka Consumer Internal Error", null, errorData); + _logHandler.Error("Kafka Consumer Internal Error", null, errorData); } else { - this.logHandler.Warning("Kafka Consumer Internal Warning", errorData); + _logHandler.Warning("Kafka Consumer Internal Warning", errorData); } }); } private void EnsureConsumer() { - if (this.consumer != null) + if (_consumer != null) { return; } var kafkaConfig = this.Configuration.GetKafkaConfig(); - var consumerBuilder = new ConsumerBuilder(kafkaConfig); + var consumerBuilder = new Confluent.Kafka.ConsumerBuilder(kafkaConfig); - this.consumer = + _consumer = consumerBuilder .SetPartitionsAssignedHandler( (consumer, partitions) => this.FirePartitionsAssignedHandlers(consumer, partitions)) .SetPartitionsRevokedHandler( (consumer, partitions) => { - this.Assignment = new List(); + this.Assignment = new List(); this.Subscription = new List(); - this.currentPartitionsOffsets.Clear(); - this.flowManager.Stop(); + _currentPartitionsOffsets.Clear(); + _flowManager.Stop(); - this.partitionsRevokedHandlers.ForEach(handler => handler(this.dependencyResolver, consumer, partitions)); + _partitionsRevokedHandlers.ForEach(handler => handler(_dependencyResolver, consumer, partitions)); }) - .SetErrorHandler((consumer, error) => this.errorsHandlers.ForEach(x => x(consumer, error))) - .SetStatisticsHandler((consumer, statistics) => this.statisticsHandlers.ForEach(x => x(consumer, statistics))) + .SetErrorHandler((consumer, error) => _errorsHandlers.ForEach(x => x(consumer, error))) + 
.SetStatisticsHandler((consumer, statistics) => _statisticsHandlers.ForEach(x => x(consumer, statistics))) .Build(); if (this.Configuration.Topics.Any()) { - this.consumer.Subscribe(this.Configuration.Topics); + _consumer.Subscribe(this.Configuration.Topics); } if (this.Configuration.ManualAssignPartitions.Any()) @@ -257,26 +256,26 @@ private void EnsureConsumer() private void ManualAssign(IEnumerable topics) { var partitions = topics - .SelectMany(topic => topic.Partitions.Select(partition => new TopicPartition(topic.Name, new Partition(partition)))) + .SelectMany(topic => topic.Partitions.Select(partition => new Confluent.Kafka.TopicPartition(topic.Name, new Confluent.Kafka.Partition(partition)))) .ToList(); - this.consumer.Assign(partitions); - this.FirePartitionsAssignedHandlers(this.consumer, partitions); + _consumer.Assign(partitions); + this.FirePartitionsAssignedHandlers(_consumer, partitions); } - private void FirePartitionsAssignedHandlers(IConsumer consumer, List partitions) + private void FirePartitionsAssignedHandlers(Confluent.Kafka.IConsumer consumer, List partitions) { this.Assignment = partitions; this.Subscription = consumer.Subscription; - this.flowManager.Start(consumer); + _flowManager.Start(consumer); - this.partitionsAssignedHandlers.ForEach(handler => handler(this.dependencyResolver, consumer, partitions)); + _partitionsAssignedHandlers.ForEach(handler => handler(_dependencyResolver, consumer, partitions)); } private void InvalidateConsumer() { - this.consumer?.Close(); - this.consumer = null; + _consumer?.Close(); + _consumer = null; } } } diff --git a/src/KafkaFlow/Consumers/ConsumerAccessor.cs b/src/KafkaFlow/Consumers/ConsumerAccessor.cs index 727d3ed9f..c627c4e92 100644 --- a/src/KafkaFlow/Consumers/ConsumerAccessor.cs +++ b/src/KafkaFlow/Consumers/ConsumerAccessor.cs @@ -1,18 +1,18 @@ +using System.Collections.Generic; + namespace KafkaFlow.Consumers { - using System.Collections.Generic; - internal class ConsumerAccessor : 
IConsumerAccessor { - private readonly IDictionary consumers = new Dictionary(); + private readonly IDictionary _consumers = new Dictionary(); - public IEnumerable All => this.consumers.Values; + public IEnumerable All => _consumers.Values; public IMessageConsumer this[string name] => this.GetConsumer(name); public IMessageConsumer GetConsumer(string name) => - this.consumers.TryGetValue(name, out var consumer) ? consumer : null; + _consumers.TryGetValue(name, out var consumer) ? consumer : null; - void IConsumerAccessor.Add(IMessageConsumer consumer) => this.consumers.Add(consumer.ConsumerName, consumer); + void IConsumerAccessor.Add(IMessageConsumer consumer) => _consumers.Add(consumer.ConsumerName, consumer); } } diff --git a/src/KafkaFlow/Consumers/ConsumerContext.cs b/src/KafkaFlow/Consumers/ConsumerContext.cs index 794dd7031..77fccbd2d 100644 --- a/src/KafkaFlow/Consumers/ConsumerContext.cs +++ b/src/KafkaFlow/Consumers/ConsumerContext.cs @@ -1,18 +1,17 @@ +using System; +using System.Threading; +using System.Threading.Tasks; +using Confluent.Kafka; + namespace KafkaFlow.Consumers { - using System; - using System.Threading; - using System.Threading.Tasks; - using Confluent.Kafka; - using TopicPartitionOffset = KafkaFlow.TopicPartitionOffset; - internal class ConsumerContext : IConsumerContext { - private readonly TaskCompletionSource completionSource = new(); - private readonly IConsumer consumer; - private readonly IOffsetManager offsetManager; - private readonly IConsumerWorker worker; - private readonly IDependencyResolverScope messageDependencyScope; + private readonly TaskCompletionSource _completionSource = new(); + private readonly IConsumer _consumer; + private readonly IOffsetManager _offsetManager; + private readonly IConsumerWorker _worker; + private readonly IDependencyResolverScope _messageDependencyScope; public ConsumerContext( IConsumer consumer, @@ -23,11 +22,11 @@ public ConsumerContext( IDependencyResolver consumerDependencyResolver) { 
this.ConsumerDependencyResolver = consumerDependencyResolver; - this.consumer = consumer; - this.offsetManager = offsetManager; - this.worker = worker; - this.messageDependencyScope = messageDependencyScope; - this.AutoMessageCompletion = this.consumer.Configuration.AutoMessageCompletion; + _consumer = consumer; + _offsetManager = offsetManager; + _worker = worker; + _messageDependencyScope = messageDependencyScope; + this.AutoMessageCompletion = _consumer.Configuration.AutoMessageCompletion; this.TopicPartitionOffset = new TopicPartitionOffset( kafkaResult.Topic, kafkaResult.Partition.Value, @@ -35,13 +34,13 @@ public ConsumerContext( this.MessageTimestamp = kafkaResult.Message.Timestamp.UtcDateTime; } - public string ConsumerName => this.consumer.Configuration.ConsumerName; + public string ConsumerName => _consumer.Configuration.ConsumerName; - public CancellationToken WorkerStopped => this.worker.StopCancellationToken; + public CancellationToken WorkerStopped => _worker.StopCancellationToken; - public int WorkerId => this.worker.Id; + public int WorkerId => _worker.Id; - public IDependencyResolver WorkerDependencyResolver => this.worker.WorkerDependencyResolver; + public IDependencyResolver WorkerDependencyResolver => _worker.WorkerDependencyResolver; public IDependencyResolver ConsumerDependencyResolver { get; } @@ -53,7 +52,7 @@ public ConsumerContext( public TopicPartitionOffset TopicPartitionOffset { get; } - public string GroupId => this.consumer.Configuration.GroupId; + public string GroupId => _consumer.Configuration.GroupId; public bool AutoMessageCompletion { get; set; } @@ -61,28 +60,28 @@ public ConsumerContext( public DateTime MessageTimestamp { get; } - public Task Completion => this.completionSource.Task; + public Task Completion => _completionSource.Task; public void Complete() { if (this.ShouldStoreOffset) { - this.offsetManager.MarkAsProcessed(this); + _offsetManager.MarkAsProcessed(this); } - this.messageDependencyScope.Dispose(); - 
this.completionSource.TrySetResult(this.TopicPartitionOffset); + _messageDependencyScope.Dispose(); + _completionSource.TrySetResult(this.TopicPartitionOffset); } public IOffsetsWatermark GetOffsetsWatermark() => new OffsetsWatermark( - this.consumer.GetWatermarkOffsets( + _consumer.GetWatermarkOffsets( new TopicPartition( this.TopicPartitionOffset.Topic, this.TopicPartitionOffset.Partition))); - public void Pause() => this.consumer.FlowManager.Pause(this.consumer.Assignment); + public void Pause() => _consumer.FlowManager.Pause(_consumer.Assignment); - public void Resume() => this.consumer.FlowManager.Resume(this.consumer.Assignment); + public void Resume() => _consumer.FlowManager.Resume(_consumer.Assignment); } } diff --git a/src/KafkaFlow/Consumers/ConsumerFlowManager.cs b/src/KafkaFlow/Consumers/ConsumerFlowManager.cs index 9e6c3cc50..9acba77a2 100644 --- a/src/KafkaFlow/Consumers/ConsumerFlowManager.cs +++ b/src/KafkaFlow/Consumers/ConsumerFlowManager.cs @@ -1,48 +1,48 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Confluent.Kafka; + namespace KafkaFlow.Consumers { - using System; - using System.Collections.Generic; - using System.Linq; - using System.Threading; - using System.Threading.Tasks; - using Confluent.Kafka; - internal class ConsumerFlowManager : IConsumerFlowManager { - private readonly IConsumer consumer; - private readonly ILogHandler logHandler; - private readonly List pausedPartitions = new(); - private readonly SemaphoreSlim consumerSemaphore = new(1, 1); + private readonly IConsumer _consumer; + private readonly ILogHandler _logHandler; + private readonly List _pausedPartitions = new(); + private readonly SemaphoreSlim _consumerSemaphore = new(1, 1); - private IConsumer clientConsumer; - private CancellationTokenSource heartbeatTokenSource; - private Task heartbeatTask; + private IConsumer _clientConsumer; + private CancellationTokenSource 
_heartbeatTokenSource; + private Task _heartbeatTask; public ConsumerFlowManager( IConsumer consumer, ILogHandler logHandler) { - this.consumer = consumer; - this.logHandler = logHandler; + _consumer = consumer; + _logHandler = logHandler; } - public IReadOnlyList PausedPartitions => this.pausedPartitions.AsReadOnly(); + public IReadOnlyList PausedPartitions => _pausedPartitions.AsReadOnly(); public void Pause(IReadOnlyCollection topicPartitions) { - lock (this.pausedPartitions) + lock (_pausedPartitions) { - topicPartitions = topicPartitions.Except(this.pausedPartitions).ToList(); + topicPartitions = topicPartitions.Except(_pausedPartitions).ToList(); if (!topicPartitions.Any()) { return; } - this.clientConsumer.Pause(topicPartitions); - this.pausedPartitions.AddRange(topicPartitions); + _clientConsumer.Pause(topicPartitions); + _pausedPartitions.AddRange(topicPartitions); - if (this.consumer.Status != ConsumerStatus.Paused) + if (_consumer.Status != ConsumerStatus.Paused) { return; } @@ -53,20 +53,20 @@ public void Pause(IReadOnlyCollection topicPartitions) public Task BlockHeartbeat(CancellationToken cancellationToken) { - return this.consumerSemaphore.WaitAsync(cancellationToken); + return _consumerSemaphore.WaitAsync(cancellationToken); } public void ReleaseHeartbeat() { - if (this.consumerSemaphore.CurrentCount != 1) + if (_consumerSemaphore.CurrentCount != 1) { - this.consumerSemaphore.Release(); + _consumerSemaphore.Release(); } } public void Resume(IReadOnlyCollection topicPartitions) { - lock (this.pausedPartitions) + lock (_pausedPartitions) { if (!topicPartitions.Any()) { @@ -75,51 +75,51 @@ public void Resume(IReadOnlyCollection topicPartitions) foreach (var topicPartition in topicPartitions) { - this.pausedPartitions.Remove(topicPartition); + _pausedPartitions.Remove(topicPartition); } - if (this.consumer.Status == ConsumerStatus.Paused) + if (_consumer.Status == ConsumerStatus.Paused) { return; } this.StopHeartbeat(); - 
this.clientConsumer.Resume(topicPartitions); + _clientConsumer.Resume(topicPartitions); } } public void Start(IConsumer clientConsumer) { - this.clientConsumer = clientConsumer; + _clientConsumer = clientConsumer; } public void Stop() { - this.pausedPartitions.Clear(); + _pausedPartitions.Clear(); this.StopHeartbeat(); } private void StartHeartbeat() { - this.heartbeatTokenSource = new CancellationTokenSource(); + _heartbeatTokenSource = new CancellationTokenSource(); - this.heartbeatTask = Task.Run( + _heartbeatTask = Task.Run( () => { - if (this.consumerSemaphore.Wait(0)) + if (_consumerSemaphore.Wait(0)) { try { const int consumeTimeoutCall = 1000; - while (!this.heartbeatTokenSource.IsCancellationRequested) + while (!_heartbeatTokenSource.IsCancellationRequested) { - var result = this.clientConsumer.Consume(consumeTimeoutCall); + var result = _clientConsumer.Consume(consumeTimeoutCall); if (result != null) { - this.logHandler.Warning( + _logHandler.Warning( "Paused consumer heartbeat process wrongly read a message, please report this issue", null); } @@ -127,7 +127,7 @@ private void StartHeartbeat() } catch (Exception ex) { - this.logHandler.Error( + _logHandler.Error( "Error executing paused consumer background heartbeat", ex, null); @@ -142,9 +142,9 @@ private void StartHeartbeat() private void StopHeartbeat() { - this.heartbeatTokenSource?.Cancel(); - this.heartbeatTask?.GetAwaiter().GetResult(); - this.heartbeatTask?.Dispose(); + _heartbeatTokenSource?.Cancel(); + _heartbeatTask?.GetAwaiter().GetResult(); + _heartbeatTask?.Dispose(); } } } diff --git a/src/KafkaFlow/Consumers/ConsumerManager.cs b/src/KafkaFlow/Consumers/ConsumerManager.cs index a8c34bea3..26d28b59f 100644 --- a/src/KafkaFlow/Consumers/ConsumerManager.cs +++ b/src/KafkaFlow/Consumers/ConsumerManager.cs @@ -1,19 +1,18 @@ -namespace KafkaFlow.Consumers +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using 
KafkaFlow.Configuration; + +namespace KafkaFlow.Consumers { - using System; - using System.Collections.Generic; - using System.Linq; - using System.Threading; - using System.Threading.Tasks; - using Confluent.Kafka; - using KafkaFlow.Configuration; - internal class ConsumerManager : IConsumerManager { - private readonly IDependencyResolver dependencyResolver; - private readonly ILogHandler logHandler; + private readonly IDependencyResolver _dependencyResolver; + private readonly ILogHandler _logHandler; - private Timer evaluateWorkersCountTimer; + private Timer _evaluateWorkersCountTimer; public ConsumerManager( IConsumer consumer, @@ -22,8 +21,8 @@ public ConsumerManager( IDependencyResolver dependencyResolver, ILogHandler logHandler) { - this.dependencyResolver = dependencyResolver; - this.logHandler = logHandler; + _dependencyResolver = dependencyResolver; + _logHandler = logHandler; this.Consumer = consumer; this.WorkerPool = consumerWorkerPool; this.Feeder = feeder; @@ -42,7 +41,7 @@ public Task StartAsync() { this.Feeder.Start(); - this.evaluateWorkersCountTimer = new Timer( + _evaluateWorkersCountTimer = new Timer( state => _ = this.EvaluateWorkersCountAsync(), null, this.Consumer.Configuration.WorkersCountEvaluationInterval, @@ -58,14 +57,14 @@ public async Task StopAsync() await this.Feeder.StopAsync().ConfigureAwait(false); await this.WorkerPool.StopAsync().ConfigureAwait(false); - this.evaluateWorkersCountTimer?.Dispose(); - this.evaluateWorkersCountTimer = null; + _evaluateWorkersCountTimer?.Dispose(); + _evaluateWorkersCountTimer = null; this.Consumer.Dispose(); } - private void StopEvaluateWorkerCountTimer() => this.evaluateWorkersCountTimer?.Change(Timeout.Infinite, Timeout.Infinite); + private void StopEvaluateWorkerCountTimer() => _evaluateWorkersCountTimer?.Change(Timeout.Infinite, Timeout.Infinite); - private void StartEvaluateWorkerCountTimer() => this.evaluateWorkersCountTimer?.Change( + private void StartEvaluateWorkerCountTimer() => 
_evaluateWorkersCountTimer?.Change( this.Consumer.Configuration.WorkersCountEvaluationInterval, this.Consumer.Configuration.WorkersCountEvaluationInterval); @@ -97,22 +96,22 @@ private async Task ChangeWorkersCountAsync(int workersCount) } catch (Exception e) { - this.logHandler.Error("Error changing workers count", e, null); + _logHandler.Error("Error changing workers count", e, null); } } - private void OnPartitionRevoked(IEnumerable topicPartitions) + private void OnPartitionRevoked(IEnumerable topicPartitions) { - this.logHandler.Warning( + _logHandler.Warning( "Partitions revoked", this.GetConsumerLogInfo(topicPartitions.Select(x => x.TopicPartition))); this.WorkerPool.StopAsync().GetAwaiter().GetResult(); } - private void OnPartitionAssigned(IReadOnlyCollection partitions) + private void OnPartitionAssigned(IReadOnlyCollection partitions) { - this.logHandler.Info( + _logHandler.Info( "Partitions assigned", this.GetConsumerLogInfo(partitions)); @@ -124,7 +123,7 @@ private void OnPartitionAssigned(IReadOnlyCollection partitions) .GetResult(); } - private object GetConsumerLogInfo(IEnumerable partitions) => new + private object GetConsumerLogInfo(IEnumerable partitions) => new { this.Consumer.Configuration.GroupId, this.Consumer.Configuration.ConsumerName, @@ -139,7 +138,7 @@ private void OnPartitionAssigned(IReadOnlyCollection partitions) }), }; - private async Task CalculateWorkersCount(IEnumerable partitions) + private async Task CalculateWorkersCount(IEnumerable partitions) { try { @@ -156,11 +155,11 @@ private async Task CalculateWorkersCount(IEnumerable partit .Select(x => x.Partition.Value) .ToList())) .ToList()), - this.dependencyResolver); + _dependencyResolver); } catch (Exception e) { - this.logHandler.Error("Error calculating new workers count, using one worker as fallback", e, null); + _logHandler.Error("Error calculating new workers count, using one worker as fallback", e, null); return 1; } diff --git a/src/KafkaFlow/Consumers/ConsumerWorker.cs 
b/src/KafkaFlow/Consumers/ConsumerWorker.cs index a40f2143a..a4c1fcbc9 100644 --- a/src/KafkaFlow/Consumers/ConsumerWorker.cs +++ b/src/KafkaFlow/Consumers/ConsumerWorker.cs @@ -1,26 +1,26 @@ +using System; +using System.Threading; +using System.Threading.Channels; +using System.Threading.Tasks; + namespace KafkaFlow.Consumers { - using System; - using System.Threading; - using System.Threading.Channels; - using System.Threading.Tasks; - internal class ConsumerWorker : IConsumerWorker { - private readonly IConsumer consumer; - private readonly IDependencyResolverScope workerDependencyResolverScope; - private readonly IMiddlewareExecutor middlewareExecutor; - private readonly ILogHandler logHandler; - private readonly GlobalEvents globalEvents; + private readonly IConsumer _consumer; + private readonly IDependencyResolverScope _workerDependencyResolverScope; + private readonly IMiddlewareExecutor _middlewareExecutor; + private readonly ILogHandler _logHandler; + private readonly GlobalEvents _globalEvents; - private readonly Channel messagesBuffer; + private readonly Channel _messagesBuffer; - private readonly Event workerStoppingEvent; - private readonly Event workerStoppedEvent; - private readonly Event workerProcessingEnded; + private readonly Event _workerStoppingEvent; + private readonly Event _workerStoppedEvent; + private readonly Event _workerProcessingEnded; - private CancellationTokenSource stopCancellationTokenSource; - private Task backgroundTask; + private CancellationTokenSource _stopCancellationTokenSource; + private Task _backgroundTask; public ConsumerWorker( IConsumer consumer, @@ -30,18 +30,18 @@ public ConsumerWorker( ILogHandler logHandler) { this.Id = workerId; - this.consumer = consumer; - this.workerDependencyResolverScope = consumerDependencyResolver.CreateScope(); - this.middlewareExecutor = middlewareExecutor; - this.logHandler = logHandler; - this.messagesBuffer = Channel.CreateBounded(consumer.Configuration.BufferSize); - 
this.globalEvents = consumerDependencyResolver.Resolve(); + _consumer = consumer; + _workerDependencyResolverScope = consumerDependencyResolver.CreateScope(); + _middlewareExecutor = middlewareExecutor; + _logHandler = logHandler; + _messagesBuffer = Channel.CreateBounded(consumer.Configuration.BufferSize); + _globalEvents = consumerDependencyResolver.Resolve(); - this.workerStoppingEvent = new(logHandler); - this.workerStoppedEvent = new(logHandler); - this.workerProcessingEnded = new Event(logHandler); + _workerStoppingEvent = new(logHandler); + _workerStoppedEvent = new(logHandler); + _workerProcessingEnded = new Event(logHandler); - var middlewareContext = this.workerDependencyResolverScope.Resolver.Resolve(); + var middlewareContext = _workerDependencyResolverScope.Resolver.Resolve(); middlewareContext.Worker = this; middlewareContext.Consumer = consumer; @@ -49,39 +49,39 @@ public ConsumerWorker( public int Id { get; } - public CancellationToken StopCancellationToken => this.stopCancellationTokenSource?.Token ?? default; + public CancellationToken StopCancellationToken => _stopCancellationTokenSource?.Token ?? 
default; - public IDependencyResolver WorkerDependencyResolver => this.workerDependencyResolverScope.Resolver; + public IDependencyResolver WorkerDependencyResolver => _workerDependencyResolverScope.Resolver; - public IEvent WorkerStopping => this.workerStoppingEvent; + public IEvent WorkerStopping => _workerStoppingEvent; - public IEvent WorkerStopped => this.workerStoppedEvent; + public IEvent WorkerStopped => _workerStoppedEvent; - public IEvent WorkerProcessingEnded => this.workerProcessingEnded; + public IEvent WorkerProcessingEnded => _workerProcessingEnded; public ValueTask EnqueueAsync( IMessageContext context, CancellationToken stopCancellationToken) { - return this.messagesBuffer.Writer.WriteAsync(context, stopCancellationToken); + return _messagesBuffer.Writer.WriteAsync(context, stopCancellationToken); } public Task StartAsync() { - this.stopCancellationTokenSource = new CancellationTokenSource(); + _stopCancellationTokenSource = new CancellationTokenSource(); - this.backgroundTask = Task.Run( + _backgroundTask = Task.Run( async () => { try { try { - while (await this.messagesBuffer.Reader.WaitToReadAsync(CancellationToken.None).ConfigureAwait(false)) + while (await _messagesBuffer.Reader.WaitToReadAsync(CancellationToken.None).ConfigureAwait(false)) { - while (this.messagesBuffer.Reader.TryRead(out var message)) + while (_messagesBuffer.Reader.TryRead(out var message)) { - await this.ProcessMessageAsync(message, this.stopCancellationTokenSource.Token).ConfigureAwait(false); + await this.ProcessMessageAsync(message, _stopCancellationTokenSource.Token).ConfigureAwait(false); } } } @@ -92,7 +92,7 @@ public Task StartAsync() } catch (Exception ex) { - this.logHandler.Error("KafkaFlow consumer worker fatal error", ex, null); + _logHandler.Error("KafkaFlow consumer worker fatal error", ex, null); } }, CancellationToken.None); @@ -102,25 +102,25 @@ public Task StartAsync() public async Task StopAsync() { - await this.workerStoppingEvent.FireAsync(); + await 
_workerStoppingEvent.FireAsync(); - this.messagesBuffer.Writer.TryComplete(); + _messagesBuffer.Writer.TryComplete(); - if (this.stopCancellationTokenSource.Token.CanBeCanceled) + if (_stopCancellationTokenSource.Token.CanBeCanceled) { - this.stopCancellationTokenSource.CancelAfter(this.consumer.Configuration.WorkerStopTimeout); + _stopCancellationTokenSource.CancelAfter(_consumer.Configuration.WorkerStopTimeout); } - await this.backgroundTask.ConfigureAwait(false); + await _backgroundTask.ConfigureAwait(false); - await this.workerStoppedEvent.FireAsync(); + await _workerStoppedEvent.FireAsync(); } public void Dispose() { - this.backgroundTask.Dispose(); - this.workerDependencyResolverScope.Dispose(); - this.stopCancellationTokenSource.Dispose(); + _backgroundTask.Dispose(); + _workerDependencyResolverScope.Dispose(); + _stopCancellationTokenSource.Dispose(); } private async Task ProcessMessageAsync(IMessageContext context, CancellationToken cancellationToken) @@ -129,20 +129,20 @@ private async Task ProcessMessageAsync(IMessageContext context, CancellationToke { try { - await this.globalEvents.FireMessageConsumeStartedAsync(new MessageEventContext(context)); + await _globalEvents.FireMessageConsumeStartedAsync(new MessageEventContext(context)); - _= context.ConsumerContext.Completion.ContinueWith( + _ = context.ConsumerContext.Completion.ContinueWith( async task => { if (task.IsFaulted) { - await this.globalEvents.FireMessageConsumeErrorAsync(new MessageErrorEventContext(context, task.Exception)); + await _globalEvents.FireMessageConsumeErrorAsync(new MessageErrorEventContext(context, task.Exception)); } - await this.globalEvents.FireMessageConsumeCompletedAsync(new MessageEventContext(context)); + await _globalEvents.FireMessageConsumeCompletedAsync(new MessageEventContext(context)); }); - await this.middlewareExecutor + await _middlewareExecutor .Execute(context, _ => Task.CompletedTask) .ConfigureAwait(false); } @@ -152,9 +152,9 @@ await this.middlewareExecutor 
} catch (Exception ex) { - await this.globalEvents.FireMessageConsumeErrorAsync(new MessageErrorEventContext(context, ex)); + await _globalEvents.FireMessageConsumeErrorAsync(new MessageErrorEventContext(context, ex)); - this.logHandler.Error( + _logHandler.Error( "Error processing message", ex, new @@ -172,12 +172,12 @@ await this.middlewareExecutor context.ConsumerContext.Complete(); } - await this.workerProcessingEnded.FireAsync(context); + await _workerProcessingEnded.FireAsync(context); } } catch (Exception ex) { - this.logHandler.Error("KafkaFlow internal message error", ex, null); + _logHandler.Error("KafkaFlow internal message error", ex, null); } } } diff --git a/src/KafkaFlow/Consumers/ConsumerWorkerPool.cs b/src/KafkaFlow/Consumers/ConsumerWorkerPool.cs index 2c45bc2fe..59846df75 100644 --- a/src/KafkaFlow/Consumers/ConsumerWorkerPool.cs +++ b/src/KafkaFlow/Consumers/ConsumerWorkerPool.cs @@ -1,29 +1,29 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Confluent.Kafka; +using KafkaFlow.Configuration; + namespace KafkaFlow.Consumers { - using System; - using System.Collections.Generic; - using System.Linq; - using System.Threading; - using System.Threading.Tasks; - using Confluent.Kafka; - using KafkaFlow.Configuration; - internal class ConsumerWorkerPool : IConsumerWorkerPool { - private readonly IConsumer consumer; - private readonly IDependencyResolver consumerDependencyResolver; - private readonly IMiddlewareExecutor middlewareExecutor; - private readonly ILogHandler logHandler; - private readonly Factory distributionStrategyFactory; - private readonly IOffsetCommitter offsetCommitter; + private readonly IConsumer _consumer; + private readonly IDependencyResolver _consumerDependencyResolver; + private readonly IMiddlewareExecutor _middlewareExecutor; + private readonly ILogHandler _logHandler; + private readonly Factory _distributionStrategyFactory; + private 
readonly IOffsetCommitter _offsetCommitter; - private readonly Event workerPoolStoppedSubject; + private readonly Event _workerPoolStoppedSubject; - private TaskCompletionSource startedTaskSource = new(); - private List workers = new(); + private TaskCompletionSource _startedTaskSource = new(); + private List _workers = new(); - private IWorkerDistributionStrategy distributionStrategy; - private IOffsetManager offsetManager; + private IWorkerDistributionStrategy _distributionStrategy; + private IOffsetManager _offsetManager; public ConsumerWorkerPool( IConsumer consumer, @@ -32,36 +32,36 @@ public ConsumerWorkerPool( IConsumerConfiguration consumerConfiguration, ILogHandler logHandler) { - this.consumer = consumer; - this.consumerDependencyResolver = consumerDependencyResolver; - this.middlewareExecutor = middlewareExecutor; - this.logHandler = logHandler; - this.distributionStrategyFactory = consumerConfiguration.DistributionStrategyFactory; - this.workerPoolStoppedSubject = new Event(logHandler); - - this.offsetCommitter = consumer.Configuration.NoStoreOffsets ? + _consumer = consumer; + _consumerDependencyResolver = consumerDependencyResolver; + _middlewareExecutor = middlewareExecutor; + _logHandler = logHandler; + _distributionStrategyFactory = consumerConfiguration.DistributionStrategyFactory; + _workerPoolStoppedSubject = new Event(logHandler); + + _offsetCommitter = consumer.Configuration.NoStoreOffsets ? 
new NullOffsetCommitter() : new OffsetCommitter( consumer, consumerDependencyResolver, logHandler); - this.offsetCommitter.PendingOffsetsStatisticsHandlers.AddRange(consumer.Configuration.PendingOffsetsStatisticsHandlers); + _offsetCommitter.PendingOffsetsStatisticsHandlers.AddRange(consumer.Configuration.PendingOffsetsStatisticsHandlers); } public int CurrentWorkersCount { get; private set; } - public IEvent WorkerPoolStopped => this.workerPoolStoppedSubject; + public IEvent WorkerPoolStopped => _workerPoolStoppedSubject; public async Task StartAsync(IReadOnlyCollection partitions, int workersCount) { try { - this.offsetManager = this.consumer.Configuration.NoStoreOffsets ? + _offsetManager = _consumer.Configuration.NoStoreOffsets ? new NullOffsetManager() : - new OffsetManager(this.offsetCommitter, partitions); + new OffsetManager(_offsetCommitter, partitions); - await this.offsetCommitter.StartAsync(); + await _offsetCommitter.StartAsync(); this.CurrentWorkersCount = workersCount; @@ -72,67 +72,67 @@ await Task.WhenAll( workerId => { var worker = new ConsumerWorker( - this.consumer, - this.consumerDependencyResolver, + _consumer, + _consumerDependencyResolver, workerId, - this.middlewareExecutor, - this.logHandler); + _middlewareExecutor, + _logHandler); - this.workers.Add(worker); + _workers.Add(worker); return worker.StartAsync(); })) .ConfigureAwait(false); - this.distributionStrategy = this.distributionStrategyFactory(this.consumerDependencyResolver); - this.distributionStrategy.Initialize(this.workers.AsReadOnly()); + _distributionStrategy = _distributionStrategyFactory(_consumerDependencyResolver); + _distributionStrategy.Initialize(_workers.AsReadOnly()); - this.startedTaskSource.TrySetResult(null); + _startedTaskSource.TrySetResult(null); } catch (Exception e) { - this.logHandler.Error( + _logHandler.Error( "Error starting WorkerPool", e, new { - this.consumer.Configuration.ConsumerName, + _consumer.Configuration.ConsumerName, }); } } public async Task 
StopAsync() { - if (this.workers.Count == 0) + if (_workers.Count == 0) { return; } - var currentWorkers = this.workers; - this.workers = new List(); - this.startedTaskSource = new(); + var currentWorkers = _workers; + _workers = new List(); + _startedTaskSource = new(); await Task.WhenAll(currentWorkers.Select(x => x.StopAsync())).ConfigureAwait(false); - await this.offsetManager.WaitContextsCompletionAsync(); + await _offsetManager.WaitContextsCompletionAsync(); currentWorkers.ForEach(worker => worker.Dispose()); - this.offsetManager = null; + _offsetManager = null; - await this.workerPoolStoppedSubject.FireAsync(); + await _workerPoolStoppedSubject.FireAsync(); - await this.offsetCommitter.StopAsync(); + await _offsetCommitter.StopAsync(); } public async Task EnqueueAsync(ConsumeResult message, CancellationToken stopCancellationToken) { - await this.startedTaskSource.Task.ConfigureAwait(false); + await _startedTaskSource.Task.ConfigureAwait(false); - var worker = (IConsumerWorker)await this.distributionStrategy + var worker = (IConsumerWorker)await _distributionStrategy .GetWorkerAsync( new WorkerDistributionContext( - this.consumer.Configuration.ConsumerName, + _consumer.Configuration.ConsumerName, message.Topic, message.Partition.Value, message.Message.Key, @@ -150,27 +150,27 @@ await worker .EnqueueAsync(context, stopCancellationToken) .ConfigureAwait(false); - this.offsetManager.Enqueue(context.ConsumerContext); + _offsetManager.Enqueue(context.ConsumerContext); } private MessageContext CreateMessageContext(ConsumeResult message, IConsumerWorker worker) { - var messageDependencyScope = this.consumerDependencyResolver.CreateScope(); + var messageDependencyScope = _consumerDependencyResolver.CreateScope(); var context = new MessageContext( new Message(message.Message.Key, message.Message.Value), new MessageHeaders(message.Message.Headers), messageDependencyScope.Resolver, new ConsumerContext( - this.consumer, - this.offsetManager, + _consumer, + 
_offsetManager, message, worker, messageDependencyScope, - this.consumerDependencyResolver), + _consumerDependencyResolver), null, - this.consumer.Configuration.ClusterConfiguration.Brokers); + _consumer.Configuration.ClusterConfiguration.Brokers); return context; } } -} \ No newline at end of file +} diff --git a/src/KafkaFlow/Consumers/DistributionStrategies/BytesSumDistributionStrategy.cs b/src/KafkaFlow/Consumers/DistributionStrategies/BytesSumDistributionStrategy.cs index 6581cbb6d..095dbd7c1 100644 --- a/src/KafkaFlow/Consumers/DistributionStrategies/BytesSumDistributionStrategy.cs +++ b/src/KafkaFlow/Consumers/DistributionStrategies/BytesSumDistributionStrategy.cs @@ -1,9 +1,9 @@ -namespace KafkaFlow.Consumers.DistributionStrategies; - using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; +namespace KafkaFlow.Consumers.DistributionStrategies; + /// /// This strategy sums all bytes in the partition key and apply a mod operator with the total number of workers, the resulting number is the worker ID to be chosen /// This algorithm is fast and creates a good work balance. 
Messages with the same partition key are always delivered in the same worker, so, message order is guaranteed @@ -11,20 +11,20 @@ namespace KafkaFlow.Consumers.DistributionStrategies; /// public class BytesSumDistributionStrategy : IWorkerDistributionStrategy { - private IReadOnlyList workers; + private IReadOnlyList _workers; /// public void Initialize(IReadOnlyList workers) { - this.workers = workers; + _workers = workers; } /// public ValueTask GetWorkerAsync(WorkerDistributionContext context) { - if (context.RawMessageKey is null || this.workers.Count == 1) + if (context.RawMessageKey is null || _workers.Count == 1) { - return new ValueTask(this.workers[0]); + return new ValueTask(_workers[0]); } var bytesSum = 0; @@ -37,6 +37,6 @@ public ValueTask GetWorkerAsync(WorkerDistributionContext context) return new ValueTask( context.ConsumerStoppedCancellationToken.IsCancellationRequested ? null - : this.workers.ElementAtOrDefault(bytesSum % this.workers.Count)); + : _workers.ElementAtOrDefault(bytesSum % _workers.Count)); } } diff --git a/src/KafkaFlow/Consumers/DistributionStrategies/FreeWorkerDistributionStrategy.cs b/src/KafkaFlow/Consumers/DistributionStrategies/FreeWorkerDistributionStrategy.cs index 9511a868a..3037917c7 100644 --- a/src/KafkaFlow/Consumers/DistributionStrategies/FreeWorkerDistributionStrategy.cs +++ b/src/KafkaFlow/Consumers/DistributionStrategies/FreeWorkerDistributionStrategy.cs @@ -1,30 +1,30 @@ -namespace KafkaFlow.Consumers.DistributionStrategies; - using System.Collections.Generic; using System.Threading.Channels; using System.Threading.Tasks; +namespace KafkaFlow.Consumers.DistributionStrategies; + /// /// This strategy chooses the first free worker to process the message. 
When a worker finishes the processing, it notifies the worker pool that it is free to get a new message /// This is the fastest and resource-friendly strategy (the message buffer is not used) but messages with the same partition key can be delivered in different workers, so, no message order guarantee /// public class FreeWorkerDistributionStrategy : IWorkerDistributionStrategy { - private readonly Channel freeWorkers = Channel.CreateUnbounded(); + private readonly Channel _freeWorkers = Channel.CreateUnbounded(); /// public void Initialize(IReadOnlyList workers) { foreach (var worker in workers) { - worker.WorkerProcessingEnded.Subscribe(_ => Task.FromResult(this.freeWorkers.Writer.WriteAsync(worker))); - this.freeWorkers.Writer.TryWrite(worker); + worker.WorkerProcessingEnded.Subscribe(_ => Task.FromResult(_freeWorkers.Writer.WriteAsync(worker))); + _freeWorkers.Writer.TryWrite(worker); } } /// public ValueTask GetWorkerAsync(WorkerDistributionContext context) { - return this.freeWorkers.Reader.ReadAsync(context.ConsumerStoppedCancellationToken); + return _freeWorkers.Reader.ReadAsync(context.ConsumerStoppedCancellationToken); } -} \ No newline at end of file +} diff --git a/src/KafkaFlow/Consumers/IConsumer.cs b/src/KafkaFlow/Consumers/IConsumer.cs index 4774289c7..3516aa609 100644 --- a/src/KafkaFlow/Consumers/IConsumer.cs +++ b/src/KafkaFlow/Consumers/IConsumer.cs @@ -1,12 +1,11 @@ +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using KafkaFlow.Configuration; + namespace KafkaFlow.Consumers { - using System; - using System.Collections.Generic; - using System.Threading; - using System.Threading.Tasks; - using Confluent.Kafka; - using KafkaFlow.Configuration; - /// /// Represents a KafkaFlow consumer /// @@ -17,21 +16,21 @@ public interface IConsumer : IDisposable /// IConsumerConfiguration Configuration { get; } - /// + /// IReadOnlyList Subscription { get; } - /// - IReadOnlyList Assignment { get; } + 
/// + IReadOnlyList Assignment { get; } /// /// Gets the consumer /// IConsumerFlowManager FlowManager { get; } - /// + /// string MemberId { get; } - /// + /// string ClientInstanceName { get; } /// @@ -49,44 +48,44 @@ public interface IConsumer : IDisposable /// Register a handler to be executed when the partitions are assigned /// /// The handler that will be executed - void OnPartitionsAssigned(Action, List> handler); + void OnPartitionsAssigned(Action, List> handler); /// /// Register a handler to be executed when the partitions are revoked /// /// The handler that will be executed - void OnPartitionsRevoked(Action, List> handler); + void OnPartitionsRevoked(Action, List> handler); /// /// Register a handler to be executed when an error occurs /// /// The handler that will be executed - void OnError(Action, Error> handler); + void OnError(Action, Confluent.Kafka.Error> handler); /// /// Register a handler to be executed to receive statistics information /// /// The handler that will be executed - void OnStatistics(Action, string> handler); + void OnStatistics(Action, string> handler); - /// - Offset GetPosition(TopicPartition topicPartition); + /// + Confluent.Kafka.Offset GetPosition(Confluent.Kafka.TopicPartition topicPartition); - /// - WatermarkOffsets GetWatermarkOffsets(TopicPartition topicPartition); + /// + Confluent.Kafka.WatermarkOffsets GetWatermarkOffsets(Confluent.Kafka.TopicPartition topicPartition); - /// - WatermarkOffsets QueryWatermarkOffsets(TopicPartition topicPartition, TimeSpan timeout); + /// + Confluent.Kafka.WatermarkOffsets QueryWatermarkOffsets(Confluent.Kafka.TopicPartition topicPartition, TimeSpan timeout); - /// - List OffsetsForTimes( - IEnumerable topicPartitions, + /// + List OffsetsForTimes( + IEnumerable topicPartitions, TimeSpan timeout); - /// - void Commit(IReadOnlyCollection offsetsValues); + /// + void Commit(IReadOnlyCollection offsetsValues); - /// - ValueTask> ConsumeAsync(CancellationToken cancellationToken); + /// + 
ValueTask> ConsumeAsync(CancellationToken cancellationToken); } } diff --git a/src/KafkaFlow/Consumers/IConsumerAccessor.cs b/src/KafkaFlow/Consumers/IConsumerAccessor.cs index 48033804b..f3db89fa2 100644 --- a/src/KafkaFlow/Consumers/IConsumerAccessor.cs +++ b/src/KafkaFlow/Consumers/IConsumerAccessor.cs @@ -1,7 +1,7 @@ +using System.Collections.Generic; + namespace KafkaFlow.Consumers { - using System.Collections.Generic; - /// /// Provides access to the configured consumers /// diff --git a/src/KafkaFlow/Consumers/IConsumerFlowManager.cs b/src/KafkaFlow/Consumers/IConsumerFlowManager.cs index 3102da03a..5dcf19ade 100644 --- a/src/KafkaFlow/Consumers/IConsumerFlowManager.cs +++ b/src/KafkaFlow/Consumers/IConsumerFlowManager.cs @@ -1,8 +1,8 @@ +using System.Collections.Generic; +using Confluent.Kafka; + namespace KafkaFlow.Consumers { - using System.Collections.Generic; - using Confluent.Kafka; - /// /// The consumer flow manager /// diff --git a/src/KafkaFlow/Consumers/IConsumerManager.cs b/src/KafkaFlow/Consumers/IConsumerManager.cs index a96d1fc84..fef29e6ce 100644 --- a/src/KafkaFlow/Consumers/IConsumerManager.cs +++ b/src/KafkaFlow/Consumers/IConsumerManager.cs @@ -1,7 +1,7 @@ +using System.Threading.Tasks; + namespace KafkaFlow.Consumers { - using System.Threading.Tasks; - internal interface IConsumerManager { IWorkerPoolFeeder Feeder { get; } diff --git a/src/KafkaFlow/Consumers/IConsumerWorker.cs b/src/KafkaFlow/Consumers/IConsumerWorker.cs index bf997816a..11c544d0a 100644 --- a/src/KafkaFlow/Consumers/IConsumerWorker.cs +++ b/src/KafkaFlow/Consumers/IConsumerWorker.cs @@ -1,9 +1,9 @@ +using System; +using System.Threading; +using System.Threading.Tasks; + namespace KafkaFlow.Consumers { - using System; - using System.Threading; - using System.Threading.Tasks; - internal interface IConsumerWorker : IWorker, IDisposable { CancellationToken StopCancellationToken { get; } diff --git a/src/KafkaFlow/Consumers/IConsumerWorkerPool.cs 
b/src/KafkaFlow/Consumers/IConsumerWorkerPool.cs index 32f0c51f1..3824faa1a 100644 --- a/src/KafkaFlow/Consumers/IConsumerWorkerPool.cs +++ b/src/KafkaFlow/Consumers/IConsumerWorkerPool.cs @@ -1,10 +1,10 @@ +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Confluent.Kafka; + namespace KafkaFlow.Consumers { - using System.Collections.Generic; - using System.Threading; - using System.Threading.Tasks; - using Confluent.Kafka; - internal interface IConsumerWorkerPool { int CurrentWorkersCount { get; } diff --git a/src/KafkaFlow/Consumers/IMessageConsumer.cs b/src/KafkaFlow/Consumers/IMessageConsumer.cs index 70041a49f..78e7271ef 100644 --- a/src/KafkaFlow/Consumers/IMessageConsumer.cs +++ b/src/KafkaFlow/Consumers/IMessageConsumer.cs @@ -1,10 +1,9 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks; + namespace KafkaFlow.Consumers { - using System; - using System.Collections.Generic; - using System.Threading.Tasks; - using Confluent.Kafka; - /// /// Provides access to the kafka message consumer /// @@ -48,7 +47,7 @@ public interface IMessageConsumer /// /// Gets the current partition assignment /// - IReadOnlyList Assignment { get; } + IReadOnlyList Assignment { get; } /// /// Gets the (dynamic) group member id of this consumer (as set by the broker). 
@@ -77,12 +76,12 @@ public interface IMessageConsumer /// /// Gets the consumer's paused partitions /// - IReadOnlyList PausedPartitions { get; } + IReadOnlyList PausedPartitions { get; } /// /// Gets the consumer's running partitions /// - IEnumerable RunningPartitions { get; } + IEnumerable RunningPartitions { get; } /// /// Gets the lag of each topic/partitions assigned @@ -94,7 +93,7 @@ public interface IMessageConsumer /// Overrides the offsets of the given partitions and restart the consumer /// /// The offset values - Task OverrideOffsetsAndRestartAsync(IReadOnlyCollection offsets); + Task OverrideOffsetsAndRestartAsync(IReadOnlyCollection offsets); /// /// Restart the current consumer with the new worker count @@ -134,7 +133,7 @@ public interface IMessageConsumer /// /// Per partition success or error. /// - void Pause(IReadOnlyCollection partitions); + void Pause(IReadOnlyCollection partitions); /// /// Resume consumption for the provided list of partitions. @@ -148,7 +147,7 @@ public interface IMessageConsumer /// /// Per partition success or error. /// - void Resume(IReadOnlyCollection partitions); + void Resume(IReadOnlyCollection partitions); /// /// Gets the current position (offset) for the @@ -162,7 +161,7 @@ public interface IMessageConsumer /// /// Thrown if the request failed. /// - Offset GetPosition(TopicPartition topicPartition); + Confluent.Kafka.Offset GetPosition(Confluent.Kafka.TopicPartition topicPartition); /// /// Get the last cached low (oldest available / @@ -185,7 +184,7 @@ public interface IMessageConsumer /// The requested WatermarkOffsets /// (see that class for additional documentation). 
/// - WatermarkOffsets GetWatermarkOffsets(TopicPartition topicPartition); + Confluent.Kafka.WatermarkOffsets GetWatermarkOffsets(Confluent.Kafka.TopicPartition topicPartition); /// /// Query the Kafka cluster for low (oldest @@ -205,7 +204,7 @@ public interface IMessageConsumer /// The requested WatermarkOffsets (see /// that class for additional documentation). /// - WatermarkOffsets QueryWatermarkOffsets(TopicPartition topicPartition, TimeSpan timeout); + Confluent.Kafka.WatermarkOffsets QueryWatermarkOffsets(Confluent.Kafka.TopicPartition topicPartition, TimeSpan timeout); /// /// Look up the offsets for the given partitions @@ -244,8 +243,8 @@ public interface IMessageConsumer /// /// property of the exception. /// - List GetOffsets( - IEnumerable timestampsToSearch, + List GetOffsets( + IEnumerable timestampsToSearch, TimeSpan timeout); } } diff --git a/src/KafkaFlow/Consumers/IOffsetCommitter.cs b/src/KafkaFlow/Consumers/IOffsetCommitter.cs index 2986b610e..3fe5227af 100644 --- a/src/KafkaFlow/Consumers/IOffsetCommitter.cs +++ b/src/KafkaFlow/Consumers/IOffsetCommitter.cs @@ -1,9 +1,9 @@ +using System.Collections.Generic; +using System.Threading.Tasks; +using KafkaFlow.Configuration; + namespace KafkaFlow.Consumers { - using System.Collections.Generic; - using System.Threading.Tasks; - using KafkaFlow.Configuration; - internal interface IOffsetCommitter { List PendingOffsetsStatisticsHandlers { get; } diff --git a/src/KafkaFlow/Consumers/IOffsetManager.cs b/src/KafkaFlow/Consumers/IOffsetManager.cs index aff5e27ab..feba67859 100644 --- a/src/KafkaFlow/Consumers/IOffsetManager.cs +++ b/src/KafkaFlow/Consumers/IOffsetManager.cs @@ -1,7 +1,7 @@ +using System.Threading.Tasks; + namespace KafkaFlow.Consumers { - using System.Threading.Tasks; - internal interface IOffsetManager { void Enqueue(IConsumerContext context); diff --git a/src/KafkaFlow/Consumers/IWorkerPoolFeeder.cs b/src/KafkaFlow/Consumers/IWorkerPoolFeeder.cs index 0ae7f6aad..3f36a8be5 100644 --- 
a/src/KafkaFlow/Consumers/IWorkerPoolFeeder.cs +++ b/src/KafkaFlow/Consumers/IWorkerPoolFeeder.cs @@ -1,7 +1,7 @@ +using System.Threading.Tasks; + namespace KafkaFlow.Consumers { - using System.Threading.Tasks; - internal interface IWorkerPoolFeeder { void Start(); diff --git a/src/KafkaFlow/Consumers/MessageConsumer.cs b/src/KafkaFlow/Consumers/MessageConsumer.cs index 262854764..acbcc82f7 100644 --- a/src/KafkaFlow/Consumers/MessageConsumer.cs +++ b/src/KafkaFlow/Consumers/MessageConsumer.cs @@ -1,117 +1,116 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; + namespace KafkaFlow.Consumers { - using System; - using System.Collections.Generic; - using System.Linq; - using System.Threading.Tasks; - using Confluent.Kafka; - internal class MessageConsumer : IMessageConsumer { - private static readonly IReadOnlyList EmptyTopicPartition = new List().AsReadOnly(); + private static readonly IReadOnlyList s_emptyTopicPartition = new List().AsReadOnly(); - private readonly IConsumerManager consumerManager; - private readonly ILogHandler logHandler; + private readonly IConsumerManager _consumerManager; + private readonly ILogHandler _logHandler; public MessageConsumer( IConsumerManager consumerManager, ILogHandler logHandler) { - this.consumerManager = consumerManager; - this.logHandler = logHandler; + _consumerManager = consumerManager; + _logHandler = logHandler; } - public string ConsumerName => this.consumerManager.Consumer.Configuration.ConsumerName; + public string ConsumerName => _consumerManager.Consumer.Configuration.ConsumerName; - public string ClusterName => this.consumerManager.Consumer.Configuration.ClusterConfiguration.Name; + public string ClusterName => _consumerManager.Consumer.Configuration.ClusterConfiguration.Name; - public bool ManagementDisabled => this.consumerManager.Consumer.Configuration.ManagementDisabled; + public bool ManagementDisabled => 
_consumerManager.Consumer.Configuration.ManagementDisabled; - public string GroupId => this.consumerManager.Consumer.Configuration.GroupId; + public string GroupId => _consumerManager.Consumer.Configuration.GroupId; - public IReadOnlyList Topics => this.consumerManager.Consumer.Configuration.Topics; + public IReadOnlyList Topics => _consumerManager.Consumer.Configuration.Topics; - public IReadOnlyList Subscription => this.consumerManager.Consumer.Subscription; + public IReadOnlyList Subscription => _consumerManager.Consumer.Subscription; - public IReadOnlyList Assignment => this.consumerManager.Consumer.Assignment ?? EmptyTopicPartition; + public IReadOnlyList Assignment => _consumerManager.Consumer.Assignment ?? s_emptyTopicPartition; - public ConsumerStatus Status => this.consumerManager.Consumer.Status; + public ConsumerStatus Status => _consumerManager.Consumer.Status; - public string MemberId => this.consumerManager.Consumer.MemberId; + public string MemberId => _consumerManager.Consumer.MemberId; - public string ClientInstanceName => this.consumerManager.Consumer.ClientInstanceName; + public string ClientInstanceName => _consumerManager.Consumer.ClientInstanceName; - public int WorkersCount => this.consumerManager.WorkerPool.CurrentWorkersCount; + public int WorkersCount => _consumerManager.WorkerPool.CurrentWorkersCount; - public IReadOnlyList PausedPartitions => - this.consumerManager.Consumer.FlowManager?.PausedPartitions ?? - EmptyTopicPartition; + public IReadOnlyList PausedPartitions => + _consumerManager.Consumer.FlowManager?.PausedPartitions ?? 
+ s_emptyTopicPartition; - public IEnumerable RunningPartitions => this.Assignment.Except(this.PausedPartitions); + public IEnumerable RunningPartitions => this.Assignment.Except(this.PausedPartitions); public async Task StartAsync() { - await this.consumerManager.StartAsync().ConfigureAwait(false); - this.logHandler.Info($"Kafka consumer '{this.ConsumerName}' was manually started", null); + await _consumerManager.StartAsync().ConfigureAwait(false); + _logHandler.Info($"Kafka consumer '{this.ConsumerName}' was manually started", null); } public async Task StopAsync() { - await this.consumerManager.StopAsync().ConfigureAwait(false); - this.logHandler.Info($"Kafka consumer '{this.ConsumerName}' was manually stopped", null); + await _consumerManager.StopAsync().ConfigureAwait(false); + _logHandler.Info($"Kafka consumer '{this.ConsumerName}' was manually stopped", null); } public async Task RestartAsync() { await this.InternalRestart().ConfigureAwait(false); - this.logHandler.Info($"Kafka consumer '{this.ConsumerName}' was manually restarted", null); + _logHandler.Info($"Kafka consumer '{this.ConsumerName}' was manually restarted", null); } - public void Pause(IReadOnlyCollection topicPartitions) + public void Pause(IReadOnlyCollection topicPartitions) { - this.consumerManager.Consumer.FlowManager.Pause(topicPartitions); - this.logHandler.Info($"Kafka consumer '{this.ConsumerName}' was paused", topicPartitions); + _consumerManager.Consumer.FlowManager.Pause(topicPartitions); + _logHandler.Info($"Kafka consumer '{this.ConsumerName}' was paused", topicPartitions); } - public void Resume(IReadOnlyCollection topicPartitions) + public void Resume(IReadOnlyCollection topicPartitions) { - this.consumerManager.Consumer.FlowManager.Resume(topicPartitions); - this.logHandler.Info($"Kafka consumer '{this.ConsumerName}' was resumed", topicPartitions); + _consumerManager.Consumer.FlowManager.Resume(topicPartitions); + _logHandler.Info($"Kafka consumer '{this.ConsumerName}' was 
resumed", topicPartitions); } - public Offset GetPosition(TopicPartition topicPartition) => - this.consumerManager.Consumer.GetPosition(topicPartition); + public Confluent.Kafka.Offset GetPosition(Confluent.Kafka.TopicPartition topicPartition) => + _consumerManager.Consumer.GetPosition(topicPartition); - public WatermarkOffsets GetWatermarkOffsets(TopicPartition topicPartition) => - this.consumerManager.Consumer.GetWatermarkOffsets(topicPartition); + public Confluent.Kafka.WatermarkOffsets GetWatermarkOffsets(Confluent.Kafka.TopicPartition topicPartition) => + _consumerManager.Consumer.GetWatermarkOffsets(topicPartition); - public WatermarkOffsets QueryWatermarkOffsets(TopicPartition topicPartition, TimeSpan timeout) => - this.consumerManager.Consumer.QueryWatermarkOffsets(topicPartition, timeout); + public Confluent.Kafka.WatermarkOffsets QueryWatermarkOffsets(Confluent.Kafka.TopicPartition topicPartition, TimeSpan timeout) => + _consumerManager.Consumer.QueryWatermarkOffsets(topicPartition, timeout); - public List GetOffsets( - IEnumerable topicPartitions, + public List GetOffsets( + IEnumerable topicPartitions, TimeSpan timeout) => - this.consumerManager.Consumer.OffsetsForTimes(topicPartitions, timeout); + _consumerManager.Consumer.OffsetsForTimes(topicPartitions, timeout); public IEnumerable GetTopicPartitionsLag() => - this.consumerManager.Consumer.GetTopicPartitionsLag(); + _consumerManager.Consumer.GetTopicPartitionsLag(); - public async Task OverrideOffsetsAndRestartAsync(IReadOnlyCollection offsets) + public async Task OverrideOffsetsAndRestartAsync(IReadOnlyCollection offsets) { try { - await this.consumerManager.Feeder.StopAsync().ConfigureAwait(false); - await this.consumerManager.WorkerPool.StopAsync().ConfigureAwait(false); + await _consumerManager.Feeder.StopAsync().ConfigureAwait(false); + await _consumerManager.WorkerPool.StopAsync().ConfigureAwait(false); - this.consumerManager.Consumer.Commit(offsets); + 
_consumerManager.Consumer.Commit(offsets); await this.InternalRestart().ConfigureAwait(false); - this.logHandler.Info($"Offsets of Kafka consumer '{this.ConsumerName}' were overridden ", GetOffsetsLogData(offsets)); + _logHandler.Info($"Offsets of Kafka consumer '{this.ConsumerName}' were overridden ", GetOffsetsLogData(offsets)); } catch (Exception e) { - this.logHandler.Error( + _logHandler.Error( "Error overriding offsets", e, GetOffsetsLogData(offsets)); @@ -121,16 +120,16 @@ public async Task OverrideOffsetsAndRestartAsync(IReadOnlyCollection Task.FromResult(workersCount); + _consumerManager.Consumer.Configuration.WorkersCountCalculator = (_, _) => Task.FromResult(workersCount); await this.InternalRestart().ConfigureAwait(false); - this.logHandler.Info( + _logHandler.Info( $"Total of workers in KafkaFlow consumer '{this.ConsumerName}' were updated", new { workersCount }); } - private static object GetOffsetsLogData(IEnumerable offsets) => offsets + private static object GetOffsetsLogData(IEnumerable offsets) => offsets .GroupBy(x => x.Topic) .Select( x => new @@ -146,9 +145,9 @@ private static object GetOffsetsLogData(IEnumerable offset private async Task InternalRestart() { - await this.consumerManager.StopAsync().ConfigureAwait(false); + await _consumerManager.StopAsync().ConfigureAwait(false); await Task.Delay(5000).ConfigureAwait(false); - await this.consumerManager.StartAsync().ConfigureAwait(false); + await _consumerManager.StartAsync().ConfigureAwait(false); } } } diff --git a/src/KafkaFlow/Consumers/NullOffsetCommitter.cs b/src/KafkaFlow/Consumers/NullOffsetCommitter.cs index 27228989c..7de25bc93 100644 --- a/src/KafkaFlow/Consumers/NullOffsetCommitter.cs +++ b/src/KafkaFlow/Consumers/NullOffsetCommitter.cs @@ -1,10 +1,9 @@ +using System.Collections.Generic; +using System.Threading.Tasks; +using KafkaFlow.Configuration; + namespace KafkaFlow.Consumers { - using System.Collections.Generic; - using System.Threading.Tasks; - using KafkaFlow; - using 
KafkaFlow.Configuration; - internal class NullOffsetCommitter : IOffsetCommitter { public List PendingOffsetsStatisticsHandlers { get; } = new(); diff --git a/src/KafkaFlow/Consumers/NullOffsetManager.cs b/src/KafkaFlow/Consumers/NullOffsetManager.cs index a9c4df3d3..8faaa3f74 100644 --- a/src/KafkaFlow/Consumers/NullOffsetManager.cs +++ b/src/KafkaFlow/Consumers/NullOffsetManager.cs @@ -1,7 +1,7 @@ +using System.Threading.Tasks; + namespace KafkaFlow.Consumers { - using System.Threading.Tasks; - internal class NullOffsetManager : IOffsetManager { public void Enqueue(IConsumerContext context) diff --git a/src/KafkaFlow/Consumers/OffsetCommitter.cs b/src/KafkaFlow/Consumers/OffsetCommitter.cs index f93e6ac99..b51aca2ec 100644 --- a/src/KafkaFlow/Consumers/OffsetCommitter.cs +++ b/src/KafkaFlow/Consumers/OffsetCommitter.cs @@ -1,42 +1,42 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using KafkaFlow.Configuration; + namespace KafkaFlow.Consumers { - using System; - using System.Collections.Concurrent; - using System.Collections.Generic; - using System.Linq; - using System.Threading; - using System.Threading.Tasks; - using KafkaFlow.Configuration; - internal class OffsetCommitter : IOffsetCommitter { - private readonly IConsumer consumer; - private readonly IDependencyResolver resolver; + private readonly IConsumer _consumer; + private readonly IDependencyResolver _resolver; - private readonly ILogHandler logHandler; + private readonly ILogHandler _logHandler; - private readonly object commitSyncRoot = new(); + private readonly object _commitSyncRoot = new(); - private Timer commitTimer; - private IReadOnlyList statisticsTimers; + private Timer _commitTimer; + private IReadOnlyList _statisticsTimers; - private ConcurrentDictionary<(string, int), TopicPartitionOffset> offsetsToCommit = new(); + private ConcurrentDictionary<(string, int), 
TopicPartitionOffset> _offsetsToCommit = new(); public OffsetCommitter( IConsumer consumer, IDependencyResolver resolver, ILogHandler logHandler) { - this.consumer = consumer; - this.resolver = resolver; - this.logHandler = logHandler; + _consumer = consumer; + _resolver = resolver; + _logHandler = logHandler; } public List PendingOffsetsStatisticsHandlers { get; } = new(); public void MarkAsProcessed(TopicPartitionOffset tpo) { - this.offsetsToCommit.AddOrUpdate( + _offsetsToCommit.AddOrUpdate( (tpo.Topic, tpo.Partition), tpo, (_, _) => tpo); @@ -44,13 +44,13 @@ public void MarkAsProcessed(TopicPartitionOffset tpo) public Task StartAsync() { - this.commitTimer = new Timer( + _commitTimer = new Timer( _ => this.CommitHandler(), null, - this.consumer.Configuration.AutoCommitInterval, - this.consumer.Configuration.AutoCommitInterval); + _consumer.Configuration.AutoCommitInterval, + _consumer.Configuration.AutoCommitInterval); - this.statisticsTimers = this.PendingOffsetsStatisticsHandlers + _statisticsTimers = this.PendingOffsetsStatisticsHandlers .Select( handler => new Timer( _ => this.PendingOffsetsHandler(handler), @@ -64,10 +64,10 @@ public Task StartAsync() public Task StopAsync() { - this.commitTimer.Dispose(); + _commitTimer.Dispose(); this.CommitHandler(); - foreach (var timer in this.statisticsTimers) + foreach (var timer in _statisticsTimers) { timer.Dispose(); } @@ -77,11 +77,11 @@ public Task StopAsync() private void PendingOffsetsHandler(PendingOffsetsStatisticsHandler handler) { - if (!this.offsetsToCommit.IsEmpty) + if (!_offsetsToCommit.IsEmpty) { handler.Handler( - this.resolver, - this.offsetsToCommit.Values.Select( + _resolver, + _offsetsToCommit.Values.Select( x => new Confluent.Kafka.TopicPartitionOffset(x.Topic, x.Partition, x.Offset))); } @@ -89,34 +89,34 @@ private void PendingOffsetsHandler(PendingOffsetsStatisticsHandler handler) private void CommitHandler() { - lock (this.commitSyncRoot) + lock (_commitSyncRoot) { 
ConcurrentDictionary<(string, int), TopicPartitionOffset> offsets = null; try { - if (!this.offsetsToCommit.Any()) + if (!_offsetsToCommit.Any()) { return; } offsets = Interlocked.Exchange( - ref this.offsetsToCommit, + ref _offsetsToCommit, new ConcurrentDictionary<(string, int), TopicPartitionOffset>()); - this.consumer.Commit( + _consumer.Commit( offsets.Values .Select(x => new Confluent.Kafka.TopicPartitionOffset(x.Topic, x.Partition, x.Offset + 1)) .ToList()); - if (!this.consumer.Configuration.ManagementDisabled) + if (!_consumer.Configuration.ManagementDisabled) { this.LogOffsetsCommitted(offsets.Values); } } catch (Exception e) { - this.logHandler.Warning( + _logHandler.Warning( "Error Commiting Offsets", new { ErrorMessage = e.Message }); @@ -130,7 +130,7 @@ private void CommitHandler() private void LogOffsetsCommitted(IEnumerable offsets) { - this.logHandler.Verbose( + _logHandler.Verbose( "Offsets committed", new { @@ -153,7 +153,7 @@ private void RequeueFailedOffsets(IEnumerable offsets) { foreach (var tpo in offsets) { - this.offsetsToCommit.TryAdd((tpo.Topic, tpo.Partition), tpo); + _offsetsToCommit.TryAdd((tpo.Topic, tpo.Partition), tpo); } } } diff --git a/src/KafkaFlow/Consumers/OffsetManager.cs b/src/KafkaFlow/Consumers/OffsetManager.cs index 657091486..883c5e317 100644 --- a/src/KafkaFlow/Consumers/OffsetManager.cs +++ b/src/KafkaFlow/Consumers/OffsetManager.cs @@ -1,28 +1,28 @@ +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using Confluent.Kafka; + namespace KafkaFlow.Consumers { - using System.Collections.Generic; - using System.Linq; - using System.Threading.Tasks; - using Confluent.Kafka; - internal class OffsetManager : IOffsetManager { - private readonly IOffsetCommitter committer; - private readonly Dictionary<(string, int), PartitionOffsets> partitionsOffsets; + private readonly IOffsetCommitter _committer; + private readonly Dictionary<(string, int), PartitionOffsets> _partitionsOffsets; public 
OffsetManager( IOffsetCommitter committer, IEnumerable partitions) { - this.committer = committer; - this.partitionsOffsets = partitions.ToDictionary( + _committer = committer; + _partitionsOffsets = partitions.ToDictionary( partition => (partition.Topic, partition.Partition.Value), _ => new PartitionOffsets()); } public void MarkAsProcessed(IConsumerContext context) { - if (!this.partitionsOffsets.TryGetValue((context.Topic, context.Partition), out var offsets)) + if (!_partitionsOffsets.TryGetValue((context.Topic, context.Partition), out var offsets)) { return; } @@ -31,14 +31,14 @@ public void MarkAsProcessed(IConsumerContext context) { if (offsets.TryDequeue(context)) { - this.committer.MarkAsProcessed(offsets.DequeuedContext.TopicPartitionOffset); + _committer.MarkAsProcessed(offsets.DequeuedContext.TopicPartitionOffset); } } } public void Enqueue(IConsumerContext context) { - if (this.partitionsOffsets.TryGetValue( + if (_partitionsOffsets.TryGetValue( (context.Topic, context.Partition), out var offsets)) { @@ -48,7 +48,7 @@ public void Enqueue(IConsumerContext context) public Task WaitContextsCompletionAsync() => Task.WhenAll( - this.partitionsOffsets + _partitionsOffsets .Select(x => x.Value.WaitContextsCompletionAsync()) .ToList()); } diff --git a/src/KafkaFlow/Consumers/OffsetsWatermark.cs b/src/KafkaFlow/Consumers/OffsetsWatermark.cs index 184e08a35..d24b55890 100644 --- a/src/KafkaFlow/Consumers/OffsetsWatermark.cs +++ b/src/KafkaFlow/Consumers/OffsetsWatermark.cs @@ -1,24 +1,24 @@ +using System; +using Confluent.Kafka; + namespace KafkaFlow.Consumers { - using System; - using Confluent.Kafka; - internal readonly struct OffsetsWatermark : IOffsetsWatermark, IEquatable { - private readonly WatermarkOffsets watermark; + private readonly WatermarkOffsets _watermark; public OffsetsWatermark(WatermarkOffsets watermark) { - this.watermark = watermark; + _watermark = watermark; } - public long High => this.watermark.High.Value; + public long High => 
_watermark.High.Value; - public long Low => this.watermark.Low.Value; + public long Low => _watermark.Low.Value; public bool Equals(OffsetsWatermark other) { - return Equals(this.watermark, other.watermark); + return Equals(_watermark, other._watermark); } public override bool Equals(object obj) @@ -28,7 +28,7 @@ public override bool Equals(object obj) public override int GetHashCode() { - return this.watermark != null ? this.watermark.GetHashCode() : 0; + return _watermark != null ? _watermark.GetHashCode() : 0; } } } diff --git a/src/KafkaFlow/Consumers/PartitionOffsets.cs b/src/KafkaFlow/Consumers/PartitionOffsets.cs index 36fa60314..9a4eada09 100644 --- a/src/KafkaFlow/Consumers/PartitionOffsets.cs +++ b/src/KafkaFlow/Consumers/PartitionOffsets.cs @@ -1,22 +1,22 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; + namespace KafkaFlow.Consumers { - using System; - using System.Collections.Generic; - using System.Linq; - using System.Threading.Tasks; - internal class PartitionOffsets { - private readonly SortedDictionary processedContexts = new(); - private readonly LinkedList receivedContexts = new(); + private readonly SortedDictionary _processedContexts = new(); + private readonly LinkedList _receivedContexts = new(); public IConsumerContext DequeuedContext { get; private set; } public void Enqueue(IConsumerContext context) { - lock (this.receivedContexts) + lock (_receivedContexts) { - this.receivedContexts.AddLast(context); + _receivedContexts.AddLast(context); } } @@ -24,32 +24,33 @@ public bool TryDequeue(IConsumerContext context) { this.DequeuedContext = null; - lock (this.receivedContexts) + lock (_receivedContexts) { - if (!this.receivedContexts.Any()) + if (!_receivedContexts.Any()) { throw new InvalidOperationException( $"There is no offsets in the received queue. 
Call {nameof(this.Enqueue)} first"); } - if (context.Offset != this.receivedContexts.First.Value.Offset) + if (context.Offset != _receivedContexts.First.Value.Offset) { - this.processedContexts.Add(context.Offset, context); + _processedContexts.Add(context.Offset, context); return false; } do { - this.DequeuedContext = this.receivedContexts.First.Value; - this.receivedContexts.RemoveFirst(); - } while (this.receivedContexts.Count > 0 && this.processedContexts.Remove(this.receivedContexts.First.Value.Offset)); + this.DequeuedContext = _receivedContexts.First.Value; + _receivedContexts.RemoveFirst(); + } + while (_receivedContexts.Count > 0 && _processedContexts.Remove(_receivedContexts.First.Value.Offset)); } return true; } public Task WaitContextsCompletionAsync() => Task.WhenAll( - this.receivedContexts + _receivedContexts .Select(x => x.Completion) .ToList()); } diff --git a/src/KafkaFlow/Consumers/WorkerPoolFeeder.cs b/src/KafkaFlow/Consumers/WorkerPoolFeeder.cs index 79f3cf8b6..d56625a0d 100644 --- a/src/KafkaFlow/Consumers/WorkerPoolFeeder.cs +++ b/src/KafkaFlow/Consumers/WorkerPoolFeeder.cs @@ -1,41 +1,41 @@ +using System; +using System.Threading; +using System.Threading.Tasks; + namespace KafkaFlow.Consumers { - using System; - using System.Threading; - using System.Threading.Tasks; - internal class WorkerPoolFeeder : IWorkerPoolFeeder { - private readonly IConsumer consumer; - private readonly IConsumerWorkerPool workerPool; - private readonly ILogHandler logHandler; + private readonly IConsumer _consumer; + private readonly IConsumerWorkerPool _workerPool; + private readonly ILogHandler _logHandler; - private CancellationTokenSource stopTokenSource; - private Task feederTask; + private CancellationTokenSource _stopTokenSource; + private Task _feederTask; public WorkerPoolFeeder( IConsumer consumer, IConsumerWorkerPool workerPool, ILogHandler logHandler) { - this.consumer = consumer; - this.workerPool = workerPool; - this.logHandler = logHandler; + 
_consumer = consumer; + _workerPool = workerPool; + _logHandler = logHandler; } public void Start() { - this.stopTokenSource = new CancellationTokenSource(); - var token = this.stopTokenSource.Token; + _stopTokenSource = new CancellationTokenSource(); + var token = _stopTokenSource.Token; - this.feederTask = Task.Run( + _feederTask = Task.Run( async () => { while (!token.IsCancellationRequested) { try { - var message = await this.consumer + var message = await _consumer .ConsumeAsync(token) .ConfigureAwait(false); @@ -44,7 +44,7 @@ public void Start() continue; } - await this.workerPool + await _workerPool .EnqueueAsync(message, token) .ConfigureAwait(false); } @@ -54,7 +54,7 @@ await this.workerPool } catch (Exception ex) { - this.logHandler.Error( + _logHandler.Error( "Error consuming message from Kafka", ex, null); @@ -66,13 +66,13 @@ await this.workerPool public async Task StopAsync() { - if (this.stopTokenSource is { IsCancellationRequested: false }) + if (_stopTokenSource is { IsCancellationRequested: false }) { - this.stopTokenSource.Cancel(); - this.stopTokenSource.Dispose(); + _stopTokenSource.Cancel(); + _stopTokenSource.Dispose(); } - await (this.feederTask ?? Task.CompletedTask); + await (_feederTask ?? 
Task.CompletedTask); } } } diff --git a/src/KafkaFlow/Consumers/WorkersBalancers/ConsumerLagWorkerBalancer.cs b/src/KafkaFlow/Consumers/WorkersBalancers/ConsumerLagWorkerBalancer.cs index 911d4fb7a..59d0d068b 100644 --- a/src/KafkaFlow/Consumers/WorkersBalancers/ConsumerLagWorkerBalancer.cs +++ b/src/KafkaFlow/Consumers/WorkersBalancers/ConsumerLagWorkerBalancer.cs @@ -1,12 +1,12 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using KafkaFlow.Clusters; +using KafkaFlow.Configuration; + namespace KafkaFlow.Consumers.WorkersBalancers { - using System; - using System.Collections.Generic; - using System.Linq; - using System.Threading.Tasks; - using KafkaFlow.Clusters; - using KafkaFlow.Configuration; - /// /// Represents a balancer that dynamically calculates the number of workers for a consumer based on the current lag. /// The calculation employs a simple rule of three considering the total lag across all application instances, @@ -16,12 +16,12 @@ internal class ConsumerLagWorkerBalancer { private const int DefaultWorkersCount = 1; - private readonly IClusterManager clusterManager; - private readonly IConsumerAccessor consumerAccessor; - private readonly ILogHandler logHandler; - private readonly int totalConsumerWorkers; - private readonly int minInstanceWorkers; - private readonly int maxInstanceWorkers; + private readonly IClusterManager _clusterManager; + private readonly IConsumerAccessor _consumerAccessor; + private readonly ILogHandler _logHandler; + private readonly int _totalConsumerWorkers; + private readonly int _minInstanceWorkers; + private readonly int _maxInstanceWorkers; public ConsumerLagWorkerBalancer( IClusterManager clusterManager, @@ -31,19 +31,19 @@ public ConsumerLagWorkerBalancer( int minInstanceWorkers, int maxInstanceWorkers) { - this.clusterManager = clusterManager; - this.consumerAccessor = consumerAccessor; - this.logHandler = logHandler; - this.totalConsumerWorkers = 
totalConsumerWorkers; - this.minInstanceWorkers = minInstanceWorkers; - this.maxInstanceWorkers = maxInstanceWorkers; + _clusterManager = clusterManager; + _consumerAccessor = consumerAccessor; + _logHandler = logHandler; + _totalConsumerWorkers = totalConsumerWorkers; + _minInstanceWorkers = minInstanceWorkers; + _maxInstanceWorkers = maxInstanceWorkers; } public async Task GetWorkersCountAsync(WorkersCountContext context) { var workers = await this.CalculateAsync(context); - this.logHandler.Info( + _logHandler.Info( "New workers count calculated", new { @@ -101,7 +101,7 @@ private async Task CalculateAsync(WorkersCountContext context) var lastOffsets = this.GetPartitionsLastOffset(context.ConsumerName, topicsMetadata); - var partitionsOffset = await this.clusterManager.GetConsumerGroupOffsetsAsync( + var partitionsOffset = await _clusterManager.GetConsumerGroupOffsetsAsync( context.ConsumerGroupId, context.AssignedTopicsPartitions.Select(t => t.Name)); @@ -112,16 +112,16 @@ private async Task CalculateAsync(WorkersCountContext context) var ratio = instanceLag / Math.Max(1, totalConsumerLag); - var workers = (int)Math.Round(this.totalConsumerWorkers * ratio); + var workers = (int)Math.Round(_totalConsumerWorkers * ratio); - workers = Math.Min(workers, this.maxInstanceWorkers); - workers = Math.Max(workers, this.minInstanceWorkers); + workers = Math.Min(workers, _maxInstanceWorkers); + workers = Math.Max(workers, _minInstanceWorkers); return workers; } catch (Exception e) { - this.logHandler.Error( + _logHandler.Error( "Error calculating new workers count, using 1 as fallback", e, new @@ -137,7 +137,7 @@ private async Task CalculateAsync(WorkersCountContext context) string consumerName, IEnumerable<(string Name, TopicMetadata Metadata)> topicsMetadata) { - var consumer = this.consumerAccessor[consumerName]; + var consumer = _consumerAccessor[consumerName]; return topicsMetadata.SelectMany( topic => topic.Metadata.Partitions.Select( @@ -156,7 +156,7 @@ private async 
Task CalculateAsync(WorkersCountContext context) foreach (var topic in context.AssignedTopicsPartitions) { - topicsMetadata.Add((topic.Name, await this.clusterManager.GetTopicMetadataAsync(topic.Name))); + topicsMetadata.Add((topic.Name, await _clusterManager.GetTopicMetadataAsync(topic.Name))); } return topicsMetadata; diff --git a/src/KafkaFlow/DateTimeProvider.cs b/src/KafkaFlow/DateTimeProvider.cs index a5570242b..6f641fa9e 100644 --- a/src/KafkaFlow/DateTimeProvider.cs +++ b/src/KafkaFlow/DateTimeProvider.cs @@ -1,7 +1,7 @@ +using System; + namespace KafkaFlow { - using System; - internal class DateTimeProvider : IDateTimeProvider { public DateTime UtcNow => DateTime.UtcNow; diff --git a/src/KafkaFlow/Delegates.cs b/src/KafkaFlow/Delegates.cs index d448eb58f..3f2fec26b 100644 --- a/src/KafkaFlow/Delegates.cs +++ b/src/KafkaFlow/Delegates.cs @@ -1,8 +1,8 @@ +using Confluent.Kafka; +using KafkaFlow.Consumers; + namespace KafkaFlow { - using Confluent.Kafka; - using KafkaFlow.Consumers; - /// /// A factory to decorates the consumer created by KafkaFlow /// diff --git a/src/KafkaFlow/Event.cs b/src/KafkaFlow/Event.cs index 44d73ad48..598c0fde6 100644 --- a/src/KafkaFlow/Event.cs +++ b/src/KafkaFlow/Event.cs @@ -1,33 +1,33 @@ -namespace KafkaFlow -{ - using System; - using System.Collections.Generic; - using System.Threading.Tasks; +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +namespace KafkaFlow +{ internal class Event : IEvent { - private readonly ILogHandler logHandler; + private readonly ILogHandler _logHandler; - private readonly List> handlers = new(); + private readonly List> _handlers = new(); public Event(ILogHandler logHandler) { - this.logHandler = logHandler; + _logHandler = logHandler; } public IEventSubscription Subscribe(Func handler) { - if (!this.handlers.Contains(handler)) + if (!_handlers.Contains(handler)) { - this.handlers.Add(handler); + _handlers.Add(handler); } - return new EventSubscription(() => 
this.handlers.Remove(handler)); + return new EventSubscription(() => _handlers.Remove(handler)); } internal async Task FireAsync(TArg arg) { - foreach (var handler in this.handlers) + foreach (var handler in _handlers) { try { @@ -40,7 +40,7 @@ internal async Task FireAsync(TArg arg) } catch (Exception e) { - this.logHandler.Error("Error firing event", e, new { Event = this.GetType().Name }); + _logHandler.Error("Error firing event", e, new { Event = this.GetType().Name }); } } } @@ -48,15 +48,15 @@ internal async Task FireAsync(TArg arg) internal class Event : IEvent { - private readonly Event evt; + private readonly Event _evt; public Event(ILogHandler logHandler) { - this.evt = new Event(logHandler); + _evt = new Event(logHandler); } - public IEventSubscription Subscribe(Func handler) => this.evt.Subscribe(_ => handler.Invoke()); + public IEventSubscription Subscribe(Func handler) => _evt.Subscribe(_ => handler.Invoke()); - internal Task FireAsync() => this.evt.FireAsync(null); + internal Task FireAsync() => _evt.FireAsync(null); } -} \ No newline at end of file +} diff --git a/src/KafkaFlow/EventSubscription.cs b/src/KafkaFlow/EventSubscription.cs index 36d725083..ff528ef3e 100644 --- a/src/KafkaFlow/EventSubscription.cs +++ b/src/KafkaFlow/EventSubscription.cs @@ -1,19 +1,19 @@ -namespace KafkaFlow -{ - using System; +using System; +namespace KafkaFlow +{ internal class EventSubscription : IEventSubscription { - private readonly Action cancelDelegate; + private readonly Action _cancelDelegate; public EventSubscription(Action cancelDelegate) { - this.cancelDelegate = cancelDelegate; + _cancelDelegate = cancelDelegate; } public void Cancel() { - this.cancelDelegate.Invoke(); + _cancelDelegate.Invoke(); } } } diff --git a/src/KafkaFlow/Extensions/ConfigurationBuilderExtensions.cs b/src/KafkaFlow/Extensions/ConfigurationBuilderExtensions.cs index 8189c22b5..920496454 100644 --- a/src/KafkaFlow/Extensions/ConfigurationBuilderExtensions.cs +++ 
b/src/KafkaFlow/Extensions/ConfigurationBuilderExtensions.cs @@ -1,15 +1,15 @@ +using System; +using System.Collections.Generic; +using Confluent.Kafka; +using KafkaFlow.Clusters; +using KafkaFlow.Configuration; +using KafkaFlow.Consumers; +using KafkaFlow.Consumers.WorkersBalancers; +using KafkaFlow.Middlewares.Compressor; +using KafkaFlow.Middlewares.TypedHandler; + namespace KafkaFlow { - using System; - using System.Collections.Generic; - using Confluent.Kafka; - using KafkaFlow.Clusters; - using KafkaFlow.Configuration; - using KafkaFlow.Consumers; - using KafkaFlow.Consumers.WorkersBalancers; - using KafkaFlow.Middlewares.Compressor; - using KafkaFlow.Middlewares.TypedHandler; - /// /// Provides extension methods over and /// @@ -206,7 +206,6 @@ public static IConsumerMiddlewareConfigurationBuilder AddTypedHandlers( /// The middleware configuration builder /// The compressor type /// - [Obsolete("Compressors should only be used in backward compatibility scenarios, in the vast majority of cases native compression (producer.WithCompression()) should be used instead")] public static IConsumerMiddlewareConfigurationBuilder AddDecompressor(this IConsumerMiddlewareConfigurationBuilder middlewares) where T : class, IDecompressor { @@ -221,7 +220,6 @@ public static IConsumerMiddlewareConfigurationBuilder AddDecompressor(this IC /// The decompressor type that implements /// A factory to create the instance /// - [Obsolete("Compressors should only be used in backward compatibility scenarios, in the vast majority of cases native compression (producer.WithCompression()) should be used instead")] public static IConsumerMiddlewareConfigurationBuilder AddDecompressor( this IConsumerMiddlewareConfigurationBuilder middlewares, Factory factory) @@ -237,7 +235,6 @@ public static IConsumerMiddlewareConfigurationBuilder AddDecompressor( /// The middleware configuration builder /// The compressor type that implements /// - [Obsolete("Compressors should only be used in backward 
compatibility scenarios, in the vast majority of cases native compression (producer.WithCompression()) should be used instead")] public static IProducerMiddlewareConfigurationBuilder AddCompressor(this IProducerMiddlewareConfigurationBuilder middlewares) where T : class, ICompressor { @@ -253,7 +250,6 @@ public static IProducerMiddlewareConfigurationBuilder AddCompressor(this IPro /// The compressor type that implements /// A factory to create the instance /// - [Obsolete("Compressors should only be used in backward compatibility scenarios, in the vast majority of cases native compression (producer.WithCompression()) should be used instead")] public static IProducerMiddlewareConfigurationBuilder AddCompressor( this IProducerMiddlewareConfigurationBuilder middlewares, Factory factory) diff --git a/src/KafkaFlow/Extensions/ConfigurationExtensions.cs b/src/KafkaFlow/Extensions/ConfigurationExtensions.cs index eb1741167..6057d30c1 100644 --- a/src/KafkaFlow/Extensions/ConfigurationExtensions.cs +++ b/src/KafkaFlow/Extensions/ConfigurationExtensions.cs @@ -1,12 +1,12 @@ +using System; +using Confluent.Kafka; +using KafkaFlow.Configuration; +using SaslMechanism = KafkaFlow.Configuration.SaslMechanism; +using SecurityProtocol = KafkaFlow.Configuration.SecurityProtocol; +using SslEndpointIdentificationAlgorithm = KafkaFlow.Configuration.SslEndpointIdentificationAlgorithm; + namespace KafkaFlow { - using System; - using Confluent.Kafka; - using KafkaFlow.Configuration; - using SaslMechanism = KafkaFlow.Configuration.SaslMechanism; - using SecurityProtocol = KafkaFlow.Configuration.SecurityProtocol; - using SslEndpointIdentificationAlgorithm = KafkaFlow.Configuration.SslEndpointIdentificationAlgorithm; - internal static class ConfigurationExtensions { public static Confluent.Kafka.SaslMechanism ToConfluent(this SaslMechanism mechanism) => mechanism switch diff --git a/src/KafkaFlow/GlobalEvents.cs b/src/KafkaFlow/GlobalEvents.cs index c4e4212a3..4c012f47b 100644 --- 
a/src/KafkaFlow/GlobalEvents.cs +++ b/src/KafkaFlow/GlobalEvents.cs @@ -1,56 +1,55 @@ -namespace KafkaFlow -{ - using System.Threading.Tasks; - using KafkaFlow.Configuration; +using System.Threading.Tasks; +using KafkaFlow.Configuration; +namespace KafkaFlow +{ internal class GlobalEvents : IGlobalEvents { - private readonly Event messageConsumeCompleted; - private readonly Event messageConsumeError; - private readonly Event messageConsumeStarted; - private readonly Event messageProduceCompleted; - private readonly Event messageProduceError; - private readonly Event messageProduceStarted; + private readonly Event _messageConsumeCompleted; + private readonly Event _messageConsumeError; + private readonly Event _messageConsumeStarted; + private readonly Event _messageProduceCompleted; + private readonly Event _messageProduceError; + private readonly Event _messageProduceStarted; public GlobalEvents(ILogHandler log) { - this.messageConsumeCompleted = new(log); - this.messageConsumeError = new(log); - this.messageConsumeStarted = new(log); - this.messageProduceCompleted = new(log); - this.messageProduceError = new(log); - this.messageProduceStarted = new(log); + _messageConsumeCompleted = new(log); + _messageConsumeError = new(log); + _messageConsumeStarted = new(log); + _messageProduceCompleted = new(log); + _messageProduceError = new(log); + _messageProduceStarted = new(log); } - public IEvent MessageConsumeCompleted => this.messageConsumeCompleted; + public IEvent MessageConsumeCompleted => _messageConsumeCompleted; - public IEvent MessageConsumeError => this.messageConsumeError; + public IEvent MessageConsumeError => _messageConsumeError; - public IEvent MessageConsumeStarted => this.messageConsumeStarted; + public IEvent MessageConsumeStarted => _messageConsumeStarted; - public IEvent MessageProduceCompleted => this.messageProduceCompleted; + public IEvent MessageProduceCompleted => _messageProduceCompleted; - public IEvent MessageProduceError => 
this.messageProduceError; + public IEvent MessageProduceError => _messageProduceError; - public IEvent MessageProduceStarted => this.messageProduceStarted; + public IEvent MessageProduceStarted => _messageProduceStarted; public Task FireMessageConsumeStartedAsync(MessageEventContext context) - => this.messageConsumeStarted.FireAsync(context); + => _messageConsumeStarted.FireAsync(context); public Task FireMessageConsumeErrorAsync(MessageErrorEventContext context) - => this.messageConsumeError.FireAsync(context); + => _messageConsumeError.FireAsync(context); public Task FireMessageConsumeCompletedAsync(MessageEventContext context) - => this.messageConsumeCompleted.FireAsync(context); + => _messageConsumeCompleted.FireAsync(context); public Task FireMessageProduceStartedAsync(MessageEventContext context) - => this.messageProduceStarted.FireAsync(context); + => _messageProduceStarted.FireAsync(context); public Task FireMessageProduceErrorAsync(MessageErrorEventContext context) - => this.messageProduceError.FireAsync(context); + => _messageProduceError.FireAsync(context); public Task FireMessageProduceCompletedAsync(MessageEventContext context) - => this.messageProduceCompleted.FireAsync(context); - + => _messageProduceCompleted.FireAsync(context); } } diff --git a/src/KafkaFlow/IConsumerManagerFactory.cs b/src/KafkaFlow/IConsumerManagerFactory.cs index 5e95b3b94..430228060 100644 --- a/src/KafkaFlow/IConsumerManagerFactory.cs +++ b/src/KafkaFlow/IConsumerManagerFactory.cs @@ -1,8 +1,8 @@ +using KafkaFlow.Configuration; +using KafkaFlow.Consumers; + namespace KafkaFlow { - using KafkaFlow.Configuration; - using KafkaFlow.Consumers; - internal interface IConsumerManagerFactory { IConsumerManager Create(IConsumerConfiguration configuration, IDependencyResolver resolver); diff --git a/src/KafkaFlow/IKafkaBus.cs b/src/KafkaFlow/IKafkaBus.cs index 1a1751044..1b5f91b98 100644 --- a/src/KafkaFlow/IKafkaBus.cs +++ b/src/KafkaFlow/IKafkaBus.cs @@ -1,10 +1,10 @@ +using 
System.Threading; +using System.Threading.Tasks; +using KafkaFlow.Consumers; +using KafkaFlow.Producers; + namespace KafkaFlow { - using System.Threading; - using System.Threading.Tasks; - using KafkaFlow.Consumers; - using KafkaFlow.Producers; - /// /// Provides access to the kafka bus operations /// diff --git a/src/KafkaFlow/IMiddlewareExecutor.cs b/src/KafkaFlow/IMiddlewareExecutor.cs index 7f7580c04..6ff6f309f 100644 --- a/src/KafkaFlow/IMiddlewareExecutor.cs +++ b/src/KafkaFlow/IMiddlewareExecutor.cs @@ -1,8 +1,8 @@ +using System; +using System.Threading.Tasks; + namespace KafkaFlow { - using System; - using System.Threading.Tasks; - internal interface IMiddlewareExecutor { Task Execute(IMessageContext context, Func nextOperation); diff --git a/src/KafkaFlow/KafkaBus.cs b/src/KafkaFlow/KafkaBus.cs index 8ac7a699c..c6ce0e7d7 100644 --- a/src/KafkaFlow/KafkaBus.cs +++ b/src/KafkaFlow/KafkaBus.cs @@ -1,22 +1,22 @@ +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using KafkaFlow.Clusters; +using KafkaFlow.Configuration; +using KafkaFlow.Consumers; +using KafkaFlow.Producers; + namespace KafkaFlow { - using System.Collections.Generic; - using System.Linq; - using System.Threading; - using System.Threading.Tasks; - using KafkaFlow.Clusters; - using KafkaFlow.Configuration; - using KafkaFlow.Consumers; - using KafkaFlow.Producers; - internal class KafkaBus : IKafkaBus { - private readonly IDependencyResolver dependencyResolver; - private readonly KafkaConfiguration configuration; - private readonly IConsumerManagerFactory consumerManagerFactory; - private readonly IClusterManagerAccessor clusterManagerAccessor; + private readonly IDependencyResolver _dependencyResolver; + private readonly KafkaConfiguration _configuration; + private readonly IConsumerManagerFactory _consumerManagerFactory; + private readonly IClusterManagerAccessor _clusterManagerAccessor; - private readonly List consumerManagers = 
new(); + private readonly List _consumerManagers = new(); public KafkaBus( IDependencyResolver dependencyResolver, @@ -26,10 +26,10 @@ public KafkaBus( IProducerAccessor producers, IClusterManagerAccessor clusterManagerAccessor) { - this.dependencyResolver = dependencyResolver; - this.configuration = configuration; - this.consumerManagerFactory = consumerManagerFactory; - this.clusterManagerAccessor = clusterManagerAccessor; + _dependencyResolver = dependencyResolver; + _configuration = configuration; + _consumerManagerFactory = consumerManagerFactory; + _clusterManagerAccessor = clusterManagerAccessor; this.Consumers = consumers; this.Producers = producers; } @@ -44,18 +44,18 @@ public async Task StartAsync(CancellationToken stopCancellationToken = default) stopTokenSource.Token.Register(() => this.StopAsync().GetAwaiter().GetResult()); - foreach (var cluster in this.configuration.Clusters) + foreach (var cluster in _configuration.Clusters) { await this.CreateMissingClusterTopics(cluster); foreach (var consumerConfiguration in cluster.Consumers) { - var consumerDependencyScope = this.dependencyResolver.CreateScope(); + var consumerDependencyScope = _dependencyResolver.CreateScope(); var consumerManager = - this.consumerManagerFactory.Create(consumerConfiguration, consumerDependencyScope.Resolver); + _consumerManagerFactory.Create(consumerConfiguration, consumerDependencyScope.Resolver); - this.consumerManagers.Add(consumerManager); + _consumerManagers.Add(consumerManager); this.Consumers.Add( new MessageConsumer( consumerManager, @@ -67,18 +67,18 @@ public async Task StartAsync(CancellationToken stopCancellationToken = default) } } - cluster.OnStartedHandler(this.dependencyResolver); + cluster.OnStartedHandler(_dependencyResolver); } } public Task StopAsync() { - foreach (var cluster in this.configuration.Clusters) + foreach (var cluster in _configuration.Clusters) { - cluster.OnStoppingHandler(this.dependencyResolver); + 
cluster.OnStoppingHandler(_dependencyResolver); } - return Task.WhenAll(this.consumerManagers.Select(x => x.StopAsync())); + return Task.WhenAll(_consumerManagers.Select(x => x.StopAsync())); } private async Task CreateMissingClusterTopics(ClusterConfiguration cluster) @@ -88,7 +88,7 @@ private async Task CreateMissingClusterTopics(ClusterConfiguration cluster) return; } - await this.clusterManagerAccessor[cluster.Name].CreateIfNotExistsAsync( + await _clusterManagerAccessor[cluster.Name].CreateIfNotExistsAsync( cluster.TopicsToCreateIfNotExist); } } diff --git a/src/KafkaFlow/MessageContext.cs b/src/KafkaFlow/MessageContext.cs index e59f50bb4..cbbf708cc 100644 --- a/src/KafkaFlow/MessageContext.cs +++ b/src/KafkaFlow/MessageContext.cs @@ -1,7 +1,7 @@ +using System.Collections.Generic; + namespace KafkaFlow { - using System.Collections.Generic; - internal class MessageContext : IMessageContext { public MessageContext( diff --git a/src/KafkaFlow/MessageHeaders.cs b/src/KafkaFlow/MessageHeaders.cs index 54287586b..5d7cde588 100644 --- a/src/KafkaFlow/MessageHeaders.cs +++ b/src/KafkaFlow/MessageHeaders.cs @@ -1,15 +1,15 @@ +using System.Collections; +using System.Collections.Generic; +using Confluent.Kafka; + namespace KafkaFlow { - using System.Collections; - using System.Collections.Generic; - using Confluent.Kafka; - /// /// Collection of message headers /// public class MessageHeaders : IMessageHeaders { - private readonly Headers headers; + private readonly Headers _headers; /// /// Initializes a new instance of the class. @@ -17,7 +17,7 @@ public class MessageHeaders : IMessageHeaders /// The Confluent headers public MessageHeaders(Headers headers) { - this.headers = headers; + _headers = headers; } /// @@ -34,11 +34,11 @@ public MessageHeaders() /// The zero-based index of the element to get public byte[] this[string key] { - get => this.headers.TryGetLastBytes(key, out var value) ? value : null; + get => _headers.TryGetLastBytes(key, out var value) ? 
value : null; set { - this.headers.Remove(key); - this.headers.Add(key, value); + _headers.Remove(key); + _headers.Add(key, value); } } @@ -49,14 +49,14 @@ public byte[] this[string key] /// The header value (possibly null) public void Add(string key, byte[] value) { - this.headers.Add(key, value); + _headers.Add(key, value); } /// /// Gets all the kafka headers /// /// - public Headers GetKafkaHeaders() => this.headers; + public Headers GetKafkaHeaders() => _headers; /// /// Gets an enumerator that iterates through @@ -64,7 +64,7 @@ public void Add(string key, byte[] value) /// public IEnumerator> GetEnumerator() { - foreach (var header in this.headers) + foreach (var header in _headers) { yield return new KeyValuePair(header.Key, header.GetValueBytes()); } diff --git a/src/KafkaFlow/MiddlewareExecutor.cs b/src/KafkaFlow/MiddlewareExecutor.cs index b2710ff9d..6513b1bb5 100644 --- a/src/KafkaFlow/MiddlewareExecutor.cs +++ b/src/KafkaFlow/MiddlewareExecutor.cs @@ -1,21 +1,21 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using KafkaFlow.Configuration; + namespace KafkaFlow { - using System; - using System.Collections.Generic; - using System.Linq; - using System.Threading.Tasks; - using KafkaFlow.Configuration; - internal class MiddlewareExecutor : IMiddlewareExecutor { - private readonly IReadOnlyList configurations; + private readonly IReadOnlyList _configurations; - private readonly Dictionary consumerOrProducerMiddlewares = new(); - private readonly Dictionary<(int, int), IMessageMiddleware> workersMiddlewares = new(); + private readonly Dictionary _consumerOrProducerMiddlewares = new(); + private readonly Dictionary<(int, int), IMessageMiddleware> _workersMiddlewares = new(); public MiddlewareExecutor(IReadOnlyList configurations) { - this.configurations = configurations; + _configurations = configurations; } public Task Execute(IMessageContext context, Func nextOperation) @@ -25,7 +25,7 @@ public Task 
Execute(IMessageContext context, Func nextOpe internal Task OnWorkerPoolStopped() { - this.workersMiddlewares.Clear(); + _workersMiddlewares.Clear(); return Task.CompletedTask; } @@ -56,12 +56,12 @@ private static IMessageMiddleware CreateInstance( private Task ExecuteDefinition(int index, IMessageContext context, Func nextOperation) { - if (this.configurations.Count == index) + if (_configurations.Count == index) { return nextOperation(context); } - var configuration = this.configurations[index]; + var configuration = _configurations[index]; return this .ResolveInstance(index, context, configuration) @@ -91,7 +91,7 @@ private IMessageMiddleware GetConsumerOrProducerInstance( int index, MiddlewareConfiguration configuration) { - return this.consumerOrProducerMiddlewares.SafeGetOrAdd( + return _consumerOrProducerMiddlewares.SafeGetOrAdd( index, _ => CreateInstance(dependencyResolver, configuration)); } @@ -101,7 +101,7 @@ private IMessageMiddleware GetWorkerInstance( IMessageContext context, MiddlewareConfiguration configuration) { - return this.workersMiddlewares.SafeGetOrAdd( + return _workersMiddlewares.SafeGetOrAdd( (index, context.ConsumerContext?.WorkerId ?? 
0), _ => CreateInstance(context.ConsumerContext.WorkerDependencyResolver, configuration)); } diff --git a/src/KafkaFlow/Middlewares/Compressor/CompressorProducerMiddleware.cs b/src/KafkaFlow/Middlewares/Compressor/CompressorProducerMiddleware.cs index ed3ca3d42..3373ed1af 100644 --- a/src/KafkaFlow/Middlewares/Compressor/CompressorProducerMiddleware.cs +++ b/src/KafkaFlow/Middlewares/Compressor/CompressorProducerMiddleware.cs @@ -1,14 +1,14 @@ -namespace KafkaFlow.Middlewares.Compressor -{ - using System; - using System.Threading.Tasks; +using System; +using System.Threading.Tasks; +namespace KafkaFlow.Middlewares.Compressor +{ /// /// Middleware to compress the messages when producing /// public class CompressorProducerMiddleware : IMessageMiddleware { - private readonly ICompressor compressor; + private readonly ICompressor _compressor; /// /// Initializes a new instance of the class. @@ -16,7 +16,7 @@ public class CompressorProducerMiddleware : IMessageMiddleware /// Instance of public CompressorProducerMiddleware(ICompressor compressor) { - this.compressor = compressor; + _compressor = compressor; } /// @@ -28,7 +28,7 @@ public Task Invoke(IMessageContext context, MiddlewareDelegate next) $"{nameof(context.Message.Value)} must be a byte array to be compressed and it is '{context.Message.Value.GetType().FullName}'"); } - var data = this.compressor.Compress(rawData); + var data = _compressor.Compress(rawData); return next(context.SetMessage(context.Message.Key, data)); } diff --git a/src/KafkaFlow/Middlewares/Compressor/DecompressorConsumerMiddleware.cs b/src/KafkaFlow/Middlewares/Compressor/DecompressorConsumerMiddleware.cs index d52ae8360..ba4f218f2 100644 --- a/src/KafkaFlow/Middlewares/Compressor/DecompressorConsumerMiddleware.cs +++ b/src/KafkaFlow/Middlewares/Compressor/DecompressorConsumerMiddleware.cs @@ -1,14 +1,14 @@ -namespace KafkaFlow.Middlewares.Compressor -{ - using System; - using System.Threading.Tasks; +using System; +using 
System.Threading.Tasks; +namespace KafkaFlow.Middlewares.Compressor +{ /// /// Middleware to decompress the messages when consuming /// public class DecompressorConsumerMiddleware : IMessageMiddleware { - private readonly IDecompressor decompressor; + private readonly IDecompressor _decompressor; /// /// Initializes a new instance of the class. @@ -16,7 +16,7 @@ public class DecompressorConsumerMiddleware : IMessageMiddleware /// Instance of public DecompressorConsumerMiddleware(IDecompressor decompressor) { - this.decompressor = decompressor; + _decompressor = decompressor; } /// @@ -28,7 +28,7 @@ public Task Invoke(IMessageContext context, MiddlewareDelegate next) $"{nameof(context.Message.Value)} must be a byte array to be decompressed and it is '{context.Message.Value.GetType().FullName}'"); } - var data = this.decompressor.Decompress(rawData); + var data = _decompressor.Decompress(rawData); return next(context.SetMessage(context.Message.Key, data)); } diff --git a/src/KafkaFlow/Middlewares/ConsumerThrottling/Configuration/ConsumerThrottlingActionConfigurationBuilder.cs b/src/KafkaFlow/Middlewares/ConsumerThrottling/Configuration/ConsumerThrottlingActionConfigurationBuilder.cs index e0c02903f..9794cfca2 100644 --- a/src/KafkaFlow/Middlewares/ConsumerThrottling/Configuration/ConsumerThrottlingActionConfigurationBuilder.cs +++ b/src/KafkaFlow/Middlewares/ConsumerThrottling/Configuration/ConsumerThrottlingActionConfigurationBuilder.cs @@ -1,7 +1,7 @@ +using System; + namespace KafkaFlow.Middlewares.ConsumerThrottling.Configuration { - using System; - internal class ConsumerThrottlingActionConfigurationBuilder : IConsumerThrottlingActionConfigurationBuilder, IConsumerThrottlingThresholdActionConfigurationBuilder diff --git a/src/KafkaFlow/Middlewares/ConsumerThrottling/Configuration/ConsumerThrottlingConfiguration.cs b/src/KafkaFlow/Middlewares/ConsumerThrottling/Configuration/ConsumerThrottlingConfiguration.cs index def130389..6185d4b37 100644 --- 
a/src/KafkaFlow/Middlewares/ConsumerThrottling/Configuration/ConsumerThrottlingConfiguration.cs +++ b/src/KafkaFlow/Middlewares/ConsumerThrottling/Configuration/ConsumerThrottlingConfiguration.cs @@ -1,8 +1,8 @@ +using System; +using System.Collections.Generic; + namespace KafkaFlow.Middlewares.ConsumerThrottling.Configuration { - using System; - using System.Collections.Generic; - internal class ConsumerThrottlingConfiguration { public ConsumerThrottlingConfiguration( diff --git a/src/KafkaFlow/Middlewares/ConsumerThrottling/Configuration/ConsumerThrottlingConfigurationBuilder.cs b/src/KafkaFlow/Middlewares/ConsumerThrottling/Configuration/ConsumerThrottlingConfigurationBuilder.cs index f1792b344..bacb06222 100644 --- a/src/KafkaFlow/Middlewares/ConsumerThrottling/Configuration/ConsumerThrottlingConfigurationBuilder.cs +++ b/src/KafkaFlow/Middlewares/ConsumerThrottling/Configuration/ConsumerThrottlingConfigurationBuilder.cs @@ -1,26 +1,26 @@ +using System; +using System.Collections.Generic; +using System.Linq; + namespace KafkaFlow.Middlewares.ConsumerThrottling.Configuration { - using System; - using System.Collections.Generic; - using System.Linq; - internal class ConsumerThrottlingConfigurationBuilder : IConsumerThrottlingMetricConfigurationBuilder, IConsumerThrottlingActionsConfigurationBuilder { - private readonly List> metrics = new(); - private readonly List actions = new(); - private TimeSpan interval = TimeSpan.FromSeconds(5); + private readonly List> _metrics = new(); + private readonly List _actions = new(); + private TimeSpan _interval = TimeSpan.FromSeconds(5); public IConsumerThrottlingActionsConfigurationBuilder WithInterval(TimeSpan interval) { - this.interval = interval; + _interval = interval; return this; } public IConsumerThrottlingMetricConfigurationBuilder AddMetric(Func factory) { - this.metrics.Add(factory); + _metrics.Add(factory); return this; } @@ -30,18 +30,18 @@ public IConsumerThrottlingActionsConfigurationBuilder AddAction(Action 
x(resolver)) .ToList(), - this.actions + _actions .Select(x => new ConsumerThrottlingThreshold(x.Threshold, x.Factory(resolver))) .ToList()); } diff --git a/src/KafkaFlow/Middlewares/ConsumerThrottling/Configuration/ConsumerThrottlingConfigurationBuilderExtensions.cs b/src/KafkaFlow/Middlewares/ConsumerThrottling/Configuration/ConsumerThrottlingConfigurationBuilderExtensions.cs index c9eefdd80..9621a0d1d 100644 --- a/src/KafkaFlow/Middlewares/ConsumerThrottling/Configuration/ConsumerThrottlingConfigurationBuilderExtensions.cs +++ b/src/KafkaFlow/Middlewares/ConsumerThrottling/Configuration/ConsumerThrottlingConfigurationBuilderExtensions.cs @@ -1,12 +1,12 @@ +using System; +using System.Collections.Generic; +using KafkaFlow.Configuration; +using KafkaFlow.Consumers; +using KafkaFlow.Middlewares.ConsumerThrottling; +using KafkaFlow.Middlewares.ConsumerThrottling.Configuration; + namespace KafkaFlow { - using System; - using System.Collections.Generic; - using KafkaFlow.Configuration; - using KafkaFlow.Consumers; - using KafkaFlow.Middlewares.ConsumerThrottling; - using KafkaFlow.Middlewares.ConsumerThrottling.Configuration; - /// /// Provides extension methods for configuring consumer throttling in KafkaFlow. /// diff --git a/src/KafkaFlow/Middlewares/ConsumerThrottling/Configuration/IConsumerThrottlingActionsConfigurationBuilder.cs b/src/KafkaFlow/Middlewares/ConsumerThrottling/Configuration/IConsumerThrottlingActionsConfigurationBuilder.cs index 0c44bd3d4..c5b1e7e49 100644 --- a/src/KafkaFlow/Middlewares/ConsumerThrottling/Configuration/IConsumerThrottlingActionsConfigurationBuilder.cs +++ b/src/KafkaFlow/Middlewares/ConsumerThrottling/Configuration/IConsumerThrottlingActionsConfigurationBuilder.cs @@ -1,7 +1,7 @@ +using System; + namespace KafkaFlow.Middlewares.ConsumerThrottling.Configuration { - using System; - /// /// Provides methods to configure throttling actions for KafkaFlow consumers. 
/// diff --git a/src/KafkaFlow/Middlewares/ConsumerThrottling/Configuration/IConsumerThrottlingMetricConfigurationBuilder.cs b/src/KafkaFlow/Middlewares/ConsumerThrottling/Configuration/IConsumerThrottlingMetricConfigurationBuilder.cs index ecf613255..f926107fb 100644 --- a/src/KafkaFlow/Middlewares/ConsumerThrottling/Configuration/IConsumerThrottlingMetricConfigurationBuilder.cs +++ b/src/KafkaFlow/Middlewares/ConsumerThrottling/Configuration/IConsumerThrottlingMetricConfigurationBuilder.cs @@ -1,7 +1,7 @@ +using System; + namespace KafkaFlow.Middlewares.ConsumerThrottling.Configuration { - using System; - /// /// An interface to configure the throttling metrics for KafkaFlow consumers. /// diff --git a/src/KafkaFlow/Middlewares/ConsumerThrottling/Configuration/IConsumerThrottlingThresholdActionConfigurationBuilder.cs b/src/KafkaFlow/Middlewares/ConsumerThrottling/Configuration/IConsumerThrottlingThresholdActionConfigurationBuilder.cs index 1a1f4b58e..91c5dc8ae 100644 --- a/src/KafkaFlow/Middlewares/ConsumerThrottling/Configuration/IConsumerThrottlingThresholdActionConfigurationBuilder.cs +++ b/src/KafkaFlow/Middlewares/ConsumerThrottling/Configuration/IConsumerThrottlingThresholdActionConfigurationBuilder.cs @@ -1,7 +1,7 @@ +using System; + namespace KafkaFlow.Middlewares.ConsumerThrottling.Configuration { - using System; - /// /// An interface to configure the actions applied when a throttling threshold is met. 
/// diff --git a/src/KafkaFlow/Middlewares/ConsumerThrottling/ConsumerThrottlingDelayAction.cs b/src/KafkaFlow/Middlewares/ConsumerThrottling/ConsumerThrottlingDelayAction.cs index d0116d422..2c410bef7 100644 --- a/src/KafkaFlow/Middlewares/ConsumerThrottling/ConsumerThrottlingDelayAction.cs +++ b/src/KafkaFlow/Middlewares/ConsumerThrottling/ConsumerThrottlingDelayAction.cs @@ -1,20 +1,20 @@ +using System; +using System.Threading.Tasks; + namespace KafkaFlow.Middlewares.ConsumerThrottling { - using System; - using System.Threading.Tasks; - internal class ConsumerThrottlingDelayAction : IConsumerThrottlingAction { - private readonly TimeSpan delay; + private readonly TimeSpan _delay; public ConsumerThrottlingDelayAction(TimeSpan delay) { - this.delay = delay; + _delay = delay; } public Task ExecuteAsync() { - return Task.Delay(this.delay); + return Task.Delay(_delay); } } } diff --git a/src/KafkaFlow/Middlewares/ConsumerThrottling/ConsumerThrottlingKafkaLagMetric.cs b/src/KafkaFlow/Middlewares/ConsumerThrottling/ConsumerThrottlingKafkaLagMetric.cs index 8611084ef..578ad16c4 100644 --- a/src/KafkaFlow/Middlewares/ConsumerThrottling/ConsumerThrottlingKafkaLagMetric.cs +++ b/src/KafkaFlow/Middlewares/ConsumerThrottling/ConsumerThrottlingKafkaLagMetric.cs @@ -1,17 +1,17 @@ +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using KafkaFlow.Consumers; + namespace KafkaFlow.Middlewares.ConsumerThrottling { - using System.Collections.Generic; - using System.Linq; - using System.Threading.Tasks; - using KafkaFlow.Consumers; - internal class ConsumerThrottlingKafkaLagMetric : IConsumerThrottlingMetric { - private readonly IReadOnlyList consumers; + private readonly IReadOnlyList _consumers; public ConsumerThrottlingKafkaLagMetric(IConsumerAccessor consumerAccessor, IEnumerable consumersNames) { - this.consumers = consumerAccessor.All + _consumers = consumerAccessor.All .Where(consumer => consumersNames.Contains(consumer.ConsumerName)) 
.ToList() .AsReadOnly(); @@ -19,7 +19,7 @@ public ConsumerThrottlingKafkaLagMetric(IConsumerAccessor consumerAccessor, IEnu public Task GetValueAsync() { - var lag = this.consumers + var lag = _consumers .SelectMany(x => x.GetTopicPartitionsLag()) .Select(x => x.Lag) .Sum(); diff --git a/src/KafkaFlow/Middlewares/ConsumerThrottling/ConsumerThrottlingMiddleware.cs b/src/KafkaFlow/Middlewares/ConsumerThrottling/ConsumerThrottlingMiddleware.cs index 91019140d..1add86f0b 100644 --- a/src/KafkaFlow/Middlewares/ConsumerThrottling/ConsumerThrottlingMiddleware.cs +++ b/src/KafkaFlow/Middlewares/ConsumerThrottling/ConsumerThrottlingMiddleware.cs @@ -1,29 +1,29 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using KafkaFlow.Middlewares.ConsumerThrottling.Configuration; + namespace KafkaFlow.Middlewares.ConsumerThrottling { - using System; - using System.Collections.Generic; - using System.Linq; - using System.Threading; - using System.Threading.Tasks; - using KafkaFlow.Middlewares.ConsumerThrottling.Configuration; - internal class ConsumerThrottlingMiddleware : IMessageMiddleware, IDisposable { - private readonly ConsumerThrottlingConfiguration configuration; - private readonly IReadOnlyList thresholds; + private readonly ConsumerThrottlingConfiguration _configuration; + private readonly IReadOnlyList _thresholds; - private readonly Timer timer; + private readonly Timer _timer; - private long metricValue; + private long _metricValue; public ConsumerThrottlingMiddleware(ConsumerThrottlingConfiguration configuration) { - this.configuration = configuration; - this.thresholds = configuration.Thresholds + _configuration = configuration; + _thresholds = configuration.Thresholds .OrderByDescending(x => x.ThresholdValue) .ToList(); - this.timer = new Timer( + _timer = new Timer( state => _ = this.UpdateMetricValueAsync(), null, configuration.EvaluationInterval, @@ -32,9 +32,9 @@ public 
ConsumerThrottlingMiddleware(ConsumerThrottlingConfiguration configuratio public async Task Invoke(IMessageContext context, MiddlewareDelegate next) { - foreach (var threshold in this.thresholds) + foreach (var threshold in _thresholds) { - if (await threshold.TryExecuteActionAsync(this.metricValue).ConfigureAwait(false)) + if (await threshold.TryExecuteActionAsync(_metricValue).ConfigureAwait(false)) { break; } @@ -45,19 +45,19 @@ public async Task Invoke(IMessageContext context, MiddlewareDelegate next) public void Dispose() { - this.timer?.Dispose(); + _timer?.Dispose(); } private async Task UpdateMetricValueAsync() { long total = 0; - foreach (var metric in this.configuration.Metrics) + foreach (var metric in _configuration.Metrics) { total += await metric.GetValueAsync().ConfigureAwait(false); } - this.metricValue = total; + _metricValue = total; } } } diff --git a/src/KafkaFlow/Middlewares/ConsumerThrottling/ConsumerThrottlingThreshold.cs b/src/KafkaFlow/Middlewares/ConsumerThrottling/ConsumerThrottlingThreshold.cs index 89ef0ede9..b2a281d73 100644 --- a/src/KafkaFlow/Middlewares/ConsumerThrottling/ConsumerThrottlingThreshold.cs +++ b/src/KafkaFlow/Middlewares/ConsumerThrottling/ConsumerThrottlingThreshold.cs @@ -1,14 +1,14 @@ +using System.Threading.Tasks; + namespace KafkaFlow.Middlewares.ConsumerThrottling { - using System.Threading.Tasks; - internal class ConsumerThrottlingThreshold : IConsumerThrottlingThreshold { - private readonly IConsumerThrottlingAction action; + private readonly IConsumerThrottlingAction _action; public ConsumerThrottlingThreshold(long thresholdValue, IConsumerThrottlingAction action) { - this.action = action; + _action = action; this.ThresholdValue = thresholdValue; } @@ -21,7 +21,7 @@ public async Task TryExecuteActionAsync(long metricValue) return false; } - await this.action.ExecuteAsync(); + await _action.ExecuteAsync(); return true; } diff --git a/src/KafkaFlow/Middlewares/ConsumerThrottling/IConsumerThrottlingAction.cs 
b/src/KafkaFlow/Middlewares/ConsumerThrottling/IConsumerThrottlingAction.cs index a7f0c8f56..07cab0b53 100644 --- a/src/KafkaFlow/Middlewares/ConsumerThrottling/IConsumerThrottlingAction.cs +++ b/src/KafkaFlow/Middlewares/ConsumerThrottling/IConsumerThrottlingAction.cs @@ -1,7 +1,7 @@ +using System.Threading.Tasks; + namespace KafkaFlow.Middlewares.ConsumerThrottling { - using System.Threading.Tasks; - /// /// Defines a throttling action that can be executed by a KafkaFlow consumer. /// diff --git a/src/KafkaFlow/Middlewares/ConsumerThrottling/IConsumerThrottlingMetric.cs b/src/KafkaFlow/Middlewares/ConsumerThrottling/IConsumerThrottlingMetric.cs index 3fd4ce475..bf34851d6 100644 --- a/src/KafkaFlow/Middlewares/ConsumerThrottling/IConsumerThrottlingMetric.cs +++ b/src/KafkaFlow/Middlewares/ConsumerThrottling/IConsumerThrottlingMetric.cs @@ -1,7 +1,7 @@ +using System.Threading.Tasks; + namespace KafkaFlow.Middlewares.ConsumerThrottling { - using System.Threading.Tasks; - /// /// Defines a metric that is used by the KafkaFlow consumer throttling feature. 
/// diff --git a/src/KafkaFlow/Middlewares/ConsumerThrottling/IConsumerThrottlingThreshold.cs b/src/KafkaFlow/Middlewares/ConsumerThrottling/IConsumerThrottlingThreshold.cs index b13e8f312..d0780c4ce 100644 --- a/src/KafkaFlow/Middlewares/ConsumerThrottling/IConsumerThrottlingThreshold.cs +++ b/src/KafkaFlow/Middlewares/ConsumerThrottling/IConsumerThrottlingThreshold.cs @@ -1,7 +1,7 @@ +using System.Threading.Tasks; + namespace KafkaFlow.Middlewares.ConsumerThrottling { - using System.Threading.Tasks; - internal interface IConsumerThrottlingThreshold { long ThresholdValue { get; } diff --git a/src/KafkaFlow/Middlewares/Serializer/Configuration/ConsumerMiddlewareConfigurationBuilder.cs b/src/KafkaFlow/Middlewares/Serializer/Configuration/ConsumerMiddlewareConfigurationBuilder.cs index bae8b29c6..58105ca3b 100644 --- a/src/KafkaFlow/Middlewares/Serializer/Configuration/ConsumerMiddlewareConfigurationBuilder.cs +++ b/src/KafkaFlow/Middlewares/Serializer/Configuration/ConsumerMiddlewareConfigurationBuilder.cs @@ -1,10 +1,10 @@ -namespace KafkaFlow -{ - using System; - using KafkaFlow.Configuration; - using KafkaFlow.Middlewares.Serializer; - using KafkaFlow.Middlewares.Serializer.Resolvers; +using System; +using KafkaFlow.Configuration; +using KafkaFlow.Middlewares.Serializer; +using KafkaFlow.Middlewares.Serializer.Resolvers; +namespace KafkaFlow +{ /// /// No needed /// diff --git a/src/KafkaFlow/Middlewares/Serializer/Configuration/ProducerMiddlewareConfigurationBuilder.cs b/src/KafkaFlow/Middlewares/Serializer/Configuration/ProducerMiddlewareConfigurationBuilder.cs index 83b667532..43327258f 100644 --- a/src/KafkaFlow/Middlewares/Serializer/Configuration/ProducerMiddlewareConfigurationBuilder.cs +++ b/src/KafkaFlow/Middlewares/Serializer/Configuration/ProducerMiddlewareConfigurationBuilder.cs @@ -1,10 +1,10 @@ -namespace KafkaFlow -{ - using System; - using KafkaFlow.Configuration; - using KafkaFlow.Middlewares.Serializer; - using 
KafkaFlow.Middlewares.Serializer.Resolvers; +using System; +using KafkaFlow.Configuration; +using KafkaFlow.Middlewares.Serializer; +using KafkaFlow.Middlewares.Serializer.Resolvers; +namespace KafkaFlow +{ /// /// No needed /// diff --git a/src/KafkaFlow/Middlewares/Serializer/DeserializerConsumerMiddleware.cs b/src/KafkaFlow/Middlewares/Serializer/DeserializerConsumerMiddleware.cs index 191ae420a..fe929b05f 100644 --- a/src/KafkaFlow/Middlewares/Serializer/DeserializerConsumerMiddleware.cs +++ b/src/KafkaFlow/Middlewares/Serializer/DeserializerConsumerMiddleware.cs @@ -1,17 +1,17 @@ -namespace KafkaFlow.Middlewares.Serializer -{ - using System; - using System.IO; - using System.Threading.Tasks; - using KafkaFlow.Middlewares.Serializer.Resolvers; +using System; +using System.IO; +using System.Threading.Tasks; +using KafkaFlow.Middlewares.Serializer.Resolvers; +namespace KafkaFlow.Middlewares.Serializer +{ /// /// Middleware to deserialize messages when consuming /// public class DeserializerConsumerMiddleware : IMessageMiddleware { - private readonly IDeserializer deserializer; - private readonly IMessageTypeResolver typeResolver; + private readonly IDeserializer _deserializer; + private readonly IMessageTypeResolver _typeResolver; /// /// Initializes a new instance of the class. 
@@ -22,8 +22,8 @@ public DeserializerConsumerMiddleware( IDeserializer deserializer, IMessageTypeResolver typeResolver) { - this.deserializer = deserializer; - this.typeResolver = typeResolver; + _deserializer = deserializer; + _typeResolver = typeResolver; } /// @@ -53,7 +53,7 @@ public async Task Invoke(IMessageContext context, MiddlewareDelegate next) return; } - var messageType = await this.typeResolver.OnConsumeAsync(context); + var messageType = await _typeResolver.OnConsumeAsync(context); if (messageType is null) { @@ -62,7 +62,7 @@ public async Task Invoke(IMessageContext context, MiddlewareDelegate next) using var stream = new MemoryStream(rawData); - var data = await this.deserializer + var data = await _deserializer .DeserializeAsync( stream, messageType, diff --git a/src/KafkaFlow/Middlewares/Serializer/Resolvers/DefaultTypeResolver.cs b/src/KafkaFlow/Middlewares/Serializer/Resolvers/DefaultTypeResolver.cs index b1bf40385..368b6a80c 100644 --- a/src/KafkaFlow/Middlewares/Serializer/Resolvers/DefaultTypeResolver.cs +++ b/src/KafkaFlow/Middlewares/Serializer/Resolvers/DefaultTypeResolver.cs @@ -1,8 +1,8 @@ +using System; +using System.Threading.Tasks; + namespace KafkaFlow.Middlewares.Serializer.Resolvers { - using System; - using System.Threading.Tasks; - internal class DefaultTypeResolver : IMessageTypeResolver { private const string MessageType = "Message-Type"; @@ -12,7 +12,7 @@ public ValueTask OnConsumeAsync(IMessageContext context) var typeName = context.Headers.GetString(MessageType); return typeName is null ? 
- new ValueTask((Type) null) : + new ValueTask((Type)null) : new ValueTask(Type.GetType(typeName)); } diff --git a/src/KafkaFlow/Middlewares/Serializer/Resolvers/IMessageTypeResolver.cs b/src/KafkaFlow/Middlewares/Serializer/Resolvers/IMessageTypeResolver.cs index 44aba2ebf..23b9d2748 100644 --- a/src/KafkaFlow/Middlewares/Serializer/Resolvers/IMessageTypeResolver.cs +++ b/src/KafkaFlow/Middlewares/Serializer/Resolvers/IMessageTypeResolver.cs @@ -1,8 +1,8 @@ -namespace KafkaFlow.Middlewares.Serializer.Resolvers -{ - using System; - using System.Threading.Tasks; +using System; +using System.Threading.Tasks; +namespace KafkaFlow.Middlewares.Serializer.Resolvers +{ /// /// Used by the serializer middleware to resolve the type when consuming and store it when producing /// diff --git a/src/KafkaFlow/Middlewares/Serializer/Resolvers/SingleMessageTypeResolver.cs b/src/KafkaFlow/Middlewares/Serializer/Resolvers/SingleMessageTypeResolver.cs index bc75f6144..e19ae5bad 100644 --- a/src/KafkaFlow/Middlewares/Serializer/Resolvers/SingleMessageTypeResolver.cs +++ b/src/KafkaFlow/Middlewares/Serializer/Resolvers/SingleMessageTypeResolver.cs @@ -1,14 +1,14 @@ +using System; +using System.Threading.Tasks; + namespace KafkaFlow.Middlewares.Serializer.Resolvers { - using System; - using System.Threading.Tasks; - /// /// The message type resolver to be used when all messages are the same type /// public class SingleMessageTypeResolver : IMessageTypeResolver { - private readonly Type messageType; + private readonly Type _messageType; /// /// Initializes a new instance of the class. 
@@ -16,11 +16,11 @@ public class SingleMessageTypeResolver : IMessageTypeResolver /// The message type to be returned when consuming public SingleMessageTypeResolver(Type messageType) { - this.messageType = messageType; + _messageType = messageType; } /// - public ValueTask OnConsumeAsync(IMessageContext context) => new ValueTask(this.messageType); + public ValueTask OnConsumeAsync(IMessageContext context) => new ValueTask(_messageType); /// public ValueTask OnProduceAsync(IMessageContext context) diff --git a/src/KafkaFlow/Middlewares/Serializer/SerializerProducerMiddleware.cs b/src/KafkaFlow/Middlewares/Serializer/SerializerProducerMiddleware.cs index b91f6fcb3..cc544cc8d 100644 --- a/src/KafkaFlow/Middlewares/Serializer/SerializerProducerMiddleware.cs +++ b/src/KafkaFlow/Middlewares/Serializer/SerializerProducerMiddleware.cs @@ -1,19 +1,19 @@ -namespace KafkaFlow.Middlewares.Serializer -{ - using System.Threading.Tasks; - using KafkaFlow.Middlewares.Serializer.Resolvers; - using Microsoft.IO; +using System.Threading.Tasks; +using KafkaFlow.Middlewares.Serializer.Resolvers; +using Microsoft.IO; +namespace KafkaFlow.Middlewares.Serializer +{ /// /// Middleware to serialize messages when producing /// public class SerializerProducerMiddleware : IMessageMiddleware { - private static readonly RecyclableMemoryStreamManager MemoryStreamManager = new(); + private static readonly RecyclableMemoryStreamManager s_memoryStreamManager = new(); - private readonly ISerializer serializer; + private readonly ISerializer _serializer; - private readonly IMessageTypeResolver typeResolver; + private readonly IMessageTypeResolver _typeResolver; /// /// Initializes a new instance of the class. 
@@ -24,8 +24,8 @@ public SerializerProducerMiddleware( ISerializer serializer, IMessageTypeResolver typeResolver) { - this.serializer = serializer; - this.typeResolver = typeResolver; + _serializer = serializer; + _typeResolver = typeResolver; } /// @@ -36,13 +36,13 @@ public SerializerProducerMiddleware( /// public async Task Invoke(IMessageContext context, MiddlewareDelegate next) { - await this.typeResolver.OnProduceAsync(context); + await _typeResolver.OnProduceAsync(context); byte[] messageValue; - using (var buffer = MemoryStreamManager.GetStream()) + using (var buffer = s_memoryStreamManager.GetStream()) { - await this.serializer + await _serializer .SerializeAsync( context.Message.Value, buffer, diff --git a/src/KafkaFlow/Middlewares/TypedHandler/Configuration/TypedHandlerConfiguration.cs b/src/KafkaFlow/Middlewares/TypedHandler/Configuration/TypedHandlerConfiguration.cs index 22d5407e2..5df72bd20 100644 --- a/src/KafkaFlow/Middlewares/TypedHandler/Configuration/TypedHandlerConfiguration.cs +++ b/src/KafkaFlow/Middlewares/TypedHandler/Configuration/TypedHandlerConfiguration.cs @@ -1,7 +1,7 @@ +using System; + namespace KafkaFlow.Middlewares.TypedHandler.Configuration { - using System; - internal class TypedHandlerConfiguration { public HandlerTypeMapping HandlerMapping { get; } = new(); diff --git a/src/KafkaFlow/Middlewares/TypedHandler/Configuration/TypedHandlerConfigurationBuilder.cs b/src/KafkaFlow/Middlewares/TypedHandler/Configuration/TypedHandlerConfigurationBuilder.cs index acea49420..d4bd655d3 100644 --- a/src/KafkaFlow/Middlewares/TypedHandler/Configuration/TypedHandlerConfigurationBuilder.cs +++ b/src/KafkaFlow/Middlewares/TypedHandler/Configuration/TypedHandlerConfigurationBuilder.cs @@ -1,22 +1,22 @@ -namespace KafkaFlow -{ - using System; - using System.Collections.Generic; - using System.Linq; - using System.Reflection; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; - using 
KafkaFlow.Middlewares.TypedHandler.Configuration; +using KafkaFlow.Middlewares.TypedHandler.Configuration; +namespace KafkaFlow +{ /// /// Builder class for typed handler configuration /// public class TypedHandlerConfigurationBuilder { - private readonly IDependencyConfigurator dependencyConfigurator; - private readonly List handlers = new(); + private readonly IDependencyConfigurator _dependencyConfigurator; + private readonly List _handlers = new(); - private Action onNoHandlerFound = (_) => { }; - private InstanceLifetime serviceLifetime = InstanceLifetime.Singleton; + private Action _onNoHandlerFound = (_) => { }; + private InstanceLifetime _serviceLifetime = InstanceLifetime.Singleton; /// /// Initializes a new instance of the class. @@ -24,7 +24,7 @@ public class TypedHandlerConfigurationBuilder /// Dependency injection configurator public TypedHandlerConfigurationBuilder(IDependencyConfigurator dependencyConfigurator) { - this.dependencyConfigurator = dependencyConfigurator; + _dependencyConfigurator = dependencyConfigurator; } /// @@ -47,7 +47,7 @@ public TypedHandlerConfigurationBuilder AddHandlersFromAssemblyOf(params Type[] .Where(x => x.IsClass && !x.IsAbstract && typeof(IMessageHandler).IsAssignableFrom(x)) .Distinct(); - this.handlers.AddRange(handlerTypes); + _handlers.AddRange(handlerTypes); return this; } @@ -58,7 +58,7 @@ public TypedHandlerConfigurationBuilder AddHandlersFromAssemblyOf(params Type[] /// public TypedHandlerConfigurationBuilder AddHandlers(IEnumerable handlers) { - this.handlers.AddRange(handlers); + _handlers.AddRange(handlers); return this; } @@ -70,7 +70,7 @@ public TypedHandlerConfigurationBuilder AddHandlers(IEnumerable handlers) public TypedHandlerConfigurationBuilder AddHandler() where T : class, IMessageHandler { - this.handlers.Add(typeof(T)); + _handlers.Add(typeof(T)); return this; } @@ -81,7 +81,7 @@ public TypedHandlerConfigurationBuilder AddHandler() /// public TypedHandlerConfigurationBuilder 
WhenNoHandlerFound(Action handler) { - this.onNoHandlerFound = handler; + _onNoHandlerFound = handler; return this; } @@ -92,7 +92,7 @@ public TypedHandlerConfigurationBuilder WhenNoHandlerFound(Action public TypedHandlerConfigurationBuilder WithHandlerLifetime(InstanceLifetime lifetime) { - this.serviceLifetime = lifetime; + _serviceLifetime = lifetime; return this; } @@ -100,15 +100,15 @@ internal TypedHandlerConfiguration Build() { var configuration = new TypedHandlerConfiguration { - OnNoHandlerFound = this.onNoHandlerFound, + OnNoHandlerFound = _onNoHandlerFound, }; - foreach (var handlerType in this.handlers) + foreach (var handlerType in _handlers) { - this.dependencyConfigurator.Add( + _dependencyConfigurator.Add( handlerType, handlerType, - this.serviceLifetime); + _serviceLifetime); var messageTypes = handlerType .GetInterfaces() diff --git a/src/KafkaFlow/Middlewares/TypedHandler/HandlerExecutor.cs b/src/KafkaFlow/Middlewares/TypedHandler/HandlerExecutor.cs index 0af39ecd0..2753d4079 100644 --- a/src/KafkaFlow/Middlewares/TypedHandler/HandlerExecutor.cs +++ b/src/KafkaFlow/Middlewares/TypedHandler/HandlerExecutor.cs @@ -1,18 +1,18 @@ +using System; +using System.Collections.Concurrent; +using System.Threading.Tasks; + namespace KafkaFlow.Middlewares.TypedHandler { - using System; - using System.Collections.Concurrent; - using System.Threading.Tasks; - internal abstract class HandlerExecutor { - private static readonly ConcurrentDictionary Executors = new(); + private static readonly ConcurrentDictionary s_executors = new(); public static HandlerExecutor GetExecutor(Type messageType) { - return Executors.SafeGetOrAdd( + return s_executors.SafeGetOrAdd( messageType, - _ => (HandlerExecutor) Activator.CreateInstance(typeof(InnerHandlerExecutor<>).MakeGenericType(messageType))); + _ => (HandlerExecutor)Activator.CreateInstance(typeof(InnerHandlerExecutor<>).MakeGenericType(messageType))); } public abstract Task Execute(object handler, IMessageContext 
context, object message); @@ -21,9 +21,9 @@ private class InnerHandlerExecutor : HandlerExecutor { public override Task Execute(object handler, IMessageContext context, object message) { - var h = (IMessageHandler) handler; + var h = (IMessageHandler)handler; - return h.Handle(context, (T) message); + return h.Handle(context, (T)message); } } } diff --git a/src/KafkaFlow/Middlewares/TypedHandler/HandlerTypeMapping.cs b/src/KafkaFlow/Middlewares/TypedHandler/HandlerTypeMapping.cs index 986ee1c2a..766eb6b04 100644 --- a/src/KafkaFlow/Middlewares/TypedHandler/HandlerTypeMapping.cs +++ b/src/KafkaFlow/Middlewares/TypedHandler/HandlerTypeMapping.cs @@ -1,20 +1,20 @@ +using System; +using System.Collections.Generic; + namespace KafkaFlow.Middlewares.TypedHandler { - using System; - using System.Collections.Generic; - internal class HandlerTypeMapping { - private static readonly IReadOnlyList EmptyList = new List().AsReadOnly(); + private static readonly IReadOnlyList s_emptyList = new List().AsReadOnly(); - private readonly Dictionary> mapping = new(); + private readonly Dictionary> _mapping = new(); public void AddMapping(Type messageType, Type handlerType) { - if (!this.mapping.TryGetValue(messageType, out var handlers)) + if (!_mapping.TryGetValue(messageType, out var handlers)) { handlers = new List(); - this.mapping.Add(messageType, handlers); + _mapping.Add(messageType, handlers); } handlers.Add(handlerType); @@ -24,12 +24,12 @@ public IReadOnlyList GetHandlersTypes(Type messageType) { if (messageType is null) { - return EmptyList; + return s_emptyList; } - return this.mapping.TryGetValue(messageType, out var handlerType) ? + return _mapping.TryGetValue(messageType, out var handlerType) ? 
handlerType : - EmptyList; + s_emptyList; } } } diff --git a/src/KafkaFlow/Middlewares/TypedHandler/TypedHandlerMiddleware.cs b/src/KafkaFlow/Middlewares/TypedHandler/TypedHandlerMiddleware.cs index 37d308491..8de550dbd 100644 --- a/src/KafkaFlow/Middlewares/TypedHandler/TypedHandlerMiddleware.cs +++ b/src/KafkaFlow/Middlewares/TypedHandler/TypedHandlerMiddleware.cs @@ -1,31 +1,31 @@ +using System.Linq; +using System.Threading.Tasks; +using KafkaFlow.Middlewares.TypedHandler.Configuration; + namespace KafkaFlow.Middlewares.TypedHandler { - using System.Linq; - using System.Threading.Tasks; - using KafkaFlow.Middlewares.TypedHandler.Configuration; - internal class TypedHandlerMiddleware : IMessageMiddleware { - private readonly IDependencyResolver dependencyResolver; - private readonly TypedHandlerConfiguration configuration; + private readonly IDependencyResolver _dependencyResolver; + private readonly TypedHandlerConfiguration _configuration; public TypedHandlerMiddleware( IDependencyResolver dependencyResolver, TypedHandlerConfiguration configuration) { - this.dependencyResolver = dependencyResolver; - this.configuration = configuration; + _dependencyResolver = dependencyResolver; + _configuration = configuration; } public async Task Invoke(IMessageContext context, MiddlewareDelegate next) { - var handlers = this.configuration + var handlers = _configuration .HandlerMapping .GetHandlersTypes(context.Message.Value?.GetType()); if (!handlers.Any()) { - this.configuration.OnNoHandlerFound(context); + _configuration.OnNoHandlerFound(context); } else { @@ -36,7 +36,7 @@ await Task.WhenAll( HandlerExecutor .GetExecutor(context.Message.Value.GetType()) .Execute( - this.dependencyResolver.Resolve(handler), + _dependencyResolver.Resolve(handler), context, context.Message.Value))) .ConfigureAwait(false); diff --git a/src/KafkaFlow/Producers/BatchProduceException.cs b/src/KafkaFlow/Producers/BatchProduceException.cs index 23294f9d9..733f704fe 100644 --- 
a/src/KafkaFlow/Producers/BatchProduceException.cs +++ b/src/KafkaFlow/Producers/BatchProduceException.cs @@ -1,8 +1,8 @@ +using System; +using System.Collections.Generic; + namespace KafkaFlow.Producers { - using System; - using System.Collections.Generic; - /// /// Exception thrown by /// diff --git a/src/KafkaFlow/Producers/BatchProduceExtension.cs b/src/KafkaFlow/Producers/BatchProduceExtension.cs index 0cf70a929..69bbb1c38 100644 --- a/src/KafkaFlow/Producers/BatchProduceExtension.cs +++ b/src/KafkaFlow/Producers/BatchProduceExtension.cs @@ -1,9 +1,9 @@ +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; + namespace KafkaFlow.Producers { - using System.Collections.Generic; - using System.Threading; - using System.Threading.Tasks; - /// /// No needed /// diff --git a/src/KafkaFlow/Producers/BatchProduceItem.cs b/src/KafkaFlow/Producers/BatchProduceItem.cs index e23cfd28d..08073e8f5 100644 --- a/src/KafkaFlow/Producers/BatchProduceItem.cs +++ b/src/KafkaFlow/Producers/BatchProduceItem.cs @@ -1,7 +1,7 @@ +using Confluent.Kafka; + namespace KafkaFlow.Producers { - using Confluent.Kafka; - /// /// Represents a message to be produced in batch /// diff --git a/src/KafkaFlow/Producers/IMessageProducer.cs b/src/KafkaFlow/Producers/IMessageProducer.cs index 5eaa83eca..d06ae0f76 100644 --- a/src/KafkaFlow/Producers/IMessageProducer.cs +++ b/src/KafkaFlow/Producers/IMessageProducer.cs @@ -1,9 +1,9 @@ +using System; +using System.Threading.Tasks; +using Confluent.Kafka; + namespace KafkaFlow { - using System; - using System.Threading.Tasks; - using Confluent.Kafka; - /// /// Provides access to the kafka message producer /// diff --git a/src/KafkaFlow/Producers/IProducerAccessor.cs b/src/KafkaFlow/Producers/IProducerAccessor.cs index 629b44665..75b478a0a 100644 --- a/src/KafkaFlow/Producers/IProducerAccessor.cs +++ b/src/KafkaFlow/Producers/IProducerAccessor.cs @@ -1,7 +1,7 @@ +using System.Collections.Generic; + namespace 
KafkaFlow.Producers { - using System.Collections.Generic; - /// /// Provides access to the configured producers /// diff --git a/src/KafkaFlow/Producers/MessageProducer.cs b/src/KafkaFlow/Producers/MessageProducer.cs index ab12773d3..76b6e7a5c 100644 --- a/src/KafkaFlow/Producers/MessageProducer.cs +++ b/src/KafkaFlow/Producers/MessageProducer.cs @@ -1,35 +1,35 @@ +using System; +using System.Text; +using System.Threading.Tasks; +using Confluent.Kafka; +using KafkaFlow.Configuration; + namespace KafkaFlow.Producers { - using System; - using System.Text; - using System.Threading.Tasks; - using Confluent.Kafka; - using KafkaFlow.Configuration; - internal class MessageProducer : IMessageProducer, IDisposable { - private readonly IDependencyResolverScope producerDependencyScope; - private readonly ILogHandler logHandler; - private readonly IProducerConfiguration configuration; - private readonly MiddlewareExecutor middlewareExecutor; - private readonly GlobalEvents globalEvents; + private readonly IDependencyResolverScope _producerDependencyScope; + private readonly ILogHandler _logHandler; + private readonly IProducerConfiguration _configuration; + private readonly MiddlewareExecutor _middlewareExecutor; + private readonly GlobalEvents _globalEvents; - private readonly object producerCreationSync = new(); + private readonly object _producerCreationSync = new(); - private volatile IProducer producer; + private volatile IProducer _producer; public MessageProducer( IDependencyResolver dependencyResolver, IProducerConfiguration configuration) { - this.producerDependencyScope = dependencyResolver.CreateScope(); - this.logHandler = dependencyResolver.Resolve(); - this.configuration = configuration; - this.middlewareExecutor = new MiddlewareExecutor(configuration.MiddlewaresConfigurations); - this.globalEvents = dependencyResolver.Resolve(); + _producerDependencyScope = dependencyResolver.CreateScope(); + _logHandler = dependencyResolver.Resolve(); + _configuration = 
configuration; + _middlewareExecutor = new MiddlewareExecutor(configuration.MiddlewaresConfigurations); + _globalEvents = dependencyResolver.Resolve(); } - public string ProducerName => this.configuration.Name; + public string ProducerName => _configuration.Name; public async Task> ProduceAsync( string topic, @@ -40,7 +40,7 @@ public async Task> ProduceAsync( { DeliveryResult report = null; - using var messageScope = this.producerDependencyScope.Resolver.CreateScope(); + using var messageScope = _producerDependencyScope.Resolver.CreateScope(); var messageContext = this.CreateMessageContext( topic, @@ -49,11 +49,11 @@ public async Task> ProduceAsync( headers, messageScope.Resolver); - await this.globalEvents.FireMessageProduceStartedAsync(new MessageEventContext(messageContext)); + await _globalEvents.FireMessageProduceStartedAsync(new MessageEventContext(messageContext)); try { - await this.middlewareExecutor + await _middlewareExecutor .Execute( messageContext, async context => @@ -64,11 +64,11 @@ await this.middlewareExecutor }) .ConfigureAwait(false); - await this.globalEvents.FireMessageProduceCompletedAsync(new MessageEventContext(messageContext)); + await _globalEvents.FireMessageProduceCompletedAsync(new MessageEventContext(messageContext)); } catch (Exception e) { - await this.globalEvents.FireMessageProduceErrorAsync(new MessageErrorEventContext(messageContext, e)); + await _globalEvents.FireMessageProduceErrorAsync(new MessageErrorEventContext(messageContext, e)); throw; } @@ -81,14 +81,14 @@ public Task> ProduceAsync( IMessageHeaders headers = null, int? 
partition = null) { - if (string.IsNullOrWhiteSpace(this.configuration.DefaultTopic)) + if (string.IsNullOrWhiteSpace(_configuration.DefaultTopic)) { throw new InvalidOperationException( $"There is no default topic defined for producer {this.ProducerName}"); } return this.ProduceAsync( - this.configuration.DefaultTopic, + _configuration.DefaultTopic, messageKey, messageValue, headers, @@ -103,7 +103,7 @@ public void Produce( Action> deliveryHandler = null, int? partition = null) { - var messageScope = this.producerDependencyScope.Resolver.CreateScope(); + var messageScope = _producerDependencyScope.Resolver.CreateScope(); var messageContext = this.CreateMessageContext( topic, @@ -112,9 +112,9 @@ public void Produce( headers, messageScope.Resolver); - this.globalEvents.FireMessageProduceStartedAsync(new MessageEventContext(messageContext)); + _globalEvents.FireMessageProduceStartedAsync(new MessageEventContext(messageContext)); - this.middlewareExecutor + _middlewareExecutor .Execute( messageContext, context => @@ -157,7 +157,7 @@ public void Produce( messageScope.Dispose(); }); - this.globalEvents.FireMessageProduceCompletedAsync(new MessageEventContext(messageContext)); + _globalEvents.FireMessageProduceCompletedAsync(new MessageEventContext(messageContext)); } public void Produce( @@ -167,14 +167,14 @@ public void Produce( Action> deliveryHandler = null, int? 
partition = null) { - if (string.IsNullOrWhiteSpace(this.configuration.DefaultTopic)) + if (string.IsNullOrWhiteSpace(_configuration.DefaultTopic)) { throw new InvalidOperationException( $"There is no default topic defined for producer {this.ProducerName}"); } this.Produce( - this.configuration.DefaultTopic, + _configuration.DefaultTopic, messageKey, messageValue, headers, @@ -184,7 +184,7 @@ public void Produce( public void Dispose() { - this.producer?.Dispose(); + _producer?.Dispose(); } private static void FillContextWithResultMetadata(IMessageContext context, DeliveryResult result) @@ -227,19 +227,19 @@ private static Message CreateMessage(IMessageContext context) private IProducer EnsureProducer() { - if (this.producer != null) + if (_producer != null) { - return this.producer; + return _producer; } - lock (this.producerCreationSync) + lock (_producerCreationSync) { - if (this.producer != null) + if (_producer != null) { - return this.producer; + return _producer; } - var producerBuilder = new ProducerBuilder(this.configuration.GetKafkaConfig()) + var producerBuilder = new ProducerBuilder(_configuration.GetKafkaConfig()) .SetErrorHandler( (_, error) => { @@ -249,32 +249,32 @@ private IProducer EnsureProducer() } else { - this.logHandler.Warning("Kafka Producer Error", new { Error = error }); + _logHandler.Warning("Kafka Producer Error", new { Error = error }); } }) .SetStatisticsHandler( (_, statistics) => { - foreach (var handler in this.configuration.StatisticsHandlers) + foreach (var handler in _configuration.StatisticsHandlers) { handler.Invoke(statistics); } }); - return this.producer = this.configuration.CustomFactory( + return _producer = _configuration.CustomFactory( producerBuilder.Build(), - this.producerDependencyScope.Resolver); + _producerDependencyScope.Resolver); } } private void InvalidateProducer(Error error, DeliveryResult result) { - lock (this.producerCreationSync) + lock (_producerCreationSync) { - this.producer = null; + _producer = null; 
} - this.logHandler.Error( + _logHandler.Error( "Kafka produce fatal error occurred. The producer will be recreated", result is null ? new KafkaException(error) : new ProduceException(error, result), new { Error = error }); @@ -297,7 +297,7 @@ private async Task> InternalProduceAsync(IMessage } catch (ProduceException e) { - await this.globalEvents.FireMessageProduceErrorAsync(new MessageErrorEventContext(context, e)); + await _globalEvents.FireMessageProduceErrorAsync(new MessageErrorEventContext(context, e)); if (e.Error.IsFatal) { @@ -360,8 +360,8 @@ private MessageContext CreateMessageContext( headers, messageScopedResolver, null, - new ProducerContext(topic, this.producerDependencyScope.Resolver), - this.configuration.Cluster.Brokers); + new ProducerContext(topic, _producerDependencyScope.Resolver), + _configuration.Cluster.Brokers); } } } diff --git a/src/KafkaFlow/Producers/MessageProducerWrapper.cs b/src/KafkaFlow/Producers/MessageProducerWrapper.cs index 78268f1d9..8115a3411 100644 --- a/src/KafkaFlow/Producers/MessageProducerWrapper.cs +++ b/src/KafkaFlow/Producers/MessageProducerWrapper.cs @@ -1,19 +1,19 @@ +using System; +using System.Threading.Tasks; +using Confluent.Kafka; + namespace KafkaFlow.Producers { - using System; - using System.Threading.Tasks; - using Confluent.Kafka; - internal class MessageProducerWrapper : IMessageProducer { - private readonly IMessageProducer producer; + private readonly IMessageProducer _producer; public MessageProducerWrapper(IMessageProducer producer) { - this.producer = producer; + _producer = producer; } - public string ProducerName => this.producer.ProducerName; + public string ProducerName => _producer.ProducerName; public Task> ProduceAsync( string topic, @@ -22,7 +22,7 @@ public Task> ProduceAsync( IMessageHeaders headers = null, int? 
partition = null) { - return this.producer.ProduceAsync( + return _producer.ProduceAsync( topic, messageKey, message, @@ -36,7 +36,7 @@ public Task> ProduceAsync( IMessageHeaders headers = null, int? partition = null) { - return this.producer.ProduceAsync( + return _producer.ProduceAsync( messageKey, message, headers, @@ -51,7 +51,7 @@ public void Produce( Action> deliveryHandler = null, int? partition = null) { - this.producer.Produce( + _producer.Produce( topic, messageKey, message, @@ -67,7 +67,7 @@ public void Produce( Action> deliveryHandler = null, int? partition = null) { - this.producer.Produce( + _producer.Produce( messageKey, message, headers, diff --git a/src/KafkaFlow/Producers/ProducerAccessor.cs b/src/KafkaFlow/Producers/ProducerAccessor.cs index 0bdecc0f2..cd844c620 100644 --- a/src/KafkaFlow/Producers/ProducerAccessor.cs +++ b/src/KafkaFlow/Producers/ProducerAccessor.cs @@ -1,25 +1,25 @@ +using System.Collections.Generic; +using System.Linq; + namespace KafkaFlow.Producers { - using System.Collections.Generic; - using System.Linq; - internal class ProducerAccessor : IProducerAccessor { - private readonly Dictionary producers; + private readonly Dictionary _producers; public ProducerAccessor(IEnumerable producers) { - this.producers = producers.ToDictionary(x => x.ProducerName); + _producers = producers.ToDictionary(x => x.ProducerName); } - public IEnumerable All => this.producers.Values; + public IEnumerable All => _producers.Values; public IMessageProducer this[string name] => this.GetProducer(name); public IMessageProducer GetProducer(string name) => - this.producers.TryGetValue(name, out var consumer) ? consumer : null; + _producers.TryGetValue(name, out var consumer) ? consumer : null; public IMessageProducer GetProducer() => - this.producers.TryGetValue(typeof(TProducer).FullName!, out var consumer) ? consumer : null; + _producers.TryGetValue(typeof(TProducer).FullName!, out var consumer) ? 
consumer : null; } } diff --git a/src/KafkaFlow/TopicMetadata.cs b/src/KafkaFlow/TopicMetadata.cs index cf3397cdf..1097b3fbc 100644 --- a/src/KafkaFlow/TopicMetadata.cs +++ b/src/KafkaFlow/TopicMetadata.cs @@ -1,7 +1,7 @@ +using System.Collections.Generic; + namespace KafkaFlow { - using System.Collections.Generic; - public record TopicMetadata { public TopicMetadata(string name, IReadOnlyCollection partitions) diff --git a/src/KafkaFlow/TopicPartitionMetadata.cs b/src/KafkaFlow/TopicPartitionMetadata.cs index 37ecd4a8e..c32462886 100644 --- a/src/KafkaFlow/TopicPartitionMetadata.cs +++ b/src/KafkaFlow/TopicPartitionMetadata.cs @@ -2,9 +2,9 @@ namespace KafkaFlow { public class TopicPartitionMetadata { - public TopicPartitionMetadata(int Id) + public TopicPartitionMetadata(int id) { - this.Id = Id; + this.Id = id; } public int Id { get; } diff --git a/src/StyleCopAnalyzersDefault.ruleset b/src/StyleCopAnalyzersDefault.ruleset index 841a79a7a..233fb45e0 100644 --- a/src/StyleCopAnalyzersDefault.ruleset +++ b/src/StyleCopAnalyzersDefault.ruleset @@ -5,16 +5,16 @@ - + - + - + @@ -38,7 +38,7 @@ - + @@ -103,14 +103,14 @@ - + - - - + + + - + diff --git a/src/stylecop.json b/src/stylecop.json index 1fb82d1e8..0ac797437 100644 --- a/src/stylecop.json +++ b/src/stylecop.json @@ -11,6 +11,10 @@ }, "layoutRules": { "newlineAtEndOfFile": "require" + }, + "orderingRules": { + "systemUsingDirectivesFirst": true, + "usingDirectivesPlacement": "outsideNamespace" } } } diff --git a/src/KafkaFlow.IntegrationTests/CompressionSerializationTest.cs b/tests/KafkaFlow.IntegrationTests/CompressionSerializationTest.cs similarity index 52% rename from src/KafkaFlow.IntegrationTests/CompressionSerializationTest.cs rename to tests/KafkaFlow.IntegrationTests/CompressionSerializationTest.cs index 02be99616..a3b3a8d31 100644 --- a/src/KafkaFlow.IntegrationTests/CompressionSerializationTest.cs +++ b/tests/KafkaFlow.IntegrationTests/CompressionSerializationTest.cs @@ -1,27 +1,27 @@ +using System; 
+using System.Linq; +using System.Threading.Tasks; +using AutoFixture; +using global::Microsoft.Extensions.DependencyInjection; +using global::Microsoft.VisualStudio.TestTools.UnitTesting; +using KafkaFlow.IntegrationTests.Core; +using KafkaFlow.IntegrationTests.Core.Handlers; +using KafkaFlow.IntegrationTests.Core.Messages; +using KafkaFlow.IntegrationTests.Core.Producers; + namespace KafkaFlow.IntegrationTests { - using System; - using System.Linq; - using System.Threading.Tasks; - using AutoFixture; - using global::Microsoft.Extensions.DependencyInjection; - using global::Microsoft.VisualStudio.TestTools.UnitTesting; - using KafkaFlow.IntegrationTests.Core; - using KafkaFlow.IntegrationTests.Core.Handlers; - using KafkaFlow.IntegrationTests.Core.Messages; - using KafkaFlow.IntegrationTests.Core.Producers; - [TestClass] public class CompressionSerializationTest { - private readonly Fixture fixture = new(); + private readonly Fixture _fixture = new(); - private IServiceProvider provider; + private IServiceProvider _provider; [TestInitialize] public void Setup() { - this.provider = Bootstrapper.GetServiceProvider(); + _provider = Bootstrapper.GetServiceProvider(); MessageStorage.Clear(); } @@ -29,8 +29,8 @@ public void Setup() public async Task JsonGzipMessageTest() { // Arrange - var producer = this.provider.GetRequiredService>(); - var messages = this.fixture.CreateMany(10).ToList(); + var producer = _provider.GetRequiredService>(); + var messages = _fixture.CreateMany(10).ToList(); // Act await Task.WhenAll(messages.Select(m => producer.ProduceAsync(m.Id.ToString(), m))); @@ -46,8 +46,8 @@ public async Task JsonGzipMessageTest() public async Task ProtoBufGzipMessageTest() { // Arrange - var producer = this.provider.GetRequiredService>(); - var messages = this.fixture.CreateMany(10).ToList(); + var producer = _provider.GetRequiredService>(); + var messages = _fixture.CreateMany(10).ToList(); // Act await Task.WhenAll(messages.Select(m => 
producer.ProduceAsync(m.Id.ToString(), m))); diff --git a/tests/KafkaFlow.IntegrationTests/CompressionTest.cs b/tests/KafkaFlow.IntegrationTests/CompressionTest.cs new file mode 100644 index 000000000..d445d627d --- /dev/null +++ b/tests/KafkaFlow.IntegrationTests/CompressionTest.cs @@ -0,0 +1,44 @@ +using System; +using System.Linq; +using System.Threading.Tasks; +using AutoFixture; +using global::Microsoft.Extensions.DependencyInjection; +using global::Microsoft.VisualStudio.TestTools.UnitTesting; +using KafkaFlow.IntegrationTests.Core; +using KafkaFlow.IntegrationTests.Core.Handlers; +using KafkaFlow.IntegrationTests.Core.Producers; + +namespace KafkaFlow.IntegrationTests +{ + [TestClass] + public class CompressionTest + { + private readonly Fixture _fixture = new(); + + private IServiceProvider _provider; + + [TestInitialize] + public void Setup() + { + _provider = Bootstrapper.GetServiceProvider(); + MessageStorage.Clear(); + } + + [TestMethod] + public async Task GzipTest() + { + // Arrange + var producer = _provider.GetRequiredService>(); + var messages = _fixture.CreateMany(10).ToList(); + + // Act + await Task.WhenAll(messages.Select(m => producer.ProduceAsync(Guid.NewGuid().ToString(), m))); + + // Assert + foreach (var message in messages) + { + await MessageStorage.AssertMessageAsync(message); + } + } + } +} diff --git a/src/KafkaFlow.IntegrationTests/ConsumerTest.cs b/tests/KafkaFlow.IntegrationTests/ConsumerTest.cs similarity index 70% rename from src/KafkaFlow.IntegrationTests/ConsumerTest.cs rename to tests/KafkaFlow.IntegrationTests/ConsumerTest.cs index d80752cf3..2268cf8ff 100644 --- a/src/KafkaFlow.IntegrationTests/ConsumerTest.cs +++ b/tests/KafkaFlow.IntegrationTests/ConsumerTest.cs @@ -1,28 +1,28 @@ +using System; +using System.Linq; +using System.Threading.Tasks; +using AutoFixture; +using global::Microsoft.Extensions.DependencyInjection; +using global::Microsoft.VisualStudio.TestTools.UnitTesting; +using KafkaFlow.Consumers; +using 
KafkaFlow.IntegrationTests.Core; +using KafkaFlow.IntegrationTests.Core.Handlers; +using KafkaFlow.IntegrationTests.Core.Messages; +using KafkaFlow.IntegrationTests.Core.Producers; + namespace KafkaFlow.IntegrationTests { - using System; - using System.Linq; - using System.Threading.Tasks; - using AutoFixture; - using global::Microsoft.Extensions.DependencyInjection; - using global::Microsoft.VisualStudio.TestTools.UnitTesting; - using KafkaFlow.Consumers; - using KafkaFlow.IntegrationTests.Core; - using KafkaFlow.IntegrationTests.Core.Handlers; - using KafkaFlow.IntegrationTests.Core.Messages; - using KafkaFlow.IntegrationTests.Core.Producers; - [TestClass] public class ConsumerTest { - private readonly Fixture fixture = new(); + private readonly Fixture _fixture = new(); - private IServiceProvider provider; + private IServiceProvider _provider; [TestInitialize] public void Setup() { - this.provider = Bootstrapper.GetServiceProvider(); + _provider = Bootstrapper.GetServiceProvider(); MessageStorage.Clear(); } @@ -30,9 +30,9 @@ public void Setup() public async Task MultipleMessagesMultipleHandlersSingleTopicTest() { // Arrange - var producer = this.provider.GetRequiredService>(); - var messages1 = this.fixture.CreateMany(5).ToList(); - var messages2 = this.fixture.CreateMany(5).ToList(); + var producer = _provider.GetRequiredService>(); + var messages1 = _fixture.CreateMany(5).ToList(); + var messages2 = _fixture.CreateMany(5).ToList(); // Act await Task.WhenAll(messages1.Select(m => producer.ProduceAsync(m.Id.ToString(), m))); @@ -54,9 +54,9 @@ public async Task MultipleMessagesMultipleHandlersSingleTopicTest() public async Task MultipleTopicsSingleConsumerTest() { // Arrange - var producer1 = this.provider.GetRequiredService>(); - var producer2 = this.provider.GetRequiredService>(); - var messages = this.fixture.CreateMany(1).ToList(); + var producer1 = _provider.GetRequiredService>(); + var producer2 = _provider.GetRequiredService>(); + var messages = 
_fixture.CreateMany(1).ToList(); // Act messages.ForEach(m => producer1.Produce(m.Id.ToString(), m)); @@ -73,8 +73,8 @@ public async Task MultipleTopicsSingleConsumerTest() public async Task MultipleHandlersSingleTypeConsumerTest() { // Arrange - var producer = this.provider.GetRequiredService>(); - var messages = this.fixture.CreateMany(5).ToList(); + var producer = _provider.GetRequiredService>(); + var messages = _fixture.CreateMany(5).ToList(); // Act messages.ForEach(m => producer.Produce(m.Id.ToString(), m)); @@ -92,8 +92,8 @@ public async Task MessageOrderingTest() // Arrange var version = 1; var partitionKey = Guid.NewGuid(); - var producer = this.provider.GetRequiredService>(); - var messages = this.fixture + var producer = _provider.GetRequiredService>(); + var messages = _fixture .Build() .Without(t => t.Version) .Do(t => t.Version = version++) @@ -122,8 +122,8 @@ public async Task MessageOrderingTest() public async Task PauseResumeHeartbeatTest() { // Arrange - var producer = this.provider.GetRequiredService>(); - var messages = this.fixture.CreateMany(5).ToList(); + var producer = _provider.GetRequiredService>(); + var messages = _fixture.CreateMany(5).ToList(); // Act await Task.WhenAll( @@ -146,7 +146,7 @@ await Task.WhenAll( public void AddConsumer_WithSharedConsumerConfig_ConsumersAreConfiguratedIndependently() { // Act - var consumers = this.provider.GetRequiredService().All; + var consumers = _provider.GetRequiredService().All; // Assert Assert.IsNotNull(consumers.FirstOrDefault(x => x.GroupId.Equals(Bootstrapper.AvroGroupId))); diff --git a/src/KafkaFlow.IntegrationTests/Core/Bootstrapper.cs b/tests/KafkaFlow.IntegrationTests/Core/Bootstrapper.cs similarity index 95% rename from src/KafkaFlow.IntegrationTests/Core/Bootstrapper.cs rename to tests/KafkaFlow.IntegrationTests/Core/Bootstrapper.cs index 23edee91c..c8de5991f 100644 --- a/src/KafkaFlow.IntegrationTests/Core/Bootstrapper.cs +++ b/tests/KafkaFlow.IntegrationTests/Core/Bootstrapper.cs @@ 
-1,23 +1,22 @@ +using System; +using System.IO; +using System.Threading; +using Confluent.Kafka; +using Confluent.SchemaRegistry; +using Confluent.SchemaRegistry.Serdes; +using global::Microsoft.Extensions.Configuration; +using global::Microsoft.Extensions.DependencyInjection; +using global::Microsoft.Extensions.Hosting; +using KafkaFlow.Compressor.Gzip; +using KafkaFlow.IntegrationTests.Core.Handlers; +using KafkaFlow.IntegrationTests.Core.Messages; +using KafkaFlow.IntegrationTests.Core.Middlewares; +using KafkaFlow.IntegrationTests.Core.Producers; +using KafkaFlow.Serializer; +using KafkaFlow.Serializer.SchemaRegistry; + namespace KafkaFlow.IntegrationTests.Core { - using System; - using System.IO; - using System.Threading; - using Confluent.Kafka; - using Confluent.SchemaRegistry; - using Confluent.SchemaRegistry.Serdes; - using global::Microsoft.Extensions.Configuration; - using global::Microsoft.Extensions.DependencyInjection; - using global::Microsoft.Extensions.Hosting; - using KafkaFlow.Compressor.Gzip; - using KafkaFlow.IntegrationTests.Core.Handlers; - using KafkaFlow.IntegrationTests.Core.Messages; - using KafkaFlow.IntegrationTests.Core.Middlewares; - using KafkaFlow.IntegrationTests.Core.Producers; - using KafkaFlow.Serializer; - using KafkaFlow.Serializer.SchemaRegistry; - using AutoOffsetReset = KafkaFlow.AutoOffsetReset; - internal static class Bootstrapper { public const string PauseResumeTopicName = "test-pause-resume"; @@ -41,9 +40,9 @@ internal static class Bootstrapper private const string ProtobufGzipTopicName2 = "test-protobuf-gzip-2"; private const string AvroTopicName = "test-avro"; - private static readonly Lazy LazyProvider = new(SetupProvider); + private static readonly Lazy s_lazyProvider = new(SetupProvider); - public static IServiceProvider GetServiceProvider() => LazyProvider.Value; + public static IServiceProvider GetServiceProvider() => s_lazyProvider.Value; private static IServiceProvider SetupProvider() { diff --git 
a/src/KafkaFlow.IntegrationTests/Core/Exceptions/ErrorExecutingMiddlewareException.cs b/tests/KafkaFlow.IntegrationTests/Core/Exceptions/ErrorExecutingMiddlewareException.cs similarity index 76% rename from src/KafkaFlow.IntegrationTests/Core/Exceptions/ErrorExecutingMiddlewareException.cs rename to tests/KafkaFlow.IntegrationTests/Core/Exceptions/ErrorExecutingMiddlewareException.cs index a582da1e5..90a717237 100644 --- a/src/KafkaFlow.IntegrationTests/Core/Exceptions/ErrorExecutingMiddlewareException.cs +++ b/tests/KafkaFlow.IntegrationTests/Core/Exceptions/ErrorExecutingMiddlewareException.cs @@ -1,7 +1,7 @@ -namespace KafkaFlow.IntegrationTests.Core.Exceptions -{ - using System; +using System; +namespace KafkaFlow.IntegrationTests.Core.Exceptions +{ public class ErrorExecutingMiddlewareException : Exception { public ErrorExecutingMiddlewareException(string middlewareName) diff --git a/src/KafkaFlow.IntegrationTests/Core/Exceptions/PartitionAssignmentException.cs b/tests/KafkaFlow.IntegrationTests/Core/Exceptions/PartitionAssignmentException.cs similarity index 78% rename from src/KafkaFlow.IntegrationTests/Core/Exceptions/PartitionAssignmentException.cs rename to tests/KafkaFlow.IntegrationTests/Core/Exceptions/PartitionAssignmentException.cs index 6f11c60da..61a0ce102 100644 --- a/src/KafkaFlow.IntegrationTests/Core/Exceptions/PartitionAssignmentException.cs +++ b/tests/KafkaFlow.IntegrationTests/Core/Exceptions/PartitionAssignmentException.cs @@ -1,7 +1,7 @@ -namespace KafkaFlow.IntegrationTests.Core.Exceptions -{ - using System; +using System; +namespace KafkaFlow.IntegrationTests.Core.Exceptions +{ public class PartitionAssignmentException : Exception { private const string ExceptionMessage = "Partition assignment hasn't occurred yet."; diff --git a/src/KafkaFlow.IntegrationTests/Core/Handlers/AvroMessageHandler.cs b/tests/KafkaFlow.IntegrationTests/Core/Handlers/AvroMessageHandler.cs similarity index 75% rename from 
src/KafkaFlow.IntegrationTests/Core/Handlers/AvroMessageHandler.cs rename to tests/KafkaFlow.IntegrationTests/Core/Handlers/AvroMessageHandler.cs index feb76b1f8..56bfe79c8 100644 --- a/src/KafkaFlow.IntegrationTests/Core/Handlers/AvroMessageHandler.cs +++ b/tests/KafkaFlow.IntegrationTests/Core/Handlers/AvroMessageHandler.cs @@ -1,9 +1,8 @@ +using System.Threading.Tasks; +using MessageTypes; + namespace KafkaFlow.IntegrationTests.Core.Handlers { - using System.Threading.Tasks; - using KafkaFlow.Middlewares.TypedHandler; - using MessageTypes; - internal class AvroMessageHandler : IMessageHandler { public Task Handle(IMessageContext context, LogMessages2 message) diff --git a/src/KafkaFlow.IntegrationTests/Core/Handlers/ConfluentJsonMessageHandler.cs b/tests/KafkaFlow.IntegrationTests/Core/Handlers/ConfluentJsonMessageHandler.cs similarity index 70% rename from src/KafkaFlow.IntegrationTests/Core/Handlers/ConfluentJsonMessageHandler.cs rename to tests/KafkaFlow.IntegrationTests/Core/Handlers/ConfluentJsonMessageHandler.cs index 22e12e1cc..11c8dbaba 100644 --- a/src/KafkaFlow.IntegrationTests/Core/Handlers/ConfluentJsonMessageHandler.cs +++ b/tests/KafkaFlow.IntegrationTests/Core/Handlers/ConfluentJsonMessageHandler.cs @@ -1,9 +1,8 @@ +using System.Threading.Tasks; +using KafkaFlow.IntegrationTests.Core.Messages; + namespace KafkaFlow.IntegrationTests.Core.Handlers { - using System.Threading.Tasks; - using KafkaFlow.IntegrationTests.Core.Messages; - using KafkaFlow.Middlewares.TypedHandler; - internal class ConfluentJsonMessageHandler : IMessageHandler { public Task Handle(IMessageContext context, TestMessage3 message) diff --git a/src/KafkaFlow.IntegrationTests/Core/Handlers/ConfluentProtobufMessageHandler.cs b/tests/KafkaFlow.IntegrationTests/Core/Handlers/ConfluentProtobufMessageHandler.cs similarity index 71% rename from src/KafkaFlow.IntegrationTests/Core/Handlers/ConfluentProtobufMessageHandler.cs rename to 
tests/KafkaFlow.IntegrationTests/Core/Handlers/ConfluentProtobufMessageHandler.cs index 697efca9f..c95e81db8 100644 --- a/src/KafkaFlow.IntegrationTests/Core/Handlers/ConfluentProtobufMessageHandler.cs +++ b/tests/KafkaFlow.IntegrationTests/Core/Handlers/ConfluentProtobufMessageHandler.cs @@ -1,9 +1,8 @@ +using System.Threading.Tasks; +using KafkaFlow.IntegrationTests.Core.Messages; + namespace KafkaFlow.IntegrationTests.Core.Handlers { - using System.Threading.Tasks; - using KafkaFlow.IntegrationTests.Core.Messages; - using KafkaFlow.Middlewares.TypedHandler; - internal class ConfluentProtobufMessageHandler : IMessageHandler { public Task Handle(IMessageContext context, TestProtoMessage message) diff --git a/src/KafkaFlow.IntegrationTests/Core/Handlers/MessageHandler.cs b/tests/KafkaFlow.IntegrationTests/Core/Handlers/MessageHandler.cs similarity index 70% rename from src/KafkaFlow.IntegrationTests/Core/Handlers/MessageHandler.cs rename to tests/KafkaFlow.IntegrationTests/Core/Handlers/MessageHandler.cs index f2d1bb484..e53cd4642 100644 --- a/src/KafkaFlow.IntegrationTests/Core/Handlers/MessageHandler.cs +++ b/tests/KafkaFlow.IntegrationTests/Core/Handlers/MessageHandler.cs @@ -1,9 +1,8 @@ +using System.Threading.Tasks; +using KafkaFlow.IntegrationTests.Core.Messages; + namespace KafkaFlow.IntegrationTests.Core.Handlers { - using System.Threading.Tasks; - using KafkaFlow.IntegrationTests.Core.Messages; - using KafkaFlow.Middlewares.TypedHandler; - internal class MessageHandler : IMessageHandler { public Task Handle(IMessageContext context, TestMessage1 message) diff --git a/src/KafkaFlow.IntegrationTests/Core/Handlers/MessageHandler1.cs b/tests/KafkaFlow.IntegrationTests/Core/Handlers/MessageHandler1.cs similarity index 70% rename from src/KafkaFlow.IntegrationTests/Core/Handlers/MessageHandler1.cs rename to tests/KafkaFlow.IntegrationTests/Core/Handlers/MessageHandler1.cs index 1b01e1ea0..fa550c173 100644 --- 
a/src/KafkaFlow.IntegrationTests/Core/Handlers/MessageHandler1.cs +++ b/tests/KafkaFlow.IntegrationTests/Core/Handlers/MessageHandler1.cs @@ -1,9 +1,8 @@ +using System.Threading.Tasks; +using KafkaFlow.IntegrationTests.Core.Messages; + namespace KafkaFlow.IntegrationTests.Core.Handlers { - using System.Threading.Tasks; - using KafkaFlow.IntegrationTests.Core.Messages; - using KafkaFlow.Middlewares.TypedHandler; - internal class MessageHandler1 : IMessageHandler { public Task Handle(IMessageContext context, TestMessage1 message) diff --git a/src/KafkaFlow.IntegrationTests/Core/Handlers/MessageHandler2.cs b/tests/KafkaFlow.IntegrationTests/Core/Handlers/MessageHandler2.cs similarity index 70% rename from src/KafkaFlow.IntegrationTests/Core/Handlers/MessageHandler2.cs rename to tests/KafkaFlow.IntegrationTests/Core/Handlers/MessageHandler2.cs index f47f02638..a257e65da 100644 --- a/src/KafkaFlow.IntegrationTests/Core/Handlers/MessageHandler2.cs +++ b/tests/KafkaFlow.IntegrationTests/Core/Handlers/MessageHandler2.cs @@ -1,10 +1,9 @@ +using System; +using System.Threading.Tasks; +using KafkaFlow.IntegrationTests.Core.Messages; + namespace KafkaFlow.IntegrationTests.Core.Handlers { - using System; - using System.Threading.Tasks; - using KafkaFlow.IntegrationTests.Core.Messages; - using KafkaFlow.Middlewares.TypedHandler; - internal class MessageHandler2 : IMessageHandler { public async Task Handle(IMessageContext context, TestMessage2 message) diff --git a/src/KafkaFlow.IntegrationTests/Core/Handlers/MessageStorage.cs b/tests/KafkaFlow.IntegrationTests/Core/Handlers/MessageStorage.cs similarity index 64% rename from src/KafkaFlow.IntegrationTests/Core/Handlers/MessageStorage.cs rename to tests/KafkaFlow.IntegrationTests/Core/Handlers/MessageStorage.cs index 9b117f8ed..5ab66f0ed 100644 --- a/src/KafkaFlow.IntegrationTests/Core/Handlers/MessageStorage.cs +++ b/tests/KafkaFlow.IntegrationTests/Core/Handlers/MessageStorage.cs @@ -1,49 +1,49 @@ +using System; +using 
System.Collections.Concurrent; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using global::Microsoft.VisualStudio.TestTools.UnitTesting; +using KafkaFlow.IntegrationTests.Core.Messages; +using MessageTypes; + namespace KafkaFlow.IntegrationTests.Core.Handlers { - using System; - using System.Collections.Concurrent; - using System.Collections.Generic; - using System.Linq; - using System.Threading.Tasks; - using global::Microsoft.VisualStudio.TestTools.UnitTesting; - using KafkaFlow.IntegrationTests.Core.Messages; - using MessageTypes; - internal static class MessageStorage { private const int TimeoutSec = 8; - private static readonly ConcurrentBag TestMessages = new(); - private static readonly ConcurrentBag AvroMessages = new(); - private static readonly ConcurrentBag ProtoMessages = new(); - private static readonly ConcurrentBag<(long, int)> Versions = new(); - private static readonly ConcurrentBag ByteMessages = new(); + private static readonly ConcurrentBag s_testMessages = new(); + private static readonly ConcurrentBag s_avroMessages = new(); + private static readonly ConcurrentBag s_protoMessages = new(); + private static readonly ConcurrentBag<(long, int)> s_versions = new(); + private static readonly ConcurrentBag s_byteMessages = new(); public static void Add(ITestMessage message) { - Versions.Add((DateTime.Now.Ticks, message.Version)); - TestMessages.Add(message); + s_versions.Add((DateTime.Now.Ticks, message.Version)); + s_testMessages.Add(message); } public static void Add(LogMessages2 message) { - AvroMessages.Add(message); + s_avroMessages.Add(message); } public static void Add(TestProtoMessage message) { - ProtoMessages.Add(message); + s_protoMessages.Add(message); } public static void Add(byte[] message) { - ByteMessages.Add(message); + s_byteMessages.Add(message); } public static async Task AssertCountMessageAsync(ITestMessage message, int count) { var start = DateTime.Now; - while (TestMessages.Count(x => 
x.Id == message.Id && x.Value == message.Value) != count) + while (s_testMessages.Count(x => x.Id == message.Id && x.Value == message.Value) != count) { if (DateTime.Now.Subtract(start).Seconds > TimeoutSec) { @@ -59,7 +59,7 @@ public static async Task AssertMessageAsync(ITestMessage message) { var start = DateTime.Now; - while (!TestMessages.Any(x => x.Id == message.Id && x.Value == message.Value)) + while (!s_testMessages.Any(x => x.Id == message.Id && x.Value == message.Value)) { if (DateTime.Now.Subtract(start).Seconds > TimeoutSec) { @@ -75,7 +75,7 @@ public static async Task AssertMessageAsync(LogMessages2 message) { var start = DateTime.Now; - while (!AvroMessages.Any(x => x.Message == message.Message && x.Schema.Fullname == message.Schema.Fullname)) + while (!s_avroMessages.Any(x => x.Message == message.Message && x.Schema.Fullname == message.Schema.Fullname)) { if (DateTime.Now.Subtract(start).Seconds > TimeoutSec) { @@ -91,7 +91,7 @@ public static async Task AssertMessageAsync(TestProtoMessage message) { var start = DateTime.Now; - while (!ProtoMessages.Any(x => x.Id == message.Id && x.Value == message.Value && x.Version == message.Version)) + while (!s_protoMessages.Any(x => x.Id == message.Id && x.Value == message.Value && x.Version == message.Version)) { if (DateTime.Now.Subtract(start).Seconds > TimeoutSec) { @@ -107,7 +107,7 @@ public static async Task AssertMessageAsync(byte[] message) { var start = DateTime.Now; - while (!ByteMessages.Any(x => x.SequenceEqual(message))) + while (!s_byteMessages.Any(x => x.SequenceEqual(message))) { if (DateTime.Now.Subtract(start).Seconds > TimeoutSec) { @@ -121,15 +121,15 @@ public static async Task AssertMessageAsync(byte[] message) public static List<(long ticks, int version)> GetVersions() { - return Versions.ToList(); + return s_versions.ToList(); } public static void Clear() { - Versions.Clear(); - TestMessages.Clear(); - ByteMessages.Clear(); - ProtoMessages.Clear(); + s_versions.Clear(); + 
s_testMessages.Clear(); + s_byteMessages.Clear(); + s_protoMessages.Clear(); } } } diff --git a/src/KafkaFlow.IntegrationTests/Core/Handlers/PauseResumeHandler.cs b/tests/KafkaFlow.IntegrationTests/Core/Handlers/PauseResumeHandler.cs similarity index 77% rename from src/KafkaFlow.IntegrationTests/Core/Handlers/PauseResumeHandler.cs rename to tests/KafkaFlow.IntegrationTests/Core/Handlers/PauseResumeHandler.cs index 05ccf1350..9d4a00419 100644 --- a/src/KafkaFlow.IntegrationTests/Core/Handlers/PauseResumeHandler.cs +++ b/tests/KafkaFlow.IntegrationTests/Core/Handlers/PauseResumeHandler.cs @@ -1,9 +1,8 @@ +using System.Threading.Tasks; +using KafkaFlow.IntegrationTests.Core.Messages; + namespace KafkaFlow.IntegrationTests.Core.Handlers { - using System.Threading.Tasks; - using KafkaFlow.IntegrationTests.Core.Messages; - using KafkaFlow.Middlewares.TypedHandler; - internal class PauseResumeHandler : IMessageHandler { public async Task Handle(IMessageContext context, PauseResumeMessage message) diff --git a/src/KafkaFlow.IntegrationTests/Core/Messages/ITestMessage.cs b/tests/KafkaFlow.IntegrationTests/Core/Messages/ITestMessage.cs similarity index 68% rename from src/KafkaFlow.IntegrationTests/Core/Messages/ITestMessage.cs rename to tests/KafkaFlow.IntegrationTests/Core/Messages/ITestMessage.cs index 36f762119..43b4371ae 100644 --- a/src/KafkaFlow.IntegrationTests/Core/Messages/ITestMessage.cs +++ b/tests/KafkaFlow.IntegrationTests/Core/Messages/ITestMessage.cs @@ -1,7 +1,7 @@ -namespace KafkaFlow.IntegrationTests.Core.Messages -{ - using System; +using System; +namespace KafkaFlow.IntegrationTests.Core.Messages +{ internal interface ITestMessage { Guid Id { get; set; } diff --git a/src/KafkaFlow.IntegrationTests/Core/Messages/LogMessages2.cs b/tests/KafkaFlow.IntegrationTests/Core/Messages/LogMessages2.cs similarity index 94% rename from src/KafkaFlow.IntegrationTests/Core/Messages/LogMessages2.cs rename to 
tests/KafkaFlow.IntegrationTests/Core/Messages/LogMessages2.cs index 6e393f689..baba54966 100644 --- a/src/KafkaFlow.IntegrationTests/Core/Messages/LogMessages2.cs +++ b/tests/KafkaFlow.IntegrationTests/Core/Messages/LogMessages2.cs @@ -20,7 +20,7 @@ internal partial class LogMessages2 : ISpecificRecord { public static Schema _SCHEMA = Avro.Schema.Parse("{\"type\":\"record\",\"name\":\"LogMessages2\",\"namespace\":\"MessageTypes\",\"fields\":[{\"nam" + "e\":\"Message\",\"type\":\"string\"}]}"); - private string _Message; + private string _message; public virtual Schema Schema { get @@ -32,11 +32,11 @@ public string Message { get { - return this._Message; + return _message; } set { - this._Message = value; + _message = value; } } public virtual object Get(int fieldPos) diff --git a/src/KafkaFlow.IntegrationTests/Core/Messages/PauseResumeMessage.cs b/tests/KafkaFlow.IntegrationTests/Core/Messages/PauseResumeMessage.cs similarity index 73% rename from src/KafkaFlow.IntegrationTests/Core/Messages/PauseResumeMessage.cs rename to tests/KafkaFlow.IntegrationTests/Core/Messages/PauseResumeMessage.cs index 540116376..e168e937e 100644 --- a/src/KafkaFlow.IntegrationTests/Core/Messages/PauseResumeMessage.cs +++ b/tests/KafkaFlow.IntegrationTests/Core/Messages/PauseResumeMessage.cs @@ -1,8 +1,8 @@ -namespace KafkaFlow.IntegrationTests.Core.Messages -{ - using System; - using System.Runtime.Serialization; +using System; +using System.Runtime.Serialization; +namespace KafkaFlow.IntegrationTests.Core.Messages +{ [DataContract] internal class PauseResumeMessage : ITestMessage { diff --git a/src/KafkaFlow.IntegrationTests/Core/Messages/TestMessage1.cs b/tests/KafkaFlow.IntegrationTests/Core/Messages/TestMessage1.cs similarity index 72% rename from src/KafkaFlow.IntegrationTests/Core/Messages/TestMessage1.cs rename to tests/KafkaFlow.IntegrationTests/Core/Messages/TestMessage1.cs index 044f99b65..aee78844b 100644 --- a/src/KafkaFlow.IntegrationTests/Core/Messages/TestMessage1.cs +++ 
b/tests/KafkaFlow.IntegrationTests/Core/Messages/TestMessage1.cs @@ -1,8 +1,8 @@ -namespace KafkaFlow.IntegrationTests.Core.Messages -{ - using System; - using System.Runtime.Serialization; +using System; +using System.Runtime.Serialization; +namespace KafkaFlow.IntegrationTests.Core.Messages +{ [DataContract] internal class TestMessage1 : ITestMessage { diff --git a/src/KafkaFlow.IntegrationTests/Core/Messages/TestMessage2.cs b/tests/KafkaFlow.IntegrationTests/Core/Messages/TestMessage2.cs similarity index 72% rename from src/KafkaFlow.IntegrationTests/Core/Messages/TestMessage2.cs rename to tests/KafkaFlow.IntegrationTests/Core/Messages/TestMessage2.cs index d1a4c6183..431f5f0fd 100644 --- a/src/KafkaFlow.IntegrationTests/Core/Messages/TestMessage2.cs +++ b/tests/KafkaFlow.IntegrationTests/Core/Messages/TestMessage2.cs @@ -1,8 +1,8 @@ -namespace KafkaFlow.IntegrationTests.Core.Messages -{ - using System; - using System.Runtime.Serialization; +using System; +using System.Runtime.Serialization; +namespace KafkaFlow.IntegrationTests.Core.Messages +{ [DataContract] internal class TestMessage2 : ITestMessage { diff --git a/src/KafkaFlow.IntegrationTests/Core/Messages/TestMessage3.cs b/tests/KafkaFlow.IntegrationTests/Core/Messages/TestMessage3.cs similarity index 72% rename from src/KafkaFlow.IntegrationTests/Core/Messages/TestMessage3.cs rename to tests/KafkaFlow.IntegrationTests/Core/Messages/TestMessage3.cs index e54bb4262..f6beafd59 100644 --- a/src/KafkaFlow.IntegrationTests/Core/Messages/TestMessage3.cs +++ b/tests/KafkaFlow.IntegrationTests/Core/Messages/TestMessage3.cs @@ -1,7 +1,7 @@ -namespace KafkaFlow.IntegrationTests.Core.Messages -{ - using System; +using System; +namespace KafkaFlow.IntegrationTests.Core.Messages +{ internal class TestMessage3 : ITestMessage { public Guid Id { get; set; } diff --git a/src/KafkaFlow.IntegrationTests/Core/Messages/TestProtoMessage.cs b/tests/KafkaFlow.IntegrationTests/Core/Messages/TestProtoMessage.cs similarity index 
100% rename from src/KafkaFlow.IntegrationTests/Core/Messages/TestProtoMessage.cs rename to tests/KafkaFlow.IntegrationTests/Core/Messages/TestProtoMessage.cs diff --git a/src/KafkaFlow.IntegrationTests/Core/Messages/logmessages2.avsc b/tests/KafkaFlow.IntegrationTests/Core/Messages/logmessages2.avsc similarity index 100% rename from src/KafkaFlow.IntegrationTests/Core/Messages/logmessages2.avsc rename to tests/KafkaFlow.IntegrationTests/Core/Messages/logmessages2.avsc diff --git a/src/KafkaFlow.IntegrationTests/Core/Middlewares/GzipMiddleware.cs b/tests/KafkaFlow.IntegrationTests/Core/Middlewares/GzipMiddleware.cs similarity index 63% rename from src/KafkaFlow.IntegrationTests/Core/Middlewares/GzipMiddleware.cs rename to tests/KafkaFlow.IntegrationTests/Core/Middlewares/GzipMiddleware.cs index 4a73f4f4d..bd341128e 100644 --- a/src/KafkaFlow.IntegrationTests/Core/Middlewares/GzipMiddleware.cs +++ b/tests/KafkaFlow.IntegrationTests/Core/Middlewares/GzipMiddleware.cs @@ -1,13 +1,13 @@ +using System.Threading.Tasks; +using KafkaFlow.IntegrationTests.Core.Handlers; + namespace KafkaFlow.IntegrationTests.Core.Middlewares { - using System.Threading.Tasks; - using KafkaFlow.IntegrationTests.Core.Handlers; - internal class GzipMiddleware : IMessageMiddleware { public async Task Invoke(IMessageContext context, MiddlewareDelegate next) { - MessageStorage.Add((byte[]) context.Message.Value); + MessageStorage.Add((byte[])context.Message.Value); await next(context); } } diff --git a/src/KafkaFlow.IntegrationTests/Core/Producers/AvroProducer.cs b/tests/KafkaFlow.IntegrationTests/Core/Producers/AvroProducer.cs similarity index 100% rename from src/KafkaFlow.IntegrationTests/Core/Producers/AvroProducer.cs rename to tests/KafkaFlow.IntegrationTests/Core/Producers/AvroProducer.cs diff --git a/src/KafkaFlow.IntegrationTests/Core/Producers/ConfluentJsonProducer.cs b/tests/KafkaFlow.IntegrationTests/Core/Producers/ConfluentJsonProducer.cs similarity index 100% rename from 
src/KafkaFlow.IntegrationTests/Core/Producers/ConfluentJsonProducer.cs rename to tests/KafkaFlow.IntegrationTests/Core/Producers/ConfluentJsonProducer.cs diff --git a/src/KafkaFlow.IntegrationTests/Core/Producers/ConfluentProtobufProducer.cs b/tests/KafkaFlow.IntegrationTests/Core/Producers/ConfluentProtobufProducer.cs similarity index 100% rename from src/KafkaFlow.IntegrationTests/Core/Producers/ConfluentProtobufProducer.cs rename to tests/KafkaFlow.IntegrationTests/Core/Producers/ConfluentProtobufProducer.cs diff --git a/src/KafkaFlow.IntegrationTests/Core/Producers/GzipProducer.cs b/tests/KafkaFlow.IntegrationTests/Core/Producers/GzipProducer.cs similarity index 100% rename from src/KafkaFlow.IntegrationTests/Core/Producers/GzipProducer.cs rename to tests/KafkaFlow.IntegrationTests/Core/Producers/GzipProducer.cs diff --git a/src/KafkaFlow.IntegrationTests/Core/Producers/JsonGzipProducer.cs b/tests/KafkaFlow.IntegrationTests/Core/Producers/JsonGzipProducer.cs similarity index 100% rename from src/KafkaFlow.IntegrationTests/Core/Producers/JsonGzipProducer.cs rename to tests/KafkaFlow.IntegrationTests/Core/Producers/JsonGzipProducer.cs diff --git a/src/KafkaFlow.IntegrationTests/Core/Producers/JsonProducer.cs b/tests/KafkaFlow.IntegrationTests/Core/Producers/JsonProducer.cs similarity index 100% rename from src/KafkaFlow.IntegrationTests/Core/Producers/JsonProducer.cs rename to tests/KafkaFlow.IntegrationTests/Core/Producers/JsonProducer.cs diff --git a/src/KafkaFlow.IntegrationTests/Core/Producers/JsonProducer2.cs b/tests/KafkaFlow.IntegrationTests/Core/Producers/JsonProducer2.cs similarity index 100% rename from src/KafkaFlow.IntegrationTests/Core/Producers/JsonProducer2.cs rename to tests/KafkaFlow.IntegrationTests/Core/Producers/JsonProducer2.cs diff --git a/src/KafkaFlow.IntegrationTests/Core/Producers/ProtobufGzipProducer.cs b/tests/KafkaFlow.IntegrationTests/Core/Producers/ProtobufGzipProducer.cs similarity index 100% rename from 
src/KafkaFlow.IntegrationTests/Core/Producers/ProtobufGzipProducer.cs rename to tests/KafkaFlow.IntegrationTests/Core/Producers/ProtobufGzipProducer.cs diff --git a/src/KafkaFlow.IntegrationTests/Core/Producers/ProtobufGzipProducer2.cs b/tests/KafkaFlow.IntegrationTests/Core/Producers/ProtobufGzipProducer2.cs similarity index 100% rename from src/KafkaFlow.IntegrationTests/Core/Producers/ProtobufGzipProducer2.cs rename to tests/KafkaFlow.IntegrationTests/Core/Producers/ProtobufGzipProducer2.cs diff --git a/src/KafkaFlow.IntegrationTests/Core/Producers/ProtobufProducer.cs b/tests/KafkaFlow.IntegrationTests/Core/Producers/ProtobufProducer.cs similarity index 100% rename from src/KafkaFlow.IntegrationTests/Core/Producers/ProtobufProducer.cs rename to tests/KafkaFlow.IntegrationTests/Core/Producers/ProtobufProducer.cs diff --git a/src/KafkaFlow.IntegrationTests/Core/TraceLogHandler.cs b/tests/KafkaFlow.IntegrationTests/Core/TraceLogHandler.cs similarity index 94% rename from src/KafkaFlow.IntegrationTests/Core/TraceLogHandler.cs rename to tests/KafkaFlow.IntegrationTests/Core/TraceLogHandler.cs index 86f07fd3c..9b4eabca9 100644 --- a/src/KafkaFlow.IntegrationTests/Core/TraceLogHandler.cs +++ b/tests/KafkaFlow.IntegrationTests/Core/TraceLogHandler.cs @@ -1,9 +1,9 @@ +using System; +using System.Diagnostics; +using System.Text.Json; + namespace KafkaFlow.IntegrationTests.Core { - using System; - using System.Diagnostics; - using System.Text.Json; - internal class TraceLogHandler : ILogHandler { public void Error(string message, Exception ex, object data) diff --git a/src/KafkaFlow.IntegrationTests/GlobalEventsTest.cs b/tests/KafkaFlow.IntegrationTests/GlobalEventsTest.cs similarity index 87% rename from src/KafkaFlow.IntegrationTests/GlobalEventsTest.cs rename to tests/KafkaFlow.IntegrationTests/GlobalEventsTest.cs index c1452a340..7b5025d08 100644 --- a/src/KafkaFlow.IntegrationTests/GlobalEventsTest.cs +++ b/tests/KafkaFlow.IntegrationTests/GlobalEventsTest.cs @@ -1,36 
+1,36 @@ -namespace KafkaFlow.IntegrationTests +using System; +using System.IO; +using System.Linq; +using System.Threading.Tasks; +using AutoFixture; +using Confluent.Kafka; +using KafkaFlow.Configuration; +using KafkaFlow.IntegrationTests.Core; +using KafkaFlow.IntegrationTests.Core.Exceptions; +using KafkaFlow.IntegrationTests.Core.Handlers; +using KafkaFlow.IntegrationTests.Core.Messages; +using KafkaFlow.IntegrationTests.Core.Middlewares; +using KafkaFlow.IntegrationTests.Core.Producers; +using KafkaFlow.Serializer; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Polly; + +namespace KafkaFlow.IntegrationTests { - using System; - using System.IO; - using System.Linq; - using System.Threading.Tasks; - using AutoFixture; - using Confluent.Kafka; - using KafkaFlow.Configuration; - using KafkaFlow.IntegrationTests.Core; - using KafkaFlow.IntegrationTests.Core.Exceptions; - using KafkaFlow.IntegrationTests.Core.Handlers; - using KafkaFlow.IntegrationTests.Core.Messages; - using KafkaFlow.IntegrationTests.Core.Middlewares; - using KafkaFlow.IntegrationTests.Core.Producers; - using KafkaFlow.Serializer; - using Microsoft.Extensions.Configuration; - using Microsoft.Extensions.DependencyInjection; - using Microsoft.Extensions.Hosting; - using Microsoft.VisualStudio.TestTools.UnitTesting; - using Polly; - [TestClass] public class GlobalEventsTest { - private readonly Fixture fixture = new(); - private string topic; - private bool isPartitionAssigned; + private readonly Fixture _fixture = new(); + private string _topic; + private bool _isPartitionAssigned; [TestInitialize] public void Setup() { - this.topic = $"GlobalEventsTestTopic_{Guid.NewGuid()}"; + _topic = $"GlobalEventsTestTopic_{Guid.NewGuid()}"; MessageStorage.Clear(); } @@ -69,7 +69,7 @@ void ConfigureGlobalEvents(IGlobalEvents observers) MessageStorage.Clear(); var 
producer = provider.GetRequiredService>(); - var message = this.fixture.Create(); + var message = _fixture.Create(); // Act await producer.ProduceAsync(null, message); @@ -105,7 +105,7 @@ void ConfigureGlobalEvents(IGlobalEvents observers) MessageStorage.Clear(); var producer = provider.GetRequiredService>(); - var message = this.fixture.Create(); + var message = _fixture.Create(); // Act producer.Produce(message.Id.ToString(), message); @@ -150,7 +150,7 @@ void ConfigureGlobalEvents(IGlobalEvents observers) MessageStorage.Clear(); var producer = provider.GetRequiredService>(); - var message = this.fixture.Create(); + var message = _fixture.Create(); // Act await producer.ProduceAsync(null, message); @@ -191,7 +191,7 @@ void ConfigureGlobalEvents(IGlobalEvents observers) MessageStorage.Clear(); var producer = provider.GetRequiredService>(); - var message = this.fixture.Create(); + var message = _fixture.Create(); var errorOccured = false; // Act @@ -215,8 +215,8 @@ private void ConfigureConsumer(IConsumerConfigurationBuilder consumerConfigur where T : class, IMessageMiddleware { consumerConfigurationBuilder - .Topic(this.topic) - .WithGroupId(this.topic) + .Topic(_topic) + .WithGroupId(_topic) .WithBufferSize(100) .WithWorkersCount(10) .WithAutoOffsetReset(KafkaFlow.AutoOffsetReset.Earliest) @@ -226,7 +226,7 @@ private void ConfigureConsumer(IConsumerConfigurationBuilder consumerConfigur .Add()) .WithPartitionsAssignedHandler((_, _) => { - this.isPartitionAssigned = true; + _isPartitionAssigned = true; }); } @@ -234,7 +234,7 @@ private void ConfigureProducer(IProducerConfigurationBuilder producerConfigur where T : class, ISerializer { producerConfigurationBuilder - .DefaultTopic(this.topic) + .DefaultTopic(_topic) .AddMiddlewares(middlewares => middlewares.AddSerializer()); } @@ -243,7 +243,7 @@ private async Task GetServiceProviderAsync( Action consumerConfiguration, Action producerConfiguration) { - this.isPartitionAssigned = false; + _isPartitionAssigned = 
false; var builder = Host .CreateDefaultBuilder() @@ -265,7 +265,7 @@ private async Task GetServiceProviderAsync( .AddCluster( cluster => cluster .WithBrokers(context.Configuration.GetValue("Kafka:Brokers").Split(';')) - .CreateTopicIfNotExists(this.topic, 1, 1) + .CreateTopicIfNotExists(_topic, 1, 1) .AddProducer(producerConfiguration) .AddConsumer(consumerConfiguration)) .SubscribeGlobalEvents(configureGlobalEvents))) @@ -290,7 +290,7 @@ private async Task WaitForPartitionAssignmentAsync() await Policy .HandleResult(isAvailable => !isAvailable) .WaitAndRetryAsync(Enumerable.Range(0, 6).Select(i => TimeSpan.FromSeconds(Math.Pow(i, 2)))) - .ExecuteAsync(() => Task.FromResult(this.isPartitionAssigned)); + .ExecuteAsync(() => Task.FromResult(_isPartitionAssigned)); } private class TriggerErrorMessageMiddleware : IMessageMiddleware @@ -313,7 +313,7 @@ public Task SerializeAsync(object _, Stream output, ISerializerContext context) public Task DeserializeAsync(Stream _, Type type, ISerializerContext context) { var error = new Error(ErrorCode.BrokerNotAvailable); - throw new ProduceException(error,null); + throw new ProduceException(error, null); } } } diff --git a/src/KafkaFlow.IntegrationTests/KafkaFlow.IntegrationTests.csproj b/tests/KafkaFlow.IntegrationTests/KafkaFlow.IntegrationTests.csproj similarity index 59% rename from src/KafkaFlow.IntegrationTests/KafkaFlow.IntegrationTests.csproj rename to tests/KafkaFlow.IntegrationTests/KafkaFlow.IntegrationTests.csproj index 63e7a9705..e1f24e26c 100644 --- a/src/KafkaFlow.IntegrationTests/KafkaFlow.IntegrationTests.csproj +++ b/tests/KafkaFlow.IntegrationTests/KafkaFlow.IntegrationTests.csproj @@ -35,16 +35,16 @@ - - - - - - - - - - + + + + + + + + + + diff --git a/src/KafkaFlow.IntegrationTests/OpenTelemetryTests.cs b/tests/KafkaFlow.IntegrationTests/OpenTelemetryTests.cs similarity index 80% rename from src/KafkaFlow.IntegrationTests/OpenTelemetryTests.cs rename to tests/KafkaFlow.IntegrationTests/OpenTelemetryTests.cs 
index 9a2bac810..3c05a6c37 100644 --- a/src/KafkaFlow.IntegrationTests/OpenTelemetryTests.cs +++ b/tests/KafkaFlow.IntegrationTests/OpenTelemetryTests.cs @@ -1,40 +1,39 @@ -namespace KafkaFlow.IntegrationTests +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Threading.Tasks; +using AutoFixture; +using global::OpenTelemetry; +using global::OpenTelemetry.Trace; +using KafkaFlow.Compressor.Gzip; +using KafkaFlow.Configuration; +using KafkaFlow.IntegrationTests.Core; +using KafkaFlow.IntegrationTests.Core.Handlers; +using KafkaFlow.IntegrationTests.Core.Middlewares; +using KafkaFlow.IntegrationTests.Core.Producers; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Polly; + +namespace KafkaFlow.IntegrationTests { - using System; - using System.Collections.Generic; - using System.Diagnostics; - using System.IO; - using System.Linq; - using System.Threading.Tasks; - using AutoFixture; - using global::OpenTelemetry; - using global::OpenTelemetry.Trace; - using KafkaFlow.Compressor; - using KafkaFlow.Compressor.Gzip; - using KafkaFlow.Configuration; - using KafkaFlow.IntegrationTests.Core; - using KafkaFlow.IntegrationTests.Core.Handlers; - using KafkaFlow.IntegrationTests.Core.Middlewares; - using KafkaFlow.IntegrationTests.Core.Producers; - using Microsoft.Extensions.Configuration; - using Microsoft.Extensions.DependencyInjection; - using Microsoft.Extensions.Hosting; - using Microsoft.VisualStudio.TestTools.UnitTesting; - using Polly; - [TestClass] public class OpenTelemetryTests { - private readonly Fixture fixture = new(); + private readonly Fixture _fixture = new(); - private List exportedItems; + private List _exportedItems; - private bool isPartitionAssigned; + private bool _isPartitionAssigned; [TestInitialize] public void Setup() { - 
this.exportedItems = new List(); + _exportedItems = new List(); } [TestMethod] @@ -46,11 +45,11 @@ public async Task AddOpenTelemetry_ProducingAndConsumingOneMessage_TraceAndSpans using var tracerProvider = Sdk.CreateTracerProviderBuilder() .AddSource("KafkaFlow.OpenTelemetry") - .AddInMemoryExporter(this.exportedItems) + .AddInMemoryExporter(_exportedItems) .Build(); var producer = provider.GetRequiredService>(); - var message = this.fixture.Create(); + var message = _fixture.Create(); // Act await producer.ProduceAsync(null, message); @@ -58,7 +57,7 @@ public async Task AddOpenTelemetry_ProducingAndConsumingOneMessage_TraceAndSpans // Assert var (producerSpan, consumerSpan) = await this.WaitForSpansAsync(); - Assert.IsNotNull(this.exportedItems); + Assert.IsNotNull(_exportedItems); Assert.IsNull(producerSpan.ParentId); Assert.AreEqual(producerSpan.TraceId, consumerSpan.TraceId); Assert.AreEqual(consumerSpan.ParentSpanId, producerSpan.SpanId); @@ -80,11 +79,11 @@ public async Task AddOpenTelemetry_ProducingAndConsumingOneMessage_BaggageIsProp using var tracerProvider = Sdk.CreateTracerProviderBuilder() .AddSource("KafkaFlow.OpenTelemetry") .AddSource(kafkaFlowTestString) - .AddInMemoryExporter(this.exportedItems) + .AddInMemoryExporter(_exportedItems) .Build(); var producer = provider.GetRequiredService>(); - var message = this.fixture.Create(); + var message = _fixture.Create(); // Act ActivitySource activitySource = new(kafkaFlowTestString); @@ -99,7 +98,7 @@ public async Task AddOpenTelemetry_ProducingAndConsumingOneMessage_BaggageIsProp // Assert var (producerSpan, consumerSpan) = await this.WaitForSpansAsync(); - Assert.IsNotNull(this.exportedItems); + Assert.IsNotNull(_exportedItems); Assert.AreEqual(producerSpan.TraceId, consumerSpan.TraceId); Assert.AreEqual(consumerSpan.ParentSpanId, producerSpan.SpanId); Assert.AreEqual(producerSpan.GetBaggageItem(baggageName1), baggageValue1); @@ -112,7 +111,7 @@ private async Task GetServiceProvider() { var topicName = 
$"OpenTelemetryTestTopic_{Guid.NewGuid()}"; - this.isPartitionAssigned = false; + _isPartitionAssigned = false; var builder = Host .CreateDefaultBuilder() @@ -154,7 +153,7 @@ private async Task GetServiceProvider() .Add()) .WithPartitionsAssignedHandler((_, _) => { - this.isPartitionAssigned = true; + _isPartitionAssigned = true; }))) .AddOpenTelemetryInstrumentation())) .UseDefaultServiceProvider( @@ -178,7 +177,7 @@ private async Task WaitForPartitionAssignmentAsync() await Policy .HandleResult(isAvailable => !isAvailable) .WaitAndRetryAsync(Enumerable.Range(0, 6).Select(i => TimeSpan.FromSeconds(Math.Pow(i, 2)))) - .ExecuteAsync(() => Task.FromResult(this.isPartitionAssigned)); + .ExecuteAsync(() => Task.FromResult(_isPartitionAssigned)); } private async Task<(Activity producerSpan, Activity consumerSpan)> WaitForSpansAsync() @@ -190,8 +189,8 @@ await Policy .WaitAndRetryAsync(Enumerable.Range(0, 6).Select(i => TimeSpan.FromSeconds(Math.Pow(i, 2)))) .ExecuteAsync(() => { - producerSpan = this.exportedItems.Find(x => x.Kind == ActivityKind.Producer); - consumerSpan = this.exportedItems.Find(x => x.Kind == ActivityKind.Consumer); + producerSpan = _exportedItems.Find(x => x.Kind == ActivityKind.Producer); + consumerSpan = _exportedItems.Find(x => x.Kind == ActivityKind.Consumer); return Task.FromResult(producerSpan != null && consumerSpan != null); }); diff --git a/tests/KafkaFlow.IntegrationTests/ProducerTest.cs b/tests/KafkaFlow.IntegrationTests/ProducerTest.cs new file mode 100644 index 000000000..c32e01213 --- /dev/null +++ b/tests/KafkaFlow.IntegrationTests/ProducerTest.cs @@ -0,0 +1,40 @@ +using System; +using System.Threading.Tasks; +using AutoFixture; +using KafkaFlow.IntegrationTests.Core; +using KafkaFlow.IntegrationTests.Core.Handlers; +using KafkaFlow.IntegrationTests.Core.Producers; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.VisualStudio.TestTools.UnitTesting; + +namespace KafkaFlow.IntegrationTests +{ + [TestClass] + public 
class ProducerTest + { + private readonly Fixture _fixture = new(); + + private IServiceProvider _provider; + + [TestInitialize] + public void Setup() + { + _provider = Bootstrapper.GetServiceProvider(); + MessageStorage.Clear(); + } + + [TestMethod] + public async Task ProduceNullKeyTest() + { + // Arrange + var producer = _provider.GetRequiredService>(); + var message = _fixture.Create(); + + // Act + await producer.ProduceAsync(null, message); + + // Assert + await MessageStorage.AssertMessageAsync(message); + } + } +} diff --git a/src/KafkaFlow.IntegrationTests/SerializationTest.cs b/tests/KafkaFlow.IntegrationTests/SerializationTest.cs similarity index 59% rename from src/KafkaFlow.IntegrationTests/SerializationTest.cs rename to tests/KafkaFlow.IntegrationTests/SerializationTest.cs index 39765da95..bd43d478f 100644 --- a/src/KafkaFlow.IntegrationTests/SerializationTest.cs +++ b/tests/KafkaFlow.IntegrationTests/SerializationTest.cs @@ -1,28 +1,28 @@ +using System; +using System.Linq; +using System.Threading.Tasks; +using AutoFixture; +using global::Microsoft.Extensions.DependencyInjection; +using global::Microsoft.VisualStudio.TestTools.UnitTesting; +using KafkaFlow.IntegrationTests.Core; +using KafkaFlow.IntegrationTests.Core.Handlers; +using KafkaFlow.IntegrationTests.Core.Messages; +using KafkaFlow.IntegrationTests.Core.Producers; +using MessageTypes; + namespace KafkaFlow.IntegrationTests { - using System; - using System.Linq; - using System.Threading.Tasks; - using AutoFixture; - using global::Microsoft.Extensions.DependencyInjection; - using global::Microsoft.VisualStudio.TestTools.UnitTesting; - using KafkaFlow.IntegrationTests.Core; - using KafkaFlow.IntegrationTests.Core.Handlers; - using KafkaFlow.IntegrationTests.Core.Messages; - using KafkaFlow.IntegrationTests.Core.Producers; - using MessageTypes; - [TestClass] public class SerializationTest { - private readonly Fixture fixture = new(); + private readonly Fixture _fixture = new(); - private 
IServiceProvider provider; + private IServiceProvider _provider; [TestInitialize] public void Setup() { - this.provider = Bootstrapper.GetServiceProvider(); + _provider = Bootstrapper.GetServiceProvider(); MessageStorage.Clear(); } @@ -30,8 +30,8 @@ public void Setup() public async Task JsonMessageTest() { // Arrange - var producer = this.provider.GetRequiredService>(); - var messages = this.fixture.CreateMany(10).ToList(); + var producer = _provider.GetRequiredService>(); + var messages = _fixture.CreateMany(10).ToList(); // Act await Task.WhenAll(messages.Select(m => producer.ProduceAsync(m.Id.ToString(), m))); @@ -47,8 +47,8 @@ public async Task JsonMessageTest() public async Task ProtobufMessageTest() { // Arrange - var producer = this.provider.GetRequiredService>(); - var messages = this.fixture.CreateMany(10).ToList(); + var producer = _provider.GetRequiredService>(); + var messages = _fixture.CreateMany(10).ToList(); // Act await Task.WhenAll(messages.Select(m => producer.ProduceAsync(m.Id.ToString(), m))); @@ -64,8 +64,8 @@ public async Task ProtobufMessageTest() public async Task AvroMessageTest() { // Arrange - var producer = this.provider.GetRequiredService>(); - var messages = this.fixture.CreateMany(10).ToList(); + var producer = _provider.GetRequiredService>(); + var messages = _fixture.CreateMany(10).ToList(); // Act await Task.WhenAll(messages.Select(m => producer.ProduceAsync(Guid.NewGuid().ToString(), m))); @@ -81,8 +81,8 @@ public async Task AvroMessageTest() public async Task ProtobufSchemaRegistryMessageTest() { // Arrange - var producer = this.provider.GetRequiredService>(); - var messages = this.fixture.CreateMany(10).ToList(); + var producer = _provider.GetRequiredService>(); + var messages = _fixture.CreateMany(10).ToList(); // Act await Task.WhenAll(messages.Select(m => producer.ProduceAsync(m.Id, m))); @@ -98,8 +98,8 @@ public async Task ProtobufSchemaRegistryMessageTest() public async Task JsonSchemaRegistryMessageTest() { // Arrange - 
var producer = this.provider.GetRequiredService>(); - var messages = this.fixture.CreateMany(10).ToList(); + var producer = _provider.GetRequiredService>(); + var messages = _fixture.CreateMany(10).ToList(); // Act await Task.WhenAll(messages.Select(m => producer.ProduceAsync(m.Id.ToString(), m))); diff --git a/src/KafkaFlow.IntegrationTests/conf/appsettings.json b/tests/KafkaFlow.IntegrationTests/conf/appsettings.json similarity index 100% rename from src/KafkaFlow.IntegrationTests/conf/appsettings.json rename to tests/KafkaFlow.IntegrationTests/conf/appsettings.json diff --git a/src/KafkaFlow.UnitTests/Admin.WebApi/Controllers/ConsumersControllerTests.cs b/tests/KafkaFlow.UnitTests/Admin.WebApi/Controllers/ConsumersControllerTests.cs similarity index 67% rename from src/KafkaFlow.UnitTests/Admin.WebApi/Controllers/ConsumersControllerTests.cs rename to tests/KafkaFlow.UnitTests/Admin.WebApi/Controllers/ConsumersControllerTests.cs index 25d018991..4b4bcf972 100644 --- a/src/KafkaFlow.UnitTests/Admin.WebApi/Controllers/ConsumersControllerTests.cs +++ b/tests/KafkaFlow.UnitTests/Admin.WebApi/Controllers/ConsumersControllerTests.cs @@ -1,33 +1,33 @@ -namespace KafkaFlow.UnitTests.Admin.WebApi.Controllers +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using AutoFixture; +using FluentAssertions; +using KafkaFlow.Admin; +using KafkaFlow.Admin.WebApi.Contracts; +using KafkaFlow.Admin.WebApi.Controllers; +using KafkaFlow.Consumers; +using Microsoft.AspNetCore.Mvc; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Moq; + +namespace KafkaFlow.UnitTests.Admin.WebApi.Controllers { - using System; - using System.Collections.Generic; - using System.Linq; - using System.Threading.Tasks; - using AutoFixture; - using FluentAssertions; - using KafkaFlow.Admin; - using KafkaFlow.Admin.WebApi.Contracts; - using KafkaFlow.Admin.WebApi.Controllers; - using KafkaFlow.Consumers; - using Microsoft.AspNetCore.Mvc; - using 
Microsoft.VisualStudio.TestTools.UnitTesting; - using Moq; - [TestClass] public class ConsumersControllerTests { - private readonly Fixture fixture = new(); - private ConsumersController target; - private Mock mockConsumerAccessor; - private Mock mockConsumerAdmin; + private readonly Fixture _fixture = new(); + private ConsumersController _target; + private Mock _mockConsumerAccessor; + private Mock _mockConsumerAdmin; [TestInitialize] public void TestSetup() { - this.mockConsumerAccessor = this.fixture.Freeze>(); - this.mockConsumerAdmin = this.fixture.Freeze>(); - this.target = new ConsumersController(this.mockConsumerAccessor.Object, this.mockConsumerAdmin.Object); + _mockConsumerAccessor = _fixture.Freeze>(); + _mockConsumerAdmin = _fixture.Freeze>(); + _target = new ConsumersController(_mockConsumerAccessor.Object, _mockConsumerAdmin.Object); } [TestMethod] @@ -42,10 +42,10 @@ public void GetConsumersByGroupId_ValidGroupId_ReturnsOkResultWithConsumersRespo Mock.Of(c => c.GroupId == "group1" && c.ConsumerName == "consumer2"), }; - this.mockConsumerAccessor.Setup(x => x.All).Returns(consumers); + _mockConsumerAccessor.Setup(x => x.All).Returns(consumers); // Act - var result = this.target.GetConsumersByGroupId(groupId) as ObjectResult; + var result = _target.GetConsumersByGroupId(groupId) as ObjectResult; // Assert result.Should().NotBeNull(); @@ -69,10 +69,10 @@ public void GetConsumerByGroupIdName_ValidGroupIdAndExistingConsumer_ReturnsOkRe Mock.Of(c => c.GroupId == "group1" && c.ConsumerName == "consumer2"), }; - this.mockConsumerAccessor.Setup(x => x.All).Returns(consumers.AsQueryable()); + _mockConsumerAccessor.Setup(x => x.All).Returns(consumers.AsQueryable()); // Act - var result = this.target.GetConsumerByGroupIdName(groupId, consumerName) as ObjectResult; + var result = _target.GetConsumerByGroupIdName(groupId, consumerName) as ObjectResult; // Assert result.Should().NotBeNull(); @@ -91,7 +91,7 @@ public void 
GetConsumerByGroupIdName_ValidGroupIdAndNonExistingConsumer_ReturnsN var consumerName = "nonExistingConsumer"; // Act - var result = this.target.GetConsumerByGroupIdName(groupId, consumerName) as NotFoundResult; + var result = _target.GetConsumerByGroupIdName(groupId, consumerName) as NotFoundResult; // Assert result.Should().NotBeNull(); @@ -111,16 +111,16 @@ public async Task PauseConsumer_ValidGroupIdAndExistingConsumer_ReturnsAcceptedR Mock.Of(c => c.GroupId == groupId && c.ConsumerName == consumerName), }; - this.mockConsumerAccessor.Setup(x => x.All).Returns(consumers); + _mockConsumerAccessor.Setup(x => x.All).Returns(consumers); // Act - var result = await this.target.PauseConsumer(groupId, consumerName, topics) as AcceptedResult; + var result = await _target.PauseConsumer(groupId, consumerName, topics) as AcceptedResult; // Assert result.Should().NotBeNull(); result.StatusCode.Should().Be(202); - this.mockConsumerAdmin.Verify(x => x.PauseConsumerAsync(consumerName, topics), Times.Once); + _mockConsumerAdmin.Verify(x => x.PauseConsumerAsync(consumerName, topics), Times.Once); } [TestMethod] @@ -136,10 +136,10 @@ public async Task PauseConsumer_ValidGroupIdAndNonExistingConsumer_ReturnsNotFou Mock.Of(c => c.GroupId == groupId && c.ConsumerName == "consumer2"), }; - this.mockConsumerAccessor.Setup(x => x.All).Returns(consumers); + _mockConsumerAccessor.Setup(x => x.All).Returns(consumers); // Act - var result = await this.target.PauseConsumer(groupId, consumerName, topics) as NotFoundResult; + var result = await _target.PauseConsumer(groupId, consumerName, topics) as NotFoundResult; // Assert result.Should().NotBeNull(); @@ -159,16 +159,16 @@ public async Task ResumeConsumer_ValidGroupIdAndNonExistingConsumer_ReturnsNotFo Mock.Of(c => c.GroupId == groupId && c.ConsumerName == "existingConsumer"), }; - this.mockConsumerAccessor.Setup(x => x.All).Returns(consumers); + _mockConsumerAccessor.Setup(x => x.All).Returns(consumers); // Act - var result = await 
this.target.ResumeConsumer(groupId, consumerName, topics) as NotFoundResult; + var result = await _target.ResumeConsumer(groupId, consumerName, topics) as NotFoundResult; // Assert result.Should().NotBeNull(); result.StatusCode.Should().Be(404); - this.mockConsumerAdmin.Verify(x => x.ResumeConsumerAsync(consumerName, topics), Times.Never); + _mockConsumerAdmin.Verify(x => x.ResumeConsumerAsync(consumerName, topics), Times.Never); } [TestMethod] @@ -183,16 +183,16 @@ public async Task StartConsumer_ValidGroupIdAndExistingConsumer_ReturnsAcceptedR Mock.Of(c => c.GroupId == groupId && c.ConsumerName == consumerName), }; - this.mockConsumerAccessor.Setup(x => x.All).Returns(consumers); + _mockConsumerAccessor.Setup(x => x.All).Returns(consumers); // Act - var result = await this.target.StartConsumer(groupId, consumerName) as AcceptedResult; + var result = await _target.StartConsumer(groupId, consumerName) as AcceptedResult; // Assert result.Should().NotBeNull(); result.StatusCode.Should().Be(202); - this.mockConsumerAdmin.Verify(x => x.StartConsumerAsync(consumerName), Times.Once); + _mockConsumerAdmin.Verify(x => x.StartConsumerAsync(consumerName), Times.Once); } [TestMethod] @@ -207,16 +207,16 @@ public async Task StartConsumer_ValidGroupIdAndNonExistingConsumer_ReturnsNotFou Mock.Of(c => c.GroupId == groupId && c.ConsumerName == "consumer1"), }; - this.mockConsumerAccessor.Setup(x => x.All).Returns(consumers); + _mockConsumerAccessor.Setup(x => x.All).Returns(consumers); // Act - var result = await this.target.StartConsumer(groupId, consumerName) as NotFoundResult; + var result = await _target.StartConsumer(groupId, consumerName) as NotFoundResult; // Assert result.Should().NotBeNull(); result.StatusCode.Should().Be(404); - this.mockConsumerAdmin.Verify(x => x.StartConsumerAsync(consumerName), Times.Never); + _mockConsumerAdmin.Verify(x => x.StartConsumerAsync(consumerName), Times.Never); } [TestMethod] @@ -231,16 +231,16 @@ public async Task 
StopConsumer_ValidGroupIdAndExistingConsumer_ReturnsAcceptedRe Mock.Of(c => c.GroupId == groupId && c.ConsumerName == consumerName), }; - this.mockConsumerAccessor.Setup(x => x.All).Returns(consumers); + _mockConsumerAccessor.Setup(x => x.All).Returns(consumers); // Act - var result = await this.target.StopConsumer(groupId, consumerName) as AcceptedResult; + var result = await _target.StopConsumer(groupId, consumerName) as AcceptedResult; // Assert result.Should().NotBeNull(); result.StatusCode.Should().Be(202); - this.mockConsumerAdmin.Verify(x => x.StopConsumerAsync(consumerName), Times.Once); + _mockConsumerAdmin.Verify(x => x.StopConsumerAsync(consumerName), Times.Once); } [TestMethod] @@ -255,16 +255,16 @@ public async Task StopConsumer_ValidGroupIdAndNonExistingConsumer_ReturnsNotFoun Mock.Of(c => c.GroupId == groupId && c.ConsumerName == "consumer1"), }; - this.mockConsumerAccessor.Setup(x => x.All).Returns(consumers); + _mockConsumerAccessor.Setup(x => x.All).Returns(consumers); // Act - var result = await this.target.StopConsumer(groupId, consumerName) as NotFoundResult; + var result = await _target.StopConsumer(groupId, consumerName) as NotFoundResult; // Assert result.Should().NotBeNull(); result.StatusCode.Should().Be(404); - this.mockConsumerAdmin.Verify(x => x.StopConsumerAsync(consumerName), Times.Never); + _mockConsumerAdmin.Verify(x => x.StopConsumerAsync(consumerName), Times.Never); } [TestMethod] @@ -279,16 +279,16 @@ public async Task RestartConsumer_ValidGroupIdAndExistingConsumer_ReturnsAccepte Mock.Of(c => c.GroupId == groupId && c.ConsumerName == consumerName), }; - this.mockConsumerAccessor.Setup(x => x.All).Returns(consumers); + _mockConsumerAccessor.Setup(x => x.All).Returns(consumers); // Act - var result = await this.target.RestartConsumer(groupId, consumerName) as AcceptedResult; + var result = await _target.RestartConsumer(groupId, consumerName) as AcceptedResult; // Assert result.Should().NotBeNull(); 
result.StatusCode.Should().Be(202); - this.mockConsumerAdmin.Verify(x => x.RestartConsumerAsync(consumerName), Times.Once); + _mockConsumerAdmin.Verify(x => x.RestartConsumerAsync(consumerName), Times.Once); } [TestMethod] @@ -303,16 +303,16 @@ public async Task RestartConsumer_ValidGroupIdAndNonExistingConsumer_ReturnsNotF Mock.Of(c => c.GroupId == groupId && c.ConsumerName == "consumer1"), }; - this.mockConsumerAccessor.Setup(x => x.All).Returns(consumers); + _mockConsumerAccessor.Setup(x => x.All).Returns(consumers); // Act - var result = await this.target.RestartConsumer(groupId, consumerName) as NotFoundResult; + var result = await _target.RestartConsumer(groupId, consumerName) as NotFoundResult; // Assert result.Should().NotBeNull(); result.StatusCode.Should().Be(404); - this.mockConsumerAdmin.Verify(x => x.StopConsumerAsync(consumerName), Times.Never); + _mockConsumerAdmin.Verify(x => x.StopConsumerAsync(consumerName), Times.Never); } [TestMethod] @@ -329,16 +329,16 @@ public async Task ResetOffsets_ValidRequest_ReturnsAcceptedResult() Mock.Of(c => c.GroupId == groupId && c.ConsumerName == consumerName), }; - this.mockConsumerAccessor.Setup(x => x.All).Returns(consumers); + _mockConsumerAccessor.Setup(x => x.All).Returns(consumers); // Act - var result = await this.target.ResetOffsets(groupId, consumerName, topics, request) as AcceptedResult; + var result = await _target.ResetOffsets(groupId, consumerName, topics, request) as AcceptedResult; // Assert result.Should().NotBeNull(); result.StatusCode.Should().Be(202); - this.mockConsumerAdmin.Verify(x => x.ResetOffsetsAsync(consumerName, topics), Times.Once); + _mockConsumerAdmin.Verify(x => x.ResetOffsetsAsync(consumerName, topics), Times.Once); } [TestMethod] @@ -355,16 +355,16 @@ public async Task ResetOffsets_InvalidConfirmValue_ReturnsBadRequestResult() Mock.Of(c => c.GroupId == groupId && c.ConsumerName == consumerName), }; - this.mockConsumerAccessor.Setup(x => x.All).Returns(consumers); + 
_mockConsumerAccessor.Setup(x => x.All).Returns(consumers); // Act - var result = await this.target.ResetOffsets(groupId, consumerName, topics, request) as BadRequestResult; + var result = await _target.ResetOffsets(groupId, consumerName, topics, request) as BadRequestResult; // Assert result.Should().NotBeNull(); result.StatusCode.Should().Be(400); - this.mockConsumerAdmin.Verify(x => x.ResetOffsetsAsync(consumerName, topics), Times.Never); + _mockConsumerAdmin.Verify(x => x.ResetOffsetsAsync(consumerName, topics), Times.Never); } [TestMethod] @@ -374,23 +374,23 @@ public async Task RewindOffsets_ValidGroupIdAndNonExistingConsumer_ReturnsNotFou var groupId = "group1"; var consumerName = "nonExistingConsumer"; var topics = new List { "topic1", "topic2" }; - var request = new RewindOffsetsToDateRequest { Date = this.fixture.Create() }; + var request = new RewindOffsetsToDateRequest { Date = _fixture.Create() }; var consumers = new List { Mock.Of(c => c.GroupId == groupId && c.ConsumerName == "consumer1"), }; - this.mockConsumerAccessor.Setup(x => x.All).Returns(consumers); + _mockConsumerAccessor.Setup(x => x.All).Returns(consumers); // Act - var result = await this.target.RewindOffsets(groupId, consumerName, topics, request) as NotFoundResult; + var result = await _target.RewindOffsets(groupId, consumerName, topics, request) as NotFoundResult; // Assert result.Should().NotBeNull(); result.StatusCode.Should().Be(404); - this.mockConsumerAdmin.Verify(x => x.RewindOffsetsAsync(consumerName, request.Date, topics), Times.Never); + _mockConsumerAdmin.Verify(x => x.RewindOffsetsAsync(consumerName, request.Date, topics), Times.Never); } [TestMethod] @@ -400,23 +400,23 @@ public async Task RewindOffsets_ValidRequest_ReturnsAcceptedResult() var groupId = "group1"; var consumerName = "consumer1"; var topics = new List { "topic1", "topic2" }; - var request = new RewindOffsetsToDateRequest { Date = this.fixture.Create() }; + var request = new RewindOffsetsToDateRequest { Date = 
_fixture.Create() }; var consumers = new List { Mock.Of(c => c.GroupId == groupId && c.ConsumerName == consumerName), }; - this.mockConsumerAccessor.Setup(x => x.All).Returns(consumers); + _mockConsumerAccessor.Setup(x => x.All).Returns(consumers); // Act - var result = await this.target.RewindOffsets(groupId, consumerName, topics, request) as AcceptedResult; + var result = await _target.RewindOffsets(groupId, consumerName, topics, request) as AcceptedResult; // Assert result.Should().NotBeNull(); result.StatusCode.Should().Be(202); - this.mockConsumerAdmin.Verify(x => x.RewindOffsetsAsync(consumerName, request.Date, topics), Times.Once); + _mockConsumerAdmin.Verify(x => x.RewindOffsetsAsync(consumerName, request.Date, topics), Times.Once); } [TestMethod] @@ -429,13 +429,13 @@ public async Task RewindOffsets_InvalidRequest_ReturnsBadRequestResult() RewindOffsetsToDateRequest request = null; // Act - var result = await this.target.RewindOffsets(groupId, consumerName, topics, request) as BadRequestResult; + var result = await _target.RewindOffsets(groupId, consumerName, topics, request) as BadRequestResult; // Assert result.Should().NotBeNull(); result.StatusCode.Should().Be(400); - this.mockConsumerAdmin.Verify(x => x.RewindOffsetsAsync(consumerName, It.IsAny(), topics), Times.Never); + _mockConsumerAdmin.Verify(x => x.RewindOffsetsAsync(consumerName, It.IsAny(), topics), Times.Never); } [TestMethod] @@ -445,23 +445,23 @@ public async Task ChangeWorkersCount_ValidRequest_ReturnsAcceptedResult() var groupId = "group1"; var consumerName = "consumer1"; var topics = new List { "topic1", "topic2" }; - var request = new RewindOffsetsToDateRequest { Date = this.fixture.Create() }; + var request = new RewindOffsetsToDateRequest { Date = _fixture.Create() }; var consumers = new List { Mock.Of(c => c.GroupId == groupId && c.ConsumerName == consumerName), }; - this.mockConsumerAccessor.Setup(x => x.All).Returns(consumers); + _mockConsumerAccessor.Setup(x => 
x.All).Returns(consumers); // Act - var result = await this.target.RewindOffsets(groupId, consumerName, topics, request) as AcceptedResult; + var result = await _target.RewindOffsets(groupId, consumerName, topics, request) as AcceptedResult; // Assert result.Should().NotBeNull(); result.StatusCode.Should().Be(202); - this.mockConsumerAdmin.Verify(x => x.RewindOffsetsAsync(consumerName, request.Date, topics), Times.Once); + _mockConsumerAdmin.Verify(x => x.RewindOffsetsAsync(consumerName, request.Date, topics), Times.Once); } [TestMethod] @@ -473,13 +473,13 @@ public async Task ChangeWorkersCount_NullRequest_ReturnsBadRequestResult() ChangeWorkersCountRequest request = null; // Act - var result = await this.target.ChangeWorkersCount(groupId, consumerName, request) as BadRequestResult; + var result = await _target.ChangeWorkersCount(groupId, consumerName, request) as BadRequestResult; // Assert result.Should().NotBeNull(); result.StatusCode.Should().Be(400); - this.mockConsumerAdmin.Verify(x => x.ChangeWorkersCountAsync(consumerName, It.IsAny()), Times.Never); + _mockConsumerAdmin.Verify(x => x.ChangeWorkersCountAsync(consumerName, It.IsAny()), Times.Never); } [DataRow(0)] @@ -494,13 +494,13 @@ public async Task ChangeWorkersCount_InvalidWorkerCount_ReturnsBadRequestResult( ChangeWorkersCountRequest request = new ChangeWorkersCountRequest { WorkersCount = workerCount }; // Act - var result = await this.target.ChangeWorkersCount(groupId, consumerName, request) as BadRequestResult; + var result = await _target.ChangeWorkersCount(groupId, consumerName, request) as BadRequestResult; // Assert result.Should().NotBeNull(); result.StatusCode.Should().Be(400); - this.mockConsumerAdmin.Verify(x => x.ChangeWorkersCountAsync(consumerName, It.IsAny()), Times.Never); + _mockConsumerAdmin.Verify(x => x.ChangeWorkersCountAsync(consumerName, It.IsAny()), Times.Never); } } } diff --git a/src/KafkaFlow.UnitTests/Admin.WebApi/Controllers/GroupsControllerTests.cs 
b/tests/KafkaFlow.UnitTests/Admin.WebApi/Controllers/GroupsControllerTests.cs similarity index 51% rename from src/KafkaFlow.UnitTests/Admin.WebApi/Controllers/GroupsControllerTests.cs rename to tests/KafkaFlow.UnitTests/Admin.WebApi/Controllers/GroupsControllerTests.cs index 91bbcdf18..75e2bd940 100644 --- a/src/KafkaFlow.UnitTests/Admin.WebApi/Controllers/GroupsControllerTests.cs +++ b/tests/KafkaFlow.UnitTests/Admin.WebApi/Controllers/GroupsControllerTests.cs @@ -1,32 +1,32 @@ -namespace KafkaFlow.UnitTests.Admin.WebApi.Controllers -{ - using System.Collections.Generic; - using System.Linq; - using System.Threading.Tasks; - using AutoFixture; - using FluentAssertions; - using KafkaFlow.Admin; - using KafkaFlow.Admin.WebApi.Contracts; - using KafkaFlow.Admin.WebApi.Controllers; - using KafkaFlow.Consumers; - using Microsoft.AspNetCore.Mvc; - using Microsoft.VisualStudio.TestTools.UnitTesting; - using Moq; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using AutoFixture; +using FluentAssertions; +using KafkaFlow.Admin; +using KafkaFlow.Admin.WebApi.Contracts; +using KafkaFlow.Admin.WebApi.Controllers; +using KafkaFlow.Consumers; +using Microsoft.AspNetCore.Mvc; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Moq; +namespace KafkaFlow.UnitTests.Admin.WebApi.Controllers +{ [TestClass] public class GroupsControllerTests { - private readonly Fixture fixture = new(); - private GroupsController target; - private Mock mockConsumerAccessor; - private Mock mockConsumerAdmin; + private readonly Fixture _fixture = new(); + private GroupsController _target; + private Mock _mockConsumerAccessor; + private Mock _mockConsumerAdmin; [TestInitialize] public void TestSetup() { - this.mockConsumerAccessor = this.fixture.Freeze>(); - this.mockConsumerAdmin = this.fixture.Freeze>(); - this.target = new GroupsController(this.mockConsumerAccessor.Object, this.mockConsumerAdmin.Object); + _mockConsumerAccessor = _fixture.Freeze>(); + 
_mockConsumerAdmin = _fixture.Freeze>(); + _target = new GroupsController(_mockConsumerAccessor.Object, _mockConsumerAdmin.Object); } [TestMethod] @@ -40,10 +40,10 @@ public void GetAllGroups_ReturnsOkResultWithGroupsResponse() Mock.Of(), }; - this.mockConsumerAccessor.Setup(x => x.All).Returns(consumers); + _mockConsumerAccessor.Setup(x => x.All).Returns(consumers); // Act - var result = this.target.GetAllGroups() as ObjectResult; + var result = _target.GetAllGroups() as ObjectResult; // Assert result.Should().NotBeNull(); @@ -62,13 +62,13 @@ public async Task PauseGroup_ValidGroupId_ReturnsAcceptedResult() var topics = new List { "topic1", "topic2" }; // Act - var result = await this.target.PauseGroup(groupId, topics) as AcceptedResult; + var result = await _target.PauseGroup(groupId, topics) as AcceptedResult; // Assert result.Should().NotBeNull(); result.StatusCode.Should().Be(202); - this.mockConsumerAdmin.Verify(x => x.PauseConsumerGroupAsync(groupId, topics), Times.Once); + _mockConsumerAdmin.Verify(x => x.PauseConsumerGroupAsync(groupId, topics), Times.Once); } [TestMethod] @@ -79,13 +79,13 @@ public async Task ResumeGroup_ValidGroupId_ReturnsAcceptedResult() var topics = new List { "topic1", "topic2" }; // Act - var result = await this.target.ResumeGroup(groupId, topics) as AcceptedResult; + var result = await _target.ResumeGroup(groupId, topics) as AcceptedResult; // Assert result.Should().NotBeNull(); result.StatusCode.Should().Be(202); - this.mockConsumerAdmin.Verify(x => x.ResumeConsumerGroupAsync(groupId, topics), Times.Once); + _mockConsumerAdmin.Verify(x => x.ResumeConsumerGroupAsync(groupId, topics), Times.Once); } } } diff --git a/src/KafkaFlow.UnitTests/Admin.WebApi/Controllers/TelemetryControllerTests.cs b/tests/KafkaFlow.UnitTests/Admin.WebApi/Controllers/TelemetryControllerTests.cs similarity index 83% rename from src/KafkaFlow.UnitTests/Admin.WebApi/Controllers/TelemetryControllerTests.cs rename to 
tests/KafkaFlow.UnitTests/Admin.WebApi/Controllers/TelemetryControllerTests.cs index 0a9d2bd0c..f801c6325 100644 --- a/src/KafkaFlow.UnitTests/Admin.WebApi/Controllers/TelemetryControllerTests.cs +++ b/tests/KafkaFlow.UnitTests/Admin.WebApi/Controllers/TelemetryControllerTests.cs @@ -1,35 +1,35 @@ -namespace KafkaFlow.UnitTests.Admin.WebApi.Controllers -{ - using System; - using System.Collections.Generic; - using System.Linq; - using FluentAssertions; - using KafkaFlow.Admin; - using KafkaFlow.Admin.Messages; - using KafkaFlow.Admin.WebApi.Contracts; - using KafkaFlow.Admin.WebApi.Controllers; - using KafkaFlow.Consumers; - using Microsoft.AspNetCore.Mvc; - using Microsoft.VisualStudio.TestTools.UnitTesting; - using Moq; +using System; +using System.Collections.Generic; +using System.Linq; +using FluentAssertions; +using KafkaFlow.Admin; +using KafkaFlow.Admin.Messages; +using KafkaFlow.Admin.WebApi.Contracts; +using KafkaFlow.Admin.WebApi.Controllers; +using KafkaFlow.Consumers; +using Microsoft.AspNetCore.Mvc; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Moq; +namespace KafkaFlow.UnitTests.Admin.WebApi.Controllers +{ [TestClass] public class TelemetryControllerTests { - private Mock mockTelemetryStorage; - private TelemetryController target; + private Mock _mockTelemetryStorage; + private TelemetryController _target; [TestInitialize] public void TestInitialize() { - this.mockTelemetryStorage = new Mock(); - this.target = new TelemetryController(this.mockTelemetryStorage.Object); + _mockTelemetryStorage = new Mock(); + _target = new TelemetryController(_mockTelemetryStorage.Object); } [TestMethod] public void GetTelemetry_ReturnsOkResultWithTelemetryResponse() { - // Arrange + // Arrange var metrics = new List { new ConsumerTelemetryMetric @@ -73,10 +73,10 @@ public void GetTelemetry_ReturnsOkResultWithTelemetryResponse() }, }; - this.mockTelemetryStorage.Setup(x => x.Get()).Returns(metrics); + _mockTelemetryStorage.Setup(x => 
x.Get()).Returns(metrics); // Act - var result = this.target.GetTelemetry() as ObjectResult; + var result = _target.GetTelemetry() as ObjectResult; // Assert result.Should().NotBeNull(); diff --git a/src/KafkaFlow.UnitTests/BatchConsume/BatchConsumeMiddlewareTests.cs b/tests/KafkaFlow.UnitTests/BatchConsume/BatchConsumeMiddlewareTests.cs similarity index 64% rename from src/KafkaFlow.UnitTests/BatchConsume/BatchConsumeMiddlewareTests.cs rename to tests/KafkaFlow.UnitTests/BatchConsume/BatchConsumeMiddlewareTests.cs index 0f8b27be7..acca249c9 100644 --- a/src/KafkaFlow.UnitTests/BatchConsume/BatchConsumeMiddlewareTests.cs +++ b/tests/KafkaFlow.UnitTests/BatchConsume/BatchConsumeMiddlewareTests.cs @@ -1,46 +1,45 @@ +using System; +using System.Threading.Tasks; +using AutoFixture; +using FluentAssertions; +using KafkaFlow.Batching; +using KafkaFlow.Configuration; +using KafkaFlow.Consumers; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Moq; + namespace KafkaFlow.UnitTests.BatchConsume { - using System; - using System.Collections.Generic; - using System.Threading.Tasks; - using AutoFixture; - using FluentAssertions; - using KafkaFlow.Batching; - using KafkaFlow.Configuration; - using KafkaFlow.Consumers; - using Microsoft.VisualStudio.TestTools.UnitTesting; - using Moq; - [TestClass] public class BatchConsumeMiddlewareTests { private const int BatchSize = 3; - private readonly TimeSpan batchTimeout = TimeSpan.FromMilliseconds(1000); - private readonly TimeSpan waitForTaskExecution = TimeSpan.FromMilliseconds(100); - private readonly Fixture fixture = new(); + private readonly TimeSpan _batchTimeout = TimeSpan.FromMilliseconds(1000); + private readonly TimeSpan _waitForTaskExecution = TimeSpan.FromMilliseconds(100); + private readonly Fixture _fixture = new(); - private Mock logHandlerMock; + private Mock _logHandlerMock; - private IMessageContext nextContext; - private int timesNextWasCalled; + private IMessageContext _nextContext; + private int 
_timesNextWasCalled; - private BatchConsumeMiddleware target; + private BatchConsumeMiddleware _target; [TestInitialize] public void Setup() { - this.nextContext = null; - this.timesNextWasCalled = 0; + _nextContext = null; + _timesNextWasCalled = 0; - this.logHandlerMock = new Mock(); + _logHandlerMock = new Mock(); var middlewareContextMock = new Mock(); var workerMock = new Mock(); var consumerMock = new Mock(); var consumerConfigurationMock = new Mock(); - var clusterConfig = this.fixture.Create(); + var clusterConfig = _fixture.Create(); consumerConfigurationMock.SetupGet(x => x.ClusterConfiguration).Returns(clusterConfig); @@ -58,17 +57,17 @@ public void Setup() workerMock .SetupGet(x => x.WorkerStopped) - .Returns(new Event(this.logHandlerMock.Object)); + .Returns(new Event(_logHandlerMock.Object)); consumerConfigurationMock .SetupGet(x => x.AutoMessageCompletion) .Returns(true); - this.target = new BatchConsumeMiddleware( + _target = new BatchConsumeMiddleware( middlewareContextMock.Object, BatchSize, - this.batchTimeout, - this.logHandlerMock.Object); + _batchTimeout, + _logHandlerMock.Object); } [TestMethod] @@ -87,12 +86,12 @@ public async Task AddAsync_LessThanBatchSize_CallNextOnTimeout() .Returns(consumerContext.Object); // Act - await this.target.Invoke(context.Object, this.NextCallback); + await _target.Invoke(context.Object, this.NextCallback); // Assert - this.timesNextWasCalled.Should().Be(0); + _timesNextWasCalled.Should().Be(0); await this.WaitBatchTimeoutAsync(); - this.timesNextWasCalled.Should().Be(1); + _timesNextWasCalled.Should().Be(1); consumerContext.Verify(x => x.Complete(), Times.Once); } @@ -114,14 +113,14 @@ public async Task AddAsync_ExactlyBatchSize_CallNextInstantly() // Act for (var i = 0; i < BatchSize; i++) { - await this.target.Invoke(contextMock.Object, this.NextCallback); + await _target.Invoke(contextMock.Object, this.NextCallback); } - await Task.Delay(this.waitForTaskExecution); + await Task.Delay(_waitForTaskExecution); 
// Assert - this.timesNextWasCalled.Should().Be(1); - this.nextContext.GetMessagesBatch().Should().HaveCount(BatchSize); + _timesNextWasCalled.Should().Be(1); + _nextContext.GetMessagesBatch().Should().HaveCount(BatchSize); consumerContext.Verify(x => x.Complete(), Times.Exactly(BatchSize)); } @@ -143,18 +142,18 @@ public async Task AddAsync_MoreThanBatchSize_CallNextInstantlyThenCallWhenTimeou // Act for (var i = 0; i < BatchSize + 1; i++) { - await this.target.Invoke(contextMock.Object, this.NextCallback); + await _target.Invoke(contextMock.Object, this.NextCallback); } - await Task.Delay(this.waitForTaskExecution); + await Task.Delay(_waitForTaskExecution); // Assert - this.timesNextWasCalled.Should().Be(1); - this.nextContext.GetMessagesBatch().Should().HaveCount(BatchSize); + _timesNextWasCalled.Should().Be(1); + _nextContext.GetMessagesBatch().Should().HaveCount(BatchSize); consumerContext.Verify(x => x.Complete(), Times.Exactly(BatchSize)); await this.WaitBatchTimeoutAsync(); - this.timesNextWasCalled.Should().Be(2); + _timesNextWasCalled.Should().Be(2); consumerContext.Verify(x => x.Complete(), Times.Exactly(BatchSize + 1)); } @@ -170,19 +169,19 @@ public async Task AddAsync_NextThrowException_LogError() .Returns(consumerContext.Object); // Act - await this.target.Invoke(contextMock.Object, _ => throw new Exception()); + await _target.Invoke(contextMock.Object, _ => throw new Exception()); // Assert await this.WaitBatchTimeoutAsync(); - this.logHandlerMock.Verify(x => x.Error(It.IsAny(), It.IsAny(), It.IsAny()), Times.Once); + _logHandlerMock.Verify(x => x.Error(It.IsAny(), It.IsAny(), It.IsAny()), Times.Once); } - private Task WaitBatchTimeoutAsync() => Task.Delay(this.batchTimeout + this.waitForTaskExecution); + private Task WaitBatchTimeoutAsync() => Task.Delay(_batchTimeout + _waitForTaskExecution); private Task NextCallback(IMessageContext ctx) { - this.nextContext = ctx; - this.timesNextWasCalled++; + _nextContext = ctx; + _timesNextWasCalled++; 
return Task.CompletedTask; } } diff --git a/src/KafkaFlow.UnitTests/Compressors/CompressorConsumerMiddlewareTests.cs b/tests/KafkaFlow.UnitTests/Compressors/CompressorConsumerMiddlewareTests.cs similarity index 54% rename from src/KafkaFlow.UnitTests/Compressors/CompressorConsumerMiddlewareTests.cs rename to tests/KafkaFlow.UnitTests/Compressors/CompressorConsumerMiddlewareTests.cs index 57ced96cb..1e79dd6db 100644 --- a/src/KafkaFlow.UnitTests/Compressors/CompressorConsumerMiddlewareTests.cs +++ b/tests/KafkaFlow.UnitTests/Compressors/CompressorConsumerMiddlewareTests.cs @@ -1,46 +1,46 @@ -namespace KafkaFlow.UnitTests.Compressors -{ - using System; - using System.Threading.Tasks; - using FluentAssertions; +using System; +using System.Threading.Tasks; +using FluentAssertions; - using KafkaFlow.Middlewares.Compressor; +using KafkaFlow.Middlewares.Compressor; - using Microsoft.VisualStudio.TestTools.UnitTesting; - using Moq; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Moq; +namespace KafkaFlow.UnitTests.Compressors +{ [TestClass] public class CompressorConsumerMiddlewareTests { - private Mock contextMock; - private Mock decompressorMock; - private bool nextCalled; - private DecompressorConsumerMiddleware target; + private Mock _contextMock; + private Mock _decompressorMock; + private bool _nextCalled; + private DecompressorConsumerMiddleware _target; [TestInitialize] public void Setup() { - this.contextMock = new Mock(); - this.decompressorMock = new Mock(); - this.target = new DecompressorConsumerMiddleware(this.decompressorMock.Object); + _contextMock = new Mock(); + _decompressorMock = new Mock(); + _target = new DecompressorConsumerMiddleware(_decompressorMock.Object); } [TestMethod] public void Invoke_NotByteArrayMessage_ThrowsInvalidOperationException() { // Arrange - this.contextMock + _contextMock .SetupGet(x => x.Message) .Returns(new Message(new object(), new object())); // Act - Func act = () => this.target.Invoke(this.contextMock.Object, 
_ => this.SetNextCalled()); + Func act = () => _target.Invoke(_contextMock.Object, _ => this.SetNextCalled()); // Assert act.Should().Throw(); - this.nextCalled.Should().BeFalse(); - this.contextMock.Verify(x => x.SetMessage(It.IsAny(), It.IsAny()), Times.Never); - this.decompressorMock.Verify(x => x.Decompress(It.IsAny()), Times.Never); + _nextCalled.Should().BeFalse(); + _contextMock.Verify(x => x.SetMessage(It.IsAny(), It.IsAny()), Times.Never); + _decompressorMock.Verify(x => x.Decompress(It.IsAny()), Times.Never); } [TestMethod] @@ -53,21 +53,21 @@ public async Task Invoke_ValidMessage_CallNext() var transformedContextMock = new Mock(); IMessageContext resultContext = null; - this.contextMock + _contextMock .SetupGet(x => x.Message) .Returns(compressedMessage); - this.decompressorMock - .Setup(x => x.Decompress((byte[]) compressedMessage.Value)) + _decompressorMock + .Setup(x => x.Decompress((byte[])compressedMessage.Value)) .Returns(uncompressedValue); - this.contextMock + _contextMock .Setup(x => x.SetMessage(compressedMessage.Key, uncompressedValue)) .Returns(transformedContextMock.Object); // Act - await this.target.Invoke( - this.contextMock.Object, + await _target.Invoke( + _contextMock.Object, ctx => { resultContext = ctx; @@ -77,13 +77,13 @@ await this.target.Invoke( // Assert resultContext.Should().NotBeNull(); resultContext.Should().Be(transformedContextMock.Object); - this.contextMock.VerifyAll(); - this.decompressorMock.VerifyAll(); + _contextMock.VerifyAll(); + _decompressorMock.VerifyAll(); } private Task SetNextCalled() { - this.nextCalled = true; + _nextCalled = true; return Task.CompletedTask; } } diff --git a/src/KafkaFlow.UnitTests/Compressors/CompressorProducerMiddlewareTests.cs b/tests/KafkaFlow.UnitTests/Compressors/CompressorProducerMiddlewareTests.cs similarity index 62% rename from src/KafkaFlow.UnitTests/Compressors/CompressorProducerMiddlewareTests.cs rename to 
tests/KafkaFlow.UnitTests/Compressors/CompressorProducerMiddlewareTests.cs index 24c41eed2..448a6a7e9 100644 --- a/src/KafkaFlow.UnitTests/Compressors/CompressorProducerMiddlewareTests.cs +++ b/tests/KafkaFlow.UnitTests/Compressors/CompressorProducerMiddlewareTests.cs @@ -1,29 +1,29 @@ -namespace KafkaFlow.UnitTests.Compressors -{ - using System; - using System.Threading.Tasks; - using FluentAssertions; +using System; +using System.Threading.Tasks; +using FluentAssertions; - using KafkaFlow.Middlewares.Compressor; +using KafkaFlow.Middlewares.Compressor; - using Microsoft.VisualStudio.TestTools.UnitTesting; - using Moq; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Moq; +namespace KafkaFlow.UnitTests.Compressors +{ [TestClass] public class CompressorProducerMiddlewareTests { - private Mock contextMock; - private Mock compressorMock; + private Mock _contextMock; + private Mock _compressorMock; - private CompressorProducerMiddleware target; + private CompressorProducerMiddleware _target; [TestInitialize] public void Setup() { - this.contextMock = new Mock(); - this.compressorMock = new Mock(); + _contextMock = new Mock(); + _compressorMock = new Mock(); - this.target = new CompressorProducerMiddleware(this.compressorMock.Object); + _target = new CompressorProducerMiddleware(_compressorMock.Object); } [TestMethod] @@ -33,13 +33,13 @@ public async Task Invoke_InvalidMessage_Throws() var uncompressedMessage = new Message(new byte[1], new object()); IMessageContext resultContext = null; - this.contextMock + _contextMock .SetupGet(x => x.Message) .Returns(uncompressedMessage); // Act - Func act = () => this.target.Invoke( - this.contextMock.Object, + Func act = () => _target.Invoke( + _contextMock.Object, ctx => { resultContext = ctx; @@ -49,8 +49,8 @@ public async Task Invoke_InvalidMessage_Throws() // Assert await act.Should().ThrowAsync(); resultContext.Should().BeNull(); - this.contextMock.Verify(x => x.SetMessage(It.IsAny(), It.IsAny()), Times.Never); - 
this.compressorMock.Verify(x => x.Compress(It.IsAny()), Times.Never); + _contextMock.Verify(x => x.SetMessage(It.IsAny(), It.IsAny()), Times.Never); + _compressorMock.Verify(x => x.Compress(It.IsAny()), Times.Never); } [TestMethod] @@ -64,21 +64,21 @@ public async Task Invoke_ValidMessage_Compress() var transformedContextMock = new Mock(); IMessageContext resultContext = null; - this.contextMock + _contextMock .SetupGet(x => x.Message) .Returns(uncompressedMessage); - this.compressorMock + _compressorMock .Setup(x => x.Compress(uncompressedValue)) .Returns(compressedValue); - this.contextMock + _contextMock .Setup(x => x.SetMessage(uncompressedMessage.Key, compressedValue)) .Returns(transformedContextMock.Object); // Act - await this.target.Invoke( - this.contextMock.Object, + await _target.Invoke( + _contextMock.Object, ctx => { resultContext = ctx; @@ -88,8 +88,8 @@ await this.target.Invoke( // Assert resultContext.Should().NotBeNull(); resultContext.Should().Be(transformedContextMock.Object); - this.contextMock.VerifyAll(); - this.compressorMock.VerifyAll(); + _contextMock.VerifyAll(); + _compressorMock.VerifyAll(); } } } diff --git a/src/KafkaFlow.UnitTests/ConfigurationBuilders/ConsumerConfigurationBuilderTests.cs b/tests/KafkaFlow.UnitTests/ConfigurationBuilders/ConsumerConfigurationBuilderTests.cs similarity index 72% rename from src/KafkaFlow.UnitTests/ConfigurationBuilders/ConsumerConfigurationBuilderTests.cs rename to tests/KafkaFlow.UnitTests/ConfigurationBuilders/ConsumerConfigurationBuilderTests.cs index 88efc9d1f..fe60be91d 100644 --- a/src/KafkaFlow.UnitTests/ConfigurationBuilders/ConsumerConfigurationBuilderTests.cs +++ b/tests/KafkaFlow.UnitTests/ConfigurationBuilders/ConsumerConfigurationBuilderTests.cs @@ -1,57 +1,55 @@ +using System; +using System.Collections.Generic; +using AutoFixture; +using FluentAssertions; +using KafkaFlow.Configuration; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Moq; + namespace 
KafkaFlow.UnitTests.ConfigurationBuilders { - using System; - using System.Collections.Generic; - using AutoFixture; - using Confluent.Kafka; - using FluentAssertions; - using KafkaFlow.Configuration; - using Microsoft.VisualStudio.TestTools.UnitTesting; - using Moq; - using AutoOffsetReset = KafkaFlow.AutoOffsetReset; - [TestClass] public class ConsumerConfigurationBuilderTests { - private readonly Fixture fixture = new(); + private readonly Fixture _fixture = new(); - private Mock dependencyConfiguratorMock; + private Mock _dependencyConfiguratorMock; - private ConsumerConfigurationBuilder target; + private ConsumerConfigurationBuilder _target; [TestInitialize] public void Setup() { - this.dependencyConfiguratorMock = new Mock(); + _dependencyConfiguratorMock = new Mock(); - this.target = new ConsumerConfigurationBuilder(this.dependencyConfiguratorMock.Object); + _target = new ConsumerConfigurationBuilder(_dependencyConfiguratorMock.Object); } [TestMethod] public void DependencyConfigurator_SetProperty_ReturnPassedInstance() { // Assert - this.target.DependencyConfigurator.Should().Be(this.dependencyConfiguratorMock.Object); + _target.DependencyConfigurator.Should().Be(_dependencyConfiguratorMock.Object); } [TestMethod] public void Build_RequiredCalls_ReturnDefaultValues() { // Arrange - var clusterConfiguration = this.fixture.Create(); - var topic1 = this.fixture.Create(); + var clusterConfiguration = _fixture.Create(); + var topic1 = _fixture.Create(); const int bufferSize = 100; const int workers = 10; - var groupId = this.fixture.Create(); + var groupId = _fixture.Create(); - this.target + _target .Topics(topic1) .WithBufferSize(bufferSize) .WithWorkersCount(workers) .WithGroupId(groupId); // Act - var configuration = this.target.Build(clusterConfiguration); + var configuration = _target.Build(clusterConfiguration); // Assert configuration.Topics.Should().BeEquivalentTo(topic1); @@ -73,27 +71,27 @@ public void Build_RequiredCalls_ReturnDefaultValues() public 
void Build_AllCalls_ReturnPassedValues() { // Arrange - var clusterConfiguration = this.fixture.Create(); - var topic1 = this.fixture.Create(); - var topic2 = this.fixture.Create(); - var name = this.fixture.Create(); + var clusterConfiguration = _fixture.Create(); + var topic1 = _fixture.Create(); + var topic2 = _fixture.Create(); + var name = _fixture.Create(); const int bufferSize = 100; const int workers = 10; const AutoOffsetReset offsetReset = AutoOffsetReset.Earliest; - var groupId = this.fixture.Create(); + var groupId = _fixture.Create(); const int autoCommitInterval = 10000; const int maxPollIntervalMs = 500000; ConsumerCustomFactory customFactory = (producer, _) => producer; Action statisticsHandler = _ => { }; - Action> partitionsAssignedHandler = (_, _) => { }; - Action> partitionsRevokedHandler = (_, _) => { }; + Action> partitionsAssignedHandler = (_, _) => { }; + Action> partitionsRevokedHandler = (_, _) => { }; const int statisticsIntervalMs = 100; - var consumerConfig = new ConsumerConfig + var consumerConfig = new Confluent.Kafka.ConsumerConfig { - ClientId = "testeclient" + ClientId = "testeclient", }; - this.target + _target .Topics(topic1) .Topic(topic2) .WithName(name) @@ -113,7 +111,7 @@ public void Build_AllCalls_ReturnPassedValues() .AddMiddlewares(m => m.Add()); // Act - var configuration = this.target.Build(clusterConfiguration); + var configuration = _target.Build(clusterConfiguration); // Assert configuration.Topics.Should().BeEquivalentTo(topic1, topic2); diff --git a/src/KafkaFlow.UnitTests/ConfigurationBuilders/KafkaConfigurationBuilderTests.cs b/tests/KafkaFlow.UnitTests/ConfigurationBuilders/KafkaConfigurationBuilderTests.cs similarity index 75% rename from src/KafkaFlow.UnitTests/ConfigurationBuilders/KafkaConfigurationBuilderTests.cs rename to tests/KafkaFlow.UnitTests/ConfigurationBuilders/KafkaConfigurationBuilderTests.cs index bccb9329a..89cb49317 100644 --- 
a/src/KafkaFlow.UnitTests/ConfigurationBuilders/KafkaConfigurationBuilderTests.cs +++ b/tests/KafkaFlow.UnitTests/ConfigurationBuilders/KafkaConfigurationBuilderTests.cs @@ -1,9 +1,9 @@ -namespace KafkaFlow.UnitTests.ConfigurationBuilders -{ - using KafkaFlow.Configuration; - using Microsoft.VisualStudio.TestTools.UnitTesting; - using Moq; +using KafkaFlow.Configuration; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Moq; +namespace KafkaFlow.UnitTests.ConfigurationBuilders +{ [TestClass] public class KafkaConfigurationBuilderTests { diff --git a/src/KafkaFlow.UnitTests/ConfigurationBuilders/ProducerConfigurationBuilderTests.cs b/tests/KafkaFlow.UnitTests/ConfigurationBuilders/ProducerConfigurationBuilderTests.cs similarity index 66% rename from src/KafkaFlow.UnitTests/ConfigurationBuilders/ProducerConfigurationBuilderTests.cs rename to tests/KafkaFlow.UnitTests/ConfigurationBuilders/ProducerConfigurationBuilderTests.cs index 938b16d9a..63557aa25 100644 --- a/src/KafkaFlow.UnitTests/ConfigurationBuilders/ProducerConfigurationBuilderTests.cs +++ b/tests/KafkaFlow.UnitTests/ConfigurationBuilders/ProducerConfigurationBuilderTests.cs @@ -1,54 +1,54 @@ +using System; +using AutoFixture; +using Confluent.Kafka; +using FluentAssertions; +using KafkaFlow.Configuration; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Moq; + namespace KafkaFlow.UnitTests.ConfigurationBuilders { - using System; - using AutoFixture; - using Confluent.Kafka; - using FluentAssertions; - using KafkaFlow.Configuration; - using Microsoft.VisualStudio.TestTools.UnitTesting; - using Moq; - [TestClass] public class ProducerConfigurationBuilderTests { - private readonly Fixture fixture = new(); + private readonly Fixture _fixture = new(); - private Mock dependencyConfiguratorMock; + private Mock _dependencyConfiguratorMock; - private string name; + private string _name; - private ProducerConfigurationBuilder target; + private ProducerConfigurationBuilder _target; 
[TestInitialize] public void Setup() { - this.dependencyConfiguratorMock = new Mock(); - this.name = this.fixture.Create(); + _dependencyConfiguratorMock = new Mock(); + _name = _fixture.Create(); - this.target = new ProducerConfigurationBuilder( - this.dependencyConfiguratorMock.Object, - this.name); + _target = new ProducerConfigurationBuilder( + _dependencyConfiguratorMock.Object, + _name); } [TestMethod] public void DependencyConfigurator_SetProperty_ReturnPassedInstance() { // Assert - this.target.DependencyConfigurator.Should().Be(this.dependencyConfiguratorMock.Object); + _target.DependencyConfigurator.Should().Be(_dependencyConfiguratorMock.Object); } [TestMethod] public void Build_RequiredCalls_ReturnDefaultValues() { // Arrange - var clusterConfiguration = this.fixture.Create(); + var clusterConfiguration = _fixture.Create(); // Act - var configuration = this.target.Build(clusterConfiguration); + var configuration = _target.Build(clusterConfiguration); // Assert configuration.Cluster.Should().Be(clusterConfiguration); - configuration.Name.Should().Be(this.name); + configuration.Name.Should().Be(_name); configuration.DefaultTopic.Should().BeNull(); configuration.Acks.Should().BeNull(); configuration.StatisticsHandlers.Should().BeEmpty(); @@ -59,10 +59,10 @@ public void Build_RequiredCalls_ReturnDefaultValues() public void Build_AllCalls_ReturnPassedValues() { // Arrange - var clusterConfiguration = this.fixture.Create(); + var clusterConfiguration = _fixture.Create(); - var defaultTopic = this.fixture.Create(); - var acks = this.fixture.Create(); + var defaultTopic = _fixture.Create(); + var acks = _fixture.Create(); const int lingerMs = 50; ProducerCustomFactory customFactory = (producer, _) => producer; Action statisticsHandler = _ => { }; @@ -71,7 +71,7 @@ public void Build_AllCalls_ReturnPassedValues() var compressionType = CompressionType.Lz4; var compressionLevel = 5; - this.target + _target .DefaultTopic(defaultTopic) .WithAcks(acks) 
.WithLingerMs(lingerMs) @@ -83,11 +83,11 @@ public void Build_AllCalls_ReturnPassedValues() .AddMiddlewares(m => m.Add()); // Act - var configuration = this.target.Build(clusterConfiguration); + var configuration = _target.Build(clusterConfiguration); // Assert configuration.Cluster.Should().Be(clusterConfiguration); - configuration.Name.Should().Be(this.name); + configuration.Name.Should().Be(_name); configuration.DefaultTopic.Should().Be(defaultTopic); configuration.Acks.Should().Be(acks); configuration.BaseProducerConfig.LingerMs.Should().Be(lingerMs); @@ -103,19 +103,19 @@ public void Build_AllCalls_ReturnPassedValues() public void Build_UseCompressionWithoutCompressionLevel_ReturnDefaultValues() { // Arrange - var clusterConfiguration = this.fixture.Create(); + var clusterConfiguration = _fixture.Create(); var compressionType = CompressionType.Gzip; - this.target + _target .WithCompression(compressionType); // Act - var configuration = this.target.Build(clusterConfiguration); + var configuration = _target.Build(clusterConfiguration); // Assert configuration.Cluster.Should().Be(clusterConfiguration); - configuration.Name.Should().Be(this.name); + configuration.Name.Should().Be(_name); configuration.BaseProducerConfig.CompressionType.Should().Be(compressionType); configuration.BaseProducerConfig.CompressionLevel.Should().Be(-1); } diff --git a/tests/KafkaFlow.UnitTests/Consumer/ConsumerManagerTests.cs b/tests/KafkaFlow.UnitTests/Consumer/ConsumerManagerTests.cs new file mode 100644 index 000000000..28d3d9574 --- /dev/null +++ b/tests/KafkaFlow.UnitTests/Consumer/ConsumerManagerTests.cs @@ -0,0 +1,166 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using AutoFixture; +using FluentAssertions; +using KafkaFlow.Configuration; +using KafkaFlow.Consumers; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Moq; + +namespace KafkaFlow.UnitTests.Consumer +{ + [TestClass] + public class ConsumerManagerTests + { + private 
readonly Fixture _fixture = new(); + + private ConsumerManager _target; + + private Mock _consumerMock; + private Mock _workerPoolMock; + private Mock _feederMock; + private Mock _logHandlerMock; + private Mock _dependencyResolver; + + private Action, List> _onPartitionAssignedHandler; + private Action, List> _onPartitionRevokedHandler; + + [TestInitialize] + public void Setup() + { + _consumerMock = new Mock(); + _workerPoolMock = new Mock(); + _feederMock = new Mock(); + _logHandlerMock = new Mock(); + _dependencyResolver = new Mock(); + + _consumerMock + .Setup( + x => x.OnPartitionsAssigned(It.IsAny, List>>())) + .Callback( + (Action, List> value) => + _onPartitionAssignedHandler = value); + + _consumerMock + .Setup( + x => x.OnPartitionsRevoked( + It.IsAny, List>>())) + .Callback( + (Action, List> value) => + _onPartitionRevokedHandler = value); + + var configurationMock = new Mock(); + + configurationMock + .SetupGet(x => x.WorkersCountCalculator) + .Returns((_, _) => Task.FromResult(10)); + + configurationMock + .SetupGet(x => x.WorkersCountEvaluationInterval) + .Returns(TimeSpan.FromMinutes(5)); + + _consumerMock + .SetupGet(x => x.Configuration) + .Returns(configurationMock.Object); + + _target = new ConsumerManager( + _consumerMock.Object, + _workerPoolMock.Object, + _feederMock.Object, + _dependencyResolver.Object, + _logHandlerMock.Object); + } + + [TestMethod] + public void ConstructorCalled_InitializeProperties() + { + // Assert + _target.Consumer.Should().Be(_consumerMock.Object); + _target.WorkerPool.Should().Be(_workerPoolMock.Object); + _target.Feeder.Should().Be(_feederMock.Object); + } + + [TestMethod] + public async Task StartAsync_StartDependencies() + { + // Arrange + _feederMock + .Setup(x => x.Start()); + + // Act + await _target.StartAsync(); + + // Assert + _feederMock.VerifyAll(); + } + + [TestMethod] + public async Task StopAsync_StopDependencies() + { + // Arrange + _feederMock + .Setup(x => x.StopAsync()) + 
.Returns(Task.CompletedTask); + + _workerPoolMock + .Setup(x => x.StopAsync()) + .Returns(Task.CompletedTask); + + // Act + await _target.StopAsync(); + + // Assert + _feederMock.VerifyAll(); + _workerPoolMock.VerifyAll(); + _consumerMock.Verify(x => x.Dispose(), Times.Once()); + } + + [TestMethod] + public void OnPartitionsAssigned_StartWorkerPool() + { + // Arrange + var partitions = _fixture.Create>(); + + _workerPoolMock + .Setup(x => x.StartAsync(partitions, It.IsAny())) + .Returns(Task.CompletedTask); + + _logHandlerMock + .Setup(x => x.Info(It.IsAny(), It.IsAny())); + + // Act + _onPartitionAssignedHandler(_dependencyResolver.Object, Mock.Of>(), partitions); + + // Assert + _workerPoolMock.VerifyAll(); + _logHandlerMock.VerifyAll(); + } + + [TestMethod] + public void OnPartitionsRevoked_StopWorkerPool() + { + // Arrange + Confluent.Kafka.IConsumer consumer = null; + var partitions = _fixture.Create>(); + + _workerPoolMock + .Setup(x => x.StopAsync()) + .Returns(Task.CompletedTask); + + _consumerMock + .SetupGet(x => x.Configuration) + .Returns(new Mock().Object); + + _logHandlerMock + .Setup(x => x.Warning(It.IsAny(), It.IsAny())); + + // Act + _onPartitionRevokedHandler(_dependencyResolver.Object, consumer, partitions); + + // Assert + _workerPoolMock.VerifyAll(); + _logHandlerMock.VerifyAll(); + } + } +} diff --git a/src/KafkaFlow.UnitTests/Consumer/WorkerPoolFeederTests.cs b/tests/KafkaFlow.UnitTests/Consumer/WorkerPoolFeederTests.cs similarity index 67% rename from src/KafkaFlow.UnitTests/Consumer/WorkerPoolFeederTests.cs rename to tests/KafkaFlow.UnitTests/Consumer/WorkerPoolFeederTests.cs index a1930c73e..282064004 100644 --- a/src/KafkaFlow.UnitTests/Consumer/WorkerPoolFeederTests.cs +++ b/tests/KafkaFlow.UnitTests/Consumer/WorkerPoolFeederTests.cs @@ -1,33 +1,33 @@ +using System; +using System.Threading; +using System.Threading.Tasks; +using Confluent.Kafka; +using KafkaFlow.Consumers; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Moq; 
+ namespace KafkaFlow.UnitTests.Consumer { - using System; - using System.Threading; - using System.Threading.Tasks; - using Confluent.Kafka; - using KafkaFlow.Consumers; - using Microsoft.VisualStudio.TestTools.UnitTesting; - using Moq; - [TestClass] public class WorkerPoolFeederTests { - private WorkerPoolFeeder target; + private WorkerPoolFeeder _target; - private Mock consumerMock; - private Mock workerPoolMock; - private Mock logHandlerMock; + private Mock _consumerMock; + private Mock _workerPoolMock; + private Mock _logHandlerMock; [TestInitialize] public void Setup() { - this.consumerMock = new Mock(MockBehavior.Strict); - this.workerPoolMock = new Mock(MockBehavior.Strict); - this.logHandlerMock = new Mock(MockBehavior.Strict); - - this.target = new WorkerPoolFeeder( - this.consumerMock.Object, - this.workerPoolMock.Object, - this.logHandlerMock.Object); + _consumerMock = new Mock(MockBehavior.Strict); + _workerPoolMock = new Mock(MockBehavior.Strict); + _logHandlerMock = new Mock(MockBehavior.Strict); + + _target = new WorkerPoolFeeder( + _consumerMock.Object, + _workerPoolMock.Object, + _logHandlerMock.Object); } [TestMethod] @@ -35,7 +35,7 @@ public void Setup() public async Task StopAsync_WithoutStarting_Return() { // Act - await this.target.StopAsync(); + await _target.StopAsync(); } [TestMethod] @@ -45,7 +45,7 @@ public async Task StopAsync_WaitingOnConsumeWithCancellation_MustStop() // Arrange var ready = new ManualResetEvent(false); - this.consumerMock + _consumerMock .Setup(x => x.ConsumeAsync(It.IsAny())) .Returns( async (CancellationToken ct) => @@ -56,12 +56,12 @@ public async Task StopAsync_WaitingOnConsumeWithCancellation_MustStop() }); // Act - this.target.Start(); + _target.Start(); ready.WaitOne(); - await this.target.StopAsync(); + await _target.StopAsync(); // Assert - this.consumerMock.Verify(x => x.ConsumeAsync(It.IsAny()), Times.Once); + _consumerMock.Verify(x => x.ConsumeAsync(It.IsAny()), Times.Once); } [TestMethod] @@ -72,11 +72,11 
@@ public async Task StopAsync_WaitingOnQueuingWithCancellation_MustStop() var consumeResult = new ConsumeResult(); var ready = new ManualResetEvent(false); - this.consumerMock + _consumerMock .Setup(x => x.ConsumeAsync(It.IsAny())) .ReturnsAsync(consumeResult); - this.workerPoolMock + _workerPoolMock .Setup(x => x.EnqueueAsync(consumeResult, It.IsAny())) .Returns((ConsumeResult _, CancellationToken ct) => { @@ -85,13 +85,13 @@ public async Task StopAsync_WaitingOnQueuingWithCancellation_MustStop() }); // Act - this.target.Start(); + _target.Start(); ready.WaitOne(); - await this.target.StopAsync(); + await _target.StopAsync(); // Assert - this.consumerMock.VerifyAll(); - this.workerPoolMock.VerifyAll(); + _consumerMock.VerifyAll(); + _workerPoolMock.VerifyAll(); } [TestMethod] @@ -103,12 +103,12 @@ public async Task ConsumeAsyncThrows_LogAndCallConsumeAsyncAgain() var exception = new Exception(); var ready = new ManualResetEvent(false); - this.consumerMock + _consumerMock .SetupSequence(x => x.ConsumeAsync(It.IsAny())) .Throws(exception) .ReturnsAsync(consumeResult); - this.workerPoolMock + _workerPoolMock .Setup(x => x.EnqueueAsync(consumeResult, It.IsAny())) .Returns((ConsumeResult _, CancellationToken ct) => { @@ -116,18 +116,18 @@ public async Task ConsumeAsyncThrows_LogAndCallConsumeAsyncAgain() return Task.Delay(Timeout.Infinite, ct); }); - this.logHandlerMock + _logHandlerMock .Setup(x => x.Error(It.IsAny(), exception, It.IsAny())); // Act - this.target.Start(); + _target.Start(); ready.WaitOne(); - await this.target.StopAsync(); + await _target.StopAsync(); // Assert - this.consumerMock.VerifyAll(); - this.workerPoolMock.VerifyAll(); - this.logHandlerMock.VerifyAll(); + _consumerMock.VerifyAll(); + _workerPoolMock.VerifyAll(); + _logHandlerMock.VerifyAll(); } [TestMethod] @@ -139,12 +139,12 @@ public async Task EnqueueAsyncThrows_LogAndCallConsumeAsyncAgain() var exception = new Exception(); var ready = new ManualResetEvent(false); - this.consumerMock + 
_consumerMock .Setup(x => x.ConsumeAsync(It.IsAny())) .ReturnsAsync(consumeResult); var hasThrown = false; - this.workerPoolMock + _workerPoolMock .Setup(x => x.EnqueueAsync(consumeResult, It.IsAny())) .Returns((ConsumeResult _, CancellationToken ct) => { @@ -159,18 +159,18 @@ public async Task EnqueueAsyncThrows_LogAndCallConsumeAsyncAgain() return Task.Delay(Timeout.Infinite, ct); }); - this.logHandlerMock + _logHandlerMock .Setup(x => x.Error(It.IsAny(), exception, It.IsAny())); // Act - this.target.Start(); + _target.Start(); ready.WaitOne(); - await this.target.StopAsync(); + await _target.StopAsync(); // Assert - this.consumerMock.VerifyAll(); - this.workerPoolMock.VerifyAll(); - this.logHandlerMock.VerifyAll(); + _consumerMock.VerifyAll(); + _workerPoolMock.VerifyAll(); + _logHandlerMock.VerifyAll(); } } } diff --git a/src/KafkaFlow.UnitTests/DummyObjects/DummyProtobufObject.cs b/tests/KafkaFlow.UnitTests/DummyObjects/DummyProtobufObject.cs similarity index 100% rename from src/KafkaFlow.UnitTests/DummyObjects/DummyProtobufObject.cs rename to tests/KafkaFlow.UnitTests/DummyObjects/DummyProtobufObject.cs diff --git a/src/KafkaFlow.UnitTests/DummyObjects/DummyProtobufObject.proto b/tests/KafkaFlow.UnitTests/DummyObjects/DummyProtobufObject.proto similarity index 100% rename from src/KafkaFlow.UnitTests/DummyObjects/DummyProtobufObject.proto rename to tests/KafkaFlow.UnitTests/DummyObjects/DummyProtobufObject.proto diff --git a/src/KafkaFlow.UnitTests/EventTests.cs b/tests/KafkaFlow.UnitTests/EventTests.cs similarity index 73% rename from src/KafkaFlow.UnitTests/EventTests.cs rename to tests/KafkaFlow.UnitTests/EventTests.cs index 53aabdee0..8ac80c800 100644 --- a/src/KafkaFlow.UnitTests/EventTests.cs +++ b/tests/KafkaFlow.UnitTests/EventTests.cs @@ -1,23 +1,23 @@ -namespace KafkaFlow.UnitTests +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Moq; + 
+namespace KafkaFlow.UnitTests { - using System; - using System.Collections.Generic; - using System.Linq; - using System.Threading.Tasks; - using Microsoft.VisualStudio.TestTools.UnitTesting; - using Moq; - [TestClass] public class EventTests { - private readonly Event target; - private readonly Event typedTarget; + private readonly Event _target; + private readonly Event _typedTarget; public EventTests() { var log = new Mock(); - this.target = new Event(log.Object); - this.typedTarget = new Event(log.Object); + _target = new Event(log.Object); + _typedTarget = new Event(log.Object); } [TestMethod] @@ -26,14 +26,14 @@ public async Task FireAsync_EventSubscribed_CallDelegateWithSuccess() // Arrange var numberOfCalls = 0; - this.target.Subscribe(() => + _target.Subscribe(() => { numberOfCalls++; return Task.CompletedTask; }); // Act - await this.target.FireAsync(); + await _target.FireAsync(); // Assert Assert.AreEqual(1, numberOfCalls); @@ -45,20 +45,20 @@ public async Task FireAsync_EventWithMultipleObservers_CallAllDelegatesWithSucce // Arrange var numberOfCalls = 0; - this.target.Subscribe(() => + _target.Subscribe(() => { numberOfCalls++; return Task.CompletedTask; }); - this.target.Subscribe(() => + _target.Subscribe(() => { numberOfCalls++; return Task.CompletedTask; }); // Act - await this.target.FireAsync(); + await _target.FireAsync(); // Assert Assert.AreEqual(2, numberOfCalls); @@ -70,16 +70,16 @@ public async Task FireAsync_EventWithMultipleObserversAndErrors_CallAllDelegates // Arrange var numberOfCalls = 0; - this.target.Subscribe(() => throw new NotImplementedException()); + _target.Subscribe(() => throw new NotImplementedException()); - this.target.Subscribe(() => + _target.Subscribe(() => { numberOfCalls++; return Task.CompletedTask; }); // Act - await this.target.FireAsync(); + await _target.FireAsync(); // Assert Assert.AreEqual(1, numberOfCalls); @@ -92,14 +92,14 @@ public async Task FireAsync_EventSubscribedWithArgument_CallDelegateWithSuccess( 
var expectedArgument = Guid.NewGuid().ToString(); var receivedArgument = string.Empty; - this.typedTarget.Subscribe(arg => + _typedTarget.Subscribe(arg => { receivedArgument = arg; return Task.CompletedTask; }); // Act - await this.typedTarget.FireAsync(expectedArgument); + await _typedTarget.FireAsync(expectedArgument); // Assert Assert.AreEqual(expectedArgument, receivedArgument); @@ -112,20 +112,20 @@ public async Task FireAsync_EventWithMultipleObserversAndArgument_CallAllDelegat var expectedArgument = Guid.NewGuid().ToString(); var receivedArguments = new List(); - this.typedTarget.Subscribe(arg => + _typedTarget.Subscribe(arg => { receivedArguments.Add(arg); return Task.CompletedTask; }); - this.typedTarget.Subscribe(arg => + _typedTarget.Subscribe(arg => { receivedArguments.Add(arg); return Task.CompletedTask; }); // Act - await this.typedTarget.FireAsync(expectedArgument); + await _typedTarget.FireAsync(expectedArgument); // Assert Assert.AreEqual(2, receivedArguments.Count); @@ -138,16 +138,16 @@ public async Task FireAsync_TypedEventWithMultipleObserversAndErrors_CallAllDele // Arrange var numberOfCalls = 0; - this.typedTarget.Subscribe(_ => throw new NotImplementedException()); + _typedTarget.Subscribe(_ => throw new NotImplementedException()); - this.typedTarget.Subscribe(_ => + _typedTarget.Subscribe(_ => { numberOfCalls++; return Task.CompletedTask; }); // Act - await this.typedTarget.FireAsync(Guid.NewGuid().ToString()); + await _typedTarget.FireAsync(Guid.NewGuid().ToString()); // Assert Assert.AreEqual(1, numberOfCalls); @@ -166,11 +166,11 @@ public async Task FireAsync_DuplicatedEventHandler_CallHandlerOnce() return Task.CompletedTask; }; - this.typedTarget.Subscribe(handler); - this.typedTarget.Subscribe(handler); + _typedTarget.Subscribe(handler); + _typedTarget.Subscribe(handler); // Act - await this.typedTarget.FireAsync(expectedArgument); + await _typedTarget.FireAsync(expectedArgument); // Assert Assert.AreEqual(1, receivedArguments.Count); 
@@ -190,12 +190,12 @@ public async Task FireAsync_UnsubscribeEventHandler_DoesNotCallHandler() return Task.CompletedTask; }; - var subscription = this.typedTarget.Subscribe(handler); + var subscription = _typedTarget.Subscribe(handler); subscription.Cancel(); // Act - await this.typedTarget.FireAsync(expectedArgument); + await _typedTarget.FireAsync(expectedArgument); // Assert Assert.AreEqual(0, receivedArguments.Count); diff --git a/src/KafkaFlow.UnitTests/ExtensionHelpers.cs b/tests/KafkaFlow.UnitTests/ExtensionHelpers.cs similarity index 88% rename from src/KafkaFlow.UnitTests/ExtensionHelpers.cs rename to tests/KafkaFlow.UnitTests/ExtensionHelpers.cs index 50c81f1e2..f072e393f 100644 --- a/src/KafkaFlow.UnitTests/ExtensionHelpers.cs +++ b/tests/KafkaFlow.UnitTests/ExtensionHelpers.cs @@ -1,8 +1,8 @@ +using System; +using System.Threading.Tasks; + namespace KafkaFlow.UnitTests { - using System; - using System.Threading.Tasks; - public static class ExtensionHelpers { public static TaskCompletionSource WithTimeout(this TaskCompletionSource taskCompletionSource, int milliseconds) diff --git a/src/KafkaFlow.UnitTests/KafkaFlow.UnitTests.csproj b/tests/KafkaFlow.UnitTests/KafkaFlow.UnitTests.csproj similarity index 55% rename from src/KafkaFlow.UnitTests/KafkaFlow.UnitTests.csproj rename to tests/KafkaFlow.UnitTests/KafkaFlow.UnitTests.csproj index 681188711..4819c54ce 100644 --- a/src/KafkaFlow.UnitTests/KafkaFlow.UnitTests.csproj +++ b/tests/KafkaFlow.UnitTests/KafkaFlow.UnitTests.csproj @@ -29,15 +29,15 @@ - - - - - - - - - + + + + + + + + + diff --git a/src/KafkaFlow.UnitTests/LogHandlers/MicrosoftLogHandlerTests.cs b/tests/KafkaFlow.UnitTests/LogHandlers/MicrosoftLogHandlerTests.cs similarity index 79% rename from src/KafkaFlow.UnitTests/LogHandlers/MicrosoftLogHandlerTests.cs rename to tests/KafkaFlow.UnitTests/LogHandlers/MicrosoftLogHandlerTests.cs index 9f06cb861..d8978d771 100644 --- a/src/KafkaFlow.UnitTests/LogHandlers/MicrosoftLogHandlerTests.cs +++ 
b/tests/KafkaFlow.UnitTests/LogHandlers/MicrosoftLogHandlerTests.cs @@ -1,9 +1,9 @@ -namespace KafkaFlow.UnitTests.LogHandlers -{ - using Microsoft.Extensions.Logging; - using Microsoft.VisualStudio.TestTools.UnitTesting; - using Moq; +using Microsoft.Extensions.Logging; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Moq; +namespace KafkaFlow.UnitTests.LogHandlers +{ [TestClass] public class MicrosoftLogHandlerTests { diff --git a/src/KafkaFlow.UnitTests/MemoryTelemetryStorageTests.cs b/tests/KafkaFlow.UnitTests/MemoryTelemetryStorageTests.cs similarity index 71% rename from src/KafkaFlow.UnitTests/MemoryTelemetryStorageTests.cs rename to tests/KafkaFlow.UnitTests/MemoryTelemetryStorageTests.cs index bb7bf0f66..1d19ab2bc 100644 --- a/src/KafkaFlow.UnitTests/MemoryTelemetryStorageTests.cs +++ b/tests/KafkaFlow.UnitTests/MemoryTelemetryStorageTests.cs @@ -1,40 +1,40 @@ +using System; +using System.Linq; +using FluentAssertions; +using KafkaFlow.Admin; +using KafkaFlow.Admin.Messages; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Moq; + namespace KafkaFlow.UnitTests { - using System; - using System.Linq; - using FluentAssertions; - using KafkaFlow.Admin; - using KafkaFlow.Admin.Messages; - using Microsoft.VisualStudio.TestTools.UnitTesting; - using Moq; - [TestClass] public class MemoryTelemetryStorageTests { - private Mock dateTimeProviderMock; + private Mock _dateTimeProviderMock; - private MemoryTelemetryStorage target; + private MemoryTelemetryStorage _target; [TestInitialize] public void Setup() { - this.dateTimeProviderMock = new(); + _dateTimeProviderMock = new(); - this.dateTimeProviderMock + _dateTimeProviderMock .SetupGet(x => x.MinValue) .Returns(DateTime.MinValue); - this.target = new( + _target = new( TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(1), - this.dateTimeProviderMock.Object); + _dateTimeProviderMock.Object); } [TestMethod] public void Get_NoItems_ReturnsEmpty() { // Act - var metrics = this.target.Get(); + var metrics 
= _target.Get(); // Assert metrics.Should().BeEmpty(); @@ -46,15 +46,15 @@ public void Put_OneItem_ReturnsOneItem() // Arrange var now = DateTime.Now; - this.dateTimeProviderMock + _dateTimeProviderMock .SetupGet(x => x.UtcNow) .Returns(now); // Act - this.target.Put(new ConsumerTelemetryMetric { SentAt = now }); + _target.Put(new ConsumerTelemetryMetric { SentAt = now }); // Assert - this.target.Get().Should().HaveCount(1); + _target.Get().Should().HaveCount(1); } [TestMethod] @@ -63,7 +63,7 @@ public void PutTwoItems_SameInstanceGroupConsumer_ReplaceOlder() // Arrange var now = DateTime.Now; - this.dateTimeProviderMock + _dateTimeProviderMock .SetupGet(x => x.UtcNow) .Returns(now); @@ -84,12 +84,12 @@ public void PutTwoItems_SameInstanceGroupConsumer_ReplaceOlder() }; // Act - this.target.Put(metric1); - this.target.Put(metric2); + _target.Put(metric1); + _target.Put(metric2); // Assert - this.target.Get().Should().HaveCount(1); - this.target.Get().First().Should().Be(metric2); + _target.Get().Should().HaveCount(1); + _target.Get().First().Should().Be(metric2); } [TestMethod] @@ -98,7 +98,7 @@ public void PutTwoItems_DifferentInstanceGroupConsumer_ReturnsTwo() // Arrange var now = DateTime.Now; - this.dateTimeProviderMock + _dateTimeProviderMock .SetupGet(x => x.UtcNow) .Returns(now); @@ -119,11 +119,11 @@ public void PutTwoItems_DifferentInstanceGroupConsumer_ReturnsTwo() }; // Act - this.target.Put(metric1); - this.target.Put(metric2); + _target.Put(metric1); + _target.Put(metric2); // Assert - this.target.Get().Should().HaveCount(2); + _target.Get().Should().HaveCount(2); } [TestMethod] @@ -132,7 +132,7 @@ public void PutTwoItems_ExpiryOne_ReturnsOne() // Arrange var now = new DateTime(2000, 01, 01); - this.dateTimeProviderMock + _dateTimeProviderMock .SetupGet(x => x.UtcNow) .Returns(now); @@ -144,7 +144,7 @@ public void PutTwoItems_ExpiryOne_ReturnsOne() SentAt = now, }; - this.target.Put(metric1); + _target.Put(metric1); var metric2 = new 
ConsumerTelemetryMetric { @@ -154,16 +154,16 @@ public void PutTwoItems_ExpiryOne_ReturnsOne() SentAt = now.AddSeconds(5), }; - this.dateTimeProviderMock + _dateTimeProviderMock .SetupGet(x => x.UtcNow) .Returns(now.AddSeconds(2)); // Act - this.target.Put(metric2); + _target.Put(metric2); // Assert - this.target.Get().Should().HaveCount(1); - this.target.Get().First().Should().Be(metric2); + _target.Get().Should().HaveCount(1); + _target.Get().First().Should().Be(metric2); } } } diff --git a/src/KafkaFlow.UnitTests/MessageHeadersTests.cs b/tests/KafkaFlow.UnitTests/MessageHeadersTests.cs similarity index 74% rename from src/KafkaFlow.UnitTests/MessageHeadersTests.cs rename to tests/KafkaFlow.UnitTests/MessageHeadersTests.cs index ed89bddbd..be00d053a 100644 --- a/src/KafkaFlow.UnitTests/MessageHeadersTests.cs +++ b/tests/KafkaFlow.UnitTests/MessageHeadersTests.cs @@ -1,16 +1,16 @@ +using System.Text; +using Confluent.Kafka; +using FluentAssertions; +using Microsoft.VisualStudio.TestTools.UnitTesting; + namespace KafkaFlow.UnitTests { - using System.Text; - using Confluent.Kafka; - using FluentAssertions; - using Microsoft.VisualStudio.TestTools.UnitTesting; - [TestClass] public class MessageHeadersTests { private const string Key = "abc"; private const string StrValue = "123"; - private readonly byte[] value = Encoding.UTF8.GetBytes("123"); + private readonly byte[] _value = Encoding.UTF8.GetBytes("123"); [TestMethod] public void Add_WithKeyNotNull_ShouldAddValueCorrectly() @@ -19,17 +19,17 @@ public void Add_WithKeyNotNull_ShouldAddValueCorrectly() var header = new MessageHeaders(); // Act - header.Add(Key, this.value); + header.Add(Key, _value); // Assert - header[Key].Should().BeEquivalentTo(this.value); + header[Key].Should().BeEquivalentTo(_value); } [TestMethod] public void GetKafkaHeader_ShouldReturnKafkaHeaders() { // Arrange - var kafkaHeaders = new Headers { { Key, this.value } }; + var kafkaHeaders = new Headers { { Key, _value } }; var messageHeaders = 
new MessageHeaders(kafkaHeaders); // Act diff --git a/src/KafkaFlow.UnitTests/Middlewares/Serialization/ConfluentAvroTypeNameResolverTests.cs b/tests/KafkaFlow.UnitTests/Middlewares/Serialization/ConfluentAvroTypeNameResolverTests.cs similarity index 52% rename from src/KafkaFlow.UnitTests/Middlewares/Serialization/ConfluentAvroTypeNameResolverTests.cs rename to tests/KafkaFlow.UnitTests/Middlewares/Serialization/ConfluentAvroTypeNameResolverTests.cs index 5fdeb0921..b67801108 100644 --- a/src/KafkaFlow.UnitTests/Middlewares/Serialization/ConfluentAvroTypeNameResolverTests.cs +++ b/tests/KafkaFlow.UnitTests/Middlewares/Serialization/ConfluentAvroTypeNameResolverTests.cs @@ -1,23 +1,23 @@ +using System.Threading.Tasks; +using Confluent.SchemaRegistry; +using FluentAssertions; +using KafkaFlow.Serializer.SchemaRegistry; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Moq; +using Newtonsoft.Json; + namespace KafkaFlow.UnitTests.Middlewares.Serialization { - using System.Threading.Tasks; - using Confluent.SchemaRegistry; - using FluentAssertions; - using KafkaFlow.Serializer.SchemaRegistry; - using Microsoft.VisualStudio.TestTools.UnitTesting; - using Moq; - using Newtonsoft.Json; - [TestClass] public class ConfluentAvroTypeNameResolverTests { - private readonly Mock schemaRegistryClient; - private readonly ConfluentAvroTypeNameResolver schemaRegistryTypeResolver; + private readonly Mock _schemaRegistryClient; + private readonly ConfluentAvroTypeNameResolver _schemaRegistryTypeResolver; public ConfluentAvroTypeNameResolverTests() { - this.schemaRegistryClient = new Mock(); - this.schemaRegistryTypeResolver = new ConfluentAvroTypeNameResolver(this.schemaRegistryClient.Object); + _schemaRegistryClient = new Mock(); + _schemaRegistryTypeResolver = new ConfluentAvroTypeNameResolver(_schemaRegistryClient.Object); } [TestMethod] @@ -34,11 +34,11 @@ public async Task ResolveAsync_ValidSchemaObject_ReturnsAvroFieldsInCorrectForma var schema = new 
Schema(JsonConvert.SerializeObject(schemaObj), SchemaType.Avro); - this.schemaRegistryClient.Setup(client => client.GetSchemaAsync(schemaId, null)) + _schemaRegistryClient.Setup(client => client.GetSchemaAsync(schemaId, null)) .ReturnsAsync(schema); // Act - var avroFields = await this.schemaRegistryTypeResolver.ResolveAsync(schemaId); + var avroFields = await _schemaRegistryTypeResolver.ResolveAsync(schemaId); // Assert avroFields.Should().Be($"{schemaObj.NameSpace}.{schemaObj.Name}"); diff --git a/src/KafkaFlow.UnitTests/Middlewares/Serialization/ConfluentProtobufTypeNameResolverTests.cs b/tests/KafkaFlow.UnitTests/Middlewares/Serialization/ConfluentProtobufTypeNameResolverTests.cs similarity index 50% rename from src/KafkaFlow.UnitTests/Middlewares/Serialization/ConfluentProtobufTypeNameResolverTests.cs rename to tests/KafkaFlow.UnitTests/Middlewares/Serialization/ConfluentProtobufTypeNameResolverTests.cs index cd6ec854f..4185222eb 100644 --- a/src/KafkaFlow.UnitTests/Middlewares/Serialization/ConfluentProtobufTypeNameResolverTests.cs +++ b/tests/KafkaFlow.UnitTests/Middlewares/Serialization/ConfluentProtobufTypeNameResolverTests.cs @@ -1,26 +1,25 @@ -namespace KafkaFlow.UnitTests.Middlewares.Serialization -{ - using System; - using System.Threading.Tasks; - using Confluent.SchemaRegistry; +using System; +using System.Threading.Tasks; +using Confluent.SchemaRegistry; - using FluentAssertions; +using FluentAssertions; - using Google.Protobuf; - using KafkaFlow.Serializer.SchemaRegistry; - using Microsoft.VisualStudio.TestTools.UnitTesting; - using Moq; +using Google.Protobuf; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Moq; +namespace KafkaFlow.UnitTests.Middlewares.Serialization +{ [TestClass] public class ConfluentProtobufTypeNameResolverTests { - private readonly Mock schemaRegistryClient; - private readonly ConfluentProtobufTypeNameResolver schemaRegistryTypeResolver; + private readonly Mock _schemaRegistryClient; + private readonly 
ConfluentProtobufTypeNameResolver _schemaRegistryTypeResolver; public ConfluentProtobufTypeNameResolverTests() { - this.schemaRegistryClient = new Mock(); - this.schemaRegistryTypeResolver = new ConfluentProtobufTypeNameResolver(this.schemaRegistryClient.Object); + _schemaRegistryClient = new Mock(); + _schemaRegistryTypeResolver = new ConfluentProtobufTypeNameResolver(_schemaRegistryClient.Object); } [TestMethod] @@ -36,11 +35,11 @@ public async Task ResolveAsync_ValidProtobufObject_ReturnsProtoFields() }; var base64Encoded = Convert.ToBase64String(dummyProtobufObj.ToByteArray()); - this.schemaRegistryClient.Setup(client => client.GetSchemaAsync(schemaId, "serialized")) + _schemaRegistryClient.Setup(client => client.GetSchemaAsync(schemaId, "serialized")) .ReturnsAsync(new Schema(base64Encoded, SchemaType.Protobuf)); // Act - var protoFields = await this.schemaRegistryTypeResolver.ResolveAsync(schemaId); + var protoFields = await _schemaRegistryTypeResolver.ResolveAsync(schemaId); // Assert protoFields.Should().NotBeNull(); diff --git a/tests/KafkaFlow.UnitTests/Middlewares/Serialization/SchemaRegistryTypeResolverTests.cs b/tests/KafkaFlow.UnitTests/Middlewares/Serialization/SchemaRegistryTypeResolverTests.cs new file mode 100644 index 000000000..066944415 --- /dev/null +++ b/tests/KafkaFlow.UnitTests/Middlewares/Serialization/SchemaRegistryTypeResolverTests.cs @@ -0,0 +1,47 @@ +using System; +using System.Buffers.Binary; +using System.Threading.Tasks; +using FluentAssertions; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Moq; + +namespace KafkaFlow.UnitTests.Middlewares.Serialization +{ + [TestClass] + public class SchemaRegistryTypeResolverTests + { + private readonly Mock _messageContextMock; + private readonly Mock _schemaRegistryTypeNameResolverMock; + private readonly SchemaRegistryTypeResolver _schemaRegistryTypeResolver; + private readonly byte[] _messageKey = new byte[] { 0x18, 0x19 }; + private readonly byte[] _messageValue = new byte[] { 
0x20, 0x21, 0x22, 0x23, 0x24, 0x25 }; + + public SchemaRegistryTypeResolverTests() + { + _messageContextMock = new Mock(); + _messageContextMock.Setup(context => context.Message).Returns(new Message(_messageKey, _messageValue)); + _schemaRegistryTypeNameResolverMock = new Mock(); + _schemaRegistryTypeResolver = new SchemaRegistryTypeResolver(_schemaRegistryTypeNameResolverMock.Object); + } + + [TestMethod] + public async Task OnConsumeAsync_WhenCalledTwice_TypeIsResolvedOnceThenTypeIsLoadedFromCache() + { + // Arrange + var expectedSchemaId = BinaryPrimitives.ReadInt32BigEndian( + _messageValue.AsSpan().Slice(1, 4)); + + _schemaRegistryTypeNameResolverMock.Setup( + resolver => resolver.ResolveAsync(expectedSchemaId)).ReturnsAsync(typeof(SchemaRegistryTypeResolverTests).FullName); + + // Act + await _schemaRegistryTypeResolver.OnConsumeAsync(_messageContextMock.Object); + var type = await _schemaRegistryTypeResolver.OnConsumeAsync(_messageContextMock.Object); + + // Assert + _schemaRegistryTypeNameResolverMock.Verify(resolver => resolver.ResolveAsync(expectedSchemaId), Times.Once); + var expectedObject = (SchemaRegistryTypeResolverTests)Activator.CreateInstance(type); + expectedObject.Should().NotBeNull(); + } + } +} diff --git a/src/KafkaFlow.UnitTests/OffsetCommitterTests.cs b/tests/KafkaFlow.UnitTests/OffsetCommitterTests.cs similarity index 53% rename from src/KafkaFlow.UnitTests/OffsetCommitterTests.cs rename to tests/KafkaFlow.UnitTests/OffsetCommitterTests.cs index 1df3b351c..5c2a2b090 100644 --- a/src/KafkaFlow.UnitTests/OffsetCommitterTests.cs +++ b/tests/KafkaFlow.UnitTests/OffsetCommitterTests.cs @@ -1,72 +1,71 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using KafkaFlow.Consumers; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Moq; + namespace KafkaFlow.UnitTests { - using System; - using System.Collections.Generic; - using System.Linq; - using System.Threading.Tasks; - using 
Confluent.Kafka; - using KafkaFlow.Consumers; - using Microsoft.VisualStudio.TestTools.UnitTesting; - using Moq; - [TestClass] public class OffsetCommitterTests { private const int TestTimeout = 5000; - private Mock consumerMock; + private Mock _consumerMock; - private TopicPartition topicPartition; + private Confluent.Kafka.TopicPartition _topicPartition; - private OffsetCommitter offsetCommitter; + private OffsetCommitter _offsetCommitter; [TestInitialize] public async Task Setup() { - this.consumerMock = new Mock(); - this.topicPartition = new TopicPartition("topic-A", new Partition(1)); + _consumerMock = new Mock(); + _topicPartition = new Confluent.Kafka.TopicPartition("topic-A", new Confluent.Kafka.Partition(1)); - this.consumerMock + _consumerMock .Setup(c => c.Configuration.AutoCommitInterval) .Returns(TimeSpan.FromMilliseconds(1000)); - this.offsetCommitter = new OffsetCommitter( - this.consumerMock.Object, + _offsetCommitter = new OffsetCommitter( + _consumerMock.Object, Mock.Of(), Mock.Of()); - await this.offsetCommitter.StartAsync(); + await _offsetCommitter.StartAsync(); } [TestCleanup] public async Task Cleanup() { - await this.offsetCommitter.StopAsync(); + await _offsetCommitter.StopAsync(); } [TestMethod] public async Task MarkAsProcessed_ShouldCommit() { // Arrange - var expectedOffsets = new[] { new TopicPartitionOffset(this.topicPartition, new Offset(2)) }; + var expectedOffsets = new[] { new Confluent.Kafka.TopicPartitionOffset(_topicPartition, new Confluent.Kafka.Offset(2)) }; var ready = new TaskCompletionSource().WithTimeout(TestTimeout); - this.consumerMock - .Setup(c => c.Commit(It.Is>(l => l.SequenceEqual(expectedOffsets)))) - .Callback((IEnumerable _) => ready.SetResult()); + _consumerMock + .Setup(c => c.Commit(It.Is>(l => l.SequenceEqual(expectedOffsets)))) + .Callback((IEnumerable _) => ready.SetResult()); // Act - this.offsetCommitter.MarkAsProcessed( + _offsetCommitter.MarkAsProcessed( new KafkaFlow.TopicPartitionOffset( - 
this.topicPartition.Topic, - this.topicPartition.Partition, + _topicPartition.Topic, + _topicPartition.Partition, 1)); await ready.Task; // Assert - this.consumerMock.VerifyAll(); + _consumerMock.VerifyAll(); } [TestMethod] @@ -76,7 +75,7 @@ public async Task PendingOffsetsState_ShouldExecuteHandlers() var ready = new TaskCompletionSource().WithTimeout(TestTimeout); var committer = new OffsetCommitter( - this.consumerMock.Object, + _consumerMock.Object, Mock.Of(), Mock.Of()); @@ -87,8 +86,8 @@ public async Task PendingOffsetsState_ShouldExecuteHandlers() // Act committer.MarkAsProcessed( new KafkaFlow.TopicPartitionOffset( - this.topicPartition.Topic, - this.topicPartition.Partition, + _topicPartition.Topic, + _topicPartition.Partition, 1)); // Assert @@ -102,15 +101,15 @@ public async Task PendingOffsetsState_ShouldExecuteHandlers() public async Task MarkAsProcessed_WithFailure_ShouldRequeueFailedOffsetAndCommit() { // Arrange - var expectedOffsets = new[] { new TopicPartitionOffset(this.topicPartition, new Offset(2)) }; + var expectedOffsets = new[] { new Confluent.Kafka.TopicPartitionOffset(_topicPartition, new Confluent.Kafka.Offset(2)) }; var ready = new TaskCompletionSource().WithTimeout(TestTimeout); var hasThrown = false; - this.consumerMock - .Setup(c => c.Commit(It.Is>(l => l.SequenceEqual(expectedOffsets)))) + _consumerMock + .Setup(c => c.Commit(It.Is>(l => l.SequenceEqual(expectedOffsets)))) .Callback( - (IEnumerable _) => + (IEnumerable _) => { if (!hasThrown) { @@ -122,17 +121,17 @@ public async Task MarkAsProcessed_WithFailure_ShouldRequeueFailedOffsetAndCommit }); // Act - this.offsetCommitter.MarkAsProcessed( + _offsetCommitter.MarkAsProcessed( new KafkaFlow.TopicPartitionOffset( - this.topicPartition.Topic, - this.topicPartition.Partition, + _topicPartition.Topic, + _topicPartition.Partition, 1)); await ready.Task; // Assert - this.consumerMock.Verify( - c => c.Commit(It.Is>(l => l.SequenceEqual(expectedOffsets))), + _consumerMock.Verify( + c => 
c.Commit(It.Is>(l => l.SequenceEqual(expectedOffsets))), Times.Exactly(2)); } } diff --git a/tests/KafkaFlow.UnitTests/OffsetManagerTests.cs b/tests/KafkaFlow.UnitTests/OffsetManagerTests.cs new file mode 100644 index 000000000..0f2a7221a --- /dev/null +++ b/tests/KafkaFlow.UnitTests/OffsetManagerTests.cs @@ -0,0 +1,87 @@ +using System; +using System.Collections.Generic; +using Confluent.Kafka; +using FluentAssertions; +using KafkaFlow.Consumers; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Moq; + +namespace KafkaFlow.UnitTests +{ + [TestClass] + public class OffsetManagerTests + { + private Mock _committerMock; + private OffsetManager _target; + private TopicPartition _topicPartition; + + [TestInitialize] + public void Setup() + { + _committerMock = new Mock(); + _topicPartition = new TopicPartition("topic-A", new Partition(1)); + + _target = new OffsetManager( + _committerMock.Object, + new List { _topicPartition }); + } + + [TestMethod] + public void MarkAsProcessed_WithNotQueuedContext_ShouldThrowInvalidOperation() + { + // Act + Action act = () => _target.MarkAsProcessed(this.MockConsumerContext(1)); + + // Assert + act.Should().Throw(); + } + + [TestMethod] + public void MarkAsProcessed_WithGaps_ShouldStoreOffsetJustOnce() + { + // Arrange + _target.Enqueue(this.MockConsumerContext(1)); + _target.Enqueue(this.MockConsumerContext(2)); + _target.Enqueue(this.MockConsumerContext(3)); + + // Act + _target.MarkAsProcessed(this.MockConsumerContext(3)); + _target.MarkAsProcessed(this.MockConsumerContext(2)); + _target.MarkAsProcessed(this.MockConsumerContext(1)); + + // Assert + _committerMock.Verify( + c => + c.MarkAsProcessed( + It.Is( + p => + p.Partition == _topicPartition.Partition && + p.Offset == 3)), + Times.Once); + } + + private IConsumerContext MockConsumerContext(int offset) + { + var mock = new Mock(); + var tpo = new TopicPartitionOffset(_topicPartition.Topic, _topicPartition.Partition, offset); + + mock + .SetupGet(x => x.Offset) + 
.Returns(tpo.Offset); + + mock + .SetupGet(x => x.Partition) + .Returns(tpo.Partition); + + mock + .SetupGet(x => x.Topic) + .Returns(tpo.Topic); + + mock + .SetupGet(x => x.TopicPartitionOffset) + .Returns(tpo); + + return mock.Object; + } + } +} diff --git a/src/KafkaFlow.UnitTests/PartitionOffsetsTests.cs b/tests/KafkaFlow.UnitTests/PartitionOffsetsTests.cs similarity index 95% rename from src/KafkaFlow.UnitTests/PartitionOffsetsTests.cs rename to tests/KafkaFlow.UnitTests/PartitionOffsetsTests.cs index e3a4b168c..0379e9c88 100644 --- a/src/KafkaFlow.UnitTests/PartitionOffsetsTests.cs +++ b/tests/KafkaFlow.UnitTests/PartitionOffsetsTests.cs @@ -1,13 +1,13 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using FluentAssertions; +using KafkaFlow.Consumers; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Moq; + namespace KafkaFlow.UnitTests { - using System; - using System.Collections.Generic; - using System.Threading.Tasks; - using FluentAssertions; - using KafkaFlow.Consumers; - using Microsoft.VisualStudio.TestTools.UnitTesting; - using Moq; - [TestClass] public class PartitionOffsetsTests { diff --git a/src/KafkaFlow.UnitTests/Serializers/JsonCoreSerializerTests.cs b/tests/KafkaFlow.UnitTests/Serializers/JsonCoreSerializerTests.cs similarity index 69% rename from src/KafkaFlow.UnitTests/Serializers/JsonCoreSerializerTests.cs rename to tests/KafkaFlow.UnitTests/Serializers/JsonCoreSerializerTests.cs index eb17adf0c..25aa6dda7 100644 --- a/src/KafkaFlow.UnitTests/Serializers/JsonCoreSerializerTests.cs +++ b/tests/KafkaFlow.UnitTests/Serializers/JsonCoreSerializerTests.cs @@ -1,18 +1,18 @@ -namespace KafkaFlow.UnitTests.Serializers +using System.IO; +using System.Text.Encodings.Web; +using System.Text.Json; +using System.Threading.Tasks; +using FluentAssertions; +using KafkaFlow.Serializer; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Moq; + +namespace KafkaFlow.UnitTests.Serializers { - using 
System.IO; - using System.Text.Encodings.Web; - using System.Text.Json; - using System.Threading.Tasks; - using FluentAssertions; - using KafkaFlow.Serializer; - using Microsoft.VisualStudio.TestTools.UnitTesting; - using Moq; - [TestClass] public class JsonCoreSerializerTests { - private readonly Mock contextMock = new(); + private readonly Mock _contextMock = new(); [TestMethod] public async Task SerializeAsync_PreventEscapeOfAccentedCharacter_SerializedObjectDoesNotHaveAccentedCharacterEscaped() @@ -26,7 +26,7 @@ public async Task SerializeAsync_PreventEscapeOfAccentedCharacter_SerializedObje var target = new JsonCoreSerializer(writerOptions); // Act - await target.SerializeAsync(message, output, this.contextMock.Object); + await target.SerializeAsync(message, output, _contextMock.Object); // Assert var result = GetStreamText(output); @@ -45,4 +45,4 @@ private class TestMessage public string Text { get; set; } } } -} \ No newline at end of file +} diff --git a/src/KafkaFlow.UnitTests/Serializers/NewtonsoftJsonDeserializerTests.cs b/tests/KafkaFlow.UnitTests/Serializers/NewtonsoftJsonDeserializerTests.cs similarity index 54% rename from src/KafkaFlow.UnitTests/Serializers/NewtonsoftJsonDeserializerTests.cs rename to tests/KafkaFlow.UnitTests/Serializers/NewtonsoftJsonDeserializerTests.cs index 88fa963e8..374b698d8 100644 --- a/src/KafkaFlow.UnitTests/Serializers/NewtonsoftJsonDeserializerTests.cs +++ b/tests/KafkaFlow.UnitTests/Serializers/NewtonsoftJsonDeserializerTests.cs @@ -1,33 +1,33 @@ +using System; +using System.IO; +using System.Text; +using System.Threading.Tasks; +using AutoFixture; +using FluentAssertions; +using KafkaFlow.Serializer; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Moq; +using Newtonsoft.Json; + namespace KafkaFlow.UnitTests.Serializers { - using System; - using System.IO; - using System.Text; - using System.Threading.Tasks; - using AutoFixture; - using FluentAssertions; - using KafkaFlow.Serializer; - using 
Microsoft.VisualStudio.TestTools.UnitTesting; - using Moq; - using Newtonsoft.Json; - [TestClass] public class NewtonsoftJsonDeserializerTests { - private readonly Mock contextMock = new (); - private readonly NewtonsoftJsonDeserializer deserializer = new (); + private readonly Mock _contextMock = new(); + private readonly NewtonsoftJsonDeserializer _deserializer = new(); - private readonly Fixture fixture = new(); + private readonly Fixture _fixture = new(); [TestMethod] public async Task DeserializeAsync_ValidPayload_ObjectGenerated() { // Arrange - var message = this.fixture.Create(); + var message = _fixture.Create(); using var input = new MemoryStream(Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(message))); // Act - var result = await this.deserializer.DeserializeAsync(input, typeof(TestMessage), this.contextMock.Object); + var result = await _deserializer.DeserializeAsync(input, typeof(TestMessage), _contextMock.Object); // Assert result.Should().NotBeNull(); diff --git a/src/KafkaFlow.UnitTests/Serializers/NewtonsoftJsonSerializerTests.cs b/tests/KafkaFlow.UnitTests/Serializers/NewtonsoftJsonSerializerTests.cs similarity index 56% rename from src/KafkaFlow.UnitTests/Serializers/NewtonsoftJsonSerializerTests.cs rename to tests/KafkaFlow.UnitTests/Serializers/NewtonsoftJsonSerializerTests.cs index 93673ed05..89956c1aa 100644 --- a/src/KafkaFlow.UnitTests/Serializers/NewtonsoftJsonSerializerTests.cs +++ b/tests/KafkaFlow.UnitTests/Serializers/NewtonsoftJsonSerializerTests.cs @@ -1,31 +1,31 @@ +using System; +using System.IO; +using System.Threading.Tasks; +using AutoFixture; +using FluentAssertions; +using KafkaFlow.Serializer; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Moq; + namespace KafkaFlow.UnitTests.Serializers { - using System; - using System.IO; - using System.Threading.Tasks; - using AutoFixture; - using FluentAssertions; - using KafkaFlow.Serializer; - using Microsoft.VisualStudio.TestTools.UnitTesting; - using Moq; - 
[TestClass] public class NewtonsoftJsonSerializerTests { - private readonly Mock contextMock = new (); - private readonly NewtonsoftJsonSerializer serializer = new (); + private readonly Mock _contextMock = new(); + private readonly NewtonsoftJsonSerializer _serializer = new(); - private readonly Fixture fixture = new(); + private readonly Fixture _fixture = new(); [TestMethod] public async Task SerializeAsync_ValidPayload_JsonByteArrayGenerated() { // Arrange - var message = this.fixture.Create(); + var message = _fixture.Create(); using var output = new MemoryStream(); // Act - await this.serializer.SerializeAsync(message, output, this.contextMock.Object); + await _serializer.SerializeAsync(message, output, _contextMock.Object); // Assert output.Length.Should().BeGreaterThan(0); diff --git a/src/KafkaFlow.UnitTests/Serializers/SerializerConsumerMiddlewareTests.cs b/tests/KafkaFlow.UnitTests/Serializers/SerializerConsumerMiddlewareTests.cs similarity index 55% rename from src/KafkaFlow.UnitTests/Serializers/SerializerConsumerMiddlewareTests.cs rename to tests/KafkaFlow.UnitTests/Serializers/SerializerConsumerMiddlewareTests.cs index 6c05b0201..6c9f857ca 100644 --- a/src/KafkaFlow.UnitTests/Serializers/SerializerConsumerMiddlewareTests.cs +++ b/tests/KafkaFlow.UnitTests/Serializers/SerializerConsumerMiddlewareTests.cs @@ -1,57 +1,57 @@ +using System; +using System.IO; +using System.Threading.Tasks; +using FluentAssertions; +using KafkaFlow.Middlewares.Serializer; +using KafkaFlow.Middlewares.Serializer.Resolvers; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Moq; + namespace KafkaFlow.UnitTests.Serializers { - using System; - using System.IO; - using System.Threading.Tasks; - using FluentAssertions; - using KafkaFlow.Middlewares.Serializer; - using KafkaFlow.Middlewares.Serializer.Resolvers; - using Microsoft.VisualStudio.TestTools.UnitTesting; - using Moq; - [TestClass] public class SerializerConsumerMiddlewareTests { - private Mock contextMock; - 
private Mock deserializerMock; - private Mock typeResolverMock; + private Mock _contextMock; + private Mock _deserializerMock; + private Mock _typeResolverMock; - private bool nextCalled; + private bool _nextCalled; - private DeserializerConsumerMiddleware target; + private DeserializerConsumerMiddleware _target; [TestInitialize] public void Setup() { - this.contextMock = new Mock(); - this.deserializerMock = new Mock(); - this.typeResolverMock = new Mock(); + _contextMock = new Mock(); + _deserializerMock = new Mock(); + _typeResolverMock = new Mock(); - this.target = new DeserializerConsumerMiddleware( - this.deserializerMock.Object, - this.typeResolverMock.Object); + _target = new DeserializerConsumerMiddleware( + _deserializerMock.Object, + _typeResolverMock.Object); } [TestMethod] public async Task Invoke_NullMessageType_ReturnWithoutCallingNext() { // Arrange - this.contextMock + _contextMock .SetupGet(x => x.Message) .Returns(new Message(new byte[1], new byte[1])); - this.typeResolverMock - .Setup(x => x.OnConsumeAsync(this.contextMock.Object)) + _typeResolverMock + .Setup(x => x.OnConsumeAsync(_contextMock.Object)) .ReturnsAsync((Type)null); // Act - await this.target.Invoke(this.contextMock.Object, _ => this.SetNextCalled()); + await _target.Invoke(_contextMock.Object, _ => this.SetNextCalled()); // Assert - this.nextCalled.Should().BeFalse(); - this.typeResolverMock.VerifyAll(); - this.contextMock.Verify(x => x.SetMessage(It.IsAny(), It.IsAny()), Times.Never); - this.deserializerMock.Verify( + _nextCalled.Should().BeFalse(); + _typeResolverMock.VerifyAll(); + _contextMock.Verify(x => x.SetMessage(It.IsAny(), It.IsAny()), Times.Never); + _deserializerMock.Verify( x => x.DeserializeAsync(It.IsAny(), It.IsAny(), It.IsAny()), Times.Never); } @@ -60,40 +60,40 @@ public async Task Invoke_NullMessageType_ReturnWithoutCallingNext() public async Task Invoke_NullMessage_CallNext() { // Arrange - this.contextMock + _contextMock .SetupGet(x => x.Message) .Returns(new 
Message(null, null)); // Act - await this.target.Invoke(this.contextMock.Object, _ => this.SetNextCalled()); + await _target.Invoke(_contextMock.Object, _ => this.SetNextCalled()); // Assert - this.nextCalled.Should().BeTrue(); - this.deserializerMock.Verify( + _nextCalled.Should().BeTrue(); + _deserializerMock.Verify( x => x.DeserializeAsync(It.IsAny(), It.IsAny(), It.IsAny()), Times.Never); - this.typeResolverMock.Verify(x => x.OnConsumeAsync(It.IsAny()), Times.Never); + _typeResolverMock.Verify(x => x.OnConsumeAsync(It.IsAny()), Times.Never); } [TestMethod] public void Invoke_NotByteArrayMessage_ThrowsInvalidOperationException() { // Arrange - this.contextMock + _contextMock .SetupGet(x => x.Message) .Returns(new Message(null, new TestMessage())); // Act - Func act = () => this.target.Invoke(this.contextMock.Object, _ => this.SetNextCalled()); + Func act = () => _target.Invoke(_contextMock.Object, _ => this.SetNextCalled()); // Assert act.Should().Throw(); - this.nextCalled.Should().BeFalse(); - this.contextMock.Verify(x => x.SetMessage(It.IsAny(), It.IsAny()), Times.Never); - this.deserializerMock.Verify( + _nextCalled.Should().BeFalse(); + _contextMock.Verify(x => x.SetMessage(It.IsAny(), It.IsAny()), Times.Never); + _deserializerMock.Verify( x => x.DeserializeAsync(It.IsAny(), It.IsAny(), It.IsAny()), Times.Never); - this.typeResolverMock.Verify(x => x.OnConsumeAsync(It.IsAny()), Times.Never); + _typeResolverMock.Verify(x => x.OnConsumeAsync(It.IsAny()), Times.Never); } [TestMethod] @@ -112,29 +112,29 @@ public async Task Invoke_ValidMessage_Deserialize() var transformedContextMock = new Mock(); IMessageContext resultContext = null; - this.contextMock + _contextMock .SetupGet(x => x.Message) .Returns(rawMessage); - this.contextMock + _contextMock .Setup(x => x.SetMessage(rawKey, deserializedMessage)) .Returns(transformedContextMock.Object); - this.typeResolverMock - .Setup(x => x.OnConsumeAsync(this.contextMock.Object)) + _typeResolverMock + .Setup(x => 
x.OnConsumeAsync(_contextMock.Object)) .ReturnsAsync(messageType); - this.deserializerMock + _deserializerMock .Setup(x => x.DeserializeAsync(It.IsAny(), messageType, It.IsAny())) .ReturnsAsync(deserializedMessage); - this.contextMock + _contextMock .SetupGet(x => x.ConsumerContext) .Returns(consumerContext.Object); // Act - await this.target.Invoke( - this.contextMock.Object, + await _target.Invoke( + _contextMock.Object, ctx => { resultContext = ctx; @@ -144,14 +144,14 @@ await this.target.Invoke( // Assert resultContext.Should().NotBeNull(); resultContext.Should().Be(transformedContextMock.Object); - this.contextMock.VerifyAll(); - this.deserializerMock.VerifyAll(); - this.typeResolverMock.VerifyAll(); + _contextMock.VerifyAll(); + _deserializerMock.VerifyAll(); + _typeResolverMock.VerifyAll(); } private Task SetNextCalled() { - this.nextCalled = true; + _nextCalled = true; return Task.CompletedTask; } diff --git a/src/KafkaFlow.UnitTests/Serializers/SerializerProducerMiddlewareTests.cs b/tests/KafkaFlow.UnitTests/Serializers/SerializerProducerMiddlewareTests.cs similarity index 53% rename from src/KafkaFlow.UnitTests/Serializers/SerializerProducerMiddlewareTests.cs rename to tests/KafkaFlow.UnitTests/Serializers/SerializerProducerMiddlewareTests.cs index c828b4504..061dccb9c 100644 --- a/src/KafkaFlow.UnitTests/Serializers/SerializerProducerMiddlewareTests.cs +++ b/tests/KafkaFlow.UnitTests/Serializers/SerializerProducerMiddlewareTests.cs @@ -1,58 +1,58 @@ +using System.IO; +using System.Linq; +using System.Threading.Tasks; +using AutoFixture; +using FluentAssertions; +using KafkaFlow.Middlewares.Serializer; +using KafkaFlow.Middlewares.Serializer.Resolvers; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Moq; + namespace KafkaFlow.UnitTests.Serializers { - using System.IO; - using System.Linq; - using System.Threading.Tasks; - using AutoFixture; - using FluentAssertions; - using KafkaFlow.Middlewares.Serializer; - using 
KafkaFlow.Middlewares.Serializer.Resolvers; - using Microsoft.VisualStudio.TestTools.UnitTesting; - using Moq; - [TestClass] public class SerializerProducerMiddlewareTests { - private readonly Fixture fixture = new(); + private readonly Fixture _fixture = new(); - private Mock contextMock; - private Mock serializerMock; - private Mock typeResolverMock; + private Mock _contextMock; + private Mock _serializerMock; + private Mock _typeResolverMock; - private SerializerProducerMiddleware target; + private SerializerProducerMiddleware _target; [TestInitialize] public void Setup() { - this.contextMock = new Mock(); - this.serializerMock = new Mock(); - this.typeResolverMock = new Mock(); + _contextMock = new Mock(); + _serializerMock = new Mock(); + _typeResolverMock = new Mock(); - this.target = new SerializerProducerMiddleware( - this.serializerMock.Object, - this.typeResolverMock.Object); + _target = new SerializerProducerMiddleware( + _serializerMock.Object, + _typeResolverMock.Object); } [TestMethod] public async Task Invoke_ValidMessage_Serialize() { // Arrange - var rawMessage = this.fixture.Create(); + var rawMessage = _fixture.Create(); var key = new object(); var deserializedMessage = new Message(key, new TestMessage()); IMessageContext resultContext = null; var producerContext = new Mock(); - producerContext.SetupGet(x=>x.Topic).Returns("test-topic"); + producerContext.SetupGet(x => x.Topic).Returns("test-topic"); var transformedContextMock = new Mock(); - this.contextMock + _contextMock .SetupGet(x => x.Message) .Returns(deserializedMessage); - this.typeResolverMock.Setup(x => x.OnProduceAsync(this.contextMock.Object)); + _typeResolverMock.Setup(x => x.OnProduceAsync(_contextMock.Object)); - this.serializerMock + _serializerMock .Setup( x => x.SerializeAsync( deserializedMessage.Value, @@ -60,17 +60,17 @@ public async Task Invoke_ValidMessage_Serialize() It.IsAny())) .Callback((object _, Stream stream, ISerializerContext _) => stream.WriteAsync(rawMessage)); 
- this.contextMock + _contextMock .Setup(x => x.SetMessage(key, It.Is(value => value.SequenceEqual(rawMessage)))) .Returns(transformedContextMock.Object); - this.contextMock + _contextMock .SetupGet(x => x.ProducerContext) .Returns(producerContext.Object); // Act - await this.target.Invoke( - this.contextMock.Object, + await _target.Invoke( + _contextMock.Object, ctx => { resultContext = ctx; @@ -80,9 +80,9 @@ await this.target.Invoke( // Assert resultContext.Should().NotBeNull(); resultContext.Should().Be(transformedContextMock.Object); - this.contextMock.VerifyAll(); - this.serializerMock.VerifyAll(); - this.typeResolverMock.VerifyAll(); + _contextMock.VerifyAll(); + _serializerMock.VerifyAll(); + _typeResolverMock.VerifyAll(); } private class TestMessage diff --git a/src/KafkaFlow.UnitTests/TypedHandler/HandlerTypeMappingTests.cs b/tests/KafkaFlow.UnitTests/TypedHandler/HandlerTypeMappingTests.cs similarity index 52% rename from src/KafkaFlow.UnitTests/TypedHandler/HandlerTypeMappingTests.cs rename to tests/KafkaFlow.UnitTests/TypedHandler/HandlerTypeMappingTests.cs index 90856c544..d5e34af58 100644 --- a/src/KafkaFlow.UnitTests/TypedHandler/HandlerTypeMappingTests.cs +++ b/tests/KafkaFlow.UnitTests/TypedHandler/HandlerTypeMappingTests.cs @@ -1,30 +1,30 @@ +using FluentAssertions; +using KafkaFlow.Middlewares.TypedHandler; +using Microsoft.VisualStudio.TestTools.UnitTesting; + namespace KafkaFlow.UnitTests.TypedHandler { - using FluentAssertions; - using KafkaFlow.Middlewares.TypedHandler; - using Microsoft.VisualStudio.TestTools.UnitTesting; - [TestClass] public class HandlerTypeMappingTests { - private HandlerTypeMapping target; + private HandlerTypeMapping _target; [TestInitialize] public void Setup() { - this.target = new HandlerTypeMapping(); + _target = new HandlerTypeMapping(); } [TestMethod] public void AddSeveralMappings_GetHandlersTypesReturnsListOfHandlers() { // Act - this.target.AddMapping(typeof(int), typeof(string)); - 
this.target.AddMapping(typeof(int), typeof(double)); - this.target.AddMapping(typeof(int), typeof(bool)); + _target.AddMapping(typeof(int), typeof(string)); + _target.AddMapping(typeof(int), typeof(double)); + _target.AddMapping(typeof(int), typeof(bool)); // Assert - this.target.GetHandlersTypes(typeof(int)) + _target.GetHandlersTypes(typeof(int)) .Should() .BeEquivalentTo( typeof(string),