diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 0000000..bacb3eb --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,207 @@ +name: Tests + +on: + push: + branches: + - develop + +jobs: + unit-tests: + name: Unit Tests (.NET ${{ matrix.dotnet-version }}) + runs-on: ubuntu-latest + strategy: + matrix: + dotnet-version: [8.0.x] + + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + + - name: Setup .NET + uses: actions/setup-dotnet@v3 + with: + dotnet-version: ${{ matrix.dotnet-version }} + + - name: Restore Dependencies + working-directory: src/OpenDDD.Tests + run: dotnet restore + + - name: Build Project + working-directory: src/OpenDDD.Tests + run: dotnet build --no-restore --configuration Release /p:TreatWarningsAsErrors=false + + - name: Run Unit Tests + working-directory: src/OpenDDD.Tests + run: dotnet test --no-build --configuration Release --filter "Category=Unit" --logger "trx;LogFileName=TestResults.trx" --results-directory TestResults + + - name: Upload Unit Test Results + if: always() + uses: actions/upload-artifact@v4 + with: + name: UnitTestResults + path: src/OpenDDD.Tests/TestResults/TestResults.trx + + - name: Publish Unit Test Report + if: always() + uses: dorny/test-reporter@v1 + with: + name: Unit Tests Report + path: src/OpenDDD.Tests/TestResults/TestResults.trx + reporter: dotnet-trx + + integration-tests: + name: Integration Tests (.NET ${{ matrix.dotnet-version }}) + runs-on: ubuntu-latest + strategy: + matrix: + dotnet-version: [8.0.x] + + services: + zookeeper: + image: confluentinc/cp-zookeeper:latest + env: + ZOOKEEPER_CLIENT_PORT: 2181 + ports: + - 2181:2181 + + kafka: + image: confluentinc/cp-kafka:latest + env: + KAFKA_BROKER_ID: 1 + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://localhost:9092 + KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 + ports: + - 9092:9092 + options: --network-alias kafka + + rabbitmq: + image: rabbitmq:3-management + env: + RABBITMQ_DEFAULT_USER: guest + RABBITMQ_DEFAULT_PASS: guest + ports: + - 5672:5672 + - 15672:15672 + options: --health-cmd "rabbitmq-diagnostics check_port_connectivity" --health-interval 10s --health-timeout 5s --health-retries 5 + + postgres: + image: postgres:latest + env: + POSTGRES_DB: testdb + POSTGRES_USER: testuser + POSTGRES_PASSWORD: testpassword + ports: + - 5432:5432 + options: --health-cmd "pg_isready -U testuser" --health-interval 10s --health-timeout 5s --health-retries 5 + + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + + - name: Setup .NET + uses: actions/setup-dotnet@v3 + with: + dotnet-version: ${{ matrix.dotnet-version }} + + - name: Restore Dependencies + working-directory: src/OpenDDD.Tests + run: dotnet restore + + - name: Build Project + working-directory: src/OpenDDD.Tests + run: dotnet build --no-restore --configuration Release + + - name: Log in to Azure + uses: azure/login@v1 + with: + creds: ${{ secrets.AZURE_CREDENTIALS }} + + - name: Create Azure Service Bus namespace for Testing + run: | + NAMESPACE_NAME="test-servicebus-${{ github.run_id }}-${{ github.run_attempt }}" + echo "NAMESPACE_NAME=${NAMESPACE_NAME}" >> $GITHUB_ENV + az servicebus namespace create \ + --resource-group opendddnet \ + --name $NAMESPACE_NAME \ + --location northeurope + CONNECTION_STRING=$(az servicebus namespace authorization-rule keys list \ + --resource-group opendddnet \ + --namespace-name $NAMESPACE_NAME \ + --name RootManageSharedAccessKey \ + --query primaryConnectionString \ + -o 
tsv) + echo "AZURE_SERVICE_BUS_CONNECTION_STRING=${CONNECTION_STRING}" >> $GITHUB_ENV + + - name: Wait for RabbitMQ to be Ready + run: | + for i in {1..10}; do + if curl -s -f http://localhost:15672 || nc -z localhost 5672; then + echo "RabbitMQ is up!" + exit 0 + fi + echo "Waiting for RabbitMQ..." + sleep 5 + done + echo "RabbitMQ did not start in time!" && exit 1 + + - name: Wait for Kafka to be Ready + run: | + for i in {1..10}; do + if nc -z localhost 9092; then + echo "Kafka is up!" + exit 0 + fi + echo "Waiting for Kafka..." + sleep 5 + done + echo "Kafka did not start in time!" && exit 1 + + - name: Wait for PostgreSQL to be Ready + run: | + for i in {1..10}; do + if PGPASSWORD=testpassword psql -h localhost -U testuser -d testdb -c "SELECT 1" &> /dev/null; then + echo "PostgreSQL is up!" + exit 0 + fi + echo "Waiting for PostgreSQL..." + sleep 5 + done + echo "PostgreSQL did not start in time!" && exit 1 + + - name: Run Integration Tests + working-directory: src/OpenDDD.Tests + env: + KAFKA_BOOTSTRAP_SERVERS: localhost:9092 + RABBITMQ_HOST: localhost + RABBITMQ_PORT: 5672 + RABBITMQ_USERNAME: guest + RABBITMQ_PASSWORD: guest + AZURE_SERVICE_BUS_CONNECTION_STRING: ${{ env.AZURE_SERVICE_BUS_CONNECTION_STRING }} + POSTGRES_TEST_CONNECTION_STRING: "Host=localhost;Port=5432;Database=testdb;Username=testuser;Password=testpassword" + run: dotnet test --no-build --configuration Release --filter "Category=Integration" --logger "trx;LogFileName=TestResults.trx" --results-directory TestResults + + - name: Delete Azure Service Bus namespace After Tests + if: always() + run: | + if [[ -n "${NAMESPACE_NAME}" ]]; then + echo "Deleting namespace: $NAMESPACE_NAME" + az servicebus namespace delete --resource-group opendddnet --name $NAMESPACE_NAME + else + echo "No namespace found, skipping deletion." + fi + + - name: Upload Integration Test Results + if: always() + uses: actions/upload-artifact@v4 + with: + name: IntegrationTestResults + path: src/OpenDDD.Tests/TestResults/TestResults.trx + + - name: Publish Integration Test Report + if: always() + uses: dorny/test-reporter@v1 + with: + name: Integration Tests Report + path: src/OpenDDD.Tests/TestResults/TestResults.trx + reporter: dotnet-trx diff --git a/Makefile b/Makefile index 437654a..d1e99dd 100644 --- a/Makefile +++ b/Makefile @@ -18,12 +18,13 @@ export $(shell sed 's/=.*//' env.make) HOME := $(shell echo ~) PWD := $(shell pwd) NETWORK := openddd-net -BUILD_VERSION := 3.0.0-beta.1 +BUILD_VERSION := 3.0.0-beta.2 NUGET_NAME := OpenDDD.NET ROOT_NAMESPACE := OpenDDD SRC_DIR := $(PWD)/src +TESTS_DIR := $(SRC_DIR)/OpenDDD.Tests DOCS_DIR := $(PWD)/docs SAMPLES_DIR := $(PWD)/samples NAMESPACE_DIR := $(SRC_DIR)/$(ROOT_NAMESPACE) @@ -32,16 +33,7 @@ FEED_DIR := $(HOME)/Projects/LocalFeed USER_NUGET_CONFIG_DIR=$(HOME)/.config/NuGet/NuGet.Config SPHINXDOC_IMG := openddd.net/sphinxdoc -DOCSAUTOBUILD_HOST_NAME := docsautobuild-openddd.net -DOCSAUTOBUILD_CONTAINER_NAME := docsautobuild-openddd.net -DOCSAUTOBUILD_PORT := 10001 - -TEMPLATES_DIR := $(PWD)/templates -TEMPLATES_CSPROJ := $(TEMPLATES_DIR)/templatepack.csproj -TEMPLATES_OUT := $(TEMPLATES_DIR)/bin/templates -TEMPLATES_NAME := OpenDDD.NET-Templates -TEMPLATES_VERSION := 3.0.0-alpha.1 -TEMPLATES_NUPKG := $(TEMPLATES_OUT)/$(TEMPLATES_NAME).$(TEMPLATES_VERSION).nupkg +NETWORK := opendddnet BLUE := $(shell tput -Txterm setaf 4) GREEN := $(shell tput -Txterm setaf 2) @@ -103,18 +95,54 @@ help:: ##@Other Show this help. 
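Note: the `--filter "Category=Unit"` / `--filter "Category=Integration"` arguments used by the workflow above and by the Makefile test targets below select tests by xUnit trait; the `UnitTests` and `IntegrationTests` base classes added later in this diff carry the corresponding `[Trait("Category", ...)]` attributes. A minimal sketch of a test the unit job would pick up (class name, namespace and assertion are illustrative only):

```csharp
using System;
using Xunit;
using OpenDDD.Tests.Base;

namespace OpenDDD.Tests.Unit.Domain.Model
{
    // UnitTests (added later in this diff) carries [Trait("Category", "Unit")],
    // so this class is selected by `dotnet test --filter "Category=Unit"` and
    // excluded by the integration job's `Category=Integration` filter.
    public class GuidRoundTripTests : UnitTests
    {
        [Fact]
        public void ParsingItsOwnString_ReturnsTheSameGuid()
        {
            var id = Guid.NewGuid();
            Assert.Equal(id, Guid.Parse(id.ToString()));
        }
    }
}
```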
########################################################################## # TEST ########################################################################## + .PHONY: test -test: ##@Test run all unit tests - ENV_FILE=env.test dotnet test $(TESTS_DIR) +test: ##@Test Run all tests (unit & integration) + cd $(TESTS_DIR) && dotnet test --configuration Release + +.PHONY: test-unit +test-unit: ##@Test Run only unit tests + cd $(TESTS_DIR) && dotnet test --configuration Release --filter "Category=Unit" + +.PHONY: test-integration +test-integration: ##@Test Run only integration tests + cd $(TESTS_DIR) && dotnet test --configuration Release --filter "Category=Integration" + +.PHONY: test-ef-migrations-create-postgres +test-ef-migrations-create-postgres: ##@Test Create PostgreSQL migrations for PostgresTestDbContext + cd $(TESTS_DIR) && \ + dotnet ef migrations add Postgres_InitialCreate \ + --context PostgresTestDbContext \ + --output-dir Integration/Infrastructure/Persistence/EfCore/Migrations/Postgres \ + --project $(TESTS_DIR) \ + -- --database-provider postgres + +.PHONY: test-ef-migrations-create-sqlite +test-ef-migrations-create-sqlite: ##@Test Create SQLite migrations for SqliteTestDbContext + cd $(TESTS_DIR) && \ + dotnet ef migrations add Sqlite_InitialCreate \ + --context SqliteTestDbContext \ + --output-dir Integration/Infrastructure/Persistence/EfCore/Migrations/Sqlite \ + --project $(TESTS_DIR) \ + -- --database-provider sqlite ########################################################################## # BUILD ########################################################################## + .PHONY: clean clean: ##@Build clean the solution find . $(SRC_DIR) -iname "bin" | xargs rm -rf find . $(SRC_DIR) -iname "obj" | xargs rm -rf +.PHONY: clear-nuget-caches +clear-nuget-caches: ##@Build clean all nuget caches + nuget locals all -clear + +.PHONY: restore +restore: ##@Build restore the solution + cd src && dotnet restore + .PHONY: build build: ##@Build build the solution cd $(SRC_DIR) && \ @@ -142,6 +170,10 @@ push: ##@Build Push the nuget to the global feed # DOCS ########################################################################## +DOCSAUTOBUILD_HOST_NAME := docsautobuild-openddd.net +DOCSAUTOBUILD_CONTAINER_NAME := docsautobuild-openddd.net +DOCSAUTOBUILD_PORT := 10001 + .PHONY: sphinx-buildimage sphinx-buildimage: ##@Docs Build the custom sphinxdoc image docker build -t $(SPHINXDOC_IMG) $(DOCS_DIR) @@ -175,21 +207,17 @@ sphinx-autobuild: ##@Docs Activate autobuild of docs sphinx-opendocs: ##@Docs Open the docs in browser open $(DOCS_DIR)/_build/html/index.html -########################################################################## -# .NET -########################################################################## -.PHONY: restore -restore: ##@Build restore the solution - cd src && dotnet restore - -.PHONY: clear-nuget-caches -clear-nuget-caches: ##@Build clean all nuget caches - nuget locals all -clear - ########################################################################## # TEMPLATES ########################################################################## +TEMPLATES_DIR := $(PWD)/templates +TEMPLATES_CSPROJ := $(TEMPLATES_DIR)/templatepack.csproj +TEMPLATES_OUT := $(TEMPLATES_DIR)/bin/templates +TEMPLATES_NAME := OpenDDD.NET-Templates +TEMPLATES_VERSION := 3.0.0-beta.2 +TEMPLATES_NUPKG := $(TEMPLATES_OUT)/$(TEMPLATES_NAME).$(TEMPLATES_VERSION).nupkg + .PHONY: templates-install templates-install: ##@Template Install the OpenDDD.NET project template locally dotnet new 
install $(TEMPLATES_NUPKG) @@ -203,8 +231,275 @@ templates-pack: ##@Template Pack the OpenDDD.NET project template into a NuGet dotnet pack $(TEMPLATES_CSPROJ) -o $(TEMPLATES_OUT) .PHONY: templates-publish -templates-publish: template-pack ##@Template Publish the template to NuGet +templates-publish: ##@Template Publish the template to NuGet dotnet nuget push $(TEMPLATES_NUPKG) --api-key $(NUGET_API_KEY) --source https://api.nuget.org/v3/index.json .PHONY: templates-rebuild templates-rebuild: templates-uninstall templates-pack templates-install ##@Template Rebuild and reinstall the template + +########################################################################## +# ACT +########################################################################## + +ACT_IMAGE := ghcr.io/catthehacker/ubuntu:act-latest + +.PHONY: act-install +act-install: ##@Act Install act CLI + brew install act + +.PHONY: act-clean +act-clean: ##@Act Stop and remove all act containers + @docker stop $$(docker ps -q --filter ancestor=$(ACT_IMAGE)) 2>/dev/null || true + @docker rm $$(docker ps -aq --filter ancestor=$(ACT_IMAGE)) 2>/dev/null || true + @echo "✅ All act containers stopped and removed." + +.PHONY: act-list +act-list: ##@Act List available workflows + act -l + +.PHONY: act-test +act-test: ##@Act Run all tests locally using act + act -P ubuntu-latest=$(ACT_IMAGE) --reuse + +.PHONY: act-test-dotnet +act-test-dotnet: ##@Act Run tests for a specific .NET version (usage: make act-test-dotnet DOTNET_VERSION=8.0.x) + @if [ -z "$(DOTNET_VERSION)" ]; then \ + echo "Error: Specify .NET version using DOTNET_VERSION="; \ + exit 1; \ + fi + act -P ubuntu-latest=$(ACT_IMAGE) -s matrix.dotnet-version=$(DOTNET_VERSION) --reuse + +.PHONY: act-unit-tests +act-unit-tests: ##@Act Run only unit tests + act -P ubuntu-latest=$(ACT_IMAGE) -j unit-tests --reuse + +.PHONY: act-integration-tests +act-integration-tests: ##@Act Run only integration tests + act -P ubuntu-latest=$(ACT_IMAGE) -j integration-tests --reuse -s AZURE_SERVICE_BUS_CONNECTION_STRING=$(AZURE_SERVICE_BUS_CONNECTION_STRING) + +.PHONY: act-debug +act-debug: ##@Act Run act with verbose logging + act -P ubuntu-latest=$(ACT_IMAGE) --verbose --reuse + +########################################################################## +# AZURE +########################################################################## + +.PHONY: azure-create-resource-group +azure-create-resource-group: ##@Azure Create the Azure Resource Group + az group create --name $(AZURE_RESOURCE_GROUP) --location $(AZURE_REGION) + +.PHONY: azure-create-service-principal +azure-create-service-principal: ##@Azure Create an Azure Service Principal for GitHub Actions + @echo "Creating Azure Service Principal..." + az ad sp create-for-rbac \ + --name "github-actions-opendddnet" \ + --role "Contributor" \ + --scopes /subscriptions/$(AZURE_SUBSCRIPTION_ID)/resourceGroups/$(AZURE_RESOURCE_GROUP) \ + --sdk-auth + @echo "✅ Copy the output above and add it as 'AZURE_CREDENTIALS' in GitHub Secrets." 
+ +.PHONY: azure-create-servicebus-namespace +azure-create-servicebus-namespace: ##@Azure Create the Azure Service Bus namespace + az servicebus namespace create --name $(AZURE_SERVICEBUS_NAMESPACE) --resource-group $(AZURE_RESOURCE_GROUP) --location $(AZURE_REGION) --sku Standard + +.PHONY: azure-get-servicebus-connection +azure-get-servicebus-connection: ##@Azure Get the Service Bus connection string + az servicebus namespace authorization-rule keys list \ + --resource-group $(AZURE_RESOURCE_GROUP) \ + --namespace-name $(AZURE_SERVICEBUS_NAMESPACE) \ + --name RootManageSharedAccessKey \ + --query primaryConnectionString \ + --output tsv + +.PHONY: azure-delete-servicebus-namespace +azure-delete-servicebus-namespace: ##@Azure Delete the Azure Service Bus namespace + az servicebus namespace delete --resource-group $(AZURE_RESOURCE_GROUP) --name $(AZURE_SERVICEBUS_NAMESPACE) + +.PHONY: azure-list-servicebus-namespaces +azure-list-servicebus-namespaces: ##@Azure List all Azure Service Bus namespaces in the resource group + az servicebus namespace list --resource-group $(AZURE_RESOURCE_GROUP) --output table + +.PHONY: azure-list-servicebus-topics +azure-list-servicebus-topics: ##@Azure List all topics in the Azure Service Bus namespace + az servicebus topic list --resource-group $(AZURE_RESOURCE_GROUP) --namespace-name $(AZURE_SERVICEBUS_NAMESPACE) --output table + +.PHONY: azure-list-servicebus-subscriptions +azure-list-servicebus-subscriptions: ##@Azure List all subscriptions for a given topic (usage: make azure-list-servicebus-subscriptions TOPIC_NAME=) + @if [ -z "$(TOPIC_NAME)" ]; then \ + echo "Error: Specify the topic name using TOPIC_NAME="; \ + exit 1; \ + fi + az servicebus topic subscription list --resource-group $(AZURE_RESOURCE_GROUP) --namespace-name $(AZURE_SERVICEBUS_NAMESPACE) --topic-name $(TOPIC_NAME) --output table + +.PHONY: azure-list-servicebus-queues +azure-list-servicebus-queues: ##@Azure List all queues in the Azure Service Bus namespace + az servicebus queue list --resource-group $(AZURE_RESOURCE_GROUP) --namespace-name $(AZURE_SERVICEBUS_NAMESPACE) --output table + +.PHONY: azure-list-servicebus-authorization-rules +azure-list-servicebus-authorization-rules: ##@Azure List all authorization rules for the Azure Service Bus namespace + az servicebus namespace authorization-rule list --resource-group $(AZURE_RESOURCE_GROUP) --namespace-name $(AZURE_SERVICEBUS_NAMESPACE) --output table + +########################################################################## +# RABBITMQ +########################################################################## + +RABBITMQ_PORT := 5672 + +.PHONY: rabbitmq-start +rabbitmq-start: ##@@RabbitMQ Start a RabbitMQ container + docker run --rm -d --name rabbitmq --hostname rabbitmq \ + -e RABBITMQ_DEFAULT_USER=$(RABBITMQ_DEFAULT_USER) \ + -e RABBITMQ_DEFAULT_PASS=$(RABBITMQ_DEFAULT_PASS) \ + -p 5672:$(RABBITMQ_PORT) -p 15672:15672 rabbitmq:management + @echo "RabbitMQ started. Management UI available at http://localhost:15672" + +.PHONY: rabbitmq-stop +rabbitmq-stop: ##@RabbitMQ Stop the RabbitMQ container + docker stop rabbitmq + @echo "RabbitMQ stopped." + +.PHONY: rabbitmq-status +rabbitmq-status: ##@RabbitMQ Check RabbitMQ container status + docker ps | grep rabbitmq || echo "RabbitMQ is not running." 
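Note: the integration-tests job above passes the broker location to the tests through `RABBITMQ_HOST`, `RABBITMQ_PORT`, `RABBITMQ_USERNAME` and `RABBITMQ_PASSWORD`. A minimal sketch of consuming those variables from test code, assuming the RabbitMQ.Client 6.x API (the package and this snippet are not part of the diff):

```csharp
using System;
using RabbitMQ.Client;

// Build a connection from the same environment variables the workflow exports
// to the integration-tests job, falling back to the local defaults used by
// the rabbitmq-start target.
var factory = new ConnectionFactory
{
    HostName = Environment.GetEnvironmentVariable("RABBITMQ_HOST") ?? "localhost",
    Port = int.Parse(Environment.GetEnvironmentVariable("RABBITMQ_PORT") ?? "5672"),
    UserName = Environment.GetEnvironmentVariable("RABBITMQ_USERNAME") ?? "guest",
    Password = Environment.GetEnvironmentVariable("RABBITMQ_PASSWORD") ?? "guest"
};

using var connection = factory.CreateConnection();
Console.WriteLine($"Connected to RabbitMQ: {connection.IsOpen}");
```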
+ +.PHONY: rabbitmq-get-connection +rabbitmq-get-connection: ##@RabbitMQ Get the RabbitMQ connection string + @echo "amqp://$(RABBITMQ_DEFAULT_USER):$(RABBITMQ_DEFAULT_PASS)@localhost:$(RABBITMQ_PORT)/" + +.PHONY: rabbitmq-logs +rabbitmq-logs: ##@RabbitMQ Show RabbitMQ logs + docker logs -f rabbitmq + +########################################################################## +# KAFKA +########################################################################## + +ZOOKEEPER_CONTAINER := opendddnet-zookeeper + +KAFKA_NETWORK := $(NETWORK) +KAFKA_CONTAINER := opendddnet-kafka +KAFKA_BROKER := localhost:9092 +KAFKA_ZOOKEEPER := localhost:2181 + +.PHONY: kafka-start +kafka-start: ##@Kafka Start Kafka and Zookeeper using Docker + @docker network inspect $(KAFKA_NETWORK) >/dev/null 2>&1 || docker network create $(KAFKA_NETWORK) + @docker run -d --rm --name $(ZOOKEEPER_CONTAINER) --network $(KAFKA_NETWORK) -p 2181:2181 \ + wurstmeister/zookeeper:latest + @docker run -d --rm --name $(KAFKA_CONTAINER) --network $(KAFKA_NETWORK) -p 9092:9092 \ + -e KAFKA_BROKER_ID=1 \ + -e KAFKA_ZOOKEEPER_CONNECT=$(ZOOKEEPER_CONTAINER):2181 \ + -e KAFKA_LISTENERS=PLAINTEXT://0.0.0.0:9092 \ + -e KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://localhost:9092 \ + -e KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1 \ + wurstmeister/kafka:latest + +.PHONY: kafka-stop +kafka-stop: ##@Kafka Stop Kafka and Zookeeper + @docker stop $(KAFKA_CONTAINER) || true + @docker stop $(ZOOKEEPER_CONTAINER) || true + +.PHONY: kafka-logs +kafka-logs: ##@Kafka Show Kafka logs + @docker logs -f $(KAFKA_CONTAINER) + +.PHONY: kafka-shell +kafka-shell: ##@@Kafka Open a shell inside the Kafka container + docker exec -it $(KAFKA_CONTAINER) /bin/sh + +.PHONY: kafka-create-topic +kafka-create-topic: ##@Kafka Create a Kafka topic (uses NAME) +ifndef NAME + $(error Topic name not specified. Usage: make kafka-create-topic NAME=) +endif + @docker exec -it $(KAFKA_CONTAINER) kafka-topics.sh --create --topic $(NAME) --bootstrap-server $(KAFKA_BROKER) --replication-factor 1 --partitions 1 + +.PHONY: kafka-list-brokers +kafka-list-brokers: ##@Kafka List Kafka broker configurations + @docker exec -it $(KAFKA_CONTAINER) /opt/kafka/bin/kafka-configs.sh --bootstrap-server localhost:9092 --describe --entity-type brokers + +.PHONY: kafka-list-topics +kafka-list-topics: ##@Kafka List all Kafka topics + @docker exec -it $(KAFKA_CONTAINER) kafka-topics.sh --list --bootstrap-server $(KAFKA_BROKER) + +.PHONY: kafka-broker-status +kafka-broker-status: ##@Kafka Show Kafka broker status + @docker exec -it $(KAFKA_CONTAINER) /opt/kafka/bin/kafka-broker-api-versions.sh --bootstrap-server localhost:9092 + +.PHONY: kafka-list-consumer-groups +kafka-list-consumer-groups: ##@Kafka List active Kafka consumer groups + @docker exec -it $(KAFKA_CONTAINER) /opt/kafka/bin/kafka-consumer-groups.sh --bootstrap-server localhost:9092 --list + +.PHONY: kafka-describe-consumer-groups +kafka-describe-consumer-groups: ##@Kafka List detailed info for all consumer groups + @docker exec -it $(KAFKA_CONTAINER) kafka-consumer-groups.sh --bootstrap-server $(KAFKA_BROKER) --all-groups --describe + +.PHONY: kafka-describe-consumer-group +kafka-describe-consumer-group: ##@Kafka Describe Kafka consumer group (requires GROUP=) +ifndef GROUP + $(error Consumer group not specified. 
Usage: make kafka-describe-consumer-group GROUP=) +endif + @docker exec -it $(KAFKA_CONTAINER) kafka-consumer-groups.sh --bootstrap-server $(KAFKA_BROKER) --group $(GROUP) --describe + +.PHONY: kafka-check-lag +kafka-check-lag: ##@Kafka Check Kafka consumer lag for a group (requires GROUP=) +ifndef GROUP + $(error Consumer group not specified. Usage: make kafka-check-lag GROUP=) +endif + @docker exec -it $(KAFKA_CONTAINER) kafka-consumer-groups.sh --bootstrap-server $(KAFKA_BROKER) --group $(GROUP) --describe | grep -E 'TOPIC|LAG' + +.PHONY: kafka-consume +kafka-consume: ##@Kafka Consume messages from a Kafka topic (uses NAME) +ifndef NAME + $(error Topic name not specified. Usage: make kafka-consume NAME=) +endif + @docker exec -it $(KAFKA_CONTAINER) kafka-console-consumer.sh --bootstrap-server $(KAFKA_BROKER) --topic $(NAME) --from-beginning + +.PHONY: kafka-produce +kafka-produce: ##@Kafka Produce messages to a Kafka topic (uses NAME) +ifndef NAME + $(error Topic name not specified. Usage: make kafka-produce NAME=) +endif + @docker exec -it $(KAFKA_CONTAINER) kafka-console-producer.sh --broker-list $(KAFKA_BROKER) --topic $(NAME) + +########################################################################## +# POSTGRES +########################################################################## + +POSTGRES_CONTAINER := opendddnet-testspostgres +POSTGRES_PORT := 5432 +POSTGRES_DB := testdb + +.PHONY: postgres-start +postgres-start: ##@Postgres Start a PostgreSQL container + @docker run --rm -d --name $(POSTGRES_CONTAINER) --network $(NETWORK) \ + -e POSTGRES_DB=$(POSTGRES_DB) \ + -e POSTGRES_USER=$(POSTGRES_USER) \ + -e POSTGRES_PASSWORD=$(POSTGRES_PASSWORD) \ + -p $(POSTGRES_PORT):5432 postgres:latest + @echo "PostgreSQL started on port $(POSTGRES_PORT)." + +.PHONY: postgres-stop +postgres-stop: ##@Postgres Stop the PostgreSQL container + @docker stop $(POSTGRES_CONTAINER) || true + @echo "PostgreSQL stopped." + +.PHONY: postgres-clean +postgres-clean: ##@Postgres Remove PostgreSQL container and its volumes + @docker rm -f $(POSTGRES_CONTAINER) || true + @echo "PostgreSQL container removed." 
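Note: the workflow exports `POSTGRES_TEST_CONNECTION_STRING` with the same database, user and password these Postgres targets use. A minimal readiness check from test code, assuming the Npgsql package that the EF Core PostgreSQL provider builds on (illustrative only):

```csharp
using System;
using Npgsql;

// Open a connection using the same POSTGRES_TEST_CONNECTION_STRING the
// workflow passes to the integration tests, falling back to the values
// used by postgres-start.
var connectionString = Environment.GetEnvironmentVariable("POSTGRES_TEST_CONNECTION_STRING")
    ?? "Host=localhost;Port=5432;Database=testdb;Username=testuser;Password=testpassword";

await using var connection = new NpgsqlConnection(connectionString);
await connection.OpenAsync();

await using var command = new NpgsqlCommand("SELECT 1", connection);
var result = await command.ExecuteScalarAsync();
Console.WriteLine($"PostgreSQL reachable, SELECT 1 returned {result}");
```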
+ +.PHONY: postgres-logs +postgres-logs: ##@Postgres Show PostgreSQL logs + @docker logs -f $(POSTGRES_CONTAINER) + +.PHONY: postgres-shell +postgres-shell: ##@Postgres Open a shell inside the PostgreSQL container + docker exec -it $(POSTGRES_CONTAINER) psql -U $(POSTGRES_USER) -d $(POSTGRES_DB) + +.PHONY: postgres-connection-strings +postgres-connection-strings: ##@Postgres Display the connection strings for PostgreSQL + @echo "PostgreSQL Connection String (Key-Value/DSN): Host=localhost;Port=$(POSTGRES_PORT);Database=$(POSTGRES_DB);Username=$(POSTGRES_USER);Password=$(POSTGRES_PASSWORD)" + @echo "PostgreSQL Connection String (URI): postgresql://$(POSTGRES_USER):$(POSTGRES_PASSWORD)@localhost:$(POSTGRES_PORT)/$(POSTGRES_DB)" + @echo "PostgreSQL Connection String (JDBC): jdbc:postgresql://localhost:$(POSTGRES_PORT)/$(POSTGRES_DB)?user=$(POSTGRES_USER)&password=$(POSTGRES_PASSWORD)" diff --git a/README.md b/README.md index 435eb94..4d4db18 100644 --- a/README.md +++ b/README.md @@ -1,9 +1,8 @@ # OpenDDD.NET -[![License](https://img.shields.io/badge/License-GPLv3-blue.svg)](https://www.gnu.org/licenses/gpl-3.0.html) -[![NuGet](https://img.shields.io/nuget/v/OpenDDD.NET.svg)](https://www.nuget.org/packages/OpenDDD.NET/) +[![License](https://img.shields.io/badge/License-GPLv3-blue.svg)](https://www.gnu.org/licenses/gpl-3.0.html) [![NuGet](https://img.shields.io/nuget/v/OpenDDD.NET.svg)](https://www.nuget.org/packages/OpenDDD.NET/) [![Tests](https://github.com/runemalm/OpenDDD.NET/actions/workflows/tests.yml/badge.svg?branch=develop)](https://github.com/runemalm/OpenDDD.NET/actions/workflows/tests.yml) -OpenDDD.NET is an open-source framework for domain-driven design (DDD) development using C# and .NET. It provides a set of powerful tools and abstractions to help developers build scalable, maintainable, and testable applications following the principles of DDD. +OpenDDD.NET is an open-source framework for domain-driven design (DDD) development using C# and ASP.NET Core. It provides a set of powerful tools and abstractions to help developers build scalable, maintainable, and testable applications following the principles of DDD. > **Note:** OpenDDD.NET is currently in a beta state as part of new major version 3. Use with caution in production environments. @@ -59,11 +58,11 @@ To get started with OpenDDD.NET, follow these simple steps: { options.UseInMemoryDatabase() .UseInMemoryMessaging() - .SetEventListenerGroup("Default") - .SetEventTopicTemplates( + .SetEventTopics( "Bookstore.Domain.{EventName}", "Bookstore.Interchange.{EventName}" ) + .SetEventListenerGroup("Default") .EnableAutoRegistration(); }); @@ -92,6 +91,11 @@ Explore the project in the repository: [Bookstore Sample Project](https://github ## Release History +**3.0.0-beta.2 (2025-03-13)** + +- **Integration Test Coverage**: Added full integration tests for repositories and messaging providers. +- **Reliability Improvements**: Fixed issues discovered through test coverage. + **3.0.0-beta.1 (2025-02-17)** - **Beta Release**: OpenDDD.NET has moved from alpha to `beta`, indicating improved stability. diff --git a/docs/building-blocks.rst b/docs/building-blocks.rst index c2f0c5e..67564cb 100644 --- a/docs/building-blocks.rst +++ b/docs/building-blocks.rst @@ -184,12 +184,10 @@ Repositories are **auto-registered** with `IRepository`. 
If **Example: Default Auto-Registered Repositories** - `IRepository` → `PostgresOpenDddRepository` -- `IRepository` → `EfCoreRepository` **Example: Custom Auto-Registered Repositories** - `ICustomerRepository` → `PostgresOpenDddCustomerRepository` -- `ICustomerRepository` → `EfCoreCustomerRepository` **NOTE:** If you have more than one implementation of a repository the framework won't know which of them to auto-register. In this case you need to delete one of the implementations or disable auto-registration and register the implementation you want manually. diff --git a/docs/conf.py b/docs/conf.py index 755ca56..e6d0c84 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -14,7 +14,7 @@ version = "3.0" # The full version, including alpha/beta/rc tags -release = '3.0.0-beta.1' +release = '3.0.0-beta.2' # -- General configuration --------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration diff --git a/docs/configuration.rst b/docs/configuration.rst index 8144cde..2a54e7a 100644 --- a/docs/configuration.rst +++ b/docs/configuration.rst @@ -30,8 +30,8 @@ An example configuration in `appsettings.json`: "DatabaseProvider": "InMemory", "MessagingProvider": "InMemory", "Events": { - "DomainEventTopicTemplate": "Bookstore.Domain.{EventName}", - "IntegrationEventTopicTemplate": "Bookstore.Interchange.{EventName}", + "DomainEventTopic": "Bookstore.Domain.{EventName}", + "IntegrationEventTopic": "Bookstore.Interchange.{EventName}", "ListenerGroup": "Default" }, "SQLite": { @@ -49,10 +49,12 @@ An example configuration in `appsettings.json`: "Port": 5672, "Username": "guest", "Password": "guest", - "VirtualHost": "/" + "VirtualHost": "/", + "AutoCreateTopics": true }, "Kafka": { - "BootstrapServers": "localhost:9092" + "BootstrapServers": "localhost:9092", + "AutoCreateTopics": true }, "AutoRegister": { "Actions": true, @@ -79,7 +81,7 @@ Instead of using `appsettings.json`, OpenDDD.NET can be configured **dynamically { options.UseInMemoryDatabase() .UseInMemoryMessaging() - .SetEventTopicTemplates( + .SetEventTopics( "Bookstore.Domain.{EventName}", "Bookstore.Interchange.{EventName}" ) @@ -140,14 +142,18 @@ OpenDDD.NET supports multiple messaging providers: port: 5672, username: "guest", password: "guest", - virtualHost: "/" + virtualHost: "/", + autoCreateTopics: true ); **Kafka**: .. code-block:: csharp - options.UseKafka("localhost:9092"); + options.UseKafka( + "localhost:9092", + autoCreateTopics: true + ); **Azure Service Bus**: @@ -168,7 +174,7 @@ Event settings define how domain and integration events are published: .. code-block:: csharp - options.SetEventTopicTemplates( + options.SetEventTopics( "Bookstore.Domain.{EventName}", "Bookstore.Interchange.{EventName}" ) diff --git a/docs/index.rst b/docs/index.rst index 8c6d64a..775c802 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -5,7 +5,7 @@ OpenDDD.NET =========== -OpenDDD.NET is an open-source framework for domain-driven design (DDD) development using C# and .NET. It provides a set of powerful tools and abstractions to help developers build scalable, maintainable, and testable applications following the principles of DDD. +OpenDDD.NET is an open-source framework for domain-driven design (DDD) development using C# and ASP.NET Core. It provides a set of powerful tools and abstractions to help developers build scalable, maintainable, and testable applications following the principles of DDD. 
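Note: the building-blocks change above keeps the rule that the framework cannot auto-register a repository interface that has two implementations. A minimal sketch of registering the chosen implementation manually through the extra-services callback shown in the user guide, using the `ICustomerRepository` / `PostgresOpenDddCustomerRepository` names from the docs example (their namespaces here are hypothetical):

.. code-block:: csharp

    using OpenDDD.API.Extensions;
    using YourProjectName.Domain.Model;                 // ICustomerRepository (docs example)
    using YourProjectName.Infrastructure.Repositories;  // PostgresOpenDddCustomerRepository (hypothetical)

    var builder = WebApplication.CreateBuilder(args);

    builder.Services.AddOpenDDD(builder.Configuration,
        options =>
        {
            options.UseInMemoryDatabase()
                .UseInMemoryMessaging()
                .SetEventTopics(
                    "YourProjectName.Domain.{EventName}",
                    "YourProjectName.Interchange.{EventName}"
                )
                .SetEventListenerGroup("Default");
        },
        services =>
        {
            // Two implementations of ICustomerRepository exist, so pick one
            // explicitly instead of relying on auto-registration.
            services.AddScoped<ICustomerRepository, PostgresOpenDddCustomerRepository>();
        }
    );

    var app = builder.Build();
    app.UseOpenDDD();
    app.Run();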
Purpose ------- diff --git a/docs/releases.rst b/docs/releases.rst index a6c86f5..2a79989 100644 --- a/docs/releases.rst +++ b/docs/releases.rst @@ -6,6 +6,11 @@ Version History ############### +**3.0.0-beta.2 (2025-03-13)** + +- **Integration Test Coverage**: Added full integration tests for repositories and messaging providers. +- **Fix Issues**: Fixed issues discovered through test coverage. + **3.0.0-beta.1 (2025-02-17)** - **Beta Release**: OpenDDD.NET has moved from alpha to **beta**, indicating improved stability. diff --git a/docs/userguide.rst b/docs/userguide.rst index 5c90370..99c9979 100644 --- a/docs/userguide.rst +++ b/docs/userguide.rst @@ -79,9 +79,6 @@ Add OpenDDD.NET services and middleware to your application in the `Program.cs` .. code-block:: csharp using OpenDDD.API.Extensions; - using YourProjectName.Domain.Model.Ports; - using YourProjectName.Infrastructure.Adapters.Console; - using YourProjectName.Infrastructure.Persistence.EfCore; var builder = WebApplication.CreateBuilder(args); @@ -453,8 +450,125 @@ Then register the port with the adapter class in `Program.cs` like this: // ... +---------------------- +7: Add Web API Adapter +---------------------- + +Create an http adapter for your application layer actions. We need to: + +- Create a **controller** to open endpoints and invoke actions. +- Add **Controller-**, **Swagger-** and **API Explorer** services in `Program.cs`. +- Add **HTTPS Redirection-**, **CORS-** and **Swagger** middleware in `Program.cs`. +- Map controllers to endpoints in `Program.cs`. + +Example definitions: + +.. code-block:: csharp + + using Microsoft.AspNetCore.Mvc; + using YourProjectName.Application.Actions.GetCustomer; + using YourProjectName.Application.Actions.GetCustomers; + using YourProjectName.Application.Actions.RegisterCustomer; + using YourProjectName.Domain.Model; + + namespace YourProjectName.Infrastructure.Adapters.WebAPI.Controllers + { + [ApiController] + [Route("api/customers")] + public class CustomerController : ControllerBase + { + private readonly RegisterCustomerAction _registerCustomerAction; + private readonly GetCustomerAction _getCustomerAction; + private readonly GetCustomersAction _getCustomersAction; + + public CustomerController( + RegisterCustomerAction registerCustomerAction, + GetCustomerAction getCustomerAction, + GetCustomersAction getCustomersAction) + { + _registerCustomerAction = registerCustomerAction; + _getCustomerAction = getCustomerAction; + _getCustomersAction = getCustomersAction; + } + + [HttpPost("register-customer")] + public async Task> RegisterCustomer([FromBody] RegisterCustomerCommand command, CancellationToken ct) + { + try + { + var customer = await _registerCustomerAction.ExecuteAsync(command, ct); + return CreatedAtAction(nameof(GetCustomer), new { id = customer.Id }, customer); + } + catch (Exception ex) + { + return BadRequest(new { Message = ex.Message }); + } + } + } + } + +.. 
code-block:: csharp + + using OpenDDD.API.Extensions; + using YourProjectName.Domain.Model.Ports; + using YourProjectName.Infrastructure.Adapters.Console; + + var builder = WebApplication.CreateBuilder(args); + + // Add Swagger Services + builder.Services.AddEndpointsApiExplorer(); + builder.Services.AddSwaggerGen(); + + // Add OpenDDD services + builder.Services.AddOpenDDD(builder.Configuration, + options => + { + options.UseInMemoryDatabase() + .UseInMemoryMessaging() + .SetEventListenerGroup("YourProjectName") + .SetEventTopics( + "YourProjectName.Domain.{EventName}", + "YourProjectName.Interchange.{EventName}" + ) + .EnableAutoRegistration(); + }, + services => + { + services.AddTransient(); + } + ); + + // Add Controller Services + builder.Services.AddControllers(); + + // Build the application + var app = builder.Build(); + + // Use OpenDDD Middleware + app.UseOpenDDD(); + + // Use Swagger Middleware + if (app.Environment.IsDevelopment()) + { + app.UseSwagger(); + app.UseSwaggerUI(); + app.UseDeveloperExceptionPage(); + } + + // Use HTTP->HTTPS Redirection Middleware + app.UseHttpsRedirection(); + + // Use CORS Middleware + app.UseCors("AllowAll"); + + // Map Controller Actions to Endpoints + app.MapControllers(); + + // Run the application + app.Run(); + -------------------------- -7: Edit `appsettings.json` +8: Edit `appsettings.json` -------------------------- Add the following configuration to your `appsettings.json` file to customize OpenDDD.NET behavior: @@ -466,8 +580,8 @@ Add the following configuration to your `appsettings.json` file to customize Ope "DatabaseProvider": "InMemory", "MessagingProvider": "InMemory", "Events": { - "DomainEventTopicTemplate": "YourProjectName.Domain.{EventName}", - "IntegrationEventTopicTemplate": "YourProjectName.Interchange.{EventName}", + "DomainEventTopic": "YourProjectName.Domain.{EventName}", + "IntegrationEventTopic": "YourProjectName.Interchange.{EventName}", "ListenerGroup": "Default" }, "SQLite": { @@ -476,11 +590,6 @@ Add the following configuration to your `appsettings.json` file to customize Ope "Postgres": { "ConnectionString": "Host=localhost;Port=5432;Database=yourprojectname;Username=your_username;Password=your_password" }, - "Events": { - "DomainEventTopicTemplate": "YourProjectName.Domain.{EventName}", - "IntegrationEventTopicTemplate": "YourProjectName.Interchange.{EventName}", - "ListenerGroup": "Default" - }, "AzureServiceBus": { "ConnectionString": "", "AutoCreateTopics": true @@ -490,10 +599,12 @@ Add the following configuration to your `appsettings.json` file to customize Ope "Port": 5672, "Username": "guest", "Password": "guest", - "VirtualHost": "/" + "VirtualHost": "/", + "AutoCreateTopics": true }, "Kafka": { - "BootstrapServers": "localhost:9092" + "BootstrapServers": "localhost:9092", + "AutoCreateTopics": true }, "AutoRegister": { "Actions": true, @@ -509,7 +620,7 @@ Add the following configuration to your `appsettings.json` file to customize Ope For all information about configuration, see :ref:`Configuration `. 
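Note: the `Program.cs` example added in step 7 calls `app.UseCors("AllowAll")` but never registers a policy with that name. A minimal sketch of the registration it appears to assume, placed with the other service registrations before `builder.Build()` (the permissive policy is suitable for local development only):

.. code-block:: csharp

    // Register the "AllowAll" CORS policy referenced by app.UseCors("AllowAll").
    builder.Services.AddCors(options =>
    {
        options.AddPolicy("AllowAll", policy =>
            policy.AllowAnyOrigin()
                  .AllowAnyHeader()
                  .AllowAnyMethod());
    });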
---------------------- -8: Run the Application +9: Run the Application ---------------------- Now you are ready to run the application: diff --git a/samples/Bookstore/Makefile b/samples/Bookstore/Makefile index 4948b06..96cfb4a 100644 --- a/samples/Bookstore/Makefile +++ b/samples/Bookstore/Makefile @@ -152,9 +152,23 @@ apply-migrations: ##@Migrations Apply all pending migrations to the database --project $(SRC)/Bookstore ########################################################################## -# AZURE +# AZURE SERVICE BUS ########################################################################## +.PHONY: azure-create-resource-group +azure-create-resource-group: ##@Azure Create the Azure Resource Group + az group create --name $(AZURE_RESOURCE_GROUP) --location $(AZURE_REGION) + +.PHONY: azure-create-service-principal +azure-create-service-principal: ##@Azure Create an Azure Service Principal for GitHub Actions + @echo "Creating Azure Service Principal..." + az ad sp create-for-rbac \ + --name "github-actions-opendddnet" \ + --role "Contributor" \ + --scopes /subscriptions/$(AZURE_SUBSCRIPTION_ID)/resourceGroups/$(AZURE_RESOURCE_GROUP) \ + --sdk-auth + @echo "✅ Copy the output above and add it as 'AZURE_CREDENTIALS' in GitHub Secrets." + .PHONY: azure-create-servicebus-namespace azure-create-servicebus-namespace: ##@Azure Create the Azure Service Bus namespace az servicebus namespace create --name $(AZURE_SERVICEBUS_NAMESPACE) --resource-group $(AZURE_RESOURCE_GROUP) --location $(AZURE_REGION) --sku Standard @@ -168,13 +182,23 @@ azure-get-servicebus-connection: ##@Azure Get the Service Bus connection string --query primaryConnectionString \ --output tsv +.PHONY: azure-delete-servicebus-namespace +azure-delete-servicebus-namespace: ##@Azure Delete the Azure Service Bus namespace + az servicebus namespace delete --resource-group $(AZURE_RESOURCE_GROUP) --name $(AZURE_SERVICEBUS_NAMESPACE) + +########################################################################## +# AZURE SQL SERVER +########################################################################## + + + ########################################################################## # RABBITMQ ########################################################################## .PHONY: rabbitmq-start rabbitmq-start: ##@@RabbitMQ Start a RabbitMQ container - docker run -d --name rabbitmq --hostname rabbitmq \ + docker run --rm -d --name rabbitmq --hostname rabbitmq \ -e RABBITMQ_DEFAULT_USER=$(RABBITMQ_DEFAULT_USER) \ -e RABBITMQ_DEFAULT_PASS=$(RABBITMQ_DEFAULT_PASS) \ -p 5672:$(RABBITMQ_PORT) -p 15672:15672 rabbitmq:management @@ -193,11 +217,6 @@ rabbitmq-status: ##@RabbitMQ Check RabbitMQ container status rabbitmq-get-connection: ##@RabbitMQ Get the RabbitMQ connection string @echo "amqp://$(RABBITMQ_DEFAULT_USER):$(RABBITMQ_DEFAULT_PASS)@localhost:$(RABBITMQ_PORT)/" -.PHONY: rabbitmq-clean -rabbitmq-clean: ##@RabbitMQ Remove RabbitMQ container and its volumes - docker rm -f rabbitmq || true - @echo "RabbitMQ container removed." 
- .PHONY: rabbitmq-logs rabbitmq-logs: ##@RabbitMQ Show RabbitMQ logs docker logs -f rabbitmq @@ -241,7 +260,7 @@ endif .PHONY: kafka-list-brokers kafka-list-brokers: ##@Kafka List Kafka broker configurations - docker exec -it $(KAFKA_CONTAINER) /opt/kafka/bin/kafka-configs.sh --bootstrap-server localhost:9092 --describe --entity-type brokers + @docker exec -it $(KAFKA_CONTAINER) /opt/kafka/bin/kafka-configs.sh --bootstrap-server localhost:9092 --describe --entity-type brokers .PHONY: kafka-list-topics kafka-list-topics: ##@Kafka List all Kafka topics @@ -249,11 +268,11 @@ kafka-list-topics: ##@Kafka List all Kafka topics .PHONY: kafka-broker-status kafka-broker-status: ##@Kafka Show Kafka broker status - docker exec -it bookstore-kafka /opt/kafka/bin/kafka-broker-api-versions.sh --bootstrap-server localhost:9092 + @docker exec -it bookstore-kafka /opt/kafka/bin/kafka-broker-api-versions.sh --bootstrap-server localhost:9092 .PHONY: kafka-list-consumers kafka-list-consumers: ##@Kafka List active Kafka consumer groups - docker exec -it bookstore-kafka /opt/kafka/bin/kafka-consumer-groups.sh --bootstrap-server localhost:9092 --list + @docker exec -it bookstore-kafka /opt/kafka/bin/kafka-consumer-groups.sh --bootstrap-server localhost:9092 --list .PHONY: kafka-consume kafka-consume: ##@Kafka Consume messages from a Kafka topic (uses NAME) @@ -270,7 +289,7 @@ endif @docker exec -it $(KAFKA_CONTAINER) kafka-console-producer.sh --broker-list $(KAFKA_BROKER) --topic $(NAME) ########################################################################## -# POSTGRESQL +# POSTGRES ########################################################################## .PHONY: postgres-start diff --git a/samples/Bookstore/env.make.sample b/samples/Bookstore/env.make.sample index 36968ae..0c44dc5 100644 --- a/samples/Bookstore/env.make.sample +++ b/samples/Bookstore/env.make.sample @@ -1,3 +1,4 @@ +AZURE_SUBSCRIPTION_ID="YOUR_SUB_ID" AZURE_RESOURCE_GROUP=bookstore AZURE_REGION=northeurope AZURE_SERVICEBUS_NAMESPACE=opendddnet-bookstore-sample diff --git a/samples/Bookstore/src/Bookstore.sln b/samples/Bookstore/src/Bookstore.sln index 7a95bb0..e6090ba 100644 --- a/samples/Bookstore/src/Bookstore.sln +++ b/samples/Bookstore/src/Bookstore.sln @@ -4,6 +4,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Bookstore", "Bookstore\Book EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "OpenDDD", "..\..\..\src\OpenDDD\OpenDDD.csproj", "{05861AFA-45DB-409A-AF0D-E81936198EB0}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "OpenDDD.Tests", "..\..\..\src\OpenDDD.Tests\OpenDDD.Tests.csproj", "{DB152EF6-1F44-40D6-8701-6FB0434724F1}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -18,5 +20,9 @@ Global {05861AFA-45DB-409A-AF0D-E81936198EB0}.Debug|Any CPU.Build.0 = Debug|Any CPU {05861AFA-45DB-409A-AF0D-E81936198EB0}.Release|Any CPU.ActiveCfg = Release|Any CPU {05861AFA-45DB-409A-AF0D-E81936198EB0}.Release|Any CPU.Build.0 = Release|Any CPU + {DB152EF6-1F44-40D6-8701-6FB0434724F1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {DB152EF6-1F44-40D6-8701-6FB0434724F1}.Debug|Any CPU.Build.0 = Debug|Any CPU + {DB152EF6-1F44-40D6-8701-6FB0434724F1}.Release|Any CPU.ActiveCfg = Release|Any CPU + {DB152EF6-1F44-40D6-8701-6FB0434724F1}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection EndGlobal diff --git a/samples/Bookstore/src/Bookstore/Bookstore.csproj b/samples/Bookstore/src/Bookstore/Bookstore.csproj index 35b9a95..5a508ef 100644 --- 
a/samples/Bookstore/src/Bookstore/Bookstore.csproj +++ b/samples/Bookstore/src/Bookstore/Bookstore.csproj @@ -14,6 +14,7 @@ runtime; build; native; contentfiles; analyzers; buildtransitive + diff --git a/samples/Bookstore/src/Bookstore/Tests/Infrastructure/Persistence/EfCore/EfCoreConfigurationTests.cs b/samples/Bookstore/src/Bookstore/Tests/Infrastructure/Persistence/EfCore/EfCoreConfigurationTests.cs deleted file mode 100644 index e3f2db6..0000000 --- a/samples/Bookstore/src/Bookstore/Tests/Infrastructure/Persistence/EfCore/EfCoreConfigurationTests.cs +++ /dev/null @@ -1,84 +0,0 @@ -using Xunit; -using Microsoft.EntityFrameworkCore; -using Microsoft.Extensions.Options; -using OpenDDD.Infrastructure.Persistence.EfCore.UoW; -using OpenDDD.Infrastructure.Persistence.UoW; -using OpenDDD.API.Options; -using OpenDDD.Domain.Model; -using OpenDDD.Infrastructure.Persistence.EfCore.Base; -using OpenDDD.Infrastructure.Repository.EfCore; -using OpenDDD.Infrastructure.Persistence.DatabaseSession; -using OpenDDD.Infrastructure.Persistence.EfCore.DatabaseSession; -using Bookstore.Domain.Model; -using Bookstore.Infrastructure.Persistence.EfCore; -using OpenDDD.Infrastructure.Events; -using OpenDDD.Infrastructure.TransactionalOutbox; -using OpenDDD.Infrastructure.TransactionalOutbox.EfCore; - -namespace Bookstore.Tests.Infrastructure.Persistence.EfCore -{ - public class EfCoreConfigurationTests - { - private readonly IServiceProvider _serviceProvider; - - public EfCoreConfigurationTests() - { - var services = new ServiceCollection(); - - // Register logging - services.AddLogging(); - - // Manually configure OpenDDD options - var options = new OpenDddOptions(); - services.AddSingleton(Options.Create(options)); - services.AddSingleton(options); - - // Add an in-memory database - services.AddDbContext(opts => - opts.UseInMemoryDatabase("TestDatabase")); - services.AddScoped(sp => sp.GetRequiredService()); - - // Register EfCoreDatabaseSession as the IDatabaseSession - services.AddScoped(); - services.AddScoped(sp => sp.GetRequiredService()); - - // Register dependencies - services.AddScoped(); - services.AddScoped(typeof(IRepository), typeof(EfCoreRepository)); - - // Register publishers - services.AddScoped(); - services.AddScoped(); - - // Register IOutboxRepository (EF Core implementation) - services.AddScoped(); - - _serviceProvider = services.BuildServiceProvider(); - } - - [Fact] - public async Task CreateAndRetrieveOrder_WithLineItems_ShouldPersistCorrectly() - { - using var scope = _serviceProvider.CreateScope(); - var repository = scope.ServiceProvider.GetRequiredService>(); - - var ct = CancellationToken.None; - - // Arrange - Create and save an order with line items - var order = Order.Create(Guid.NewGuid()); - order.AddLineItem(Guid.NewGuid(), Money.USD(19.99m)); - order.AddLineItem(Guid.NewGuid(), Money.USD(29.99m)); - - await repository.SaveAsync(order, ct); - - // Act - Retrieve order - var retrievedOrder = await repository.GetAsync(order.Id, ct); - - // Assert - Order and line items should be persisted - Assert.NotNull(retrievedOrder); - Assert.Equal(order.Id, retrievedOrder.Id); - Assert.NotEmpty(retrievedOrder.LineItems); - Assert.Equal(2, retrievedOrder.LineItems.Count); - } - } -} diff --git a/samples/Bookstore/src/Bookstore/appsettings.json b/samples/Bookstore/src/Bookstore/appsettings.json index cf4c806..ebe6847 100644 --- a/samples/Bookstore/src/Bookstore/appsettings.json +++ b/samples/Bookstore/src/Bookstore/appsettings.json @@ -13,8 +13,8 @@ "DatabaseProvider": "InMemory", 
"MessagingProvider": "InMemory", "Events": { - "DomainEventTopicTemplate": "Bookstore.Domain.{EventName}", - "IntegrationEventTopicTemplate": "Bookstore.Interchange.{EventName}", + "DomainEventTopic": "Bookstore.Domain.{EventName}", + "IntegrationEventTopic": "Bookstore.Interchange.{EventName}", "ListenerGroup": "Default" }, "SQLite": { @@ -32,10 +32,12 @@ "Port": 5672, "Username": "guest", "Password": "guest", - "VirtualHost": "/" + "VirtualHost": "/", + "AutoCreateTopics": true }, "Kafka": { - "BootstrapServers": "localhost:9092" + "BootstrapServers": "localhost:9092", + "AutoCreateTopics": true }, "AutoRegister": { "Actions": true, diff --git a/src/OpenDDD/Tests/Domain/Model/TestAggregateRoot.cs b/src/OpenDDD.Tests/Base/Domain/Model/TestAggregateRoot.cs similarity index 95% rename from src/OpenDDD/Tests/Domain/Model/TestAggregateRoot.cs rename to src/OpenDDD.Tests/Base/Domain/Model/TestAggregateRoot.cs index 26ec64d..d39102a 100644 --- a/src/OpenDDD/Tests/Domain/Model/TestAggregateRoot.cs +++ b/src/OpenDDD.Tests/Base/Domain/Model/TestAggregateRoot.cs @@ -1,6 +1,6 @@ using OpenDDD.Domain.Model.Base; -namespace OpenDDD.Tests.Domain.Model +namespace OpenDDD.Tests.Base.Domain.Model { public class TestAggregateRoot : AggregateRootBase { diff --git a/src/OpenDDD/Tests/Domain/Model/TestEntity.cs b/src/OpenDDD.Tests/Base/Domain/Model/TestEntity.cs similarity index 92% rename from src/OpenDDD/Tests/Domain/Model/TestEntity.cs rename to src/OpenDDD.Tests/Base/Domain/Model/TestEntity.cs index b0c344f..07d13ea 100644 --- a/src/OpenDDD/Tests/Domain/Model/TestEntity.cs +++ b/src/OpenDDD.Tests/Base/Domain/Model/TestEntity.cs @@ -1,6 +1,6 @@ using OpenDDD.Domain.Model.Base; -namespace OpenDDD.Tests.Domain.Model +namespace OpenDDD.Tests.Base.Domain.Model { public class TestEntity : EntityBase { diff --git a/src/OpenDDD/Tests/Domain/Model/TestValueObject.cs b/src/OpenDDD.Tests/Base/Domain/Model/TestValueObject.cs similarity index 90% rename from src/OpenDDD/Tests/Domain/Model/TestValueObject.cs rename to src/OpenDDD.Tests/Base/Domain/Model/TestValueObject.cs index 07c00e1..f1efad9 100644 --- a/src/OpenDDD/Tests/Domain/Model/TestValueObject.cs +++ b/src/OpenDDD.Tests/Base/Domain/Model/TestValueObject.cs @@ -1,6 +1,6 @@ using OpenDDD.Domain.Model; -namespace OpenDDD.Tests.Domain.Model +namespace OpenDDD.Tests.Base.Domain.Model { public class TestValueObject : IValueObject { diff --git a/src/OpenDDD.Tests/Base/IntegrationTests.cs b/src/OpenDDD.Tests/Base/IntegrationTests.cs new file mode 100644 index 0000000..d7fc11d --- /dev/null +++ b/src/OpenDDD.Tests/Base/IntegrationTests.cs @@ -0,0 +1,32 @@ +using Microsoft.Extensions.Logging; +using OpenDDD.Tests.Base.Logging; +using Xunit.Abstractions; + +namespace OpenDDD.Tests.Base +{ + [Trait("Category", "Integration")] + public class IntegrationTests : IDisposable + { + protected readonly ILoggerFactory LoggerFactory; + protected readonly ILogger Logger; + + public IntegrationTests(ITestOutputHelper testOutputHelper, bool enableLogging = false) + { + LoggerFactory = Microsoft.Extensions.Logging.LoggerFactory.Create(builder => + { + if (enableLogging) + { + builder.AddProvider(new XunitLoggerProvider(testOutputHelper)); + builder.SetMinimumLevel(LogLevel.Debug); + } + }); + + Logger = LoggerFactory.CreateLogger(GetType()); + } + + public void Dispose() + { + LoggerFactory.Dispose(); + } + } +} diff --git a/src/OpenDDD.Tests/Base/Logging/XunitLogger.cs b/src/OpenDDD.Tests/Base/Logging/XunitLogger.cs new file mode 100644 index 0000000..f1bd8d4 --- /dev/null +++ 
b/src/OpenDDD.Tests/Base/Logging/XunitLogger.cs @@ -0,0 +1,29 @@ +using Microsoft.Extensions.Logging; +using Xunit.Abstractions; + +namespace OpenDDD.Tests.Base.Logging +{ + public class XunitLogger : ILogger + { + private readonly ITestOutputHelper _testOutputHelper; + private readonly string _categoryName; + + public XunitLogger(ITestOutputHelper testOutputHelper, string categoryName) + { + _testOutputHelper = testOutputHelper; + _categoryName = categoryName; + } + + public IDisposable BeginScope(TState state) => null; + + public bool IsEnabled(LogLevel logLevel) => true; + + public void Log(LogLevel logLevel, EventId eventId, TState state, Exception exception, Func formatter) + { + if (formatter != null) + { + _testOutputHelper.WriteLine($"[{logLevel}] {_categoryName}: {formatter(state, exception)}"); + } + } + } +} \ No newline at end of file diff --git a/src/OpenDDD.Tests/Base/Logging/XunitLoggingProvider.cs b/src/OpenDDD.Tests/Base/Logging/XunitLoggingProvider.cs new file mode 100644 index 0000000..9abf476 --- /dev/null +++ b/src/OpenDDD.Tests/Base/Logging/XunitLoggingProvider.cs @@ -0,0 +1,22 @@ +using Microsoft.Extensions.Logging; +using Xunit.Abstractions; + +namespace OpenDDD.Tests.Base.Logging +{ + public class XunitLoggerProvider : ILoggerProvider + { + private readonly ITestOutputHelper _testOutputHelper; + + public XunitLoggerProvider(ITestOutputHelper testOutputHelper) + { + _testOutputHelper = testOutputHelper; + } + + public ILogger CreateLogger(string categoryName) + { + return new XunitLogger(_testOutputHelper, categoryName); + } + + public void Dispose() { } + } +} diff --git a/src/OpenDDD.Tests/Base/UnitTests.cs b/src/OpenDDD.Tests/Base/UnitTests.cs new file mode 100644 index 0000000..67be01b --- /dev/null +++ b/src/OpenDDD.Tests/Base/UnitTests.cs @@ -0,0 +1,8 @@ +namespace OpenDDD.Tests.Base +{ + [Trait("Category", "Unit")] + public abstract class UnitTests + { + + } +} diff --git a/src/OpenDDD.Tests/Integration/Infrastructure/Events/Azure/AzureServiceBusMessagingProviderTests.cs b/src/OpenDDD.Tests/Integration/Infrastructure/Events/Azure/AzureServiceBusMessagingProviderTests.cs new file mode 100644 index 0000000..2a88694 --- /dev/null +++ b/src/OpenDDD.Tests/Integration/Infrastructure/Events/Azure/AzureServiceBusMessagingProviderTests.cs @@ -0,0 +1,346 @@ +using System.Collections.Concurrent; +using Microsoft.Extensions.Logging; +using Xunit.Abstractions; +using OpenDDD.Infrastructure.Events.Azure; +using OpenDDD.Tests.Base; +using Azure.Messaging.ServiceBus; +using Azure.Messaging.ServiceBus.Administration; +using FluentAssertions; + +namespace OpenDDD.Tests.Integration.Infrastructure.Events.Azure +{ + [Collection("AzureServiceBusTests")] + public class AzureServiceBusMessagingProviderTests : IntegrationTests, IAsyncLifetime + { + private readonly string _connectionString; + private readonly ServiceBusAdministrationClient _adminClient; + private readonly ILogger _logger; + private readonly ServiceBusClient _serviceBusClient; + private readonly AzureServiceBusMessagingProvider _messagingProvider; + private readonly CancellationTokenSource _cts = new(TimeSpan.FromSeconds(120)); + + public AzureServiceBusMessagingProviderTests(ITestOutputHelper testOutputHelper) + : base(testOutputHelper, enableLogging: true) + { + _connectionString = Environment.GetEnvironmentVariable("AZURE_SERVICE_BUS_CONNECTION_STRING") + ?? 
throw new InvalidOperationException("AZURE_SERVICE_BUS_CONNECTION_STRING is not set."); + + _adminClient = new ServiceBusAdministrationClient(_connectionString); + _serviceBusClient = new ServiceBusClient(_connectionString); + _logger = LoggerFactory.CreateLogger(); + + _messagingProvider = new AzureServiceBusMessagingProvider( + _serviceBusClient, + _adminClient, + autoCreateTopics: true, + _logger); + } + + public async Task InitializeAsync() + { + await CleanupTopicsAndSubscriptionsAsync(); + } + + public async Task DisposeAsync() + { + await _cts.CancelAsync(); + await _messagingProvider.DisposeAsync(); + } + + private async Task CleanupTopicsAndSubscriptionsAsync() + { + var topics = _adminClient.GetTopicsAsync(); + await foreach (var topic in topics) + { + if (topic.Name.StartsWith("test-topic-")) + { + var subscriptions = _adminClient.GetSubscriptionsAsync(topic.Name); + await foreach (var sub in subscriptions) + { + await _adminClient.DeleteSubscriptionAsync(topic.Name, sub.SubscriptionName); + } + + await _adminClient.DeleteTopicAsync(topic.Name); + } + } + } + + [Fact] + public async Task AutoCreateTopic_ShouldCreateTopicOnSubscribe_WhenSettingEnabled() + { + // Arrange + var topicName = $"test-topic-{Guid.NewGuid()}"; + var subscriptionName = "test-subscription"; + + var topicExistsBefore = (await _adminClient.TopicExistsAsync(topicName)).Value; + topicExistsBefore.Should().BeFalse("The topic should not exist before subscribing."); + + // Act + await _messagingProvider.SubscribeAsync(topicName, subscriptionName, (msg, token) => Task.CompletedTask); + + // Assert + Assert.True(await _adminClient.TopicExistsAsync(topicName)); + } + + [Fact] + public async Task AutoCreateTopic_ShouldNotCreateTopicOnSubscribe_WhenSettingDisabled() + { + // Arrange + var topicName = $"test-topic-{Guid.NewGuid()}"; + + var topicExistsBefore = (await _adminClient.TopicExistsAsync(topicName)).Value; + topicExistsBefore.Should().BeFalse("The topic should not exist before subscribing."); + + var messagingProvider = new AzureServiceBusMessagingProvider( + _serviceBusClient, + _adminClient, + autoCreateTopics: false, + _logger); + + // Act & Assert + var exception = await Assert.ThrowsAsync(async () => + { + await messagingProvider.SubscribeAsync(topicName, "test-subscriber", (msg, token) => Task.CompletedTask); + }); + + Assert.False(await _adminClient.TopicExistsAsync(topicName), "Topic should not have been created."); + + exception.Message.Should().Be($"Topic '{topicName}' does not exist. 
Enable 'autoCreateTopics' to create topics automatically."); + } + + [Fact] + public async Task AutoCreateTopic_ShouldCreateTopicOnPublish_WhenSettingEnabled() + { + // Arrange + var topicName = $"test-topic-{Guid.NewGuid()}"; + + var topicExistsBefore = await _adminClient.TopicExistsAsync(topicName); + topicExistsBefore.Value.Should().BeFalse("The topic should not exist before publishing."); + + var messagingProvider = new AzureServiceBusMessagingProvider( + _serviceBusClient, + _adminClient, + autoCreateTopics: true, // Auto-create enabled + _logger); + + // Act + await messagingProvider.PublishAsync(topicName, "Test message", _cts.Token); + + // Assert + var topicExistsAfter = await _adminClient.TopicExistsAsync(topicName); + topicExistsAfter.Value.Should().BeTrue("Azure Service Bus should create the topic automatically when publishing."); + } + + [Fact] + public async Task AutoCreateTopic_ShouldNotCreateTopicOnPublish_WhenSettingDisabled() + { + // Arrange + var topicName = $"test-topic-{Guid.NewGuid()}"; + + var topicExistsBefore = await _adminClient.TopicExistsAsync(topicName); + topicExistsBefore.Value.Should().BeFalse("The topic should not exist before publishing."); + + var messagingProvider = new AzureServiceBusMessagingProvider( + _serviceBusClient, + _adminClient, + autoCreateTopics: false, // Auto-create disabled + _logger); + + // Act & Assert + var exception = await Assert.ThrowsAsync(async () => + { + await messagingProvider.PublishAsync(topicName, "Test message", _cts.Token); + }); + + exception.Message.Should().Contain($"Topic '{topicName}' does not exist. Enable 'autoCreateTopics' to create topics automatically."); + } + + [Fact] + public async Task AtLeastOnceGurantee_ShouldDeliverToLateSubscriber_WhenSubscribedBefore() + { + // Arrange + var topicName = $"test-topic-{Guid.NewGuid()}"; + var subscriptionName = "test-subscription"; + var receivedMessages = new ConcurrentBag(); + var messageToSend = "Persistent Message Test"; + var messageReceivedTcs = new TaskCompletionSource(); + + var firstSubscription = await _messagingProvider.SubscribeAsync(topicName, subscriptionName, async (msg, token) => + { + Assert.Fail("First subscription should not receive the message."); + }, _cts.Token); + + await Task.Delay(500); + + await _messagingProvider.UnsubscribeAsync(firstSubscription, _cts.Token); + + await Task.Delay(500); + + // Act + await _messagingProvider.PublishAsync(topicName, messageToSend, _cts.Token); + + await _messagingProvider.SubscribeAsync(topicName, subscriptionName, async (msg, token) => + { + receivedMessages.Add(msg); + messageReceivedTcs.TrySetResult(true); + }, _cts.Token); + + // Wait for message with timeout + await messageReceivedTcs.Task.WaitAsync(TimeSpan.FromSeconds(10)); + + + // Assert + Assert.Contains(messageToSend, receivedMessages); + } + + [Fact] + public async Task AtLeastOnceGurantee_ShouldNotDeliverToLateSubscriber_WhenNotSubscribedBefore() + { + // Arrange + var topicName = $"test-topic-{Guid.NewGuid()}"; + var subscriptionName = "test-subscription"; + var receivedMessages = new ConcurrentBag(); + var messageToSend = "Non-Persistent Message Test"; + + // Act + await _messagingProvider.PublishAsync(topicName, messageToSend, _cts.Token); + + await Task.Delay(500); + + await _messagingProvider.SubscribeAsync(topicName, subscriptionName, async (msg, token) => + { + receivedMessages.Add(msg); + }, _cts.Token); + + await Task.Delay(5000); + + // Assert + Assert.DoesNotContain(messageToSend, receivedMessages); + } + + [Fact] + public async Task 
AtLeastOnceGuarantee_ShouldRedeliverLater_WhenMessageNotAcked() + { + // Arrange + var topicName = $"test-topic-{Guid.NewGuid()}"; + var subscriptionName = "test-subscription"; + var receivedMessages = new ConcurrentBag(); + var messageToSend = "Redelivery Test"; + + async Task FaultyHandler(string msg, CancellationToken token) + { + receivedMessages.Add(msg); + throw new Exception("Simulated consumer crash before acknowledgment."); + } + + await _messagingProvider.SubscribeAsync(topicName, subscriptionName, FaultyHandler, _cts.Token); + await Task.Delay(500); + + // Act + await _messagingProvider.PublishAsync(topicName, messageToSend, _cts.Token); + + for (int i = 0; i < 300; i++) + { + if (receivedMessages.Count > 1) break; + await Task.Delay(1000); + } + + // Assert + Assert.True(receivedMessages.Count > 1, "Message should be redelivered at least once."); + } + + [Fact] + public async Task CompetingConsumers_ShouldDeliverOnlyOnce_WhenMultipleConsumersInGroup() + { + // Arrange + var topicName = $"test-topic-{Guid.NewGuid()}"; + var subscriptionName = "test-consumer-group"; + var receivedMessages = new ConcurrentDictionary(); + var messageToSend = "Competing Consumer Test"; + + async Task MessageHandler(string msg, CancellationToken token) + { + receivedMessages.AddOrUpdate("received", 1, (key, value) => value + 1); + } + + await _messagingProvider.SubscribeAsync(topicName, subscriptionName, MessageHandler, _cts.Token); + await _messagingProvider.SubscribeAsync(topicName, subscriptionName, MessageHandler, _cts.Token); + await _messagingProvider.SubscribeAsync(topicName, subscriptionName, MessageHandler, _cts.Token); + await Task.Delay(500); + + // Act + await _messagingProvider.PublishAsync(topicName, messageToSend, _cts.Token); + + await Task.Delay(3000); + + // Assert + Assert.Equal(1, receivedMessages.GetValueOrDefault("received", 0)); + } + + [Fact] + public async Task CompetingConsumers_ShouldDistributeMessages_WhenMultipleConsumersInGroup() + { + // Arrange + var topicName = $"test-topic-{Guid.NewGuid()}"; + var consumerGroup = "test-consumer-group"; + var totalMessages = 50; + var numConsumers = 10; + var variancePercentage = 0.1; + var perConsumerMessageCount = new ConcurrentDictionary(); // Track messages per consumer + var allMessagesProcessed = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); + + async Task Subscribe() + { + var consumerId = Guid.NewGuid(); + + async Task MessageHandler(string msg, CancellationToken token) + { + perConsumerMessageCount.AddOrUpdate(consumerId, 1, (_, count) => count + 1); + _logger.LogDebug($"Subscriber {consumerId} received a message."); + + if (perConsumerMessageCount.Values.Sum() >= totalMessages) + { + allMessagesProcessed.TrySetResult(true); + } + } + + await _messagingProvider.SubscribeAsync(topicName, consumerGroup, MessageHandler, _cts.Token); + } + + for (int i = 0; i < numConsumers; i++) + { + await Subscribe(); + } + + await Task.Delay(500); + + // Act + for (int i = 0; i < totalMessages; i++) + { + await _messagingProvider.PublishAsync(topicName, "Test Message", _cts.Token); + } + + try + { + await allMessagesProcessed.Task.WaitAsync(TimeSpan.FromSeconds(10)); + } + catch (TimeoutException) + { + _logger.LogDebug("Timed out waiting for consumers to receive all messages."); + Assert.Fail($"Consumers only processed {perConsumerMessageCount.Values.Sum()} of {totalMessages} messages."); + } + + // Assert + var messageCounts = perConsumerMessageCount.Values.ToList(); + var expectedPerConsumer = totalMessages / 
numConsumers; + var variance = (int)(expectedPerConsumer * variancePercentage); + var minAllowed = expectedPerConsumer - variance; + var maxAllowed = expectedPerConsumer + variance; + + foreach (var count in messageCounts) + { + Assert.InRange(count, minAllowed, maxAllowed); + } + } + } +} diff --git a/src/OpenDDD.Tests/Integration/Infrastructure/Events/Azure/AzureServiceBusTestsCollection.cs b/src/OpenDDD.Tests/Integration/Infrastructure/Events/Azure/AzureServiceBusTestsCollection.cs new file mode 100644 index 0000000..f9cfe4f --- /dev/null +++ b/src/OpenDDD.Tests/Integration/Infrastructure/Events/Azure/AzureServiceBusTestsCollection.cs @@ -0,0 +1,5 @@ +namespace OpenDDD.Tests.Integration.Infrastructure.Events.Azure +{ + [CollectionDefinition("AzureServiceBusTests", DisableParallelization = true)] + public class AzureServiceBusTestsCollection { } +} diff --git a/src/OpenDDD.Tests/Integration/Infrastructure/Events/InMemory/InMemoryMessagingProviderTests.cs b/src/OpenDDD.Tests/Integration/Infrastructure/Events/InMemory/InMemoryMessagingProviderTests.cs new file mode 100644 index 0000000..6eb9e34 --- /dev/null +++ b/src/OpenDDD.Tests/Integration/Infrastructure/Events/InMemory/InMemoryMessagingProviderTests.cs @@ -0,0 +1,238 @@ +using System.Collections.Concurrent; +using Microsoft.Extensions.Logging; +using Xunit.Abstractions; +using FluentAssertions; +using OpenDDD.Infrastructure.Events.InMemory; +using OpenDDD.Tests.Base; + +namespace OpenDDD.Tests.Integration.Infrastructure.Events.InMemory +{ + [Collection("InMemoryTests")] + public class InMemoryMessagingProviderTests : IntegrationTests, IAsyncLifetime + { + private readonly ILogger _logger; + private readonly InMemoryMessagingProvider _messagingProvider; + private readonly CancellationTokenSource _cts = new(TimeSpan.FromSeconds(60)); + + public InMemoryMessagingProviderTests(ITestOutputHelper testOutputHelper) + : base(testOutputHelper, enableLogging: true) + { + _logger = LoggerFactory.CreateLogger(); + _messagingProvider = new InMemoryMessagingProvider(_logger); + } + + public Task InitializeAsync() => Task.CompletedTask; + + public async Task DisposeAsync() + { + _cts.Cancel(); + await _messagingProvider.DisposeAsync(); + } + + [Fact] + public async Task AtLeastOnceGuarantee_ShouldDeliverToLateSubscriber_WhenSubscribedBefore() + { + // Arrange + var topicName = $"test-topic-{Guid.NewGuid()}"; + var groupName = "test-subscription"; + var receivedMessages = new ConcurrentBag(); + var messageToSend = "Persistent Message Test"; + + var lateSubscriberReceived = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); + + var firstSubscription = await _messagingProvider.SubscribeAsync(topicName, groupName, async (msg, token) => + { + Assert.Fail("First subscription should not receive the message."); + }, _cts.Token); + + await Task.Delay(500); + + await _messagingProvider.UnsubscribeAsync(firstSubscription, _cts.Token); + + // Act + await _messagingProvider.PublishAsync(topicName, messageToSend, _cts.Token); + + await _messagingProvider.SubscribeAsync(topicName, groupName, async (msg, token) => + { + receivedMessages.Add(msg); + lateSubscriberReceived.TrySetResult(true); + }, _cts.Token); + + // Assert + try + { + await lateSubscriberReceived.Task.WaitAsync(TimeSpan.FromSeconds(5)); + } + catch (TimeoutException) + { + Assert.Fail($"Late subscriber did not receive the expected message '{messageToSend}' within 5 seconds."); + } + + receivedMessages.Should().Contain(messageToSend, "The subscriber should receive messages 
published while it was previously subscribed."); + } + + [Fact] + public async Task AtLeastOnceGuarantee_ShouldNotDeliverToLateSubscriber_WhenNotSubscribedBefore() + { + // Arrange + var topicName = "test-topic-no-late-subscriber"; + var consumerGroup = "test-consumer-group"; + var messageToSend = "Non-Persistent Message Test"; + ConcurrentBag receivedMessages = new(); + + // Act + await _messagingProvider.PublishAsync(topicName, messageToSend, _cts.Token); + await Task.Delay(500); + + // Late subscriber + await _messagingProvider.SubscribeAsync(topicName, consumerGroup, async (msg, token) => + { + receivedMessages.Add(msg); + }, _cts.Token); + + await Task.Delay(2000); + + // Assert + receivedMessages.Should().NotContain(messageToSend); + } + + [Fact] + public async Task AtLeastOnceGuarantee_ShouldRedeliverLater_WhenMessageNotAcked() + { + // Arrange + var topicName = "test-topic-redelivery"; + var consumerGroup = "test-consumer-group"; + var messageToSend = "Redelivery Test"; + ConcurrentBag receivedMessages = new(); + + async Task FaultyHandler(string msg, CancellationToken token) + { + receivedMessages.Add(msg); + throw new Exception("Simulated consumer crash before acknowledgment."); + } + + await _messagingProvider.SubscribeAsync(topicName, consumerGroup, FaultyHandler, _cts.Token); + + // Act + await _messagingProvider.PublishAsync(topicName, messageToSend, _cts.Token); + + for (int i = 0; i < 20; i++) + { + if (receivedMessages.Count > 1) break; + await Task.Delay(500); + } + + // Assert + receivedMessages.Count.Should().BeGreaterThan(1); + } + + [Fact] + public async Task CompetingConsumers_ShouldDeliverOnlyOnce_WhenMultipleConsumersInGroup() + { + // Arrange + var topicName = "test-topic-competing-consumers"; + var consumerGroup = "test-consumer-group"; + var receivedMessages = new ConcurrentDictionary(); + var messageToSend = "Competing Consumer Test"; + var allSubscribersProcessed = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); + + async Task MessageHandler(string msg, CancellationToken token) + { + receivedMessages.AddOrUpdate("received", 1, (key, value) => value + 1); + + // If any consumer has received more than 1 message, fail immediately + if (receivedMessages["received"] > 1) + { + allSubscribersProcessed.TrySetException(new Exception("More than one consumer in the group received the message!")); + } + else + { + allSubscribersProcessed.TrySetResult(true); + } + } + + await _messagingProvider.SubscribeAsync(topicName, consumerGroup, MessageHandler, _cts.Token); + await _messagingProvider.SubscribeAsync(topicName, consumerGroup, MessageHandler, _cts.Token); + + // Act + await _messagingProvider.PublishAsync(topicName, messageToSend, _cts.Token); + + try + { + await allSubscribersProcessed.Task.WaitAsync(TimeSpan.FromSeconds(5)); + } + catch (TimeoutException) + { + Assert.Fail("Timed out waiting for message processing."); + } + + // Assert: Only one consumer in the group should receive the message + receivedMessages.GetValueOrDefault("received", 0).Should().Be(1); + } + + [Fact] + public async Task CompetingConsumers_ShouldDistributeMessages_WhenMultipleConsumersInGroup() + { + // Arrange + var topicName = "test-topic-even-distribution"; + var consumerGroup = "test-consumer-group"; + var totalMessages = 100; + var numConsumers = 2; + var variancePercentage = 0.3; + var perConsumerMessageCount = new ConcurrentDictionary(); + var allMessagesProcessed = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); + + async Task 
CreateConsumer() + { + var consumerId = Guid.NewGuid(); + + async Task MessageHandler(string msg, CancellationToken token) + { + perConsumerMessageCount.AddOrUpdate(consumerId, 1, (_, count) => count + 1); + + _logger.LogDebug($"Consumer {consumerId} received a message."); + + if (perConsumerMessageCount.Values.Sum() >= totalMessages) + { + allMessagesProcessed.TrySetResult(true); + } + } + + await _messagingProvider.SubscribeAsync(topicName, consumerGroup, MessageHandler, _cts.Token); + } + + for (int i = 0; i < numConsumers; i++) + { + await CreateConsumer(); + } + + // Act + for (int i = 0; i < totalMessages; i++) + { + await _messagingProvider.PublishAsync(topicName, "Test Message", _cts.Token); + } + + try + { + await allMessagesProcessed.Task.WaitAsync(TimeSpan.FromSeconds(10)); + } + catch (TimeoutException) + { + _logger.LogDebug("Timed out waiting for consumers to receive all messages."); + Assert.Fail($"Consumers only processed {perConsumerMessageCount.Values.Sum()} of {totalMessages} messages."); + } + + // Assert + var messageCounts = perConsumerMessageCount.Values.ToList(); + var expectedPerConsumer = totalMessages / numConsumers; + var variance = (int)(expectedPerConsumer * variancePercentage); + var minAllowed = expectedPerConsumer - variance; + var maxAllowed = expectedPerConsumer + variance; + + foreach (var count in messageCounts) + { + Assert.InRange(count, minAllowed, maxAllowed); + } + } + } +} diff --git a/src/OpenDDD.Tests/Integration/Infrastructure/Events/InMemory/InMemoryTestsCollection.cs b/src/OpenDDD.Tests/Integration/Infrastructure/Events/InMemory/InMemoryTestsCollection.cs new file mode 100644 index 0000000..4f0e579 --- /dev/null +++ b/src/OpenDDD.Tests/Integration/Infrastructure/Events/InMemory/InMemoryTestsCollection.cs @@ -0,0 +1,5 @@ +namespace OpenDDD.Tests.Integration.Infrastructure.Events.InMemory +{ + [CollectionDefinition("InMemoryTests", DisableParallelization = true)] + public class InMemoryTestsCollection { } +} diff --git a/src/OpenDDD.Tests/Integration/Infrastructure/Events/Kafka/KafkaMessagingProviderTests.cs b/src/OpenDDD.Tests/Integration/Infrastructure/Events/Kafka/KafkaMessagingProviderTests.cs new file mode 100644 index 0000000..90cc136 --- /dev/null +++ b/src/OpenDDD.Tests/Integration/Infrastructure/Events/Kafka/KafkaMessagingProviderTests.cs @@ -0,0 +1,448 @@ +using System.Collections.Concurrent; +using Microsoft.Extensions.Logging; +using Xunit.Abstractions; +using FluentAssertions; +using OpenDDD.Infrastructure.Events.Kafka; +using OpenDDD.Infrastructure.Events.Kafka.Factories; +using OpenDDD.Tests.Base; +using Confluent.Kafka; + +namespace OpenDDD.Tests.Integration.Infrastructure.Events.Kafka +{ + [Collection("KafkaTests")] + public class KafkaMessagingProviderTests : IntegrationTests, IAsyncLifetime + { + private readonly string _bootstrapServers; + private readonly IAdminClient _adminClient; + private readonly IProducer _producer; + private readonly KafkaConsumerFactory _consumerFactory; + private readonly ILogger _logger; + private readonly ILogger _consumerLogger; + private readonly KafkaMessagingProvider _messagingProvider; + private readonly CancellationTokenSource _cts = new(TimeSpan.FromSeconds(120)); + + public KafkaMessagingProviderTests(ITestOutputHelper testOutputHelper) + : base(testOutputHelper, enableLogging: true) + { + _bootstrapServers = Environment.GetEnvironmentVariable("KAFKA_BOOTSTRAP_SERVERS") + ?? 
throw new InvalidOperationException("KAFKA_BOOTSTRAP_SERVERS is not set."); + + var adminClientConfig = new AdminClientConfig { BootstrapServers = _bootstrapServers }; + var producerConfig = new ProducerConfig { BootstrapServers = _bootstrapServers }; + + _adminClient = new AdminClientBuilder(adminClientConfig).Build(); + _producer = new ProducerBuilder(producerConfig).Build(); + _logger = LoggerFactory.CreateLogger(); + _consumerLogger = LoggerFactory.CreateLogger(); + _consumerFactory = new KafkaConsumerFactory(_bootstrapServers, _consumerLogger); + + _messagingProvider = new KafkaMessagingProvider( + _adminClient, + _producer, + _consumerFactory, + autoCreateTopics: true, + _logger); + } + + public async Task InitializeAsync() + { + await CleanupTopicsAndConsumerGroupsAsync(); + } + + public async Task DisposeAsync() + { + await _cts.CancelAsync(); + await _messagingProvider.DisposeAsync(); + } + + private async Task CleanupTopicsAndConsumerGroupsAsync() + { + try + { + // Delete test topics + var metadata = _adminClient.GetMetadata(TimeSpan.FromSeconds(5)); + var testTopics = metadata.Topics + .Where(t => t.Topic.StartsWith("test-topic-")) + .Select(t => t.Topic) + .ToList(); + + if (testTopics.Any()) + { + await _adminClient.DeleteTopicsAsync(testTopics); + _logger.LogInformation("Deleted test topics: {Topics}", string.Join(", ", testTopics)); + } + + // Delete consumer groups + var consumerGroups = _adminClient.ListGroups(TimeSpan.FromSeconds(5)) + .Where(g => g.Group.StartsWith("test-consumer-group")) + .Select(g => g.Group) + .ToList(); + + if (consumerGroups.Any()) + { + await _adminClient.DeleteGroupsAsync(consumerGroups); + _logger.LogInformation("Deleted test consumer groups: {Groups}", string.Join(", ", consumerGroups)); + } + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to clean up Kafka topics and consumer groups."); + } + } + + [Fact] + public async Task AutoCreateTopic_ShouldCreateTopicOnSubscribe_WhenSettingEnabled() + { + // Arrange + var topicName = $"test-topic-{Guid.NewGuid()}"; + var consumerGroup = "test-consumer-group"; + + var metadata = _adminClient.GetMetadata(TimeSpan.FromSeconds(5)); + metadata.Topics.Any(t => t.Topic == topicName).Should().BeFalse(); + + // Act + await _messagingProvider.SubscribeAsync(topicName, consumerGroup, async (msg, token) => + await Task.CompletedTask, _cts.Token); + + var timeout = TimeSpan.FromSeconds(10); + var pollingInterval = TimeSpan.FromMilliseconds(500); + var startTime = DateTime.UtcNow; + + bool topicExists = false; + while (DateTime.UtcNow - startTime < timeout) + { + metadata = _adminClient.GetMetadata(TimeSpan.FromSeconds(5)); + if (metadata.Topics.Any(t => t.Topic == topicName)) + { + topicExists = true; + break; + } + await Task.Delay(pollingInterval); + } + + // Assert + topicExists.Should().BeTrue("Kafka should create the topic automatically when subscribing."); + } + + [Fact] + public async Task AutoCreateTopic_ShouldNotCreateTopicOnSubscribe_WhenSettingDisabled() + { + // Arrange + var topicName = $"test-topic-{Guid.NewGuid()}"; + var consumerGroup = "test-consumer-group"; + + var metadata = _adminClient.GetMetadata(TimeSpan.FromSeconds(5)); + metadata.Topics.Any(t => t.Topic == topicName).Should().BeFalse("The topic should not exist before subscribing."); + + var messagingProvider = new KafkaMessagingProvider( + _adminClient, + _producer, + _consumerFactory, + autoCreateTopics: false, + _logger); + + // Act & Assert + var exception = await Assert.ThrowsAsync(async () => + { + await 
messagingProvider.SubscribeAsync(topicName, consumerGroup, async (msg, token) => + await Task.CompletedTask, _cts.Token); + }); + + exception.Message.Should().Be($"Topic '{topicName}' does not exist. Enable 'autoCreateTopics' to create topics automatically."); + } + + [Fact] + public async Task AutoCreateTopic_ShouldCreateTopicOnPublish_WhenSettingEnabled() + { + // Arrange + var topicName = $"test-topic-{Guid.NewGuid()}"; + + var metadataBefore = _adminClient.GetMetadata(TimeSpan.FromSeconds(5)); + metadataBefore.Topics.Any(t => t.Topic == topicName).Should().BeFalse("The topic should not exist before publishing."); + + var messagingProvider = new KafkaMessagingProvider( + _adminClient, + _producer, + _consumerFactory, + autoCreateTopics: true, // Auto-create enabled + _logger); + + // Act + await messagingProvider.PublishAsync(topicName, "Test message", _cts.Token); + + // Assert + var metadataAfter = _adminClient.GetMetadata(TimeSpan.FromSeconds(5)); + metadataAfter.Topics.Any(t => t.Topic == topicName).Should().BeTrue("Kafka should create the topic automatically when publishing."); + } + + [Fact] + public async Task AutoCreateTopic_ShouldNotCreateTopicOnPublish_WhenSettingDisabled() + { + // Arrange + var topicName = $"test-topic-{Guid.NewGuid()}"; + + var metadataBefore = _adminClient.GetMetadata(TimeSpan.FromSeconds(5)); + metadataBefore.Topics.Any(t => t.Topic == topicName).Should().BeFalse("The topic should not exist before publishing."); + + var messagingProvider = new KafkaMessagingProvider( + _adminClient, + _producer, + _consumerFactory, + autoCreateTopics: false, // Auto-create disabled + _logger); + + // Act & Assert + var exception = await Assert.ThrowsAsync(async () => + { + await messagingProvider.PublishAsync(topicName, "Test message", _cts.Token); + }); + + exception.Message.Should().Contain($"Topic '{topicName}' does not exist. 
Enable 'autoCreateTopics' to create topics automatically."); + } + + [Fact] + public async Task AtLeastOnceGuarantee_ShouldDeliverToLateSubscriber_WhenSubscribedBefore() + { + // Arrange + var topicName = $"test-topic-{Guid.NewGuid()}"; + var consumerGroup = $"test-consumer-group-{Guid.NewGuid()}"; + var messageToSend = "Persistent Message Test"; + var firstSubscriberReceived = new TaskCompletionSource(); + var lateSubscriberReceived = new TaskCompletionSource(); + ConcurrentBag _receivedMessages = new(); + + var firstSubscription = await _messagingProvider.SubscribeAsync(topicName, consumerGroup, async (msg, token) => + { + firstSubscriberReceived.SetResult(true); + }, _cts.Token); + + await WaitForKafkaConsumerGroupStable(consumerGroup, _cts.Token); + + await Task.Delay(500, _cts.Token); + + await _messagingProvider.PublishAsync(topicName, messageToSend, _cts.Token); + + await firstSubscriberReceived.Task.WaitAsync(TimeSpan.FromSeconds(30)); + + await Task.Delay(500, _cts.Token); + + await _messagingProvider.UnsubscribeAsync(firstSubscription, _cts.Token); + + await Task.Delay(500, _cts.Token); + + await _messagingProvider.PublishAsync(topicName, messageToSend, _cts.Token); + await Task.Delay(5000, _cts.Token); + + // Late subscriber + await _messagingProvider.SubscribeAsync(topicName, consumerGroup, async (msg, token) => + { + _receivedMessages.Add(msg); + lateSubscriberReceived.TrySetResult(true); + }, _cts.Token); + + await WaitForKafkaConsumerGroupStable(consumerGroup, _cts.Token); + + await Task.Delay(500, _cts.Token); + + await lateSubscriberReceived.Task.WaitAsync(TimeSpan.FromSeconds(30)); + + // Assert + _receivedMessages.Should().Contain(messageToSend); + } + + [Fact] + public async Task AtLeastOnceGuarantee_ShouldNotDeliverToLateSubscriber_WhenNotSubscribedBefore() + { + // Arrange + var topicName = $"test-topic-{Guid.NewGuid()}"; + var consumerGroup = "test-consumer-group"; + var messageToSend = "Non-Persistent Message Test"; + ConcurrentBag _receivedMessages = new(); + + // Act + await _messagingProvider.PublishAsync(topicName, messageToSend, _cts.Token); + await Task.Delay(2000); + + // Late subscriber + await _messagingProvider.SubscribeAsync(topicName, consumerGroup, async (msg, token) => + { + _receivedMessages.Add(msg); + }, _cts.Token); + + await WaitForKafkaConsumerGroupStable(consumerGroup, _cts.Token); + + await Task.Delay(5000); + + // Assert + _receivedMessages.Should().NotContain(messageToSend); + } + + [Fact] + public async Task AtLeastOnceGuarantee_ShouldRedeliverLater_WhenMessageNotAcked() + { + // Arrange + var topicName = $"test-topic-{Guid.NewGuid()}"; + var consumerGroup = "test-consumer-group"; + var messageToSend = "Redelivery Test"; + ConcurrentBag _receivedMessages = new(); + + async Task FaultyHandler(string msg, CancellationToken token) + { + _receivedMessages.Add(msg); + throw new Exception("Simulated consumer crash before acknowledgment."); + } + + await _messagingProvider.SubscribeAsync(topicName, consumerGroup, FaultyHandler, _cts.Token); + + await WaitForKafkaConsumerGroupStable(consumerGroup, _cts.Token); + + // Act + await _messagingProvider.PublishAsync(topicName, messageToSend, _cts.Token); + + for (int i = 0; i < 300; i++) + { + if (_receivedMessages.Count > 1) break; + await Task.Delay(1000); + } + + // Assert + _receivedMessages.Count.Should().BeGreaterThan(1); + } + + [Fact] + public async Task CompetingConsumers_ShouldDeliverOnlyOnce_WhenMultipleConsumersInGroup() + { + // Arrange + var topicName = $"test-topic-{Guid.NewGuid()}"; + var 
consumerGroup = "test-consumer-group"; + var receivedMessages = new ConcurrentDictionary(); // Track messages per consumer + var messageToSend = "Competing Consumer Test"; + var messageProcessed = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); + var consumerIds = new ConcurrentBag(); + + async Task MessageHandler(string msg, CancellationToken token) + { + var consumerId = Guid.NewGuid(); + + receivedMessages.AddOrUpdate(consumerId, 1, (_, count) => count + 1); + consumerIds.Add(consumerId); + + _logger.LogDebug($"Consumer {consumerId} received a message."); + + messageProcessed.TrySetResult(true); + } + + await _messagingProvider.SubscribeAsync(topicName, consumerGroup, MessageHandler, _cts.Token); + await _messagingProvider.SubscribeAsync(topicName, consumerGroup, MessageHandler, _cts.Token); + + await WaitForKafkaConsumerGroupStable(consumerGroup, _cts.Token); + + // Act + await _messagingProvider.PublishAsync(topicName, messageToSend, _cts.Token); + + try + { + await messageProcessed.Task.WaitAsync(TimeSpan.FromSeconds(5)); + } + catch (TimeoutException) + { + _logger.LogDebug("Timed out waiting for consumer to receive the message."); + Assert.Fail("No consumer received the message."); + } + + await Task.Delay(5000); + + // Assert + receivedMessages.Count.Should().Be(1, + $"Expected only one consumer to receive the message, but {receivedMessages.Count} consumers received it."); + } + + [Fact] + public async Task CompetingConsumers_ShouldDistributeMessages_WhenMultipleConsumersInGroup() + { + // Arrange + var topicName = $"test-topic-{Guid.NewGuid()}"; + var consumerGroup = "test-consumer-group"; + var totalMessages = 100; + var numConsumers = 2; + var variancePercentage = 0.3; + var perConsumerMessageCount = new ConcurrentDictionary(); // Track messages per consumer + var allMessagesProcessed = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); + + async Task CreateConsumer() + { + var consumerId = Guid.NewGuid(); + _logger.LogDebug($"Creating consumer with unique ID: {consumerId}"); + + async Task MessageHandler(string msg, CancellationToken token) + { + perConsumerMessageCount.AddOrUpdate(consumerId, 1, (_, count) => count + 1); + _logger.LogDebug($"Consumer {consumerId} received a message."); + + if (perConsumerMessageCount.Values.Sum() >= totalMessages) + { + allMessagesProcessed.TrySetResult(true); + } + } + + await _messagingProvider.SubscribeAsync(topicName, consumerGroup, MessageHandler, _cts.Token); + } + + for (int i = 0; i < numConsumers; i++) + { + await CreateConsumer(); + } + + await WaitForKafkaConsumerGroupStable(consumerGroup, _cts.Token); + + // Act + for (int i = 0; i < totalMessages; i++) + { + await _messagingProvider.PublishAsync(topicName, "Test Message", _cts.Token); + } + + try + { + await allMessagesProcessed.Task.WaitAsync(TimeSpan.FromSeconds(10)); + } + catch (TimeoutException) + { + _logger.LogDebug("Timed out waiting for consumers to receive all messages."); + Assert.Fail($"Consumers only processed {perConsumerMessageCount.Values.Sum()} of {totalMessages} messages."); + } + + // Assert + var messageCounts = perConsumerMessageCount.Values.ToList(); + var expectedPerConsumer = totalMessages / numConsumers; + var variance = (int)(expectedPerConsumer * variancePercentage); + var minAllowed = expectedPerConsumer - variance; + var maxAllowed = expectedPerConsumer + variance; + + foreach (var count in messageCounts) + { + Assert.InRange(count, minAllowed, maxAllowed); + } + } + + private async Task 
WaitForKafkaConsumerGroupStable(string consumerGroup, CancellationToken cancellationToken) + { + var maxAttempts = 30; + for (int i = 0; i < maxAttempts; i++) + { + var groupInfo = _adminClient.ListGroups(TimeSpan.FromSeconds(5)) + .FirstOrDefault(g => g.Group == consumerGroup); + + if (groupInfo?.State == "Stable") + { + _logger.LogDebug("Consumer group {ConsumerGroup} is now stable.", consumerGroup); + return; + } + + _logger.LogDebug("Waiting for consumer group {ConsumerGroup} to stabilize...", consumerGroup); + await Task.Delay(500, cancellationToken); + } + + throw new TimeoutException($"Consumer group {consumerGroup} did not stabilize in time."); + } + } +} diff --git a/src/OpenDDD.Tests/Integration/Infrastructure/Events/Kafka/KafkaTestsCollection.cs b/src/OpenDDD.Tests/Integration/Infrastructure/Events/Kafka/KafkaTestsCollection.cs new file mode 100644 index 0000000..f5c5495 --- /dev/null +++ b/src/OpenDDD.Tests/Integration/Infrastructure/Events/Kafka/KafkaTestsCollection.cs @@ -0,0 +1,5 @@ +namespace OpenDDD.Tests.Integration.Infrastructure.Events.Kafka +{ + [CollectionDefinition("KafkaTests", DisableParallelization = true)] + public class KafkaTestsCollection { } +} diff --git a/src/OpenDDD.Tests/Integration/Infrastructure/Events/RabbitMq/RabbitMqMessagingProviderTests.cs b/src/OpenDDD.Tests/Integration/Infrastructure/Events/RabbitMq/RabbitMqMessagingProviderTests.cs new file mode 100644 index 0000000..caa3e56 --- /dev/null +++ b/src/OpenDDD.Tests/Integration/Infrastructure/Events/RabbitMq/RabbitMqMessagingProviderTests.cs @@ -0,0 +1,482 @@ +using System.Collections.Concurrent; +using FluentAssertions; +using Microsoft.Extensions.Logging; +using Xunit.Abstractions; +using OpenDDD.Infrastructure.Events.RabbitMq; +using OpenDDD.Infrastructure.Events.RabbitMq.Factories; +using OpenDDD.Tests.Base; +using RabbitMQ.Client; +using RabbitMQ.Client.Exceptions; + +namespace OpenDDD.Tests.Integration.Infrastructure.Events.RabbitMq +{ + [Collection("RabbitMqTests")] + public class RabbitMqMessagingProviderTests : IntegrationTests, IAsyncLifetime + { + private readonly RabbitMqMessagingProvider _messagingProvider; + private readonly IConnectionFactory _connectionFactory; + private readonly IRabbitMqConsumerFactory _consumerFactory; + private readonly ILogger _logger; + private IConnection? _connection; + private IChannel? _channel; + private readonly CancellationTokenSource _cts = new(TimeSpan.FromSeconds(60)); + + private readonly string _testTopic = "OpenDddTestTopic"; + private readonly string _testConsumerGroup = "OpenDddTestGroup"; + + public RabbitMqMessagingProviderTests(ITestOutputHelper testOutputHelper) + : base(testOutputHelper, enableLogging: true) + { + _logger = LoggerFactory.CreateLogger(); + + _connectionFactory = new ConnectionFactory + { + HostName = Environment.GetEnvironmentVariable("RABBITMQ_HOST") ?? "localhost", + Port = int.Parse(Environment.GetEnvironmentVariable("RABBITMQ_PORT") ?? "5672"), + UserName = Environment.GetEnvironmentVariable("RABBITMQ_USERNAME") ?? "guest", + Password = Environment.GetEnvironmentVariable("RABBITMQ_PASSWORD") ?? "guest", + VirtualHost = Environment.GetEnvironmentVariable("RABBITMQ_VHOST") ?? 
"/" + }; + + _consumerFactory = new RabbitMqConsumerFactory(_logger); + _messagingProvider = new RabbitMqMessagingProvider( + _connectionFactory, + _consumerFactory, + autoCreateTopics: true, + _logger); + } + + public async Task InitializeAsync() + { + await EnsureConnectionAndChannelOpenAsync(); + await CleanupExchangesAndQueuesAsync(); + } + + public async Task DisposeAsync() + { + await CleanupExchangesAndQueuesAsync(); + + if (_channel is not null) + { + await _channel.CloseAsync(); + await _channel.DisposeAsync(); + } + + if (_connection is not null) + { + await _connection.CloseAsync(); + await _connection.DisposeAsync(); + } + + _cts.Cancel(); + await _messagingProvider.DisposeAsync(); + } + + private async Task VerifyExchangeAndQueueDoNotExist() + { + try + { + await _channel!.ExchangeDeclarePassiveAsync(_testTopic, CancellationToken.None); + Assert.Fail($"Exchange '{_testTopic}' already exists before test."); + } + catch (OperationInterruptedException ex) when (ex.ShutdownReason?.ReplyCode == 404) + { + // Expected: Exchange does not exist + } + + await EnsureConnectionAndChannelOpenAsync(); + + try + { + await _channel!.QueueDeclarePassiveAsync($"{_testConsumerGroup}.{_testTopic}", CancellationToken.None); + Assert.Fail($"Queue '{_testConsumerGroup}.{_testTopic}' already exists before test."); + } + catch (OperationInterruptedException ex) when (ex.ShutdownReason?.ReplyCode == 404) + { + // Expected: Queue does not exist + } + + await EnsureConnectionAndChannelOpenAsync(); + } + + private async Task EnsureConnectionAndChannelOpenAsync() + { + if (_connection is null || !_connection.IsOpen) + { + _connection = await _connectionFactory.CreateConnectionAsync(CancellationToken.None); + } + + if (_channel is null || !_channel.IsOpen) + { + _channel = await _connection.CreateChannelAsync(null, CancellationToken.None); + } + } + + private async Task CleanupExchangesAndQueuesAsync() + { + try + { + await _channel!.ExchangeDeleteAsync(_testTopic, ifUnused: false, cancellationToken: CancellationToken.None); + } + catch (OperationInterruptedException) { /* Exchange does not exist */ } + + try + { + await _channel!.QueueDeleteAsync($"{_testConsumerGroup}.{_testTopic}", ifUnused: false, ifEmpty: false, cancellationToken: CancellationToken.None); + } + catch (OperationInterruptedException) { /* Queue does not exist */ } + } + + [Fact] + public async Task AutoCreateTopic_ShouldCreateTopicOnSubscribe_WhenSettingEnabled() + { + // Arrange + var topicName = $"test-topic-{Guid.NewGuid()}"; + var consumerGroup = "test-consumer-group"; + + var messagingProvider = new RabbitMqMessagingProvider( + _connectionFactory, + _consumerFactory, + autoCreateTopics: true, // Auto-create enabled + _logger); + + var exchangeExistsBefore = await ExchangeExistsAsync(topicName, _cts.Token); + exchangeExistsBefore.Should().BeFalse("The exchange should not exist before subscribing."); + + // Act + await messagingProvider.SubscribeAsync(topicName, consumerGroup, async (msg, token) => + await Task.CompletedTask, _cts.Token); + + var timeout = TimeSpan.FromSeconds(30); + var pollingInterval = TimeSpan.FromMilliseconds(500); + var startTime = DateTime.UtcNow; + + bool exchangeExists = false; + while (DateTime.UtcNow - startTime < timeout) + { + if (await ExchangeExistsAsync(topicName, _cts.Token)) + { + exchangeExists = true; + break; + } + await Task.Delay(pollingInterval, _cts.Token); + } + + // Assert + exchangeExists.Should().BeTrue("RabbitMQ should create the exchange automatically when subscribing."); + } + + [Fact] 
+ public async Task AutoCreateTopic_ShouldNotCreateTopicOnSubscribe_WhenSettingDisabled() + { + // Arrange + var topicName = $"test-topic-{Guid.NewGuid()}"; + var consumerGroup = "test-consumer-group"; + + var exchangeExistsBefore = await ExchangeExistsAsync(topicName, _cts.Token); + exchangeExistsBefore.Should().BeFalse("The exchange should not exist before subscribing."); + + var messagingProvider = new RabbitMqMessagingProvider( + _connectionFactory, + _consumerFactory, + autoCreateTopics: false, // Auto-create disabled + _logger); + + // Act & Assert + var exception = await Assert.ThrowsAsync(async () => + { + await messagingProvider.SubscribeAsync(topicName, consumerGroup, async (msg, token) => + await Task.CompletedTask, _cts.Token); + }); + + exception.Message.Should().Be($"Topic '{topicName}' does not exist. Enable 'autoCreateTopics' to create topics automatically."); + } + + [Fact] + public async Task AutoCreateTopic_ShouldCreateTopicOnPublish_WhenSettingEnabled() + { + // Arrange + var topicName = $"test-topic-{Guid.NewGuid()}"; + + var exchangeExistsBefore = await ExchangeExistsAsync(topicName, _cts.Token); + exchangeExistsBefore.Should().BeFalse("The exchange should not exist before publishing."); + + var messagingProvider = new RabbitMqMessagingProvider( + _connectionFactory, + _consumerFactory, + autoCreateTopics: true, // Auto-create enabled + _logger); + + // Act + await messagingProvider.PublishAsync(topicName, "Test message", _cts.Token); + + // Assert + var exchangeExistsAfter = await ExchangeExistsAsync(topicName, _cts.Token); + exchangeExistsAfter.Should().BeTrue("RabbitMQ should create the exchange automatically when publishing."); + } + + [Fact] + public async Task AutoCreateTopic_ShouldNotCreateTopicOnPublish_WhenSettingDisabled() + { + // Arrange + var topicName = $"test-topic-{Guid.NewGuid()}"; + + var exchangeExistsBefore = await ExchangeExistsAsync(topicName, _cts.Token); + exchangeExistsBefore.Should().BeFalse("The exchange should not exist before publishing."); + + var messagingProvider = new RabbitMqMessagingProvider( + _connectionFactory, + _consumerFactory, + autoCreateTopics: false, // Auto-create disabled + _logger); + + // Act & Assert + var exception = await Assert.ThrowsAsync(async () => + { + await messagingProvider.PublishAsync(topicName, "Test message", _cts.Token); + }); + + exception.Message.Should().Contain($"Topic '{topicName}' does not exist. 
Enable 'autoCreateTopics' to create topics automatically."); + } + + [Fact] + public async Task AtLeastOnceGuarantee_ShouldDeliverToLateSubscriber_WhenSubscribedBefore() + { + // Arrange + var receivedMessages = new ConcurrentBag(); + var messageToSend = "Persistent Message Test"; + + var lateSubscriberReceived = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); + + var firstSubscription = await _messagingProvider.SubscribeAsync(_testTopic, _testConsumerGroup, async (msg, token) => + { + Assert.Fail("First subscription should not receive the message."); + }, _cts.Token); + await Task.Delay(500); + + await _messagingProvider.UnsubscribeAsync(firstSubscription, _cts.Token); + await Task.Delay(500); + + // Act + await _messagingProvider.PublishAsync(_testTopic, messageToSend, _cts.Token); + + await Task.Delay(2000); + + // Late subscriber + await _messagingProvider.SubscribeAsync(_testTopic, _testConsumerGroup, async (msg, token) => + { + receivedMessages.Add(msg); + lateSubscriberReceived.TrySetResult(true); + }, _cts.Token); + + await Task.Delay(1000); + + // Assert + try + { + await lateSubscriberReceived.Task.WaitAsync(TimeSpan.FromSeconds(5)); + } + catch (TimeoutException) + { + Assert.Fail($"Late subscriber did not receive the expected message '{messageToSend}' within 5 seconds."); + } + + Assert.Contains(messageToSend, receivedMessages); + } + + [Fact] + public async Task AtLeastOnceGuarantee_ShouldNotDeliverToLateSubscriber_WhenNotSubscribedBefore() + { + // Arrange + var messageToSend = "Non-Persistent Message Test"; + var receivedMessages = new ConcurrentBag(); + + // Act + await _messagingProvider.PublishAsync(_testTopic, messageToSend, _cts.Token); + await Task.Delay(2000); + + // Late subscriber + await _messagingProvider.SubscribeAsync(_testTopic, _testConsumerGroup, async (msg, token) => + { + receivedMessages.Add(msg); + }, _cts.Token); + + await Task.Delay(1000); + + // Assert + Assert.DoesNotContain(messageToSend, receivedMessages); + } + + [Fact] + public async Task AtLeastOnceGuarantee_ShouldRedeliverLater_WhenMessageNotAcked() + { + // Arrange + var messageToSend = "Redelivery Test"; + var receivedMessages = new ConcurrentBag(); + + async Task FaultyHandler(string msg, CancellationToken token) + { + receivedMessages.Add(msg); + throw new Exception("Simulated consumer crash before acknowledgment."); + } + + await _messagingProvider.SubscribeAsync(_testTopic, _testConsumerGroup, FaultyHandler, _cts.Token); + await Task.Delay(500); + + // Act + await _messagingProvider.PublishAsync(_testTopic, messageToSend, _cts.Token); + + for (int i = 0; i < 20; i++) + { + if (receivedMessages.Count > 1) break; + await Task.Delay(500); + } + + // Assert + Assert.True(receivedMessages.Count > 1, "Message should be redelivered at least once."); + } + + [Fact] + public async Task CompetingConsumers_ShouldDeliverOnlyOnce_WhenMultipleConsumersInGroup() + { + // Arrange + var receivedMessages = new ConcurrentDictionary(); + var messageToSend = "Competing Consumer Test"; + var allSubscribersProcessed = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); + + async Task MessageHandler(string msg, CancellationToken token) + { + receivedMessages.AddOrUpdate("received", 1, (key, value) => value + 1); + + // If any consumer has received more than 1 message, fail immediately + if (receivedMessages["received"] > 1) + { + allSubscribersProcessed.TrySetException(new Exception("More than one consumer in the group received the message!")); + } + else + { 
allSubscribersProcessed.TrySetResult(true); + } + } + + await _messagingProvider.SubscribeAsync(_testTopic, _testConsumerGroup, MessageHandler, _cts.Token); + await _messagingProvider.SubscribeAsync(_testTopic, _testConsumerGroup, MessageHandler, _cts.Token); + await _messagingProvider.SubscribeAsync(_testTopic, _testConsumerGroup, MessageHandler, _cts.Token); + await Task.Delay(500); + + // Act + await _messagingProvider.PublishAsync(_testTopic, messageToSend, _cts.Token); + + await Task.Delay(3000); + + try + { + await allSubscribersProcessed.Task.WaitAsync(TimeSpan.FromSeconds(5)); + } + catch (TimeoutException) + { + Assert.Fail("Timed out waiting for message processing."); + } + + // Assert + Assert.Equal(1, receivedMessages.GetValueOrDefault("received", 0)); + } + + [Fact] + public async Task CompetingConsumers_ShouldDistributeMessages_WhenMultipleConsumersInGroup() + { + // Arrange + var receivedMessages = new ConcurrentDictionary(); + var totalMessages = 100; + var numConsumers = 10; + var variancePercentage = 0.1; + var perConsumerMessageCount = new ConcurrentDictionary(); + var allMessagesProcessed = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); + + async Task CreateConsumer() + { + var consumerId = Guid.NewGuid(); + + async Task MessageHandler(string msg, CancellationToken token) + { + perConsumerMessageCount.AddOrUpdate(consumerId, 1, (_, count) => count + 1); + + _logger.LogDebug($"Consumer {consumerId} received a message."); + + if (perConsumerMessageCount.Values.Sum() >= totalMessages) + { + allMessagesProcessed.TrySetResult(true); + } + } + + await _messagingProvider.SubscribeAsync(_testTopic, _testConsumerGroup, MessageHandler, _cts.Token); + } + + for (int i = 0; i < numConsumers; i++) + { + await CreateConsumer(); + } + + // Act + for (int i = 0; i < totalMessages; i++) + { + await _messagingProvider.PublishAsync(_testTopic, "Test Message", _cts.Token); + } + + try + { + await allMessagesProcessed.Task.WaitAsync(TimeSpan.FromSeconds(10)); + } + catch (TimeoutException) + { + _logger.LogDebug("Timed out waiting for consumers to receive all messages."); + Assert.Fail($"Consumers only processed {perConsumerMessageCount.Values.Sum()} of {totalMessages} messages."); + } + + // Assert + var messageCounts = perConsumerMessageCount.Values.ToList(); + var expectedPerConsumer = totalMessages / numConsumers; + var variance = (int)(expectedPerConsumer * variancePercentage); + var minAllowed = expectedPerConsumer - variance; + var maxAllowed = expectedPerConsumer + variance; + + foreach (var count in messageCounts) + { + Assert.InRange(count, minAllowed, maxAllowed); + } + } + + private async Task EnsureConnectedAsync(CancellationToken cancellationToken) + { + if (_connection is { IsOpen: true } && _channel is { IsOpen: true }) return; + + _connection = await _connectionFactory.CreateConnectionAsync(cancellationToken); + _channel = await _connection.CreateChannelAsync(null, cancellationToken); + } + + private async Task ExchangeExistsAsync(string exchange, CancellationToken cancellationToken) + { + try + { + // Use a temporary channel to check exchange existence, since old one might have stale topic data + using var tempChannel = await _connection!.CreateChannelAsync(null, cancellationToken); + await tempChannel.ExchangeDeclarePassiveAsync(exchange, cancellationToken); + + return true; + } + catch (OperationInterruptedException ex) when (ex.ShutdownReason?.ReplyCode == 404) + { + _logger.LogDebug("Exchange '{Exchange}' does not exist yet.", exchange); 
+ + await EnsureConnectedAsync(cancellationToken); + + return false; + } + catch (Exception ex) + { + _logger.LogError(ex, "Unexpected error while checking if exchange '{Exchange}' exists.", exchange); + throw; + } + } + } +} diff --git a/src/OpenDDD.Tests/Integration/Infrastructure/Events/RabbitMq/RabbitMqTestsCollection.cs b/src/OpenDDD.Tests/Integration/Infrastructure/Events/RabbitMq/RabbitMqTestsCollection.cs new file mode 100644 index 0000000..70668a3 --- /dev/null +++ b/src/OpenDDD.Tests/Integration/Infrastructure/Events/RabbitMq/RabbitMqTestsCollection.cs @@ -0,0 +1,5 @@ +namespace OpenDDD.Tests.Integration.Infrastructure.Events.RabbitMq +{ + [CollectionDefinition("RabbitMqTests", DisableParallelization = true)] + public class RabbitMqTestsCollection { } +} diff --git a/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/Configurations/TestAggregateRootConfiguration.cs b/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/Configurations/TestAggregateRootConfiguration.cs new file mode 100644 index 0000000..7ba2510 --- /dev/null +++ b/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/Configurations/TestAggregateRootConfiguration.cs @@ -0,0 +1,24 @@ +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Metadata.Builders; +using OpenDDD.Infrastructure.Persistence.EfCore.Base; +using OpenDDD.Tests.Base.Domain.Model; + +namespace OpenDDD.Tests.Integration.Infrastructure.Persistence.EfCore.Configurations +{ + public class TestAggregateRootConfiguration : EfAggregateRootConfigurationBase + { + public override void Configure(EntityTypeBuilder builder) + { + base.Configure(builder); + + builder.OwnsOne(a => a.Value); + + builder.HasMany(a => a.Entities) + .WithOne() + .HasForeignKey("TestAggregateRootId") + .OnDelete(DeleteBehavior.Cascade); + + builder.Navigation(a => a.Entities).AutoInclude(); + } + } +} diff --git a/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/Configurations/TestEntityConfiguration.cs b/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/Configurations/TestEntityConfiguration.cs new file mode 100644 index 0000000..4d08546 --- /dev/null +++ b/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/Configurations/TestEntityConfiguration.cs @@ -0,0 +1,16 @@ +using Microsoft.EntityFrameworkCore.Metadata.Builders; +using OpenDDD.Infrastructure.Persistence.EfCore.Base; +using OpenDDD.Tests.Base.Domain.Model; + +namespace OpenDDD.Tests.Integration.Infrastructure.Persistence.EfCore.Configurations +{ + public class TestEntityConfiguration : EfEntityConfigurationBase + { + public override void Configure(EntityTypeBuilder builder) + { + base.Configure(builder); + + builder.Property("TestAggregateRootId").IsRequired(); + } + } +} diff --git a/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/Configurations/TestValueObjectConfiguration.cs b/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/Configurations/TestValueObjectConfiguration.cs new file mode 100644 index 0000000..52bda60 --- /dev/null +++ b/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/Configurations/TestValueObjectConfiguration.cs @@ -0,0 +1,14 @@ +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Metadata.Builders; +using OpenDDD.Tests.Base.Domain.Model; + +namespace OpenDDD.Tests.Integration.Infrastructure.Persistence.EfCore.Configurations +{ + public class TestValueObjectConfiguration : IEntityTypeConfiguration + { + public void Configure(EntityTypeBuilder builder) + { 
+ builder.HasNoKey(); + } + } +} diff --git a/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/DbContext/Postgres/PostgresTestDbContext.cs b/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/DbContext/Postgres/PostgresTestDbContext.cs new file mode 100644 index 0000000..f8d0d6e --- /dev/null +++ b/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/DbContext/Postgres/PostgresTestDbContext.cs @@ -0,0 +1,18 @@ +using Microsoft.EntityFrameworkCore; +using Microsoft.Extensions.Logging; +using OpenDDD.API.Options; +using OpenDDD.Infrastructure.Persistence.EfCore.Base; +using OpenDDD.Tests.Base.Domain.Model; + +namespace OpenDDD.Tests.Integration.Infrastructure.Persistence.EfCore.DbContext.Postgres +{ + public class PostgresTestDbContext : OpenDddDbContextBase + { + public PostgresTestDbContext(DbContextOptions options, OpenDddOptions openDddOptions, ILogger logger) + : base(options, openDddOptions, logger) + { + } + + public DbSet TestAggregates { get; set; } = null!; + } +} diff --git a/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/DbContext/Postgres/PostgresTestDbContextFactory.cs b/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/DbContext/Postgres/PostgresTestDbContextFactory.cs new file mode 100644 index 0000000..bec47b7 --- /dev/null +++ b/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/DbContext/Postgres/PostgresTestDbContextFactory.cs @@ -0,0 +1,23 @@ +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Design; +using Microsoft.Extensions.Logging; +using OpenDDD.API.Options; + +namespace OpenDDD.Tests.Integration.Infrastructure.Persistence.EfCore.DbContext.Postgres +{ + public class PostgresTestDbContextFactory : IDesignTimeDbContextFactory + { + public PostgresTestDbContext CreateDbContext(string[] args) + { + var optionsBuilder = new DbContextOptionsBuilder() + .UseNpgsql("Host=localhost;Port=5432;Database=testdb;Username=testuser;Password=testpassword") + .EnableSensitiveDataLogging(); + + var openDddOptions = new OpenDddOptions(); + var loggerFactory = LoggerFactory.Create(builder => builder.AddConsole()); + var logger = loggerFactory.CreateLogger(); + + return new PostgresTestDbContext(optionsBuilder.Options, openDddOptions, logger); + } + } +} diff --git a/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/DbContext/Sqlite/SqliteTestDbContext.cs b/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/DbContext/Sqlite/SqliteTestDbContext.cs new file mode 100644 index 0000000..515ad76 --- /dev/null +++ b/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/DbContext/Sqlite/SqliteTestDbContext.cs @@ -0,0 +1,18 @@ +using Microsoft.EntityFrameworkCore; +using Microsoft.Extensions.Logging; +using OpenDDD.API.Options; +using OpenDDD.Infrastructure.Persistence.EfCore.Base; +using OpenDDD.Tests.Base.Domain.Model; + +namespace OpenDDD.Tests.Integration.Infrastructure.Persistence.EfCore.DbContext.Sqlite +{ + public class SqliteTestDbContext : OpenDddDbContextBase + { + public SqliteTestDbContext(DbContextOptions options, OpenDddOptions openDddOptions, ILogger logger) + : base(options, openDddOptions, logger) + { + } + + public DbSet TestAggregates { get; set; } = null!; + } +} diff --git a/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/DbContext/Sqlite/SqliteTestDbContextFactory.cs b/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/DbContext/Sqlite/SqliteTestDbContextFactory.cs new file mode 100644 index 
0000000..1f1e124 --- /dev/null +++ b/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/DbContext/Sqlite/SqliteTestDbContextFactory.cs @@ -0,0 +1,23 @@ +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Design; +using Microsoft.Extensions.Logging; +using OpenDDD.API.Options; + +namespace OpenDDD.Tests.Integration.Infrastructure.Persistence.EfCore.DbContext.Sqlite +{ + public class SqliteTestDbContextFactory : IDesignTimeDbContextFactory + { + public SqliteTestDbContext CreateDbContext(string[] args) + { + var optionsBuilder = new DbContextOptionsBuilder() + .UseSqlite("DataSource=:memory:") + .EnableSensitiveDataLogging(); + + var openDddOptions = new OpenDddOptions(); + var loggerFactory = LoggerFactory.Create(builder => builder.AddConsole()); + var logger = loggerFactory.CreateLogger(); + + return new SqliteTestDbContext(optionsBuilder.Options, openDddOptions, logger); + } + } +} diff --git a/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/Migrations/Postgres/20250304021847_Postgres_InitialCreate.Designer.cs b/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/Migrations/Postgres/20250304021847_Postgres_InitialCreate.Designer.cs new file mode 100644 index 0000000..86b891d --- /dev/null +++ b/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/Migrations/Postgres/20250304021847_Postgres_InitialCreate.Designer.cs @@ -0,0 +1,140 @@ +// +using System; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.EntityFrameworkCore.Migrations; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion; +using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata; +using OpenDDD.Tests.Integration.Infrastructure.Persistence.EfCore.DbContext.Postgres; + +#nullable disable + +namespace OpenDDD.Tests.Integration.Infrastructure.Persistence.EfCore.Migrations.Postgres +{ + [DbContext(typeof(PostgresTestDbContext))] + [Migration("20250304021847_Postgres_InitialCreate")] + partial class Postgres_InitialCreate + { + /// + protected override void BuildTargetModel(ModelBuilder modelBuilder) + { +#pragma warning disable 612, 618 + modelBuilder + .HasAnnotation("ProductVersion", "9.0.2") + .HasAnnotation("Relational:MaxIdentifierLength", 63); + + NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder); + + modelBuilder.Entity("OpenDDD.Infrastructure.TransactionalOutbox.OutboxEntry", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("uuid"); + + b.Property("CreatedAt") + .HasColumnType("timestamp with time zone"); + + b.Property("EventName") + .IsRequired() + .HasColumnType("text"); + + b.Property("EventType") + .IsRequired() + .HasColumnType("text"); + + b.Property("Payload") + .IsRequired() + .HasColumnType("text"); + + b.Property("ProcessedAt") + .HasColumnType("timestamp with time zone"); + + b.HasKey("Id"); + + b.ToTable("OutboxEntries"); + }); + + modelBuilder.Entity("OpenDDD.Tests.Domain.Model.TestAggregateRoot", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("uuid"); + + b.Property("CreatedAt") + .HasColumnType("timestamp with time zone"); + + b.Property("Name") + .IsRequired() + .HasColumnType("text"); + + b.Property("UpdatedAt") + .HasColumnType("timestamp with time zone"); + + b.HasKey("Id"); + + b.ToTable("TestAggregateRoots", (string)null); + }); + + modelBuilder.Entity("OpenDDD.Tests.Domain.Model.TestEntity", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("uuid"); + + b.Property("Description") + 
.IsRequired() + .HasColumnType("text"); + + b.Property("TestAggregateRootId") + .HasColumnType("uuid"); + + b.HasKey("Id"); + + b.HasIndex("TestAggregateRootId"); + + b.ToTable("TestEntities", (string)null); + }); + + modelBuilder.Entity("OpenDDD.Tests.Domain.Model.TestAggregateRoot", b => + { + b.OwnsOne("OpenDDD.Tests.Domain.Model.TestValueObject", "Value", b1 => + { + b1.Property("TestAggregateRootId") + .HasColumnType("uuid"); + + b1.Property("Number") + .HasColumnType("integer"); + + b1.Property("Text") + .IsRequired() + .HasColumnType("text"); + + b1.HasKey("TestAggregateRootId"); + + b1.ToTable("TestAggregateRoots"); + + b1.WithOwner() + .HasForeignKey("TestAggregateRootId"); + }); + + b.Navigation("Value") + .IsRequired(); + }); + + modelBuilder.Entity("OpenDDD.Tests.Domain.Model.TestEntity", b => + { + b.HasOne("OpenDDD.Tests.Domain.Model.TestAggregateRoot", null) + .WithMany("Entities") + .HasForeignKey("TestAggregateRootId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("OpenDDD.Tests.Domain.Model.TestAggregateRoot", b => + { + b.Navigation("Entities"); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/Migrations/Postgres/20250304021847_Postgres_InitialCreate.cs b/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/Migrations/Postgres/20250304021847_Postgres_InitialCreate.cs new file mode 100644 index 0000000..7e861b0 --- /dev/null +++ b/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/Migrations/Postgres/20250304021847_Postgres_InitialCreate.cs @@ -0,0 +1,84 @@ +using System; +using Microsoft.EntityFrameworkCore.Migrations; + +#nullable disable + +namespace OpenDDD.Tests.Integration.Infrastructure.Persistence.EfCore.Migrations.Postgres +{ + /// + public partial class Postgres_InitialCreate : Migration + { + /// + protected override void Up(MigrationBuilder migrationBuilder) + { + migrationBuilder.CreateTable( + name: "OutboxEntries", + columns: table => new + { + Id = table.Column(type: "uuid", nullable: false), + EventType = table.Column(type: "text", nullable: false), + EventName = table.Column(type: "text", nullable: false), + Payload = table.Column(type: "text", nullable: false), + CreatedAt = table.Column(type: "timestamp with time zone", nullable: false), + ProcessedAt = table.Column(type: "timestamp with time zone", nullable: true) + }, + constraints: table => + { + table.PrimaryKey("PK_OutboxEntries", x => x.Id); + }); + + migrationBuilder.CreateTable( + name: "TestAggregateRoots", + columns: table => new + { + Id = table.Column(type: "uuid", nullable: false), + Name = table.Column(type: "text", nullable: false), + Value_Number = table.Column(type: "integer", nullable: false), + Value_Text = table.Column(type: "text", nullable: false), + CreatedAt = table.Column(type: "timestamp with time zone", nullable: false), + UpdatedAt = table.Column(type: "timestamp with time zone", nullable: false) + }, + constraints: table => + { + table.PrimaryKey("PK_TestAggregateRoots", x => x.Id); + }); + + migrationBuilder.CreateTable( + name: "TestEntities", + columns: table => new + { + Id = table.Column(type: "uuid", nullable: false), + Description = table.Column(type: "text", nullable: false), + TestAggregateRootId = table.Column(type: "uuid", nullable: false) + }, + constraints: table => + { + table.PrimaryKey("PK_TestEntities", x => x.Id); + table.ForeignKey( + name: "FK_TestEntities_TestAggregateRoots_TestAggregateRootId", + column: x => 
x.TestAggregateRootId,
+                        principalTable: "TestAggregateRoots",
+                        principalColumn: "Id",
+                        onDelete: ReferentialAction.Cascade);
+                });
+
+            migrationBuilder.CreateIndex(
+                name: "IX_TestEntities_TestAggregateRootId",
+                table: "TestEntities",
+                column: "TestAggregateRootId");
+        }
+
+        /// <inheritdoc />
+        protected override void Down(MigrationBuilder migrationBuilder)
+        {
+            migrationBuilder.DropTable(
+                name: "OutboxEntries");
+
+            migrationBuilder.DropTable(
+                name: "TestEntities");
+
+            migrationBuilder.DropTable(
+                name: "TestAggregateRoots");
+        }
+    }
+}
diff --git a/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/Migrations/Postgres/PostgresTestDbContextModelSnapshot.cs b/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/Migrations/Postgres/PostgresTestDbContextModelSnapshot.cs
new file mode 100644
index 0000000..8105d48
--- /dev/null
+++ b/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/Migrations/Postgres/PostgresTestDbContextModelSnapshot.cs
@@ -0,0 +1,137 @@
+// <auto-generated />
+using System;
+using Microsoft.EntityFrameworkCore;
+using Microsoft.EntityFrameworkCore.Infrastructure;
+using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
+using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
+using OpenDDD.Tests.Integration.Infrastructure.Persistence.EfCore.DbContext.Postgres;
+
+#nullable disable
+
+namespace OpenDDD.Tests.Integration.Infrastructure.Persistence.EfCore.Migrations.Postgres
+{
+    [DbContext(typeof(PostgresTestDbContext))]
+    partial class PostgresTestDbContextModelSnapshot : ModelSnapshot
+    {
+        protected override void BuildModel(ModelBuilder modelBuilder)
+        {
+#pragma warning disable 612, 618
+            modelBuilder
+                .HasAnnotation("ProductVersion", "9.0.2")
+                .HasAnnotation("Relational:MaxIdentifierLength", 63);
+
+            NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
+
+            modelBuilder.Entity("OpenDDD.Infrastructure.TransactionalOutbox.OutboxEntry", b =>
+                {
+                    b.Property<Guid>("Id")
+                        .ValueGeneratedOnAdd()
+                        .HasColumnType("uuid");
+
+                    b.Property<DateTime>("CreatedAt")
+                        .HasColumnType("timestamp with time zone");
+
+                    b.Property<string>("EventName")
+                        .IsRequired()
+                        .HasColumnType("text");
+
+                    b.Property<string>("EventType")
+                        .IsRequired()
+                        .HasColumnType("text");
+
+                    b.Property<string>("Payload")
+                        .IsRequired()
+                        .HasColumnType("text");
+
+                    b.Property<DateTime?>("ProcessedAt")
+                        .HasColumnType("timestamp with time zone");
+
+                    b.HasKey("Id");
+
+                    b.ToTable("OutboxEntries");
+                });
+
+            modelBuilder.Entity("OpenDDD.Tests.Domain.Model.TestAggregateRoot", b =>
+                {
+                    b.Property<Guid>("Id")
+                        .ValueGeneratedOnAdd()
+                        .HasColumnType("uuid");
+
+                    b.Property<DateTime>("CreatedAt")
+                        .HasColumnType("timestamp with time zone");
+
+                    b.Property<string>("Name")
+                        .IsRequired()
+                        .HasColumnType("text");
+
+                    b.Property<DateTime>("UpdatedAt")
+                        .HasColumnType("timestamp with time zone");
+
+                    b.HasKey("Id");
+
+                    b.ToTable("TestAggregateRoots", (string)null);
+                });
+
+            modelBuilder.Entity("OpenDDD.Tests.Domain.Model.TestEntity", b =>
+                {
+                    b.Property<Guid>("Id")
+                        .ValueGeneratedOnAdd()
+                        .HasColumnType("uuid");
+
+                    b.Property<string>("Description")
+                        .IsRequired()
+                        .HasColumnType("text");
+
+                    b.Property<Guid>("TestAggregateRootId")
+                        .HasColumnType("uuid");
+
+                    b.HasKey("Id");
+
+                    b.HasIndex("TestAggregateRootId");
+
+                    b.ToTable("TestEntities", (string)null);
+                });
+
+            modelBuilder.Entity("OpenDDD.Tests.Domain.Model.TestAggregateRoot", b =>
+                {
+                    b.OwnsOne("OpenDDD.Tests.Domain.Model.TestValueObject", "Value", b1 =>
+                        {
+                            b1.Property<Guid>("TestAggregateRootId")
+                                .HasColumnType("uuid");
+
+                            b1.Property<int>("Number")
+                                .HasColumnType("integer");
+
+                            b1.Property<string>("Text")
+ .IsRequired() + .HasColumnType("text"); + + b1.HasKey("TestAggregateRootId"); + + b1.ToTable("TestAggregateRoots"); + + b1.WithOwner() + .HasForeignKey("TestAggregateRootId"); + }); + + b.Navigation("Value") + .IsRequired(); + }); + + modelBuilder.Entity("OpenDDD.Tests.Domain.Model.TestEntity", b => + { + b.HasOne("OpenDDD.Tests.Domain.Model.TestAggregateRoot", null) + .WithMany("Entities") + .HasForeignKey("TestAggregateRootId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("OpenDDD.Tests.Domain.Model.TestAggregateRoot", b => + { + b.Navigation("Entities"); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/Migrations/Sqlite/20250304021905_Sqlite_InitialCreate.Designer.cs b/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/Migrations/Sqlite/20250304021905_Sqlite_InitialCreate.Designer.cs new file mode 100644 index 0000000..3b35708 --- /dev/null +++ b/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/Migrations/Sqlite/20250304021905_Sqlite_InitialCreate.Designer.cs @@ -0,0 +1,135 @@ +// +using System; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.EntityFrameworkCore.Migrations; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion; +using OpenDDD.Tests.Integration.Infrastructure.Persistence.EfCore.DbContext.Sqlite; + +#nullable disable + +namespace OpenDDD.Tests.Integration.Infrastructure.Persistence.EfCore.Migrations.Sqlite +{ + [DbContext(typeof(SqliteTestDbContext))] + [Migration("20250304021905_Sqlite_InitialCreate")] + partial class Sqlite_InitialCreate + { + /// + protected override void BuildTargetModel(ModelBuilder modelBuilder) + { +#pragma warning disable 612, 618 + modelBuilder.HasAnnotation("ProductVersion", "9.0.2"); + + modelBuilder.Entity("OpenDDD.Infrastructure.TransactionalOutbox.OutboxEntry", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("TEXT"); + + b.Property("CreatedAt") + .HasColumnType("TEXT"); + + b.Property("EventName") + .IsRequired() + .HasColumnType("TEXT"); + + b.Property("EventType") + .IsRequired() + .HasColumnType("TEXT"); + + b.Property("Payload") + .IsRequired() + .HasColumnType("TEXT"); + + b.Property("ProcessedAt") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.ToTable("OutboxEntries"); + }); + + modelBuilder.Entity("OpenDDD.Tests.Domain.Model.TestAggregateRoot", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("TEXT"); + + b.Property("CreatedAt") + .HasColumnType("TEXT"); + + b.Property("Name") + .IsRequired() + .HasColumnType("TEXT"); + + b.Property("UpdatedAt") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.ToTable("TestAggregateRoots", (string)null); + }); + + modelBuilder.Entity("OpenDDD.Tests.Domain.Model.TestEntity", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("TEXT"); + + b.Property("Description") + .IsRequired() + .HasColumnType("TEXT"); + + b.Property("TestAggregateRootId") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("TestAggregateRootId"); + + b.ToTable("TestEntities", (string)null); + }); + + modelBuilder.Entity("OpenDDD.Tests.Domain.Model.TestAggregateRoot", b => + { + b.OwnsOne("OpenDDD.Tests.Domain.Model.TestValueObject", "Value", b1 => + { + b1.Property("TestAggregateRootId") + .HasColumnType("TEXT"); + + b1.Property("Number") + .HasColumnType("INTEGER"); + + b1.Property("Text") + .IsRequired() + .HasColumnType("TEXT"); + + 
b1.HasKey("TestAggregateRootId"); + + b1.ToTable("TestAggregateRoots"); + + b1.WithOwner() + .HasForeignKey("TestAggregateRootId"); + }); + + b.Navigation("Value") + .IsRequired(); + }); + + modelBuilder.Entity("OpenDDD.Tests.Domain.Model.TestEntity", b => + { + b.HasOne("OpenDDD.Tests.Domain.Model.TestAggregateRoot", null) + .WithMany("Entities") + .HasForeignKey("TestAggregateRootId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("OpenDDD.Tests.Domain.Model.TestAggregateRoot", b => + { + b.Navigation("Entities"); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/Migrations/Sqlite/20250304021905_Sqlite_InitialCreate.cs b/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/Migrations/Sqlite/20250304021905_Sqlite_InitialCreate.cs new file mode 100644 index 0000000..a01dd6e --- /dev/null +++ b/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/Migrations/Sqlite/20250304021905_Sqlite_InitialCreate.cs @@ -0,0 +1,84 @@ +using System; +using Microsoft.EntityFrameworkCore.Migrations; + +#nullable disable + +namespace OpenDDD.Tests.Integration.Infrastructure.Persistence.EfCore.Migrations.Sqlite +{ + /// + public partial class Sqlite_InitialCreate : Migration + { + /// + protected override void Up(MigrationBuilder migrationBuilder) + { + migrationBuilder.CreateTable( + name: "OutboxEntries", + columns: table => new + { + Id = table.Column(type: "TEXT", nullable: false), + EventType = table.Column(type: "TEXT", nullable: false), + EventName = table.Column(type: "TEXT", nullable: false), + Payload = table.Column(type: "TEXT", nullable: false), + CreatedAt = table.Column(type: "TEXT", nullable: false), + ProcessedAt = table.Column(type: "TEXT", nullable: true) + }, + constraints: table => + { + table.PrimaryKey("PK_OutboxEntries", x => x.Id); + }); + + migrationBuilder.CreateTable( + name: "TestAggregateRoots", + columns: table => new + { + Id = table.Column(type: "TEXT", nullable: false), + Name = table.Column(type: "TEXT", nullable: false), + Value_Number = table.Column(type: "INTEGER", nullable: false), + Value_Text = table.Column(type: "TEXT", nullable: false), + CreatedAt = table.Column(type: "TEXT", nullable: false), + UpdatedAt = table.Column(type: "TEXT", nullable: false) + }, + constraints: table => + { + table.PrimaryKey("PK_TestAggregateRoots", x => x.Id); + }); + + migrationBuilder.CreateTable( + name: "TestEntities", + columns: table => new + { + Id = table.Column(type: "TEXT", nullable: false), + Description = table.Column(type: "TEXT", nullable: false), + TestAggregateRootId = table.Column(type: "TEXT", nullable: false) + }, + constraints: table => + { + table.PrimaryKey("PK_TestEntities", x => x.Id); + table.ForeignKey( + name: "FK_TestEntities_TestAggregateRoots_TestAggregateRootId", + column: x => x.TestAggregateRootId, + principalTable: "TestAggregateRoots", + principalColumn: "Id", + onDelete: ReferentialAction.Cascade); + }); + + migrationBuilder.CreateIndex( + name: "IX_TestEntities_TestAggregateRootId", + table: "TestEntities", + column: "TestAggregateRootId"); + } + + /// + protected override void Down(MigrationBuilder migrationBuilder) + { + migrationBuilder.DropTable( + name: "OutboxEntries"); + + migrationBuilder.DropTable( + name: "TestEntities"); + + migrationBuilder.DropTable( + name: "TestAggregateRoots"); + } + } +} diff --git 
a/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/Migrations/Sqlite/SqliteTestDbContextModelSnapshot.cs b/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/Migrations/Sqlite/SqliteTestDbContextModelSnapshot.cs new file mode 100644 index 0000000..5ae467c --- /dev/null +++ b/src/OpenDDD.Tests/Integration/Infrastructure/Persistence/EfCore/Migrations/Sqlite/SqliteTestDbContextModelSnapshot.cs @@ -0,0 +1,132 @@ +// +using System; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion; +using OpenDDD.Tests.Integration.Infrastructure.Persistence.EfCore.DbContext.Sqlite; + +#nullable disable + +namespace OpenDDD.Tests.Integration.Infrastructure.Persistence.EfCore.Migrations.Sqlite +{ + [DbContext(typeof(SqliteTestDbContext))] + partial class SqliteTestDbContextModelSnapshot : ModelSnapshot + { + protected override void BuildModel(ModelBuilder modelBuilder) + { +#pragma warning disable 612, 618 + modelBuilder.HasAnnotation("ProductVersion", "9.0.2"); + + modelBuilder.Entity("OpenDDD.Infrastructure.TransactionalOutbox.OutboxEntry", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("TEXT"); + + b.Property("CreatedAt") + .HasColumnType("TEXT"); + + b.Property("EventName") + .IsRequired() + .HasColumnType("TEXT"); + + b.Property("EventType") + .IsRequired() + .HasColumnType("TEXT"); + + b.Property("Payload") + .IsRequired() + .HasColumnType("TEXT"); + + b.Property("ProcessedAt") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.ToTable("OutboxEntries"); + }); + + modelBuilder.Entity("OpenDDD.Tests.Domain.Model.TestAggregateRoot", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("TEXT"); + + b.Property("CreatedAt") + .HasColumnType("TEXT"); + + b.Property("Name") + .IsRequired() + .HasColumnType("TEXT"); + + b.Property("UpdatedAt") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.ToTable("TestAggregateRoots", (string)null); + }); + + modelBuilder.Entity("OpenDDD.Tests.Domain.Model.TestEntity", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("TEXT"); + + b.Property("Description") + .IsRequired() + .HasColumnType("TEXT"); + + b.Property("TestAggregateRootId") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("TestAggregateRootId"); + + b.ToTable("TestEntities", (string)null); + }); + + modelBuilder.Entity("OpenDDD.Tests.Domain.Model.TestAggregateRoot", b => + { + b.OwnsOne("OpenDDD.Tests.Domain.Model.TestValueObject", "Value", b1 => + { + b1.Property("TestAggregateRootId") + .HasColumnType("TEXT"); + + b1.Property("Number") + .HasColumnType("INTEGER"); + + b1.Property("Text") + .IsRequired() + .HasColumnType("TEXT"); + + b1.HasKey("TestAggregateRootId"); + + b1.ToTable("TestAggregateRoots"); + + b1.WithOwner() + .HasForeignKey("TestAggregateRootId"); + }); + + b.Navigation("Value") + .IsRequired(); + }); + + modelBuilder.Entity("OpenDDD.Tests.Domain.Model.TestEntity", b => + { + b.HasOne("OpenDDD.Tests.Domain.Model.TestAggregateRoot", null) + .WithMany("Entities") + .HasForeignKey("TestAggregateRootId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("OpenDDD.Tests.Domain.Model.TestAggregateRoot", b => + { + b.Navigation("Entities"); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/src/OpenDDD.Tests/Integration/Infrastructure/Repository/EfCore/EfCoreTestsCollection.cs 
b/src/OpenDDD.Tests/Integration/Infrastructure/Repository/EfCore/EfCoreTestsCollection.cs new file mode 100644 index 0000000..8f2f8f4 --- /dev/null +++ b/src/OpenDDD.Tests/Integration/Infrastructure/Repository/EfCore/EfCoreTestsCollection.cs @@ -0,0 +1,5 @@ +namespace OpenDDD.Tests.Integration.Infrastructure.Repository.EfCore +{ + [CollectionDefinition("EfCoreTests", DisableParallelization = true)] + public class EfCoreTestsCollection { } +} diff --git a/src/OpenDDD.Tests/Integration/Infrastructure/Repository/EfCore/Postgres/PostgresEfCoreRepositoryTests.cs b/src/OpenDDD.Tests/Integration/Infrastructure/Repository/EfCore/Postgres/PostgresEfCoreRepositoryTests.cs new file mode 100644 index 0000000..d9cda77 --- /dev/null +++ b/src/OpenDDD.Tests/Integration/Infrastructure/Repository/EfCore/Postgres/PostgresEfCoreRepositoryTests.cs @@ -0,0 +1,233 @@ +using System.Linq.Expressions; +using FluentAssertions; +using Microsoft.EntityFrameworkCore; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Xunit.Abstractions; +using Moq; +using Npgsql; +using OpenDDD.API.Options; +using OpenDDD.Domain.Model; +using OpenDDD.Infrastructure.Persistence.EfCore.Base; +using OpenDDD.Infrastructure.Persistence.EfCore.DatabaseSession; +using OpenDDD.Infrastructure.Persistence.EfCore.UoW; +using OpenDDD.Infrastructure.Repository.EfCore; +using OpenDDD.Infrastructure.TransactionalOutbox; +using OpenDDD.Tests.Base; +using OpenDDD.Tests.Base.Domain.Model; +using OpenDDD.Tests.Integration.Infrastructure.Persistence.EfCore.DbContext.Postgres; + +namespace OpenDDD.Tests.Integration.Infrastructure.Repository.EfCore.Postgres +{ + [Collection("EfCoreTests")] + public class PostgresEfCoreRepositoryTests : IntegrationTests, IAsyncLifetime + { + private readonly string _connectionString; + private readonly EfCoreDatabaseSession _session; + private readonly Mock _mockDomainPublisher; + private readonly Mock _mockIntegrationPublisher; + private readonly Mock _mockOutboxRepository; + private readonly EfCoreUnitOfWork _unitOfWork; + private readonly EfCoreRepository _repository; + private readonly PostgresTestDbContext _dbContext; + private readonly NpgsqlConnection _connection; + private NpgsqlTransaction _transaction = null!; + private readonly OpenDddOptions _openDddOptions; + private readonly ILoggerFactory _loggerFactory; + private readonly ILogger _dbContextLogger; + + public PostgresEfCoreRepositoryTests(ITestOutputHelper testOutputHelper) + : base(testOutputHelper, enableLogging: true) + { + _connectionString = Environment.GetEnvironmentVariable("POSTGRES_TEST_CONNECTION_STRING") + ?? 
"Host=localhost;Port=5432;Database=testdb;Username=testuser;Password=testpassword"; + + _connection = new NpgsqlConnection(_connectionString); + + var options = new DbContextOptionsBuilder() + .UseNpgsql(_connection, x => x.MigrationsHistoryTable("__EFMigrationsHistory", "public")) + .EnableSensitiveDataLogging() + .Options; + + _openDddOptions = new OpenDddOptions + { + AutoRegister = { EfCoreConfigurations = true } + }; + + var services = new ServiceCollection() + .AddLogging(builder => builder.AddSimpleConsole()) + .BuildServiceProvider(); + _dbContextLogger = services.GetRequiredService>(); + + _dbContext = new PostgresTestDbContext(options, _openDddOptions, _dbContextLogger); + + _session = new EfCoreDatabaseSession(_dbContext); + + _mockDomainPublisher = new Mock(); + _mockIntegrationPublisher = new Mock(); + _mockOutboxRepository = new Mock(); + + _loggerFactory = services.GetRequiredService(); + + _unitOfWork = new EfCoreUnitOfWork( + _session, + _mockDomainPublisher.Object, + _mockIntegrationPublisher.Object, + _mockOutboxRepository.Object, + _loggerFactory.CreateLogger() + ); + _repository = new EfCoreRepository(_unitOfWork); + } + + public async Task InitializeAsync() + { + // Re-create database + await _dbContext.Database.EnsureDeletedAsync(); + EnsureDatabaseExists(); + + // Run migrations + _dbContext.Database.SetCommandTimeout(120); + await _dbContext.Database.MigrateAsync(); + + // Initialize connection and transaction + _connection.Open(); + _transaction = await _connection.BeginTransactionAsync(); + _dbContext.Database.UseTransaction(_transaction); + } + + public async Task DisposeAsync() + { + await _transaction.RollbackAsync(); + await _connection.CloseAsync(); + } + + private void EnsureDatabaseExists() + { + using var adminConnection = new NpgsqlConnection($"{_connectionString};Database=postgres"); + adminConnection.Open(); + + var databaseName = "testdb"; + + using var command = new NpgsqlCommand($"SELECT 1 FROM pg_database WHERE datname = '{databaseName}'", adminConnection); + var databaseExists = command.ExecuteScalar() != null; + + if (!databaseExists) + { + using var createCommand = new NpgsqlCommand($"CREATE DATABASE {databaseName}", adminConnection); + createCommand.ExecuteNonQuery(); + } + } + + [Fact] + public async Task SaveAsync_ShouldInsertOrUpdateEntity() + { + // Arrange + var aggregate = TestAggregateRoot.Create( + "Initial Root", + new List { TestEntity.Create("Entity 1"), TestEntity.Create("Entity 2") }, + new TestValueObject(100, "Value Object Data") + ); + + // Act + await _repository.SaveAsync(aggregate, CancellationToken.None); + var retrieved = await _repository.GetAsync(aggregate.Id, CancellationToken.None); + + // Assert + retrieved.Should().NotBeNull(); + retrieved.Id.Should().Be(aggregate.Id); + retrieved.Name.Should().Be("Initial Root"); + retrieved.Entities.Should().HaveCount(2); + retrieved.Value.Number.Should().Be(100); + + // Act (update) + aggregate = TestAggregateRoot.Create( + "Updated Root", + new List { TestEntity.Create("Updated Entity") }, + new TestValueObject(200, "Updated Value") + ); + + await _repository.SaveAsync(aggregate, CancellationToken.None); + var updated = await _repository.GetAsync(aggregate.Id, CancellationToken.None); + + // Assert (update) + updated.Name.Should().Be("Updated Root"); + updated.Entities.Should().HaveCount(1); + updated.Entities.First().Description.Should().Be("Updated Entity"); + updated.Value.Number.Should().Be(200); + } + + [Fact] + public async Task FindAsync_ShouldReturnEntityIfExists() + { + 
// Arrange
+            var aggregate = TestAggregateRoot.Create("Find Test", new List<TestEntity>(), new TestValueObject(50, "Find Test Value"));
+            await _repository.SaveAsync(aggregate, CancellationToken.None);
+
+            // Act
+            var result = await _repository.FindAsync(aggregate.Id, CancellationToken.None);
+
+            // Assert
+            result.Should().NotBeNull();
+            result!.Id.Should().Be(aggregate.Id);
+        }
+
+        [Fact]
+        public async Task FindAsync_ShouldReturnNullIfNotExists()
+        {
+            // Act
+            var result = await _repository.FindAsync(Guid.NewGuid(), CancellationToken.None);
+
+            // Assert
+            result.Should().BeNull();
+        }
+
+        [Fact]
+        public async Task FindWithAsync_ShouldReturnFilteredResults()
+        {
+            // Arrange
+            var aggregate1 = TestAggregateRoot.Create("Filter Match", new List<TestEntity>(), new TestValueObject(300, "Match"));
+            var aggregate2 = TestAggregateRoot.Create("No Match", new List<TestEntity>(), new TestValueObject(400, "Different"));
+            await _repository.SaveAsync(aggregate1, CancellationToken.None);
+            await _repository.SaveAsync(aggregate2, CancellationToken.None);
+
+            // Act
+            Expression<Func<TestAggregateRoot, bool>> filter = a => a.Value.Number == 300;
+            var results = (await _repository.FindWithAsync(filter, CancellationToken.None)).ToList();
+
+            // Assert
+            results.Should().HaveCount(1);
+            results[0].Name.Should().Be("Filter Match");
+        }
+
+        [Fact]
+        public async Task FindAllAsync_ShouldReturnAllEntities()
+        {
+            // Arrange
+            var aggregate1 = TestAggregateRoot.Create("Entity 1", new List<TestEntity>(), new TestValueObject(10, "VO 1"));
+            var aggregate2 = TestAggregateRoot.Create("Entity 2", new List<TestEntity>(), new TestValueObject(20, "VO 2"));
+            await _repository.SaveAsync(aggregate1, CancellationToken.None);
+            await _repository.SaveAsync(aggregate2, CancellationToken.None);
+
+            // Act
+            var results = (await _repository.FindAllAsync(CancellationToken.None)).ToList();
+
+            // Assert
+            results.Should().HaveCount(2);
+        }
+
+        [Fact]
+        public async Task DeleteAsync_ShouldRemoveEntity()
+        {
+            // Arrange
+            var aggregate = TestAggregateRoot.Create("To be deleted", new List<TestEntity>(), new TestValueObject(99, "Delete"));
+            await _repository.SaveAsync(aggregate, CancellationToken.None);
+
+            // Act
+            await _repository.DeleteAsync(aggregate, CancellationToken.None);
+            var result = await _repository.FindAsync(aggregate.Id, CancellationToken.None);
+
+            // Assert
+            result.Should().BeNull();
+        }
+    }
+}
diff --git a/src/OpenDDD.Tests/Integration/Infrastructure/Repository/EfCore/Sqlite/SqliteEfCoreRepositoryTests.cs b/src/OpenDDD.Tests/Integration/Infrastructure/Repository/EfCore/Sqlite/SqliteEfCoreRepositoryTests.cs
new file mode 100644
index 0000000..8f27603
--- /dev/null
+++ b/src/OpenDDD.Tests/Integration/Infrastructure/Repository/EfCore/Sqlite/SqliteEfCoreRepositoryTests.cs
@@ -0,0 +1,198 @@
+using System.Linq.Expressions;
+using FluentAssertions;
+using Microsoft.EntityFrameworkCore;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Logging;
+using Xunit.Abstractions;
+using Moq;
+using OpenDDD.API.Options;
+using OpenDDD.Domain.Model;
+using OpenDDD.Infrastructure.Persistence.EfCore.Base;
+using OpenDDD.Infrastructure.Persistence.EfCore.DatabaseSession;
+using OpenDDD.Infrastructure.Persistence.EfCore.UoW;
+using OpenDDD.Infrastructure.Repository.EfCore;
+using OpenDDD.Infrastructure.TransactionalOutbox;
+using OpenDDD.Tests.Base;
+using OpenDDD.Tests.Base.Domain.Model;
+using OpenDDD.Tests.Integration.Infrastructure.Persistence.EfCore.DbContext.Sqlite;
+
+namespace OpenDDD.Tests.Integration.Infrastructure.Repository.EfCore.Sqlite
+{
+    [Collection("EfCoreTests")]
+    public class
SqliteEfCoreRepositoryTests : IntegrationTests, IAsyncLifetime + { + private readonly EfCoreDatabaseSession _session; + private readonly Mock _mockDomainPublisher; + private readonly Mock _mockIntegrationPublisher; + private readonly Mock _mockOutboxRepository; + private readonly EfCoreUnitOfWork _unitOfWork; + private readonly EfCoreRepository _repository; + private readonly SqliteTestDbContext _dbContext; + private readonly OpenDddOptions _openDddOptions; + private readonly ILoggerFactory _loggerFactory; + private readonly ILogger _dbContextLogger; + private readonly string _connectionString = "DataSource=:memory:"; + + public SqliteEfCoreRepositoryTests(ITestOutputHelper testOutputHelper) + : base(testOutputHelper, enableLogging: true) + { + var options = new DbContextOptionsBuilder() + .UseSqlite(_connectionString) + .EnableSensitiveDataLogging() + .Options; + + _openDddOptions = new OpenDddOptions + { + AutoRegister = { EfCoreConfigurations = true } + }; + + var services = new ServiceCollection() + .AddLogging(builder => builder.AddSimpleConsole()) + .BuildServiceProvider(); + _dbContextLogger = services.GetRequiredService>(); + + _dbContext = new SqliteTestDbContext(options, _openDddOptions, _dbContextLogger); + + _session = new EfCoreDatabaseSession(_dbContext); + + _mockDomainPublisher = new Mock(); + _mockIntegrationPublisher = new Mock(); + _mockOutboxRepository = new Mock(); + + _loggerFactory = services.GetRequiredService(); + + _unitOfWork = new EfCoreUnitOfWork( + _session, + _mockDomainPublisher.Object, + _mockIntegrationPublisher.Object, + _mockOutboxRepository.Object, + _loggerFactory.CreateLogger() + ); + _repository = new EfCoreRepository(_unitOfWork); + } + + public async Task InitializeAsync() + { + await _dbContext.Database.OpenConnectionAsync(); + await _dbContext.Database.EnsureCreatedAsync(); + } + + public async Task DisposeAsync() + { + await _dbContext.Database.EnsureDeletedAsync(); + await _dbContext.DisposeAsync(); + } + + [Fact] + public async Task SaveAsync_ShouldInsertOrUpdateEntity() + { + // Arrange + var aggregate = TestAggregateRoot.Create( + "Initial Root", + new List { TestEntity.Create("Entity 1"), TestEntity.Create("Entity 2") }, + new TestValueObject(100, "Value Object Data") + ); + + // Act + await _repository.SaveAsync(aggregate, CancellationToken.None); + var retrieved = await _repository.GetAsync(aggregate.Id, CancellationToken.None); + + // Assert + retrieved.Should().NotBeNull(); + retrieved.Id.Should().Be(aggregate.Id); + retrieved.Name.Should().Be("Initial Root"); + retrieved.Entities.Should().HaveCount(2); + retrieved.Value.Number.Should().Be(100); + + // Act (update) + aggregate = TestAggregateRoot.Create( + "Updated Root", + new List { TestEntity.Create("Updated Entity") }, + new TestValueObject(200, "Updated Value") + ); + + await _repository.SaveAsync(aggregate, CancellationToken.None); + var updated = await _repository.GetAsync(aggregate.Id, CancellationToken.None); + + // Assert (update) + updated.Name.Should().Be("Updated Root"); + updated.Entities.Should().HaveCount(1); + updated.Entities.First().Description.Should().Be("Updated Entity"); + updated.Value.Number.Should().Be(200); + } + + [Fact] + public async Task FindAsync_ShouldReturnEntityIfExists() + { + // Arrange + var aggregate = TestAggregateRoot.Create("Find Test", new List(), new TestValueObject(50, "Find Test Value")); + await _repository.SaveAsync(aggregate, CancellationToken.None); + + // Act + var result = await _repository.FindAsync(aggregate.Id, 
CancellationToken.None); + + // Assert + result.Should().NotBeNull(); + result!.Id.Should().Be(aggregate.Id); + } + + [Fact] + public async Task FindAsync_ShouldReturnNullIfNotExists() + { + // Act + var result = await _repository.FindAsync(Guid.NewGuid(), CancellationToken.None); + + // Assert + result.Should().BeNull(); + } + + [Fact] + public async Task FindWithAsync_ShouldReturnFilteredResults() + { + // Arrange + var aggregate1 = TestAggregateRoot.Create("Filter Match", new List(), new TestValueObject(300, "Match")); + var aggregate2 = TestAggregateRoot.Create("No Match", new List(), new TestValueObject(400, "Different")); + await _repository.SaveAsync(aggregate1, CancellationToken.None); + await _repository.SaveAsync(aggregate2, CancellationToken.None); + + // Act + Expression> filter = a => a.Value.Number == 300; + var results = (await _repository.FindWithAsync(filter, CancellationToken.None)).ToList(); + + // Assert + results.Should().HaveCount(1); + results[0].Name.Should().Be("Filter Match"); + } + + [Fact] + public async Task FindAllAsync_ShouldReturnAllEntities() + { + // Arrange + var aggregate1 = TestAggregateRoot.Create("Entity 1", new List(), new TestValueObject(10, "VO 1")); + var aggregate2 = TestAggregateRoot.Create("Entity 2", new List(), new TestValueObject(20, "VO 2")); + await _repository.SaveAsync(aggregate1, CancellationToken.None); + await _repository.SaveAsync(aggregate2, CancellationToken.None); + + // Act + var results = (await _repository.FindAllAsync(CancellationToken.None)).ToList(); + + // Assert + results.Should().HaveCount(2); + } + + [Fact] + public async Task DeleteAsync_ShouldRemoveEntity() + { + // Arrange + var aggregate = TestAggregateRoot.Create("To be deleted", new List(), new TestValueObject(99, "Delete")); + await _repository.SaveAsync(aggregate, CancellationToken.None); + + // Act + await _repository.DeleteAsync(aggregate, CancellationToken.None); + var result = await _repository.FindAsync(aggregate.Id, CancellationToken.None); + + // Assert + result.Should().BeNull(); + } + } +} diff --git a/src/OpenDDD.Tests/Integration/Infrastructure/Repository/OpenDdd/InMemory/InMemoryOpenDddRepositoryTests.cs b/src/OpenDDD.Tests/Integration/Infrastructure/Repository/OpenDdd/InMemory/InMemoryOpenDddRepositoryTests.cs new file mode 100644 index 0000000..8ba871d --- /dev/null +++ b/src/OpenDDD.Tests/Integration/Infrastructure/Repository/OpenDdd/InMemory/InMemoryOpenDddRepositoryTests.cs @@ -0,0 +1,158 @@ +using System.Linq.Expressions; +using Microsoft.Extensions.Logging; +using FluentAssertions; +using Xunit.Abstractions; +using OpenDDD.Infrastructure.Persistence.OpenDdd.DatabaseSession.InMemory; +using OpenDDD.Infrastructure.Persistence.OpenDdd.Serializers; +using OpenDDD.Infrastructure.Persistence.Serializers; +using OpenDDD.Infrastructure.Persistence.Storage.InMemory; +using OpenDDD.Infrastructure.Repository.OpenDdd.InMemory; +using OpenDDD.Tests.Base; +using OpenDDD.Tests.Base.Domain.Model; + +namespace OpenDDD.Tests.Integration.Infrastructure.Repository.OpenDdd.InMemory +{ + [Collection("InMemoryTests")] + public class InMemoryOpenDddRepositoryTests : IntegrationTests, IAsyncLifetime + { + private readonly ILogger _storageLogger; + private readonly ILogger _sessionLogger; + private readonly InMemoryKeyValueStorage _storage; + private readonly InMemoryDatabaseSession _session; + private readonly IAggregateSerializer _serializer; + private readonly InMemoryOpenDddRepository _repository; + + public InMemoryOpenDddRepositoryTests(ITestOutputHelper 
testOutputHelper) + : base(testOutputHelper, enableLogging: true) + { + _storageLogger = LoggerFactory.CreateLogger(); + _sessionLogger = LoggerFactory.CreateLogger(); + _storage = new InMemoryKeyValueStorage(_storageLogger); + _session = new InMemoryDatabaseSession(_storage, _sessionLogger); + _serializer = new OpenDddAggregateSerializer(); + _repository = new InMemoryOpenDddRepository(_session, _serializer); + } + + public async Task InitializeAsync() + { + await _storage.ClearAsync(CancellationToken.None); + } + + public Task DisposeAsync() + { + return Task.CompletedTask; + } + + [Fact] + public async Task SaveAsync_ShouldInsertOrUpdateEntity() + { + // Arrange + var aggregate = TestAggregateRoot.Create( + "Initial Root", + new List { TestEntity.Create("Entity 1"), TestEntity.Create("Entity 2") }, + new TestValueObject(100, "Value Object Data") + ); + + // Act + await _repository.SaveAsync(aggregate, CancellationToken.None); + var retrieved = await _repository.GetAsync(aggregate.Id, CancellationToken.None); + + // Assert + retrieved.Should().NotBeNull(); + retrieved.Id.Should().Be(aggregate.Id); + retrieved.Name.Should().Be("Initial Root"); + retrieved.Entities.Should().HaveCount(2); + retrieved.Value.Number.Should().Be(100); + + // Act (update) + aggregate = TestAggregateRoot.Create( + "Updated Root", + new List { TestEntity.Create("Updated Entity") }, + new TestValueObject(200, "Updated Value") + ); + + await _repository.SaveAsync(aggregate, CancellationToken.None); + var updated = await _repository.GetAsync(aggregate.Id, CancellationToken.None); + + // Assert (update) + updated.Name.Should().Be("Updated Root"); + updated.Entities.Should().HaveCount(1); + updated.Entities.First().Description.Should().Be("Updated Entity"); + updated.Value.Number.Should().Be(200); + } + + [Fact] + public async Task FindAsync_ShouldReturnEntityIfExists() + { + // Arrange + var aggregate = TestAggregateRoot.Create("Find Test", new List(), new TestValueObject(50, "Find Test Value")); + await _repository.SaveAsync(aggregate, CancellationToken.None); + + // Act + var result = await _repository.FindAsync(aggregate.Id, CancellationToken.None); + + // Assert + result.Should().NotBeNull(); + result!.Id.Should().Be(aggregate.Id); + } + + [Fact] + public async Task FindAsync_ShouldReturnNullIfNotExists() + { + // Act + var result = await _repository.FindAsync(Guid.NewGuid(), CancellationToken.None); + + // Assert + result.Should().BeNull(); + } + + [Fact] + public async Task FindWithAsync_ShouldReturnFilteredResults() + { + // Arrange + var aggregate1 = TestAggregateRoot.Create("Filter Match", new List(), new TestValueObject(300, "Match")); + var aggregate2 = TestAggregateRoot.Create("No Match", new List(), new TestValueObject(400, "Different")); + await _repository.SaveAsync(aggregate1, CancellationToken.None); + await _repository.SaveAsync(aggregate2, CancellationToken.None); + + // Act + Expression> filter = a => a.Value.Number == 300; + var results = (await _repository.FindWithAsync(filter, CancellationToken.None)).ToList(); + + // Assert + results.Should().HaveCount(1); + results[0].Name.Should().Be("Filter Match"); + } + + [Fact] + public async Task FindAllAsync_ShouldReturnAllEntities() + { + // Arrange + var aggregate1 = TestAggregateRoot.Create("Entity 1", new List(), new TestValueObject(10, "VO 1")); + var aggregate2 = TestAggregateRoot.Create("Entity 2", new List(), new TestValueObject(20, "VO 2")); + await _repository.SaveAsync(aggregate1, CancellationToken.None); + await 
_repository.SaveAsync(aggregate2, CancellationToken.None); + + // Act + var results = (await _repository.FindAllAsync(CancellationToken.None)).ToList(); + + // Assert + results.Should().HaveCount(2); + } + + [Fact] + public async Task DeleteAsync_ShouldRemoveEntity() + { + // Arrange + var aggregate = TestAggregateRoot.Create("To be deleted", new List(), new TestValueObject(99, "Delete")); + await _repository.SaveAsync(aggregate, CancellationToken.None); + + // Act + await _repository.DeleteAsync(aggregate, CancellationToken.None); + var result = await _repository.FindAsync(aggregate.Id, CancellationToken.None); + + // Assert + result.Should().BeNull(); + } + } +} diff --git a/src/OpenDDD.Tests/Integration/Infrastructure/Repository/OpenDdd/InMemory/InMemoryTestsCollection.cs b/src/OpenDDD.Tests/Integration/Infrastructure/Repository/OpenDdd/InMemory/InMemoryTestsCollection.cs new file mode 100644 index 0000000..0edce68 --- /dev/null +++ b/src/OpenDDD.Tests/Integration/Infrastructure/Repository/OpenDdd/InMemory/InMemoryTestsCollection.cs @@ -0,0 +1,5 @@ +namespace OpenDDD.Tests.Integration.Infrastructure.Repository.OpenDdd.InMemory +{ + [CollectionDefinition("InMemoryTests", DisableParallelization = true)] + public class InMemoryTestsCollection { } +} diff --git a/src/OpenDDD.Tests/Integration/Infrastructure/Repository/OpenDdd/Postgres/PostgresOpenDddRepositoryTests.cs b/src/OpenDDD.Tests/Integration/Infrastructure/Repository/OpenDdd/Postgres/PostgresOpenDddRepositoryTests.cs new file mode 100644 index 0000000..c57d53e --- /dev/null +++ b/src/OpenDDD.Tests/Integration/Infrastructure/Repository/OpenDdd/Postgres/PostgresOpenDddRepositoryTests.cs @@ -0,0 +1,172 @@ +using System.Linq.Expressions; +using FluentAssertions; +using Npgsql; +using Xunit.Abstractions; +using OpenDDD.API.Extensions; +using OpenDDD.Infrastructure.Persistence.OpenDdd.DatabaseSession.Postgres; +using OpenDDD.Infrastructure.Persistence.OpenDdd.Serializers; +using OpenDDD.Infrastructure.Persistence.Serializers; +using OpenDDD.Infrastructure.Repository.OpenDdd.Postgres; +using OpenDDD.Tests.Base; +using OpenDDD.Tests.Base.Domain.Model; + +namespace OpenDDD.Tests.Integration.Infrastructure.Repository.OpenDdd.Postgres +{ + [Collection("PostgresTests")] + public class PostgresOpenDddRepositoryTests : IntegrationTests, IAsyncLifetime + { + private readonly string _connectionString; + private readonly PostgresDatabaseSession _session; + private readonly IAggregateSerializer _serializer; + private readonly PostgresOpenDddRepository _repository; + private readonly NpgsqlConnection _connection; + private readonly NpgsqlTransaction _transaction; + + public PostgresOpenDddRepositoryTests(ITestOutputHelper testOutputHelper) + : base(testOutputHelper, enableLogging: true) + { + _connectionString = Environment.GetEnvironmentVariable("POSTGRES_TEST_CONNECTION_STRING") + ?? 
"Host=localhost;Port=5432;Database=testdb;Username=testuser;Password=testpassword"; + + _connection = new NpgsqlConnection(_connectionString); + _connection.Open(); + _transaction = _connection.BeginTransaction(); + + _session = new PostgresDatabaseSession(_connection); + _serializer = new OpenDddAggregateSerializer(); + _repository = new PostgresOpenDddRepository(_session, _serializer); + } + + public async Task InitializeAsync() + { + var tableName = typeof(TestAggregateRoot).Name.ToLower().Pluralize(); + + var createTableQuery = $@" + CREATE TABLE {tableName} ( + id UUID PRIMARY KEY, + data JSONB NOT NULL + );"; + + await using var createCmd = new NpgsqlCommand(createTableQuery, _connection, _transaction); + await createCmd.ExecuteNonQueryAsync(); + } + + public async Task DisposeAsync() + { + await _transaction.RollbackAsync(); + await _connection.CloseAsync(); + } + + [Fact] + public async Task SaveAsync_ShouldInsertOrUpdateEntity() + { + // Arrange + var aggregate = TestAggregateRoot.Create( + "Initial Root", + new List { TestEntity.Create("Entity 1"), TestEntity.Create("Entity 2") }, + new TestValueObject(100, "Value Object Data") + ); + + // Act + await _repository.SaveAsync(aggregate, CancellationToken.None); + var retrieved = await _repository.GetAsync(aggregate.Id, CancellationToken.None); + + // Assert + retrieved.Should().NotBeNull(); + retrieved.Id.Should().Be(aggregate.Id); + retrieved.Name.Should().Be("Initial Root"); + retrieved.Entities.Should().HaveCount(2); + retrieved.Value.Number.Should().Be(100); + + // Act (update) + aggregate = TestAggregateRoot.Create( + "Updated Root", + new List { TestEntity.Create("Updated Entity") }, + new TestValueObject(200, "Updated Value") + ); + + await _repository.SaveAsync(aggregate, CancellationToken.None); + var updated = await _repository.GetAsync(aggregate.Id, CancellationToken.None); + + // Assert (update) + updated.Name.Should().Be("Updated Root"); + updated.Entities.Should().HaveCount(1); + updated.Entities.First().Description.Should().Be("Updated Entity"); + updated.Value.Number.Should().Be(200); + } + + [Fact] + public async Task FindAsync_ShouldReturnEntityIfExists() + { + // Arrange + var aggregate = TestAggregateRoot.Create("Find Test", new List(), new TestValueObject(50, "Find Test Value")); + await _repository.SaveAsync(aggregate, CancellationToken.None); + + // Act + var result = await _repository.FindAsync(aggregate.Id, CancellationToken.None); + + // Assert + result.Should().NotBeNull(); + result!.Id.Should().Be(aggregate.Id); + } + + [Fact] + public async Task FindAsync_ShouldReturnNullIfNotExists() + { + // Act + var result = await _repository.FindAsync(Guid.NewGuid(), CancellationToken.None); + + // Assert + result.Should().BeNull(); + } + + [Fact] + public async Task FindWithAsync_ShouldReturnFilteredResults() + { + // Arrange + var aggregate1 = TestAggregateRoot.Create("Filter Match", new List(), new TestValueObject(300, "Match")); + var aggregate2 = TestAggregateRoot.Create("No Match", new List(), new TestValueObject(400, "Different")); + await _repository.SaveAsync(aggregate1, CancellationToken.None); + await _repository.SaveAsync(aggregate2, CancellationToken.None); + + // Act + Expression> filter = a => a.Value.Number == 300; + var results = (await _repository.FindWithAsync(filter, CancellationToken.None)).ToList(); + + // Assert + results.Should().HaveCount(1); + results[0].Name.Should().Be("Filter Match"); + } + + [Fact] + public async Task FindAllAsync_ShouldReturnAllEntities() + { + // Arrange + var 
aggregate1 = TestAggregateRoot.Create("Entity 1", new List(), new TestValueObject(10, "VO 1")); + var aggregate2 = TestAggregateRoot.Create("Entity 2", new List(), new TestValueObject(20, "VO 2")); + await _repository.SaveAsync(aggregate1, CancellationToken.None); + await _repository.SaveAsync(aggregate2, CancellationToken.None); + + // Act + var results = (await _repository.FindAllAsync(CancellationToken.None)).ToList(); + + // Assert + results.Should().HaveCount(2); + } + + [Fact] + public async Task DeleteAsync_ShouldRemoveEntity() + { + // Arrange + var aggregate = TestAggregateRoot.Create("To be deleted", new List(), new TestValueObject(99, "Delete")); + await _repository.SaveAsync(aggregate, CancellationToken.None); + + // Act + await _repository.DeleteAsync(aggregate, CancellationToken.None); + var result = await _repository.FindAsync(aggregate.Id, CancellationToken.None); + + // Assert + result.Should().BeNull(); + } + } +} diff --git a/src/OpenDDD.Tests/Integration/Infrastructure/Repository/OpenDdd/Postgres/PostgresTestsCollection.cs b/src/OpenDDD.Tests/Integration/Infrastructure/Repository/OpenDdd/Postgres/PostgresTestsCollection.cs new file mode 100644 index 0000000..98ae4fb --- /dev/null +++ b/src/OpenDDD.Tests/Integration/Infrastructure/Repository/OpenDdd/Postgres/PostgresTestsCollection.cs @@ -0,0 +1,5 @@ +namespace OpenDDD.Tests.Integration.Infrastructure.Repository.OpenDdd.Postgres +{ + [CollectionDefinition("PostgresTests", DisableParallelization = true)] + public class PostgresTestsCollection { } +} diff --git a/src/OpenDDD.Tests/OpenDDD.Tests.csproj b/src/OpenDDD.Tests/OpenDDD.Tests.csproj new file mode 100644 index 0000000..73390bb --- /dev/null +++ b/src/OpenDDD.Tests/OpenDDD.Tests.csproj @@ -0,0 +1,34 @@ + + + + net8.0 + enable + enable + + false + true + + + + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + + + + + diff --git a/src/OpenDDD.Tests/Unit/Infrastructure/Events/Azure/AzureServiceBusMessagingProviderTests.cs b/src/OpenDDD.Tests/Unit/Infrastructure/Events/Azure/AzureServiceBusMessagingProviderTests.cs new file mode 100644 index 0000000..ef5874d --- /dev/null +++ b/src/OpenDDD.Tests/Unit/Infrastructure/Events/Azure/AzureServiceBusMessagingProviderTests.cs @@ -0,0 +1,116 @@ +using Azure; +using Azure.Messaging.ServiceBus; +using Azure.Messaging.ServiceBus.Administration; +using Microsoft.Extensions.Logging; +using Moq; +using OpenDDD.Infrastructure.Events.Azure; +using OpenDDD.Tests.Base; + +namespace OpenDDD.Tests.Unit.Infrastructure.Events.Azure +{ + public class AzureServiceBusMessagingProviderTests : UnitTests + { + private readonly Mock _mockClient; + private readonly Mock _mockAdminClient; + private readonly Mock _mockSender; + private readonly Mock _mockProcessor; + private readonly Mock> _mockLogger; + private readonly AzureServiceBusMessagingProvider _provider; + private readonly string _testTopic = "test-topic"; + private readonly string _testSubscription = "test-subscription"; + + public AzureServiceBusMessagingProviderTests() + { + _mockClient = new Mock(); + _mockAdminClient = new Mock(); + _mockSender = new Mock(); + _mockProcessor = new Mock(); + _mockLogger = new Mock>(); + + _mockClient + .Setup(client => client.CreateSender(It.IsAny())) + .Returns(_mockSender.Object); + + _mockClient + .Setup(client => client.CreateProcessor(It.IsAny(), It.IsAny())) + .Returns(_mockProcessor.Object); + + _mockAdminClient + .Setup(admin => admin.TopicExistsAsync(It.IsAny(), It.IsAny())) + 
.ReturnsAsync(Response.FromValue(true, Mock.Of())); + + _mockAdminClient + .Setup(admin => admin.SubscriptionExistsAsync(It.IsAny(), It.IsAny(), It.IsAny())) + .ReturnsAsync(Response.FromValue(true, Mock.Of())); + + _provider = new AzureServiceBusMessagingProvider( + client: _mockClient.Object, + adminClient: _mockAdminClient.Object, + autoCreateTopics: true, + logger: _mockLogger.Object + ); + } + + [Theory] + [InlineData(null, "adminClient", "logger")] + [InlineData("client", null, "logger")] + [InlineData("client", "adminClient", null)] + public void Constructor_ShouldThrowException_WhenDependenciesAreNull( + string? client, string? adminClient, string? logger) + { + var mockClient = client is null ? null! : _mockClient.Object; + var mockAdminClient = adminClient is null ? null! : _mockAdminClient.Object; + var mockLogger = logger is null ? null! : _mockLogger.Object; + + Assert.Throws(() => + new AzureServiceBusMessagingProvider(mockClient, mockAdminClient, true, mockLogger)); + } + + [Theory] + [InlineData(null)] + [InlineData("")] + [InlineData(" ")] + public async Task SubscribeAsync_ShouldThrowException_WhenTopicIsInvalid(string invalidTopic) + { + await Assert.ThrowsAsync(() => + _provider.SubscribeAsync(invalidTopic, _testSubscription, (msg, token) => Task.CompletedTask, CancellationToken.None)); + } + + [Theory] + [InlineData(null)] + [InlineData("")] + [InlineData(" ")] + public async Task SubscribeAsync_ShouldThrowException_WhenConsumerGroupIsInvalid(string invalidConsumerGroup) + { + await Assert.ThrowsAsync(() => + _provider.SubscribeAsync(_testTopic, invalidConsumerGroup, (msg, token) => Task.CompletedTask, CancellationToken.None)); + } + + [Fact] + public async Task SubscribeAsync_ShouldThrowException_WhenHandlerIsNull() + { + await Assert.ThrowsAsync(() => + _provider.SubscribeAsync(_testTopic, _testSubscription, null!, CancellationToken.None)); + } + + [Theory] + [InlineData(null)] + [InlineData("")] + [InlineData(" ")] + public async Task PublishAsync_ShouldThrowException_WhenTopicIsInvalid(string invalidTopic) + { + await Assert.ThrowsAsync(() => + _provider.PublishAsync(invalidTopic, "message", CancellationToken.None)); + } + + [Theory] + [InlineData(null)] + [InlineData("")] + [InlineData(" ")] + public async Task PublishAsync_ShouldThrowException_WhenMessageIsInvalid(string invalidMessage) + { + await Assert.ThrowsAsync(() => + _provider.PublishAsync(_testTopic, invalidMessage, CancellationToken.None)); + } + } +} diff --git a/src/OpenDDD.Tests/Unit/Infrastructure/Events/DomainPublisherTests.cs b/src/OpenDDD.Tests/Unit/Infrastructure/Events/DomainPublisherTests.cs new file mode 100644 index 0000000..f89122b --- /dev/null +++ b/src/OpenDDD.Tests/Unit/Infrastructure/Events/DomainPublisherTests.cs @@ -0,0 +1,72 @@ +using FluentAssertions; +using OpenDDD.Domain.Model; +using OpenDDD.Infrastructure.Events; +using OpenDDD.Tests.Base; + +namespace OpenDDD.Tests.Unit.Infrastructure.Events +{ + public class DomainPublisherTests : UnitTests + { + private class TestEvent : IDomainEvent { } + + [Fact] + public async Task PublishAsync_ShouldStoreEvent_WhenValidEventIsPublished() + { + // Arrange + var publisher = new DomainPublisher(); + var domainEvent = new TestEvent(); + + // Act + await publisher.PublishAsync(domainEvent, CancellationToken.None); + + // Assert + publisher.GetPublishedEvents().Should().ContainSingle() + .Which.Should().Be(domainEvent); + } + + [Fact] + public async Task PublishAsync_ShouldThrowArgumentNullException_WhenEventIsNull() + { + // Arrange + var 
publisher = new DomainPublisher(); + + // Act + Func act = async () => await publisher.PublishAsync(null!, CancellationToken.None); + + // Assert + await act.Should().ThrowAsync() + .WithParameterName("domainEvent"); + } + + [Fact] + public async Task GetPublishedEvents_ShouldReturnEmptyList_WhenNoEventsArePublished() + { + // Arrange + var publisher = new DomainPublisher(); + + // Act + var events = publisher.GetPublishedEvents(); + + // Assert + events.Should().BeEmpty(); + } + + [Fact] + public async Task GetPublishedEvents_ShouldReturnAllPublishedEvents() + { + // Arrange + var publisher = new DomainPublisher(); + var event1 = new TestEvent(); + var event2 = new TestEvent(); + + // Act + await publisher.PublishAsync(event1, CancellationToken.None); + await publisher.PublishAsync(event2, CancellationToken.None); + var publishedEvents = publisher.GetPublishedEvents(); + + // Assert + publishedEvents.Should().HaveCount(2); + publishedEvents.Should().ContainInOrder(event1, event2); + } + } +} diff --git a/src/OpenDDD/Tests/Infrastructure/Events/EventSerializerTests.cs b/src/OpenDDD.Tests/Unit/Infrastructure/Events/EventSerializerTests.cs similarity index 92% rename from src/OpenDDD/Tests/Infrastructure/Events/EventSerializerTests.cs rename to src/OpenDDD.Tests/Unit/Infrastructure/Events/EventSerializerTests.cs index fc5e030..5cf2c8b 100644 --- a/src/OpenDDD/Tests/Infrastructure/Events/EventSerializerTests.cs +++ b/src/OpenDDD.Tests/Unit/Infrastructure/Events/EventSerializerTests.cs @@ -1,9 +1,9 @@ -using Xunit; -using OpenDDD.Infrastructure.Events; +using OpenDDD.Infrastructure.Events; +using OpenDDD.Tests.Base; -namespace OpenDDD.Tests.Infrastructure.Events +namespace OpenDDD.Tests.Unit.Infrastructure.Events { - public class EventSerializerTests + public class EventSerializerTests : UnitTests { [Fact] public void Serialize_ShouldReturnJsonString_WhenEventIsValid() diff --git a/src/OpenDDD.Tests/Unit/Infrastructure/Events/InMemory/InMemoryMessagingProviderTests.cs b/src/OpenDDD.Tests/Unit/Infrastructure/Events/InMemory/InMemoryMessagingProviderTests.cs new file mode 100644 index 0000000..a666d2b --- /dev/null +++ b/src/OpenDDD.Tests/Unit/Infrastructure/Events/InMemory/InMemoryMessagingProviderTests.cs @@ -0,0 +1,80 @@ +using Microsoft.Extensions.Logging; +using Moq; +using OpenDDD.Infrastructure.Events.InMemory; +using OpenDDD.Tests.Base; + +namespace OpenDDD.Tests.Unit.Infrastructure.Events.InMemory +{ + public class InMemoryMessagingProviderTests : UnitTests + { + private readonly Mock> _mockLogger; + private readonly InMemoryMessagingProvider _messagingProvider; + private const string Topic = "TestTopic"; + private const string ConsumerGroup = "TestGroup"; + private const string Message = "Hello, InMemory!"; + + public InMemoryMessagingProviderTests() + { + _mockLogger = new Mock>(); + _messagingProvider = new InMemoryMessagingProvider(_mockLogger.Object); + } + + // Constructor validation tests + [Fact] + public void Constructor_ShouldThrowException_WhenLoggerIsNull() + { + Assert.Throws(() => new InMemoryMessagingProvider(null!)); + } + + // SubscribeAsync validation tests + [Theory] + [InlineData(null)] + [InlineData("")] + [InlineData(" ")] + public async Task SubscribeAsync_ShouldThrowException_WhenTopicIsInvalid(string invalidTopic) + { + await Assert.ThrowsAsync(() => + _messagingProvider.SubscribeAsync(invalidTopic, ConsumerGroup, (msg, token) => Task.CompletedTask, + CancellationToken.None)); + } + + [Theory] + [InlineData(null)] + [InlineData("")] + [InlineData(" ")] + public 
async Task SubscribeAsync_ShouldThrowException_WhenConsumerGroupIsInvalid(string invalidConsumerGroup) + { + await Assert.ThrowsAsync(() => + _messagingProvider.SubscribeAsync(Topic, invalidConsumerGroup, (msg, token) => Task.CompletedTask, + CancellationToken.None)); + } + + [Fact] + public async Task SubscribeAsync_ShouldThrowException_WhenHandlerIsNull() + { + await Assert.ThrowsAsync(() => + _messagingProvider.SubscribeAsync(Topic, ConsumerGroup, null!, CancellationToken.None)); + } + + // PublishAsync validation tests + [Theory] + [InlineData(null)] + [InlineData("")] + [InlineData(" ")] + public async Task PublishAsync_ShouldThrowException_WhenTopicIsInvalid(string invalidTopic) + { + await Assert.ThrowsAsync(() => + _messagingProvider.PublishAsync(invalidTopic, Message, CancellationToken.None)); + } + + [Theory] + [InlineData(null)] + [InlineData("")] + [InlineData(" ")] + public async Task PublishAsync_ShouldThrowException_WhenMessageIsInvalid(string invalidMessage) + { + await Assert.ThrowsAsync(() => + _messagingProvider.PublishAsync(Topic, invalidMessage, CancellationToken.None)); + } + } +} \ No newline at end of file diff --git a/src/OpenDDD.Tests/Unit/Infrastructure/Events/IntegrationPublisherTests.cs b/src/OpenDDD.Tests/Unit/Infrastructure/Events/IntegrationPublisherTests.cs new file mode 100644 index 0000000..9cdc015 --- /dev/null +++ b/src/OpenDDD.Tests/Unit/Infrastructure/Events/IntegrationPublisherTests.cs @@ -0,0 +1,72 @@ +using FluentAssertions; +using OpenDDD.Domain.Model; +using OpenDDD.Infrastructure.Events; +using OpenDDD.Tests.Base; + +namespace OpenDDD.Tests.Unit.Infrastructure.Events +{ + public class IntegrationPublisherTests : UnitTests + { + private class TestIntegrationEvent : IIntegrationEvent { } + + [Fact] + public async Task PublishAsync_ShouldStoreEvent_WhenValidEventIsPublished() + { + // Arrange + var publisher = new IntegrationPublisher(); + var integrationEvent = new TestIntegrationEvent(); + + // Act + await publisher.PublishAsync(integrationEvent, CancellationToken.None); + + // Assert + publisher.GetPublishedEvents().Should().ContainSingle() + .Which.Should().Be(integrationEvent); + } + + [Fact] + public async Task PublishAsync_ShouldThrowArgumentNullException_WhenEventIsNull() + { + // Arrange + var publisher = new IntegrationPublisher(); + + // Act + Func act = async () => await publisher.PublishAsync(null!, CancellationToken.None); + + // Assert + await act.Should().ThrowAsync() + .WithParameterName("integrationEvent"); + } + + [Fact] + public async Task GetPublishedEvents_ShouldReturnEmptyList_WhenNoEventsArePublished() + { + // Arrange + var publisher = new IntegrationPublisher(); + + // Act + var events = publisher.GetPublishedEvents(); + + // Assert + events.Should().BeEmpty(); + } + + [Fact] + public async Task GetPublishedEvents_ShouldReturnAllPublishedEvents() + { + // Arrange + var publisher = new IntegrationPublisher(); + var event1 = new TestIntegrationEvent(); + var event2 = new TestIntegrationEvent(); + + // Act + await publisher.PublishAsync(event1, CancellationToken.None); + await publisher.PublishAsync(event2, CancellationToken.None); + var publishedEvents = publisher.GetPublishedEvents(); + + // Assert + publishedEvents.Should().HaveCount(2); + publishedEvents.Should().ContainInOrder(event1, event2); + } + } +} diff --git a/src/OpenDDD.Tests/Unit/Infrastructure/Events/Kafka/KafkaMessagingProviderTests.cs b/src/OpenDDD.Tests/Unit/Infrastructure/Events/Kafka/KafkaMessagingProviderTests.cs new file mode 100644 index 
0000000..181c2f3 --- /dev/null +++ b/src/OpenDDD.Tests/Unit/Infrastructure/Events/Kafka/KafkaMessagingProviderTests.cs @@ -0,0 +1,123 @@ +using Confluent.Kafka; +using Microsoft.Extensions.Logging; +using Moq; +using OpenDDD.Infrastructure.Events.Kafka; +using OpenDDD.Infrastructure.Events.Kafka.Factories; +using OpenDDD.Tests.Base; + +namespace OpenDDD.Tests.Unit.Infrastructure.Events.Kafka +{ + public class KafkaMessagingProviderTests : UnitTests + { + private readonly Mock> _mockProducer; + private readonly Mock _mockAdminClient; + private readonly Mock _mockConsumerFactory; + private readonly Mock> _mockConsumer; + private readonly Mock> _mockLogger; + private readonly Mock> _mockConsumerLogger; + private readonly KafkaMessagingProvider _provider; + private const string BootstrapServers = "localhost:9092"; + private const string Topic = "test-topic"; + private const string Message = "Hello, Kafka!"; + private const string ConsumerGroup = "test-group"; + + public KafkaMessagingProviderTests() + { + _mockProducer = new Mock>(); + _mockAdminClient = new Mock(); + _mockConsumerFactory = new Mock(); + _mockConsumer = new Mock>(); + _mockLogger = new Mock>(); + _mockConsumerLogger = new Mock>(); + + _provider = new KafkaMessagingProvider( + _mockAdminClient.Object, + _mockProducer.Object, + _mockConsumerFactory.Object, + autoCreateTopics: true, + _mockLogger.Object); + + _mockConsumerFactory + .Setup(f => f.Create(It.IsAny())) + .Returns((string consumerGroup) => + { + var kafkaConsumer = new KafkaConsumer(_mockConsumer.Object, consumerGroup, _mockConsumerLogger.Object); + return kafkaConsumer; + }); + + var metadata = new Metadata( + new List { new(1, "localhost", 9092) }, + new List { new(Topic, new List(), ErrorCode.NoError) }, // Ensure topic exists + -1, + "" + ); + _mockAdminClient + .Setup(a => a.GetMetadata(It.IsAny())) + .Returns(metadata); + } + + [Theory] + [InlineData(null, "producer", "consumerFactory", "logger")] + [InlineData("adminClient", null, "consumerFactory", "logger")] + [InlineData("adminClient", "producer", null, "logger")] + [InlineData("adminClient", "producer", "consumerFactory", null)] + public void Constructor_ShouldThrowException_WhenDependenciesAreNull( + string? adminClient, string? producer, string? consumerFactory, string? logger) + { + var mockAdmin = adminClient is null ? null! : _mockAdminClient.Object; + var mockProducer = producer is null ? null! : _mockProducer.Object; + var mockConsumerFactory = consumerFactory is null ? null! : _mockConsumerFactory.Object; + var mockLogger = logger is null ? null! 
: _mockLogger.Object; + + Assert.Throws(() => + new KafkaMessagingProvider(mockAdmin, mockProducer, mockConsumerFactory, true, mockLogger)); + } + + [Theory] + [InlineData(null)] + [InlineData("")] + [InlineData(" ")] + public async Task SubscribeAsync_ShouldThrowException_WhenTopicIsInvalid(string invalidTopic) + { + await Assert.ThrowsAsync(() => + _provider.SubscribeAsync(invalidTopic, ConsumerGroup, (msg, token) => Task.CompletedTask, CancellationToken.None)); + } + + [Theory] + [InlineData(null)] + [InlineData("")] + [InlineData(" ")] + public async Task SubscribeAsync_ShouldThrowException_WhenConsumerGroupIsInvalid(string invalidConsumerGroup) + { + await Assert.ThrowsAsync(() => + _provider.SubscribeAsync(Topic, invalidConsumerGroup, (msg, token) => Task.CompletedTask, CancellationToken.None)); + } + + [Fact] + public async Task SubscribeAsync_ShouldThrowException_WhenHandlerIsNull() + { + await Assert.ThrowsAsync(() => + _provider.SubscribeAsync(Topic, ConsumerGroup, null!, CancellationToken.None)); + } + + [Theory] + [InlineData(null)] + [InlineData("")] + [InlineData(" ")] + public async Task PublishAsync_ShouldThrowException_WhenTopicIsInvalid(string invalidTopic) + { + await Assert.ThrowsAsync(() => + _provider.PublishAsync(invalidTopic, Message, CancellationToken.None)); + } + + [Theory] + [InlineData(null)] + [InlineData("")] + [InlineData(" ")] + public async Task PublishAsync_ShouldThrowException_WhenMessageIsInvalid(string invalidMessage) + { + await Assert.ThrowsAsync(() => + _provider.PublishAsync(Topic, invalidMessage, CancellationToken.None)); + } + } +} diff --git a/src/OpenDDD.Tests/Unit/Infrastructure/Events/RabbitMq/RabbitMqMessagingProviderTests.cs b/src/OpenDDD.Tests/Unit/Infrastructure/Events/RabbitMq/RabbitMqMessagingProviderTests.cs new file mode 100644 index 0000000..3549224 --- /dev/null +++ b/src/OpenDDD.Tests/Unit/Infrastructure/Events/RabbitMq/RabbitMqMessagingProviderTests.cs @@ -0,0 +1,97 @@ +using Microsoft.Extensions.Logging; +using Moq; +using OpenDDD.Infrastructure.Events.RabbitMq; +using OpenDDD.Infrastructure.Events.RabbitMq.Factories; +using OpenDDD.Tests.Base; +using RabbitMQ.Client; + +namespace OpenDDD.Tests.Unit.Infrastructure.Events.RabbitMq +{ + public class RabbitMqMessagingProviderTests : UnitTests + { + private readonly Mock _mockConnectionFactory; + private readonly Mock _mockConsumerFactory; + private readonly Mock> _mockLogger; + private readonly RabbitMqMessagingProvider _provider; + + private const string TestTopic = "test-topic"; + private const string TestConsumerGroup = "test-group"; + + public RabbitMqMessagingProviderTests() + { + _mockConnectionFactory = new Mock(); + _mockConsumerFactory = new Mock(); + _mockLogger = new Mock>(); + + _provider = new RabbitMqMessagingProvider( + _mockConnectionFactory.Object, + _mockConsumerFactory.Object, + autoCreateTopics: true, + _mockLogger.Object + ); + } + + [Theory] + [InlineData(null, "consumerFactory", "logger")] + [InlineData("connectionFactory", null, "logger")] + [InlineData("connectionFactory", "consumerFactory", null)] + public void Constructor_ShouldThrowException_WhenDependenciesAreNull( + string? connectionFactory, string? consumerFactory, string? logger) + { + var mockConnectionFactory = connectionFactory is null ? null! : _mockConnectionFactory.Object; + var mockConsumerFactory = consumerFactory is null ? null! : _mockConsumerFactory.Object; + var mockLogger = logger is null ? null! 
: _mockLogger.Object; + + Assert.Throws(() => + new RabbitMqMessagingProvider(mockConnectionFactory, mockConsumerFactory, autoCreateTopics: true, mockLogger)); + } + + [Theory] + [InlineData(null)] + [InlineData("")] + [InlineData(" ")] + public async Task SubscribeAsync_ShouldThrowException_WhenTopicIsInvalid(string invalidTopic) + { + await Assert.ThrowsAsync(() => + _provider.SubscribeAsync(invalidTopic, TestConsumerGroup, (msg, token) => Task.CompletedTask, CancellationToken.None)); + } + + [Theory] + [InlineData(null)] + [InlineData("")] + [InlineData(" ")] + public async Task SubscribeAsync_ShouldThrowException_WhenConsumerGroupIsInvalid(string invalidConsumerGroup) + { + await Assert.ThrowsAsync(() => + _provider.SubscribeAsync(TestTopic, invalidConsumerGroup, (msg, token) => Task.CompletedTask, CancellationToken.None)); + } + + [Fact] + public async Task SubscribeAsync_ShouldThrowException_WhenHandlerIsNull() + { + // Act & Assert + await Assert.ThrowsAsync(() => + _provider.SubscribeAsync(TestTopic, TestConsumerGroup, null!, CancellationToken.None)); + } + + [Theory] + [InlineData(null)] + [InlineData("")] + [InlineData(" ")] + public async Task PublishAsync_ShouldThrowException_WhenTopicIsInvalid(string invalidTopic) + { + await Assert.ThrowsAsync(() => + _provider.PublishAsync(invalidTopic, "message", CancellationToken.None)); + } + + [Theory] + [InlineData(null)] + [InlineData("")] + [InlineData(" ")] + public async Task PublishAsync_ShouldThrowException_WhenMessageIsInvalid(string invalidMessage) + { + await Assert.ThrowsAsync(() => + _provider.PublishAsync(TestTopic, invalidMessage, CancellationToken.None)); + } + } +} diff --git a/src/OpenDDD/Tests/Infrastructure/Events/TestEvent.cs b/src/OpenDDD.Tests/Unit/Infrastructure/Events/TestEvent.cs similarity index 90% rename from src/OpenDDD/Tests/Infrastructure/Events/TestEvent.cs rename to src/OpenDDD.Tests/Unit/Infrastructure/Events/TestEvent.cs index ecf00c0..048da8b 100644 --- a/src/OpenDDD/Tests/Infrastructure/Events/TestEvent.cs +++ b/src/OpenDDD.Tests/Unit/Infrastructure/Events/TestEvent.cs @@ -1,6 +1,6 @@ using OpenDDD.Domain.Model; -namespace OpenDDD.Tests.Infrastructure.Events +namespace OpenDDD.Tests.Unit.Infrastructure.Events { public class TestEvent : IDomainEvent { diff --git a/src/OpenDDD/Tests/Infrastructure/Persistence/OpenDdd/Expressions/JsonbExpressionParserTests.cs b/src/OpenDDD.Tests/Unit/Infrastructure/Persistence/OpenDdd/Expressions/JsonbExpressionParserTests.cs similarity index 96% rename from src/OpenDDD/Tests/Infrastructure/Persistence/OpenDdd/Expressions/JsonbExpressionParserTests.cs rename to src/OpenDDD.Tests/Unit/Infrastructure/Persistence/OpenDdd/Expressions/JsonbExpressionParserTests.cs index 1bc3ba4..8278f51 100644 --- a/src/OpenDDD/Tests/Infrastructure/Persistence/OpenDdd/Expressions/JsonbExpressionParserTests.cs +++ b/src/OpenDDD.Tests/Unit/Infrastructure/Persistence/OpenDdd/Expressions/JsonbExpressionParserTests.cs @@ -1,11 +1,11 @@ using System.Linq.Expressions; -using Xunit; using FluentAssertions; using OpenDDD.Infrastructure.Persistence.OpenDdd.Expressions; +using OpenDDD.Tests.Base; -namespace OpenDDD.Tests.Infrastructure.Persistence.OpenDdd.Expressions +namespace OpenDDD.Tests.Unit.Infrastructure.Persistence.OpenDdd.Expressions { - public class JsonbExpressionParserTests + public class JsonbExpressionParserTests : UnitTests { private class Customer { diff --git a/src/OpenDDD/Tests/Infrastructure/Persistence/OpenDdd/Serializers/OpenDddAggregateSerializerTests.cs 
b/src/OpenDDD.Tests/Unit/Infrastructure/Persistence/OpenDdd/Serializers/OpenDddAggregateSerializerTests.cs similarity index 90% rename from src/OpenDDD/Tests/Infrastructure/Persistence/OpenDdd/Serializers/OpenDddAggregateSerializerTests.cs rename to src/OpenDDD.Tests/Unit/Infrastructure/Persistence/OpenDdd/Serializers/OpenDddAggregateSerializerTests.cs index 436200a..eba8b40 100644 --- a/src/OpenDDD/Tests/Infrastructure/Persistence/OpenDdd/Serializers/OpenDddAggregateSerializerTests.cs +++ b/src/OpenDDD.Tests/Unit/Infrastructure/Persistence/OpenDdd/Serializers/OpenDddAggregateSerializerTests.cs @@ -1,10 +1,10 @@ -using Xunit; -using OpenDDD.Infrastructure.Persistence.OpenDdd.Serializers; -using OpenDDD.Tests.Domain.Model; +using OpenDDD.Infrastructure.Persistence.OpenDdd.Serializers; +using OpenDDD.Tests.Base; +using OpenDDD.Tests.Base.Domain.Model; -namespace OpenDDD.Tests.Infrastructure.Persistence.OpenDdd.Serializers +namespace OpenDDD.Tests.Unit.Infrastructure.Persistence.OpenDdd.Serializers { - public class OpenDddAggregateSerializerTests + public class OpenDddAggregateSerializerTests : UnitTests { private readonly OpenDddAggregateSerializer _serializer; diff --git a/src/OpenDDD/Tests/Infrastructure/Persistence/OpenDdd/Serializers/OpenDddSerializerTests.cs b/src/OpenDDD.Tests/Unit/Infrastructure/Persistence/OpenDdd/Serializers/OpenDddSerializerTests.cs similarity index 92% rename from src/OpenDDD/Tests/Infrastructure/Persistence/OpenDdd/Serializers/OpenDddSerializerTests.cs rename to src/OpenDDD.Tests/Unit/Infrastructure/Persistence/OpenDdd/Serializers/OpenDddSerializerTests.cs index abb6c86..7212305 100644 --- a/src/OpenDDD/Tests/Infrastructure/Persistence/OpenDdd/Serializers/OpenDddSerializerTests.cs +++ b/src/OpenDDD.Tests/Unit/Infrastructure/Persistence/OpenDdd/Serializers/OpenDddSerializerTests.cs @@ -1,9 +1,9 @@ using OpenDDD.Infrastructure.Persistence.OpenDdd.Serializers; -using Xunit; +using OpenDDD.Tests.Base; -namespace OpenDDD.Tests.Infrastructure.Persistence.OpenDdd.Serializers +namespace OpenDDD.Tests.Unit.Infrastructure.Persistence.OpenDdd.Serializers { - public class OpenDddSerializerTests + public class OpenDddSerializerTests : UnitTests { private readonly OpenDddSerializer _serializer; diff --git a/src/OpenDDD.sln b/src/OpenDDD.sln index 47ad203..e6015c7 100644 --- a/src/OpenDDD.sln +++ b/src/OpenDDD.sln @@ -1,7 +1,10 @@  Microsoft Visual Studio Solution File, Format Version 12.00 +# Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "OpenDDD", "OpenDDD\OpenDDD.csproj", "{B5482293-BC97-4009-B3AD-2B0789526E52}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "OpenDDD.Tests", "OpenDDD.Tests\OpenDDD.Tests.csproj", "{75DD96D0-3849-4F8F-BC7D-CBE2560D9471}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -12,5 +15,9 @@ Global {B5482293-BC97-4009-B3AD-2B0789526E52}.Debug|Any CPU.Build.0 = Debug|Any CPU {B5482293-BC97-4009-B3AD-2B0789526E52}.Release|Any CPU.ActiveCfg = Release|Any CPU {B5482293-BC97-4009-B3AD-2B0789526E52}.Release|Any CPU.Build.0 = Release|Any CPU + {75DD96D0-3849-4F8F-BC7D-CBE2560D9471}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {75DD96D0-3849-4F8F-BC7D-CBE2560D9471}.Debug|Any CPU.Build.0 = Debug|Any CPU + {75DD96D0-3849-4F8F-BC7D-CBE2560D9471}.Release|Any CPU.ActiveCfg = Release|Any CPU + {75DD96D0-3849-4F8F-BC7D-CBE2560D9471}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection EndGlobal diff --git a/src/OpenDDD/API/Extensions/OpenDddServiceCollectionExtensions.cs 
b/src/OpenDDD/API/Extensions/OpenDddServiceCollectionExtensions.cs index 61fcdf1..eac6823 100644 --- a/src/OpenDDD/API/Extensions/OpenDddServiceCollectionExtensions.cs +++ b/src/OpenDDD/API/Extensions/OpenDddServiceCollectionExtensions.cs @@ -1,10 +1,15 @@ using System.Reflection; +using Azure.Messaging.ServiceBus; +using Azure.Messaging.ServiceBus.Administration; using Microsoft.AspNetCore.Hosting; using Microsoft.EntityFrameworkCore; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; using Npgsql; +using Confluent.Kafka; +using RabbitMQ.Client; using OpenDDD.API.Attributes; using OpenDDD.API.HostedServices; using OpenDDD.API.Options; @@ -18,7 +23,9 @@ using OpenDDD.Infrastructure.Events.Base; using OpenDDD.Infrastructure.Events.InMemory; using OpenDDD.Infrastructure.Events.Kafka; +using OpenDDD.Infrastructure.Events.Kafka.Factories; using OpenDDD.Infrastructure.Events.RabbitMq; +using OpenDDD.Infrastructure.Events.RabbitMq.Factories; using OpenDDD.Infrastructure.Persistence.DatabaseSession; using OpenDDD.Infrastructure.Persistence.EfCore.Base; using OpenDDD.Infrastructure.Persistence.EfCore.DatabaseSession; @@ -256,17 +263,94 @@ private static void AddInMemoryOpenDddPersistence(this IServiceCollection servic private static void AddAzureServiceBus(this IServiceCollection services) { - services.AddSingleton(); + services.AddSingleton(provider => + { + var options = provider.GetRequiredService>().Value; + var azureOptions = options.AzureServiceBus ?? throw new InvalidOperationException("Azure Service Bus options are missing."); + + if (string.IsNullOrWhiteSpace(azureOptions.ConnectionString)) + { + throw new InvalidOperationException("Azure Service Bus connection string is missing."); + } + + return new ServiceBusClient(azureOptions.ConnectionString); + }); + + services.AddSingleton(provider => + { + var options = provider.GetRequiredService>().Value; + var azureOptions = options.AzureServiceBus ?? throw new InvalidOperationException("Azure Service Bus options are missing."); + + return new ServiceBusAdministrationClient(azureOptions.ConnectionString); + }); + + services.AddSingleton(provider => + { + var options = provider.GetRequiredService>().Value; + var azureOptions = options.AzureServiceBus ?? throw new InvalidOperationException("Azure Service Bus options are missing."); + + return new AzureServiceBusMessagingProvider( + provider.GetRequiredService(), + provider.GetRequiredService(), + azureOptions.AutoCreateTopics, + provider.GetRequiredService>() + ); + }); } private static void AddRabbitMq(this IServiceCollection services) { - services.AddSingleton(); + services.AddSingleton(provider => + { + var options = provider.GetRequiredService>().Value; + var rabbitMqOptions = options.RabbitMq ?? 
throw new InvalidOperationException("RabbitMQ options are missing."); + + if (string.IsNullOrWhiteSpace(rabbitMqOptions.HostName)) + { + throw new InvalidOperationException("RabbitMQ host is missing."); + } + + var connectionFactory = new ConnectionFactory + { + HostName = rabbitMqOptions.HostName, + Port = rabbitMqOptions.Port, + UserName = rabbitMqOptions.Username, + Password = rabbitMqOptions.Password, + VirtualHost = rabbitMqOptions.VirtualHost + }; + + var logger = provider.GetRequiredService>(); + + var consumerFactory = new RabbitMqConsumerFactory(logger); + + return new RabbitMqMessagingProvider( + connectionFactory, + consumerFactory, + rabbitMqOptions.AutoCreateTopics, + logger); + }); } private static void AddKafka(this IServiceCollection services) { - services.AddSingleton(); + services.AddSingleton(provider => + { + var options = provider.GetRequiredService>().Value; + var kafkaOptions = options.Kafka ?? throw new InvalidOperationException("Kafka options are missing."); + + if (string.IsNullOrWhiteSpace(kafkaOptions.BootstrapServers)) + throw new InvalidOperationException("Kafka bootstrap servers must be configured."); + + var logger = provider.GetRequiredService>(); + var consumerLogger = provider.GetRequiredService>(); + return new KafkaMessagingProvider( + new AdminClientBuilder(new AdminClientConfig { BootstrapServers = kafkaOptions.BootstrapServers, ClientId = "OpenDDD" }).Build(), + new ProducerBuilder(new ProducerConfig { BootstrapServers = kafkaOptions.BootstrapServers, ClientId = "OpenDDD" }).Build(), + new KafkaConsumerFactory(kafkaOptions.BootstrapServers, consumerLogger), + kafkaOptions.AutoCreateTopics, + logger + ); + }); } private static void AddInMemoryMessaging(this IServiceCollection services) diff --git a/src/OpenDDD/API/Options/OpenDddEventsOptions.cs b/src/OpenDDD/API/Options/OpenDddEventsOptions.cs index 2db84cd..9627462 100644 --- a/src/OpenDDD/API/Options/OpenDddEventsOptions.cs +++ b/src/OpenDDD/API/Options/OpenDddEventsOptions.cs @@ -2,8 +2,8 @@ { public class OpenDddEventsOptions { - public string DomainEventTopicTemplate { get; set; } = "YourProjectName.Domain.{EventName}"; - public string IntegrationEventTopicTemplate { get; set; } = "YourProjectName.Interchange.{EventName}"; + public string DomainEventTopic { get; set; } = "YourProjectName.Domain.{EventName}"; + public string IntegrationEventTopic { get; set; } = "YourProjectName.Interchange.{EventName}"; public string ListenerGroup { get; set; } = "Default"; } } diff --git a/src/OpenDDD/API/Options/OpenDddOptions.cs b/src/OpenDDD/API/Options/OpenDddOptions.cs index 4a0cf96..b9524ef 100644 --- a/src/OpenDDD/API/Options/OpenDddOptions.cs +++ b/src/OpenDDD/API/Options/OpenDddOptions.cs @@ -62,7 +62,13 @@ public OpenDddOptions UseInMemoryMessaging() return this; } - public OpenDddOptions UseRabbitMq(string hostName, int port, string username, string password, string virtualHost = "/") + public OpenDddOptions UseRabbitMq( + string hostName, + int port, + string username, + string password, + string virtualHost = "/", + bool autoCreateTopics = true) { MessagingProvider = "RabbitMq"; RabbitMq = new OpenDddRabbitMqOptions @@ -71,12 +77,13 @@ public OpenDddOptions UseRabbitMq(string hostName, int port, string username, st Port = port, Username = username, Password = password, - VirtualHost = virtualHost + VirtualHost = virtualHost, + AutoCreateTopics = autoCreateTopics }; return this; } - public OpenDddOptions UseKafka(string bootstrapServers) + public OpenDddOptions UseKafka(string bootstrapServers, bool 
autoCreateTopics = true) { MessagingProvider = "Kafka"; - Kafka = new OpenDddKafkaOptions { BootstrapServers = bootstrapServers }; + Kafka = new OpenDddKafkaOptions { BootstrapServers = bootstrapServers, AutoCreateTopics = autoCreateTopics }; @@ -103,8 +110,8 @@ public OpenDddOptions SetEventListenerGroup(string group) public OpenDddOptions SetEventTopics(string domainEventTemplate, string integrationEventTemplate) { - Events.DomainEventTopicTemplate = domainEventTemplate; - Events.IntegrationEventTopicTemplate = integrationEventTemplate; + Events.DomainEventTopic = domainEventTemplate; + Events.IntegrationEventTopic = integrationEventTemplate; return this; } diff --git a/src/OpenDDD/Domain/Model/Helpers/EventTopicHelper.cs b/src/OpenDDD/Domain/Model/Helpers/EventTopicHelper.cs index b2b3cca..c9c3b32 100644 --- a/src/OpenDDD/Domain/Model/Helpers/EventTopicHelper.cs +++ b/src/OpenDDD/Domain/Model/Helpers/EventTopicHelper.cs @@ -18,8 +18,8 @@ public static string DetermineTopic(Type eventClassType, OpenDddEventsOptions ev // Select the correct format from configuration string topicTemplate = isIntegrationEvent - ? eventOptions.IntegrationEventTopicTemplate - : eventOptions.DomainEventTopicTemplate; + ? eventOptions.IntegrationEventTopic + : eventOptions.DomainEventTopic; // Ensure the topic format contains "{EventName}" if (!topicTemplate.Contains("{EventName}")) @@ -53,8 +53,8 @@ public static string DetermineTopic(string eventType, string eventName, OpenDddE // Select the correct format from configuration string topicTemplate = eventType == "Integration" - ? eventOptions.IntegrationEventTopicTemplate - : eventOptions.DomainEventTopicTemplate; + ? eventOptions.IntegrationEventTopic + : eventOptions.DomainEventTopic; // Ensure the topic format contains "{EventName}" if (!topicTemplate.Contains("{EventName}")) diff --git a/src/OpenDDD/Infrastructure/Events/Azure/AzureServiceBusMessagingProvider.cs b/src/OpenDDD/Infrastructure/Events/Azure/AzureServiceBusMessagingProvider.cs index 273638b..ba7bd4f 100644 --- a/src/OpenDDD/Infrastructure/Events/Azure/AzureServiceBusMessagingProvider.cs +++ b/src/OpenDDD/Infrastructure/Events/Azure/AzureServiceBusMessagingProvider.cs @@ -1,48 +1,56 @@ -using Azure.Messaging.ServiceBus; -using Azure.Messaging.ServiceBus.Administration; +using System.Collections.Concurrent; using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using OpenDDD.API.Options; -using OpenDDD.Infrastructure.Events.Azure.Options; +using OpenDDD.Infrastructure.Events.Base; +using Azure.Messaging.ServiceBus; +using Azure.Messaging.ServiceBus.Administration; namespace OpenDDD.Infrastructure.Events.Azure { - public class AzureServiceBusMessagingProvider : IMessagingProvider + public class AzureServiceBusMessagingProvider : IMessagingProvider, IAsyncDisposable { private readonly ServiceBusClient _client; - private readonly OpenDddAzureServiceBusOptions _options; + private readonly ServiceBusAdministrationClient _adminClient; + private readonly bool _autoCreateTopics; private readonly ILogger _logger; + private readonly ConcurrentDictionary _subscriptions = new(); + private readonly ConcurrentDictionary _topicCache = new(); + private readonly TimeSpan _cacheExpiration = TimeSpan.FromSeconds(600); + private bool _disposed; public AzureServiceBusMessagingProvider( - IOptions options, + ServiceBusClient client, + ServiceBusAdministrationClient adminClient, + bool autoCreateTopics, ILogger logger) { - var openDddOptions = options.Value ?? throw new ArgumentNullException(nameof(options)); - _options = openDddOptions.AzureServiceBus ??
throw new InvalidOperationException("AzureServiceBus settings are missing in OpenDddOptions."); - - if (string.IsNullOrWhiteSpace(_options.ConnectionString)) - { - throw new InvalidOperationException("Azure Service Bus connection string is missing."); - } - - _client = new ServiceBusClient(_options.ConnectionString); + _client = client ?? throw new ArgumentNullException(nameof(client)); + _adminClient = adminClient ?? throw new ArgumentNullException(nameof(adminClient)); + _autoCreateTopics = autoCreateTopics; _logger = logger ?? throw new ArgumentNullException(nameof(logger)); } - public async Task SubscribeAsync(string topic, string consumerGroup, Func messageHandler, CancellationToken cancellationToken = default) + public async Task SubscribeAsync(string topic, string consumerGroup, Func messageHandler, CancellationToken cancellationToken = default) { - topic = topic.ToLower(); + if (string.IsNullOrWhiteSpace(topic)) + { + throw new ArgumentException("Topic cannot be null or empty.", nameof(topic)); + } - var subscriptionName = consumerGroup; + if (string.IsNullOrWhiteSpace(consumerGroup)) + { + throw new ArgumentException("Consumer group cannot be null or empty.", nameof(consumerGroup)); + } - if (_options.AutoCreateTopics) + if (messageHandler is null) { - await CreateTopicIfNotExistsAsync(topic, cancellationToken); + throw new ArgumentNullException(nameof(messageHandler)); } - - await CreateSubscriptionIfNotExistsAsync(topic, subscriptionName, cancellationToken); - var processor = _client.CreateProcessor(topic, subscriptionName); + await EnsureTopicExistsAsync(topic, cancellationToken); + + await CreateSubscriptionIfNotExistsAsync(topic, consumerGroup, cancellationToken); + + var processor = _client.CreateProcessor(topic, consumerGroup); processor.ProcessMessageAsync += async args => { @@ -52,49 +60,105 @@ public async Task SubscribeAsync(string topic, string consumerGroup, Func { - _logger.LogError(args.Exception, "Error processing message in subscription {SubscriptionName}", subscriptionName); + _logger.LogError(args.Exception, "Error processing message in subscription {SubscriptionName}", consumerGroup); return Task.CompletedTask; }; - _logger.LogInformation("Starting message processor for topic '{Topic}' and subscription '{Subscription}'", topic, subscriptionName); + var subscription = new AzureServiceBusSubscription(topic, consumerGroup, processor); + _subscriptions[subscription.Id] = subscription; + + _logger.LogInformation("Starting message processor for topic '{Topic}' and subscription '{Subscription}', Subscription ID: {SubscriptionId}", topic, consumerGroup, subscription.Id); await processor.StartProcessingAsync(cancellationToken); + + return subscription; } - public async Task PublishAsync(string topic, string message, CancellationToken cancellationToken = default) + public async Task UnsubscribeAsync(ISubscription subscription, CancellationToken cancellationToken = default) { - topic = topic.ToLower(); + if (subscription == null) + throw new ArgumentNullException(nameof(subscription)); - if (_options.AutoCreateTopics) + if (subscription is not AzureServiceBusSubscription serviceBusSubscription || !_subscriptions.TryRemove(serviceBusSubscription.Id, out _)) { - await CreateTopicIfNotExistsAsync(topic, cancellationToken); + _logger.LogWarning("No active subscription found with ID {SubscriptionId}", subscription.Id); + return; } + _logger.LogInformation("Unsubscribing from Azure Service Bus topic '{Topic}' and subscription '{Subscription}', Subscription ID: 
{SubscriptionId}", serviceBusSubscription.Topic, serviceBusSubscription.ConsumerGroup, serviceBusSubscription.Id); + + await serviceBusSubscription.Consumer.StopProcessingAsync(cancellationToken); + await serviceBusSubscription.DisposeAsync(); + } + + public async Task PublishAsync(string topic, string message, CancellationToken cancellationToken = default) + { + if (string.IsNullOrWhiteSpace(topic)) + throw new ArgumentException("Topic cannot be null or empty.", nameof(topic)); + + if (string.IsNullOrWhiteSpace(message)) + throw new ArgumentException("Message cannot be null or empty.", nameof(message)); + + await EnsureTopicExistsAsync(topic, cancellationToken); + var sender = _client.CreateSender(topic); await sender.SendMessageAsync(new ServiceBusMessage(message), cancellationToken); _logger.LogInformation("Published message to topic '{Topic}'", topic); } - private async Task CreateTopicIfNotExistsAsync(string topic, CancellationToken cancellationToken) + private async Task EnsureTopicExistsAsync(string topic, CancellationToken cancellationToken) { - var adminClient = new ServiceBusAdministrationClient(_options.ConnectionString); - topic = topic.ToLower(); + if (_topicCache.TryGetValue(topic, out var lastChecked) && DateTime.UtcNow - lastChecked < _cacheExpiration) + { + _logger.LogDebug("Skipping topic check for '{Topic}' (cached result).", topic); + return; + } - if (!await adminClient.TopicExistsAsync(topic, cancellationToken)) + var topicExists = await _adminClient.TopicExistsAsync(topic, cancellationToken); + + if (topicExists) { - await adminClient.CreateTopicAsync(topic, cancellationToken); + _topicCache[topic] = DateTime.UtcNow; + return; + } + + if (_autoCreateTopics) + { + await _adminClient.CreateTopicAsync(topic, cancellationToken); _logger.LogInformation("Created topic: {Topic}", topic); + _topicCache[topic] = DateTime.UtcNow; + return; } + + throw new InvalidOperationException($"Topic '{topic}' does not exist. 
Enable 'autoCreateTopics' to create topics automatically."); } private async Task CreateSubscriptionIfNotExistsAsync(string topic, string subscriptionName, CancellationToken cancellationToken) { - var adminClient = new ServiceBusAdministrationClient(_options.ConnectionString); - - if (!await adminClient.SubscriptionExistsAsync(topic, subscriptionName, cancellationToken)) + if (!await _adminClient.SubscriptionExistsAsync(topic, subscriptionName, cancellationToken)) { - await adminClient.CreateSubscriptionAsync(topic, subscriptionName, cancellationToken); + await _adminClient.CreateSubscriptionAsync(topic, subscriptionName, cancellationToken); _logger.LogInformation("Created subscription: {Subscription} for topic: {Topic}", subscriptionName, topic); } } + + public async ValueTask DisposeAsync() + { + if (_disposed) return; + _disposed = true; + + _logger.LogDebug("Disposing AzureServiceBusMessagingProvider..."); + + foreach (var subscription in _subscriptions.Values) + { + if (subscription.Consumer.IsProcessing) + { + await subscription.Consumer.StopProcessingAsync(CancellationToken.None); + } + await subscription.DisposeAsync(); + } + + _subscriptions.Clear(); + await _client.DisposeAsync(); + } } } diff --git a/src/OpenDDD/Infrastructure/Events/Azure/AzureServiceBusSubscription.cs b/src/OpenDDD/Infrastructure/Events/Azure/AzureServiceBusSubscription.cs new file mode 100644 index 0000000..7495d5b --- /dev/null +++ b/src/OpenDDD/Infrastructure/Events/Azure/AzureServiceBusSubscription.cs @@ -0,0 +1,23 @@ +using OpenDDD.Infrastructure.Events.Base; +using Azure.Messaging.ServiceBus; + +namespace OpenDDD.Infrastructure.Events.Azure +{ + public class AzureServiceBusSubscription : Subscription + { + public AzureServiceBusSubscription(string topic, string consumerGroup, ServiceBusProcessor processor) + : base(topic, consumerGroup, processor) + { + + } + + public override async ValueTask DisposeAsync() + { + if (Consumer.IsProcessing) + { + await Consumer.StopProcessingAsync(); + } + await Consumer.DisposeAsync(); + } + } +} diff --git a/src/OpenDDD/Infrastructure/Events/Base/ISubscription.cs b/src/OpenDDD/Infrastructure/Events/Base/ISubscription.cs new file mode 100644 index 0000000..2bb6df7 --- /dev/null +++ b/src/OpenDDD/Infrastructure/Events/Base/ISubscription.cs @@ -0,0 +1,9 @@ +namespace OpenDDD.Infrastructure.Events.Base +{ + public interface ISubscription : IAsyncDisposable + { + string Id { get; } + string Topic { get; } + string ConsumerGroup { get; } + } +} diff --git a/src/OpenDDD/Infrastructure/Events/Base/Subscription.cs b/src/OpenDDD/Infrastructure/Events/Base/Subscription.cs new file mode 100644 index 0000000..72a8dbb --- /dev/null +++ b/src/OpenDDD/Infrastructure/Events/Base/Subscription.cs @@ -0,0 +1,23 @@ +namespace OpenDDD.Infrastructure.Events.Base +{ + public class Subscription : ISubscription where TConsumer : IAsyncDisposable + { + public string Id { get; } = Guid.NewGuid().ToString(); + public string Topic { get; } + public string ConsumerGroup { get; } + public TConsumer? Consumer { get; } + + public Subscription(string topic, string consumerGroup, TConsumer? consumer) + { + Topic = topic ?? throw new ArgumentException(nameof(topic)); + ConsumerGroup = consumerGroup ?? 
throw new ArgumentException(nameof(consumerGroup)); + Consumer = consumer; + } + + public virtual async ValueTask DisposeAsync() + { + await Consumer.DisposeAsync(); + await ValueTask.CompletedTask; + } + } +} diff --git a/src/OpenDDD/Infrastructure/Events/IMessagingProvider.cs b/src/OpenDDD/Infrastructure/Events/IMessagingProvider.cs index 1f89396..a930fc3 100644 --- a/src/OpenDDD/Infrastructure/Events/IMessagingProvider.cs +++ b/src/OpenDDD/Infrastructure/Events/IMessagingProvider.cs @@ -1,8 +1,16 @@ -namespace OpenDDD.Infrastructure.Events +using OpenDDD.Infrastructure.Events.Base; + +namespace OpenDDD.Infrastructure.Events { public interface IMessagingProvider { - Task SubscribeAsync(string topic, string consumerGroup, Func messageHandler, CancellationToken cancellationToken = default); + Task SubscribeAsync( + string topic, + string consumerGroup, + Func messageHandler, + CancellationToken cancellationToken = default); + + Task UnsubscribeAsync(ISubscription subscription, CancellationToken cancellationToken = default); Task PublishAsync(string topic, string message, CancellationToken cancellationToken = default); } } diff --git a/src/OpenDDD/Infrastructure/Events/InMemory/InMemoryMessagingProvider.cs b/src/OpenDDD/Infrastructure/Events/InMemory/InMemoryMessagingProvider.cs index 21a51d3..586f912 100644 --- a/src/OpenDDD/Infrastructure/Events/InMemory/InMemoryMessagingProvider.cs +++ b/src/OpenDDD/Infrastructure/Events/InMemory/InMemoryMessagingProvider.cs @@ -1,56 +1,182 @@ using System.Collections.Concurrent; using Microsoft.Extensions.Logging; +using OpenDDD.Infrastructure.Events.Base; namespace OpenDDD.Infrastructure.Events.InMemory { - public class InMemoryMessagingProvider : IMessagingProvider + public class InMemoryMessagingProvider : IMessagingProvider, IAsyncDisposable { - private readonly ConcurrentDictionary>> _subscribers = new(); + private readonly ConcurrentDictionary> _messageLog = new(); + private readonly ConcurrentDictionary _consumerGroupOffsets = new(); + private readonly ConcurrentDictionary _subscriptions = new(); + private readonly ConcurrentQueue<(string Topic, string Message, string ConsumerGroup, int RetryCount)> _retryQueue = new(); private readonly ILogger _logger; + private readonly TimeSpan _initialRetryDelay = TimeSpan.FromSeconds(1); + private readonly Task _retryTask; + private readonly int _maxRetries = 5; + private readonly CancellationTokenSource _cts = new(); public InMemoryMessagingProvider(ILogger logger) { _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _retryTask = Task.Run(ProcessRetries, _cts.Token); // Start retry processing loop } - public Task SubscribeAsync(string topic, string consumerGroup, Func messageHandler, CancellationToken ct) + public async Task SubscribeAsync(string topic, string consumerGroup, Func messageHandler, CancellationToken ct = default) { + if (messageHandler is null) + throw new ArgumentException(nameof(messageHandler)); + + var subscription = new InMemorySubscription(topic, consumerGroup, messageHandler); + _subscriptions[subscription.Id] = subscription; + + _logger.LogDebug("Subscribed to topic: {Topic} in listener group: {ConsumerGroup}, Subscription ID: {SubscriptionId}", + topic, consumerGroup, subscription.Id); + var groupKey = $"{topic}:{consumerGroup}"; - var handlers = _subscribers.GetOrAdd(groupKey, _ => new ConcurrentBag>()); + if (!_consumerGroupOffsets.ContainsKey(groupKey)) + { + _consumerGroupOffsets[groupKey] = _messageLog.TryGetValue(topic, out var messages) ? 
messages.Count : 0; + _logger.LogDebug("First subscription in consumer group '{ConsumerGroup}', starting at offset {Offset}.", + consumerGroup, _consumerGroupOffsets[groupKey]); + } + else + { + var offset = _consumerGroupOffsets[groupKey]; + if (_messageLog.TryGetValue(topic, out var storedMessages)) + { + var unseenMessages = storedMessages.Skip(offset).ToList(); + foreach (var msg in unseenMessages) + { + _ = Task.Run(async () => await messageHandler(msg, ct), ct); + _consumerGroupOffsets[groupKey]++; + } + } + } - handlers.Add(messageHandler); + return subscription; + } - _logger.LogInformation("Subscribed to topic: {Topic} in listener group: {ConsumerGroup}", topic, consumerGroup); - return Task.CompletedTask; + public async Task UnsubscribeAsync(ISubscription subscription, CancellationToken cancellationToken = default) + { + if (subscription == null) + throw new ArgumentNullException(nameof(subscription)); + + if (!_subscriptions.TryRemove(subscription.Id, out _)) + { + _logger.LogWarning("No active subscription found with ID {SubscriptionId}", subscription.Id); + return; + } + + _logger.LogDebug("Unsubscribed from topic: {Topic} in listener group: {ConsumerGroup}, Subscription ID: {SubscriptionId}", + subscription.Topic, subscription.ConsumerGroup, subscription.Id); + + await subscription.DisposeAsync(); } public Task PublishAsync(string topic, string message, CancellationToken ct) { - var matchingGroups = _subscribers.Keys.Where(key => key.StartsWith($"{topic}:")); + if (string.IsNullOrWhiteSpace(topic)) + throw new ArgumentException("Topic cannot be null or empty.", nameof(topic)); + + if (string.IsNullOrWhiteSpace(message)) + throw new ArgumentException("Message cannot be null or empty.", nameof(message)); + + var messages = _messageLog.GetOrAdd(topic, _ => new List()); + lock (messages) + { + messages.Add(message); + } + + var groupSubscriptions = _subscriptions.Values + .Where(s => s.Topic == topic) + .GroupBy(s => s.ConsumerGroup); + + foreach (var group in groupSubscriptions) + { + var subscription = group.OrderBy(_ => Guid.NewGuid()).FirstOrDefault(); + if (subscription == null) continue; + + _ = Task.Run(async () => + { + try + { + await subscription.MessageHandler(message, ct); + _consumerGroupOffsets.AddOrUpdate($"{topic}:{subscription.ConsumerGroup}", 0, (_, currentOffset) => currentOffset + 1); + } + catch (Exception ex) + { + _logger.LogError(ex, $"Error in handler for topic '{topic}' in consumer group '{subscription.ConsumerGroup}': {ex.Message}"); + _retryQueue.Enqueue((topic, message, subscription.ConsumerGroup, 1)); + } + }, ct); + } + + return Task.CompletedTask; + } - foreach (var groupKey in matchingGroups) + private async Task ProcessRetries() + { + while (!_cts.Token.IsCancellationRequested) { - if (_subscribers.TryGetValue(groupKey, out var handlers)) + if (_retryQueue.TryDequeue(out var retryMessage)) { - foreach (var handler in handlers) + var (topic, message, consumerGroup, retryCount) = retryMessage; + + if (retryCount > _maxRetries) { - _ = Task.Run(async () => + _logger.LogError("Message dropped after exceeding max retries: {Message}", message); + continue; + } + + var subscription = _subscriptions.Values.FirstOrDefault(s => s.Topic == topic && s.ConsumerGroup == consumerGroup); + if (subscription != null) + { + await Task.Delay(ComputeBackoff(retryCount), _cts.Token); + + try { - try - { - await handler(message, ct); - } - catch (Exception ex) - { - _logger.LogError(ex, $"Error in handler for topic '{topic}': {ex.Message}"); - } - }, ct); + await 
subscription.MessageHandler(message, _cts.Token); + } + catch (Exception ex) + { + _logger.LogError(ex, $"Retry failed for message: {message}"); + _retryQueue.Enqueue((topic, message, consumerGroup, retryCount + 1)); + } } } + else + { + await Task.Delay(500, _cts.Token); + } } + } - return Task.CompletedTask; + private TimeSpan ComputeBackoff(int retryCount) + { + return TimeSpan.FromMilliseconds(_initialRetryDelay.TotalMilliseconds * Math.Pow(2, retryCount)); + } + + public async ValueTask DisposeAsync() + { + _cts.Cancel(); + + foreach (var subscription in _subscriptions.Values) + { + await subscription.DisposeAsync(); + } + + _subscriptions.Clear(); + + try + { + await _retryTask; + } + catch (OperationCanceledException) + { + _logger.LogDebug("Retry processing task canceled."); + } } } } diff --git a/src/OpenDDD/Infrastructure/Events/InMemory/InMemorySubscription.cs b/src/OpenDDD/Infrastructure/Events/InMemory/InMemorySubscription.cs new file mode 100644 index 0000000..35422d7 --- /dev/null +++ b/src/OpenDDD/Infrastructure/Events/InMemory/InMemorySubscription.cs @@ -0,0 +1,26 @@ +using OpenDDD.Infrastructure.Events.Base; + +namespace OpenDDD.Infrastructure.Events.InMemory +{ + public class InMemorySubscription : Subscription + { + public Func MessageHandler { get; } + + public InMemorySubscription(string topic, string consumerGroup, Func messageHandler) + : base(topic, consumerGroup, null) + { + if (string.IsNullOrWhiteSpace(topic)) + throw new ArgumentException("Topic cannot be null or empty.", nameof(topic)); + + if (string.IsNullOrWhiteSpace(consumerGroup)) + throw new ArgumentException("Consumer group cannot be null or empty.", nameof(consumerGroup)); + + MessageHandler = messageHandler; + } + + public override ValueTask DisposeAsync() + { + return ValueTask.CompletedTask; + } + } +} diff --git a/src/OpenDDD/Infrastructure/Events/Kafka/Factories/IKafkaConsumerFactory.cs b/src/OpenDDD/Infrastructure/Events/Kafka/Factories/IKafkaConsumerFactory.cs new file mode 100644 index 0000000..9410f36 --- /dev/null +++ b/src/OpenDDD/Infrastructure/Events/Kafka/Factories/IKafkaConsumerFactory.cs @@ -0,0 +1,7 @@ +namespace OpenDDD.Infrastructure.Events.Kafka.Factories +{ + public interface IKafkaConsumerFactory + { + KafkaConsumer Create(string consumerGroup); + } +} diff --git a/src/OpenDDD/Infrastructure/Events/Kafka/Factories/KafkaConsumer.cs b/src/OpenDDD/Infrastructure/Events/Kafka/Factories/KafkaConsumer.cs new file mode 100644 index 0000000..39e1c83 --- /dev/null +++ b/src/OpenDDD/Infrastructure/Events/Kafka/Factories/KafkaConsumer.cs @@ -0,0 +1,171 @@ +using Microsoft.Extensions.Logging; +using Confluent.Kafka; + +namespace OpenDDD.Infrastructure.Events.Kafka.Factories +{ + public class KafkaConsumer : IAsyncDisposable + { + private readonly IConsumer _consumer; + private readonly ILogger _logger; + private readonly CancellationTokenSource _cts = new(); + private Task? _consumerTask; + private bool _disposed; + + public string ConsumerGroup { get; } + public HashSet SubscribedTopics { get; } = new(); + + public KafkaConsumer(IConsumer consumer, string consumerGroup, ILogger logger) + { + _consumer = consumer ?? throw new ArgumentNullException(nameof(consumer)); + ConsumerGroup = consumerGroup ?? throw new ArgumentNullException(nameof(consumerGroup)); + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public void Subscribe(string topic) + { + if (SubscribedTopics.Contains(topic)) return; + + _consumer.Subscribe(topic); + SubscribedTopics.Add(topic); + } + + public void StartProcessing(Func messageHandler, CancellationToken globalToken) + { + if (_consumerTask != null) return; + + var linkedCts = CancellationTokenSource.CreateLinkedTokenSource(globalToken, _cts.Token); + _consumerTask = Task.Run(() => ConsumeLoop(messageHandler, linkedCts.Token), linkedCts.Token); + } + + private async Task ConsumeLoop(Func messageHandler, CancellationToken token) + { + while (!token.IsCancellationRequested) + { + ConsumeResult? result = null; + + try + { + result = _consumer.Consume(token); + if (result?.Message == null) continue; + + await messageHandler(result.Message.Value, token); + _consumer.Commit(result); + } + catch (OperationCanceledException) + { + break; + } + catch (Exception ex) + { + _logger.LogError(ex, "Error in Kafka consumer loop. Retrying..."); + + try + { + await Task.Delay(5000, token); + } + catch (TaskCanceledException) + { + break; + } + + if (result != null) + { + _consumer.Seek(new TopicPartitionOffset(result.TopicPartition, result.Offset)); + } + } + } + } + + public async Task StopProcessingAsync() + { + _cts.Cancel(); + + if (_consumerTask != null) + { + await _consumerTask; + _consumerTask = null; + } + + await CommitInitialOffsetsAsync(); + + _consumer.Close(); + _consumer.Dispose(); + } + + private async Task CommitInitialOffsetsAsync() + { + foreach (var partition in _consumer.Assignment) + { + var committedOffsets = _consumer.Committed(new[] { partition }, TimeSpan.FromSeconds(5)); + var currentOffset = committedOffsets?.FirstOrDefault()?.Offset; + + if (currentOffset == null || currentOffset == Offset.Unset) + { + var watermarkOffsets = _consumer.QueryWatermarkOffsets(partition, TimeSpan.FromSeconds(5)); + + if (watermarkOffsets.High == 0) // No messages ever written + { + _logger.LogDebug("Partition {Partition} has no messages. Sending placeholder message.", partition); + + using var producer = new ProducerBuilder(new ProducerConfig { BootstrapServers = "localhost:9092" }).Build(); + var deliveryResult = await producer.ProduceAsync( + new TopicPartition(partition.Topic, partition.Partition), // Ensure correct partition + new Message { Value = "__init__" }); + + producer.Flush(TimeSpan.FromSeconds(5)); + + _logger.LogDebug("Sent __init__ message to partition {Partition}. 
Offset: {Offset}", partition, deliveryResult.Offset); + + var timeout = TimeSpan.FromSeconds(5); + var startTime = DateTime.UtcNow; + + while (DateTime.UtcNow - startTime < timeout) + { + await Task.Delay(100); + watermarkOffsets = _consumer.QueryWatermarkOffsets(partition, TimeSpan.FromSeconds(5)); + + if (watermarkOffsets.High > 0) + break; + } + + if (watermarkOffsets.High == 0) + { + throw new TimeoutException($"Kafka did not register the __init__ message for partition {partition} within 5 seconds."); + } + + // Ensure the new offset for the partition is exactly 1 + var initialOffset = watermarkOffsets.High; + if (initialOffset.Value != 1) + { + throw new Exception($"Expected initial offset to be 1, but got {initialOffset.Value}"); + } + + _consumer.Commit(new[] { new TopicPartitionOffset(partition, initialOffset) }); + + _logger.LogDebug("Committed initial offset {Offset} for partition {Partition}", initialOffset, partition); + } + } + } + } + + public ValueTask DisposeAsync() + { + if (_disposed) return ValueTask.CompletedTask; + _disposed = true; + + try + { + _consumer.Close(); + } + catch (ObjectDisposedException) + { + _logger.LogWarning("Attempted to close a disposed Kafka consumer."); + } + + _consumer.Dispose(); + _cts.Dispose(); + + return ValueTask.CompletedTask; + } + } +} diff --git a/src/OpenDDD/Infrastructure/Events/Kafka/Factories/KafkaConsumerFactory.cs b/src/OpenDDD/Infrastructure/Events/Kafka/Factories/KafkaConsumerFactory.cs new file mode 100644 index 0000000..4ac12e7 --- /dev/null +++ b/src/OpenDDD/Infrastructure/Events/Kafka/Factories/KafkaConsumerFactory.cs @@ -0,0 +1,41 @@ +using Confluent.Kafka; +using Microsoft.Extensions.Logging; + +namespace OpenDDD.Infrastructure.Events.Kafka.Factories +{ + public class KafkaConsumerFactory : IKafkaConsumerFactory + { + private readonly string _bootstrapServers; + private readonly ILogger _logger; + + public KafkaConsumerFactory(string bootstrapServers, ILogger logger) + { + if (string.IsNullOrWhiteSpace(bootstrapServers)) + throw new ArgumentException("Kafka bootstrap servers must be configured.", nameof(bootstrapServers)); + + _bootstrapServers = bootstrapServers; + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public virtual KafkaConsumer Create(string consumerGroup) + { + var consumerConfig = new ConsumerConfig + { + BootstrapServers = _bootstrapServers, + ClientId = $"OpenDDD-{Guid.NewGuid()}", + GroupId = consumerGroup, + PartitionAssignmentStrategy = PartitionAssignmentStrategy.RoundRobin, + EnableAutoCommit = false, + AutoOffsetReset = AutoOffsetReset.Latest, + MaxPollIntervalMs = 300000, // Maximum time the consumer may take to process a message before Kafka removes it from the group + SessionTimeoutMs = 45000, // Time before Kafka assumes the consumer is dead if it stops sending heartbeats + HeartbeatIntervalMs = 3000 // Frequency of heartbeats sent to Kafka + }; + + var consumer = new ConsumerBuilder(consumerConfig) + .SetValueDeserializer(Deserializers.Utf8) + .Build(); + return new KafkaConsumer(consumer, consumerGroup, _logger); + } + } +} diff --git a/src/OpenDDD/Infrastructure/Events/Kafka/KafkaMessagingProvider.cs b/src/OpenDDD/Infrastructure/Events/Kafka/KafkaMessagingProvider.cs index 4d75264..189c3d9 100644 --- a/src/OpenDDD/Infrastructure/Events/Kafka/KafkaMessagingProvider.cs +++ b/src/OpenDDD/Infrastructure/Events/Kafka/KafkaMessagingProvider.cs @@ -1,7 +1,7 @@ -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using OpenDDD.API.Options; -using OpenDDD.Infrastructure.Events.Kafka.Options; +using System.Collections.Concurrent; +using Microsoft.Extensions.Logging; +using OpenDDD.Infrastructure.Events.Base; +using OpenDDD.Infrastructure.Events.Kafka.Factories; using Confluent.Kafka; using Confluent.Kafka.Admin; @@ -11,116 +11,152 @@ public class KafkaMessagingProvider : IMessagingProvider, IAsyncDisposable { private readonly IProducer _producer; private readonly IAdminClient _adminClient; - private readonly OpenDddKafkaOptions _options; + private readonly bool _autoCreateTopics; + private readonly IKafkaConsumerFactory _consumerFactory; private readonly ILogger _logger; + private readonly ConcurrentDictionary _subscriptions = new(); + private readonly ConcurrentDictionary _topicCache = new(); + private readonly TimeSpan _cacheExpiration = TimeSpan.FromSeconds(600); + private readonly CancellationTokenSource _cts = new(); + private bool _disposed; + + public KafkaMessagingProvider( + IAdminClient adminClient, + IProducer producer, + IKafkaConsumerFactory consumerFactory, + bool autoCreateTopics, + ILogger logger) + { + _adminClient = adminClient ?? throw new ArgumentNullException(nameof(adminClient)); + _producer = producer ?? throw new ArgumentNullException(nameof(producer)); + _consumerFactory = consumerFactory ?? throw new ArgumentNullException(nameof(consumerFactory)); + _autoCreateTopics = autoCreateTopics; + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } - public KafkaMessagingProvider(IOptions options, ILogger logger) + public async Task SubscribeAsync( + string topic, + string consumerGroup, + Func messageHandler, + CancellationToken cancellationToken = default) { - var openDddOptions = options?.Value ?? throw new ArgumentNullException(nameof(options)); - _options = openDddOptions.Kafka ??
throw new InvalidOperationException("Kafka settings are missing in OpenDddOptions."); + if (string.IsNullOrWhiteSpace(topic)) + throw new ArgumentException("Topic cannot be null or empty.", nameof(topic)); - if (string.IsNullOrWhiteSpace(_options.BootstrapServers)) - throw new InvalidOperationException("Kafka bootstrap servers must be configured."); + if (string.IsNullOrWhiteSpace(consumerGroup)) + throw new ArgumentException("Consumer group cannot be null or empty.", nameof(consumerGroup)); - var producerConfig = new ProducerConfig - { - BootstrapServers = _options.BootstrapServers, - ClientId = "OpenDDD" - }; + if (messageHandler is null) + throw new ArgumentNullException(nameof(messageHandler)); - var adminConfig = new AdminClientConfig - { - BootstrapServers = _options.BootstrapServers, - ClientId = "OpenDDD" - }; + await EnsureTopicExistsAsync(topic, cancellationToken); - _producer = new ProducerBuilder(producerConfig).Build(); - _adminClient = new AdminClientBuilder(adminConfig).Build(); + var consumer = _consumerFactory.Create(consumerGroup); + consumer.Subscribe(topic); + consumer.StartProcessing(messageHandler, _cts.Token); - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + var subscription = new KafkaSubscription(topic, consumerGroup, consumer); + _subscriptions[subscription.Id] = subscription; + + _logger.LogDebug("Subscribed a new consumer to Kafka topic '{Topic}' with consumer group '{ConsumerGroup}', Subscription ID: {SubscriptionId}", topic, consumerGroup, subscription.Id); + return subscription; } - private async Task EnsureTopicExistsAsync(string topic, CancellationToken cancellationToken) + public async Task UnsubscribeAsync(ISubscription subscription, CancellationToken cancellationToken = default) { - try - { - var metadata = _adminClient.GetMetadata(topic, TimeSpan.FromSeconds(5)); - if (metadata.Topics.Any(t => t.Topic == topic)) return; // Topic already exists + if (subscription == null) + throw new ArgumentNullException(nameof(subscription)); - _logger.LogInformation("Creating Kafka topic: {Topic}", topic); - await _adminClient.CreateTopicsAsync(new[] - { - new TopicSpecification { Name = topic, NumPartitions = 1, ReplicationFactor = 1 } - }); - } - catch (Exception ex) + if (!_subscriptions.TryRemove(subscription.Id, out var removedSubscription)) { - _logger.LogWarning("Could not check or create Kafka topic {Topic}: {Message}", topic, ex.Message); + _logger.LogWarning("No active subscription found with ID {SubscriptionId}", subscription.Id); + return; } + + _logger.LogDebug("Unsubscribing from Kafka topic '{Topic}' with consumer group '{ConsumerGroup}', Subscription ID: {SubscriptionId}", removedSubscription.Topic, removedSubscription.ConsumerGroup, removedSubscription.Id); + + await removedSubscription.Consumer.StopProcessingAsync(); + await removedSubscription.DisposeAsync(); } public async Task PublishAsync(string topic, string message, CancellationToken cancellationToken = default) { + if (string.IsNullOrWhiteSpace(topic)) + throw new ArgumentException("Topic cannot be null or empty.", nameof(topic)); + + if (string.IsNullOrWhiteSpace(message)) + throw new ArgumentException("Message cannot be null or empty.", nameof(message)); + await EnsureTopicExistsAsync(topic, cancellationToken); await _producer.ProduceAsync(topic, new Message { Value = message }, cancellationToken); - _logger.LogInformation("Published message to Kafka topic '{Topic}'", topic); + _logger.LogDebug("Published message to Kafka topic '{Topic}'", topic); } - public async 
Task SubscribeAsync(string topic, string consumerGroup, Func messageHandler, CancellationToken cancellationToken = default) + private async Task EnsureTopicExistsAsync(string topic, CancellationToken cancellationToken) { - await EnsureTopicExistsAsync(topic, cancellationToken); - - var consumerConfig = new ConsumerConfig + if (_topicCache.TryGetValue(topic, out var lastChecked) && DateTime.UtcNow - lastChecked < _cacheExpiration) { - BootstrapServers = _options.BootstrapServers, - ClientId = "OpenDDD", - GroupId = consumerGroup, - EnableAutoCommit = false, - AutoOffsetReset = AutoOffsetReset.Earliest - }; - - var consumer = new ConsumerBuilder(consumerConfig) - .SetValueDeserializer(Deserializers.Utf8) - .Build(); + _logger.LogDebug("Skipping topic check for '{Topic}' (cached result).", topic); + return; + } - consumer.Subscribe(topic); + var metadata = _adminClient.GetMetadata(TimeSpan.FromSeconds(5)); - _logger.LogInformation("Subscribed to Kafka topic '{Topic}' with consumer group '{ConsumerGroup}'", topic, consumerGroup); + if (metadata.Topics.Any(t => t.Topic == topic)) + { + _logger.LogDebug("Topic '{Topic}' already exists.", topic); + _topicCache[topic] = DateTime.UtcNow; + return; + } - _ = Task.Run(async () => + if (_autoCreateTopics) { - try + _logger.LogDebug("Creating Kafka topic: {Topic}", topic); + await _adminClient.CreateTopicsAsync(new[] + { + new TopicSpecification { Name = topic, NumPartitions = 2, ReplicationFactor = 1 } + }, null); + + _topicCache[topic] = DateTime.UtcNow; + + // Wait for the topic to be available + for (int i = 0; i < 30; i++) { - while (!cancellationToken.IsCancellationRequested) + await Task.Delay(500, cancellationToken); + metadata = _adminClient.GetMetadata(TimeSpan.FromSeconds(1)); + if (metadata.Topics.Any(t => t.Topic == topic)) { - // Seems like consume don't respect cancellation token. - // See: https://github.com/confluentinc/confluent-kafka-dotnet/issues/1085 - var result = consumer.Consume(cancellationToken); - if (result.Message != null) - { - _logger.LogInformation("Received message from Kafka: {Message}", result.Message.Value); - await messageHandler(result.Message.Value, cancellationToken); - consumer.Commit(result); - _logger.LogInformation("Message processed and offset committed: {Offset}", result.Offset); - } + _logger.LogDebug("Kafka topic '{Topic}' is now available.", topic); + return; } } - catch (OperationCanceledException) - { - // Handle cancellation gracefully - } - finally - { - consumer.Close(); - } - }, cancellationToken); + throw new KafkaException(new Error(ErrorCode.UnknownTopicOrPart, $"Failed to create topic '{topic}' within timeout.")); + } + + throw new InvalidOperationException($"Topic '{topic}' does not exist. 
Enable 'autoCreateTopics' to create topics automatically."); } public async ValueTask DisposeAsync() { - _producer?.Dispose(); - _adminClient?.Dispose(); + if (_disposed) return; + _disposed = true; + + _logger.LogDebug("Disposing KafkaMessagingProvider..."); + + _cts.Cancel(); + + var disposeTasks = _subscriptions.Values.Select(async sub => + { + await sub.Consumer.StopProcessingAsync(); + await sub.DisposeAsync(); + }).ToList(); + + await Task.WhenAll(disposeTasks); + + _subscriptions.Clear(); + _producer.Dispose(); + _adminClient.Dispose(); } } } diff --git a/src/OpenDDD/Infrastructure/Events/Kafka/KafkaSubscription.cs b/src/OpenDDD/Infrastructure/Events/Kafka/KafkaSubscription.cs new file mode 100644 index 0000000..59577d4 --- /dev/null +++ b/src/OpenDDD/Infrastructure/Events/Kafka/KafkaSubscription.cs @@ -0,0 +1,16 @@ +using OpenDDD.Infrastructure.Events.Base; +using OpenDDD.Infrastructure.Events.Kafka.Factories; + +namespace OpenDDD.Infrastructure.Events.Kafka +{ + public class KafkaSubscription : Subscription + { + public KafkaSubscription(string topic, string consumerGroup, KafkaConsumer consumer) + : base(topic, consumerGroup, consumer) { } + + public override async ValueTask DisposeAsync() + { + await Consumer.DisposeAsync(); + } + } +} diff --git a/src/OpenDDD/Infrastructure/Events/Kafka/Options/OpenDddKafkaOptions.cs b/src/OpenDDD/Infrastructure/Events/Kafka/Options/OpenDddKafkaOptions.cs index 44dd974..fa6ba03 100644 --- a/src/OpenDDD/Infrastructure/Events/Kafka/Options/OpenDddKafkaOptions.cs +++ b/src/OpenDDD/Infrastructure/Events/Kafka/Options/OpenDddKafkaOptions.cs @@ -3,5 +3,6 @@ public class OpenDddKafkaOptions { public string BootstrapServers { get; set; } = string.Empty; + public bool AutoCreateTopics { get; set; } = true; } } diff --git a/src/OpenDDD/Infrastructure/Events/RabbitMq/Factories/IRabbitMqConsumerFactory.cs b/src/OpenDDD/Infrastructure/Events/RabbitMq/Factories/IRabbitMqConsumerFactory.cs new file mode 100644 index 0000000..7241f30 --- /dev/null +++ b/src/OpenDDD/Infrastructure/Events/RabbitMq/Factories/IRabbitMqConsumerFactory.cs @@ -0,0 +1,9 @@ +using RabbitMQ.Client; + +namespace OpenDDD.Infrastructure.Events.RabbitMq.Factories +{ + public interface IRabbitMqConsumerFactory + { + RabbitMqConsumer CreateConsumer(IChannel channel, Func messageHandler); + } +} diff --git a/src/OpenDDD/Infrastructure/Events/RabbitMq/Factories/RabbitMqConsumerFactory.cs b/src/OpenDDD/Infrastructure/Events/RabbitMq/Factories/RabbitMqConsumerFactory.cs new file mode 100644 index 0000000..f92543a --- /dev/null +++ b/src/OpenDDD/Infrastructure/Events/RabbitMq/Factories/RabbitMqConsumerFactory.cs @@ -0,0 +1,23 @@ +using Microsoft.Extensions.Logging; +using RabbitMQ.Client; + +namespace OpenDDD.Infrastructure.Events.RabbitMq.Factories +{ + public class RabbitMqConsumerFactory : IRabbitMqConsumerFactory + { + private readonly ILogger _logger; + + public RabbitMqConsumerFactory(ILogger logger) + { + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public RabbitMqConsumer CreateConsumer(IChannel channel, Func messageHandler) + { + if (channel == null) throw new ArgumentNullException(nameof(channel)); + if (messageHandler == null) throw new ArgumentNullException(nameof(messageHandler)); + + return new RabbitMqConsumer(channel, messageHandler, _logger); + } + } +} diff --git a/src/OpenDDD/Infrastructure/Events/RabbitMq/Options/OpenDddRabbitMqOptions.cs b/src/OpenDDD/Infrastructure/Events/RabbitMq/Options/OpenDddRabbitMqOptions.cs index 7944563..9d2ab6b 100644 --- a/src/OpenDDD/Infrastructure/Events/RabbitMq/Options/OpenDddRabbitMqOptions.cs +++ b/src/OpenDDD/Infrastructure/Events/RabbitMq/Options/OpenDddRabbitMqOptions.cs @@ -7,5 +7,6 @@ public class OpenDddRabbitMqOptions public string Username { get; set; } = "guest"; public string Password { get; set; } = "guest"; public string VirtualHost { get; set; } = "/"; + public bool AutoCreateTopics { get; set; } = true; } } diff --git a/src/OpenDDD/Infrastructure/Events/RabbitMq/RabbitMqCustomAsyncConsumer.cs b/src/OpenDDD/Infrastructure/Events/RabbitMq/RabbitMqConsumer.cs similarity index 63% rename from src/OpenDDD/Infrastructure/Events/RabbitMq/RabbitMqCustomAsyncConsumer.cs rename to src/OpenDDD/Infrastructure/Events/RabbitMq/RabbitMqConsumer.cs index 5a766f5..e5008d6 100644 --- a/src/OpenDDD/Infrastructure/Events/RabbitMq/RabbitMqCustomAsyncConsumer.cs +++ b/src/OpenDDD/Infrastructure/Events/RabbitMq/RabbitMqConsumer.cs @@ -5,19 +5,39 @@ namespace OpenDDD.Infrastructure.Events.RabbitMq { - public class RabbitMqCustomAsyncConsumer : IAsyncBasicConsumer + public class RabbitMqConsumer : IAsyncBasicConsumer, IAsyncDisposable { private readonly Func _messageHandler; private readonly ILogger _logger; + private readonly IChannel _channel; + private string? _consumerTag; + private bool _disposed; - public RabbitMqCustomAsyncConsumer(IChannel channel, Func messageHandler, ILogger logger) + public RabbitMqConsumer( + IChannel channel, + Func messageHandler, + ILogger logger) { - Channel = channel ?? throw new ArgumentNullException(nameof(channel)); + _channel = channel ?? throw new ArgumentNullException(nameof(channel)); _messageHandler = messageHandler ?? throw new ArgumentNullException(nameof(messageHandler)); _logger = logger ?? throw new ArgumentNullException(nameof(logger)); } - public IChannel? Channel { get; } + public IChannel Channel => _channel; + + public async Task StartConsumingAsync(string queueName, CancellationToken cancellationToken) + { + _consumerTag = await _channel.BasicConsumeAsync(queueName, autoAck: false, this, cancellationToken); + _logger.LogInformation("Started consuming messages from queue '{QueueName}' with consumer tag '{ConsumerTag}'", queueName, _consumerTag); + } + + public async Task StopConsumingAsync(CancellationToken cancellationToken) + { + if (_consumerTag is null || _disposed) return; + + await _channel.BasicCancelAsync(_consumerTag, false, cancellationToken); + _logger.LogInformation("Stopped consuming messages for consumer tag '{ConsumerTag}'", _consumerTag); + } public async Task HandleBasicDeliverAsync( string consumerTag, @@ -67,5 +87,14 @@ public Task HandleChannelShutdownAsync(object channel, ShutdownEventArgs reason) _logger.LogWarning("Channel was shut down. 
Reason: {Reason}", reason.ReplyText); return Task.CompletedTask; } + + public async ValueTask DisposeAsync() + { + if (_disposed) + return; + + _disposed = true; + await StopConsumingAsync(CancellationToken.None); + } } } diff --git a/src/OpenDDD/Infrastructure/Events/RabbitMq/RabbitMqMessagingProvider.cs b/src/OpenDDD/Infrastructure/Events/RabbitMq/RabbitMqMessagingProvider.cs index a3b8876..fbb5eb4 100644 --- a/src/OpenDDD/Infrastructure/Events/RabbitMq/RabbitMqMessagingProvider.cs +++ b/src/OpenDDD/Infrastructure/Events/RabbitMq/RabbitMqMessagingProvider.cs @@ -1,95 +1,185 @@ -using System.Text; +using System.Collections.Concurrent; +using System.Text; using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using OpenDDD.API.Options; -using OpenDDD.Infrastructure.Events.RabbitMq.Options; +using OpenDDD.Infrastructure.Events.Base; +using OpenDDD.Infrastructure.Events.RabbitMq.Factories; using RabbitMQ.Client; +using RabbitMQ.Client.Exceptions; namespace OpenDDD.Infrastructure.Events.RabbitMq { public class RabbitMqMessagingProvider : IMessagingProvider, IAsyncDisposable { - private readonly ConnectionFactory _factory; + private readonly IConnectionFactory _connectionFactory; + private readonly IRabbitMqConsumerFactory _consumerFactory; + private readonly ILogger _logger; + private readonly bool _autoCreateTopics; private IConnection? _connection; private IChannel? _channel; - private readonly OpenDddRabbitMqOptions _options; - private readonly ILogger _logger; - + private readonly ConcurrentDictionary _subscriptions = new(); + private readonly ConcurrentDictionary _topicCache = new(); + private readonly TimeSpan _cacheExpiration = TimeSpan.FromSeconds(600); + private bool _disposed; + public RabbitMqMessagingProvider( - IOptions options, + IConnectionFactory factory, + IRabbitMqConsumerFactory consumerFactory, + bool autoCreateTopics, ILogger logger) { - var openDddOptions = options?.Value ?? throw new ArgumentNullException(nameof(options)); - _options = openDddOptions.RabbitMq ?? throw new InvalidOperationException("RabbitMQ settings are missing in OpenDddOptions."); + _connectionFactory = factory ?? throw new ArgumentNullException(nameof(factory)); + _consumerFactory = consumerFactory ?? throw new ArgumentNullException(nameof(consumerFactory)); + _autoCreateTopics = autoCreateTopics; + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } - if (string.IsNullOrWhiteSpace(_options.HostName)) - { - throw new InvalidOperationException("RabbitMQ host is missing."); - } + public async Task SubscribeAsync(string topic, string consumerGroup, Func messageHandler, CancellationToken cancellationToken = default) + { + if (string.IsNullOrWhiteSpace(topic)) + throw new ArgumentException("Topic cannot be null or empty.", nameof(topic)); - _factory = new ConnectionFactory - { - HostName = _options.HostName, - Port = _options.Port, - UserName = _options.Username, - Password = _options.Password, - VirtualHost = _options.VirtualHost - }; + if (string.IsNullOrWhiteSpace(consumerGroup)) + throw new ArgumentException("Consumer group cannot be null or empty.", nameof(consumerGroup)); - _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + if (messageHandler == null) + throw new ArgumentNullException(nameof(messageHandler), "Message handler cannot be null."); + + await EnsureConnectedAsync(cancellationToken); + await EnsureTopicExistsAsync(topic, cancellationToken); + + var queueName = $"{consumerGroup}.{topic}"; + await _channel.QueueDeclareAsync(queueName, durable: true, exclusive: false, autoDelete: false, cancellationToken: cancellationToken); + await _channel.QueueBindAsync(queueName, topic, "", cancellationToken: cancellationToken); + + var consumer = _consumerFactory.CreateConsumer(_channel, messageHandler); + await consumer.StartConsumingAsync(queueName, cancellationToken); + + var subscription = new RabbitMqSubscription(topic, consumerGroup, consumer); + _subscriptions[subscription.Id] = subscription; + + _logger.LogDebug("Subscribed to RabbitMQ topic '{Topic}' with consumer group '{ConsumerGroup}', Subscription ID: {SubscriptionId}", topic, consumerGroup, subscription.Id); + + return subscription; } - private async Task EnsureConnectedAsync(CancellationToken cancellationToken) + public async Task UnsubscribeAsync(ISubscription subscription, CancellationToken cancellationToken = default) { - if (_connection is { IsOpen: true } && _channel is { IsOpen: true }) return; + if (subscription == null) + throw new ArgumentNullException(nameof(subscription)); - _connection = await _factory.CreateConnectionAsync(cancellationToken); - _channel = await _connection.CreateChannelAsync(null, cancellationToken); + if (subscription is not RabbitMqSubscription rabbitSubscription || !_subscriptions.TryRemove(rabbitSubscription.Id, out _)) + { + _logger.LogWarning("No active subscription found with ID {SubscriptionId}", subscription.Id); + return; + } + + _logger.LogDebug("Unsubscribing from RabbitMQ topic '{Topic}' with consumer group '{ConsumerGroup}', Subscription ID: {SubscriptionId}", + rabbitSubscription.Topic, rabbitSubscription.ConsumerGroup, rabbitSubscription.Id); + + await rabbitSubscription.Consumer.StopConsumingAsync(cancellationToken); + await rabbitSubscription.DisposeAsync(); } public async Task PublishAsync(string topic, string message, CancellationToken cancellationToken = default) { - await EnsureConnectedAsync(cancellationToken); + if (string.IsNullOrWhiteSpace(topic)) + throw new ArgumentException("Topic cannot be null or empty.", nameof(topic)); - if (_channel is null) throw new InvalidOperationException("RabbitMQ channel is not available."); + if (string.IsNullOrWhiteSpace(message)) + throw new ArgumentException("Message cannot be null or empty.", nameof(message)); - await _channel.ExchangeDeclareAsync(topic, ExchangeType.Fanout, durable: true, autoDelete: false, cancellationToken: cancellationToken); + await EnsureConnectedAsync(cancellationToken); + await EnsureTopicExistsAsync(topic, cancellationToken); var body = Encoding.UTF8.GetBytes(message); await _channel.BasicPublishAsync(topic, "", body, cancellationToken: cancellationToken); - _logger.LogInformation("Published message to topic '{Topic}'", topic); + _logger.LogDebug("Published message to topic '{Topic}'", topic); } - - public async Task SubscribeAsync(string topic, string consumerGroup, Func messageHandler, CancellationToken cancellationToken = default) + + private async Task EnsureConnectedAsync(CancellationToken cancellationToken) { - await EnsureConnectedAsync(cancellationToken); + if (_connection is { IsOpen: true } && _channel is { IsOpen: true }) return; - if (_channel is null) throw new 
InvalidOperationException("RabbitMQ channel is not available."); + _connection = await _connectionFactory.CreateConnectionAsync(cancellationToken); + _channel = await _connection.CreateChannelAsync(null, cancellationToken); + } + + private async Task EnsureTopicExistsAsync(string topic, CancellationToken cancellationToken) + { + if (_topicCache.TryGetValue(topic, out var lastChecked) && DateTime.UtcNow - lastChecked < _cacheExpiration) + { + _logger.LogDebug("Skipping exchange check for '{Topic}' (cached result).", topic); + return; + } - await _channel.ExchangeDeclareAsync(topic, ExchangeType.Fanout, durable: true, autoDelete: false, cancellationToken: cancellationToken); - var queueName = $"{consumerGroup}.{topic}"; - await _channel.QueueDeclareAsync(queueName, durable: true, exclusive: false, autoDelete: false, cancellationToken: cancellationToken); - await _channel.QueueBindAsync(queueName, topic, "", cancellationToken: cancellationToken); + bool exchangeExists = await ExchangeExistsAsync(topic, cancellationToken); + + if (exchangeExists) + { + _topicCache[topic] = DateTime.UtcNow; + return; + } - var consumer = new RabbitMqCustomAsyncConsumer(_channel, messageHandler, _logger); - await _channel.BasicConsumeAsync(queueName, autoAck: false, consumer, cancellationToken); + if (_autoCreateTopics) + { + await _channel.ExchangeDeclareAsync(topic, ExchangeType.Fanout, durable: true, autoDelete: false, cancellationToken: cancellationToken); + _logger.LogInformation("Auto-created exchange (topic): {Topic}", topic); + _topicCache[topic] = DateTime.UtcNow; + return; + } + + throw new InvalidOperationException($"Topic '{topic}' does not exist. Enable 'autoCreateTopics' to create topics automatically."); + } + + private async Task ExchangeExistsAsync(string exchange, CancellationToken cancellationToken) + { + try + { + await _channel!.ExchangeDeclarePassiveAsync(exchange, cancellationToken); + return true; + } + catch (OperationInterruptedException ex) when (ex.ShutdownReason?.ReplyCode == 404) + { + _logger.LogDebug("Exchange '{Exchange}' does not exist.", exchange); - _logger.LogInformation("Subscribed to RabbitMQ topic '{Topic}' with consumer group '{ConsumerGroup}'", topic, consumerGroup); + await EnsureConnectedAsync(cancellationToken); + + return false; + } + catch (Exception ex) + { + _logger.LogError(ex, "Unexpected error while checking if exchange '{Exchange}' exists.", exchange); + throw; + } } public async ValueTask DisposeAsync() { + if (_disposed) return; + _disposed = true; + + _logger.LogDebug("Disposing RabbitMqMessagingProvider..."); + + foreach (var subscription in _subscriptions.Values) + { + await UnsubscribeAsync(subscription); + } + + _subscriptions.Clear(); + if (_channel is not null) { + _logger.LogDebug("Disposing RabbitMQ channel..."); await _channel.CloseAsync(); - _channel.Dispose(); + await _channel.DisposeAsync(); } if (_connection is not null) { + _logger.LogDebug("Disposing RabbitMQ connection..."); await _connection.CloseAsync(); - _connection.Dispose(); + await _connection.DisposeAsync(); } } } diff --git a/src/OpenDDD/Infrastructure/Events/RabbitMq/RabbitMqSubscription.cs b/src/OpenDDD/Infrastructure/Events/RabbitMq/RabbitMqSubscription.cs new file mode 100644 index 0000000..5766a1b --- /dev/null +++ b/src/OpenDDD/Infrastructure/Events/RabbitMq/RabbitMqSubscription.cs @@ -0,0 +1,16 @@ +using OpenDDD.Infrastructure.Events.Base; + +namespace OpenDDD.Infrastructure.Events.RabbitMq +{ + public class RabbitMqSubscription : Subscription + { + public 
RabbitMqSubscription(string topic, string consumerGroup, RabbitMqConsumer consumer) + : base(topic, consumerGroup, consumer) { } + + public override async ValueTask DisposeAsync() + { + await Consumer.StopConsumingAsync(CancellationToken.None); + await Consumer.DisposeAsync(); + } + } +} diff --git a/src/OpenDDD/Infrastructure/Persistence/EfCore/Base/OpenDddDbContextBase.cs b/src/OpenDDD/Infrastructure/Persistence/EfCore/Base/OpenDddDbContextBase.cs index d4f9061..5c0be94 100644 --- a/src/OpenDDD/Infrastructure/Persistence/EfCore/Base/OpenDddDbContextBase.cs +++ b/src/OpenDDD/Infrastructure/Persistence/EfCore/Base/OpenDddDbContextBase.cs @@ -46,7 +46,7 @@ protected override void OnModelCreating(ModelBuilder modelBuilder) public void ApplyConfigurations(ModelBuilder modelBuilder) { - var configurationTypes = TypeScanner.GetRelevantTypes() + var configurationTypes = TypeScanner.GetRelevantTypes(excludeTestNamespaces: false) .Where(t => t.IsClass && !t.IsAbstract && t.BaseType != null && (t.BaseType.IsGenericType && diff --git a/src/OpenDDD/Infrastructure/Persistence/Storage/IKeyValueStorage.cs b/src/OpenDDD/Infrastructure/Persistence/Storage/IKeyValueStorage.cs index 452e9a5..f32477b 100644 --- a/src/OpenDDD/Infrastructure/Persistence/Storage/IKeyValueStorage.cs +++ b/src/OpenDDD/Infrastructure/Persistence/Storage/IKeyValueStorage.cs @@ -6,5 +6,6 @@ public interface IKeyValueStorage Task GetAsync(string key, CancellationToken ct); Task> GetByPrefixAsync(string keyPrefix, CancellationToken ct); Task RemoveAsync(string key, CancellationToken ct); + Task ClearAsync(CancellationToken ct); } } diff --git a/src/OpenDDD/Infrastructure/Persistence/Storage/InMemory/InMemoryKeyValueStorage.cs b/src/OpenDDD/Infrastructure/Persistence/Storage/InMemory/InMemoryKeyValueStorage.cs index ad0a729..c0f120a 100644 --- a/src/OpenDDD/Infrastructure/Persistence/Storage/InMemory/InMemoryKeyValueStorage.cs +++ b/src/OpenDDD/Infrastructure/Persistence/Storage/InMemory/InMemoryKeyValueStorage.cs @@ -43,5 +43,12 @@ public Task RemoveAsync(string key, CancellationToken ct) _logger.LogDebug("Removed value with key '{Key}'", key); return Task.CompletedTask; } + + public Task ClearAsync(CancellationToken ct = default) + { + _storage.Clear(); + _logger.LogDebug("Cleared all in-memory storage."); + return Task.CompletedTask; + } } } diff --git a/src/OpenDDD/Infrastructure/Repository/OpenDdd/Postgres/PostgresOpenDddRepository.cs b/src/OpenDDD/Infrastructure/Repository/OpenDdd/Postgres/PostgresOpenDddRepository.cs index 705a334..aae13a7 100644 --- a/src/OpenDDD/Infrastructure/Repository/OpenDdd/Postgres/PostgresOpenDddRepository.cs +++ b/src/OpenDDD/Infrastructure/Repository/OpenDdd/Postgres/PostgresOpenDddRepository.cs @@ -4,8 +4,8 @@ using OpenDDD.Infrastructure.Repository.OpenDdd.Base; using OpenDDD.API.Extensions; using OpenDDD.Infrastructure.Persistence.OpenDdd.DatabaseSession.Postgres; -using Npgsql; using OpenDDD.Infrastructure.Persistence.OpenDdd.Expressions; +using Npgsql; namespace OpenDDD.Infrastructure.Repository.OpenDdd.Postgres { @@ -22,7 +22,7 @@ public PostgresOpenDddRepository(PostgresDatabaseSession session, IAggregateSeri Session = session ?? 
throw new ArgumentNullException(nameof(session)); _tableName = typeof(TAggregateRoot).Name.ToLower().Pluralize(); } - + public override async Task GetAsync(TId id, CancellationToken ct) { var entity = await FindAsync(id, ct); diff --git a/src/OpenDDD/Infrastructure/Utils/TypeScanner.cs b/src/OpenDDD/Infrastructure/Utils/TypeScanner.cs index b326f67..231bc70 100644 --- a/src/OpenDDD/Infrastructure/Utils/TypeScanner.cs +++ b/src/OpenDDD/Infrastructure/Utils/TypeScanner.cs @@ -7,7 +7,7 @@ public static class TypeScanner private static readonly object _assemblyCacheLock = new(); private static IEnumerable? _cachedAssemblies; - public static IEnumerable GetRelevantAssemblies(bool includeDynamic = false) + public static IEnumerable GetRelevantAssemblies(bool includeDynamic = false, bool excludeTestNamespaces = true) { if (_cachedAssemblies == null) { @@ -20,7 +20,7 @@ public static IEnumerable GetRelevantAssemblies(bool includeDynamic = .Where(a => !a.FullName.StartsWith("System", StringComparison.OrdinalIgnoreCase) && !a.FullName.StartsWith("Microsoft", StringComparison.OrdinalIgnoreCase) && - !a.FullName.Contains("Tests", StringComparison.OrdinalIgnoreCase)) // Exclude test assemblies + (excludeTestNamespaces ? !a.FullName.Contains("Tests", StringComparison.OrdinalIgnoreCase) : true)) .ToList(); } } @@ -34,7 +34,7 @@ public static IEnumerable GetRelevantTypes( bool onlyConcreteClasses = false, bool excludeTestNamespaces = true) { - var types = GetRelevantAssemblies(includeDynamic) + var types = GetRelevantAssemblies(includeDynamic, excludeTestNamespaces) .SelectMany(assembly => assembly.GetTypes()); if (excludeTestNamespaces) diff --git a/src/OpenDDD/OpenDDD.csproj b/src/OpenDDD/OpenDDD.csproj index cf25118..ceca2ad 100644 --- a/src/OpenDDD/OpenDDD.csproj +++ b/src/OpenDDD/OpenDDD.csproj @@ -7,7 +7,7 @@ false true OpenDDD.NET - 3.0.0-alpha.3 + 3.0.0-beta.2 David Runemalm A framework for domain-driven design using C# and .NET. 
DDD;Domain-Driven Design;C#;.NET;Hexagonal Architecture @@ -19,12 +19,10 @@ - - @@ -32,7 +30,6 @@ - diff --git a/templates/Bookstore/src/Bookstore/Bookstore.csproj b/templates/Bookstore/src/Bookstore/Bookstore.csproj index 2830eba..e2613e9 100644 --- a/templates/Bookstore/src/Bookstore/Bookstore.csproj +++ b/templates/Bookstore/src/Bookstore/Bookstore.csproj @@ -13,7 +13,7 @@ all runtime; build; native; contentfiles; analyzers; buildtransitive - + diff --git a/templates/Bookstore/src/Bookstore/Tests/Infrastructure/Persistence/EfCore/EfCoreConfigurationTests.cs b/templates/Bookstore/src/Bookstore/Tests/Infrastructure/Persistence/EfCore/EfCoreConfigurationTests.cs deleted file mode 100644 index e3f2db6..0000000 --- a/templates/Bookstore/src/Bookstore/Tests/Infrastructure/Persistence/EfCore/EfCoreConfigurationTests.cs +++ /dev/null @@ -1,84 +0,0 @@ -using Xunit; -using Microsoft.EntityFrameworkCore; -using Microsoft.Extensions.Options; -using OpenDDD.Infrastructure.Persistence.EfCore.UoW; -using OpenDDD.Infrastructure.Persistence.UoW; -using OpenDDD.API.Options; -using OpenDDD.Domain.Model; -using OpenDDD.Infrastructure.Persistence.EfCore.Base; -using OpenDDD.Infrastructure.Repository.EfCore; -using OpenDDD.Infrastructure.Persistence.DatabaseSession; -using OpenDDD.Infrastructure.Persistence.EfCore.DatabaseSession; -using Bookstore.Domain.Model; -using Bookstore.Infrastructure.Persistence.EfCore; -using OpenDDD.Infrastructure.Events; -using OpenDDD.Infrastructure.TransactionalOutbox; -using OpenDDD.Infrastructure.TransactionalOutbox.EfCore; - -namespace Bookstore.Tests.Infrastructure.Persistence.EfCore -{ - public class EfCoreConfigurationTests - { - private readonly IServiceProvider _serviceProvider; - - public EfCoreConfigurationTests() - { - var services = new ServiceCollection(); - - // Register logging - services.AddLogging(); - - // Manually configure OpenDDD options - var options = new OpenDddOptions(); - services.AddSingleton(Options.Create(options)); - services.AddSingleton(options); - - // Add an in-memory database - services.AddDbContext(opts => - opts.UseInMemoryDatabase("TestDatabase")); - services.AddScoped(sp => sp.GetRequiredService()); - - // Register EfCoreDatabaseSession as the IDatabaseSession - services.AddScoped(); - services.AddScoped(sp => sp.GetRequiredService()); - - // Register dependencies - services.AddScoped(); - services.AddScoped(typeof(IRepository), typeof(EfCoreRepository)); - - // Register publishers - services.AddScoped(); - services.AddScoped(); - - // Register IOutboxRepository (EF Core implementation) - services.AddScoped(); - - _serviceProvider = services.BuildServiceProvider(); - } - - [Fact] - public async Task CreateAndRetrieveOrder_WithLineItems_ShouldPersistCorrectly() - { - using var scope = _serviceProvider.CreateScope(); - var repository = scope.ServiceProvider.GetRequiredService>(); - - var ct = CancellationToken.None; - - // Arrange - Create and save an order with line items - var order = Order.Create(Guid.NewGuid()); - order.AddLineItem(Guid.NewGuid(), Money.USD(19.99m)); - order.AddLineItem(Guid.NewGuid(), Money.USD(29.99m)); - - await repository.SaveAsync(order, ct); - - // Act - Retrieve order - var retrievedOrder = await repository.GetAsync(order.Id, ct); - - // Assert - Order and line items should be persisted - Assert.NotNull(retrievedOrder); - Assert.Equal(order.Id, retrievedOrder.Id); - Assert.NotEmpty(retrievedOrder.LineItems); - Assert.Equal(2, retrievedOrder.LineItems.Count); - } - } -} diff --git 
a/templates/Bookstore/src/Bookstore/appsettings.json b/templates/Bookstore/src/Bookstore/appsettings.json index cf4c806..ebe6847 100644 --- a/templates/Bookstore/src/Bookstore/appsettings.json +++ b/templates/Bookstore/src/Bookstore/appsettings.json @@ -13,8 +13,8 @@ "DatabaseProvider": "InMemory", "MessagingProvider": "InMemory", "Events": { - "DomainEventTopicTemplate": "Bookstore.Domain.{EventName}", - "IntegrationEventTopicTemplate": "Bookstore.Interchange.{EventName}", + "DomainEventTopic": "Bookstore.Domain.{EventName}", + "IntegrationEventTopic": "Bookstore.Interchange.{EventName}", "ListenerGroup": "Default" }, "SQLite": { @@ -32,10 +32,12 @@ "Port": 5672, "Username": "guest", "Password": "guest", - "VirtualHost": "/" + "VirtualHost": "/", + "AutoCreateTopics": true }, "Kafka": { - "BootstrapServers": "localhost:9092" + "BootstrapServers": "localhost:9092", + "AutoCreateTopics": true }, "AutoRegister": { "Actions": true, diff --git a/templates/templatepack.csproj b/templates/templatepack.csproj index 4f485c6..53f69dd 100644 --- a/templates/templatepack.csproj +++ b/templates/templatepack.csproj @@ -13,7 +13,7 @@ OpenDDD.NET-Templates - 3.0.0-alpha.1 + 3.0.0-beta.2 David Runemalm Project templates for OpenDDD.NET dotnet-new;templates;openddd.net;ddd;hexagonal
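Both messaging options classes gain an AutoCreateTopics flag in this patch, defaulting to true so existing behaviour is preserved. When the flag is switched off, the providers refuse to publish or subscribe against a topic or exchange that does not already exist rather than creating it on the fly. Below is a minimal sketch of turning the flag off in code; the property names come from the patch, while the sample values, the namespaces (inferred from the file paths), and the way the options objects reach the providers (DI registration or appsettings binding) are assumptions.

```csharp
using OpenDDD.Infrastructure.Events.Kafka.Options;    // namespace inferred from file path
using OpenDDD.Infrastructure.Events.RabbitMq.Options; // namespace inferred from file path

// Sketch only: opt out of automatic topic/exchange creation.
var kafkaOptions = new OpenDddKafkaOptions
{
    BootstrapServers = "localhost:9092",
    AutoCreateTopics = false // publishing/subscribing to a missing topic now fails
};

var rabbitMqOptions = new OpenDddRabbitMqOptions
{
    HostName = "localhost",
    Port = 5672,
    Username = "guest",
    Password = "guest",
    VirtualHost = "/",
    AutoCreateTopics = false // missing exchanges are reported instead of being declared
};
```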
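The RabbitMQ provider now receives an IConnectionFactory and an IRabbitMqConsumerFactory directly instead of unwrapping the OpenDDD options itself, and it tracks every subscription so that DisposeAsync can stop consumers before closing the channel and connection. The sketch below wires it up by hand. Assumptions not confirmed by the patch: the ILogger<T> type arguments, the shape of the message-handler delegate (its generic arguments are not visible here; Func<string, CancellationToken, Task> is assumed), that SubscribeAsync returns the subscription it registers, and the illustrative topic name.

```csharp
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using OpenDDD.Infrastructure.Events.RabbitMq;
using OpenDDD.Infrastructure.Events.RabbitMq.Factories;
using RabbitMQ.Client;

var connectionFactory = new ConnectionFactory { HostName = "localhost" };
var consumerFactory = new RabbitMqConsumerFactory(NullLogger<RabbitMqConsumerFactory>.Instance);

// DisposeAsync stops and disposes any remaining subscriptions, then closes
// the channel and connection.
await using var provider = new RabbitMqMessagingProvider(
    connectionFactory,
    consumerFactory,
    autoCreateTopics: true,
    NullLogger<RabbitMqMessagingProvider>.Instance); // logger type argument assumed

// Handler delegate shape is an assumption (see lead-in).
var subscription = await provider.SubscribeAsync(
    topic: "Bookstore.Domain.OrderPlaced",
    consumerGroup: "Default",
    messageHandler: (message, ct) => Task.CompletedTask);

await provider.PublishAsync("Bookstore.Domain.OrderPlaced", "{ \"orderId\": \"...\" }");

// Explicit unsubscribe stops the consumer and removes it from the provider's
// bookkeeping; anything left over is cleaned up by DisposeAsync.
await provider.UnsubscribeAsync(subscription);
```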
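For each subscription the provider declares a durable fanout exchange named after the topic and binds a durable queue named "{consumerGroup}.{topic}" to it, so instances that share a consumer group compete for messages while every distinct group receives its own copy. The helper below only restates that naming convention; the helper class itself is hypothetical and not part of the patch.

```csharp
// Hypothetical helper: the "{consumerGroup}.{topic}" convention is taken from
// RabbitMqMessagingProvider.SubscribeAsync; this class is not part of the patch.
public static class RabbitMqNaming
{
    public static string QueueName(string consumerGroup, string topic)
        => $"{consumerGroup}.{topic}";
}

// Example: QueueName("Default", "Bookstore.Domain.OrderPlaced")
//       -> "Default.Bookstore.Domain.OrderPlaced"
```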
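Because the broker connection and consumer creation now sit behind interfaces, the provider's argument validation and subscription bookkeeping can be exercised without a running RabbitMQ instance. A sketch using xUnit (already used elsewhere in the repository) and Moq; Moq is an assumption, since the test project's mocking library is not shown in this patch, and the logger type argument is assumed as well.

```csharp
using System;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using Moq;
using OpenDDD.Infrastructure.Events.RabbitMq;
using OpenDDD.Infrastructure.Events.RabbitMq.Factories;
using RabbitMQ.Client;
using Xunit;

public class RabbitMqMessagingProviderTests
{
    [Fact]
    public async Task PublishAsync_WithEmptyTopic_ThrowsBeforeConnecting()
    {
        // No broker involved: the guard clauses run before EnsureConnectedAsync
        // ever touches the connection factory.
        await using var provider = new RabbitMqMessagingProvider(
            Mock.Of<IConnectionFactory>(),
            Mock.Of<IRabbitMqConsumerFactory>(),
            autoCreateTopics: true,
            NullLogger<RabbitMqMessagingProvider>.Instance); // logger type argument assumed

        await Assert.ThrowsAsync<ArgumentException>(
            () => provider.PublishAsync("", "some message"));
    }
}
```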
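TypeScanner.GetRelevantAssemblies and GetRelevantTypes now accept excludeTestNamespaces, and OpenDddDbContextBase passes false so EF Core configurations that live inside test projects are still discovered. A small sketch of both call shapes; the surrounding filtering is illustrative only.

```csharp
using System.Linq;
using OpenDDD.Infrastructure.Utils;

// Mirrors the call now made in OpenDddDbContextBase.ApplyConfigurations:
// do not skip assemblies whose full name contains "Tests".
var allTypes = TypeScanner.GetRelevantTypes(excludeTestNamespaces: false)
    .Where(t => t.IsClass && !t.IsAbstract)
    .ToList();

// Default behaviour is unchanged: test assemblies are filtered out.
var productionTypes = TypeScanner.GetRelevantTypes().ToList();
```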
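IKeyValueStorage gains ClearAsync, and the in-memory implementation simply empties its backing dictionary, which is mainly useful for resetting shared state between tests. A tiny sketch of a helper built on the new member; the helper itself is hypothetical, only ClearAsync comes from the patch.

```csharp
using System.Threading;
using System.Threading.Tasks;
using OpenDDD.Infrastructure.Persistence.Storage;

// Hypothetical test helper: only the ClearAsync member comes from this patch.
public static class KeyValueStorageTestExtensions
{
    // Wipe everything a test wrote, instead of enumerating keys via
    // GetByPrefixAsync and removing them one by one.
    public static Task ResetAsync(this IKeyValueStorage storage, CancellationToken ct = default)
        => storage.ClearAsync(ct);
}
```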