-
- Failed: {session.failed_events} events
-
- {#if session.error_message}
-
-
- Error: {session.error_message}
-
-
- {/if}
- {#if session.failed_event_errors && session.failed_event_errors.length > 0}
-
- {#each session.failed_event_errors as error}
-
-
{error.event_id}
-
{error.error}
-
- {/each}
+ {#if session.errors?.length}
+
+ {#each session.errors as err}
+
+ {#if err.event_id}
{err.event_id}
{/if}
+
{err.error}
- {/if}
+ {/each}
{/if}
- {#if session.execution_results && session.execution_results.length > 0}
+ {#if session.execution_results?.length}
Execution Results:
@@ -82,25 +65,15 @@
}">
{result.status}
- {#if result.execution_time}
-
- {result.execution_time.toFixed(2)}s
-
+ {#if result.resource_usage?.execution_time_wall_seconds}
+ {result.resource_usage.execution_time_wall_seconds.toFixed(2)}s
{/if}
- {#if result.output || result.errors}
+ {#if result.stdout || result.stderr}
- {#if result.output}
-
- Output: {result.output}
-
- {/if}
- {#if result.errors}
-
- Error: {result.errors}
-
- {/if}
+ {#if result.stdout}
{result.stdout}
{/if}
+ {#if result.stderr}
{result.stderr}
{/if}
{/if}
diff --git a/frontend/src/lib/api/index.ts b/frontend/src/lib/api/index.ts
index bbd8ae8..2788bc9 100644
--- a/frontend/src/lib/api/index.ts
+++ b/frontend/src/lib/api/index.ts
@@ -1,4 +1,4 @@
// This file is auto-generated by @hey-api/openapi-ts
export { aggregateEventsApiV1EventsAggregatePost, browseEventsApiV1AdminEventsBrowsePost, cancelExecutionApiV1ExecutionIdCancelPost, cancelReplaySessionApiV1ReplaySessionsSessionIdCancelPost, cancelSagaApiV1SagasSagaIdCancelPost, cleanupOldSessionsApiV1ReplayCleanupPost, createExecutionApiV1ExecutePost, createReplaySessionApiV1ReplaySessionsPost, createSavedScriptApiV1ScriptsPost, createUserApiV1AdminUsersPost, deleteEventApiV1AdminEventsEventIdDelete, deleteEventApiV1EventsEventIdDelete, deleteExecutionApiV1ExecutionIdDelete, deleteNotificationApiV1NotificationsNotificationIdDelete, deleteSavedScriptApiV1ScriptsScriptIdDelete, deleteUserApiV1AdminUsersUserIdDelete, discardDlqMessageApiV1DlqMessagesEventIdDelete, executionEventsApiV1EventsExecutionsExecutionIdGet, exportEventsCsvApiV1AdminEventsExportCsvGet, exportEventsJsonApiV1AdminEventsExportJsonGet, getCurrentRequestEventsApiV1EventsCurrentRequestGet, getCurrentUserProfileApiV1AuthMeGet, getDlqMessageApiV1DlqMessagesEventIdGet, getDlqMessagesApiV1DlqMessagesGet, getDlqStatisticsApiV1DlqStatsGet, getDlqTopicsApiV1DlqTopicsGet, getEventApiV1EventsEventIdGet, getEventDetailApiV1AdminEventsEventIdGet, getEventsByCorrelationApiV1EventsCorrelationCorrelationIdGet, getEventStatisticsApiV1EventsStatisticsGet, getEventStatsApiV1AdminEventsStatsGet, getExampleScriptsApiV1ExampleScriptsGet, getExecutionEventsApiV1EventsExecutionsExecutionIdEventsGet, getExecutionEventsApiV1ExecutionsExecutionIdEventsGet, getExecutionSagasApiV1SagasExecutionExecutionIdGet, getK8sResourceLimitsApiV1K8sLimitsGet, getNotificationsApiV1NotificationsGet, getReplaySessionApiV1ReplaySessionsSessionIdGet, getReplayStatusApiV1AdminEventsReplaySessionIdStatusGet, getResultApiV1ResultExecutionIdGet, getSagaStatusApiV1SagasSagaIdGet, getSavedScriptApiV1ScriptsScriptIdGet, getSettingsHistoryApiV1UserSettingsHistoryGet, getSubscriptionsApiV1NotificationsSubscriptionsGet, getSystemSettingsApiV1AdminSettingsGet, 
getUnreadCountApiV1NotificationsUnreadCountGet, getUserApiV1AdminUsersUserIdGet, getUserEventsApiV1EventsUserGet, getUserExecutionsApiV1UserExecutionsGet, getUserOverviewApiV1AdminUsersUserIdOverviewGet, getUserRateLimitsApiV1AdminUsersUserIdRateLimitsGet, getUserSettingsApiV1UserSettingsGet, listEventTypesApiV1EventsTypesListGet, listReplaySessionsApiV1ReplaySessionsGet, listSagasApiV1SagasGet, listSavedScriptsApiV1ScriptsGet, listUsersApiV1AdminUsersGet, livenessApiV1HealthLiveGet, loginApiV1AuthLoginPost, logoutApiV1AuthLogoutPost, markAllReadApiV1NotificationsMarkAllReadPost, markNotificationReadApiV1NotificationsNotificationIdReadPut, notificationStreamApiV1EventsNotificationsStreamGet, type Options, pauseReplaySessionApiV1ReplaySessionsSessionIdPausePost, publishCustomEventApiV1EventsPublishPost, queryEventsApiV1EventsQueryPost, readinessApiV1HealthReadyGet, receiveGrafanaAlertsApiV1AlertsGrafanaPost, registerApiV1AuthRegisterPost, replayAggregateEventsApiV1EventsReplayAggregateIdPost, replayEventsApiV1AdminEventsReplayPost, resetSystemSettingsApiV1AdminSettingsResetPost, resetUserPasswordApiV1AdminUsersUserIdResetPasswordPost, resetUserRateLimitsApiV1AdminUsersUserIdRateLimitsResetPost, restoreSettingsApiV1UserSettingsRestorePost, resumeReplaySessionApiV1ReplaySessionsSessionIdResumePost, retryDlqMessagesApiV1DlqRetryPost, retryExecutionApiV1ExecutionIdRetryPost, setRetryPolicyApiV1DlqRetryPolicyPost, sseHealthApiV1EventsHealthGet, startReplaySessionApiV1ReplaySessionsSessionIdStartPost, testGrafanaAlertEndpointApiV1AlertsGrafanaTestGet, updateCustomSettingApiV1UserSettingsCustomKeyPut, updateEditorSettingsApiV1UserSettingsEditorPut, updateNotificationSettingsApiV1UserSettingsNotificationsPut, updateSavedScriptApiV1ScriptsScriptIdPut, updateSubscriptionApiV1NotificationsSubscriptionsChannelPut, updateSystemSettingsApiV1AdminSettingsPut, updateThemeApiV1UserSettingsThemePut, updateUserApiV1AdminUsersUserIdPut, 
updateUserRateLimitsApiV1AdminUsersUserIdRateLimitsPut, updateUserSettingsApiV1UserSettingsPut, verifyTokenApiV1AuthVerifyTokenGet } from './sdk.gen';
-export type { AdminUserOverview, AggregateEventsApiV1EventsAggregatePostData, AggregateEventsApiV1EventsAggregatePostError, AggregateEventsApiV1EventsAggregatePostErrors, AggregateEventsApiV1EventsAggregatePostResponse, AggregateEventsApiV1EventsAggregatePostResponses, AlertResponse, BodyLoginApiV1AuthLoginPost, BrowseEventsApiV1AdminEventsBrowsePostData, BrowseEventsApiV1AdminEventsBrowsePostError, BrowseEventsApiV1AdminEventsBrowsePostErrors, BrowseEventsApiV1AdminEventsBrowsePostResponse, BrowseEventsApiV1AdminEventsBrowsePostResponses, CancelExecutionApiV1ExecutionIdCancelPostData, CancelExecutionApiV1ExecutionIdCancelPostError, CancelExecutionApiV1ExecutionIdCancelPostErrors, CancelExecutionApiV1ExecutionIdCancelPostResponse, CancelExecutionApiV1ExecutionIdCancelPostResponses, CancelExecutionRequest, CancelReplaySessionApiV1ReplaySessionsSessionIdCancelPostData, CancelReplaySessionApiV1ReplaySessionsSessionIdCancelPostError, CancelReplaySessionApiV1ReplaySessionsSessionIdCancelPostErrors, CancelReplaySessionApiV1ReplaySessionsSessionIdCancelPostResponse, CancelReplaySessionApiV1ReplaySessionsSessionIdCancelPostResponses, CancelResponse, CancelSagaApiV1SagasSagaIdCancelPostData, CancelSagaApiV1SagasSagaIdCancelPostError, CancelSagaApiV1SagasSagaIdCancelPostErrors, CancelSagaApiV1SagasSagaIdCancelPostResponse, CancelSagaApiV1SagasSagaIdCancelPostResponses, CleanupOldSessionsApiV1ReplayCleanupPostData, CleanupOldSessionsApiV1ReplayCleanupPostError, CleanupOldSessionsApiV1ReplayCleanupPostErrors, CleanupOldSessionsApiV1ReplayCleanupPostResponse, CleanupOldSessionsApiV1ReplayCleanupPostResponses, CleanupResponse, ClientOptions, CreateExecutionApiV1ExecutePostData, CreateExecutionApiV1ExecutePostError, CreateExecutionApiV1ExecutePostErrors, CreateExecutionApiV1ExecutePostResponse, CreateExecutionApiV1ExecutePostResponses, CreateReplaySessionApiV1ReplaySessionsPostData, CreateReplaySessionApiV1ReplaySessionsPostError, 
CreateReplaySessionApiV1ReplaySessionsPostErrors, CreateReplaySessionApiV1ReplaySessionsPostResponse, CreateReplaySessionApiV1ReplaySessionsPostResponses, CreateSavedScriptApiV1ScriptsPostData, CreateSavedScriptApiV1ScriptsPostError, CreateSavedScriptApiV1ScriptsPostErrors, CreateSavedScriptApiV1ScriptsPostResponse, CreateSavedScriptApiV1ScriptsPostResponses, CreateUserApiV1AdminUsersPostData, CreateUserApiV1AdminUsersPostError, CreateUserApiV1AdminUsersPostErrors, CreateUserApiV1AdminUsersPostResponse, CreateUserApiV1AdminUsersPostResponses, DeleteEventApiV1AdminEventsEventIdDeleteData, DeleteEventApiV1AdminEventsEventIdDeleteError, DeleteEventApiV1AdminEventsEventIdDeleteErrors, DeleteEventApiV1AdminEventsEventIdDeleteResponse, DeleteEventApiV1AdminEventsEventIdDeleteResponses, DeleteEventApiV1EventsEventIdDeleteData, DeleteEventApiV1EventsEventIdDeleteError, DeleteEventApiV1EventsEventIdDeleteErrors, DeleteEventApiV1EventsEventIdDeleteResponse, DeleteEventApiV1EventsEventIdDeleteResponses, DeleteEventResponse, DeleteExecutionApiV1ExecutionIdDeleteData, DeleteExecutionApiV1ExecutionIdDeleteError, DeleteExecutionApiV1ExecutionIdDeleteErrors, DeleteExecutionApiV1ExecutionIdDeleteResponse, DeleteExecutionApiV1ExecutionIdDeleteResponses, DeleteNotificationApiV1NotificationsNotificationIdDeleteData, DeleteNotificationApiV1NotificationsNotificationIdDeleteError, DeleteNotificationApiV1NotificationsNotificationIdDeleteErrors, DeleteNotificationApiV1NotificationsNotificationIdDeleteResponse, DeleteNotificationApiV1NotificationsNotificationIdDeleteResponses, DeleteNotificationResponse, DeleteResponse, DeleteSavedScriptApiV1ScriptsScriptIdDeleteData, DeleteSavedScriptApiV1ScriptsScriptIdDeleteError, DeleteSavedScriptApiV1ScriptsScriptIdDeleteErrors, DeleteSavedScriptApiV1ScriptsScriptIdDeleteResponse, DeleteSavedScriptApiV1ScriptsScriptIdDeleteResponses, DeleteUserApiV1AdminUsersUserIdDeleteData, DeleteUserApiV1AdminUsersUserIdDeleteError, 
DeleteUserApiV1AdminUsersUserIdDeleteErrors, DeleteUserApiV1AdminUsersUserIdDeleteResponse, DeleteUserApiV1AdminUsersUserIdDeleteResponses, DeleteUserResponse, DerivedCounts, DiscardDlqMessageApiV1DlqMessagesEventIdDeleteData, DiscardDlqMessageApiV1DlqMessagesEventIdDeleteError, DiscardDlqMessageApiV1DlqMessagesEventIdDeleteErrors, DiscardDlqMessageApiV1DlqMessagesEventIdDeleteResponse, DiscardDlqMessageApiV1DlqMessagesEventIdDeleteResponses, DlqBatchRetryResponse, DlqMessageDetail, DlqMessageResponse, DlqMessagesResponse, DlqMessageStatus, DlqStats, DlqTopicSummaryResponse, EditorSettings, EndpointGroup, EventAggregationRequest, EventBrowseRequest, EventBrowseResponse, EventDeleteResponse, EventDetailResponse, EventFilter, EventFilterRequest, EventListResponse, EventReplayRequest, EventReplayResponse, EventReplayStatusResponse, EventResponse, EventStatistics, EventStatsResponse, EventType, ExampleScripts, ExecutionErrorType, ExecutionEventResponse, ExecutionEventsApiV1EventsExecutionsExecutionIdGetData, ExecutionEventsApiV1EventsExecutionsExecutionIdGetError, ExecutionEventsApiV1EventsExecutionsExecutionIdGetErrors, ExecutionEventsApiV1EventsExecutionsExecutionIdGetResponses, ExecutionLimitsSchema, ExecutionListResponse, ExecutionRequest, ExecutionResponse, ExecutionResult, ExecutionStatus, ExportEventsCsvApiV1AdminEventsExportCsvGetData, ExportEventsCsvApiV1AdminEventsExportCsvGetError, ExportEventsCsvApiV1AdminEventsExportCsvGetErrors, ExportEventsCsvApiV1AdminEventsExportCsvGetResponses, ExportEventsJsonApiV1AdminEventsExportJsonGetData, ExportEventsJsonApiV1AdminEventsExportJsonGetError, ExportEventsJsonApiV1AdminEventsExportJsonGetErrors, ExportEventsJsonApiV1AdminEventsExportJsonGetResponses, GetCurrentRequestEventsApiV1EventsCurrentRequestGetData, GetCurrentRequestEventsApiV1EventsCurrentRequestGetError, GetCurrentRequestEventsApiV1EventsCurrentRequestGetErrors, GetCurrentRequestEventsApiV1EventsCurrentRequestGetResponse, 
GetCurrentRequestEventsApiV1EventsCurrentRequestGetResponses, GetCurrentUserProfileApiV1AuthMeGetData, GetCurrentUserProfileApiV1AuthMeGetResponse, GetCurrentUserProfileApiV1AuthMeGetResponses, GetDlqMessageApiV1DlqMessagesEventIdGetData, GetDlqMessageApiV1DlqMessagesEventIdGetError, GetDlqMessageApiV1DlqMessagesEventIdGetErrors, GetDlqMessageApiV1DlqMessagesEventIdGetResponse, GetDlqMessageApiV1DlqMessagesEventIdGetResponses, GetDlqMessagesApiV1DlqMessagesGetData, GetDlqMessagesApiV1DlqMessagesGetError, GetDlqMessagesApiV1DlqMessagesGetErrors, GetDlqMessagesApiV1DlqMessagesGetResponse, GetDlqMessagesApiV1DlqMessagesGetResponses, GetDlqStatisticsApiV1DlqStatsGetData, GetDlqStatisticsApiV1DlqStatsGetResponse, GetDlqStatisticsApiV1DlqStatsGetResponses, GetDlqTopicsApiV1DlqTopicsGetData, GetDlqTopicsApiV1DlqTopicsGetResponse, GetDlqTopicsApiV1DlqTopicsGetResponses, GetEventApiV1EventsEventIdGetData, GetEventApiV1EventsEventIdGetError, GetEventApiV1EventsEventIdGetErrors, GetEventApiV1EventsEventIdGetResponse, GetEventApiV1EventsEventIdGetResponses, GetEventDetailApiV1AdminEventsEventIdGetData, GetEventDetailApiV1AdminEventsEventIdGetError, GetEventDetailApiV1AdminEventsEventIdGetErrors, GetEventDetailApiV1AdminEventsEventIdGetResponse, GetEventDetailApiV1AdminEventsEventIdGetResponses, GetEventsByCorrelationApiV1EventsCorrelationCorrelationIdGetData, GetEventsByCorrelationApiV1EventsCorrelationCorrelationIdGetError, GetEventsByCorrelationApiV1EventsCorrelationCorrelationIdGetErrors, GetEventsByCorrelationApiV1EventsCorrelationCorrelationIdGetResponse, GetEventsByCorrelationApiV1EventsCorrelationCorrelationIdGetResponses, GetEventStatisticsApiV1EventsStatisticsGetData, GetEventStatisticsApiV1EventsStatisticsGetError, GetEventStatisticsApiV1EventsStatisticsGetErrors, GetEventStatisticsApiV1EventsStatisticsGetResponse, GetEventStatisticsApiV1EventsStatisticsGetResponses, GetEventStatsApiV1AdminEventsStatsGetData, GetEventStatsApiV1AdminEventsStatsGetError, 
GetEventStatsApiV1AdminEventsStatsGetErrors, GetEventStatsApiV1AdminEventsStatsGetResponse, GetEventStatsApiV1AdminEventsStatsGetResponses, GetExampleScriptsApiV1ExampleScriptsGetData, GetExampleScriptsApiV1ExampleScriptsGetResponse, GetExampleScriptsApiV1ExampleScriptsGetResponses, GetExecutionEventsApiV1EventsExecutionsExecutionIdEventsGetData, GetExecutionEventsApiV1EventsExecutionsExecutionIdEventsGetError, GetExecutionEventsApiV1EventsExecutionsExecutionIdEventsGetErrors, GetExecutionEventsApiV1EventsExecutionsExecutionIdEventsGetResponse, GetExecutionEventsApiV1EventsExecutionsExecutionIdEventsGetResponses, GetExecutionEventsApiV1ExecutionsExecutionIdEventsGetData, GetExecutionEventsApiV1ExecutionsExecutionIdEventsGetError, GetExecutionEventsApiV1ExecutionsExecutionIdEventsGetErrors, GetExecutionEventsApiV1ExecutionsExecutionIdEventsGetResponse, GetExecutionEventsApiV1ExecutionsExecutionIdEventsGetResponses, GetExecutionSagasApiV1SagasExecutionExecutionIdGetData, GetExecutionSagasApiV1SagasExecutionExecutionIdGetError, GetExecutionSagasApiV1SagasExecutionExecutionIdGetErrors, GetExecutionSagasApiV1SagasExecutionExecutionIdGetResponse, GetExecutionSagasApiV1SagasExecutionExecutionIdGetResponses, GetK8sResourceLimitsApiV1K8sLimitsGetData, GetK8sResourceLimitsApiV1K8sLimitsGetResponse, GetK8sResourceLimitsApiV1K8sLimitsGetResponses, GetNotificationsApiV1NotificationsGetData, GetNotificationsApiV1NotificationsGetError, GetNotificationsApiV1NotificationsGetErrors, GetNotificationsApiV1NotificationsGetResponse, GetNotificationsApiV1NotificationsGetResponses, GetReplaySessionApiV1ReplaySessionsSessionIdGetData, GetReplaySessionApiV1ReplaySessionsSessionIdGetError, GetReplaySessionApiV1ReplaySessionsSessionIdGetErrors, GetReplaySessionApiV1ReplaySessionsSessionIdGetResponse, GetReplaySessionApiV1ReplaySessionsSessionIdGetResponses, GetReplayStatusApiV1AdminEventsReplaySessionIdStatusGetData, GetReplayStatusApiV1AdminEventsReplaySessionIdStatusGetError, 
GetReplayStatusApiV1AdminEventsReplaySessionIdStatusGetErrors, GetReplayStatusApiV1AdminEventsReplaySessionIdStatusGetResponse, GetReplayStatusApiV1AdminEventsReplaySessionIdStatusGetResponses, GetResultApiV1ResultExecutionIdGetData, GetResultApiV1ResultExecutionIdGetError, GetResultApiV1ResultExecutionIdGetErrors, GetResultApiV1ResultExecutionIdGetResponse, GetResultApiV1ResultExecutionIdGetResponses, GetSagaStatusApiV1SagasSagaIdGetData, GetSagaStatusApiV1SagasSagaIdGetError, GetSagaStatusApiV1SagasSagaIdGetErrors, GetSagaStatusApiV1SagasSagaIdGetResponse, GetSagaStatusApiV1SagasSagaIdGetResponses, GetSavedScriptApiV1ScriptsScriptIdGetData, GetSavedScriptApiV1ScriptsScriptIdGetError, GetSavedScriptApiV1ScriptsScriptIdGetErrors, GetSavedScriptApiV1ScriptsScriptIdGetResponse, GetSavedScriptApiV1ScriptsScriptIdGetResponses, GetSettingsHistoryApiV1UserSettingsHistoryGetData, GetSettingsHistoryApiV1UserSettingsHistoryGetError, GetSettingsHistoryApiV1UserSettingsHistoryGetErrors, GetSettingsHistoryApiV1UserSettingsHistoryGetResponse, GetSettingsHistoryApiV1UserSettingsHistoryGetResponses, GetSubscriptionsApiV1NotificationsSubscriptionsGetData, GetSubscriptionsApiV1NotificationsSubscriptionsGetResponse, GetSubscriptionsApiV1NotificationsSubscriptionsGetResponses, GetSystemSettingsApiV1AdminSettingsGetData, GetSystemSettingsApiV1AdminSettingsGetResponse, GetSystemSettingsApiV1AdminSettingsGetResponses, GetUnreadCountApiV1NotificationsUnreadCountGetData, GetUnreadCountApiV1NotificationsUnreadCountGetResponse, GetUnreadCountApiV1NotificationsUnreadCountGetResponses, GetUserApiV1AdminUsersUserIdGetData, GetUserApiV1AdminUsersUserIdGetError, GetUserApiV1AdminUsersUserIdGetErrors, GetUserApiV1AdminUsersUserIdGetResponse, GetUserApiV1AdminUsersUserIdGetResponses, GetUserEventsApiV1EventsUserGetData, GetUserEventsApiV1EventsUserGetError, GetUserEventsApiV1EventsUserGetErrors, GetUserEventsApiV1EventsUserGetResponse, GetUserEventsApiV1EventsUserGetResponses, 
GetUserExecutionsApiV1UserExecutionsGetData, GetUserExecutionsApiV1UserExecutionsGetError, GetUserExecutionsApiV1UserExecutionsGetErrors, GetUserExecutionsApiV1UserExecutionsGetResponse, GetUserExecutionsApiV1UserExecutionsGetResponses, GetUserOverviewApiV1AdminUsersUserIdOverviewGetData, GetUserOverviewApiV1AdminUsersUserIdOverviewGetError, GetUserOverviewApiV1AdminUsersUserIdOverviewGetErrors, GetUserOverviewApiV1AdminUsersUserIdOverviewGetResponse, GetUserOverviewApiV1AdminUsersUserIdOverviewGetResponses, GetUserRateLimitsApiV1AdminUsersUserIdRateLimitsGetData, GetUserRateLimitsApiV1AdminUsersUserIdRateLimitsGetError, GetUserRateLimitsApiV1AdminUsersUserIdRateLimitsGetErrors, GetUserRateLimitsApiV1AdminUsersUserIdRateLimitsGetResponse, GetUserRateLimitsApiV1AdminUsersUserIdRateLimitsGetResponses, GetUserSettingsApiV1UserSettingsGetData, GetUserSettingsApiV1UserSettingsGetResponse, GetUserSettingsApiV1UserSettingsGetResponses, GrafanaAlertItem, GrafanaWebhook, HttpValidationError, LanguageInfo, ListEventTypesApiV1EventsTypesListGetData, ListEventTypesApiV1EventsTypesListGetResponse, ListEventTypesApiV1EventsTypesListGetResponses, ListReplaySessionsApiV1ReplaySessionsGetData, ListReplaySessionsApiV1ReplaySessionsGetError, ListReplaySessionsApiV1ReplaySessionsGetErrors, ListReplaySessionsApiV1ReplaySessionsGetResponse, ListReplaySessionsApiV1ReplaySessionsGetResponses, ListSagasApiV1SagasGetData, ListSagasApiV1SagasGetError, ListSagasApiV1SagasGetErrors, ListSagasApiV1SagasGetResponse, ListSagasApiV1SagasGetResponses, ListSavedScriptsApiV1ScriptsGetData, ListSavedScriptsApiV1ScriptsGetResponse, ListSavedScriptsApiV1ScriptsGetResponses, ListUsersApiV1AdminUsersGetData, ListUsersApiV1AdminUsersGetError, ListUsersApiV1AdminUsersGetErrors, ListUsersApiV1AdminUsersGetResponse, ListUsersApiV1AdminUsersGetResponses, LivenessApiV1HealthLiveGetData, LivenessApiV1HealthLiveGetResponse, LivenessApiV1HealthLiveGetResponses, LivenessResponse, LoginApiV1AuthLoginPostData, 
LoginApiV1AuthLoginPostError, LoginApiV1AuthLoginPostErrors, LoginApiV1AuthLoginPostResponse, LoginApiV1AuthLoginPostResponses, LoginResponse, LogoutApiV1AuthLogoutPostData, LogoutApiV1AuthLogoutPostResponse, LogoutApiV1AuthLogoutPostResponses, ManualRetryRequest, MarkAllReadApiV1NotificationsMarkAllReadPostData, MarkAllReadApiV1NotificationsMarkAllReadPostResponse, MarkAllReadApiV1NotificationsMarkAllReadPostResponses, MarkNotificationReadApiV1NotificationsNotificationIdReadPutData, MarkNotificationReadApiV1NotificationsNotificationIdReadPutError, MarkNotificationReadApiV1NotificationsNotificationIdReadPutErrors, MarkNotificationReadApiV1NotificationsNotificationIdReadPutResponse, MarkNotificationReadApiV1NotificationsNotificationIdReadPutResponses, MessageResponse, MonitoringSettingsSchema, NotificationChannel, NotificationListResponse, NotificationResponse, NotificationSettings, NotificationSeverity, NotificationStatus, NotificationStreamApiV1EventsNotificationsStreamGetData, NotificationStreamApiV1EventsNotificationsStreamGetResponses, NotificationSubscription, PasswordResetRequest, PauseReplaySessionApiV1ReplaySessionsSessionIdPausePostData, PauseReplaySessionApiV1ReplaySessionsSessionIdPausePostError, PauseReplaySessionApiV1ReplaySessionsSessionIdPausePostErrors, PauseReplaySessionApiV1ReplaySessionsSessionIdPausePostResponse, PauseReplaySessionApiV1ReplaySessionsSessionIdPausePostResponses, PublishCustomEventApiV1EventsPublishPostData, PublishCustomEventApiV1EventsPublishPostError, PublishCustomEventApiV1EventsPublishPostErrors, PublishCustomEventApiV1EventsPublishPostResponse, PublishCustomEventApiV1EventsPublishPostResponses, PublishEventRequest, PublishEventResponse, QueryEventsApiV1EventsQueryPostData, QueryEventsApiV1EventsQueryPostError, QueryEventsApiV1EventsQueryPostErrors, QueryEventsApiV1EventsQueryPostResponse, QueryEventsApiV1EventsQueryPostResponses, RateLimitAlgorithm, RateLimitRule, RateLimitRuleResponse, RateLimitSummary, 
RateLimitUpdateResponse, ReadinessApiV1HealthReadyGetData, ReadinessApiV1HealthReadyGetResponse, ReadinessApiV1HealthReadyGetResponses, ReadinessResponse, ReceiveGrafanaAlertsApiV1AlertsGrafanaPostData, ReceiveGrafanaAlertsApiV1AlertsGrafanaPostError, ReceiveGrafanaAlertsApiV1AlertsGrafanaPostErrors, ReceiveGrafanaAlertsApiV1AlertsGrafanaPostResponse, ReceiveGrafanaAlertsApiV1AlertsGrafanaPostResponses, RegisterApiV1AuthRegisterPostData, RegisterApiV1AuthRegisterPostError, RegisterApiV1AuthRegisterPostErrors, RegisterApiV1AuthRegisterPostResponse, RegisterApiV1AuthRegisterPostResponses, ReplayAggregateEventsApiV1EventsReplayAggregateIdPostData, ReplayAggregateEventsApiV1EventsReplayAggregateIdPostError, ReplayAggregateEventsApiV1EventsReplayAggregateIdPostErrors, ReplayAggregateEventsApiV1EventsReplayAggregateIdPostResponse, ReplayAggregateEventsApiV1EventsReplayAggregateIdPostResponses, ReplayAggregateResponse, ReplayConfigSchema, ReplayEventsApiV1AdminEventsReplayPostData, ReplayEventsApiV1AdminEventsReplayPostError, ReplayEventsApiV1AdminEventsReplayPostErrors, ReplayEventsApiV1AdminEventsReplayPostResponse, ReplayEventsApiV1AdminEventsReplayPostResponses, ReplayFilterSchema, ReplayRequest, ReplayResponse, ReplaySession, ReplayStatus, ReplayTarget, ReplayType, ResetSystemSettingsApiV1AdminSettingsResetPostData, ResetSystemSettingsApiV1AdminSettingsResetPostResponse, ResetSystemSettingsApiV1AdminSettingsResetPostResponses, ResetUserPasswordApiV1AdminUsersUserIdResetPasswordPostData, ResetUserPasswordApiV1AdminUsersUserIdResetPasswordPostError, ResetUserPasswordApiV1AdminUsersUserIdResetPasswordPostErrors, ResetUserPasswordApiV1AdminUsersUserIdResetPasswordPostResponse, ResetUserPasswordApiV1AdminUsersUserIdResetPasswordPostResponses, ResetUserRateLimitsApiV1AdminUsersUserIdRateLimitsResetPostData, ResetUserRateLimitsApiV1AdminUsersUserIdRateLimitsResetPostError, ResetUserRateLimitsApiV1AdminUsersUserIdRateLimitsResetPostErrors, 
ResetUserRateLimitsApiV1AdminUsersUserIdRateLimitsResetPostResponse, ResetUserRateLimitsApiV1AdminUsersUserIdRateLimitsResetPostResponses, ResourceLimits, ResourceUsage, RestoreSettingsApiV1UserSettingsRestorePostData, RestoreSettingsApiV1UserSettingsRestorePostError, RestoreSettingsApiV1UserSettingsRestorePostErrors, RestoreSettingsApiV1UserSettingsRestorePostResponse, RestoreSettingsApiV1UserSettingsRestorePostResponses, RestoreSettingsRequest, ResumeReplaySessionApiV1ReplaySessionsSessionIdResumePostData, ResumeReplaySessionApiV1ReplaySessionsSessionIdResumePostError, ResumeReplaySessionApiV1ReplaySessionsSessionIdResumePostErrors, ResumeReplaySessionApiV1ReplaySessionsSessionIdResumePostResponse, ResumeReplaySessionApiV1ReplaySessionsSessionIdResumePostResponses, RetryDlqMessagesApiV1DlqRetryPostData, RetryDlqMessagesApiV1DlqRetryPostError, RetryDlqMessagesApiV1DlqRetryPostErrors, RetryDlqMessagesApiV1DlqRetryPostResponse, RetryDlqMessagesApiV1DlqRetryPostResponses, RetryExecutionApiV1ExecutionIdRetryPostData, RetryExecutionApiV1ExecutionIdRetryPostError, RetryExecutionApiV1ExecutionIdRetryPostErrors, RetryExecutionApiV1ExecutionIdRetryPostResponse, RetryExecutionApiV1ExecutionIdRetryPostResponses, RetryExecutionRequest, RetryPolicyRequest, RetryStrategy, SagaCancellationResponse, SagaListResponse, SagaState, SagaStatusResponse, SavedScriptCreateRequest, SavedScriptResponse, SecuritySettingsSchema, SessionSummary, SetRetryPolicyApiV1DlqRetryPolicyPostData, SetRetryPolicyApiV1DlqRetryPolicyPostError, SetRetryPolicyApiV1DlqRetryPolicyPostErrors, SetRetryPolicyApiV1DlqRetryPolicyPostResponse, SetRetryPolicyApiV1DlqRetryPolicyPostResponses, SettingsHistoryEntry, SettingsHistoryResponse, ShutdownStatusResponse, SortOrder, SseHealthApiV1EventsHealthGetData, SseHealthApiV1EventsHealthGetResponse, SseHealthApiV1EventsHealthGetResponses, SseHealthResponse, StartReplaySessionApiV1ReplaySessionsSessionIdStartPostData, 
StartReplaySessionApiV1ReplaySessionsSessionIdStartPostError, StartReplaySessionApiV1ReplaySessionsSessionIdStartPostErrors, StartReplaySessionApiV1ReplaySessionsSessionIdStartPostResponse, StartReplaySessionApiV1ReplaySessionsSessionIdStartPostResponses, SubscriptionsResponse, SubscriptionUpdate, SystemSettings, TestGrafanaAlertEndpointApiV1AlertsGrafanaTestGetData, TestGrafanaAlertEndpointApiV1AlertsGrafanaTestGetResponse, TestGrafanaAlertEndpointApiV1AlertsGrafanaTestGetResponses, Theme, ThemeUpdateRequest, TokenValidationResponse, UnreadCountResponse, UpdateCustomSettingApiV1UserSettingsCustomKeyPutData, UpdateCustomSettingApiV1UserSettingsCustomKeyPutError, UpdateCustomSettingApiV1UserSettingsCustomKeyPutErrors, UpdateCustomSettingApiV1UserSettingsCustomKeyPutResponse, UpdateCustomSettingApiV1UserSettingsCustomKeyPutResponses, UpdateEditorSettingsApiV1UserSettingsEditorPutData, UpdateEditorSettingsApiV1UserSettingsEditorPutError, UpdateEditorSettingsApiV1UserSettingsEditorPutErrors, UpdateEditorSettingsApiV1UserSettingsEditorPutResponse, UpdateEditorSettingsApiV1UserSettingsEditorPutResponses, UpdateNotificationSettingsApiV1UserSettingsNotificationsPutData, UpdateNotificationSettingsApiV1UserSettingsNotificationsPutError, UpdateNotificationSettingsApiV1UserSettingsNotificationsPutErrors, UpdateNotificationSettingsApiV1UserSettingsNotificationsPutResponse, UpdateNotificationSettingsApiV1UserSettingsNotificationsPutResponses, UpdateSavedScriptApiV1ScriptsScriptIdPutData, UpdateSavedScriptApiV1ScriptsScriptIdPutError, UpdateSavedScriptApiV1ScriptsScriptIdPutErrors, UpdateSavedScriptApiV1ScriptsScriptIdPutResponse, UpdateSavedScriptApiV1ScriptsScriptIdPutResponses, UpdateSubscriptionApiV1NotificationsSubscriptionsChannelPutData, UpdateSubscriptionApiV1NotificationsSubscriptionsChannelPutError, UpdateSubscriptionApiV1NotificationsSubscriptionsChannelPutErrors, UpdateSubscriptionApiV1NotificationsSubscriptionsChannelPutResponse, 
UpdateSubscriptionApiV1NotificationsSubscriptionsChannelPutResponses, UpdateSystemSettingsApiV1AdminSettingsPutData, UpdateSystemSettingsApiV1AdminSettingsPutError, UpdateSystemSettingsApiV1AdminSettingsPutErrors, UpdateSystemSettingsApiV1AdminSettingsPutResponse, UpdateSystemSettingsApiV1AdminSettingsPutResponses, UpdateThemeApiV1UserSettingsThemePutData, UpdateThemeApiV1UserSettingsThemePutError, UpdateThemeApiV1UserSettingsThemePutErrors, UpdateThemeApiV1UserSettingsThemePutResponse, UpdateThemeApiV1UserSettingsThemePutResponses, UpdateUserApiV1AdminUsersUserIdPutData, UpdateUserApiV1AdminUsersUserIdPutError, UpdateUserApiV1AdminUsersUserIdPutErrors, UpdateUserApiV1AdminUsersUserIdPutResponse, UpdateUserApiV1AdminUsersUserIdPutResponses, UpdateUserRateLimitsApiV1AdminUsersUserIdRateLimitsPutData, UpdateUserRateLimitsApiV1AdminUsersUserIdRateLimitsPutError, UpdateUserRateLimitsApiV1AdminUsersUserIdRateLimitsPutErrors, UpdateUserRateLimitsApiV1AdminUsersUserIdRateLimitsPutResponse, UpdateUserRateLimitsApiV1AdminUsersUserIdRateLimitsPutResponses, UpdateUserSettingsApiV1UserSettingsPutData, UpdateUserSettingsApiV1UserSettingsPutError, UpdateUserSettingsApiV1UserSettingsPutErrors, UpdateUserSettingsApiV1UserSettingsPutResponse, UpdateUserSettingsApiV1UserSettingsPutResponses, UserCreate, UserListResponse, UserRateLimit, UserRateLimitConfigResponse, UserRateLimitsResponse, UserResponse, UserRole, UserSettings, UserSettingsUpdate, UserUpdate, ValidationError, VerifyTokenApiV1AuthVerifyTokenGetData, VerifyTokenApiV1AuthVerifyTokenGetResponse, VerifyTokenApiV1AuthVerifyTokenGetResponses } from './types.gen';
+export type { AdminUserOverview, AggregateEventsApiV1EventsAggregatePostData, AggregateEventsApiV1EventsAggregatePostError, AggregateEventsApiV1EventsAggregatePostErrors, AggregateEventsApiV1EventsAggregatePostResponse, AggregateEventsApiV1EventsAggregatePostResponses, AlertResponse, BodyLoginApiV1AuthLoginPost, BrowseEventsApiV1AdminEventsBrowsePostData, BrowseEventsApiV1AdminEventsBrowsePostError, BrowseEventsApiV1AdminEventsBrowsePostErrors, BrowseEventsApiV1AdminEventsBrowsePostResponse, BrowseEventsApiV1AdminEventsBrowsePostResponses, CancelExecutionApiV1ExecutionIdCancelPostData, CancelExecutionApiV1ExecutionIdCancelPostError, CancelExecutionApiV1ExecutionIdCancelPostErrors, CancelExecutionApiV1ExecutionIdCancelPostResponse, CancelExecutionApiV1ExecutionIdCancelPostResponses, CancelExecutionRequest, CancelReplaySessionApiV1ReplaySessionsSessionIdCancelPostData, CancelReplaySessionApiV1ReplaySessionsSessionIdCancelPostError, CancelReplaySessionApiV1ReplaySessionsSessionIdCancelPostErrors, CancelReplaySessionApiV1ReplaySessionsSessionIdCancelPostResponse, CancelReplaySessionApiV1ReplaySessionsSessionIdCancelPostResponses, CancelResponse, CancelSagaApiV1SagasSagaIdCancelPostData, CancelSagaApiV1SagasSagaIdCancelPostError, CancelSagaApiV1SagasSagaIdCancelPostErrors, CancelSagaApiV1SagasSagaIdCancelPostResponse, CancelSagaApiV1SagasSagaIdCancelPostResponses, CleanupOldSessionsApiV1ReplayCleanupPostData, CleanupOldSessionsApiV1ReplayCleanupPostError, CleanupOldSessionsApiV1ReplayCleanupPostErrors, CleanupOldSessionsApiV1ReplayCleanupPostResponse, CleanupOldSessionsApiV1ReplayCleanupPostResponses, CleanupResponse, ClientOptions, CreateExecutionApiV1ExecutePostData, CreateExecutionApiV1ExecutePostError, CreateExecutionApiV1ExecutePostErrors, CreateExecutionApiV1ExecutePostResponse, CreateExecutionApiV1ExecutePostResponses, CreateReplaySessionApiV1ReplaySessionsPostData, CreateReplaySessionApiV1ReplaySessionsPostError, 
CreateReplaySessionApiV1ReplaySessionsPostErrors, CreateReplaySessionApiV1ReplaySessionsPostResponse, CreateReplaySessionApiV1ReplaySessionsPostResponses, CreateSavedScriptApiV1ScriptsPostData, CreateSavedScriptApiV1ScriptsPostError, CreateSavedScriptApiV1ScriptsPostErrors, CreateSavedScriptApiV1ScriptsPostResponse, CreateSavedScriptApiV1ScriptsPostResponses, CreateUserApiV1AdminUsersPostData, CreateUserApiV1AdminUsersPostError, CreateUserApiV1AdminUsersPostErrors, CreateUserApiV1AdminUsersPostResponse, CreateUserApiV1AdminUsersPostResponses, DeleteEventApiV1AdminEventsEventIdDeleteData, DeleteEventApiV1AdminEventsEventIdDeleteError, DeleteEventApiV1AdminEventsEventIdDeleteErrors, DeleteEventApiV1AdminEventsEventIdDeleteResponse, DeleteEventApiV1AdminEventsEventIdDeleteResponses, DeleteEventApiV1EventsEventIdDeleteData, DeleteEventApiV1EventsEventIdDeleteError, DeleteEventApiV1EventsEventIdDeleteErrors, DeleteEventApiV1EventsEventIdDeleteResponse, DeleteEventApiV1EventsEventIdDeleteResponses, DeleteEventResponse, DeleteExecutionApiV1ExecutionIdDeleteData, DeleteExecutionApiV1ExecutionIdDeleteError, DeleteExecutionApiV1ExecutionIdDeleteErrors, DeleteExecutionApiV1ExecutionIdDeleteResponse, DeleteExecutionApiV1ExecutionIdDeleteResponses, DeleteNotificationApiV1NotificationsNotificationIdDeleteData, DeleteNotificationApiV1NotificationsNotificationIdDeleteError, DeleteNotificationApiV1NotificationsNotificationIdDeleteErrors, DeleteNotificationApiV1NotificationsNotificationIdDeleteResponse, DeleteNotificationApiV1NotificationsNotificationIdDeleteResponses, DeleteNotificationResponse, DeleteResponse, DeleteSavedScriptApiV1ScriptsScriptIdDeleteData, DeleteSavedScriptApiV1ScriptsScriptIdDeleteError, DeleteSavedScriptApiV1ScriptsScriptIdDeleteErrors, DeleteSavedScriptApiV1ScriptsScriptIdDeleteResponse, DeleteSavedScriptApiV1ScriptsScriptIdDeleteResponses, DeleteUserApiV1AdminUsersUserIdDeleteData, DeleteUserApiV1AdminUsersUserIdDeleteError, 
DeleteUserApiV1AdminUsersUserIdDeleteErrors, DeleteUserApiV1AdminUsersUserIdDeleteResponse, DeleteUserApiV1AdminUsersUserIdDeleteResponses, DeleteUserResponse, DerivedCounts, DiscardDlqMessageApiV1DlqMessagesEventIdDeleteData, DiscardDlqMessageApiV1DlqMessagesEventIdDeleteError, DiscardDlqMessageApiV1DlqMessagesEventIdDeleteErrors, DiscardDlqMessageApiV1DlqMessagesEventIdDeleteResponse, DiscardDlqMessageApiV1DlqMessagesEventIdDeleteResponses, DlqBatchRetryResponse, DlqMessageDetail, DlqMessageResponse, DlqMessagesResponse, DlqMessageStatus, DlqStats, DlqTopicSummaryResponse, EditorSettings, EndpointGroup, EventAggregationRequest, EventBrowseRequest, EventBrowseResponse, EventDeleteResponse, EventDetailResponse, EventFilter, EventFilterRequest, EventListResponse, EventMetadataResponse, EventReplayRequest, EventReplayResponse, EventReplayStatusResponse, EventReplayStatusResponseWritable, EventResponse, EventStatistics, EventStatsResponse, EventType, ExampleScripts, ExecutionErrorType, ExecutionEventResponse, ExecutionEventsApiV1EventsExecutionsExecutionIdGetData, ExecutionEventsApiV1EventsExecutionsExecutionIdGetError, ExecutionEventsApiV1EventsExecutionsExecutionIdGetErrors, ExecutionEventsApiV1EventsExecutionsExecutionIdGetResponses, ExecutionLimitsSchema, ExecutionListResponse, ExecutionRequest, ExecutionResponse, ExecutionResult, ExecutionStatus, ExportEventsCsvApiV1AdminEventsExportCsvGetData, ExportEventsCsvApiV1AdminEventsExportCsvGetError, ExportEventsCsvApiV1AdminEventsExportCsvGetErrors, ExportEventsCsvApiV1AdminEventsExportCsvGetResponses, ExportEventsJsonApiV1AdminEventsExportJsonGetData, ExportEventsJsonApiV1AdminEventsExportJsonGetError, ExportEventsJsonApiV1AdminEventsExportJsonGetErrors, ExportEventsJsonApiV1AdminEventsExportJsonGetResponses, GetCurrentRequestEventsApiV1EventsCurrentRequestGetData, GetCurrentRequestEventsApiV1EventsCurrentRequestGetError, GetCurrentRequestEventsApiV1EventsCurrentRequestGetErrors, 
GetCurrentRequestEventsApiV1EventsCurrentRequestGetResponse, GetCurrentRequestEventsApiV1EventsCurrentRequestGetResponses, GetCurrentUserProfileApiV1AuthMeGetData, GetCurrentUserProfileApiV1AuthMeGetResponse, GetCurrentUserProfileApiV1AuthMeGetResponses, GetDlqMessageApiV1DlqMessagesEventIdGetData, GetDlqMessageApiV1DlqMessagesEventIdGetError, GetDlqMessageApiV1DlqMessagesEventIdGetErrors, GetDlqMessageApiV1DlqMessagesEventIdGetResponse, GetDlqMessageApiV1DlqMessagesEventIdGetResponses, GetDlqMessagesApiV1DlqMessagesGetData, GetDlqMessagesApiV1DlqMessagesGetError, GetDlqMessagesApiV1DlqMessagesGetErrors, GetDlqMessagesApiV1DlqMessagesGetResponse, GetDlqMessagesApiV1DlqMessagesGetResponses, GetDlqStatisticsApiV1DlqStatsGetData, GetDlqStatisticsApiV1DlqStatsGetResponse, GetDlqStatisticsApiV1DlqStatsGetResponses, GetDlqTopicsApiV1DlqTopicsGetData, GetDlqTopicsApiV1DlqTopicsGetResponse, GetDlqTopicsApiV1DlqTopicsGetResponses, GetEventApiV1EventsEventIdGetData, GetEventApiV1EventsEventIdGetError, GetEventApiV1EventsEventIdGetErrors, GetEventApiV1EventsEventIdGetResponse, GetEventApiV1EventsEventIdGetResponses, GetEventDetailApiV1AdminEventsEventIdGetData, GetEventDetailApiV1AdminEventsEventIdGetError, GetEventDetailApiV1AdminEventsEventIdGetErrors, GetEventDetailApiV1AdminEventsEventIdGetResponse, GetEventDetailApiV1AdminEventsEventIdGetResponses, GetEventsByCorrelationApiV1EventsCorrelationCorrelationIdGetData, GetEventsByCorrelationApiV1EventsCorrelationCorrelationIdGetError, GetEventsByCorrelationApiV1EventsCorrelationCorrelationIdGetErrors, GetEventsByCorrelationApiV1EventsCorrelationCorrelationIdGetResponse, GetEventsByCorrelationApiV1EventsCorrelationCorrelationIdGetResponses, GetEventStatisticsApiV1EventsStatisticsGetData, GetEventStatisticsApiV1EventsStatisticsGetError, GetEventStatisticsApiV1EventsStatisticsGetErrors, GetEventStatisticsApiV1EventsStatisticsGetResponse, GetEventStatisticsApiV1EventsStatisticsGetResponses, 
GetEventStatsApiV1AdminEventsStatsGetData, GetEventStatsApiV1AdminEventsStatsGetError, GetEventStatsApiV1AdminEventsStatsGetErrors, GetEventStatsApiV1AdminEventsStatsGetResponse, GetEventStatsApiV1AdminEventsStatsGetResponses, GetExampleScriptsApiV1ExampleScriptsGetData, GetExampleScriptsApiV1ExampleScriptsGetResponse, GetExampleScriptsApiV1ExampleScriptsGetResponses, GetExecutionEventsApiV1EventsExecutionsExecutionIdEventsGetData, GetExecutionEventsApiV1EventsExecutionsExecutionIdEventsGetError, GetExecutionEventsApiV1EventsExecutionsExecutionIdEventsGetErrors, GetExecutionEventsApiV1EventsExecutionsExecutionIdEventsGetResponse, GetExecutionEventsApiV1EventsExecutionsExecutionIdEventsGetResponses, GetExecutionEventsApiV1ExecutionsExecutionIdEventsGetData, GetExecutionEventsApiV1ExecutionsExecutionIdEventsGetError, GetExecutionEventsApiV1ExecutionsExecutionIdEventsGetErrors, GetExecutionEventsApiV1ExecutionsExecutionIdEventsGetResponse, GetExecutionEventsApiV1ExecutionsExecutionIdEventsGetResponses, GetExecutionSagasApiV1SagasExecutionExecutionIdGetData, GetExecutionSagasApiV1SagasExecutionExecutionIdGetError, GetExecutionSagasApiV1SagasExecutionExecutionIdGetErrors, GetExecutionSagasApiV1SagasExecutionExecutionIdGetResponse, GetExecutionSagasApiV1SagasExecutionExecutionIdGetResponses, GetK8sResourceLimitsApiV1K8sLimitsGetData, GetK8sResourceLimitsApiV1K8sLimitsGetResponse, GetK8sResourceLimitsApiV1K8sLimitsGetResponses, GetNotificationsApiV1NotificationsGetData, GetNotificationsApiV1NotificationsGetError, GetNotificationsApiV1NotificationsGetErrors, GetNotificationsApiV1NotificationsGetResponse, GetNotificationsApiV1NotificationsGetResponses, GetReplaySessionApiV1ReplaySessionsSessionIdGetData, GetReplaySessionApiV1ReplaySessionsSessionIdGetError, GetReplaySessionApiV1ReplaySessionsSessionIdGetErrors, GetReplaySessionApiV1ReplaySessionsSessionIdGetResponse, GetReplaySessionApiV1ReplaySessionsSessionIdGetResponses, 
GetReplayStatusApiV1AdminEventsReplaySessionIdStatusGetData, GetReplayStatusApiV1AdminEventsReplaySessionIdStatusGetError, GetReplayStatusApiV1AdminEventsReplaySessionIdStatusGetErrors, GetReplayStatusApiV1AdminEventsReplaySessionIdStatusGetResponse, GetReplayStatusApiV1AdminEventsReplaySessionIdStatusGetResponses, GetResultApiV1ResultExecutionIdGetData, GetResultApiV1ResultExecutionIdGetError, GetResultApiV1ResultExecutionIdGetErrors, GetResultApiV1ResultExecutionIdGetResponse, GetResultApiV1ResultExecutionIdGetResponses, GetSagaStatusApiV1SagasSagaIdGetData, GetSagaStatusApiV1SagasSagaIdGetError, GetSagaStatusApiV1SagasSagaIdGetErrors, GetSagaStatusApiV1SagasSagaIdGetResponse, GetSagaStatusApiV1SagasSagaIdGetResponses, GetSavedScriptApiV1ScriptsScriptIdGetData, GetSavedScriptApiV1ScriptsScriptIdGetError, GetSavedScriptApiV1ScriptsScriptIdGetErrors, GetSavedScriptApiV1ScriptsScriptIdGetResponse, GetSavedScriptApiV1ScriptsScriptIdGetResponses, GetSettingsHistoryApiV1UserSettingsHistoryGetData, GetSettingsHistoryApiV1UserSettingsHistoryGetError, GetSettingsHistoryApiV1UserSettingsHistoryGetErrors, GetSettingsHistoryApiV1UserSettingsHistoryGetResponse, GetSettingsHistoryApiV1UserSettingsHistoryGetResponses, GetSubscriptionsApiV1NotificationsSubscriptionsGetData, GetSubscriptionsApiV1NotificationsSubscriptionsGetResponse, GetSubscriptionsApiV1NotificationsSubscriptionsGetResponses, GetSystemSettingsApiV1AdminSettingsGetData, GetSystemSettingsApiV1AdminSettingsGetResponse, GetSystemSettingsApiV1AdminSettingsGetResponses, GetUnreadCountApiV1NotificationsUnreadCountGetData, GetUnreadCountApiV1NotificationsUnreadCountGetResponse, GetUnreadCountApiV1NotificationsUnreadCountGetResponses, GetUserApiV1AdminUsersUserIdGetData, GetUserApiV1AdminUsersUserIdGetError, GetUserApiV1AdminUsersUserIdGetErrors, GetUserApiV1AdminUsersUserIdGetResponse, GetUserApiV1AdminUsersUserIdGetResponses, GetUserEventsApiV1EventsUserGetData, GetUserEventsApiV1EventsUserGetError, 
GetUserEventsApiV1EventsUserGetErrors, GetUserEventsApiV1EventsUserGetResponse, GetUserEventsApiV1EventsUserGetResponses, GetUserExecutionsApiV1UserExecutionsGetData, GetUserExecutionsApiV1UserExecutionsGetError, GetUserExecutionsApiV1UserExecutionsGetErrors, GetUserExecutionsApiV1UserExecutionsGetResponse, GetUserExecutionsApiV1UserExecutionsGetResponses, GetUserOverviewApiV1AdminUsersUserIdOverviewGetData, GetUserOverviewApiV1AdminUsersUserIdOverviewGetError, GetUserOverviewApiV1AdminUsersUserIdOverviewGetErrors, GetUserOverviewApiV1AdminUsersUserIdOverviewGetResponse, GetUserOverviewApiV1AdminUsersUserIdOverviewGetResponses, GetUserRateLimitsApiV1AdminUsersUserIdRateLimitsGetData, GetUserRateLimitsApiV1AdminUsersUserIdRateLimitsGetError, GetUserRateLimitsApiV1AdminUsersUserIdRateLimitsGetErrors, GetUserRateLimitsApiV1AdminUsersUserIdRateLimitsGetResponse, GetUserRateLimitsApiV1AdminUsersUserIdRateLimitsGetResponses, GetUserSettingsApiV1UserSettingsGetData, GetUserSettingsApiV1UserSettingsGetResponse, GetUserSettingsApiV1UserSettingsGetResponses, GrafanaAlertItem, GrafanaWebhook, HourlyEventCountSchema, HttpValidationError, LanguageInfo, ListEventTypesApiV1EventsTypesListGetData, ListEventTypesApiV1EventsTypesListGetResponse, ListEventTypesApiV1EventsTypesListGetResponses, ListReplaySessionsApiV1ReplaySessionsGetData, ListReplaySessionsApiV1ReplaySessionsGetError, ListReplaySessionsApiV1ReplaySessionsGetErrors, ListReplaySessionsApiV1ReplaySessionsGetResponse, ListReplaySessionsApiV1ReplaySessionsGetResponses, ListSagasApiV1SagasGetData, ListSagasApiV1SagasGetError, ListSagasApiV1SagasGetErrors, ListSagasApiV1SagasGetResponse, ListSagasApiV1SagasGetResponses, ListSavedScriptsApiV1ScriptsGetData, ListSavedScriptsApiV1ScriptsGetResponse, ListSavedScriptsApiV1ScriptsGetResponses, ListUsersApiV1AdminUsersGetData, ListUsersApiV1AdminUsersGetError, ListUsersApiV1AdminUsersGetErrors, ListUsersApiV1AdminUsersGetResponse, ListUsersApiV1AdminUsersGetResponses, 
LivenessApiV1HealthLiveGetData, LivenessApiV1HealthLiveGetResponse, LivenessApiV1HealthLiveGetResponses, LivenessResponse, LoginApiV1AuthLoginPostData, LoginApiV1AuthLoginPostError, LoginApiV1AuthLoginPostErrors, LoginApiV1AuthLoginPostResponse, LoginApiV1AuthLoginPostResponses, LoginResponse, LogoutApiV1AuthLogoutPostData, LogoutApiV1AuthLogoutPostResponse, LogoutApiV1AuthLogoutPostResponses, ManualRetryRequest, MarkAllReadApiV1NotificationsMarkAllReadPostData, MarkAllReadApiV1NotificationsMarkAllReadPostResponse, MarkAllReadApiV1NotificationsMarkAllReadPostResponses, MarkNotificationReadApiV1NotificationsNotificationIdReadPutData, MarkNotificationReadApiV1NotificationsNotificationIdReadPutError, MarkNotificationReadApiV1NotificationsNotificationIdReadPutErrors, MarkNotificationReadApiV1NotificationsNotificationIdReadPutResponse, MarkNotificationReadApiV1NotificationsNotificationIdReadPutResponses, MessageResponse, MonitoringSettingsSchema, NotificationChannel, NotificationListResponse, NotificationResponse, NotificationSettings, NotificationSeverity, NotificationStatus, NotificationStreamApiV1EventsNotificationsStreamGetData, NotificationStreamApiV1EventsNotificationsStreamGetResponses, NotificationSubscription, PasswordResetRequest, PauseReplaySessionApiV1ReplaySessionsSessionIdPausePostData, PauseReplaySessionApiV1ReplaySessionsSessionIdPausePostError, PauseReplaySessionApiV1ReplaySessionsSessionIdPausePostErrors, PauseReplaySessionApiV1ReplaySessionsSessionIdPausePostResponse, PauseReplaySessionApiV1ReplaySessionsSessionIdPausePostResponses, PublishCustomEventApiV1EventsPublishPostData, PublishCustomEventApiV1EventsPublishPostError, PublishCustomEventApiV1EventsPublishPostErrors, PublishCustomEventApiV1EventsPublishPostResponse, PublishCustomEventApiV1EventsPublishPostResponses, PublishEventRequest, PublishEventResponse, QueryEventsApiV1EventsQueryPostData, QueryEventsApiV1EventsQueryPostError, QueryEventsApiV1EventsQueryPostErrors, 
QueryEventsApiV1EventsQueryPostResponse, QueryEventsApiV1EventsQueryPostResponses, RateLimitAlgorithm, RateLimitRule, RateLimitRuleResponse, RateLimitSummary, RateLimitUpdateResponse, ReadinessApiV1HealthReadyGetData, ReadinessApiV1HealthReadyGetResponse, ReadinessApiV1HealthReadyGetResponses, ReadinessResponse, ReceiveGrafanaAlertsApiV1AlertsGrafanaPostData, ReceiveGrafanaAlertsApiV1AlertsGrafanaPostError, ReceiveGrafanaAlertsApiV1AlertsGrafanaPostErrors, ReceiveGrafanaAlertsApiV1AlertsGrafanaPostResponse, ReceiveGrafanaAlertsApiV1AlertsGrafanaPostResponses, RegisterApiV1AuthRegisterPostData, RegisterApiV1AuthRegisterPostError, RegisterApiV1AuthRegisterPostErrors, RegisterApiV1AuthRegisterPostResponse, RegisterApiV1AuthRegisterPostResponses, ReplayAggregateEventsApiV1EventsReplayAggregateIdPostData, ReplayAggregateEventsApiV1EventsReplayAggregateIdPostError, ReplayAggregateEventsApiV1EventsReplayAggregateIdPostErrors, ReplayAggregateEventsApiV1EventsReplayAggregateIdPostResponse, ReplayAggregateEventsApiV1EventsReplayAggregateIdPostResponses, ReplayAggregateResponse, ReplayConfigSchema, ReplayErrorInfo, ReplayEventsApiV1AdminEventsReplayPostData, ReplayEventsApiV1AdminEventsReplayPostError, ReplayEventsApiV1AdminEventsReplayPostErrors, ReplayEventsApiV1AdminEventsReplayPostResponse, ReplayEventsApiV1AdminEventsReplayPostResponses, ReplayFilter, ReplayFilterSchema, ReplayRequest, ReplayResponse, ReplaySession, ReplayStatus, ReplayTarget, ReplayType, ResetSystemSettingsApiV1AdminSettingsResetPostData, ResetSystemSettingsApiV1AdminSettingsResetPostResponse, ResetSystemSettingsApiV1AdminSettingsResetPostResponses, ResetUserPasswordApiV1AdminUsersUserIdResetPasswordPostData, ResetUserPasswordApiV1AdminUsersUserIdResetPasswordPostError, ResetUserPasswordApiV1AdminUsersUserIdResetPasswordPostErrors, ResetUserPasswordApiV1AdminUsersUserIdResetPasswordPostResponse, ResetUserPasswordApiV1AdminUsersUserIdResetPasswordPostResponses, 
ResetUserRateLimitsApiV1AdminUsersUserIdRateLimitsResetPostData, ResetUserRateLimitsApiV1AdminUsersUserIdRateLimitsResetPostError, ResetUserRateLimitsApiV1AdminUsersUserIdRateLimitsResetPostErrors, ResetUserRateLimitsApiV1AdminUsersUserIdRateLimitsResetPostResponse, ResetUserRateLimitsApiV1AdminUsersUserIdRateLimitsResetPostResponses, ResourceLimits, ResourceUsage, RestoreSettingsApiV1UserSettingsRestorePostData, RestoreSettingsApiV1UserSettingsRestorePostError, RestoreSettingsApiV1UserSettingsRestorePostErrors, RestoreSettingsApiV1UserSettingsRestorePostResponse, RestoreSettingsApiV1UserSettingsRestorePostResponses, RestoreSettingsRequest, ResumeReplaySessionApiV1ReplaySessionsSessionIdResumePostData, ResumeReplaySessionApiV1ReplaySessionsSessionIdResumePostError, ResumeReplaySessionApiV1ReplaySessionsSessionIdResumePostErrors, ResumeReplaySessionApiV1ReplaySessionsSessionIdResumePostResponse, ResumeReplaySessionApiV1ReplaySessionsSessionIdResumePostResponses, RetryDlqMessagesApiV1DlqRetryPostData, RetryDlqMessagesApiV1DlqRetryPostError, RetryDlqMessagesApiV1DlqRetryPostErrors, RetryDlqMessagesApiV1DlqRetryPostResponse, RetryDlqMessagesApiV1DlqRetryPostResponses, RetryExecutionApiV1ExecutionIdRetryPostData, RetryExecutionApiV1ExecutionIdRetryPostError, RetryExecutionApiV1ExecutionIdRetryPostErrors, RetryExecutionApiV1ExecutionIdRetryPostResponse, RetryExecutionApiV1ExecutionIdRetryPostResponses, RetryExecutionRequest, RetryPolicyRequest, RetryStrategy, SagaCancellationResponse, SagaListResponse, SagaState, SagaStatusResponse, SavedScriptCreateRequest, SavedScriptResponse, SavedScriptUpdate, SecuritySettingsSchema, SessionSummary, SessionSummaryWritable, SetRetryPolicyApiV1DlqRetryPolicyPostData, SetRetryPolicyApiV1DlqRetryPolicyPostError, SetRetryPolicyApiV1DlqRetryPolicyPostErrors, SetRetryPolicyApiV1DlqRetryPolicyPostResponse, SetRetryPolicyApiV1DlqRetryPolicyPostResponses, SettingsHistoryEntry, SettingsHistoryResponse, ShutdownStatusResponse, SortOrder, 
SseHealthApiV1EventsHealthGetData, SseHealthApiV1EventsHealthGetResponse, SseHealthApiV1EventsHealthGetResponses, SseHealthResponse, StartReplaySessionApiV1ReplaySessionsSessionIdStartPostData, StartReplaySessionApiV1ReplaySessionsSessionIdStartPostError, StartReplaySessionApiV1ReplaySessionsSessionIdStartPostErrors, StartReplaySessionApiV1ReplaySessionsSessionIdStartPostResponse, StartReplaySessionApiV1ReplaySessionsSessionIdStartPostResponses, SubscriptionsResponse, SubscriptionUpdate, SystemSettings, TestGrafanaAlertEndpointApiV1AlertsGrafanaTestGetData, TestGrafanaAlertEndpointApiV1AlertsGrafanaTestGetResponse, TestGrafanaAlertEndpointApiV1AlertsGrafanaTestGetResponses, Theme, ThemeUpdateRequest, TokenValidationResponse, UnreadCountResponse, UpdateCustomSettingApiV1UserSettingsCustomKeyPutData, UpdateCustomSettingApiV1UserSettingsCustomKeyPutError, UpdateCustomSettingApiV1UserSettingsCustomKeyPutErrors, UpdateCustomSettingApiV1UserSettingsCustomKeyPutResponse, UpdateCustomSettingApiV1UserSettingsCustomKeyPutResponses, UpdateEditorSettingsApiV1UserSettingsEditorPutData, UpdateEditorSettingsApiV1UserSettingsEditorPutError, UpdateEditorSettingsApiV1UserSettingsEditorPutErrors, UpdateEditorSettingsApiV1UserSettingsEditorPutResponse, UpdateEditorSettingsApiV1UserSettingsEditorPutResponses, UpdateNotificationSettingsApiV1UserSettingsNotificationsPutData, UpdateNotificationSettingsApiV1UserSettingsNotificationsPutError, UpdateNotificationSettingsApiV1UserSettingsNotificationsPutErrors, UpdateNotificationSettingsApiV1UserSettingsNotificationsPutResponse, UpdateNotificationSettingsApiV1UserSettingsNotificationsPutResponses, UpdateSavedScriptApiV1ScriptsScriptIdPutData, UpdateSavedScriptApiV1ScriptsScriptIdPutError, UpdateSavedScriptApiV1ScriptsScriptIdPutErrors, UpdateSavedScriptApiV1ScriptsScriptIdPutResponse, UpdateSavedScriptApiV1ScriptsScriptIdPutResponses, UpdateSubscriptionApiV1NotificationsSubscriptionsChannelPutData, 
UpdateSubscriptionApiV1NotificationsSubscriptionsChannelPutError, UpdateSubscriptionApiV1NotificationsSubscriptionsChannelPutErrors, UpdateSubscriptionApiV1NotificationsSubscriptionsChannelPutResponse, UpdateSubscriptionApiV1NotificationsSubscriptionsChannelPutResponses, UpdateSystemSettingsApiV1AdminSettingsPutData, UpdateSystemSettingsApiV1AdminSettingsPutError, UpdateSystemSettingsApiV1AdminSettingsPutErrors, UpdateSystemSettingsApiV1AdminSettingsPutResponse, UpdateSystemSettingsApiV1AdminSettingsPutResponses, UpdateThemeApiV1UserSettingsThemePutData, UpdateThemeApiV1UserSettingsThemePutError, UpdateThemeApiV1UserSettingsThemePutErrors, UpdateThemeApiV1UserSettingsThemePutResponse, UpdateThemeApiV1UserSettingsThemePutResponses, UpdateUserApiV1AdminUsersUserIdPutData, UpdateUserApiV1AdminUsersUserIdPutError, UpdateUserApiV1AdminUsersUserIdPutErrors, UpdateUserApiV1AdminUsersUserIdPutResponse, UpdateUserApiV1AdminUsersUserIdPutResponses, UpdateUserRateLimitsApiV1AdminUsersUserIdRateLimitsPutData, UpdateUserRateLimitsApiV1AdminUsersUserIdRateLimitsPutError, UpdateUserRateLimitsApiV1AdminUsersUserIdRateLimitsPutErrors, UpdateUserRateLimitsApiV1AdminUsersUserIdRateLimitsPutResponse, UpdateUserRateLimitsApiV1AdminUsersUserIdRateLimitsPutResponses, UpdateUserSettingsApiV1UserSettingsPutData, UpdateUserSettingsApiV1UserSettingsPutError, UpdateUserSettingsApiV1UserSettingsPutErrors, UpdateUserSettingsApiV1UserSettingsPutResponse, UpdateUserSettingsApiV1UserSettingsPutResponses, UserCreate, UserEventCountSchema, UserListResponse, UserRateLimit, UserRateLimitConfigResponse, UserRateLimitsResponse, UserResponse, UserRole, UserSettings, UserSettingsUpdate, UserUpdate, ValidationError, VerifyTokenApiV1AuthVerifyTokenGetData, VerifyTokenApiV1AuthVerifyTokenGetResponse, VerifyTokenApiV1AuthVerifyTokenGetResponses } from './types.gen';
diff --git a/frontend/src/lib/api/sdk.gen.ts b/frontend/src/lib/api/sdk.gen.ts
index 503b996..5b5b33f 100644
--- a/frontend/src/lib/api/sdk.gen.ts
+++ b/frontend/src/lib/api/sdk.gen.ts
@@ -699,9 +699,11 @@ export const getSagaStatusApiV1SagasSagaIdGet =
| null;
/**
* Estimated Completion
*/
@@ -892,9 +938,11 @@ export type EventReplayStatusResponse = {
/**
* Execution Results
*/
- execution_results?: Array<{
- [key: string]: unknown;
- }> | null;
+ execution_results?: Array<ExecutionResult> | null;
+ /**
+ * Progress Percentage
+ */
+ readonly progress_percentage: number;
};
/**
@@ -926,12 +974,7 @@ export type EventResponse = {
* Causation Id
*/
causation_id?: string | null;
- /**
- * Metadata
- */
- metadata: {
- [key: string]: unknown;
- };
+ metadata: EventMetadataResponse;
/**
* Payload
*/
@@ -969,9 +1012,7 @@ export type EventStatistics = {
/**
* Events By Hour
*/
- events_by_hour: Array<{
- [key: string]: unknown;
- }>;
+ events_by_hour: Array<HourlyEventCountSchema>;
/**
* Start Time
*/
@@ -1001,15 +1042,11 @@ export type EventStatsResponse = {
/**
* Events By Hour
*/
- events_by_hour: Array<{
- [key: string]: unknown;
- }>;
+ events_by_hour: Array<HourlyEventCountSchema>;
/**
* Top Users
*/
- top_users: Array<{
- [key: string]: unknown;
- }>;
+ top_users: Array<UserEventCountSchema>;
/**
* Error Rate
*/
@@ -1284,6 +1321,22 @@ export type HttpValidationError = {
detail?: Array<ValidationError>;
};
+/**
+ * HourlyEventCountSchema
+ *
+ * Hourly event count for statistics.
+ */
+export type HourlyEventCountSchema = {
+ /**
+ * Hour
+ */
+ hour: string;
+ /**
+ * Count
+ */
+ count: number;
+};
+
/**
* LanguageInfo
*
@@ -1841,7 +1894,7 @@ export type ReplayAggregateResponse = {
export type ReplayConfigSchema = {
replay_type: ReplayType;
target?: ReplayTarget;
- filter: ReplayFilterSchema;
+ filter?: ReplayFilterSchema;
/**
* Speed Multiplier
*/
@@ -1887,9 +1940,33 @@ export type ReplayConfigSchema = {
};
/**
- * ReplayFilterSchema
+ * ReplayErrorInfo
+ *
+ * Error info for replay operations.
*/
-export type ReplayFilterSchema = {
+export type ReplayErrorInfo = {
+ /**
+ * Timestamp
+ */
+ timestamp: string;
+ /**
+ * Error
+ */
+ error: string;
+ /**
+ * Event Id
+ */
+ event_id?: string | null;
+ /**
+ * Error Type
+ */
+ error_type?: string | null;
+};
+
+/**
+ * ReplayFilter
+ */
+export type ReplayFilter = {
/**
* Execution Id
*/
@@ -1897,7 +1974,7 @@ export type ReplayFilterSchema = {
/**
* Event Types
*/
- event_types?: Array<string> | null;
+ event_types?: Array<EventType> | null;
/**
* Start Time
*/
@@ -1923,17 +2000,13 @@ export type ReplayFilterSchema = {
/**
* Exclude Event Types
*/
- exclude_event_types?: Array<string> | null;
+ exclude_event_types?: Array<EventType> | null;
};
/**
- * ReplayRequest
- *
- * Request schema for creating replay sessions
+ * ReplayFilterSchema
*/
-export type ReplayRequest = {
- replay_type: ReplayType;
- target?: ReplayTarget;
+export type ReplayFilterSchema = {
/**
* Execution Id
*/
@@ -1958,6 +2031,27 @@ export type ReplayRequest = {
* Service Name
*/
service_name?: string | null;
+ /**
+ * Custom Query
+ */
+ custom_query?: {
+ [key: string]: unknown;
+ } | null;
+ /**
+ * Exclude Event Types
+ */
+ exclude_event_types?: Array<EventType> | null;
+};
+
+/**
+ * ReplayRequest
+ *
+ * Request schema for creating replay sessions
+ */
+export type ReplayRequest = {
+ replay_type: ReplayType;
+ target?: ReplayTarget;
+ filter?: ReplayFilter;
/**
* Speed Multiplier
*/
@@ -1982,6 +2076,24 @@ export type ReplayRequest = {
* Target File Path
*/
target_file_path?: string | null;
+ /**
+ * Target Topics
+ */
+ target_topics?: {
+ [key: string]: string;
+ } | null;
+ /**
+ * Retry Failed
+ */
+ retry_failed?: boolean;
+ /**
+ * Retry Attempts
+ */
+ retry_attempts?: number;
+ /**
+ * Enable Progress Tracking
+ */
+ enable_progress_tracking?: boolean;
};
/**
@@ -2108,28 +2220,20 @@ export type ResourceLimits = {
export type ResourceUsage = {
/**
* Execution Time Wall Seconds
- *
- * Wall clock execution time in seconds
*/
- execution_time_wall_seconds?: number | null;
+ execution_time_wall_seconds?: number;
/**
* Cpu Time Jiffies
- *
- * CPU time in jiffies (multiply by 10 for milliseconds)
*/
- cpu_time_jiffies?: number | null;
+ cpu_time_jiffies?: number;
/**
* Clk Tck Hertz
- *
- * Clock ticks per second (usually 100)
*/
- clk_tck_hertz?: number | null;
+ clk_tck_hertz?: number;
/**
* Peak Memory Kb
- *
- * Peak memory usage in KB
*/
- peak_memory_kb?: number | null;
+ peak_memory_kb?: number;
};
/**
@@ -2292,6 +2396,18 @@ export type SagaListResponse = {
* Total
*/
total: number;
+ /**
+ * Skip
+ */
+ skip: number;
+ /**
+ * Limit
+ */
+ limit: number;
+ /**
+ * Has More
+ */
+ has_more: boolean;
};
/**
@@ -2418,6 +2534,36 @@ export type SavedScriptResponse = {
updated_at: string;
};
+/**
+ * SavedScriptUpdate
+ */
+export type SavedScriptUpdate = {
+ /**
+ * Name
+ */
+ name?: string | null;
+ /**
+ * Script
+ */
+ script?: string | null;
+ /**
+ * Lang
+ */
+ lang?: string | null;
+ /**
+ * Lang Version
+ */
+ lang_version?: string | null;
+ /**
+ * Description
+ */
+ description?: string | null;
+ /**
+ * Updated At
+ */
+ updated_at?: string;
+};
+
/**
* SecuritySettingsSchema
*
@@ -2494,11 +2640,11 @@ export type SessionSummary = {
/**
* Duration Seconds
*/
- duration_seconds?: number | null;
+ readonly duration_seconds: number | null;
/**
* Throughput Events Per Second
*/
- throughput_events_per_second?: number | null;
+ readonly throughput_events_per_second: number | null;
};
/**
@@ -2537,7 +2683,7 @@ export type SettingsHistoryEntry = {
/**
* SettingsHistoryResponse
*
- * Response model for settings history
+ * Response model for settings history (limited snapshot of recent changes)
*/
export type SettingsHistoryResponse = {
/**
@@ -2545,9 +2691,9 @@ export type SettingsHistoryResponse = {
*/
history: Array<SettingsHistoryEntry>;
/**
- * Total
+ * Limit
*/
- total: number;
+ limit: number;
};
/**
@@ -2753,6 +2899,22 @@ export type UserCreate = {
password: string;
};
+/**
+ * UserEventCountSchema
+ *
+ * User event count schema
+ */
+export type UserEventCountSchema = {
+ /**
+ * User Id
+ */
+ user_id: string;
+ /**
+ * Event Count
+ */
+ event_count: number;
+};
+
/**
* UserListResponse
*
@@ -3041,6 +3203,109 @@ export type ValidationError = {
type: string;
};
+/**
+ * EventReplayStatusResponse
+ *
+ * Response model for replay status
+ */
+export type EventReplayStatusResponseWritable = {
+ /**
+ * Session Id
+ */
+ session_id: string;
+ /**
+ * Status
+ */
+ status: string;
+ /**
+ * Total Events
+ */
+ total_events: number;
+ /**
+ * Replayed Events
+ */
+ replayed_events: number;
+ /**
+ * Failed Events
+ */
+ failed_events: number;
+ /**
+ * Skipped Events
+ */
+ skipped_events: number;
+ /**
+ * Correlation Id
+ */
+ correlation_id: string;
+ /**
+ * Created At
+ */
+ created_at: string;
+ /**
+ * Started At
+ */
+ started_at?: string | null;
+ /**
+ * Completed At
+ */
+ completed_at?: string | null;
+ /**
+ * Errors
+ */
+ errors?: Array<ReplayErrorInfo> | null;
+ /**
+ * Estimated Completion
+ */
+ estimated_completion?: string | null;
+ /**
+ * Execution Results
+ */
+ execution_results?: Array<ExecutionResult> | null;
+};
+
+/**
+ * SessionSummary
+ *
+ * Summary information for replay sessions
+ */
+export type SessionSummaryWritable = {
+ /**
+ * Session Id
+ */
+ session_id: string;
+ replay_type: ReplayType;
+ target: ReplayTarget;
+ status: ReplayStatus;
+ /**
+ * Total Events
+ */
+ total_events: number;
+ /**
+ * Replayed Events
+ */
+ replayed_events: number;
+ /**
+ * Failed Events
+ */
+ failed_events: number;
+ /**
+ * Skipped Events
+ */
+ skipped_events: number;
+ /**
+ * Created At
+ */
+ created_at: string;
+ /**
+ * Started At
+ */
+ started_at: string | null;
+ /**
+ * Completed At
+ */
+ completed_at: string | null;
+};
+
export type LoginApiV1AuthLoginPostData = {
body: BodyLoginApiV1AuthLoginPost;
path?: never;
@@ -3519,7 +3784,7 @@ export type GetSavedScriptApiV1ScriptsScriptIdGetResponses = {
export type GetSavedScriptApiV1ScriptsScriptIdGetResponse = GetSavedScriptApiV1ScriptsScriptIdGetResponses[keyof GetSavedScriptApiV1ScriptsScriptIdGetResponses];
export type UpdateSavedScriptApiV1ScriptsScriptIdPutData = {
- body: SavedScriptCreateRequest;
+ body: SavedScriptUpdate;
path: {
/**
* Script Id
@@ -3852,7 +4117,7 @@ export type GetDlqMessagesApiV1DlqMessagesGetData = {
/**
* Event Type
*/
- event_type?: string | null;
+ event_type?: EventType | null;
/**
* Limit
*/
@@ -4091,6 +4356,14 @@ export type GetExecutionEventsApiV1EventsExecutionsExecutionIdEventsGetData = {
* Include system-generated events
*/
include_system_events?: boolean;
+ /**
+ * Limit
+ */
+ limit?: number;
+ /**
+ * Skip
+ */
+ skip?: number;
};
url: '/api/v1/events/executions/{execution_id}/events';
};
@@ -4204,6 +4477,10 @@ export type GetEventsByCorrelationApiV1EventsCorrelationCorrelationIdGetData = {
* Limit
*/
limit?: number;
+ /**
+ * Skip
+ */
+ skip?: number;
};
url: '/api/v1/events/correlation/{correlation_id}';
};
@@ -4234,6 +4511,10 @@ export type GetCurrentRequestEventsApiV1EventsCurrentRequestGetData = {
* Limit
*/
limit?: number;
+ /**
+ * Skip
+ */
+ skip?: number;
};
url: '/api/v1/events/current-request';
};
@@ -5574,6 +5855,14 @@ export type GetExecutionSagasApiV1SagasExecutionExecutionIdGetData = {
* Filter by saga state
*/
state?: SagaState | null;
+ /**
+ * Limit
+ */
+ limit?: number;
+ /**
+ * Skip
+ */
+ skip?: number;
};
url: '/api/v1/sagas/execution/{execution_id}';
};
@@ -5611,9 +5900,9 @@ export type ListSagasApiV1SagasGetData = {
*/
limit?: number;
/**
- * Offset
+ * Skip
*/
- offset?: number;
+ skip?: number;
};
url: '/api/v1/sagas/';
};
From dcda01d8c193cb5c5efd5ebb497d43e485b67300 Mon Sep 17 00:00:00 2001
From: HardMax71
Date: Tue, 30 Dec 2025 23:29:33 +0100
Subject: [PATCH 36/42] ruff + unit tests fixes
---
.../admin/admin_events_repository.py | 4 +-
.../tests/unit/events/test_metadata_model.py | 14 ---
.../schemas_pydantic/test_events_schemas.py | 98 +----------------
.../test_execution_schemas.py | 1 -
.../test_health_dashboard_schemas.py | 100 ------------------
.../test_notification_schemas.py | 42 +-------
.../test_replay_models_schemas.py | 44 --------
.../schemas_pydantic/test_saga_schemas.py | 26 -----
8 files changed, 7 insertions(+), 322 deletions(-)
delete mode 100644 backend/tests/unit/schemas_pydantic/test_health_dashboard_schemas.py
delete mode 100644 backend/tests/unit/schemas_pydantic/test_replay_models_schemas.py
delete mode 100644 backend/tests/unit/schemas_pydantic/test_saga_schemas.py
diff --git a/backend/app/db/repositories/admin/admin_events_repository.py b/backend/app/db/repositories/admin/admin_events_repository.py
index 70630cc..1190752 100644
--- a/backend/app/db/repositories/admin/admin_events_repository.py
+++ b/backend/app/db/repositories/admin/admin_events_repository.py
@@ -83,7 +83,9 @@ async def get_event_detail(self, event_id: str) -> EventDetail | None:
return None
doc_fields = set(EventStoreDocument.model_fields.keys()) - {"id", "revision_id"}
- event = Event(**{**doc.model_dump(include=doc_fields), "metadata": DomainEventMetadata(**doc.metadata.model_dump())})
+ event = Event(
+ **{**doc.model_dump(include=doc_fields), "metadata": DomainEventMetadata(**doc.metadata.model_dump())}
+ )
related_query = {"metadata.correlation_id": doc.metadata.correlation_id, "event_id": {"$ne": event_id}}
related_docs = await (
diff --git a/backend/tests/unit/events/test_metadata_model.py b/backend/tests/unit/events/test_metadata_model.py
index 94afa34..71440ce 100644
--- a/backend/tests/unit/events/test_metadata_model.py
+++ b/backend/tests/unit/events/test_metadata_model.py
@@ -1,20 +1,6 @@
from app.infrastructure.kafka.events.metadata import AvroEventMetadata
-def test_to_dict() -> None:
- m = AvroEventMetadata(service_name="svc", service_version="1.0")
- d = m.to_dict()
- assert d["service_name"] == "svc"
- assert d["service_version"] == "1.0"
-
-
-def test_from_dict() -> None:
- m = AvroEventMetadata.from_dict({"service_name": "a", "service_version": "2", "user_id": "u"})
- assert m.service_name == "a"
- assert m.service_version == "2"
- assert m.user_id == "u"
-
-
def test_with_correlation() -> None:
m = AvroEventMetadata(service_name="svc", service_version="1")
m2 = m.with_correlation("cid")
diff --git a/backend/tests/unit/schemas_pydantic/test_events_schemas.py b/backend/tests/unit/schemas_pydantic/test_events_schemas.py
index 13b99bd..30ef50c 100644
--- a/backend/tests/unit/schemas_pydantic/test_events_schemas.py
+++ b/backend/tests/unit/schemas_pydantic/test_events_schemas.py
@@ -1,25 +1,7 @@
-import math
-from datetime import datetime, timezone, timedelta
-
import pytest
+from app.schemas_pydantic.events import EventFilterRequest
from app.domain.enums.common import SortOrder
-from app.domain.enums.events import EventType
-from app.infrastructure.kafka.events.metadata import AvroEventMetadata
-from app.schemas_pydantic.events import (
- EventAggregationRequest,
- EventBase,
- EventFilterRequest,
- EventInDB,
- EventListResponse,
- EventProjection,
- EventQuery,
- EventResponse,
- EventStatistics,
- PublishEventRequest,
- PublishEventResponse,
- ResourceUsage,
-)
def test_event_filter_request_sort_validator_accepts_allowed_fields():
@@ -34,81 +16,3 @@ def test_event_filter_request_sort_validator_accepts_allowed_fields():
def test_event_filter_request_sort_validator_rejects_invalid():
with pytest.raises(ValueError):
EventFilterRequest(sort_by="not-a-field")
-
-
-def test_event_base_and_in_db_defaults_and_metadata():
- meta = AvroEventMetadata(service_name="tests", service_version="1.0", user_id="u1")
- ev = EventBase(
- event_type=EventType.EXECUTION_REQUESTED,
- metadata=meta,
- payload={"execution_id": "e1"},
- )
- assert ev.event_id and ev.timestamp.tzinfo is not None
- edb = EventInDB(**ev.model_dump())
- assert isinstance(edb.stored_at, datetime)
- assert isinstance(edb.ttl_expires_at, datetime)
- # ttl should be after stored_at by ~30 days
- assert edb.ttl_expires_at > edb.stored_at
-
-
-def test_publish_event_request_and_response():
- req = PublishEventRequest(
- event_type=EventType.EXECUTION_REQUESTED,
- payload={"x": 1},
- aggregate_id="agg",
- )
- assert req.event_type is EventType.EXECUTION_REQUESTED
- resp = PublishEventResponse(event_id="e", status="queued", timestamp=datetime.now(timezone.utc))
- assert resp.status == "queued"
-
-
-def test_event_query_schema_and_list_response():
- q = EventQuery(
- event_types=[EventType.EXECUTION_REQUESTED, EventType.POD_CREATED],
- user_id="u1",
- start_time=datetime.now(timezone.utc) - timedelta(hours=1),
- end_time=datetime.now(timezone.utc),
- limit=50,
- skip=0,
- )
- assert len(q.event_types or []) == 2 and q.limit == 50
-
- # Minimal list response compose/decompose
- from app.schemas_pydantic.events import EventMetadataResponse
- er = EventResponse(
- event_id="id",
- event_type=EventType.POD_CREATED,
- event_version="1.0",
- timestamp=datetime.now(timezone.utc),
- metadata=EventMetadataResponse(
- service_name="test", service_version="1.0", correlation_id="cid-1"
- ),
- payload={},
- )
- lst = EventListResponse(events=[er], total=1, limit=1, skip=0, has_more=False)
- assert lst.total == 1 and not lst.has_more
-
-
-def test_event_projection_and_statistics_examples():
- proj = EventProjection(
- name="exec_summary",
- source_events=[EventType.EXECUTION_REQUESTED, EventType.EXECUTION_COMPLETED],
- aggregation_pipeline=[{"$match": {"event_type": str(EventType.EXECUTION_REQUESTED)}}],
- output_collection="summary",
- )
- assert proj.refresh_interval_seconds == 300
-
- stats = EventStatistics(
- total_events=2,
- events_by_type={str(EventType.EXECUTION_REQUESTED): 1},
- events_by_service={"svc": 2},
- events_by_hour=[{"hour": "2025-01-01 00:00", "count": 2}],
- )
- assert stats.total_events == 2
-
-
-def test_resource_usage_schema():
- ru = ResourceUsage(cpu_seconds=1.5, memory_mb_seconds=256.0, disk_io_mb=10.0, network_io_mb=5.0)
- dumped = ru.model_dump()
- assert math.isclose(dumped["cpu_seconds"], 1.5)
-
diff --git a/backend/tests/unit/schemas_pydantic/test_execution_schemas.py b/backend/tests/unit/schemas_pydantic/test_execution_schemas.py
index 2ff863f..38e5940 100644
--- a/backend/tests/unit/schemas_pydantic/test_execution_schemas.py
+++ b/backend/tests/unit/schemas_pydantic/test_execution_schemas.py
@@ -20,4 +20,3 @@ def test_execution_request_unsupported_version_raises():
with pytest.raises(ValueError) as e:
ExecutionRequest(script="print(1)", lang="python", lang_version="9.9")
assert "Version '9.9' not supported for python" in str(e.value)
-
diff --git a/backend/tests/unit/schemas_pydantic/test_health_dashboard_schemas.py b/backend/tests/unit/schemas_pydantic/test_health_dashboard_schemas.py
deleted file mode 100644
index fb1f0d0..0000000
--- a/backend/tests/unit/schemas_pydantic/test_health_dashboard_schemas.py
+++ /dev/null
@@ -1,100 +0,0 @@
-from datetime import datetime, timezone
-
-from app.schemas_pydantic.health_dashboard import (
- CategoryHealthResponse,
- CategoryHealthStatistics,
- CategoryServices,
- DependencyEdge,
- DependencyGraph,
- DependencyNode,
- DetailedHealthStatus,
- HealthAlert,
- HealthCheckConfig,
- HealthCheckState,
- HealthDashboardResponse,
- HealthMetricsSummary,
- HealthStatistics,
- HealthTrend,
- ServiceHealth,
- ServiceHealthDetails,
- ServiceHistoryDataPoint,
- ServiceHistoryResponse,
- ServiceHistorySummary,
- ServiceRealtimeStatus,
- ServiceDependenciesResponse,
-)
-from app.domain.enums.health import AlertSeverity
-
-
-def _now() -> datetime:
- return datetime.now(timezone.utc)
-
-
-def test_alert_and_metrics_and_trend_models():
- alert = HealthAlert(
- id="a1", severity=AlertSeverity.CRITICAL, service="backend", status="unhealthy", message="down",
- timestamp=_now(), duration_ms=12.3
- )
- assert alert.severity is AlertSeverity.CRITICAL
-
- metrics = HealthMetricsSummary(
- total_checks=10, healthy_checks=7, failed_checks=3, avg_check_duration_ms=5.5, total_failures_24h=3, uptime_percentage_24h=99.1
- )
- assert metrics.total_checks == 10
-
- trend = HealthTrend(timestamp=_now(), status="ok", healthy_count=10, unhealthy_count=0, degraded_count=0)
- assert trend.healthy_count == 10
-
-
-def test_service_health_and_dashboard_models():
- svc = ServiceHealth(name="backend", status="healthy", uptime_percentage=99.9, last_check=_now(), message="ok", critical=False)
- dash = HealthDashboardResponse(
- overall_status="healthy", last_updated=_now(), services=[svc], statistics={"total": 1}, alerts=[], trends=[]
- )
- assert dash.overall_status == "healthy"
-
-
-def test_category_services_and_detailed_status():
- cat = CategoryServices(status="healthy", message="ok", duration_ms=1.0, details={"k": "v"})
- stats = HealthStatistics(total_checks=10, healthy=9, degraded=1, unhealthy=0, unknown=0)
- detailed = DetailedHealthStatus(
- timestamp=_now().isoformat(), overall_status="healthy", categories={"core": {"db": cat}}, statistics=stats
- )
- assert detailed.categories["core"]["db"].status == "healthy"
-
-
-def test_dependency_graph_and_service_dependencies():
- nodes = [DependencyNode(id="svcA", label="Service A", status="healthy", critical=False, message="ok")]
- edges = [DependencyEdge(**{"from": "svcA", "to": "svcB", "critical": True})]
- graph = DependencyGraph(nodes=nodes, edges=edges)
- assert graph.edges[0].from_service == "svcA" and graph.edges[0].to_service == "svcB"
-
- from app.schemas_pydantic.health_dashboard import ServiceImpactAnalysis
- impact = {"svcA": ServiceImpactAnalysis(status="ok", affected_services=[], is_critical=False)}
- dep = ServiceDependenciesResponse(
- dependency_graph=graph,
- impact_analysis=impact,
- total_services=1,
- healthy_services=1,
- critical_services_down=0,
- )
- assert dep.total_services == 1
-
-
-def test_service_health_details_and_history():
- cfg = HealthCheckConfig(type="http", critical=True, interval_seconds=10.0, timeout_seconds=2.0, failure_threshold=3)
- state = HealthCheckState(consecutive_failures=0, consecutive_successes=5)
- details = ServiceHealthDetails(
- name="backend", status="healthy", message="ok", duration_ms=1.2, timestamp=_now(), check_config=cfg, state=state
- )
- assert details.state.consecutive_successes == 5
-
- dp = ServiceHistoryDataPoint(timestamp=_now(), status="ok", duration_ms=1.0, healthy=True)
- summary = ServiceHistorySummary(uptime_percentage=99.9, total_checks=10, healthy_checks=9, failure_count=1)
- hist = ServiceHistoryResponse(service_name="backend", time_range_hours=24, data_points=[dp], summary=summary)
- assert hist.time_range_hours == 24
-
-
-def test_realtime_status_model():
- rt = ServiceRealtimeStatus(status="ok", message="fine", duration_ms=2.0, last_check=_now(), details={})
- assert rt.status == "ok"
diff --git a/backend/tests/unit/schemas_pydantic/test_notification_schemas.py b/backend/tests/unit/schemas_pydantic/test_notification_schemas.py
index 00a0c7d..14b304b 100644
--- a/backend/tests/unit/schemas_pydantic/test_notification_schemas.py
+++ b/backend/tests/unit/schemas_pydantic/test_notification_schemas.py
@@ -1,19 +1,12 @@
from datetime import UTC, datetime, timedelta
import pytest
+
from app.domain.enums.notification import NotificationChannel, NotificationSeverity, NotificationStatus
-from app.schemas_pydantic.notification import (
- Notification,
- NotificationBatch,
- NotificationListResponse,
- NotificationResponse,
- NotificationStats,
- NotificationSubscription,
- SubscriptionUpdate,
-)
+from app.schemas_pydantic.notification import Notification, NotificationBatch
-def test_notification_scheduled_for_future_validation():
+def test_notification_scheduled_for_must_be_future():
n = Notification(
user_id="u1",
channel=NotificationChannel.IN_APP,
@@ -43,35 +36,6 @@ def test_notification_batch_validation_limits():
with pytest.raises(ValueError):
NotificationBatch(notifications=[])
- # Upper bound: >1000 should fail
many = [n1.model_copy() for _ in range(1001)]
with pytest.raises(ValueError):
NotificationBatch(notifications=many)
-
-
-def test_notification_response_and_list():
- n = Notification(user_id="u1", channel=NotificationChannel.IN_APP, subject="s", body="b")
- resp = NotificationResponse(
- notification_id=n.notification_id,
- channel=n.channel,
- status=n.status,
- subject=n.subject,
- body=n.body,
- action_url=None,
- created_at=n.created_at,
- read_at=None,
- severity=n.severity,
- tags=[],
- )
- lst = NotificationListResponse(notifications=[resp], total=1, unread_count=1)
- assert lst.unread_count == 1
-
-
-def test_subscription_models_and_stats():
- sub = NotificationSubscription(user_id="u1", channel=NotificationChannel.IN_APP)
- upd = SubscriptionUpdate(enabled=True)
- assert sub.enabled is True and upd.enabled is True
-
- now = datetime.now(UTC)
- stats = NotificationStats(start_date=now - timedelta(days=1), end_date=now)
- assert stats.total_sent == 0 and stats.delivery_rate == 0.0
diff --git a/backend/tests/unit/schemas_pydantic/test_replay_models_schemas.py b/backend/tests/unit/schemas_pydantic/test_replay_models_schemas.py
deleted file mode 100644
index 98fff48..0000000
--- a/backend/tests/unit/schemas_pydantic/test_replay_models_schemas.py
+++ /dev/null
@@ -1,44 +0,0 @@
-from datetime import datetime, timezone
-
-from app.domain.enums.events import EventType
-from app.domain.enums.replay import ReplayStatus, ReplayTarget, ReplayType
-from app.domain.replay.models import ReplayConfig as DomainReplayConfig, ReplayFilter as DomainReplayFilter
-from app.schemas_pydantic.replay_models import ReplayConfigSchema, ReplayFilterSchema, ReplaySession
-
-
-def test_replay_filter_schema_from_domain():
- df = DomainReplayFilter(
- execution_id="e1",
- event_types=[EventType.EXECUTION_REQUESTED],
- exclude_event_types=[EventType.POD_CREATED],
- start_time=datetime.now(timezone.utc),
- end_time=datetime.now(timezone.utc),
- user_id="u1",
- service_name="svc",
- custom_query={"x": 1},
- )
- sf = ReplayFilterSchema.from_domain(df)
- assert sf.event_types == [str(EventType.EXECUTION_REQUESTED)]
- assert sf.exclude_event_types == [str(EventType.POD_CREATED)]
-
-
-def test_replay_config_schema_from_domain_and_key_conversion():
- df = DomainReplayFilter(event_types=[EventType.EXECUTION_REQUESTED])
- cfg = DomainReplayConfig(
- replay_type=ReplayType.TIME_RANGE,
- target=ReplayTarget.KAFKA,
- filter=df,
- target_topics={EventType.EXECUTION_REQUESTED: "execution-events"},
- max_events=10,
- )
- sc = ReplayConfigSchema.model_validate(cfg)
- assert sc.target_topics == {str(EventType.EXECUTION_REQUESTED): "execution-events"}
- assert sc.max_events == 10
-
-
-def test_replay_session_coerces_config_from_domain():
- df = DomainReplayFilter()
- cfg = DomainReplayConfig(replay_type=ReplayType.TIME_RANGE, filter=df)
- session = ReplaySession(config=cfg)
- assert session.status == ReplayStatus.CREATED
- assert isinstance(session.config, ReplayConfigSchema)
diff --git a/backend/tests/unit/schemas_pydantic/test_saga_schemas.py b/backend/tests/unit/schemas_pydantic/test_saga_schemas.py
deleted file mode 100644
index 290446c..0000000
--- a/backend/tests/unit/schemas_pydantic/test_saga_schemas.py
+++ /dev/null
@@ -1,26 +0,0 @@
-from datetime import datetime, timezone
-
-from app.domain.enums.saga import SagaState
-from app.domain.saga.models import Saga
-from app.schemas_pydantic.saga import SagaStatusResponse
-
-
-def test_saga_status_response_from_domain():
- s = Saga(
- saga_id="s1",
- saga_name="exec-saga",
- execution_id="e1",
- state=SagaState.RUNNING,
- current_step="allocate",
- completed_steps=["validate"],
- compensated_steps=[],
- error_message=None,
- created_at=datetime.now(timezone.utc),
- updated_at=datetime.now(timezone.utc),
- completed_at=None,
- retry_count=1,
- )
- resp = SagaStatusResponse.from_domain(s)
- assert resp.saga_id == "s1" and resp.current_step == "allocate"
- assert resp.created_at.endswith("Z") is False # isoformat without enforced Z; just ensure string
-
From 981ae0a5c1f4445a9df4422c168a934eb5c2165b Mon Sep 17 00:00:00 2001
From: HardMax71
Date: Tue, 30 Dec 2025 23:50:55 +0100
Subject: [PATCH 37/42] ruff + unit tests fixes
---
backend/app/core/container.py | 3 +++
backend/app/core/dishka_lifespan.py | 11 ++++-------
backend/app/core/providers.py | 18 ++++++++++++++++++
3 files changed, 25 insertions(+), 7 deletions(-)
diff --git a/backend/app/core/container.py b/backend/app/core/container.py
index 7976e61..c7f8b9d 100644
--- a/backend/app/core/container.py
+++ b/backend/app/core/container.py
@@ -7,6 +7,7 @@
BusinessServicesProvider,
ConnectionProvider,
CoreServicesProvider,
+ DatabaseProvider,
EventProvider,
LoggingProvider,
MessagingProvider,
@@ -24,6 +25,7 @@ def create_app_container() -> AsyncContainer:
return make_async_container(
SettingsProvider(),
LoggingProvider(),
+ DatabaseProvider(),
RedisProvider(),
CoreServicesProvider(),
MessagingProvider(),
@@ -45,6 +47,7 @@ def create_result_processor_container() -> AsyncContainer:
return make_async_container(
SettingsProvider(),
LoggingProvider(),
+ DatabaseProvider(),
CoreServicesProvider(),
ConnectionProvider(),
RedisProvider(),
diff --git a/backend/app/core/dishka_lifespan.py b/backend/app/core/dishka_lifespan.py
index 9944867..038fd18 100644
--- a/backend/app/core/dishka_lifespan.py
+++ b/backend/app/core/dishka_lifespan.py
@@ -6,8 +6,8 @@
from beanie import init_beanie
from dishka import AsyncContainer
from fastapi import FastAPI
-from pymongo.asynchronous.mongo_client import AsyncMongoClient
+from app.core.database_context import Database
from app.core.startup import initialize_metrics_context, initialize_rate_limits
from app.core.tracing import init_tracing
from app.db.docs import ALL_DOCUMENTS
@@ -69,11 +69,9 @@ async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
schema_registry = await container.get(SchemaRegistryManager)
await initialize_event_schemas(schema_registry)
- # Initialize Beanie ODM with PyMongo async database
- db_client: AsyncMongoClient[dict[str, object]] = AsyncMongoClient(
- settings.MONGODB_URL, tz_aware=True, serverSelectionTimeoutMS=5000
- )
- await init_beanie(database=db_client[settings.DATABASE_NAME], document_models=ALL_DOCUMENTS)
+ # Initialize Beanie ODM with database from DI container
+ database = await container.get(Database)
+ await init_beanie(database=database, document_models=ALL_DOCUMENTS)
logger.info(f"Beanie ODM initialized with {len(ALL_DOCUMENTS)} document models")
# Initialize metrics context with instances from DI container
@@ -93,7 +91,6 @@ async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
event_store_consumer = await container.get(EventStoreConsumer)
async with AsyncExitStack() as stack:
- stack.callback(db_client.close)
await stack.enter_async_context(sse_bridge)
logger.info("SSE Kafka→Redis bridge started with consumer pool")
await stack.enter_async_context(event_store_consumer)
diff --git a/backend/app/core/providers.py b/backend/app/core/providers.py
index 9733db3..49f3c7d 100644
--- a/backend/app/core/providers.py
+++ b/backend/app/core/providers.py
@@ -3,7 +3,9 @@
import redis.asyncio as redis
from dishka import Provider, Scope, provide
+from pymongo.asynchronous.mongo_client import AsyncMongoClient
+from app.core.database_context import Database
from app.core.k8s_clients import K8sClients, close_k8s_clients, create_k8s_clients
from app.core.logging import setup_logger
from app.core.metrics import (
@@ -118,6 +120,22 @@ def get_rate_limit_service(
return RateLimitService(redis_client, settings, rate_limit_metrics)
+class DatabaseProvider(Provider):
+ scope = Scope.APP
+
+ @provide
+ async def get_database(self, settings: Settings, logger: logging.Logger) -> AsyncIterator[Database]:
+ client: AsyncMongoClient[dict[str, object]] = AsyncMongoClient(
+ settings.MONGODB_URL, tz_aware=True, serverSelectionTimeoutMS=5000
+ )
+ database = client[settings.DATABASE_NAME]
+ logger.info(f"MongoDB connected: {settings.DATABASE_NAME}")
+ try:
+ yield database
+ finally:
+ client.close()
+
+
class CoreServicesProvider(Provider):
scope = Scope.APP
From 295436ceb918356a7c3a161abea78d2bb0d2ae90 Mon Sep 17 00:00:00 2001
From: HardMax71
Date: Tue, 30 Dec 2025 23:54:44 +0100
Subject: [PATCH 38/42] providers fix
---
backend/app/core/providers.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/backend/app/core/providers.py b/backend/app/core/providers.py
index 49f3c7d..0ab818d 100644
--- a/backend/app/core/providers.py
+++ b/backend/app/core/providers.py
@@ -133,7 +133,7 @@ async def get_database(self, settings: Settings, logger: logging.Logger) -> Asyn
try:
yield database
finally:
- client.close()
+ await client.close()
class CoreServicesProvider(Provider):
From 87b2dffe1b285e2661d951e823d63f334e37a579 Mon Sep 17 00:00:00 2001
From: HardMax71
Date: Wed, 31 Dec 2025 00:07:25 +0100
Subject: [PATCH 39/42] e2e test fix
---
backend/app/api/routes/auth.py | 10 ++++++++++
.../integration/k8s/test_k8s_worker_create_pod.py | 15 +++++----------
backend/tests/integration/test_saga_routes.py | 9 ++++-----
3 files changed, 19 insertions(+), 15 deletions(-)
diff --git a/backend/app/api/routes/auth.py b/backend/app/api/routes/auth.py
index e8263cd..606d57f 100644
--- a/backend/app/api/routes/auth.py
+++ b/backend/app/api/routes/auth.py
@@ -5,6 +5,7 @@
from dishka.integrations.fastapi import DishkaRoute
from fastapi import APIRouter, Depends, HTTPException, Request, Response
from fastapi.security import OAuth2PasswordRequestForm
+from pymongo.errors import DuplicateKeyError
from app.core.security import security_service
from app.core.utils import get_client_ip
@@ -181,6 +182,15 @@ async def register(
updated_at=created_user.updated_at,
)
+ except DuplicateKeyError as e:
+ logger.warning(
+ "Registration failed - duplicate email",
+ extra={
+ "username": user.username,
+ "client_ip": get_client_ip(request),
+ },
+ )
+ raise HTTPException(status_code=409, detail="Email already registered") from e
except Exception as e:
logger.error(
f"Registration failed - database error: {str(e)}",
diff --git a/backend/tests/integration/k8s/test_k8s_worker_create_pod.py b/backend/tests/integration/k8s/test_k8s_worker_create_pod.py
index 1222db8..732ce09 100644
--- a/backend/tests/integration/k8s/test_k8s_worker_create_pod.py
+++ b/backend/tests/integration/k8s/test_k8s_worker_create_pod.py
@@ -3,18 +3,15 @@
import uuid
import pytest
-from kubernetes.client.rest import ApiException
-
+from app.events.core import UnifiedProducer
+from app.events.event_store import EventStore
+from app.events.schema.schema_registry import SchemaRegistryManager
from app.infrastructure.kafka.events.metadata import AvroEventMetadata
from app.infrastructure.kafka.events.saga import CreatePodCommandEvent
+from app.services.idempotency import IdempotencyManager
from app.services.k8s_worker.config import K8sWorkerConfig
from app.services.k8s_worker.worker import KubernetesWorker
-
-from app.core.database_context import Database
-from app.events.event_store import EventStore
-from app.events.schema.schema_registry import SchemaRegistryManager
-from app.events.core import UnifiedProducer
-from app.services.idempotency import IdempotencyManager
+from kubernetes.client.rest import ApiException
pytestmark = [pytest.mark.integration, pytest.mark.k8s]
@@ -29,7 +26,6 @@ async def test_worker_creates_configmap_and_pod(scope, monkeypatch): # type: ig
ns = "integr8scode"
monkeypatch.setenv("K8S_NAMESPACE", ns)
- database: Database = await scope.get(Database)
schema: SchemaRegistryManager = await scope.get(SchemaRegistryManager)
store: EventStore = await scope.get(EventStore)
producer: UnifiedProducer = await scope.get(UnifiedProducer)
@@ -38,7 +34,6 @@ async def test_worker_creates_configmap_and_pod(scope, monkeypatch): # type: ig
cfg = K8sWorkerConfig(namespace=ns, max_concurrent_pods=1)
worker = KubernetesWorker(
config=cfg,
- database=database,
producer=producer,
schema_registry_manager=schema,
event_store=store,
diff --git a/backend/tests/integration/test_saga_routes.py b/backend/tests/integration/test_saga_routes.py
index 81cb137..b26d7b9 100644
--- a/backend/tests/integration/test_saga_routes.py
+++ b/backend/tests/integration/test_saga_routes.py
@@ -1,15 +1,14 @@
-import uuid
import asyncio
+import uuid
from typing import Dict
import pytest
-from httpx import AsyncClient
-
from app.domain.enums.saga import SagaState
from app.schemas_pydantic.saga import (
SagaListResponse,
SagaStatusResponse,
)
+from httpx import AsyncClient
class TestSagaRoutes:
@@ -34,7 +33,7 @@ async def test_get_saga_not_found(
saga_id = str(uuid.uuid4())
response = await client.get(f"/api/v1/sagas/{saga_id}")
assert response.status_code == 404
- assert "Saga not found" in response.json()["detail"]
+ assert "not found" in response.json()["detail"]
@pytest.mark.asyncio
async def test_get_execution_sagas_requires_auth(
@@ -196,7 +195,7 @@ async def test_cancel_saga_not_found(
saga_id = str(uuid.uuid4())
response = await client.post(f"/api/v1/sagas/{saga_id}/cancel")
assert response.status_code == 404
- assert "Saga not found" in response.json()["detail"]
+ assert "not found" in response.json()["detail"]
@pytest.mark.asyncio
async def test_saga_access_control(
From 690f44444b89b21352932c7b7eb4b08d7fff1acd Mon Sep 17 00:00:00 2001
From: HardMax71
Date: Wed, 31 Dec 2025 00:36:14 +0100
Subject: [PATCH 40/42] e2e test fix
---
.../app/db/repositories/event_repository.py | 6 +-
backend/app/services/kafka_event_service.py | 6 +-
backend/app/services/user_settings_service.py | 2 +-
.../test_admin_events_repository.py | 66 ++++++++---
.../test_admin_settings_repository.py | 8 +-
.../test_admin_user_repository.py | 112 +++++++++++-------
.../db/repositories/test_dlq_repository.py | 5 +-
.../db/repositories/test_event_repository.py | 27 +++--
.../repositories/test_execution_repository.py | 2 +-
.../test_notification_repository.py | 84 ++++++++++---
.../db/repositories/test_replay_repository.py | 42 +++++--
.../db/repositories/test_saga_repository.py | 42 +++++--
.../test_saved_script_repository.py | 9 +-
.../db/repositories/test_sse_repository.py | 24 ++--
.../db/repositories/test_user_repository.py | 14 ++-
.../test_user_settings_repository.py | 43 +++----
.../integration/events/test_admin_utils.py | 3 +-
.../events/test_consume_roundtrip.py | 8 +-
.../events/test_consumer_group_monitor.py | 4 +-
.../events/test_consumer_group_monitor_e2e.py | 15 ++-
.../integration/events/test_dlq_handler.py | 22 ++--
.../events/test_event_dispatcher.py | 3 +-
.../integration/events/test_event_store.py | 19 +--
.../events/test_event_store_consumer.py | 8 +-
.../test_event_store_consumer_flush_e2e.py | 17 ++-
.../events/test_event_store_e2e.py | 20 +---
.../integration/events/test_producer_e2e.py | 17 +--
.../events/test_schema_registry_e2e.py | 5 +-
.../events/test_schema_registry_real.py | 2 +-
29 files changed, 383 insertions(+), 252 deletions(-)
diff --git a/backend/app/db/repositories/event_repository.py b/backend/app/db/repositories/event_repository.py
index dc2a4e3..901f72f 100644
--- a/backend/app/db/repositories/event_repository.py
+++ b/backend/app/db/repositories/event_repository.py
@@ -39,7 +39,8 @@ def _build_time_filter(self, start_time: datetime | None, end_time: datetime | N
async def store_event(self, event: Event) -> str:
data = asdict(event)
- data["metadata"] = {k: (v.value if hasattr(v, "value") else v) for k, v in asdict(event.metadata).items()}
+ meta = event.metadata.model_dump() if hasattr(event.metadata, "model_dump") else asdict(event.metadata)
+ data["metadata"] = {k: (v.value if hasattr(v, "value") else v) for k, v in meta.items()}
if not data.get("stored_at"):
data["stored_at"] = datetime.now(timezone.utc)
# Remove None values so EventDocument defaults can apply (e.g., ttl_expires_at)
@@ -64,7 +65,8 @@ async def store_events_batch(self, events: list[Event]) -> list[str]:
docs = []
for event in events:
data = asdict(event)
- data["metadata"] = {k: (v.value if hasattr(v, "value") else v) for k, v in asdict(event.metadata).items()}
+ meta = event.metadata.model_dump() if hasattr(event.metadata, "model_dump") else asdict(event.metadata)
+ data["metadata"] = {k: (v.value if hasattr(v, "value") else v) for k, v in meta.items()}
if not data.get("stored_at"):
data["stored_at"] = now
# Remove None values so EventDocument defaults can apply
diff --git a/backend/app/services/kafka_event_service.py b/backend/app/services/kafka_event_service.py
index 07b76bf..25a13f4 100644
--- a/backend/app/services/kafka_event_service.py
+++ b/backend/app/services/kafka_event_service.py
@@ -72,8 +72,10 @@ async def publish_event(
event_id = str(uuid4())
timestamp = datetime.now(timezone.utc)
- # Convert to domain metadata for storage
- domain_metadata = DomainEventMetadata(**avro_metadata.model_dump())
+ # Convert to domain metadata for storage (only include defined fields)
+ domain_metadata = DomainEventMetadata(
+ **avro_metadata.model_dump(include=set(DomainEventMetadata.__dataclass_fields__))
+ )
event = Event(
event_id=event_id,
diff --git a/backend/app/services/user_settings_service.py b/backend/app/services/user_settings_service.py
index 12a7f3e..ce51aaf 100644
--- a/backend/app/services/user_settings_service.py
+++ b/backend/app/services/user_settings_service.py
@@ -332,7 +332,7 @@ async def _get_settings_events(
event_type=et,
timestamp=e.timestamp,
payload=e.payload,
- correlation_id=e.correlation_id,
+ correlation_id=e.metadata.correlation_id if e.metadata else None,
)
)
return out
diff --git a/backend/tests/integration/db/repositories/test_admin_events_repository.py b/backend/tests/integration/db/repositories/test_admin_events_repository.py
index b6a84d4..de46ca1 100644
--- a/backend/tests/integration/db/repositories/test_admin_events_repository.py
+++ b/backend/tests/integration/db/repositories/test_admin_events_repository.py
@@ -1,31 +1,46 @@
-from datetime import datetime, timezone, timedelta
+from datetime import datetime, timedelta, timezone
import pytest
-
-from app.db.docs import ReplaySessionDocument
from app.db.docs.replay import ReplayConfig, ReplayFilter
from app.db.repositories.admin.admin_events_repository import AdminEventsRepository
from app.domain.admin import ReplayQuery
from app.domain.admin.replay_updates import ReplaySessionUpdate
-from app.domain.enums.replay import ReplayStatus, ReplayType, ReplayTarget
-from app.domain.events.event_models import EventFilter, EventStatistics, Event
+from app.domain.enums.replay import ReplayStatus, ReplayTarget, ReplayType
+from app.domain.events.event_models import Event, EventFilter, EventStatistics
+from app.domain.replay.models import ReplaySessionState
from app.infrastructure.kafka.events.metadata import AvroEventMetadata
pytestmark = pytest.mark.integration
@pytest.fixture()
-def repo() -> AdminEventsRepository:
- return AdminEventsRepository()
+async def repo(scope) -> AdminEventsRepository: # type: ignore[valid-type]
+ return await scope.get(AdminEventsRepository)
@pytest.mark.asyncio
async def test_browse_detail_delete_and_export(repo: AdminEventsRepository, db) -> None: # type: ignore[valid-type]
now = datetime.now(timezone.utc)
- await db.get_collection("events").insert_many([
- {"event_id": "e1", "event_type": "X", "timestamp": now, "metadata": AvroEventMetadata(service_name="svc", service_version="1", correlation_id="c1").to_dict()},
- {"event_id": "e2", "event_type": "X", "timestamp": now, "metadata": AvroEventMetadata(service_name="svc", service_version="1", correlation_id="c1").to_dict()},
- ])
+ await db.get_collection("events").insert_many(
+ [
+ {
+ "event_id": "e1",
+ "event_type": "X",
+ "timestamp": now,
+ "metadata": AvroEventMetadata(
+ service_name="svc", service_version="1", correlation_id="c1"
+ ).model_dump(),
+ },
+ {
+ "event_id": "e2",
+ "event_type": "X",
+ "timestamp": now,
+ "metadata": AvroEventMetadata(
+ service_name="svc", service_version="1", correlation_id="c1"
+ ).model_dump(),
+ },
+ ]
+ )
res = await repo.browse_events(EventFilter())
assert res.total >= 2
detail = await repo.get_event_detail("e1")
@@ -38,13 +53,29 @@ async def test_browse_detail_delete_and_export(repo: AdminEventsRepository, db)
@pytest.mark.asyncio
async def test_event_stats_and_archive(repo: AdminEventsRepository, db) -> None: # type: ignore[valid-type]
now = datetime.now(timezone.utc)
- await db.get_collection("events").insert_many([
- {"event_id": "e10", "event_type": "step.completed", "timestamp": now, "metadata": AvroEventMetadata(service_name="svc", service_version="1", user_id="u1").to_dict()},
- ])
- await db.get_collection("executions").insert_one({"created_at": now, "status": "completed", "resource_usage": {"execution_time_wall_seconds": 1.25}})
+ await db.get_collection("events").insert_many(
+ [
+ {
+ "event_id": "e10",
+ "event_type": "step.completed",
+ "timestamp": now,
+ "metadata": AvroEventMetadata(service_name="svc", service_version="1", user_id="u1").model_dump(),
+ },
+ ]
+ )
+ await db.get_collection("executions").insert_one(
+ {"created_at": now, "status": "completed", "resource_usage": {"execution_time_wall_seconds": 1.25}}
+ )
stats = await repo.get_event_stats(hours=1)
assert isinstance(stats, EventStatistics)
- ev = Event(event_id="a1", event_type="X", event_version="1.0", timestamp=now, metadata=AvroEventMetadata(service_name="s", service_version="1"), payload={})
+ ev = Event(
+ event_id="a1",
+ event_type="X",
+ event_version="1.0",
+ timestamp=now,
+ metadata=AvroEventMetadata(service_name="s", service_version="1"),
+ payload={},
+ )
assert await repo.archive_event(ev, deleted_by="admin") is True
@@ -56,7 +87,7 @@ async def test_replay_session_flow_and_helpers(repo: AdminEventsRepository, db)
target=ReplayTarget.TEST,
filter=ReplayFilter(),
)
- session = ReplaySessionDocument(
+ session = ReplaySessionState(
session_id="s1",
config=config,
status=ReplayStatus.SCHEDULED,
@@ -76,4 +107,3 @@ async def test_replay_session_flow_and_helpers(repo: AdminEventsRepository, db)
assert await repo.count_events_for_replay(ReplayQuery()) >= 0
prev = await repo.get_replay_events_preview(event_ids=["e10"]) # from earlier insert
assert isinstance(prev, dict)
-
diff --git a/backend/tests/integration/db/repositories/test_admin_settings_repository.py b/backend/tests/integration/db/repositories/test_admin_settings_repository.py
index 11edf3a..7c19cf5 100644
--- a/backend/tests/integration/db/repositories/test_admin_settings_repository.py
+++ b/backend/tests/integration/db/repositories/test_admin_settings_repository.py
@@ -1,17 +1,13 @@
-import logging
import pytest
-
from app.db.repositories.admin.admin_settings_repository import AdminSettingsRepository
from app.domain.admin import SystemSettings
pytestmark = pytest.mark.integration
-_test_logger = logging.getLogger("test.db.repositories.admin_settings_repository")
-
@pytest.fixture()
-def repo(db) -> AdminSettingsRepository: # type: ignore[valid-type]
- return AdminSettingsRepository(db, logger=_test_logger)
+async def repo(scope) -> AdminSettingsRepository: # type: ignore[valid-type]
+ return await scope.get(AdminSettingsRepository)
@pytest.mark.asyncio
diff --git a/backend/tests/integration/db/repositories/test_admin_user_repository.py b/backend/tests/integration/db/repositories/test_admin_user_repository.py
index 10dd72c..e65658a 100644
--- a/backend/tests/integration/db/repositories/test_admin_user_repository.py
+++ b/backend/tests/integration/db/repositories/test_admin_user_repository.py
@@ -1,32 +1,34 @@
-import pytest
from datetime import datetime, timezone
-from app.db.repositories.admin.admin_user_repository import AdminUserRepository
-from app.domain.user import UserFields, UserUpdate, PasswordReset
+import pytest
from app.core.security import SecurityService
+from app.db.repositories.admin.admin_user_repository import AdminUserRepository
+from app.domain.user import PasswordReset, UserFields, UserUpdate
pytestmark = pytest.mark.integration
@pytest.fixture()
-def repo(db) -> AdminUserRepository: # type: ignore[valid-type]
- return AdminUserRepository(db)
+async def repo(scope) -> AdminUserRepository: # type: ignore[valid-type]
+ return await scope.get(AdminUserRepository)
@pytest.mark.asyncio
async def test_list_and_get_user(repo: AdminUserRepository, db) -> None: # type: ignore[valid-type]
# Insert a user
- await db.get_collection("users").insert_one({
- UserFields.USER_ID: "u1",
- UserFields.USERNAME: "alice",
- UserFields.EMAIL: "alice@example.com",
- UserFields.ROLE: "user",
- UserFields.IS_ACTIVE: True,
- UserFields.IS_SUPERUSER: False,
- UserFields.HASHED_PASSWORD: "h",
- UserFields.CREATED_AT: datetime.now(timezone.utc),
- UserFields.UPDATED_AT: datetime.now(timezone.utc),
- })
+ await db.get_collection("users").insert_one(
+ {
+ UserFields.USER_ID: "u1",
+ UserFields.USERNAME: "alice",
+ UserFields.EMAIL: "alice@example.com",
+ UserFields.ROLE: "user",
+ UserFields.IS_ACTIVE: True,
+ UserFields.IS_SUPERUSER: False,
+ UserFields.HASHED_PASSWORD: "h",
+ UserFields.CREATED_AT: datetime.now(timezone.utc),
+ UserFields.UPDATED_AT: datetime.now(timezone.utc),
+ }
+ )
res = await repo.list_users(limit=10)
assert res.total >= 1 and any(u.username == "alice" for u in res.users)
user = await repo.get_user_by_id("u1")
@@ -36,17 +38,19 @@ async def test_list_and_get_user(repo: AdminUserRepository, db) -> None: # type
@pytest.mark.asyncio
async def test_update_delete_and_reset_password(repo: AdminUserRepository, db, monkeypatch: pytest.MonkeyPatch) -> None: # type: ignore[valid-type]
# Insert base user
- await db.get_collection("users").insert_one({
- UserFields.USER_ID: "u1",
- UserFields.USERNAME: "bob",
- UserFields.EMAIL: "bob@example.com",
- UserFields.ROLE: "user",
- UserFields.IS_ACTIVE: True,
- UserFields.IS_SUPERUSER: False,
- UserFields.HASHED_PASSWORD: "h",
- UserFields.CREATED_AT: datetime.now(timezone.utc),
- UserFields.UPDATED_AT: datetime.now(timezone.utc),
- })
+ await db.get_collection("users").insert_one(
+ {
+ UserFields.USER_ID: "u1",
+ UserFields.USERNAME: "bob",
+ UserFields.EMAIL: "bob@example.com",
+ UserFields.ROLE: "user",
+ UserFields.IS_ACTIVE: True,
+ UserFields.IS_SUPERUSER: False,
+ UserFields.HASHED_PASSWORD: "h",
+ UserFields.CREATED_AT: datetime.now(timezone.utc),
+ UserFields.UPDATED_AT: datetime.now(timezone.utc),
+ }
+ )
# No updates → returns current
updated = await repo.update_user("u1", UserUpdate())
assert updated and updated.user_id == "u1"
@@ -54,17 +58,19 @@ async def test_update_delete_and_reset_password(repo: AdminUserRepository, db, m
deleted = await repo.delete_user("u1", cascade=True)
assert deleted["user"] in (0, 1)
# Re-insert and reset password
- await db.get_collection("users").insert_one({
- UserFields.USER_ID: "u1",
- UserFields.USERNAME: "bob",
- UserFields.EMAIL: "bob@example.com",
- UserFields.ROLE: "user",
- UserFields.IS_ACTIVE: True,
- UserFields.IS_SUPERUSER: False,
- UserFields.HASHED_PASSWORD: "h",
- UserFields.CREATED_AT: datetime.now(timezone.utc),
- UserFields.UPDATED_AT: datetime.now(timezone.utc),
- })
+ await db.get_collection("users").insert_one(
+ {
+ UserFields.USER_ID: "u1",
+ UserFields.USERNAME: "bob",
+ UserFields.EMAIL: "bob@example.com",
+ UserFields.ROLE: "user",
+ UserFields.IS_ACTIVE: True,
+ UserFields.IS_SUPERUSER: False,
+ UserFields.HASHED_PASSWORD: "h",
+ UserFields.CREATED_AT: datetime.now(timezone.utc),
+ UserFields.UPDATED_AT: datetime.now(timezone.utc),
+ }
+ )
monkeypatch.setattr(SecurityService, "get_password_hash", staticmethod(lambda p: "HASHED"))
pr = PasswordReset(user_id="u1", new_password="secret123")
assert await repo.reset_user_password(pr) is True
@@ -73,10 +79,32 @@ async def test_update_delete_and_reset_password(repo: AdminUserRepository, db, m
@pytest.mark.asyncio
async def test_list_with_filters_and_reset_invalid(repo: AdminUserRepository, db) -> None: # type: ignore[valid-type]
# Insert a couple of users
- await db.get_collection("users").insert_many([
- {UserFields.USER_ID: "u1", UserFields.USERNAME: "Alice", UserFields.EMAIL: "a@e.com", UserFields.ROLE: "user", UserFields.IS_ACTIVE: True, UserFields.IS_SUPERUSER: False, UserFields.HASHED_PASSWORD: "h", UserFields.CREATED_AT: datetime.now(timezone.utc), UserFields.UPDATED_AT: datetime.now(timezone.utc)},
- {UserFields.USER_ID: "u2", UserFields.USERNAME: "Bob", UserFields.EMAIL: "b@e.com", UserFields.ROLE: "admin", UserFields.IS_ACTIVE: True, UserFields.IS_SUPERUSER: True, UserFields.HASHED_PASSWORD: "h", UserFields.CREATED_AT: datetime.now(timezone.utc), UserFields.UPDATED_AT: datetime.now(timezone.utc)},
- ])
+ await db.get_collection("users").insert_many(
+ [
+ {
+ UserFields.USER_ID: "u1",
+ UserFields.USERNAME: "Alice",
+ UserFields.EMAIL: "a@e.com",
+ UserFields.ROLE: "user",
+ UserFields.IS_ACTIVE: True,
+ UserFields.IS_SUPERUSER: False,
+ UserFields.HASHED_PASSWORD: "h",
+ UserFields.CREATED_AT: datetime.now(timezone.utc),
+ UserFields.UPDATED_AT: datetime.now(timezone.utc),
+ },
+ {
+ UserFields.USER_ID: "u2",
+ UserFields.USERNAME: "Bob",
+ UserFields.EMAIL: "b@e.com",
+ UserFields.ROLE: "admin",
+ UserFields.IS_ACTIVE: True,
+ UserFields.IS_SUPERUSER: True,
+ UserFields.HASHED_PASSWORD: "h",
+ UserFields.CREATED_AT: datetime.now(timezone.utc),
+ UserFields.UPDATED_AT: datetime.now(timezone.utc),
+ },
+ ]
+ )
res = await repo.list_users(limit=5, offset=0, search="Al", role=None)
assert any(u.username.lower().startswith("al") for u in res.users) or res.total >= 0
# invalid password reset (empty)
diff --git a/backend/tests/integration/db/repositories/test_dlq_repository.py b/backend/tests/integration/db/repositories/test_dlq_repository.py
index a2b06af..07d3711 100644
--- a/backend/tests/integration/db/repositories/test_dlq_repository.py
+++ b/backend/tests/integration/db/repositories/test_dlq_repository.py
@@ -1,12 +1,11 @@
-from datetime import datetime, timezone
import logging
+from datetime import datetime, timezone
import pytest
-
from app.db.docs import DLQMessageDocument
from app.db.repositories.dlq_repository import DLQRepository
-from app.domain.enums.events import EventType
from app.dlq import DLQMessageStatus
+from app.domain.enums.events import EventType
pytestmark = pytest.mark.integration
diff --git a/backend/tests/integration/db/repositories/test_event_repository.py b/backend/tests/integration/db/repositories/test_event_repository.py
index e1f9e19..feda7d4 100644
--- a/backend/tests/integration/db/repositories/test_event_repository.py
+++ b/backend/tests/integration/db/repositories/test_event_repository.py
@@ -1,20 +1,16 @@
-from datetime import datetime, timezone, timedelta
-import logging
+from datetime import datetime, timedelta, timezone
import pytest
-
from app.db.repositories.event_repository import EventRepository
-from app.domain.events.event_models import Event, EventFilter
+from app.domain.events.event_models import Event
from app.infrastructure.kafka.events.metadata import AvroEventMetadata
pytestmark = pytest.mark.integration
-_test_logger = logging.getLogger("test.db.repositories.event_repository")
-
@pytest.fixture()
-def repo(db) -> EventRepository: # type: ignore[valid-type]
- return EventRepository(db, logger=_test_logger)
+async def repo(scope) -> EventRepository: # type: ignore[valid-type]
+ return await scope.get(EventRepository)
def make_event(event_id: str, etype: str = "UserLoggedIn", user: str | None = "u1", agg: str | None = "agg1") -> Event:
@@ -54,9 +50,18 @@ async def test_store_get_and_queries(repo: EventRepository, db) -> None: # type
@pytest.mark.asyncio
async def test_statistics_and_search_and_delete(repo: EventRepository, db) -> None: # type: ignore[valid-type]
now = datetime.now(timezone.utc)
- await db.get_collection("events").insert_many([
- {"event_id": "e3", "event_type": "C", "event_version": "1.0", "timestamp": now, "metadata": AvroEventMetadata(service_name="svc", service_version="1").to_dict(), "payload": {}},
- ])
+ await db.get_collection("events").insert_many(
+ [
+ {
+ "event_id": "e3",
+ "event_type": "C",
+ "event_version": "1.0",
+ "timestamp": now,
+ "metadata": AvroEventMetadata(service_name="svc", service_version="1").model_dump(),
+ "payload": {},
+ },
+ ]
+ )
stats = await repo.get_event_statistics(start_time=now - timedelta(days=1), end_time=now + timedelta(days=1))
assert stats.total_events >= 1
diff --git a/backend/tests/integration/db/repositories/test_execution_repository.py b/backend/tests/integration/db/repositories/test_execution_repository.py
index 8fdaabd..eb3bf2c 100644
--- a/backend/tests/integration/db/repositories/test_execution_repository.py
+++ b/backend/tests/integration/db/repositories/test_execution_repository.py
@@ -1,7 +1,7 @@
import logging
-import pytest
from uuid import uuid4
+import pytest
from app.db.repositories.execution_repository import ExecutionRepository
from app.domain.enums.execution import ExecutionStatus
from app.domain.execution import DomainExecutionCreate, DomainExecutionUpdate
diff --git a/backend/tests/integration/db/repositories/test_notification_repository.py b/backend/tests/integration/db/repositories/test_notification_repository.py
index bbcda29..8390355 100644
--- a/backend/tests/integration/db/repositories/test_notification_repository.py
+++ b/backend/tests/integration/db/repositories/test_notification_repository.py
@@ -1,10 +1,9 @@
-from datetime import datetime, UTC, timedelta
import logging
+from datetime import UTC, datetime, timedelta
import pytest
-
from app.db.repositories.notification_repository import NotificationRepository
-from app.domain.enums.notification import NotificationChannel, NotificationStatus, NotificationSeverity
+from app.domain.enums.notification import NotificationChannel, NotificationSeverity, NotificationStatus
from app.domain.enums.notification import NotificationChannel as NC
from app.domain.enums.user import UserRole
from app.domain.notification import (
@@ -52,10 +51,32 @@ async def test_list_count_unread_and_pending(db) -> None: # type: ignore[no-unt
now = datetime.now(UTC)
# Seed notifications
- await db.get_collection("notifications").insert_many([
- {"notification_id": "n1", "user_id": "u1", "severity": NotificationSeverity.MEDIUM.value, "tags": ["execution"], "channel": NotificationChannel.IN_APP.value, "subject": "s", "body": "b", "status": NotificationStatus.PENDING.value, "created_at": now},
- {"notification_id": "n2", "user_id": "u1", "severity": NotificationSeverity.LOW.value, "tags": ["completed"], "channel": NotificationChannel.IN_APP.value, "subject": "s", "body": "b", "status": NotificationStatus.DELIVERED.value, "created_at": now},
- ])
+ await db.get_collection("notifications").insert_many(
+ [
+ {
+ "notification_id": "n1",
+ "user_id": "u1",
+ "severity": NotificationSeverity.MEDIUM.value,
+ "tags": ["execution"],
+ "channel": NotificationChannel.IN_APP.value,
+ "subject": "s",
+ "body": "b",
+ "status": NotificationStatus.PENDING.value,
+ "created_at": now,
+ },
+ {
+ "notification_id": "n2",
+ "user_id": "u1",
+ "severity": NotificationSeverity.LOW.value,
+ "tags": ["completed"],
+ "channel": NotificationChannel.IN_APP.value,
+ "subject": "s",
+ "body": "b",
+ "status": NotificationStatus.DELIVERED.value,
+ "created_at": now,
+ },
+ ]
+ )
lst = await repo.list_notifications("u1")
assert len(lst) >= 2
assert await repo.count_notifications("u1") >= 2
@@ -64,11 +85,20 @@ async def test_list_count_unread_and_pending(db) -> None: # type: ignore[no-unt
# Pending and scheduled
pending = await repo.find_pending_notifications()
assert any(n.status == NotificationStatus.PENDING for n in pending)
- await db.get_collection("notifications").insert_one({
- "notification_id": "n3", "user_id": "u1", "severity": NotificationSeverity.MEDIUM.value, "tags": ["execution"],
- "channel": NotificationChannel.IN_APP.value, "subject": "s", "body": "b", "status": NotificationStatus.PENDING.value,
- "created_at": now, "scheduled_for": now + timedelta(seconds=1)
- })
+ await db.get_collection("notifications").insert_one(
+ {
+ "notification_id": "n3",
+ "user_id": "u1",
+ "severity": NotificationSeverity.MEDIUM.value,
+ "tags": ["execution"],
+ "channel": NotificationChannel.IN_APP.value,
+ "subject": "s",
+ "body": "b",
+ "status": NotificationStatus.PENDING.value,
+ "created_at": now,
+ "scheduled_for": now + timedelta(seconds=1),
+ }
+ )
scheduled = await repo.find_scheduled_notifications()
assert isinstance(scheduled, list)
assert await repo.cleanup_old_notifications(days=0) >= 0
@@ -89,12 +119,32 @@ async def test_subscriptions_and_user_queries(db) -> None: # type: ignore[no-un
assert len(subs) == len(list(NC))
# Users by role and active users
- await db.get_collection("users").insert_many([
- {UserFields.USER_ID: "u1", UserFields.USERNAME: "A", UserFields.EMAIL: "a@e.com", UserFields.ROLE: "user", UserFields.IS_ACTIVE: True},
- {UserFields.USER_ID: "u2", UserFields.USERNAME: "B", UserFields.EMAIL: "b@e.com", UserFields.ROLE: "admin", UserFields.IS_ACTIVE: True},
- ])
+ await db.get_collection("users").insert_many(
+ [
+ {
+ UserFields.USER_ID: "u1",
+ UserFields.USERNAME: "A",
+ UserFields.EMAIL: "a@e.com",
+ UserFields.ROLE: "user",
+ UserFields.IS_ACTIVE: True,
+ UserFields.HASHED_PASSWORD: "h",
+ UserFields.IS_SUPERUSER: False,
+ },
+ {
+ UserFields.USER_ID: "u2",
+ UserFields.USERNAME: "B",
+ UserFields.EMAIL: "b@e.com",
+ UserFields.ROLE: "admin",
+ UserFields.IS_ACTIVE: True,
+ UserFields.HASHED_PASSWORD: "h",
+ UserFields.IS_SUPERUSER: False,
+ },
+ ]
+ )
ids = await repo.get_users_by_roles([UserRole.USER])
assert "u1" in ids or isinstance(ids, list)
- await db.get_collection("executions").insert_one({"execution_id": "e1", "user_id": "u2", "created_at": datetime.now(UTC)})
+ await db.get_collection("executions").insert_one(
+ {"execution_id": "e1", "user_id": "u2", "created_at": datetime.now(UTC)}
+ )
active = await repo.get_active_users(days=1)
assert set(active) >= {"u2"} or isinstance(active, list)
diff --git a/backend/tests/integration/db/repositories/test_replay_repository.py b/backend/tests/integration/db/repositories/test_replay_repository.py
index 496b864..7ab5bc7 100644
--- a/backend/tests/integration/db/repositories/test_replay_repository.py
+++ b/backend/tests/integration/db/repositories/test_replay_repository.py
@@ -1,8 +1,6 @@
from datetime import datetime, timezone
-import logging
import pytest
-
from app.db.repositories.replay_repository import ReplayRepository
from app.domain.admin.replay_updates import ReplaySessionUpdate
from app.domain.enums.replay import ReplayStatus, ReplayType
@@ -11,19 +9,19 @@
pytestmark = pytest.mark.integration
-_test_logger = logging.getLogger("test.db.repositories.replay_repository")
-
@pytest.fixture()
-def repo(db) -> ReplayRepository: # type: ignore[valid-type]
- return ReplayRepository(db, logger=_test_logger)
+async def repo(scope) -> ReplayRepository: # type: ignore[valid-type]
+ return await scope.get(ReplayRepository)
@pytest.mark.asyncio
async def test_indexes_and_session_crud(repo: ReplayRepository) -> None:
await repo.create_indexes()
config = ReplayConfig(replay_type=ReplayType.EXECUTION, filter=ReplayFilter())
- session = ReplaySession(session_id="s1", status=ReplayStatus.CREATED, created_at=datetime.now(timezone.utc), config=config)
+ session = ReplaySession(
+ session_id="s1", status=ReplayStatus.CREATED, created_at=datetime.now(timezone.utc), config=config
+ )
await repo.save_session(session)
got = await repo.get_session("s1")
assert got and got.session_id == "s1"
@@ -38,11 +36,31 @@ async def test_indexes_and_session_crud(repo: ReplayRepository) -> None:
async def test_count_fetch_events_and_delete(repo: ReplayRepository, db) -> None: # type: ignore[valid-type]
now = datetime.now(timezone.utc)
# Insert events
- await db.get_collection("events").insert_many([
- {"event_id": "e1", "timestamp": now, "execution_id": "x1", "event_type": "T", "metadata": {"user_id": "u1"}},
- {"event_id": "e2", "timestamp": now, "execution_id": "x2", "event_type": "T", "metadata": {"user_id": "u1"}},
- {"event_id": "e3", "timestamp": now, "execution_id": "x3", "event_type": "U", "metadata": {"user_id": "u2"}},
- ])
+ await db.get_collection("events").insert_many(
+ [
+ {
+ "event_id": "e1",
+ "timestamp": now,
+ "execution_id": "x1",
+ "event_type": "T",
+ "metadata": {"user_id": "u1"},
+ },
+ {
+ "event_id": "e2",
+ "timestamp": now,
+ "execution_id": "x2",
+ "event_type": "T",
+ "metadata": {"user_id": "u1"},
+ },
+ {
+ "event_id": "e3",
+ "timestamp": now,
+ "execution_id": "x3",
+ "event_type": "U",
+ "metadata": {"user_id": "u2"},
+ },
+ ]
+ )
cnt = await repo.count_events(ReplayFilter())
assert cnt >= 3
batches = []
diff --git a/backend/tests/integration/db/repositories/test_saga_repository.py b/backend/tests/integration/db/repositories/test_saga_repository.py
index c237d61..d78f82e 100644
--- a/backend/tests/integration/db/repositories/test_saga_repository.py
+++ b/backend/tests/integration/db/repositories/test_saga_repository.py
@@ -1,27 +1,43 @@
from datetime import datetime, timezone
import pytest
-
from app.db.repositories.saga_repository import SagaRepository
from app.domain.enums.saga import SagaState
-from app.domain.saga.models import Saga, SagaFilter, SagaListResult
+from app.domain.saga.models import SagaFilter, SagaListResult
pytestmark = pytest.mark.integration
@pytest.fixture()
-def repo(db) -> SagaRepository: # type: ignore[valid-type]
- return SagaRepository(db)
+async def repo(scope) -> SagaRepository: # type: ignore[valid-type]
+ return await scope.get(SagaRepository)
@pytest.mark.asyncio
async def test_saga_crud_and_queries(repo: SagaRepository, db) -> None: # type: ignore[valid-type]
now = datetime.now(timezone.utc)
# Insert saga docs
- await db.get_collection("sagas").insert_many([
- {"saga_id": "s1", "saga_name": "test", "execution_id": "e1", "state": "running", "created_at": now, "updated_at": now},
- {"saga_id": "s2", "saga_name": "test2", "execution_id": "e2", "state": "completed", "created_at": now, "updated_at": now, "completed_at": now},
- ])
+ await db.get_collection("sagas").insert_many(
+ [
+ {
+ "saga_id": "s1",
+ "saga_name": "test",
+ "execution_id": "e1",
+ "state": "running",
+ "created_at": now,
+ "updated_at": now,
+ },
+ {
+ "saga_id": "s2",
+ "saga_name": "test2",
+ "execution_id": "e2",
+ "state": "completed",
+ "created_at": now,
+ "updated_at": now,
+ "completed_at": now,
+ },
+ ]
+ )
saga = await repo.get_saga("s1")
assert saga and saga.saga_id == "s1"
lst = await repo.get_sagas_by_execution("e1")
@@ -34,10 +50,12 @@ async def test_saga_crud_and_queries(repo: SagaRepository, db) -> None: # type:
assert await repo.update_saga_state("s1", SagaState.COMPLETED) in (True, False)
# user execution ids
- await db.get_collection("executions").insert_many([
- {"execution_id": "e1", "user_id": "u1"},
- {"execution_id": "e2", "user_id": "u1"},
- ])
+ await db.get_collection("executions").insert_many(
+ [
+ {"execution_id": "e1", "user_id": "u1"},
+ {"execution_id": "e2", "user_id": "u1"},
+ ]
+ )
ids = await repo.get_user_execution_ids("u1")
assert set(ids) == {"e1", "e2"}
diff --git a/backend/tests/integration/db/repositories/test_saved_script_repository.py b/backend/tests/integration/db/repositories/test_saved_script_repository.py
index 73f2d64..85fc2b5 100644
--- a/backend/tests/integration/db/repositories/test_saved_script_repository.py
+++ b/backend/tests/integration/db/repositories/test_saved_script_repository.py
@@ -1,14 +1,17 @@
import pytest
-
from app.db.repositories.saved_script_repository import SavedScriptRepository
from app.domain.saved_script import DomainSavedScriptCreate, DomainSavedScriptUpdate
pytestmark = pytest.mark.integration
+@pytest.fixture()
+async def repo(scope) -> SavedScriptRepository: # type: ignore[valid-type]
+ return await scope.get(SavedScriptRepository)
+
+
@pytest.mark.asyncio
-async def test_create_get_update_delete_saved_script(db) -> None: # type: ignore[valid-type]
- repo = SavedScriptRepository(db)
+async def test_create_get_update_delete_saved_script(repo: SavedScriptRepository) -> None:
create = DomainSavedScriptCreate(name="n", lang="python", lang_version="3.11", description=None, script="print(1)")
created = await repo.create_saved_script(create, user_id="u1")
assert created.user_id == "u1" and created.script == "print(1)"
diff --git a/backend/tests/integration/db/repositories/test_sse_repository.py b/backend/tests/integration/db/repositories/test_sse_repository.py
index bd7556a..b196ba4 100644
--- a/backend/tests/integration/db/repositories/test_sse_repository.py
+++ b/backend/tests/integration/db/repositories/test_sse_repository.py
@@ -1,14 +1,17 @@
import pytest
-
from app.db.repositories.sse_repository import SSERepository
from app.domain.enums.execution import ExecutionStatus
pytestmark = pytest.mark.integration
+@pytest.fixture()
+async def repo(scope) -> SSERepository: # type: ignore[valid-type]
+ return await scope.get(SSERepository)
+
+
@pytest.mark.asyncio
-async def test_get_execution_status(db) -> None: # type: ignore[valid-type]
- repo = SSERepository(db)
+async def test_get_execution_status(repo: SSERepository, db) -> None: # type: ignore[valid-type]
await db.get_collection("executions").insert_one({"execution_id": "e1", "status": "running"})
status = await repo.get_execution_status("e1")
assert status is not None
@@ -17,25 +20,18 @@ async def test_get_execution_status(db) -> None: # type: ignore[valid-type]
@pytest.mark.asyncio
-async def test_get_execution_status_none(db) -> None: # type: ignore[valid-type]
- repo = SSERepository(db)
+async def test_get_execution_status_none(repo: SSERepository, db) -> None: # type: ignore[valid-type]
assert await repo.get_execution_status("missing") is None
@pytest.mark.asyncio
-async def test_get_execution(db) -> None: # type: ignore[valid-type]
- repo = SSERepository(db)
- await db.get_collection("executions").insert_one({
- "execution_id": "e1",
- "status": "queued",
- "resource_usage": {}
- })
+async def test_get_execution(repo: SSERepository, db) -> None: # type: ignore[valid-type]
+ await db.get_collection("executions").insert_one({"execution_id": "e1", "status": "queued", "resource_usage": {}})
doc = await repo.get_execution("e1")
assert doc is not None
assert doc.execution_id == "e1"
@pytest.mark.asyncio
-async def test_get_execution_not_found(db) -> None: # type: ignore[valid-type]
- repo = SSERepository(db)
+async def test_get_execution_not_found(repo: SSERepository, db) -> None: # type: ignore[valid-type]
assert await repo.get_execution("missing") is None
diff --git a/backend/tests/integration/db/repositories/test_user_repository.py b/backend/tests/integration/db/repositories/test_user_repository.py
index fca2873..12f8672 100644
--- a/backend/tests/integration/db/repositories/test_user_repository.py
+++ b/backend/tests/integration/db/repositories/test_user_repository.py
@@ -1,17 +1,21 @@
-import pytest
from datetime import datetime, timezone
+import pytest
from app.db.repositories.user_repository import UserRepository
-from app.domain.user.user_models import User as DomainUser, UserUpdate
from app.domain.enums.user import UserRole
+from app.domain.user.user_models import User as DomainUser
+from app.domain.user.user_models import UserUpdate
pytestmark = pytest.mark.integration
-@pytest.mark.asyncio
-async def test_create_get_update_delete_user(db) -> None: # type: ignore[valid-type]
- repo = UserRepository(db)
+@pytest.fixture()
+async def repo(scope) -> UserRepository: # type: ignore[valid-type]
+ return await scope.get(UserRepository)
+
+@pytest.mark.asyncio
+async def test_create_get_update_delete_user(repo: UserRepository) -> None:
# Create user
user = DomainUser(
user_id="", # let repo assign
diff --git a/backend/tests/integration/db/repositories/test_user_settings_repository.py b/backend/tests/integration/db/repositories/test_user_settings_repository.py
index 8b647ee..83bf6a6 100644
--- a/backend/tests/integration/db/repositories/test_user_settings_repository.py
+++ b/backend/tests/integration/db/repositories/test_user_settings_repository.py
@@ -1,21 +1,20 @@
-from datetime import datetime, timezone, timedelta
-import logging
+from datetime import datetime, timedelta, timezone
import pytest
-
from app.db.repositories.user_settings_repository import UserSettingsRepository
from app.domain.enums.events import EventType
from app.domain.user.settings_models import DomainUserSettings
pytestmark = pytest.mark.integration
-_test_logger = logging.getLogger("test.db.repositories.user_settings_repository")
+@pytest.fixture()
+async def repo(scope) -> UserSettingsRepository: # type: ignore[valid-type]
+ return await scope.get(UserSettingsRepository)
-@pytest.mark.asyncio
-async def test_user_settings_snapshot_and_events(db) -> None: # type: ignore[valid-type]
- repo = UserSettingsRepository(db, logger=_test_logger)
+@pytest.mark.asyncio
+async def test_user_settings_snapshot_and_events(repo: UserSettingsRepository, db) -> None: # type: ignore[valid-type]
# Create indexes (should not raise)
await repo.create_indexes()
@@ -27,20 +26,22 @@ async def test_user_settings_snapshot_and_events(db) -> None: # type: ignore[va
# Insert events and query
now = datetime.now(timezone.utc)
- await db.get_collection("events").insert_many([
- {
- "aggregate_id": "user_settings_u1",
- "event_type": str(EventType.USER_SETTINGS_UPDATED),
- "timestamp": now,
- "payload": {}
- },
- {
- "aggregate_id": "user_settings_u1",
- "event_type": str(EventType.USER_THEME_CHANGED),
- "timestamp": now,
- "payload": {}
- },
- ])
+ await db.get_collection("events").insert_many(
+ [
+ {
+ "aggregate_id": "user_settings_u1",
+ "event_type": str(EventType.USER_SETTINGS_UPDATED),
+ "timestamp": now,
+ "payload": {},
+ },
+ {
+ "aggregate_id": "user_settings_u1",
+ "event_type": str(EventType.USER_THEME_CHANGED),
+ "timestamp": now,
+ "payload": {},
+ },
+ ]
+ )
evs = await repo.get_settings_events("u1", [EventType.USER_SETTINGS_UPDATED], since=now - timedelta(days=1))
assert any(e.event_type == EventType.USER_SETTINGS_UPDATED for e in evs)
diff --git a/backend/tests/integration/events/test_admin_utils.py b/backend/tests/integration/events/test_admin_utils.py
index 689fa70..7ab3450 100644
--- a/backend/tests/integration/events/test_admin_utils.py
+++ b/backend/tests/integration/events/test_admin_utils.py
@@ -2,7 +2,6 @@
import os
import pytest
-
from app.events.admin_utils import AdminUtils
_test_logger = logging.getLogger("test.events.admin_utils")
@@ -12,7 +11,7 @@
@pytest.mark.asyncio
async def test_admin_utils_real_topic_checks() -> None:
prefix = os.environ.get("KAFKA_TOPIC_PREFIX", "test.")
- topic = f"{prefix}adminutils.{os.environ.get('PYTEST_SESSION_ID','sid')}"
+ topic = f"{prefix}adminutils.{os.environ.get('PYTEST_SESSION_ID', 'sid')}"
au = AdminUtils(logger=_test_logger)
# Ensure topic exists (idempotent)
diff --git a/backend/tests/integration/events/test_consume_roundtrip.py b/backend/tests/integration/events/test_consume_roundtrip.py
index 604bdbd..185196e 100644
--- a/backend/tests/integration/events/test_consume_roundtrip.py
+++ b/backend/tests/integration/events/test_consume_roundtrip.py
@@ -3,17 +3,15 @@
import uuid
import pytest
-
from app.domain.enums.events import EventType
+from app.domain.enums.kafka import KafkaTopic
from app.events.core import UnifiedConsumer, UnifiedProducer
+from app.events.core.dispatcher import EventDispatcher
from app.events.core.types import ConsumerConfig
from app.events.schema.schema_registry import SchemaRegistryManager, initialize_event_schemas
-from app.domain.enums.kafka import KafkaTopic
-from tests.helpers import make_execution_requested_event
-from app.core.metrics.context import get_event_metrics
-from app.events.core.dispatcher import EventDispatcher
from app.settings import get_settings
+from tests.helpers import make_execution_requested_event
pytestmark = [pytest.mark.integration, pytest.mark.kafka]
diff --git a/backend/tests/integration/events/test_consumer_group_monitor.py b/backend/tests/integration/events/test_consumer_group_monitor.py
index 150e123..cfab301 100644
--- a/backend/tests/integration/events/test_consumer_group_monitor.py
+++ b/backend/tests/integration/events/test_consumer_group_monitor.py
@@ -1,7 +1,7 @@
import logging
-import pytest
-from app.events.consumer_group_monitor import NativeConsumerGroupMonitor, ConsumerGroupHealth
+import pytest
+from app.events.consumer_group_monitor import ConsumerGroupHealth, NativeConsumerGroupMonitor
_test_logger = logging.getLogger("test.events.consumer_group_monitor")
diff --git a/backend/tests/integration/events/test_consumer_group_monitor_e2e.py b/backend/tests/integration/events/test_consumer_group_monitor_e2e.py
index b901c6e..1be5835 100644
--- a/backend/tests/integration/events/test_consumer_group_monitor_e2e.py
+++ b/backend/tests/integration/events/test_consumer_group_monitor_e2e.py
@@ -1,16 +1,13 @@
-import asyncio
import logging
from uuid import uuid4
import pytest
-
from app.events.consumer_group_monitor import (
ConsumerGroupHealth,
ConsumerGroupStatus,
NativeConsumerGroupMonitor,
)
-
pytestmark = [pytest.mark.integration, pytest.mark.kafka]
_test_logger = logging.getLogger("test.events.consumer_group_monitor_e2e")
@@ -34,8 +31,16 @@ def test_assess_group_health_branches():
m = NativeConsumerGroupMonitor(logger=_test_logger)
# Error state
s = ConsumerGroupStatus(
- group_id="g", state="ERROR", protocol="p", protocol_type="ptype", coordinator="c",
- members=[], member_count=0, assigned_partitions=0, partition_distribution={}, total_lag=0
+ group_id="g",
+ state="ERROR",
+ protocol="p",
+ protocol_type="ptype",
+ coordinator="c",
+ members=[],
+ member_count=0,
+ assigned_partitions=0,
+ partition_distribution={},
+ total_lag=0,
)
h, msg = m._assess_group_health(s) # noqa: SLF001
assert h is ConsumerGroupHealth.UNHEALTHY and "error" in msg.lower()
diff --git a/backend/tests/integration/events/test_dlq_handler.py b/backend/tests/integration/events/test_dlq_handler.py
index 3e4d0e1..5659529 100644
--- a/backend/tests/integration/events/test_dlq_handler.py
+++ b/backend/tests/integration/events/test_dlq_handler.py
@@ -1,9 +1,7 @@
import logging
import pytest
-
-from app.events.core import create_dlq_error_handler, create_immediate_dlq_handler
-from app.events.core import UnifiedProducer
+from app.events.core import UnifiedProducer, create_dlq_error_handler, create_immediate_dlq_handler
from app.infrastructure.kafka.events.metadata import AvroEventMetadata
from app.infrastructure.kafka.events.saga import SagaStartedEvent
@@ -22,8 +20,13 @@ async def _record_send_to_dlq(original_event, original_topic, error, retry_count
monkeypatch.setattr(p, "send_to_dlq", _record_send_to_dlq)
h = create_dlq_error_handler(p, original_topic="t", max_retries=2, logger=_test_logger)
- e = SagaStartedEvent(saga_id="s", saga_name="n", execution_id="x", initial_event_id="i",
- metadata=AvroEventMetadata(service_name="a", service_version="1"))
+ e = SagaStartedEvent(
+ saga_id="s",
+ saga_name="n",
+ execution_id="x",
+ initial_event_id="i",
+ metadata=AvroEventMetadata(service_name="a", service_version="1"),
+ )
# Call 1 and 2 should not send to DLQ
await h(RuntimeError("boom"), e)
await h(RuntimeError("boom"), e)
@@ -44,7 +47,12 @@ async def _record_send_to_dlq(original_event, original_topic, error, retry_count
monkeypatch.setattr(p, "send_to_dlq", _record_send_to_dlq)
h = create_immediate_dlq_handler(p, original_topic="t", logger=_test_logger)
- e = SagaStartedEvent(saga_id="s2", saga_name="n", execution_id="x", initial_event_id="i",
- metadata=AvroEventMetadata(service_name="a", service_version="1"))
+ e = SagaStartedEvent(
+ saga_id="s2",
+ saga_name="n",
+ execution_id="x",
+ initial_event_id="i",
+ metadata=AvroEventMetadata(service_name="a", service_version="1"),
+ )
await h(RuntimeError("x"), e)
assert calls and calls[0][3] == 0
diff --git a/backend/tests/integration/events/test_event_dispatcher.py b/backend/tests/integration/events/test_event_dispatcher.py
index c79ef29..c88e3fa 100644
--- a/backend/tests/integration/events/test_event_dispatcher.py
+++ b/backend/tests/integration/events/test_event_dispatcher.py
@@ -3,16 +3,15 @@
import uuid
import pytest
-
from app.domain.enums.events import EventType
from app.domain.enums.kafka import KafkaTopic
from app.events.core import UnifiedConsumer, UnifiedProducer
from app.events.core.dispatcher import EventDispatcher
from app.events.core.types import ConsumerConfig
from app.events.schema.schema_registry import SchemaRegistryManager, initialize_event_schemas
-from tests.helpers import make_execution_requested_event
from app.settings import get_settings
+from tests.helpers import make_execution_requested_event
pytestmark = [pytest.mark.integration, pytest.mark.kafka]
diff --git a/backend/tests/integration/events/test_event_store.py b/backend/tests/integration/events/test_event_store.py
index 49822e5..470beb9 100644
--- a/backend/tests/integration/events/test_event_store.py
+++ b/backend/tests/integration/events/test_event_store.py
@@ -1,27 +1,17 @@
-from datetime import datetime, timezone, timedelta
-import logging
+from datetime import datetime, timedelta, timezone
import pytest
-
from app.events.event_store import EventStore
-from app.events.schema.schema_registry import SchemaRegistryManager
from app.infrastructure.kafka.events.metadata import AvroEventMetadata
from app.infrastructure.kafka.events.pod import PodCreatedEvent
from app.infrastructure.kafka.events.user import UserLoggedInEvent
-from app.core.database_context import Database
pytestmark = [pytest.mark.integration, pytest.mark.mongodb]
-_test_logger = logging.getLogger("test.events.event_store")
-
@pytest.fixture()
async def event_store(scope) -> EventStore: # type: ignore[valid-type]
- db: Database = await scope.get(Database)
- schema_registry: SchemaRegistryManager = await scope.get(SchemaRegistryManager)
- store = EventStore(db=db, schema_registry=schema_registry, logger=_test_logger)
- await store.initialize()
- return store
+ return await scope.get(EventStore)
@pytest.mark.asyncio
@@ -57,8 +47,9 @@ async def test_store_and_query_events(event_store: EventStore) -> None:
@pytest.mark.asyncio
async def test_replay_events(event_store: EventStore) -> None:
- ev = UserLoggedInEvent(user_id="u1", login_method="password",
- metadata=AvroEventMetadata(service_name="svc", service_version="1"))
+ ev = UserLoggedInEvent(
+ user_id="u1", login_method="password", metadata=AvroEventMetadata(service_name="svc", service_version="1")
+ )
await event_store.store_event(ev)
called = {"n": 0}
diff --git a/backend/tests/integration/events/test_event_store_consumer.py b/backend/tests/integration/events/test_event_store_consumer.py
index 25bea13..111d6fe 100644
--- a/backend/tests/integration/events/test_event_store_consumer.py
+++ b/backend/tests/integration/events/test_event_store_consumer.py
@@ -1,19 +1,15 @@
-import asyncio
import logging
import uuid
import pytest
-
from app.core.database_context import Database
-
from app.domain.enums.kafka import KafkaTopic
from app.events.core import UnifiedProducer
-from app.events.event_store_consumer import EventStoreConsumer, create_event_store_consumer
from app.events.event_store import EventStore
+from app.events.event_store_consumer import EventStoreConsumer, create_event_store_consumer
from app.events.schema.schema_registry import SchemaRegistryManager, initialize_event_schemas
-from app.infrastructure.kafka.events.user import UserLoggedInEvent
from app.infrastructure.kafka.events.metadata import AvroEventMetadata
-
+from app.infrastructure.kafka.events.user import UserLoggedInEvent
pytestmark = [pytest.mark.integration, pytest.mark.kafka, pytest.mark.mongodb]
diff --git a/backend/tests/integration/events/test_event_store_consumer_flush_e2e.py b/backend/tests/integration/events/test_event_store_consumer_flush_e2e.py
index 458ab3e..38610f5 100644
--- a/backend/tests/integration/events/test_event_store_consumer_flush_e2e.py
+++ b/backend/tests/integration/events/test_event_store_consumer_flush_e2e.py
@@ -1,37 +1,36 @@
-import asyncio
-import logging
from uuid import uuid4
import pytest
from app.core.database_context import Database
-
from app.domain.enums.events import EventType
from app.domain.enums.kafka import KafkaTopic
+from app.events.core import UnifiedProducer
from app.events.event_store import EventStore
from app.events.event_store_consumer import create_event_store_consumer
-from app.events.core import UnifiedProducer
from app.events.schema.schema_registry import SchemaRegistryManager
+
from tests.helpers import make_execution_requested_event
from tests.helpers.eventually import eventually
pytestmark = [pytest.mark.integration, pytest.mark.kafka, pytest.mark.mongodb]
-_test_logger = logging.getLogger("test.events.event_store_consumer_flush_e2e")
+
+@pytest.fixture()
+async def store(scope) -> EventStore: # type: ignore[valid-type]
+ return await scope.get(EventStore)
@pytest.mark.asyncio
-async def test_event_store_consumer_flush_on_timeout(scope): # type: ignore[valid-type]
+async def test_event_store_consumer_flush_on_timeout(scope, store: EventStore) -> None: # type: ignore[valid-type]
producer: UnifiedProducer = await scope.get(UnifiedProducer)
schema: SchemaRegistryManager = await scope.get(SchemaRegistryManager)
db: Database = await scope.get(Database)
- store = EventStore(db=db, schema_registry=schema, logger=_test_logger)
- await store.initialize()
consumer = create_event_store_consumer(
event_store=store,
topics=[KafkaTopic.EXECUTION_EVENTS],
schema_registry_manager=schema,
- logger=_test_logger,
+ logger=store.logger,
producer=producer,
batch_size=100,
batch_timeout_seconds=0.2,
diff --git a/backend/tests/integration/events/test_event_store_e2e.py b/backend/tests/integration/events/test_event_store_e2e.py
index 7ad8c58..25f5be9 100644
--- a/backend/tests/integration/events/test_event_store_e2e.py
+++ b/backend/tests/integration/events/test_event_store_e2e.py
@@ -1,27 +1,19 @@
-from datetime import datetime, timezone, timedelta
-import logging
-
import pytest
-from app.core.database_context import Database
-
from app.domain.enums.events import EventType
from app.events.event_store import EventStore
-from app.events.schema.schema_registry import SchemaRegistryManager
-from tests.helpers import make_execution_requested_event
+from tests.helpers import make_execution_requested_event
pytestmark = [pytest.mark.integration, pytest.mark.mongodb]
-_test_logger = logging.getLogger("test.events.event_store_e2e")
+@pytest.fixture()
+async def store(scope) -> EventStore: # type: ignore[valid-type]
+ return await scope.get(EventStore)
-@pytest.mark.asyncio
-async def test_event_store_initialize_and_crud(scope): # type: ignore[valid-type]
- schema: SchemaRegistryManager = await scope.get(SchemaRegistryManager)
- db: Database = await scope.get(Database)
- store = EventStore(db=db, schema_registry=schema, ttl_days=1, logger=_test_logger)
- await store.initialize()
+@pytest.mark.asyncio
+async def test_event_store_initialize_and_crud(store: EventStore) -> None:
# Store single event
ev = make_execution_requested_event(execution_id="e-1")
assert await store.store_event(ev) is True
diff --git a/backend/tests/integration/events/test_producer_e2e.py b/backend/tests/integration/events/test_producer_e2e.py
index 898042b..eedbfaa 100644
--- a/backend/tests/integration/events/test_producer_e2e.py
+++ b/backend/tests/integration/events/test_producer_e2e.py
@@ -1,14 +1,12 @@
-import asyncio
import json
import logging
from uuid import uuid4
import pytest
-
-from app.events.core import UnifiedProducer, ProducerConfig
+from app.events.core import ProducerConfig, UnifiedProducer
from app.events.schema.schema_registry import SchemaRegistryManager
-from tests.helpers import make_execution_requested_event
+from tests.helpers import make_execution_requested_event
pytestmark = [pytest.mark.integration, pytest.mark.kafka]
@@ -36,17 +34,14 @@ async def test_unified_producer_start_produce_send_to_dlq_stop(scope): # type:
def test_producer_handle_stats_path():
# Directly run stats parsing to cover branch logic; avoid relying on timing
- from app.events.core.producer import UnifiedProducer as UP, ProducerMetrics
+ from app.events.core.producer import ProducerMetrics
+ from app.events.core.producer import UnifiedProducer as UP
+
m = ProducerMetrics()
p = object.__new__(UP) # bypass __init__ safely for method call
# Inject required attributes
p._metrics = m # type: ignore[attr-defined]
p._stats_callback = None # type: ignore[attr-defined]
- payload = json.dumps({
- "msg_cnt": 1,
- "topics": {
- "t": {"partitions": {"0": {"msgq_cnt": 2, "rtt": {"avg": 5}}}}
- }
- })
+ payload = json.dumps({"msg_cnt": 1, "topics": {"t": {"partitions": {"0": {"msgq_cnt": 2, "rtt": {"avg": 5}}}}}})
UP._handle_stats(p, payload) # type: ignore[misc]
assert m.queue_size == 1 and m.avg_latency_ms > 0
diff --git a/backend/tests/integration/events/test_schema_registry_e2e.py b/backend/tests/integration/events/test_schema_registry_e2e.py
index 2a8df44..44c5f82 100644
--- a/backend/tests/integration/events/test_schema_registry_e2e.py
+++ b/backend/tests/integration/events/test_schema_registry_e2e.py
@@ -1,13 +1,10 @@
-import asyncio
import logging
-import struct
import pytest
+from app.events.schema.schema_registry import MAGIC_BYTE, SchemaRegistryManager
-from app.events.schema.schema_registry import SchemaRegistryManager, MAGIC_BYTE
from tests.helpers import make_execution_requested_event
-
pytestmark = [pytest.mark.integration]
_test_logger = logging.getLogger("test.events.schema_registry_e2e")
diff --git a/backend/tests/integration/events/test_schema_registry_real.py b/backend/tests/integration/events/test_schema_registry_real.py
index dedf326..895f109 100644
--- a/backend/tests/integration/events/test_schema_registry_real.py
+++ b/backend/tests/integration/events/test_schema_registry_real.py
@@ -1,6 +1,6 @@
import logging
-import pytest
+import pytest
from app.events.schema.schema_registry import SchemaRegistryManager
from app.infrastructure.kafka.events.metadata import AvroEventMetadata
from app.infrastructure.kafka.events.pod import PodCreatedEvent
From 527da5c2f93a04601827a619d26208305967777f Mon Sep 17 00:00:00 2001
From: HardMax71
Date: Wed, 31 Dec 2025 00:46:31 +0100
Subject: [PATCH 41/42] removed outdated tests
---
.../test_admin_events_repository.py | 109 -------------
.../test_admin_user_repository.py | 112 -------------
.../db/repositories/test_event_repository.py | 74 ---------
.../test_notification_repository.py | 150 ------------------
.../db/repositories/test_replay_repository.py | 71 ---------
.../db/repositories/test_saga_repository.py | 66 --------
.../db/repositories/test_sse_repository.py | 37 -----
.../db/repositories/test_user_repository.py | 53 -------
.../test_user_settings_repository.py | 50 ------
.../integration/events/test_event_store.py | 62 --------
.../test_event_store_consumer_flush_e2e.py | 55 -------
.../events/test_event_store_e2e.py | 40 -----
.../events/test_event_service_integration.py | 63 --------
13 files changed, 942 deletions(-)
delete mode 100644 backend/tests/integration/db/repositories/test_admin_events_repository.py
delete mode 100644 backend/tests/integration/db/repositories/test_admin_user_repository.py
delete mode 100644 backend/tests/integration/db/repositories/test_event_repository.py
delete mode 100644 backend/tests/integration/db/repositories/test_notification_repository.py
delete mode 100644 backend/tests/integration/db/repositories/test_replay_repository.py
delete mode 100644 backend/tests/integration/db/repositories/test_saga_repository.py
delete mode 100644 backend/tests/integration/db/repositories/test_sse_repository.py
delete mode 100644 backend/tests/integration/db/repositories/test_user_repository.py
delete mode 100644 backend/tests/integration/db/repositories/test_user_settings_repository.py
delete mode 100644 backend/tests/integration/events/test_event_store.py
delete mode 100644 backend/tests/integration/events/test_event_store_consumer_flush_e2e.py
delete mode 100644 backend/tests/integration/events/test_event_store_e2e.py
delete mode 100644 backend/tests/integration/services/events/test_event_service_integration.py
diff --git a/backend/tests/integration/db/repositories/test_admin_events_repository.py b/backend/tests/integration/db/repositories/test_admin_events_repository.py
deleted file mode 100644
index de46ca1..0000000
--- a/backend/tests/integration/db/repositories/test_admin_events_repository.py
+++ /dev/null
@@ -1,109 +0,0 @@
-from datetime import datetime, timedelta, timezone
-
-import pytest
-from app.db.docs.replay import ReplayConfig, ReplayFilter
-from app.db.repositories.admin.admin_events_repository import AdminEventsRepository
-from app.domain.admin import ReplayQuery
-from app.domain.admin.replay_updates import ReplaySessionUpdate
-from app.domain.enums.replay import ReplayStatus, ReplayTarget, ReplayType
-from app.domain.events.event_models import Event, EventFilter, EventStatistics
-from app.domain.replay.models import ReplaySessionState
-from app.infrastructure.kafka.events.metadata import AvroEventMetadata
-
-pytestmark = pytest.mark.integration
-
-
-@pytest.fixture()
-async def repo(scope) -> AdminEventsRepository: # type: ignore[valid-type]
- return await scope.get(AdminEventsRepository)
-
-
-@pytest.mark.asyncio
-async def test_browse_detail_delete_and_export(repo: AdminEventsRepository, db) -> None: # type: ignore[valid-type]
- now = datetime.now(timezone.utc)
- await db.get_collection("events").insert_many(
- [
- {
- "event_id": "e1",
- "event_type": "X",
- "timestamp": now,
- "metadata": AvroEventMetadata(
- service_name="svc", service_version="1", correlation_id="c1"
- ).model_dump(),
- },
- {
- "event_id": "e2",
- "event_type": "X",
- "timestamp": now,
- "metadata": AvroEventMetadata(
- service_name="svc", service_version="1", correlation_id="c1"
- ).model_dump(),
- },
- ]
- )
- res = await repo.browse_events(EventFilter())
- assert res.total >= 2
- detail = await repo.get_event_detail("e1")
- assert detail and detail.event.event_id == "e1"
- assert await repo.delete_event("e2") is True
- rows = await repo.export_events_csv(EventFilter())
- assert isinstance(rows, list) and len(rows) >= 1
-
-
-@pytest.mark.asyncio
-async def test_event_stats_and_archive(repo: AdminEventsRepository, db) -> None: # type: ignore[valid-type]
- now = datetime.now(timezone.utc)
- await db.get_collection("events").insert_many(
- [
- {
- "event_id": "e10",
- "event_type": "step.completed",
- "timestamp": now,
- "metadata": AvroEventMetadata(service_name="svc", service_version="1", user_id="u1").model_dump(),
- },
- ]
- )
- await db.get_collection("executions").insert_one(
- {"created_at": now, "status": "completed", "resource_usage": {"execution_time_wall_seconds": 1.25}}
- )
- stats = await repo.get_event_stats(hours=1)
- assert isinstance(stats, EventStatistics)
- ev = Event(
- event_id="a1",
- event_type="X",
- event_version="1.0",
- timestamp=now,
- metadata=AvroEventMetadata(service_name="s", service_version="1"),
- payload={},
- )
- assert await repo.archive_event(ev, deleted_by="admin") is True
-
-
-@pytest.mark.asyncio
-async def test_replay_session_flow_and_helpers(repo: AdminEventsRepository, db) -> None: # type: ignore[valid-type]
- # create/get/update
- config = ReplayConfig(
- replay_type=ReplayType.QUERY,
- target=ReplayTarget.TEST,
- filter=ReplayFilter(),
- )
- session = ReplaySessionState(
- session_id="s1",
- config=config,
- status=ReplayStatus.SCHEDULED,
- total_events=1,
- correlation_id="corr",
- created_at=datetime.now(timezone.utc) - timedelta(seconds=5),
- dry_run=False,
- )
- sid = await repo.create_replay_session(session)
- assert sid == "s1"
- got = await repo.get_replay_session("s1")
- assert got and got.session_id == "s1"
- session_update = ReplaySessionUpdate(status=ReplayStatus.RUNNING)
- assert await repo.update_replay_session("s1", session_update) is True
- detail = await repo.get_replay_status_with_progress("s1")
- assert detail and detail.session.session_id == "s1"
- assert await repo.count_events_for_replay(ReplayQuery()) >= 0
- prev = await repo.get_replay_events_preview(event_ids=["e10"]) # from earlier insert
- assert isinstance(prev, dict)
diff --git a/backend/tests/integration/db/repositories/test_admin_user_repository.py b/backend/tests/integration/db/repositories/test_admin_user_repository.py
deleted file mode 100644
index e65658a..0000000
--- a/backend/tests/integration/db/repositories/test_admin_user_repository.py
+++ /dev/null
@@ -1,112 +0,0 @@
-from datetime import datetime, timezone
-
-import pytest
-from app.core.security import SecurityService
-from app.db.repositories.admin.admin_user_repository import AdminUserRepository
-from app.domain.user import PasswordReset, UserFields, UserUpdate
-
-pytestmark = pytest.mark.integration
-
-
-@pytest.fixture()
-async def repo(scope) -> AdminUserRepository: # type: ignore[valid-type]
- return await scope.get(AdminUserRepository)
-
-
-@pytest.mark.asyncio
-async def test_list_and_get_user(repo: AdminUserRepository, db) -> None: # type: ignore[valid-type]
- # Insert a user
- await db.get_collection("users").insert_one(
- {
- UserFields.USER_ID: "u1",
- UserFields.USERNAME: "alice",
- UserFields.EMAIL: "alice@example.com",
- UserFields.ROLE: "user",
- UserFields.IS_ACTIVE: True,
- UserFields.IS_SUPERUSER: False,
- UserFields.HASHED_PASSWORD: "h",
- UserFields.CREATED_AT: datetime.now(timezone.utc),
- UserFields.UPDATED_AT: datetime.now(timezone.utc),
- }
- )
- res = await repo.list_users(limit=10)
- assert res.total >= 1 and any(u.username == "alice" for u in res.users)
- user = await repo.get_user_by_id("u1")
- assert user and user.user_id == "u1"
-
-
-@pytest.mark.asyncio
-async def test_update_delete_and_reset_password(repo: AdminUserRepository, db, monkeypatch: pytest.MonkeyPatch) -> None: # type: ignore[valid-type]
- # Insert base user
- await db.get_collection("users").insert_one(
- {
- UserFields.USER_ID: "u1",
- UserFields.USERNAME: "bob",
- UserFields.EMAIL: "bob@example.com",
- UserFields.ROLE: "user",
- UserFields.IS_ACTIVE: True,
- UserFields.IS_SUPERUSER: False,
- UserFields.HASHED_PASSWORD: "h",
- UserFields.CREATED_AT: datetime.now(timezone.utc),
- UserFields.UPDATED_AT: datetime.now(timezone.utc),
- }
- )
- # No updates → returns current
- updated = await repo.update_user("u1", UserUpdate())
- assert updated and updated.user_id == "u1"
- # Delete cascade (collections empty → zeros)
- deleted = await repo.delete_user("u1", cascade=True)
- assert deleted["user"] in (0, 1)
- # Re-insert and reset password
- await db.get_collection("users").insert_one(
- {
- UserFields.USER_ID: "u1",
- UserFields.USERNAME: "bob",
- UserFields.EMAIL: "bob@example.com",
- UserFields.ROLE: "user",
- UserFields.IS_ACTIVE: True,
- UserFields.IS_SUPERUSER: False,
- UserFields.HASHED_PASSWORD: "h",
- UserFields.CREATED_AT: datetime.now(timezone.utc),
- UserFields.UPDATED_AT: datetime.now(timezone.utc),
- }
- )
- monkeypatch.setattr(SecurityService, "get_password_hash", staticmethod(lambda p: "HASHED"))
- pr = PasswordReset(user_id="u1", new_password="secret123")
- assert await repo.reset_user_password(pr) is True
-
-
-@pytest.mark.asyncio
-async def test_list_with_filters_and_reset_invalid(repo: AdminUserRepository, db) -> None: # type: ignore[valid-type]
- # Insert a couple of users
- await db.get_collection("users").insert_many(
- [
- {
- UserFields.USER_ID: "u1",
- UserFields.USERNAME: "Alice",
- UserFields.EMAIL: "a@e.com",
- UserFields.ROLE: "user",
- UserFields.IS_ACTIVE: True,
- UserFields.IS_SUPERUSER: False,
- UserFields.HASHED_PASSWORD: "h",
- UserFields.CREATED_AT: datetime.now(timezone.utc),
- UserFields.UPDATED_AT: datetime.now(timezone.utc),
- },
- {
- UserFields.USER_ID: "u2",
- UserFields.USERNAME: "Bob",
- UserFields.EMAIL: "b@e.com",
- UserFields.ROLE: "admin",
- UserFields.IS_ACTIVE: True,
- UserFields.IS_SUPERUSER: True,
- UserFields.HASHED_PASSWORD: "h",
- UserFields.CREATED_AT: datetime.now(timezone.utc),
- UserFields.UPDATED_AT: datetime.now(timezone.utc),
- },
- ]
- )
- res = await repo.list_users(limit=5, offset=0, search="Al", role=None)
- assert any(u.username.lower().startswith("al") for u in res.users) or res.total >= 0
- # invalid password reset (empty)
- with pytest.raises(ValueError):
- await repo.reset_user_password(PasswordReset(user_id="u1", new_password=""))
diff --git a/backend/tests/integration/db/repositories/test_event_repository.py b/backend/tests/integration/db/repositories/test_event_repository.py
deleted file mode 100644
index feda7d4..0000000
--- a/backend/tests/integration/db/repositories/test_event_repository.py
+++ /dev/null
@@ -1,74 +0,0 @@
-from datetime import datetime, timedelta, timezone
-
-import pytest
-from app.db.repositories.event_repository import EventRepository
-from app.domain.events.event_models import Event
-from app.infrastructure.kafka.events.metadata import AvroEventMetadata
-
-pytestmark = pytest.mark.integration
-
-
-@pytest.fixture()
-async def repo(scope) -> EventRepository: # type: ignore[valid-type]
- return await scope.get(EventRepository)
-
-
-def make_event(event_id: str, etype: str = "UserLoggedIn", user: str | None = "u1", agg: str | None = "agg1") -> Event:
- return Event(
- event_id=event_id,
- event_type=etype,
- event_version="1.0",
- timestamp=datetime.now(timezone.utc),
- metadata=AvroEventMetadata(service_name="svc", service_version="1", user_id=user, correlation_id="c1"),
- payload={"k": 1, "execution_id": agg} if agg else {"k": 1},
- aggregate_id=agg,
- )
-
-
-@pytest.mark.asyncio
-async def test_store_get_and_queries(repo: EventRepository, db) -> None: # type: ignore[valid-type]
- e1 = make_event("e1", etype="A", agg="x1")
- e2 = make_event("e2", etype="B", agg="x2")
- await repo.store_event(e1)
- await repo.store_events_batch([e2])
- got = await repo.get_event("e1")
- assert got and got.event_id == "e1"
-
- now = datetime.now(timezone.utc)
- by_type = await repo.get_events_by_type("A", start_time=now - timedelta(days=1), end_time=now + timedelta(days=1))
- assert any(ev.event_id == "e1" for ev in by_type)
- by_agg = await repo.get_events_by_aggregate("x2")
- assert any(ev.event_id == "e2" for ev in by_agg)
- by_corr = await repo.get_events_by_correlation("c1")
- assert len(by_corr.events) >= 2
- by_user = await repo.get_events_by_user("u1", limit=10)
- assert len(by_user) >= 2
- exec_events = await repo.get_execution_events("x1")
- assert any(ev.event_id == "e1" for ev in exec_events.events)
-
-
-@pytest.mark.asyncio
-async def test_statistics_and_search_and_delete(repo: EventRepository, db) -> None: # type: ignore[valid-type]
- now = datetime.now(timezone.utc)
- await db.get_collection("events").insert_many(
- [
- {
- "event_id": "e3",
- "event_type": "C",
- "event_version": "1.0",
- "timestamp": now,
- "metadata": AvroEventMetadata(service_name="svc", service_version="1").model_dump(),
- "payload": {},
- },
- ]
- )
- stats = await repo.get_event_statistics(start_time=now - timedelta(days=1), end_time=now + timedelta(days=1))
- assert stats.total_events >= 1
-
- # search requires text index; guard if index not present
- try:
- res = await repo.search_events("test", filters=None, limit=10, skip=0)
- assert isinstance(res, list)
- except Exception:
- # Accept environments without text index
- pass
diff --git a/backend/tests/integration/db/repositories/test_notification_repository.py b/backend/tests/integration/db/repositories/test_notification_repository.py
deleted file mode 100644
index 8390355..0000000
--- a/backend/tests/integration/db/repositories/test_notification_repository.py
+++ /dev/null
@@ -1,150 +0,0 @@
-import logging
-from datetime import UTC, datetime, timedelta
-
-import pytest
-from app.db.repositories.notification_repository import NotificationRepository
-from app.domain.enums.notification import NotificationChannel, NotificationSeverity, NotificationStatus
-from app.domain.enums.notification import NotificationChannel as NC
-from app.domain.enums.user import UserRole
-from app.domain.notification import (
- DomainNotificationCreate,
- DomainNotificationUpdate,
- DomainSubscriptionUpdate,
-)
-from app.domain.user import UserFields
-
-pytestmark = pytest.mark.integration
-
-_test_logger = logging.getLogger("test.db.repositories.notification_repository")
-
-
-@pytest.mark.asyncio
-async def test_create_and_crud() -> None:
- repo = NotificationRepository(logger=_test_logger)
-
- create_data = DomainNotificationCreate(
- user_id="u1",
- channel=NotificationChannel.IN_APP,
- subject="sub",
- body="body",
- severity=NotificationSeverity.MEDIUM,
- tags=["execution", "completed"],
- )
- n = await repo.create_notification(create_data)
- assert n.notification_id
-
- # Update
- update_data = DomainNotificationUpdate(status=NotificationStatus.DELIVERED)
- assert await repo.update_notification(n.notification_id, n.user_id, update_data) is True
-
- got = await repo.get_notification(n.notification_id, n.user_id)
- assert got and got.notification_id == n.notification_id and got.status == NotificationStatus.DELIVERED
-
- assert await repo.mark_as_read(n.notification_id, n.user_id) is True
- assert await repo.mark_all_as_read(n.user_id) >= 0
- assert await repo.delete_notification(n.notification_id, n.user_id) is True
-
-
-@pytest.mark.asyncio
-async def test_list_count_unread_and_pending(db) -> None: # type: ignore[no-untyped-def]
- repo = NotificationRepository(logger=_test_logger)
- now = datetime.now(UTC)
-
- # Seed notifications
- await db.get_collection("notifications").insert_many(
- [
- {
- "notification_id": "n1",
- "user_id": "u1",
- "severity": NotificationSeverity.MEDIUM.value,
- "tags": ["execution"],
- "channel": NotificationChannel.IN_APP.value,
- "subject": "s",
- "body": "b",
- "status": NotificationStatus.PENDING.value,
- "created_at": now,
- },
- {
- "notification_id": "n2",
- "user_id": "u1",
- "severity": NotificationSeverity.LOW.value,
- "tags": ["completed"],
- "channel": NotificationChannel.IN_APP.value,
- "subject": "s",
- "body": "b",
- "status": NotificationStatus.DELIVERED.value,
- "created_at": now,
- },
- ]
- )
- lst = await repo.list_notifications("u1")
- assert len(lst) >= 2
- assert await repo.count_notifications("u1") >= 2
- assert await repo.get_unread_count("u1") >= 0
-
- # Pending and scheduled
- pending = await repo.find_pending_notifications()
- assert any(n.status == NotificationStatus.PENDING for n in pending)
- await db.get_collection("notifications").insert_one(
- {
- "notification_id": "n3",
- "user_id": "u1",
- "severity": NotificationSeverity.MEDIUM.value,
- "tags": ["execution"],
- "channel": NotificationChannel.IN_APP.value,
- "subject": "s",
- "body": "b",
- "status": NotificationStatus.PENDING.value,
- "created_at": now,
- "scheduled_for": now + timedelta(seconds=1),
- }
- )
- scheduled = await repo.find_scheduled_notifications()
- assert isinstance(scheduled, list)
- assert await repo.cleanup_old_notifications(days=0) >= 0
-
-
-@pytest.mark.asyncio
-async def test_subscriptions_and_user_queries(db) -> None: # type: ignore[no-untyped-def]
- repo = NotificationRepository(logger=_test_logger)
-
- update_data = DomainSubscriptionUpdate(enabled=True, severities=[])
- sub = await repo.upsert_subscription("u1", NotificationChannel.IN_APP, update_data)
- assert sub.user_id == "u1"
-
- got = await repo.get_subscription("u1", NotificationChannel.IN_APP)
- assert got and got.user_id == "u1"
-
- subs = await repo.get_all_subscriptions("u1")
- assert len(subs) == len(list(NC))
-
- # Users by role and active users
- await db.get_collection("users").insert_many(
- [
- {
- UserFields.USER_ID: "u1",
- UserFields.USERNAME: "A",
- UserFields.EMAIL: "a@e.com",
- UserFields.ROLE: "user",
- UserFields.IS_ACTIVE: True,
- UserFields.HASHED_PASSWORD: "h",
- UserFields.IS_SUPERUSER: False,
- },
- {
- UserFields.USER_ID: "u2",
- UserFields.USERNAME: "B",
- UserFields.EMAIL: "b@e.com",
- UserFields.ROLE: "admin",
- UserFields.IS_ACTIVE: True,
- UserFields.HASHED_PASSWORD: "h",
- UserFields.IS_SUPERUSER: False,
- },
- ]
- )
- ids = await repo.get_users_by_roles([UserRole.USER])
- assert "u1" in ids or isinstance(ids, list)
- await db.get_collection("executions").insert_one(
- {"execution_id": "e1", "user_id": "u2", "created_at": datetime.now(UTC)}
- )
- active = await repo.get_active_users(days=1)
- assert set(active) >= {"u2"} or isinstance(active, list)
diff --git a/backend/tests/integration/db/repositories/test_replay_repository.py b/backend/tests/integration/db/repositories/test_replay_repository.py
deleted file mode 100644
index 7ab5bc7..0000000
--- a/backend/tests/integration/db/repositories/test_replay_repository.py
+++ /dev/null
@@ -1,71 +0,0 @@
-from datetime import datetime, timezone
-
-import pytest
-from app.db.repositories.replay_repository import ReplayRepository
-from app.domain.admin.replay_updates import ReplaySessionUpdate
-from app.domain.enums.replay import ReplayStatus, ReplayType
-from app.domain.replay import ReplayConfig, ReplayFilter
-from app.schemas_pydantic.replay_models import ReplaySession
-
-pytestmark = pytest.mark.integration
-
-
-@pytest.fixture()
-async def repo(scope) -> ReplayRepository: # type: ignore[valid-type]
- return await scope.get(ReplayRepository)
-
-
-@pytest.mark.asyncio
-async def test_indexes_and_session_crud(repo: ReplayRepository) -> None:
- await repo.create_indexes()
- config = ReplayConfig(replay_type=ReplayType.EXECUTION, filter=ReplayFilter())
- session = ReplaySession(
- session_id="s1", status=ReplayStatus.CREATED, created_at=datetime.now(timezone.utc), config=config
- )
- await repo.save_session(session)
- got = await repo.get_session("s1")
- assert got and got.session_id == "s1"
- lst = await repo.list_sessions(limit=5)
- assert any(s.session_id == "s1" for s in lst)
- assert await repo.update_session_status("s1", ReplayStatus.RUNNING) is True
- session_update = ReplaySessionUpdate(status=ReplayStatus.COMPLETED)
- assert await repo.update_replay_session("s1", session_update) is True
-
-
-@pytest.mark.asyncio
-async def test_count_fetch_events_and_delete(repo: ReplayRepository, db) -> None: # type: ignore[valid-type]
- now = datetime.now(timezone.utc)
- # Insert events
- await db.get_collection("events").insert_many(
- [
- {
- "event_id": "e1",
- "timestamp": now,
- "execution_id": "x1",
- "event_type": "T",
- "metadata": {"user_id": "u1"},
- },
- {
- "event_id": "e2",
- "timestamp": now,
- "execution_id": "x2",
- "event_type": "T",
- "metadata": {"user_id": "u1"},
- },
- {
- "event_id": "e3",
- "timestamp": now,
- "execution_id": "x3",
- "event_type": "U",
- "metadata": {"user_id": "u2"},
- },
- ]
- )
- cnt = await repo.count_events(ReplayFilter())
- assert cnt >= 3
- batches = []
- async for b in repo.fetch_events(ReplayFilter(), batch_size=2):
- batches.append(b)
- assert sum(len(b) for b in batches) >= 3
- # Delete old sessions (none match date predicate likely)
- assert await repo.delete_old_sessions(datetime(2000, 1, 1, tzinfo=timezone.utc)) >= 0
diff --git a/backend/tests/integration/db/repositories/test_saga_repository.py b/backend/tests/integration/db/repositories/test_saga_repository.py
deleted file mode 100644
index d78f82e..0000000
--- a/backend/tests/integration/db/repositories/test_saga_repository.py
+++ /dev/null
@@ -1,66 +0,0 @@
-from datetime import datetime, timezone
-
-import pytest
-from app.db.repositories.saga_repository import SagaRepository
-from app.domain.enums.saga import SagaState
-from app.domain.saga.models import SagaFilter, SagaListResult
-
-pytestmark = pytest.mark.integration
-
-
-@pytest.fixture()
-async def repo(scope) -> SagaRepository: # type: ignore[valid-type]
- return await scope.get(SagaRepository)
-
-
-@pytest.mark.asyncio
-async def test_saga_crud_and_queries(repo: SagaRepository, db) -> None: # type: ignore[valid-type]
- now = datetime.now(timezone.utc)
- # Insert saga docs
- await db.get_collection("sagas").insert_many(
- [
- {
- "saga_id": "s1",
- "saga_name": "test",
- "execution_id": "e1",
- "state": "running",
- "created_at": now,
- "updated_at": now,
- },
- {
- "saga_id": "s2",
- "saga_name": "test2",
- "execution_id": "e2",
- "state": "completed",
- "created_at": now,
- "updated_at": now,
- "completed_at": now,
- },
- ]
- )
- saga = await repo.get_saga("s1")
- assert saga and saga.saga_id == "s1"
- lst = await repo.get_sagas_by_execution("e1")
- assert len(lst.sagas) >= 1
-
- f = SagaFilter(execution_ids=["e1"])
- result = await repo.list_sagas(f, limit=2)
- assert isinstance(result, SagaListResult)
-
- assert await repo.update_saga_state("s1", SagaState.COMPLETED) in (True, False)
-
- # user execution ids
- await db.get_collection("executions").insert_many(
- [
- {"execution_id": "e1", "user_id": "u1"},
- {"execution_id": "e2", "user_id": "u1"},
- ]
- )
- ids = await repo.get_user_execution_ids("u1")
- assert set(ids) == {"e1", "e2"}
-
- counts = await repo.count_sagas_by_state()
- assert isinstance(counts, dict) and ("running" in counts or "completed" in counts)
-
- stats = await repo.get_saga_statistics()
- assert isinstance(stats, dict) and "total" in stats
diff --git a/backend/tests/integration/db/repositories/test_sse_repository.py b/backend/tests/integration/db/repositories/test_sse_repository.py
deleted file mode 100644
index b196ba4..0000000
--- a/backend/tests/integration/db/repositories/test_sse_repository.py
+++ /dev/null
@@ -1,37 +0,0 @@
-import pytest
-from app.db.repositories.sse_repository import SSERepository
-from app.domain.enums.execution import ExecutionStatus
-
-pytestmark = pytest.mark.integration
-
-
-@pytest.fixture()
-async def repo(scope) -> SSERepository: # type: ignore[valid-type]
- return await scope.get(SSERepository)
-
-
-@pytest.mark.asyncio
-async def test_get_execution_status(repo: SSERepository, db) -> None: # type: ignore[valid-type]
- await db.get_collection("executions").insert_one({"execution_id": "e1", "status": "running"})
- status = await repo.get_execution_status("e1")
- assert status is not None
- assert status.status == ExecutionStatus.RUNNING
- assert status.execution_id == "e1"
-
-
-@pytest.mark.asyncio
-async def test_get_execution_status_none(repo: SSERepository, db) -> None: # type: ignore[valid-type]
- assert await repo.get_execution_status("missing") is None
-
-
-@pytest.mark.asyncio
-async def test_get_execution(repo: SSERepository, db) -> None: # type: ignore[valid-type]
- await db.get_collection("executions").insert_one({"execution_id": "e1", "status": "queued", "resource_usage": {}})
- doc = await repo.get_execution("e1")
- assert doc is not None
- assert doc.execution_id == "e1"
-
-
-@pytest.mark.asyncio
-async def test_get_execution_not_found(repo: SSERepository, db) -> None: # type: ignore[valid-type]
- assert await repo.get_execution("missing") is None
diff --git a/backend/tests/integration/db/repositories/test_user_repository.py b/backend/tests/integration/db/repositories/test_user_repository.py
deleted file mode 100644
index 12f8672..0000000
--- a/backend/tests/integration/db/repositories/test_user_repository.py
+++ /dev/null
@@ -1,53 +0,0 @@
-from datetime import datetime, timezone
-
-import pytest
-from app.db.repositories.user_repository import UserRepository
-from app.domain.enums.user import UserRole
-from app.domain.user.user_models import User as DomainUser
-from app.domain.user.user_models import UserUpdate
-
-pytestmark = pytest.mark.integration
-
-
-@pytest.fixture()
-async def repo(scope) -> UserRepository: # type: ignore[valid-type]
- return await scope.get(UserRepository)
-
-
-@pytest.mark.asyncio
-async def test_create_get_update_delete_user(repo: UserRepository) -> None:
- # Create user
- user = DomainUser(
- user_id="", # let repo assign
- username="alice",
- email="alice@example.com",
- role=UserRole.USER,
- is_active=True,
- is_superuser=False,
- hashed_password="h",
- created_at=datetime.now(timezone.utc),
- updated_at=datetime.now(timezone.utc),
- )
- created = await repo.create_user(user)
- assert created.user_id
-
- # Get by username
- fetched = await repo.get_user("alice")
- assert fetched and fetched.username == "alice"
-
- # Get by id
- by_id = await repo.get_user_by_id(created.user_id)
- assert by_id and by_id.user_id == created.user_id
-
- # List with search + role
- users = await repo.list_users(limit=10, offset=0, search="ali", role=UserRole.USER)
- assert any(u.username == "alice" for u in users)
-
- # Update
- upd = UserUpdate(email="alice2@example.com")
- updated = await repo.update_user(created.user_id, upd)
- assert updated and updated.email == "alice2@example.com"
-
- # Delete
- assert await repo.delete_user(created.user_id) is True
- assert await repo.get_user("alice") is None
diff --git a/backend/tests/integration/db/repositories/test_user_settings_repository.py b/backend/tests/integration/db/repositories/test_user_settings_repository.py
deleted file mode 100644
index 83bf6a6..0000000
--- a/backend/tests/integration/db/repositories/test_user_settings_repository.py
+++ /dev/null
@@ -1,50 +0,0 @@
-from datetime import datetime, timedelta, timezone
-
-import pytest
-from app.db.repositories.user_settings_repository import UserSettingsRepository
-from app.domain.enums.events import EventType
-from app.domain.user.settings_models import DomainUserSettings
-
-pytestmark = pytest.mark.integration
-
-
-@pytest.fixture()
-async def repo(scope) -> UserSettingsRepository: # type: ignore[valid-type]
- return await scope.get(UserSettingsRepository)
-
-
-@pytest.mark.asyncio
-async def test_user_settings_snapshot_and_events(repo: UserSettingsRepository, db) -> None: # type: ignore[valid-type]
- # Create indexes (should not raise)
- await repo.create_indexes()
-
- # Snapshot CRUD
- us = DomainUserSettings(user_id="u1")
- await repo.create_snapshot(us)
- got = await repo.get_snapshot("u1")
- assert got and got.user_id == "u1"
-
- # Insert events and query
- now = datetime.now(timezone.utc)
- await db.get_collection("events").insert_many(
- [
- {
- "aggregate_id": "user_settings_u1",
- "event_type": str(EventType.USER_SETTINGS_UPDATED),
- "timestamp": now,
- "payload": {},
- },
- {
- "aggregate_id": "user_settings_u1",
- "event_type": str(EventType.USER_THEME_CHANGED),
- "timestamp": now,
- "payload": {},
- },
- ]
- )
- evs = await repo.get_settings_events("u1", [EventType.USER_SETTINGS_UPDATED], since=now - timedelta(days=1))
- assert any(e.event_type == EventType.USER_SETTINGS_UPDATED for e in evs)
-
- # Counting helpers
- assert await repo.count_events_for_user("u1") >= 2
- assert await repo.count_events_since_snapshot("u1") >= 0
diff --git a/backend/tests/integration/events/test_event_store.py b/backend/tests/integration/events/test_event_store.py
deleted file mode 100644
index 470beb9..0000000
--- a/backend/tests/integration/events/test_event_store.py
+++ /dev/null
@@ -1,62 +0,0 @@
-from datetime import datetime, timedelta, timezone
-
-import pytest
-from app.events.event_store import EventStore
-from app.infrastructure.kafka.events.metadata import AvroEventMetadata
-from app.infrastructure.kafka.events.pod import PodCreatedEvent
-from app.infrastructure.kafka.events.user import UserLoggedInEvent
-
-pytestmark = [pytest.mark.integration, pytest.mark.mongodb]
-
-
-@pytest.fixture()
-async def event_store(scope) -> EventStore: # type: ignore[valid-type]
- return await scope.get(EventStore)
-
-
-@pytest.mark.asyncio
-async def test_store_and_query_events(event_store: EventStore) -> None:
- ev1 = PodCreatedEvent(
- execution_id="x1",
- pod_name="pod1",
- namespace="ns",
- metadata=AvroEventMetadata(service_name="svc", service_version="1", user_id="u1", correlation_id="cid"),
- )
- assert await event_store.store_event(ev1) is True
-
- ev2 = PodCreatedEvent(
- execution_id="x2",
- pod_name="pod2",
- namespace="ns",
- metadata=AvroEventMetadata(service_name="svc", service_version="1", user_id="u1"),
- )
- res = await event_store.store_batch([ev1, ev2])
- assert res["total"] == 2 and res["stored"] >= 1
-
- items = await event_store.get_events_by_type(ev1.event_type)
- assert any(getattr(e, "execution_id", None) == "x1" for e in items)
- exec_items = await event_store.get_execution_events("x1")
- assert any(getattr(e, "execution_id", None) == "x1" for e in exec_items)
- user_items = await event_store.get_user_events("u1")
- assert len(user_items) >= 2
- chain = await event_store.get_correlation_chain("cid")
- assert isinstance(chain, list)
- # Security types (may be empty)
- _ = await event_store.get_security_events()
-
-
-@pytest.mark.asyncio
-async def test_replay_events(event_store: EventStore) -> None:
- ev = UserLoggedInEvent(
- user_id="u1", login_method="password", metadata=AvroEventMetadata(service_name="svc", service_version="1")
- )
- await event_store.store_event(ev)
-
- called = {"n": 0}
-
- async def cb(_): # noqa: ANN001
- called["n"] += 1
-
- start = datetime.now(timezone.utc) - timedelta(days=1)
- cnt = await event_store.replay_events(start_time=start, callback=cb)
- assert cnt >= 1 and called["n"] >= 1
diff --git a/backend/tests/integration/events/test_event_store_consumer_flush_e2e.py b/backend/tests/integration/events/test_event_store_consumer_flush_e2e.py
deleted file mode 100644
index 38610f5..0000000
--- a/backend/tests/integration/events/test_event_store_consumer_flush_e2e.py
+++ /dev/null
@@ -1,55 +0,0 @@
-from uuid import uuid4
-
-import pytest
-from app.core.database_context import Database
-from app.domain.enums.events import EventType
-from app.domain.enums.kafka import KafkaTopic
-from app.events.core import UnifiedProducer
-from app.events.event_store import EventStore
-from app.events.event_store_consumer import create_event_store_consumer
-from app.events.schema.schema_registry import SchemaRegistryManager
-
-from tests.helpers import make_execution_requested_event
-from tests.helpers.eventually import eventually
-
-pytestmark = [pytest.mark.integration, pytest.mark.kafka, pytest.mark.mongodb]
-
-
-@pytest.fixture()
-async def store(scope) -> EventStore: # type: ignore[valid-type]
- return await scope.get(EventStore)
-
-
-@pytest.mark.asyncio
-async def test_event_store_consumer_flush_on_timeout(scope, store: EventStore) -> None: # type: ignore[valid-type]
- producer: UnifiedProducer = await scope.get(UnifiedProducer)
- schema: SchemaRegistryManager = await scope.get(SchemaRegistryManager)
- db: Database = await scope.get(Database)
-
- consumer = create_event_store_consumer(
- event_store=store,
- topics=[KafkaTopic.EXECUTION_EVENTS],
- schema_registry_manager=schema,
- logger=store.logger,
- producer=producer,
- batch_size=100,
- batch_timeout_seconds=0.2,
- )
- await consumer.start()
- try:
- # Directly invoke handler to enqueue
- exec_ids = []
- for _ in range(3):
- x = f"exec-{uuid4().hex[:6]}"
- exec_ids.append(x)
- ev = make_execution_requested_event(execution_id=x)
- await consumer._handle_event(ev) # noqa: SLF001
-
- async def _all_present() -> None:
- docs = await db[store.collection_name].find({"event_type": str(EventType.EXECUTION_REQUESTED)}).to_list(50)
- have = {d.get("execution_id") for d in docs}
- assert set(exec_ids).issubset(have)
-
- await eventually(_all_present, timeout=5.0, interval=0.2)
- finally:
- await consumer.stop()
diff --git a/backend/tests/integration/events/test_event_store_e2e.py b/backend/tests/integration/events/test_event_store_e2e.py
deleted file mode 100644
index 25f5be9..0000000
--- a/backend/tests/integration/events/test_event_store_e2e.py
+++ /dev/null
@@ -1,40 +0,0 @@
-import pytest
-from app.domain.enums.events import EventType
-from app.events.event_store import EventStore
-
-from tests.helpers import make_execution_requested_event
-
-pytestmark = [pytest.mark.integration, pytest.mark.mongodb]
-
-
-@pytest.fixture()
-async def store(scope) -> EventStore: # type: ignore[valid-type]
- return await scope.get(EventStore)
-
-
-@pytest.mark.asyncio
-async def test_event_store_initialize_and_crud(store: EventStore) -> None:
- # Store single event
- ev = make_execution_requested_event(execution_id="e-1")
- assert await store.store_event(ev) is True
-
- # Duplicate insert should be treated as success True (DuplicateKey swallowed)
- assert await store.store_event(ev) is True
-
- # Batch store with duplicates
- ev2 = ev.model_copy(update={"event_id": "new-2", "execution_id": "e-2"})
- res = await store.store_batch([ev, ev2])
- assert res["total"] == 2 and res["stored"] >= 1
-
- # Queries
- by_id = await store.get_event(ev.event_id)
- assert by_id is not None and by_id.event_id == ev.event_id
-
- by_type = await store.get_events_by_type(EventType.EXECUTION_REQUESTED, limit=10)
- assert any(e.event_id == ev.event_id for e in by_type)
-
- by_exec = await store.get_execution_events("e-1")
- assert any(e.event_id == ev.event_id for e in by_exec)
-
- by_user = await store.get_user_events("u-unknown", limit=10)
- assert isinstance(by_user, list)
diff --git a/backend/tests/integration/services/events/test_event_service_integration.py b/backend/tests/integration/services/events/test_event_service_integration.py
deleted file mode 100644
index 21b7895..0000000
--- a/backend/tests/integration/services/events/test_event_service_integration.py
+++ /dev/null
@@ -1,63 +0,0 @@
-from datetime import datetime, timezone, timedelta
-
-import pytest
-
-from app.db.repositories import EventRepository
-from app.domain.events.event_models import Event, EventFilter
-from app.domain.enums.common import SortOrder
-from app.domain.enums.user import UserRole
-from app.infrastructure.kafka.events.metadata import AvroEventMetadata
-from app.domain.enums.events import EventType
-from app.services.event_service import EventService
-
-pytestmark = [pytest.mark.integration, pytest.mark.mongodb]
-
-
-@pytest.mark.asyncio
-async def test_event_service_access_and_queries(scope) -> None: # type: ignore[valid-type]
- repo: EventRepository = await scope.get(EventRepository)
- svc: EventService = await scope.get(EventService)
-
- now = datetime.now(timezone.utc)
- # Seed some events (domain Event, not infra BaseEvent)
- md1 = AvroEventMetadata(service_name="svc", service_version="1", user_id="u1", correlation_id="c1")
- md2 = AvroEventMetadata(service_name="svc", service_version="1", user_id="u2", correlation_id="c1")
- e1 = Event(event_id="e1", event_type=str(EventType.USER_LOGGED_IN), event_version="1.0", timestamp=now,
- metadata=md1, payload={"user_id": "u1", "login_method": "password"}, aggregate_id="agg1")
- e2 = Event(event_id="e2", event_type=str(EventType.USER_LOGGED_IN), event_version="1.0", timestamp=now,
- metadata=md2, payload={"user_id": "u2", "login_method": "password"}, aggregate_id="agg2")
- await repo.store_event(e1)
- await repo.store_event(e2)
-
- # get_execution_events returns None when non-admin for different user; then admin sees
- events_user = await svc.get_execution_events("agg1", "u2", UserRole.USER)
- assert events_user is None
- events_admin = await svc.get_execution_events("agg1", "admin", UserRole.ADMIN)
- assert any(ev.aggregate_id == "agg1" for ev in events_admin.events)
-
- # query_events_advanced: basic run (empty filters) should return a result structure
- res = await svc.query_events_advanced("u1", UserRole.USER, filters=EventFilter(), sort_by="correlation_id", sort_order=SortOrder.ASC)
- assert res is not None
-
- # get_events_by_correlation filters non-admin to their own user_id
- by_corr_user = await svc.get_events_by_correlation("c1", user_id="u1", user_role=UserRole.USER, include_all_users=False)
- assert all(ev.metadata.user_id == "u1" for ev in by_corr_user.events)
- by_corr_admin = await svc.get_events_by_correlation("c1", user_id="admin", user_role=UserRole.ADMIN, include_all_users=True)
- assert len(by_corr_admin.events) >= 2
-
- # get_event_statistics (time window)
- _ = await svc.get_event_statistics("u1", UserRole.USER, start_time=now - timedelta(days=1), end_time=now + timedelta(days=1))
-
- # get_event enforces access control
- one_allowed = await svc.get_event(e1.event_id, user_id="u1", user_role=UserRole.USER)
- assert one_allowed is not None
- one_denied = await svc.get_event(e1.event_id, user_id="u2", user_role=UserRole.USER)
- assert one_denied is None
-
- # aggregate_events injects user filter for non-admin
- pipe = [{"$match": {"event_type": str(e1.event_type)}}]
- _ = await svc.aggregate_events("u1", UserRole.USER, pipe)
-
- # list_event_types returns at least one type
- types = await svc.list_event_types("u1", UserRole.USER)
- assert isinstance(types, list) and len(types) >= 1
From 9cc16c4d57839784af6b4dc73d19d623d69f8826 Mon Sep 17 00:00:00 2001
From: HardMax71
Date: Wed, 31 Dec 2025 01:27:02 +0100
Subject: [PATCH 42/42] moved from stdlib dataclass to pydantic dataclass
---
backend/app/domain/admin/overview_models.py | 4 +-
backend/app/domain/admin/replay_models.py | 4 +-
backend/app/domain/admin/replay_updates.py | 3 +-
backend/app/domain/admin/settings_models.py | 4 +-
backend/app/domain/events/event_metadata.py | 4 +-
backend/app/domain/events/event_models.py | 4 +-
backend/app/domain/execution/models.py | 4 +-
backend/app/domain/idempotency/models.py | 3 +-
backend/app/domain/notification/models.py | 4 +-
.../domain/rate_limit/rate_limit_models.py | 4 +-
backend/app/domain/replay/models.py | 3 +-
backend/app/domain/saga/models.py | 4 +-
backend/app/domain/saved_script/models.py | 4 +-
backend/app/domain/sse/models.py | 3 +-
backend/app/domain/user/settings_models.py | 4 +-
backend/app/domain/user/user_models.py | 3 +-
docs/architecture/pydantic-dataclasses.md | 173 ++++++++++++++++++
mkdocs.yml | 1 +
18 files changed, 217 insertions(+), 16 deletions(-)
create mode 100644 docs/architecture/pydantic-dataclasses.md
diff --git a/backend/app/domain/admin/overview_models.py b/backend/app/domain/admin/overview_models.py
index a208c95..23f9140 100644
--- a/backend/app/domain/admin/overview_models.py
+++ b/backend/app/domain/admin/overview_models.py
@@ -1,8 +1,10 @@
from __future__ import annotations
-from dataclasses import dataclass, field
+from dataclasses import field
from typing import List
+from pydantic.dataclasses import dataclass
+
from app.domain.events import Event, EventStatistics
from app.domain.user import User as DomainAdminUser
diff --git a/backend/app/domain/admin/replay_models.py b/backend/app/domain/admin/replay_models.py
index 59bd720..44d7d79 100644
--- a/backend/app/domain/admin/replay_models.py
+++ b/backend/app/domain/admin/replay_models.py
@@ -1,7 +1,9 @@
-from dataclasses import dataclass, field
+from dataclasses import field
from datetime import datetime
from typing import Any
+from pydantic.dataclasses import dataclass
+
from app.domain.enums.replay import ReplayStatus
from app.domain.events.event_models import EventSummary
from app.domain.replay.models import ReplaySessionState
diff --git a/backend/app/domain/admin/replay_updates.py b/backend/app/domain/admin/replay_updates.py
index 075a34b..c326565 100644
--- a/backend/app/domain/admin/replay_updates.py
+++ b/backend/app/domain/admin/replay_updates.py
@@ -1,8 +1,9 @@
"""Domain models for replay session updates."""
-from dataclasses import dataclass
from datetime import datetime
+from pydantic.dataclasses import dataclass
+
from app.domain.enums.replay import ReplayStatus
diff --git a/backend/app/domain/admin/settings_models.py b/backend/app/domain/admin/settings_models.py
index d8ffc1c..cad09f3 100644
--- a/backend/app/domain/admin/settings_models.py
+++ b/backend/app/domain/admin/settings_models.py
@@ -1,7 +1,9 @@
-from dataclasses import dataclass, field
+from dataclasses import field
from datetime import datetime, timezone
from typing import Any
+from pydantic.dataclasses import dataclass
+
from app.core.utils import StringEnum
diff --git a/backend/app/domain/events/event_metadata.py b/backend/app/domain/events/event_metadata.py
index 9bca16c..c3a5744 100644
--- a/backend/app/domain/events/event_metadata.py
+++ b/backend/app/domain/events/event_metadata.py
@@ -1,6 +1,8 @@
-from dataclasses import dataclass, field, replace
+from dataclasses import field, replace
from uuid import uuid4
+from pydantic.dataclasses import dataclass
+
from app.domain.enums.common import Environment
diff --git a/backend/app/domain/events/event_models.py b/backend/app/domain/events/event_models.py
index eda1fef..3dc5762 100644
--- a/backend/app/domain/events/event_models.py
+++ b/backend/app/domain/events/event_models.py
@@ -1,7 +1,9 @@
-from dataclasses import dataclass, field
+from dataclasses import field
from datetime import datetime
from typing import Any
+from pydantic.dataclasses import dataclass
+
from app.core.utils import StringEnum
from app.domain.enums.events import EventType
from app.domain.events.event_metadata import EventMetadata
diff --git a/backend/app/domain/execution/models.py b/backend/app/domain/execution/models.py
index 2b6e8eb..2bd3095 100644
--- a/backend/app/domain/execution/models.py
+++ b/backend/app/domain/execution/models.py
@@ -1,10 +1,12 @@
from __future__ import annotations
-from dataclasses import dataclass, field
+from dataclasses import field
from datetime import datetime, timezone
from typing import Any, Optional
from uuid import uuid4
+from pydantic.dataclasses import dataclass
+
from app.domain.enums.execution import ExecutionStatus
from app.domain.enums.storage import ExecutionErrorType
diff --git a/backend/app/domain/idempotency/models.py b/backend/app/domain/idempotency/models.py
index f3001c8..38fba57 100644
--- a/backend/app/domain/idempotency/models.py
+++ b/backend/app/domain/idempotency/models.py
@@ -1,9 +1,10 @@
from __future__ import annotations
-from dataclasses import dataclass
from datetime import datetime
from typing import Dict, Optional
+from pydantic.dataclasses import dataclass
+
from app.core.utils import StringEnum
diff --git a/backend/app/domain/notification/models.py b/backend/app/domain/notification/models.py
index d697b25..8a1bac4 100644
--- a/backend/app/domain/notification/models.py
+++ b/backend/app/domain/notification/models.py
@@ -1,10 +1,12 @@
from __future__ import annotations
-from dataclasses import dataclass, field
+from dataclasses import field
from datetime import UTC, datetime
from typing import Any
from uuid import uuid4
+from pydantic.dataclasses import dataclass
+
from app.domain.enums.notification import (
NotificationChannel,
NotificationSeverity,
diff --git a/backend/app/domain/rate_limit/rate_limit_models.py b/backend/app/domain/rate_limit/rate_limit_models.py
index 5971355..08ef946 100644
--- a/backend/app/domain/rate_limit/rate_limit_models.py
+++ b/backend/app/domain/rate_limit/rate_limit_models.py
@@ -1,8 +1,10 @@
import re
-from dataclasses import dataclass, field
+from dataclasses import field
from datetime import datetime, timezone
from typing import Dict, List, Optional
+from pydantic.dataclasses import dataclass
+
from app.core.utils import StringEnum
diff --git a/backend/app/domain/replay/models.py b/backend/app/domain/replay/models.py
index 80331c1..17e241b 100644
--- a/backend/app/domain/replay/models.py
+++ b/backend/app/domain/replay/models.py
@@ -1,9 +1,10 @@
-from dataclasses import dataclass, field
+from dataclasses import field
from datetime import datetime, timezone
from typing import Any, Dict, List
from uuid import uuid4
from pydantic import BaseModel, Field, PrivateAttr
+from pydantic.dataclasses import dataclass
from app.domain.enums.events import EventType
from app.domain.enums.replay import ReplayStatus, ReplayTarget, ReplayType
diff --git a/backend/app/domain/saga/models.py b/backend/app/domain/saga/models.py
index 06c2cca..a885c3b 100644
--- a/backend/app/domain/saga/models.py
+++ b/backend/app/domain/saga/models.py
@@ -1,8 +1,10 @@
-from dataclasses import dataclass, field
+from dataclasses import field
from datetime import datetime, timezone
from typing import Any
from uuid import uuid4
+from pydantic.dataclasses import dataclass
+
from app.domain.enums.saga import SagaState
diff --git a/backend/app/domain/saved_script/models.py b/backend/app/domain/saved_script/models.py
index ba819cb..0862242 100644
--- a/backend/app/domain/saved_script/models.py
+++ b/backend/app/domain/saved_script/models.py
@@ -1,8 +1,10 @@
from __future__ import annotations
-from dataclasses import dataclass, field
+from dataclasses import field
from datetime import datetime, timezone
+from pydantic.dataclasses import dataclass
+
@dataclass
class DomainSavedScriptBase:
diff --git a/backend/app/domain/sse/models.py b/backend/app/domain/sse/models.py
index e4dfa5f..c8a59e8 100644
--- a/backend/app/domain/sse/models.py
+++ b/backend/app/domain/sse/models.py
@@ -1,8 +1,9 @@
from __future__ import annotations
-from dataclasses import dataclass
from datetime import datetime
+from pydantic.dataclasses import dataclass
+
from app.domain.enums.execution import ExecutionStatus
diff --git a/backend/app/domain/user/settings_models.py b/backend/app/domain/user/settings_models.py
index 171f1b1..10a730d 100644
--- a/backend/app/domain/user/settings_models.py
+++ b/backend/app/domain/user/settings_models.py
@@ -1,9 +1,11 @@
from __future__ import annotations
-from dataclasses import dataclass, field
+from dataclasses import field
from datetime import datetime, timezone
from typing import Any, Dict, List, Optional
+from pydantic.dataclasses import dataclass
+
from app.domain.enums.common import Theme
from app.domain.enums.events import EventType
from app.domain.enums.notification import NotificationChannel
diff --git a/backend/app/domain/user/user_models.py b/backend/app/domain/user/user_models.py
index 242529f..fa34d06 100644
--- a/backend/app/domain/user/user_models.py
+++ b/backend/app/domain/user/user_models.py
@@ -1,8 +1,9 @@
import re
-from dataclasses import dataclass
from datetime import datetime
from typing import List
+from pydantic.dataclasses import dataclass
+
from app.core.utils import StringEnum
from app.domain.enums.user import UserRole
diff --git a/docs/architecture/pydantic-dataclasses.md b/docs/architecture/pydantic-dataclasses.md
new file mode 100644
index 0000000..b169958
--- /dev/null
+++ b/docs/architecture/pydantic-dataclasses.md
@@ -0,0 +1,173 @@
+# Pydantic dataclasses
+
+This document explains why domain models use `pydantic.dataclasses.dataclass` instead of the standard library
+`dataclasses.dataclass`. It covers the problem with nested dict conversion, the solution, and migration considerations.
+
+## Why pydantic dataclasses
+
+Domain models are dataclasses that represent business entities like `DomainUserSettings`, `DomainExecution`, and `Saga`.
+These models often have nested structures - for example, `DomainUserSettings` contains `DomainNotificationSettings` and
+`DomainEditorSettings` as nested dataclasses.
+
+The problem appears when loading data from MongoDB. Beanie documents are Pydantic models, and calling `model_dump()` on
+them returns plain Python dicts, including nested dicts for nested models. When you pass these dicts to a stdlib
+dataclass constructor, nested dicts stay as dicts instead of being converted to their proper dataclass types.
+
+```python
+# Data from MongoDB via Beanie document.model_dump()
+data = {
+ "user_id": "user123",
+ "notifications": {
+ "execution_completed": False,
+ "execution_failed": True
+ }
+}
+
+# With stdlib dataclass - FAILS
+settings = DomainUserSettings(**data)
+settings.notifications.execution_completed # AttributeError: 'dict' has no attribute 'execution_completed'
+
+# With pydantic dataclass - WORKS
+settings = DomainUserSettings(**data)
+settings.notifications.execution_completed # Returns False
+```
+
+Pydantic dataclasses use type annotations to automatically convert nested dicts into the correct dataclass instances. No
+reflection, no isinstance checks, no manual conversion code.
+
+## What pydantic dataclasses provide
+
+Pydantic dataclasses are a drop-in replacement for stdlib dataclasses with added features:
+
+| Feature | stdlib | pydantic |
+|------------------------|--------|----------|
+| Nested dict conversion | No | Yes |
+| Enum from string | No | Yes |
+| Type validation | No | Yes |
+| String-to-int coercion | No | Yes |
+| `asdict()` | Yes | Yes |
+| `is_dataclass()` | Yes | Yes |
+| `__dataclass_fields__` | Yes | Yes |
+| `field()` | Yes | Yes |
+| `__post_init__` | Yes | Yes |
+| `replace()` | Yes | Yes |
+| frozen/eq/hash | Yes | Yes |
+| Inheritance | Yes | Yes |
+
+The migration requires changing one import:
+
+```python
+# Before
+from dataclasses import dataclass
+
+# After
+from pydantic.dataclasses import dataclass
+```
+
+Everything else stays the same. The `field` function still comes from stdlib `dataclasses`.
+
+## Performance
+
+Pydantic dataclasses add validation overhead at construction time:
+
+| Operation | stdlib | pydantic | Ratio |
+|--------------------|-------------|-------------|-------------|
+| Creation from dict | 0.2 us | 1.4 us | 6x slower |
+| Attribute access | 4.1 ms/100k | 4.6 ms/100k | 1.1x slower |
+
+The creation overhead is negligible for typical usage patterns - domain models are created during request handling, not
+in tight loops. Attribute access after construction has no meaningful overhead.
+
+## Domain model locations
+
+All domain models live in `app/domain/` and use pydantic dataclasses:
+
+| Module | File | Key models |
+|--------------|---------------------------------------|----------------------------------------------------------------------------|
+| User | `app/domain/user/settings_models.py` | `DomainUserSettings`, `DomainNotificationSettings`, `DomainEditorSettings` |
+| User | `app/domain/user/user_models.py` | `User`, `UserCreation`, `UserUpdate` |
+| Execution | `app/domain/execution/models.py` | `DomainExecution`, `ExecutionResultDomain` |
+| Events | `app/domain/events/event_models.py` | `Event`, `EventFilter`, `EventQuery` |
+| Events | `app/domain/events/event_metadata.py` | `EventMetadata` |
+| Saga | `app/domain/saga/models.py` | `Saga`, `SagaInstance`, `SagaConfig` |
+| Replay | `app/domain/replay/models.py` | `ReplaySessionState` |
+| Notification | `app/domain/notification/models.py` | `DomainNotification`, `DomainNotificationSubscription` |
+| Admin | `app/domain/admin/settings_models.py` | `SystemSettings`, `ExecutionLimits` |
+
+## Using domain models in repositories
+
+Repositories that load from MongoDB convert Beanie documents to domain models:
+
+```python
+from app.domain.user.settings_models import DomainUserSettings
+
+class UserSettingsRepository:
+ async def get_snapshot(self, user_id: str) -> DomainUserSettings | None:
+ doc = await UserSettingsDocument.find_one({"user_id": user_id})
+ if not doc:
+ return None
+ # Pydantic dataclass handles nested conversion automatically
+ return DomainUserSettings(**doc.model_dump(exclude={"id", "revision_id"}))
+```
+
+No manual conversion of nested fields needed. The type annotations on `DomainUserSettings` tell pydantic how to convert
+each nested dict.
+
+## Validation behavior
+
+Pydantic dataclasses validate input data at construction time. Invalid data raises `ValidationError`:
+
+```python
+# Invalid enum value
+DomainUserSettings(user_id="u1", theme="invalid_theme")
+# ValidationError: Input should be 'light', 'dark' or 'auto'
+
+# Invalid type
+DomainNotificationSettings(execution_completed="not_a_bool")
+# ValidationError: Input should be a valid boolean
+```
+
+This catches data problems at the boundary where data enters the domain, rather than later during processing. Services
+can trust that domain models contain valid data.
+
+## What stays as Pydantic BaseModel
+
+Some classes still use `pydantic.BaseModel` instead of dataclasses:
+
+- Beanie documents (require BaseModel for ODM features)
+- Request/response schemas (FastAPI integration)
+- Configuration models with complex validation
+- Classes that need `model_validate()`, `model_json_schema()`, or other BaseModel methods
+
+The rule: use pydantic dataclasses for domain models that represent business entities. Use BaseModel for infrastructure
+concerns like documents, schemas, and configs.
+
+## Adding new domain models
+
+When creating a new domain model:
+
+1. Import dataclass from pydantic: `from pydantic.dataclasses import dataclass`
+2. Import field from stdlib if needed: `from dataclasses import field`
+3. Define the class with `@dataclass` decorator
+4. Use type annotations - pydantic uses them for conversion and validation
+5. Put nested dataclasses before the parent class that uses them
+
+```python
+from dataclasses import field
+from datetime import datetime
+from pydantic.dataclasses import dataclass
+
+@dataclass
+class NestedModel:
+ value: int
+ label: str = "default"
+
+@dataclass
+class ParentModel:
+ id: str
+ nested: NestedModel
+ items: list[str] = field(default_factory=list)
+ created_at: datetime = field(default_factory=datetime.utcnow)
+```
+
+The model automatically handles nested dict conversion, enum parsing, and type coercion.
diff --git a/mkdocs.yml b/mkdocs.yml
index 1078a3f..aabc3c4 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -104,6 +104,7 @@ nav:
- Overview: architecture/overview.md
- Services: architecture/services-overview.md
- Domain Exceptions: architecture/domain-exceptions.md
+ - Pydantic Dataclasses: architecture/pydantic-dataclasses.md
- Frontend Build: architecture/frontend-build.md
- Svelte 5 Migration: architecture/svelte5-migration.md
- Kafka Topics: architecture/kafka-topic-architecture.md