From 0bc8e4e0c0edd668366038f77ddc1d7e06f6ea03 Mon Sep 17 00:00:00 2001 From: Maxwell Brown Date: Thu, 15 Jan 2026 18:00:37 -0500 Subject: [PATCH 1/2] add domain, error, and api specifications for registry api --- packages/amp/src/Models.ts | 14 +- packages/amp/src/domain.ts | 673 +++++++++++++++++++++++ packages/amp/src/registry/api.ts | 595 ++++++++++++++++++++ packages/amp/src/registry/domain.ts | 813 ++++++++++++++++++++++++++++ packages/amp/src/registry/error.ts | 236 ++++++++ pnpm-lock.yaml | 37 +- 6 files changed, 2352 insertions(+), 16 deletions(-) create mode 100644 packages/amp/src/domain.ts create mode 100644 packages/amp/src/registry/api.ts create mode 100644 packages/amp/src/registry/domain.ts create mode 100644 packages/amp/src/registry/error.ts diff --git a/packages/amp/src/Models.ts b/packages/amp/src/Models.ts index af95432..d9eb42d 100644 --- a/packages/amp/src/Models.ts +++ b/packages/amp/src/Models.ts @@ -380,7 +380,9 @@ export type DatasetReadme = typeof DatasetReadme.Type /** * Represents additional description and details about the dataset. */ -export const DatasetDescription = Schema.String.annotations({ +export const DatasetDescription = Schema.String.pipe( + Schema.maxLength(1024) +).annotations({ identifier: "DatasetDescription", title: "Description", description: "Additional description and details about the dataset." @@ -429,6 +431,14 @@ export const DatasetLicense = Schema.String.annotations({ }) export type DatasetLicense = typeof DatasetLicense.Type +/** + * Represents the visibility of a dataset. + */ +export const DatasetVisibility = Schema.Literal("public", "private").annotations({ + identifier: "DatasetVisibility" +}) +export type DatasetVisibility = typeof DatasetVisibility.Type + /** * Represents metadata associated with a dataset. */ @@ -441,7 +451,7 @@ export const DatasetMetadata = Schema.Struct({ keywords: Schema.optional(Schema.Array(DatasetKeyword)), sources: Schema.optional(Schema.Array(DatasetSource)), license: Schema.optional(DatasetLicense), - visibility: Schema.optional(Schema.Literal("public", "private")) + visibility: Schema.optional(DatasetVisibility) }).annotations({ identifier: "DatasetMetadata", description: "Metadata associated with a dataset." diff --git a/packages/amp/src/domain.ts b/packages/amp/src/domain.ts new file mode 100644 index 0000000..544e7a1 --- /dev/null +++ b/packages/amp/src/domain.ts @@ -0,0 +1,673 @@ +import * as S from "effect/Schema" + +export class HealthcheckResponse extends S.Class("HealthcheckResponse")({ + "status": S.String, + "version": S.String +}) {} + +/** + * Time-based buckets for grouping datasets by last updated time + */ +export class LastUpdatedBucket extends S.Literal("last_day", "last_week", "last_month", "last_year") {} + +export class DatasetsListParams extends S.Struct({ + "limit": S.optionalWith(S.Int, { nullable: true }), + "page": S.optionalWith(S.Int, { nullable: true }), + "sort_by": S.optionalWith(S.String, { nullable: true }), + "direction": S.optionalWith(S.String, { nullable: true }), + "indexing_chains": S.optionalWith(S.Array(S.String), { nullable: true }), + "keywords": S.optionalWith(S.Array(S.String), { nullable: true }), + "last_updated": S.optionalWith(LastUpdatedBucket, { nullable: true }) +}) {} + +export class DatasetVersionAncestry extends S.Class("DatasetVersionAncestry")({ + /** + * Dataset reference in the format: {namespace}/{name}@{version_tag}. Points to the DatasetVersion.dataset_reference. This allows version-pinned dependencies. 
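+   * For illustration only (hypothetical names), a value matching this pattern
+   * looks like `my_namespace/my_dataset@1.0.0`.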
+ */ + "dataset_reference": S.String.pipe(S.pattern(new RegExp("^[a-z0-9_]+/[a-z_][a-z0-9_]*@[a-z0-9._-]+$"))) +}) {} + +export class DatasetVersionStatus extends S.Literal("draft", "published", "deprecated", "archived") {} + +export class DatasetVersion extends S.Class("DatasetVersion")({ + /** + * Array of ancestor DatasetVersion references that this version extends from (version-pinned dependencies). + */ + "ancestors": S.optionalWith(S.Array(DatasetVersionAncestry), { nullable: true }), + /** + * A description of what changed with this version. Allows developers of the Dataset to communicate to downstream consumers what has changed with this version from previous versions. Migration guides, etc. + */ + "changelog": S.optionalWith(S.String, { nullable: true }), + /** + * Timestamp when the DatasetVersion record was created (immutable). + */ + "created_at": S.String, + /** + * Dataset reference in the format: {namespace}/{name}@{version_tag}. This value is globally unique and is a pointer to a tagged and published Manifest. + */ + "dataset_reference": S.String.pipe(S.pattern(new RegExp("^[a-z0-9_]+/[a-z_][a-z0-9_]*@[a-z0-9._-]+$"))), + /** + * Array of descendant DatasetVersion references that extend from this version. + */ + "descendants": S.optionalWith(S.Array(DatasetVersionAncestry), { nullable: true }), + "status": DatasetVersionStatus, + /** + * The published version tag. This is basically the version label. Can be semver, a commit hash, or 'latest'. + */ + "version_tag": S.String.pipe(S.pattern(new RegExp("^[a-z0-9._-]+$"))) +}) {} + +export class DatasetVisibility extends S.Literal("private", "public") {} + +/** + * Top-level container for a user-defined, tagged, and published Dataset. Contains metadata and discovery information for datasets. + */ +export class Dataset extends S.Class("Dataset")({ + /** + * Timestamp when the Dataset record was created (immutable). + */ + "created_at": S.String, + /** + * Computed link to the latest DatasetVersion reference in PURL format. + */ + "dataset_reference": S.optionalWith( + S.String.pipe(S.minLength(1), S.pattern(new RegExp("^[a-z0-9_]+/[a-z_][a-z0-9_]*@[a-z0-9._-]+$"))), + { nullable: true } + ), + /** + * Description of the dataset, its intended use, and purpose. + */ + "description": S.optionalWith(S.String.pipe(S.maxLength(1024)), { nullable: true }), + /** + * Chains being indexed by the Dataset. Used for discovery by chain. + */ + "indexing_chains": S.Array(S.String), + /** + * User-defined or derived keywords defining the usage of the dataset. + */ + "keywords": S.optionalWith(S.Array(S.String), { nullable: true }), + "latest_version": S.optionalWith(DatasetVersion, { nullable: true }), + /** + * Usage license covering the Dataset. + */ + "license": S.optionalWith(S.String, { nullable: true }), + /** + * The dataset name. Lowercase, alphanumeric with underscores. Cannot start with a number. + */ + "name": S.String.pipe(S.minLength(1), S.pattern(new RegExp("^[a-z_][a-z0-9_]*$"))), + /** + * The dataset namespace. Logical grouping mechanism for datasets. Can be a user 0x address, username, or organization. + */ + "namespace": S.String.pipe(S.minLength(1), S.pattern(new RegExp("^[a-z0-9_]*$"))), + /** + * Owner of the Dataset. Can be an organization or user 0x address. + */ + "owner": S.String, + /** + * User-defined README for the Dataset providing usage examples and documentation. + */ + "readme": S.optionalWith(S.String, { nullable: true }), + /** + * VCS repository URL containing the Dataset source code. 
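+   * For illustration only (hypothetical URL): e.g. `https://github.com/example-org/example-dataset`.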
+ */ + "repository_url": S.optionalWith(S.String, { nullable: true }), + /** + * Source of data being materialized by the Dataset (e.g., contract addresses, logs, transactions). + */ + "source": S.optionalWith(S.Array(S.String), { nullable: true }), + /** + * Timestamp when the Dataset record was last updated. + */ + "updated_at": S.String, + /** + * Link to all DatasetVersion records that this Dataset is a parent of. + */ + "versions": S.optionalWith(S.Array(DatasetVersion), { nullable: true }), + "visibility": DatasetVisibility +}) {} + +export class DatasetListResponse extends S.Class("DatasetListResponse")({ + /** + * List of the datasets being returned in this page + */ + "datasets": S.Array(Dataset), + /** + * If true, there are more datasets that can be fetched + */ + "has_next_page": S.Boolean, + /** + * Total number of datasets matching the query filters + */ + "total_count": S.Int +}) {} + +/** + * Standard error response returned by the API + * + * This struct represents error information returned in HTTP error responses. + * It provides structured error details including a machine-readable error code + * and human-readable message. + * + * ## Error Code Conventions + * - Error codes use SCREAMING_SNAKE_CASE (e.g., `DATASET_NOT_FOUND`) + * - Codes are stable and can be relied upon programmatically + * - Messages may change and should only be used for display/logging + * + * ## Example JSON Response + * ```json + * { + * "error_code": "DATASET_NOT_FOUND", + * "error_message": "dataset 'eth_mainnet' version '1.0.0' not found", + * "request_id": "a1b2c3d4-e5f6-7890-abcd-ef1234567890" + * } + * ``` + */ +export class ErrorResponse extends S.Class("ErrorResponse")({ + /** + * Machine-readable error code in SCREAMING_SNAKE_CASE format + * + * Error codes are stable across API versions and should be used + * for programmatic error handling. Examples: `INVALID_SELECTOR`, + * `DATASET_NOT_FOUND`, `REGISTRY_DB_ERROR` + */ + "error_code": S.String, + /** + * Human-readable error message + * + * Messages provide detailed context about the error but may change + * over time. Use `error_code` for programmatic decisions. + */ + "error_message": S.String, + /** + * Request ID for tracing and correlation + * + * This ID can be used to correlate error responses with server logs + * for debugging and support purposes. The ID is generated per-request + * and appears in both logs and error responses. + */ + "request_id": S.optionalWith(S.String, { nullable: true }) +}) {} + +/** + * Count of datasets by indexing chain. + * + * Returns the chain name and the number of datasets indexing that chain. + */ +export class DatasetCountByChainDto extends S.Class("DatasetCountByChainDto")({ + /** + * The indexing chain name (e.g., "mainnet", "arbitrum-one", "base-mainnet") + */ + "chain": S.String, + /** + * The count of Dataset records indexing this chain + */ + "count": S.Int +}) {} + +export class DatasetsCountByChain200 extends S.Array(DatasetCountByChainDto) {} + +/** + * Count of datasets by keyword (tag). + * + * Returns the keyword and the number of datasets with that keyword. + */ +export class DatasetCountByKeywordDto extends S.Class("DatasetCountByKeywordDto")({ + /** + * The count of Dataset records with this keyword + */ + "count": S.Int, + /** + * The keyword (e.g., "DeFi", "NFT", "logs") + */ + "keyword": S.String +}) {} + +export class DatasetsCountByKeyword200 extends S.Array(DatasetCountByKeywordDto) {} + +/** + * Cumulative count of datasets by last updated time bucket. 
+ * + * Counts are cumulative - a dataset updated 1 hour ago will be counted in + * all four buckets (last_day, last_week, last_month, and last_year). + */ +export class DatasetCountByLastUpdatedBucketDto + extends S.Class("DatasetCountByLastUpdatedBucketDto")({ + /** + * The time bucket + */ + "bucket": LastUpdatedBucket, + /** + * The count of Dataset records updated within this time period + */ + "count": S.Int + }) +{} + +export class DatasetsCountByLastUpdated200 extends S.Array(DatasetCountByLastUpdatedBucketDto) {} + +export class DatasetsSearchParams extends S.Struct({ + "search": S.String, + "limit": S.optionalWith(S.Int, { nullable: true }), + "page": S.optionalWith(S.Int, { nullable: true }), + "indexing_chains": S.optionalWith(S.String, { nullable: true }), + "keywords": S.optionalWith(S.String, { nullable: true }), + "last_updated": S.optionalWith(LastUpdatedBucket, { nullable: true }) +}) {} + +/** + * Dataset with search relevance score. Extends the base Dataset with a weighted score indicating how well it matches the search query. Higher scores indicate better relevance. + */ +export class DatasetWithScore extends S.Class("DatasetWithScore")({ + /** + * Timestamp when the Dataset record was created (immutable). + */ + "created_at": S.String, + /** + * Computed link to the latest DatasetVersion reference in PURL format. + */ + "dataset_reference": S.optionalWith( + S.String.pipe(S.minLength(1), S.pattern(new RegExp("^[a-z0-9_]+/[a-z_][a-z0-9_]*@[a-z0-9._-]+$"))), + { nullable: true } + ), + /** + * Description of the dataset, its intended use, and purpose. + */ + "description": S.optionalWith(S.String.pipe(S.maxLength(1024)), { nullable: true }), + /** + * Chains being indexed by the Dataset. Used for discovery by chain. + */ + "indexing_chains": S.Array(S.String), + /** + * User-defined or derived keywords defining the usage of the dataset. + */ + "keywords": S.optionalWith(S.Array(S.String), { nullable: true }), + "latest_version": S.optionalWith(DatasetVersion, { nullable: true }), + /** + * Usage license covering the Dataset. + */ + "license": S.optionalWith(S.String, { nullable: true }), + /** + * The dataset name. Lowercase, alphanumeric with underscores. Cannot start with a number. + */ + "name": S.String.pipe(S.minLength(1), S.pattern(new RegExp("^[a-z_][a-z0-9_]*$"))), + /** + * The dataset namespace. Logical grouping mechanism for datasets. Can be a user 0x address, username, or organization. + */ + "namespace": S.String.pipe(S.minLength(1), S.pattern(new RegExp("^[a-z0-9_]*$"))), + /** + * Owner of the Dataset. Can be an organization or user 0x address. + */ + "owner": S.String, + /** + * User-defined README for the Dataset providing usage examples and documentation. + */ + "readme": S.optionalWith(S.String, { nullable: true }), + /** + * VCS repository URL containing the Dataset source code. + */ + "repository_url": S.optionalWith(S.String, { nullable: true }), + /** + * Weighted relevance score indicating how well this dataset matches the search query. Higher scores indicate better relevance. Score is calculated based on matches in description, keywords, source, and indexing chains fields. + */ + "score": S.Number, + /** + * Source of data being materialized by the Dataset (e.g., contract addresses, logs, transactions). + */ + "source": S.optionalWith(S.Array(S.String), { nullable: true }), + /** + * Timestamp when the Dataset record was last updated. + */ + "updated_at": S.String, + /** + * Link to all DatasetVersion records that this Dataset is a parent of. 
+ */ + "versions": S.optionalWith(S.Array(DatasetVersion), { nullable: true }), + "visibility": DatasetVisibility +}) {} + +export class DatasetSearchResponse extends S.Class("DatasetSearchResponse")({ + /** + * List of the datasets being returned in this page + */ + "datasets": S.Array(DatasetWithScore), + /** + * If true, there are more datasets that can be fetched + */ + "has_next_page": S.Boolean, + /** + * Total number of datasets matching the query filters + */ + "total_count": S.Int +}) {} + +export class DatasetsAiSearchParams extends S.Struct({ + "search": S.String +}) {} + +export class DatasetsAiSearch200 extends S.Array(DatasetWithScore) {} + +export class DatasetsListVersions200 extends S.Array(DatasetVersion) {} + +export class DatasetsGetLatestManifest200 extends S.String {} + +export class SavedQuery extends S.Class("SavedQuery")({ + /** + * Timestamp when the SavedQuery was created + */ + "created_at": S.String, + /** + * Creator/owner of the saved query (ethereum address or user_id) + */ + "creator": S.String, + /** + * Optional description of what the query does + */ + "description": S.optionalWith(S.String, { nullable: true }), + /** + * Unique identifier for the saved query (UUID) + */ + "id": S.String.pipe( + S.pattern(new RegExp("^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$")) + ), + /** + * Name of the saved query + */ + "name": S.String, + /** + * The SQL query string + */ + "query": S.String, + /** + * Timestamp when the SavedQuery was last updated + */ + "updated_at": S.String, + "visibility": DatasetVisibility +}) {} + +export class DatasetsListLatestQueries200 extends S.Array(SavedQuery) {} + +export class DatasetsGetManifest200 extends S.String {} + +export class DatasetsListQueries200 extends S.Array(SavedQuery) {} + +export class DatasetsOwnedListParams extends S.Struct({ + "limit": S.optionalWith(S.Int, { nullable: true }), + "page": S.optionalWith(S.Int, { nullable: true }), + "sort_by": S.optionalWith(S.String, { nullable: true }), + "direction": S.optionalWith(S.String, { nullable: true }), + "indexing_chains": S.optionalWith(S.String, { nullable: true }), + "keywords": S.optionalWith(S.String, { nullable: true }), + "last_updated": S.optionalWith(LastUpdatedBucket, { nullable: true }) +}) {} + +export class AuthUserOwnedDatasetListResponse + extends S.Class("AuthUserOwnedDatasetListResponse")({ + /** + * List of the datasets being returned in this page + */ + "datasets": S.Array(Dataset), + /** + * If true, there are more datasets that can be fetched + */ + "has_next_page": S.Boolean, + /** + * Total number of datasets matching the query filters + */ + "total_count": S.Int + }) +{} + +export class OwnedDatasetsCountByChain200 extends S.Array(DatasetCountByChainDto) {} + +export class OwnedDatasetsCountByKeyword200 extends S.Array(DatasetCountByKeywordDto) {} + +export class OwnedDatasetsCountByLastUpdated200 extends S.Array(DatasetCountByLastUpdatedBucketDto) {} + +/** + * Count of datasets by version status. + * + * Returns the version status and the number of datasets that have at least one version with that status. 
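+ *
+ * Note that a dataset with versions in multiple statuses is counted once under each of those statuses.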
+ */ +export class DatasetCountByStatusDto extends S.Class("DatasetCountByStatusDto")({ + /** + * The count of Dataset records with at least one version in this status + */ + "count": S.Int, + /** + * The version status (Draft, Published, Deprecated, or Archived) + */ + "status": DatasetVersionStatus +}) {} + +export class OwnedDatasetsCountByStatus200 extends S.Array(DatasetCountByStatusDto) {} + +/** + * Count of datasets by visibility. + * + * Returns the visibility and the number of datasets with that visibility. + */ +export class DatasetCountByVisibilityDto extends S.Class("DatasetCountByVisibilityDto")({ + /** + * The count of Dataset records with this visibility + */ + "count": S.Int, + /** + * The visibility (Public or Private) + */ + "visibility": DatasetVisibility +}) {} + +export class OwnedDatasetsCountByVisibility200 extends S.Array(DatasetCountByVisibilityDto) {} + +export class ManifestKind extends S.Literal("manifest", "evm-rpc", "eth-beacon", "firehose") {} + +/** + * Input for creating a new DatasetVersion. Contains the version tag, manifest hash, and manifest content. + */ +export class InsertDatasetVersion extends S.Class("InsertDatasetVersion")({ + /** + * Optional changelog describing what changed in this version. + */ + "changelog": S.optionalWith(S.String, { nullable: true }), + "kind": ManifestKind, + /** + * Manifest JSON content. This should be a valid datasets_derived::Manifest structure. The SHA256 hash will be calculated server-side. + */ + "manifest": S.Record({ key: S.String, value: S.Unknown }), + "status": DatasetVersionStatus, + /** + * Version tag (e.g., '1.0.0', 'latest', '8e0acc0'). Pattern: lowercase, numbers, dots, underscores, hyphens. + */ + "version_tag": S.String.pipe(S.pattern(new RegExp("^[a-z0-9._-]+$"))) +}) {} + +/** + * Input for creating a new Dataset. Contains metadata, discovery information, and the initial version to create. The owner will be automatically set to the authenticated user. + */ +export class InsertDataset extends S.Class("InsertDataset")({ + /** + * Description of the dataset, its intended use, and purpose. + */ + "description": S.optionalWith(S.String.pipe(S.maxLength(1024)), { nullable: true }), + /** + * Chains being indexed by the Dataset. Used for discovery by chain. + */ + "indexing_chains": S.Array(S.String), + /** + * User-defined keywords defining the usage of the dataset. + */ + "keywords": S.optionalWith(S.Array(S.String), { nullable: true }), + /** + * Usage license covering the Dataset. + */ + "license": S.optionalWith(S.String, { nullable: true }), + /** + * The dataset name. Pattern: lowercase, alphanumeric with underscores, cannot start with a number. + */ + "name": S.String.pipe(S.minLength(1), S.pattern(new RegExp("^[a-z_][a-z0-9_]*$"))), + /** + * The dataset namespace. Pattern: lowercase, numbers, underscores. + */ + "namespace": S.String.pipe(S.minLength(1), S.pattern(new RegExp("^[a-z0-9_]*$"))), + /** + * User-defined README for the Dataset providing usage examples and documentation. + */ + "readme": S.optionalWith(S.String, { nullable: true }), + /** + * VCS repository URL containing the Dataset source code. + */ + "repository_url": S.optionalWith( + S.String.pipe( + S.pattern( + new RegExp( + "^https?://[a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(\\\\.[a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*(/.*)?$" + ) + ) + ), + { nullable: true } + ), + /** + * Source of data being materialized by the Dataset (e.g., contract addresses). 
+ */ + "source": S.optionalWith(S.Array(S.String), { nullable: true }), + "version": InsertDatasetVersion, + "visibility": DatasetVisibility +}) {} + +export class DatasetsOwnedSearchParams extends S.Struct({ + "search": S.String, + "limit": S.optionalWith(S.Int, { nullable: true }), + "page": S.optionalWith(S.Int, { nullable: true }), + "indexing_chains": S.optionalWith(S.String, { nullable: true }), + "keywords": S.optionalWith(S.String, { nullable: true }), + "last_updated": S.optionalWith(LastUpdatedBucket, { nullable: true }) +}) {} + +/** + * Input for update the Datasets metadata fields: + * - keywords + * - README + * - sources + * - repository_url + * - license + * - description + */ +export class UpdateDatasetMetadataDto extends S.Class("UpdateDatasetMetadataDto")({ + /** + * Dataset description + */ + "description": S.optionalWith(S.String, { nullable: true }), + /** + * Chains being indexed by the dataset + */ + "indexing_chains": S.Array(S.String), + /** + * Keywords for dataset discovery + */ + "keywords": S.optionalWith(S.Array(S.String), { nullable: true }), + /** + * License covering the dataset + */ + "license": S.optionalWith(S.String, { nullable: true }), + /** + * User-defined README for the dataset + */ + "readme": S.optionalWith(S.String, { nullable: true }), + /** + * VCS repository URL + */ + "repository_url": S.optionalWith(S.String, { nullable: true }), + /** + * Source of data being materialized + */ + "source": S.optionalWith(S.Array(S.String), { nullable: true }) +}) {} + +/** + * Response for archiving a dataset version + */ +export class ArchiveDatasetVersionResponse + extends S.Class("ArchiveDatasetVersionResponse")({ + /** + * The reference of the archived dataset version + */ + "reference": S.String + }) +{} + +/** + * Input for updating a DatasetVersion's status + */ +export class UpdateDatasetVersionStatusDto + extends S.Class("UpdateDatasetVersionStatusDto")({ + /** + * The new status for the dataset version (Draft or Published) + * Note: Use the DELETE endpoint to archive a version + */ + "status": DatasetVersionStatus + }) +{} + +export class DatasetsOwnedListQueries200 extends S.Array(SavedQuery) {} + +/** + * Input for updating a Dataset's visibility + */ +export class UpdateDatasetVisibilityDto extends S.Class("UpdateDatasetVisibilityDto")({ + /** + * The new visibility level for the dataset + */ + "visibility": DatasetVisibility +}) {} + +export class DatasetsOwnerListParams extends S.Struct({ + "limit": S.optionalWith(S.Int, { nullable: true }), + "page": S.optionalWith(S.Int, { nullable: true }), + "sort_by": S.optionalWith(S.String, { nullable: true }), + "direction": S.optionalWith(S.String, { nullable: true }), + "indexing_chains": S.optionalWith(S.String, { nullable: true }), + "keywords": S.optionalWith(S.String, { nullable: true }), + "last_updated": S.optionalWith(LastUpdatedBucket, { nullable: true }) +}) {} + +export class OwnerDatasetListResponse extends S.Class("OwnerDatasetListResponse")({ + /** + * List of the datasets being returned in this page + */ + "datasets": S.Array(Dataset), + /** + * If true, there are more datasets that can be fetched + */ + "has_next_page": S.Boolean, + /** + * Total number of datasets matching the query filters + */ + "total_count": S.Int +}) {} + +export class DatasetsOwnerSearchParams extends S.Struct({ + "search": S.String, + "limit": S.optionalWith(S.Int, { nullable: true }), + "page": S.optionalWith(S.Int, { nullable: true }), + "indexing_chains": S.optionalWith(S.String, { nullable: true }), + 
"keywords": S.optionalWith(S.String, { nullable: true }), + "last_updated": S.optionalWith(LastUpdatedBucket, { nullable: true }) +}) {} + +export class LivenessResponse extends S.Class("LivenessResponse")({ + "status": S.String +}) {} + +export class ServiceStatus extends S.Class("ServiceStatus")({ + "error": S.optionalWith(S.String, { nullable: true }), + "status": S.String +}) {} + +export class ReadinessChecks extends S.Class("ReadinessChecks")({ + "database": ServiceStatus +}) {} + +export class ReadinessResponse extends S.Class("ReadinessResponse")({ + "checks": ReadinessChecks, + "status": S.String +}) {} diff --git a/packages/amp/src/registry/api.ts b/packages/amp/src/registry/api.ts new file mode 100644 index 0000000..6489041 --- /dev/null +++ b/packages/amp/src/registry/api.ts @@ -0,0 +1,595 @@ +import * as HttpApi from "@effect/platform/HttpApi" +import * as HttpApiEndpoint from "@effect/platform/HttpApiEndpoint" +import * as HttpApiError from "@effect/platform/HttpApiError" +import * as HttpApiGroup from "@effect/platform/HttpApiGroup" +import * as HttpApiSchema from "@effect/platform/HttpApiSchema" +import * as Schema from "effect/Schema" +import * as Models from "../Models.ts" +import * as Domain from "./domain.ts" +import * as Errors from "./error.ts" + +// ============================================================================= +// Registry API Params +// ============================================================================= + +/** + * A URL parameter for the dataset namespace. + */ +const datasetNamespaceParam = HttpApiSchema.param("namespace", Models.DatasetNamespace) + +/** + * A URL parameter for the dataset name. + */ +const datasetNameParam = HttpApiSchema.param("name", Models.DatasetName) + +/** + * A URL parameter for the dataset revision. + */ +const datasetRevisionParam = HttpApiSchema.param("revision", Models.DatasetRevision) + +/** + * A URL parameter for the dataset owners. + */ +const datasetOwnerParam = HttpApiSchema.param("owner", Schema.String) + +// ============================================================================= +// Health +// ============================================================================= + +// ----------------------------------------------------------------------------- +// GET / +// ----------------------------------------------------------------------------- + +const getHealth = HttpApiEndpoint.get("getHealth")`/`.addSuccess(Domain.HealthcheckResponse) + +// ----------------------------------------------------------------------------- +// GET /health/live +// ----------------------------------------------------------------------------- + +const getLiveness = HttpApiEndpoint.get("getLiveness")`/health/live`.addSuccess(Domain.LivenessResponse) + +// ----------------------------------------------------------------------------- +// GET /health/ready +// ----------------------------------------------------------------------------- + +const getReadiness = HttpApiEndpoint.get("getReadiness")`/health/ready` + .addSuccess(Domain.ReadinessResponse) + .addError(Errors.ServiceUnavailableError) + +// ----------------------------------------------------------------------------- +// Group Definition +// ----------------------------------------------------------------------------- + +/** + * Api group for checking API health. 
+ */ +export class HealthApiGroup extends HttpApiGroup.make("health") + .add(getHealth) + .add(getLiveness) + .add(getReadiness) +{} + +// ============================================================================= +// Datasets +// ============================================================================= + +// ----------------------------------------------------------------------------- +// GET /api/vX/datasets +// ----------------------------------------------------------------------------- + +const listDatasets = HttpApiEndpoint.get( + "listDatasets" +)`/datasets` + .setUrlParams(Domain.ListDatasetsParams) + .addSuccess(Domain.DatasetListResponse) + .addError(Errors.DatasetConversionError) + .addError(Errors.InvalidQueryParametersError) + .addError(Errors.LimitInvalidError) + .addError(Errors.LimitTooLargeError) + .addError(Errors.RegistryDatabaseError) + +// ----------------------------------------------------------------------------- +// GET /api/vX/datasets/{namespace}/{name}/versions/{version}/manifest +// ----------------------------------------------------------------------------- + +const getDataset = HttpApiEndpoint.get( + "getDatasetByFqdn" +)`/datasets/${datasetNamespaceParam}/${datasetNameParam}/versions/${datasetRevisionParam}/manifest` + .addSuccess(Domain.Dataset) + .addError(Errors.DatasetConversionError) + .addError(Errors.DatasetNotFoundError) + .addError(Errors.InvalidDatasetSelectorError) + .addError(Errors.RegistryDatabaseError) + +// ----------------------------------------------------------------------------- +// GET /api/v1/datasets/counts/by-chain +// ----------------------------------------------------------------------------- + +const getDatasetCountsByChain = HttpApiEndpoint.get( + "getDatasetCountsByChain" +)`/datasets/counts/by-chain` + .addSuccess(Domain.DatasetCountsByChainResponse) + .addError(Errors.RegistryDatabaseError) + +// ----------------------------------------------------------------------------- +// GET /api/v1/datasets/counts/by-keyword +// ----------------------------------------------------------------------------- + +const getDatasetCountsByKeyword = HttpApiEndpoint.get( + "getDatasetCountsByKeyword" +)`/datasets/counts/by-keyword` + .addSuccess(Domain.DatasetCountsByKeywordResponse) + .addError(Errors.RegistryDatabaseError) + +// ----------------------------------------------------------------------------- +// GET /api/v1/datasets/counts/by-last-updated +// ----------------------------------------------------------------------------- + +const getDatasetCountsByLastUpdated = HttpApiEndpoint.get( + "getDatasetCountsByLastUpdated" +)`/datasets/counts/by-last-updated` + .addSuccess(Domain.DatasetCountsByLastUpdatedResponse) + .addError(Errors.RegistryDatabaseError) + +// ----------------------------------------------------------------------------- +// GET /api/vX/datasets/search +// ----------------------------------------------------------------------------- + +const searchDatasets = HttpApiEndpoint.get( + "searchDatasets" +)`/datasets/search` + .setUrlParams(Domain.SearchDatasetsParams) + .addSuccess(Domain.DatasetSearchResponse) + .addError(Errors.DatasetConversionError) + .addError(Errors.InvalidQueryParametersError) + .addError(Errors.LimitInvalidError) + .addError(Errors.LimitTooLargeError) + .addError(Errors.RegistryDatabaseError) + +// ----------------------------------------------------------------------------- +// GET /api/vX/datasets/search/ai +// ----------------------------------------------------------------------------- + 
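+// Free-text search endpoint: accepts a single `search` query parameter and returns scored results.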
+const aiSearchDatasets = HttpApiEndpoint.get( + "aiSearchDatasets" +)`/datasets/search/ai` + .setUrlParams(Domain.AiSearchDatasetsParams) + .addSuccess(Domain.DatasetAiSearchResponse) + .addError(Errors.DatasetConversionError) + .addError(Errors.InvalidQueryParametersError) + .addError(Errors.RegistryDatabaseError) + +// ----------------------------------------------------------------------------- +// GET /api/vX/datasets/{namespace}/{name}/versions +// ----------------------------------------------------------------------------- + +const listDatasetVersions = HttpApiEndpoint.get( + "listDatasetVersions" +)`/datasets/${datasetNamespaceParam}/${datasetNameParam}/versions` + .addSuccess(Domain.DatasetListVersionsResponse) + .addError(Errors.DatasetVersionConversionError) + .addError(Errors.InvalidSelectorError) + .addError(Errors.RegistryDatabaseError) + +// ----------------------------------------------------------------------------- +// GET /api/vX/datasets/{namespace}/{name}/versions/latest +// ----------------------------------------------------------------------------- + +const getLatestDatasetVersion = HttpApiEndpoint.get( + "getLatestDatasetVersion" +)`/datasets/${datasetNamespaceParam}/${datasetNameParam}/versions/latest` + .addSuccess(Domain.DatasetVersion) + .addError(Errors.DatasetVersionConversionError) + .addError(Errors.LatestDatasetVersionNotFoundError) + .addError(Errors.InvalidSelectorError) + .addError(Errors.RegistryDatabaseError) + +// ----------------------------------------------------------------------------- +// GET /api/vX/datasets/{namespace}/{name}/versions/{version} +// ----------------------------------------------------------------------------- + +const getDatasetVersionByRevision = HttpApiEndpoint.get( + "getDatasetVersionByRevision" +)`/datasets/${datasetNamespaceParam}/${datasetNameParam}/versions/${datasetRevisionParam}` + .addSuccess(Domain.DatasetVersion) + .addError(Errors.DatasetVersionConversionError) + .addError(Errors.DatasetVersionNotFoundError) + .addError(Errors.InvalidSelectorError) + .addError(Errors.RegistryDatabaseError) + +// ----------------------------------------------------------------------------- +// GET /api/v1/datasets/{namespace}/{name}/versions/latest/queries +// ----------------------------------------------------------------------------- + +const listLatestDatasetQueries = HttpApiEndpoint.get( + "listLatestDatasetQueries" +)`/datasets/${datasetNamespaceParam}/${datasetNameParam}/versions/latest/queries` + .addSuccess(Domain.DatasetListLatestQueriesResponse) + .addError(Errors.DatasetNotFoundError) + .addError(Errors.InvalidDatasetReferenceError) + .addError(Errors.InvalidSelectorError) + .addError(Errors.RegistryDatabaseError) + .addError(Errors.SavedQueryConversionError) + +// ----------------------------------------------------------------------------- +// GET /api/v1/datasets/{namespace}/{name}/versions/{version}/queries +// ----------------------------------------------------------------------------- + +const listDatasetQueries = HttpApiEndpoint.get( + "listDatasetQueries" +)`/datasets/${datasetNamespaceParam}/${datasetNameParam}/versions/${datasetRevisionParam}/queries` + .addSuccess(Domain.DatasetListQueriesResponse) + .addError(Errors.InvalidSelectorError) + .addError(Errors.RegistryDatabaseError) + .addError(Errors.SavedQueryConversionError) + +// ----------------------------------------------------------------------------- +// GET /api/v1/datasets/{namespace}/{name}/versions/latest/manifest +// 
-----------------------------------------------------------------------------
+
+const getLatestDatasetManifest = HttpApiEndpoint.get(
+  "getLatestDatasetManifest"
+)`/datasets/${datasetNamespaceParam}/${datasetNameParam}/versions/latest/manifest`
+  .addSuccess(Domain.DatasetGetLatestManifestResponse)
+  .addError(Errors.InvalidSelectorError)
+  .addError(Errors.InvalidManifestHashError)
+  .addError(Errors.ManifestNotFoundError)
+  .addError(Errors.ManifestRetrievalError)
+  .addError(Errors.ManifestDeserializationError)
+
+// -----------------------------------------------------------------------------
+// GET /api/v1/datasets/{namespace}/{name}/versions/{version}/manifest
+// -----------------------------------------------------------------------------
+
+const getDatasetManifest = HttpApiEndpoint.get(
+  "getDatasetManifest"
+)`/datasets/${datasetNamespaceParam}/${datasetNameParam}/versions/${datasetRevisionParam}/manifest`
+  .addSuccess(Domain.DatasetGetManifestResponse)
+  .addError(Errors.InvalidSelectorError)
+  .addError(Errors.InvalidManifestHashError)
+  .addError(Errors.ManifestNotFoundError)
+  .addError(Errors.ManifestRetrievalError)
+  .addError(Errors.ManifestDeserializationError)
+
+// -----------------------------------------------------------------------------
+// Group Definition
+// -----------------------------------------------------------------------------
+
+// TODO: implement the SSE endpoint (not yet possible to implement streaming endpoints with HttpApi)
+
+/**
+ * Api group for all dataset endpoints.
+ */
+export class DatasetsApiGroup extends HttpApiGroup.make("datasets")
+  .add(listDatasets)
+  .add(getDataset)
+  .add(getDatasetCountsByChain)
+  .add(getDatasetCountsByKeyword)
+  .add(getDatasetCountsByLastUpdated)
+  .add(searchDatasets)
+  .add(aiSearchDatasets)
+  .add(listDatasetVersions)
+  .add(getLatestDatasetVersion)
+  .add(getDatasetVersionByRevision)
+  .add(listLatestDatasetQueries)
+  .add(listDatasetQueries)
+  .add(getLatestDatasetManifest)
+  .add(getDatasetManifest)
+{}
+
+// =============================================================================
+// Owned Datasets
+// =============================================================================
+
+// -----------------------------------------------------------------------------
+// GET /api/vX/owners/{owner}/datasets
+// -----------------------------------------------------------------------------
+
+const listOwnedDatasets = HttpApiEndpoint.get(
+  "listOwnedDatasets"
+)`/owners/${datasetOwnerParam}/datasets`
+  .setUrlParams(Domain.ListOwnedDatasetsParams)
+  .addSuccess(Domain.ListMyDatasetsResponse)
+  .addError(Errors.DatasetConversionError)
+  .addError(Errors.InvalidDatasetOwnerPathError)
+  .addError(Errors.InvalidQueryParametersError)
+  .addError(Errors.LimitInvalidError)
+  .addError(Errors.LimitTooLargeError)
+  .addError(Errors.RegistryDatabaseError)
+
+// -----------------------------------------------------------------------------
+// GET /api/vX/owners/{owner}/datasets/search
+// -----------------------------------------------------------------------------
+
+const searchOwnedDatasets = HttpApiEndpoint.get(
+  "searchOwnedDatasets"
+)`/owners/${datasetOwnerParam}/datasets/search`
+  .setUrlParams(Domain.SearchOwnedDatasetsParams)
+  .addSuccess(Domain.DatasetSearchResponse)
+  .addError(Errors.DatasetConversionError)
+  .addError(Errors.InvalidDatasetOwnerPathError)
+  .addError(Errors.InvalidQueryParametersError)
+  .addError(Errors.LimitInvalidError)
+  .addError(Errors.LimitTooLargeError)
+ 
.addError(Errors.RegistryDatabaseError) + +// ----------------------------------------------------------------------------- +// Group Definition +// ----------------------------------------------------------------------------- + +/** + * Api group for all owned dataset endpoints. + */ +export class OwnedDatasetsApiGroup extends HttpApiGroup.make("ownedDatasets") + .add(listOwnedDatasets) + .add(searchOwnedDatasets) +{} + +// ============================================================================= +// My Datasets +// ============================================================================= + +// ----------------------------------------------------------------------------- +// GET /api/vX/owners/@me/datasets +// ----------------------------------------------------------------------------- + +const listMyDatasets = HttpApiEndpoint.get( + "listMyDatasets" +)`/datasets` + .setUrlParams(Domain.ListOwnedDatasetsParams) + .setHeaders(Domain.BearerAuthHeader) + .addSuccess(Domain.AuthUserOwnedDatasetListResponse) + .addError(HttpApiError.Unauthorized) + .addError(Errors.DatasetConversionError) + .addError(Errors.InvalidQueryParametersError) + .addError(Errors.LimitInvalidError) + .addError(Errors.LimitTooLargeError) + .addError(Errors.RegistryDatabaseError) + +// ----------------------------------------------------------------------------- +// GET /api/vX/owners/@me/datasets/{namespace}/{name} +// ----------------------------------------------------------------------------- + +const getMyDatasetByFqdn = HttpApiEndpoint.get( + "getMyDatasetByFqdn" +)`/datasets/${datasetNamespaceParam}/${datasetNameParam}` + .setUrlParams(Domain.GetOwnedDatasetsByFqdnParams) + .setHeaders(Domain.BearerAuthHeader) + .addSuccess(Domain.Dataset) + .addError(HttpApiError.Unauthorized) + .addError(Errors.DatasetConversionError) + .addError(Errors.DatasetNotFoundError) + .addError(Errors.InvalidDatasetSelectorError) + .addError(Errors.RegistryDatabaseError) + +// ----------------------------------------------------------------------------- +// GET /api/v1/owners/@me/datasets/counts/by-chain +// ----------------------------------------------------------------------------- + +const getMyDatasetCountsByChain = HttpApiEndpoint.get( + "getMyDatasetCountsByChain" +)`/datasets/counts/by-chain` + .setHeaders(Domain.BearerAuthHeader) + .addSuccess(Domain.OwnedDatasetCountsByChainResponse) + .addError(HttpApiError.Unauthorized) + .addError(Errors.RegistryDatabaseError) + +// ----------------------------------------------------------------------------- +// GET /api/v1/owners/@me/datasets/counts/by-keyword +// ----------------------------------------------------------------------------- + +const getMyDatasetCountsByKeyword = HttpApiEndpoint.get( + "getMyDatasetCountsByKeyword" +)`/datasets/counts/by-keyword` + .setHeaders(Domain.BearerAuthHeader) + .addSuccess(Domain.OwnedDatasetCountsByKeywordResponse) + .addError(HttpApiError.Unauthorized) + .addError(Errors.RegistryDatabaseError) + +// ----------------------------------------------------------------------------- +// GET /api/v1/owners/@me/datasets/counts/by-last-updated +// ----------------------------------------------------------------------------- + +const getMyDatasetCountsByLastUpdated = HttpApiEndpoint.get( + "getMyDatasetCountsByLastUpdated" +)`/datasets/counts/by-last-updated` + .setHeaders(Domain.BearerAuthHeader) + .addSuccess(Domain.OwnedDatasetsCountByLastUpdatedResponse) + .addError(HttpApiError.Unauthorized) + .addError(Errors.RegistryDatabaseError) + +// 
----------------------------------------------------------------------------- +// GET /api/v1/owners/@me/datasets/counts/by-status +// ----------------------------------------------------------------------------- + +const getMyDatasetCountsByStatus = HttpApiEndpoint.get( + "getMyDatasetCountsByStatus" +)`/datasets/counts/by-status` + .setHeaders(Domain.BearerAuthHeader) + .addSuccess(Domain.OwnedDatasetCountsByStatusResponse) + .addError(HttpApiError.Unauthorized) + .addError(Errors.RegistryDatabaseError) + +// ----------------------------------------------------------------------------- +// GET /api/v1/owners/@me/datasets/counts/by-visibility +// ----------------------------------------------------------------------------- + +const getMyDatasetCountsByVisibility = HttpApiEndpoint.get( + "getMyDatasetCountsByVisibility" +)`/datasets/counts/by-visibility` + .setHeaders(Domain.BearerAuthHeader) + .addSuccess(Domain.OwnedDatasetCountsByVisibilityResponse) + .addError(HttpApiError.Unauthorized) + .addError(Errors.RegistryDatabaseError) + +// ----------------------------------------------------------------------------- +// GET /api/vX/owners/@me/datasets/search +// ----------------------------------------------------------------------------- + +const searchMyDatasets = HttpApiEndpoint.get( + "searchMyDatasets" +)`/datasets/search` + .setUrlParams(Domain.SearchOwnedDatasetsParams) + .setHeaders(Domain.BearerAuthHeader) + .addSuccess(Domain.DatasetSearchResponse) + .addError(HttpApiError.Unauthorized) + .addError(Errors.DatasetConversionError) + .addError(Errors.InvalidQueryParametersError) + .addError(Errors.LimitInvalidError) + .addError(Errors.LimitTooLargeError) + .addError(Errors.RegistryDatabaseError) + +// ----------------------------------------------------------------------------- +// GET /api/v1/owners/@me/datasets/{namespace}/{name}/versions/{version}/queries +// ----------------------------------------------------------------------------- + +const listMyDatasetQueries = HttpApiEndpoint.get( + "listMyDatasetQueries" +)`/datasets/${datasetNamespaceParam}/${datasetNameParam}/versions/${datasetRevisionParam}/queries` + .setHeaders(Domain.BearerAuthHeader) + .addSuccess(Domain.OwnedDatasetListQueriesResponse) + .addError(HttpApiError.Unauthorized) + .addError(Errors.InvalidSelectorError) + .addError(Errors.ForbiddenError) + .addError(Errors.RegistryDatabaseError) + .addError(Errors.SavedQueryConversionError) + +// ----------------------------------------------------------------------------- +// POST /api/v1/owners/@me/datasets/publish +// ----------------------------------------------------------------------------- + +const publishMyDataset = HttpApiEndpoint.post( + "publishMyDataset" +)`/datasets/publish` + .setHeaders(Domain.BearerAuthHeader) + .setPayload(Domain.InsertDatasetPayload) + .addSuccess(HttpApiSchema.Created) + .addError(HttpApiError.Unauthorized) + .addError(Errors.DatasetConversionError) + .addError(Errors.InvalidManifestError) + .addError(Errors.InvalidRequestBodyError) + .addError(Errors.InvalidNamespaceError) + .addError(Errors.NamespaceAccessDeniedError) + .addError(Errors.RegistryDatabaseError) + +// ----------------------------------------------------------------------------- +// POST /api/v1/owners/@me/datasets/{namespace}/{name}/versions/publish +// ----------------------------------------------------------------------------- + +const publishMyDatasetVersion = HttpApiEndpoint.post( + "publishMyDatasetVersion" 
+)`/datasets/${datasetNamespaceParam}/${datasetNameParam}/versions/publish` + .setHeaders(Domain.BearerAuthHeader) + .setPayload(Domain.InsertDatasetVersion) + .addSuccess(HttpApiSchema.Created) + .addError(HttpApiError.Unauthorized) + .addError(Errors.DatasetVersionConversionError) + .addError(Errors.InvalidManifestError) + .addError(Errors.InvalidPathParametersError) + .addError(Errors.InvalidRequestBodyError) + .addError(Errors.NamespaceAccessDeniedError) + .addError(Errors.RegistryDatabaseError) + +// ----------------------------------------------------------------------------- +// PUT /api/v1/owners/@me/datasets/{namespace}/{name} +// ----------------------------------------------------------------------------- + +const updateMyDatasetMetadata = HttpApiEndpoint.put( + "updateMyDatasetMetadata" +)`/datasets/${datasetNamespaceParam}/${datasetNameParam}` + .setHeaders(Domain.BearerAuthHeader) + .setPayload(Domain.UpdateDatasetMetadataPayload) + .addSuccess(Domain.Dataset) + .addError(HttpApiError.Unauthorized) + .addError(Errors.DatasetConversionError) + .addError(Errors.InvalidPathParametersError) + .addError(Errors.InvalidRequestBodyError) + .addError(Errors.RegistryDatabaseError) + +// ----------------------------------------------------------------------------- +// PATCH /api/v1/owners/@me/datasets/{namespace}/{name}/visibility +// ----------------------------------------------------------------------------- + +const updateMyDatasetVisibility = HttpApiEndpoint.patch( + "updateMyDatasetVisibility" +)`/datasets/${datasetNamespaceParam}/${datasetNameParam}/visibility` + .setHeaders(Domain.BearerAuthHeader) + .setPayload(Domain.UpdateDatasetVisibilityPayload) + .addSuccess(Domain.Dataset) + .addError(HttpApiError.Unauthorized) + .addError(Errors.DatasetNotFoundError) + .addError(Errors.InvalidPathParametersError) + .addError(Errors.InvalidRequestBodyError) + .addError(Errors.RegistryDatabaseError) + +// ----------------------------------------------------------------------------- +// PATCH /api/v1/owners/@me/datasets/{namespace}/{name}/versions/{version} +// ----------------------------------------------------------------------------- + +const updateMyDatasetVersionStatus = HttpApiEndpoint.patch( + "updateMyDatasetVersionStatus" +)`/datasets/${datasetNamespaceParam}/${datasetNameParam}/versions/${datasetRevisionParam}` + .setHeaders(Domain.BearerAuthHeader) + .setPayload(Domain.UpdateDatasetVersionStatusPayload) + .addSuccess(Domain.DatasetVersion) + .addError(HttpApiError.Unauthorized) + .addError(Errors.DatasetNotFoundError) + .addError(Errors.DatasetVersionConversionError) + .addError(Errors.InvalidPathParametersError) + .addError(Errors.InvalidRequestBodyError) + .addError(Errors.RegistryDatabaseError) + +// ----------------------------------------------------------------------------- +// DELETE /api/v1/owners/@me/datasets/{namespace}/{name}/versions/{version} +// ----------------------------------------------------------------------------- + +const archiveMyDatasetVersion = HttpApiEndpoint.del( + "archiveMyDatasetVersion" +)`/datasets/${datasetNamespaceParam}/${datasetNameParam}/versions/${datasetRevisionParam}` + .setHeaders(Domain.BearerAuthHeader) + .addSuccess(Domain.ArchiveDatasetVersionResponse) + .addError(HttpApiError.Unauthorized) + .addError(Errors.DatasetNotFoundError) + .addError(Errors.InvalidPathParametersError) + .addError(Errors.RegistryDatabaseError) + +// ----------------------------------------------------------------------------- +// Group Definition +// 
----------------------------------------------------------------------------- + +/** + * Api group for all dataset endpoints for the currently authenticated user. + */ +export class MyDatasetsApiGroup extends HttpApiGroup.make("myDatasets") + .add(listMyDatasets) + .add(getMyDatasetByFqdn) + .add(getMyDatasetCountsByChain) + .add(getMyDatasetCountsByKeyword) + .add(getMyDatasetCountsByLastUpdated) + .add(getMyDatasetCountsByStatus) + .add(getMyDatasetCountsByVisibility) + .add(listMyDatasetQueries) + .add(searchMyDatasets) + .add(publishMyDataset) + .add(publishMyDatasetVersion) + .add(updateMyDatasetVisibility) + .add(updateMyDatasetMetadata) + .add(updateMyDatasetVersionStatus) + .add(archiveMyDatasetVersion) + .prefix("/owners/@me") +{} + +// ============================================================================= +// API Definition +// ============================================================================= + +/** + * The specification for the Amp Registry v1 API. + */ +export class AmpRegistryApiV1 extends HttpApi.make("AmpRegistryApiV1") + .add(HealthApiGroup) + .add(DatasetsApiGroup) + .add(OwnedDatasetsApiGroup) + .add(MyDatasetsApiGroup) + .prefix("/api/v1") +{} diff --git a/packages/amp/src/registry/domain.ts b/packages/amp/src/registry/domain.ts new file mode 100644 index 0000000..3fecbcd --- /dev/null +++ b/packages/amp/src/registry/domain.ts @@ -0,0 +1,813 @@ +import * as Schema from "effect/Schema" +import * as Models from "../Models.ts" + +// TODO(Chris/Max/Sebastian): Should we consider moving these "general" schemas +// to the top-level models module (Which I'm considering moving into a /domain +// directory to separate things out a bit more)? + +// ============================================================================= +// General Schemas +// ============================================================================= + +export const PositiveIntFromString = Schema.NumberFromString.pipe( + Schema.int(), + Schema.positive() +).annotations({ identifier: "PositiveIntFromString" }) +export type PositiveIntFromString = typeof PositiveIntFromString.Type + +/** + * Represents a service status. + */ +export const ServiceStatus = Schema.Struct({ + "error": Schema.optionalWith(Schema.String, { nullable: true }), + "status": Schema.String +}).annotations({ identifier: "ServiceStatus" }) +export type ServiceStatus = typeof ServiceStatus.Type + +/** + * Time-based buckets for grouping datasets by last updated time + */ +export const LastUpdatedBucket = Schema.Literal( + "last_day", + "last_week", + "last_month", + "last_year" +).annotations({ identifier: "LastUpdatedBucket" }) +export type LastUpdatedBucket = typeof LastUpdatedBucket.Type + +export const DatasetSortBy = Schema.Literal( + "namespace", + "name", + "owner", + "created_at", + "updated_at" +).annotations({ identifier: "DatasetSortBy" }) +export type DatasetSortBy = typeof DatasetSortBy.Type + +export const DatasetSortDirection = Schema.Literal( + "asc", + "desc" +).annotations({ identifier: "DatasetSortDirection" }) +export type DatasetSortDirection = typeof DatasetSortDirection.Type + +/** + * Represents a dataset version ancestry reference. + */ +export const DatasetVersionAncestry = Schema.Struct({ + /** + * Dataset reference in the format: {namespace}/{name}@{version_tag}. Points + * to the DatasetVersion.dataset_reference. This allows version-pinned + * dependencies. 
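+   * e.g. `eth/blocks@latest` (illustrative namespace, name, and tag).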
+ */ + "dataset_reference": Models.DatasetReferenceFromString +}).annotations({ identifier: "DatasetVersionAncestry" }) +export type DatasetVersionAncestry = typeof DatasetVersionAncestry.Type + +/** + * Represents the status of a dataset version. + */ +export const DatasetVersionStatus = Schema.Literal( + "draft", + "published", + "deprecated", + "archived" +).annotations({ identifier: "DatasetVersionStatus" }) +export type DatasetVersionStatus = typeof DatasetVersionStatus.Type + +/** + * Represents a dataset version. + */ +export const DatasetVersion = Schema.Struct({ + /** + * Array of ancestor DatasetVersion references that this version extends from + * (version-pinned dependencies). + */ + "ancestors": Schema.optionalWith(Schema.Array(DatasetVersionAncestry), { nullable: true }), + /** + * A description of what changed with this version. Allows developers of the + * Dataset to communicate to downstream consumers what has changed with this + * version from previous versions. Migration guides, etc. + */ + "changelog": Schema.optionalWith(Schema.String, { nullable: true }), + /** + * Timestamp when the DatasetVersion record was created (immutable). + */ + "created_at": Schema.String, + /** + * Dataset reference in the format: {namespace}/{name}@{version_tag}. This value is globally unique and is a pointer to a tagged and published Manifest. + */ + "dataset_reference": Models.DatasetReferenceFromString, + /** + * Array of descendant DatasetVersion references that extend from this version. + */ + "descendants": Schema.optionalWith(Schema.Array(DatasetVersionAncestry), { nullable: true }), + "status": DatasetVersionStatus, + /** + * The published version tag. This is basically the version label. Can be semver, a commit hash, or 'latest'. + */ + "version_tag": Models.DatasetRevision +}).annotations({ identifier: "DatasetVersion" }) +export type DatasetVersion = typeof DatasetVersion.Type + +/** + * Top-level container for a user-defined, tagged, and published Dataset. + * + * Contains metadata and discovery information for datasets. + */ +export const Dataset = Schema.Struct({ + /** + * The dataset name. Lowercase, alphanumeric with underscores. Cannot start + * with a number. + */ + "name": Models.DatasetName, + /** + * The dataset namespace. Logical grouping mechanism for datasets. Can be a + * user 0x address, username, or organization. + */ + "namespace": Models.DatasetNamespace, + /** + * Timestamp when the Dataset record was created (immutable). + */ + "created_at": Schema.String, + /** + * Computed link to the latest DatasetVersion reference in PURL format. + */ + "dataset_reference": Schema.optionalWith(Models.DatasetReferenceFromString, { nullable: true }), + /** + * Description of the dataset, its intended use, and purpose. + */ + "description": Schema.optionalWith(Models.DatasetDescription, { nullable: true }), + /** + * Chains being indexed by the Dataset. Used for discovery by chain. + */ + "indexing_chains": Schema.Array(Schema.String), + /** + * User-defined or derived keywords defining the usage of the dataset. + */ + "keywords": Schema.optionalWith(Schema.Array(Models.DatasetKeyword), { nullable: true }), + "latest_version": Schema.optionalWith(DatasetVersion, { nullable: true }), + /** + * Usage license covering the Dataset. + */ + "license": Schema.optionalWith(Models.DatasetLicense, { nullable: true }), + /** + * Owner of the Dataset. Can be an organization or user 0x address. 
+ */ + "owner": Schema.String, + /** + * User-defined README for the Dataset providing usage examples and documentation. + */ + "readme": Schema.optionalWith(Models.DatasetReadme, { nullable: true }), + /** + * VCS repository URL containing the Dataset source code. + */ + "repository_url": Schema.optionalWith(Models.DatasetRepository, { nullable: true }), + /** + * Source of data being materialized by the Dataset (e.g., contract addresses, + * logs, transactions). + */ + "source": Schema.optionalWith(Schema.Array(Models.DatasetSource), { nullable: true }), + /** + * Timestamp when the Dataset record was last updated. + */ + "updated_at": Schema.String, + /** + * Link to all DatasetVersion records that this Dataset is a parent of. + */ + "versions": Schema.optionalWith(Schema.Array(DatasetVersion), { nullable: true }), + "visibility": Models.DatasetVisibility +}).annotations({ identifier: "Dataset" }) +export type Dataset = typeof Dataset.Type + +/** + * Dataset with search relevance score. Extends the base Dataset with a weighted score indicating how well it matches the search query. Higher scores indicate better relevance. + */ +export const DatasetWithScore = Schema.Struct({ + ...Dataset.fields, + /** + * Weighted relevance score indicating how well this dataset matches the search query. Higher scores indicate better relevance. Score is calculated based on matches in description, keywords, source, and indexing chains fields. + */ + "score": Schema.Number +}).annotations({ identifier: "DatasetWithScore" }) +export type DatasetWithScore = typeof DatasetWithScore.Type + +/** + * Count of datasets by indexing chain. + * + * Returns the chain name and the number of datasets indexing that chain. + */ +export const DatasetCountByChain = Schema.Struct({ + /** + * The indexing chain name (e.g., "mainnet", "arbitrum-one", "base-mainnet") + */ + "chain": Schema.String, + /** + * The count of Dataset records indexing this chain + */ + "count": Schema.Int +}).annotations({ identifier: "DatasetCountByChain" }) +export type DatasetCountByChain = typeof DatasetCountByChain.Type + +/** + * Count of datasets by keyword (tag). + * + * Returns the keyword and the number of datasets with that keyword. + */ +export const DatasetCountByKeyword = Schema.Struct({ + /** + * The count of Dataset records with this keyword + */ + "count": Schema.Int, + /** + * The keyword (e.g., "DeFi", "NFT", "logs") + */ + "keyword": Schema.String +}).annotations({ identifier: "DatasetCountByKeyword" }) +export type DatasetCountByKeyword = typeof DatasetCountByKeyword.Type + +/** + * Cumulative count of datasets by last updated time bucket. + * + * Counts are cumulative - a dataset updated 1 hour ago will be counted in + * all four buckets (last_day, last_week, last_month, and last_year). + */ +export const DatasetCountByLastUpdated = Schema.Struct({ + /** + * The time bucket + */ + "bucket": LastUpdatedBucket, + /** + * The count of Dataset records updated within this time period + */ + "count": Schema.Int +}).annotations({ identifier: "DatasetCountByLastUpdatedBucket" }) +export type DatasetCountByLastUpdated = typeof DatasetCountByLastUpdated.Type + +/** + * Count of datasets by version status. + * + * Returns the version status and the number of datasets that have at least one version with that status. 
+ */ +export const DatasetCountByStatus = Schema.Struct({ + /** + * The count of Dataset records with at least one version in this status + */ + "count": Schema.Int, + /** + * The version status (Draft, Published, Deprecated, or Archived) + */ + "status": DatasetVersionStatus +}).annotations({ identifier: "DatasetCountByStatus" }) +export type DatasetCountByStatus = typeof DatasetCountByStatus.Type + +/** + * Count of datasets by visibility. + * + * Returns the visibility and the number of datasets with that visibility. + */ +export const DatasetCountByVisibility = Schema.Struct({ + /** + * The count of Dataset records with this visibility + */ + "count": Schema.Int, + /** + * The visibility (Public or Private) + */ + "visibility": Models.DatasetVisibility +}).annotations({ identifier: "DatasetCountByVisibility" }) +export type DatasetCountByVisibility = typeof DatasetCountByVisibility.Type + +/** + * Represents a saved query. + */ +export const SavedQuery = Schema.Struct({ + /** + * Timestamp when the SavedQuery was created + */ + "created_at": Schema.String, + /** + * Creator/owner of the saved query (ethereum address or user_id) + */ + "creator": Schema.String, + /** + * Optional description of what the query does + */ + "description": Schema.optionalWith(Schema.String, { nullable: true }), + /** + * Unique identifier for the saved query (UUID) + */ + "id": Schema.String.pipe( + Schema.pattern(new RegExp("^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$")) + ), + /** + * Name of the saved query + */ + "name": Schema.String, + /** + * The SQL query string + */ + "query": Schema.String, + /** + * Timestamp when the SavedQuery was last updated + */ + "updated_at": Schema.String, + "visibility": Models.DatasetVisibility +}).annotations({ identifier: "SavedQuery" }) +export type SavedQuery = typeof SavedQuery.Type + +// ============================================================================= +// URL Parameter Schemas +// ============================================================================= + +/** + * Parameters for listing datasets. + */ +export const ListDatasetsParams = Schema.Struct({ + "limit": Schema.optional(PositiveIntFromString), + "page": Schema.optional(PositiveIntFromString), + "sort_by": Schema.optional(DatasetSortBy), + "direction": Schema.optional(DatasetSortDirection), + "indexing_chains": Schema.optional(Schema.StringFromUriComponent), + "keywords": Schema.optional(Schema.StringFromUriComponent), + "last_updated": Schema.optional(LastUpdatedBucket) +}).annotations({ identifier: "ListDatasetsParams" }) +export type ListDatasetsParams = typeof ListDatasetsParams.Type + +/** + * Parameters for searching datasets. + */ +export const SearchDatasetsParams = Schema.Struct({ + "search": Schema.String, + "limit": Schema.optional(PositiveIntFromString), + "page": Schema.optional(PositiveIntFromString), + "indexing_chains": Schema.optional(Schema.StringFromUriComponent), + "keywords": Schema.optional(Schema.StringFromUriComponent), + "last_updated": Schema.optional(LastUpdatedBucket) +}).annotations({ identifier: "SearchDatasetsParams" }) +export type SearchDatasetsParams = typeof SearchDatasetsParams.Type + +/** + * Parameters for AI search of datasets. + */ +export const AiSearchDatasetsParams = Schema.Struct({ + "search": Schema.String +}).annotations({ identifier: "AiSearchDatasetsParams" }) +export type AiSearchDatasetsParams = typeof AiSearchDatasetsParams.Type + +/** + * Parameters for listing owned datasets. 
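+ *
+ * As a rough sketch, a query string such as `?limit=20&page=1&keywords=defi`
+ * would decode into a value shaped roughly like the one below (the exact
+ * `sort_by` values and keyword encoding depend on `DatasetSortBy` and
+ * `StringFromUriComponent`):
+ * ```ts
+ * const params = { limit: 20, page: 1, keywords: "defi" }
+ * ```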
+ */ +export const ListOwnedDatasetsParams = Schema.Struct({ + "limit": Schema.optional(PositiveIntFromString), + "page": Schema.optional(PositiveIntFromString), + "sort_by": Schema.optional(DatasetSortBy), + "direction": Schema.optional(DatasetSortDirection), + "indexing_chains": Schema.optional(Schema.StringFromUriComponent), + "keywords": Schema.optional(Schema.StringFromUriComponent), + "last_updated": Schema.optional(LastUpdatedBucket) +}).annotations({ identifier: "ListOwnedDatasetsParams" }) +export type ListOwnedDatasetsParams = typeof ListOwnedDatasetsParams.Type + +/** + * Parameters for listing owned datasets by dataset FQDN. + */ +export const GetOwnedDatasetsByFqdnParams = Schema.Struct({ + "namespace": Models.DatasetNamespace, + "name": Models.DatasetName +}).annotations({ identifier: "GetOwnedDatasetsByFqdnParams" }) +export type GetOwnedDatasetsByFqdnParams = typeof GetOwnedDatasetsByFqdnParams.Type + +/** + * Parameters for searching owned datasets. + */ +export const SearchOwnedDatasetsParams = Schema.Struct({ + "search": Schema.String, + "limit": Schema.optional(PositiveIntFromString), + "page": Schema.optional(PositiveIntFromString), + "indexing_chains": Schema.optional(Schema.StringFromUriComponent), + "keywords": Schema.optional(Schema.StringFromUriComponent), + "last_updated": Schema.optional(LastUpdatedBucket) +}).annotations({ identifier: "SearchOwnedDatasetsParams" }) +export type SearchOwnedDatasetsParams = typeof SearchOwnedDatasetsParams.Type + +/** + * Parameters for listing datasets by owner. + */ +export const ListMyDatasetsParams = Schema.Struct({ + "limit": Schema.optional(PositiveIntFromString), + "page": Schema.optional(PositiveIntFromString), + "sort_by": Schema.optional(DatasetSortBy), + "direction": Schema.optional(DatasetSortDirection), + "indexing_chains": Schema.optional(Schema.StringFromUriComponent), + "keywords": Schema.optional(Schema.StringFromUriComponent), + "last_updated": Schema.optional(LastUpdatedBucket) +}).annotations({ identifier: "ListMyDatasetsParams" }) +export type ListMyDatasetsParams = typeof ListMyDatasetsParams.Type + +/** + * Parameters for searching datasets by owner. + */ +export const SearchMyDatasetsParams = Schema.Struct({ + "search": Schema.String, + "limit": Schema.optional(PositiveIntFromString), + "page": Schema.optional(PositiveIntFromString), + "indexing_chains": Schema.optional(Schema.StringFromUriComponent), + "keywords": Schema.optional(Schema.StringFromUriComponent), + "last_updated": Schema.optional(LastUpdatedBucket) +}).annotations({ identifier: "SearchMyDatasetsParams" }) +export type SearchMyDatasetsParams = typeof SearchMyDatasetsParams.Type + +// ============================================================================= +// Request Header Schemas +// ============================================================================= + +/** + * Represents a bearer token header. + */ +export const BearerAuthHeader = Schema.Struct({ + Authorization: Schema.String.pipe( + Schema.startsWith("Bearer") + ) +}).annotations({ identifier: "BearerAuthHeader" }) +export type BearerAuthHeader = typeof BearerAuthHeader.Type + +// ============================================================================= +// Request Payload Schemas +// ============================================================================= + +/** + * Input for creating a new DatasetVersion. Contains the version tag, manifest hash, and manifest content. 
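+ *
+ * A minimal sketch of a payload, assuming `"manifest"` is a valid
+ * `DatasetKind` member (all values are hypothetical and the manifest body is
+ * reduced to a placeholder rather than a full `datasets_derived::Manifest`):
+ * ```ts
+ * const newVersion = {
+ *   version_tag: "1.0.0",
+ *   kind: "manifest",
+ *   status: "draft",
+ *   changelog: "Initial release",
+ *   manifest: { network: "mainnet" }
+ * }
+ * ```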
+ */ +export const InsertDatasetVersion = Schema.Struct({ + /** + * Optional changelog describing what changed in this version. + */ + "changelog": Schema.optionalWith(Schema.String, { nullable: true }), + "kind": Models.DatasetKind, + /** + * Manifest JSON content. This should be a valid datasets_derived::Manifest structure. The SHA256 hash will be calculated server-side. + */ + "manifest": Schema.Record({ key: Schema.String, value: Schema.Unknown }), + "status": DatasetVersionStatus, + /** + * Version tag (e.g., '1.0.0', 'latest', '8e0acc0'). Pattern: lowercase, numbers, dots, underscores, hyphens. + */ + "version_tag": Models.DatasetVersion +}).annotations({ identifier: "InsertDatasetVersion" }) +export type InsertDatasetVersion = typeof InsertDatasetVersion.Type + +/** + * Input for creating a new Dataset. Contains metadata, discovery information, + * and the initial version to create. The owner will be automatically set to the + * authenticated user. + */ +export const InsertDatasetPayload = Schema.Struct({ + /** + * Description of the dataset, its intended use, and purpose. + */ + "description": Schema.optionalWith(Models.DatasetDescription, { nullable: true }), + /** + * Chains being indexed by the Dataset. Used for discovery by chain. + */ + "indexing_chains": Schema.Array(Schema.String), + /** + * User-defined keywords defining the usage of the dataset. + */ + "keywords": Schema.optionalWith(Schema.Array(Models.DatasetKeyword), { nullable: true }), + /** + * Usage license covering the Dataset. + */ + "license": Schema.optionalWith(Models.DatasetLicense, { nullable: true }), + /** + * The dataset name. Pattern: lowercase, alphanumeric with underscores, cannot start with a number. + */ + "name": Models.DatasetName, + /** + * The dataset namespace. Pattern: lowercase, numbers, underscores. + */ + "namespace": Models.DatasetNamespace, + /** + * User-defined README for the Dataset providing usage examples and documentation. + */ + "readme": Schema.optionalWith(Models.DatasetReadme, { nullable: true }), + /** + * VCS repository URL containing the Dataset source code. + */ + "repository_url": Schema.optionalWith(Models.DatasetRepository, { nullable: true }), + /** + * Source of data being materialized by the Dataset (e.g., contract addresses). 
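+   * For instance (illustrative value only):
+   * ```ts
+   * const source = ["0x0000000000000000000000000000000000000001"]
+   * ```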
+ */
+  "source": Schema.optionalWith(Schema.Array(Models.DatasetSource), { nullable: true }),
+  "version": InsertDatasetVersion,
+  "visibility": Models.DatasetVisibility
+}).annotations({ identifier: "InsertDataset" })
+export type InsertDatasetPayload = typeof InsertDatasetPayload.Type
+
+/**
+ * Input for updating a Dataset's metadata fields:
+ * - keywords
+ * - README
+ * - sources
+ * - repository_url
+ * - license
+ * - description
+ */
+export const UpdateDatasetMetadataPayload = Schema.Struct({
+  /**
+   * Dataset description
+   */
+  "description": Schema.optionalWith(Models.DatasetDescription, { nullable: true }),
+  /**
+   * Chains being indexed by the dataset
+   */
+  "indexing_chains": Schema.Array(Schema.String),
+  /**
+   * Keywords for dataset discovery
+   */
+  "keywords": Schema.optionalWith(Schema.Array(Models.DatasetKeyword), { nullable: true }),
+  /**
+   * License covering the dataset
+   */
+  "license": Schema.optionalWith(Models.DatasetLicense, { nullable: true }),
+  /**
+   * User-defined README for the dataset
+   */
+  "readme": Schema.optionalWith(Models.DatasetReadme, { nullable: true }),
+  /**
+   * VCS repository URL
+   */
+  "repository_url": Schema.optionalWith(Models.DatasetRepository, { nullable: true }),
+  /**
+   * Source of data being materialized
+   */
+  "source": Schema.optionalWith(Schema.Array(Models.DatasetSource), { nullable: true })
+}).annotations({ identifier: "UpdateDatasetMetadataPayload" })
+export type UpdateDatasetMetadataPayload = typeof UpdateDatasetMetadataPayload.Type
+
+/**
+ * Input for updating a DatasetVersion's status
+ */
+export const UpdateDatasetVersionStatusPayload = Schema.Struct({
+  /**
+   * The new status for the dataset version (Draft or Published)
+   * Note: Use the DELETE endpoint to archive a version
+   */
+  "status": DatasetVersionStatus
+}).annotations({ identifier: "UpdateDatasetVersionStatusPayload" })
+export type UpdateDatasetVersionStatusPayload = typeof UpdateDatasetVersionStatusPayload.Type
+
+/**
+ * Input for updating a Dataset's visibility
+ */
+export const UpdateDatasetVisibilityPayload = Schema.Struct({
+  /**
+   * The new visibility level for the dataset
+   */
+  "visibility": Models.DatasetVisibility
+}).annotations({ identifier: "UpdateDatasetVisibilityPayload" })
+export type UpdateDatasetVisibilityPayload = typeof UpdateDatasetVisibilityPayload.Type
+
+// =============================================================================
+// Response Schemas
+// =============================================================================
+
+/**
+ * Represents a healthcheck response.
+ */
+export const HealthcheckResponse = Schema.Struct({
+  "status": Schema.String,
+  "version": Schema.String
+}).annotations({ identifier: "HealthcheckResponse" })
+export type HealthcheckResponse = typeof HealthcheckResponse.Type
+
+/**
+ * Response for listing datasets.
+ */
+export const DatasetListResponse = Schema.Struct({
+  /**
+   * List of the datasets being returned in this page
+   */
+  "datasets": Schema.Array(Dataset),
+  /**
+   * If true, there are more datasets that can be fetched
+   */
+  "has_next_page": Schema.Boolean,
+  /**
+   * Total number of datasets matching the query filters
+   */
+  "total_count": Schema.Int
+}).annotations({ identifier: "DatasetListResponse" })
+export type DatasetListResponse = typeof DatasetListResponse.Type
+
+/**
+ * Response for datasets count by chain.
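+ *
+ * A response of this shape might decode to something like the following
+ * (counts are hypothetical):
+ * ```ts
+ * const countsByChain = [
+ *   { chain: "mainnet", count: 42 },
+ *   { chain: "base-mainnet", count: 7 }
+ * ]
+ * ```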
+ */
+export const DatasetCountsByChainResponse = Schema.Array(DatasetCountByChain).annotations({
+  identifier: "DatasetCountsByChainResponse"
+})
+export type DatasetCountsByChainResponse = typeof DatasetCountsByChainResponse.Type
+
+/**
+ * Response for datasets count by keyword.
+ */
+export const DatasetCountsByKeywordResponse = Schema.Array(DatasetCountByKeyword).annotations({
+  identifier: "DatasetCountsByKeywordResponse"
+})
+export type DatasetCountsByKeywordResponse = typeof DatasetCountsByKeywordResponse.Type
+
+/**
+ * Response for datasets count by last updated.
+ */
+export const DatasetCountsByLastUpdatedResponse = Schema.Array(DatasetCountByLastUpdated).annotations({
+  identifier: "DatasetCountsByLastUpdatedResponse"
+})
+export type DatasetCountsByLastUpdatedResponse = typeof DatasetCountsByLastUpdatedResponse.Type
+
+/**
+ * Response for searching datasets.
+ */
+export const DatasetSearchResponse = Schema.Struct({
+  /**
+   * List of the datasets being returned in this page
+   */
+  "datasets": Schema.Array(DatasetWithScore),
+  /**
+   * If true, there are more datasets that can be fetched
+   */
+  "has_next_page": Schema.Boolean,
+  /**
+   * Total number of datasets matching the query filters
+   */
+  "total_count": Schema.Int
+}).annotations({ identifier: "DatasetSearchResponse" })
+export type DatasetSearchResponse = typeof DatasetSearchResponse.Type
+
+/**
+ * Response for AI search of datasets.
+ */
+export const DatasetAiSearchResponse = Schema.Array(DatasetWithScore).annotations({
+  identifier: "DatasetAiSearchResponse"
+})
+export type DatasetAiSearchResponse = typeof DatasetAiSearchResponse.Type
+
+/**
+ * Response for listing dataset versions.
+ */
+export const DatasetListVersionsResponse = Schema.Array(DatasetVersion).annotations({
+  identifier: "DatasetListVersionsResponse"
+})
+export type DatasetListVersionsResponse = typeof DatasetListVersionsResponse.Type
+
+/**
+ * Response for getting latest manifest.
+ */
+export const DatasetGetLatestManifestResponse = Schema.String.annotations({
+  identifier: "DatasetGetLatestManifestResponse"
+})
+export type DatasetGetLatestManifestResponse = typeof DatasetGetLatestManifestResponse.Type
+
+/**
+ * Response for getting a manifest.
+ */
+export const DatasetGetManifestResponse = Schema.String.annotations({
+  identifier: "DatasetGetManifestResponse"
+})
+export type DatasetGetManifestResponse = typeof DatasetGetManifestResponse.Type
+
+/**
+ * Response for listing latest queries.
+ */
+export const DatasetListLatestQueriesResponse = Schema.Array(SavedQuery).annotations({
+  identifier: "DatasetListLatestQueriesResponse"
+})
+export type DatasetListLatestQueriesResponse = typeof DatasetListLatestQueriesResponse.Type
+
+/**
+ * Response for listing queries.
+ */
+export const DatasetListQueriesResponse = Schema.Array(SavedQuery).annotations({
+  identifier: "DatasetListQueriesResponse"
+})
+export type DatasetListQueriesResponse = typeof DatasetListQueriesResponse.Type
+
+/**
+ * Response for listing authenticated user's owned datasets.
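+ *
+ * Shares the paging envelope used by the other list responses; an empty first
+ * page, for example, would decode to:
+ * ```ts
+ * const emptyPage = { datasets: [], has_next_page: false, total_count: 0 }
+ * ```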
+ */
+export const AuthUserOwnedDatasetListResponse = Schema.Struct({
+  /**
+   * List of the datasets being returned in this page
+   */
+  "datasets": Schema.Array(Dataset),
+  /**
+   * If true, there are more datasets that can be fetched
+   */
+  "has_next_page": Schema.Boolean,
+  /**
+   * Total number of datasets matching the query filters
+   */
+  "total_count": Schema.Int
+}).annotations({ identifier: "AuthUserOwnedDatasetListResponse" })
+export type AuthUserOwnedDatasetListResponse = typeof AuthUserOwnedDatasetListResponse.Type
+
+/**
+ * Response for owned datasets count by chain.
+ */
+export const OwnedDatasetCountsByChainResponse = Schema.Array(DatasetCountByChain).annotations({
+  identifier: "OwnedDatasetCountsByChainResponse"
+})
+export type OwnedDatasetCountsByChainResponse = typeof OwnedDatasetCountsByChainResponse.Type
+
+/**
+ * Response for owned datasets count by keyword.
+ */
+export const OwnedDatasetCountsByKeywordResponse = Schema.Array(DatasetCountByKeyword).annotations({
+  identifier: "OwnedDatasetCountsByKeywordResponse"
+})
+export type OwnedDatasetCountsByKeywordResponse = typeof OwnedDatasetCountsByKeywordResponse.Type
+
+/**
+ * Response for owned datasets count by last updated.
+ */
+export const OwnedDatasetsCountByLastUpdatedResponse = Schema.Array(DatasetCountByLastUpdated).annotations({
+  identifier: "OwnedDatasetsCountByLastUpdatedResponse"
+})
+export type OwnedDatasetsCountByLastUpdatedResponse = typeof OwnedDatasetsCountByLastUpdatedResponse.Type
+
+/**
+ * Response for owned datasets count by status.
+ */
+export const OwnedDatasetCountsByStatusResponse = Schema.Array(DatasetCountByStatus).annotations({
+  identifier: "OwnedDatasetCountsByStatusResponse"
+})
+export type OwnedDatasetCountsByStatusResponse = typeof OwnedDatasetCountsByStatusResponse.Type
+
+/**
+ * Response for owned datasets count by visibility.
+ */
+export const OwnedDatasetCountsByVisibilityResponse = Schema.Array(DatasetCountByVisibility).annotations({
+  identifier: "OwnedDatasetCountsByVisibilityResponse"
+})
+export type OwnedDatasetCountsByVisibilityResponse = typeof OwnedDatasetCountsByVisibilityResponse.Type
+
+/**
+ * Response for archiving a dataset version
+ */
+export const ArchiveDatasetVersionResponse = Schema.Struct({
+  /**
+   * The reference of the archived dataset version
+   */
+  "reference": Schema.String
+}).annotations({ identifier: "ArchiveDatasetVersionResponse" })
+export type ArchiveDatasetVersionResponse = typeof ArchiveDatasetVersionResponse.Type
+
+/**
+ * Response for listing owned queries.
+ */
+export const OwnedDatasetListQueriesResponse = Schema.Array(SavedQuery).annotations({
+  identifier: "OwnedDatasetListQueriesResponse"
+})
+export type OwnedDatasetListQueriesResponse = typeof OwnedDatasetListQueriesResponse.Type
+
+/**
+ * Response for listing datasets by owner.
+ */
+export const ListMyDatasetsResponse = Schema.Struct({
+  /**
+   * List of the datasets being returned in this page
+   */
+  "datasets": Schema.Array(Dataset),
+  /**
+   * If true, there are more datasets that can be fetched
+   */
+  "has_next_page": Schema.Boolean,
+  /**
+   * Total number of datasets matching the query filters
+   */
+  "total_count": Schema.Int
+}).annotations({ identifier: "ListMyDatasetsResponse" })
+export type ListMyDatasetsResponse = typeof ListMyDatasetsResponse.Type
+
+/**
+ * Represents a liveness response.
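+ *
+ * The body carries a single status string whose exact value is server-defined,
+ * e.g. something like:
+ * ```ts
+ * const live = { status: "ok" }
+ * ```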
+ */
+export const LivenessResponse = Schema.Struct({
+  "status": Schema.String
+}).annotations({ identifier: "LivenessResponse" })
+export type LivenessResponse = typeof LivenessResponse.Type
+
+/**
+ * Represents readiness checks.
+ */
+export const ReadinessChecks = Schema.Struct({
+  "database": ServiceStatus
+}).annotations({ identifier: "ReadinessChecks" })
+export type ReadinessChecks = typeof ReadinessChecks.Type
+
+/**
+ * Represents a readiness response.
+ */
+export const ReadinessResponse = Schema.Struct({
+  "checks": ReadinessChecks,
+  "status": Schema.String
+}).annotations({ identifier: "ReadinessResponse" })
+export type ReadinessResponse = typeof ReadinessResponse.Type
diff --git a/packages/amp/src/registry/error.ts b/packages/amp/src/registry/error.ts
new file mode 100644
index 0000000..42c9b62
--- /dev/null
+++ b/packages/amp/src/registry/error.ts
@@ -0,0 +1,236 @@
+/**
+ * This module contains error definitions which represent the standard error
+ * responses returned by the Amp Registry API.
+ *
+ * Errors provide structured error details including a machine-readable error
+ * code and a human-readable message.
+ *
+ * ## Error Code Conventions
+ * - Error codes use SCREAMING_SNAKE_CASE (e.g., `DATASET_NOT_FOUND`)
+ * - Codes are stable and can be relied upon programmatically
+ * - Messages may change and should only be used for display/logging
+ *
+ * ## Example JSON Response
+ * ```json
+ * {
+ *   "error_code": "DATASET_NOT_FOUND",
+ *   "error_message": "dataset 'eth_mainnet' version '1.0.0' not found",
+ *   "request_id": "a1b2c3d4-e5f6-7890-abcd-ef1234567890"
+ * }
+ * ```
+ */
+import * as HttpApiSchema from "@effect/platform/HttpApiSchema"
+import * as Schema from "effect/Schema"
+
+/**
+ * Machine-readable error code in SCREAMING_SNAKE_CASE format
+ *
+ * Error codes are stable across API versions and should be used
+ * for programmatic error handling. Examples: `INVALID_SELECTOR`,
+ * `DATASET_NOT_FOUND`, `REGISTRY_DB_ERROR`
+ */
+const ErrorCode = <Code extends string>(
+  code: Code
+): Schema.PropertySignature<":", Code, "error_code", ":", Code> =>
+  Schema.Literal(code).pipe(
+    Schema.propertySignature,
+    Schema.fromKey("error_code")
+  )
+
+const BaseErrorFields = {
+  /**
+   * Human-readable error message
+   *
+   * Messages provide detailed context about the error but may change
+   * over time. Use `error_code` for programmatic decisions.
+   */
+  message: Schema.String.pipe(
+    Schema.propertySignature,
+    Schema.fromKey("error_message")
+  ),
+  /**
+   * Request ID for tracing and correlation
+   *
+   * This ID can be used to correlate error responses with server logs
+   * for debugging and support purposes. The ID is generated per-request
+   * and appears in both logs and error responses.
+ */ + requestId: Schema.optional(Schema.String).pipe( + Schema.fromKey("request_id") + ) +} + +export class DatasetConversionError extends Schema.Class( + "Amp/RegistryApi/DatasetConversionError" +)({ + ...BaseErrorFields, + code: ErrorCode("DATASET_CONVERSION_ERROR") +}, HttpApiSchema.annotations({ status: 500 })) {} + +export class DatasetNotFoundError extends Schema.Class( + "Amp/RegistryApi/DatasetNotFoundError" +)({ + ...BaseErrorFields, + code: ErrorCode("DATASET_NOT_FOUND") +}, HttpApiSchema.annotations({ status: 404 })) {} + +export class DatasetVersionConversionError extends Schema.Class( + "Amp/RegistryApi/DatasetVersionConversionError" +)({ + ...BaseErrorFields, + code: ErrorCode("DATASET_VERSION_CONVERSION_ERROR") +}, HttpApiSchema.annotations({ status: 500 })) {} + +export class DatasetVersionNotFoundError extends Schema.Class( + "Amp/RegistryApi/DatasetVersionNotFoundError" +)({ + ...BaseErrorFields, + code: ErrorCode("VERSION_NOT_FOUND") +}, HttpApiSchema.annotations({ status: 404 })) {} + +export class ForbiddenError extends Schema.Class( + "Amp/RegistryApi/ForbiddenError" +)({ + ...BaseErrorFields, + code: ErrorCode("FORBIDDEN") +}, HttpApiSchema.annotations({ status: 403 })) {} + +export class InvalidDatasetOwnerPathError extends Schema.Class( + "Amp/RegistryApi/InvalidDatasetOwnerPathError" +)({ + ...BaseErrorFields, + code: ErrorCode("INVALID_DATASET_OWNER_PATH") +}, HttpApiSchema.annotations({ status: 400 })) {} + +export class InvalidDatasetReferenceError extends Schema.Class( + "Amp/RegistryApi/InvalidDatasetReferenceError" +)({ + ...BaseErrorFields, + code: ErrorCode("INVALID_REFERENCE") +}, HttpApiSchema.annotations({ status: 500 })) {} + +export class InvalidDatasetSelectorError extends Schema.Class( + "Amp/RegistryApi/InvalidDatasetSelectorError" +)({ + ...BaseErrorFields, + code: ErrorCode("INVALID_DATASET_SELECTOR") +}, HttpApiSchema.annotations({ status: 400 })) {} + +export class InvalidManifestError extends Schema.Class( + "Amp/RegistryApi/InvalidManifestError" +)({ + ...BaseErrorFields, + code: ErrorCode("INVALID_MANIFEST") +}, HttpApiSchema.annotations({ status: 400 })) {} + +export class InvalidManifestHashError extends Schema.Class( + "Amp/RegistryApi/InvalidManifestHashError" +)({ + ...BaseErrorFields, + code: ErrorCode("INVALID_MANIFEST_HASH") +}, HttpApiSchema.annotations({ status: 500 })) {} + +export class InvalidNamespaceError extends Schema.Class( + "Amp/RegistryApi/InvalidNamespaceError" +)({ + ...BaseErrorFields, + code: ErrorCode("INVALID_NAMESPACE") +}, HttpApiSchema.annotations({ status: 400 })) {} + +export class InvalidQueryParametersError extends Schema.Class( + "Amp/RegistryApi/InvalidQueryParametersError" +)({ + ...BaseErrorFields, + code: ErrorCode("INVALID_QUERY_PARAMETERS") +}, HttpApiSchema.annotations({ status: 400 })) {} + +export class InvalidPathParametersError extends Schema.Class( + "Amp/RegistryApi/InvalidPathParametersError" +)({ + ...BaseErrorFields, + code: ErrorCode("INVALID_PATH_PARAMETERS") +}, HttpApiSchema.annotations({ status: 400 })) {} + +export class InvalidRequestBodyError extends Schema.Class( + "Amp/RegistryApi/InvalidRequestBodyError" +)({ + ...BaseErrorFields, + code: ErrorCode("INVALID_REQUEST_BODY") +}, HttpApiSchema.annotations({ status: 400 })) {} + +export class InvalidSelectorError extends Schema.Class( + "Amp/RegistryApi/InvalidSelectorError" +)({ + ...BaseErrorFields, + code: ErrorCode("INVALID_SELECTOR") +}, HttpApiSchema.annotations({ status: 400 })) {} + +export class 
LatestDatasetVersionNotFoundError extends Schema.Class( + "Amp/RegistryApi/LatestDatasetVersionNotFoundError" +)({ + ...BaseErrorFields, + code: ErrorCode("LATEST_VERSION_NOT_FOUND") +}, HttpApiSchema.annotations({ status: 404 })) {} + +export class LimitInvalidError extends Schema.Class( + "Amp/RegistryApi/LimitInvalidError" +)({ + ...BaseErrorFields, + code: ErrorCode("LIMIT_INVALID") +}, HttpApiSchema.annotations({ status: 400 })) {} + +export class LimitTooLargeError extends Schema.Class( + "Amp/RegistryApi/LimitTooLargeError" +)({ + ...BaseErrorFields, + code: ErrorCode("LIMIT_TOO_LARGE") +}, HttpApiSchema.annotations({ status: 400 })) {} + +export class ManifestDeserializationError extends Schema.Class( + "Amp/RegistryApi/ManifestDeserializationError" +)({ + ...BaseErrorFields, + code: ErrorCode("MANIFEST_DESERIALIZATION_ERROR") +}, HttpApiSchema.annotations({ status: 500 })) {} + +export class ManifestNotFoundError extends Schema.Class( + "Amp/RegistryApi/ManifestNotFoundError" +)({ + ...BaseErrorFields, + code: ErrorCode("MANIFEST_NOT_FOUND") +}, HttpApiSchema.annotations({ status: 404 })) {} + +export class ManifestRetrievalError extends Schema.Class( + "Amp/RegistryApi/ManifestRetrievalError" +)({ + ...BaseErrorFields, + code: ErrorCode("MANIFEST_RETRIEVAL_ERROR") +}, HttpApiSchema.annotations({ status: 500 })) {} + +export class NamespaceAccessDeniedError extends Schema.Class( + "Amp/RegistryApi/NamespaceAccessDeniedError" +)({ + ...BaseErrorFields, + code: ErrorCode("NAMESPACE_ACCESS_DENIED") +}, HttpApiSchema.annotations({ status: 403 })) {} + +export class RegistryDatabaseError extends Schema.Class( + "Amp/RegistryApi/RegistryDatabaseError" +)({ + ...BaseErrorFields, + code: ErrorCode("AMP_REGISTRY_DB_ERROR") +}, HttpApiSchema.annotations({ status: 500 })) {} + +export class SavedQueryConversionError extends Schema.Class( + "Amp/RegistryApi/SavedQueryConversionError" +)({ + ...BaseErrorFields, + code: ErrorCode("SAVED_QUERY_CONVERSION_ERROR") +}, HttpApiSchema.annotations({ status: 500 })) {} + +export class ServiceUnavailableError extends Schema.Class( + "Amp/RegistryApi/ServiceUnavailableError" +)({ + ...BaseErrorFields, + code: ErrorCode("SERVICE_UNAVAILABLE") +}, HttpApiSchema.annotations({ status: 503 })) {} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 7704114..693187d 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -61,10 +61,10 @@ importers: version: 8.50.0(eslint@9.39.2)(typescript@5.9.3) vite-tsconfig-paths: specifier: ^6.0.3 - version: 6.0.3(typescript@5.9.3)(vite@7.3.0(@types/node@25.0.3)) + version: 6.0.3(typescript@5.9.3)(vite@7.3.0(@types/node@25.0.3)(yaml@2.8.2)) vitest: specifier: ^4.0.16 - version: 4.0.16(@types/node@25.0.3)(@vitest/ui@4.0.16) + version: 4.0.16(@types/node@25.0.3)(@vitest/ui@4.0.16)(yaml@2.8.2) vitest-mock-express: specifier: ^2.2.0 version: 2.2.0 @@ -104,7 +104,7 @@ importers: version: 25.0.3 vitest: specifier: ^4.0.0 - version: 4.0.16(@types/node@25.0.3)(@vitest/ui@4.0.16) + version: 4.0.16(@types/node@25.0.3)(@vitest/ui@4.0.16)(yaml@2.8.2) scratchpad: dependencies: @@ -2216,6 +2216,11 @@ packages: utf-8-validate: optional: true + yaml@2.8.2: + resolution: {integrity: sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A==} + engines: {node: '>= 14.6'} + hasBin: true + yocto-queue@0.1.0: resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} engines: {node: '>=10'} @@ -2404,7 +2409,7 @@ snapshots: 
'@effect/vitest@0.27.0(effect@3.19.13)(vitest@4.0.16)': dependencies: effect: 3.19.13 - vitest: 4.0.16(@types/node@25.0.3)(@vitest/ui@4.0.16) + vitest: 4.0.16(@types/node@25.0.3)(@vitest/ui@4.0.16)(yaml@2.8.2) '@effect/workflow@0.16.0(@effect/experimental@0.58.0(@effect/platform@0.94.0(effect@3.19.13))(effect@3.19.13))(@effect/platform@0.94.0(effect@3.19.13))(@effect/rpc@0.73.0(@effect/platform@0.94.0(effect@3.19.13))(effect@3.19.13))(effect@3.19.13)': dependencies: @@ -2974,7 +2979,7 @@ snapshots: obug: 2.1.1 std-env: 3.10.0 tinyrainbow: 3.0.3 - vitest: 4.0.16(@types/node@25.0.3)(@vitest/ui@4.0.16) + vitest: 4.0.16(@types/node@25.0.3)(@vitest/ui@4.0.16)(yaml@2.8.2) transitivePeerDependencies: - supports-color @@ -2987,13 +2992,13 @@ snapshots: chai: 6.2.1 tinyrainbow: 3.0.3 - '@vitest/mocker@4.0.16(vite@7.3.0(@types/node@25.0.3))': + '@vitest/mocker@4.0.16(vite@7.3.0(@types/node@25.0.3)(yaml@2.8.2))': dependencies: '@vitest/spy': 4.0.16 estree-walker: 3.0.3 magic-string: 0.30.21 optionalDependencies: - vite: 7.3.0(@types/node@25.0.3) + vite: 7.3.0(@types/node@25.0.3)(yaml@2.8.2) '@vitest/pretty-format@4.0.16': dependencies: @@ -3021,7 +3026,7 @@ snapshots: sirv: 3.0.2 tinyglobby: 0.2.15 tinyrainbow: 3.0.3 - vitest: 4.0.16(@types/node@25.0.3)(@vitest/ui@4.0.16) + vitest: 4.0.16(@types/node@25.0.3)(@vitest/ui@4.0.16)(yaml@2.8.2) '@vitest/utils@4.0.16': dependencies: @@ -4093,18 +4098,18 @@ snapshots: - utf-8-validate - zod - vite-tsconfig-paths@6.0.3(typescript@5.9.3)(vite@7.3.0(@types/node@25.0.3)): + vite-tsconfig-paths@6.0.3(typescript@5.9.3)(vite@7.3.0(@types/node@25.0.3)(yaml@2.8.2)): dependencies: debug: 4.4.3 globrex: 0.1.2 tsconfck: 3.1.6(typescript@5.9.3) optionalDependencies: - vite: 7.3.0(@types/node@25.0.3) + vite: 7.3.0(@types/node@25.0.3)(yaml@2.8.2) transitivePeerDependencies: - supports-color - typescript - vite@7.3.0(@types/node@25.0.3): + vite@7.3.0(@types/node@25.0.3)(yaml@2.8.2): dependencies: esbuild: 0.27.2 fdir: 6.5.0(picomatch@4.0.3) @@ -4115,15 +4120,16 @@ snapshots: optionalDependencies: '@types/node': 25.0.3 fsevents: 2.3.3 + yaml: 2.8.2 vitest-mock-express@2.2.0: dependencies: '@types/express': 4.17.25 - vitest@4.0.16(@types/node@25.0.3)(@vitest/ui@4.0.16): + vitest@4.0.16(@types/node@25.0.3)(@vitest/ui@4.0.16)(yaml@2.8.2): dependencies: '@vitest/expect': 4.0.16 - '@vitest/mocker': 4.0.16(vite@7.3.0(@types/node@25.0.3)) + '@vitest/mocker': 4.0.16(vite@7.3.0(@types/node@25.0.3)(yaml@2.8.2)) '@vitest/pretty-format': 4.0.16 '@vitest/runner': 4.0.16 '@vitest/snapshot': 4.0.16 @@ -4140,7 +4146,7 @@ snapshots: tinyexec: 1.0.2 tinyglobby: 0.2.15 tinyrainbow: 3.0.3 - vite: 7.3.0(@types/node@25.0.3) + vite: 7.3.0(@types/node@25.0.3)(yaml@2.8.2) why-is-node-running: 2.3.0 optionalDependencies: '@types/node': 25.0.3 @@ -4183,4 +4189,7 @@ snapshots: ws@8.18.3: {} + yaml@2.8.2: + optional: true + yocto-queue@0.1.0: {} From 283b5a82f2dc45c262efc59dec6f5ea5745cd64d Mon Sep 17 00:00:00 2001 From: Maxwell Brown Date: Thu, 15 Jan 2026 18:26:33 -0500 Subject: [PATCH 2/2] fix issues --- packages/amp/src/domain.ts | 673 ------------------------------- packages/amp/src/registry/api.ts | 10 +- 2 files changed, 5 insertions(+), 678 deletions(-) delete mode 100644 packages/amp/src/domain.ts diff --git a/packages/amp/src/domain.ts b/packages/amp/src/domain.ts deleted file mode 100644 index 544e7a1..0000000 --- a/packages/amp/src/domain.ts +++ /dev/null @@ -1,673 +0,0 @@ -import * as S from "effect/Schema" - -export class HealthcheckResponse extends S.Class("HealthcheckResponse")({ - 
"status": S.String, - "version": S.String -}) {} - -/** - * Time-based buckets for grouping datasets by last updated time - */ -export class LastUpdatedBucket extends S.Literal("last_day", "last_week", "last_month", "last_year") {} - -export class DatasetsListParams extends S.Struct({ - "limit": S.optionalWith(S.Int, { nullable: true }), - "page": S.optionalWith(S.Int, { nullable: true }), - "sort_by": S.optionalWith(S.String, { nullable: true }), - "direction": S.optionalWith(S.String, { nullable: true }), - "indexing_chains": S.optionalWith(S.Array(S.String), { nullable: true }), - "keywords": S.optionalWith(S.Array(S.String), { nullable: true }), - "last_updated": S.optionalWith(LastUpdatedBucket, { nullable: true }) -}) {} - -export class DatasetVersionAncestry extends S.Class("DatasetVersionAncestry")({ - /** - * Dataset reference in the format: {namespace}/{name}@{version_tag}. Points to the DatasetVersion.dataset_reference. This allows version-pinned dependencies. - */ - "dataset_reference": S.String.pipe(S.pattern(new RegExp("^[a-z0-9_]+/[a-z_][a-z0-9_]*@[a-z0-9._-]+$"))) -}) {} - -export class DatasetVersionStatus extends S.Literal("draft", "published", "deprecated", "archived") {} - -export class DatasetVersion extends S.Class("DatasetVersion")({ - /** - * Array of ancestor DatasetVersion references that this version extends from (version-pinned dependencies). - */ - "ancestors": S.optionalWith(S.Array(DatasetVersionAncestry), { nullable: true }), - /** - * A description of what changed with this version. Allows developers of the Dataset to communicate to downstream consumers what has changed with this version from previous versions. Migration guides, etc. - */ - "changelog": S.optionalWith(S.String, { nullable: true }), - /** - * Timestamp when the DatasetVersion record was created (immutable). - */ - "created_at": S.String, - /** - * Dataset reference in the format: {namespace}/{name}@{version_tag}. This value is globally unique and is a pointer to a tagged and published Manifest. - */ - "dataset_reference": S.String.pipe(S.pattern(new RegExp("^[a-z0-9_]+/[a-z_][a-z0-9_]*@[a-z0-9._-]+$"))), - /** - * Array of descendant DatasetVersion references that extend from this version. - */ - "descendants": S.optionalWith(S.Array(DatasetVersionAncestry), { nullable: true }), - "status": DatasetVersionStatus, - /** - * The published version tag. This is basically the version label. Can be semver, a commit hash, or 'latest'. - */ - "version_tag": S.String.pipe(S.pattern(new RegExp("^[a-z0-9._-]+$"))) -}) {} - -export class DatasetVisibility extends S.Literal("private", "public") {} - -/** - * Top-level container for a user-defined, tagged, and published Dataset. Contains metadata and discovery information for datasets. - */ -export class Dataset extends S.Class("Dataset")({ - /** - * Timestamp when the Dataset record was created (immutable). - */ - "created_at": S.String, - /** - * Computed link to the latest DatasetVersion reference in PURL format. - */ - "dataset_reference": S.optionalWith( - S.String.pipe(S.minLength(1), S.pattern(new RegExp("^[a-z0-9_]+/[a-z_][a-z0-9_]*@[a-z0-9._-]+$"))), - { nullable: true } - ), - /** - * Description of the dataset, its intended use, and purpose. - */ - "description": S.optionalWith(S.String.pipe(S.maxLength(1024)), { nullable: true }), - /** - * Chains being indexed by the Dataset. Used for discovery by chain. - */ - "indexing_chains": S.Array(S.String), - /** - * User-defined or derived keywords defining the usage of the dataset. 
- */ - "keywords": S.optionalWith(S.Array(S.String), { nullable: true }), - "latest_version": S.optionalWith(DatasetVersion, { nullable: true }), - /** - * Usage license covering the Dataset. - */ - "license": S.optionalWith(S.String, { nullable: true }), - /** - * The dataset name. Lowercase, alphanumeric with underscores. Cannot start with a number. - */ - "name": S.String.pipe(S.minLength(1), S.pattern(new RegExp("^[a-z_][a-z0-9_]*$"))), - /** - * The dataset namespace. Logical grouping mechanism for datasets. Can be a user 0x address, username, or organization. - */ - "namespace": S.String.pipe(S.minLength(1), S.pattern(new RegExp("^[a-z0-9_]*$"))), - /** - * Owner of the Dataset. Can be an organization or user 0x address. - */ - "owner": S.String, - /** - * User-defined README for the Dataset providing usage examples and documentation. - */ - "readme": S.optionalWith(S.String, { nullable: true }), - /** - * VCS repository URL containing the Dataset source code. - */ - "repository_url": S.optionalWith(S.String, { nullable: true }), - /** - * Source of data being materialized by the Dataset (e.g., contract addresses, logs, transactions). - */ - "source": S.optionalWith(S.Array(S.String), { nullable: true }), - /** - * Timestamp when the Dataset record was last updated. - */ - "updated_at": S.String, - /** - * Link to all DatasetVersion records that this Dataset is a parent of. - */ - "versions": S.optionalWith(S.Array(DatasetVersion), { nullable: true }), - "visibility": DatasetVisibility -}) {} - -export class DatasetListResponse extends S.Class("DatasetListResponse")({ - /** - * List of the datasets being returned in this page - */ - "datasets": S.Array(Dataset), - /** - * If true, there are more datasets that can be fetched - */ - "has_next_page": S.Boolean, - /** - * Total number of datasets matching the query filters - */ - "total_count": S.Int -}) {} - -/** - * Standard error response returned by the API - * - * This struct represents error information returned in HTTP error responses. - * It provides structured error details including a machine-readable error code - * and human-readable message. - * - * ## Error Code Conventions - * - Error codes use SCREAMING_SNAKE_CASE (e.g., `DATASET_NOT_FOUND`) - * - Codes are stable and can be relied upon programmatically - * - Messages may change and should only be used for display/logging - * - * ## Example JSON Response - * ```json - * { - * "error_code": "DATASET_NOT_FOUND", - * "error_message": "dataset 'eth_mainnet' version '1.0.0' not found", - * "request_id": "a1b2c3d4-e5f6-7890-abcd-ef1234567890" - * } - * ``` - */ -export class ErrorResponse extends S.Class("ErrorResponse")({ - /** - * Machine-readable error code in SCREAMING_SNAKE_CASE format - * - * Error codes are stable across API versions and should be used - * for programmatic error handling. Examples: `INVALID_SELECTOR`, - * `DATASET_NOT_FOUND`, `REGISTRY_DB_ERROR` - */ - "error_code": S.String, - /** - * Human-readable error message - * - * Messages provide detailed context about the error but may change - * over time. Use `error_code` for programmatic decisions. - */ - "error_message": S.String, - /** - * Request ID for tracing and correlation - * - * This ID can be used to correlate error responses with server logs - * for debugging and support purposes. The ID is generated per-request - * and appears in both logs and error responses. - */ - "request_id": S.optionalWith(S.String, { nullable: true }) -}) {} - -/** - * Count of datasets by indexing chain. 
- * - * Returns the chain name and the number of datasets indexing that chain. - */ -export class DatasetCountByChainDto extends S.Class("DatasetCountByChainDto")({ - /** - * The indexing chain name (e.g., "mainnet", "arbitrum-one", "base-mainnet") - */ - "chain": S.String, - /** - * The count of Dataset records indexing this chain - */ - "count": S.Int -}) {} - -export class DatasetsCountByChain200 extends S.Array(DatasetCountByChainDto) {} - -/** - * Count of datasets by keyword (tag). - * - * Returns the keyword and the number of datasets with that keyword. - */ -export class DatasetCountByKeywordDto extends S.Class("DatasetCountByKeywordDto")({ - /** - * The count of Dataset records with this keyword - */ - "count": S.Int, - /** - * The keyword (e.g., "DeFi", "NFT", "logs") - */ - "keyword": S.String -}) {} - -export class DatasetsCountByKeyword200 extends S.Array(DatasetCountByKeywordDto) {} - -/** - * Cumulative count of datasets by last updated time bucket. - * - * Counts are cumulative - a dataset updated 1 hour ago will be counted in - * all four buckets (last_day, last_week, last_month, and last_year). - */ -export class DatasetCountByLastUpdatedBucketDto - extends S.Class("DatasetCountByLastUpdatedBucketDto")({ - /** - * The time bucket - */ - "bucket": LastUpdatedBucket, - /** - * The count of Dataset records updated within this time period - */ - "count": S.Int - }) -{} - -export class DatasetsCountByLastUpdated200 extends S.Array(DatasetCountByLastUpdatedBucketDto) {} - -export class DatasetsSearchParams extends S.Struct({ - "search": S.String, - "limit": S.optionalWith(S.Int, { nullable: true }), - "page": S.optionalWith(S.Int, { nullable: true }), - "indexing_chains": S.optionalWith(S.String, { nullable: true }), - "keywords": S.optionalWith(S.String, { nullable: true }), - "last_updated": S.optionalWith(LastUpdatedBucket, { nullable: true }) -}) {} - -/** - * Dataset with search relevance score. Extends the base Dataset with a weighted score indicating how well it matches the search query. Higher scores indicate better relevance. - */ -export class DatasetWithScore extends S.Class("DatasetWithScore")({ - /** - * Timestamp when the Dataset record was created (immutable). - */ - "created_at": S.String, - /** - * Computed link to the latest DatasetVersion reference in PURL format. - */ - "dataset_reference": S.optionalWith( - S.String.pipe(S.minLength(1), S.pattern(new RegExp("^[a-z0-9_]+/[a-z_][a-z0-9_]*@[a-z0-9._-]+$"))), - { nullable: true } - ), - /** - * Description of the dataset, its intended use, and purpose. - */ - "description": S.optionalWith(S.String.pipe(S.maxLength(1024)), { nullable: true }), - /** - * Chains being indexed by the Dataset. Used for discovery by chain. - */ - "indexing_chains": S.Array(S.String), - /** - * User-defined or derived keywords defining the usage of the dataset. - */ - "keywords": S.optionalWith(S.Array(S.String), { nullable: true }), - "latest_version": S.optionalWith(DatasetVersion, { nullable: true }), - /** - * Usage license covering the Dataset. - */ - "license": S.optionalWith(S.String, { nullable: true }), - /** - * The dataset name. Lowercase, alphanumeric with underscores. Cannot start with a number. - */ - "name": S.String.pipe(S.minLength(1), S.pattern(new RegExp("^[a-z_][a-z0-9_]*$"))), - /** - * The dataset namespace. Logical grouping mechanism for datasets. Can be a user 0x address, username, or organization. 
- */ - "namespace": S.String.pipe(S.minLength(1), S.pattern(new RegExp("^[a-z0-9_]*$"))), - /** - * Owner of the Dataset. Can be an organization or user 0x address. - */ - "owner": S.String, - /** - * User-defined README for the Dataset providing usage examples and documentation. - */ - "readme": S.optionalWith(S.String, { nullable: true }), - /** - * VCS repository URL containing the Dataset source code. - */ - "repository_url": S.optionalWith(S.String, { nullable: true }), - /** - * Weighted relevance score indicating how well this dataset matches the search query. Higher scores indicate better relevance. Score is calculated based on matches in description, keywords, source, and indexing chains fields. - */ - "score": S.Number, - /** - * Source of data being materialized by the Dataset (e.g., contract addresses, logs, transactions). - */ - "source": S.optionalWith(S.Array(S.String), { nullable: true }), - /** - * Timestamp when the Dataset record was last updated. - */ - "updated_at": S.String, - /** - * Link to all DatasetVersion records that this Dataset is a parent of. - */ - "versions": S.optionalWith(S.Array(DatasetVersion), { nullable: true }), - "visibility": DatasetVisibility -}) {} - -export class DatasetSearchResponse extends S.Class("DatasetSearchResponse")({ - /** - * List of the datasets being returned in this page - */ - "datasets": S.Array(DatasetWithScore), - /** - * If true, there are more datasets that can be fetched - */ - "has_next_page": S.Boolean, - /** - * Total number of datasets matching the query filters - */ - "total_count": S.Int -}) {} - -export class DatasetsAiSearchParams extends S.Struct({ - "search": S.String -}) {} - -export class DatasetsAiSearch200 extends S.Array(DatasetWithScore) {} - -export class DatasetsListVersions200 extends S.Array(DatasetVersion) {} - -export class DatasetsGetLatestManifest200 extends S.String {} - -export class SavedQuery extends S.Class("SavedQuery")({ - /** - * Timestamp when the SavedQuery was created - */ - "created_at": S.String, - /** - * Creator/owner of the saved query (ethereum address or user_id) - */ - "creator": S.String, - /** - * Optional description of what the query does - */ - "description": S.optionalWith(S.String, { nullable: true }), - /** - * Unique identifier for the saved query (UUID) - */ - "id": S.String.pipe( - S.pattern(new RegExp("^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$")) - ), - /** - * Name of the saved query - */ - "name": S.String, - /** - * The SQL query string - */ - "query": S.String, - /** - * Timestamp when the SavedQuery was last updated - */ - "updated_at": S.String, - "visibility": DatasetVisibility -}) {} - -export class DatasetsListLatestQueries200 extends S.Array(SavedQuery) {} - -export class DatasetsGetManifest200 extends S.String {} - -export class DatasetsListQueries200 extends S.Array(SavedQuery) {} - -export class DatasetsOwnedListParams extends S.Struct({ - "limit": S.optionalWith(S.Int, { nullable: true }), - "page": S.optionalWith(S.Int, { nullable: true }), - "sort_by": S.optionalWith(S.String, { nullable: true }), - "direction": S.optionalWith(S.String, { nullable: true }), - "indexing_chains": S.optionalWith(S.String, { nullable: true }), - "keywords": S.optionalWith(S.String, { nullable: true }), - "last_updated": S.optionalWith(LastUpdatedBucket, { nullable: true }) -}) {} - -export class AuthUserOwnedDatasetListResponse - extends S.Class("AuthUserOwnedDatasetListResponse")({ - /** - * List of the datasets being returned in this 
page - */ - "datasets": S.Array(Dataset), - /** - * If true, there are more datasets that can be fetched - */ - "has_next_page": S.Boolean, - /** - * Total number of datasets matching the query filters - */ - "total_count": S.Int - }) -{} - -export class OwnedDatasetsCountByChain200 extends S.Array(DatasetCountByChainDto) {} - -export class OwnedDatasetsCountByKeyword200 extends S.Array(DatasetCountByKeywordDto) {} - -export class OwnedDatasetsCountByLastUpdated200 extends S.Array(DatasetCountByLastUpdatedBucketDto) {} - -/** - * Count of datasets by version status. - * - * Returns the version status and the number of datasets that have at least one version with that status. - */ -export class DatasetCountByStatusDto extends S.Class("DatasetCountByStatusDto")({ - /** - * The count of Dataset records with at least one version in this status - */ - "count": S.Int, - /** - * The version status (Draft, Published, Deprecated, or Archived) - */ - "status": DatasetVersionStatus -}) {} - -export class OwnedDatasetsCountByStatus200 extends S.Array(DatasetCountByStatusDto) {} - -/** - * Count of datasets by visibility. - * - * Returns the visibility and the number of datasets with that visibility. - */ -export class DatasetCountByVisibilityDto extends S.Class("DatasetCountByVisibilityDto")({ - /** - * The count of Dataset records with this visibility - */ - "count": S.Int, - /** - * The visibility (Public or Private) - */ - "visibility": DatasetVisibility -}) {} - -export class OwnedDatasetsCountByVisibility200 extends S.Array(DatasetCountByVisibilityDto) {} - -export class ManifestKind extends S.Literal("manifest", "evm-rpc", "eth-beacon", "firehose") {} - -/** - * Input for creating a new DatasetVersion. Contains the version tag, manifest hash, and manifest content. - */ -export class InsertDatasetVersion extends S.Class("InsertDatasetVersion")({ - /** - * Optional changelog describing what changed in this version. - */ - "changelog": S.optionalWith(S.String, { nullable: true }), - "kind": ManifestKind, - /** - * Manifest JSON content. This should be a valid datasets_derived::Manifest structure. The SHA256 hash will be calculated server-side. - */ - "manifest": S.Record({ key: S.String, value: S.Unknown }), - "status": DatasetVersionStatus, - /** - * Version tag (e.g., '1.0.0', 'latest', '8e0acc0'). Pattern: lowercase, numbers, dots, underscores, hyphens. - */ - "version_tag": S.String.pipe(S.pattern(new RegExp("^[a-z0-9._-]+$"))) -}) {} - -/** - * Input for creating a new Dataset. Contains metadata, discovery information, and the initial version to create. The owner will be automatically set to the authenticated user. - */ -export class InsertDataset extends S.Class("InsertDataset")({ - /** - * Description of the dataset, its intended use, and purpose. - */ - "description": S.optionalWith(S.String.pipe(S.maxLength(1024)), { nullable: true }), - /** - * Chains being indexed by the Dataset. Used for discovery by chain. - */ - "indexing_chains": S.Array(S.String), - /** - * User-defined keywords defining the usage of the dataset. - */ - "keywords": S.optionalWith(S.Array(S.String), { nullable: true }), - /** - * Usage license covering the Dataset. - */ - "license": S.optionalWith(S.String, { nullable: true }), - /** - * The dataset name. Pattern: lowercase, alphanumeric with underscores, cannot start with a number. - */ - "name": S.String.pipe(S.minLength(1), S.pattern(new RegExp("^[a-z_][a-z0-9_]*$"))), - /** - * The dataset namespace. Pattern: lowercase, numbers, underscores. 
- */ - "namespace": S.String.pipe(S.minLength(1), S.pattern(new RegExp("^[a-z0-9_]*$"))), - /** - * User-defined README for the Dataset providing usage examples and documentation. - */ - "readme": S.optionalWith(S.String, { nullable: true }), - /** - * VCS repository URL containing the Dataset source code. - */ - "repository_url": S.optionalWith( - S.String.pipe( - S.pattern( - new RegExp( - "^https?://[a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(\\\\.[a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*(/.*)?$" - ) - ) - ), - { nullable: true } - ), - /** - * Source of data being materialized by the Dataset (e.g., contract addresses). - */ - "source": S.optionalWith(S.Array(S.String), { nullable: true }), - "version": InsertDatasetVersion, - "visibility": DatasetVisibility -}) {} - -export class DatasetsOwnedSearchParams extends S.Struct({ - "search": S.String, - "limit": S.optionalWith(S.Int, { nullable: true }), - "page": S.optionalWith(S.Int, { nullable: true }), - "indexing_chains": S.optionalWith(S.String, { nullable: true }), - "keywords": S.optionalWith(S.String, { nullable: true }), - "last_updated": S.optionalWith(LastUpdatedBucket, { nullable: true }) -}) {} - -/** - * Input for update the Datasets metadata fields: - * - keywords - * - README - * - sources - * - repository_url - * - license - * - description - */ -export class UpdateDatasetMetadataDto extends S.Class("UpdateDatasetMetadataDto")({ - /** - * Dataset description - */ - "description": S.optionalWith(S.String, { nullable: true }), - /** - * Chains being indexed by the dataset - */ - "indexing_chains": S.Array(S.String), - /** - * Keywords for dataset discovery - */ - "keywords": S.optionalWith(S.Array(S.String), { nullable: true }), - /** - * License covering the dataset - */ - "license": S.optionalWith(S.String, { nullable: true }), - /** - * User-defined README for the dataset - */ - "readme": S.optionalWith(S.String, { nullable: true }), - /** - * VCS repository URL - */ - "repository_url": S.optionalWith(S.String, { nullable: true }), - /** - * Source of data being materialized - */ - "source": S.optionalWith(S.Array(S.String), { nullable: true }) -}) {} - -/** - * Response for archiving a dataset version - */ -export class ArchiveDatasetVersionResponse - extends S.Class("ArchiveDatasetVersionResponse")({ - /** - * The reference of the archived dataset version - */ - "reference": S.String - }) -{} - -/** - * Input for updating a DatasetVersion's status - */ -export class UpdateDatasetVersionStatusDto - extends S.Class("UpdateDatasetVersionStatusDto")({ - /** - * The new status for the dataset version (Draft or Published) - * Note: Use the DELETE endpoint to archive a version - */ - "status": DatasetVersionStatus - }) -{} - -export class DatasetsOwnedListQueries200 extends S.Array(SavedQuery) {} - -/** - * Input for updating a Dataset's visibility - */ -export class UpdateDatasetVisibilityDto extends S.Class("UpdateDatasetVisibilityDto")({ - /** - * The new visibility level for the dataset - */ - "visibility": DatasetVisibility -}) {} - -export class DatasetsOwnerListParams extends S.Struct({ - "limit": S.optionalWith(S.Int, { nullable: true }), - "page": S.optionalWith(S.Int, { nullable: true }), - "sort_by": S.optionalWith(S.String, { nullable: true }), - "direction": S.optionalWith(S.String, { nullable: true }), - "indexing_chains": S.optionalWith(S.String, { nullable: true }), - "keywords": S.optionalWith(S.String, { nullable: true }), - "last_updated": S.optionalWith(LastUpdatedBucket, { nullable: true }) -}) {} - 
-export class OwnerDatasetListResponse extends S.Class("OwnerDatasetListResponse")({ - /** - * List of the datasets being returned in this page - */ - "datasets": S.Array(Dataset), - /** - * If true, there are more datasets that can be fetched - */ - "has_next_page": S.Boolean, - /** - * Total number of datasets matching the query filters - */ - "total_count": S.Int -}) {} - -export class DatasetsOwnerSearchParams extends S.Struct({ - "search": S.String, - "limit": S.optionalWith(S.Int, { nullable: true }), - "page": S.optionalWith(S.Int, { nullable: true }), - "indexing_chains": S.optionalWith(S.String, { nullable: true }), - "keywords": S.optionalWith(S.String, { nullable: true }), - "last_updated": S.optionalWith(LastUpdatedBucket, { nullable: true }) -}) {} - -export class LivenessResponse extends S.Class("LivenessResponse")({ - "status": S.String -}) {} - -export class ServiceStatus extends S.Class("ServiceStatus")({ - "error": S.optionalWith(S.String, { nullable: true }), - "status": S.String -}) {} - -export class ReadinessChecks extends S.Class("ReadinessChecks")({ - "database": ServiceStatus -}) {} - -export class ReadinessResponse extends S.Class("ReadinessResponse")({ - "checks": ReadinessChecks, - "status": S.String -}) {} diff --git a/packages/amp/src/registry/api.ts b/packages/amp/src/registry/api.ts index 6489041..504c605 100644 --- a/packages/amp/src/registry/api.ts +++ b/packages/amp/src/registry/api.ts @@ -89,12 +89,12 @@ const listDatasets = HttpApiEndpoint.get( .addError(Errors.RegistryDatabaseError) // ----------------------------------------------------------------------------- -// GET /api/vX/datasets/{namespace}/{name}/versions/{version}/manifest +// GET /api/vX/datasets/{namespace}/{name} // ----------------------------------------------------------------------------- -const getDataset = HttpApiEndpoint.get( +const getDatasetByFqdn = HttpApiEndpoint.get( "getDatasetByFqdn" -)`/datasets/${datasetNamespaceParam}/${datasetNameParam}/versions/${datasetRevisionParam}/manifest` +)`/datasets/${datasetNamespaceParam}/${datasetNameParam}` .addSuccess(Domain.Dataset) .addError(Errors.DatasetConversionError) .addError(Errors.DatasetNotFoundError) @@ -242,7 +242,7 @@ const getLatestDatasetManifest = HttpApiEndpoint.get( // ----------------------------------------------------------------------------- const getDatasetManifest = HttpApiEndpoint.get( - "getLatestDatasetManifest" + "getDatasetManifest" )`/datasets/${datasetNamespaceParam}/${datasetNameParam}/versions/${datasetRevisionParam}/manifest` .addSuccess(Domain.DatasetGetManifestResponse) .addError(Errors.InvalidSelectorError) @@ -262,7 +262,7 @@ const getDatasetManifest = HttpApiEndpoint.get( */ export class DatasetsApiGroup extends HttpApiGroup.make("datasets") .add(listDatasets) - .add(getDataset) + .add(getDatasetByFqdn) .add(getDatasetCountsByChain) .add(getDatasetCountsByKeyword) .add(getDatasetCountsByLastUpdated)