Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,8 @@ resources:
database_instance_name: ${resources.database_instances.instance1.name}
logical_database_name: ${resources.database_catalogs.catalog1.database_name}
spec:
source_table_full_name: samples.nyctaxi.trips
# Use a unique source table per test run to avoid hitting the 20-table-per-source limit
source_table_full_name: main.test_synced_$UNIQUE_NAME.trips_source
scheduling_policy: SNAPSHOT
primary_key_columns:
- tpep_pickup_datetime
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
#!/bin/bash

# Tear down the per-run source table and schema created by the matching
# init script. Expects $CLI (databricks CLI binary) and $UNIQUE_NAME to be
# set by the test harness. Each delete is best-effort (|| true) so teardown
# never fails the test if the objects were already removed.
echo "Cleaning up temporary source table"
$CLI tables delete "main.test_synced_$UNIQUE_NAME.trips_source" || true
$CLI schemas delete "main.test_synced_$UNIQUE_NAME" || true
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
#!/bin/bash

# Create a unique per-run source table so repeated test runs do not hit the
# 20-table-per-source limit on synced database tables. Expects $CLI,
# $UNIQUE_NAME and $TEST_DEFAULT_WAREHOUSE_ID to be set by the test harness.
echo "Creating temporary source table: main.test_synced_$UNIQUE_NAME.trips_source"

# Create the schema using the CLI (jq trims the output to the created name).
$CLI schemas create "test_synced_$UNIQUE_NAME" main -o json | jq '{full_name}'

# Create the source table from samples.nyctaxi.trips via the SQL Statement
# Execution API. Build the JSON payload with jq -n --arg so the values are
# properly JSON-escaped instead of being interpolated into a hand-built string.
payload=$(jq -n \
  --arg warehouse_id "$TEST_DEFAULT_WAREHOUSE_ID" \
  --arg statement "CREATE TABLE main.test_synced_$UNIQUE_NAME.trips_source AS SELECT * FROM samples.nyctaxi.trips LIMIT 10" \
  '{warehouse_id: $warehouse_id, statement: $statement, wait_timeout: "45s"}')

# MSYS_NO_PATHCONV=1 prevents Git Bash on Windows from converting /api/... to
# C:/Program Files/Git/api/...
MSYS_NO_PATHCONV=1 $CLI api post "/api/2.0/sql/statements/" --json "$payload" > /dev/null
12 changes: 12 additions & 0 deletions acceptance/bundle/invariant/no_drift/script
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,12 @@
# Copy data files to test directory
cp -r "$TESTDIR/../data/." . &> LOG.cp

# Run the config-specific init script if present. It is sourced (not executed)
# so any variables it sets remain visible to this script; its output is
# captured in LOG.init.
INIT_SCRIPT="$TESTDIR/../configs/$INPUT_CONFIG-init.sh"
if [ -f "$INIT_SCRIPT" ]; then
source "$INIT_SCRIPT" &> LOG.init
fi

envsubst < $TESTDIR/../configs/$INPUT_CONFIG > databricks.yml

cp databricks.yml LOG.config
Expand All @@ -16,6 +22,12 @@ cat LOG.validate | contains.py '!panic' '!internal error' > /dev/null
# Teardown handler installed via `trap cleanup EXIT` below: destroys the
# bundle, checks its log for crashes, then runs the optional per-config
# cleanup script.
cleanup() {
trace $CLI bundle destroy --auto-approve &> LOG.destroy
cat LOG.destroy | contains.py '!panic' '!internal error' > /dev/null

# Run the config-specific cleanup script if present. It runs AFTER destroy
# so fixtures created outside the bundle (e.g. a temporary source table)
# are removed last; output is captured in LOG.cleanup.
CLEANUP_SCRIPT="$TESTDIR/../configs/$INPUT_CONFIG-cleanup.sh"
if [ -f "$CLEANUP_SCRIPT" ]; then
source "$CLEANUP_SCRIPT" &> LOG.cleanup
fi
}

trap cleanup EXIT
Expand Down
9 changes: 9 additions & 0 deletions acceptance/bundle/invariant/test.toml
Original file line number Diff line number Diff line change
Expand Up @@ -35,3 +35,12 @@ EnvMatrix.INPUT_CONFIG = [
"synced_database_table.yml.tmpl",
"volume.yml.tmpl",
]

# Fake SQL Statement Execution endpoint so local (stub-server) test runs
# can "create" the temporary source table without a real warehouse.
[[Server]]
Pattern = "POST /api/2.0/sql/statements/"
Response.Body = '{"status": {"state": "SUCCEEDED"}, "manifest": {"schema": {"columns": []}}}'

# Fake table-delete endpoint so the per-config cleanup script succeeds
# against the stub server.
[[Server]]
Pattern = "DELETE /api/2.1/unity-catalog/tables/{name}"
Response.Body = '{"status": "OK"}'
11 changes: 9 additions & 2 deletions bundle/direct/dresources/resources.yml
Original file line number Diff line number Diff line change
Expand Up @@ -109,6 +109,13 @@ resources:

# database_instances: no special config

# database_catalogs: ignore fields the backend does not echo back
database_catalogs:
ignore_remote_changes:
# Backend does not set this:
- create_database_if_not_exists

# synced_database_tables: ignore fields the backend reports only via effective_ counterparts
synced_database_tables:
ignore_remote_changes:
# Backend does not set these fields in response (it sets effective_ counterparts instead)
- database_instance_name
- logical_database_name