4 changes: 2 additions & 2 deletions .github/workflows/master.yml
@@ -4178,7 +4178,7 @@ jobs:
secrets: inherit
with:
runner_type: altinity-on-demand, altinity-regression-tester
-commit: c07440a1ad14ffc5fc49ce90dff2f40c2e5f364d
+commit: f4c832bb4047e55544ebbf85a02c6364605117c9
arch: release
build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
timeout_minutes: 300
@@ -4190,7 +4190,7 @@ jobs:
secrets: inherit
with:
runner_type: altinity-on-demand, altinity-regression-tester-aarch64
-commit: c07440a1ad14ffc5fc49ce90dff2f40c2e5f364d
+commit: f4c832bb4047e55544ebbf85a02c6364605117c9
arch: aarch64
build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
timeout_minutes: 300
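The build_sha input above uses the standard GitHub Actions idiom for a conditional: the expression language has no ternary operator, so workflows chain && and ||. A minimal Python sketch of how that expression resolves (function and parameter names are illustrative, not part of the workflow):

def resolve_build_sha(event_name: str, pr_head_sha: str, push_sha: str) -> str:
    # Mirrors: github.event_name == 'pull_request'
    #          && github.event.pull_request.head.sha || github.sha
    # In GitHub expressions, `a && b || c` behaves like a ternary: b when a
    # is truthy, otherwise c (safe here because a head SHA is never falsy).
    return pr_head_sha if event_name == "pull_request" else push_sha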
4 changes: 2 additions & 2 deletions .github/workflows/pull_request.yml
@@ -4134,7 +4134,7 @@ jobs:
secrets: inherit
with:
runner_type: altinity-on-demand, altinity-regression-tester
-commit: c07440a1ad14ffc5fc49ce90dff2f40c2e5f364d
+commit: f4c832bb4047e55544ebbf85a02c6364605117c9
arch: release
build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
timeout_minutes: 300
@@ -4146,7 +4146,7 @@ jobs:
secrets: inherit
with:
runner_type: altinity-on-demand, altinity-regression-tester-aarch64
-commit: c07440a1ad14ffc5fc49ce90dff2f40c2e5f364d
+commit: f4c832bb4047e55544ebbf85a02c6364605117c9
arch: aarch64
build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
timeout_minutes: 300
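This pin bump is the entire change in both workflow files: the regression suite is checked out at a full commit SHA rather than a branch, keeping CI runs reproducible. One way to sanity-check a new pin before merging is to ask the GitHub API whether the commit exists upstream; a minimal sketch using only the standard library:

import urllib.request

PIN = "f4c832bb4047e55544ebbf85a02c6364605117c9"
url = f"https://api.github.com/repos/Altinity/clickhouse-regression/commits/{PIN}"
with urllib.request.urlopen(url) as resp:  # raises HTTPError if the SHA is unknown
    assert resp.status == 200  # 200 means the pinned commit exists upstream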
70 changes: 70 additions & 0 deletions .github/workflows/regression.yml
@@ -1005,6 +1005,76 @@ jobs:
name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-${{ matrix.PART }}-${{ inputs.arch }}-artifacts
path: ${{ env.artifact_paths}}

S3Export:
if: |
fromJson(inputs.workflow_config).custom_data.ci_regression_jobs[0] == null ||
contains(fromJson(inputs.workflow_config).custom_data.ci_regression_jobs, 's3')
strategy:
fail-fast: false
matrix:
PART: [part, partition]
needs: [runner_labels_setup]
runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }}
timeout-minutes: ${{ inputs.timeout_minutes }}
steps:
- name: Checkout regression repo
uses: actions/checkout@v4
with:
repository: Altinity/clickhouse-regression
ref: ${{ inputs.commit }}
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
REPORTS_PATH=${{ runner.temp }}/reports_dir
SUITE=s3
PART=${{ matrix.PART }}
EOF
- name: Setup
run: .github/setup.sh
- name: Get deb url
env:
S3_BASE_URL: https://altinity-build-artifacts.s3.amazonaws.com/
PR_NUMBER: ${{ github.event.pull_request.number || 0 }}
run: |
mkdir -p $REPORTS_PATH
cat > $REPORTS_PATH/workflow_config.json << 'EOF'
${{ inputs.workflow_config }}
EOF

python3 .github/get-deb-url.py --github-env $GITHUB_ENV --workflow-config $REPORTS_PATH/workflow_config.json --s3-base-url $S3_BASE_URL --pr-number $PR_NUMBER --branch-name ${{ github.ref_name }} --commit-hash ${{ inputs.build_sha }} --binary

- name: Run ${{ env.SUITE }} suite
id: run_suite
run: EXITCODE=0;
python3
-u ${{ env.SUITE }}/regression.py
--clickhouse-binary-path ${{ env.clickhouse_path }}
--storage minio
--attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.PART }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
--only ":/try*" "minio/export tests/export ${{ matrix.PART }}/*"
${{ env.args }} || EXITCODE=$?;
.github/add_link_to_logs.sh;
exit $EXITCODE
- name: Set Commit Status
if: always()
run: python3 .github/set_builds_status.py
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
JOB_OUTCOME: ${{ steps.run_suite.outcome }}
SUITE_NAME: "Regression ${{ inputs.arch }} S3 Export ${{ matrix.PART }}"
- name: Create and upload logs
if: always()
run: .github/create_and_upload_logs.sh 1
- name: Upload logs to regression results database
if: always()
timeout-minutes: 20
run: .github/upload_results_to_database.sh 1
- uses: actions/upload-artifact@v4
if: always()
with:
name: ${{ env.SUITE }}-export-${{ matrix.PART }}-${{ inputs.arch }}-artifacts
path: ${{ env.artifact_paths}}

TieredStorage:
if: |
fromJson(inputs.workflow_config).custom_data.ci_regression_jobs[0] == null ||
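The new S3Export job is gated by the same pattern the other regression jobs use: it runs when no explicit job list is configured, or when 's3' is requested. In GitHub expressions, indexing an empty array yields null, which is what ci_regression_jobs[0] == null tests for. A Python sketch of the same gate (illustrative, not CI code):

import json

def should_run_s3_export(workflow_config: str) -> bool:
    # Mirrors the job's if: expression: run everything when the list is
    # empty (ci_regression_jobs[0] == null), or when 's3' is requested.
    jobs = json.loads(workflow_config)["custom_data"]["ci_regression_jobs"]
    return not jobs or "s3" in jobs

# should_run_s3_export('{"custom_data": {"ci_regression_jobs": []}}')       -> True
# should_run_s3_export('{"custom_data": {"ci_regression_jobs": ["s3"]}}')   -> True
# should_run_s3_export('{"custom_data": {"ci_regression_jobs": ["ldap"]}}') -> False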
2 changes: 1 addition & 1 deletion ci/praktika/yaml_additional_templates.py
@@ -35,7 +35,7 @@ class AltinityWorkflowTemplates:
echo "Workflow Run Report: [View Report]($REPORT_LINK)" >> $GITHUB_STEP_SUMMARY
"""
# Additional jobs
REGRESSION_HASH = "c07440a1ad14ffc5fc49ce90dff2f40c2e5f364d"
REGRESSION_HASH = "f4c832bb4047e55544ebbf85a02c6364605117c9"
ALTINITY_JOBS = {
"GrypeScan": r"""
GrypeScanServer:
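REGRESSION_HASH matches the commit pins updated in the generated workflows above, which suggests it is the templated source of those values; bumping it here keeps future regenerations in sync. A hypothetical sketch of the interpolation (the real praktika templating API may differ):

REGRESSION_HASH = "f4c832bb4047e55544ebbf85a02c6364605117c9"

# Hypothetical: each regression job template embeds the pinned hash, so every
# generated workflow checks out the same Altinity/clickhouse-regression commit.
job_snippet = f"""
  with:
    commit: {REGRESSION_HASH}
"""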
4 changes: 2 additions & 2 deletions src/Interpreters/InterpreterShowTablesQuery.cpp
@@ -232,10 +232,10 @@ BlockIO InterpreterShowTablesQuery::execute()
}
auto rewritten_query = getRewrittenQuery();
String database = getContext()->resolveDatabase(query.getFrom());
-if (DatabaseCatalog::instance().isDatalakeCatalog(database))
+if (query.databases || DatabaseCatalog::instance().isDatalakeCatalog(database))
{
auto context_copy = Context::createCopy(getContext());
-/// HACK To always show them in explicit "SHOW TABLES" queries
+/// HACK To always show them in explicit "SHOW TABLES" and "SHOW DATABASES" queries
context_copy->setSetting("show_data_lake_catalogs_in_system_tables", true);
return executeQuery(rewritten_query, context_copy, QueryFlags{ .internal = true }).second;
}
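The effect of this change is user-visible: SHOW DATABASES (query.databases) now takes the same path as SHOW TABLES against a datalake catalog, running the rewritten query on a context copy with show_data_lake_catalogs_in_system_tables forced on, so catalogs appear in explicit SHOW output while remaining hidden from system tables by default. The integration-test additions below exercise exactly this; a minimal sketch of the expected behavior (the node fixture and catalog name are illustrative):

# Assuming an integration-test node fixture as in test_database_glue:
assert "glue_catalog" in node.query("SHOW DATABASES")             # now listed
assert "my_table" in node.query("SHOW TABLES FROM glue_catalog")  # already worked
# system.tables still hides catalog tables unless the setting is enabled:
count = node.query(
    "SELECT count() FROM system.tables WHERE database = 'glue_catalog' "
    "SETTINGS show_data_lake_catalogs_in_system_tables = true"
)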
2 changes: 2 additions & 0 deletions tests/integration/test_database_glue/test.py
@@ -617,6 +617,8 @@ def test_system_tables(started_cluster):
node.query(f"SELECT count() FROM {CATALOG_NAME}.`{namespace}.{table_name}`")
)

+assert CATALOG_NAME in node.query("SHOW DATABASES")
+assert table_name in node.query(f"SHOW TABLES FROM {CATALOG_NAME}")
# system.tables
assert int(node.query(f"SELECT count() FROM system.tables WHERE database = '{CATALOG_NAME}' and table ilike '%{root_namespace}%' SETTINGS show_data_lake_catalogs_in_system_tables = true").strip()) == 4
assert int(node.query(f"SELECT count() FROM system.tables WHERE database = '{CATALOG_NAME}' and table ilike '%{root_namespace}%'").strip()) == 0