From 93c02bb798c367ac3e7204230d83b559b71205dd Mon Sep 17 00:00:00 2001 From: strtgbb <146047128+strtgbb@users.noreply.github.com> Date: Fri, 30 Jan 2026 11:08:03 -0500 Subject: [PATCH 1/5] Merge pull request #1358 from Altinity/regression_fixes_25.8 Antalya 25.8 - Fix regression cancellation, reduce regression timeout, fix flaky check --- .github/workflows/master.yml | 4 +-- .github/workflows/pull_request.yml | 26 +++++++++---------- .../workflows/regression-reusable-suite.yml | 8 +++--- ci/defs/job_configs.py | 2 +- ci/praktika/yaml_additional_templates.py | 4 +-- ci/workflows/pull_request.py | 2 +- 6 files changed, 23 insertions(+), 23 deletions(-) diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index 996d503a69f0..758a897f8aa8 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -4182,7 +4182,7 @@ jobs: commit: 979bb27171f92724bcd8f086989ba623f2e03fdc arch: release build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} - timeout_minutes: 300 + timeout_minutes: 210 workflow_config: ${{ needs.config_workflow.outputs.data }} RegressionTestsAarch64: needs: [config_workflow, build_arm_binary] @@ -4194,7 +4194,7 @@ jobs: commit: 979bb27171f92724bcd8f086989ba623f2e03fdc arch: aarch64 build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} - timeout_minutes: 300 + timeout_minutes: 210 workflow_config: ${{ needs.config_workflow.outputs.data }} SignRelease: diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 875d1043f798..425581b3cd0a 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -278,7 +278,7 @@ jobs: build_amd_debug: runs-on: [self-hosted, altinity-on-demand, altinity-builder] - needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest] + needs: [config_workflow, dockers_build_amd, 
dockers_build_arm, dockers_build_multiplatform_manifest, fast_test] if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9kZWJ1Zyk=') }} name: "Build (amd_debug)" outputs: @@ -323,7 +323,7 @@ jobs: build_amd_release: runs-on: [self-hosted, altinity-on-demand, altinity-builder] - needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest] + needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, fast_test] if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9yZWxlYXNlKQ==') }} name: "Build (amd_release)" outputs: @@ -368,7 +368,7 @@ jobs: build_amd_asan: runs-on: [self-hosted, altinity-on-demand, altinity-builder] - needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest] + needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, fast_test] if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9hc2FuKQ==') }} name: "Build (amd_asan)" outputs: @@ -413,7 +413,7 @@ jobs: build_amd_tsan: runs-on: [self-hosted, altinity-on-demand, altinity-builder] - needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest] + needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, fast_test] if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF90c2FuKQ==') }} name: "Build (amd_tsan)" outputs: @@ -458,7 +458,7 @@ jobs: build_amd_msan: runs-on: [self-hosted, altinity-on-demand, altinity-builder] - needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest] + needs: [config_workflow, 
dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, fast_test] if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9tc2FuKQ==') }} name: "Build (amd_msan)" outputs: @@ -503,7 +503,7 @@ jobs: build_amd_ubsan: runs-on: [self-hosted, altinity-on-demand, altinity-builder] - needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest] + needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, fast_test] if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF91YnNhbik=') }} name: "Build (amd_ubsan)" outputs: @@ -548,7 +548,7 @@ jobs: build_amd_binary: runs-on: [self-hosted, altinity-on-demand, altinity-builder] - needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest] + needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, fast_test] if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9iaW5hcnkp') }} name: "Build (amd_binary)" outputs: @@ -593,7 +593,7 @@ jobs: build_arm_release: runs-on: [self-hosted, altinity-on-demand, altinity-builder] - needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest] + needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, fast_test] if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFybV9yZWxlYXNlKQ==') }} name: "Build (arm_release)" outputs: @@ -638,7 +638,7 @@ jobs: build_arm_coverage: runs-on: [self-hosted, altinity-on-demand, altinity-builder] - needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest] + 
needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, fast_test] if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFybV9jb3ZlcmFnZSk=') }} name: "Build (arm_coverage)" outputs: @@ -683,7 +683,7 @@ jobs: build_arm_binary: runs-on: [self-hosted, altinity-on-demand, altinity-builder] - needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest] + needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, fast_test] if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFybV9iaW5hcnkp') }} name: "Build (arm_binary)" outputs: @@ -2167,7 +2167,7 @@ jobs: fi stateless_tests_amd_asan_flaky_check: - runs-on: [self-hosted, altinity-on-demand, altinity-style-checker] + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, build_amd_asan] if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYXNhbiwgZmxha3kgY2hlY2sp') }} name: "Stateless tests (amd_asan, flaky check)" @@ -4047,7 +4047,7 @@ jobs: commit: 979bb27171f92724bcd8f086989ba623f2e03fdc arch: release build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} - timeout_minutes: 300 + timeout_minutes: 210 workflow_config: ${{ needs.config_workflow.outputs.data }} RegressionTestsAarch64: needs: [config_workflow, build_arm_binary] @@ -4059,7 +4059,7 @@ jobs: commit: 979bb27171f92724bcd8f086989ba623f2e03fdc arch: aarch64 build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} - timeout_minutes: 300 + timeout_minutes: 210 workflow_config: ${{ 
needs.config_workflow.outputs.data }} FinishCIReport: diff --git a/.github/workflows/regression-reusable-suite.yml b/.github/workflows/regression-reusable-suite.yml index 45b2b09e06bb..dfcabb63da72 100644 --- a/.github/workflows/regression-reusable-suite.yml +++ b/.github/workflows/regression-reusable-suite.yml @@ -168,7 +168,7 @@ jobs: exit $EXITCODE - name: 📊 Set Commit Status - if: always() && inputs.set_commit_status + if: ${{ !cancelled() && inputs.set_commit_status }} env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} JOB_OUTCOME: ${{ steps.run_suite.outcome }} @@ -176,16 +176,16 @@ jobs: run: python3 .github/set_builds_status.py - name: 📝 Create and upload logs - if: always() + if: ${{ !cancelled() }} run: .github/create_and_upload_logs.sh 1 - name: 📤 Upload logs to results database - if: always() + if: ${{ !cancelled() }} timeout-minutes: 20 run: .github/upload_results_to_database.sh 1 - uses: actions/upload-artifact@v4 - if: always() + if: ${{ !cancelled() }} with: name: ${{ format('{0}{1}-artifacts-{2}{3}', inputs.job_name != '' && inputs.job_name || inputs.suite_name, inputs.part != '' && format('_{0}', inputs.part) || '', inputs.runner_arch, contains(inputs.extra_args, '--use-keeper') && '_keeper' || '_zookeeper') }} path: ${{ env.artifact_paths }} diff --git a/ci/defs/job_configs.py b/ci/defs/job_configs.py index 671752a26b3c..afb335801410 100644 --- a/ci/defs/job_configs.py +++ b/ci/defs/job_configs.py @@ -349,7 +349,7 @@ class JobConfigs: stateless_tests_flaky_pr_jobs = common_ft_job_config.parametrize( Job.ParamSet( parameter="amd_asan, flaky check", - runs_on=RunnerLabels.AMD_SMALL_MEM, + runs_on=RunnerLabels.FUNC_TESTER_AMD, requires=[ArtifactNames.CH_AMD_ASAN], ), ) diff --git a/ci/praktika/yaml_additional_templates.py b/ci/praktika/yaml_additional_templates.py index ed0e48cc3cf8..000388936a95 100644 --- a/ci/praktika/yaml_additional_templates.py +++ b/ci/praktika/yaml_additional_templates.py @@ -71,7 +71,7 @@ class AltinityWorkflowTemplates: commit: 
{REGRESSION_HASH} arch: release build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} - timeout_minutes: 300 + timeout_minutes: 210 workflow_config: ${{ needs.config_workflow.outputs.data }} RegressionTestsAarch64: needs: [config_workflow, build_arm_binary] @@ -83,7 +83,7 @@ class AltinityWorkflowTemplates: commit: {REGRESSION_HASH} arch: aarch64 build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} - timeout_minutes: 300 + timeout_minutes: 210 workflow_config: ${{ needs.config_workflow.outputs.data }} """, "SignRelease": r""" diff --git a/ci/workflows/pull_request.py b/ci/workflows/pull_request.py index 1b3dfb141c57..d42ad12c00a1 100644 --- a/ci/workflows/pull_request.py +++ b/ci/workflows/pull_request.py @@ -34,7 +34,7 @@ job.set_dependency( [ # JobNames.STYLE_CHECK, # NOTE (strtgbb): we don't run style check - # JobNames.FAST_TEST, # NOTE (strtgbb): this takes too long, revisit later + JobNames.FAST_TEST, # JobConfigs.tidy_build_arm_jobs[0].name, # NOTE (strtgbb): this takes too long, revisit later ] ) From 88b6f1dedcd8e0ddd8a16dddc19522697940f735 Mon Sep 17 00:00:00 2001 From: strtgbb <146047128+strtgbb@users.noreply.github.com> Date: Wed, 25 Feb 2026 09:54:03 -0500 Subject: [PATCH 2/5] add missing regression suites --- .github/workflows/regression.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml index 239db719b6e1..2a94b172abbb 100644 --- a/.github/workflows/regression.yml +++ b/.github/workflows/regression.yml @@ -126,7 +126,7 @@ jobs: strategy: fail-fast: false matrix: - SUITE: [aes_encryption, atomic_insert, base_58, data_types, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, functions, kafka, kerberos, key_value, lightweight_delete, memory, part_moves_between_shards, selects, session_timezone, swarms, version, 
window_functions] + SUITE: [aes_encryption, atomic_insert, attach, base_58, clickhouse_keeper_failover, data_types, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, functions, jwt_authentication, kafka, kerberos, key_value, lightweight_delete, memory, part_moves_between_shards, selects, session_timezone, settings, swarms, version, window_functions] uses: ./.github/workflows/regression-reusable-suite.yml with: ref: ${{ inputs.commit }} From 7c75e844732fd73a1fc565b8722c673f1fed4e54 Mon Sep 17 00:00:00 2001 From: strtgbb <146047128+strtgbb@users.noreply.github.com> Date: Fri, 20 Feb 2026 13:13:53 -0500 Subject: [PATCH 3/5] set regression job.name properly --- .github/workflows/regression-reusable-suite.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/regression-reusable-suite.yml b/.github/workflows/regression-reusable-suite.yml index dfcabb63da72..2050eb109f36 100644 --- a/.github/workflows/regression-reusable-suite.yml +++ b/.github/workflows/regression-reusable-suite.yml @@ -74,6 +74,7 @@ jobs: SUITE_EXECUTABLE: ${{ inputs.suite_executable }} STORAGE: ${{ inputs.storage_path }} PART: ${{ inputs.part }} + REPORT_JOB_NAME: ${{ format('{0}{1}', inputs.job_name != '' && inputs.job_name || inputs.suite_name, inputs.part != '' && format('_{0}', inputs.part) || '') }} # AWS credentials AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} @@ -98,7 +99,7 @@ jobs: --local --collect-service-logs --output ${{ inputs.output_format }} - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" user.name="${GITHUB_ACTOR}" version="${{ fromJson(inputs.workflow_config).custom_data.version.string }}" package="$clickhouse_path" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name=$GITHUB_JOB job.retry=$GITHUB_RUN_ATTEMPT
job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" + --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" user.name="${GITHUB_ACTOR}" version="${{ fromJson(inputs.workflow_config).custom_data.version.string }}" package="$clickhouse_path" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$REPORT_JOB_NAME" job.retry=$GITHUB_RUN_ATTEMPT job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" --cicd --log raw.log ${{ inputs.flags != 'none' && inputs.flags || ''}} From a352d42e58bc95de0b8dadc8337afd9686bc99ba Mon Sep 17 00:00:00 2001 From: strtgbb <146047128+strtgbb@users.noreply.github.com> Date: Mon, 2 Mar 2026 09:09:15 -0500 Subject: [PATCH 4/5] fix parquet regression external storage args --- .github/workflows/regression.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml index 2a94b172abbb..ea8f78c9d4e0 100644 --- a/.github/workflows/regression.yml +++ b/.github/workflows/regression.yml @@ -346,7 +346,7 @@ jobs: build_sha: ${{ inputs.build_sha }} set_commit_status: true job_name: parquet_${{ matrix.STORAGE }} - regression_args: --storage ${{ matrix.STORAGE }} --aws-s3-bucket {{AWS_BUCKET}} --aws-s3-region {{AWS_REGION}} --aws-s3-key-id {{AWS_KEY_ID}} --aws-s3-access-key {{AWS_ACCESS_KEY}} + regression_args: --storage ${{ matrix.STORAGE }} --aws-s3-bucket {{AWS_BUCKET}} --aws-s3-region {{AWS_REGION}} --aws-s3-key-id {{AWS_KEY_ID}} --aws-s3-access-key {{AWS_ACCESS_KEY}} --only "/parquet/${{ matrix.STORAGE }}/*" secrets: inherit RBAC: From fb58295507fd29de8c9162c6bbd7768ccc3a4b3e Mon Sep 17 00:00:00 2001 From: strtgbb <146047128+strtgbb@users.noreply.github.com> Date: Thu, 19 Feb 2026 12:47:00 -0500 Subject: [PATCH 5/5] remove references to upstream from server/keeper dockerfile --- docker/keeper/Dockerfile | 
14 ++----- docker/server/Dockerfile.alpine | 15 +++---- docker/server/Dockerfile.ubuntu | 74 ++------------------------------- tests/ci/docker_server.py | 4 +- 4 files changed, 13 insertions(+), 94 deletions(-) diff --git a/docker/keeper/Dockerfile b/docker/keeper/Dockerfile index b09ebf2e3ee3..aae7fe7b524f 100644 --- a/docker/keeper/Dockerfile +++ b/docker/keeper/Dockerfile @@ -35,11 +35,8 @@ RUN arch=${TARGETARCH:-amd64} \ arm64) ln -sf /lib/ld-2.35.so /lib/ld-linux-aarch64.so.1 ;; \ esac -# lts / testing / prestable / etc -ARG REPO_CHANNEL="stable" -ARG REPOSITORY="https://packages.clickhouse.com/tgz/${REPO_CHANNEL}" -ARG VERSION="25.7.4.11" -ARG PACKAGES="clickhouse-keeper" +# NOTE (strtgbb): Removed install methods other than direct URL install to tidy the Dockerfile + ARG DIRECT_DOWNLOAD_URLS="" # user/group precreated explicitly with fixed uid/gid on purpose. @@ -63,12 +60,7 @@ RUN arch=${TARGETARCH:-amd64} \ && wget -c -q "$url" \ ; done \ else \ - for package in ${PACKAGES}; do \ - cd /tmp \ - && echo "Get ${REPOSITORY}/${package}-${VERSION}-${arch}.tgz" \ - && wget -c -q "${REPOSITORY}/${package}-${VERSION}-${arch}.tgz" \ - && wget -c -q "${REPOSITORY}/${package}-${VERSION}-${arch}.tgz.sha512" \ - ; done \ + exit 1; \ fi \ && cat *.tgz.sha512 | sha512sum -c \ && for file in *.tgz; do \ diff --git a/docker/server/Dockerfile.alpine b/docker/server/Dockerfile.alpine index f5aa9cff4792..39fde82efda5 100644 --- a/docker/server/Dockerfile.alpine +++ b/docker/server/Dockerfile.alpine @@ -32,11 +32,10 @@ RUN arch=${TARGETARCH:-amd64} \ arm64) ln -sf /lib/ld-2.35.so /lib/ld-linux-aarch64.so.1 ;; \ esac -# lts / testing / prestable / etc -ARG REPO_CHANNEL="stable" -ARG REPOSITORY="https://packages.clickhouse.com/tgz/${REPO_CHANNEL}" -ARG VERSION="25.7.4.11" -ARG PACKAGES="clickhouse-client clickhouse-server clickhouse-common-static" + + +# NOTE (strtgbb): Removed install methods other than direct URL install to tidy the Dockerfile + ARG DIRECT_DOWNLOAD_URLS="" # 
user/group precreated explicitly with fixed uid/gid on purpose. @@ -59,11 +58,7 @@ RUN arch=${TARGETARCH:-amd64} \ && wget -c -q "$url" \ ; done \ else \ - for package in ${PACKAGES}; do \ - echo "Get ${REPOSITORY}/${package}-${VERSION}-${arch}.tgz" \ - && wget -c -q "${REPOSITORY}/${package}-${VERSION}-${arch}.tgz" \ - && wget -c -q "${REPOSITORY}/${package}-${VERSION}-${arch}.tgz.sha512" \ - ; done \ + exit 1; \ fi \ && cat *.tgz.sha512 | sed 's:/output/:/tmp/:' | sha512sum -c \ && for file in *.tgz; do \ diff --git a/docker/server/Dockerfile.ubuntu b/docker/server/Dockerfile.ubuntu index 278f3b2cd489..6c76b8dee446 100644 --- a/docker/server/Dockerfile.ubuntu +++ b/docker/server/Dockerfile.ubuntu @@ -30,23 +30,12 @@ RUN sed -i "s|http://archive.ubuntu.com|${apt_archive}|g" /etc/apt/sources.list && busybox --install -s \ && rm -rf /var/lib/apt/lists/* /var/cache/debconf /tmp/* -ARG REPO_CHANNEL="stable" -ARG REPOSITORY="deb [signed-by=/usr/share/keyrings/clickhouse-keyring.gpg] https://packages.clickhouse.com/deb ${REPO_CHANNEL} main" -ARG VERSION="25.7.4.11" -ARG PACKAGES="clickhouse-client clickhouse-server clickhouse-common-static" - #docker-official-library:off # The part between `docker-official-library` tags is related to our builds -# set non-empty deb_location_url url to create a docker image -# from debs created by CI build, for example: -# docker build . --network host --build-arg version="21.4.1.6282" --build-arg deb_location_url="https://..." -t ... -ARG deb_location_url="" -ARG DIRECT_DOWNLOAD_URLS="" +# NOTE (strtgbb): Removed install methods other than direct URL install to tidy the Dockerfile -# set non-empty single_binary_location_url to create docker image -# from a single binary url (useful for non-standard builds - with sanitizers, for arm64). 
-ARG single_binary_location_url="" +ARG DIRECT_DOWNLOAD_URLS="" ARG TARGETARCH @@ -62,64 +51,7 @@ RUN if [ -n "${DIRECT_DOWNLOAD_URLS}" ]; then \ && rm -rf /tmp/* ; \ fi -# install from a web location with deb packages -RUN arch="${TARGETARCH:-amd64}" \ - && if [ -n "${deb_location_url}" ]; then \ - echo "installing from custom url with deb packages: ${deb_location_url}" \ - && rm -rf /tmp/clickhouse_debs \ - && mkdir -p /tmp/clickhouse_debs \ - && for package in ${PACKAGES}; do \ - { wget --progress=bar:force:noscroll "${deb_location_url}/${package}_${VERSION}_${arch}.deb" -P /tmp/clickhouse_debs || \ - wget --progress=bar:force:noscroll "${deb_location_url}/${package}_${VERSION}_all.deb" -P /tmp/clickhouse_debs ; } \ - || exit 1 \ - ; done \ - && dpkg -i /tmp/clickhouse_debs/*.deb \ - && rm -rf /tmp/* ; \ - fi - -# install from a single binary -RUN if [ -n "${single_binary_location_url}" ]; then \ - echo "installing from single binary url: ${single_binary_location_url}" \ - && rm -rf /tmp/clickhouse_binary \ - && mkdir -p /tmp/clickhouse_binary \ - && wget --progress=bar:force:noscroll "${single_binary_location_url}" -O /tmp/clickhouse_binary/clickhouse \ - && chmod +x /tmp/clickhouse_binary/clickhouse \ - && /tmp/clickhouse_binary/clickhouse install --user "clickhouse" --group "clickhouse" \ - && rm -rf /tmp/* ; \ - fi - -# The rest is the same in the official docker and in our build system -#docker-official-library:on - -# A fallback to installation from ClickHouse repository -# It works unless the clickhouse binary already exists -RUN clickhouse local -q 'SELECT 1' >/dev/null 2>&1 && exit 0 || : \ - ; apt-get update \ - && apt-get install --yes --no-install-recommends \ - dirmngr \ - gnupg2 \ - && mkdir -p /etc/apt/sources.list.d \ - && GNUPGHOME=$(mktemp -d) \ - && GNUPGHOME="$GNUPGHOME" gpg --batch --no-default-keyring \ - --keyring /usr/share/keyrings/clickhouse-keyring.gpg \ - --keyserver hkp://keyserver.ubuntu.com:80 \ - --recv-keys 
3a9ea1193a97b548be1457d48919f6bd2b48d754 \ - && rm -rf "$GNUPGHOME" \ - && chmod +r /usr/share/keyrings/clickhouse-keyring.gpg \ - && echo "${REPOSITORY}" > /etc/apt/sources.list.d/clickhouse.list \ - && echo "installing from repository: ${REPOSITORY}" \ - && apt-get update \ - && for package in ${PACKAGES}; do \ - packages="${packages} ${package}=${VERSION}" \ - ; done \ - && apt-get install --yes --no-install-recommends ${packages} || exit 1 \ - && rm -rf \ - /var/lib/apt/lists/* \ - /var/cache/debconf \ - /tmp/* \ - && apt-get autoremove --purge -yq dirmngr gnupg2 \ - && chmod ugo+Xrw -R /etc/clickhouse-server /etc/clickhouse-client -# The last chmod is here to make the next one is No-op in docker official library Dockerfile +# NOTE (strtgbb): Removed install methods other than direct URL install to tidy the Dockerfile # post install # we need to allow "others" access to clickhouse folder, because docker container diff --git a/tests/ci/docker_server.py b/tests/ci/docker_server.py index c0bb7239affb..2abb41a5d60e 100644 --- a/tests/ci/docker_server.py +++ b/tests/ci/docker_server.py @@ -184,8 +184,8 @@ def buildx_args( args = [ f"--platform=linux/{arch}", f"--label=build-url={GITHUB_RUN_URL}", - f"--label=com.clickhouse.build.githash={git.sha}", - f"--label=com.clickhouse.build.version={version}", + f"--label=com.altinity.build.githash={git.sha}", + f"--label=com.altinity.build.version={version}", ] if direct_urls: args.append(f"--build-arg=DIRECT_DOWNLOAD_URLS='{' '.join(direct_urls)}'")