Skip unsupported timestamp type when reading views in BigQuery #71477
name: ci
on:
  push:
    branches:
      - master
  pull_request:
    paths-ignore:
      - 'docs/**'
      - '**.md'
  repository_dispatch:
    types: [test-with-secrets-command]
defaults:
  run:
    shell: bash --noprofile --norc -euo pipefail {0}
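    # Shell flags: --noprofile/--norc skip shell startup files for reproducibility,
    # -e exits on the first failing command, -u errors on unset variables, and
    # -o pipefail fails a pipeline if any stage fails. {0} is the placeholder the
    # runner replaces with the generated script path.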
env:
  # An envar that signals to tests we are executing in the CI environment
  CONTINUOUS_INTEGRATION: true
  # allow overriding Maven command
  MAVEN: ./mvnw
  MAVEN_OPTS: "-Xmx512M -XX:+ExitOnOutOfMemoryError"
  MAVEN_INSTALL_OPTS: "-Xmx3G -XX:+ExitOnOutOfMemoryError"
  MAVEN_FAST_INSTALL: "-B -V -T 1C -DskipTests -Dmaven.source.skip=true -Dair.check.skip-all"
  MAVEN_COMPILE_COMMITS: "-B --quiet -T 1C -DskipTests -Dmaven.source.skip=true -Dair.check.skip-all=true -Dmaven.javadoc.skip=true --no-snapshot-updates --no-transfer-progress -pl '!:trino-server-rpm'"
  MAVEN_GIB: "-P gib -Dgib.referenceBranch=refs/remotes/origin/${{ github.event_name == 'pull_request' && github.event.pull_request.base.ref || github.event.repository.default_branch }}"
  MAVEN_TEST: "-B -Dmaven.source.skip=true -Dair.check.skip-all --fail-at-end -P gib -Dgib.referenceBranch=refs/remotes/origin/${{ github.event_name == 'pull_request' && github.event.pull_request.base.ref || github.event.repository.default_branch }}"
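  # In both GIB flags above, the reference branch resolves to the PR base branch on
  # pull_request events and to the repository default branch (e.g. master) otherwise,
  # so incremental builds always diff against the correct base.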
  # Testcontainers kills image pulls if they don't make progress for > 30s and retries for 2m before failing. This means
  # that if an image doesn't download all of its layers within ~2m, any other concurrent pull can be killed, because
  # the Docker daemon only downloads 3 layers concurrently, which prevents the other pull from making any progress.
  # This value should be greater than the time taken for the longest image pull.
  TESTCONTAINERS_PULL_PAUSE_TIMEOUT: 600
  # used by actions/cache to retry the download after this time: https://github.com/actions/cache/blob/main/workarounds.md#cache-segment-restore-timeout
  SEGMENT_DOWNLOAD_TIMEOUT_MINS: 5
  CI_SKIP_SECRETS_PRESENCE_CHECKS: ${{ secrets.CI_SKIP_SECRETS_PRESENCE_CHECKS }}
  SECRETS_PRESENT: ${{ secrets.SECRETS_PRESENT }}
  PTL_TMP_DOWNLOAD_PATH: /tmp/pt_java_downloads
# Cancel previous PR builds.
concurrency:
  # Cancel all workflow runs except the latest within a concurrency group. This is achieved by defining a concurrency group for the PR.
  # Non-PR builds have singleton concurrency groups.
  # When triggered by repository_dispatch, add the expected SHA to avoid cancelling the run from the PR.
  group: |
    workflow=${{ github.workflow }},
    pr_number=${{ github.event_name == 'pull_request' && github.event.number || 'NA' }},
    dispatch_sha=${{ github.event_name == 'repository_dispatch' && github.event.client_payload.slash_command.args.named.sha || 'NA' }},
    commit_sha=${{ github.event_name != 'pull_request' && github.event_name != 'repository_dispatch' && github.sha || 'NA' }}
  cancel-in-progress: true
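# For a PR build the group renders to, with illustrative values:
#   workflow=ci, pr_number=12345, dispatch_sha=NA, commit_sha=NA
# so new pushes to the same PR share a group and cancel the previous run, while
# pushes to master embed the unique commit SHA and are never cancelled.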
jobs:
  maven-checks:
    runs-on: ubuntu-latest
    name: maven-checks ${{ matrix.java-version }}
    strategy:
      fail-fast: false
      matrix:
        include:
          - { java-version: 23, cache: 'true', cleanup-node: true }
          - { java-version: 24-ea, cache: 'restore', cleanup-node: true }
    timeout-minutes: 45
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0 # checkout all commits, as the build result depends on `git describe` equivalent
          ref: |
            ${{ github.event_name == 'repository_dispatch' &&
                github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha &&
                format('refs/pull/{0}/head', github.event.client_payload.pull_request.number) || '' }}
      - uses: ./.github/actions/setup
        timeout-minutes: 10
        with:
          cache: ${{ matrix.cache }}
          java-version: ${{ matrix.java-version }}
          cleanup-node: true
      - name: Check SPI backward compatibility
        run: |
          export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}"
          $MAVEN clean install ${MAVEN_FAST_INSTALL} -pl :trino-spi -am
          $MAVEN clean verify -B --strict-checksums -DskipTests -pl :trino-spi
      - name: Maven Checks
        run: |
          export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}"
          $MAVEN clean verify -B --strict-checksums -V -T 1C -DskipTests -P ci -pl '!:trino-server-rpm'
      - name: Remove Trino from local Maven repo to avoid caching it
        # Avoid caching artifacts built in this job, cache should only include dependencies
        if: steps.cache.outputs.cache-hit != 'true' && matrix.cache == 'true'
        run: rm -rf ~/.m2/repository/io/trino/trino-*
  artifact-checks:
    runs-on: ubuntu-latest
    timeout-minutes: 45
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0 # checkout all commits, as the build result depends on `git describe` equivalent
          ref: |
            ${{ github.event_name == 'repository_dispatch' &&
                github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha &&
                format('refs/pull/{0}/head', github.event.client_payload.pull_request.number) || '' }}
      - uses: ./.github/actions/setup
        timeout-minutes: 10
        with:
          cache: 'restore'
          cleanup-node: true
      - name: Maven Install
        run: |
          export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}"
          $MAVEN clean install ${MAVEN_FAST_INSTALL} -pl '!:trino-docs,!:trino-server-rpm'
      - name: Test Server RPM
        run: |
          export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}"
          $MAVEN verify -B --strict-checksums -P ci -pl :trino-server-rpm
      - name: Test JDBC shading
        # Run only integration tests to verify JDBC driver shading
        run: |
          export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}"
          $MAVEN failsafe:integration-test failsafe:verify -B --strict-checksums -P ci -pl :trino-jdbc
      - name: Clean Maven Output
        run: $MAVEN clean -pl '!:trino-server,!:trino-cli'
      - uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf # v3
        with:
          platforms: arm64,ppc64le
      - name: Build and Test Docker Image
        run: core/docker/build.sh
  check-commits-dispatcher:
    runs-on: ubuntu-latest
    if: github.event_name == 'pull_request'
    outputs:
      matrix: ${{ steps.set-matrix.outputs.matrix }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0 # checkout all commits to be able to determine merge base
      - name: Block illegal commits
        uses: trinodb/github-actions/block-commits@c2991972560c5219d9ae5fb68c0c9d687ffcdd10
        with:
          action-merge: fail
          action-fixup: none
      - name: Set matrix (dispatch commit checks)
        id: set-matrix
        run: |
          # Make sure the PR branch contains the compile-commit composite job
          if git merge-base --is-ancestor $( git rev-list HEAD -- .github/actions/compile-commit/action.yml | tail -n 1 ) ${{ github.event.pull_request.head.sha }}
          then
            # The HEAD commit of the PR can be safely ignored since it's already compiled in other jobs
            # This is achieved by adding a tilde (~) after the HEAD sha
            git log --reverse --pretty=format:'%H,%T,"%s"' refs/remotes/origin/${{ github.event.pull_request.base.ref }}..${{ github.event.pull_request.head.sha }}~ | ./.github/bin/prepare-check-commits-matrix.py > commit-matrix.json
          else
            echo -n '' > commit-matrix.json
          fi
          echo "Commit matrix: $(jq '.' commit-matrix.json)"
          echo "matrix=$(jq -c '.' commit-matrix.json)" >> $GITHUB_OUTPUT
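    # prepare-check-commits-matrix.py receives one CSV line per commit (hash, tree,
    # quoted subject) and emits a GHA job matrix; a plausible shape (hypothetical, the
    # script's output is not shown here) is {"include": [{"commit": "<sha>", ...}]},
    # since the check-commit job below reads matrix.commit for each entry.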
  check-commit:
    needs: check-commits-dispatcher
    runs-on: ubuntu-latest
    if: github.event_name == 'pull_request' && needs.check-commits-dispatcher.outputs.matrix != ''
    strategy:
      fail-fast: false
      matrix: ${{ fromJson(needs.check-commits-dispatcher.outputs.matrix) }}
    steps:
      - uses: actions/checkout@v4
        if: matrix.commit != ''
        with:
          fetch-depth: 0 # checkout all commits to be able to determine merge base
          ref: ${{ matrix.commit }}
      # This composite job must be entirely standalone, and checked out from the correct commit before being executed.
      # It can't accept any parameters defined in this workflow, because the values of those parameters would always be taken from
      # PR HEAD since that is the commit the workflow is started for. This could lead to problems if those parameters were changed
      # in the middle of a PR branch.
      - uses: ./.github/actions/compile-commit
        if: matrix.commit != ''
        with:
          base_ref: ${{ github.event.pull_request.base.ref }}
  error-prone-checks:
    runs-on: ubuntu-latest
    timeout-minutes: 45
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0 # checkout all commits to be able to determine merge base for GIB
          ref: |
            ${{ github.event_name == 'repository_dispatch' &&
                github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha &&
                format('refs/pull/{0}/head', github.event.client_payload.pull_request.number) || '' }}
      - uses: ./.github/actions/setup
        timeout-minutes: 10
        with:
          cache: restore
      - name: Maven Install
        run: |
          # build everything to make sure dependencies of impacted modules are present
          export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}"
          $MAVEN clean install ${MAVEN_FAST_INSTALL} ${MAVEN_GIB} -pl '!:trino-docs,!:trino-server,!:trino-server-rpm'
      - name: Error Prone Checks
        run: |
          export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}"
          # Skip checks, these are run in the `maven-checks` job and e.g. checkstyle is expensive.
          $MAVEN ${MAVEN_TEST} -T 1C clean compile test-compile -DskipTests -Dair.check.skip-all=true ${MAVEN_GIB} -Dgib.buildUpstream=never -P errorprone-compiler \
            -pl '!:trino-docs,!:trino-server,!:trino-server-rpm'
  test-jdbc-compatibility:
    runs-on: ubuntu-latest
    timeout-minutes: 30
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0 # checkout tags so version in Manifest is set properly
          ref: |
            ${{ github.event_name == 'repository_dispatch' &&
                github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha &&
                format('refs/pull/{0}/head', github.event.client_payload.pull_request.number) || '' }}
      - uses: ./.github/actions/setup
        timeout-minutes: 10
        with:
          cache: restore
      - name: Maven Install
        run: |
          export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}"
          $MAVEN clean install ${MAVEN_FAST_INSTALL} ${MAVEN_GIB} -Dgib.logImpactedTo=gib-impacted.log -pl '!:trino-docs,!:trino-server,!:trino-server-rpm'
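          # -Dgib.logImpactedTo writes the modules GIB considers impacted to
          # gib-impacted.log; the steps below run their tests only when the log is
          # missing (a full, non-incremental build) or lists their module.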
      - name: Test old JDBC vs current server
        id: tests-old
        run: |
          if [ ! -f gib-impacted.log ] || grep -q testing/trino-test-jdbc-compatibility-old-driver gib-impacted.log; then
            testing/trino-test-jdbc-compatibility-old-driver/bin/run_tests.sh
          fi
      - name: Test current JDBC vs old server
        id: tests-current
        if: always()
        run: |
          if [ ! -f gib-impacted.log ] || grep -q testing/trino-test-jdbc-compatibility-old-server gib-impacted.log; then
            $MAVEN test ${MAVEN_TEST} -pl :trino-test-jdbc-compatibility-old-server
          fi
      - name: Upload test results
        uses: ./.github/actions/process-test-results
        if: always()
        with:
          has-failed-tests: ${{ steps.tests-old.outcome == 'failure' || steps.tests-current.outcome == 'failure' }}
          upload-heap-dump: ${{ env.SECRETS_PRESENT == '' && github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository }}
  hive-tests:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        config:
          - config-hdp3
          # TODO: config-apache-hive3
    timeout-minutes: 60
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0 # checkout all commits to be able to determine merge base for GIB
          ref: |
            ${{ github.event_name == 'repository_dispatch' &&
                github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha &&
                format('refs/pull/{0}/head', github.event.client_payload.pull_request.number) || '' }}
      - uses: ./.github/actions/setup
        timeout-minutes: 10
        with:
          cache: restore
      - name: Install Hive Module
        run: |
          export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}"
          $MAVEN clean install ${MAVEN_FAST_INSTALL} ${MAVEN_GIB} -Dgib.logImpactedTo=gib-impacted.log -am -pl :trino-hive
      - name: Run Hive AWS Tests
        id: tests
        env:
          AWS_ACCESS_KEY_ID: ${{ vars.TRINO_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.TRINO_AWS_SECRET_ACCESS_KEY }}
          AWS_REGION: ${{ vars.TRINO_AWS_REGION }}
          S3_BUCKET: ${{ vars.TRINO_S3_BUCKET }}
          S3_BUCKET_ENDPOINT: "s3.${{ vars.TRINO_AWS_REGION }}.amazonaws.com"
        run: |
          if [ "${AWS_ACCESS_KEY_ID}" != "" ] && ( [ ! -f gib-impacted.log ] || grep -q plugin/trino-hive gib-impacted.log ); then
            $MAVEN test ${MAVEN_TEST} -pl :trino-hive -P aws-tests
          fi
      - name: Run Hive Unity Tests
        id: test-unity
        env:
          DATABRICKS_HOST: ${{ vars.TRINO_DATABRICKS_HOST }}
          DATABRICKS_TOKEN: ${{ secrets.TRINO_DATABRICKS_TOKEN }}
          DATABRICKS_UNITY_CATALOG_NAME: ${{ vars.TRINO_DATABRICKS_UNITY_CATALOG_NAME }}
        run: |
          if [ "${DATABRICKS_TOKEN}" != "" ] && ( [ ! -f gib-impacted.log ] || grep -q plugin/trino-hive gib-impacted.log ); then
            $MAVEN test ${MAVEN_TEST} -pl :trino-hive -P unity-tests
          fi
      - name: Upload test results
        uses: ./.github/actions/process-test-results
        if: always()
        with:
          has-failed-tests: ${{ steps.tests.outcome == 'failure' || steps.test-unity.outcome == 'failure' }}
          upload-heap-dump: ${{ env.SECRETS_PRESENT == '' && github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository }}
      - name: Update PR check
        uses: ./.github/actions/update-check
        if: >-
          failure() &&
          github.event_name == 'repository_dispatch' &&
          github.event.client_payload.slash_command.args.named.sha != '' &&
          github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha
        with:
          pull_request_number: ${{ github.event.client_payload.pull_request.number }}
          check_name: ${{ github.job }} (${{ matrix.config }}) with secrets
          conclusion: ${{ job.status }}
          github_token: ${{ secrets.GITHUB_TOKEN }}
  test-other-modules:
    runs-on: ubuntu-latest
    timeout-minutes: 60
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0 # checkout all commits to be able to determine merge base for GIB
          ref: |
            ${{ github.event_name == 'repository_dispatch' &&
                github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha &&
                format('refs/pull/{0}/head', github.event.client_payload.pull_request.number) || '' }}
      - uses: ./.github/actions/setup
        timeout-minutes: 10
        with:
          cache: restore
          cleanup-node: true
      - name: Maven Install
        run: |
          export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}"
          $MAVEN clean install ${MAVEN_FAST_INSTALL} ${MAVEN_GIB} -pl '!:trino-docs,!:trino-server,!:trino-server-rpm'
      - name: Maven Tests
        id: tests
        run: |
          $MAVEN test ${MAVEN_TEST} -pl '
            !:trino-base-jdbc,
            !:trino-bigquery,
            !:trino-cassandra,
            !:trino-clickhouse,
            !:trino-delta-lake,
            !:trino-docs,
            !:trino-druid,
            !:trino-elasticsearch,
            !:trino-exasol,
            !:trino-faulttolerant-tests,
            !:trino-filesystem,
            !:trino-filesystem-azure,
            !:trino-filesystem-gcs,
            !:trino-filesystem-manager,
            !:trino-filesystem-s3,
            !:trino-google-sheets,
            !:trino-hdfs,
            !:trino-hive,
            !:trino-hudi,
            !:trino-iceberg,
            !:trino-ignite,
            !:trino-jdbc,
            !:trino-kafka,
            !:trino-kudu,
            !:trino-main,
            !:trino-mariadb,
            !:trino-memory,
            !:trino-mongodb,
            !:trino-mysql,
            !:trino-opensearch,
            !:trino-oracle,
            !:trino-orc,
            !:trino-parquet,
            !:trino-phoenix5,
            !:trino-pinot,
            !:trino-postgresql,
            !:trino-redis,
            !:trino-redshift,
            !:trino-resource-group-managers,
            !:trino-server,
            !:trino-server-rpm,
            !:trino-singlestore,
            !:trino-snowflake,
            !:trino-sqlserver,
            !:trino-test-jdbc-compatibility-old-server,
            !:trino-tests,
            !:trino-thrift'
      - name: Upload test results
        uses: ./.github/actions/process-test-results
        if: always()
        with:
          has-failed-tests: ${{ steps.tests.outcome == 'failure' }}
          upload-heap-dump: ${{ env.SECRETS_PRESENT == '' && github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository }}
  build-test-matrix:
    runs-on: ubuntu-latest
    outputs:
      matrix: ${{ steps.set-matrix.outputs.matrix }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0 # checkout all commits to be able to determine merge base for GIB
          ref: |
            ${{ github.event_name == 'repository_dispatch' &&
                github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha &&
                format('refs/pull/{0}/head', github.event.client_payload.pull_request.number) || '' }}
      - uses: ./.github/actions/setup
        timeout-minutes: 10
        with:
          cache: restore
      - name: Update PR check
        uses: ./.github/actions/update-check
        if: >-
          github.event_name == 'repository_dispatch' &&
          github.event.client_payload.slash_command.args.named.sha != '' &&
          github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha
        with:
          pull_request_number: ${{ github.event.client_payload.pull_request.number }}
          github_token: ${{ secrets.GITHUB_TOKEN }}
      - name: Maven validate
        run: |
          export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}"
          $MAVEN validate ${MAVEN_FAST_INSTALL} ${MAVEN_GIB} -Dgib.logImpactedTo=gib-impacted.log -P disable-check-spi-dependencies -pl '!:trino-docs'
      - name: Set matrix
        id: set-matrix
        run: |
          # GIB doesn't run on master, so make sure the file always exists
          touch gib-impacted.log
          cat <<EOF > .github/test-matrix.yaml
          include:
            - modules:
                - client/trino-jdbc
                - plugin/trino-base-jdbc
                - plugin/trino-memory
                - plugin/trino-thrift
            - modules:
                - lib/trino-orc
                - lib/trino-parquet
            - modules:
                - lib/trino-filesystem
                - lib/trino-filesystem-azure
                - lib/trino-filesystem-manager
                - lib/trino-filesystem-s3
                - lib/trino-hdfs
            - { modules: core/trino-main }
            - { modules: lib/trino-filesystem-azure, profile: cloud-tests }
            - { modules: lib/trino-filesystem-gcs, profile: cloud-tests }
            - { modules: lib/trino-filesystem-s3, profile: cloud-tests }
            - { modules: lib/trino-hdfs, profile: cloud-tests }
            - { modules: plugin/trino-bigquery }
            - { modules: plugin/trino-bigquery, profile: cloud-tests-2 }
            - { modules: plugin/trino-cassandra }
            - { modules: plugin/trino-clickhouse }
            - { modules: plugin/trino-delta-lake }
            - { modules: plugin/trino-delta-lake, profile: cloud-tests }
            - { modules: plugin/trino-delta-lake, profile: fte-tests }
            - { modules: plugin/trino-druid }
            - { modules: plugin/trino-elasticsearch }
            - { modules: plugin/trino-exasol }
            - { modules: plugin/trino-google-sheets }
            - { modules: plugin/trino-hive }
            - { modules: plugin/trino-hive, profile: fte-tests }
            - { modules: plugin/trino-hive, profile: test-parquet }
            - { modules: plugin/trino-hudi }
            - { modules: plugin/trino-iceberg }
            - { modules: plugin/trino-iceberg, profile: cloud-tests }
            - { modules: plugin/trino-iceberg, profile: fte-tests }
            - { modules: plugin/trino-iceberg, profile: minio-and-avro }
            - { modules: plugin/trino-ignite }
            - { modules: plugin/trino-kafka }
            - { modules: plugin/trino-kudu }
            - { modules: plugin/trino-mariadb }
            - { modules: plugin/trino-mongodb }
            - { modules: plugin/trino-mysql }
            - { modules: plugin/trino-openlineage }
            - { modules: plugin/trino-opensearch }
            - { modules: plugin/trino-oracle }
            - { modules: plugin/trino-phoenix5 }
            - { modules: plugin/trino-pinot }
            - { modules: plugin/trino-postgresql }
            - { modules: plugin/trino-redis }
            - { modules: plugin/trino-redshift }
            - { modules: plugin/trino-redshift, profile: cloud-tests }
            - { modules: plugin/trino-redshift, profile: fte-tests }
            - { modules: plugin/trino-resource-group-managers }
            - { modules: plugin/trino-singlestore }
            - { modules: plugin/trino-snowflake }
            - { modules: plugin/trino-snowflake, profile: cloud-tests }
            - { modules: plugin/trino-sqlserver, runson: ubuntu-20.04 }
            - { modules: testing/trino-faulttolerant-tests, profile: default }
            - { modules: testing/trino-faulttolerant-tests, profile: test-fault-tolerant-delta }
            - { modules: testing/trino-faulttolerant-tests, profile: test-fault-tolerant-hive }
            - { modules: testing/trino-faulttolerant-tests, profile: test-fault-tolerant-iceberg }
            - { modules: testing/trino-tests }
          EOF
          ./.github/bin/build-matrix-from-impacted.py -v -i gib-impacted.log -m .github/test-matrix.yaml -o matrix.json
          echo "Matrix: $(jq '.' matrix.json)"
          echo "matrix=$(jq -c '.' matrix.json)" >> $GITHUB_OUTPUT
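    # build-matrix-from-impacted.py filters the matrix above down to entries whose
    # modules appear in gib-impacted.log (presumably keeping the full matrix when the
    # log is empty, as on master), emitting a standard GHA matrix along the lines of:
    #   {"include": [{"modules": "plugin/trino-hive", "profile": "fte-tests"}, ...]}
    # An empty result serializes as '{}', which the test job below uses to skip itself.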
  test:
    runs-on: ${{ matrix.runson || 'ubuntu-latest' }}
    needs: build-test-matrix
    if: needs.build-test-matrix.outputs.matrix != '{}'
    strategy:
      fail-fast: false
      matrix: ${{ fromJson(needs.build-test-matrix.outputs.matrix) }}
    timeout-minutes: 60
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0 # checkout all commits to be able to determine merge base for GIB
          ref: |
            ${{ github.event_name == 'repository_dispatch' &&
                github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha &&
                format('refs/pull/{0}/head', github.event.client_payload.pull_request.number) || '' }}
      - uses: ./.github/actions/setup
        timeout-minutes: 10
        with:
          cache: restore
          cleanup-node: ${{ format('{0}', matrix.modules == 'plugin/trino-singlestore' || matrix.modules == 'plugin/trino-exasol') }}
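          # format() stringifies the boolean result, since action inputs are always
          # strings; this passes 'true'/'false' rather than a bare boolean expression.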
          java-version: ${{ matrix.jdk != '' && matrix.jdk || '23' }}
      - name: Maven Install
        run: |
          export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}"
          $MAVEN clean install ${MAVEN_FAST_INSTALL} ${MAVEN_GIB} -am -pl "${{ matrix.modules }}"
      - name: Maven Tests
        id: tests
        if: >-
          matrix.modules != 'plugin/trino-singlestore'
          && ! (contains(matrix.modules, 'trino-bigquery') && contains(matrix.profile, 'cloud-tests-2'))
          && ! (contains(matrix.modules, 'trino-delta-lake') && contains(matrix.profile, 'cloud-tests'))
          && ! (contains(matrix.modules, 'trino-iceberg') && contains(matrix.profile, 'cloud-tests'))
          && ! (contains(matrix.modules, 'trino-redshift') && contains(matrix.profile, 'cloud-tests'))
          && ! (contains(matrix.modules, 'trino-redshift') && contains(matrix.profile, 'fte-tests'))
          && ! (contains(matrix.modules, 'trino-snowflake') && contains(matrix.profile, 'cloud-tests'))
          && ! (contains(matrix.modules, 'trino-filesystem-azure') && contains(matrix.profile, 'cloud-tests'))
          && ! (contains(matrix.modules, 'trino-filesystem-gcs') && contains(matrix.profile, 'cloud-tests'))
          && ! (contains(matrix.modules, 'trino-filesystem-s3') && contains(matrix.profile, 'cloud-tests'))
          && ! (contains(matrix.modules, 'trino-hdfs') && contains(matrix.profile, 'cloud-tests'))
        run: $MAVEN test ${MAVEN_TEST} -pl ${{ matrix.modules }} ${{ matrix.profile != '' && format('-P {0}', matrix.profile) || '' }}
      # Additional tests for selected modules
      - name: HDFS file system cache isolated JVM tests
        id: tests-hdfs-isolated
        if: contains(matrix.modules, 'trino-hdfs')
        run: |
          $MAVEN test ${MAVEN_TEST} -pl :trino-hdfs -P test-isolated-jvm-suites
      - name: Hadoop FileSystem Cloud Tests
        id: tests-hdfs
        env:
          AWS_ACCESS_KEY_ID: ${{ vars.TRINO_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.TRINO_AWS_SECRET_ACCESS_KEY }}
          AWS_REGION: ${{ vars.TRINO_AWS_REGION }}
          S3_BUCKET: ${{ vars.TRINO_S3_BUCKET }}
          S3_BUCKET_ENDPOINT: "s3.${{ vars.TRINO_AWS_REGION }}.amazonaws.com"
        if: >-
          contains(matrix.modules, 'trino-hdfs') && contains(matrix.profile, 'cloud-tests') &&
          (env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || env.AWS_ACCESS_KEY_ID != '' || env.AWS_SECRET_ACCESS_KEY != '')
        run: |
          $MAVEN test ${MAVEN_TEST} -pl :trino-hdfs -P cloud-tests
      - name: S3 FileSystem Cloud Tests
        id: tests-s3
        env:
          AWS_ACCESS_KEY_ID: ${{ vars.TRINO_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.TRINO_AWS_SECRET_ACCESS_KEY }}
          AWS_REGION: ${{ vars.TRINO_AWS_REGION }}
        if: >-
          contains(matrix.modules, 'trino-filesystem-s3') && contains(matrix.profile, 'cloud-tests') &&
          (env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || env.AWS_ACCESS_KEY_ID != '' || env.AWS_SECRET_ACCESS_KEY != '')
        run: |
          # Create an empty S3 bucket for S3 filesystem cloud tests and add the bucket name to GitHub environment variables
          .github/bin/s3/setup-empty-s3-bucket.sh
          EMPTY_S3_BUCKET=$(cat .github/bin/s3/.bucket-identifier)
          export EMPTY_S3_BUCKET
          $MAVEN test ${MAVEN_TEST} -pl ${{ matrix.modules }} ${{ format('-P {0}', matrix.profile) }}
      - name: Cleanup ephemeral S3 buckets
        env:
          AWS_REGION: ${{ vars.TRINO_AWS_REGION }}
          AWS_ACCESS_KEY_ID: ${{ vars.TRINO_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.TRINO_AWS_SECRET_ACCESS_KEY }}
        # Cancelled workflows may not have cleaned up the ephemeral bucket
        if: always()
        run: .github/bin/s3/delete-s3-bucket.sh || true
      - name: Azure FileSystem Cloud Tests
        id: tests-azure
        env:
          ABFS_FLAT_ACCOUNT: ${{ vars.AZURE_ABFS_FLAT_ACCOUNT }}
          ABFS_FLAT_ACCESS_KEY: ${{ secrets.AZURE_ABFS_FLAT_ACCESS_KEY }}
          ABFS_HIERARCHICAL_ACCOUNT: ${{ vars.AZURE_ABFS_HIERARCHICAL_ACCOUNT }}
          ABFS_HIERARCHICAL_ACCESS_KEY: ${{ secrets.AZURE_ABFS_HIERARCHICAL_ACCESS_KEY }}
          ABFS_OAUTH_TENANT_ID: ${{ vars.AZURE_ABFS_OAUTH_TENANT_ID }}
          ABFS_OAUTH_CLIENT_ID: ${{ vars.AZURE_ABFS_OAUTH_CLIENT_ID }}
          ABFS_OAUTH_CLIENT_SECRET: ${{ secrets.AZURE_ABFS_OAUTH_CLIENT_SECRET }}
        # Run tests only if any of the secrets are present
        if: >-
          contains(matrix.modules, 'trino-filesystem-azure') && contains(matrix.profile, 'cloud-tests') &&
          (env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || env.ABFS_FLAT_ACCESS_KEY != '' || env.ABFS_HIERARCHICAL_ACCESS_KEY != '' || env.ABFS_OAUTH_CLIENT_SECRET != '')
        run: |
          $MAVEN test ${MAVEN_TEST} -pl ${{ matrix.modules }} ${{ format('-P {0}', matrix.profile) }}
      - name: GCS FileSystem Cloud Tests
        id: tests-gcs
        env:
          GCP_CREDENTIALS_KEY: ${{ secrets.GCP_CREDENTIALS_KEY }}
        if: >-
          contains(matrix.modules, 'trino-filesystem-gcs') && contains(matrix.profile, 'cloud-tests') &&
          (env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || env.GCP_CREDENTIALS_KEY != '')
        run: |
          $MAVEN test ${MAVEN_TEST} -pl ${{ matrix.modules }} ${{ format('-P {0}', matrix.profile) }}
      - name: Cloud Delta Lake Tests
        id: tests-delta
        # Cloud tests are separate because they are time intensive, requiring cross-cloud network communication
        env:
          ABFS_CONTAINER: ${{ vars.AZURE_ABFS_HIERARCHICAL_CONTAINER }}
          ABFS_ACCOUNT: ${{ vars.AZURE_ABFS_HIERARCHICAL_ACCOUNT }}
          ABFS_ACCESSKEY: ${{ secrets.AZURE_ABFS_HIERARCHICAL_ACCESS_KEY }}
          AWS_ACCESS_KEY_ID: ${{ vars.TRINO_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.TRINO_AWS_SECRET_ACCESS_KEY }}
          AWS_REGION: ${{ vars.TRINO_AWS_REGION }}
          S3_BUCKET: ${{ vars.TRINO_S3_BUCKET }}
          GCP_CREDENTIALS_KEY: ${{ secrets.GCP_CREDENTIALS_KEY }}
          GCP_STORAGE_BUCKET: ${{ vars.GCP_STORAGE_BUCKET }}
        # Run tests if any of the secrets is present. Do not skip tests when a secret is renamed or its name has a typo.
        if: >-
          contains(matrix.modules, 'trino-delta-lake') && contains(matrix.profile, 'cloud-tests') &&
          (env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || env.ABFS_ACCOUNT != '' || env.ABFS_CONTAINER != '' || env.ABFS_ACCESSKEY != '' || env.AWS_ACCESS_KEY_ID != '' || env.AWS_SECRET_ACCESS_KEY != '' || env.GCP_CREDENTIALS_KEY != '')
        run: |
          $MAVEN test ${MAVEN_TEST} ${{ format('-P {0}', matrix.profile) }} -pl :trino-delta-lake \
            -Dtesting.azure-abfs-container="${ABFS_CONTAINER}" \
            -Dtesting.azure-abfs-account="${ABFS_ACCOUNT}" \
            -Dtesting.azure-abfs-access-key="${ABFS_ACCESSKEY}" \
            -Dtesting.gcp-storage-bucket="${GCP_STORAGE_BUCKET}" \
            -Dtesting.gcp-credentials-key="${GCP_CREDENTIALS_KEY}"
      - name: Memsql Tests
        id: tests-memsql
        env:
          MEMSQL_LICENSE: ${{ secrets.MEMSQL_LICENSE }}
        if: matrix.modules == 'plugin/trino-singlestore' && (env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || env.MEMSQL_LICENSE != '')
        run: |
          $MAVEN test ${MAVEN_TEST} -pl :trino-singlestore -Dmemsql.license=${MEMSQL_LICENSE}
      - name: Cloud BigQuery Tests
        id: tests-bq
        env:
          BIGQUERY_CREDENTIALS_KEY: ${{ secrets.BIGQUERY_CREDENTIALS_KEY }}
          GCP_STORAGE_BUCKET: ${{ vars.GCP_STORAGE_BUCKET }}
          BIGQUERY_TESTING_PROJECT_ID: ${{ vars.BIGQUERY_TESTING_PROJECT_ID }}
          BIGQUERY_TESTING_PARENT_PROJECT_ID: ${{ vars.BIGQUERY_TESTING_PARENT_PROJECT_ID }}
        if: matrix.modules == 'plugin/trino-bigquery' && !contains(matrix.profile, 'cloud-tests-2') && (env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || env.BIGQUERY_CREDENTIALS_KEY != '')
        run: |
          $MAVEN test ${MAVEN_TEST} -pl :trino-bigquery -Pcloud-tests-1 \
            -Dbigquery.credentials-key="${BIGQUERY_CREDENTIALS_KEY}" \
            -Dtesting.gcp-storage-bucket="${GCP_STORAGE_BUCKET}" \
            -Dtesting.bigquery-project-id="${BIGQUERY_TESTING_PROJECT_ID}" \
            -Dtesting.bigquery-parent-project-id="${BIGQUERY_TESTING_PARENT_PROJECT_ID}"
      - name: Cloud BigQuery Smoke Tests
        id: tests-bq-smoke
        env:
          BIGQUERY_CREDENTIALS_KEY: ${{ secrets.BIGQUERY_CREDENTIALS_KEY }}
          GCP_STORAGE_BUCKET: ${{ vars.GCP_STORAGE_BUCKET }}
        if: matrix.modules == 'plugin/trino-bigquery' && contains(matrix.profile, 'cloud-tests-2') && (env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || env.BIGQUERY_CREDENTIALS_KEY != '')
        run: |
          $MAVEN test ${MAVEN_TEST} -pl :trino-bigquery -Pcloud-tests-2 \
            -Dbigquery.credentials-key="${BIGQUERY_CREDENTIALS_KEY}" \
            -Dtesting.gcp-storage-bucket="${GCP_STORAGE_BUCKET}" \
            -Dtesting.alternate-bq-project-id=bigquery-cicd-alternate
      - name: Cloud BigQuery Case Insensitive Mapping Tests
        id: tests-bq-ci
        env:
          BIGQUERY_CASE_INSENSITIVE_CREDENTIALS_KEY: ${{ secrets.BIGQUERY_CASE_INSENSITIVE_CREDENTIALS_KEY }}
        if: matrix.modules == 'plugin/trino-bigquery' && !contains(matrix.profile, 'cloud-tests-2') && (env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || env.BIGQUERY_CASE_INSENSITIVE_CREDENTIALS_KEY != '')
        run: |
          $MAVEN test ${MAVEN_TEST} -pl :trino-bigquery -Pcloud-tests-case-insensitive-mapping -Dbigquery.credentials-key="${BIGQUERY_CASE_INSENSITIVE_CREDENTIALS_KEY}"
      - name: Cloud Snowflake Tests
        id: tests-snowflake
        env:
          SNOWFLAKE_URL: ${{ vars.SNOWFLAKE_URL }}
          SNOWFLAKE_USER: ${{ vars.SNOWFLAKE_USER }}
          SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
          SNOWFLAKE_DATABASE: ${{ vars.SNOWFLAKE_DATABASE }}
          SNOWFLAKE_ROLE: ${{ vars.SNOWFLAKE_ROLE }}
          SNOWFLAKE_WAREHOUSE: ${{ vars.SNOWFLAKE_WAREHOUSE }}
        if: matrix.modules == 'plugin/trino-snowflake' && contains(matrix.profile, 'cloud-tests') && (env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || env.SNOWFLAKE_URL != '')
        run: |
          $MAVEN test ${MAVEN_TEST} -pl :trino-snowflake -Pcloud-tests \
            -Dsnowflake.test.server.url="${SNOWFLAKE_URL}" \
            -Dsnowflake.test.server.user="${SNOWFLAKE_USER}" \
            -Dsnowflake.test.server.password="${SNOWFLAKE_PASSWORD}" \
            -Dsnowflake.test.server.database="${SNOWFLAKE_DATABASE}" \
            -Dsnowflake.test.server.role="${SNOWFLAKE_ROLE}" \
            -Dsnowflake.test.server.warehouse="${SNOWFLAKE_WAREHOUSE}"
      - name: Iceberg Cloud Tests
        id: tests-iceberg
        env:
          AWS_ACCESS_KEY_ID: ${{ vars.TRINO_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.TRINO_AWS_SECRET_ACCESS_KEY }}
          AWS_REGION: ${{ vars.TRINO_AWS_REGION }}
          S3_BUCKET: ${{ vars.TRINO_S3_BUCKET }}
          GCP_CREDENTIALS_KEY: ${{ secrets.GCP_CREDENTIALS_KEY }}
          GCP_STORAGE_BUCKET: ${{ vars.GCP_STORAGE_BUCKET }}
          ABFS_CONTAINER: ${{ vars.AZURE_ABFS_HIERARCHICAL_CONTAINER }}
          ABFS_ACCOUNT: ${{ vars.AZURE_ABFS_HIERARCHICAL_ACCOUNT }}
          ABFS_ACCESS_KEY: ${{ secrets.AZURE_ABFS_HIERARCHICAL_ACCESS_KEY }}
          SNOWFLAKE_USER: ${{ vars.SNOWFLAKE_USER }}
          SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
          SNOWFLAKE_URL: ${{ vars.SNOWFLAKE_URL }}
          SNOWFLAKE_DATABASE: ${{ vars.SNOWFLAKE_DATABASE }}
          SNOWFLAKE_CATALOG_SCHEMA: ${{ vars.SNOWFLAKE_CATALOG_SCHEMA }}
          SNOWFLAKE_ROLE: ${{ vars.SNOWFLAKE_ROLE }}
          SNOWFLAKE_WAREHOUSE: ${{ vars.SNOWFLAKE_WAREHOUSE }}
          SNOWFLAKE_CATALOG_S3_ACCESS_KEY_ID: ${{ vars.SNOWFLAKE_CATALOG_S3_ACCESS_KEY_ID }}
          SNOWFLAKE_CATALOG_S3_SECRET_ACCESS_KEY: ${{ secrets.SNOWFLAKE_CATALOG_S3_SECRET_ACCESS_KEY }}
          SNOWFLAKE_EXTERNAL_VOLUME: ${{ vars.SNOWFLAKE_EXTERNAL_VOLUME }}
          SNOWFLAKE_CATALOG_S3_REGION: ${{ vars.SNOWFLAKE_CATALOG_S3_REGION }}
        if: >-
          contains(matrix.modules, 'trino-iceberg') && contains(matrix.profile, 'cloud-tests') &&
          (env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || env.AWS_ACCESS_KEY_ID != '' || env.AWS_SECRET_ACCESS_KEY != '' || env.GCP_CREDENTIALS_KEY != '')
        run: |
          $MAVEN test ${MAVEN_TEST} -pl :trino-iceberg ${{ format('-P {0}', matrix.profile) }} \
            -Dtesting.gcp-storage-bucket="${GCP_STORAGE_BUCKET}" \
            -Dtesting.gcp-credentials-key="${GCP_CREDENTIALS_KEY}" \
            -Dtesting.azure-abfs-container="${ABFS_CONTAINER}" \
            -Dtesting.azure-abfs-account="${ABFS_ACCOUNT}" \
            -Dtesting.azure-abfs-access-key="${ABFS_ACCESS_KEY}" \
            -Dtesting.snowflake.catalog.user="${SNOWFLAKE_USER}" \
            -Dtesting.snowflake.catalog.password="${SNOWFLAKE_PASSWORD}" \
            -Dtesting.snowflake.catalog.account-url="${SNOWFLAKE_URL}" \
            -Dtesting.snowflake.catalog.database="${SNOWFLAKE_DATABASE}" \
            -Dtesting.snowflake.catalog.schema="${SNOWFLAKE_CATALOG_SCHEMA}" \
            -Dtesting.snowflake.catalog.role="${SNOWFLAKE_ROLE}" \
            -Dtesting.snowflake.catalog.warehouse="${SNOWFLAKE_WAREHOUSE}" \
            -Dtesting.snowflake.catalog.s3.access-key="${SNOWFLAKE_CATALOG_S3_ACCESS_KEY_ID}" \
            -Dtesting.snowflake.catalog.s3.secret-key="${SNOWFLAKE_CATALOG_S3_SECRET_ACCESS_KEY}" \
            -Dtesting.snowflake.catalog.s3.external.volume="${SNOWFLAKE_EXTERNAL_VOLUME}" \
            -Dtesting.snowflake.catalog.s3.region="${SNOWFLAKE_CATALOG_S3_REGION}"
      - name: Cloud Redshift Tests ${{ matrix.profile }}
        id: tests-redshift
        env:
          AWS_REGION: ${{ vars.REDSHIFT_AWS_REGION }}
          AWS_ACCESS_KEY_ID: ${{ vars.REDSHIFT_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.REDSHIFT_AWS_SECRET_ACCESS_KEY }}
          REDSHIFT_SUBNET_GROUP_NAME: ${{ vars.REDSHIFT_SUBNET_GROUP_NAME }}
          REDSHIFT_IAM_ROLES: ${{ vars.REDSHIFT_IAM_ROLES }}
          REDSHIFT_VPC_SECURITY_GROUP_IDS: ${{ vars.REDSHIFT_VPC_SECURITY_GROUP_IDS }}
          REDSHIFT_S3_TPCH_TABLES_ROOT: ${{ vars.REDSHIFT_S3_TPCH_TABLES_ROOT }}
        if: >-
          contains(matrix.modules, 'trino-redshift') &&
          (contains(matrix.profile, 'cloud-tests') || contains(matrix.profile, 'fte-tests')) &&
          (env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || env.AWS_ACCESS_KEY_ID != '' || env.REDSHIFT_SUBNET_GROUP_NAME != '')
        run: |
          source .github/bin/redshift/setup-aws-redshift.sh
          $MAVEN test ${MAVEN_TEST} -pl ${{ matrix.modules }} ${{ format('-P {0}', matrix.profile) }} \
            -Dtest.redshift.jdbc.user="${REDSHIFT_USER}" \
            -Dtest.redshift.jdbc.password="${REDSHIFT_PASSWORD}" \
            -Dtest.redshift.jdbc.endpoint="${REDSHIFT_ENDPOINT}:${REDSHIFT_PORT}/" \
            -Dtest.redshift.s3.tpch.tables.root="${REDSHIFT_S3_TPCH_TABLES_ROOT}" \
            -Dtest.redshift.iam.role="${REDSHIFT_IAM_ROLES}" \
            -Dtest.redshift.aws.region="${AWS_REGION}" \
            -Dtest.redshift.aws.access-key="${AWS_ACCESS_KEY_ID}" \
            -Dtest.redshift.aws.secret-key="${AWS_SECRET_ACCESS_KEY}"
      - name: Cleanup ephemeral Redshift Cluster
        env:
          AWS_REGION: ${{ vars.REDSHIFT_AWS_REGION }}
          AWS_ACCESS_KEY_ID: ${{ vars.REDSHIFT_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.REDSHIFT_AWS_SECRET_ACCESS_KEY }}
        # Cancelled workflows may have left the ephemeral cluster running
        if: always()
        run: .github/bin/redshift/delete-aws-redshift.sh
      - name: Sanitize artifact name
        if: always()
        run: |
          # Generate a valid artifact name and make it available to next steps as
          # an environment variable ARTIFACT_NAME
          # ", :, <, >, |, *, ?, \, / are not allowed in artifact names, so replace them with underscores
          name=$(echo -n "${{ matrix.modules }}, ${{ matrix.profile }}, ${{ matrix.jdk }}" | sed -e 's/[":<>|\*\?\\\/]/_/g')
          # the final artifact name can't be longer than 128 characters
          echo "ARTIFACT_NAME=${name:0:100}" >> $GITHUB_ENV
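          # For example, modules "plugin/trino-hive" with profile "fte-tests" and no jdk
          # becomes "plugin_trino-hive, fte-tests, " after the slash is replaced.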
      - name: Upload test results
        uses: ./.github/actions/process-test-results
        if: always()
        with:
          artifact-name: ${{ env.ARTIFACT_NAME }}
          has-failed-tests: >-
            ${{ steps.tests.outcome == 'failure'
                || steps.tests-hdfs-isolated.outcome == 'failure'
                || steps.tests-hdfs.outcome == 'failure'
                || steps.tests-s3.outcome == 'failure'
                || steps.tests-azure.outcome == 'failure'
                || steps.tests-gcs.outcome == 'failure'
                || steps.tests-delta.outcome == 'failure'
                || steps.tests-memsql.outcome == 'failure'
                || steps.tests-bq.outcome == 'failure'
                || steps.tests-bq-ci.outcome == 'failure'
                || steps.tests-bq-smoke.outcome == 'failure'
                || steps.tests-iceberg.outcome == 'failure'
                || steps.tests-redshift.outcome == 'failure'
                || steps.tests-snowflake.outcome == 'failure'
            }}
          upload-heap-dump: ${{ env.SECRETS_PRESENT == '' && github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository }}
      - name: Update PR check
        uses: ./.github/actions/update-check
        if: >-
          failure() &&
          github.event_name == 'repository_dispatch' &&
          github.event.client_payload.slash_command.args.named.sha != '' &&
          github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha
        with:
          pull_request_number: ${{ github.event.client_payload.pull_request.number }}
          check_name: ${{ github.job }} with secrets
          conclusion: ${{ job.status }}
          github_token: ${{ secrets.GITHUB_TOKEN }}
  build-pt:
    runs-on: ubuntu-latest
    outputs:
      matrix: ${{ steps.set-matrix.outputs.matrix }}
      product-tests-changed: ${{ steps.filter.outputs.product-tests }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0 # checkout all commits to be able to determine merge base for GIB
          ref: |
            ${{ github.event_name == 'repository_dispatch' &&
                github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha &&
                format('refs/pull/{0}/head', github.event.client_payload.pull_request.number) || '' }}
      - uses: ./.github/actions/setup
        timeout-minutes: 10
        with:
          cache: restore
          cleanup-node: true
      - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
        id: filter
        with:
          filters: |
            product-tests:
              - 'testing/trino-product-tests*/**'
              - 'testing/trino-testing-services/**'
              # run all tests when there are any changes in the trino-server Maven module
              # because it doesn't define its Trino dependencies and
              # it relies on the Provisio plugin to find the right artifacts
              - 'core/trino-server/**'
              - '.github/**'
      - name: Maven Install
        run: |
          export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}"
          $MAVEN clean install ${MAVEN_FAST_INSTALL} -pl '!:trino-docs,!:trino-server-rpm'
      - name: Map impacted plugins to features
        run: |
          export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}"
          # build a list of impacted modules, ignoring modules that cannot affect either product tests or Trino
          $MAVEN validate ${MAVEN_FAST_INSTALL} ${MAVEN_GIB} -Dgib.logImpactedTo=gib-impacted.log -pl '!:trino-docs,!:trino-tests,!:trino-faulttolerant-tests'
          # GIB doesn't run on master, so make sure the file always exists
          touch gib-impacted.log
          testing/trino-plugin-reader/target/trino-plugin-reader-*-executable.jar -i gib-impacted.log -p core/trino-server/target/trino-server-*-hardlinks/plugin > impacted-features.log
          echo "Impacted plugin features:"
          cat impacted-features.log
      - name: Product tests artifact
        uses: actions/upload-artifact@v4
        with:
          name: product tests and server tarball
          path: |
            core/trino-server/target/*.tar.gz
            impacted-features.log
            testing/trino-product-tests-launcher/target/*.jar
            testing/trino-product-tests/target/*-executable.jar
            client/trino-cli/target/*-executable.jar
          retention-days: 1
      - id: prepare-matrix-template
        run: |
          cat <<EOF > .github/test-pt-matrix.yaml
          config:
            - default
          suite:
            - suite-1
            - suite-2
            - suite-3
            # suite-4 does not exist
            - suite-5
            - suite-6-non-generic
            - suite-hive-transactional
            - suite-azure
            - suite-delta-lake-databricks91
            - suite-delta-lake-databricks104
            - suite-delta-lake-databricks113
            - suite-delta-lake-databricks122
            - suite-delta-lake-databricks133
            - suite-delta-lake-databricks143
            - suite-delta-lake-databricks154
            - suite-databricks-unity-http-hms
            - suite-gcs
            - suite-clients
            - suite-functions
            - suite-tpch
            - suite-tpcds
            - suite-storage-formats-detailed
            - suite-parquet
            - suite-oauth2
            - suite-ldap
            - suite-compatibility
            - suite-all-connectors-smoke
            - suite-delta-lake-oss
            - suite-kafka
            - suite-cassandra
            - suite-clickhouse
            - suite-mysql
            - suite-iceberg
            - suite-snowflake
            - suite-hudi
            - suite-ignite
          exclude:
            - suite: suite-azure
              ignore exclusion if: >-
                ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' ||
                    vars.AZURE_ABFS_HIERARCHICAL_CONTAINER != '' ||
                    vars.AZURE_ABFS_HIERARCHICAL_ACCOUNT != '' ||
                    secrets.AZURE_ABFS_HIERARCHICAL_ACCESS_KEY != '' }}
            - suite: suite-gcs
              ignore exclusion if: >-
                ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.GCP_CREDENTIALS_KEY != '' }}
            - suite: suite-delta-lake-databricks91
              ignore exclusion if: >-
                ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }}
            - suite: suite-delta-lake-databricks104
              ignore exclusion if: >-
                ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }}
            - suite: suite-delta-lake-databricks113
              ignore exclusion if: >-
                ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }}
            - suite: suite-delta-lake-databricks122
              ignore exclusion if: >-
                ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }}
            - suite: suite-delta-lake-databricks133
              ignore exclusion if: >-
                ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }}
            - suite: suite-delta-lake-databricks143
              ignore exclusion if: >-
                ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }}
            - suite: suite-delta-lake-databricks154
              ignore exclusion if: >-
                ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }}
            - suite: suite-databricks-unity-http-hms
              config: hdp3
            - suite: suite-databricks-unity-http-hms
              ignore exclusion if: >-
                ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }}
            - suite: suite-snowflake
              ignore exclusion if: >-
                ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.SNOWFLAKE_PASSWORD != '' }}
          ignore exclusion if:
            # Do not use this property outside of the matrix configuration.
            #
            # This is added to all matrix entries so they may be conditionally
            # excluded by adding them to the excludes list with a GHA expression
            # for this property.
            # - If the expression evaluates to true, it will never match the
            #   actual value of the property, and will therefore not be excluded.
            # - If the expression evaluates to false, it will match the actual
            #   value of the property, and the exclusion will apply normally.
            - "false"
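          # Worked example: for suite-gcs, when GCP_CREDENTIALS_KEY is set the
          # expression in its exclude entry renders to 'true', which never equals
          # this item's value of 'false', so the exclude does not match and the
          # suite stays in the matrix; with no secret it renders to 'false',
          # matches, and the suite is excluded.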
          include:
            # this suite is designed specifically for apache-hive3. TODO remove the suite once we can run all regular tests on apache-hive3.
            - config: apache-hive3
              suite: suite-hms-only
            # Workaround for https://github.com/microsoft/mssql-docker/issues/899
            - config: default
              suite: suite-7-non-generic
              runson: ubuntu-20.04
          EOF
      - name: Build PT matrix (all)
        if: |
          github.event_name != 'pull_request' ||
          steps.filter.outputs.product-tests == 'true' ||
          contains(github.event.pull_request.labels.*.name, 'tests:all') ||
          contains(github.event.pull_request.labels.*.name, 'tests:all-product')
        run: |
          # converts entire YAML file into JSON - no filtering since we want all PTs to run
          ./.github/bin/build-pt-matrix-from-impacted-connectors.py -v -m .github/test-pt-matrix.yaml -o matrix.json
      - name: Build PT matrix (impacted-features)
        if: |
          github.event_name == 'pull_request' &&
          steps.filter.outputs.product-tests == 'false' &&
          !contains(github.event.pull_request.labels.*.name, 'tests:all') &&
          !contains(github.event.pull_request.labels.*.name, 'tests:all-product')
        # all these envs are required to be set by some product test environments
        env:
          ABFS_CONTAINER: ""
          ABFS_ACCOUNT: ""
          ABFS_ACCESS_KEY: ""
          S3_BUCKET: ""
          AWS_REGION: ""
          TRINO_AWS_ACCESS_KEY_ID: ""
          TRINO_AWS_SECRET_ACCESS_KEY: ""
          DATABRICKS_91_JDBC_URL: ""
          DATABRICKS_104_JDBC_URL: ""
          DATABRICKS_113_JDBC_URL: ""
          DATABRICKS_122_JDBC_URL: ""
          DATABRICKS_133_JDBC_URL: ""
          DATABRICKS_143_JDBC_URL: ""
          DATABRICKS_154_JDBC_URL: ""
          DATABRICKS_UNITY_JDBC_URL: ""
          DATABRICKS_UNITY_CATALOG_NAME: ""
          DATABRICKS_UNITY_EXTERNAL_LOCATION: ""
          DATABRICKS_HOST: ""
          DATABRICKS_LOGIN: ""
          DATABRICKS_TOKEN: ""
          GCP_CREDENTIALS_KEY: ""
          GCP_STORAGE_BUCKET: ""
          SNOWFLAKE_URL: ""
          SNOWFLAKE_USER: ""
          SNOWFLAKE_PASSWORD: ""
          SNOWFLAKE_DATABASE: ""
          SNOWFLAKE_ROLE: ""
          SNOWFLAKE_WAREHOUSE: ""
          TESTCONTAINERS_NEVER_PULL: true
        run: |
          # converts filtered YAML file into JSON
          ./.github/bin/build-pt-matrix-from-impacted-connectors.py -v -m .github/test-pt-matrix.yaml -i impacted-features.log -o matrix.json
      - id: set-matrix
        run: |
          echo "Matrix: $(jq '.' matrix.json)"
          echo "matrix=$(cat matrix.json)" >> $GITHUB_OUTPUT
  pt:
    runs-on: ${{ matrix.runson || 'ubuntu-latest' }}
    # explicitly define the name to avoid adding the value of the `ignore exclusion if` matrix item
    name: pt (${{ matrix.config }}, ${{ matrix.suite }}, ${{ matrix.jdk }})
    if: needs.build-pt.outputs.matrix != '{}'
    strategy:
      fail-fast: false
      matrix: ${{ fromJson(needs.build-pt.outputs.matrix) }}
    # PT Launcher's timeout defaults to 2h, add some margin
    timeout-minutes: 130
    needs: build-pt
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0 # checkout all commits, as the build result depends on `git describe` equivalent
          ref: |
            ${{ github.event_name == 'repository_dispatch' &&
                github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha &&
                format('refs/pull/{0}/head', github.event.client_payload.pull_request.number) || '' }}
      - uses: ./.github/actions/setup
        timeout-minutes: 10
        with:
          # The job doesn't build anything, so the ~/.m2/repository cache isn't useful
          cache: 'false'
      - name: Product tests artifact
        uses: actions/download-artifact@v4
        with:
          name: product tests and server tarball
      - name: Fix artifact permissions
        run: |
          find . -type f -name \*-executable.jar -exec chmod 0777 {} \;
      - name: Enable impact analysis
        if: |
          needs.build-pt.outputs.product-tests-changed == 'false' &&
          github.event_name == 'pull_request' &&
          !contains(github.event.pull_request.labels.*.name, 'tests:all') &&
          !contains(github.event.pull_request.labels.*.name, 'tests:all-product')
        run: echo "PTL_OPTS=--impacted-features impacted-features.log" >> $GITHUB_ENV
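      # PTL_OPTS set above is consumed via ${PTL_OPTS:-} in the launcher invocation
      # below; when it is unset, the launcher runs the suite without impact analysis.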
      - name: Product Tests
        id: tests
        env:
          ABFS_CONTAINER: ${{ vars.AZURE_ABFS_HIERARCHICAL_CONTAINER }}
          ABFS_ACCOUNT: ${{ vars.AZURE_ABFS_HIERARCHICAL_ACCOUNT }}
          ABFS_ACCESS_KEY: ${{ secrets.AZURE_ABFS_HIERARCHICAL_ACCESS_KEY }}
          S3_BUCKET: ${{ vars.TRINO_S3_BUCKET }}
          AWS_REGION: ${{ vars.TRINO_AWS_REGION }}
          TRINO_AWS_ACCESS_KEY_ID: ${{ vars.TRINO_AWS_ACCESS_KEY_ID }}
          TRINO_AWS_SECRET_ACCESS_KEY: ${{ secrets.TRINO_AWS_SECRET_ACCESS_KEY }}
          DATABRICKS_91_JDBC_URL: ${{ vars.DATABRICKS_91_JDBC_URL }}
          DATABRICKS_104_JDBC_URL: ${{ vars.DATABRICKS_104_JDBC_URL }}
          DATABRICKS_113_JDBC_URL: ${{ vars.DATABRICKS_113_JDBC_URL }}
          DATABRICKS_122_JDBC_URL: ${{ vars.DATABRICKS_122_JDBC_URL }}
          DATABRICKS_133_JDBC_URL: ${{ vars.DATABRICKS_133_JDBC_URL }}
          DATABRICKS_143_JDBC_URL: ${{ vars.DATABRICKS_143_JDBC_URL }}
          DATABRICKS_154_JDBC_URL: ${{ vars.DATABRICKS_154_JDBC_URL }}
          DATABRICKS_UNITY_JDBC_URL: ${{ vars.DATABRICKS_UNITY_JDBC_URL }}
          DATABRICKS_UNITY_CATALOG_NAME: ${{ vars.DATABRICKS_UNITY_CATALOG_NAME }}
          DATABRICKS_UNITY_EXTERNAL_LOCATION: ${{ vars.DATABRICKS_UNITY_EXTERNAL_LOCATION }}
          DATABRICKS_HOST: ${{ vars.DATABRICKS_HOST }}
          DATABRICKS_LOGIN: token
          DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
          GCP_CREDENTIALS_KEY: ${{ secrets.GCP_CREDENTIALS_KEY }}
          GCP_STORAGE_BUCKET: ${{ vars.GCP_STORAGE_BUCKET }}
          SNOWFLAKE_URL: ${{ vars.SNOWFLAKE_URL }}
          SNOWFLAKE_USER: ${{ vars.SNOWFLAKE_USER }}
          SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
          SNOWFLAKE_DATABASE: ${{ vars.SNOWFLAKE_DATABASE }}
          SNOWFLAKE_ROLE: ${{ vars.SNOWFLAKE_ROLE }}
          SNOWFLAKE_WAREHOUSE: ${{ vars.SNOWFLAKE_WAREHOUSE }}
        run: |
          exec testing/trino-product-tests-launcher/target/trino-product-tests-launcher-*-executable.jar suite run \
            --suite ${{ matrix.suite }} \
            --config config-${{ matrix.config }} \
            ${PTL_OPTS:-} \
            --bind=off --logs-dir logs/ --timeout 2h
      - name: Upload test results
        uses: ./.github/actions/process-test-results
        if: always()
        with:
          artifact-name: pt (${{ matrix.config }}, ${{ matrix.suite }}, ${{ matrix.jdk }})
          has-failed-tests: ${{ steps.tests.outcome == 'failure' }}
          upload-heap-dump: ${{ env.SECRETS_PRESENT == '' && github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository }}
      - name: Update PR check
        uses: ./.github/actions/update-check
        if: >-
          failure() &&
          github.event_name == 'repository_dispatch' &&
          github.event.client_payload.slash_command.args.named.sha != '' &&
          github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha
        with:
          pull_request_number: ${{ github.event.client_payload.pull_request.number }}
          check_name: ${{ github.job }} with secrets
          conclusion: ${{ job.status }}
          github_token: ${{ secrets.GITHUB_TOKEN }}