[bitnami/postgresql] Release 12.15.0-debian-11-r74 (#42115) #18096

Workflow file for this run

name: '[CI/CD] CD Prepare'
run-name: "${{ github.event_name == 'workflow_dispatch' && format('Retrying SHA: {0}', inputs.sha) || '' }}"
on: # rebuild any PRs and main branch changes
  workflow_dispatch:
    inputs:
      sha:
        description: 'Commit to retry'
        required: true
        default: 'HEAD'
  push:
    branches:
      - main
    paths:
      - 'bitnami/**'
# Remove all permissions by default.
permissions: {}
env:
  CSP_API_URL: https://console.cloud.vmware.com
  CSP_API_TOKEN: ${{ secrets.CSP_API_TOKEN }}
  VIB_PUBLIC_URL: https://cp.bromelia.vmware.com
jobs:
  prepare:
    runs-on: ubuntu-latest
    name: Retrieve metadata and auxiliary files
    permissions:
      contents: read
    if: |
      github.event_name == 'workflow_dispatch' ||
      (github.event.head_commit.author.username == 'bitnami-bot' && github.event.forced == false)
    outputs:
      result: ${{ steps.get-publish-metadata.outputs.result }}
      containers: ${{ steps.get-publish-metadata.outputs.containers }}
    steps:
      - name: Install s3cmd
        run: sudo apt-get install -y s3cmd
      - name: Checkout Repository
        uses: actions/checkout@v3
        # Full history is not required anymore
        with:
          ref: ${{ github.event_name == 'workflow_dispatch' && inputs.sha || '' }}
          fetch-depth: 1
      - id: get-publish-metadata
        name: Get information about containers to publish
        env:
          GITHUB_REPO: ${{ github.repository }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          GITHUB_COMMITS: ${{ github.event_name == 'workflow_dispatch' && format('[{{"id":"{0}"}}]', inputs.sha) || toJson(github.event.commits) }}
          SHA_URL: ${{ format('{0}/tarball/{1}', github.event.repository.url, (github.event_name == 'workflow_dispatch' && inputs.sha || github.sha)) }}
          SHA: ${{ github.event_name == 'workflow_dispatch' && inputs.sha || '' }}
        run: |
          # Get all commits associated to the push
          containers=()
          while read -r commit; do
            # Use the GitHub API to detect the files changed, as git merge-base stops working when the branch is behind
            url="https://api.github.com/repos/${GITHUB_REPO}/commits/${commit}"
            files_changed_data=$(curl -s --header "authorization: Bearer ${GITHUB_TOKEN}" -X GET -G "$url")
            files_changed="$(echo "$files_changed_data" | jq -r '.files[] | .filename')"
            while read -r container; do
              if [[ ! "${containers[*]}" =~ (^|[[:space:]])$container($|[[:space:]]) ]]; then
                # Avoid duplicates
                containers+=("${container}")
                if [[ -d "${container}" ]]; then
                  tag="$(grep -oE "org.opencontainers.image.ref.name=\".+\"" "${container}/Dockerfile" | sed -nr "s|org.opencontainers.image.ref.name=\"(.+)\"|\1|p")"
                  if [[ -z "${tag}" ]]; then
                    echo "No tag found for: ${container}"
                  else
                    name="$(grep -oE "org.opencontainers.image.title=\".+\"" "${container}/Dockerfile" | sed -nr "s|org.opencontainers.image.title=\"(.+)\"|\1|p")"
                    app_version="$(grep -oE "org.opencontainers.image.version=\".+\"" "${container}/Dockerfile" | sed -nr "s|org.opencontainers.image.version=\"(.+)\"|\1|p")"
                    os_flavour_parsed="$(grep -oP "OS_FLAVOUR=\"\K[^\"]+" "${container}/Dockerfile" || true)"
                    os_flavour=${os_flavour_parsed:-scratch}
                    revision="${tag#"${app_version}-${os_flavour}-r"}"
                    rolling_tags="$(yq '.rolling-tags' "${container}/tags-info.yaml" -o json | jq -c)"
                    branch="$(echo "${container}" | awk -F '/' '{print $3}')"
                    dsl_path="${name}"
                    if [[ -d ".vib/${dsl_path}/${branch}" ]]; then
                      dsl_path="${dsl_path}/${branch}"
                    fi
                    # This is a hack to avoid jq errors while getting the architectures
                    vib_publish="$(cat .vib/${dsl_path}/vib-publish.json | sed -e 's|{VIB_ENV_ROLLING_TAGS}|"${rolling_tags}"|')"
                    architectures="$(echo "${vib_publish}" | jq -cr '.phases.package.actions[] | select(.action_id == "container-image-package") | .params.architectures // ["linux/amd64"]')"
                    container_json=$(jq -n '{"name": $name, "path": $path, "os_flavour": $os_flavour, "branch": $branch, "app_version": $app_version, "revision": $revision, "sha": $sha, "sha_url": $sha_url, "dsl_path": $dsl_path, "tag": $tag, "rolling_tags": $rolling_tags, "architectures": $architectures}' \
                      --arg name "$name" --arg path "$container" --arg os_flavour "$os_flavour" --arg branch "$branch" --arg app_version "$app_version" --arg revision "$revision" --arg sha "$SHA" --arg sha_url "$SHA_URL" --arg dsl_path "$dsl_path" --arg tag "$tag" --argjson rolling_tags "$rolling_tags" --argjson architectures "$architectures")
                    containers_json+=("${container_json}")
                  fi
                fi
              fi
            done <<< "$(echo "$files_changed" | xargs dirname | grep -o "^bitnami/[^/]*/[^/]*/[^/]*" | sort | uniq || true)"
          done <<< "$(echo "${GITHUB_COMMITS}" | jq -r '.[] | .id')"
          if [[ "${#containers[@]}" -le "0" ]]; then
            echo "No changes detected in containers. The rest of the steps will be skipped."
            echo "result=skip" >> $GITHUB_OUTPUT
          else
            publish_metadata=$(printf "%s\n" "${containers_json[@]}" | jq -s .)
            echo "result=ok" >> $GITHUB_OUTPUT
            echo "${publish_metadata}" > publish-metadata.json
          fi
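      # For reference, each entry written to publish-metadata.json above has roughly this shape
      # (illustrative values based on the postgresql 12.15.0-debian-11-r74 release in this run;
      # dsl_path, rolling_tags and architectures depend on the repository contents):
      #   {"name": "postgresql", "path": "bitnami/postgresql/12/debian-11", "os_flavour": "debian-11",
      #    "branch": "12", "app_version": "12.15.0", "revision": "74", "sha": "...", "sha_url": "...",
      #    "dsl_path": "...", "tag": "12.15.0-debian-11-r74", "rolling_tags": [...], "architectures": [...]}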
      - name: Getting 3rd party packages for OSSPI
        if: ${{ steps.get-publish-metadata.outputs.result == 'ok' }}
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_S3_BUCKET: ${{ secrets.AWS_S3_BUCKET }}
          AWS_DEFAULT_REGION: us-east-1
        run: |
          # Component versions follow the format <version>-<revision>
          # Example: 17.0.7-7-1
          # version: semver, but it may also contain '-' as in the example above.
          # revision: a number
          COMPONENT_VERSION_REGEX="([0-9]+\.[0-9]+\.[0-9]+(-.+)?)-([0-9]+)"
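          # Worked example: matching 17.0.7-7-1 against the regex above yields
          # BASH_REMATCH[1]="17.0.7-7" (the version, including its extra '-') and
          # BASH_REMATCH[3]="1" (the revision).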
          while read -r container; do
            # These values can be overridden if set from outside
            image_name=$(echo "$container" | jq -cr '.tag')
            image_path=$(echo "$container" | jq -cr '.path')
            version=$(echo "$container" | jq -cr '.app_version')
            containers_path=$(echo "$container" | jq -cr '.path')
            # Values used below to build the S3 destination path
            asset=$(echo "$container" | jq -cr '.name')
            os_flavour=$(echo "$container" | jq -cr '.os_flavour')
            revision=$(echo "$container" | jq -cr '.revision')
            components_json_file="${containers_path}/prebuildfs/opt/bitnami/.bitnami_components.json"
            # If the components.json file exists, the image ships external packages
            if [[ -f "$components_json_file" ]]; then
              while read -r architecture; do
                # Remove the platform prefix.
                arch="${architecture##*/}"
                declare -A packages=()
                # Iterate over the external components to get the involved elements
                while read -r component_id; do
                  if [[ -n "$component_id" ]]; then
                    component_version_full=$(jq -cr '."'${component_id}'".version' "$components_json_file")
                    if [[ "$component_version_full" =~ $COMPONENT_VERSION_REGEX ]] && [[ "${#BASH_REMATCH[@]}" -ge 3 ]]; then
                      component_version=${BASH_REMATCH[1]}
                      component_distro=$(jq -cr '."'${component_id}'".distro' "$components_json_file")
                      # We use the container arch instead of the component arch (the component file only has amd64 references)
                      # component_arch=$(jq -cr '."'${component_id}'".arch' "$components_json_file")
                      component_arch=$arch
                      # '|| true' avoids errors if the compilation recipe doesn't exist
                      compilation_recipe=$(s3cmd get "s3://${AWS_S3_BUCKET}/${component_id}/${component_version}/${component_distro}/${component_arch}/compilation-recipe.json" - 2>/dev/null || true)
                      # Now get each component to be reported
                      while read -r json_package; do
                        package_id="$(echo "$json_package" | jq -r '.id' )"
                        package_version="$(echo "$json_package" | jq -r '.version' )"
                        package_url="$(echo "$json_package" | jq -r '.source.upstreamSourceUrl')"
                        packages["$package_id"]="${package_version} ${package_url}"
                      done <<<"$(echo "$compilation_recipe" | jq -cr '.components[]')"
                    fi
                  fi
                done <<<"$(jq -cr 'keys[]' "$components_json_file")"
                # Now create the JSON file in the format required by the OSSPI scan
                osspi_packages="[]"
                for package_id in "${!packages[@]}"; do
                  read -r -a version_url <<< "${packages["$package_id"]}"
                  http_url="${version_url[1]#git+}"
                  if [[ -z "$http_url" ]]; then
                    echo "[WARNING] The URL for ${package_id}:${version_url[0]} is missing in the recipe"
                  else
                    # Concatenate the new package to the osspi_packages array
                    osspi_packages="$(jq '. += [{"_unique_id": $uniqueID, "name": $name, "version": $version, "url": $url, "repository": "other"}]' --arg uniqueID "other:${package_id}:${version_url[0]}" --arg name "$package_id" --arg version "${version_url[0]}" --arg url "$http_url" <<< "$osspi_packages")"
                  fi
                done
                jq -n '{"packages": $packages, "server": { "scanner": "custom", "type": "Hub"}}' --argjson packages "${osspi_packages}" > "${image_path}/osspi-packages-${arch}.json"
                s3cmd put "${image_path}/osspi-packages-${arch}.json" "s3://${AWS_S3_BUCKET}/${asset}/${version}/${os_flavour}/${arch}/${revision}/packages.json"
              done <<<"$(echo "$container" | jq -cr '.architectures[]')"
            else
              echo "$image_name:$version doesn't have external components.json"
            fi
          done <<<"$(jq -c '.[]' publish-metadata.json)"
      - uses: actions/upload-artifact@v3
        if: ${{ steps.get-publish-metadata.outputs.result == 'ok' }}
        with:
          name: publish-metadata.json
          path: ./publish-metadata.json
      - uses: actions/upload-artifact@v3
        if: ${{ steps.get-publish-metadata.outputs.result == 'ok' }}
        with:
          name: packages.json
          path: ~/work/containers/**/osspi-packages*.json