-
Notifications
You must be signed in to change notification settings - Fork 4.9k
177 lines (174 loc) · 11 KB
/
cd-prepare.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
# Workflow: prepare container publish metadata and OSSPI package reports
# for the rest of the CD pipeline.
name: '[CI/CD] CD Prepare'
# On manual retries, surface the retried SHA in the run name.
run-name: "${{ github.event_name == 'workflow_dispatch' && format('Retrying SHA: {0}', inputs.sha) || '' }}"
on: # rebuild any PRs and main branch changes
  # Manual retry of a specific commit.
  workflow_dispatch:
    inputs:
      sha:
        description: 'Commit to retry'
        required: true
        default: 'HEAD'
  # Automatic trigger: pushes to main touching container sources.
  push:
    branches:
      - main
    paths:
      - 'bitnami/**'
# Remove all permissions by default.
permissions: {}
env:
  # NOTE(review): presumably consumed by downstream VIB/CSP tooling in later
  # CD stages — not referenced by any step visible in this file; confirm.
  CSP_API_URL: https://console.cloud.vmware.com
  CSP_API_TOKEN: ${{ secrets.CSP_API_TOKEN }}
  VIB_PUBLIC_URL: https://cp.bromelia.vmware.com
jobs:
  prepare:
    runs-on: ubuntu-latest
    name: Retrieve metadata and auxiliary files
    permissions:
      contents: read
    # Run on manual dispatch, or on non-forced pushes authored by bitnami-bot.
    if: |
      github.event_name == 'workflow_dispatch' ||
      (github.event.head_commit.author.username == 'bitnami-bot' && github.event.forced == false)
    outputs:
      result: ${{ steps.get-publish-metadata.outputs.result }}
      # NOTE(review): the get-publish-metadata step visible in this file only
      # writes 'result' to $GITHUB_OUTPUT, so 'containers' looks always empty
      # — confirm against downstream consumers.
      containers: ${{ steps.get-publish-metadata.outputs.containers }}
    steps:
      # s3cmd is used later to fetch compilation recipes and upload package reports.
      - name: Install s3cmd
        run: sudo apt-get install -y s3cmd
      - name: Checkout Repository
        uses: actions/checkout@v3
        # Full history is not required anymore
        with:
          # On manual retries check out the requested SHA; otherwise the push ref.
          ref: ${{github.event_name == 'workflow_dispatch' && inputs.sha || '' }}
          fetch-depth: 1
- id: get-publish-metadata
name: Get information about containers to publish
env:
GITHUB_REPO: ${{ github.repository }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_COMMITS: ${{ github.event_name == 'workflow_dispatch' && format('[{{"id":"{0}"}}]', inputs.sha) || toJson(github.event.commits) }}
SHA_URL: ${{ format('{0}/tarball/{1}', github.event.repository.url, (github.event_name == 'workflow_dispatch' && inputs.sha || github.sha)) }}
SHA: ${{github.event_name == 'workflow_dispatch' && inputs.sha || '' }}
run: |
# Get all commits associated to the push
containers=()
while read -r commit; do
# Using the Github API to detect the files changed as git merge-base stops working when the branch is behind
url="https://api.github.com/repos/${GITHUB_REPO}/commits/${commit}"
files_changed_data=$(curl -s --header "authorization: Bearer ${GITHUB_TOKEN}" -X GET -G "$url")
files_changed="$(echo "$files_changed_data" | jq -r '.files[] | .filename')"
while read -r container; do
if [[ ! "${containers[*]}" =~ (^|[[:space:]])$container($|[[:space:]]) ]]; then
# Avoid duplicates
containers+=("${container}")
if [[ -d "${container}" ]]; then
tag="$(grep -oE "org.opencontainers.image.ref.name=\".+\"" "${container}/Dockerfile" | sed -nr "s|org.opencontainers.image.ref.name=\"(.+)\"|\1|p")"
if [[ -z "${tag}" ]]; then
echo "No tag found for: ${container}"
else
name="$(grep -oE "org.opencontainers.image.title=\".+\"" "${container}/Dockerfile" | sed -nr "s|org.opencontainers.image.title=\"(.+)\"|\1|p")"
app_version="$(grep -oE "org.opencontainers.image.version=\".+\"" "${container}/Dockerfile" | sed -nr "s|org.opencontainers.image.version=\"(.+)\"|\1|p")"
os_flavour_parsed="$(grep -oP "OS_FLAVOUR=\"\K[^\"]+" "${container}/Dockerfile" || true)"
os_flavour=${os_flavour_parsed:-scratch}
revision="${tag#"${app_version}-${os_flavour}-r"}"
rolling_tags="$(yq '.rolling-tags' "${container}/tags-info.yaml" -o json | jq -c)"
branch="$(echo "${container}" | awk -F '/' '{print $3}')"
dsl_path="${name}"
if [[ -d ".vib/${dsl_path}/${branch}" ]]; then
dsl_path="${dsl_path}/${branch}"
fi
# This is hack to avoid jq errors while getting the architectures
vib_publish="$(cat .vib/${dsl_path}/vib-publish.json | sed -e 's|{VIB_ENV_ROLLING_TAGS}|"${rolling_tags}"|')"
architectures="$(echo "${vib_publish}" | jq -cr '.phases.package.actions[] | select(.action_id == "container-image-package") | .params.architectures // ["linux/amd64"]')"
container_json=$(jq -n '{"name": $name, "path": $path, "os_flavour": $os_flavour, "branch": $branch, "app_version": $app_version, "revision": $revision, "sha": $sha, "sha_url": $sha_url, "dsl_path": $dsl_path, "tag": $tag, "rolling_tags": $rolling_tags, "architectures": $architectures}' \
--arg name "$name" --arg path "$container" --arg os_flavour "$os_flavour" --arg branch "$branch" --arg app_version "$app_version" --arg revision "$revision" --arg sha "$SHA" --arg sha_url "$SHA_URL" --arg dsl_path "$dsl_path" --arg tag "$tag" --argjson rolling_tags "$rolling_tags" --argjson architectures "$architectures")
containers_json+=("${container_json}")
fi
fi
fi
done <<< "$(echo "$files_changed" | xargs dirname | grep -o "^bitnami/[^/]*/[^/]*/[^/]*" | sort | uniq || true)"
done <<< "$(echo "${GITHUB_COMMITS}" | jq -r '.[] | .id')"
if [[ "${#containers[@]}" -le "0" ]]; then
echo "No changes detected in containers. The rest of the steps will be skipped."
echo "result=skip" >> $GITHUB_OUTPUT
else
publish_metadata=$(printf "%s\n" "${containers_json[@]}" | jq -s .)
echo "result=ok" >> $GITHUB_OUTPUT
echo "${publish_metadata}" > publish-metadata.json
fi
- name: Getting 3rd party packages for OSSPI
if: ${{ steps.get-publish-metadata.outputs.result == 'ok' }}
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_S3_BUCKET: ${{ secrets.AWS_S3_BUCKET }}
AWS_DEFAULT_REGION: us-east-1
run: |
# <version>-<revision>
# Example: 17.0.7-7-1
# version: semver but it can have also '-' like the upper example.
# revision: number
COMPONENT_VERSION_REGEX="([0-9]+\.[0-9]+\.[0-9]+(-.+)?)-([0-9]+)"
while read -r container; do
# If it's set from outside, can be changed
image_name=$(echo "$container" | jq -cr '.tag')
image_path=$(echo "$container" | jq -cr '.path')
version=$(echo "$container" | jq -cr '.app_version')
containers_path=$(echo "$container" | jq -cr '.path')
components_json_file="${containers_path}/prebuildfs/opt/bitnami/.bitnami_components.json"
# If the components.json file, so it seems has external packages
if [[ -f "$components_json_file" ]]; then
while read -r architecture; do
# Remove platform prefix.
arch="${architecture##*/}"
declare -A packages=()
# Iterating over the external components to get the involved elements
while read -r component_id; do
if [[ -n "$component_id" ]]; then
component_version_full=$(jq -cr '."'${component_id}'".version' "$components_json_file")
if [[ "$component_version_full" =~ $COMPONENT_VERSION_REGEX ]] && [[ "${#BASH_REMATCH[@]}" -ge 3 ]]; then
component_version=${BASH_REMATCH[1]}
component_distro=$(jq -cr '."'${component_id}'".distro' "$components_json_file")
# We will use the container arch instead of the component arch (component file has only amd64 references)
# component_arch=$(jq -cr '."'${component_id}'".arch' "$components_json_file")
component_arch=$arch
# Added "true" to avoid errors if compilation_recipe doesn't exists
compilation_recipe=$(s3cmd get "s3://${AWS_S3_BUCKET}/${component_id}/${component_version}/${component_distro}/${component_arch}/compilation-recipe.json" - 2>/dev/null || true)
# now getting each component to be reported
while read -r json_package; do
package_id="$(echo "$json_package" | jq -r '.id' )"
package_version="$(echo "$json_package" | jq -r '.version' )"
package_url="$(echo "$json_package" | jq -r '.source.upstreamSourceUrl')"
packages["$package_id"]="${package_version} ${package_url}"
done <<<"$(echo "$compilation_recipe" | jq -cr '.components[]')"
fi
fi
done <<<"$(jq -cr 'keys[]' "$components_json_file")"
# Now creating the JSON file with the format required by the OSSPI Scan
osspi_packages="[]"
for package_id in "${!packages[@]}"; do
read -r -a version_url <<< "${packages["$package_id"]}"
http_url="${version_url[1]#git+}"
if [[ -z "$http_url" ]]
then
echo "[WARNING] The URL for ${package_id}:${version_url[0]} is missing in the recipe"
else
# Concat new package to osspi_packages array
osspi_packages="$(jq '. += [{"_unique_id": $uniqueID, "name": $name, "version": $version, "url": $url, "repository": "other"}]' --arg uniqueID "other:${package_id}:${version_url[0]}" --arg name "$package_id" --arg version "${version_url[0]}" --arg url "$http_url" <<< "$osspi_packages")"
fi
done
jq -n '{"packages": $packages, "server": { "scanner": "custom", "type": "Hub"}}' --argjson packages "${osspi_packages}" > "${image_path}/osspi-packages-${arch}.json"
s3cmd put "${image_path}/osspi-packages-${arch}.json" "s3://${AWS_S3_BUCKET}/${asset}/${version}/${os_flavour}/${arch}/${revision}/packages.json"
done <<<"$(echo "$container" | jq -cr '.architectures[]')"
else
echo "$image_name:$version doesn't have external components.json"
fi
done <<<"$(jq -c '.[]' publish-metadata.json)"
      # Persist the publish metadata for downstream CD workflows.
      - uses: actions/upload-artifact@v3
        if: ${{ steps.get-publish-metadata.outputs.result == 'ok' }}
        with:
          name: publish-metadata.json
          path: ./publish-metadata.json
      # Persist the per-architecture OSSPI package reports generated above.
      - uses: actions/upload-artifact@v3
        if: ${{ steps.get-publish-metadata.outputs.result == 'ok' }}
        with:
          name: packages.json
          # NOTE(review): hard-coded runner workspace path — assumes the repo is
          # checked out under ~/work/containers on this runner; confirm.
          path: ~/work/containers/**/osspi-packages*.json