From 68e6b6cce5d427cc94f38b90106273208bff8792 Mon Sep 17 00:00:00 2001
From: Liviu Tomoiaga
Date: Wed, 29 Apr 2026 16:32:09 +0300
Subject: [PATCH 1/4] ci: prepare_rpi_artefacts: Prepare RPI artefacts

Signed-off-by: Liviu Tomoiaga
---
 .github/workflows/deploy_rpi_artifacts.yml | 285 +++++++++++++++++++++
 ci/prepare_rpi_artifacts.sh                | 181 +++++++++++++
 2 files changed, 466 insertions(+)
 create mode 100644 .github/workflows/deploy_rpi_artifacts.yml
 create mode 100755 ci/prepare_rpi_artifacts.sh

diff --git a/.github/workflows/deploy_rpi_artifacts.yml b/.github/workflows/deploy_rpi_artifacts.yml
new file mode 100644
index 00000000000000..ef92212c42ff32
--- /dev/null
+++ b/.github/workflows/deploy_rpi_artifacts.yml
@@ -0,0 +1,285 @@
+name: Prepare and Upload to Cloudsmith RPI Artifacts
+
+on:
+  workflow_call:
+    inputs:
+      artifacts:
+        required: true
+        type: string
+      BUILD_SOURCEBRANCH:
+        required: true
+        type: string
+      CLOUDSMITH_REPO:
+        required: false
+        type: string
+        default: 'sdg-linux-rpi'
+
+permissions:
+  id-token: write
+  contents: write
+  actions: read
+
+jobs:
+  prepare_and_upload_rpi_artefacts:
+    runs-on: [self-hosted, repo-only]
+    permissions:
+      id-token: write
+      contents: write
+      actions: read
+
+    steps:
+    - uses: analogdevicesinc/doctools/checkout@action
+
+    - name: Prepare path
+      run: |
+        rm -rf dist ; mkdir dist
+
+    - name: Get sources
+      run: |
+        file=$(echo "${{ github.workflow_ref }}" | cut -d'/' -f3- | cut -d'@' -f1)
+        workflow_ref=$(awk '
+          $0 ~ "uses:" && $0 ~ "deploy_rpi_artifacts.yml" {
+            sub(/^[[:space:]]*uses:[[:space:]]*/, "", $0);
+            print $0;
+            exit
+          }
+        ' "$file")
+        echo $workflow_ref
+
+        org_repo="$(echo "$workflow_ref" | cut -d'/' -f1-2)"
+        ref="$(echo "$workflow_ref" | cut -d'@' -f2)"
+
+        get_file () {
+          echo https://raw.githubusercontent.com/$org_repo/$ref/$1
+          curl -sL -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" -o $1 \
+            https://raw.githubusercontent.com/$org_repo/$ref/$1
+        }
+
+        mkdir -p ci
+        get_file ci/prepare_rpi_artifacts.sh
+
+        echo "Contents of ci directory:"
+        ls -la ci/
+        echo "Verifying script content:"
+        head -3 ci/prepare_rpi_artifacts.sh
+        if [[ ! -s ci/prepare_rpi_artifacts.sh ]] || ! head -1 ci/prepare_rpi_artifacts.sh | grep -q "^#"; then
+          echo "ERROR: Script download failed or invalid content"
+          exit 1
+        fi
+        chmod +x ci/prepare_rpi_artifacts.sh
+        echo "Script is ready and executable"
+
+    - name: Download artifacts
+      run: |
+        gh-get-workflow-artifacts()
+        {
+          artifacts=$(curl -sfL \
+            -H "Accept: application/vnd.github+json" \
+            -H "Authorization: Bearer $1 " \
+            -H "X-GitHub-Api-Version: 2022-11-28" \
+            https://api.github.com/repos/$2/actions/runs/$3/artifacts)
+          echo "$artifacts"
+        }
+
+        gh-download-artifact () {
+          curl -sfL \
+            -H "Authorization: Bearer $1" \
+            -H "Accept: application/vnd.github+json" \
+            -o "$2" \
+            "$3"
+        }
+
+        artifacts=$(
+          gh-get-workflow-artifacts ${{ secrets.GITHUB_TOKEN }} \
+            ${{ github.repository }} \
+            ${{ github.run_id }}
+        )
+
+        total_count=$(echo $artifacts | jq '.total_count' -r)
+
+        if [[ "$total_count" == "null" ]] || [[ "$total_count" == "0" ]]; then
+          exit
+        fi
+
+        artifacts_files=$(echo $artifacts | jq '[.artifacts[] | [.name, .archive_download_url]]' -r)
+        patterns="${{ inputs.artifacts }}"
+        echo $artifacts_files | jq -r '.[] | @tsv' | while IFS=$'\t' read -r name url; do
+          matched=0
+          for p in $patterns; do
+            if [[ $name == $p ]]; then
+              matched=1
+              break
+            fi
+          done
+
+          if [[ "$matched" == "1" ]]; then
+            echo "$name"
+            gh-download-artifact ${{ secrets.GITHUB_TOKEN }} \
+              "dist/$name" \
+              "$url"
+          else
+            echo "$name no match, skipped"
+          fi
+        done
+
+    - name: Prepare artifacts
+      run: |
+        echo "Extracting artifacts..."
+ for f in dist/*; do + [ -f "$f" ] || continue + + zip_file="$f.zip" + mv "$f" "$zip_file" + + dir_name=$(basename "$f") + mkdir -p "dist/$dir_name" + unzip -q "$zip_file" -d "dist/$dir_name" + + echo "Extracted: $zip_file -> dist/$dir_name/" + done + echo "Current path after extraction: $(pwd)" + echo "Contents of dist directory:" + ls -l dist + echo "Contents of working directory:" + ls -l + cd dist && ../ci/prepare_rpi_artifacts.sh + + - uses: cloudsmith-io/cloudsmith-cli-action@v1.0.5 + env: + CLOUDSMITH_SERVICE_SLUG: ${{ secrets.CLOUDSMITH_SERVICE_SLUG }} + if: ${{ env.CLOUDSMITH_SERVICE_SLUG != '' && env.CLOUDSMITH_SERVICE_SLUG != ' ' }} + with: + oidc-namespace: ${{ vars.CLOUDSMITH_NAMESPACE }} + oidc-service-slug: ${{ secrets.CLOUDSMITH_SERVICE_SLUG }} + oidc-auth-only: 'true' + + - name: Cloudsmith API Key + env: + CLOUDSMITH_SERVICE_SLUG: ${{ secrets.CLOUDSMITH_SERVICE_SLUG }} + if: ${{ env.CLOUDSMITH_SERVICE_SLUG == '' || env.CLOUDSMITH_SERVICE_SLUG == ' ' }} + run: | + CLOUDSMITH_API_KEY=$(echo "${{ secrets.CLOUDSMITH_API_KEY }}" | xargs) + echo "CLOUDSMITH_API_KEY=$CLOUDSMITH_API_KEY" >> "$GITHUB_ENV" + + - name: Cloudsmith upload assets + if: ${{ env.CLOUDSMITH_API_KEY != '' }} + run: | + cs-package-upload() + { + local api_key="$1" + local namespace="$2" + local repo="$3" + local file="$4" + local url="https://upload.cloudsmith.io/${namespace}/${repo}/${file}" + + echo "Uploading to: $url" >&2 + response=$(curl -sL --write-out "\nHTTP_STATUS:%{http_code}" \ + -T "$file" \ + -H "X-Api-Key: $api_key" \ + -H "Content-Sha256: $(shasum -a256 "$file" | cut -f1 -d' ')" \ + "$url") + http_status=$(echo "$response" | grep "HTTP_STATUS:" | cut -d':' -f2) + body=$(echo "$response" | sed '/HTTP_STATUS:/d') + echo "HTTP Status: $http_status" >&2 + echo "Response: $body" >&2 + if [[ -z "$http_status" || "$http_status" -ge 400 ]]; then + echo "ERROR: Upload failed with status $http_status" >&2 + return 1 + fi + package_file=$(echo "$body" | jq -r .identifier) + 
echo "$package_file" + } + + cs-package-store-raw () { + local api_key="$1" + local namespace="$2" + local repo="$3" + local pkg_file="$4" + local name="$5" + local tags="$6" + local version="$7" + local url="https://api.cloudsmith.com/v1/packages/${namespace}/${repo}/upload/raw/" + + echo "Storing package at: $url" >&2 + response=$(curl -sL --write-out "\nHTTP_STATUS:%{http_code}" \ + -X POST \ + -H "Accept: application/json" \ + -H "Content-Type: application/json" \ + -H "X-Api-Key: $api_key" \ + -d "{\"package_file\":\"$pkg_file\", \ + \"republish\":\"true\", \ + \"name\":\"$name\", \ + \"tags\": \"$tags\", \ + \"version\":\"$version\"}" \ + "$url") + http_status=$(echo "$response" | grep "HTTP_STATUS:" | cut -d':' -f2) + body=$(echo "$response" | sed '/HTTP_STATUS:/d') + echo "HTTP Status: $http_status" >&2 + echo "Response: $body" >&2 + if [[ -z "$http_status" || "$http_status" -ge 400 ]]; then + echo "ERROR: Store failed with status $http_status" >&2 + return 1 + fi + } + + if [ -z "$( ls -A 'dist' )" ]; then + echo "ERROR: dist directory is empty, no artifacts to upload" + exit 1 + fi + + # Calculate version and tags (before cd to ensure git works) + version="linux_rpi/${{ inputs.BUILD_SOURCEBRANCH }}/$(date +%Y_%m_%d-%H_%M)/" + tags="git_sha-$(git rev-parse --short HEAD);timestamp_$(date +%Y_%m_%d-%H_%M)" + + cd dist || { echo "ERROR: Failed to cd into dist" >&2; exit 1; } + + # Upload .tar.gz files + for file in *.tar.gz ; do + [ -f "$file" ] || continue + echo "Uploading: $file" + package_file=$( + cs-package-upload "${{ env.CLOUDSMITH_API_KEY }}" \ + "${{ vars.CLOUDSMITH_NAMESPACE }}" \ + "${{ inputs.CLOUDSMITH_REPO }}" \ + "$file" + ) + + if [[ -z "$package_file" || "$package_file" == "null" ]]; then + echo "ERROR: Failed to upload $file" >&2 + exit 1 + fi + + cs-package-store-raw "${{ env.CLOUDSMITH_API_KEY }}" \ + "${{ vars.CLOUDSMITH_NAMESPACE }}" \ + "${{ inputs.CLOUDSMITH_REPO }}" \ + "$package_file" \ + "$file" \ + "$tags" \ + "$version" || exit 1 
+        done
+
+        # Upload rpi_archives_properties.txt
+        if [ -f "rpi_archives_properties.txt" ]; then
+          echo "Uploading: rpi_archives_properties.txt"
+          package_file=$(
+            cs-package-upload "${{ env.CLOUDSMITH_API_KEY }}" \
+              "${{ vars.CLOUDSMITH_NAMESPACE }}" \
+              "${{ inputs.CLOUDSMITH_REPO }}" \
+              "rpi_archives_properties.txt"
+          )
+
+          if [[ -z "$package_file" || "$package_file" == "null" ]]; then
+            echo "ERROR: Failed to upload rpi_archives_properties.txt" >&2
+            exit 1
+          fi
+
+          cs-package-store-raw "${{ env.CLOUDSMITH_API_KEY }}" \
+            "${{ vars.CLOUDSMITH_NAMESPACE }}" \
+            "${{ inputs.CLOUDSMITH_REPO }}" \
+            "$package_file" \
+            "rpi_archives_properties.txt" \
+            "$tags" \
+            "$version" || exit 1
+        fi
+
diff --git a/ci/prepare_rpi_artifacts.sh b/ci/prepare_rpi_artifacts.sh
new file mode 100755
index 00000000000000..5711668ccd4b0d
--- /dev/null
+++ b/ci/prepare_rpi_artifacts.sh
@@ -0,0 +1,181 @@
+#!/bin/bash -e
+# SPDX-License-Identifier: GPL-2.0-only
+
+# SOURCE_DIRECTORY: where the downloaded artifacts are located
+# OUTPUT_DIRECTORY: where to create the tar.gz archives (defaults to current directory)
+SOURCE_DIRECTORY="${SOURCE_DIRECTORY:-$(pwd)}"
+OUTPUT_DIRECTORY="${OUTPUT_DIRECTORY:-$(pwd)}"
+
+timestamp=$(date +%Y_%m_%d-%H_%M)
+
+# Extract git info from context.txt (replaces git commands for CI artifacts)
+for ctx in "${SOURCE_DIRECTORY}"/adi_bcm*-gcc-arm*/context.txt; do
+    if [[ -f "$ctx" ]]; then
+        GIT_SHA=$(grep "^git_sha=" "$ctx" | cut -d'=' -f2)
+        git_sha_at=$(grep "^git_sha_at=" "$ctx" | cut -d'=' -f2)
+        GIT_SHA_DATE=$(date -d "@$git_sha_at" +"%Y-%m-%d-%H-%M" 2>/dev/null || echo "unknown")
+        break
+    fi
+done
+
+# Temporary working directory
+WORK_DIR=$(mktemp -d)
+trap "rm -rf ${WORK_DIR}" EXIT
+
+#create version file found in the boot partition
+create_version_file() {
+    mkdir -p ${WORK_DIR}/${1}
+    echo -e "RPI Boot Files: ${BUILD_SOURCEBRANCH} ${GIT_SHA_DATE}\n" > ${WORK_DIR}/${1}/version_rpi.txt
+    echo -e " Linux repository: https://github.com/analogdevicesinc/linux" >> ${WORK_DIR}/${1}/version_rpi.txt
+    echo -e " Linux branch: ${BUILD_SOURCEBRANCHNAME}" >> ${WORK_DIR}/${1}/version_rpi.txt
+    echo -e " Linux git sha: ${GIT_SHA}\n" >> ${WORK_DIR}/${1}/version_rpi.txt
+    echo -e "Supported RaspberryPi platforms:\n" >> ${WORK_DIR}/${1}/version_rpi.txt
+    list=($2)
+    for platform in "${list[@]}"; do
+        echo " ${platform}" >> ${WORK_DIR}/${1}/version_rpi.txt
+    done
+}
+
+#extract and rename kernel image from boot directory
+# Usage: extract_kernel_image <boot_dir> <output_file>
+extract_kernel_image() {
+    local boot_dir="$1"
+    local output_file="$2"
+    local kernel_name=$(basename "$output_file")
+
+    if [[ -f "${boot_dir}/Image.gz" ]]; then
+        echo " Extracting Image.gz -> ${kernel_name}"
+        gunzip -c "${boot_dir}/Image.gz" > "${output_file}"
+    elif [[ -f "${boot_dir}/Image" ]]; then
+        echo " Copying Image -> ${kernel_name}"
+        cp "${boot_dir}/Image" "${output_file}"
+    elif [[ -f "${boot_dir}/zImage" ]]; then
+        echo " Copying zImage -> ${kernel_name}"
+        cp "${boot_dir}/zImage" "${output_file}"
+    elif [[ -f "${boot_dir}/uImage" ]]; then
+        echo " Copying uImage -> ${kernel_name}"
+        cp "${boot_dir}/uImage" "${output_file}"
+    else
+        echo "ERROR: No kernel image found in ${boot_dir}"
+        return 1
+    fi
+}
+
+#prepare the structure of the folder containing artifacts
+artifacts_structure() {
+    typeBCM_32bit=( "bcm2709" "bcm2711" "bcmrpi" )
+    typeKERNEL_32bit=( "kernel7" "kernel7l" "kernel" )
+    typeBCM_64bit=( "bcm2711" "bcm2712" )
+    typeKERNEL_64bit=( "kernel8" "kernel_2712" )
+
+    echo "=== Processing 32-bit artifacts ==="
+    mkdir -p ${WORK_DIR}/32bit
+    mkdir -p ${WORK_DIR}/32bit/modules
+    create_version_file 32bit "${typeBCM_32bit[*]}"
+    for index in "${!typeBCM_32bit[@]}"; do
+        local artifact_dir="adi_${typeBCM_32bit[$index]}_defconfig-gcc-arm"
+        echo "Processing ${artifact_dir}..."
+ + # Extract and rename kernel image + extract_kernel_image \ + "${SOURCE_DIRECTORY}/${artifact_dir}/boot" \ + "${WORK_DIR}/32bit/${typeKERNEL_32bit[$index]}.img" + + # Copy modules + cp -r "${SOURCE_DIRECTORY}/${artifact_dir}/lib/modules"/* "${WORK_DIR}/32bit/modules/" + done + + # Copy DTBs and overlays from dtb-gcc artifact + cp ${SOURCE_DIRECTORY}/dtb-gcc/dtb/arch/arm/boot/dts/broadcom/*.dtb ${WORK_DIR}/32bit/ + mkdir -p ${WORK_DIR}/32bit/overlays + for overlay in ${SOURCE_DIRECTORY}/dtb-gcc/dtb/arch/arm/boot/dts/overlays/*-overlay.dtbo; do + base=$(basename "$overlay" -overlay.dtbo) + cp "$overlay" ${WORK_DIR}/32bit/overlays/${base}.dtbo + done + + if [ -z "$(ls ${WORK_DIR}/32bit/*.dtb 2>/dev/null)" ] || [ -z "$(ls ${WORK_DIR}/32bit/overlays/*.dtbo 2>/dev/null)" ]; then + echo "Missing one or more required files from the 32bit artifacts." + exit 1 + fi + + echo "Creating rpi_modules_32bit.tar.gz..." + tar -C ${WORK_DIR}/32bit/modules -czvf ${OUTPUT_DIRECTORY}/rpi_modules_32bit.tar.gz . >/dev/null + rm -r ${WORK_DIR}/32bit/modules + + echo "" + echo "=== Processing 64-bit artifacts ===" + mkdir -p ${WORK_DIR}/64bit + mkdir -p ${WORK_DIR}/64bit/modules + create_version_file 64bit "${typeBCM_64bit[*]}" + for index in "${!typeBCM_64bit[@]}"; do + local artifact_dir="adi_${typeBCM_64bit[$index]}_defconfig-gcc-arm64" + echo "Processing ${artifact_dir}..." 
+ + # Extract and rename kernel image + extract_kernel_image \ + "${SOURCE_DIRECTORY}/${artifact_dir}/boot" \ + "${WORK_DIR}/64bit/${typeKERNEL_64bit[$index]}.img" + + # Copy modules + cp -r "${SOURCE_DIRECTORY}/${artifact_dir}/lib/modules"/* "${WORK_DIR}/64bit/modules/" + done + + # Copy DTBs and overlays from dtb-gcc artifact + cp ${SOURCE_DIRECTORY}/dtb-gcc/dtb/arch/arm64/boot/dts/broadcom/*.dtb ${WORK_DIR}/64bit/ + mkdir -p ${WORK_DIR}/64bit/overlays + for overlay in ${SOURCE_DIRECTORY}/dtb-gcc/dtb/arch/arm/boot/dts/overlays/*-overlay.dtbo; do + base=$(basename "$overlay" -overlay.dtbo) + cp "$overlay" ${WORK_DIR}/64bit/overlays/${base}.dtbo + done + + if [ -z "$(ls ${WORK_DIR}/64bit/*.dtb 2>/dev/null)" ] || [ -z "$(ls ${WORK_DIR}/64bit/overlays/*.dtbo 2>/dev/null)" ]; then + echo "Missing one or more required files from the 64bit artifacts." + exit 1 + fi + + echo "Creating rpi_modules_64bit.tar.gz..." + tar -C ${WORK_DIR}/64bit/modules -czvf ${OUTPUT_DIRECTORY}/rpi_modules_64bit.tar.gz . >/dev/null + rm -r ${WORK_DIR}/64bit/modules + + echo "" + echo "=== Artifacts structure created ===" +} + +#create final boot archives and properties file +artifacts_local() { + artifacts_structure + + echo "" + echo "=== Creating boot archives ===" + + cd ${WORK_DIR}/32bit || exit 1 + tar -czvf ${OUTPUT_DIRECTORY}/rpi_latest_boot_32bit.tar.gz . >/dev/null + md5_modules_32bit=$(md5sum ${OUTPUT_DIRECTORY}/rpi_modules_32bit.tar.gz | cut -d ' ' -f 1) + md5_boot_32bit=$(md5sum ${OUTPUT_DIRECTORY}/rpi_latest_boot_32bit.tar.gz | cut -d ' ' -f 1) + + cd ${WORK_DIR}/64bit || exit 1 + tar -czvf ${OUTPUT_DIRECTORY}/rpi_latest_boot_64bit.tar.gz . 
>/dev/null + md5_modules_64bit=$(md5sum ${OUTPUT_DIRECTORY}/rpi_modules_64bit.tar.gz | cut -d ' ' -f 1) + md5_boot_64bit=$(md5sum ${OUTPUT_DIRECTORY}/rpi_latest_boot_64bit.tar.gz | cut -d ' ' -f 1) + + cat > ${OUTPUT_DIRECTORY}/rpi_archives_properties.txt << EOF +git_branch=${BUILD_SOURCEBRANCHNAME} +https://swdownloads.analog.com/cse/linux_rpi/${BUILD_SOURCEBRANCHNAME}/rpi_modules_32bit.tar.gz +https://swdownloads.analog.com/cse/linux_rpi/${BUILD_SOURCEBRANCHNAME}/rpi_latest_boot_32bit.tar.gz +checksum_modules_32bit=${md5_modules_32bit} +checksum_boot_files_32bit=${md5_boot_32bit} +https://swdownloads.analog.com/cse/linux_rpi/${BUILD_SOURCEBRANCHNAME}/rpi_modules_64bit.tar.gz +https://swdownloads.analog.com/cse/linux_rpi/${BUILD_SOURCEBRANCHNAME}/rpi_latest_boot_64bit.tar.gz +checksum_modules_64bit=${md5_modules_64bit} +checksum_boot_files_64bit=${md5_boot_64bit} +git_sha=${GIT_SHA} +git_sha_date=${GIT_SHA_DATE} +EOF + + echo "" + echo "=== Done ===" + echo "Output: ${OUTPUT_DIRECTORY}/" + ls -lh ${OUTPUT_DIRECTORY}/rpi_*.tar.gz ${OUTPUT_DIRECTORY}/rpi_archives_properties.txt +} + +artifacts_${1:-local} From 3bfc21857a2837b607cd2cb951e597721b9e1a4b Mon Sep 17 00:00:00 2001 From: Liviu Tomoiaga Date: Wed, 29 Apr 2026 16:32:26 +0300 Subject: [PATCH 2/4] ci: prepare_sdg_artefacts: Prepare and deploy SDG Linux Artefacts Signed-off-by: Liviu Tomoiaga --- .../workflows/deploy-sdg-linux-artifacts.yml | 207 ++++++++++++ ci/prepare_artifacts_structure.sh | 294 ++++++++++++++++++ ci/prepare_sdg_linux_artifacts.sh | 264 ++++++++++++++++ 3 files changed, 765 insertions(+) create mode 100644 .github/workflows/deploy-sdg-linux-artifacts.yml create mode 100755 ci/prepare_artifacts_structure.sh create mode 100644 ci/prepare_sdg_linux_artifacts.sh diff --git a/.github/workflows/deploy-sdg-linux-artifacts.yml b/.github/workflows/deploy-sdg-linux-artifacts.yml new file mode 100644 index 00000000000000..c83a864b563976 --- /dev/null +++ 
b/.github/workflows/deploy-sdg-linux-artifacts.yml @@ -0,0 +1,207 @@ +name: Deploy SDG Linux Artifacts to Cloudsmith +on: + workflow_call: + inputs: + artifacts: + required: true + type: string + CLOUDSMITH_REPO: + required: false + type: string + default: 'sdg-linux' + BUILD_SOURCEBRANCH: + required: false + default: ${{ github.ref_name }} + type: string + PR_TARGET_BRANCH: + required: false + type: string + default: '' + PR_NUMBER: + required: false + type: string + default: '' + + +permissions: + id-token: write + contents: write + actions: read + + +jobs: + prepare_and_upload_sdg_linux_artefacts: + runs-on: [self-hosted, repo-only] + permissions: + id-token: write + contents: write + actions: read + + steps: + - uses: analogdevicesinc/doctools/checkout@action + + - name: Prepare path + run: | + rm -rf dist ; mkdir dist + + - name: Get sources + run: | + file=$(echo "${{ github.workflow_ref }}" | cut -d'/' -f3- | cut -d'@' -f1) + workflow_ref=$(awk ' + $0 ~ "uses:" && $0 ~ "deploy-sdg-linux-artifacts.yml" { + sub(/^[[:space:]]*uses:[[:space:]]*/, "", $0); + print $0; + exit + } + ' "$file") + echo $workflow_ref + + org_repo="$(echo "$workflow_ref" | cut -d'/' -f1-2)" + ref="$(echo "$workflow_ref" | cut -d'@' -f2)" + + get_file () { + echo https://raw.githubusercontent.com/$org_repo/$ref/$1 + curl -sL -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" -o $1 \ + https://raw.githubusercontent.com/$org_repo/$ref/$1 + } + + mkdir -p ci + get_file ci/prepare_sdg_linux_artifacts.sh + get_file ci/lib.sh + get_file ci/prepare_artifacts_structure.sh + + chmod +x ci/*.sh + + curl -sL -o upload_to_cloudsmith.py \ + https://raw.githubusercontent.com/analogdevicesinc/wiki-scripts/main/utils/cloudsmith_utils/upload_to_cloudsmith.py + + ls -la ci/ + + - name: Download workflow artifacts + run: | + gh-get-workflow-artifacts() + { + artifacts=$(curl -sfL \ + -H "Accept: application/vnd.github+json" \ + -H "Authorization: Bearer $1 " \ + -H "X-GitHub-Api-Version: 2022-11-28" \ 
+ https://api.github.com/repos/$2/actions/runs/$3/artifacts) + echo "$artifacts" + } + + gh-download-artifact () { + curl -sfL \ + -H "Authorization: Bearer $1" \ + -H "Accept: application/vnd.github+json" \ + -o "$2" \ + "$3" + } + + artifacts=$( + gh-get-workflow-artifacts ${{ secrets.GITHUB_TOKEN }} \ + ${{ github.repository }} \ + ${{ github.run_id }} + ) + + total_count=$(echo $artifacts | jq '.total_count' -r) + + if [[ "$total_count" == "null" ]] || [[ "$total_count" == "0" ]]; then + exit + fi + + artifacts_files=$(echo $artifacts | jq '[.artifacts[] | [.name, .archive_download_url]]' -r) + patterns="${{ inputs.artifacts }}" + mkdir -p artefacts + echo $artifacts_files | jq -r '.[] | @tsv' | while IFS=$'\t' read -r name url; do + matched=0 + for p in $patterns; do + if [[ $name == $p ]]; then + matched=1 + break + fi + done + + if [[ "$matched" == "1" ]]; then + echo "$name" + gh-download-artifact ${{ secrets.GITHUB_TOKEN }} \ + "artefacts/$name.zip" \ + "$url" + else + echo "$name no match, skipped" + fi + done + + - name: Prepare raw files for upload + run: | + mkdir -p raw + for zip in artefacts/*.zip; do + name=$(basename "${zip%.zip}") + mkdir -p "raw/$name" + unzip -q "$zip" -d "raw/$name" + rm "$zip" + done + ls -la raw/ + + - name: Process artifacts + run: | + source ci/lib.sh + source ci/prepare_sdg_linux_artifacts.sh + process_artifacts + + - name: Setup Cloudsmith OIDC + uses: cloudsmith-io/cloudsmith-cli-action@v1.0.5 + with: + oidc-namespace: ${{ vars.CLOUDSMITH_NAMESPACE }} + oidc-service-slug: ${{ secrets.CLOUDSMITH_SERVICE_SLUG }} + oidc-auth-only: 'true' + + - name: Install Cloudsmith CLI + run: | + python3 -m venv venv + source ./venv/bin/activate + python3 -m ensurepip + pip3 install cloudsmith-cli + + - name: Generate structure and upload + run: | + source ./venv/bin/activate + + # Set timestamp + export TIMESTAMP=$(date +%Y_%m_%d-%H_%M_%S) + + # Get git SHA (PR source commit or build commit) + if [[ -n "${{ inputs.PR_NUMBER }}" ]]; then 
+ export GIT_SHA="${{ github.event.pull_request.head.sha }}" + else + export GIT_SHA="${{ github.sha }}" + fi + + # Get git SHA date (format: YYYY-MM-DD-HH-MM) + MERGE_COMMIT_SHA=$(git rev-parse --short HEAD) + export GIT_SHA_DATE=$(git show -s --format=%cd --date=format:'%Y-%m-%d %H:%M' ${MERGE_COMMIT_SHA} | sed -e "s/ \|\:/-/g") + + # Determine branch name (works for main, release/*, feature/*, etc.) + BRANCH_NAME=$(echo "${{ inputs.BUILD_SOURCEBRANCH }}" | awk -F'/' '{print $NF}') + export BUILD_SOURCEBRANCHNAME="$BRANCH_NAME" + export SOURCE_DIRECTORY="$(pwd)" + + # Build version path based on branch type + if [[ -n "${{ inputs.PR_TARGET_BRANCH }}" ]]; then + VERSION_PATH="linux/PRs/${{ inputs.PR_TARGET_BRANCH }}/pr_${{ inputs.PR_NUMBER }}" + elif [[ "$BRANCH_NAME" == "main" ]]; then + VERSION_PATH="linux/main" + else + VERSION_PATH="linux/releases/$BRANCH_NAME" + fi + + source ci/lib.sh + source ci/prepare_artifacts_structure.sh + + # Upload + python3 upload_to_cloudsmith.py \ + --repo=${{ inputs.CLOUDSMITH_REPO }} \ + --version="${VERSION_PATH}/${TIMESTAMP}" \ + --local_path="${TIMESTAMP}" \ + --token="${CLOUDSMITH_API_KEY}" \ + --max_workers=10 + \ No newline at end of file diff --git a/ci/prepare_artifacts_structure.sh b/ci/prepare_artifacts_structure.sh new file mode 100755 index 00000000000000..26caa0a36ef306 --- /dev/null +++ b/ci/prepare_artifacts_structure.sh @@ -0,0 +1,294 @@ +#!/bin/bash +# SPDX-License-Identifier: GPL-2.0-only +# +# Artifacts Structure and Upload Script +# Reorganizes CI Linux build artifacts in a structured format. The full path +# will be used for versioning the artefact. 
+# +# Environment variables (should be exported before sourcing this script): +# SOURCE_DIRECTORY - Directory containing build artifacts (default: pwd) +# TIMESTAMP - Timestamp for output directory (default: auto-generated) +# BUILD_SOURCEBRANCHNAME - Git branch name (default: main) +# GIT_SHA - Git commit SHA (default: unknown) +# GIT_SHA_DATE - Git commit date in YYYY-MM-DD-HH-MM format (default: unknown) + +set -e +shopt -s nullglob # Prevent glob patterns from returning themselves when no match + +# Configuration +SOURCE_DIRECTORY="${SOURCE_DIRECTORY:-$(pwd)}" +TIMESTAMP="${TIMESTAMP:-$(date +%Y_%m_%d-%H_%M_%S)}" +BUILD_SOURCEBRANCHNAME="${BUILD_SOURCEBRANCHNAME:-main}" +GIT_SHA="${GIT_SHA:-unknown}" +GIT_SHA_DATE="${GIT_SHA_DATE:-unknown}" + +# Paths +RAW_DIR="${SOURCE_DIRECTORY}/raw" +DTB_BASE="${RAW_DIR}/dtb-gcc/dtb" +OUTPUT_DIR="${SOURCE_DIRECTORY}/${TIMESTAMP}" + +# Architecture to platform mapping +declare -A typeARCH +typeARCH=( + ["arm"]="arria10 cyclone5 zynq" + ["arm64"]="versal zynqmp" + ["microblaze"]="kc705 kcu105 vc707 vcu118 vcu128" +) + +# Kernel image locations per platform +declare -A image_to_copy +image_to_copy=( + ["arria10"]="socfpga_adi_defconfig-gcc-arm/boot/zImage" + ["cyclone5"]="socfpga_adi_defconfig-gcc-arm/boot/zImage" + ["zynq"]="zynq_xcomm_adv7511_defconfig-gcc-arm/boot/uImage" + ["versal"]="adi_versal_defconfig-gcc-arm64/boot/Image" + ["zynqmp"]="adi_zynqmp_defconfig-gcc-arm64/boot/Image" +) + +# DTB source paths per architecture +declare -A dtb_paths +dtb_paths=( + ["arm_xilinx"]="arch/arm/boot/dts/xilinx" + ["arm_intel"]="arch/arm/boot/dts/intel/socfpga" + ["arm64"]="arch/arm64/boot/dts/xilinx" + ["microblaze"]="arch/microblaze/boot/dts" +) + +# DTB patterns per platform +declare -A dtb_patterns +dtb_patterns=( + ["zynq"]="zynq-" + ["arria10"]="socfpga_arria10" + ["cyclone5"]="socfpga_cyclone5" + ["versal"]="versal-" + ["zynqmp"]="zynqmp-" +) + + +####################################### +# Log message with timestamp 
+####################################### +log() { + echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*" +} + +####################################### +# Create directory structure +####################################### +create_structure() { + log "Creating directory structure: ${OUTPUT_DIR}" + + mkdir -p "${OUTPUT_DIR}" + + for arch in "${!typeARCH[@]}"; do + mkdir -p "${OUTPUT_DIR}/${arch}" + + # Create platform subdirectories for non-microblaze architectures + if [[ "${arch}" != "microblaze" ]]; then + for platform in ${typeARCH[$arch]}; do + mkdir -p "${OUTPUT_DIR}/${arch}/${platform}" + done + fi + done +} + +####################################### +# Generate git properties file +####################################### +generate_properties() { + log "Generating git_properties.txt" + + cat > "${OUTPUT_DIR}/git_properties.txt" << EOF + git_branch=${BUILD_SOURCEBRANCHNAME} + git_sha=${GIT_SHA} + git_sha_date=${GIT_SHA_DATE} +EOF +} + +####################################### +# Create extlinux.conf for a platform +####################################### +create_extlinux() { + local platform=$1 + local output_dir=$2 + + if [[ "${platform}" == "arria10" ]]; then + dtb_name="socfpga_arria10_socdk_sdmmc.dtb" + else + dtb_name="socfpga.dtb" + fi + + cat > "${output_dir}/extlinux.conf" < ${arch}/${platform}/" + + # Create extlinux.conf for arria10 and cyclone5 + if [[ "${platform}" == "arria10" || "${platform}" == "cyclone5" ]]; then + create_extlinux "${platform}" "${image_dst}" + log " Created: extlinux.conf -> ${arch}/${platform}/" + fi + else + log " WARNING: Image not found: ${image_src}" + fi + done + done +} + +####################################### +# Copy DTBs for ARM architecture +####################################### +copy_arm_dtbs() { + log "Copying ARM DTBs" + + local count=0 + + # Copy zynq DTBs from xilinx directory + for dtb in "${DTB_BASE}/${dtb_paths[arm_xilinx]}"/zynq-*.dtb; do + if [[ -f "$dtb" ]]; then + cp "$dtb" "${OUTPUT_DIR}/arm/" + count=$((count + 
1)) + fi + done + + # Copy arria10 and cyclone5 DTBs from intel/socfpga directory + for dtb in "${DTB_BASE}/${dtb_paths[arm_intel]}"/socfpga_arria10*.dtb; do + if [[ -f "$dtb" ]]; then + cp "$dtb" "${OUTPUT_DIR}/arm/" + count=$((count + 1)) + fi + done + + for dtb in "${DTB_BASE}/${dtb_paths[arm_intel]}"/socfpga_cyclone5*.dtb; do + if [[ -f "$dtb" ]]; then + cp "$dtb" "${OUTPUT_DIR}/arm/" + count=$((count + 1)) + fi + done + + log " Copied ${count} ARM DTBs" +} + +####################################### +# Copy DTBs for ARM64 architecture +####################################### +copy_arm64_dtbs() { + log "Copying ARM64 DTBs" + + local count=0 + + # Copy versal DTBs + for dtb in "${DTB_BASE}/${dtb_paths[arm64]}"/versal-*.dtb; do + if [[ -f "$dtb" ]]; then + cp "$dtb" "${OUTPUT_DIR}/arm64/" + count=$((count + 1)) + fi + done + + # Copy zynqmp DTBs + for dtb in "${DTB_BASE}/${dtb_paths[arm64]}"/zynqmp-*.dtb; do + if [[ -f "$dtb" ]]; then + cp "$dtb" "${OUTPUT_DIR}/arm64/" + count=$((count + 1)) + fi + done + + log " Copied ${count} ARM64 DTBs" +} + +####################################### +# Copy DTBs for Microblaze architecture +####################################### +copy_microblaze_dtbs() { + log "Copying Microblaze DTBs" + + local count=0 + + for dtb in "${DTB_BASE}/${dtb_paths[microblaze]}"/*.dtb; do + if [[ -f "$dtb" ]]; then + cp "$dtb" "${OUTPUT_DIR}/microblaze/" + count=$((count + 1)) + fi + done + + log " Copied ${count} Microblaze DTBs" +} + +####################################### +# Print summary +####################################### +print_summary() { + echo "" + echo "==========================================" + echo "Artifacts Structure Complete" + echo "==========================================" + echo "" + echo "Output directory: ${OUTPUT_DIR}" + echo "" + echo "Structure:" + echo " ${TIMESTAMP}/" + echo " ├── git_properties.txt" + echo " ├── arm/" + echo " │ ├── arria10/ (zImage, extlinux.conf)" + echo " │ ├── cyclone5/ (zImage, 
extlinux.conf)"
+    echo " │   ├── zynq/ (uImage)"
+    echo " │   └── *.dtb ($(ls "${OUTPUT_DIR}/arm/"*.dtb 2>/dev/null | wc -l) files)"
+    echo " ├── arm64/"
+    echo " │   ├── versal/ (Image)"
+    echo " │   ├── zynqmp/ (Image)"
+    echo " │   └── *.dtb ($(ls "${OUTPUT_DIR}/arm64/"*.dtb 2>/dev/null | wc -l) files)"
+    echo " └── microblaze/"
+    echo "     └── *.dtb ($(ls "${OUTPUT_DIR}/microblaze/"*.dtb 2>/dev/null | wc -l) files)"
+    echo ""
+}
+
+#######################################
+# Main function
+#######################################
+main() {
+
+    log "Starting artifacts structure"
+    log "Source: ${SOURCE_DIRECTORY}"
+    log "Output: ${OUTPUT_DIR}"
+
+    # Validate source directory
+    if [[ ! -d "${RAW_DIR}" ]]; then
+        echo "ERROR: Raw directory not found: ${RAW_DIR}"
+        exit 1
+    fi
+
+    # Execute steps
+    create_structure
+    generate_properties
+    copy_kernel_images
+    copy_arm_dtbs
+    copy_arm64_dtbs
+    copy_microblaze_dtbs
+    print_summary
+
+    log "Done!"
+}
+
+# Run main function
+main "$@"
diff --git a/ci/prepare_sdg_linux_artifacts.sh b/ci/prepare_sdg_linux_artifacts.sh
new file mode 100644
index 00000000000000..e060d7c4b65418
--- /dev/null
+++ b/ci/prepare_sdg_linux_artifacts.sh
@@ -0,0 +1,264 @@
+#!/bin/bash
+#
+# Process downloaded artifacts: unpack and expand images
+#
+# This script processes artifacts downloaded to raw/ and outputs to dist/
+# - Unpacks DTBs and kernel images
+# - Expands microblaze/nios2 images by embedding DTBs
+#
+
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+source "${SCRIPT_DIR}/lib.sh"
+
+_read_be32() {
+    local bytes=$(dd if="$1" bs=1 skip=$2 count=4 2>/dev/null | xxd -p)
+    echo $((16#$bytes))
+}
+
+_read_le32() {
+    local bytes=$(dd if="$1" bs=1 skip=$2 count=4 2>/dev/null | xxd -p)
+    echo $((16#${bytes:6:2}${bytes:4:2}${bytes:2:2}${bytes:0:2}))
+}
+
+_write_le32() {
+    local value=$1
+    printf "$(printf '\\x%02x\\x%02x\\x%02x\\x%02x' 2>/dev/null \
+        $((value & 0xff)) \
+        $(((value >> 8) & 0xff)) \
+        $(((value >> 16) & 0xff)) \
+        $(((value >> 24) & 0xff)))"
+}
+ +_find_empty_dtb() { + # Finds dtb slot in a Image + # Assumes you built the Image with an 'empty' dtb (/dts-v1/; / { };) + # Returns: dtb_start dtb_size dtb_boundary dtb_available + local image=$1 + + python3 -c " +import struct +import sys + +# 'Empty' DTB struct block: FDT_BEGIN_NODE, '', FDT_END_NODE, FDT_END +EMPTY_STRUCT = bytes.fromhex('00000001000000000000000200000009') +DTB_MAGIC = 0xd00dfeed +OFF_DT_STRUCT = 56 + +with open('$image', 'rb') as f: + data = f.read() + +pos = 0 +while True: + pos = data.find(EMPTY_STRUCT, pos) + if pos == -1: + break + + dtb_start = pos - OFF_DT_STRUCT + if dtb_start < 0: + pos += 1 + continue + + magic = struct.unpack('>I', data[dtb_start:dtb_start+4])[0] + if magic != DTB_MAGIC: + pos += 1 + continue + + off_dt_struct = struct.unpack('>I', data[dtb_start+8:dtb_start+12])[0] + if off_dt_struct != OFF_DT_STRUCT: + pos += 1 + continue + + dtb_size = struct.unpack('>I', data[dtb_start+4:dtb_start+8])[0] + dtb_end = dtb_start + dtb_size + + # Find boundary (first non-zero byte after DTB) + boundary = dtb_end + while boundary < len(data) and data[boundary] == 0: + boundary += 1 + + available = boundary - dtb_start + print(f'{dtb_start} {dtb_size} {boundary} {available}') + sys.exit(0) + +sys.exit(1) +" +} + +_unpack_dtb () { + local name="$1" + local arch= + + echo " Unpack dtb" + + for path in "raw/$name/dtb/arch"/*; do + arch=$(echo "$path" | cut -d'/' -f5) + [[ "$arch" == "microblaze" ]] && continue + [[ "$arch" == "nios2" ]] && continue + + echo " $arch" + + mkdir -p "dist/$arch/boot" + cp -a "$path/boot/dts" "dist/$arch/boot" + done +} + +_unpack_kernel () { + local name="$1" + + ctx="raw/$name/context.txt" + [[ ! 
-f "$ctx" ]] && return + source "$ctx" + out="dist/$compiler_arch" + + echo " Unpack $kernel_defconfig" + + mkdir -p "$out/boot/kernel/$kernel_defconfig" + cp -a "raw/$name/boot/$kernel" "$out/boot/kernel/$kernel_defconfig" + + mkdir -p "$out/lib/modules_set/$kernel_defconfig" + cp -a "raw/$name/lib/modules/$kernel_release" "$out/lib/modules_set/$kernel_defconfig" +} + +_expand_microblaze () { + local image="dist/microblaze/boot/kernel/adi_mb_defconfig/simpleImage.generic.strip" + local path="raw/dtb-gcc/dtb/arch/microblaze/boot/dts" + local count=0 + + log_info "Expand microblaze" + + [[ ! -d "$path" ]] && return + [[ ! -f "$image" ]] && return + + local dtb_info + dtb_info=$(_find_empty_dtb "$image") + [ -z "$dtb_info" ] && { log_error "Empty DTB not found (expected /dts-v1/; / { };)" ; return 1 ; } + + local dtb_start dtb_size dtb_boundary dtb_available + read dtb_start dtb_size dtb_boundary dtb_available <<< "$dtb_info" + + echo " Found empty DTB at 0x$(printf '%x' $dtb_start) (size: $dtb_size, available: $dtb_available bytes)" + + local dtb_length=$((dtb_boundary - dtb_start)) + + for file in "$path"/*; do + filename=$(basename $file) + filename="${filename%.*}" + image_out="$(dirname $image)/simpleImage.$filename.strip" + + echo -n " $filename" + [ -f $image_out ] && { echo " (cached)" ; continue ;} + echo "" + + local new_dtb_size=$(stat -c%s "$file") + [ $new_dtb_size -gt $dtb_available ] && { log_error "DTB too large: $new_dtb_size > $dtb_available bytes" ; return 1 ; } + + local src_magic=$(_read_be32 "$file" 0) + [ $src_magic -ne $((0xd00dfeed)) ] && { log_error "Source DTB has invalid magic: 0x$(printf '%x' $src_magic)" ; return 1 ;} + + cp "$image" "$image_out" + dd if=/dev/zero status=none \ + of="$image_out" \ + bs=1 seek=$dtb_start conv=notrunc count=$dtb_length + dd if="$file" status=none \ + of="$image_out" \ + bs=1 seek=$dtb_start conv=notrunc + ((count+=1)) + done + + command rm $image + + echo " Expanded $count simpleImages" +} + +_expand_nios2 
() {
  local image="dist/nios2/boot/kernel/adi_nios2_defconfig/zImage"
  local path="raw/dtb-gcc/dtb/arch/nios2/boot/dts"
  # Fixed offsets inside the nios2 zImage wrapper: the gzipped vmlinux
  # payload and the little-endian field holding its stored length.
  local vmlinux_gz_offset=$((16#4064))
  local input_len_offset=$((16#4060))
  local count=0

  log_info "Expand nios2"

  # Nothing to do if either the DTB set or the base image is absent.
  [[ ! -d "$path" ]] && return
  [[ ! -f "$image" ]] && return

  local tmpdir=$(mktemp -d)
  local size=$(_read_le32 "$image" $input_len_offset)

  # Carve the gzipped vmlinux out of the zImage and decompress it.
  dd if="$image" status=none \
    of="$tmpdir/vmlinux.gz" \
    bs=1 skip=$vmlinux_gz_offset count=$size
  gunzip -c "$tmpdir/vmlinux.gz" > "$tmpdir/vmlinux.bin"

  local dtb_info
  dtb_info=$(_find_empty_dtb "$tmpdir/vmlinux.bin")
  [ -z "$dtb_info" ] && { log_error "Empty DTB not found (expected /dts-v1/; / { };)" ; command rm -r "$tmpdir" ; return 1 ; }

  local dtb_start dtb_size dtb_boundary dtb_available
  read dtb_start dtb_size dtb_boundary dtb_available <<< "$dtb_info"

  echo " Found empty DTB at 0x$(printf '%x' $dtb_start) (size: $dtb_size, available: $dtb_available bytes)"

  for file in "$path"/*; do
    filename=$(basename "${file%.*}")
    image_out="$(dirname "$image")/zImage.$filename"

    echo -n " $filename"
    [ -f "$image_out" ] && { echo " (cached)" ; continue ;}

    echo ""
    local new_dtb_size=$(stat -c%s "$file")
    # Remove the scratch directory on every error path, mirroring the
    # _find_empty_dtb failure path above (it was leaked here before).
    [ $new_dtb_size -gt $dtb_available ] && { log_error "DTB too large: $new_dtb_size > $dtb_available bytes" ; command rm -r "$tmpdir" ; return 1 ; }
    local src_magic=$(_read_be32 "$file" 0)
    [ $src_magic -ne $((0xd00dfeed)) ] && { log_error "Source DTB has invalid magic: 0x$(printf '%x' $src_magic)" ; command rm -r "$tmpdir" ; return 1 ;}

    # Patch the board DTB into the empty slot, recompress deterministically
    # (-n strips the gzip timestamp), then splice payload and length field
    # back into a per-board copy of the zImage.
    cp "$tmpdir/vmlinux.bin" "$tmpdir/vmlinux.0.bin"
    dd if="$file" status=none \
      of="$tmpdir/vmlinux.0.bin" \
      bs=1 seek=$dtb_start conv=notrunc
    gzip -n -9 < "$tmpdir/vmlinux.0.bin" > "$tmpdir/vmlinux.0.gz"

    local new_size=$(stat -c %s "$tmpdir/vmlinux.0.gz")
    cp "$image" "$image_out"
    _write_le32 $new_size | dd status=none \
      of="$image_out" \
      bs=1 seek=$input_len_offset conv=notrunc
    dd if="$tmpdir/vmlinux.0.gz" status=none \
of="$image_out" \
      bs=1 seek=$vmlinux_gz_offset conv=notrunc
    ((count+=1))
  done

  command rm -r "$tmpdir"
  command rm "$image"

  echo " Expanded $count zImages"
}

# Entry point: unpack every downloaded artifact found under raw/ into dist/,
# then expand the per-board microblaze and nios2 boot images.
# Uses helpers defined earlier in this script: log_step, log_info,
# _unpack_dtb, _unpack_kernel, _expand_microblaze, _expand_nios2.
process_artifacts() {
  log_step "Process artifacts"

  mkdir -p dist

  # Unpack all raw artifacts
  for dir in raw/*/; do
    [[ ! -d "$dir" ]] && continue
    name=$(basename "$dir")
    # dtb-gcc is the device-tree artifact; *-headers artifacts are skipped;
    # everything else is treated as a kernel artifact.
    if [[ "$name" == "dtb-gcc" ]]; then
      _unpack_dtb "$name"
    elif [[ "$name" != *-headers ]]; then
      _unpack_kernel "$name"
    fi
  done

  # Expand images
  # SKIP_EXPAND_* env vars act as opt-out switches; unset means "do expand".
  [ "$SKIP_EXPAND_MICROBLAZE" == "true" ] && log_info "Expand microblaze skipped" || _expand_microblaze
  [ "$SKIP_EXPAND_NIOS2" == "true" ] && log_info "Expand nios2 skipped" || _expand_nios2

  log_info "Wrote to dist/"
}

# Run if executed directly
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
  process_artifacts "$@"
fi
From 9b341e5115e4b68c75682432b61cd7e07a0f7b00 Mon Sep 17 00:00:00 2001 From: Andreea Andrisan Date: Wed, 29 Apr 2026 16:32:43 +0300 Subject: [PATCH 3/4] ci: upload_to_cloudsmith: Use input variable if provided Signed-off-by: Andreea Andrisan --- .github/workflows/upload-to-cloudsmith.yml | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/.github/workflows/upload-to-cloudsmith.yml b/.github/workflows/upload-to-cloudsmith.yml index 055a302c5317dd..4e4ee7900ad349 100644 --- a/.github/workflows/upload-to-cloudsmith.yml +++ b/.github/workflows/upload-to-cloudsmith.yml @@ -6,6 +6,14 @@ on: artifacts: required: true type: string + cloudsmith-repository: + required: false + default: ${{ vars.CLOUDSMITH_REPOSITORY }} + type: string + version: + required: false + default: ${{ github.sha }} + type: string secrets: CLOUDSMITH_SERVICE_SLUG: required: false @@ -143,15 +151,15 @@ jobs: package_file=$( cs-package-upload ${{ env.CLOUDSMITH_API_KEY }} \ ${{ vars.CLOUDSMITH_NAMESPACE }} \ - ${{ vars.CLOUDSMITH_REPOSITORY }} \ + ${{ inputs.cloudsmith-repository }} \ "$file" ) cs-package-store-raw ${{ env.CLOUDSMITH_API_KEY
}} \ + ${{ vars.CLOUDSMITH_NAMESPACE }} \ - ${{ vars.CLOUDSMITH_REPOSITORY }} \ + ${{ inputs.cloudsmith-repository }} \ $package_file \ $file \ "$tags_" \ - ${GITHUB_SHA:0:12} + ${{ inputs.version }} done From ce4392641e420ef039d2a0b6300e79b3ec5a5f53 Mon Sep 17 00:00:00 2001 From: Liviu Tomoiaga Date: Mon, 11 May 2026 15:02:53 +0300 Subject: [PATCH 4/4] Fix review findings Signed-off-by: Liviu Tomoiaga --- .../workflows/deploy-sdg-linux-artifacts.yml | 34 ++++++++----------- .github/workflows/deploy_rpi_artifacts.yml | 23 ++++++------- 2 files changed, 24 insertions(+), 33 deletions(-) diff --git a/.github/workflows/deploy-sdg-linux-artifacts.yml b/.github/workflows/deploy-sdg-linux-artifacts.yml index c83a864b563976..9b3bca705ebe8b 100644 --- a/.github/workflows/deploy-sdg-linux-artifacts.yml +++ b/.github/workflows/deploy-sdg-linux-artifacts.yml @@ -5,19 +5,19 @@ on: artifacts: required: true type: string - CLOUDSMITH_REPO: + cloudsmith-repo: required: false type: string default: 'sdg-linux' - BUILD_SOURCEBRANCH: + build-sourcebranch: required: false default: ${{ github.ref_name }} type: string - PR_TARGET_BRANCH: + pr-target-branch: required: false type: string default: '' - PR_NUMBER: + pr-number: required: false type: string default: '' @@ -25,21 +25,19 @@ on: permissions: id-token: write - contents: write + contents: read actions: read jobs: - prepare_and_upload_sdg_linux_artefacts: - runs-on: [self-hosted, repo-only] + prepare_and_upload_sdg_linux_artifacts: + runs-on: [ubuntu-slim] permissions: id-token: write contents: write actions: read steps: - - uses: analogdevicesinc/doctools/checkout@action - - name: Prepare path run: | rm -rf dist ; mkdir dist @@ -55,7 +53,7 @@ jobs: } ' "$file") echo $workflow_ref - +####### Note - Set org_repo and ref to analogdevicesinc and ci right before merging to ci branch ####### org_repo="$(echo "$workflow_ref" | cut -d'/' -f1-2)" ref="$(echo "$workflow_ref" | cut -d'@' -f2)" @@ -170,24 +168,20 @@ jobs: export
TIMESTAMP=$(date +%Y_%m_%d-%H_%M_%S) # Get git SHA (PR source commit or build commit) - if [[ -n "${{ inputs.PR_NUMBER }}" ]]; then - export GIT_SHA="${{ github.event.pull_request.head.sha }}" - else - export GIT_SHA="${{ github.sha }}" - fi - + export GIT_SHA="${{ github.event.pull_request.head.sha || github.sha }}" + # Get git SHA date (format: YYYY-MM-DD-HH-MM) MERGE_COMMIT_SHA=$(git rev-parse --short HEAD) export GIT_SHA_DATE=$(git show -s --format=%cd --date=format:'%Y-%m-%d %H:%M' ${MERGE_COMMIT_SHA} | sed -e "s/ \|\:/-/g") # Determine branch name (works for main, release/*, feature/*, etc.) - BRANCH_NAME=$(echo "${{ inputs.BUILD_SOURCEBRANCH }}" | awk -F'/' '{print $NF}') + BRANCH_NAME=$(echo "${{ inputs.build-sourcebranch }}" | awk -F'/' '{print $NF}') export BUILD_SOURCEBRANCHNAME="$BRANCH_NAME" export SOURCE_DIRECTORY="$(pwd)" # Build version path based on branch type - if [[ -n "${{ inputs.PR_TARGET_BRANCH }}" ]]; then - VERSION_PATH="linux/PRs/${{ inputs.PR_TARGET_BRANCH }}/pr_${{ inputs.PR_NUMBER }}" + if [[ -n "${{ inputs.pr-target-branch }}" ]]; then + VERSION_PATH="linux/PRs/${{ inputs.pr-target-branch }}/pr_${{ inputs.pr-number }}" elif [[ "$BRANCH_NAME" == "main" ]]; then VERSION_PATH="linux/main" else @@ -199,7 +193,7 @@ jobs: # Upload python3 upload_to_cloudsmith.py \ - --repo=${{ inputs.CLOUDSMITH_REPO }} \ + --repo=${{ inputs.cloudsmith-repo }} \ --version="${VERSION_PATH}/${TIMESTAMP}" \ --local_path="${TIMESTAMP}" \ --token="${CLOUDSMITH_API_KEY}" \ diff --git a/.github/workflows/deploy_rpi_artifacts.yml b/.github/workflows/deploy_rpi_artifacts.yml index ef92212c42ff32..1a297ed38286f6 100644 --- a/.github/workflows/deploy_rpi_artifacts.yml +++ b/.github/workflows/deploy_rpi_artifacts.yml @@ -6,10 +6,10 @@ on: artifacts: required: true type: string - BUILD_SOURCEBRANCH: + build-sourcebranch: required: true type: string - CLOUDSMITH_REPO: + cloudsmith-repo: required: false type: string default: 'sdg-linux-rpi' @@ -21,15 +21,13 @@ permissions: 
jobs: prepare_and_upload_rpi_artefacts: - runs-on: [self-hosted, repo-only] + runs-on: [ubuntu-slim] permissions: id-token: write - contents: write + contents: read actions: read steps: - - uses: analogdevicesinc/doctools/checkout@action - - name: Prepare path run: | rm -rf dist ; mkdir dist @@ -63,7 +61,7 @@ jobs: echo "Verifying script content:" head -3 ci/prepare_rpi_artifacts.sh if [[ ! -s ci/prepare_rpi_artifacts.sh ]] || ! head -1 ci/prepare_rpi_artifacts.sh | grep -q "^#"; then - echo "ERROR: Script download failed or invalid content" + echo "::error ::deploy-rpi-artifacts: Script download failed or invalid content" exit 1 fi chmod +x ci/prepare_rpi_artifacts.sh @@ -228,7 +226,7 @@ jobs: fi # Calculate version and tags (before cd to ensure git works) - version="linux_rpi/${{ inputs.BUILD_SOURCEBRANCH }}/$(date +%Y_%m_%d-%H_%M)/" + version="linux_rpi/${{ inputs.build-sourcebranch }}/$(date +%Y_%m_%d-%H_%M)/" tags="git_sha-$(git rev-parse --short HEAD);timestamp_$(date +%Y_%m_%d-%H_%M)" cd dist || { echo "ERROR: Failed to cd into dist" >&2; exit 1; } @@ -240,7 +238,7 @@ jobs: package_file=$( cs-package-upload "${{ env.CLOUDSMITH_API_KEY }}" \ "${{ vars.CLOUDSMITH_NAMESPACE }}" \ - "${{ inputs.CLOUDSMITH_REPO }}" \ + "${{ inputs.cloudsmith-repo }}" \ "$file" ) @@ -251,7 +249,7 @@ jobs: cs-package-store-raw "${{ env.CLOUDSMITH_API_KEY }}" \ "${{ vars.CLOUDSMITH_NAMESPACE }}" \ - "${{ inputs.CLOUDSMITH_REPO }}" \ + "${{ inputs.cloudsmith-repo }}" \ "$package_file" \ "$file" \ "$tags" \ @@ -264,7 +262,7 @@ jobs: package_file=$( cs-package-upload "${{ env.CLOUDSMITH_API_KEY }}" \ "${{ vars.CLOUDSMITH_NAMESPACE }}" \ - "${{ inputs.CLOUDSMITH_REPO }}" \ + "${{ inputs.cloudsmith-repo }}" \ "rpi_archives_properties.txt" ) @@ -275,11 +273,10 @@ jobs: cs-package-store-raw "${{ env.CLOUDSMITH_API_KEY }}" \ "${{ vars.CLOUDSMITH_NAMESPACE }}" \ - "${{ inputs.CLOUDSMITH_REPO }}" \ + "${{ inputs.cloudsmith-repo }}" \ "$package_file" \ "rpi_archives_properties.txt" \ "$tags" \ 
"$version" || exit 1 fi -