From 36e580420ed9ae0040a2ba6e14c27203b7da7724 Mon Sep 17 00:00:00 2001 From: Furkan Sahin Date: Fri, 27 Mar 2026 14:01:13 +0000 Subject: [PATCH 1/9] Add GCE build scripts and cleanup support Add build-gce.sh that wraps the standard build with GCE-specific post-processing: GRUB reinstall for BIOS/UEFI boot, Google guest agent installation, and tar.gz packaging for gcloud image import. Extract post-processing into gce-postprocess.sh so it can be reused by both the standalone script and the CI workflow. Supports x86_64 (BIOS + VIRTIO_SCSI_MULTIQUEUE) and ARM64 (UEFI + GVNIC). Add GCE cleanup to cleanup-images.yml alongside MinIO, R2, and AWS: deletes GCE images and GCS tarballs by version suffix. Also fix chroot builds: set HOME/GOPATH in setup_packages.sh and use linux-modules-extra for gVNIC driver support on GCP. Co-Authored-By: Claude Opus 4.6 --- .github/workflows/cleanup-images.yml | 69 ++++++++++++++ build-gce.sh | 40 +++++++++ common/setup_base.sh | 2 +- common/setup_packages.sh | 2 + gce-postprocess.sh | 130 +++++++++++++++++++++++++++ 5 files changed, 242 insertions(+), 1 deletion(-) create mode 100755 build-gce.sh create mode 100755 gce-postprocess.sh diff --git a/.github/workflows/cleanup-images.yml b/.github/workflows/cleanup-images.yml index 2510329..2aaf3c7 100644 --- a/.github/workflows/cleanup-images.yml +++ b/.github/workflows/cleanup-images.yml @@ -29,6 +29,18 @@ on: description: "🗑️ Clean up AWS AMIs (deregister + delete snapshots)" default: true type: boolean + cleanup_gce: + description: "🗑️ Clean up GCE images and GCS tarballs" + default: false + type: boolean + gcp_project: + description: "GCP project ID for GCE cleanup" + type: string + default: "pelagic-logic-394811" + gcs_bucket: + description: "GCS bucket for GCE tarball cleanup" + type: string + default: "ubicloud-gce-images" aws_ami_regions: description: "AWS regions to clean up AMIs from (comma-separated)" type: string @@ -287,6 +299,63 @@ jobs: fi done + - name: Authenticate to GCP + 
if: ${{ inputs.cleanup_gce }} + uses: google-github-actions/auth@v2 + with: + credentials_json: ${{ secrets.GCP_SA_KEY }} + + - name: Set up Cloud SDK + if: ${{ inputs.cleanup_gce }} + uses: google-github-actions/setup-gcloud@v2 + + - name: Cleanup GCE images + if: ${{ inputs.cleanup_gce }} + run: | + project="${{ inputs.gcp_project }}" + bucket="${{ inputs.gcs_bucket }}" + + echo "### GCE Cleanup" >> $GITHUB_STEP_SUMMARY + + cleanup_gce_image() { + local image_name=$1 + local arch=$2 + + if gcloud compute images describe "${image_name}" --project="${project}" &>/dev/null; then + if [ "${{ inputs.dry_run }}" = "true" ]; then + echo "[DRY RUN] Would delete GCE image: ${image_name}" + echo "- [DRY RUN] Would delete ${arch} image: ${image_name}" >> $GITHUB_STEP_SUMMARY + else + gcloud compute images delete "${image_name}" --project="${project}" --quiet + echo "Deleted GCE image: ${image_name}" + echo "- Deleted ${arch} image: ${image_name}" >> $GITHUB_STEP_SUMMARY + fi + else + echo "GCE image not found: ${image_name}" + echo "- ${arch} image not found: ${image_name}" >> $GITHUB_STEP_SUMMARY + fi + + local tar_file="${image_name}.tar.gz" + if gcloud storage ls "gs://${bucket}/${tar_file}" &>/dev/null; then + if [ "${{ inputs.dry_run }}" = "true" ]; then + echo "[DRY RUN] Would delete tarball: gs://${bucket}/${tar_file}" + echo "- [DRY RUN] Would delete ${arch} tarball: ${tar_file}" >> $GITHUB_STEP_SUMMARY + else + gcloud storage rm "gs://${bucket}/${tar_file}" + echo "Deleted tarball: gs://${bucket}/${tar_file}" + echo "- Deleted ${arch} tarball: ${tar_file}" >> $GITHUB_STEP_SUMMARY + fi + fi + } + + if [ "${{ inputs.architecture }}" = "x64" ] || [ "${{ inputs.architecture }}" = "both" ]; then + cleanup_gce_image "${{ steps.set_image_names.outputs.x64_image_name }}" "x64" + fi + + if [ "${{ inputs.architecture }}" = "arm64" ] || [ "${{ inputs.architecture }}" = "both" ]; then + cleanup_gce_image "${{ steps.set_image_names.outputs.arm64_image_name }}" "arm64" + fi + - 
name: Summary run: | echo "" >> $GITHUB_STEP_SUMMARY diff --git a/build-gce.sh b/build-gce.sh new file mode 100755 index 0000000..2aef40f --- /dev/null +++ b/build-gce.sh @@ -0,0 +1,40 @@ +#!/bin/bash +set -uexo pipefail + +# Usage: ./build-gce.sh [size_gb] +# Builds a GCE-compatible PostgreSQL VM image. +# Runs the standard build (build.sh), then applies GCE-specific +# post-processing (gce-postprocess.sh): +# 1. GRUB reinstall for BIOS boot (virt-resize can corrupt it) +# 2. Google guest agent for metadata/SSH/startup-script support +# 3. Tar.gz packaging for gcloud compute images create + +TARGET_SIZE_GB="${1:-8}" + +HOST_ARCH=$(uname -m) +case $HOST_ARCH in + x86_64) IMAGE_ARCH="x64" ;; + aarch64) IMAGE_ARCH="arm64" ;; + *) echo "Unsupported architecture: $HOST_ARCH"; exit 1 ;; +esac + +# Step 1: Run the standard build +./build.sh "$TARGET_SIZE_GB" + +# Step 2: Apply GCE-specific post-processing +./gce-postprocess.sh "postgres-${IMAGE_ARCH}-image.raw" + +echo "=== GCE build complete ===" +echo "Upload and create GCE image with:" +echo " gcloud storage cp postgres-${IMAGE_ARCH}-gce-image.tar.gz gs://BUCKET/postgres-${IMAGE_ARCH}-gce-image.tar.gz" +if [ "$IMAGE_ARCH" = "arm64" ]; then + echo " gcloud compute images create postgres-ubuntu-2204-${IMAGE_ARCH}-YYYYMMDD \\" + echo " --source-uri=gs://BUCKET/postgres-${IMAGE_ARCH}-gce-image.tar.gz \\" + echo " --family=postgres-ubuntu-2204 \\" + echo " --guest-os-features=GVNIC,UEFI_COMPATIBLE" +else + echo " gcloud compute images create postgres-ubuntu-2204-${IMAGE_ARCH}-YYYYMMDD \\" + echo " --source-uri=gs://BUCKET/postgres-${IMAGE_ARCH}-gce-image.tar.gz \\" + echo " --family=postgres-ubuntu-2204 \\" + echo " --guest-os-features=VIRTIO_SCSI_MULTIQUEUE,GVNIC" +fi diff --git a/common/setup_base.sh b/common/setup_base.sh index 354bcba..d01acea 100644 --- a/common/setup_base.sh +++ b/common/setup_base.sh @@ -12,7 +12,7 @@ apt-get -qq -y satisfy 'openssh-server (>= 1:8.9p1-3ubuntu0.10)' echo "=== [setup_base.sh] Updating 
kernel ===" # Update to kernel 6.8.0-90-generic (Ubuntu 22.04's latest HWE kernel) -apt-get install -y linux-image-6.8.0-90-generic linux-headers-6.8.0-90-generic linux-tools-6.8.0-90-generic +apt-get install -y linux-image-6.8.0-90-generic linux-headers-6.8.0-90-generic linux-tools-6.8.0-90-generic linux-modules-extra-6.8.0-90-generic echo "=== [setup_base.sh] Installing ruby-bundler ===" apt-get install -y ruby-bundler diff --git a/common/setup_packages.sh b/common/setup_packages.sh index 52790d0..5fb679d 100644 --- a/common/setup_packages.sh +++ b/common/setup_packages.sh @@ -2,6 +2,8 @@ set -uexo pipefail export DEBIAN_FRONTEND=noninteractive +export HOME=/root +export GOPATH=/root/go # Read architecture from build_arch.env source /tmp/build_arch.env diff --git a/gce-postprocess.sh b/gce-postprocess.sh new file mode 100755 index 0000000..5d30040 --- /dev/null +++ b/gce-postprocess.sh @@ -0,0 +1,130 @@ +#!/bin/bash +set -uexo pipefail + +# Usage: ./gce-postprocess.sh +# Applies GCE-specific post-processing to a raw disk image: +# 1. GRUB reinstall for BIOS boot (virt-resize can corrupt it) +# 2. Google guest agent for metadata/SSH/startup-script support +# 3. Tar.gz packaging for gcloud compute images create +# +# The raw image is modified in place. +# Outputs: postgres-{arch}-gce-image.tar.gz in the current directory. 
+ +IMAGE_FILE="${1:?Usage: gce-postprocess.sh }" + +HOST_ARCH=$(uname -m) +case $HOST_ARCH in + x86_64) IMAGE_ARCH="x64" ;; + aarch64) IMAGE_ARCH="arm64" ;; + *) echo "Unsupported architecture: $HOST_ARCH"; exit 1 ;; +esac + +echo "=== GCE post-processing: ${IMAGE_FILE} ===" + +# Step 1: Mount image and apply GCE-specific fixes +LOOP_DEV=$(losetup --find --show "${IMAGE_FILE}") +kpartx -av "${LOOP_DEV}" +sleep 2 + +LOOP_BASE=$(basename "${LOOP_DEV}") +ROOT_PART="" +for part in /dev/mapper/${LOOP_BASE}p*; do + if [ -b "$part" ]; then + FS_TYPE=$(blkid -o value -s TYPE "$part" 2>/dev/null || echo "") + if [ "$FS_TYPE" = "ext4" ]; then + ROOT_PART="$part" + break + fi + fi +done + +if [ -z "$ROOT_PART" ]; then + echo "Error: Could not find ext4 root partition" + exit 1 +fi + +MOUNT_POINT="/mnt/image" +mkdir -p "${MOUNT_POINT}" +mount "${ROOT_PART}" "${MOUNT_POINT}" + +# Set up DNS +mkdir -p "${MOUNT_POINT}/run/systemd/resolve" +cat /etc/resolv.conf > "${MOUNT_POINT}/etc/resolv.conf" || \ + echo "nameserver 8.8.8.8" > "${MOUNT_POINT}/etc/resolv.conf" + +mount --bind /dev "${MOUNT_POINT}/dev" +mount --bind /dev/pts "${MOUNT_POINT}/dev/pts" +mount --bind /proc "${MOUNT_POINT}/proc" +mount --bind /sys "${MOUNT_POINT}/sys" + +# Step 2: Reinstall GRUB (architecture-specific) +if [ "$HOST_ARCH" = "x86_64" ]; then + echo "=== GCE: Reinstalling GRUB for BIOS boot (x86_64) ===" + chroot "${MOUNT_POINT}" /bin/bash -c " + grub-install --target=i386-pc ${LOOP_DEV} + update-grub + " +else + echo "=== GCE: Updating GRUB for EFI boot (arm64) ===" + # ARM64 uses UEFI boot - the EFI partition from the Ubuntu cloud image + # is already correct. Just update grub config. 
+ # Mount the EFI partition if present + EFI_PART="" + for part in /dev/mapper/${LOOP_BASE}p*; do + if [ -b "$part" ]; then + FS_TYPE=$(blkid -o value -s TYPE "$part" 2>/dev/null || echo "") + if [ "$FS_TYPE" = "vfat" ]; then + EFI_PART="$part" + break + fi + fi + done + if [ -n "$EFI_PART" ]; then + mkdir -p "${MOUNT_POINT}/boot/efi" + mount "$EFI_PART" "${MOUNT_POINT}/boot/efi" + chroot "${MOUNT_POINT}" /bin/bash -c "update-grub" + umount "${MOUNT_POINT}/boot/efi" + else + chroot "${MOUNT_POINT}" /bin/bash -c "update-grub" + fi +fi + +# Step 3: Install Google guest agent for metadata processing +echo "=== GCE: Installing Google guest agent ===" +chroot "${MOUNT_POINT}" /bin/bash -c " + set -uexo pipefail + export DEBIAN_FRONTEND=noninteractive + apt-add-repository -y universe + apt-get update -qq + apt-get install -y -qq google-guest-agent google-compute-engine + apt-get clean + rm -rf /var/lib/apt/lists/* /var/cache/apt/archives/* +" + +# Step 4: Cleanup +echo "=== GCE: Final cleanup ===" +chroot "${MOUNT_POINT}" /bin/bash -c " + rm -rf /var/lib/cloud + cloud-init clean --logs || true +" +truncate -s 0 "${MOUNT_POINT}/etc/machine-id" + +# Unmount +umount "${MOUNT_POINT}/sys" || true +umount "${MOUNT_POINT}/proc" || true +umount "${MOUNT_POINT}/dev/pts" || true +umount "${MOUNT_POINT}/dev" || true +umount "${MOUNT_POINT}" +kpartx -dv "${LOOP_DEV}" +losetup -d "${LOOP_DEV}" + +# Step 5: Package as tar.gz for GCE +echo "=== GCE: Creating tar.gz for image import ===" +cp "${IMAGE_FILE}" disk.raw +tar -czf "postgres-${IMAGE_ARCH}-gce-image.tar.gz" disk.raw +rm disk.raw + +echo "Final GCE image:" +ls -lh "postgres-${IMAGE_ARCH}-gce-image.tar.gz" + +echo "=== GCE post-processing complete ===" From 4813132e911094d411eaf2f90e63e2813ef97512 Mon Sep 17 00:00:00 2001 From: Furkan Sahin Date: Fri, 27 Mar 2026 14:01:20 +0000 Subject: [PATCH 2/9] Add GCE as upload target in main build workflow Add upload_gce toggle alongside MinIO, R2, and AWS AMI. 
GCE steps post-process a copy of the raw image (preserving the original for other targets), upload the tar.gz to GCS, create a GCE image, and grant public access. Both x64 and arm64 jobs get GCE steps with arch-specific guest OS features. The create-ubicloud-pr job generates a pg_gce_image migration alongside the existing pg_aws_ami migration. Co-Authored-By: Claude Opus 4.6 --- .github/workflows/postgres-vm-image.yml | 284 ++++++++++++++++++++++++ 1 file changed, 284 insertions(+) diff --git a/.github/workflows/postgres-vm-image.yml b/.github/workflows/postgres-vm-image.yml index 7295f15..a26b24d 100644 --- a/.github/workflows/postgres-vm-image.yml +++ b/.github/workflows/postgres-vm-image.yml @@ -57,6 +57,18 @@ on: description: "Use AWS role-based authentication (if false, uses access keys)" default: false type: boolean + upload_gce: + description: "📤 Create GCE image" + default: false + type: boolean + gcp_project: + description: "GCP project ID for GCE image" + type: string + default: "pelagic-logic-394811" + gcs_bucket: + description: "GCS bucket for GCE image upload" + type: string + default: "ubicloud-gce-images" permissions: id-token: write @@ -72,6 +84,8 @@ jobs: sha256: ${{ steps.compute_sha.outputs.sha256 }} all_ami_ids: ${{ steps.copy_ami.outputs.all_ami_ids }} source_ami_id: ${{ steps.register_ami.outputs.ami_id }} + gce_image_name: ${{ steps.create_gce_image.outputs.image_name }} + gce_image_project: ${{ steps.create_gce_image.outputs.image_project }} steps: - name: Print inputs run: | @@ -492,6 +506,110 @@ jobs: echo "Cleaning up S3..." 
aws s3 rm s3://${{ steps.s3_upload.outputs.s3_bucket }}/${{ steps.set_image_name.outputs.S3_BUCKET_IMAGE_PREFIX }}/${{ steps.s3_upload.outputs.image_filename }} + # === GCE Image Steps === + - name: GCE post-processing + if: ${{ inputs.upload_gce && !inputs.build_only }} + run: | + image_filename=${{ steps.set_image_name.outputs.MINIO_IMAGE_NAME }}.raw + cp "${image_filename}" postgres-x64-gce-work.raw + sudo ./gce-postprocess.sh postgres-x64-gce-work.raw + rm -f postgres-x64-gce-work.raw + + - name: Set GCE image name + if: ${{ inputs.upload_gce && !inputs.build_only }} + id: set_gce_image_name + run: | + gce_image_name="postgres-ubuntu-2204-x64-${{ inputs.image_suffix }}" + echo "gce_image_name=${gce_image_name}" >> $GITHUB_OUTPUT + echo "GCE Image name: ${gce_image_name}" + + - name: Rename GCE tar.gz and compute SHA256 + if: ${{ inputs.upload_gce && !inputs.build_only }} + run: | + gce_image_name="${{ steps.set_gce_image_name.outputs.gce_image_name }}" + mv postgres-x64-gce-image.tar.gz "${gce_image_name}.tar.gz" + sha256sum "${gce_image_name}.tar.gz" > "${gce_image_name}.tar.gz.sha256" + + echo "### GCE Image (x64)" >> $GITHUB_STEP_SUMMARY + du -h "${gce_image_name}.tar.gz" >> $GITHUB_STEP_SUMMARY + echo "### GCE SHA256" >> $GITHUB_STEP_SUMMARY + cat "${gce_image_name}.tar.gz.sha256" >> $GITHUB_STEP_SUMMARY + + - name: Authenticate to GCP + if: ${{ inputs.upload_gce && !inputs.build_only }} + uses: google-github-actions/auth@v2 + with: + credentials_json: ${{ secrets.GCP_SA_KEY }} + + - name: Set up Cloud SDK + if: ${{ inputs.upload_gce && !inputs.build_only }} + uses: google-github-actions/setup-gcloud@v2 + + - name: Upload to GCS + if: ${{ inputs.upload_gce && !inputs.build_only }} + run: | + gce_image_name="${{ steps.set_gce_image_name.outputs.gce_image_name }}" + tar_file="${gce_image_name}.tar.gz" + bucket="${{ inputs.gcs_bucket }}" + + echo "Uploading ${tar_file} to gs://${bucket}/..." 
+ gcloud storage cp "${tar_file}" "gs://${bucket}/${tar_file}" + + echo "### GCS Upload (x64)" >> $GITHUB_STEP_SUMMARY + echo "Uploaded to gs://${bucket}/${tar_file}" >> $GITHUB_STEP_SUMMARY + + - name: Create GCE image + if: ${{ inputs.upload_gce && !inputs.build_only }} + id: create_gce_image + run: | + gce_image_name="${{ steps.set_gce_image_name.outputs.gce_image_name }}" + tar_file="${gce_image_name}.tar.gz" + bucket="${{ inputs.gcs_bucket }}" + project="${{ inputs.gcp_project }}" + commit_sha="${{ github.sha }}" + + echo "Creating GCE image: ${gce_image_name}" + gcloud compute images create "${gce_image_name}" \ + --project="${project}" \ + --source-uri="gs://${bucket}/${tar_file}" \ + --guest-os-features=VIRTIO_SCSI_MULTIQUEUE,GVNIC \ + --labels="source=postgres-vm-images,commit=${commit_sha:0:8},arch=x64" + + echo "image_name=${gce_image_name}" >> $GITHUB_OUTPUT + echo "image_project=${project}" >> $GITHUB_OUTPUT + + echo "### GCE Image Created (x64)" >> $GITHUB_STEP_SUMMARY + echo "- Name: ${gce_image_name}" >> $GITHUB_STEP_SUMMARY + echo "- Project: ${project}" >> $GITHUB_STEP_SUMMARY + echo "- Commit: ${commit_sha:0:8}" >> $GITHUB_STEP_SUMMARY + + - name: Make GCE image public + if: ${{ inputs.upload_gce && !inputs.build_only }} + run: | + gce_image_name="${{ steps.set_gce_image_name.outputs.gce_image_name }}" + project="${{ inputs.gcp_project }}" + gcloud compute images add-iam-policy-binding "${gce_image_name}" \ + --project="${project}" \ + --member="allAuthenticatedUsers" \ + --role="roles/compute.imageUser" + + - name: Verify GCE image + if: ${{ inputs.upload_gce && !inputs.build_only }} + run: | + gce_image_name="${{ steps.set_gce_image_name.outputs.gce_image_name }}" + project="${{ inputs.gcp_project }}" + gcloud compute images describe "${gce_image_name}" \ + --project="${project}" \ + --format="table(name,family,status,diskSizeGb,creationTimestamp)" + + - name: Clean up GCS tar.gz + if: ${{ inputs.upload_gce && !inputs.build_only }} + 
continue-on-error: true + run: | + gce_image_name="${{ steps.set_gce_image_name.outputs.gce_image_name }}" + bucket="${{ inputs.gcs_bucket }}" + gcloud storage rm "gs://${bucket}/${gce_image_name}.tar.gz" + # arm64 build build-arm64: name: Build postgres-ubuntu-2204-arm64-${{ inputs.image_suffix }} @@ -501,6 +619,8 @@ jobs: sha256: ${{ steps.compute_sha.outputs.sha256 }} all_ami_ids: ${{ steps.copy_ami.outputs.all_ami_ids }} source_ami_id: ${{ steps.register_ami.outputs.ami_id }} + gce_image_name: ${{ steps.create_gce_image.outputs.image_name }} + gce_image_project: ${{ steps.create_gce_image.outputs.image_project }} steps: - name: Print inputs run: | @@ -872,6 +992,111 @@ jobs: echo "Cleaning up S3..." aws s3 rm s3://${{ steps.s3_upload.outputs.s3_bucket }}/${{ steps.set_image_name.outputs.S3_BUCKET_IMAGE_PREFIX }}/${{ steps.s3_upload.outputs.image_filename }} + # === GCE Image Steps === + - name: GCE post-processing + if: ${{ inputs.upload_gce && !inputs.build_only }} + run: | + image_filename=${{ steps.set_image_name.outputs.IMAGE_NAME }}.raw + cp "${image_filename}" postgres-arm64-gce-work.raw + sudo ./gce-postprocess.sh postgres-arm64-gce-work.raw + rm -f postgres-arm64-gce-work.raw + + - name: Set GCE image name + if: ${{ inputs.upload_gce && !inputs.build_only }} + id: set_gce_image_name + run: | + gce_image_name="postgres-ubuntu-2204-arm64-${{ inputs.image_suffix }}" + echo "gce_image_name=${gce_image_name}" >> $GITHUB_OUTPUT + echo "GCE Image name: ${gce_image_name}" + + - name: Rename GCE tar.gz and compute SHA256 + if: ${{ inputs.upload_gce && !inputs.build_only }} + run: | + gce_image_name="${{ steps.set_gce_image_name.outputs.gce_image_name }}" + mv postgres-arm64-gce-image.tar.gz "${gce_image_name}.tar.gz" + sha256sum "${gce_image_name}.tar.gz" > "${gce_image_name}.tar.gz.sha256" + + echo "### GCE Image (arm64)" >> $GITHUB_STEP_SUMMARY + du -h "${gce_image_name}.tar.gz" >> $GITHUB_STEP_SUMMARY + echo "### GCE SHA256" >> $GITHUB_STEP_SUMMARY + cat 
"${gce_image_name}.tar.gz.sha256" >> $GITHUB_STEP_SUMMARY + + - name: Authenticate to GCP + if: ${{ inputs.upload_gce && !inputs.build_only }} + uses: google-github-actions/auth@v2 + with: + credentials_json: ${{ secrets.GCP_SA_KEY }} + + - name: Set up Cloud SDK + if: ${{ inputs.upload_gce && !inputs.build_only }} + uses: google-github-actions/setup-gcloud@v2 + + - name: Upload to GCS + if: ${{ inputs.upload_gce && !inputs.build_only }} + run: | + gce_image_name="${{ steps.set_gce_image_name.outputs.gce_image_name }}" + tar_file="${gce_image_name}.tar.gz" + bucket="${{ inputs.gcs_bucket }}" + + echo "Uploading ${tar_file} to gs://${bucket}/..." + gcloud storage cp "${tar_file}" "gs://${bucket}/${tar_file}" + + echo "### GCS Upload (arm64)" >> $GITHUB_STEP_SUMMARY + echo "Uploaded to gs://${bucket}/${tar_file}" >> $GITHUB_STEP_SUMMARY + + - name: Create GCE image + if: ${{ inputs.upload_gce && !inputs.build_only }} + id: create_gce_image + run: | + gce_image_name="${{ steps.set_gce_image_name.outputs.gce_image_name }}" + tar_file="${gce_image_name}.tar.gz" + bucket="${{ inputs.gcs_bucket }}" + project="${{ inputs.gcp_project }}" + commit_sha="${{ github.sha }}" + + echo "Creating GCE image: ${gce_image_name}" + gcloud compute images create "${gce_image_name}" \ + --project="${project}" \ + --source-uri="gs://${bucket}/${tar_file}" \ + --guest-os-features=GVNIC,UEFI_COMPATIBLE \ + --architecture=ARM64 \ + --labels="source=postgres-vm-images,commit=${commit_sha:0:8},arch=arm64" + + echo "image_name=${gce_image_name}" >> $GITHUB_OUTPUT + echo "image_project=${project}" >> $GITHUB_OUTPUT + + echo "### GCE Image Created (arm64)" >> $GITHUB_STEP_SUMMARY + echo "- Name: ${gce_image_name}" >> $GITHUB_STEP_SUMMARY + echo "- Project: ${project}" >> $GITHUB_STEP_SUMMARY + echo "- Commit: ${commit_sha:0:8}" >> $GITHUB_STEP_SUMMARY + + - name: Make GCE image public + if: ${{ inputs.upload_gce && !inputs.build_only }} + run: | + gce_image_name="${{ 
steps.set_gce_image_name.outputs.gce_image_name }}" + project="${{ inputs.gcp_project }}" + gcloud compute images add-iam-policy-binding "${gce_image_name}" \ + --project="${project}" \ + --member="allAuthenticatedUsers" \ + --role="roles/compute.imageUser" + + - name: Verify GCE image + if: ${{ inputs.upload_gce && !inputs.build_only }} + run: | + gce_image_name="${{ steps.set_gce_image_name.outputs.gce_image_name }}" + project="${{ inputs.gcp_project }}" + gcloud compute images describe "${gce_image_name}" \ + --project="${project}" \ + --format="table(name,family,status,diskSizeGb,creationTimestamp)" + + - name: Clean up GCS tar.gz + if: ${{ inputs.upload_gce && !inputs.build_only }} + continue-on-error: true + run: | + gce_image_name="${{ steps.set_gce_image_name.outputs.gce_image_name }}" + bucket="${{ inputs.gcs_bucket }}" + gcloud storage rm "gs://${bucket}/${gce_image_name}.tar.gz" + # Create PR to ubicloud/ubicloud with updated image versions create-ubicloud-pr: name: Create PR to ubicloud/ubicloud @@ -913,6 +1138,11 @@ jobs: arm64_amis="us-west-2:${{ needs.build-arm64.outputs.source_ami_id }}" fi echo "arm64_ami_ids=${arm64_amis}" >> $GITHUB_OUTPUT + + # GCE image names + echo "x64_gce_image_name=${{ needs.build-x64.outputs.gce_image_name }}" >> $GITHUB_OUTPUT + echo "arm64_gce_image_name=${{ needs.build-arm64.outputs.gce_image_name }}" >> $GITHUB_OUTPUT + echo "gce_image_project=${{ needs.build-x64.outputs.gce_image_project }}" >> $GITHUB_OUTPUT fi - name: Clone ubicloud/ubicloud @@ -1037,6 +1267,56 @@ jobs: echo "Created migration file: ${migration_file}" cat "${migration_file}" + - name: Create GCE migration file + if: ${{ steps.collect.outputs.x64_gce_image_name != '' || steps.collect.outputs.arm64_gce_image_name != '' }} + run: | + cd ubicloud + timestamp=$(date +%Y%m%d) + migration_file="migrate/${timestamp}_update_pg_gce_images.rb" + + x64_gce_image="${{ steps.collect.outputs.x64_gce_image_name }}" + arm64_gce_image="${{ 
steps.collect.outputs.arm64_gce_image_name }}" + project="${{ steps.collect.outputs.gce_image_project }}" + + cat > "${migration_file}" << MIGRATION + # frozen_string_literal: true + + Sequel.migration do + up do + MIGRATION + sed -i 's/^ //' "${migration_file}" + + if [ -n "$x64_gce_image" ]; then + cat >> "${migration_file}" << MIGRATION + from(:pg_gce_image) + .where(gcp_project_id: "${project}", arch: "x64") + .update(gce_image_name: "${x64_gce_image}") + MIGRATION + sed -i 's/^ //' "${migration_file}" + fi + + if [ -n "$arm64_gce_image" ]; then + cat >> "${migration_file}" << MIGRATION + from(:pg_gce_image) + .where(gcp_project_id: "${project}", arch: "arm64") + .update(gce_image_name: "${arm64_gce_image}") + MIGRATION + sed -i 's/^ //' "${migration_file}" + fi + + cat >> "${migration_file}" << MIGRATION + end + + down do + raise Sequel::Error, "irreversible: previous GCE image names unknown" + end + end + MIGRATION + sed -i 's/^ //' "${migration_file}" + + echo "Created GCE migration file: ${migration_file}" + cat "${migration_file}" + - name: Create Pull Request env: GH_TOKEN: ${{ secrets.UBICLOUD_REPO_PAT }} @@ -1080,6 +1360,7 @@ jobs: --body "## Summary - Updates boot image version and SHA256 hashes in \`prog/download_boot_image.rb\` - Adds migration to update AWS AMI IDs in \`pg_aws_ami\` table + - Adds migration to update GCE image names in \`pg_gce_image\` table (if GCE images built) ## Image Version \`${{ inputs.image_suffix }}\` @@ -1087,6 +1368,9 @@ jobs: ## Changes - x64 SHA256: \`${{ steps.collect.outputs.x64_sha256 }}\` - arm64 SHA256: \`${{ steps.collect.outputs.arm64_sha256 }}\` + - GCE x64: \`${{ steps.collect.outputs.x64_gce_image_name }}\` + - GCE arm64: \`${{ steps.collect.outputs.arm64_gce_image_name }}\` + - GCE project: \`${{ steps.collect.outputs.gce_image_project }}\` 🤖 Generated by [postgres-vm-images](https://github.com/ubicloud/postgres-vm-images) workflow" From 14d3b5ada1faa6a9c288a099384bee894b7b62a6 Mon Sep 17 00:00:00 2001 From: 
Ubicloud Dev Date: Tue, 28 Apr 2026 15:38:47 +0000 Subject: [PATCH 3/9] Resolve GCP project from SA key instead of workflow input Drop the gcp_project workflow_dispatch input and read the project from the SA key JSON via google-github-actions/auth@v2's project_id output. The auth action already parses the credentials_json and exposes its project_id; threading the same value back through a separate input invites mismatch (e.g., key for ubicloud-images but input pointing at pelagic-logic-394811). Add id: gcp_auth to both x64 and arm64 auth steps and replace six inputs.gcp_project references with steps.gcp_auth.outputs.project_id. --- .github/workflows/postgres-vm-image.yml | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/.github/workflows/postgres-vm-image.yml b/.github/workflows/postgres-vm-image.yml index a26b24d..2575139 100644 --- a/.github/workflows/postgres-vm-image.yml +++ b/.github/workflows/postgres-vm-image.yml @@ -61,10 +61,6 @@ on: description: "📤 Create GCE image" default: false type: boolean - gcp_project: - description: "GCP project ID for GCE image" - type: string - default: "pelagic-logic-394811" gcs_bucket: description: "GCS bucket for GCE image upload" type: string @@ -537,6 +533,7 @@ jobs: - name: Authenticate to GCP if: ${{ inputs.upload_gce && !inputs.build_only }} + id: gcp_auth uses: google-github-actions/auth@v2 with: credentials_json: ${{ secrets.GCP_SA_KEY }} @@ -565,7 +562,7 @@ jobs: gce_image_name="${{ steps.set_gce_image_name.outputs.gce_image_name }}" tar_file="${gce_image_name}.tar.gz" bucket="${{ inputs.gcs_bucket }}" - project="${{ inputs.gcp_project }}" + project="${{ steps.gcp_auth.outputs.project_id }}" commit_sha="${{ github.sha }}" echo "Creating GCE image: ${gce_image_name}" @@ -587,7 +584,7 @@ jobs: if: ${{ inputs.upload_gce && !inputs.build_only }} run: | gce_image_name="${{ steps.set_gce_image_name.outputs.gce_image_name }}" - project="${{ inputs.gcp_project }}" + project="${{ 
steps.gcp_auth.outputs.project_id }}" gcloud compute images add-iam-policy-binding "${gce_image_name}" \ --project="${project}" \ --member="allAuthenticatedUsers" \ @@ -597,7 +594,7 @@ jobs: if: ${{ inputs.upload_gce && !inputs.build_only }} run: | gce_image_name="${{ steps.set_gce_image_name.outputs.gce_image_name }}" - project="${{ inputs.gcp_project }}" + project="${{ steps.gcp_auth.outputs.project_id }}" gcloud compute images describe "${gce_image_name}" \ --project="${project}" \ --format="table(name,family,status,diskSizeGb,creationTimestamp)" @@ -1023,6 +1020,7 @@ jobs: - name: Authenticate to GCP if: ${{ inputs.upload_gce && !inputs.build_only }} + id: gcp_auth uses: google-github-actions/auth@v2 with: credentials_json: ${{ secrets.GCP_SA_KEY }} @@ -1051,7 +1049,7 @@ jobs: gce_image_name="${{ steps.set_gce_image_name.outputs.gce_image_name }}" tar_file="${gce_image_name}.tar.gz" bucket="${{ inputs.gcs_bucket }}" - project="${{ inputs.gcp_project }}" + project="${{ steps.gcp_auth.outputs.project_id }}" commit_sha="${{ github.sha }}" echo "Creating GCE image: ${gce_image_name}" @@ -1074,7 +1072,7 @@ jobs: if: ${{ inputs.upload_gce && !inputs.build_only }} run: | gce_image_name="${{ steps.set_gce_image_name.outputs.gce_image_name }}" - project="${{ inputs.gcp_project }}" + project="${{ steps.gcp_auth.outputs.project_id }}" gcloud compute images add-iam-policy-binding "${gce_image_name}" \ --project="${project}" \ --member="allAuthenticatedUsers" \ @@ -1084,7 +1082,7 @@ jobs: if: ${{ inputs.upload_gce && !inputs.build_only }} run: | gce_image_name="${{ steps.set_gce_image_name.outputs.gce_image_name }}" - project="${{ inputs.gcp_project }}" + project="${{ steps.gcp_auth.outputs.project_id }}" gcloud compute images describe "${gce_image_name}" \ --project="${project}" \ --format="table(name,family,status,diskSizeGb,creationTimestamp)" From 62a15c90642e4044205fe1d80221732b4315bed5 Mon Sep 17 00:00:00 2001 From: Ubicloud Dev Date: Tue, 28 Apr 2026 15:40:46 +0000 
Subject: [PATCH 4/9] Read GCS bucket name from secret instead of workflow input Drop the gcs_bucket workflow_dispatch input and read the bucket from secrets.GCS_BUCKET. This matches the AWS_S3_BUCKET and R2_BUCKET pattern already used elsewhere in the workflow, leaving only GCE-image-relevant toggles in workflow_dispatch. --- .github/workflows/postgres-vm-image.yml | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/.github/workflows/postgres-vm-image.yml b/.github/workflows/postgres-vm-image.yml index 2575139..a58270a 100644 --- a/.github/workflows/postgres-vm-image.yml +++ b/.github/workflows/postgres-vm-image.yml @@ -61,10 +61,6 @@ on: description: "📤 Create GCE image" default: false type: boolean - gcs_bucket: - description: "GCS bucket for GCE image upload" - type: string - default: "ubicloud-gce-images" permissions: id-token: write @@ -547,7 +543,7 @@ jobs: run: | gce_image_name="${{ steps.set_gce_image_name.outputs.gce_image_name }}" tar_file="${gce_image_name}.tar.gz" - bucket="${{ inputs.gcs_bucket }}" + bucket="${{ secrets.GCS_BUCKET }}" echo "Uploading ${tar_file} to gs://${bucket}/..." 
gcloud storage cp "${tar_file}" "gs://${bucket}/${tar_file}" @@ -561,7 +557,7 @@ jobs: run: | gce_image_name="${{ steps.set_gce_image_name.outputs.gce_image_name }}" tar_file="${gce_image_name}.tar.gz" - bucket="${{ inputs.gcs_bucket }}" + bucket="${{ secrets.GCS_BUCKET }}" project="${{ steps.gcp_auth.outputs.project_id }}" commit_sha="${{ github.sha }}" @@ -604,7 +600,7 @@ jobs: continue-on-error: true run: | gce_image_name="${{ steps.set_gce_image_name.outputs.gce_image_name }}" - bucket="${{ inputs.gcs_bucket }}" + bucket="${{ secrets.GCS_BUCKET }}" gcloud storage rm "gs://${bucket}/${gce_image_name}.tar.gz" # arm64 build @@ -1034,7 +1030,7 @@ jobs: run: | gce_image_name="${{ steps.set_gce_image_name.outputs.gce_image_name }}" tar_file="${gce_image_name}.tar.gz" - bucket="${{ inputs.gcs_bucket }}" + bucket="${{ secrets.GCS_BUCKET }}" echo "Uploading ${tar_file} to gs://${bucket}/..." gcloud storage cp "${tar_file}" "gs://${bucket}/${tar_file}" @@ -1048,7 +1044,7 @@ jobs: run: | gce_image_name="${{ steps.set_gce_image_name.outputs.gce_image_name }}" tar_file="${gce_image_name}.tar.gz" - bucket="${{ inputs.gcs_bucket }}" + bucket="${{ secrets.GCS_BUCKET }}" project="${{ steps.gcp_auth.outputs.project_id }}" commit_sha="${{ github.sha }}" @@ -1092,7 +1088,7 @@ jobs: continue-on-error: true run: | gce_image_name="${{ steps.set_gce_image_name.outputs.gce_image_name }}" - bucket="${{ inputs.gcs_bucket }}" + bucket="${{ secrets.GCS_BUCKET }}" gcloud storage rm "gs://${bucket}/${gce_image_name}.tar.gz" # Create PR to ubicloud/ubicloud with updated image versions From 84f641a83e054f91df6c473bab86c14d91740348 Mon Sep 17 00:00:00 2001 From: Ubicloud Dev Date: Tue, 28 Apr 2026 15:48:56 +0000 Subject: [PATCH 5/9] Move GCE image checkbox above AWS AMI in workflow_dispatch form --- .github/workflows/postgres-vm-image.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/postgres-vm-image.yml b/.github/workflows/postgres-vm-image.yml 
index a58270a..9ba33e2 100644 --- a/.github/workflows/postgres-vm-image.yml +++ b/.github/workflows/postgres-vm-image.yml @@ -37,6 +37,10 @@ on: description: "📤 Upload to R2 (ignored if build_only)" default: false type: boolean + upload_gce: + description: "📤 Create GCE image" + default: false + type: boolean upload_aws_ami: description: "📤 Create AWS AMI" default: false @@ -57,10 +61,6 @@ on: description: "Use AWS role-based authentication (if false, uses access keys)" default: false type: boolean - upload_gce: - description: "📤 Create GCE image" - default: false - type: boolean permissions: id-token: write From 881d6c8efd6e0467105d5441b0ca270bccfbe890 Mon Sep 17 00:00:00 2001 From: Ubicloud Dev Date: Tue, 28 Apr 2026 16:36:07 +0000 Subject: [PATCH 6/9] Sanitize image_suffix for GCE image name MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit GCE image names must match [a-z](?:[-a-z0-9]{0,61}[a-z0-9])? — no dots allowed. The shared image_suffix input uses "YYYYMMDD.X.Y" format (fine for AMIs) which makes GCE reject the create with: Invalid value for field 'resource.name': 'postgres-ubuntu-2204-x64-20260428.1.0' Replace dots with hyphens in the GCE-specific Set GCE image name step (both x64 and arm64), so suffix=20260428.1.0 yields postgres-ubuntu-2204-x64-20260428-1-0. AMI/MinIO/R2/GCS object names keep the dotted suffix verbatim. --- .github/workflows/postgres-vm-image.yml | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/.github/workflows/postgres-vm-image.yml b/.github/workflows/postgres-vm-image.yml index 9ba33e2..2cfc7a7 100644 --- a/.github/workflows/postgres-vm-image.yml +++ b/.github/workflows/postgres-vm-image.yml @@ -511,7 +511,10 @@ jobs: if: ${{ inputs.upload_gce && !inputs.build_only }} id: set_gce_image_name run: | - gce_image_name="postgres-ubuntu-2204-x64-${{ inputs.image_suffix }}" + # GCE image names allow only [a-z0-9-]; image_suffix may contain + # dots (e.g. 
"20260428.1.0"), so swap them for hyphens. + suffix="${{ inputs.image_suffix }}" + gce_image_name="postgres-ubuntu-2204-x64-${suffix//./-}" echo "gce_image_name=${gce_image_name}" >> $GITHUB_OUTPUT echo "GCE Image name: ${gce_image_name}" @@ -998,7 +1001,10 @@ jobs: if: ${{ inputs.upload_gce && !inputs.build_only }} id: set_gce_image_name run: | - gce_image_name="postgres-ubuntu-2204-arm64-${{ inputs.image_suffix }}" + # GCE image names allow only [a-z0-9-]; image_suffix may contain + # dots (e.g. "20260428.1.0"), so swap them for hyphens. + suffix="${{ inputs.image_suffix }}" + gce_image_name="postgres-ubuntu-2204-arm64-${suffix//./-}" echo "gce_image_name=${gce_image_name}" >> $GITHUB_OUTPUT echo "GCE Image name: ${gce_image_name}" From 4ef3af15b4ade42d0563a183a664b08b8a9632bc Mon Sep 17 00:00:00 2001 From: Ubicloud Dev Date: Wed, 29 Apr 2026 07:49:56 +0000 Subject: [PATCH 7/9] Gate arm64 AWS steps on upload_aws_ami The arm64 build job conditionally ran on (build_arm64 || upload_aws_ami), so the job exists when only build_arm64 is selected. But the AWS-specific steps inside the job were gated only on !build_only, so they fired whenever arm64 ran - forcing every arm64 build to upload to S3, import a snapshot, register an AMI, copy across regions, etc., even with the AWS checkbox off. Add inputs.upload_aws_ami to the 12 AWS-only steps in arm64, matching the gating already used in the x64 job. Build_arm64 without upload_aws_ami now produces only the local raw image (plus MinIO/R2/GCE if those toggles are on). 
--- .github/workflows/postgres-vm-image.yml | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/.github/workflows/postgres-vm-image.yml b/.github/workflows/postgres-vm-image.yml index 2cfc7a7..77c00cc 100644 --- a/.github/workflows/postgres-vm-image.yml +++ b/.github/workflows/postgres-vm-image.yml @@ -673,7 +673,7 @@ jobs: cat ${sha_filename} >> $GITHUB_STEP_SUMMARY - name: Install AWS CLI - if: ${{ !inputs.build_only }} + if: ${{ inputs.upload_aws_ami && !inputs.build_only }} run: | curl "https://awscli.amazonaws.com/awscli-exe-linux-aarch64.zip" -o "awscliv2.zip" unzip -q awscliv2.zip @@ -681,7 +681,7 @@ jobs: aws --version - name: Configure AWS credentials (role) - if: ${{ !inputs.build_only && inputs.use_aws_role }} + if: ${{ inputs.upload_aws_ami && !inputs.build_only && inputs.use_aws_role }} uses: aws-actions/configure-aws-credentials@v5 with: role-to-assume: ${{ secrets.AWS_ROLE_ARN }} @@ -689,7 +689,7 @@ jobs: aws-region: us-west-2 - name: Configure AWS credentials (access keys) - if: ${{ !inputs.build_only && !inputs.use_aws_role }} + if: ${{ inputs.upload_aws_ami && !inputs.build_only && !inputs.use_aws_role }} uses: aws-actions/configure-aws-credentials@v5 with: aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} @@ -697,7 +697,7 @@ jobs: aws-region: us-west-2 - name: Upload image to S3 - if: ${{ !inputs.build_only }} + if: ${{ inputs.upload_aws_ami && !inputs.build_only }} id: s3_upload run: | image_filename=${{ steps.set_image_name.outputs.IMAGE_NAME }}.raw @@ -713,7 +713,7 @@ jobs: echo "s3_bucket=${s3_bucket}" >> $GITHUB_OUTPUT - name: Import snapshot to AWS - if: ${{ !inputs.build_only }} + if: ${{ inputs.upload_aws_ami && !inputs.build_only }} id: import_snapshot run: | image_filename=${{ steps.s3_upload.outputs.image_filename }} @@ -745,7 +745,7 @@ jobs: echo "import_task_id=${import_task_id}" >> $GITHUB_OUTPUT - name: Wait for snapshot import completion - if: ${{ !inputs.build_only }} + if: ${{ 
inputs.upload_aws_ami && !inputs.build_only }} id: wait_snapshot run: | import_task_id=${{ steps.import_snapshot.outputs.import_task_id }} @@ -790,7 +790,7 @@ jobs: --tags "Key=Name,Value=${image_filename}" "Key=Source,Value=postgres-vm-images" "Key=Architecture,Value=arm64" - name: Register AMI from snapshot - if: ${{ !inputs.build_only }} + if: ${{ inputs.upload_aws_ami && !inputs.build_only }} id: register_ami run: | ami_name=${{ steps.set_image_name.outputs.IMAGE_NAME }} @@ -819,7 +819,7 @@ jobs: echo "Region: us-west-2" >> $GITHUB_STEP_SUMMARY - name: Copy AMI to additional regions - if: ${{ inputs.aws_ami_regions != '' && !inputs.build_only }} + if: ${{ inputs.upload_aws_ami && inputs.aws_ami_regions != '' && !inputs.build_only }} id: copy_ami run: | ami_id=${{ steps.register_ami.outputs.ami_id }} @@ -875,7 +875,7 @@ jobs: echo "all_ami_ids=${all_ami_ids}" >> $GITHUB_OUTPUT - name: Wait for AMI copies and share - if: ${{ !inputs.build_only }} + if: ${{ inputs.upload_aws_ami && !inputs.build_only }} run: | ami_id=${{ steps.register_ami.outputs.ami_id }} all_ami_ids="${{ steps.copy_ami.outputs.all_ami_ids }}" @@ -952,7 +952,7 @@ jobs: done - name: Generate AMI IDs artifact - if: ${{ !inputs.build_only }} + if: ${{ inputs.upload_aws_ami && !inputs.build_only }} run: | all_ami_ids="${{ steps.copy_ami.outputs.all_ami_ids }}" @@ -975,14 +975,14 @@ jobs: cat ami-ids-arm64.yaml - name: Upload AMI IDs artifact - if: ${{ !inputs.build_only }} + if: ${{ inputs.upload_aws_ami && !inputs.build_only }} uses: actions/upload-artifact@v4 with: name: ami-ids-arm64 path: ami-ids-arm64.yaml - name: Clean up S3 - if: ${{ !inputs.build_only }} + if: ${{ inputs.upload_aws_ami && !inputs.build_only }} continue-on-error: true run: | echo "Cleaning up S3..." 
From e441e3c3686fe9d7ba3d84050b79095849683a60 Mon Sep 17 00:00:00 2001 From: Ubicloud Dev Date: Wed, 29 Apr 2026 07:59:24 +0000 Subject: [PATCH 8/9] Align cleanup workflow with build workflow GCP changes Three fixes to keep cleanup-images.yml symmetric with the build: 1. Drop gcp_project input; resolve project from the SA key via google-github-actions/auth@v2's project_id output (id: gcp_auth). 2. Drop gcs_bucket input; read bucket name from secrets.GCS_BUCKET, matching AWS_S3_BUCKET / R2_BUCKET pattern. 3. Compute a separate {x64,arm64}_gce_image_name with dots replaced by hyphens, mirroring the build workflow's sanitization. Without this the cleanup looked for postgres-ubuntu-2204-x64-20260428.1.0 which never exists in GCE - the actually-created image is named postgres-ubuntu-2204-x64-20260428-1-0. Tarball path uses the same sanitized name so the GCS object lookup matches the upload. MinIO/R2/AWS cleanup paths keep the dotted name as before. --- .github/workflows/cleanup-images.yml | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/.github/workflows/cleanup-images.yml b/.github/workflows/cleanup-images.yml index 2aaf3c7..38d35a9 100644 --- a/.github/workflows/cleanup-images.yml +++ b/.github/workflows/cleanup-images.yml @@ -33,14 +33,6 @@ on: description: "🗑️ Clean up GCE images and GCS tarballs" default: false type: boolean - gcp_project: - description: "GCP project ID for GCE cleanup" - type: string - default: "pelagic-logic-394811" - gcs_bucket: - description: "GCS bucket for GCE tarball cleanup" - type: string - default: "ubicloud-gce-images" aws_ami_regions: description: "AWS regions to clean up AMIs from (comma-separated)" type: string @@ -66,8 +58,17 @@ jobs: x64_image_name="postgres-ubuntu-2204-x64-${{ inputs.image_suffix }}" arm64_image_name="postgres-ubuntu-2204-arm64-${{ inputs.image_suffix }}" + # GCE image names cannot contain dots; build workflow sanitizes + # them the same way. 
Mirror that here so cleanup matches what + # was actually created. + suffix="${{ inputs.image_suffix }}" + x64_gce_image_name="postgres-ubuntu-2204-x64-${suffix//./-}" + arm64_gce_image_name="postgres-ubuntu-2204-arm64-${suffix//./-}" + echo "x64_image_name=${x64_image_name}" >> $GITHUB_OUTPUT echo "arm64_image_name=${arm64_image_name}" >> $GITHUB_OUTPUT + echo "x64_gce_image_name=${x64_gce_image_name}" >> $GITHUB_OUTPUT + echo "arm64_gce_image_name=${arm64_gce_image_name}" >> $GITHUB_OUTPUT echo "### Target Images" >> $GITHUB_STEP_SUMMARY if [ "${{ inputs.architecture }}" = "x64" ] || [ "${{ inputs.architecture }}" = "both" ]; then @@ -301,6 +302,7 @@ jobs: - name: Authenticate to GCP if: ${{ inputs.cleanup_gce }} + id: gcp_auth uses: google-github-actions/auth@v2 with: credentials_json: ${{ secrets.GCP_SA_KEY }} @@ -312,8 +314,8 @@ jobs: - name: Cleanup GCE images if: ${{ inputs.cleanup_gce }} run: | - project="${{ inputs.gcp_project }}" - bucket="${{ inputs.gcs_bucket }}" + project="${{ steps.gcp_auth.outputs.project_id }}" + bucket="${{ secrets.GCS_BUCKET }}" echo "### GCE Cleanup" >> $GITHUB_STEP_SUMMARY @@ -349,11 +351,11 @@ jobs: } if [ "${{ inputs.architecture }}" = "x64" ] || [ "${{ inputs.architecture }}" = "both" ]; then - cleanup_gce_image "${{ steps.set_image_names.outputs.x64_image_name }}" "x64" + cleanup_gce_image "${{ steps.set_image_names.outputs.x64_gce_image_name }}" "x64" fi if [ "${{ inputs.architecture }}" = "arm64" ] || [ "${{ inputs.architecture }}" = "both" ]; then - cleanup_gce_image "${{ steps.set_image_names.outputs.arm64_image_name }}" "arm64" + cleanup_gce_image "${{ steps.set_image_names.outputs.arm64_gce_image_name }}" "arm64" fi - name: Summary From d517236e3306e3f94ac1dea4b1db249c6f1ac060 Mon Sep 17 00:00:00 2001 From: Ubicloud Dev Date: Thu, 7 May 2026 10:30:52 +0000 Subject: [PATCH 9/9] Switch GCP auth to Workload Identity Federation iamKunalGupta noted on PR #20 that the workflows authenticate to GCP with a long-lived service 
account JSON key (secrets.GCP_SA_KEY), and recommended Workload Identity Federation per https://cloud.google.com/iam/docs/workload-identity-federation. Replace the credentials_json input on all three google-github-actions/auth@v2 sites (postgres-vm-image.yml lines 538 and 1029, cleanup-images.yml line 312) with workload_identity_provider + service_account, both read from non-secret repo variables (vars.WIF_PROVIDER, vars.WIF_SERVICE_ACCOUNT). The provider path embeds the project number, pool ID, and provider ID; the service_account remains postgres-vm-images@ubicloud-images.iam.gserviceaccount.com so existing IAM bindings on the SA continue to apply unchanged. Add permissions: id-token: write at the workflow level on cleanup-images.yml (postgres-vm-image.yml already had it). This lets GitHub mint the OIDC token that the auth action exchanges for short-lived GCP credentials via the WIF pool. GCP-side setup that must exist before this commit lands: - Workload Identity Pool github-pool in ubicloud-images. - OIDC provider github-provider on that pool, issuer https://token.actions.githubusercontent.com, attribute condition assertion.repository_owner == 'ubicloud' so only ubicloud-org repos can mint tokens. - iam.workloadIdentityUser binding on postgres-vm-images@ubicloud-images.iam.gserviceaccount.com for principalSet ...attribute.repository/ubicloud/postgres-vm-images so impersonation is scoped to this single repo. After this lands and a dispatch run confirms the new auth path, the GCP_SA_KEY secret should be removed and the SA's underlying JSON key revoked in GCP IAM.
Co-Authored-By: Claude Opus 4.7 (1M context) --- .github/workflows/cleanup-images.yml | 7 ++++++- .github/workflows/postgres-vm-image.yml | 6 ++++-- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/.github/workflows/cleanup-images.yml b/.github/workflows/cleanup-images.yml index 38d35a9..4654939 100644 --- a/.github/workflows/cleanup-images.yml +++ b/.github/workflows/cleanup-images.yml @@ -42,6 +42,10 @@ on: default: false type: boolean +permissions: + id-token: write + contents: read + jobs: cleanup: name: Cleanup images for ${{ inputs.image_suffix }} @@ -305,7 +309,8 @@ jobs: id: gcp_auth uses: google-github-actions/auth@v2 with: - credentials_json: ${{ secrets.GCP_SA_KEY }} + workload_identity_provider: ${{ vars.WIF_PROVIDER }} + service_account: ${{ vars.WIF_SERVICE_ACCOUNT }} - name: Set up Cloud SDK if: ${{ inputs.cleanup_gce }} diff --git a/.github/workflows/postgres-vm-image.yml b/.github/workflows/postgres-vm-image.yml index 77c00cc..2ec5b6e 100644 --- a/.github/workflows/postgres-vm-image.yml +++ b/.github/workflows/postgres-vm-image.yml @@ -535,7 +535,8 @@ jobs: id: gcp_auth uses: google-github-actions/auth@v2 with: - credentials_json: ${{ secrets.GCP_SA_KEY }} + workload_identity_provider: ${{ vars.WIF_PROVIDER }} + service_account: ${{ vars.WIF_SERVICE_ACCOUNT }} - name: Set up Cloud SDK if: ${{ inputs.upload_gce && !inputs.build_only }} @@ -1025,7 +1026,8 @@ jobs: id: gcp_auth uses: google-github-actions/auth@v2 with: - credentials_json: ${{ secrets.GCP_SA_KEY }} + workload_identity_provider: ${{ vars.WIF_PROVIDER }} + service_account: ${{ vars.WIF_SERVICE_ACCOUNT }} - name: Set up Cloud SDK if: ${{ inputs.upload_gce && !inputs.build_only }}