# Release Build — workflow run #108
# (Workflow file for this run)
---
name: Release Build

# Manually-triggered release workflow. The operator supplies the version to
# tag and may override the comma-separated list of AWS regions that receive
# the Lambda layer.
on:
  workflow_dispatch:
    inputs:
      version:
        description: The version to tag the release with, e.g., 1.2.0
        required: true
      aws_region:
        description: 'Deploy lambda layer to aws regions'
        required: true
        default: 'us-east-1, us-east-2, us-west-1, us-west-2, ap-south-1, ap-northeast-3, ap-northeast-2, ap-southeast-1, ap-southeast-2, ap-northeast-1, ca-central-1, eu-central-1, eu-west-1, eu-west-2, eu-west-3, eu-north-1, sa-east-1, af-south-1, ap-east-1, ap-south-2, ap-southeast-3, ap-southeast-4, eu-central-2, eu-south-1, eu-south-2, il-central-1, me-central-1, me-south-1, ap-southeast-5, ap-southeast-7, mx-central-1, ca-west-1, cn-north-1, cn-northwest-1'

env:
  AWS_DEFAULT_REGION: us-east-1
  AWS_PUBLIC_ECR_REGION: us-east-1
  AWS_PRIVATE_ECR_REGION: us-west-2
  RELEASE_PUBLIC_REPOSITORY: public.ecr.aws/aws-observability/adot-autoinstrumentation-python
  RELEASE_PRIVATE_REPOSITORY: 020628701572.dkr.ecr.us-west-2.amazonaws.com/adot-autoinstrumentation-python
  RELEASE_PRIVATE_REGISTRY: 020628701572.dkr.ecr.us-west-2.amazonaws.com
  PACKAGE_NAME: aws-opentelemetry-distro
  VERSION: ${{ github.event.inputs.version }}
  AWS_REGIONS: ${{ github.event.inputs.aws_region }}
  WHEEL_ARTIFACT_NAME: aws_opentelemetry_distro-${{ github.event.inputs.version }}-py3-none-any.whl
  SOURCE_ARTIFACT_NAME: aws_opentelemetry_distro-${{ github.event.inputs.version }}.tar.gz
  # Legacy list of commercial regions to deploy to. New regions should NOT be added here, and instead should be added to the `aws_region` default input to the workflow.
  LEGACY_COMMERCIAL_REGIONS: us-east-1, us-east-2, us-west-1, us-west-2, ap-south-1, ap-northeast-3, ap-northeast-2, ap-southeast-1, ap-southeast-2, ap-northeast-1, ca-central-1, eu-central-1, eu-west-1, eu-west-2, eu-west-3, eu-north-1, sa-east-1
  LAYER_NAME: AWSOpenTelemetryDistroPython
  LAYER_ARTIFACT_NAME: aws-opentelemetry-python-layer.zip

# id-token: write is needed for OIDC role assumption; contents: write is
# needed to create the GitHub release.
permissions:
  id-token: write
  contents: write
jobs:
  # Pulls the release artifacts (wheel, sdist, layer zip) from the latest
  # successful "Python Instrumentation Main Build" run on this branch,
  # re-uploads them for the downstream release jobs, and expands the
  # comma-separated region input into a JSON matrix.
  download-artifacts:
    environment: Release
    runs-on: ubuntu-latest
    outputs:
      aws_regions_json: ${{ steps.set-matrix.outputs.aws_regions_json }}
    steps:
      - name: Checkout Repo @ SHA - ${{ github.sha }}
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
      - name: Get main build run ID and download artifacts
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          # Resolve the main-build workflow, then the newest run on this branch.
          WORKFLOW_ID=$(gh api repos/${{ github.repository }}/actions/workflows --jq '.workflows[] | select(.name=="Python Instrumentation Main Build") | .id')
          LATEST_RUN=$(gh api repos/${{ github.repository }}/actions/workflows/$WORKFLOW_ID/runs --jq '[.workflow_runs[] | select(.head_branch=="${{ github.ref_name }}")] | sort_by(.created_at) | .[-1]')
          STATUS=$(echo "$LATEST_RUN" | jq -r '.status')
          CONCLUSION=$(echo "$LATEST_RUN" | jq -r '.conclusion')
          RUN_ID=$(echo "$LATEST_RUN" | jq -r '.id')
          # Refuse to release from an in-flight or failed main build.
          if [ "$STATUS" = "in_progress" ] || [ "$STATUS" = "queued" ]; then
            echo "Main build is still running (status: $STATUS). Cannot proceed with release."
            exit 1
          elif [ "$CONCLUSION" != "success" ]; then
            echo "Latest main build on branch ${{ github.ref_name }} conclusion: $CONCLUSION"
            exit 1
          fi
          echo "Main build succeeded (run ID: $RUN_ID), downloading artifacts..."
          gh run download $RUN_ID -n release-artifacts -D dist
          gh run download $RUN_ID -n layer.zip -D layer
      - name: Upload SDK wheel artifact
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 #v4.6.2
        with:
          name: ${{ env.WHEEL_ARTIFACT_NAME }}
          path: dist/${{ env.WHEEL_ARTIFACT_NAME }}
      - name: Upload SDK source artifact
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 #v4.6.2
        with:
          name: ${{ env.SOURCE_ARTIFACT_NAME }}
          path: dist/${{ env.SOURCE_ARTIFACT_NAME }}
      - name: Upload layer artifact
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 #v4.6.2
        with:
          name: layer.zip
          path: layer/${{ env.LAYER_ARTIFACT_NAME }}
      # Turn "us-east-1, us-east-2, ..." into ["us-east-1","us-east-2",...]
      # for use as a job matrix.
      - name: Set up regions matrix
        id: set-matrix
        run: |
          IFS=',' read -ra REGIONS <<< "${{ env.AWS_REGIONS }}"
          MATRIX="["
          for region in "${REGIONS[@]}"; do
            # xargs trims the surrounding whitespace left by the ", " separator.
            trimmed_region=$(echo "$region" | xargs)
            MATRIX+="\"$trimmed_region\","
          done
          MATRIX="${MATRIX%,}]"
          echo "${MATRIX}"
          echo "aws_regions_json=${MATRIX}" >> "$GITHUB_OUTPUT"
# publish-sdk:
# needs: download-artifacts
# runs-on: ubuntu-latest
# steps:
# - name: Checkout Repo @ SHA - ${{ github.sha }}
# uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
#
# - name: Set up Docker Buildx
# uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 #3.11.1
#
# - name: Configure AWS credentials for private ECR
# uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0
# with:
# role-to-assume: ${{ secrets.AWS_ROLE_ARN_ECR_RELEASE }}
# aws-region: ${{ env.AWS_PRIVATE_ECR_REGION }}
#
# - name: Log in to AWS private ECR
# uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0
# with:
# registry: ${{ env.RELEASE_PRIVATE_REGISTRY }}
#
# - name: Configure AWS credentials for public ECR
# uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0
# with:
# role-to-assume: ${{ secrets.AWS_ROLE_ARN_ECR_RELEASE }}
# aws-region: ${{ env.AWS_PUBLIC_ECR_REGION }}
#
# - name: Log in to AWS public ECR
# uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0
# with:
# registry: public.ecr.aws
#
# - name: Download SDK wheel artifact
# uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 #v5.0.0
# with:
# name: ${{ env.WHEEL_ARTIFACT_NAME }}
# path: dist-pypi
#
# - name: Download SDK source artifact
# uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 #v5.0.0
# with:
# name: ${{ env.SOURCE_ARTIFACT_NAME }}
# path: dist-pypi
#
# # The step below publishes to testpypi in order to catch any issues
# - name: Publish to TestPyPI
# uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e
# with:
# repository-url: https://test.pypi.org/legacy/
# attestations: false
# skip-existing: true
# verbose: true
# packages-dir: dist-pypi
#
# # Publish to prod PyPI
# - name: Publish to PyPI
# uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e
# with:
# skip-existing: true
# verbose: true
# packages-dir: dist-pypi
#
# # Publish to public ECR
# - name: Build and push public ECR image
# uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 #v6.18.0
# with:
# push: true
# context: .
# file: ./Dockerfile
# platforms: linux/amd64,linux/arm64
# tags: |
# ${{ env.RELEASE_PUBLIC_REPOSITORY }}:v${{ env.VERSION }}
#
# # Publish to private ECR
# - name: Build and push private ECR image
# uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 #v6.18.0
# with:
# push: true
# context: .
# file: ./Dockerfile
# platforms: linux/amd64,linux/arm64
# tags: |
# ${{ env.RELEASE_PRIVATE_REPOSITORY }}:v${{ env.VERSION }}
publish-layer-prod:
runs-on: ubuntu-latest
needs: [download-artifacts]
strategy:
matrix:
aws_region: ${{ fromJson(needs.download-artifacts.outputs.aws_regions_json) }}
steps:
- name: role arn
env:
LEGACY_COMMERCIAL_REGIONS: ${{ env.LEGACY_COMMERCIAL_REGIONS }}
run: |
LEGACY_COMMERCIAL_REGIONS_ARRAY=(${LEGACY_COMMERCIAL_REGIONS//,/ })
FOUND=false
for REGION in "${LEGACY_COMMERCIAL_REGIONS_ARRAY[@]}"; do
if [[ "$REGION" == "${{ matrix.aws_region }}" ]]; then
FOUND=true
break
fi
done
if [ "$FOUND" = true ]; then
echo "Found ${{ matrix.aws_region }} in LEGACY_COMMERCIAL_REGIONS"
SECRET_KEY="LAMBDA_LAYER_RELEASE"
else
echo "Not found ${{ matrix.aws_region }} in LEGACY_COMMERCIAL_REGIONS"
SECRET_KEY="${{ matrix.aws_region }}_LAMBDA_LAYER_RELEASE"
fi
SECRET_KEY=${SECRET_KEY//-/_}
echo "SECRET_KEY=${SECRET_KEY}" >> $GITHUB_ENV
- uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0
with:
role-to-assume: ${{ secrets[env.SECRET_KEY] }}
role-duration-seconds: 1200
aws-region: ${{ matrix.aws_region }}
- name: Get s3 bucket name for release
run: |
echo BUCKET_NAME=python-lambda-layer-${{ github.run_id }}-${{ matrix.aws_region }} | tee --append $GITHUB_ENV
- name: download layer.zip
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 #v5.0.0
with:
name: layer.zip
- name: Upload to S3 and Sign
continue-on-error: true
run: |
aws s3 mb s3://${{ env.BUCKET_NAME }}
aws s3 cp ${{ env.LAYER_ARTIFACT_NAME }} s3://${{ env.BUCKET_NAME }}
# Sign the layer
echo "Checking for signing profile..."
PROFILE=$(aws signer list-signing-profiles --query "profiles[?profileName=='ADOTLambdaLayerSigningProfile'].arn" --output text 2>/dev/null)
[ -z "$PROFILE" ] && echo "No signing profile found, skipping" && exit 0
echo "PROFILE is: $PROFILE"
echo "Starting signing job..."
# Capture both stdout and stderr to properly handle errors
SIGNING_OUTPUT=$(aws signer start-signing-job \
--source "s3={bucketName=${{ env.BUCKET_NAME }},key=${{ env.LAYER_ARTIFACT_NAME }},version=null}" \
--destination "s3={bucketName=${{ env.BUCKET_NAME }},prefix=signed-}" \
--profile-name ADOTLambdaLayerSigningProfile \
--query 'jobId' --output text 2>&1)
SIGNING_EXIT_CODE=$?
if [ $SIGNING_EXIT_CODE -ne 0 ]; then
echo "Signing job failed with exit code $SIGNING_EXIT_CODE"
echo "Error output: $SIGNING_OUTPUT"
exit 0 # Continue workflow but log the failure
fi
JOB_ID="$SIGNING_OUTPUT"
[ -z "$JOB_ID" ] && echo "No job ID returned" && exit 0
echo "Job ID: $JOB_ID"
echo "Waiting for signing job to complete..."
if ! aws signer wait successful-signing-job --job-id "$JOB_ID" 2>&1; then
echo "Warning: Signing job wait failed or timed out"
exit 0
fi
echo "Signing completed"
echo "Moving signed layer..."
SIGNED=$(aws signer describe-signing-job --job-id "$JOB_ID" --query 'signedObject.s3.key' --output text 2>&1)
DESCRIBE_EXIT_CODE=$?
if [ $DESCRIBE_EXIT_CODE -ne 0 ]; then
echo "Warning: Failed to describe signing job"
echo "Error: $SIGNED"
exit 0
fi
echo "SIGNED value: '$SIGNED'"
if [ -n "$SIGNED" ]; then
# Delete the original unsigned file first
aws s3 rm "s3://${{ env.BUCKET_NAME }}/${{ env.LAYER_ARTIFACT_NAME }}"
# Move the signed file to replace it
aws s3 mv "s3://${{ env.BUCKET_NAME }}/$SIGNED" "s3://${{ env.BUCKET_NAME }}/${{ env.LAYER_ARTIFACT_NAME }}"
echo "Signed layer moved successfully"
else
echo "No SIGNED value returned, skipping move"
fi
- name: Publish Layer Version
run: |
layerARN=$(
aws lambda publish-layer-version \
--layer-name ${{ env.LAYER_NAME }} \
--content S3Bucket=${{ env.BUCKET_NAME }},S3Key=${{ env.LAYER_ARTIFACT_NAME }} \
--compatible-runtimes python3.10 python3.11 python3.12 python3.13 \
--compatible-architectures "arm64" "x86_64" \
--license-info "Apache-2.0" \
--description "AWS Distro of OpenTelemetry Lambda Layer for Python Runtime" \
--query 'LayerVersionArn' \
--output text
)
echo $layerARN
echo "LAYER_ARN=${layerARN}" >> $GITHUB_ENV
mkdir ${{ env.LAYER_NAME }}
echo $layerARN > ${{ env.LAYER_NAME }}/${{ matrix.aws_region }}
cat ${{ env.LAYER_NAME }}/${{ matrix.aws_region }}
# Output SigningProfileVersionArn
aws lambda get-layer-version-by-arn \
--arn $layerARN \
--output json | jq -r '.Content.SigningProfileVersionArn'
- name: public layer
run: |
layerVersion=$(
aws lambda list-layer-versions \
--layer-name ${{ env.LAYER_NAME }} \
--query 'max_by(LayerVersions, &Version).Version'
)
aws lambda add-layer-version-permission \
--layer-name ${{ env.LAYER_NAME }} \
--version-number $layerVersion \
--principal "*" \
--statement-id publish \
--action lambda:GetLayerVersion
- name: upload layer arn artifact
if: ${{ success() }}
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 #v4.6.2
with:
name: ${{ env.LAYER_NAME }}-${{ matrix.aws_region }}
path: ${{ env.LAYER_NAME }}/${{ matrix.aws_region }}
- name: clean s3
if: always()
run: |
aws s3 rb --force s3://${{ env.BUCKET_NAME }}
generate-lambda-release-note:
runs-on: ubuntu-latest
needs: publish-layer-prod
outputs:
layer-note: ${{ steps.layer-note.outputs.layer-note }}
steps:
- name: Checkout Repo @ SHA - ${{ github.sha }}
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
- uses: hashicorp/setup-terraform@b9cd54a3c349d3f38e8881555d616ced269862dd #v3.1.2
- name: download layerARNs
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 #v5.0.0
with:
pattern: ${{ env.LAYER_NAME }}-*
path: ${{ env.LAYER_NAME }}
merge-multiple: true
- name: show layerARNs
run: |
for file in ${{ env.LAYER_NAME }}/*
do
echo $file
cat $file
done
- name: generate layer-note
id: layer-note
working-directory: ${{ env.LAYER_NAME }}
run: |
echo "| Region | Layer ARN |" >> ../layer-note
echo "| ---- | ---- |" >> ../layer-note
for file in *
do
read arn < $file
echo "| " $file " | " $arn " |" >> ../layer-note
done
cd ..
{
echo "layer-note<<EOF"
cat layer-note
echo "EOF"
} >> $GITHUB_OUTPUT
cat layer-note
- name: generate tf layer
working-directory: ${{ env.LAYER_NAME }}
run: |
echo "locals {" >> ../layer_arns.tf
echo " sdk_layer_arns = {" >> ../layer_arns.tf
for file in *
do
read arn < $file
echo " \""$file"\" = \""$arn"\"" >> ../layer_arns.tf
done
cd ..
echo " }" >> layer_arns.tf
echo "}" >> layer_arns.tf
terraform fmt layer_arns.tf
cat layer_arns.tf
- name: generate layer ARN constants for CDK
working-directory: ${{ env.LAYER_NAME }}
run: |
echo "{" > ../layer_cdk
for file in *; do
read arn < "$file"
echo " \"$file\": \"$arn\"," >> ../layer_cdk
done
echo "}" >> ../layer_cdk
cat ../layer_cdk
publish-github:
needs: generate-lambda-release-note
runs-on: ubuntu-latest
steps:
- name: Checkout Repo @ SHA - ${{ github.sha }}
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
- name: Download SDK wheel artifact
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 #v5.0.0
with:
name: ${{ env.WHEEL_ARTIFACT_NAME }}
- name: Download layer.zip artifact
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 #v5.0.0
with:
name: layer.zip
- name: Rename layer file
run: |
cp ${{ env.LAYER_ARTIFACT_NAME }} layer.zip
# Publish to GitHub releases
- name: Create GH release
id: create_release
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
# Extract all dependencies from pyproject.toml
DEPS=$(python3 -c "
import re
with open('aws-opentelemetry-distro/pyproject.toml', 'r') as f:
content = f.read()
deps_match = re.search(r'dependencies\s*=\s*\[(.*?)\]', content, re.DOTALL)
if deps_match:
deps_content = deps_match.group(1)
dep_lines = re.findall(r'\"([^\"]+)\"', deps_content)
formatted_deps = []
for dep_line in dep_lines:
if ' == ' in dep_line:
package, version = dep_line.split(' == ', 1)
formatted_deps.append(f'- \`{package}\` - {version}')
else:
formatted_deps.append(f'- \`{dep_line}\`')
print('\n'.join(formatted_deps))
")
# Extract CHANGELOG entries for this version
CHANGELOG_ENTRIES=$(python3 -c "
import re, os
version = os.environ['VERSION']
with open('CHANGELOG.md', 'r') as f:
content = f.read()
version_pattern = rf'## v{re.escape(version)}.*?\n(.*?)(?=\n## |\Z)'
version_match = re.search(version_pattern, content, re.DOTALL)
if version_match:
entries = version_match.group(1).strip()
if entries:
print(entries)
")
# Create release notes
cat > release_notes.md << EOF
$(if [ -n "$CHANGELOG_ENTRIES" ]; then echo "## What's Changed"; echo "$CHANGELOG_ENTRIES"; echo ""; fi)
## Upstream Components
$DEPS
## Release Artifacts
This release publishes to public ECR and PyPi.
* See ADOT Python auto-instrumentation Docker image v$VERSION in our public ECR repository:
https://gallery.ecr.aws/aws-observability/adot-autoinstrumentation-python
* See version $VERSION in our PyPi repository:
https://pypi.org/project/aws-opentelemetry-distro/
## Lambda Layer
This release includes the AWS OpenTelemetry Lambda Layer for Python version $VERSION-$(echo $GITHUB_SHA | cut -c1-7).
Lambda Layer ARNs:
${{ needs.generate-lambda-release-note.outputs.layer-note }}
EOF
shasum -a 256 ${{ env.WHEEL_ARTIFACT_NAME }} > ${{ env.WHEEL_ARTIFACT_NAME }}.sha256
shasum -a 256 layer.zip > layer.zip.sha256
gh release create --target "$GITHUB_REF_NAME" \
--title "Release v$VERSION" \
--notes-file release_notes.md \
--draft \
"v$VERSION" \
${{ env.WHEEL_ARTIFACT_NAME }} \
${{ env.WHEEL_ARTIFACT_NAME }}.sha256 \
layer.zip \
layer.zip.sha256