Skip to content

Commit 1baa1c2

Browse files
authored
simplify lambda layer signing step in release workflow (#726)
Remove defensive exit-code checks and debug output that were masking the real signing failure. The root cause (a missing s3:GetObjectVersion permission) is being fixed in the IAM policy.

*Issue #, if available:*

*Description of changes:*

By submitting this pull request, I confirm that you can use, modify, copy, and redistribute this contribution, under the terms of your choice.
1 parent e729533 commit 1baa1c2

1 file changed

Lines changed: 9 additions & 47 deletions

File tree

.github/workflows/release-build.yml

Lines changed: 9 additions & 47 deletions
Original file line numberDiff line numberDiff line change
@@ -233,60 +233,22 @@ jobs:
233233
run: |
234234
aws s3 mb s3://${{ env.BUCKET_NAME }}
235235
aws s3 cp ${{ env.LAYER_ARTIFACT_NAME }} s3://${{ env.BUCKET_NAME }}
236-
236+
237237
# Sign the layer
238-
echo "Checking for signing profile..."
239238
PROFILE=$(aws signer list-signing-profiles --query "profiles[?profileName=='ADOTLambdaLayerSigningProfile'].arn" --output text 2>/dev/null)
240239
[ -z "$PROFILE" ] && echo "No signing profile found, skipping" && exit 0
241240
242-
echo "PROFILE is: $PROFILE"
243-
244-
echo "Starting signing job..."
245-
# Capture both stdout and stderr to properly handle errors
246-
SIGNING_OUTPUT=$(aws signer start-signing-job \
241+
JOB_ID=$(aws signer start-signing-job \
247242
--source "s3={bucketName=${{ env.BUCKET_NAME }},key=${{ env.LAYER_ARTIFACT_NAME }},version=null}" \
248243
--destination "s3={bucketName=${{ env.BUCKET_NAME }},prefix=signed-}" \
249244
--profile-name ADOTLambdaLayerSigningProfile \
250-
--query 'jobId' --output text 2>&1)
251-
SIGNING_EXIT_CODE=$?
252-
253-
if [ $SIGNING_EXIT_CODE -ne 0 ]; then
254-
echo "Signing job failed with exit code $SIGNING_EXIT_CODE"
255-
echo "Error output: $SIGNING_OUTPUT"
256-
exit 0 # Continue workflow but log the failure
257-
fi
258-
259-
JOB_ID="$SIGNING_OUTPUT"
260-
[ -z "$JOB_ID" ] && echo "No job ID returned" && exit 0
261-
echo "Job ID: $JOB_ID"
262-
263-
echo "Waiting for signing job to complete..."
264-
if ! aws signer wait successful-signing-job --job-id "$JOB_ID" 2>&1; then
265-
echo "Warning: Signing job wait failed or timed out"
266-
exit 0
267-
fi
268-
echo "Signing completed"
269-
270-
echo "Moving signed layer..."
271-
SIGNED=$(aws signer describe-signing-job --job-id "$JOB_ID" --query 'signedObject.s3.key' --output text 2>&1)
272-
DESCRIBE_EXIT_CODE=$?
273-
274-
if [ $DESCRIBE_EXIT_CODE -ne 0 ]; then
275-
echo "Warning: Failed to describe signing job"
276-
echo "Error: $SIGNED"
277-
exit 0
278-
fi
279-
280-
echo "SIGNED value: '$SIGNED'"
281-
if [ -n "$SIGNED" ]; then
282-
# Delete the original unsigned file first
283-
aws s3 rm "s3://${{ env.BUCKET_NAME }}/${{ env.LAYER_ARTIFACT_NAME }}"
284-
# Move the signed file to replace it
285-
aws s3 mv "s3://${{ env.BUCKET_NAME }}/$SIGNED" "s3://${{ env.BUCKET_NAME }}/${{ env.LAYER_ARTIFACT_NAME }}"
286-
echo "Signed layer moved successfully"
287-
else
288-
echo "No SIGNED value returned, skipping move"
289-
fi
245+
--query 'jobId' --output text)
246+
247+
aws signer wait successful-signing-job --job-id "$JOB_ID"
248+
249+
SIGNED=$(aws signer describe-signing-job --job-id "$JOB_ID" --query 'signedObject.s3.key' --output text)
250+
aws s3 rm "s3://${{ env.BUCKET_NAME }}/${{ env.LAYER_ARTIFACT_NAME }}"
251+
aws s3 mv "s3://${{ env.BUCKET_NAME }}/$SIGNED" "s3://${{ env.BUCKET_NAME }}/${{ env.LAYER_ARTIFACT_NAME }}"
290252
291253
- name: Publish Layer Version
292254
run: |

0 commit comments

Comments (0)