diff --git a/tuts/093-amazon-eventbridge-gs/README.md b/tuts/093-amazon-eventbridge-gs/README.md new file mode 100644 index 00000000..cbc92ef3 --- /dev/null +++ b/tuts/093-amazon-eventbridge-gs/README.md @@ -0,0 +1,58 @@ +# EventBridge: Schedule a Lambda function + +Create an EventBridge scheduled rule that invokes a Lambda function every minute, verify execution in CloudWatch Logs, and clean up. + +## Source + +https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-get-started.html + +## Use case + +- ID: eventbridge/getting-started +- Phase: create +- Complexity: beginner +- Core actions: events:PutRule, events:PutTargets + +## What it does + +1. Creates an IAM execution role for Lambda +2. Creates a Node.js Lambda function that logs EventBridge events +3. Creates an EventBridge rule with a `rate(1 minute)` schedule +4. Grants EventBridge permission to invoke the function +5. Adds the Lambda function as the rule target +6. Waits for the rule to fire and verifies output in CloudWatch Logs + +## Running + +```bash +bash amazon-eventbridge-gs.sh +``` + +To auto-run with cleanup: + +```bash +echo 'y' | bash amazon-eventbridge-gs.sh +``` + +## Resources created + +- EventBridge rule (scheduled, rate 1 minute) +- Lambda function (Node.js 22) +- IAM role (with AWSLambdaBasicExecutionRole policy) +- CloudWatch log group (created automatically by Lambda) + +## Estimated time + +- Run: ~90 seconds (includes 65s wait for rule to fire) +- Cleanup: ~5 seconds + +## Cost + +Free tier eligible. Lambda and EventBridge invocations stay well within free tier limits for this tutorial. 
+ +## Related docs + +- [Getting started with Amazon EventBridge](https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-get-started.html) +- [Creating a rule that runs on a schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-create-rule-schedule.html) +- [Tutorial: Use EventBridge to relay events to a Lambda function](https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-log-ec2-instance-state.html) +- [Schedule expressions using rate or cron](https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-scheduled-rule-pattern.html) diff --git a/tuts/093-amazon-eventbridge-gs/REVISION-HISTORY.md b/tuts/093-amazon-eventbridge-gs/REVISION-HISTORY.md new file mode 100644 index 00000000..f97cb636 --- /dev/null +++ b/tuts/093-amazon-eventbridge-gs/REVISION-HISTORY.md @@ -0,0 +1,8 @@ +# Revision History: 093-amazon-eventbridge-gs + +## Shell (CLI script) + +### 2026-04-14 v1 published +- Type: functional +- Initial version + diff --git a/tuts/093-amazon-eventbridge-gs/amazon-eventbridge-gs.md b/tuts/093-amazon-eventbridge-gs/amazon-eventbridge-gs.md new file mode 100644 index 00000000..6a4b6f34 --- /dev/null +++ b/tuts/093-amazon-eventbridge-gs/amazon-eventbridge-gs.md @@ -0,0 +1,125 @@ +# Create an EventBridge rule that triggers a Lambda function on a schedule + +This tutorial shows you how to create an Amazon EventBridge scheduled rule that invokes an AWS Lambda function every minute. You create the Lambda function, set up the rule, verify the function runs by checking CloudWatch Logs, and then clean up. + +## Prerequisites + +- AWS CLI configured with credentials and a default region +- Permissions to create EventBridge rules, Lambda functions, and IAM roles + +## Step 1: Create an execution role + +Create an IAM role that grants the Lambda function permission to write logs. 
+ +```bash +ROLE_ARN=$(aws iam create-role --role-name eb-tut-role \ + --assume-role-policy-document '{ + "Version":"2012-10-17", + "Statement":[{"Effect":"Allow","Principal":{"Service":"lambda.amazonaws.com"},"Action":"sts:AssumeRole"}] + }' --query 'Role.Arn' --output text) + +aws iam attach-role-policy --role-name eb-tut-role \ + --policy-arn arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole +``` + +Wait about 10 seconds for the role to propagate before creating the function. + +## Step 2: Create the Lambda function + +Create a Node.js function that logs each EventBridge event it receives. + +```javascript +// index.mjs +export const handler = async (event) => { + console.log('EventBridge event received:', JSON.stringify(event, null, 2)); + return { statusCode: 200, body: 'Event processed' }; +}; +``` + +Package and deploy: + +```bash +zip function.zip index.mjs + +aws lambda create-function --function-name eb-tut-handler \ + --zip-file fileb://function.zip \ + --handler index.handler --runtime nodejs22.x \ + --role $ROLE_ARN --timeout 30 \ + --architectures x86_64 +``` + +Wait for the function to become active: + +```bash +aws lambda wait function-active-v2 --function-name eb-tut-handler +``` + +## Step 3: Create an EventBridge scheduled rule + +Create a rule that fires every minute. + +```bash +RULE_ARN=$(aws events put-rule --name eb-tut-rule \ + --schedule-expression "rate(1 minute)" \ + --state ENABLED \ + --query 'RuleArn' --output text) +``` + +## Step 4: Grant EventBridge permission to invoke Lambda + +Add a resource-based policy that allows EventBridge to call the function. + +```bash +aws lambda add-permission --function-name eb-tut-handler \ + --statement-id eb-invoke --action lambda:InvokeFunction \ + --principal events.amazonaws.com --source-arn $RULE_ARN +``` + +## Step 5: Add the Lambda function as a target + +Attach the function to the rule so EventBridge invokes it on each trigger. 
+ +```bash +FUNCTION_ARN=$(aws lambda get-function --function-name eb-tut-handler \ + --query 'Configuration.FunctionArn' --output text) + +aws events put-targets --rule eb-tut-rule \ + --targets "Id=lambda-target,Arn=$FUNCTION_ARN" +``` + +## Step 6: Verify in CloudWatch Logs + +Wait about 60 seconds for the rule to fire, then check the function's log output: + +```bash +LOG_STREAM=$(aws logs describe-log-streams \ + --log-group-name /aws/lambda/eb-tut-handler \ + --order-by LastEventTime --descending --limit 1 \ + --query 'logStreams[0].logStreamName' --output text) + +aws logs get-log-events \ + --log-group-name /aws/lambda/eb-tut-handler \ + --log-stream-name "$LOG_STREAM" \ + --query 'events[].message' --output text +``` + +You should see `EventBridge event received:` followed by the scheduled event JSON, which includes `"source": "aws.events"` and `"detail-type": "Scheduled Event"`. + +## Cleanup + +Delete all resources in reverse order: + +```bash +aws events remove-targets --rule eb-tut-rule --ids lambda-target +aws events delete-rule --name eb-tut-rule +aws lambda delete-function --function-name eb-tut-handler +aws iam detach-role-policy --role-name eb-tut-role \ + --policy-arn arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole +aws iam delete-role --role-name eb-tut-role +aws logs delete-log-group --log-group-name /aws/lambda/eb-tut-handler +``` + +The script automates all steps including cleanup. 
Run it with: + +```bash +bash amazon-eventbridge-gs.sh +``` diff --git a/tuts/093-amazon-eventbridge-gs/amazon-eventbridge-gs.sh b/tuts/093-amazon-eventbridge-gs/amazon-eventbridge-gs.sh new file mode 100644 index 00000000..f724ecd0 --- /dev/null +++ b/tuts/093-amazon-eventbridge-gs/amazon-eventbridge-gs.sh @@ -0,0 +1,130 @@ +#!/bin/bash +# Tutorial: Create an EventBridge rule that triggers a Lambda function +# Source: https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-get-started.html + +WORK_DIR=$(mktemp -d) +LOG_FILE="$WORK_DIR/eventbridge-$(date +%Y%m%d-%H%M%S).log" +exec > >(tee -a "$LOG_FILE") 2>&1 + +REGION=${AWS_DEFAULT_REGION:-${AWS_REGION:-$(aws configure get region 2>/dev/null)}} +if [ -z "$REGION" ]; then + echo "ERROR: No AWS region configured. Set one with: export AWS_DEFAULT_REGION=us-east-1" + exit 1 +fi +export AWS_DEFAULT_REGION="$REGION" +echo "Region: $REGION" + +RANDOM_ID=$(cat /dev/urandom | tr -dc 'a-z0-9' | fold -w 8 | head -n 1) +RULE_NAME="eb-tut-rule-${RANDOM_ID}" +FUNCTION_NAME="eb-tut-handler-${RANDOM_ID}" +ROLE_NAME="eb-tut-role-${RANDOM_ID}" + +handle_error() { echo "ERROR on line $1"; trap - ERR; cleanup; exit 1; } +trap 'handle_error $LINENO' ERR + +cleanup() { + echo "" + echo "Cleaning up resources..." + aws events remove-targets --rule "$RULE_NAME" --ids lambda-target > /dev/null 2>&1 && echo " Removed rule target" + aws events delete-rule --name "$RULE_NAME" 2>/dev/null && echo " Deleted rule $RULE_NAME" + aws lambda delete-function --function-name "$FUNCTION_NAME" 2>/dev/null && echo " Deleted function $FUNCTION_NAME" + aws iam detach-role-policy --role-name "$ROLE_NAME" \ + --policy-arn arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole 2>/dev/null + aws iam delete-role --role-name "$ROLE_NAME" 2>/dev/null && echo " Deleted role $ROLE_NAME" + aws logs delete-log-group --log-group-name "/aws/lambda/$FUNCTION_NAME" 2>/dev/null && echo " Deleted log group" + rm -rf "$WORK_DIR" + echo "Cleanup complete." 
+} + +# Step 1: Create IAM role +echo "Step 1: Creating IAM role: $ROLE_NAME" +ROLE_ARN=$(aws iam create-role --role-name "$ROLE_NAME" \ + --assume-role-policy-document '{ + "Version":"2012-10-17", + "Statement":[{"Effect":"Allow","Principal":{"Service":"lambda.amazonaws.com"},"Action":"sts:AssumeRole"}] + }' --query 'Role.Arn' --output text) +aws iam attach-role-policy --role-name "$ROLE_NAME" \ + --policy-arn arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole +echo " Role ARN: $ROLE_ARN" +echo " Waiting for role propagation..." +sleep 10 + +# Step 2: Create Lambda function +echo "Step 2: Creating Lambda function: $FUNCTION_NAME" +cat > "$WORK_DIR/index.mjs" << 'EOF' +export const handler = async (event) => { + console.log('EventBridge event received:', JSON.stringify(event, null, 2)); + return { statusCode: 200, body: 'Event processed' }; +}; +EOF +(cd "$WORK_DIR" && zip function.zip index.mjs > /dev/null) + +aws lambda create-function --function-name "$FUNCTION_NAME" \ + --zip-file "fileb://$WORK_DIR/function.zip" \ + --handler index.handler --runtime nodejs22.x \ + --role "$ROLE_ARN" --timeout 30 \ + --architectures x86_64 \ + --query 'FunctionArn' --output text +aws lambda wait function-active-v2 --function-name "$FUNCTION_NAME" +FUNCTION_ARN=$(aws lambda get-function --function-name "$FUNCTION_NAME" --query 'Configuration.FunctionArn' --output text) + +# Step 3: Create EventBridge rule (runs every minute) +echo "Step 3: Creating EventBridge rule: $RULE_NAME (every 1 minute)" +RULE_ARN=$(aws events put-rule --name "$RULE_NAME" \ + --schedule-expression "rate(1 minute)" \ + --state ENABLED \ + --query 'RuleArn' --output text) +echo " Rule ARN: $RULE_ARN" + +# Step 4: Grant EventBridge permission to invoke Lambda +echo "Step 4: Granting EventBridge permission to invoke Lambda" +aws lambda add-permission --function-name "$FUNCTION_NAME" \ + --statement-id eb-invoke --action lambda:InvokeFunction \ + --principal events.amazonaws.com --source-arn 
"$RULE_ARN" > /dev/null + +# Step 5: Add Lambda as target +echo "Step 5: Adding Lambda as rule target" +aws events put-targets --rule "$RULE_NAME" \ + --targets "Id=lambda-target,Arn=$FUNCTION_ARN" > /dev/null +echo " Target added" + +# Step 6: Wait for the rule to fire and check logs +echo "Step 6: Waiting for EventBridge to trigger Lambda (~60s)..." +sleep 65 + +LOG_GROUP="/aws/lambda/$FUNCTION_NAME" +FOUND_LOGS=false +for i in $(seq 1 10); do + LOG_STREAM=$(aws logs describe-log-streams --log-group-name "$LOG_GROUP" \ + --order-by LastEventTime --descending --limit 1 \ + --query 'logStreams[0].logStreamName' --output text 2>/dev/null || true) + if [ -n "$LOG_STREAM" ] && [ "$LOG_STREAM" != "None" ]; then + echo " Log stream: $LOG_STREAM" + echo " Recent events:" + aws logs get-log-events --log-group-name "$LOG_GROUP" \ + --log-stream-name "$LOG_STREAM" --limit 5 \ + --query 'events[].message' --output text + FOUND_LOGS=true + break + fi + sleep 5 +done +if [ "$FOUND_LOGS" = false ]; then + echo " Logs not available yet — the rule may not have fired yet" +fi + +echo "" +echo "Tutorial complete." +echo "Do you want to clean up all resources? (y/n): " +read -r CHOICE +if [[ "$CHOICE" =~ ^[Yy]$ ]]; then + cleanup +else + echo "Resources left running. The rule fires every minute." 
+ echo "Manual cleanup:" + echo " aws events remove-targets --rule $RULE_NAME --ids lambda-target" + echo " aws events delete-rule --name $RULE_NAME" + echo " aws lambda delete-function --function-name $FUNCTION_NAME" + echo " aws iam detach-role-policy --role-name $ROLE_NAME --policy-arn arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole" + echo " aws iam delete-role --role-name $ROLE_NAME" +fi diff --git a/tuts/110-amazon-sqs-gs/README.md b/tuts/110-amazon-sqs-gs/README.md new file mode 100644 index 00000000..36d6226c --- /dev/null +++ b/tuts/110-amazon-sqs-gs/README.md @@ -0,0 +1,58 @@ +# SQS: Create queues and send messages + +## Source + +https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-getting-started.html + +## Use case + +- **ID**: sqs/getting-started +- **Level**: beginner +- **Core actions**: `sqs:CreateQueue`, `sqs:SendMessage` + +## Steps + +1. Create a standard queue +2. Create a dead-letter queue and configure redrive policy +3. Send messages (individual and batch) +4. Receive and process messages +5. Delete processed messages +6. Check queue attributes +7. Create a FIFO queue and send a message + +## Resources created + +| Resource | Type | +|----------|------| +| `tut-queue-` | Standard queue | +| `tut-dlq-` | Dead-letter queue | +| `tut-fifo-.fifo` | FIFO queue | + +## Cost + +Free tier includes 1 million requests/month. This tutorial sends fewer than 10 messages. 
+ +## Duration + +~14 seconds + +## Related docs + +- [Getting started with Amazon SQS](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-getting-started.html) +- [Amazon SQS dead-letter queues](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-dead-letter-queues.html) +- [Amazon SQS FIFO queues](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/FIFO-queues.html) +- [Sending messages in batches](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-batch-api-actions.html) + +--- + +## Appendix + +| Field | Value | +|-------|-------| +| Date | 2026-04-14 | +| Script lines | 110 | +| Exit code | 0 | +| Runtime | 14s | +| Steps | 7 | +| Issues | Fixed f-string quoting | +| Version | v1 | diff --git a/tuts/110-amazon-sqs-gs/REVISION-HISTORY.md b/tuts/110-amazon-sqs-gs/REVISION-HISTORY.md new file mode 100644 index 00000000..7571efb6 --- /dev/null +++ b/tuts/110-amazon-sqs-gs/REVISION-HISTORY.md @@ -0,0 +1,8 @@ +# Revision History: 110-amazon-sqs-gs + +## Shell (CLI script) + +### 2026-04-14 v1 published +- Type: functional +- Initial version + diff --git a/tuts/110-amazon-sqs-gs/amazon-sqs-gs.md b/tuts/110-amazon-sqs-gs/amazon-sqs-gs.md new file mode 100644 index 00000000..69bad1c6 --- /dev/null +++ b/tuts/110-amazon-sqs-gs/amazon-sqs-gs.md @@ -0,0 +1,148 @@ +# Create queues and send messages with Amazon SQS + +## Overview + +In this tutorial, you use the AWS CLI to create a standard queue, a dead-letter queue, and a FIFO queue. You send individual and batch messages, receive and delete them, and inspect queue attributes. You then clean up all queues. + +## Prerequisites + +- AWS CLI installed and configured with appropriate permissions. +- An IAM principal with permissions for `sqs:CreateQueue`, `sqs:DeleteQueue`, `sqs:SendMessage`, `sqs:SendMessageBatch`, `sqs:ReceiveMessage`, `sqs:DeleteMessage`, `sqs:GetQueueAttributes`, and `sqs:SetQueueAttributes`. 
+ +## Step 1: Create a standard queue + +Create a queue with a 30-second visibility timeout and 1-day message retention. + +```bash +RANDOM_ID=$(openssl rand -hex 4) +QUEUE_NAME="tut-queue-${RANDOM_ID}" + +QUEUE_URL=$(aws sqs create-queue --queue-name "$QUEUE_NAME" \ + --attributes '{"VisibilityTimeout":"30","MessageRetentionPeriod":"86400"}' \ + --query 'QueueUrl' --output text) +echo "Queue URL: $QUEUE_URL" +``` + +The visibility timeout controls how long a message stays hidden after a consumer receives it. If the consumer doesn't delete the message within that window, it becomes visible again. + +## Step 2: Create a dead-letter queue + +Create a second queue to capture messages that fail processing, then attach it to the main queue with a redrive policy. + +```bash +DLQ_NAME="tut-dlq-${RANDOM_ID}" + +DLQ_URL=$(aws sqs create-queue --queue-name "$DLQ_NAME" --query 'QueueUrl' --output text) +DLQ_ARN=$(aws sqs get-queue-attributes --queue-url "$DLQ_URL" \ + --attribute-names QueueArn --query 'Attributes.QueueArn' --output text) + +aws sqs set-queue-attributes --queue-url "$QUEUE_URL" \ + --attributes "{\"RedrivePolicy\":\"{\\\"deadLetterTargetArn\\\":\\\"$DLQ_ARN\\\",\\\"maxReceiveCount\\\":\\\"3\\\"}\"}" +``` + +After a message is received 3 times without being deleted, SQS moves it to the dead-letter queue. + +## Step 3: Send messages + +Send two individual messages and a batch of three. + +```bash +aws sqs send-message --queue-url "$QUEUE_URL" \ + --message-body "Hello from SQS tutorial" + +aws sqs send-message --queue-url "$QUEUE_URL" \ + --message-body "Message with attributes" \ + --message-attributes '{"Author":{"DataType":"String","StringValue":"Tutorial"}}' + +aws sqs send-message-batch --queue-url "$QUEUE_URL" --entries \ + '[{"Id":"m1","MessageBody":"Batch message 1"},{"Id":"m2","MessageBody":"Batch message 2"},{"Id":"m3","MessageBody":"Batch message 3"}]' +``` + +`send-message-batch` accepts up to 10 messages per call. 
Each entry needs a unique `Id` within the batch. + +## Step 4: Receive and process messages + +Receive up to 5 messages, including any message attributes. + +```bash +MSGS=$(aws sqs receive-message --queue-url "$QUEUE_URL" \ + --max-number-of-messages 5 \ + --message-attribute-names All --attribute-names All) +echo "$MSGS" | python3 -c " +import sys, json +msgs = json.load(sys.stdin).get('Messages', []) +for m in msgs: + print(f'Body: {m[\"Body\"]}') +print(f'Received {len(msgs)} messages') +" +``` + +Messages remain in the queue until explicitly deleted. If you don't delete them within the visibility timeout, they become available to other consumers. + +## Step 5: Delete processed messages + +Delete each received message using its receipt handle. + +```bash +echo "$MSGS" | python3 -c " +import sys, json, subprocess +msgs = json.load(sys.stdin).get('Messages', []) +for m in msgs: + subprocess.run(['aws', 'sqs', 'delete-message', + '--queue-url', '$QUEUE_URL', + '--receipt-handle', m['ReceiptHandle']], capture_output=True) +print(f'Deleted {len(msgs)} messages') +" +``` + +## Step 6: Check queue attributes + +View the queue's current configuration including message counts and redrive policy. + +```bash +aws sqs get-queue-attributes --queue-url "$QUEUE_URL" --attribute-names All \ + --query 'Attributes.{Messages:ApproximateNumberOfMessages,Visibility:VisibilityTimeout,Retention:MessageRetentionPeriod,DLQ:RedrivePolicy}' \ + --output table +``` + +## Step 7: Create a FIFO queue + +Create a FIFO queue with content-based deduplication and send a message. 
+ +```bash +FIFO_NAME="tut-fifo-${RANDOM_ID}.fifo" + +FIFO_URL=$(aws sqs create-queue --queue-name "$FIFO_NAME" \ + --attributes '{"FifoQueue":"true","ContentBasedDeduplication":"true"}' \ + --query 'QueueUrl' --output text) + +aws sqs send-message --queue-url "$FIFO_URL" \ + --message-body "FIFO message" --message-group-id "tutorial" +``` + +FIFO queues guarantee exactly-once processing and strict ordering within each message group. The queue name must end with `.fifo`. + +## Cleanup + +Delete all three queues. + +```bash +aws sqs delete-queue --queue-url "$QUEUE_URL" +aws sqs delete-queue --queue-url "$DLQ_URL" +aws sqs delete-queue --queue-url "$FIFO_URL" +``` + +After deletion, the queue name becomes available again after 60 seconds. + +The script automates all steps including cleanup: + +```bash +bash amazon-sqs-gs.sh +``` + +## Related resources + +- [Getting started with Amazon SQS](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-getting-started.html) +- [Amazon SQS dead-letter queues](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-dead-letter-queues.html) +- [Amazon SQS FIFO queues](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/FIFO-queues.html) +- [Sending messages in batches](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-batch-api-actions.html) diff --git a/tuts/110-amazon-sqs-gs/amazon-sqs-gs.sh b/tuts/110-amazon-sqs-gs/amazon-sqs-gs.sh new file mode 100644 index 00000000..518395be --- /dev/null +++ b/tuts/110-amazon-sqs-gs/amazon-sqs-gs.sh @@ -0,0 +1,111 @@ +#!/bin/bash +# Tutorial: Create queues and send messages with Amazon SQS +# Source: https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-getting-started.html + +WORK_DIR=$(mktemp -d) +LOG_FILE="$WORK_DIR/sqs-$(date +%Y%m%d-%H%M%S).log" +exec > >(tee -a "$LOG_FILE") 2>&1 + +REGION=${AWS_DEFAULT_REGION:-${AWS_REGION:-$(aws configure get region 
2>/dev/null)}} +if [ -z "$REGION" ]; then + echo "ERROR: No AWS region configured. Set one with: export AWS_DEFAULT_REGION=us-east-1" + exit 1 +fi +export AWS_DEFAULT_REGION="$REGION" +echo "Region: $REGION" + +RANDOM_ID=$(cat /dev/urandom | tr -dc 'a-z0-9' | fold -w 8 | head -n 1) +QUEUE_NAME="tut-queue-${RANDOM_ID}" +DLQ_NAME="tut-dlq-${RANDOM_ID}" +FIFO_NAME="tut-fifo-${RANDOM_ID}.fifo" + +handle_error() { echo "ERROR on line $1"; trap - ERR; cleanup; exit 1; } +trap 'handle_error $LINENO' ERR + +cleanup() { + echo "" + echo "Cleaning up resources..." + [ -n "$QUEUE_URL" ] && aws sqs delete-queue --queue-url "$QUEUE_URL" 2>/dev/null && echo " Deleted $QUEUE_NAME" + [ -n "$DLQ_URL" ] && aws sqs delete-queue --queue-url "$DLQ_URL" 2>/dev/null && echo " Deleted $DLQ_NAME" + [ -n "$FIFO_URL" ] && aws sqs delete-queue --queue-url "$FIFO_URL" 2>/dev/null && echo " Deleted $FIFO_NAME" + rm -rf "$WORK_DIR" + echo "Cleanup complete." +} + +# Step 1: Create a standard queue +echo "Step 1: Creating standard queue: $QUEUE_NAME" +QUEUE_URL=$(aws sqs create-queue --queue-name "$QUEUE_NAME" \ + --attributes '{"VisibilityTimeout":"30","MessageRetentionPeriod":"86400"}' \ + --query 'QueueUrl' --output text) +echo " URL: $QUEUE_URL" + +# Step 2: Create a dead-letter queue +echo "Step 2: Creating dead-letter queue: $DLQ_NAME" +DLQ_URL=$(aws sqs create-queue --queue-name "$DLQ_NAME" --query 'QueueUrl' --output text) +DLQ_ARN=$(aws sqs get-queue-attributes --queue-url "$DLQ_URL" --attribute-names QueueArn --query 'Attributes.QueueArn' --output text) + +# Configure redrive policy +aws sqs set-queue-attributes --queue-url "$QUEUE_URL" \ + --attributes "{\"RedrivePolicy\":\"{\\\"deadLetterTargetArn\\\":\\\"$DLQ_ARN\\\",\\\"maxReceiveCount\\\":\\\"3\\\"}\"}" +echo " DLQ configured (max receives: 3)" + +# Step 3: Send messages +echo "Step 3: Sending messages" +aws sqs send-message --queue-url "$QUEUE_URL" --message-body "Hello from SQS tutorial" > /dev/null +aws sqs send-message 
--queue-url "$QUEUE_URL" --message-body "Message with attributes" \ + --message-attributes '{"Author":{"DataType":"String","StringValue":"Tutorial"}}' > /dev/null +aws sqs send-message-batch --queue-url "$QUEUE_URL" --entries \ + '[{"Id":"m1","MessageBody":"Batch message 1"},{"Id":"m2","MessageBody":"Batch message 2"},{"Id":"m3","MessageBody":"Batch message 3"}]' > /dev/null +echo " Sent 5 messages (2 individual + 3 batch)" + +# Step 4: Receive and process messages +echo "Step 4: Receiving messages" +MSGS=$(aws sqs receive-message --queue-url "$QUEUE_URL" --max-number-of-messages 5 \ + --message-attribute-names All --attribute-names All) +echo "$MSGS" | python3 -c " +import sys,json +msgs=json.load(sys.stdin).get('Messages',[]) +for m in msgs: + print(f' Body: {m[\"Body\"]}') + attrs=m.get('MessageAttributes',{}) + for k,v in attrs.items(): + print(f' Attribute: {k}={v[\"StringValue\"]}') +print(f' Received {len(msgs)} messages') +" + +# Step 5: Delete processed messages +echo "Step 5: Deleting processed messages" +echo "$MSGS" | python3 -c " +import sys,json,subprocess +msgs=json.load(sys.stdin).get('Messages',[]) +for m in msgs: + subprocess.run(['aws','sqs','delete-message','--queue-url','$QUEUE_URL','--receipt-handle',m['ReceiptHandle']],capture_output=True) +print(f' Deleted {len(msgs)} messages') +" + +# Step 6: Check queue attributes +echo "Step 6: Queue attributes" +aws sqs get-queue-attributes --queue-url "$QUEUE_URL" --attribute-names All \ + --query 'Attributes.{Messages:ApproximateNumberOfMessages,Visibility:VisibilityTimeout,Retention:MessageRetentionPeriod,DLQ:RedrivePolicy}' --output table + +# Step 7: Create a FIFO queue +echo "Step 7: Creating FIFO queue: $FIFO_NAME" +FIFO_URL=$(aws sqs create-queue --queue-name "$FIFO_NAME" \ + --attributes '{"FifoQueue":"true","ContentBasedDeduplication":"true"}' \ + --query 'QueueUrl' --output text) +aws sqs send-message --queue-url "$FIFO_URL" --message-body "FIFO message" \ + --message-group-id "tutorial" > 
/dev/null +echo " FIFO message sent with content-based deduplication" + +echo "" +echo "Tutorial complete." +echo "Do you want to clean up all resources? (y/n): " +read -r CHOICE +if [[ "$CHOICE" =~ ^[Yy]$ ]]; then + cleanup +else + echo "Manual cleanup:" + echo " aws sqs delete-queue --queue-url $QUEUE_URL" + echo " aws sqs delete-queue --queue-url $DLQ_URL" + echo " aws sqs delete-queue --queue-url $FIFO_URL" +fi diff --git a/tuts/159-sns-message-filtering/README.md b/tuts/159-sns-message-filtering/README.md new file mode 100644 index 00000000..68475ccf --- /dev/null +++ b/tuts/159-sns-message-filtering/README.md @@ -0,0 +1,39 @@ +# Sns Filtering + +An AWS CLI tutorial that demonstrates Sns operations. + +## Running + +```bash +bash sns-filtering.sh +``` + +To auto-run with cleanup: + +```bash +echo 'y' | bash sns-filtering.sh +``` + +## What it does + +1. Creating topic and queues +2. Subscribing with filters +3. Publishing messages +4. Checking queues + +## Resources created + +- Queue +- Topic + +The script prompts you to clean up resources when it finishes. + +## Cost + +Free tier eligible for most operations. Clean up resources after use to avoid charges. 
+ +## Related docs + +- [AWS CLI sns reference](https://docs.aws.amazon.com/cli/latest/reference/sns/index.html) +- [AWS CLI sqs reference](https://docs.aws.amazon.com/cli/latest/reference/sqs/index.html) + diff --git a/tuts/159-sns-message-filtering/REVISION-HISTORY.md b/tuts/159-sns-message-filtering/REVISION-HISTORY.md new file mode 100644 index 00000000..449edc75 --- /dev/null +++ b/tuts/159-sns-message-filtering/REVISION-HISTORY.md @@ -0,0 +1,8 @@ +# Revision History: 159-sns-message-filtering + +## Shell (CLI script) + +### 2026-04-14 v1 published +- Type: functional +- Initial version + diff --git a/tuts/159-sns-message-filtering/sns-filtering.md b/tuts/159-sns-message-filtering/sns-filtering.md new file mode 100644 index 00000000..f2267d5e --- /dev/null +++ b/tuts/159-sns-message-filtering/sns-filtering.md @@ -0,0 +1,27 @@ +# Sns Filtering + +## Prerequisites + +1. AWS CLI installed and configured (`aws configure`) +2. Appropriate IAM permissions for the AWS services used + +## Step 1: Creating topic and queues + +The script handles this step automatically. See `sns-filtering.sh` for the exact CLI commands. + +## Step 2: Subscribing with filters + +The script handles this step automatically. See `sns-filtering.sh` for the exact CLI commands. + +## Step 3: Publishing messages + +The script handles this step automatically. See `sns-filtering.sh` for the exact CLI commands. + +## Step 4: Checking queues + +The script handles this step automatically. See `sns-filtering.sh` for the exact CLI commands. + +## Cleanup + +The script prompts you to clean up all created resources. If you need to clean up manually, check the script log for the resource names that were created. 
+ diff --git a/tuts/159-sns-message-filtering/sns-filtering.sh b/tuts/159-sns-message-filtering/sns-filtering.sh new file mode 100644 index 00000000..7d3a5345 --- /dev/null +++ b/tuts/159-sns-message-filtering/sns-filtering.sh @@ -0,0 +1,30 @@ +#!/bin/bash +WORK_DIR=$(mktemp -d); exec > >(tee -a "$WORK_DIR/sns-filter.log") 2>&1 +REGION=${AWS_DEFAULT_REGION:-${AWS_REGION:-$(aws configure get region 2>/dev/null)}}; [ -z "$REGION" ] && echo "ERROR: No region" && exit 1; export AWS_DEFAULT_REGION="$REGION"; echo "Region: $REGION" +RANDOM_ID=$(cat /dev/urandom | tr -dc 'a-z0-9' | fold -w 8 | head -n 1); TOPIC="tut-filter-${RANDOM_ID}"; Q1="tut-orders-${RANDOM_ID}"; Q2="tut-alerts-${RANDOM_ID}" +handle_error() { echo "ERROR on line $1"; trap - ERR; cleanup; exit 1; }; trap 'handle_error $LINENO' ERR +cleanup() { echo ""; echo "Cleaning up..."; [ -n "$SUB1_ARN" ] && aws sns unsubscribe --subscription-arn "$SUB1_ARN" 2>/dev/null; [ -n "$SUB2_ARN" ] && aws sns unsubscribe --subscription-arn "$SUB2_ARN" 2>/dev/null; aws sns delete-topic --topic-arn "$TOPIC_ARN" 2>/dev/null && echo " Deleted topic"; [ -n "$Q1_URL" ] && aws sqs delete-queue --queue-url "$Q1_URL" 2>/dev/null; [ -n "$Q2_URL" ] && aws sqs delete-queue --queue-url "$Q2_URL" 2>/dev/null; echo " Deleted queues"; rm -rf "$WORK_DIR"; echo "Done."; } +echo "Step 1: Creating topic and queues" +TOPIC_ARN=$(aws sns create-topic --name "$TOPIC" --query 'TopicArn' --output text) +Q1_URL=$(aws sqs create-queue --queue-name "$Q1" --query 'QueueUrl' --output text) +Q2_URL=$(aws sqs create-queue --queue-name "$Q2" --query 'QueueUrl' --output text) +Q1_ARN=$(aws sqs get-queue-attributes --queue-url "$Q1_URL" --attribute-names QueueArn --query 'Attributes.QueueArn' --output text) +Q2_ARN=$(aws sqs get-queue-attributes --queue-url "$Q2_URL" --attribute-names QueueArn --query 'Attributes.QueueArn' --output text) +for Q_URL in "$Q1_URL" "$Q2_URL"; do Q_ARN=$(aws sqs get-queue-attributes --queue-url "$Q_URL" --attribute-names
QueueArn --query 'Attributes.QueueArn' --output text); aws sqs set-queue-attributes --queue-url "$Q_URL" --attributes "{\"Policy\":\"{\\\"Version\\\":\\\"2012-10-17\\\",\\\"Statement\\\":[{\\\"Effect\\\":\\\"Allow\\\",\\\"Principal\\\":{\\\"Service\\\":\\\"sns.amazonaws.com\\\"},\\\"Action\\\":\\\"sqs:SendMessage\\\",\\\"Resource\\\":\\\"$Q_ARN\\\"}]}\"}"; done +echo " Topic: $TOPIC_ARN" +echo "Step 2: Subscribing with filters" +SUB1_ARN=$(aws sns subscribe --topic-arn "$TOPIC_ARN" --protocol sqs --notification-endpoint "$Q1_ARN" --attributes '{"FilterPolicy":"{\"event_type\":[\"order\"]}"}' --query 'SubscriptionArn' --output text) +SUB2_ARN=$(aws sns subscribe --topic-arn "$TOPIC_ARN" --protocol sqs --notification-endpoint "$Q2_ARN" --attributes '{"FilterPolicy":"{\"event_type\":[\"alert\"]}"}' --query 'SubscriptionArn' --output text) +echo " Orders queue: filters for event_type=order" +echo " Alerts queue: filters for event_type=alert" +echo "Step 3: Publishing messages" +aws sns publish --topic-arn "$TOPIC_ARN" --message "New order placed" --message-attributes '{"event_type":{"DataType":"String","StringValue":"order"}}' > /dev/null +aws sns publish --topic-arn "$TOPIC_ARN" --message "System alert" --message-attributes '{"event_type":{"DataType":"String","StringValue":"alert"}}' > /dev/null +aws sns publish --topic-arn "$TOPIC_ARN" --message "Another order" --message-attributes '{"event_type":{"DataType":"String","StringValue":"order"}}' > /dev/null +echo " Published 3 messages (2 orders, 1 alert)" +sleep 3 +echo "Step 4: Checking queues" +echo " Orders queue: $(aws sqs get-queue-attributes --queue-url "$Q1_URL" --attribute-names ApproximateNumberOfMessages --query 'Attributes.ApproximateNumberOfMessages' --output text) messages" +echo " Alerts queue: $(aws sqs get-queue-attributes --queue-url "$Q2_URL" --attribute-names ApproximateNumberOfMessages --query 'Attributes.ApproximateNumberOfMessages' --output text) messages" +echo ""; echo "Tutorial complete." 
+echo "Do you want to clean up? (y/n): "; read -r CHOICE; [[ "$CHOICE" =~ ^[Yy]$ ]] && cleanup diff --git a/tuts/165-ses-email-identity/README.md b/tuts/165-ses-email-identity/README.md new file mode 100644 index 00000000..5a671cdc --- /dev/null +++ b/tuts/165-ses-email-identity/README.md @@ -0,0 +1,37 @@ +# Ses Identity + +An AWS CLI tutorial that demonstrates Sesv2 operations. + +## Running + +```bash +bash ses-identity.sh +``` + +To auto-run with cleanup: + +```bash +echo 'y' | bash ses-identity.sh +``` + +## What it does + +1. Creating email identity (domain): $DOMAIN +2. Getting DKIM tokens +3. Getting sending quota +4. Listing identities + +## Resources created + +- Email Identity + +The script prompts you to clean up resources when it finishes. + +## Cost + +Free tier eligible for most operations. Clean up resources after use to avoid charges. + +## Related docs + +- [AWS CLI sesv2 reference](https://docs.aws.amazon.com/cli/latest/reference/sesv2/index.html) + diff --git a/tuts/165-ses-email-identity/REVISION-HISTORY.md b/tuts/165-ses-email-identity/REVISION-HISTORY.md new file mode 100644 index 00000000..5bcce6a8 --- /dev/null +++ b/tuts/165-ses-email-identity/REVISION-HISTORY.md @@ -0,0 +1,8 @@ +# Revision History: 165-ses-email-identity + +## Shell (CLI script) + +### 2026-04-14 v1 published +- Type: functional +- Initial version + diff --git a/tuts/165-ses-email-identity/ses-identity.md b/tuts/165-ses-email-identity/ses-identity.md new file mode 100644 index 00000000..0efafa1e --- /dev/null +++ b/tuts/165-ses-email-identity/ses-identity.md @@ -0,0 +1,27 @@ +# Ses Identity + +## Prerequisites + +1. AWS CLI installed and configured (`aws configure`) +2. Appropriate IAM permissions for the AWS services used + +## Step 1: Creating email identity (domain): $DOMAIN + +The script handles this step automatically. See `ses-identity.sh` for the exact CLI commands. + +## Step 2: Getting DKIM tokens + +The script handles this step automatically. 
See `ses-identity.sh` for the exact CLI commands.
+
+## Step 3: Getting sending quota
+
+The script handles this step automatically. See `ses-identity.sh` for the exact CLI commands.
+
+## Step 4: Listing identities
+
+The script handles this step automatically. See `ses-identity.sh` for the exact CLI commands.
+
+## Cleanup
+
+The script prompts you to clean up all created resources. If you need to clean up manually, check the script log for the resource names that were created.
+
diff --git a/tuts/165-ses-email-identity/ses-identity.sh b/tuts/165-ses-email-identity/ses-identity.sh
new file mode 100644
index 00000000..5ace9e70
--- /dev/null
+++ b/tuts/165-ses-email-identity/ses-identity.sh
@@ -0,0 +1,17 @@
+#!/bin/bash
+WORK_DIR=$(mktemp -d); exec > >(tee -a "$WORK_DIR/ses.log") 2>&1
+REGION=${AWS_DEFAULT_REGION:-${AWS_REGION:-$(aws configure get region 2>/dev/null)}}; [ -z "$REGION" ] && echo "ERROR: No region" && exit 1; export AWS_DEFAULT_REGION="$REGION"; echo "Region: $REGION" # nested defaults: env vars first, then CLI-configured region
+RANDOM_ID=$(LC_ALL=C tr -dc 'a-z0-9' < /dev/urandom | fold -w 8 | head -n 1); DOMAIN="tutorial-${RANDOM_ID}.example.com" # LC_ALL=C keeps tr byte-safe on multibyte locales
+handle_error() { echo "ERROR on line $1"; trap - ERR; cleanup; exit 1; }; trap 'handle_error $LINENO' ERR
+cleanup() { echo ""; echo "Cleaning up..."; aws sesv2 delete-email-identity --email-identity "$DOMAIN" 2>/dev/null && echo " Deleted identity"; rm -rf "$WORK_DIR"; echo "Done."; }
+echo "Step 1: Creating email identity (domain): $DOMAIN"
+aws sesv2 create-email-identity --email-identity "$DOMAIN" --query 'IdentityType' --output text
+echo "Step 2: Getting DKIM tokens"
+aws sesv2 get-email-identity --email-identity "$DOMAIN" --query 'DkimAttributes.{Status:SigningAttributesOrigin,Tokens:Tokens}' --output table 2>/dev/null || echo " DKIM tokens not available"
+echo "Step 3: Getting sending quota"
+aws sesv2 get-account --query 'SendQuota.{Max24Hr:Max24HourSend,MaxPerSec:MaxSendRate,SentLast24Hr:SentLast24Hours}' --output table
+echo "Step 4: Listing identities"
+aws sesv2 list-email-identities --query 'EmailIdentities[?starts_with(IdentityName, `tutorial-`)].{Name:IdentityName,Type:IdentityType}' --output table
+echo ""; echo "Tutorial complete."
+echo "Note: The domain identity will remain in PENDING status (example.com cannot be verified)."
+echo "Do you want to clean up? (y/n): "; read -r CHOICE; [[ "$CHOICE" =~ ^[Yy]$ ]] && cleanup
diff --git a/tuts/182-sqs-dead-letter-queues/README.md b/tuts/182-sqs-dead-letter-queues/README.md
new file mode 100644
index 00000000..7cb4fc46
--- /dev/null
+++ b/tuts/182-sqs-dead-letter-queues/README.md
@@ -0,0 +1,38 @@
+# Sqs Dlq
+
+An AWS CLI tutorial that demonstrates Sqs operations.
+
+## Running
+
+```bash
+bash sqs-dlq.sh
+```
+
+To auto-run with cleanup:
+
+```bash
+echo 'y' | bash sqs-dlq.sh
+```
+
+## What it does
+
+1. Creating DLQ
+2. Creating main queue with redrive
+3. Sending a message
+4. Receiving without deleting (simulating failure)
+5. Checking DLQ
+
+## Resources created
+
+- Queue
+
+The script prompts you to clean up resources when it finishes.
+
+## Cost
+
+Free tier eligible for most operations. Clean up resources after use to avoid charges.
+
+## Related docs
+
+- [AWS CLI sqs reference](https://docs.aws.amazon.com/cli/latest/reference/sqs/index.html)
+
diff --git a/tuts/182-sqs-dead-letter-queues/REVISION-HISTORY.md b/tuts/182-sqs-dead-letter-queues/REVISION-HISTORY.md
new file mode 100644
index 00000000..fcf81300
--- /dev/null
+++ b/tuts/182-sqs-dead-letter-queues/REVISION-HISTORY.md
@@ -0,0 +1,8 @@
+# Revision History: 182-sqs-dead-letter-queues
+
+## Shell (CLI script)
+
+### 2026-04-14 v1 published
+- Type: functional
+- Initial version
+
diff --git a/tuts/182-sqs-dead-letter-queues/sqs-dlq.md b/tuts/182-sqs-dead-letter-queues/sqs-dlq.md
new file mode 100644
index 00000000..b0e53528
--- /dev/null
+++ b/tuts/182-sqs-dead-letter-queues/sqs-dlq.md
@@ -0,0 +1,31 @@
+# Sqs Dlq
+
+## Prerequisites
+
+1. AWS CLI installed and configured (`aws configure`)
+2.
Appropriate IAM permissions for the AWS services used
+
+## Step 1: Creating DLQ
+
+The script handles this step automatically. See `sqs-dlq.sh` for the exact CLI commands.
+
+## Step 2: Creating main queue with redrive
+
+The script handles this step automatically. See `sqs-dlq.sh` for the exact CLI commands.
+
+## Step 3: Sending a message
+
+The script handles this step automatically. See `sqs-dlq.sh` for the exact CLI commands.
+
+## Step 4: Receiving without deleting (simulating failure)
+
+The script handles this step automatically. See `sqs-dlq.sh` for the exact CLI commands.
+
+## Step 5: Checking DLQ
+
+The script handles this step automatically. See `sqs-dlq.sh` for the exact CLI commands.
+
+## Cleanup
+
+The script prompts you to clean up all created resources. If you need to clean up manually, check the script log for the resource names that were created.
+
diff --git a/tuts/182-sqs-dead-letter-queues/sqs-dlq.sh b/tuts/182-sqs-dead-letter-queues/sqs-dlq.sh
new file mode 100644
index 00000000..a7883d18
--- /dev/null
+++ b/tuts/182-sqs-dead-letter-queues/sqs-dlq.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+WORK_DIR=$(mktemp -d); exec > >(tee -a "$WORK_DIR/tut.log") 2>&1
+REGION=${AWS_DEFAULT_REGION:-${AWS_REGION:-$(aws configure get region 2>/dev/null)}}; [ -z "$REGION" ] && echo "ERROR: No region" && exit 1; export AWS_DEFAULT_REGION="$REGION"; echo "Region: $REGION" # nested defaults: env vars first, then CLI-configured region
+RANDOM_ID=$(LC_ALL=C tr -dc 'a-z0-9' < /dev/urandom | fold -w 8 | head -n 1); Q="dlq-tut-${RANDOM_ID}"; DLQ="dlq-dead-${RANDOM_ID}" # LC_ALL=C keeps tr byte-safe on multibyte locales
+handle_error() { echo "ERROR on line $1"; trap - ERR; cleanup; exit 1; }; trap 'handle_error $LINENO' ERR
+cleanup() { echo "Cleaning up..."; [ -n "$QU" ] && aws sqs delete-queue --queue-url "$QU" 2>/dev/null; [ -n "$DU" ] && aws sqs delete-queue --queue-url "$DU" 2>/dev/null; rm -rf "$WORK_DIR"; echo "Done."; }
+echo "Step 1: Creating DLQ"
+DU=$(aws sqs create-queue --queue-name "$DLQ" --query QueueUrl --output text)
+DA=$(aws sqs get-queue-attributes --queue-url "$DU" --attribute-names QueueArn --query Attributes.QueueArn --output text)
+echo "Step 2: Creating main queue with redrive"
+QU=$(aws sqs create-queue --queue-name "$Q" --attributes "{\"RedrivePolicy\":\"{\\\"deadLetterTargetArn\\\":\\\"$DA\\\",\\\"maxReceiveCount\\\":\\\"2\\\"}\"}" --query QueueUrl --output text)
+echo "Step 3: Sending a message"
+aws sqs send-message --queue-url "$QU" --message-body "Test message" > /dev/null
+echo "Step 4: Receiving without deleting (simulating failure)"
+for i in 1 2 3; do aws sqs receive-message --queue-url "$QU" --visibility-timeout 0 --max-number-of-messages 1 > /dev/null 2>&1; done
+sleep 2
+echo "Step 5: Checking DLQ"
+echo " Main queue: $(aws sqs get-queue-attributes --queue-url "$QU" --attribute-names ApproximateNumberOfMessages --query Attributes.ApproximateNumberOfMessages --output text) messages"
+echo " DLQ: $(aws sqs get-queue-attributes --queue-url "$DU" --attribute-names ApproximateNumberOfMessages --query Attributes.ApproximateNumberOfMessages --output text) messages"
+echo ""; echo "Tutorial complete."
+echo "Do you want to clean up? (y/n): "; read -r C; [[ "$C" =~ ^[Yy]$ ]] && cleanup