Skip to content

Commit 2cb2e42

Browse files
committed
Add storage tutorials (batch 8)
1 parent 49f07d9 commit 2cb2e42

5 files changed

Lines changed: 78 additions & 0 deletions

File tree

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
#!/bin/bash
# Tutorial: enable S3 default bucket encryption (SSE-S3) and verify it
# on an uploaded object. All output is tee'd to a log in a private temp dir.

WORK_DIR=$(mktemp -d)
exec > >(tee -a "$WORK_DIR/s3-encryption.log") 2>&1

# Resolve region: env var wins, fall back to CLI config; abort if neither is set.
REGION=${AWS_DEFAULT_REGION:-$(aws configure get region 2>/dev/null)}
[ -z "$REGION" ] && echo "ERROR: No region" && exit 1
export AWS_DEFAULT_REGION="$REGION"
echo "Region: $REGION"

ACCOUNT=$(aws sts get-caller-identity --query Account --output text)
B="enc-tut-$(openssl rand -hex 4)-${ACCOUNT}"

echo "Step 1: Creating bucket"
# FIX: create-bucket requires an explicit LocationConstraint in every region
# except us-east-1; the original unconditional call failed elsewhere.
if [ "$REGION" = "us-east-1" ]; then
  aws s3api create-bucket --bucket "$B" > /dev/null
else
  aws s3api create-bucket --bucket "$B" \
    --create-bucket-configuration LocationConstraint="$REGION" > /dev/null
fi

echo "Step 2: Enabling SSE-S3"
aws s3api put-bucket-encryption --bucket "$B" \
  --server-side-encryption-configuration '{"Rules":[{"ApplyServerSideEncryptionByDefault":{"SSEAlgorithm":"AES256"}}]}'

echo "Step 3: Checking encryption"
aws s3api get-bucket-encryption --bucket "$B" \
  --query "ServerSideEncryptionConfiguration.Rules[0].ApplyServerSideEncryptionByDefault" --output table

echo "Step 4: Uploading encrypted object"
# Use the private work dir instead of a predictable /tmp path.
echo test > "$WORK_DIR/enc.txt"
aws s3 cp "$WORK_DIR/enc.txt" "s3://$B/test.txt" --quiet
aws s3api head-object --bucket "$B" --key test.txt \
  --query "{Encryption:ServerSideEncryption}" --output table

echo "Do you want to clean up? (y/n): "
read -r C
# Also remove the local work dir, which the original left behind.
[[ "$C" =~ ^[Yy]$ ]] && { aws s3 rm "s3://$B" --recursive --quiet; aws s3 rb "s3://$B"; rm -rf "$WORK_DIR"; echo Done; }
Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
#!/bin/bash
# Tutorial: create an S3 access point for a bucket, inspect and list it,
# then optionally tear everything down. Output is tee'd to a log file.

WORK_DIR=$(mktemp -d)
exec > >(tee -a "$WORK_DIR/ap.log") 2>&1

# Resolve region: env var wins, fall back to CLI config; abort if neither is set.
REGION=${AWS_DEFAULT_REGION:-$(aws configure get region 2>/dev/null)}
[ -z "$REGION" ] && echo "ERROR: No region" && exit 1
export AWS_DEFAULT_REGION="$REGION"
ACCOUNT=$(aws sts get-caller-identity --query Account --output text)
echo "Region: $REGION"

RANDOM_ID=$(openssl rand -hex 4)
BUCKET="ap-tut-${RANDOM_ID}-${ACCOUNT}"
AP_NAME="tut-ap-${RANDOM_ID}"

# Delete the access point before the bucket; best-effort, errors suppressed.
cleanup() {
  echo ""
  echo "Cleaning up..."
  aws s3control delete-access-point --account-id "$ACCOUNT" --name "$AP_NAME" 2>/dev/null && echo " Deleted access point"
  aws s3 rm "s3://$BUCKET" --recursive --quiet 2>/dev/null
  aws s3 rb "s3://$BUCKET" 2>/dev/null && echo " Deleted bucket"
  rm -rf "$WORK_DIR"
  echo "Done."
}

handle_error() { echo "ERROR on line $1"; trap - ERR; cleanup; exit 1; }

# FIX: install the trap only after cleanup/handle_error exist — the original
# armed the trap before defining cleanup, so an early failure would have
# called an undefined function.
trap 'handle_error $LINENO' ERR

echo "Step 1: Creating bucket"
# us-east-1 is the only region where LocationConstraint must be omitted.
if [ "$REGION" = "us-east-1" ]; then
  aws s3api create-bucket --bucket "$BUCKET" > /dev/null
else
  aws s3api create-bucket --bucket "$BUCKET" \
    --create-bucket-configuration LocationConstraint="$REGION" > /dev/null
fi

echo "Step 2: Creating access point: $AP_NAME"
aws s3control create-access-point --account-id "$ACCOUNT" --name "$AP_NAME" --bucket "$BUCKET" > /dev/null
echo " Access point created"

echo "Step 3: Getting access point details"
aws s3control get-access-point --account-id "$ACCOUNT" --name "$AP_NAME" \
  --query '{Name:Name,Bucket:Bucket,NetworkOrigin:NetworkOrigin}' --output table

echo "Step 4: Listing access points"
aws s3control list-access-points --account-id "$ACCOUNT" --bucket "$BUCKET" \
  --query 'AccessPointList[].{Name:Name,Bucket:Bucket}' --output table

echo ""
echo "Tutorial complete."
echo "Do you want to clean up? (y/n): "
read -r CHOICE
# FIX: disarm the ERR trap before the interactive cleanup so a failed
# best-effort delete inside cleanup cannot re-enter handle_error.
[[ "$CHOICE" =~ ^[Yy]$ ]] && { trap - ERR; cleanup; }
Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
1+
#!/bin/bash
# Tutorial: create an S3 bucket with Object Lock, set a default governance
# retention, upload a protected object, and verify the lock metadata.

WORK_DIR=$(mktemp -d)
exec > >(tee -a "$WORK_DIR/tut.log") 2>&1

# Resolve region: env var wins, fall back to CLI config; abort if neither is set.
REGION=${AWS_DEFAULT_REGION:-$(aws configure get region 2>/dev/null)}
[ -z "$REGION" ] && echo "ERROR: No region" && exit 1
export AWS_DEFAULT_REGION="$REGION"
echo "Region: $REGION"

RANDOM_ID=$(openssl rand -hex 4)
ACCOUNT=$(aws sts get-caller-identity --query Account --output text)
B="lock-tut-${RANDOM_ID}-${ACCOUNT}"

# Object Lock buckets cannot be force-deleted: locked versions must expire
# first, so cleanup can only remove local state and print instructions.
cleanup() {
  echo "Cleaning up..."
  echo " Object Lock buckets require all versions to expire before deletion."
  echo " Manual: aws s3api delete-bucket --bucket $B (after retention expires)"
  rm -rf "$WORK_DIR"
  echo "Done."
}

handle_error() { echo "ERROR on line $1"; trap - ERR; cleanup; exit 1; }

# FIX: define cleanup before arming the trap — the original armed the trap
# first, so an early failure would have called an undefined function.
trap 'handle_error $LINENO' ERR

echo "Step 1: Creating bucket with Object Lock"
# FIX: pass LocationConstraint outside us-east-1; the original call failed
# in every other region.
if [ "$REGION" = "us-east-1" ]; then
  aws s3api create-bucket --bucket "$B" --object-lock-enabled-for-bucket > /dev/null
else
  aws s3api create-bucket --bucket "$B" --object-lock-enabled-for-bucket \
    --create-bucket-configuration LocationConstraint="$REGION" > /dev/null
fi
# NOTE: --object-lock-enabled-for-bucket already turns on versioning; this
# explicit call is redundant but harmless and kept for clarity.
aws s3api put-bucket-versioning --bucket "$B" --versioning-configuration Status=Enabled

echo "Step 2: Setting default retention (1 day governance mode)"
aws s3api put-object-lock-configuration --bucket "$B" \
  --object-lock-configuration '{"ObjectLockEnabled":"Enabled","Rule":{"DefaultRetention":{"Mode":"GOVERNANCE","Days":1}}}'

echo "Step 3: Getting lock configuration"
aws s3api get-object-lock-configuration --bucket "$B" \
  --query "ObjectLockConfiguration.Rule.DefaultRetention.{Mode:Mode,Days:Days}" --output table

echo "Step 4: Uploading a locked object"
echo "protected data" > "$WORK_DIR/data.txt"
aws s3 cp "$WORK_DIR/data.txt" "s3://$B/data.txt" --quiet
echo " Object uploaded with governance-mode retention"

echo "Step 5: Verifying lock"
aws s3api head-object --bucket "$B" --key data.txt \
  --query "{Lock:ObjectLockMode,Retain:ObjectLockRetainUntilDate}" --output table

echo ""
echo "Tutorial complete."
echo "Note: Object Lock prevents deletion until retention expires."
echo "Do you want to clean up? (y/n): "
read -r C
[[ "$C" =~ ^[Yy]$ ]] && { trap - ERR; cleanup; }
Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
#!/bin/bash
# Tutorial: enable S3 Transfer Acceleration on a bucket, confirm its status,
# and print the accelerated endpoint. Output is tee'd to a log file.

WORK_DIR=$(mktemp -d)
exec > >(tee -a "$WORK_DIR/tut.log") 2>&1

# Resolve region: env var wins, fall back to CLI config; abort if neither is set.
REGION=${AWS_DEFAULT_REGION:-$(aws configure get region 2>/dev/null)}
[ -z "$REGION" ] && echo "ERROR: No region" && exit 1
export AWS_DEFAULT_REGION="$REGION"
echo "Region: $REGION"

RANDOM_ID=$(openssl rand -hex 4)
ACCOUNT=$(aws sts get-caller-identity --query Account --output text)
# Acceleration requires DNS-compliant names with no dots; this pattern complies.
B="accel-tut-${RANDOM_ID}-${ACCOUNT}"

# Best-effort teardown: empty and remove the bucket, drop the work dir.
cleanup() {
  echo "Cleaning up..."
  aws s3 rm "s3://$B" --recursive --quiet 2>/dev/null
  aws s3 rb "s3://$B" 2>/dev/null && echo " Deleted bucket"
  rm -rf "$WORK_DIR"
  echo "Done."
}

echo "Step 1: Creating bucket"
# FIX: pass LocationConstraint outside us-east-1; the original unconditional
# create-bucket call failed in every other region.
if [ "$REGION" = "us-east-1" ]; then
  aws s3api create-bucket --bucket "$B" > /dev/null
else
  aws s3api create-bucket --bucket "$B" \
    --create-bucket-configuration LocationConstraint="$REGION" > /dev/null
fi

echo "Step 2: Enabling Transfer Acceleration"
aws s3api put-bucket-accelerate-configuration --bucket "$B" \
  --accelerate-configuration Status=Enabled
echo " Acceleration enabled"

echo "Step 3: Getting acceleration status"
aws s3api get-bucket-accelerate-configuration --bucket "$B" \
  --query '{Status:Status}' --output table

echo "Step 4: Accelerated endpoint"
echo " https://${B}.s3-accelerate.amazonaws.com"

echo ""
echo "Tutorial complete."
echo "Do you want to clean up? (y/n): "
read -r C
[[ "$C" =~ ^[Yy]$ ]] && cleanup
Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
#!/bin/bash
# Tutorial: configure S3 Inventory on a source bucket delivering weekly CSV
# reports to a destination bucket, then verify the configuration.

WORK_DIR=$(mktemp -d)
exec > >(tee -a "$WORK_DIR/tut.log") 2>&1

# Resolve region: env var wins, fall back to CLI config; abort if neither is set.
REGION=${AWS_DEFAULT_REGION:-$(aws configure get region 2>/dev/null)}
[ -z "$REGION" ] && echo "ERROR: No region" && exit 1
export AWS_DEFAULT_REGION="$REGION"
echo "Region: $REGION"

RANDOM_ID=$(openssl rand -hex 4)
ACCOUNT=$(aws sts get-caller-identity --query Account --output text)
SRC="inv-src-${RANDOM_ID}-${ACCOUNT}"
DST="inv-dst-${RANDOM_ID}-${ACCOUNT}"

# Best-effort teardown of the inventory config, both buckets, and local state.
cleanup() {
  echo "Cleaning up..."
  aws s3api delete-bucket-inventory-configuration --bucket "$SRC" --id tutorial-inventory 2>/dev/null
  aws s3 rm "s3://$SRC" --recursive --quiet 2>/dev/null
  aws s3 rb "s3://$SRC" 2>/dev/null
  aws s3 rm "s3://$DST" --recursive --quiet 2>/dev/null
  aws s3 rb "s3://$DST" 2>/dev/null
  rm -rf "$WORK_DIR"
  echo "Done."
}

handle_error() { echo "ERROR on line $1"; trap - ERR; cleanup; exit 1; }

# FIX: the original trap line was mangled by shell-quoting residue
# (trap '"'"'handle_error $LINENO'"'"' ERR) and never installed the intended
# handler; this is the correct single-quoted form, armed after cleanup exists.
trap 'handle_error $LINENO' ERR

# Create a bucket, honoring the us-east-1 LocationConstraint exception.
make_bucket() {
  if [ "$REGION" = "us-east-1" ]; then
    aws s3api create-bucket --bucket "$1" > /dev/null
  else
    aws s3api create-bucket --bucket "$1" \
      --create-bucket-configuration LocationConstraint="$REGION" > /dev/null
  fi
}

echo "Step 1: Creating source and destination buckets"
make_bucket "$SRC"
make_bucket "$DST"

# FIX: inventory delivery requires the destination bucket to grant
# s3.amazonaws.com PutObject; without this policy reports are never written.
aws s3api put-bucket-policy --bucket "$DST" --policy "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Sid\":\"InventoryDelivery\",\"Effect\":\"Allow\",\"Principal\":{\"Service\":\"s3.amazonaws.com\"},\"Action\":\"s3:PutObject\",\"Resource\":\"arn:aws:s3:::$DST/*\",\"Condition\":{\"StringEquals\":{\"aws:SourceAccount\":\"$ACCOUNT\"}}}]}"

echo "Step 2: Configuring inventory"
aws s3api put-bucket-inventory-configuration --bucket "$SRC" --id tutorial-inventory \
  --inventory-configuration "{\"Destination\":{\"S3BucketDestination\":{\"Bucket\":\"arn:aws:s3:::$DST\",\"Format\":\"CSV\"}},\"IsEnabled\":true,\"Id\":\"tutorial-inventory\",\"IncludedObjectVersions\":\"Current\",\"Schedule\":{\"Frequency\":\"Weekly\"},\"OptionalFields\":[\"Size\",\"LastModifiedDate\",\"StorageClass\"]}"
echo " Inventory configured (weekly, CSV)"

echo "Step 3: Getting inventory configuration"
aws s3api get-bucket-inventory-configuration --bucket "$SRC" --id tutorial-inventory \
  --query "InventoryConfiguration.{Id:Id,Enabled:IsEnabled,Frequency:Schedule.Frequency}" --output table
echo " Note: First inventory report generates within 48 hours"

echo ""
echo "Tutorial complete."
echo "Do you want to clean up? (y/n): "
read -r C
[[ "$C" =~ ^[Yy]$ ]] && { trap - ERR; cleanup; }

0 commit comments

Comments
 (0)