====== S3 ======
# Create an empty "folder" marker object in S3 so the backup export has a
# destination prefix to write into.
# NOTE(review): assumes `s3session` is a boto3 S3 client and `bucket_london`
# is the target bucket name — both are defined elsewhere in this document.
folder_key = 'redis_elasticache' + '/'
print("Create s3 path")
s3session.put_object(Bucket=bucket_london, Key=folder_key)
$ aws s3 ls --profile nonprod_admin | grep -i 'dev\|test\|nonprod'
$ # For each dev/test/nonprod bucket, print "<bucket>: Total Objects ... Total Size ..."
$ # on one line. awk '{print $3}' extracts the bucket name robustly even when
$ # the date/time columns are padded with repeated spaces (cut -f3 -d' ' is not).
$ for B in $(aws s3 ls --profile nonprod_admin | grep -i 'dev\|test\|nonprod' | awk '{print $3}');
do
echo -n "${B}: "
# --summarize prints the two "Total ..." lines; tr joins them onto one line.
aws s3 ls --summarize --human-readable --recursive "s3://${B}" --profile nonprod_admin | grep -i total | tr -d '\n'
echo ""
done
cdn-frontend Total Objects: 98 Total Size: 7.2 MiB
cdn-logging Total Objects: 135 Total Size: 256.2 KiB
ami-builder-dev-icon-web-hosting Total Objects: 5 Total Size: 5.4 KiB
ami-builder-dev-pipeline-artifacts Total Objects: 5234 Total Size: 3.6 GiB
===== Copy backup files to S3 with BASH =====
upload_backup.sh:-
#!/bin/bash
# upload_backup.sh — copy every file under BACKUP_PATH to an S3 bucket
# using the Standard-IA storage class.
set -euo pipefail

# NOTE(review): hard-coded long-lived credentials are a security risk — prefer
# an IAM role, an AWS CLI profile, or credentials injected from the environment.
export AWS_ACCESS_KEY_ID="AKIATOPSECRET"
export AWS_SECRET_ACCESS_KEY="EVENMORESECRET"
export AWS_DEFAULT_REGION="eu-west-1"

readonly BACKUP_PATH='/mnt/usb'
readonly S3_BUCKET='s3://my-backups'

# Glob instead of parsing `ls` output: safe with spaces in filenames, and
# nullglob makes an empty directory expand to nothing rather than a literal '*'.
shopt -s nullglob
for file in "${BACKUP_PATH}"/*; do
  [[ -f "$file" ]] || continue   # skip subdirectories and specials
  # Never put variable data in the printf FORMAT string — a '%' in a
  # filename would break it; pass data as an argument to '%s' instead.
  printf 'Copying to S3:- %s\n' "${file##*/}"
  s3cp=$(aws s3 cp -- "$file" "$S3_BUCKET" --storage-class STANDARD_IA)
  printf 'CP result:- %s\n\n' "$s3cp"
done