Add scripts for mongo backup and restore.

Ivaylo Novakov 2021-01-21 13:27:38 +01:00
parent 4ae61e2364
commit ce8e1d4eae
GPG Key ID: 06B9354AB08BE9C6
5 changed files with 75 additions and 3 deletions

@@ -18,4 +18,4 @@ docker exec cockroach \
cockroach sql \
--host cockroach:26257 \
--certs-dir=/certs \
- --execute="BACKUP TO 's3://skynet-crdb-backups/backups/$DT?AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID&AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY';"
+ --execute="BACKUP TO 's3://skynet-crdb-backups/backups/cockroach/$DT?AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID&AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY';"

@@ -22,4 +22,4 @@ docker exec cockroach \
cockroach sql \
--host cockroach:26257 \
--certs-dir=/certs \
- --execute="RESTORE DATABASE defaultdb FROM 's3://skynet-crdb-backups/backups/$DT?AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID&AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY';"
+ --execute="RESTORE DATABASE defaultdb FROM 's3://skynet-crdb-backups/backups/cockroach/$DT?AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID&AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY';"

scripts/mongo_backup.sh (new file, 32 lines)

@@ -0,0 +1,32 @@
#!/bin/bash
# Get current working directory (pwd doesn't cut it)
cwd=$(cd -P -- "$(dirname -- "$0")" && pwd -P)
# Set the environment:
set -o allexport
source $cwd/../.env
set +o allexport
# Check for AWS credentials:
if [[ $AWS_ACCESS_KEY_ID == "" || $AWS_SECRET_ACCESS_KEY == "" ]]; then
echo "Missing AWS credentials!"
exit 1
fi
# Take the current date:
DT=$(date +%Y-%m-%d)
# Check if a backup already exists:
totalFoundObjects=$(aws s3 ls s3://skynet-crdb-backups/backups/mongo/ --recursive --summarize | grep "$DT.tgz" | wc -l)
if [ "$totalFoundObjects" -eq "1" ]; then
echo "Backup already exists for today. Exiting."
exit 0
fi
# Create the backup:
docker exec mongo \
mongodump \
-o /data/db/backups/$DT \
mongodb://$SKYNET_DB_USER:$SKYNET_DB_PASS@$SKYNET_DB_HOST:$SKYNET_DB_PORT
# Compress the backup (the archive is created inside the backups dir):
cd $cwd/../docker/data/mongo/db/backups/ && tar -czf $DT.tgz $DT && cd -
# Upload the backup to S3:
aws s3 cp $cwd/../docker/data/mongo/db/backups/$DT.tgz s3://skynet-crdb-backups/backups/mongo/
# Clean up:
rm -rf $cwd/../docker/data/mongo/db/backups/$DT.tgz $cwd/../docker/data/mongo/db/backups/$DT
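The backup script exits early when an archive for today's date already exists in the bucket, so running it more than once per day is harmless. A minimal scheduling sketch, assuming the repository is checked out at /home/user/skynet-webportal (the log path is an illustrative assumption, not part of this change):

# Example crontab entry: run the mongo backup every day at 01:00
0 1 * * * /home/user/skynet-webportal/scripts/mongo_backup.sh >> /var/log/mongo_backup.log 2>&1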

scripts/mongo_restore.sh (new file, 40 lines)

@@ -0,0 +1,40 @@
#!/bin/bash
BACKUP=$1
if [[ $BACKUP == "" ]]; then
echo "No backup name given. It should look like '2020-01-29'."
exit 1
fi
# Get current working directory (pwd doesn't cut it)
cwd=$(cd -P -- "$(dirname -- "$0")" && pwd -P)
# Set the environment:
set -o allexport
source $cwd/../.env
set +o allexport
# Check for AWS credentials:
if [[ $AWS_ACCESS_KEY_ID == "" || $AWS_SECRET_ACCESS_KEY == "" ]]; then
echo "Missing AWS credentials!"
exit 1
fi
# Check if the backup exists:
totalFoundObjects=$(aws s3 ls s3://skynet-crdb-backups/backups/mongo/ --recursive --summarize | grep "$BACKUP.tgz" | wc -l)
if [ "$totalFoundObjects" -eq "0" ]; then
echo "This backup doesn't exist!"
exit 1
fi
# Get the backup from S3:
aws s3 cp s3://skynet-crdb-backups/backups/mongo/$BACKUP.tgz $BACKUP.tgz
# Prepare a clean `to_restore` dir:
rm -rf $cwd/../docker/data/mongo/db/backups/to_restore
mkdir -p $cwd/../docker/data/mongo/db/backups/to_restore
# Decompress the backup:
tar -xzf $BACKUP.tgz -C $cwd/../docker/data/mongo/db/backups/to_restore
rm $BACKUP.tgz
# Restore the backup:
docker exec mongo \
mongorestore \
mongodb://$SKYNET_DB_USER:$SKYNET_DB_PASS@$SKYNET_DB_HOST:$SKYNET_DB_PORT \
/data/db/backups/to_restore/$BACKUP
# Clean up:
rm -rf $cwd/../docker/data/mongo/db/backups/to_restore
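A minimal usage sketch for the restore script, assuming an archive named 2021-01-20.tgz already exists under backups/mongo/ in the bucket (the date is only an example):

/home/user/skynet-webportal/scripts/mongo_restore.sh 2021-01-20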

@@ -14,7 +14,7 @@ cat /home/user/skynet-webportal/setup-scripts/support/authorized_keys >> /home/u
# Install apt packages
sudo apt-get update
- sudo apt-get -y install ufw tmux ranger htop nload gcc g++ make git vim unzip curl
+ sudo apt-get -y install ufw tmux ranger htop nload gcc g++ make git vim unzip curl awscli
# Setup GIT credentials (so commands like git stash would work)
git config --global user.email "devs@nebulous.tech"
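The awscli package is added here because the new mongo backup and restore scripts shell out to the aws CLI for the S3 uploads and downloads. The scripts pass credentials through the environment sourced from .env, so no separate aws configure step should be needed; a sketch of the relevant .env entries, with placeholder values:

AWS_ACCESS_KEY_ID=<access key id>           # placeholder
AWS_SECRET_ACCESS_KEY=<secret access key>   # placeholder
# Depending on the bucket's region, AWS_DEFAULT_REGION may also need to be set.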