# Requirements: backups must run 3 times a day, keeping only data collected
# within the last week; anything older than one month must be deleted (the
# cleanup also runs 3 times a day).
# NOTE(review): this line was previously uncommented prose, which bash would
# have tried to execute; it also pushed the shebang off line 1.
#!/bin/bash
# MongoDB backup script: dump collections, zip them, upload to S3, and prune
# old data. Intended to be run 3x/day from cron.
set -euo pipefail

#0 install necessary things
# -y so the script never hangs on a confirmation prompt when run from cron;
# 'zip' is added because it is used below but was never installed.
sudo apt-get update
sudo apt-get install -y unzip zip libwww-perl libdatetime-perl awscli
#1 extract date
# Bug fix: 'var' is not shell syntax (it would be run as a command), and the
# space after '=' meant the assignment never happened — bash would instead
# try to EXECUTE the output of $(date). Plain NAME=value with no spaces.
DATE_DAY=$(date +%Y-%m-%d)
readonly DATE_DAY
#2 do mongo dump
# NOTE(review): this assumes the collections really are named with the date
# appended, e.g. 'rawData12024-01-31' — confirm against the database. If the
# collections are just 'rawData1'..'rawData5', drop "${DATE_DAY}" below.
# The '$(echo $DATE_DAY)' indirection was useless; plain "$DATE_DAY" suffices.
# --out pins the dump location (default would be ./dump relative to wherever
# cron happens to start the script).
DUMP_DIR="/mongodump"
mkdir -p -- "$DUMP_DIR"
for i in 1 2 3 4 5; do
  mongodump --db test --collection "rawData${i}${DATE_DAY}" --out "$DUMP_DIR"
done
#3 zip each one
# Bug fix: zip's argument order is 'zip <archive.zip> <files...>' — the
# original had archive and input reversed, and the archive name lacked .zip.
# mongodump writes <out>/<db>/<collection>.bson (+ .metadata.json); zip both.
# NOTE(review): assumes the dumps live under /mongodump/test — adjust if the
# mongodump output directory differs.
for i in 1 2 3 4 5; do
  zip -j "/mongodump/rawData${i}${DATE_DAY}.zip" \
      /mongodump/test/"rawData${i}${DATE_DAY}".*
done
#4 upload to aws
aws s3 cp /rawData1/ s3://<com.backups>/.zip
aws s3 cp /rawData2/ s3://<com.backups>/.zip
aws s3 cp /rawData3/ s3://<com.backups>/.zip
aws s3 cp /rawData4/ s3://<com.backups>/.zip
aws s3 cp /rawData5/ s3://<com.backups>/.zip
#5 delete the old raw data collections
# Bug fix: 'db.getCollection(...).drop()' is mongo-shell JavaScript, not
# bash — typed at the shell it is just an unknown command. It must be passed
# to the mongo shell via --eval.
# Per the stated requirement, delete data older than one month: target the
# dated collections from 31 days ago (GNU date relative syntax).
OLD_DATE=$(date -d '31 days ago' +%Y-%m-%d)
for i in 1 2 3 4 5; do
  mongo test --quiet --eval "db.getCollection('rawData${i}${OLD_DATE}').drop()"
done
#6 set up in cron tab so this script runs 3 times a day
# Bug fixes:
#  - 'crontab -e' opens an interactive editor, which is useless inside a
#    script; the raw cron line after it would have been executed as a
#    shell command and failed.
#  - '/bin/bash/home/...' was missing the space between the interpreter
#    and the script path.
# Install the schedule non-interactively, and only once (idempotent), so
# the thrice-daily runs of this script don't keep appending duplicates.
CRON_LINE='00 00,08,16 * * * /bin/bash /home/myuser/myproject/production/mongoCronScript.sh'
if ! crontab -l 2>/dev/null | grep -qF -- "$CRON_LINE"; then
  { crontab -l 2>/dev/null; printf '%s\n' "$CRON_LINE"; } | crontab -
fi