Bash script
#!/bin/sh
MONGODB_SHELL='/usr/bin/mongo'
DUMP_UTILITY='/usr/bin/mongodump'
DB_NAME='amicus'
date_now=$(date +%Y_%m_%d_%H_%M_%S)
dir_name="db_backup_${date_now}"
file_name="db_backup_${date_now}.bz2"
log() {
    echo "$1"
}
do_cleanup(){
    # remove the local dump directories and archives once they are safely in S3
    rm -rf db_backup_*
    log 'cleaning up....'
}
do_backup(){
    log 'snapshotting the db and creating archive' && \
    ${MONGODB_SHELL} admin fsync_lock.js && \
    ${DUMP_UTILITY} -d ${DB_NAME} -o "${dir_name}" && tar -jcf "${file_name}" "${dir_name}"
    # release the fsync lock even if the dump or tar step failed
    ${MONGODB_SHELL} admin unlock.js && \
    log 'data backed up and created snapshot'
}
save_in_s3(){
    log 'saving the backup archive in amazon S3' && \
    python aws_s3.py set "${file_name}" && \
    log 'data backup saved in amazon s3'
}
do_backup && save_in_s3 && do_cleanup
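The fsync_lock.js and unlock.js helpers invoked above are not shown in the question. As a rough sketch, they presumably wrap MongoDB's standard fsync/lock commands, which can also be issued inline from the shell (the exact file contents are an assumption):

# flush writes to disk and block new ones before taking the dump (assumed equivalent of fsync_lock.js)
/usr/bin/mongo admin --eval 'printjson(db.runCommand({fsync: 1, lock: 1}))'
# release the write lock afterwards (assumed equivalent of unlock.js)
/usr/bin/mongo admin --eval 'printjson(db.fsyncUnlock())'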
aws_s3.py
ACCESS_KEY=''
SECRET=''
BUCKET_NAME='s3:///s3.amazonaws.com/database-backup' # note that you need to create this bucket first

from boto.s3.connection import S3Connection
from boto.s3.key import Key

def save_file_in_s3(filename):
    conn = S3Connection(ACCESS_KEY, SECRET)
    bucket = conn.get_bucket(BUCKET_NAME)
    k = Key(bucket)
    k.key = filename
    k.set_contents_from_filename(filename)

def get_file_from_s3(filename):
    conn = S3Connection(ACCESS_KEY, SECRET)
    bucket = conn.get_bucket(BUCKET_NAME)
    k = Key(bucket)
    k.key = filename
    k.get_contents_to_filename(filename)

def list_backup_in_s3():
    conn = S3Connection(ACCESS_KEY, SECRET)
    bucket = conn.get_bucket(BUCKET_NAME)
    for i, key in enumerate(bucket.get_all_keys()):
        print "[%s] %s" % (i, key.name)

def delete_all_backups():
    # FIXME: validate filename exists
    conn = S3Connection(ACCESS_KEY, SECRET)
    bucket = conn.get_bucket(BUCKET_NAME)
    for i, key in enumerate(bucket.get_all_keys()):
        print "deleting %s" % (key.name)
        key.delete()

if __name__ == '__main__':
    import sys
    if len(sys.argv) < 3:
        print 'Usage: %s <set|get|list|delete> <backup_filename>' % sys.argv[0]
        sys.exit(1)
    if sys.argv[1] == 'set':
        save_file_in_s3(sys.argv[2])
    elif sys.argv[1] == 'get':
        get_file_from_s3(sys.argv[2])
    elif sys.argv[1] == 'list':
        list_backup_in_s3()
    elif sys.argv[1] == 'delete':
        delete_all_backups()
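For context, the script is driven from the shell like this (the subcommands mirror the dispatch above; the archive name is only an example):

# upload the archive produced by the backup script
python aws_s3.py set db_backup_2010_10_07_09_00_00.bz2
# list the keys currently in the bucket (the argv check expects a dummy second argument for list/delete)
python aws_s3.py list all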
But I keep getting this error:
Traceback (most recent call last):
  File "aws_s3.py", line 42, in <module>
I did a bit of research and found that it is some kind of bug in boto. How should I proceed with this?
Best answer
Since I never got any update on how to make this work, I ended up using s3cmd in my bash script instead. I still need to test it with files larger than 1 GB.
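Note that s3cmd needs a one-time configuration before the put call in the script below will succeed; a minimal sketch (the bucket name is a placeholder, matching the script):

# one-time interactive setup; stores the access key and secret in ~/.s3cfg
s3cmd --configure
# create the target bucket if it does not exist yet
s3cmd mb s3://YOURBUCKETNAME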
Here is the updated code:
#!/bin/sh
MONGODB_SHELL='/usr/bin/mongo'
DUMP_UTILITY='/usr/bin/mongodump'
DB_NAME='amicus'
date_now=$(date +%Y_%m_%d_%H_%M_%S)
dir_name="db_backup_${date_now}"
file_name="db_backup_${date_now}.bz2"
log() {
    echo "$1"
}
do_cleanup(){
    # remove the local dump directories and archives once they are safely in S3
    rm -rf db_backup_*
    log 'cleaning up....'
}
do_backup(){
    log 'snapshotting the db and creating archive' && \
    ${DUMP_UTILITY} -d ${DB_NAME} -o "${dir_name}" && tar -jcf "${file_name}" "${dir_name}"
    log 'data backed up and created snapshot'
}
save_in_s3(){
    log 'saving the backup archive in amazon S3' && \
    s3cmd put "${file_name}" s3://YOURBUCKETNAME && \
    log 'data backup saved in amazon s3'
}
do_backup && save_in_s3 && do_cleanup
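To check that such a backup is actually restorable, the reverse of the pipeline above would look roughly like this (mongorestore is mongodump's standard counterpart; the archive name is an example):

# fetch the archive from S3 and unpack it
s3cmd get s3://YOURBUCKETNAME/db_backup_2010_10_07_09_00_00.bz2
tar -xjf db_backup_2010_10_07_09_00_00.bz2
# restore the dumped database into MongoDB
mongorestore -d amicus db_backup_2010_10_07_09_00_00/amicus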