How to copy files from one bucket to another on Amazon S3 with a Python script using Boto

Date: 2018-12-07 06:30:48

Tags: python amazon-s3

How do I copy files from one bucket to another on Amazon S3 with a Python script, using boto?

I know how to create a bucket, but not how to copy files into another bucket.

import boto
import boto.s3.connection

# CREATING A CONNECTION
access_key = 'MPB**********ITMO'
secret_key = '11t63y************XojO7b'

conn = boto.connect_s3(
        aws_access_key_id = access_key,
        aws_secret_access_key = secret_key,
        host = 'twg****.org.tw',
        is_secure=False,               # set to False when not using SSL
        calling_format = boto.s3.connection.OrdinaryCallingFormat(),
        )

# CREATING A BUCKET
bucket = conn.create_bucket('aaaa')

References:
https://github.com/boto/boto/blob/develop/docs/source/s3_tut.rst
http://docs.ceph.com/docs/master/radosgw/s3/python/

1 Answer:

Answer 0 (score: 2):

import boto
import boto.s3.connection


# CREATING A CONNECTION
access_key = 'MPB*******MO'
secret_key = '11t6******rVYXojO7b'

conn = boto.connect_s3(
        aws_access_key_id = access_key,
        aws_secret_access_key = secret_key,
        host = 'twg******.tw',
        is_secure=False,               # set to False when not using SSL
        calling_format = boto.s3.connection.OrdinaryCallingFormat(),
        )

src = conn.get_bucket('roger123weddec052335422018')
dst = conn.get_bucket('aaa')   # bucket names cannot contain '/'; use a key prefix for "folders"

for k in src.list():
    # copy stuff to your destination here
    dst.copy_key(k.key, src.name, k.key)
    # then delete the source key
    #k.delete()
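The loop above copies every object in the source bucket; copy_key(new_key_name, src_bucket_name, src_key_name) issues a server-side copy, so the data never passes through the client. To copy a single object, one call is enough (a minimal sketch; the key names here are hypothetical):

# Server-side copy of one object into the destination bucket
dst.copy_key('photos/1.jpg', src.name, '1.jpg')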

=========================================

Get subdirectory (folder) information

folders = bucket.list("", "/")   # empty prefix + "/" delimiter -> top-level prefixes only
for folder in folders:
    print (folder.name)
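Passing a prefix as the first argument narrows the listing to one "folder"; a minimal sketch (the 'abc/' prefix is just an example):

# List only the keys and sub-prefixes under 'abc/'
for item in bucket.list('abc/', '/'):
    print(item.name)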

======================================

Create a folder

k = bucket.new_key('abc/123/')
k.set_contents_from_string('')
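S3 itself has no directories; the zero-byte key ending in '/' above is only a convention that most clients render as a folder. A quick check that the marker key now exists (a sketch):

# get_key() returns None when the key does not exist
marker = bucket.get_key('abc/123/')
print(marker is not None)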

============================================

List your own buckets

for bucket in conn.get_all_buckets():
    print("{name}\t{created}".format(
        name=bucket.name,
        created=bucket.creation_date,
    ))

Create a bucket

#bucket = conn.create_bucket('willie20181121')
bucket = conn.create_bucket('roger123.Tuedec040445192018')
print(bucket.name)
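Note that S3 proper requires DNS-compliant bucket names (lowercase letters, digits, hyphens); a name with uppercase letters and dots like the one above may only be accepted by lenient backends such as radosgw. A hedged sketch that catches a rejected name, using boto's S3CreateError:

from boto.exception import S3CreateError

try:
    bucket = conn.create_bucket('roger123.Tuedec040445192018')
except S3CreateError as e:
    print("bucket creation rejected: %s" % e)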

=========================================

List the bucket contents

foldername = ','
for key in bucket.list():
    print("{name}\t{size}\t{modified}\t{owner}\t{isimage}".format(
        name=key.name,                        # same as key.key
        size=key.size,
        modified=key.last_modified,
        owner=key.owner.id,
        isimage=key.name.startswith('image'),
    ))
    xxx = key.key
    #print(len(xxx.split('/')))
    if len(xxx.split('/')) == 2:
        if foldername.find(xxx.split('/')[0]) == -1:
            foldername = foldername + xxx.split('/')[0] + ","
#print(foldername)
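A set does the same folder-name de-duplication as the comma-string bookkeeping above, more directly; a minimal sketch:

# Collect the distinct top-level "folder" names with a set
folders = set()
for key in bucket.list():
    parts = key.name.split('/')
    if len(parts) == 2:
        folders.add(parts[0])
print(folders)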

Delete a bucket

#conn.delete_bucket('willietest20181121')

Create an object

#key = bucket.new_key('hello.txt')
#key.set_contents_from_string('Hello World!11:52')

Download an object (to a file)

#key = bucket.get_key('hello.txt')
#key.get_contents_to_filename('/home/willie/Desktop/hello.txt')

Delete an object

#bucket.delete_key('hello.txt')
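Several objects can also be removed in one request; a sketch using boto's multi-delete (the key names are examples):

# Multi-object delete; the result reports which keys were removed
result = bucket.delete_keys(['hello.txt', 'abc/123/old.txt'])
print(result.deleted)
print(result.errors)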

=========================================

Upload files

import boto
import boto.s3
import boto.s3.connection
import os.path
import sys

#https://gist.github.com/SavvyGuard/6115006

def percent_cb(complete, total):
    sys.stdout.write('.')
    sys.stdout.flush()

# Fill in info on data to upload
# destination bucket name
bucket_name = 'willie20181121_'
# source directory
sourceDir = '/home/willie/Desktop/x/'
# destination directory name (on s3)
destDir = '/test2/'

#max size in bytes before uploading in parts. between 1 and 5 GB recommended
MAX_SIZE = 20 * 1000 * 1000
#size of parts when uploading in parts
PART_SIZE = 6 * 1000 * 1000

access_key = 'MPB**********ITMO'
secret_key = '11t63y************XojO7b'

conn = boto.connect_s3(
        aws_access_key_id = access_key,
        aws_secret_access_key = secret_key,
        host = 'twgc-s3.nchc.org.tw',
        is_secure=False,               # set to False when not using SSL
        calling_format = boto.s3.connection.OrdinaryCallingFormat(),
        )
# get_bucket() takes no location argument; that belongs to create_bucket()
bucket = conn.get_bucket(bucket_name)


uploadFileNames = []
for (dirpath, dirnames, filenames) in os.walk(sourceDir):
    #uploadFileNames.extend(filenames)   # uncomment to upload everything in sourceDir
    break                                # only inspect the top-level directory
uploadFileNames.extend(["1.jpg"])
uploadFileNames.extend(["2.py"])

for filename in uploadFileNames:
    sourcepath = os.path.join(sourceDir, filename)   # join() takes separate path components
    destpath = os.path.join(destDir, filename)
    print ('Uploading %s to Amazon S3 bucket %s' % \
           (sourcepath, bucket_name))
    #print("==="+ sourcepath)
    filesize = os.path.getsize(sourcepath)
    if filesize > MAX_SIZE:
        print ("multipart upload")
        mp = bucket.initiate_multipart_upload(destpath)
        fp = open(sourcepath,'rb')
        fp_num = 0
        while (fp.tell() < filesize):
            fp_num += 1
            print ("uploading part %i" %fp_num)
            mp.upload_part_from_file(fp, fp_num, cb=percent_cb, num_cb=10, size=PART_SIZE)

        fp.close()
        mp.complete_upload()

    else:
        print ("singlepart upload")
        k = boto.s3.key.Key(bucket)
        k.key = destpath
        #print(sourcepath)
        k.set_contents_from_filename(sourcepath, cb=percent_cb, num_cb=10)
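The multipart branch cuts the file into PART_SIZE chunks, so the number of uploaded parts is just ceil(filesize / PART_SIZE); a quick sanity check, independent of boto:

import math

filesize = 25 * 1000 * 1000                          # e.g. a 25 MB file
parts = int(math.ceil(filesize / float(PART_SIZE)))
print(parts)                                         # 6 MB parts -> 5 parts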

================= Exception-handling test

try:
    key = bucket.get_key('Mail1.txt')
    key.get_contents_to_filename('/home/willie/Desktop/mail.txt')
except Exception as e:
    result = False
    print("==" + str(e.args))