我是 Python 新手。当我尝试把内置函数 os.system 的输出存储到一个变量中,再把该变量传递给另一个用户自定义(User Defined)函数时,程序返回如下错误:
[root@sijr37wamxop570 ~]# /usr/local/bin/python3.6 Script_ICOS_Upload.py
File "Script_ICOS_Upload.py", line 62
multi_part_upload('570p', 'DB2Backup', BackupFile)
^
IndentationError: expected an indented block
我尝试把下面的代码块定义为函数 bkp(),然后在 main 中调用它,但在命令行上仍然收到缩进错误(IndentationError):
cmd = "ls -lrt /home/db2inst1/TWS.0.db2inst1.DBPART000.* | awk '{print $9;}'"
BackupFile = os.system(cmd, shell=True)
import os
import subprocess

import ibm_boto3
from ibm_botocore.client import ClientError, Config
# Shell pipeline: list the DB2 backup files oldest-first (-lrt) and keep
# only the file-name column, so the last output line is the newest backup.
cmd = "ls -lrt /home/db2inst1/TWS.0.db2inst1.DBPART000.* | awk '{print $9;}'"
# BUG FIX: os.system() takes no shell= keyword (TypeError here), and even
# os.system(cmd) returns the command's *exit status*, never its output.
# subprocess.check_output() actually captures stdout as required.
_listing = subprocess.check_output(cmd, shell=True).decode().strip()
# The glob can match several backups; take the last (newest) line.
BackupFile = _listing.splitlines()[-1] if _listing else ""
# ---- IBM Cloud Object Storage connection settings (placeholders) ----
COS_ENDPOINT = "https://XXXXXXXXXXXXXXX.net/"
COS_API_KEY_ID = "XXXXXXXXXXXXXXXXX"  # e.g. "W00YiRnLW4a3fTjMB-oiB-2ySfTrFBIQQWanc--P3byk"
COS_AUTH_ENDPOINT = "https://XXXXXXXXXXX/identity/token"
COS_RESOURCE_CRN = "XXXXXXXXXXXXXXXXX"  # e.g. "crn:v1:bluemix:public:cloud-object-storage:global:a/...::"

# Module-level COS service resource; used by multi_part_upload() below.
cos = ibm_boto3.resource(
    "s3",
    ibm_api_key_id=COS_API_KEY_ID,
    ibm_service_instance_id=COS_RESOURCE_CRN,
    ibm_auth_endpoint=COS_AUTH_ENDPOINT,
    config=Config(signature_version="oauth"),
    endpoint_url=COS_ENDPOINT,
)
def multi_part_upload(bucket_name, item_name, file_path):
    """Upload the file at *file_path* to object *item_name* in COS bucket *bucket_name*.

    Uses the module-level ``cos`` resource. Transfers at or above the
    20 GB threshold are split into 20 GB multi-part chunks; smaller
    files go up as a single part.
    """
    try:
        print("Starting file transfer for {0} to bucket: {1}\n".format(item_name, bucket_name))
        # 20 GB chunk size and 20 GB multipart threshold.
        part_size = 1024 * 1024 * 1024 * 20
        file_threshold = 1024 * 1024 * 1024 * 20
        transfer_config = ibm_boto3.s3.transfer.TransferConfig(
            multipart_threshold=file_threshold,
            multipart_chunksize=part_size
        )
        # upload_fileobj automatically switches to a multi-part upload
        # once the file exceeds the configured threshold.
        with open(file_path, "rb") as file_data:
            cos.Object(bucket_name, item_name).upload_fileobj(
                Fileobj=file_data,
                Config=transfer_config
            )
        print("Transfer for {0} Complete!\n".format(item_name))
    except ClientError as be:
        # BUG FIX: the original file imported only Config, so this handler
        # raised NameError instead of reporting the client error.
        # Requires ``from ibm_botocore.client import ClientError`` at the top.
        print("CLIENT ERROR: {0}\n".format(be))
    except Exception as e:
        print("Unable to complete multi-part upload: {0}".format(e))
def main():
    """Script entry point.

    Push the most recently taken DB2 backup (module-level ``BackupFile``)
    to object 'DB2Backup' in bucket '570p'.
    """
    multi_part_upload('570p', 'DB2Backup', BackupFile)
# Run the upload only when executed as a script, not on import.
if __name__ == '__main__':
    main()
Expected result: multi_part_upload uploads the backup file to the COS bucket at the endpoint above.