This is my main function; I get the error shown below when I run it.
import time
from datetime import datetime, timedelta

from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.resource import ResourceManagementClient
from azure.mgmt.datafactory import DataFactoryManagementClient
from azure.mgmt.datafactory.models import *


def main():
    # Azure subscription ID
    subscription_id = ''

    # This program creates this resource group. If it's an existing resource
    # group, comment out the code that creates the resource group.
    rg_name = ''

    # The data factory name. It must be globally unique.
    df_name = ''

    # Specify your Active Directory client ID, client secret, and tenant ID
    credentials = ServicePrincipalCredentials(client_id='', secret='', tenant='')
    resource_client = ResourceManagementClient(credentials, subscription_id)
    adf_client = DataFactoryManagementClient(credentials, subscription_id)

    rg_params = {'location': 'eastus'}
    df_params = {'location': 'eastus'}
    # Create the resource group.
    # Comment out if the resource group already exists.
    #resource_client.resource_groups.create_or_update(rg_name, rg_params)

    # Create a data factory
    #df_resource = Factory(location='eastus')
    #df = adf_client.factories.create_or_update(rg_name, df_name, df_resource)
    #print_item(df)
    #while df.provisioning_state != 'Succeeded':
    #    df = adf_client.factories.get(rg_name, df_name)
    #    time.sleep(1)
    # Create an Azure Storage linked service
    ls_name = ''

    # Specify the name and key of your Azure Storage account
    storage_string = SecureString('DefaultEndpointsProtocol=https;AccountName=;AccountKey=;EndpointSuffix=core.windows.net')

    ls_azure_storage = AzureStorageLinkedService(connection_string=storage_string)
    ls = adf_client.linked_services.create_or_update(rg_name, df_name, ls_name, ls_azure_storage)
    print_item(ls)

    # Create an Azure blob dataset (input)
    ds_name = ''
    ds_ls = LinkedServiceReference(ls_name)
    blob_path = ''
    blob_filename = ''
    ds_azure_blob = AzureBlobDataset(ds_ls, folder_path=blob_path, file_name=blob_filename)
    ds = adf_client.datasets.create_or_update(rg_name, df_name, ds_name, ds_azure_blob)
    print_item(ds)

    # Create an Azure blob dataset (output)
    dsOut_name = ''
    output_blobpath = ''
    dsOut_azure_blob = AzureBlobDataset(ds_ls, folder_path=output_blobpath)
    dsOut = adf_client.datasets.create_or_update(rg_name, df_name, dsOut_name, dsOut_azure_blob)
    print_item(dsOut)

    # Create a copy activity
    act_name = ''
    blob_source = BlobSource()
    blob_sink = BlobSink()
    dsin_ref = DatasetReference(ds_name)
    dsOut_ref = DatasetReference(dsOut_name)
    copy_activity = CopyActivity(act_name, inputs=[dsin_ref], outputs=[dsOut_ref], source=blob_source, sink=blob_sink)

    # Create a pipeline with the copy activity
    p_name = ''
    params_for_pipeline = {}
    p_obj = PipelineResource(activities=[copy_activity], parameters=params_for_pipeline)
    p = adf_client.pipelines.create_or_update(rg_name, df_name, p_name, p_obj)
    print_item(p)

    # Create a pipeline run (no run parameters passed)
    run_response = adf_client.pipelines.create_run(rg_name, df_name, p_name, {})
    # Monitor the pipeline run
    time.sleep(30)
    pipeline_run = adf_client.pipeline_runs.get(rg_name, df_name, run_response.run_id)
    print("\n\tPipeline run status: {}".format(pipeline_run.status))
    activity_runs_paged = list(adf_client.activity_runs.list_by_pipeline_run(rg_name, df_name, pipeline_run.run_id, datetime.now() - timedelta(1), datetime.now() + timedelta(1)))
    print_activity_run_details(activity_runs_paged[0])
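The snippet calls two helpers, print_item and print_activity_run_details, that are defined elsewhere in my script. For completeness, here is a minimal sketch of what such helpers might look like (a hypothetical reconstruction based on the names, not the exact code I run):

def print_item(group):
    """Print basic properties of an Azure object instance."""
    print("\tName: {}".format(group.name))
    print("\tId: {}".format(group.id))
    if hasattr(group, 'location'):
        print("\tLocation: {}".format(group.location))


def print_activity_run_details(activity_run):
    """Print status and I/O details for a single activity run."""
    print("\n\tActivity run status: {}".format(activity_run.status))
    if activity_run.status == 'Succeeded':
        print("\tBytes read: {}".format(activity_run.output['dataRead']))
        print("\tBytes written: {}".format(activity_run.output['dataWritten']))
    else:
        print("\tErrors: {}".format(activity_run.error['message']))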
I get the following error:

ErrorResponseException                    Traceback (most recent call last)
<ipython-input> in <module>()
----> 1 main()

<ipython-input> in main()
     37
     38     ls_azure_storage = AzureStorageLinkedService(connection_string=storage_string)
---> 39     ls = adf_client.linked_services.create_or_update(rg_name, df_name, ls_name, ls_azure_storage)
     40     print_item(ls)
     41

/usr/local/lib/python2.7/dist-packages/azure/mgmt/datafactory/operations/linked_services_operations.pyc in create_or_update(self, resource_group_name, factory_name, linked_service_name, properties, if_match, custom_headers, raw, **operation_config)
    170
    171         if response.status_code not in [200]:
--> 172             raise models.ErrorResponseException(self._deserialize, response)
    173
    174         deserialized = None

ErrorResponseException: Operation returned an invalid status code 'Forbidden'
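To see which check actually failed, it can help to print the raw HTTP response carried by the exception. A minimal sketch, assuming the ErrorResponseException raised in the traceback above (it derives from msrest's HttpOperationError, which keeps the raw response on e.response):

from azure.mgmt.datafactory.models import ErrorResponseException

try:
    ls = adf_client.linked_services.create_or_update(rg_name, df_name, ls_name, ls_azure_storage)
except ErrorResponseException as e:
    # The response body usually names the RBAC failure, e.g. 'AuthorizationFailed',
    # along with the missing action and the scope it was checked against.
    print(e.response.status_code)
    print(e.response.text)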
Answer (score 0):
Usually, Forbidden means you don't have permission. Could you check whether your service principal has write permission on that data factory?

Are you using ADF v1 or v2? Have you tried creating the pipeline in the UI? That's an easy way to verify your permissions.
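If you would rather check from Python than the UI, one option is to list the role assignments at the resource group scope and confirm your service principal's object ID appears with a writer role (e.g. Contributor or Data Factory Contributor). A rough sketch, assuming the azure-mgmt-authorization package is installed; the attribute layout varies across SDK versions, hence the getattr guard:

from azure.mgmt.authorization import AuthorizationManagementClient

auth_client = AuthorizationManagementClient(credentials, subscription_id)

# Print who holds which role at the resource group scope; the service
# principal needs a role that allows Microsoft.DataFactory write actions.
for ra in auth_client.role_assignments.list_for_resource_group(rg_name):
    props = getattr(ra, 'properties', ra)  # older SDKs nest fields under .properties
    print(props.principal_id, props.role_definition_id)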