从Google云端存储读取第35,000个文件时,数据流作业卡住了

时间:2019-05-08 08:12:35

标签: google-cloud-storage google-cloud-dataflow

class Mp3_to_npyFn(beam.DoFn):
    """Download one MP3 blob from GCS and convert it to a numpy array.

    Expects elements of the form ``(filename, e)`` where ``filename`` is the
    blob name inside the bucket.
    """

    # Created lazily on the first call to process() so a single client is
    # reused for every element this worker handles. The original code built
    # a fresh storage.Client() and called get_bucket() per element; with
    # ~0.4M elements that hammers the bucket-metadata endpoint, which is
    # exactly the request failing with "500 Backend Error" in the traceback.
    _bucket = None

    def process(self, element):
        filename, e = element

        if self._bucket is None:
            # Client.bucket() (unlike get_bucket()) performs no metadata
            # GET request, so the failing API call is avoided entirely.
            self._bucket = storage.Client().bucket('BUCKET_NAME')

        # get mp3 from the storage
        blob = self._bucket.get_blob(filename)
        tmp_mp3 = TemporaryFile()
        blob.download_to_file(tmp_mp3)
        tmp_mp3.seek(0)  # rewind so downstream code reads from the start

        array = do_something(tmp_mp3)
        write_numpy_array(array)
        return something

def run():
    """Build the pipeline and block until the job finishes."""
    pipeline = beam.Pipeline(RUNNER, options=opts)

    # Read the TSV listing, parse each row, then convert every MP3.
    rows = pipeline | 'Read TSV' >> ReadFromText(INPUT_TSV, skip_header_lines=1)
    parsed = rows | 'Parse TSV' >> beam.Map(parseTSV)
    _ = parsed | 'MP3 to npy' >> beam.ParDo(Mp3_to_npyFn())

    result = pipeline.run()
    result.wait_until_finish()

Traceback (most recent call last):
  File "apache_beam/runners/common.py", line 744, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 423, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "main2_mod.py", line 57, in process
  File "/usr/local/lib/python3.7/site-packages/google/cloud/storage/client.py", line 227, in get_bucket
    bucket.reload(client=self)
  File "/usr/local/lib/python3.7/site-packages/google/cloud/storage/_helpers.py", line 130, in reload
    _target_object=self,
  File "/usr/local/lib/python3.7/site-packages/google/cloud/_http.py", line 293, in api_request
    raise exceptions.from_http_response(response)
google.api_core.exceptions.InternalServerError: 500 GET https://www.googleapis.com/storage/v1/b/my_db?projection=noAcl: Backend Error

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/usr/local/lib/python3.7/site-packages/dataflow_worker/batchworker.py", line 649, in do_work
    work_executor.execute()
  File "/usr/local/lib/python3.7/site-packages/dataflow_worker/executor.py", line 176, in execute
    op.start()
  File "dataflow_worker/native_operations.py", line 38, in dataflow_worker.native_operations.NativeReadOperation.start
  File "dataflow_worker/native_operations.py", line 39, in dataflow_worker.native_operations.NativeReadOperation.start
  File "dataflow_worker/native_operations.py", line 44, in dataflow_worker.native_operations.NativeReadOperation.start
  File "dataflow_worker/native_operations.py", line 54, in dataflow_worker.native_operations.NativeReadOperation.start
  File "apache_beam/runners/worker/operations.py", line 246, in apache_beam.runners.worker.operations.Operation.output
  File "apache_beam/runners/worker/operations.py", line 142, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 560, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 561, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 740, in apache_beam.runners.common.DoFnRunner.receive
  File "apache_beam/runners/common.py", line 746, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 785, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "apache_beam/runners/common.py", line 744, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 422, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "apache_beam/runners/common.py", line 870, in apache_beam.runners.common._OutputProcessor.process_outputs
  File "apache_beam/runners/worker/operations.py", line 142, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 560, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 561, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 740, in apache_beam.runners.common.DoFnRunner.receive
  File "apache_beam/runners/common.py", line 746, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 800, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "/usr/local/lib/python3.7/site-packages/future/utils/__init__.py", line 421, in raise_with_traceback
    raise exc.with_traceback(traceback)
  File "apache_beam/runners/common.py", line 744, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 423, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "main2_mod.py", line 57, in process
  File "/usr/local/lib/python3.7/site-packages/google/cloud/storage/client.py", line 227, in get_bucket
    bucket.reload(client=self)
  File "/usr/local/lib/python3.7/site-packages/google/cloud/storage/_helpers.py", line 130, in reload
    _target_object=self,
  File "/usr/local/lib/python3.7/site-packages/google/cloud/_http.py", line 293, in api_request
    raise exceptions.from_http_response(response)
google.api_core.exceptions.InternalServerError: 500 GET https://www.googleapis.com/storage/v1/b/cochlear_db?projection=noAcl: Backend Error [while running 'MP3 to npy']

tsv文件包含0.4M个文件名(.mp3)的列表。解析后,管道会读取每个mp3文件并执行一些处理。当我用tsv中只有5个文件的列表进行测试时,一切正常。但是用0.4M个文件测试时,它在读取大约第35,000个文件时遇到错误500而卡住。它似乎重试了很多次,最终失败了。

仅供参考,mp3文件位于“gs://bucket_name/same_subdir/id_string.mp3”,其中ID按100001、100002、100003的顺序排列。

2 个答案:

答案 0 :(得分:0)

请使用GcsIO代替存储客户端。如果遇到可重试的错误,请使用指数退避(exponential backoff)重试该调用。

答案 1 :(得分:0)

我通过在管道中显式提供身份验证凭据解决了该问题。我认为,工作器(worker)在失败后重试时丢失了访问权限。

# get mp3 from the storage
    # NOTE(review): fragment from the answer, not runnable as-is —
    # <PROJECT_NAME> and <BUCKET_NAME> are placeholders (invalid Python)
    # and the indentation suggests it lives inside a method body.
    # Explicitly building Compute Engine credentials makes each worker
    # re-authenticate instead of relying on ambient default credentials.
    credentials = compute_engine.Credentials()
    project = <PROJECT_NAME>

    client = storage.Client(credentials=credentials, project=project)
    bucket = client.get_bucket(<BUCKET_NAME>)