Dataflow job gets stuck reading the 35,000th file from Google Cloud Storage
0 votes
/ 08 May 2019
import apache_beam as beam
from apache_beam.io import ReadFromText
from google.cloud import storage
from tempfile import TemporaryFile


class Mp3_to_npyFn(beam.DoFn):
    def process(self, element):
        filename, e = element

        # get mp3 from the storage
        bucket = storage.Client().get_bucket('BUCKET_NAME')
        blob = bucket.get_blob(filename)
        tmp_mp3 = TemporaryFile()
        blob.download_to_file(tmp_mp3)
        tmp_mp3.seek(0)

        array = do_something(tmp_mp3)
        write_numpy_array(array)
        return something


def run():
    pp = beam.Pipeline(RUNNER, options=opts)
    l = (pp
         | 'Read TSV' >> ReadFromText(INPUT_TSV, skip_header_lines=1)
         | 'Parse TSV' >> beam.Map(parseTSV)
         | 'MP3 to npy' >> beam.ParDo(Mp3_to_npyFn())
        )
    job = pp.run()
    job.wait_until_finish()

Traceback (most recent call last):
  File "apache_beam/runners/common.py", line 744, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 423, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "main2_mod.py", line 57, in process
  File "/usr/local/lib/python3.7/site-packages/google/cloud/storage/client.py", line 227, in get_bucket
    bucket.reload(client=self)
  File "/usr/local/lib/python3.7/site-packages/google/cloud/storage/_helpers.py", line 130, in reload
    _target_object=self,
  File "/usr/local/lib/python3.7/site-packages/google/cloud/_http.py", line 293, in api_request
    raise exceptions.from_http_response(response)
google.api_core.exceptions.InternalServerError: 500 GET https://www.googleapis.com/storage/v1/b/my_db?projection=noAcl: Backend Error

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/usr/local/lib/python3.7/site-packages/dataflow_worker/batchworker.py", line 649, in do_work
    work_executor.execute()
  File "/usr/local/lib/python3.7/site-packages/dataflow_worker/executor.py", line 176, in execute
    op.start()
  File "dataflow_worker/native_operations.py", line 38, in dataflow_worker.native_operations.NativeReadOperation.start
  File "dataflow_worker/native_operations.py", line 39, in dataflow_worker.native_operations.NativeReadOperation.start
  File "dataflow_worker/native_operations.py", line 44, in dataflow_worker.native_operations.NativeReadOperation.start
  File "dataflow_worker/native_operations.py", line 54, in dataflow_worker.native_operations.NativeReadOperation.start
  File "apache_beam/runners/worker/operations.py", line 246, in apache_beam.runners.worker.operations.Operation.output
  File "apache_beam/runners/worker/operations.py", line 142, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 560, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 561, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 740, in apache_beam.runners.common.DoFnRunner.receive
  File "apache_beam/runners/common.py", line 746, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 785, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "apache_beam/runners/common.py", line 744, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 422, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "apache_beam/runners/common.py", line 870, in apache_beam.runners.common._OutputProcessor.process_outputs
  File "apache_beam/runners/worker/operations.py", line 142, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 560, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 561, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 740, in apache_beam.runners.common.DoFnRunner.receive
  File "apache_beam/runners/common.py", line 746, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 800, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "/usr/local/lib/python3.7/site-packages/future/utils/__init__.py", line 421, in raise_with_traceback
    raise exc.with_traceback(traceback)
  File "apache_beam/runners/common.py", line 744, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 423, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "main2_mod.py", line 57, in process
  File "/usr/local/lib/python3.7/site-packages/google/cloud/storage/client.py", line 227, in get_bucket
    bucket.reload(client=self)
  File "/usr/local/lib/python3.7/site-packages/google/cloud/storage/_helpers.py", line 130, in reload
    _target_object=self,
  File "/usr/local/lib/python3.7/site-packages/google/cloud/_http.py", line 293, in api_request
    raise exceptions.from_http_response(response)
google.api_core.exceptions.InternalServerError: 500 GET https://www.googleapis.com/storage/v1/b/cochlear_db?projection=noAcl: Backend Error [while running 'MP3 to npy']

The TSV file contains a list of 0.4M file names (.mp3). After parsing it, the pipeline reads each mp3 file and does some processing. When I tested with a list of 5 files in the TSV, it worked fine. But with the full 0.4M files it gets stuck around the 35,000th file with a 500 error. It seems to retry many times and finally crashes.

FYI, the mp3 files live at "gs://bucket_name/same_subdir/id_string.mp3", where the IDs run in order like 100001, 100002, 100003.
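For reference, a hypothetical parseTSV that would match the (filename, e) unpacking in the DoFn above might look like the following; the actual TSV columns are not shown in the question, so the split below is only an assumption:

def parseTSV(line):
    # Assumed layout: first column is the mp3 file name, the rest is metadata.
    fields = line.split('\t')
    return fields[0], fields[1:]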

Answers [ 2 ]

1 vote
/ 10 May 2019

I solved the problem by explicitly passing authentication credentials in the pipeline. My guess is that the workers lose their permissions when they retry after a failure.

# get mp3 from the storage
from google.auth import compute_engine
from google.cloud import storage

credentials = compute_engine.Credentials()
project = '<PROJECT_NAME>'

client = storage.Client(credentials=credentials, project=project)
bucket = client.get_bucket('<BUCKET_NAME>')
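As a usage note, one way to wire this into the DoFn from the question is to build the client once per bundle in start_bundle() instead of once per element. This is only a sketch of that idea, not part of the original answer; <PROJECT_NAME> and <BUCKET_NAME> stay as placeholders:

import apache_beam as beam
from google.auth import compute_engine
from google.cloud import storage


class Mp3_to_npyFn(beam.DoFn):
    def start_bundle(self):
        # Create the client with explicit Compute Engine credentials once per bundle.
        credentials = compute_engine.Credentials()
        self.client = storage.Client(credentials=credentials,
                                     project='<PROJECT_NAME>')

    def process(self, element):
        filename, e = element
        bucket = self.client.get_bucket('<BUCKET_NAME>')
        blob = bucket.get_blob(filename)
        # ... download and process as in the question ...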
0 votes
/ 09 May 2019

Please use GcsIO instead of the storage client. Retry your calls, and for repeated failures use exponential backoff.
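A minimal sketch of that suggestion, assuming the same DoFn as in the question; the path layout, the number of attempts, and the backoff delays are illustrative, and do_something / write_numpy_array are the placeholders from the question:

import time

import apache_beam as beam
from apache_beam.io.gcp import gcsio


class Mp3_to_npyFn(beam.DoFn):
    def process(self, element):
        filename, e = element
        gcs_path = 'gs://BUCKET_NAME/' + filename  # assumed path layout

        # Retry transient 5xx errors with exponential backoff.
        for attempt in range(5):
            try:
                tmp_mp3 = gcsio.GcsIO().open(gcs_path, 'rb')
                try:
                    array = do_something(tmp_mp3)    # placeholder from the question
                    write_numpy_array(array)         # placeholder from the question
                finally:
                    tmp_mp3.close()
                return
            except Exception:
                if attempt == 4:
                    raise
                time.sleep(2 ** attempt)  # wait 1s, 2s, 4s, 8s between retries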

...