My Dataflow job is launched from Apache Beam (Python). It runs fine when the runner is the default runner (DirectRunner), but it fails when the runner is set to DataflowRunner. I suspect some Dataflow-related settings are not configured correctly in the GCP project.
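For reference, the pipeline is launched roughly like this (a minimal sketch; the project, bucket, and input/output paths below are placeholders, not my actual values):

import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions

# Placeholder values -- the real project, bucket, and region differ.
options = PipelineOptions(
    runner="DataflowRunner",          # works when this is omitted (DirectRunner)
    project="my-project-id",
    region="us-central1",
    temp_location="gs://my-bucket/temp",
    staging_location="gs://my-bucket/staging",
    job_name="wordcountpy-test",
)

with beam.Pipeline(options=options) as p:
    (
        p
        | "Read" >> beam.io.ReadFromText("gs://dataflow-samples/shakespeare/kinglear.txt")
        | "Split" >> beam.FlatMap(lambda line: line.split())
        | "PairWithOne" >> beam.Map(lambda word: (word, 1))
        | "Count" >> beam.CombinePerKey(sum)
        | "Format" >> beam.Map(lambda kv: f"{kv[0]}: {kv[1]}")
        | "Write" >> beam.io.WriteToText("gs://my-bucket/output/wordcount")
    )

The failing job produces the following error entry in Cloud Logging: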
insertId: "18yr612ckq8"
labels: {
dataflow.googleapis.com/job_id: "2019-12-29_20_13_18-6351782926232365732"
dataflow.googleapis.com/job_name: "wordcountpy-test"
dataflow.googleapis.com/region: "us-central1"
}
logName: "projects/hsbc-9820327-cmbsp54-dev/logs/dataflow.googleapis.com%2Fjob-message"
receiveTimestamp: "2019-12-30T05:13:27.146833360Z"
resource: {
labels: {
job_id: "2019-12-29_20_13_18-6351782926232365732"
job_name: "wordcountpy-test"
project_id: "488006911152"
region: "us-central1"
step_id: ""
}
type: "dataflow_step"
}
severity: "ERROR"
textPayload: "Workflow failed. Causes: The Dataflow job appears to be stuck because no worker activity has been seen in the last 1h. You can get help with Cloud Dataflow at https://cloud.google.com/dataflow/support."
timestamp: "2019-12-30T05:13:25.787782564Z"