Is there a way for a Cloud Function to confirm whether a Dataflow job succeeded?
The Cloud Function I have tried:
const google = require('googleapis');

exports.statusJob = function (event, callback) {
  const file = event.data;
  if (file.resourceState === 'exists' && file.name) {
    console.log(file.name);
    console.log(event.data);
    google.auth.getApplicationDefault(function (err, authClient, projectId) {
      if (err) {
        throw err;
      }
      if (authClient.createScopedRequired && authClient.createScopedRequired()) {
        authClient = authClient.createScoped([
          'https://www.googleapis.com/auth/cloud-platform',
          'https://www.googleapis.com/auth/userinfo.email'
        ]);
      }
      const dataflow = google.dataflow({ version: 'v1b3', auth: authClient });
      dataflow.projects.jobs.get({
        projectId: 'my-project-id',
        resource: {
          jobId: 'some_number'
        }
      }, function (err, response) {
        if (err) {
          console.error("problem running dataflow template, error was: ", err);
        }
        console.log("Dataflow template response: ", response);
        callback();
      });
    });
  }
};
package.json:
{
  "name": "test",
  "version": "1.0.0",
  "description": "",
  "main": "index.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "author": "",
  "license": "ISC",
  "dependencies": {
    "googleapis": "^18.0.0"
  }
}
The above worked for me exactly once. The response I got was:
Dataflow template response: { id: 'some_number', projectId: 'my-project-id', name: 'cloud-fn', type: 'JOB_TYPE_BATCH', environment: { userAgent: { name: 'Google Cloud Dataflow SDK for Java', support: [Object], 'build.date': '2017-05-23 19:46', version: '2.0.0' }, version: { major: '6', job_type: 'JAVA_BATCH_AUTOSCALING' } }, currentState: 'JOB_STATE_DONE',........
Every invocation after that fails with:
problem running dataflow template, error was: Error: Missing required parameters: jobId at createAPIRequest (/user_code/node_modules/googleapis/lib/apirequest.js:110:14) at Object.get (/user_code/node_modules/googleapis/apis/dataflow/v1b3.js:670:16) at /user_code/index.js:22:29 at callback (/user_code/node_modules/googleapis/node_modules/google-auth-library/lib/auth/googleauth.js:42:14) at /user_code/node_modules/googleapis/node_modules/google-auth-library/lib/auth/googleauth.js:289:13 at _combinedTickCallback (internal/process/next_tick.js:73:7) at process._tickDomainCallback (internal/process/next_tick.js:128:9)
Does anyone have any insight into this?
Thanks
Answer 0 (score: 1)
You can use the Dataflow CLI to determine whether a job failed or succeeded. It lets you list jobs and check whether each one is failed, succeeded, running, or cancelled.
Specifically, to check the status of a single job, you can run:
  gcloud beta dataflow jobs describe <JOB_ID>
For more details, see the documentation:
https://cloud.google.com/dataflow/pipelines/dataflow-command-line-intf
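If you want to drive that check from a script rather than typing it by hand, a minimal sketch is below. It shells out to the same command and reads only the job state via the standard gcloud --format flag. This assumes the gcloud SDK is installed and authenticated on the machine running the script (a local workstation or build agent; gcloud is generally not available inside the Cloud Functions runtime), and JOB_ID is a placeholder you would replace with a real job id:

  // Sketch: read a Dataflow job's current state via the gcloud CLI.
  const { execSync } = require('child_process');

  const jobId = 'JOB_ID'; // placeholder, not a real job id

  // value(currentState) prints only the currentState field of the job.
  const state = execSync(
    `gcloud beta dataflow jobs describe ${jobId} --format="value(currentState)"`,
    { encoding: 'utf8' }
  ).trim();

  if (state === 'JOB_STATE_DONE') {
    console.log('Job succeeded');
  } else if (state === 'JOB_STATE_FAILED') {
    console.log('Job failed');
  } else {
    console.log('Job is in state:', state);
  }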
Answer 1 (score: 0)
The payload is wrong: jobId must be a top-level parameter, not nested under resource, which is why the API reports "Missing required parameters: jobId". It should look like this:
dataflow.projects.jobs.get({
  projectId: 'my-project-id',
  jobId: 'some_number'
}, function (err, response) {
  if (err) {
    console.error("problem running dataflow template, error was: ", err);
  }
  console.log("Dataflow template response: ", response);
  callback();
});
The code above successfully fetches the job details on Node.js 8 (beta).
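To answer the original question of confirming success or failure, a minimal sketch building on that call could inspect currentState in the callback. This assumes the same client shape as in the question, where the job object is passed directly to the callback (as the logged response shows); JOB_STATE_DONE appears in that log, and JOB_STATE_FAILED is the corresponding failure state. How errors are surfaced back through callback is an assumption about your function's contract:

  // Sketch: decide success/failure from the job's currentState.
  // `dataflow` is the authenticated client built earlier and `callback`
  // is the Cloud Function callback from the surrounding code.
  dataflow.projects.jobs.get({
    projectId: 'my-project-id',
    jobId: 'some_number'
  }, function (err, response) {
    if (err) {
      console.error('could not fetch job status: ', err);
      return callback(err);
    }
    if (response.currentState === 'JOB_STATE_DONE') {
      console.log('Dataflow job succeeded');
    } else if (response.currentState === 'JOB_STATE_FAILED') {
      console.error('Dataflow job failed');
    } else {
      console.log('Dataflow job is still in state: ', response.currentState);
    }
    callback();
  });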