I'm replicating this Google-authored tutorial and I've run into an error that I can't figure out how to resolve.
Importing JSON into BigQuery from a Google Cloud Function, I get the error "TypeError: job.promise is not a function".
The offending code, at the bottom of the function, is:
.then(([job]) => job.promise())
The error led me to this discussion about the API used, but I don't understand how to resolve the error from it.
I tried .then(([ job ]) => waitJobFinish(job)) instead, and deleting the line resolves the error, but then nothing gets inserted into BigQuery.
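For reference, the waitJobFinish helper I tried was along these lines (my reconstruction from that discussion; it assumes the returned Job object emits 'complete' and 'error' events, as in older @google-cloud/bigquery releases, which I haven't been able to confirm for my installed version):

// Sketch of a waitJobFinish helper: resolves when the BigQuery job
// emits 'complete', rejects if it emits 'error'.
function waitJobFinish (job) {
  return new Promise((resolve, reject) => {
    job.on('error', reject);
    job.on('complete', (metadata) => resolve(metadata));
  });
}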
Third question: I also can't find documentation on how to trigger a test of the function, so that I can read my console.logs in the Google Cloud Functions console, which would help with debugging this. I can test the JSON POST part of the flow, but I can't find what JSON triggers a test write of a new file to Cloud Storage. The test says it must contain a bucket, but I don't know how the JSON should be formatted (the JSON I use to test the POST -> store-to-Cloud-Storage step doesn't work).
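Judging from the properties the function below reads (file.bucket, file.name, file.resourceState), my best guess at a test payload is something like the following, with placeholder bucket and file names; I haven't confirmed this is the exact shape the console's test trigger expects:

{
  "bucket": "my-test-bucket",
  "name": "my-test-file.json",
  "resourceState": "exists"
}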
Here's the function I have so far:
(function () {
  'use strict';

  // Get a reference to the Cloud Storage component
  const storage = require('@google-cloud/storage')();
  // Get a reference to the BigQuery component
  const bigquery = require('@google-cloud/bigquery')();

  function getTable () {
    const dataset = bigquery.dataset("iterableToBigquery");
    return dataset.get({ autoCreate: true })
      .then(([dataset]) => dataset.table("iterableToBigquery").get({ autoCreate: true }));
  }

  // Set trigger for new files to Google Storage bucket
  exports.iterableToBigquery = (event) => {
    const file = event.data;
    if (file.resourceState === 'not_exists') {
      // This was a deletion event, we don't want to process this
      return;
    }
    return Promise.resolve()
      .then(() => {
        if (!file.bucket) {
          throw new Error('Bucket not provided. Make sure you have a "bucket" property in your request');
        } else if (!file.name) {
          throw new Error('Filename not provided. Make sure you have a "name" property in your request');
        }
        return getTable();
      })
      .then(([table]) => {
        const fileObj = storage.bucket(file.bucket).file(file.name);
        console.log(`Starting job for ${file.name}`);
        const metadata = {
          autodetect: true,
          sourceFormat: 'NEWLINE_DELIMITED_JSON'
        };
        return table.import(fileObj, metadata);
      })
      .then(([job]) => job.promise())
      //.then(([ job ]) => waitJobFinish(job))
      .then(() => console.log(`Job complete for ${file.name}`))
      .catch((err) => {
        console.log(`Job failed for ${file.name}`);
        return Promise.reject(err);
      });
  };
}());
Answer 0 (score: 0)
So I couldn't figure out how to fix Google's example, but I was able to get this load working with the following code in a Google Cloud Function:
'use strict';
/* jshint esversion: 6 */

// Get a reference to the Cloud Storage component
const storage = require('@google-cloud/storage')();
// Get a reference to the BigQuery component
const bigquery = require('@google-cloud/bigquery')();

exports.iterableToBigquery = (event) => {
  const file = event.data;
  if (file.resourceState === 'not_exists') {
    // This was a deletion event, we don't want to process this
    return;
  }
  const importmetadata = {
    autodetect: false,
    sourceFormat: 'NEWLINE_DELIMITED_JSON'
  };
  let job;
  // Load data from a Google Cloud Storage file into the table.
  // Return the promise chain so the function isn't torn down early.
  return bigquery
    .dataset("analytics")
    .table("iterable")
    .import(storage.bucket(file.bucket).file(file.name), importmetadata)
    .then(results => {
      job = results[0];
      console.log(`Job ${job.id} started.`);
      return job;
    })
    .then(job => {
      // Check the job's initial status for errors
      // (note: this does not wait for the load job to finish)
      const errors = job.metadata.status && job.metadata.status.errors;
      if (errors && errors.length > 0) {
        throw errors;
      }
    })
    .then(() => {
      console.log(`Job ${job.id} completed.`);
    })
    .catch(err => {
      console.error('ERROR:', err);
    });
};
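If you also need to wait for the load job to finish before checking for errors (the check above only sees the job's status at creation time), a polling helper along these lines should work. This is an untested sketch that assumes job.getMetadata() resolves to [metadata, apiResponse], as in Google's Node.js samples:

// Hypothetical helper: poll the job's metadata until status.state is
// 'DONE', then resolve with the final metadata or reject with its errors.
function pollJobUntilDone (job, intervalMs) {
  return job.getMetadata().then(([metadata]) => {
    if (metadata.status && metadata.status.state === 'DONE') {
      if (metadata.status.errors && metadata.status.errors.length > 0) {
        return Promise.reject(metadata.status.errors);
      }
      return metadata;
    }
    // Not done yet: wait, then poll again
    return new Promise(resolve => setTimeout(resolve, intervalMs || 1000))
      .then(() => pollJobUntilDone(job, intervalMs));
  });
}

You could then replace the return job; step with return pollJobUntilDone(job); and drop the manual error check in the following .then.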