The following function was previously triggered by onCreate in Firestore, but I'm moving it to a (new) onCall. I've tested variations of it, and it only works about 20% of the time. From what I've read online, that means I'm not handling my returns and promises correctly. If anyone can tell me what I'm doing wrong, or give me a hint, it would be much appreciated.
export const DataStoreCall = functions.https.onCall((datas) => {
  const filePath = 'goodfile';
  const tempLocalFile = path.join(os.tmpdir(), filePath);
  var file = fs.createWriteStream(tempLocalFile);
  // Left over from the HTTP/CORS setup; onCall handles CORS itself.
  const cors = require('cors')({ origin: true });
  const newV = datas;
  const db = admin.firestore();
  const Id = datas.id;
  const UpstRef = db.collection('Review').doc(Id);
  https.get(newV.url, function (response) {
    return async function () {
      try {
        response.pipe(file);
        const colPath = 'dataStuff';
        const a = file.path;
        const b = newV.name;
        const colRef = db.collection(colPath);
        const batch = db.batch();
        let data;
        if (b.endsWith(".json")) {
          // fs.readJSON is from fs-extra, not the core fs module.
          data = await fs.readJSON(a);
        } else if (b.endsWith(".csv")) {
          data = await parseCSV(a);
        } else if (b.endsWith(".xlsx")) {
          data = await parseXLSX(a);
        } else if (b.endsWith(".xlx")) {
          data = await parseXLSX(a);
        } else {
          throw new Error("Unknown file extension.");
        }
        for (const item of data) {
          const docRef = colRef.doc();
          batch.set(docRef, item);
        }
        // Commit the batch
        await batch.commit().then(() => {
          //FIRESTORE Update
        });
        console.log("completed!");
      } catch (error) {
        console.log("failed!", error);
      }
    };
    function parseCSV(path): Promise<any> {
      return new Promise((resolve, reject) => {
        let lineCount = 0;
        ///CSV read/parse code (the stream rejects via .on("error", err => reject(err)))
        resolve(data);
      });
    }
    function parseXLSX(path): Promise<any> {
      return new Promise((resolve, reject) => {
        //XLSX read/parse code
        resolve(data);
      });
    }
  });
});
I'm on the Blaze plan and have CORS/HTTP requests enabled on Storage.
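From what I've read since posting, an onCall handler is supposed to return a promise that resolves only once all the work is done, rather than handing the work off to a callback. A minimal sketch of the shape I think it needs (this is my assumption of the fix, reusing the names from my code above):

// Wrap the download in a promise and return the full chain from the
// handler, so Cloud Functions keeps the instance alive until it finishes.
export const DataStoreCall = functions.https.onCall((datas) => {
  const tempLocalFile = path.join(os.tmpdir(), 'goodfile');
  const download = new Promise<void>((resolve, reject) => {
    const file = fs.createWriteStream(tempLocalFile);
    https.get(datas.url, (response) => {
      response.pipe(file);
      file.on('finish', () => resolve());
      file.on('error', reject);
    }).on('error', reject);
  });
  // The return here is the important part.
  return download.then(async () => {
    // ...parse the temp file and batch.commit() as before...
  });
});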
Answer 0 (score: 1)
I found another, better solution for this function. I realized that the Google Cloud Storage methods all work on Firebase Storage. I use createReadStream to access the file data and send it to Firestore.
The code is below. I've called it about 20 times and it hasn't failed yet. (There's no error handling; look at the docs for that.)
exports.getXl = functions.https.onCall((data) => {
  const db = admin.firestore();
  var filename = data.name;
  // Older @google-cloud/storage API; newer versions use
  // `const { Storage } = require('@google-cloud/storage'); new Storage()`.
  var storage = require('@google-cloud/storage')();
  var bucket = storage.bucket('bucketName');
  var XLSX = require('xlsx');
  var remoteFile = bucket.file(`foldername/${filename}`);
  const storename = data.storename;
  const colRef = db.collection(storename);
  const gcsStream = remoteFile.createReadStream();
  var allBuffer = new Promise((resolve, reject) => {
    var buffers = [];
    gcsStream.on('data', function (data) {
      buffers.push(data);
    });
    gcsStream.on('end', function () {
      var buffer = Buffer.concat(buffers);
      var workbook = XLSX.read(buffer, {
        type: "buffer"
      });
      var sheetName = workbook.SheetNames[0];
      // CONVERTS THE SHEET TO JSON
      var abe = XLSX.utils.sheet_to_json(workbook.Sheets[sheetName]);
      resolve(abe);
    });
  });
  // ALL ONCALL FUNCTIONS HAVE TO RETURN SOMETHING.
  return allBuffer.then(function (result) {
    const batch = db.batch();
    // USING BATCH BECAUSE THE CLOUD FUNCTION WILL CUT OFF THE PROCESS IF IT'S WRITING ONE DOC AT A TIME.
    // BATCH HAS A LIMIT OF 500 DOCUMENT WRITES AT A TIME, SO LOOK UP HOW TO MANAGE CHUNKS IF NECESSARY
    // (a chunking sketch follows this code).
    for (const item of result) {
      const docRef = colRef.doc();
      batch.set(docRef, item);
    }
    console.log(result);
    // Return the commit so the function stays alive until the writes finish.
    return batch.commit();
  });
});
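Since the comment above mentions the 500-write limit on a Firestore batch, here is a minimal sketch of how the commit step could be chunked. The writeInChunks helper and chunkSize constant are my own names for illustration, not part of the code above:

// Hypothetical helper: split the parsed rows into groups of up to 500
// and commit one batch per group, since a Firestore batch caps at 500 writes.
async function writeInChunks(colRef: FirebaseFirestore.CollectionReference, items: any[]) {
  const chunkSize = 500;
  for (let i = 0; i < items.length; i += chunkSize) {
    const batch = admin.firestore().batch();
    for (const item of items.slice(i, i + chunkSize)) {
      batch.set(colRef.doc(), item);
    }
    await batch.commit(); // commit each chunk before starting the next
  }
}

Inside the .then above, `return writeInChunks(colRef, result);` would replace the single batch.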