基于我在此处(here)读到的内容,我正在使用 fs.createWriteStream 向文件中写入一些 JSON。我要处理大约 50 个数据块。因此,在脚本开始时,我打开了流,然后通过一个函数把流和要写入的 JSON 一起传进去,单独写入时效果很好。
// Writable stream for the results file, opened in append mode ('a') so each
// write is added after any existing content.
const myStream = fs.createWriteStream(path.join(RESULTS_DIR, 'my-file.json'), {
  flags: 'a',
});
/**
 * Serialise a value as pretty-printed JSON and write it to an open stream.
 *
 * @param {import('stream').Writable} stream - Destination stream (opened elsewhere).
 * @param {*} jsonToAppend - Any JSON-serialisable value.
 * @returns {Promise<string>} Resolves with 'STREAM_WRITE_SUCCESS'; rejects
 *   with an Error (carrying the underlying cause) if the write throws.
 */
function appendJsonToFile(stream, jsonToAppend) {
  return new Promise((resolve, reject) => {
    try {
      // BUG FIX: the original was missing the closing parenthesis on this
      // call, which is a syntax error.
      stream.write(JSON.stringify(jsonToAppend, null, 2));
      resolve('STREAM_WRITE_SUCCESS');
    } catch (streamError) {
      // reject() accepts a single reason; the original's second argument was
      // silently dropped. Wrap the cause in an Error instead.
      reject(new Error('STREAM_WRITE_FAILURE', { cause: streamError }));
    }
  });
}
appendJsonToFile(myStream, someJson)
但是,由于要写入的每个数据都是对象数组,所以我最终在文件中得到的结构如下所示:
[
{
"data": "test data 1",
},
{
"data": "test data 2",
}
][
{
"data": "test data 3",
},
{
"data": "test data 4",
}
]
答案 0(得分:0)
如果文件始终按上面所示的方式格式化,那么您需要做的几件事是:
1. 读取文件,找到末尾的结束符 `]`(或 `\n]`;这里因为末尾没有换行符,所以只有 `]`);
2. 以 `r+` 模式打开文件,并用 `start` 选项把写入位置定位到该结束符之前;
3. 把新写入内容开头的 `[` 替换为 `,`,使其延续既有的数组;
4. 写入完成后保存文件。
这是指向 createWriteStream options 的链接。
现在,另一点是第 4 点会降低效率,这也让人怀疑是否还应该沿用流式写入的整体思路。我认为流式写入确实有意义,但关键问题在于:是否需要在两次写入之间保持文件可读——如果不需要,那么应该在中间加一个 transform stream,其余照常进行,等所有工作完成之后(例如在 beforeExit 事件中)结束流即可。
您可以手动实现上述步骤,不过我是一个名为 scramjet 的框架的作者,该框架能让这类场景更容易实现:
// In-memory scramjet DataStream that each incoming chunk is pushed into.
const myStream = new scramjet.DataStream();
const file = path.join(RESULTS_DIR, `my-file.json`)
// Write position: 2 bytes before EOF, i.e. on top of the file's existing
// closing "]" (assumes the file already exists and ends with "\n]";
// statSync throws ENOENT otherwise).
const start = fs.statSync(file).size - 2;
myStream
.flatten() // spread each written array into individual items — per scramjet API
.toJSONArray() // re-serialise the item stream as one JSON array
.shift(1) // drop the first emitted chunk — presumably the opening "[" so the output continues the existing array; TODO confirm against scramjet docs
.pipe(fs.createWriteStream(
file,
// r+ preserves existing file content; writing begins at `start`,
// overwriting the old closing bracket.
{flags: 'r+', start}
));
/**
 * Push one chunk of data into the (object-mode) stream.
 *
 * @param {import('stream').Writable} stream - Destination stream; the
 *   scramjet DataStream above accepts raw objects.
 * @param {*} jsonToAppend - The value to write (not serialised here).
 * @returns {Promise<string>} Resolves with 'STREAM_WRITE_SUCCESS'; rejects
 *   with an Error (carrying the underlying cause) if the write throws.
 */
function appendJsonToFile(stream, jsonToAppend) {
  return new Promise((resolve, reject) => {
    try {
      stream.write(jsonToAppend);
      resolve('STREAM_WRITE_SUCCESS');
    } catch (streamError) {
      // BUG FIX: reject() takes a single reason; the original passed the
      // cause as a second argument, which was silently ignored.
      reject(new Error('STREAM_WRITE_FAILURE', { cause: streamError }));
    }
  });
}
// Catch write failures so the floating promise cannot cause an unhandled
// rejection.
appendJsonToFile(myStream, someJson).catch((err) => console.error(err));
// BUG FIX: the original passed the RESULT of myStream.end() (a stream object,
// not a function) as the listener — ending the stream immediately and making
// process.on() throw ERR_INVALID_ARG_TYPE. Wrap it in a function so the
// stream is ended when 'beforeExit' actually fires.
process.on('beforeExit', () => myStream.end());
您可以像上面那样直接使用它;而如果您更想用原生的 Node 流自行处理,上面的思路也应该能为您指明正确的方向。
答案 1(得分:0)
我在 Michał Karpacki 的解决方案基础上,为代码补充了错误(Error)处理和文件不存在(FILE NOT FOUND)时的处理逻辑。
const path = require('path');
const fs = require('fs');
// Directory that result files live in: the module directory when available
// (CommonJS `__dirname`), falling back to the current working directory.
function getFolderPath() {
  return __dirname || process.cwd();
}

// Absolute path for `fileName` (expected to include its extension).
function getFilePath(fileName) {
  return path.join(getFolderPath(), `${fileName}`);
}
/**
 * Append an array of items to a JSON-array file, creating the file (and its
 * directory) on first use.
 *
 * @param {string} fileName - Included File Name & its Extension
 * @param {Array<*>} arrayData - Items to append to the stored JSON array.
 * @return {Promise<*>} Resolves with 'STREAM_WRITE_SUCCESS' (or with the
 *   fs.writeFileSync result when the file is first created); rejects with
 *   ['STREAM_WRITE_FAILURE', error] when a write fails.
 */
const writeFileAsync = async (fileName, arrayData) => {
    const filePath = getFilePath(fileName);
    return new Promise((resolve, reject) => {
        try {
            // Start writing 2 bytes before EOF, i.e. on top of the existing
            // closing "\n]" (statSync throws ENOENT if the file is missing).
            const _WritableStream = fs.createWriteStream(filePath, {flags: 'r+', start: fs.statSync(filePath).size - 2});
            // Replace the payload's leading "[" with "," so the new items
            // continue the array already stored in the file.
            // BUG FIX: the original write callback called reject() even on a
            // successful flush (with an undefined error); it only "worked"
            // because resolve() had already raced it synchronously. Settle
            // based on the callback's error argument instead.
            _WritableStream.write(JSON.stringify(arrayData, null, 2).replace(/\[/, ','), (streamError) => {
                if (streamError) return reject(['STREAM_WRITE_FAILURE', streamError]);
                return resolve('STREAM_WRITE_SUCCESS');
            });
        } catch (streamError) {
            /** ERROR NOT FOUND SUCH FILE OR DIRECTORY !*/
            if (streamError.code === 'ENOENT') {
                // First run: create the folder and write a fresh JSON array.
                // (The original built a copy via Array.from({...arrayData,
                // length}); a plain shallow copy is equivalent and clearer.)
                fs.mkdirSync(getFolderPath(), {recursive: true});
                return resolve(fs.writeFileSync(filePath, JSON.stringify([...arrayData], null, 2)));
            }
            /** ERROR OUT OF BOUND TO FILE SIZE RANGE - INVALID START POSITION FOR WRITE STREAM !*/
            if (streamError instanceof RangeError) {
                // File shorter than 2 bytes: rewrite it from position 0.
                console.error(`> [ERR_OUT_OF_RANGE] =>`, streamError);
                const _WritableStream = fs.createWriteStream(filePath, {flags: 'r+'});
                _WritableStream.write(JSON.stringify(arrayData, null, 2), (writeError) => {
                    if (writeError) return reject(['STREAM_WRITE_FAILURE', writeError]);
                    return resolve('STREAM_WRITE_SUCCESS');
                });
                return;
            }
            return reject(['STREAM_WRITE_FAILURE', streamError]);
        }
    });
};
// Demo invocation. The catch handler prevents an unhandled promise rejection
// if the write fails (the original left the returned promise floating).
(() => writeFileAsync('test1.json',
    [{
        key: "value 1"
    }, {
        key: "value 2"
    }]
).catch((err) => console.error(err)))();
/* Output after 1st time run =>
[
{
"key": "value 1"
},
{
"key": "value 2"
}
]
*/
/* Output after 2nd time run =>
[
{
"key": "value 1"
},
{
"key": "value 2"
},
{
"key": "value 1"
},
{
"key": "value 2"
}
]
*/