I wrote some code to parse entries out of a log file. The log alternates between two kinds of lines: one containing 'command' and one containing 'Request finished'. So 1,000,000 log lines turn into roughly 500,000 DB entries. The problem is that inserting into and updating the database is slow.
function getDate(){
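// shift the current timestamp forward by 2 hours before formatting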
var date = new Date().getTime() + (2 * 60 * 60 * 1000);
return new Date(date).toISOString().replace(/T/, ' ').replace(/\..+/, ''); // DATETIME in format yyyy-mm-dd hh:mm:ss
}
var detailLog = false;
var express = require("express");
var mysql = require('mysql');
var connection = mysql.createConnection({
host : 'localhost',
user : 'root',
password : '',
database : 'logger'
});
var app = express();
connection.connect(function(err) {
if (!err) {
console.log(getDate(), "Database is connected ... \n");
} else {
console.log(getDate(), "Error connecting database ... \n");
}
});
var file = 'file.log';
var fs = require('fs');
var rl = require('readline').createInterface({
input : fs.createReadStream(file)
});
rl.on('line', function(line) {
//searching 'command' since it is in every odd line
if (line.search('command') >= 0) {
//parsing every odd line of log
// insert in logs table
var queryINS = "";
if (detailLog)
console.log(getDate(),'shooting INSERT ');
connection.query(queryINS,
function(err, rows, fields) {
if (err) {
console.log(getDate(), 'Error while performing insert into logs query:', queryINS, err);
}
});
if (detailLog)
console.log(getDate(),'INSERT done');
//searching 'Request finished' since it is in every even line
} else if (line.search('Request finished') >= 0) {
//parsing every even line of log
// update logs table
var queryUPD = "";
if (detailLog)
console.log(getDate(),'shooting UPDATE ');
connection.query(queryUPD,
function(err, rows, fields) {
if (err) {
console.log(getDate(), 'Error while performing update logs query:', queryUPD, err);
}
});
if (detailLog)
console.log(getDate(),'UPDATE done');
}
}).on('close', function() {
console.log(getDate(), 'Inserted logs into table.');
});
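
Would batching the parsed rows and issuing one multi-row INSERT per batch help? A minimal sketch of what I mean, reusing the same connection and getDate() from above (the logs columns command and created_at are placeholders, not my real schema, and parsedCommand stands for whatever I extract from the line):

var batch = [];
var BATCH_SIZE = 1000; // flush to MySQL every 1000 parsed rows

function flushBatch(done) {
    if (batch.length === 0) return done && done();
    var rows = batch;
    batch = [];
    // the mysql module expands a nested array into a multi-row VALUES list
    connection.query('INSERT INTO logs (command, created_at) VALUES ?', [rows],
        function(err) {
            if (err) console.log(getDate(), 'Error while performing bulk insert:', err);
            if (done) done();
        });
}

// inside rl.on('line', ...), instead of one INSERT per line:
//     batch.push([parsedCommand, getDate()]);
//     if (batch.length >= BATCH_SIZE) flushBatch();
// and in the 'close' handler, call flushBatch() one last time before ending the connection.

Would something like this (plus similar batching for the updates) be the right direction, or is the bottleneck somewhere else?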