我正在尝试从节点JS中的Kafka主题读取每条消息,并搜索mongo DB中收到的消息的任何重复记录,否则在mongo DB中插入新记录。下面是为此编写的代码。
当NodeJS执行异步消息处理时,在从mongo DB查询第一条消息之前,会读取并处理来自Kafka主题的下一条消息,如果它不是重复的,则会插入。
var dataHandler = function (messageSet, topic, partition) {
messageSet.forEach(function (m) {
var m_val=m.message.value.toString('utf8');
if((m_val.slice(100 ,108)=="XXXXXXXX") || (m_val.slice(100 ,108)=="YYYYYYYY")){
senderInfo.requestID=m_val.slice(100,108);
senderInfo.fileIdentifier=m_val.slice(140,190);
senderInfo.sequenceNo=m_val.slice(191,199);
senderInfo.recCount=m_val.slice(200,210);
senderInfo.totHash=m_val.slice(213,231);
//* store the data in Mongo. check for duplicate record
//*var insert1 = JSON.parse(m.message.value.toString('utf8'));
MongoClient.connect(url, function(err, client) {
assert.equal(null, err);
console.log("Connected correctly to MongoDB server.");
/*mongodb = db; */
var db = client.db('Unitech');
var getResult = (callback) => {
console.log("request ID before mongo DB query is:"+senderInfo.requestID);
db.collection("Instream").find({"requestID":senderInfo.requestID})
.toArray(function(err, result) {
if (err) throw err;
callback(result.length);
});
}
getResult((result) => {
if(result>0){
console.log("Duplicate message sent:"+senderInfo.requestID+" "+senderInfo.sequenceNo);
} else{
db.collection('Instream').insertOne(senderInfo);
console.log("One message inserted");
client.close();
}
});
});
}
});
};
有没有办法可以同步方式处理mongo查询?