我在MongoDB上有一个非常大的集合,我想从该集合中删除重复的记录。我想到的第一个想法是删除索引并使用dropDups重建索引。但是,重复的数据太多,MongoDB无法处理。
所以我转向MapReduce寻求帮助。这是我目前的进展。
// Map phase: key every userList document by its myid, emitting a count of 1.
m = function () {
  emit(this.myid, 1);
};

// Reduce phase: total up the 1s emitted for each distinct myid.
r = function (key, counts) {
  var total = Array.sum(counts);
  return total;
};

// Run the map-reduce over userList; the per-myid counts are
// materialized into the "myoutput" collection.
res = db.userList.mapReduce(m, r, { out: "myoutput" });
所有重复记录的“myid”都存储在“myoutput”集合中。但是,我不知道如何通过引用myoutput.myid从userList中删除记录。它大概应该是这样的:
// Iterate the aggregated counts; value > 1 marks a duplicated myid.
db.myoutput.find({value: {$gt: 1}}).forEach(
function(obj) {
// NOTE: "xxxxxxxxx" is a deliberate placeholder — the question is asking
// what remove() query belongs here (this snippet is not runnable as-is).
db.userList.remove(xxxxxxxxx) // I don't know how to do so
})
顺便说一句,使用 forEach 似乎会把具有相同 myid 的所有记录全部删除,而我只想删除其中重复的记录。例如:
{ "_id" : ObjectId("4edc6773e206a55d1c0000d8"), "myid" : 0 }
{ "_id" : ObjectId("4edc6780e206a55e6100011a"), "myid" : 0 }
{ "_id" : ObjectId("4edc6784e206a55ed30000c1"), "myid" : 0 }
最终结果应该只保留一条记录。有人可以给我一些帮助吗?
谢谢。 :)
答案 0 :(得分:8)
最干净的可能是写一个删除记录的客户端脚本:
// For every myid that occurred more than once, keep one userList
// document and delete the rest of its duplicates.
db.myoutput.find({value: {$gt: 1}}).forEach(function (dup) {
  var keepFirst = true;
  // Fetch only the _id of each userList document sharing this myid.
  db.userList.find({ myid: dup._id }, {_id: 1}).forEach(function (doc) {
    if (keepFirst) {
      // Spare the first match so a single copy survives.
      keepFirst = false;
      return;
    }
    db.userList.remove({ _id: doc._id });
  });
});
我没有测试过这段代码,所以在对生产(prod)数据运行之前,请务必仔细检查。
答案 1 :(得分:1)
虽然上述答案非常有效,但如果您的数据库/集合中有900K或3M记录,则确实非常慢。
如果处理大量数据,我建议走很长的路:
对于900K条目,这需要大约35秒(组查询)。
在PHP中实现:
// Connect and grab the collection we want to de-duplicate.
$client = new MongoClient();
$settings = $client->selectCollection("main", "settings");

// Documents are grouped on the "code" field.
$groupKeys = array("code" => 1);

// Seed object: every field to carry over must be declared here
// (the grouped-by field itself is transferred automatically).
$seed = array("location" => "", "name" => "", "score" => 0, "type" => "");

// The reduce callback copies the grouped fields onto the accumulator.
$reducer = "function (obj, prev) { prev.location = obj.location; prev.name = obj.name; prev.score = obj.score; prev.type = obj.type; }";

// Run the group query, then dump the unique result set to disk as JSON.
$grouped = $settings->group($groupKeys, $seed, $reducer);
$out = fopen("Export.json", "w");
fwrite($out, json_encode($grouped['retval']));
fclose($out);
如果您只有很少的重复项,那么在PHP上运行它可能不是最佳选择,但我的设置有大量的重复项,因此最终的数据集很容易处理。也许有人会发现这对速度有用。 (转移到mongo shell应该相当容易。)
但是,请记住,您必须重新格式化最终文件,以便每行包含1个文档,以便与mongoimport一起使用。 (搜索/替换所有应该没问题。)
答案 2 :(得分:1)
实际上这里不需要 mapreduce。不妨试试这个:把下面的代码粘贴到 mongo shell 中:
// Remove duplicate documents from a collection, keeping the first one
// seen for each distinct value of keyField.
//
//   collectionName - name of the collection to de-duplicate
//   keyField       - field whose repeated values define a duplicate
//   reportEvery    - print a progress line every N scanned docs (default 10)
//
// Returns {docsCnt, docsRemoved}: documents scanned and removed.
// Sorting on keyField guarantees equal values arrive adjacently, which
// the last-seen-key comparison below relies on.
function removeDupls (collectionName, keyField, reportEvery) {
  if (reportEvery === undefined) {reportEvery=10;}
  var sort = {};   // was an implicit global in the original; keep it local
  sort[keyField] = 1;
  var lastKey;     // keyField value of the previously scanned document
  var res = {docsCnt:0,docsRemoved:0};
  db[collectionName].find().sort(sort).clone().forEach(
    function(doc) {
      res['docsCnt'] += 1;
      // BUG FIX: the original hard-coded doc.myid here, ignoring the
      // keyField argument — with any other key it deleted almost every
      // document (undefined == undefined on every row after the first).
      if (doc[keyField] == lastKey) {
        db[collectionName].remove({_id:doc._id});
        res['docsRemoved'] += 1;
      } else {
        lastKey = doc[keyField];
      }
      if (res['docsCnt'] % reportEvery === 0) {print (JSON.stringify(res))}
    }
  );
  return res;
}
然后调用它:
removeDupls('users','myid',1000)
这会起作用,并且可能比任何“mapreduce + 删除”作业更快(取决于重复文档的数量)。如果想让它真的很快,应该把待删除文档的 _id 先存入一个临时数组,然后执行批量删除。
答案 3 :(得分:0)
/*
 * This map reduce will output a new collection: "duplicateinvoices"
 * { "_id" : "12345", "value" : 2 }
 * { "_id" : "23456", "value" : 2 }
 * ...
 **/
// Mapper: emit each invoice's MlsId with a count of one.
m = function () {
  emit(this.MlsId, 1);
};
// Reducer: add up the ones emitted for a given MlsId.
r = function (key, ones) {
  return Array.sum(ones);
};
// Materialize the per-MlsId counts into "duplicateinvoices".
res = db.invoices.mapReduce(m, r, { out : "duplicateinvoices" });
/*
 * We have two approaches (we should test which is faster/reliable, I didn't).
 **/

/* OPTION 1 */
// Walk the duplicated keys (value > 1). For each media_hash: remember a
// single matching invoice, delete every invoice sharing that media_hash,
// then re-insert the remembered one.
db.duplicateinvoices.find({value: {$gt: 1}}).forEach(function (dup) {
  // Keep a copy of one representative document.
  var survivor = db.invoices.findOne({ media_hash: dup._id });
  // Drop every invoice carrying this media_hash...
  db.invoices.remove({media_hash: dup._id});
  // ...and restore the single saved copy.
  db.invoices.insert(survivor);
});
/* OPTION 2 */
// Walk the duplicated keys (value > 1); for each media_hash keep the
// first matching invoice and remove the rest in place.
db.duplicateinvoices.find({value: {$gt: 1}}).forEach(
  function(invoice) {
    // Cursor over every invoice sharing this media_hash.
    var cur = db.invoices.find({ media_hash: invoice._id });
    var first = true;
    while (cur.hasNext()) {
      var doc = cur.next();
      // Skip the first one so a single copy survives.
      if (first) {first = false; continue;}
      // BUG FIX: the original called db.userList.remove here (copy-pasted
      // from another answer); the duplicates live in db.invoices.
      db.invoices.remove({ _id: doc._id });
    }
  }
)
来源:
How to remove duplicate record in MongoDB by MapReduce? http://openmymind.net/2011/1/20/Understanding-Map-Reduce/ http://docs.mongodb.org/manual/tutorial/map-reduce-examples/