We moved our database server; the new machine is technically faster, with a bigger SSD, more RAM, and more CPU. It runs the same MongoDB version, 2.6. But my update query is slower than it should be: old DB ~0.225s, new DB 0.3s - 1.5s, so it is also unstable.
Update query (run with upsert: true); this is the filter document:
{
"_id.date" : "2016-10-03",
"_id.week" : "Monday",
"_id.hour" : "19",
"_id.s_id" : "xxxx",
"_id.c_id" : "yyyy",
"_id.domain" : "pldt.com",
"_id.l_id" : "zzzzz",
"_id.o_id" : [
{
"or_id" : "wwww"
},
{
"o_id" : "rrrrrr"
}
],
"_id.browser" : "Chrome",
"_id.platform" : "Android",
"_id.isp" : "PLDT",
"_id.org" : "PLDT",
"_id.country" : "China",
"_id.city" : "",
"_id.n_id" : NumberLong(5),
"_id.device_type" : "mobile",
"_id.cd" : "0",
"_id.t_a_id" : "423525",
"_id.c1" : "1",
"_id.c2" : "2",
"_id.c3" : "3",
"_id.c4" : "4",
"_id.set" : "1",
"_id.device_name" : "Samsung Galaxy Note 4"
}
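For context, the statement is issued roughly like this (only the filter is taken from the real workload; the $inc body and the counter field name below are placeholders, not the actual update document):

db.map_collections1.update(
    {
        "_id.date" : "2016-10-03",
        "_id.week" : "Monday",
        // ... all remaining "_id.*" fields exactly as listed above ...
        "_id.device_name" : "Samsung Galaxy Note 4"
    },
    { $inc : { "count" : NumberLong(1) } },  // placeholder update body
    { upsert : true }
)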
I have a compound index that covers all of these fields, but it does not seem to be used.
{
"v" : 1,
"key" : {
"_id.date" : 1.0000000000000000,
"_id.week" : 1.0000000000000000,
"_id.hour" : 1.0000000000000000,
"_id.s_id" : 1.0000000000000000,
"_id.c_id" : 1.0000000000000000,
"_id.domain" : 1.0000000000000000,
"_id.l_id" : 1.0000000000000000,
"_id.o_id" : 1.0000000000000000,
"_id.browser" : 1.0000000000000000,
"_id.platform" : 1.0000000000000000,
"_id.isp" : 1.0000000000000000,
"_id.org" : 1.0000000000000000,
"_id.country" : 1.0000000000000000,
"_id.city" : 1.0000000000000000,
"_id.n_id" : 1.0000000000000000,
"_id.device_type" : 1.0000000000000000,
"_id.cloaked" : 1.0000000000000000,
"_id.t_a_id" : 1.0000000000000000,
"_id.c1" : 1.0000000000000000,
"_id.c2" : 1.0000000000000000,
"_id.c3" : 1.0000000000000000,
"_id.c4" : 1.0000000000000000,
"_id.set" : 1.0000000000000000,
"_id.device_name" : 1.0000000000000000
},
"name" : "upsert_20160804",
"ns" : "test2.map_collections1",
"background" : true
}
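The index was built in the background with a custom name, along these lines (sketch; the key list is abbreviated here, it contains every field from the key document above in the same order):

db.map_collections1.ensureIndex(
    {
        "_id.date" : 1,
        "_id.week" : 1,
        // ... every "_id.*" field from the key document above, in order ...
        "_id.device_name" : 1
    },
    { name : "upsert_20160804", background : true }
)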
I also have a few other compound indexes, and when I run explain(), the query uses a different one:
/* 1 */
{
"cursor" : "BtreeCursor _id.date_1__id.c_id_1",
"isMultiKey" : false,
"n" : 1,
"nscannedObjects" : 29789,
"nscanned" : 29789,
"nscannedObjectsAllPlans" : 134802,
"nscannedAllPlans" : 148948,
"scanAndOrder" : false,
"indexOnly" : false,
"nYields" : 1163,
"nChunkSkips" : 0,
"millis" : 527,
"indexBounds" : {
"_id.date" : [
[
"2016-10-03",
"2016-10-03"
]
],
"_id.c_id" : [
[
"yyyy",
"yyyy"
]
]
},
"server" : "mongo:27017",
"filterSet" : false,
"stats" : {
"type" : "KEEP_MUTATIONS",
"works" : 29791,
"yields" : 1163,
"unyields" : 1163,
"invalidates" : 0,
"advanced" : 1,
"needTime" : 29788,
"needFetch" : 0,
"isEOF" : 1,
"children" : [
{
"type" : "FETCH",
"works" : 29790,
"yields" : 1163,
"unyields" : 1163,
"invalidates" : 0,
"advanced" : 1,
"needTime" : 29788,
"needFetch" : 0,
"isEOF" : 1,
"alreadyHasObj" : 0,
"forcedFetches" : 0,
"matchTested" : 1,
"children" : [
{
"type" : "IXSCAN",
"works" : 29790,
"yields" : 1163,
"unyields" : 1163,
"invalidates" : 0,
"advanced" : 29789,
"needTime" : 0,
"needFetch" : 0,
"isEOF" : 1,
"keyPattern" : "{ _id.date: 1, _id.c_id: 1 }",
"isMultiKey" : 0,
"boundsVerbose" : "field #0['_id.date']: [\"2016-10-03\", \"2016-10-03\"], field #1['_id.c_id']: [\"yyyy\", \"yyyy\"]",
"yieldMovedCursor" : 0,
"dupsTested" : 0,
"dupsDropped" : 0,
"seenInvalidated" : 0,
"matchTested" : 0,
"keysExamined" : 29789,
"children" : []
}
]
}
]
}
}
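To check whether the big compound index would actually be faster, I can force it on an equivalent find() and compare the explain() output (update() in MongoDB 2.6 does not accept a hint, so this only tests the read side):

db.map_collections1.find(
    { /* the same "_id.*" filter as in the update query above */ }
).hint("upsert_20160804").explain()

If the forced plan is fast, clearing the cached plan for this query shape with db.map_collections1.getPlanCache().clear() and re-running the update is another thing to try.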
I am a bit at a loss here, because this query should average well under 200 ms, and the slowdown is hurting the performance of the script that relies on it.