I have the datasets below and I want to merge the data across each group:
var data = {};
var datasets = [
  {"_id":"58d6c806d7c80d5d44a35204","particles":{"timestamp":[1490470918708,1490470962915,1490470967186],"Particles":["108","108","109"]}},
  {"_id":"58d6caf62552b75f26f56f91","particles":{"timestamp":[1490471670888,1490473309103],"Particles":["109","100"]}}
];
datasets.forEach(function(dataset, index) {
  for (var key in dataset.particles) {
    data[key] = dataset.particles[key];
  }
});
console.log(data);
Result:
{ timestamp: [ 1490471670888, 1490473309103 ],
  Particles: [ '109', '100' ] }
The arrays weren't merged. The result I'm after is:
{ timestamp: [ 1490470918708, 1490470962915, 1490470967186, 1490471670888, 1490473309103 ],
  Particles: [ '108', '108', '109', '109', '100' ] }
Any ideas?
Edit:
For very large datasets, would concat or push be faster?
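A minimal sketch of how one might compare the two (a hypothetical micro-benchmark, not part of the original question; the array sizes are arbitrary):
// Build a large array of chunks, then merge them once with concat
// and once with push.apply.
var chunks = [];
for (var i = 0; i < 1000; i++) {
  chunks.push(Array.from({ length: 1000 }, function (_, j) { return i * 1000 + j; }));
}

console.time('concat');
var a = [];
chunks.forEach(function (chunk) { a = a.concat(chunk); });  // new array each time
console.timeEnd('concat');

console.time('push');
var b = [];
chunks.forEach(function (chunk) { b.push.apply(b, chunk); });  // mutates in place
console.timeEnd('push');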
Answer 0 (score: 2)
You have to concatenate the arrays, not just reassign the value each time.
var data = {};
var datasets = [
  {"_id":"58d6c806d7c80d5d44a35204","particles":{"timestamp":[1490470918708,1490470962915,1490470967186],"Particles":["108","108","109"]}},
  {"_id":"58d6caf62552b75f26f56f91","particles":{"timestamp":[1490471670888,1490473309103],"Particles":["109","100"]}}
];
datasets.forEach(function(dataset, index) {
  for (var key in dataset.particles) {
    // Initialise the array the first time this key is seen.
    if (data[key] === undefined)
      data[key] = [];
    // Append this dataset's values for the key.
    data[key] = data[key].concat(dataset.particles[key]);
  }
});
console.log(data);
Answer 1 (score: 1)
Hopefully this is what you need:
var datasets = [
  {
    "_id": "58d6c806d7c80d5d44a35204",
    "particles": {
      "timestamp": [1490470918708, 1490470962915, 1490470967186],
      "Particles": ["108", "108", "109"]
    }
  },
  {
    "_id": "58d6caf62552b75f26f56f91",
    "particles": {
      "timestamp": [1490471670888, 1490473309103],
      "Particles": ["109", "100"]
    }
  }
];
var output = {};
datasets.forEach(function(dataset, index) {
  for (var key in dataset.particles) {
    if (typeof output[key] == 'undefined') {
      output[key] = [];
    }
    // Push each value onto the accumulated array for this key.
    for (var k of dataset.particles[key]) {
      output[key].push(k);
    }
  }
});
console.log(output);
Answer 2 (score: 1)
In a functional programming style, you can use reduce and Object.keys:
var datasets = [
  {"_id":"58d6c806d7c80d5d44a35204","particles":{"timestamp":[1490470918708,1490470962915,1490470967186],"Particles":["108","108","109"]}},
  {"_id":"58d6caf62552b75f26f56f91","particles":{"timestamp":[1490471670888,1490473309103],"Particles":["109","100"]}}
];
var data = datasets.reduce((acc, {particles}) =>
  Object.keys(particles).reduce((acc, key) =>
    (acc[key] = (acc[key] || []).concat(particles[key]), acc), acc
  ), {});
console.log(data);
An alternative using push instead of concat, in case of performance concerns:
var datasets = [
  {"_id":"58d6c806d7c80d5d44a35204","particles":{"timestamp":[1490470918708,1490470962915,1490470967186],"Particles":["108","108","109"]}},
  {"_id":"58d6caf62552b75f26f56f91","particles":{"timestamp":[1490471670888,1490473309103],"Particles":["109","100"]}}
];
var data = datasets.reduce((acc, {particles}) =>
  Object.keys(particles).reduce((acc, key) =>
    ((acc[key] = (acc[key] || [])).push(...particles[key]), acc), acc
  ), {});
console.log(data);
Note, however, that in this version push receives every individual array element of particles[key] as a separate argument, which consumes stack space, and stack space is limited. See "Is there a max number of arguments JavaScript functions can accept?". You can get around this limit by making a separate push call for each element of particles[key], but that hurts performance, so in the end you may be better off with the concat version.
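For completeness, a minimal sketch of the per-element workaround described above, reusing the same datasets variable (this exact loop structure is an illustration, not from the original answer):
// Per-element push: trades some speed for safety with very large arrays.
var data = datasets.reduce(function (acc, dataset) {
  Object.keys(dataset.particles).forEach(function (key) {
    acc[key] = acc[key] || [];
    dataset.particles[key].forEach(function (value) {
      acc[key].push(value); // one argument per call, so no stack-space risk
    });
  });
  return acc;
}, {});
console.log(data);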