I am trying to convert an array to a new set of arrays by value, in this case, id.
Input
let array = [
{"item": {"id": 111, "name": "item1"}, "qty": 1},
{"item": {"id": 222, "name": "item2"}, "qty": 2},
{"item": {"id": 222, "name": "item3"}, "qty": 3}
];
Desired Output
let newArray = [
[{"item": {"id": 111, "name": "item1"}, "qty": 1}],
[{"item": {"id": 222, "name": "item2"}, "qty": 2},
{"item": {"id": 222, "name": "item3"}, "qty": 3}]
];
Using a standard groupBy function, we can return two arrays sorted by id.
function groupItemBy(array, property) {
  var hash = {};
  for (var i = 0; i < array.length; i++) {
    if (!hash[array[i][property]]) hash[array[i][property]] = [];
    hash[array[i][property]].push(array[i]);
  }
  return hash;
}
However, when trying to map these to new arrays, the nested qty data is lost.
function parse() {
  let tmp = Object.values(groupItemBy(array.map(el => el.item), "id"));
  tmp.forEach(item => {
    console.log(item);
    // do something with each item in array
  })
}
Actual Output
let newArray = [
[{{"id": 111, "name": "item1"}],
[{"id": 222, "name": "item2"},
{"id": 222, "name": "item3"}]
];
How can the integrity of the associated data be maintained while grouping the original array into an array of sorted arrays?
You should really try lodash for this kind of stuff:
_.values(_.groupBy(array, 'item.id'))
– georg
3 Answers
For this to work you will have to somehow tell the function where the key property is located. One can imagine very complex, nested objects, and several might have the same property names, so it could even lead to ambiguity if there is no such specification.
One way to tackle this is to make the function aware of dot-separated properties (in one string) -- a kind of "path". In your case that would be item.id. With that information the function can know where to look for the id value (in the nested object item).
Obviously the function would split that string by those dots. It can then perform a reduce on the resulting array of property names to locate the key value for each object in the array.
Here is how that could look:
let cart = [{"item": {"id": 111,"name": "item1", }, "qty": 10,}, {"item": {"id": 222,"name": "item2"},"qty": 1}, {"item": {"id": 222,"name": "item3"},"qty": 1,}];
function groupItemBy(array, property) {
  var hash = {},
      props = property.split('.');
  for (var i = 0; i < array.length; i++) {
    var key = props.reduce(function(acc, prop) {
      return acc && acc[prop];
    }, array[i]);
    if (!hash[key]) hash[key] = [];
    hash[key].push(array[i]);
  }
  return hash;
}
let grouped = Object.values(groupItemBy(cart, 'item.id'));
console.log(grouped);
You can use reduce to check whether you have already seen the current key; if not, create it with an empty array as its value, and push the whole current object into that array. Then take the values of the result, and you are good to go.
Object.values(arr.reduce((result, obj) => {
  (result[obj.item.id] || (result[obj.item.id] = [])).push(obj);
  return result;
}, {}));
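For example, here is a minimal sketch assuming the question's input is bound to arr:
const arr = [
  {"item": {"id": 111, "name": "item1"}, "qty": 1},
  {"item": {"id": 222, "name": "item2"}, "qty": 2},
  {"item": {"id": 222, "name": "item3"}, "qty": 3}
];

const newArray = Object.values(arr.reduce((result, obj) => {
  (result[obj.item.id] || (result[obj.item.id] = [])).push(obj);
  return result;
}, {}));

// newArray keeps the full objects, qty included:
// [ [ { item: { id: 111, ... }, qty: 1 } ],
//   [ { item: { id: 222, ... }, qty: 2 }, { item: { id: 222, ... }, qty: 3 } ] ]
console.log(newArray);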
Or, you can use Lodash's groupBy, and then you'll have this:
Object.values(_.groupBy(arr, "item.id"));
Play with this bin to see the result in console - https://codesandbox.io/s/qx14372v74
Summary
const transform = (array) => Object.values(array.reduce(
  (all, curr) => {
    const key = curr.item.id;
    (all[key] || (all[key] = [])).push(curr);
    return all;
  }, {}
))
Using a Library
Using Ramda (disclaimer: I'm a Ramda author), I would do it like this, without thinking twice:
const transform = compose(values, groupBy(path(['item', 'id'])))
const results = transform(array)
Converting Library Code
Assuming we wouldn't want to include a library for this problem, I would work from that solution step-by-step. We first need to remove the compose, which is easy with just two functions. We start by adding an explicit parameter:
const transform = (array) => compose(values, groupBy(path(['item', 'id'])))(array),
and then inline the two calls from the compose:
const transform = (array) => Object.values(groupBy(path(['item', 'id']), array))
Then we can easily replace path(['item', 'id']) with obj => obj.item.id. Note that we lose a little safety here. We could add an exception if there is no item property. But doing it safely would add a lot more machinery.
const transform = (array) => Object.values(groupBy(obj => obj.item.id, array))
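If you did want a small measure of safety without all that machinery, one option is optional chaining; this is only a sketch (the safeKey and transformSafe names are mine, and it assumes the same groupBy call as above):
// Hypothetical guarded accessor: yields undefined instead of throwing
// when an element has no nested `item` object, so such entries simply
// group under the "undefined" key.
const safeKey = obj => obj?.item?.id;

// Drop-in replacement for the inline arrow above:
const transformSafe = (array) => Object.values(groupBy(safeKey, array))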
Then we could write a simple version of groupBy:
const groupBy = (fn) => (list) => list.reduce(
  (all, curr) => {
    const key = fn(curr);
    (all[key] || (all[key] = [])).push(curr);
    return all;
  },
  {}
)
and replace the Ramda groupBy with that, folding in our simple path replacement with the values we prefer:
const transform = (array) => Object.values(array.reduce(
  (all, curr) => {
    const key = (obj => obj.item.id)(curr);
    (all[key] || (all[key] = [])).push(curr);
    return all;
  }, {}
))
And with a little simplification, we get
const transform = (array) => Object.values(array.reduce(
  (all, curr) => {
    const key = curr.item.id;
    (all[key] || (all[key] = [])).push(curr);
    return all;
  }, {}
))
const array = [{"item": {"id": 111, "name": "item1"}, "qty": 1}, {"item": {"id": 222, "name": "item2"}, "qty": 2}, {"item": {"id": 222, "name": "item3"}, "qty": 3}];
console.log(transform(array));
Building our own Mini-Library
Alternatively, we could actually create equivalents of those Ramda functions, as they could be useful in all sorts of ways:
const groupBy = (fn) => (list) => list.reduce(
  (all, curr) => {
    const key = fn(curr);
    (all[key] || (all[key] = [])).push(curr);
    return all;
  },
  {}
)
const values = obj => Object.values(obj);
const pipe = (f1, ...fns) => (...args) => {
  return fns.reduce((res, fn) => fn(res), f1.apply(null, args));
};
const path = (nodes) => (obj) => nodes.reduce((o, node) => o[node], obj)
const transform = pipe(groupBy(path(['item', 'id'])), values)
const array = [{"item": {"id": 111, "name": "item1"}, "qty": 1}, {"item": {"id": 222, "name": "item2"}, "qty": 2}, {"item": {"id": 222, "name": "item3"}, "qty": 3}]
console.log(transform(array))
(Note that I switched from compose to pipe, simply because it is easier to implement quickly. They have the same behavior but take their lists of functions in opposite order.)