Handle denormalized fields in recursiveFetch on client

Berislav 2018-06-14 16:19:55 +02:00
parent 5631008ab6
commit 5a927d2714
2 changed files with 17 additions and 1 deletion

lib/query/lib/recursiveFetch.js (16 changes) Normal file → Executable file

@@ -31,8 +31,22 @@ function fetch(node, parentObject) {
_.each(node.collectionNodes, collectionNode => {
_.each(results, result => {
result[collectionNode.linkName] = fetch(collectionNode, result);
const collectionNodeResults = fetch(collectionNode, result);
result[collectionNode.linkName] = collectionNodeResults;
//delete result[node.linker.linkStorageField];
/**
 * Push into the results, because snapBackCaches() in prepareForDelivery does not work otherwise.
 * This is non-optimal: can we be sure that every item in results contains _id, and push an item
 * only if it is not already in the results?
 *
 * Other possible approaches:
 * - do something like assemble() in storeHypernovaResults
 * - pass node.results to the accessor above and find with sift
 */
const currentIds = _.pluck(collectionNode.results, '_id');
collectionNode.results.push(...collectionNodeResults.filter(res => !_.contains(currentIds, res._id)));
})
});
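The de-duplication added above follows a simple pattern: collect the _id values already present on collectionNode.results, then push only the newly fetched documents whose _id is not among them. A minimal standalone sketch of that pattern, assuming the underscore library used in the hunk (the mergeById name is illustrative only):

const _ = require('underscore');

// Push `incoming` documents into `existing`, skipping any _id that is already present.
// Mirrors the pattern in the hunk above; mergeById is an illustrative name.
function mergeById(existing, incoming) {
    const currentIds = _.pluck(existing, '_id');
    existing.push(...incoming.filter(doc => !_.contains(currentIds, doc._id)));
    return existing;
}

// Example usage:
const results = [{ _id: 'a', title: 'Post A' }];
mergeById(results, [{ _id: 'a', title: 'Post A' }, { _id: 'b', title: 'Post B' }]);
// results now holds the documents with _id 'a' and 'b', with no duplicates.

This only partially answers the open question in the comment: it still assumes every fetched item carries an _id, which may not hold if a projection excludes _id.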

@@ -201,6 +201,7 @@ describe('Client-side reducers', function() {
groupNames: 1,
groups: {
_id: 1,
name: 1,
},
},
});
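For orientation, this hunk extends a query body in the client-side reducer tests. A full query of that shape might look roughly like the following sketch, assuming Grapher's createQuery/fetch client API and test fixtures (a posts collection with a groupNames reducer and a groups link) that are not part of this diff:

import { createQuery } from 'meteor/cultofcoders:grapher';

// Sketch only: 'posts', the 'groupNames' reducer and the 'groups' link are
// assumed to be defined in the test fixtures, which this diff does not show.
const query = createQuery({
    posts: {
        groupNames: 1,   // reducer that depends on the linked groups
        groups: {        // linked collection node
            _id: 1,
            name: 1,     // field added by this commit's test change
        },
    },
});

query.fetch((error, posts) => {
    // on the client, fetch is asynchronous; each post should carry
    // groupNames computed from the fetched groups
});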
@@ -251,6 +252,7 @@ describe('Client-side reducers', function() {
data.posts.forEach(post => {
assert.isObject(post.authorCached);
assert.isDefined(post.authorCached.name);
// the raw denormalized cache field should not be present on the delivered result
assert.isUndefined(post.authorCache);
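Put differently, the assertions expect each delivered post to expose the computed authorCached value while the raw denormalized storage field is stripped before delivery. A rough sketch of the expected shape, with field names taken from the assertions above and values invented for illustration:

// Expected shape of a delivered post (illustrative values only):
const post = {
    _id: 'postId',
    authorCached: { name: 'John Smith' },  // reduced/cached value is kept
    // authorCache is absent: the raw denormalized storage field is not delivered
};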