mirror of
https://github.com/vale981/grapher
synced 2025-03-05 09:31:42 -05:00
Fixed #181 - deep caching
This commit is contained in:
parent
64fc26de69
commit
200d2fb2cc
10 changed files with 712 additions and 4 deletions
@@ -1,3 +1,16 @@
export const CacheSchema = new SimpleSchema({
    field: {type: String},
    body: {
        type: Object,
        blackbox: true,
    },
    bypassSchema: {
        type: Boolean,
        defaultValue: false,
        optional: true,
    }
});

export default new SimpleSchema({
    type: {
        type: String,
@@ -39,6 +52,10 @@ export default new SimpleSchema({
        type: Boolean,
        defaultValue: false,
        optional: true
    },
    cache: {
        type: CacheSchema,
        optional: true,
    }
});
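For context, the new cache option on a link definition is what gets validated against this CacheSchema. A minimal sketch of a cached link declaration, mirroring the test collections added later in this commit (the Posts/Authors names come from those tests):

Posts.addLinks({
    author: {
        type: 'one',
        collection: Authors,
        field: 'authorId',
        cache: {
            field: 'authorCache', // where the denormalized author data is stored
            body: {name: 1},      // which author fields get cached
            // bypassSchema is optional and defaults to false
        }
    }
});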
@@ -3,8 +3,10 @@ import LinkManyMeta from './linkTypes/linkManyMeta.js';
import LinkOne from './linkTypes/linkOne.js';
import LinkOneMeta from './linkTypes/linkOneMeta.js';
import LinkResolve from './linkTypes/linkResolve.js';
import ConfigSchema from './config.schema.js';
import ConfigSchema, {CacheSchema} from './config.schema.js';
import smartArguments from './linkTypes/lib/smartArguments';
import dot from 'dot-object';
import {_} from 'meteor/underscore';

export default class Linker {
    /**
@@ -22,9 +24,8 @@ export default class Linker {
        this._extendSchema();

        // initialize cascade removal hooks.
        if (linkConfig.autoremove) {
            this._initAutoremove();
        }
        this._initAutoremove();
        this._initCache();

        if (this.isVirtual()) {
            // if it's a virtual field make sure that when this is deleted, it will be removed from the references
@@ -355,6 +356,10 @@ export default class Linker {
    }

    _initAutoremove() {
        if (!this.linkConfig.autoremove) {
            return;
        }

        if (!this.isVirtual()) {
            this.mainCollection.after.remove((userId, doc) => {
                this.getLinkedCollection().remove({
@@ -374,4 +379,77 @@ export default class Linker {
            })
        }
    }

    _initCache() {
        if (!this.linkConfig.cache || !Meteor.isServer) {
            return;
        }

        CacheSchema.validate(this.linkConfig.cache);

        const packageExists = !!Package['herteby:denormalize'];
        if (!packageExists) {
            throw new Meteor.Error('missing-package', `Please add the herteby:denormalize package to your Meteor application in order to make caching work`)
        }

        const {field, body, bypassSchema} = this.linkConfig.cache;
        let cacheConfig;

        let referenceFieldSuffix = '';
        if (this.isMeta()) {
            referenceFieldSuffix = (this.isSingle() ? '._id' : ':_id');
        }

        if (this.isVirtual()) {
            let inversedLink = this.linkConfig.relatedLinker.linkConfig;

            let type = inversedLink.type == 'many' ? 'many-inverse' : 'inversed';

            cacheConfig = {
                type: type,
                collection: this.linkConfig.collection,
                fields: body,
                referenceField: inversedLink.field + referenceFieldSuffix,
                cacheField: field,
                bypassSchema: !!bypassSchema
            };
        } else {
            cacheConfig = {
                type: this.linkConfig.type,
                collection: this.linkConfig.collection,
                fields: body,
                referenceField: this.linkConfig.field + referenceFieldSuffix,
                cacheField: field,
                bypassSchema: !!bypassSchema
            };
        }

        this.mainCollection.cache(cacheConfig);
    }

    /**
     * Verifies if this linker is cached. It can be cached from the inverse side as well.
     *
     * @returns {boolean}
     * @private
     */
    isCached() {
        return !!this.linkConfig.cache;
    }

    /**
     * Verifies if the body of the linked element does not contain fields outside the cache body
     *
     * @param body
     * @returns {boolean}
     * @private
     */
    isSubBodyCache(body) {
        const cacheBody = this.linkConfig.cache.body;

        const cacheBodyFields = _.keys(dot.dot(cacheBody));
        const bodyFields = _.keys(dot.dot(body));

        return _.difference(bodyFields, cacheBodyFields).length === 0;
    }
}
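isSubBodyCache() is what decides whether a query can be answered from the cache field: after dot-flattening, every requested field must already be part of the cached body. A quick sketch of the intended behavior, assuming a hypothetical linker whose cache body is {name: 1, address: 1}:

// assuming linker.linkConfig.cache.body = {name: 1, address: 1}
linker.isSubBodyCache({name: 1});               // true  -> the cached data is enough
linker.isSubBodyCache({name: 1, createdAt: 1}); // false -> falls back to querying the linked collection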
@@ -40,6 +40,20 @@ export function createNodes(root) {
        let linker = root.collection.getLinker(fieldName);

        if (linker) {
            // check if it is a cached link
            // if yes, then we need to explicitly define this at collection level
            // so when we transform the data for delivery, we move it to the link name
            if (linker.isCached()) {
                if (linker.isSubBodyCache(body)) {
                    const cacheField = linker.linkConfig.cache.field;

                    root.snapCache(cacheField, fieldName);
                    addFieldNode(body, cacheField, root);

                    return;
                }
            }

            let subroot = new CollectionNode(linker.getLinkedCollection(), body, fieldName);
            root.add(subroot, linker);
@@ -8,6 +8,7 @@ import sift from 'sift';
import {Minimongo} from 'meteor/minimongo';

export default (node) => {
    snapBackCaches(node);
    applyReducers(node);
    cleanReducerLeftovers(node);
    applyPostFilters(node);
@@ -169,6 +170,30 @@ function storeMetadata(element, parentElement, storage, isVirtual) {
    }
}

function snapBackCaches(node) {
    node.collectionNodes.forEach(collectionNode => {
        snapBackCaches(collectionNode);
    });

    if (!_.isEmpty(node.snapCaches)) {
        // process stuff
        _.each(node.snapCaches, (linkName, cacheField) => {
            const isSingle = _.contains(node.snapCachesSingles, cacheField);
            node.results.forEach(result => {
                if (result[cacheField]) {
                    if (isSingle && _.isArray(result[cacheField])) {
                        result[linkName] = _.first(result[cacheField]);
                    } else {
                        result[linkName] = result[cacheField];
                    }

                    delete result[cacheField];
                }
            })
        })
    }
}

// /**
//  * @param elements
//  * @param storage
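In short, snapBackCaches() reshapes each result so the denormalized cache field reappears under the link name the query asked for. An illustrative (not asserted) example, using the Posts→author cache from the test collections below:

// stored/fetched shape: { title: 'Post 0', authorCache: {name: 'Author 0'} }
// delivered shape:      { title: 'Post 0', author: {name: 'Author 0'} }
// for single links cached from the inverse side the cache field holds an array,
// so _.first() unwraps it before the rename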
@@ -20,6 +20,8 @@ export default class CollectionNode {
        this.scheduledForDeletion = false;
        this.reducers = [];
        this.results = [];
        this.snapCaches = {}; // {cacheField: linkName}
        this.snapCachesSingles = []; // [cacheField1, cacheField2]
    }

    get collectionNodes() {
@@ -168,6 +170,20 @@ export default class CollectionNode {
            : (this.collection ? this.collection._name : 'N/A');
    }

    /**
     * This is used for caching links
     *
     * @param cacheField
     * @param subLinkName
     */
    snapCache(cacheField, subLinkName) {
        this.snapCaches[cacheField] = subLinkName;

        if (this.collection.getLinker(subLinkName).isOneResult()) {
            this.snapCachesSingles.push(cacheField);
        }
    }

    /**
     * This method verifies whether to remove the linkStorageField from the results
     * unless you specify it in your query.
lib/query/testing/link-cache/collections.js (new file, 117 lines)
@@ -0,0 +1,117 @@
import {Mongo} from 'meteor/mongo';

export const Authors = new Mongo.Collection('cache_authors');
export const AuthorProfiles = new Mongo.Collection('cache_author_profiles');
export const Posts = new Mongo.Collection('cache_posts');
export const Groups = new Mongo.Collection('cache_groups');
export const Categories = new Mongo.Collection('cache_categories');

Authors.remove({});
AuthorProfiles.remove({});
Posts.remove({});
Groups.remove({});
Categories.remove({});

Posts.addLinks({
    author: {
        type: 'one',
        collection: Authors,
        field: 'authorId',
        cache: {
            field: 'authorCache',
            body: {
                name: 1,
                address: 1,
            }
        }
    },
    categories: {
        type: 'many',
        metadata: true,
        collection: Categories,
        field: 'categoryIds',
        cache: {
            field: 'categoriesCache',
            body: {
                name: 1,
            }
        }
    }
});

Authors.addLinks({
    posts: {
        collection: Posts,
        inversedBy: 'author',
        cache: {
            field: 'postCache',
            body: {
                title: 1,
            }
        }
    },
    groups: {
        type: 'many',
        collection: Groups,
        field: 'groupIds',
        cache: {
            field: 'groupsCache',
            body: {
                name: 1,
            }
        }
    },
    profile: {
        type: 'one',
        metadata: true,
        collection: AuthorProfiles,
        field: 'profileId',
        unique: true,
        cache: {
            field: 'profileCache',
            body: {
                name: 1,
            }
        }
    }
});

AuthorProfiles.addLinks({
    author: {
        collection: Authors,
        inversedBy: 'profile',
        unique: true,
        cache: {
            field: 'authorCache',
            body: {
                name: 1,
            }
        }
    }
});

Groups.addLinks({
    authors: {
        collection: Authors,
        inversedBy: 'groups',
        cache: {
            field: 'authorsCache',
            body: {
                name: 1,
            }
        }
    }
});

Categories.addLinks({
    posts: {
        collection: Posts,
        inversedBy: 'categories',
        cache: {
            field: 'postsCache',
            body: {
                title: 1,
            }
        }
    }
});
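With these definitions, herteby:denormalize maintains the cache fields directly on the stored documents. Roughly, and only as an illustration of the expected shape (the tests in this commit assert behavior, not this exact layout), a post created by the fixtures would look like:

// illustrative document shape only
{
    title: 'Post 0',
    authorId: '...',
    authorCache: {name: 'Author 0'},           // 'one' link cache: a single object
    categoriesCache: [{name: 'Category 1'}],   // 'many' link cache: an array of objects
}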
lib/query/testing/link-cache/fixtures.js (new file, 75 lines)
@@ -0,0 +1,75 @@
import {Authors, Groups, Posts, Categories, AuthorProfiles} from './collections';

const GROUPS = 3;
const CATEGORIES = 3;
const AUTHOR_PER_GROUPS = 3;
const POSTS_PER_AUTHOR = 3;

export let categoryIds = [];
export let groupIds = [];
export let authorIds = [];
export let postIds = [];

for (let i = 0; i < CATEGORIES; i++) {
    const categoryId = Categories.insert({
        name: `Category ${i}`
    });

    categoryIds.push(categoryId);
}

for (let i = 0; i < GROUPS; i++) {
    const groupId = Groups.insert({
        name: `Group ${i}`
    });

    groupIds.push(groupId);
}

groupIds.forEach(groupId => {
    for (let i = 0; i < AUTHOR_PER_GROUPS; i++) {
        const authorId = Authors.insert({
            name: `Author ${authorIds.length}`,
            createdAt: new Date(),
        });

        const authorProfileId = AuthorProfiles.insert({
            name: `Author ${authorIds.length}`,
            createdAt: new Date(),
        });

        Authors.getLink(authorId, 'profile').set(authorProfileId);

        authorIds.push(authorId);

        // link it to the group
        const groupLink = Authors.getLink(authorId, 'groups');
        groupLink.add(groupId);

        for (let j = 0; j < POSTS_PER_AUTHOR; j++) {
            createPost(authorId);
        }
    }
});

function createPost(authorId) {
    const postId = Posts.insert({
        title: `Post ${postIds.length}`,
        createdAt: new Date(),
    });

    postIds.push(postId);

    const authorLink = Posts.getLink(postId, 'author');
    authorLink.set(authorId);

    const randomCategoryId = categoryIds[Math.floor(Math.random() * categoryIds.length)];

    const categoriesLink = Posts.getLink(postId, 'categories');
    categoriesLink.add(randomCategoryId, {
        createdAt: new Date(),
    });

    return postId;
}
lib/query/testing/link-cache/server.test.js (new file, 363 lines)
@@ -0,0 +1,363 @@
import './fixtures';
import {createQuery} from 'meteor/cultofcoders:grapher';
import {Authors, AuthorProfiles, Groups, Posts, Categories} from './collections';

describe('Query Link Cache', function () {
    it('Should work properly - One Direct', function () {
        let query = Posts.createQuery({
            $options: {limit: 5},
            author: {
                name: 1,
            }
        });

        let insideFind = false;
        stubFind(Authors, function () {
            insideFind = true;
        });

        // when fetching, Authors.find() should not be called
        let post = query.fetchOne();

        assert.isFalse(insideFind);
        assert.isObject(post.author);

        unstubFind(Authors);

        // now that we specify an additional field, it should bypass the cache
        query = Posts.createQuery({
            author: {
                name: 1,
                createdAt: 1,
            }
        });

        insideFind = false;
        stubFind(Authors, function () {
            insideFind = true;
        });

        query.fetch();
        assert.isTrue(insideFind);

        unstubFind(Authors);
    });

    it('Should work properly - One Inversed', function () {
        let query = Authors.createQuery({
            $options: {limit: 2},
            posts: {
                title: 1,
            }
        });

        let insideFind = false;
        stubFind(Posts, function () {
            insideFind = true;
        });

        let author = query.fetchOne();

        assert.isFalse(insideFind);
        assert.isArray(author.posts);
        assert.isObject(author.posts[0]);
        assert.isString(author.posts[0].title);

        unstubFind(Posts);

        // now that we specify an additional field, it should bypass the cache
        query = Authors.createQuery({
            $options: {limit: 2},
            posts: {
                title: 1,
                createdAt: 1,
            }
        });

        insideFind = false;
        stubFind(Posts, function () {
            insideFind = true;
        });

        query.fetch();
        assert.isTrue(insideFind);

        unstubFind(Posts);
    });

    it('Should work properly - One Meta Direct', function () {
        // console.log(Authors.find().fetch());

        let query = Authors.createQuery({
            $options: {limit: 5},
            profile: {
                name: 1,
            }
        });

        let insideFind = false;
        stubFind(AuthorProfiles, function () {
            insideFind = true;
        });

        let author = query.fetchOne();

        assert.isFalse(insideFind);
        assert.isObject(author.profile);

        unstubFind(AuthorProfiles);

        // now that we specify an additional field, it should bypass the cache
        query = Authors.createQuery({
            $options: {limit: 5},
            profile: {
                name: 1,
                createdAt: 1,
            }
        });

        insideFind = false;
        stubFind(AuthorProfiles, function () {
            insideFind = true;
        });

        query.fetch();
        assert.isTrue(insideFind);

        unstubFind(AuthorProfiles);
    });

    it('Should work properly - One Meta Inversed', function () {
        let query = AuthorProfiles.createQuery({
            $options: {limit: 5},
            author: {
                name: 1,
            }
        });

        let insideFind = false;
        stubFind(Authors, function () {
            insideFind = true;
        });

        let profile = query.fetchOne();

        assert.isFalse(insideFind);
        assert.isObject(profile.author);

        unstubFind(Authors);

        // now that we specify an additional field, it should bypass the cache
        query = AuthorProfiles.createQuery({
            $options: {limit: 5},
            author: {
                name: 1,
                createdAt: 1,
            }
        });

        insideFind = false;
        stubFind(Authors, function () {
            insideFind = true;
        });

        query.fetch();
        assert.isTrue(insideFind);

        unstubFind(Authors);
    });

    it('Should work properly - Many Direct', function () {
        let query = Authors.createQuery({
            $options: {limit: 5},
            groups: {
                name: 1,
            }
        });

        let insideFind = false;
        stubFind(Groups, function () {
            insideFind = true;
        });

        let author = query.fetchOne();

        assert.isFalse(insideFind);
        assert.isArray(author.groups);
        assert.isObject(author.groups[0]);
        assert.isString(author.groups[0].name);

        unstubFind(Groups);

        query = Authors.createQuery({
            $options: {limit: 5},
            groups: {
                name: 1,
                createdAt: 1,
            }
        });

        insideFind = false;
        stubFind(Groups, function () {
            insideFind = true;
        });

        query.fetch();
        assert.isTrue(insideFind);

        unstubFind(Groups);
    });

    it('Should work properly - Many Inversed', function () {
        let query = Groups.createQuery({
            $options: {limit: 5},
            authors: {
                name: 1,
            }
        });

        let insideFind = false;
        stubFind(Authors, function () {
            insideFind = true;
        });

        let group = query.fetchOne();

        assert.isFalse(insideFind);
        assert.isArray(group.authors);
        assert.isObject(group.authors[0]);
        assert.isString(group.authors[0].name);

        unstubFind(Authors);

        query = Groups.createQuery({
            $options: {limit: 5},
            authors: {
                name: 1,
                createdAt: 1,
            }
        });

        insideFind = false;
        stubFind(Authors, function () {
            insideFind = true;
        });

        query.fetch();
        assert.isTrue(insideFind);

        unstubFind(Authors);
    });

    it('Should work properly - Many Meta Direct', function () {
        // console.log(Posts.find({}, {limit: 2}).fetch());

        let query = Posts.createQuery({
            $options: {limit: 5},
            categories: {
                name: 1,
            }
        });

        let insideFind = false;
        stubFind(Categories, function () {
            insideFind = true;
        });

        // when fetching, Categories.find() should not be called
        let post = query.fetchOne();

        assert.isFalse(insideFind);
        assert.isArray(post.categories);
        assert.isObject(post.categories[0]);
        assert.isString(post.categories[0].name);

        unstubFind(Categories);

        // now that we specify an additional field, it should bypass the cache
        query = Posts.createQuery({
            categories: {
                name: 1,
                createdAt: 1,
            }
        });

        insideFind = false;
        stubFind(Categories, function () {
            insideFind = true;
        });

        query.fetch();
        assert.isTrue(insideFind);

        unstubFind(Categories);
    });

    it('Should work properly - Many Meta Inversed', function () {
        let query = Categories.createQuery({
            $options: {limit: 2},
            posts: {
                title: 1,
            }
        });

        let insideFind = false;
        stubFind(Posts, function () {
            insideFind = true;
        });

        let category = query.fetchOne();

        assert.isFalse(insideFind);
        assert.isArray(category.posts);
        assert.isObject(category.posts[0]);
        assert.isString(category.posts[0].title);

        unstubFind(Posts);

        // now that we specify an additional field, it should bypass the cache
        query = Categories.createQuery({
            $options: {limit: 2},
            posts: {
                title: 1,
                createdAt: 1,
            }
        });

        insideFind = false;
        stubFind(Posts, function () {
            insideFind = true;
        });

        query.fetch();
        assert.isTrue(insideFind);

        unstubFind(Posts);
    });
});

function stubFind(collection, callback) {
    if (!collection.oldFind) {
        collection.oldFind = collection.find.bind(collection);
        collection.oldAggregate = collection.aggregate.bind(collection);
    }

    collection.find = function () {
        callback();
        return this.oldFind.apply(collection, arguments);
    }.bind(collection);

    collection.aggregate = function () {
        callback();
        return this.oldAggregate.apply(collection, arguments);
    }.bind(collection);
}

function unstubFind(collection) {
    collection.find = collection.oldFind.bind(collection);
    collection.aggregate = collection.oldAggregate.bind(collection);

    delete collection.oldFind;
    delete collection.oldAggregate;
}
@@ -2,6 +2,7 @@ import { createQuery } from 'meteor/cultofcoders:grapher';
import Comments from './bootstrap/comments/collection.js';
import './metaFilters.server.test';
import './reducers.server.test';
import './link-cache/server.test';

describe('Hypernova', function () {
    it('Should fetch One links correctly', function () {
@@ -30,6 +30,7 @@ Package.onUse(function (api) {
        'reywood:publish-composite@1.4.2',
        'dburles:mongo-collection-instances@0.3.5',
        'tmeasday:check-npm-versions@0.3.1',
        'herteby:denormalize@0.6.5',
        'meteorhacks:aggregate@1.3.0',
    ];
@@ -50,6 +51,7 @@ Package.onTest(function (api) {
        'dburles:mongo-collection-instances@0.3.5',
        'tmeasday:check-npm-versions@0.3.1',
        'meteorhacks:aggregate@1.3.0',
        'herteby:denormalize@0.6.5',
        'mongo'
    ];