Mirror of https://github.com/vale981/grapher (synced 2025-03-04 17:11:38 -05:00)
Added subbody capability to namedQuery

This commit is contained in:
parent e9167757b3
commit cd611a9166

10 changed files with 92 additions and 10 deletions
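In short, a named query exposed on the server now accepts a special `$body` parameter from the client, which is intersected with the server-side body so that only allowed fields are returned. A minimal sketch of the client-side usage, modeled on the test added in this commit (the exposure name `postListExposure` is taken from the test suite; the grapher import path and the callback shape are assumptions):

    import { createQuery } from 'meteor/cultofcoders:grapher';

    // Ask only for a subset ("subbody") of the exposed named query's body.
    const query = createQuery({
        postListExposure: {
            limit: 5,
            $body: {
                title: 1,
                group: {
                    name: 1,
                },
            },
        },
    });

    // Fields outside the exposed body (e.g. createdAt) are stripped server-side.
    query.fetch((err, posts) => {
        console.log(posts);
    });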
@@ -1,3 +1,8 @@
+## 1.3
+- Added link caching
+- Added named query results caching
+- Added subbody to NamedQuery
+
 ## 1.2.5
 - Support for promises via .fetchSync and .fetchOneSync for client-side queries
 - Support for autoremove from inverse side as well
LICENSE (2 changes)
@@ -1,4 +1,4 @@
-Copyright (c) 2016 Theodor Diaconu <theodor.diaconu@cultofcoders.com>
+Copyright (c) 2016-2018 Theodor Diaconu <theodor.diaconu@cultofcoders.com>
 
 Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
@@ -8,6 +8,9 @@ import recursiveCompose from '../../query/lib/recursiveCompose.js';
 import prepareForProcess from '../../query/lib/prepareForProcess.js';
 import deepClone from 'lodash.cloneDeep';
 import genCountEndpoint from '../../query/counts/genEndpoint.server';
+import {SimpleSchema} from 'meteor/aldeed:simple-schema';
 
+const specialParameters = ['$body'];
+
 _.extend(NamedQuery.prototype, {
     expose(config = {}) {
@@ -114,7 +117,7 @@ _.extend(NamedQuery.prototype, {
         }
 
         let params = _.extend({}, self.params, newParams);
-        let body = prepareForProcess(self.body, params);
+        const body = prepareForProcess(self.body, params);
 
         const rootNode = createGraph(self.collection, body);
 
@@ -123,16 +126,18 @@ _.extend(NamedQuery.prototype, {
     },
 
     _validateParams(params) {
-        if (params && this.exposeConfig.schema) {
+        if (this.exposeConfig.schema) {
+            const paramsToValidate = _.omit(params, ...specialParameters);
 
             if (process.env.NODE_ENV !== 'production') {
                 try {
-                    this._paramSchema.validate(params);
+                    this._paramSchema.validate(paramsToValidate);
                 } catch (validationError) {
                     console.error(`Invalid parameters supplied to query ${this.queryName}`, validationError);
                     throw validationError; // rethrow
                 }
             } else {
-                this._paramSchema.validate(params);
+                this._paramSchema.validate(paramsToValidate);
             }
         }
     }
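The `_validateParams` change above is needed because `$body` travels together with the regular query parameters but is not declared in the exposure's parameter schema; stripping the special parameters before validation keeps SimpleSchema from rejecting the request. A small illustration with hypothetical parameter values:

    import {_} from 'meteor/underscore';

    const specialParameters = ['$body'];
    const params = {limit: 5, $body: {title: 1}};

    // Only {limit: 5} is handed to this._paramSchema.validate(...);
    // the $body key never reaches schema validation.
    const paramsToValidate = _.omit(params, ...specialParameters);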
@@ -3,8 +3,14 @@ import postListExposure from './queries/postListExposure.js';
 
 const postList = createQuery('postList', {
     posts: {
-        $filter({filters, params}) {
-            filters.title = params.title
+        $filter({filters, options, params}) {
+            if (params.title) {
+                filters.title = params.title;
+            }
+
+            if (params.limit) {
+                options.limit = params.limit;
+            }
         },
         title: 1,
         author: {
@@ -70,4 +70,31 @@ describe('Named Query', function () {
             done();
         }, 500)
     });
 
+    it('Should allow to securely fetch a subbody of a namedQuery', function () {
+        const query = createQuery({
+            postListExposure: {
+                limit: 5,
+                $body: {
+                    title: 1,
+                    createdAt: 1, // should fail
+                    group: {
+                        name: 1,
+                        createdAt: 1, // should fail
+                    }
+                }
+            }
+        });
+
+        const data = query.fetch();
+
+        assert.isTrue(data.length > 1);
+
+        _.each(data, post => {
+            assert.isUndefined(post.createdAt);
+            assert.isUndefined(post.author);
+            assert.isObject(post.group);
+            assert.isUndefined(post.group.createdAt);
+        })
+    })
 });
@@ -4,7 +4,13 @@ import ReducerNode from '../nodes/reducerNode.js';
 import dotize from './dotize.js';
 import createReducers from '../reducers/lib/createReducers';
 
-const specialFields = ['$filters', '$options', '$postFilters', '$postOptions', '$postFilter']; //keep $postFilter for legacy support
+const specialFields = [
+    '$filters',
+    '$options',
+    '$postFilters',
+    '$postOptions',
+    '$postProcessing'
+];
 
 /**
  * Creates node objects from the body
lib/query/lib/intersectDeep.js (new file, 23 lines)
@@ -0,0 +1,23 @@
+import dot from 'dot-object';
+import {_} from 'meteor/underscore';
+
+/**
+ * Given a named query that has a specific body, you can query its subbody.
+ * This performs an intersection of the bodies allowed in each.
+ *
+ * @param allowedBody
+ * @param clientBody
+ */
+export default function (allowedBody, clientBody) {
+    const allowedBodyDot = _.keys(dot.dot(allowedBody));
+    const clientBodyDot = _.keys(dot.dot(clientBody));
+
+    const intersection = _.intersection(allowedBodyDot, clientBodyDot);
+
+    const build = {};
+    intersection.forEach(intersectedField => {
+        build[intersectedField] = 1;
+    });
+
+    return dot.object(build);
+}
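To make the intersection concrete, here is a small worked example of what the new helper returns (the body contents are illustrative; the dotted-key flattening and rebuilding come from `dot-object` as used in the file above):

    import intersectDeep from './intersectDeep';

    // What the server exposes vs. what the client asked for via $body.
    const allowedBody = {title: 1, group: {name: 1, createdAt: 1}};
    const clientBody = {title: 1, createdAt: 1, group: {name: 1}};

    // Both bodies flatten to dotted keys ('title', 'group.name', ...),
    // the key sets are intersected, and the result is rebuilt as an object:
    // {title: 1, group: {name: 1}}
    // The top-level createdAt is dropped because it is not in the allowed body,
    // and group.createdAt is dropped because the client did not request it.
    const result = intersectDeep(allowedBody, clientBody);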
@@ -37,6 +37,7 @@ export function applyPostFilters(node) {
         applyPostFilters(collectionNode);
     })
 }
 
 export function applyPostOptions(node) {
     const options = node.props.$postOptions;
     if (options) {
@@ -1,4 +1,5 @@
 import deepClone from 'lodash.cloneDeep';
+import intersectDeep from './intersectDeep';
 
 function defaultFilterFunction({
     filters,
@@ -62,6 +63,10 @@ function applyPagination(body, _params) {
 }
 
 export default (_body, _params = {}) => {
+    if (_params.$body) {
+        _body = intersectDeep(_body, _params.$body);
+    }
+
     let body = deepClone(_body);
     let params = deepClone(_params);
@@ -26,7 +26,10 @@ const COMMENT_TEXT_SAMPLES = [
 console.log('[testing] Loading test fixtures ...');
 
 let tags = TAGS.map(name => Tags.insert({name}));
-let groups = GROUPS.map(name => Groups.insert({name}));
+let groups = GROUPS.map(name => Groups.insert({
+    name,
+    createdAt: new Date(),
+}));
 let authors = _.range(AUTHORS).map(idx => {
     return Authors.insert({
         name: 'Author - ' + idx,
@@ -47,7 +50,8 @@ _.each(authors, (author) => {
 
 _.each(_.range(POST_PER_USER), (idx) => {
     let post = {
-        title: `User Post - ${idx}`
+        title: `User Post - ${idx}`,
+        createdAt: new Date(),
     };
 
     authorPostLink.add(post);