diff --git a/.gitignore b/.gitignore
index b947077..7ca6d1b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,2 @@
 node_modules/
-dist/
+# dist/
diff --git a/dist/cache.js b/dist/cache.js
new file mode 100644
index 0000000..a739e23
--- /dev/null
+++ b/dist/cache.js
@@ -0,0 +1,242 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+  value: true
+});
+exports.prepFields = prepFields;
+exports.getNestedValue = getNestedValue;
+exports.createCachingMethods = exports.stringToId = exports.isValidObjectIdString = exports.idToString = void 0;
+
+var _dataloader = _interopRequireDefault(require("dataloader"));
+
+var _mongodb = require("mongodb");
+
+var _bson = require("bson");
+
+var _helpers = require("./helpers");
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+const idToString = id => {
+  if (id instanceof _mongodb.ObjectId) {
+    return id.toHexString();
+  } else {
+    return id && id.toString ? id.toString() : id;
+  }
+}; // https://www.geeksforgeeks.org/how-to-check-if-a-string-is-valid-mongodb-objectid-in-nodejs/
+
+
+exports.idToString = idToString;
+
+const isValidObjectIdString = string => _mongodb.ObjectId.isValid(string) && String(new _mongodb.ObjectId(string)) === string;
+
+exports.isValidObjectIdString = isValidObjectIdString;
+
+const stringToId = string => {
+  if (string instanceof _mongodb.ObjectId) {
+    return string;
+  }
+
+  if (isValidObjectIdString(string)) {
+    return new _mongodb.ObjectId(string);
+  }
+
+  return string;
+};
+
+exports.stringToId = stringToId;
+
+function prepFields(fields) {
+  const cleanedFields = {};
+  Object.keys(fields).sort().forEach(key => {
+    if (typeof key !== 'undefined') {
+      cleanedFields[key] = Array.isArray(fields[key]) ? fields[key] : [fields[key]];
+    }
+  });
+  return {
+    loaderKey: _bson.EJSON.stringify(cleanedFields),
+    cleanedFields
+  };
+} // getNestedValue({ nested: { foo: 'bar' } }, 'nested.foo')
+// => 'bar'
+
+
+function getNestedValue(object, string) {
+  string = string.replace(/\[(\w+)\]/g, '.$1'); // convert indexes to properties
+
+  string = string.replace(/^\./, ''); // strip a leading dot
+
+  var a = string.split('.');
+
+  for (var i = 0, n = a.length; i < n; ++i) {
+    var k = a[i];
+
+    if (k in object) {
+      object = object[k];
+    } else {
+      return;
+    }
+  }
+
+  return object;
+} // https://github.com/graphql/dataloader#batch-function
+// "The Array of values must be the same length as the Array of keys."
+// "Each index in the Array of values must correspond to the same index in the Array of keys."
+
+
+const orderDocs = (fieldsArray, docs) => fieldsArray.map(fields => docs.filter(doc => {
+  for (let fieldName of Object.keys(fields)) {
+    const fieldValue = getNestedValue(fields, fieldName);
+    if (typeof fieldValue === 'undefined') continue;
+    const filterValuesArr = Array.isArray(fieldValue) ? fieldValue.map(val => idToString(val)) : [idToString(fieldValue)];
+    const docValue = doc[fieldName];
+    const docValuesArr = Array.isArray(docValue) ? docValue.map(val => idToString(val)) : [idToString(docValue)];
+    let isMatch = false;
+
+    for (const filterVal of filterValuesArr) {
+      if (docValuesArr.includes(filterVal)) {
+        isMatch = true;
+      }
+    }
+
+    if (!isMatch) return false;
+  }
+
+  return true;
+}));
+
+const createCachingMethods = ({
+  collection,
+  model,
+  cache
+}) => {
+  const loader = new _dataloader.default(async ejsonArray => {
+    const fieldsArray = ejsonArray.map(_bson.EJSON.parse);
+    (0, _helpers.log)('fieldsArray', fieldsArray);
+    const filterArray = fieldsArray.reduce((filterArray, fields) => {
+      const existingFieldsFilter = filterArray.find(filter => [...Object.keys(filter)].sort().join() === [...Object.keys(fields)].sort().join());
+      const filter = existingFieldsFilter || {};
+
+      for (const fieldName in fields) {
+        if (typeof fields[fieldName] === 'undefined') continue;
+        if (!filter[fieldName]) filter[fieldName] = {
+          $in: []
+        };
+        let newVals = Array.isArray(fields[fieldName]) ? fields[fieldName] : [fields[fieldName]];
+        filter[fieldName].$in = [...filter[fieldName].$in, ...newVals.map(stringToId).filter(val => !filter[fieldName].$in.includes(val))];
+      }
+
+      if (existingFieldsFilter) return filterArray;
+      return [...filterArray, filter];
+    }, []);
+    (0, _helpers.log)('filterArray: ', filterArray);
+    const filter = filterArray.length === 1 ? filterArray[0] : {
+      $or: filterArray
+    };
+    (0, _helpers.log)('filter: ', filter);
+    const findPromise = model ? model.find(filter).lean({
+      defaults: true
+    }).exec() : collection.find(filter).toArray();
+    const results = await findPromise;
+    (0, _helpers.log)('results: ', results);
+    const orderedDocs = orderDocs(fieldsArray, results);
+    (0, _helpers.log)('orderedDocs: ', orderedDocs);
+    return orderedDocs;
+  });
+  const cachePrefix = `mongo-${(0, _helpers.getCollection)(collection).collectionName}-`;
+  const methods = {
+    findOneById: async (_id, {
+      ttl
+    } = {}) => {
+      const cacheKey = cachePrefix + idToString(_id);
+      const cacheDoc = await cache.get(cacheKey);
+      (0, _helpers.log)('findOneById found in cache:', cacheDoc);
+
+      if (cacheDoc) {
+        return _bson.EJSON.parse(cacheDoc);
+      }
+
+      (0, _helpers.log)(`Dataloader.load: ${_bson.EJSON.stringify({
+        _id
+      })}`);
+      const docs = await loader.load(_bson.EJSON.stringify({
+        _id
+      }));
+      (0, _helpers.log)('Dataloader.load returned: ', docs);
+
+      if (Number.isInteger(ttl)) {
+        // https://github.com/apollographql/apollo-server/tree/master/packages/apollo-server-caching#apollo-server-caching
+        cache.set(cacheKey, _bson.EJSON.stringify(docs[0]), {
+          ttl
+        });
+      }
+
+      return docs[0];
+    },
+    findManyByIds: (ids, {
+      ttl
+    } = {}) => {
+      return Promise.all(ids.map(id => methods.findOneById(id, {
+        ttl
+      })));
+    },
+    findByFields: async (fields, {
+      ttl
+    } = {}) => {
+      const {
+        cleanedFields,
+        loaderKey
+      } = prepFields(fields);
+      const cacheKey = cachePrefix + loaderKey;
+      const cacheDoc = await cache.get(cacheKey);
+
+      if (cacheDoc) {
+        return _bson.EJSON.parse(cacheDoc);
+      }
+
+      const fieldNames = Object.keys(cleanedFields);
+      let docs;
+
+      if (fieldNames.length === 1) {
+        const field = cleanedFields[fieldNames[0]];
+        const fieldArray = Array.isArray(field) ? field : [field];
+        const docsArray = await Promise.all(fieldArray.map(value => {
+          const filter = {};
+          filter[fieldNames[0]] = value;
+          return loader.load(_bson.EJSON.stringify(filter));
+        }));
+        docs = [].concat(...docsArray);
+      } else {
+        docs = await loader.load(loaderKey);
+      }
+
+      if (Number.isInteger(ttl)) {
+        // https://github.com/apollographql/apollo-server/tree/master/packages/apollo-server-caching#apollo-server-caching
+        cache.set(cacheKey, _bson.EJSON.stringify(docs), {
+          ttl
+        });
+      }
+
+      return docs;
+    },
+    deleteFromCacheById: async _id => {
+      loader.clear(_bson.EJSON.stringify({
+        _id
+      }));
+      const cacheKey = cachePrefix + idToString(_id);
+      (0, _helpers.log)('Deleting cache key: ', cacheKey);
+      await cache.delete(cacheKey);
+    },
+    deleteFromCacheByFields: async fields => {
+      const {
+        loaderKey
+      } = prepFields(fields);
+      const cacheKey = cachePrefix + loaderKey;
+      loader.clear(loaderKey);
+      await cache.delete(cacheKey);
+    }
+  };
+  return methods;
+};
+
+exports.createCachingMethods = createCachingMethods;
\ No newline at end of file
diff --git a/dist/datasource.js b/dist/datasource.js
new file mode 100644
index 0000000..c4918a9
--- /dev/null
+++ b/dist/datasource.js
@@ -0,0 +1,50 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+  value: true
+});
+exports.MongoDataSource = void 0;
+
+var _apolloDatasource = require("apollo-datasource");
+
+var _apolloServerErrors = require("apollo-server-errors");
+
+var _apolloServerCaching = require("apollo-server-caching");
+
+var _cache = require("./cache");
+
+var _helpers = require("./helpers");
+
+class MongoDataSource extends _apolloDatasource.DataSource {
+  constructor(collection) {
+    super();
+
+    if (!(0, _helpers.isCollectionOrModel)(collection)) {
+      throw new _apolloServerErrors.ApolloError('MongoDataSource constructor must be given a collection or Mongoose model');
+    }
+
+    if ((0, _helpers.isModel)(collection)) {
+      this.model = collection;
+      this.collection = this.model.collection;
+    } else {
+      this.collection = collection;
+    }
+  } // https://github.com/apollographql/apollo-server/blob/master/packages/apollo-datasource/src/index.ts
+
+
+  initialize({
+    context,
+    cache
+  } = {}) {
+    this.context = context;
+    const methods = (0, _cache.createCachingMethods)({
+      collection: this.collection,
+      model: this.model,
+      cache: cache || new _apolloServerCaching.InMemoryLRUCache()
+    });
+    Object.assign(this, methods);
+  }
+
+}
+
+exports.MongoDataSource = MongoDataSource;
\ No newline at end of file
diff --git a/dist/helpers.js b/dist/helpers.js
new file mode 100644
index 0000000..82fa3fd
--- /dev/null
+++ b/dist/helpers.js
@@ -0,0 +1,28 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+  value: true
+});
+exports.log = exports.getCollection = exports.isCollectionOrModel = exports.isModel = void 0;
+const TYPEOF_COLLECTION = 'object';
+
+const isModel = x => Boolean(typeof x === 'function' && x.prototype &&
+/**
+ * @see https://github.com/Automattic/mongoose/blob/b4e0ae52a57b886bc7046d38332ce3b38a2f9acd/lib/model.js#L116
+ */
+x.prototype.$isMongooseModelPrototype);
+
+exports.isModel = isModel;
+
+const isCollectionOrModel = x => Boolean(x && (typeof x === TYPEOF_COLLECTION || isModel(x)));
+
+exports.isCollectionOrModel = isCollectionOrModel;
+
+const getCollection = x => isModel(x) ? x.collection : x;
+
+exports.getCollection = getCollection;
+const DEBUG = false;
+
+const log = (...args) => DEBUG && console.log(...args);
+
+exports.log = log;
\ No newline at end of file
diff --git a/dist/index.js b/dist/index.js
new file mode 100644
index 0000000..1f1f965
--- /dev/null
+++ b/dist/index.js
@@ -0,0 +1,13 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+  value: true
+});
+Object.defineProperty(exports, "MongoDataSource", {
+  enumerable: true,
+  get: function () {
+    return _datasource.MongoDataSource;
+  }
+});
+
+var _datasource = require("./datasource");
\ No newline at end of file
diff --git a/index.d.ts b/index.d.ts
index f06b4dd..6936161 100644
--- a/index.d.ts
+++ b/index.d.ts
@@ -1,10 +1,12 @@
 declare module 'apollo-datasource-mongodb' {
   import { DataSource } from 'apollo-datasource'
-  import { Collection as MongoCollection, ObjectId } from 'mongodb'
+  import { Collection as MongoCollection, ObjectId as MongoDBObjectId } from 'mongodb'
   import {
     Collection as MongooseCollection,
     Document,
     Model as MongooseModel,
+    LeanDocument,
+    ObjectId as MongooseObjectId
   } from 'mongoose'
 
   export type Collection> = T extends Document
@@ -19,6 +21,8 @@ declare module 'apollo-datasource-mongodb' {
     ? U
     : Collection
 
+  type ObjectId = MongoDBObjectId | MongooseObjectId;
+
   export interface Fields {
     [fieldName: string]:
       | string
@@ -28,11 +32,13 @@ declare module 'apollo-datasource-mongodb' {
       | (string | number | boolean | ObjectId)[]
   }
 
+  type MongooseDocumentOrMongoCollection = MongoCollection | Document
+
   export interface Options {
     ttl: number
   }
 
-  export class MongoDataSource extends DataSource<
+  export abstract class MongoDataSource, TContext = any> extends DataSource<
     TContext
   > {
     protected context: TContext
@@ -41,22 +47,22 @@ declare module 'apollo-datasource-mongodb' {
 
     constructor(modelOrCollection: ModelOrCollection)
 
-    findOneById(
+    protected findOneById(
      id: ObjectId | string,
      options?: Options
-    ): Promise
+    ): Promise | null>
 
-    findManyByIds(
+    protected findManyByIds(
      ids: (ObjectId | string)[],
      options?: Options
-    ): Promise<(TData | null | undefined)[]>
+    ): Promise<(LeanDocument | null)[]>
 
-    findByFields(
+    protected findByFields(
      fields: Fields,
      options?: Options
-    ): Promise<(TData | null | undefined)[]>
+    ): Promise<(LeanDocument | null)[]>
 
-    deleteFromCacheById(id: ObjectId | string): Promise
-    deleteFromCacheByFields(fields: Fields): Promise
+    protected deleteFromCacheById(id: ObjectId | string): Promise
+    protected deleteFromCacheByFields(fields: Fields): Promise
   }
 }
diff --git a/package-lock.json b/package-lock.json
index 698f62a..02f3d0e 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -14463,6 +14463,15 @@
         "strip-ansi": "^5.1.0"
       }
     },
+    "string_decoder": {
+      "version": "1.1.1",
+      "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz",
+      "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==",
+      "dev": true,
+      "requires": {
+        "safe-buffer": "~5.1.0"
+      }
+    },
     "strip-ansi": {
       "version": "5.2.0",
       "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz",
diff --git a/src/cache.js b/src/cache.js
index d36fdc6..a33ace3 100644
--- a/src/cache.js
+++ b/src/cache.js
@@ -135,7 +135,10 @@ export const createCachingMethods = ({ collection, model, cache }) => {
   log('filter: ', filter)
 
   const findPromise = model
-    ? model.find(filter).exec()
+    ? model
+        .find(filter)
+        .lean({ defaults: true })
+        .exec()
     : collection.find(filter).toArray()
 
   const results = await findPromise
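
Note on usage: the following is a minimal sketch of how the data source built by this diff might be consumed from an Apollo Server project that depends on apollo-datasource-mongodb. It is not part of the change; the Users class, the field names, the ttl values, and the server wiring in the trailing comment are illustrative assumptions.

// Illustrative sketch only -- class name, fields, and TTLs are assumptions.
const { MongoDataSource } = require('apollo-datasource-mongodb')

class Users extends MongoDataSource {
  // findOneById / findByFields are attached by createCachingMethods()
  // when Apollo Server calls initialize() on the data source.
  getUser(userId) {
    return this.findOneById(userId, { ttl: 60 })
  }

  getUsersByGroup(groupId) {
    return this.findByFields({ groupId }, { ttl: 60 })
  }
}

// Wiring (assumed): pass a MongoDB collection or a Mongoose model.
// With a Mongoose model, this diff routes queries through
// model.find(filter).lean({ defaults: true }).exec(), so plain objects are
// returned instead of hydrated documents; the `defaults: true` option
// assumes the mongoose-lean-defaults plugin is applied to the schema.
//
//   new ApolloServer({
//     typeDefs,
//     resolvers,
//     dataSources: () => ({ users: new Users(UserModel) })
//   })

module.exports = { Users }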