From 2ef84c29dd25e26118425b2e5ffbae3852fcaf53 Mon Sep 17 00:00:00 2001 From: matheusccastro Date: Sat, 4 May 2024 01:49:10 -0300 Subject: [PATCH 01/33] feat: making `redis-oplog` compatible with meteor 3.0. --- .npm/package/npm-shrinkwrap.json | 2 +- .versions | 1 - lib/cache/ObservableCollection.js | 77 ++++++++---------- lib/cache/mongoIdMap.js | 17 ++-- lib/init.js | 10 +-- lib/mongo/Mutator.js | 48 +++++------ lib/mongo/ObserveMultiplex.js | 89 +++++++++++---------- lib/mongo/PollingObserveDriver.js | 50 ++++++------ lib/mongo/RedisOplogObserveDriver.js | 15 ++-- lib/mongo/SyntheticMutator.js | 20 ++--- lib/mongo/allow-deny/docToValidate.js | 1 + lib/mongo/allow-deny/validatedInsert.js | 4 +- lib/mongo/allow-deny/validatedRemove.js | 6 +- lib/mongo/allow-deny/validatedUpdate.js | 6 +- lib/mongo/extendMongoCollection.js | 33 ++++++-- lib/mongo/extendObserveChanges.js | 4 - lib/mongo/lib/dispatchers.js | 42 +++++----- lib/mongo/lib/getMutationConfig.js | 6 +- lib/mongo/observeChanges.js | 10 ++- lib/processors/actions/reload.js | 18 ++--- lib/processors/actions/requery.js | 21 ++--- lib/processors/default.js | 24 +++--- lib/processors/direct.js | 28 +++---- lib/processors/limit-sort.js | 26 +++--- lib/processors/synthetic.js | 22 ++--- lib/redis/PubSubManager.js | 10 +-- lib/redis/RedisSubscriber.js | 8 +- lib/redis/RedisSubscriptionManager.js | 27 +++---- lib/redis/getRedisClient.js | 5 +- lib/utils/maybeWrapCallback.js | 14 ---- lib/utils/shouldPublicationBeWithPolling.js | 37 --------- lib/vent/Vent.js | 8 +- lib/vent/VentClient.js | 1 + package.js | 2 +- redis-oplog.js | 2 +- testing/polling/collections.js | 2 +- testing/publish-counts/collections.js | 3 - testing/publish-counts/server.js | 24 ------ testing/synthetic_mutators.js | 6 +- 39 files changed, 335 insertions(+), 394 deletions(-) delete mode 100644 lib/utils/maybeWrapCallback.js delete mode 100644 lib/utils/shouldPublicationBeWithPolling.js delete mode 100644 testing/publish-counts/collections.js delete mode 100644 testing/publish-counts/server.js diff --git a/.npm/package/npm-shrinkwrap.json b/.npm/package/npm-shrinkwrap.json index c857ab9b..a4ff58d2 100644 --- a/.npm/package/npm-shrinkwrap.json +++ b/.npm/package/npm-shrinkwrap.json @@ -1,5 +1,5 @@ { - "lockfileVersion": 1, + "lockfileVersion": 4, "dependencies": { "deep-extend": { "version": "0.6.0", diff --git a/.versions b/.versions index 03eb65d5..dfe4c52e 100644 --- a/.versions +++ b/.versions @@ -43,7 +43,6 @@ mongo@1.16.7 mongo-decimal@0.1.3 mongo-dev-server@1.1.0 mongo-id@1.0.8 -natestrauser:publish-performant-counts@0.1.2 npm-mongo@4.16.0 ordered-dict@1.1.0 practicalmeteor:mocha-core@1.0.1 diff --git a/lib/cache/ObservableCollection.js b/lib/cache/ObservableCollection.js index 127171df..6b2fbe12 100644 --- a/lib/cache/ObservableCollection.js +++ b/lib/cache/ObservableCollection.js @@ -21,8 +21,6 @@ const allowedOptions = [ 'namespaces', ]; -const { Matcher } = Minimongo; - export default class ObservableCollection { /** * Instantiate the collection @@ -38,28 +36,6 @@ export default class ObservableCollection { cursorDescription.collectionName ); - if (!this.collection) { - throw new Meteor.Error( - `We could not find the collection instance by name: "${ - this.collectionName - }", the cursor description was: ${JSON.stringify( - cursorDescription - )}` - ); - } - - // Here we apply the logic of changing the cursor based on the collection-level configuration - if (this.collection._redisOplog) { - const { cursor } = this.collection._redisOplog; - if (cursor) { - 
const context = DDP._CurrentPublicationInvocation.get(); - cursor.call( - context, - cursorDescription.options, - cursorDescription.selector - ); - } - } if (!this.collection) { throw new Meteor.Error( @@ -135,6 +111,21 @@ export default class ObservableCollection { ); } + async setupCollection() { + // Here we apply the logic of changing the cursor based on the collection-level configuration + if (this.collection._redisOplog) { + const { cursor } = this.collection._redisOplog; + if (cursor) { + const context = DDP._CurrentPublicationInvocation.get(); + await cursor.call( + context, + cursorDescription.options, + cursorDescription.selector + ); + } + } + } + /** * Function that checks whether or not the doc matches our filters * @@ -153,12 +144,12 @@ export default class ObservableCollection { * @param _id * @returns {boolean} */ - isEligibleByDB(_id) { + async isEligibleByDB(_id) { if (this.matcher) { - return !!this.collection.findOne( + return !!(await this.collection.findOneAsync( Object.assign({}, this.selector, { _id }), { fields: { _id: 1 } } - ); + )); } return true; @@ -167,17 +158,17 @@ export default class ObservableCollection { /** * Performs the initial search then puts them into the store. */ - init() { + async init() { if (this.__isInitialized) { return; // silently do nothing. } this.__isInitialized = true; - let data = this.cursor.fetch(); + let data = await this.cursor.fetchAsync(); - data.forEach(doc => { - this.add(doc, true); - }); + for (const doc of data) { + await this.add(doc, true); + } // This has too much control over multiplexer.. this.multiplexer.ready(); @@ -195,7 +186,7 @@ export default class ObservableCollection { * @param doc {Object} * @param safe {Boolean} If this is set to true, it assumes that the object is cleaned */ - add(doc, safe = false) { + async add(doc, safe = false) { doc = EJSON.clone(doc); if (!safe) { @@ -205,21 +196,21 @@ export default class ObservableCollection { } this.store.set(doc._id, doc); - this.multiplexer.added(doc._id, doc); + await this.multiplexer.added(doc._id, doc); } /** * We use this method when we receive updates for a document that is not yet in the observable collection store * @param docId */ - addById(docId) { + async addById(docId) { const { limit, skip, ...cleanedOptions } = this.options; - const doc = this.collection.findOne({ _id: docId }, cleanedOptions); + const doc = await this.collection.findOneAsync({ _id: docId }, cleanedOptions); this.store.set(docId, doc); if (doc) { - this.multiplexer.added(doc._id, doc); + await this.multiplexer.added(doc._id, doc); } } @@ -229,7 +220,7 @@ export default class ObservableCollection { * @param {object} doc * @param {array} modifiedFields */ - change(doc, modifiedFields) { + async change(doc, modifiedFields) { const docId = doc._id; const oldDoc = this.store.get(docId); if (oldDoc == null) { @@ -247,7 +238,7 @@ export default class ObservableCollection { ); if (!_.isEmpty(changed)) { - this.multiplexer.changed(docId, changed); + await this.multiplexer.changed(docId, changed); } } @@ -257,7 +248,7 @@ export default class ObservableCollection { * @param topLevelFields array * @private */ - changeSynthetic(docId, modifier, topLevelFields) { + async changeSynthetic(docId, modifier, topLevelFields) { if (!this.store.has(docId)) { return; } @@ -269,16 +260,16 @@ export default class ObservableCollection { var changed = DiffSequence.makeChangedFields(storedDoc, oldDoc); - this.multiplexer.changed(docId, changed); + await this.multiplexer.changed(docId, changed); } /** * @param 
docId */ - remove(docId) { + async remove(docId) { const doc = this.store.pop(docId); if (doc != null) { - this.multiplexer.removed(docId, doc); + await this.multiplexer.removed(docId, doc); } } diff --git a/lib/cache/mongoIdMap.js b/lib/cache/mongoIdMap.js index d1b8881f..bf338167 100644 --- a/lib/cache/mongoIdMap.js +++ b/lib/cache/mongoIdMap.js @@ -66,23 +66,24 @@ export class MongoIDMap { }); } - compareWith(other, callbacks) { + async compareWith(other, callbacks) { // operate on the _internal maps to avoid overhead of parsing id's. const leftMap = this._internal; const rightMap = other._internal; - leftMap.forEach((leftValue, key) => { + for (const [key, leftValue] of leftMap) { const rightValue = rightMap.get(key); if (rightValue != null) - callbacks.both && callbacks.both(this._idParse(key), leftValue, rightValue); + callbacks.both && await callbacks.both(this._idParse(key), leftValue, rightValue); else - callbacks.leftOnly && callbacks.leftOnly(this._idParse(key), leftValue); - }); + callbacks.leftOnly && await callbacks.leftOnly(this._idParse(key), leftValue); + } + if (callbacks.rightOnly) { - rightMap.forEach((rightValue, key) => { + for (const [key ,rightValue] of rightMap) { if (!leftMap.has(key)) - callbacks.rightOnly(this._idParse(key), rightValue); - }); + await callbacks.rightOnly(this._idParse(key), rightValue); + } } } diff --git a/lib/init.js b/lib/init.js index f56cd4e4..902a3efd 100644 --- a/lib/init.js +++ b/lib/init.js @@ -27,14 +27,12 @@ export default (config = {}) => { // this initializes the listener singleton with the proper onConnect functionality getRedisListener({ - onConnect() { + async onConnect() { // this will be executed initially, but since there won't be any observable collections, nothing will happen // PublicationFactory.reloadAll(); - RedisSubscriptionManager.getAllRedisSubscribers().forEach( - redisSubscriber => { - reload(redisSubscriber.observableCollection); - } - ); + for (const redisSubscriber of RedisSubscriptionManager.getAllRedisSubscribers()) { + await reload(redisSubscriber.observableCollection); + } }, }); diff --git a/lib/mongo/Mutator.js b/lib/mongo/Mutator.js index dd96345a..7f336bc1 100644 --- a/lib/mongo/Mutator.js +++ b/lib/mongo/Mutator.js @@ -49,8 +49,8 @@ export default class Mutator { } } - static insert(Originals, data, _config) { - const config = getMutationConfig(this, _config, { + static async insert(Originals, data, _config) { + const config = await getMutationConfig(this, _config, { doc: data, event: Events.INSERT }); @@ -64,7 +64,7 @@ export default class Mutator { } try { - const docId = Originals.insert.call(this, data); + const docId = await Originals.insert.call(this, data); // It's a callback if (_.isFunction(_config)) { @@ -77,10 +77,10 @@ export default class Mutator { let doc = { _id: docId }; if (!protectAgainstRaceConditions(this)) { - doc = Originals.findOne.call(this, docId); + doc = await Originals.findOne.call(this, docId); } - dispatchInsert( + await dispatchInsert( config.optimistic, this._name, config._channels, @@ -107,7 +107,7 @@ export default class Mutator { * @param callback * @returns {*} */ - static update(Originals, selector, modifier, _config, callback) { + static async update(Originals, selector, modifier, _config, callback) { if (_.isString(selector)) { selector = { _id: selector }; } @@ -117,7 +117,7 @@ export default class Mutator { _config = {}; } - const config = getMutationConfig(this, _config, { + const config = await getMutationConfig(this, _config, { event: Events.UPDATE, selector, 
modifier @@ -141,9 +141,9 @@ export default class Mutator { let docs; if (shouldIncludePrevDocument(this)) { - docs = this.find(selector, { ...findOptions, fields: {} }).fetch(); + docs = await this.find(selector, { ...findOptions, fields: {} }).fetchAsync(); } else { - docs = this.find(selector, findOptions).fetch(); + docs = await this.find(selector, findOptions).fetchAsync(); } let docIds = docs.map(doc => doc._id); @@ -170,7 +170,7 @@ export default class Mutator { }); try { - const result = Originals.update.call( + const result = await Originals.update.call( this, updateSelector, modifier, @@ -186,18 +186,18 @@ export default class Mutator { } if (!protectAgainstRaceConditions(this)) { - docs = this.find( + docs = await this.find( { _id: { $in: docIds } }, { ...findOptions, fields: {} } - ).fetch(); + ).fetchAsync(); } const { fields } = getFields(modifier); - dispatchUpdate( + await dispatchUpdate( config.optimistic, this._name, config._channels, @@ -226,7 +226,7 @@ export default class Mutator { * @param callback * @param docIds */ - static _handleUpsert( + static async _handleUpsert( Originals, selector, modifier, @@ -236,7 +236,7 @@ export default class Mutator { docs ) { try { - const data = Originals.update.call( + const data = await Originals.update.call( this, selector, modifier, @@ -257,10 +257,10 @@ export default class Mutator { }; if (!protectAgainstRaceConditions(this)) { - doc = this.findOne(doc._id); + doc = await this.findOneAsync(doc._id); } - dispatchInsert( + await dispatchInsert( config.optimistic, this._name, config._channels, @@ -283,9 +283,9 @@ export default class Mutator { } else { const { fields } = getFields(modifier); - docs = this.find(selector).fetch(); + docs = await this.find(selector).fetchAsync(); - dispatchUpdate( + await dispatchUpdate( config.optimistic, this._name, config._channels, @@ -319,10 +319,10 @@ export default class Mutator { * @param _config * @returns {*} */ - static remove(Originals, selector, _config) { + static async remove(Originals, selector, _config) { selector = Mongo.Collection._rewriteSelector(selector); - const config = getMutationConfig(this, _config, { + const config = await getMutationConfig(this, _config, { selector, event: Events.REMOVE }); @@ -347,7 +347,7 @@ export default class Mutator { } // TODO: optimization check if it has _id or _id with {$in} so we don't have to redo this. 
- const docs = this.find(selector, removeOptions).fetch(); + const docs = await this.find(selector, removeOptions).fetchAsync(); let docIds = docs.map(doc => doc._id); if (!selector._id) { @@ -355,7 +355,7 @@ export default class Mutator { } try { - const result = Originals.remove.call(this, removeSelector); + const result = await Originals.remove.call(this, removeSelector); if (_.isFunction(_config)) { const self = this; @@ -364,7 +364,7 @@ export default class Mutator { }); } - dispatchRemove( + await dispatchRemove( config.optimistic, this._name, config._channels, diff --git a/lib/mongo/ObserveMultiplex.js b/lib/mongo/ObserveMultiplex.js index 53e0c103..3ebe6e6f 100644 --- a/lib/mongo/ObserveMultiplex.js +++ b/lib/mongo/ObserveMultiplex.js @@ -6,8 +6,6 @@ import { _ } from 'meteor/underscore'; import { LocalCollection } from 'meteor/minimongo'; import OptimisticInvocation from './OptimisticInvocation'; -const Future = Npm.require('fibers/future'); - export function ObserveMultiplexer(options) { var self = this; @@ -23,9 +21,10 @@ export function ObserveMultiplexer(options) { self._ordered = options.ordered; self._onStop = options.onStop || function() {}; - self._queue = new Meteor._SynchronousQueue(); + self._queue = new Meteor._AsynchronousQueue(); self._handles = {}; - self._readyFuture = new Future(); + this._resolver = null; + this._readyPromise = new Promise(r => this._resolver = r).then(() => this._isReady = true); self._cache = new LocalCollection._CachingChangeObserver({ ordered: options.ordered, }); @@ -42,17 +41,9 @@ export function ObserveMultiplexer(options) { } Object.assign(ObserveMultiplexer.prototype, { - addHandleAndSendInitialAdds: function(handle) { + addHandleAndSendInitialAdds: async function(handle) { var self = this; - // Check this before calling runTask (even though runTask does the same - // check) so that we don't leak an ObserveMultiplexer on error by - // incrementing _addHandleTasksScheduledButNotPerformed and never - // decrementing it. - if (!self._queue.safeToRunTask()) - throw new Error( - "Can't call observeChanges from an observe callback on the same query" - ); ++self._addHandleTasksScheduledButNotPerformed; Package['facts-base'] && @@ -62,15 +53,16 @@ Object.assign(ObserveMultiplexer.prototype, { 1 ); - self._queue.runTask(function() { + self._queue.runTask(async function() { self._handles[handle._id] = handle; // Send out whatever adds we have so far (whether or not we the // multiplexer is ready). - self._sendAdds(handle); + await self._sendAdds(handle); --self._addHandleTasksScheduledButNotPerformed; }); + // *outside* the task, since otherwise we'd deadlock - self._readyFuture.wait(); + await this._readyPromise; }, // Remove an observe handle. If it was the last observe handle, call the @@ -79,7 +71,7 @@ Object.assign(ObserveMultiplexer.prototype, { // This is not synchronized with polls and handle additions: this means that // you can safely call it from within an observe callback, but it also means // that we have to be careful when we iterate over _handles. 
- removeHandle: function(id) { + removeHandle: async function(id) { var self = this; // This should not be possible: you can only call removeHandle by having @@ -103,10 +95,10 @@ Object.assign(ObserveMultiplexer.prototype, { _.isEmpty(self._handles) && self._addHandleTasksScheduledButNotPerformed === 0 ) { - self._stop(); + await self._stop(); } }, - _stop: function(options) { + _stop: async function(options) { var self = this; options = options || {}; @@ -117,7 +109,7 @@ Object.assign(ObserveMultiplexer.prototype, { // Call stop callback (which kills the underlying process which sends us // callbacks and removes us from the connection's dictionary). - self._onStop(); + await self._onStop(); Package['facts-base'] && Package['facts-base'].Facts.incrementServerFact( 'mongo-livedata', @@ -137,7 +129,12 @@ Object.assign(ObserveMultiplexer.prototype, { self._queue.queueTask(function() { if (self._ready()) throw Error("can't make ObserveMultiplex ready twice!"); - self._readyFuture.return(); + + if (!self._resolver) { + throw new Error("Missing resolver"); + } + + self._resolver(); }); }, @@ -147,13 +144,14 @@ Object.assign(ObserveMultiplexer.prototype, { // that meant that you managed to run the query once. It will stop this // ObserveMultiplex and cause addHandleAndSendInitialAdds calls (and thus // observeChanges calls) to throw the error. - queryError: function(err) { + queryError: async function(err) { var self = this; - self._queue.runTask(function() { + await self._queue.runTask(async function() { if (self._ready()) throw Error("can't claim query has an error after it worked!"); - self._stop({ fromQueryError: true }); - self._readyFuture.throw(err); + + await self._stop({ fromQueryError: true }); + throw err; }); }, @@ -162,12 +160,12 @@ Object.assign(ObserveMultiplexer.prototype, { // all handles. "ready" must have already been called on this multiplexer. onFlush: function(cb) { var self = this; - self._queue.queueTask(function() { + self._queue.queueTask(async function() { if (!self._ready()) throw Error( 'only call onFlush on a multiplexer that will be ready' ); - cb(); + await cb(); }); }, callbackNames: function() { @@ -177,15 +175,15 @@ Object.assign(ObserveMultiplexer.prototype, { else return ['added', 'changed', 'removed']; }, _ready: function() { - return this._readyFuture.isResolved(); + return !!this._isReady; }, - _applyCallback: function(callbackName, args) { + _applyCallback: async function(callbackName, args) { var self = this; const isOptimistic = !!OptimisticInvocation.get(); // TODO Add a debug message here const runType = isOptimistic ? 'runTask' : 'queueTask'; - self._queue[runType](function() { + await self._queue[runType](async function() { // If we stopped in the meantime, do nothing. if (!self._handles) return; @@ -194,7 +192,7 @@ Object.assign(ObserveMultiplexer.prototype, { // state from their arguments (assuming that their supplied callbacks // don't) and skip this clone. Currently 'changed' hangs on to state // though. - self._cache.applyChange[callbackName].apply( + await self._cache.applyChange[callbackName].apply( null, EJSON.clone(args) ); @@ -213,7 +211,7 @@ Object.assign(ObserveMultiplexer.prototype, { // can continue until these are done. (But we do have to be careful to not // use a handle that got removed, because removeHandle does not use the // queue; thus, we iterate over an array of keys that we control.) 
- Object.keys(self._handles).forEach(function(handleId) { + for (const handleId of Object.keys(self._handles)) { var handle = self._handles && self._handles[handleId]; if (!handle) return; var callback = handle['_' + callbackName]; @@ -222,7 +220,7 @@ Object.assign(ObserveMultiplexer.prototype, { // We silence out removed exceptions if (callback === 'removed') { try { - callback.apply(null, EJSON.clone(args)); + await callback.apply(null, EJSON.clone(args)); } catch (e) { // Supressing `removed non-existent exceptions` if (!isRemovedNonExistent(e)) { @@ -230,9 +228,9 @@ Object.assign(ObserveMultiplexer.prototype, { } } } else { - callback && callback.apply(null, EJSON.clone(args)); + callback && await callback.apply(null, EJSON.clone(args)); } - }); + } }); }, @@ -240,21 +238,24 @@ Object.assign(ObserveMultiplexer.prototype, { // (the task that is processing the addHandleAndSendInitialAdds call). It // synchronously invokes the handle's added or addedBefore; there's no need to // flush the queue afterwards to ensure that the callbacks get out. - _sendAdds: function(handle) { + _sendAdds: async function(handle) { var self = this; - if (self._queue.safeToRunTask()) + if (!self._queue._runningOrRunScheduled) throw Error('_sendAdds may only be called from within a task!'); + var add = self._ordered ? handle._addedBefore : handle._added; if (!add) return; // note: docs may be an _IdMap or an OrderedDict - self._cache.docs.forEach(function(doc, id) { + await self._cache.docs.forEachAsync(async function(doc, id) { if (!_.has(self._handles, handle._id)) throw Error('handle got removed before sending initial adds!'); var fields = EJSON.clone(doc); delete fields._id; - if (self._ordered) add(id, fields, null); + if (self._ordered) + await add(id, fields, null); // we're going in order, so add at end - else add(id, fields); + else + await add(id, fields); }); }, }); @@ -273,8 +274,8 @@ export function ObserveHandle(multiplexer, callbacks) { // ordered observe where for some reason you don't get ordering data on // the adds. I dunno, we wrote tests for it, there must have been a // reason. - self._addedBefore = function(id, fields, before) { - callbacks.added(id, fields); + self._addedBefore = async function(id, fields, before) { + await callbacks.added(id, fields); }; } }); @@ -282,9 +283,9 @@ export function ObserveHandle(multiplexer, callbacks) { self._id = nextObserveHandleId++; } -ObserveHandle.prototype.stop = function() { +ObserveHandle.prototype.stop = async function() { var self = this; if (self._stopped) return; self._stopped = true; - self._multiplexer.removeHandle(self._id); + await self._multiplexer.removeHandle(self._id); }; diff --git a/lib/mongo/PollingObserveDriver.js b/lib/mongo/PollingObserveDriver.js index f2a6f1d0..676514b6 100644 --- a/lib/mongo/PollingObserveDriver.js +++ b/lib/mongo/PollingObserveDriver.js @@ -88,7 +88,8 @@ export default function PollingObserveDriver(options) { /* ms */ ); // XXX figure out if we still need a queue - self._taskQueue = new Meteor._SynchronousQueue(); + self._taskQueue = new Meteor._AsynchronousQueue(); + // TODO: should this be async? var listenersHandle = listenAll(self._cursorDescription, function( notification ) { @@ -130,8 +131,9 @@ export default function PollingObserveDriver(options) { self._stopCallbacks.push(function() { Meteor.clearInterval(intervalHandle); }); - } // Make sure we actually poll soon! + } + // Make sure we actually poll soon! 
self._unthrottledEnsurePollIsScheduled(); Package.facts && @@ -149,8 +151,8 @@ Object.assign(PollingObserveDriver.prototype, { if (self._pollsScheduledButNotStarted > 0) return; ++self._pollsScheduledButNotStarted; - self._taskQueue.queueTask(function() { - self._pollMongo(); + self._taskQueue.queueTask(async function() { + await self._pollMongo(); }); }, // test-only interface for controlling polling. @@ -161,13 +163,13 @@ Object.assign(PollingObserveDriver.prototype, { // though.) // // _resumePolling immediately polls, and allows further polls to occur. - _suspendPolling: function() { + _suspendPolling: async function() { var self = this; // Pretend that there's another poll scheduled (which will prevent // _ensurePollIsScheduled from queueing any more polls). ++self._pollsScheduledButNotStarted; // Now block until all currently running or scheduled polls are done. - self._taskQueue.runTask(function() {}); // Confirm that there is only one "poll" (the fake one we're pretending to + await self._taskQueue.runTask(function() {}); // Confirm that there is only one "poll" (the fake one we're pretending to // have) scheduled. if (self._pollsScheduledButNotStarted !== 1) @@ -176,7 +178,7 @@ Object.assign(PollingObserveDriver.prototype, { self._pollsScheduledButNotStarted ); }, - _resumePolling: function() { + _resumePolling: async function() { var self = this; // We should be in the same state as in the end of _suspendPolling. if (self._pollsScheduledButNotStarted !== 1) @@ -186,11 +188,11 @@ Object.assign(PollingObserveDriver.prototype, { ); // Run a poll synchronously (which will counteract the // ++_pollsScheduledButNotStarted from _suspendPolling). - self._taskQueue.runTask(function() { - self._pollMongo(); + await self._taskQueue.runTask(async function() { + await self._pollMongo(); }); }, - _pollMongo: function() { + _pollMongo: async function() { var self = this; --self._pollsScheduledButNotStarted; if (self._stopped) return; @@ -204,13 +206,14 @@ Object.assign(PollingObserveDriver.prototype, { oldResults = self._ordered ? [] : new LocalCollection._IdMap(); } - self._testOnlyPollCallback && self._testOnlyPollCallback(); // Save the list of pending writes which this round will commit. + self._testOnlyPollCallback && await self._testOnlyPollCallback(); // Save the list of pending writes which this round will commit. var writesForCycle = self._pendingWrites; self._pendingWrites = []; // Get the new query results. (This yields.) try { - newResults = self._synchronousCursor.getRawObjects(self._ordered); + // TODO: should this be async? + newResults = await self._synchronousCursor.getRawObjects(self._ordered); } catch (e) { if (first && typeof e.code === 'number') { // This is an error document sent to us by mongod, not a connection @@ -267,23 +270,24 @@ Object.assign(PollingObserveDriver.prototype, { // commmitted. (If new writes have shown up in the meantime, there'll // already be another _pollMongo task scheduled.) - self._multiplexer.onFlush(function() { - _.each(writesForCycle, function(w) { - w.committed(); - }); + await self._multiplexer.onFlush(async function() { + for (const w of writesForCycle) { + await w.committed(); + } }); }, - stop: function() { + stop: async function() { var self = this; self._stopped = true; - _.each(self._stopCallbacks, function(c) { - c(); - }); // Release any write fences that are waiting on us. 
+ for (const c of self._stopCallbacks) { + await c(); + } - _.each(self._pendingWrites, function(w) { - w.committed(); - }); + // Release any write fences that are waiting on us. + for (const w of self._pendingWrites) { + await w.committed(); + } Package.facts && Package.facts.Facts.incrementServerFact( diff --git a/lib/mongo/RedisOplogObserveDriver.js b/lib/mongo/RedisOplogObserveDriver.js index 11f45d99..4540462c 100644 --- a/lib/mongo/RedisOplogObserveDriver.js +++ b/lib/mongo/RedisOplogObserveDriver.js @@ -25,10 +25,9 @@ export default class RedisOplogObserveDriver { const { cursorDescription, multiplexer, matcher, sorter } = options; this._cursorDescription = options.cursorDescription; - this._mongoHandle = options.mongoHandle; this._multiplexer = options.multiplexer; - const strategy = getStrategy( + this.strategy = getStrategy( cursorDescription.selector, cursorDescription.options ); @@ -42,24 +41,28 @@ export default class RedisOplogObserveDriver { }); // Feels hackish to have it here, maybe move to ObservableCollections - if (strategy === Strategy.DEDICATED_CHANNELS) { + if (this.strategy === Strategy.DEDICATED_CHANNELS) { let oc = this.observableCollection; if (oc.selector._id) { oc.__containsOtherSelectorsThanId = Object.keys(oc.selector).length > 1; } } + } + + async init() { + await this.observableCollection.setupCollection(); // This is to mitigate the issue when we run init the first time on a subscription // And if you are using packages like reactive-publish // Because inside here we do a .find().fetch(), and that's considered reactive - Tracker.nonreactive(() => { - this.observableCollection.init(); + await Tracker.nonreactive(() => { + return this.observableCollection.init(); }); this.redisSubscriber = new RedisSubscriber( this.observableCollection, - strategy + this.strategy ); } diff --git a/lib/mongo/SyntheticMutator.js b/lib/mongo/SyntheticMutator.js index 5d2c9730..1b03e273 100644 --- a/lib/mongo/SyntheticMutator.js +++ b/lib/mongo/SyntheticMutator.js @@ -17,26 +17,26 @@ export default class SyntheticMutator { * @param channels * @param data */ - static publish(channels, data) { + static async publish(channels, data) { const client = getRedisPusher(); - channels.forEach(channel => { - client.publish(channel, EJSON.stringify(data)); - }); + for (const channel of channels) { + await client.publish(channel, EJSON.stringify(data)); + } } /** * @param channels * @param data */ - static insert(channels, data) { + static async insert(channels, data) { channels = SyntheticMutator._extractChannels(channels, data._id); if (!data._id) { data._id = Random.id(); } - SyntheticMutator.publish(channels, { + await SyntheticMutator.publish(channels, { [RedisPipe.EVENT]: Events.INSERT, [RedisPipe.SYNTHETIC]: true, [RedisPipe.DOC]: data, @@ -48,7 +48,7 @@ export default class SyntheticMutator { * @param _id * @param modifier */ - static update(channels, _id, modifier) { + static async update(channels, _id, modifier) { channels = SyntheticMutator._extractChannels(channels, _id); if (!containsOperators(modifier)) { @@ -67,17 +67,17 @@ export default class SyntheticMutator { [RedisPipe.MODIFIED_TOP_LEVEL_FIELDS]: topLevelFields, }; - SyntheticMutator.publish(channels, message); + await SyntheticMutator.publish(channels, message); } /** * @param channels * @param _id */ - static remove(channels, _id) { + static async remove(channels, _id) { channels = SyntheticMutator._extractChannels(channels, _id); - SyntheticMutator.publish(channels, { + await SyntheticMutator.publish(channels, { 
[RedisPipe.EVENT]: Events.REMOVE, [RedisPipe.SYNTHETIC]: true, [RedisPipe.DOC]: { _id }, diff --git a/lib/mongo/allow-deny/docToValidate.js b/lib/mongo/allow-deny/docToValidate.js index 2636685f..3d17c593 100644 --- a/lib/mongo/allow-deny/docToValidate.js +++ b/lib/mongo/allow-deny/docToValidate.js @@ -12,6 +12,7 @@ export default function docToValidate(validator, doc, generatedId) { if (generatedId !== null) { ret._id = generatedId } + // TODO: should we accept async transform functions? ret = validator.transform(ret) } return ret diff --git a/lib/mongo/allow-deny/validatedInsert.js b/lib/mongo/allow-deny/validatedInsert.js index ed7a8df3..b675bbc1 100644 --- a/lib/mongo/allow-deny/validatedInsert.js +++ b/lib/mongo/allow-deny/validatedInsert.js @@ -2,7 +2,7 @@ import { Meteor } from 'meteor/meteor' import { _ } from 'meteor/underscore' import docToValidate from './docToValidate' -export default function validatedInsert(userId, doc, generatedId) { +export default async function validatedInsert(userId, doc, generatedId) { // call user validators. // Any deny returns true means denied. if (_.any(this._validators.insert.deny, validator => @@ -19,5 +19,5 @@ export default function validatedInsert(userId, doc, generatedId) { // before actually inserting. if (generatedId !== null) doc._id = generatedId - this.insert(doc, {optimistic: true}) + await this.insertAsync(doc, { optimistic: true }); } diff --git a/lib/mongo/allow-deny/validatedRemove.js b/lib/mongo/allow-deny/validatedRemove.js index f325b795..61e8da15 100644 --- a/lib/mongo/allow-deny/validatedRemove.js +++ b/lib/mongo/allow-deny/validatedRemove.js @@ -3,7 +3,7 @@ import { Meteor } from 'meteor/meteor' import { _ } from 'meteor/underscore' import transformDoc from './transformDoc' -export default function validatedRemove(userId, selector) { +export default async function validatedRemove(userId, selector) { const findOptions = {transform: null} if (!this._validators.fetchAllFields) { findOptions.fields = {} @@ -14,7 +14,7 @@ export default function validatedRemove(userId, selector) { }) } - const doc = this._collection.findOne(selector, findOptions) + const doc = await this._collection.findOneAsync(selector, findOptions) if (!doc) { return 0 } @@ -35,5 +35,5 @@ export default function validatedRemove(userId, selector) { // rewrote the selector to {_id: {$in: [ids that we found]}} before passing to // Mongo to avoid races, but since selector is guaranteed to already just be // an ID, we don't have to any more. - return this.remove(selector, {optimistic: true}) + return this.removeAsync(selector, {optimistic: true}) } diff --git a/lib/mongo/allow-deny/validatedUpdate.js b/lib/mongo/allow-deny/validatedUpdate.js index 35f5f1e2..5c2a8fd7 100644 --- a/lib/mongo/allow-deny/validatedUpdate.js +++ b/lib/mongo/allow-deny/validatedUpdate.js @@ -28,7 +28,7 @@ const ALLOWED_UPDATE_OPERATIONS = { // control rules set by calls to `allow/deny` are satisfied. 
If all // pass, rewrite the mongo operation to use $in to set the list of // document ids to change ##ValidatedChange -export default function validatedUpdate(userId, selector, mutator, options) { +export default async function validatedUpdate(userId, selector, mutator, options) { check(mutator, Object); options = _.clone(options) || {}; @@ -90,7 +90,7 @@ export default function validatedUpdate(userId, selector, mutator, options) { }); } - const doc = this._collection.findOne(selector, findOptions); + const doc = await this._collection.findOneAsync(selector, findOptions); if (!doc) { // none satisfied! return 0; @@ -123,7 +123,7 @@ export default function validatedUpdate(userId, selector, mutator, options) { // avoid races, but since selector is guaranteed to already just be an ID, we // don't have to any more. - this.update( + await this.updateAsync( selector, mutator, Object.assign(options, { diff --git a/lib/mongo/extendMongoCollection.js b/lib/mongo/extendMongoCollection.js index 15b84dc1..b3a3af93 100644 --- a/lib/mongo/extendMongoCollection.js +++ b/lib/mongo/extendMongoCollection.js @@ -8,11 +8,11 @@ import extendObserveChanges from './extendObserveChanges'; export default () => { const Originals = { - insert: Mongo.Collection.prototype.insert, - update: Mongo.Collection.prototype.update, - remove: Mongo.Collection.prototype.remove, + insert: Mongo.Collection.prototype.insertAsync, + update: Mongo.Collection.prototype.updateAsync, + remove: Mongo.Collection.prototype.removeAsync, find: Mongo.Collection.prototype.find, - findOne: Mongo.Collection.prototype.findOne, + findOne: Mongo.Collection.prototype.findOneAsync, }; Mutator.init(); @@ -25,7 +25,11 @@ export default () => { * @param config * @returns {*} */ - insert(data, config) { + async insert(data, config) { + return Mutator.insert.call(this, Originals, data, config); + }, + + async insertAsync(data, config) { return Mutator.insert.call(this, Originals, data, config); }, @@ -36,7 +40,18 @@ export default () => { * @param callback * @returns {*} */ - update(selector, modifier, config, callback) { + async update(selector, modifier, config, callback) { + return Mutator.update.call( + this, + Originals, + selector, + modifier, + config, + callback + ); + }, + + async updateAsync(selector, modifier, config, callback) { return Mutator.update.call( this, Originals, @@ -52,7 +67,11 @@ export default () => { * @param config * @returns {*} */ - remove(selector, config) { + async remove(selector, config) { + return Mutator.remove.call(this, Originals, selector, config); + }, + + async removeAsync(selector, config) { return Mutator.remove.call(this, Originals, selector, config); }, diff --git a/lib/mongo/extendObserveChanges.js b/lib/mongo/extendObserveChanges.js index c3966ea2..22fc20e9 100644 --- a/lib/mongo/extendObserveChanges.js +++ b/lib/mongo/extendObserveChanges.js @@ -1,10 +1,6 @@ import { MongoInternals } from 'meteor/mongo'; import observeChanges from './observeChanges'; -export const MongoCursor = Object.getPrototypeOf( - MongoInternals.defaultRemoteCollectionDriver().mongo.find() -).constructor; - export default function() { MongoInternals.Connection.prototype._observeChanges = observeChanges; } diff --git a/lib/mongo/lib/dispatchers.js b/lib/mongo/lib/dispatchers.js index f49b1a72..7a3b09df 100644 --- a/lib/mongo/lib/dispatchers.js +++ b/lib/mongo/lib/dispatchers.js @@ -8,21 +8,21 @@ import getDedicatedChannel from '../../utils/getDedicatedChannel'; import Config from '../../config'; import OptimisticInvocation from 
'../OptimisticInvocation'; -const dispatchEvents = function(optimistic, collectionName, channels, events) { +const dispatchEvents = async function(optimistic, collectionName, channels, events) { if (optimistic) { - OptimisticInvocation.withValue(true, () => { - events.forEach(event => { + await OptimisticInvocation.withValue(true, async () => { + for (const event of events) { const docId = event[RedisPipe.DOC]._id; const dedicatedChannel = getDedicatedChannel( collectionName, docId ); - RedisSubscriptionManager.process(dedicatedChannel, event); - channels.forEach(channelName => { - RedisSubscriptionManager.process(channelName, event); - }); - }); + await RedisSubscriptionManager.process(dedicatedChannel, event); + for (const channelName of channels) { + await RedisSubscriptionManager.process(channelName, event); + } + } }); } @@ -30,21 +30,21 @@ const dispatchEvents = function(optimistic, collectionName, channels, events) { return; } - Meteor.defer(() => { + Meteor.defer(async () => { const client = getRedisPusher(); - events.forEach(event => { + for (const event of events) { const message = EJSON.stringify(event); - channels.forEach(channelName => { - client.publish(channelName, message); - }); + for (const channelName of channels) { + await client.publish(channelName, message); + } const docId = event[RedisPipe.DOC]._id; const dedicatedChannel = getDedicatedChannel(collectionName, docId); - client.publish(dedicatedChannel, message); - }); + await client.publish(dedicatedChannel, message); + } }); }; -const dispatchUpdate = function( +const dispatchUpdate = async function( optimistic, collectionName, channels, @@ -60,10 +60,10 @@ const dispatchUpdate = function( [RedisPipe.UID]: uid, })); - dispatchEvents(optimistic, collectionName, channels, events); + await dispatchEvents(optimistic, collectionName, channels, events); }; -const dispatchRemove = function(optimistic, collectionName, channels, docs) { +const dispatchRemove = async function(optimistic, collectionName, channels, docs) { const uid = optimistic ? RedisSubscriptionManager.uid : null; const events = docs.map(doc => ({ @@ -72,10 +72,10 @@ const dispatchRemove = function(optimistic, collectionName, channels, docs) { [RedisPipe.UID]: uid, })); - dispatchEvents(optimistic, collectionName, channels, events); + await dispatchEvents(optimistic, collectionName, channels, events); }; -const dispatchInsert = function(optimistic, collectionName, channels, doc) { +const dispatchInsert = async function(optimistic, collectionName, channels, doc) { const uid = optimistic ? 
RedisSubscriptionManager.uid : null; const event = { @@ -84,7 +84,7 @@ const dispatchInsert = function(optimistic, collectionName, channels, doc) { [RedisPipe.UID]: uid, }; - dispatchEvents(optimistic, collectionName, channels, [event]); + await dispatchEvents(optimistic, collectionName, channels, [event]); }; export { dispatchInsert, dispatchUpdate, dispatchRemove }; diff --git a/lib/mongo/lib/getMutationConfig.js b/lib/mongo/lib/getMutationConfig.js index 0a3cb78a..51fc4a93 100644 --- a/lib/mongo/lib/getMutationConfig.js +++ b/lib/mongo/lib/getMutationConfig.js @@ -6,7 +6,7 @@ import Config from '../../config'; * @param _config * @param mutationObject */ -export default function (collection, _config, mutationObject) { +export default async function (collection, _config, mutationObject) { const collectionName = collection._name; if (!_config || _.isFunction(_config)) { @@ -27,9 +27,9 @@ export default function (collection, _config, mutationObject) { let config = Object.assign({}, Config.mutationDefaults, defaultOverrides, _config); if (collection._redisOplog) { - const {mutation} = collection._redisOplog; + const { mutation } = collection._redisOplog; if (mutation) { - mutation.call(collection, config, mutationObject) + await mutation.call(collection, config, mutationObject) } } diff --git a/lib/mongo/observeChanges.js b/lib/mongo/observeChanges.js index 84a15781..cc4919a2 100644 --- a/lib/mongo/observeChanges.js +++ b/lib/mongo/observeChanges.js @@ -5,9 +5,10 @@ import RedisOplogObserveDriver from './RedisOplogObserveDriver'; import { ObserveMultiplexer, ObserveHandle } from './ObserveMultiplex'; import PollingObserveDriver from './PollingObserveDriver'; -export default function(cursorDescription, ordered, callbacks) { +export default async function(cursorDescription, ordered, callbacks) { const self = this; if (cursorDescription.options.tailable) { + // TODO: this is not blocking on current meteor 3.0 branch return self._observeChangesTailable( cursorDescription, ordered, @@ -48,9 +49,9 @@ export default function(cursorDescription, ordered, callbacks) { multiplexer = new ObserveMultiplexer({ ordered: ordered, - onStop: function() { + onStop: async function() { delete self._observeMultiplexers[observeKey]; - observeDriver.stop(); + await observeDriver.stop(); }, }); self._observeMultiplexers[observeKey] = multiplexer; @@ -132,11 +133,12 @@ export default function(cursorDescription, ordered, callbacks) { _testOnlyPollCallback: callbacks._testOnlyPollCallback, }); // This field is only set for use in tests. + await observeDriver.init(); multiplexer._observeDriver = observeDriver; } // Blocks until the initial adds have been sent. 
- multiplexer.addHandleAndSendInitialAdds(observeHandle); + await multiplexer.addHandleAndSendInitialAdds(observeHandle); return observeHandle; } diff --git a/lib/processors/actions/reload.js b/lib/processors/actions/reload.js index 49f9a305..45f2855b 100644 --- a/lib/processors/actions/reload.js +++ b/lib/processors/actions/reload.js @@ -7,24 +7,24 @@ import { MongoIDMap } from '../../cache/mongoIdMap'; * * @param observableCollection */ -export default function (observableCollection) { +export default async function (observableCollection) { const { store, cursor } = observableCollection; - const freshData = cursor.fetch(); + const freshData = await cursor.fetchAsync(); const newStore = new MongoIDMap(); freshData.forEach((doc) => newStore.set(doc._id, doc)); - store.compareWith(newStore, { - both(docId, oldDoc, newDoc) { + await store.compareWith(newStore, { + async both(docId, oldDoc, newDoc) { const modifiedFields = _.union(Object.keys(oldDoc), Object.keys(newDoc)); - observableCollection.change(newDoc, modifiedFields); + await observableCollection.change(newDoc, modifiedFields); }, - leftOnly(docId) { - observableCollection.remove(docId); + async leftOnly(docId) { + await observableCollection.remove(docId); }, - rightOnly(docId, newDoc) { - observableCollection.add(newDoc); + async rightOnly(docId, newDoc) { + await observableCollection.add(newDoc); }, }); } diff --git a/lib/processors/actions/requery.js b/lib/processors/actions/requery.js index 78b00f25..916333ab 100644 --- a/lib/processors/actions/requery.js +++ b/lib/processors/actions/requery.js @@ -8,25 +8,26 @@ import { MongoIDMap } from '../../cache/mongoIdMap'; * @param event * @param modifiedFields */ -export default function (observableCollection, newCommer, event, modifiedFields) { +export default async function (observableCollection, newCommer, event, modifiedFields) { const { store, selector, options } = observableCollection; const newStore = new MongoIDMap(); - const freshIds = observableCollection.collection.find( - selector, { ...options, fields: { _id: 1 } }).fetch(); + const freshIds = await observableCollection.collection.find( + selector, { ...options, fields: { _id: 1 } }).fetchAsync(); + freshIds.forEach(doc => newStore.set(doc._id, doc)); let added = false; - store.compareWith(newStore, { - leftOnly(docId) { - observableCollection.remove(docId); + await store.compareWith(newStore, { + async leftOnly(docId) { + await observableCollection.remove(docId); }, - rightOnly(docId) { + async rightOnly(docId) { if (newCommer && EJSON.equals(docId, newCommer._id)) { added = true; - observableCollection.add(newCommer); + await observableCollection.add(newCommer); } else { - observableCollection.addById(docId); + await observableCollection.addById(docId); } } }); @@ -38,6 +39,6 @@ export default function (observableCollection, newCommer, event, modifiedFields) && modifiedFields && !added && store.has(newCommer._id)) { - observableCollection.change(newCommer, modifiedFields); + await observableCollection.change(newCommer, modifiedFields); } } diff --git a/lib/processors/default.js b/lib/processors/default.js index 7af80f73..2cecc72b 100644 --- a/lib/processors/default.js +++ b/lib/processors/default.js @@ -6,16 +6,16 @@ import { Events } from '../constants'; * @param doc * @param modifiedFields */ -export default function(observableCollection, event, doc, modifiedFields) { +export default async function(observableCollection, event, doc, modifiedFields) { switch (event) { case Events.INSERT: - 
handleInsert(observableCollection, doc); + await handleInsert(observableCollection, doc); break; case Events.UPDATE: - handleUpdate(observableCollection, doc, modifiedFields); + await handleUpdate(observableCollection, doc, modifiedFields); break; case Events.REMOVE: - handleRemove(observableCollection, doc); + await handleRemove(observableCollection, doc); break; default: throw new Meteor.Error(`Invalid event specified: ${event}`); @@ -26,12 +26,12 @@ export default function(observableCollection, event, doc, modifiedFields) { * @param observableCollection * @param doc */ -const handleInsert = function(observableCollection, doc) { +const handleInsert = async function(observableCollection, doc) { if ( !observableCollection.contains(doc._id) && observableCollection.isEligible(doc) ) { - observableCollection.add(doc); + await observableCollection.add(doc); } }; @@ -40,16 +40,16 @@ const handleInsert = function(observableCollection, doc) { * @param doc * @param modifiedFields */ -const handleUpdate = function(observableCollection, doc, modifiedFields) { +const handleUpdate = async function(observableCollection, doc, modifiedFields) { if (observableCollection.isEligible(doc)) { if (observableCollection.contains(doc._id)) { - observableCollection.change(doc, modifiedFields); + await observableCollection.change(doc, modifiedFields); } else { - observableCollection.add(doc); + await observableCollection.add(doc); } } else { if (observableCollection.contains(doc._id)) { - observableCollection.remove(doc._id); + await observableCollection.remove(doc._id); } } }; @@ -58,8 +58,8 @@ const handleUpdate = function(observableCollection, doc, modifiedFields) { * @param observableCollection * @param doc */ -const handleRemove = function(observableCollection, doc) { +const handleRemove = async function(observableCollection, doc) { if (observableCollection.contains(doc._id)) { - observableCollection.remove(doc._id); + await observableCollection.remove(doc._id); } }; diff --git a/lib/processors/direct.js b/lib/processors/direct.js index af54d8ac..4561f8b3 100644 --- a/lib/processors/direct.js +++ b/lib/processors/direct.js @@ -6,16 +6,16 @@ import { Events } from '../constants'; * @param doc * @param modifiedFields */ -export default function(observableCollection, event, doc, modifiedFields) { +export default async function(observableCollection, event, doc, modifiedFields) { switch (event) { case Events.UPDATE: - handleUpdate(observableCollection, doc, modifiedFields); + await handleUpdate(observableCollection, doc, modifiedFields); break; case Events.REMOVE: - handleRemove(observableCollection, doc); + await handleRemove(observableCollection, doc); break; case Events.INSERT: - handleInsert(observableCollection, doc); + await handleInsert(observableCollection, doc); break; default: throw new Meteor.Error(`Invalid event specified: ${event}`); @@ -26,12 +26,12 @@ export default function(observableCollection, event, doc, modifiedFields) { * @param observableCollection * @param doc */ -const handleInsert = function(observableCollection, doc) { +const handleInsert = async function(observableCollection, doc) { if ( !observableCollection.contains(doc._id) && observableCollection.isEligible(doc) ) { - observableCollection.add(doc); + await observableCollection.add(doc); } }; @@ -40,26 +40,26 @@ const handleInsert = function(observableCollection, doc) { * @param doc * @param modifiedFields */ -const handleUpdate = function(observableCollection, doc, modifiedFields) { +const handleUpdate = async 
function(observableCollection, doc, modifiedFields) { const otherSelectors = observableCollection.__containsOtherSelectorsThanId; if (otherSelectors) { if (observableCollection.isEligible(doc)) { if (observableCollection.contains(doc._id)) { - observableCollection.change(doc, modifiedFields); + await observableCollection.change(doc, modifiedFields); } else { - observableCollection.add(doc); + await observableCollection.add(doc); } } else { if (observableCollection.contains(doc._id)) { - observableCollection.remove(doc._id); + await observableCollection.remove(doc._id); } } } else { if (observableCollection.contains(doc._id)) { - observableCollection.change(doc, modifiedFields); + await observableCollection.change(doc, modifiedFields); } else { - observableCollection.add(doc); + await observableCollection.add(doc); } } }; @@ -68,6 +68,6 @@ const handleUpdate = function(observableCollection, doc, modifiedFields) { * @param observableCollection * @param doc */ -const handleRemove = function(observableCollection, doc) { - observableCollection.remove(doc._id); +const handleRemove = async function(observableCollection, doc) { + await observableCollection.remove(doc._id); }; diff --git a/lib/processors/limit-sort.js b/lib/processors/limit-sort.js index a2e5af50..c78b5a3f 100644 --- a/lib/processors/limit-sort.js +++ b/lib/processors/limit-sort.js @@ -8,16 +8,16 @@ import requery from './actions/requery'; * @param doc * @param modifiedFields */ -export default function(observableCollection, event, doc, modifiedFields) { +export default async function(observableCollection, event, doc, modifiedFields) { switch (event) { case Events.INSERT: - handleInsert(observableCollection, doc); + await handleInsert(observableCollection, doc); break; case Events.UPDATE: - handleUpdate(observableCollection, doc, modifiedFields); + await handleUpdate(observableCollection, doc, modifiedFields); break; case Events.REMOVE: - handleRemove(observableCollection, doc); + await handleRemove(observableCollection, doc); break; default: throw new Meteor.Error(`Invalid event specified: ${event}`); @@ -28,9 +28,9 @@ export default function(observableCollection, event, doc, modifiedFields) { * @param observableCollection * @param doc */ -const handleInsert = function(observableCollection, doc) { +const handleInsert = async function(observableCollection, doc) { if (observableCollection.isEligible(doc)) { - requery(observableCollection, doc); + await requery(observableCollection, doc); } }; @@ -39,13 +39,13 @@ const handleInsert = function(observableCollection, doc) { * @param doc * @param modifiedFields */ -const handleUpdate = function(observableCollection, doc, modifiedFields) { +const handleUpdate = async function(observableCollection, doc, modifiedFields) { if (observableCollection.contains(doc._id)) { if (observableCollection.isEligible(doc)) { if ( hasSortFields(observableCollection.options.sort, modifiedFields) ) { - requery( + await requery( observableCollection, doc, Events.UPDATE, @@ -55,11 +55,11 @@ const handleUpdate = function(observableCollection, doc, modifiedFields) { observableCollection.change(doc, modifiedFields); } } else { - requery(observableCollection); + await requery(observableCollection); } } else { if (observableCollection.isEligible(doc)) { - requery( + await requery( observableCollection, doc, Events.UPDATE, @@ -73,12 +73,12 @@ const handleUpdate = function(observableCollection, doc, modifiedFields) { * @param observableCollection * @param doc */ -const handleRemove = function(observableCollection, 
doc) { +const handleRemove = async function(observableCollection, doc) { if (observableCollection.contains(doc._id)) { - requery(observableCollection, doc); + await requery(observableCollection, doc); } else { if (observableCollection.options.skip) { - requery(observableCollection, doc); + await requery(observableCollection, doc); } } }; diff --git a/lib/processors/synthetic.js b/lib/processors/synthetic.js index 1860e321..fabdb110 100644 --- a/lib/processors/synthetic.js +++ b/lib/processors/synthetic.js @@ -10,16 +10,16 @@ import { Events } from '../constants'; * @param modifier * @param modifiedTopLevelFields */ -export default function (observableCollection, event, doc, modifier, modifiedTopLevelFields) { +export default async function (observableCollection, event, doc, modifier, modifiedTopLevelFields) { switch (event) { case Events.INSERT: - handleInsert(observableCollection, doc); + await handleInsert(observableCollection, doc); break; case Events.UPDATE: - handleUpdate(observableCollection, doc, modifier, modifiedTopLevelFields); + await handleUpdate(observableCollection, doc, modifier, modifiedTopLevelFields); break; case Events.REMOVE: - handleRemove(observableCollection, doc); + await handleRemove(observableCollection, doc); break; default: throw new Meteor.Error(`Invalid event specified: ${event}`) @@ -30,9 +30,9 @@ export default function (observableCollection, event, doc, modifier, modifiedTop * @param observableCollection * @param doc */ -const handleInsert = function (observableCollection, doc) { +const handleInsert = async function (observableCollection, doc) { if (observableCollection.isEligible(doc)) { - observableCollection.add(doc, true); + await observableCollection.add(doc, true); } }; @@ -42,16 +42,16 @@ const handleInsert = function (observableCollection, doc) { * @param modifier * @param modifiedTopLevelFields */ -const handleUpdate = function (observableCollection, doc, modifier, modifiedTopLevelFields) { - observableCollection.changeSynthetic(doc._id, modifier, modifiedTopLevelFields); +const handleUpdate = async function (observableCollection, doc, modifier, modifiedTopLevelFields) { + await observableCollection.changeSynthetic(doc._id, modifier, modifiedTopLevelFields); }; /** * @param observableCollection * @param doc */ -const handleRemove = function (observableCollection, doc) { +const handleRemove = async function (observableCollection, doc) { if (observableCollection.contains(doc._id)) { - observableCollection.remove(doc._id); + await observableCollection.remove(doc._id); } -}; \ No newline at end of file +}; diff --git a/lib/redis/PubSubManager.js b/lib/redis/PubSubManager.js index 05a66833..ae314f7a 100644 --- a/lib/redis/PubSubManager.js +++ b/lib/redis/PubSubManager.js @@ -7,7 +7,7 @@ import { getRedisListener, getRedisPusher } from './getRedisClient'; export default class PubSubManager { constructor() { this.channelHandlers = {}; - this.queue = new Meteor._SynchronousQueue(); + this.queue = new Meteor._AsynchronousQueue(); this.listener = getRedisListener(); this.pusher = getRedisPusher(); @@ -65,12 +65,12 @@ export default class PubSubManager { _initMessageListener() { const self = this; - this.listener.on('message', Meteor.bindEnvironment(function(channel, _message) { + this.listener.on('message', Meteor.bindEnvironment(async function(channel, _message) { if (self.channelHandlers[channel]) { const message = EJSON.parse(_message); - self.channelHandlers[channel].forEach(channelHandler => { - channelHandler(message); - }) + for (const channelHandler 
of self.channelHandlers[channel]) { + await channelHandler(message); + } } })); } diff --git a/lib/redis/RedisSubscriber.js b/lib/redis/RedisSubscriber.js index 8e10348f..13883418 100644 --- a/lib/redis/RedisSubscriber.js +++ b/lib/redis/RedisSubscriber.js @@ -49,8 +49,8 @@ export default class RedisSubscriber { /** * @param args */ - process(...args) { - this.processor.call(null, this.observableCollection, ...args); + async process(...args) { + await this.processor.call(null, this.observableCollection, ...args); } /** @@ -59,8 +59,8 @@ export default class RedisSubscriber { * @param modifier * @param modifiedTopLevelFields */ - processSynthetic(event, doc, modifier, modifiedTopLevelFields) { - syntheticProcessor( + async processSynthetic(event, doc, modifier, modifiedTopLevelFields) { + return syntheticProcessor( this.observableCollection, event, doc, diff --git a/lib/redis/RedisSubscriptionManager.js b/lib/redis/RedisSubscriptionManager.js index 08aa1131..78336cf7 100644 --- a/lib/redis/RedisSubscriptionManager.js +++ b/lib/redis/RedisSubscriptionManager.js @@ -12,7 +12,7 @@ class RedisSubscriptionManager { return; } this.uid = Random.id(); - this.queue = new Meteor._SynchronousQueue(); + this.queue = new Meteor._AsynchronousQueue(); this.store = {}; // {channel: [RedisSubscribers]} this.channelHandlers = {}; // {channel: handler} @@ -82,7 +82,7 @@ class RedisSubscriptionManager { const self = this; const handler = function(message) { self.queue.queueTask(() => { - self.process(channel, message, true); + return self.process(channel, message, true); }); }; @@ -113,7 +113,7 @@ class RedisSubscriptionManager { * @param data * @param [fromRedis=false] */ - process(channel, data, fromRedis) { + async process(channel, data, fromRedis) { // messages from redis that contain our uid were handled // optimistically, so we can drop them. 
if (fromRedis && data[RedisPipe.UID] === this.uid) { @@ -144,7 +144,7 @@ class RedisSubscriptionManager { if (data[RedisPipe.EVENT] === Events.REMOVE) { doc = data[RedisPipe.DOC]; } else { - doc = this.getDoc(collection, subscribers, data); + doc = await this.getDoc(collection, subscribers, data); } // if by any chance it was deleted after it got dispatched @@ -153,9 +153,9 @@ class RedisSubscriptionManager { return; } - subscribers.forEach(redisSubscriber => { + for (const redisSubscriber of subscribers) { try { - redisSubscriber.process( + await redisSubscriber.process( data[RedisPipe.EVENT], doc, data[RedisPipe.FIELDS] @@ -165,11 +165,11 @@ class RedisSubscriptionManager { `[RedisSubscriptionManager] Exception while processing event: ${e.toString()}` ); } - }); + } } else { - subscribers.forEach(redisSubscriber => { + for (const redisSubscriber of subscribers) { try { - redisSubscriber.processSynthetic( + await redisSubscriber.processSynthetic( data[RedisPipe.EVENT], data[RedisPipe.DOC], data[RedisPipe.MODIFIER], @@ -180,7 +180,7 @@ class RedisSubscriptionManager { `[RedisSubscriptionManager] Exception while processing synthetic event: ${e.toString()}` ); } - }); + } } } @@ -189,8 +189,7 @@ class RedisSubscriptionManager { * @param subscribers * @param data */ - getDoc(collection, subscribers, data) { - const event = data[RedisPipe.EVENT]; + async getDoc(collection, subscribers, data) { let doc = data[RedisPipe.DOC]; if (collection._redisOplog && !collection._redisOplog.protectAgainstRaceConditions) { @@ -203,9 +202,9 @@ class RedisSubscriptionManager { const fieldsOfInterest = getFieldsOfInterestFromAll(subscribers); if (fieldsOfInterest === true) { - doc = collection.findOne(doc._id); + doc = await collection.findOneAsync(doc._id); } else { - doc = collection.findOne(doc._id, { fields: fieldsOfInterest }); + doc = await collection.findOneAsync(doc._id, { fields: fieldsOfInterest }); } return doc; diff --git a/lib/redis/getRedisClient.js b/lib/redis/getRedisClient.js index cda5a07c..9f4d5762 100644 --- a/lib/redis/getRedisClient.js +++ b/lib/redis/getRedisClient.js @@ -49,10 +49,11 @@ function attachEvents(client, {onConnect}) { const functions = ['connect', 'reconnecting', 'error', 'end']; functions.forEach(fn => { - redisListener.on(fn, Meteor.bindEnvironment(function (...args) { + redisListener.on(fn, Meteor.bindEnvironment(async function (...args) { if (fn === 'connect' && onConnect) { - onConnect(); + await onConnect(); } + if (Config.redisExtras.events[fn]) { return Config.redisExtras.events[fn](...args); } diff --git a/lib/utils/maybeWrapCallback.js b/lib/utils/maybeWrapCallback.js deleted file mode 100644 index 975c17ce..00000000 --- a/lib/utils/maybeWrapCallback.js +++ /dev/null @@ -1,14 +0,0 @@ -/** - * Function is used to wrap the callback function from a mutation if it exists - * - * @param callback - * @param fn - * @return {function(...[*])} - */ -export default function maybeWrapCallback(callback, fn) { - if (!callback) return - return (...args) => { - fn(...args) - return callback(...args) - } -} diff --git a/lib/utils/shouldPublicationBeWithPolling.js b/lib/utils/shouldPublicationBeWithPolling.js deleted file mode 100644 index b76e507a..00000000 --- a/lib/utils/shouldPublicationBeWithPolling.js +++ /dev/null @@ -1,37 +0,0 @@ -import { Meteor } from 'meteor/meteor'; -import { _ } from 'meteor/underscore'; - -export default function(cursors) { - let isDisabledOplog = undefined; - - if (cursors.length === 1) { - const [cursor] = cursors; - return isOplogDisabled(cursor); 
- } - - let disabledConfigs = []; - cursors.forEach(cursor => { - disabledConfigs.push(isOplogDisabled(cursor)); - }); - - const allTheSame = - _.every(disabledConfigs, c => c === true) || - _.every(disabledConfigs, c => c === false); - - if (!allTheSame) { - throw new Meteor.Error( - 'The array of cursors returned must all be reactive with oplog or polling, you are not allowed to mix them up.' - ); - } - - return disabledConfigs[0]; -} - -/** - * @param {*} cursor - */ -function isOplogDisabled(cursor) { - const config = cursor._cursorDescription || { options: {} }; - - return !!config.options.disableOplog; -} diff --git a/lib/vent/Vent.js b/lib/vent/Vent.js index 8e5d5c6e..f4c6258b 100644 --- a/lib/vent/Vent.js +++ b/lib/vent/Vent.js @@ -52,11 +52,11 @@ export default class Vent { * @private */ static _createPublishEndPoint(name, fn) { - return Meteor.publish(name, function (collectionId, ...args) { + return Meteor.publish(name, async function (collectionId, ...args) { Vent._extendPublishContext(this, name, collectionId); try { - fn.call(this, ...args); + await fn.call(this, ...args); } catch (e) { // we do this because the errors in here are silenced console.error(e); @@ -80,8 +80,8 @@ export default class Vent { Object.assign(context, { on(channel, redisEventHandler) { // create the handler for this channel - const handler = function(message) { - const data = redisEventHandler.call(context, message); + const handler = async function(message) { + const data = await redisEventHandler.call(context, message); if (data) { context._session.send({ diff --git a/lib/vent/VentClient.js b/lib/vent/VentClient.js index 21304d84..cd71b691 100644 --- a/lib/vent/VentClient.js +++ b/lib/vent/VentClient.js @@ -12,6 +12,7 @@ export default class VentClient { } subscribe(name, ...args) { + console.log("8999999999") const subscription = new VentClientSubscription(this, name); this.add(subscription); diff --git a/package.js b/package.js index 0205473b..4a4fba00 100644 --- a/package.js +++ b/package.js @@ -17,7 +17,7 @@ Npm.depends({ }); Package.onUse(function(api) { - api.versionsFrom(['1.12.2', '2.8.1', '2.12']); + api.versionsFrom(['1.12.2', '2.8.1', '2.12', '3.0-rc.0']); api.use([ 'underscore', 'ecmascript', diff --git a/redis-oplog.js b/redis-oplog.js index 0afe8ee9..4d688391 100644 --- a/redis-oplog.js +++ b/redis-oplog.js @@ -36,4 +36,4 @@ if (process.env.REDIS_OPLOG_SETTINGS) { init(JSON.parse(process.env.REDIS_OPLOG_SETTINGS)); } else if (Meteor.settings.redisOplog) { init(Meteor.settings.redisOplog); -} \ No newline at end of file +} diff --git a/testing/polling/collections.js b/testing/polling/collections.js index fc45b80d..78d7919a 100644 --- a/testing/polling/collections.js +++ b/testing/polling/collections.js @@ -3,7 +3,7 @@ import { Mongo } from 'meteor/mongo'; const Campaigns = new Mongo.Collection('campaign_searches'); if (Meteor.isServer) { - Campaigns._ensureIndex({ + Campaigns.createIndexAsync({ text: 'text' }); } diff --git a/testing/publish-counts/collections.js b/testing/publish-counts/collections.js deleted file mode 100644 index d6aaa6ce..00000000 --- a/testing/publish-counts/collections.js +++ /dev/null @@ -1,3 +0,0 @@ -import { Mongo } from 'meteor/mongo'; - -export const Items = new Mongo.Collection('performant_counts_items'); diff --git a/testing/publish-counts/server.js b/testing/publish-counts/server.js deleted file mode 100644 index 2ac113b3..00000000 --- a/testing/publish-counts/server.js +++ /dev/null @@ -1,24 +0,0 @@ -import { Meteor } from 'meteor/meteor'; -import { 
Counter } from 'meteor/natestrauser:publish-performant-counts'
-import { Items } from './collections';
-
-Meteor.publish('performant_counts', function () {
-    return new Counter(
-        'items_count',
-        Items.find({}),
-        100
-    )
-});
-
-Meteor.methods({
-    'performant_counts_boot'() {
-        Items.remove({});
-
-        Items.insert({name: 'Item 1'});
-        Items.insert({name: 'Item 2'});
-        Items.insert({name: 'Item 3'});
-    },
-    'performant_counts_add'() {
-        Items.insert({name: 'Item'});
-    }
-});
diff --git a/testing/synthetic_mutators.js b/testing/synthetic_mutators.js
index 20c2d50a..ef30e9d0 100644
--- a/testing/synthetic_mutators.js
+++ b/testing/synthetic_mutators.js
@@ -59,7 +59,9 @@ _.each(Collections, (Collection, key) => {
         });
 
         waitForHandleToBeReady(handle).then(function () {
-            let _id = cursor.fetch()[0]._id;
+            // TODO: when the handle is ready, the documents are not always
+            // in the collection yet.
+            const _id = cursor.fetch()[0]._id;
 
             assert.isString(_id);
 
             synthetic('update', _id, {
@@ -186,4 +188,4 @@ _.each(Collections, (Collection, key) => {
         });
     });
 });
-});
\ No newline at end of file
+});

From 9c61fdcd1c32be484d694f5de5a1becc34446288 Mon Sep 17 00:00:00 2001
From: matheusccastro
Date: Sat, 4 May 2024 01:49:22 -0300
Subject: [PATCH 02/33] feat: updating tests

---
 package.js | 12 +++---
 testing/boot.js | 27 ++++++++-------
 testing/collection_hooks.server.js | 24 +++++++------
 testing/collection_transform.js | 7 ++--
 testing/custom-publications/client.js | 1 -
 testing/include_prev_doc.js | 16 ++++-----
 testing/main.client.js | 1 -
 testing/main.server.js | 50 +++++++++++++--------------
 testing/observe_callbacks.server.js | 30 ++++++++--------
 testing/publish-counts/client.js | 19 ----------
 testing/return_value.js | 27 ++++++++-------
 testing/transformations/server.js | 16 ++++-----
 12 files changed, 108 insertions(+), 122 deletions(-)
 delete mode 100644 testing/publish-counts/client.js

diff --git a/package.js b/package.js
index 4a4fba00..d7b8e398 100644
--- a/package.js
+++ b/package.js
@@ -40,20 +40,18 @@ Package.onTest(function(api) {
     api.use('cultofcoders:redis-oplog');
 
     // extensions
-    api.use('aldeed:collection2@3.0.0');
-    api.use('reywood:publish-composite@1.7.3');
-    api.use('natestrauser:publish-performant-counts@0.1.2');
-    // api.use('socialize:user-presence@1.0.4');
+    api.use('aldeed:collection2@4.0.1');
+    api.use('reywood:publish-composite@1.8.9');
 
     api.use('ecmascript');
     api.use('tracker');
     api.use('mongo');
     api.use('random');
    api.use('accounts-password');
-    api.use('matb33:collection-hooks@1.1.2');
-    api.use('alanning:roles@3.5.1');
+    api.use('matb33:collection-hooks@1.3.1');
+    api.use('alanning:roles@4.0.0-alpha.1');
 
-    api.use(['meteortesting:mocha']);
+    api.use(['meteortesting:mocha@3.0.3-beta300.0']);
 
     api.mainModule('testing/main.server.js', 'server');
     api.addFiles('testing/publishComposite/boot.js', 'server');
diff --git a/testing/boot.js b/testing/boot.js
index b68373c7..6812fe1a 100644
--- a/testing/boot.js
+++ b/testing/boot.js
@@ -32,8 +32,8 @@ const opts = {
     Namespace: { namespace: 'some_namespace' },
 };
 const config = {
-    RaceConditionProne: { 
-        suffix: 'race-condition-prone', 
+    RaceConditionProne: {
+        suffix: 'race-condition-prone',
         disableSyntheticTests: true,
     },
     Standard: { suffix: 'standard', channel: 'test_redis_collection' },
@@ -60,7 +60,7 @@ if (Meteor.isServer) {
             remove: () => false,
         });
 
-        Meteor.publish(`publication.${config[key].suffix}`, function(
+        Meteor.publish(`publication.${config[key].suffix}`, async function(
             filters,
             options
        ) {
@@ 
-68,34 +68,37 @@ if (Meteor.isServer) { }); Meteor.methods({ - [`create.${config[key].suffix}`](item, options = {}) { + async [`create.${config[key].suffix}`](item, options = {}) { if (_.isArray(item)) { - return _.map(item, i => - Collection.insert(i, Object.assign(options, opts[key])) - ); + const result = []; + for (const i of item) { + result.push(await Collection.insertAsync(i, Object.assign(options, opts[key]))) + } + + return result; } - return Collection.insert(item, Object.assign(options, opts[key])); + return Collection.insertAsync(item, Object.assign(options, opts[key])); }, [`fetch.${config[key].suffix}`](selector = {}, options = {}) { - return Collection.find(selector, options).fetch(); + return Collection.find(selector, options).fetchAsync(); }, [`update.${config[key].suffix}`](selectors, modifier, options) { - return Collection.update( + return Collection.updateAsync( selectors, modifier, Object.assign({}, opts[key], options) ); }, [`upsert.${config[key].suffix}`](selectors, modifier, options) { - return Collection.upsert( + return Collection.upsertAsync( selectors, modifier, Object.assign({}, opts[key], options) ); }, [`remove.${config[key].suffix}`](selectors, options = {}) { - return Collection.remove( + return Collection.removeAsync( selectors, Object.assign(options, opts[key]) ); diff --git a/testing/collection_hooks.server.js b/testing/collection_hooks.server.js index 398ffd32..e3d18aeb 100644 --- a/testing/collection_hooks.server.js +++ b/testing/collection_hooks.server.js @@ -1,9 +1,8 @@ -import { assert } from 'chai'; -import { _ } from 'meteor/underscore'; -import { Random } from 'meteor/random'; - -describe('It should work with collection:hooks', function () { +import {assert} from 'chai'; +import {_} from 'meteor/underscore'; +// TODO: collection-hooks is not migrated yet, there's a PR going on: https://github.com/Meteor-Community-Packages/meteor-collection-hooks/pull/309 +describe.skip('It should work with collection:hooks', function () { const opts = [ {}, { channel: 'xxx' }, @@ -14,9 +13,10 @@ describe('It should work with collection:hooks', function () { opts.forEach(options => { const Collection = new Mongo.Collection('test_redis_collection_hooks_' + idx++); - Collection.remove({}); - it('Should detect all types of changes: ' + JSON.stringify(options), function () { + it('Should detect all types of changes: ' + JSON.stringify(options), async function (done) { + await Collection.removeAsync({}); + let updates = { 'before.insert': false, 'after.insert': false, @@ -45,13 +45,15 @@ describe('It should work with collection:hooks', function () { updates['after.remove'] = true; }); - const id = Collection.insert({ someData: true }); - Collection.update(id, { someData: false }); - Collection.remove(id); + const id = await Collection.insertAsync({ someData: true }); + await Collection.updateAsync(id, { someData: false }); + await Collection.removeAsync(id); _.each(updates, (value, key) => { assert.isTrue(value, key); }) + + done(); }) }) -}); \ No newline at end of file +}); diff --git a/testing/collection_transform.js b/testing/collection_transform.js index 709d9a8b..5bff1f2a 100644 --- a/testing/collection_transform.js +++ b/testing/collection_transform.js @@ -8,9 +8,10 @@ const fooCollection = new Mongo.Collection('foo', { }); describe('Collection Transform', function () { - it('Should work with transform functions', function () { - fooCollection.insert({}); - const foo = fooCollection.findOne(); + it('Should work with transform functions', async function (done) { 
+        await fooCollection.insertAsync({});
+        const foo = await fooCollection.findOneAsync();
         assert.isTrue(foo instanceof Foo);
+        done();
     })
 });
diff --git a/testing/custom-publications/client.js b/testing/custom-publications/client.js
index 230624fc..7c4be6a7 100644
--- a/testing/custom-publications/client.js
+++ b/testing/custom-publications/client.js
@@ -1,7 +1,6 @@
 import { assert } from 'chai';
 import { Items } from './collections';
 import { Meteor } from 'meteor/meteor';
-// import {Counter} from 'meteor/natestrauser:publish-performant-counts'
 
 describe('Testing custom publications functionality', function () {
     it('Should be able to retrieve the correct number', function (done) {
diff --git a/testing/include_prev_doc.js b/testing/include_prev_doc.js
index 58d2dae8..9634d630 100644
--- a/testing/include_prev_doc.js
+++ b/testing/include_prev_doc.js
@@ -13,7 +13,7 @@ PrevDocCollection.configureRedisOplog({
 });
 
 describe('PrevDocCollection Serverside', function () {
-    it('Should receive an insert event with prev doc', function (done) {
+    it('Should receive an insert event with prev doc', async function (done) {
         Config.pubSubManager.subscribe('test_redis_prev_doc', function (payload) {
             // make sure events have prev document values
             if (payload.e === 'u') {
@@ -28,12 +28,12 @@
         const random = Random.id()
 
         // trigger insert update and removed redis events
-        PrevDocCollection.insert({ _id: `${random}`, value: 'oldValue' });
-        PrevDocCollection.update({ _id: `${random}` }, { $set: { value: 'newValue' } });
-        PrevDocCollection.remove({ _id: `${random}` });
+        await PrevDocCollection.insertAsync({ _id: `${random}`, value: 'oldValue' });
+        await PrevDocCollection.updateAsync({ _id: `${random}` }, { $set: { value: 'newValue' } });
+        await PrevDocCollection.removeAsync({ _id: `${random}` });
     });
 
-    it('Should receive an insert event without prev doc', function (done) {
+    it('Should receive an insert event without prev doc', async function (done) {
         Config.pubSubManager.subscribe('test_redis_no_prev_doc', function (payload) {
             // make sure events do not have any prev document values
             // because NoPrevDocCollection does not have shouldIncludePrevDocument set
@@ -48,9 +48,9 @@
         });
 
         // trigger insert update and removed redis events
-        NoPrevDocCollection.insert({ _id: 'no_prev_doc_1', value: 'oldValue' });
-        NoPrevDocCollection.update({ _id: 'no_prev_doc_1' }, { $set: { value: 'newValue' } });
-        NoPrevDocCollection.remove({ _id: 'no_prev_doc_1' });
+        await NoPrevDocCollection.insertAsync({ _id: 'no_prev_doc_1', value: 'oldValue' });
+        await NoPrevDocCollection.updateAsync({ _id: 'no_prev_doc_1' }, { $set: { value: 'newValue' } });
+        await NoPrevDocCollection.removeAsync({ _id: 'no_prev_doc_1' });
     });
 });
 
diff --git a/testing/main.client.js b/testing/main.client.js
index 7ce6b9d0..2cb01e76 100644
--- a/testing/main.client.js
+++ 
b/testing/main.client.js @@ -7,7 +7,6 @@ import './publishComposite/client.test'; import './optimistic-ui/client.test'; // import './server-autorun/client'; import './transformations/client'; -import './publish-counts/client'; import './custom-publications/client'; import './collection-defaults/client'; import './vent/client'; diff --git a/testing/main.server.js b/testing/main.server.js index 343f9736..d9bbb668 100644 --- a/testing/main.server.js +++ b/testing/main.server.js @@ -11,7 +11,6 @@ import './observe_callbacks.server'; import './collection_transform'; // import './server-autorun/server'; import './transformations/server'; -import './publish-counts/server'; import './custom-publications/server'; import './vent/server'; import './accounts/server'; @@ -21,32 +20,33 @@ import './object-id/server'; import './include_prev_doc'; import './return_value'; -import { _ } from 'meteor/underscore'; -_.each(Collections, Collection => { - Collection.remove({}); +Meteor.startup(async () => { + for (const Collection of Object.values(Collections)) { + await Collection.removeAsync({}); - Collection.insert({ - title: 'A', - score: 20, - game: 'chess' - }); + await Collection.insertAsync({ + title: 'A', + score: 20, + game: 'chess' + }); - Collection.insert({ - title: 'B', - score: 30, - game: 'chess' - }); + await Collection.insertAsync({ + title: 'B', + score: 30, + game: 'chess' + }); - Collection.insert({ - title: 'C', - score: 10, - game: 'domino' - }); + await Collection.insertAsync({ + title: 'C', + score: 10, + game: 'domino' + }); - Collection.insert({ - title: 'D', - score: 40, - game: 'chess' - }); -}); + await Collection.insertAsync({ + title: 'D', + score: 40, + game: 'chess' + }); + } +}) diff --git a/testing/observe_callbacks.server.js b/testing/observe_callbacks.server.js index 5f2e6e83..2cd94303 100644 --- a/testing/observe_callbacks.server.js +++ b/testing/observe_callbacks.server.js @@ -4,55 +4,55 @@ import { Mongo } from 'meteor/mongo'; const Collection = new Mongo.Collection('test_observe_callbacks'); describe('Observe callbacks should work', function () { - it('Should work', function (done) { - Collection.remove({}); + it('Should work', async function (done) { + await Collection.removeAsync({}); const context = 'observe-callbacks'; let _id; let inAdded = false; - const handler = Collection.find({context}).observe({ - added(newDoc) { + const handler = await Collection.find({context}).observe({ + async added(newDoc) { assert.isObject(newDoc); assert.equal(newDoc.number, 10); - Collection.update(newDoc._id, { + await Collection.updateAsync(newDoc._id, { $set: {number: 20} }); }, - changed(newDoc, oldDoc) { + async changed(newDoc, oldDoc) { if (oldDoc.number === 10) { assert.isObject(newDoc); assert.isObject(oldDoc); assert.equal(newDoc.number, 20); assert.equal(oldDoc.number, 10); - Collection.remove(newDoc._id); + await Collection.removeAsync(newDoc._id); } }, - removed(oldDoc) { + async removed(oldDoc) { assert.isObject(oldDoc); assert.equal(oldDoc.number, 20); - handler.stop(); + await handler.stop(); done(); } }); assert.isFunction(handler.stop); - _id = Collection.insert({context, number: 10}); + await Collection.insertAsync({context, number: 10}); }); - it ('Should not be triggered if no changes are detected', function (done) { - Collection.remove({}); - const _id = Collection.insert({number: 10}); + it ('Should not be triggered if no changes are detected', async function (done) { + await Collection.removeAsync({}); + const _id = await Collection.insertAsync({number: 10}); 
let inChanged = false; - const handler = Collection.find().observe({ + const handler = await Collection.find().observe({ changed(newDoc, oldDoc) { inChanged = true; } }); - Collection.update(_id, { + await Collection.updateAsync(_id, { $set: {number: 10} }); diff --git a/testing/publish-counts/client.js b/testing/publish-counts/client.js deleted file mode 100644 index b2124100..00000000 --- a/testing/publish-counts/client.js +++ /dev/null @@ -1,19 +0,0 @@ -import { assert } from 'chai'; -import {Meteor} from 'meteor/meteor'; -import {Counter} from 'meteor/natestrauser:publish-performant-counts' - -describe('Testing publish-counts functionality', function () { - it('Should be able to retrieve the correct number', function (done) { - Meteor.call('performant_counts_boot', function () { - Meteor.subscribe('performant_counts', function () { - Meteor.call('performant_counts_add', function (err, res) { - setTimeout(function () { - assert.equal(Counter.get('items_count'), 4); - done(); - }, 200) - }); - }) - }) - }); -}); - diff --git a/testing/return_value.js b/testing/return_value.js index 3278da3c..acc8f3a4 100644 --- a/testing/return_value.js +++ b/testing/return_value.js @@ -5,19 +5,22 @@ describe('Collection', function () { let idx = 1; const Collection = new Mongo.Collection('test_return_value_' + idx++); - it('should return the amount of updated documents when updating', function () { - const id = Collection.insert({someData: true}) - const r = Collection.update(id, {someData: false}); - assert.strictEqual(r, 1) + it('should return the amount of updated documents when updating', async function (done) { + const id = await Collection.insertAsync({someData: true}) + const r = await Collection.updateAsync(id, {someData: false}); + assert.strictEqual(r, 1); + done(); }) - it('should return the amount of updated documents when upserting with update', function () { - const id = Collection.insert({someData: true}) - const r = Collection.update(id, {someData: false}, {upsert: true}); - assert.strictEqual(r, 1) + it('should return the amount of updated documents when upserting with update', async function (done) { + const id = await Collection.insertAsync({someData: true}) + const r = await Collection.updateAsync(id, {someData: false}, {upsert: true}); + assert.strictEqual(r, 1); + done(); }) - it('should return an object with the amount of updated documents when upserting', function () { - const id = Collection.insert({someData: true}) - const r = Collection.upsert(id, {someData: false}); - assert.deepEqual(r, {numberAffected: 1}) + it('should return an object with the amount of updated documents when upserting', async function (done) { + const id = await Collection.insertAsync({someData: true}) + const r = await Collection.upsertAsync(id, {someData: false}); + assert.deepEqual(r, {numberAffected: 1}); + done(); }) }); diff --git a/testing/transformations/server.js b/testing/transformations/server.js index 28c1f194..6f312d31 100644 --- a/testing/transformations/server.js +++ b/testing/transformations/server.js @@ -20,25 +20,25 @@ Meteor.publish('transformations_items_custom', function() { }); Meteor.methods({ - transformations_boot() { - Items.remove({}); - Items.insert({ context: 'client', title: 'hello1' }); + async transformations_boot() { + await Items.removeAsync({}); + await Items.insertAsync({ context: 'client', title: 'hello1' }); }, }); describe('Transformations - Server Test', function() { - it('Should transform properly', function(done) { + it('Should transform properly', async 
function(done) { const context = Random.id(); - const handle = Items.find({ + const handle = await Items.find({ context, }).observeChanges({ - added(docId, doc) { + async added(docId, doc) { assert.isTrue(doc.defaultServerTransform); - handle.stop(); + await handle.stop(); done(); }, }); - Items.insert({ context, title: 'hello2' }); + await Items.insertAsync({ context, title: 'hello2' }); }); }); From 777ddfadf73c8d6655f1d500b849fdb4665f4f79 Mon Sep 17 00:00:00 2001 From: matheusccastro Date: Mon, 6 May 2024 08:54:28 -0300 Subject: [PATCH 03/33] feat: add prettier --- .gitignore | 2 +- .prettierignore | 8 ++++++++ .prettierrc | 6 ++++++ package-lock.json | 31 +++++++++++++++++++++++++++++++ package.json | 26 ++++++++++++++++++++++++++ 5 files changed, 72 insertions(+), 1 deletion(-) create mode 100644 .prettierignore create mode 100644 .prettierrc create mode 100644 package-lock.json create mode 100644 package.json diff --git a/.gitignore b/.gitignore index 04913f99..58fadf0f 100644 --- a/.gitignore +++ b/.gitignore @@ -2,5 +2,5 @@ dump.rdb npm-debug.log .idea/ - +node_modules test/ diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 00000000..2d524127 --- /dev/null +++ b/.prettierignore @@ -0,0 +1,8 @@ +.idea/ +.github/ +test/ +docs/ +testing/ +node_modules/ +package-lock.json +test/ diff --git a/.prettierrc b/.prettierrc new file mode 100644 index 00000000..77e3bfd3 --- /dev/null +++ b/.prettierrc @@ -0,0 +1,6 @@ +{ + "semi": true, + "singleQuote": false, + "trailingComma": "es5", + "bracketSpacing": true +} diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 00000000..be82e123 --- /dev/null +++ b/package-lock.json @@ -0,0 +1,31 @@ +{ + "name": "redis-oplog", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "redis-oplog", + "version": "1.0.0", + "license": "ISC", + "devDependencies": { + "prettier": "^3.2.5" + } + }, + "node_modules/prettier": { + "version": "3.2.5", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.2.5.tgz", + "integrity": "sha512-3/GWa9aOC0YeD7LUfvOG2NiDyhOWRvt1k+rcKhOuYnMY24iiCphgneUfJDyFXd6rZCAnuLBv6UeAULtrhT/F4A==", + "dev": true, + "bin": { + "prettier": "bin/prettier.cjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + } + } +} diff --git a/package.json b/package.json new file mode 100644 index 00000000..2dd2f09f --- /dev/null +++ b/package.json @@ -0,0 +1,26 @@ +{ + "name": "redis-oplog", + "version": "1.0.0", + "description": "Replacement for Meteor's MongoDB oplog implementation", + "directories": { + "doc": "docs", + "lib": "lib", + "test": "test" + }, + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/cult-of-coders/redis-oplog.git" + }, + "author": "", + "license": "ISC", + "bugs": { + "url": "https://github.com/cult-of-coders/redis-oplog/issues" + }, + "homepage": "https://github.com/cult-of-coders/redis-oplog#readme", + "devDependencies": { + "prettier": "^3.2.5" + } +} From f89497f519333b6a4c187dbd74e2f22f282e4cc1 Mon Sep 17 00:00:00 2001 From: matheusccastro Date: Mon, 6 May 2024 08:55:25 -0300 Subject: [PATCH 04/33] feat: run prettier on whole project --- CHANGELOG.md | 11 + CONTRIBUTING.md | 11 +- README.md | 3 +- lib/cache/ObservableCollection.js | 562 ++++++++-------- lib/cache/lib/extractFieldsFromFilters.js | 47 +- lib/cache/lib/fieldProjectionIsExclusion.js | 8 +- 
lib/cache/lib/filterFieldsForFetching.js | 113 ++-- lib/cache/lib/getChannels.js | 51 +- lib/cache/lib/getSnapbackFields.js | 34 +- lib/cache/lib/getTopLevelFields.js | 10 +- lib/cache/mongoIdMap.js | 166 ++--- lib/config.js | 97 ++- lib/constants.js | 47 +- lib/debug.js | 16 +- lib/init.js | 70 +- lib/mongo/Mutator.js | 685 ++++++++++---------- lib/mongo/ObserveMultiplex.js | 539 ++++++++------- lib/mongo/PollingObserveDriver.js | 553 ++++++++-------- lib/mongo/RedisOplogObserveDriver.js | 215 +++--- lib/mongo/SyntheticMutator.js | 172 ++--- lib/mongo/allow-deny/docToValidate.js | 30 +- lib/mongo/allow-deny/transformDoc.js | 4 +- lib/mongo/allow-deny/validatedInsert.js | 44 +- lib/mongo/allow-deny/validatedRemove.js | 71 +- lib/mongo/allow-deny/validatedUpdate.js | 214 +++--- lib/mongo/extendMongoCollection.js | 200 +++--- lib/mongo/extendObserveChanges.js | 8 +- lib/mongo/lib/containsOperators.js | 8 +- lib/mongo/lib/dispatchers.js | 152 +++-- lib/mongo/lib/getMutationConfig.js | 55 +- lib/mongo/mongoCollectionNames.js | 16 +- lib/mongo/observeChanges.js | 267 ++++---- lib/processors/actions/reload.js | 36 +- lib/processors/actions/requery.js | 72 +- lib/processors/default.js | 81 ++- lib/processors/direct.js | 95 +-- lib/processors/getStrategy.js | 24 +- lib/processors/index.js | 22 +- lib/processors/lib/fieldsExist.js | 26 +- lib/processors/limit-sort.js | 105 ++- lib/processors/synthetic.js | 66 +- lib/redis/PubSubManager.js | 157 ++--- lib/redis/RedisSubscriber.js | 156 +++-- lib/redis/RedisSubscriptionManager.js | 380 +++++------ lib/redis/getRedisClient.js | 75 ++- lib/redis/lib/getFieldsOfInterestFromAll.js | 66 +- lib/utils/extractIdsFromSelector.js | 30 +- lib/utils/getChannelName.js | 4 +- lib/utils/getDedicatedChannel.js | 6 +- lib/utils/getFields.js | 66 +- lib/utils/isRemovedNonExistent.js | 2 +- lib/vent/Vent.js | 230 +++---- lib/vent/VentClient.js | 190 +++--- package.js | 94 +-- redis-oplog.client.js | 2 +- redis-oplog.js | 50 +- 56 files changed, 3294 insertions(+), 3220 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b1049d59..f0bb0d56 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,7 @@ - Fix reactivity bug in fairly specific situations [#367](https://github.com/cult-of-coders/redis-oplog/issues/367) ### 2.2.0 + - Bumped minimum Meteor version to v1.12.2 - Updated tests to cover from Meteor v1.12.2 to the latest v2.12 - Added testing for Redis v7 @@ -15,41 +16,51 @@ - Fix SyntheticMutator not applying `globalRedisPrefix` ### 2.1.1 + - Fixes callback is not a function error when using SyntheticMutator.update ### 2.1.0 + - Meteor 2.6 support - Projections option support - Update Mocha tests - Update tests to use Meteor 1.12.2 to fix certificates issues ### 1.2.3 + - Redis connection failover handling - Re-fetching the up-to-date collection when Redis connection resumes - Bug fixes and improvements ### 1.2.2 + - Ability to merge db requests by channel - Bug fixes and improvements ### 1.2.1 + - Bug fixes and improvements ### 1.2.0 + - Optimistic UI fixes - Performance gains for methods - Fixes for publishComposite - Other bugs and code quality improvements ### 1.0.5 - 1.0.15 + - Bug fixes and improvements ### 1.0.5 + - Fix for infinite loop when overriding publish ### 1.0.4 + - Fix for update using positional operators ### 1.0.3 + - Added support for publish composite - Fixed randomly failing tests diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index d5584094..4b2f5319 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -4,7 +4,8 @@ First, thank you 
for considering contributing to redis-oplog! It's people like you that make the open source community such a great community! 😊 -We welcome any type of contribution, not only code. You can help with +We welcome any type of contribution, not only code. You can help with + - **QA**: file bug reports, the more details you can give the better (e.g. screenshots with the console open) - **Marketing**: writing blog posts, howto's, printing stickers, ... - **Community**: presenting the project at meetups, organizing a dedicated meetup for the local community, ... @@ -13,7 +14,7 @@ We welcome any type of contribution, not only code. You can help with ## Your First Contribution -Working on your first Pull Request? You can learn how from this *free* series, [How to Contribute to an Open Source Project on GitHub](https://egghead.io/series/how-to-contribute-to-an-open-source-project-on-github). +Working on your first Pull Request? You can learn how from this _free_ series, [How to Contribute to an Open Source Project on GitHub](https://egghead.io/series/how-to-contribute-to-an-open-source-project-on-github). ## Submitting code @@ -26,7 +27,8 @@ It is also always helpful to have some context for your pull request. What was t ## Running Tests -### Setup +### Setup + ``` meteor create --release 1.12.2 --bare test cd test @@ -34,6 +36,7 @@ meteor npm i --save puppeteer@1.18.1 simpl-schema chai ``` ### Start Tests + ``` METEOR_PACKAGE_DIRS="../" TEST_BROWSER_DRIVER=puppeteer meteor test-packages --raw-logs --once --driver-package meteortesting:mocha ../ ``` @@ -55,14 +58,12 @@ You can also reach us at hello@redis-oplog.opencollective.com. Thank you to all the people who have already contributed to redis-oplog! - ### Backers Thank you to all our backers! [[Become a backer](https://opencollective.com/redis-oplog#backer)] - ### Sponsors Thank you to all our sponsors! (please ask your company to also support this open source project by [becoming a sponsor](https://opencollective.com/redis-oplog#sponsor)) diff --git a/README.md b/README.md index 6589cb8b..dac24be3 100644 --- a/README.md +++ b/README.md @@ -88,7 +88,7 @@ RedisOplog.stats() ### Events for Meteor (+ Redis Oplog, Grapher and GraphQL/Apollo) -* Meteor Night 2018 Slide: [Arguments for Meteor](https://drive.google.com/file/d/1Tx9vO-XezO3DI2uAYalXPvhJ-Avqc4-q/view) - Theodor Diaconu, CEO of Cult of Coders: “Redis Oplog, Grapher, and Apollo Live. +- Meteor Night 2018 Slide: [Arguments for Meteor](https://drive.google.com/file/d/1Tx9vO-XezO3DI2uAYalXPvhJ-Avqc4-q/view) - Theodor Diaconu, CEO of Cult of Coders: “Redis Oplog, Grapher, and Apollo Live. ### [Optimistic UI](docs/optimistic_ui.md) @@ -145,4 +145,3 @@ Support this project by becoming a sponsor. 
Your logo will show up here with a l - diff --git a/lib/cache/ObservableCollection.js b/lib/cache/ObservableCollection.js index 6b2fbe12..e8a399f8 100644 --- a/lib/cache/ObservableCollection.js +++ b/lib/cache/ObservableCollection.js @@ -1,329 +1,321 @@ -import { DiffSequence } from 'meteor/diff-sequence'; -import { _ } from 'meteor/underscore'; -import { LocalCollection, Minimongo } from 'meteor/minimongo'; -import fieldProjectionIsExclusion from './lib/fieldProjectionIsExclusion'; -import getChannels from './lib/getChannels'; -import extractFieldsFromFilters from './lib/extractFieldsFromFilters'; -import { MongoIDMap } from './mongoIdMap'; -import { EJSON } from 'meteor/ejson'; -import isRemovedNonExistent from '../utils/isRemovedNonExistent'; -import getStrategy from '../processors/getStrategy'; +import { DiffSequence } from "meteor/diff-sequence"; +import { _ } from "meteor/underscore"; +import { LocalCollection, Minimongo } from "meteor/minimongo"; +import fieldProjectionIsExclusion from "./lib/fieldProjectionIsExclusion"; +import getChannels from "./lib/getChannels"; +import extractFieldsFromFilters from "./lib/extractFieldsFromFilters"; +import { MongoIDMap } from "./mongoIdMap"; +import { EJSON } from "meteor/ejson"; +import isRemovedNonExistent from "../utils/isRemovedNonExistent"; +import getStrategy from "../processors/getStrategy"; const allowedOptions = [ - 'limit', - 'skip', - 'sort', - 'fields', - 'projection', - 'channels', - 'channel', - 'namespace', - 'namespaces', + "limit", + "skip", + "sort", + "fields", + "projection", + "channels", + "channel", + "namespace", + "namespaces", ]; export default class ObservableCollection { - /** - * Instantiate the collection - * @param {*} param - */ - constructor({ multiplexer, matcher, sorter, cursorDescription }) { - this.multiplexer = multiplexer; - this.matcher = matcher; - this.cursorDescription = cursorDescription; - - this.collectionName = this.cursorDescription.collectionName; - this.collection = Mongo.Collection.__getCollectionByName( - cursorDescription.collectionName - ); - + /** + * Instantiate the collection + * @param {*} param + */ + constructor({ multiplexer, matcher, sorter, cursorDescription }) { + this.multiplexer = multiplexer; + this.matcher = matcher; + this.cursorDescription = cursorDescription; + + this.collectionName = this.cursorDescription.collectionName; + this.collection = Mongo.Collection.__getCollectionByName( + cursorDescription.collectionName + ); + + if (!this.collection) { + throw new Meteor.Error( + "We could not properly identify the collection by its name: " + + this.collectionName + + ". Make sure you added redis-oplog package before any package that instantiates a collection." + ); + } - if (!this.collection) { - throw new Meteor.Error( - 'We could not properly identify the collection by its name: ' + - this.collectionName + - '. Make sure you added redis-oplog package before any package that instantiates a collection.' 
- ); - } + this.cursor = this.collection.find( + cursorDescription.selector, + cursorDescription.options + ); - this.cursor = this.collection.find( - cursorDescription.selector, - cursorDescription.options - ); + this.store = new MongoIDMap(); + this.selector = this.cursorDescription.selector || {}; - this.store = new MongoIDMap(); - this.selector = this.cursorDescription.selector || {}; - - if (_.isString(this.selector)) { - this.selector = { _id: this.selector }; - } - - if (this.cursorDescription.options) { - this.options = _.pick( - this.cursorDescription.options, - ...allowedOptions - ); - } else { - this.options = {}; - } - - var fields = this.options.projection || this.options.fields; - - // check for empty projector object and delete. - if (fields && _.isEmpty(fields)) { - delete this.options.projection; - delete this.options.fields; - } - - if (fields) { - this.fieldsArray = Object.keys(fields); - - if (!_.isArray(this.fieldsArray)) { - throw new Meteor.Error( - 'We could not properly extract any fields. "projection" or "fields" must be an object. This was provided: ' + - JSON.stringify(fields) - ); - } - - this.projectFieldsOnDoc = LocalCollection._compileProjection( - fields - ); - this.isFieldsProjectionByExclusion = fieldProjectionIsExclusion( - fields - ); - } - - this.channels = getChannels(this.collectionName, this.options); - this.fieldsOfInterest = this._getFieldsOfInterest(); - this.__isInitialized = false; - - var projection = fields || {}; - this._projectionFn = LocalCollection._compileProjection(projection); // Projection function, result of combining important fields for selector and - // existing fields projection - - this._sharedProjection = matcher.combineIntoProjection(projection); - if (sorter) { - this._sharedProjection = sorter.combineIntoProjection( - this._sharedProjection - ); - } - this._sharedProjectionFn = LocalCollection._compileProjection( - this._sharedProjection - ); + if (_.isString(this.selector)) { + this.selector = { _id: this.selector }; } - async setupCollection() { - // Here we apply the logic of changing the cursor based on the collection-level configuration - if (this.collection._redisOplog) { - const { cursor } = this.collection._redisOplog; - if (cursor) { - const context = DDP._CurrentPublicationInvocation.get(); - await cursor.call( - context, - cursorDescription.options, - cursorDescription.selector - ); - } - } + if (this.cursorDescription.options) { + this.options = _.pick(this.cursorDescription.options, ...allowedOptions); + } else { + this.options = {}; } - /** - * Function that checks whether or not the doc matches our filters - * - * @param doc - * @returns {*} - */ - isEligible(doc) { - if (this.matcher) { - return this.matcher.documentMatches(doc).result; - } - - return true; - } + var fields = this.options.projection || this.options.fields; - /** - * @param _id - * @returns {boolean} - */ - async isEligibleByDB(_id) { - if (this.matcher) { - return !!(await this.collection.findOneAsync( - Object.assign({}, this.selector, { _id }), - { fields: { _id: 1 } } - )); - } - - return true; + // check for empty projector object and delete. + if (fields && _.isEmpty(fields)) { + delete this.options.projection; + delete this.options.fields; } - /** - * Performs the initial search then puts them into the store. - */ - async init() { - if (this.__isInitialized) { - return; // silently do nothing. 
- } + if (fields) { + this.fieldsArray = Object.keys(fields); - this.__isInitialized = true; - let data = await this.cursor.fetchAsync(); + if (!_.isArray(this.fieldsArray)) { + throw new Meteor.Error( + 'We could not properly extract any fields. "projection" or "fields" must be an object. This was provided: ' + + JSON.stringify(fields) + ); + } + + this.projectFieldsOnDoc = LocalCollection._compileProjection(fields); + this.isFieldsProjectionByExclusion = fieldProjectionIsExclusion(fields); + } - for (const doc of data) { - await this.add(doc, true); - } + this.channels = getChannels(this.collectionName, this.options); + this.fieldsOfInterest = this._getFieldsOfInterest(); + this.__isInitialized = false; - // This has too much control over multiplexer.. - this.multiplexer.ready(); + var projection = fields || {}; + this._projectionFn = LocalCollection._compileProjection(projection); // Projection function, result of combining important fields for selector and + // existing fields projection + + this._sharedProjection = matcher.combineIntoProjection(projection); + if (sorter) { + this._sharedProjection = sorter.combineIntoProjection( + this._sharedProjection + ); + } + this._sharedProjectionFn = LocalCollection._compileProjection( + this._sharedProjection + ); + } + + async setupCollection() { + // Here we apply the logic of changing the cursor based on the collection-level configuration + if (this.collection._redisOplog) { + const { cursor } = this.collection._redisOplog; + if (cursor) { + const context = DDP._CurrentPublicationInvocation.get(); + await cursor.call( + context, + cursorDescription.options, + cursorDescription.selector + ); + } + } + } + + /** + * Function that checks whether or not the doc matches our filters + * + * @param doc + * @returns {*} + */ + isEligible(doc) { + if (this.matcher) { + return this.matcher.documentMatches(doc).result; } - /** - * @param docId - * @returns {boolean} - */ - contains(docId) { - return this.store.has(docId); + return true; + } + + /** + * @param _id + * @returns {boolean} + */ + async isEligibleByDB(_id) { + if (this.matcher) { + return !!(await this.collection.findOneAsync( + Object.assign({}, this.selector, { _id }), + { fields: { _id: 1 } } + )); } - /** - * @param doc {Object} - * @param safe {Boolean} If this is set to true, it assumes that the object is cleaned - */ - async add(doc, safe = false) { - doc = EJSON.clone(doc); - - if (!safe) { - if (this.fieldsArray) { - doc = this.projectFieldsOnDoc(doc); - } - } - - this.store.set(doc._id, doc); - await this.multiplexer.added(doc._id, doc); + return true; + } + + /** + * Performs the initial search then puts them into the store. + */ + async init() { + if (this.__isInitialized) { + return; // silently do nothing. } - /** - * We use this method when we receive updates for a document that is not yet in the observable collection store - * @param docId - */ - async addById(docId) { - const { limit, skip, ...cleanedOptions } = this.options; - const doc = await this.collection.findOneAsync({ _id: docId }, cleanedOptions); + this.__isInitialized = true; + let data = await this.cursor.fetchAsync(); - this.store.set(docId, doc); + for (const doc of data) { + await this.add(doc, true); + } - if (doc) { - await this.multiplexer.added(doc._id, doc); - } + // This has too much control over multiplexer.. 
+ this.multiplexer.ready(); + } + + /** + * @param docId + * @returns {boolean} + */ + contains(docId) { + return this.store.has(docId); + } + + /** + * @param doc {Object} + * @param safe {Boolean} If this is set to true, it assumes that the object is cleaned + */ + async add(doc, safe = false) { + doc = EJSON.clone(doc); + + if (!safe) { + if (this.fieldsArray) { + doc = this.projectFieldsOnDoc(doc); + } } - /** - * Sends over the wire only the top fields of changes, because DDP client doesnt do deep merge. - * - * @param {object} doc - * @param {array} modifiedFields - */ - async change(doc, modifiedFields) { - const docId = doc._id; - const oldDoc = this.store.get(docId); - if (oldDoc == null) { - return; - } - - this.store.set(docId, this._sharedProjectionFn(doc)); - - var projectedNew = this._projectionFn(doc); - var projectedOld = this._projectionFn(oldDoc); - - var changed = DiffSequence.makeChangedFields( - projectedNew, - projectedOld - ); + this.store.set(doc._id, doc); + await this.multiplexer.added(doc._id, doc); + } + + /** + * We use this method when we receive updates for a document that is not yet in the observable collection store + * @param docId + */ + async addById(docId) { + const { limit, skip, ...cleanedOptions } = this.options; + const doc = await this.collection.findOneAsync( + { _id: docId }, + cleanedOptions + ); + + this.store.set(docId, doc); + + if (doc) { + await this.multiplexer.added(doc._id, doc); + } + } + + /** + * Sends over the wire only the top fields of changes, because DDP client doesnt do deep merge. + * + * @param {object} doc + * @param {array} modifiedFields + */ + async change(doc, modifiedFields) { + const docId = doc._id; + const oldDoc = this.store.get(docId); + if (oldDoc == null) { + return; + } - if (!_.isEmpty(changed)) { - await this.multiplexer.changed(docId, changed); - } + this.store.set(docId, this._sharedProjectionFn(doc)); + + var projectedNew = this._projectionFn(doc); + var projectedOld = this._projectionFn(oldDoc); + + var changed = DiffSequence.makeChangedFields(projectedNew, projectedOld); + + if (!_.isEmpty(changed)) { + await this.multiplexer.changed(docId, changed); + } + } + + /** + * @param docId string + * @param modifier object + * @param topLevelFields array + * @private + */ + async changeSynthetic(docId, modifier, topLevelFields) { + if (!this.store.has(docId)) { + return; } - /** - * @param docId string - * @param modifier object - * @param topLevelFields array - * @private - */ - async changeSynthetic(docId, modifier, topLevelFields) { - if (!this.store.has(docId)) { - return; - } + let storedDoc = this.store.get(docId); + let oldDoc = EJSON.clone(storedDoc); - let storedDoc = this.store.get(docId); - let oldDoc = EJSON.clone(storedDoc); + LocalCollection._modify(storedDoc, modifier); - LocalCollection._modify(storedDoc, modifier); + var changed = DiffSequence.makeChangedFields(storedDoc, oldDoc); - var changed = DiffSequence.makeChangedFields(storedDoc, oldDoc); + await this.multiplexer.changed(docId, changed); + } - await this.multiplexer.changed(docId, changed); + /** + * @param docId + */ + async remove(docId) { + const doc = this.store.pop(docId); + if (doc != null) { + await this.multiplexer.removed(docId, doc); } - - /** - * @param docId - */ - async remove(docId) { - const doc = this.store.pop(docId); - if (doc != null) { - await this.multiplexer.removed(docId, doc); - } + } + + /** + * Clears the store + */ + clearStore() { + this.store.clear(); + } + + /** + * Returns whether the limit of allowed 
documents is reached + * based on the selector options + */ + isLimitReached() { + if (this.options.limit) { + const size = this.store.size(); + return size >= this.options.limit; } - /** - * Clears the store - */ - clearStore() { - this.store.clear(); + return false; + } + + /** + * Used at initialization + * + * Creates and stores the fields specified in fields & filters + * If by any chance there are no fields specified, we return true + * + * @private + * @return {true|object} + */ + _getFieldsOfInterest() { + const fields = this.options.projection || this.options.fields; + + if (!fields) { + return true; } - /** - * Returns whether the limit of allowed documents is reached - * based on the selector options - */ - isLimitReached() { - if (this.options.limit) { - const size = this.store.size(); - return size >= this.options.limit; - } - - return false; + // if you have some fields excluded (high chances you don't, but we query for all fields either way) + // because it can get very tricky with future subscribers that may need some fields + if (this.isFieldsProjectionByExclusion) { + return true; } - /** - * Used at initialization - * - * Creates and stores the fields specified in fields & filters - * If by any chance there are no fields specified, we return true - * - * @private - * @return {true|object} - */ - _getFieldsOfInterest() { - const fields = this.options.projection || this.options.fields; - - if (!fields) { - return true; - } - - // if you have some fields excluded (high chances you don't, but we query for all fields either way) - // because it can get very tricky with future subscribers that may need some fields - if (this.isFieldsProjectionByExclusion) { - return true; - } - - // if we have options, we surely have fields array - let fieldsArray = this.fieldsArray.slice(); - if (Object.keys(this.selector).length > 0) { - fieldsArray = _.union( - fieldsArray, - extractFieldsFromFilters(this.selector) - ); - } - - return fieldsArray; + // if we have options, we surely have fields array + let fieldsArray = this.fieldsArray.slice(); + if (Object.keys(this.selector).length > 0) { + fieldsArray = _.union( + fieldsArray, + extractFieldsFromFilters(this.selector) + ); } + + return fieldsArray; + } } diff --git a/lib/cache/lib/extractFieldsFromFilters.js b/lib/cache/lib/extractFieldsFromFilters.js index e9fd4f75..fecb6881 100644 --- a/lib/cache/lib/extractFieldsFromFilters.js +++ b/lib/cache/lib/extractFieldsFromFilters.js @@ -1,36 +1,39 @@ -import { _ } from 'meteor/underscore'; +import { _ } from "meteor/underscore"; -const deepFilterFieldsArray = ['$and', '$or', '$nor']; -const deepFilterFieldsObject = ['$not']; +const deepFilterFieldsArray = ["$and", "$or", "$nor"]; +const deepFilterFieldsObject = ["$not"]; /** * Given a complex filtering option, extract the fields * @param filters */ function extractFieldsFromFilters(filters) { - let filterFields = []; + let filterFields = []; - _.each(filters, (value, field) => { - if (field[0] !== '$') { - filterFields.push(field); - } - }); + _.each(filters, (value, field) => { + if (field[0] !== "$") { + filterFields.push(field); + } + }); - deepFilterFieldsArray.forEach(field => { - if (filters[field]) { - filters[field].forEach(element => { - filterFields = _.union(filterFields, extractFieldsFromFilters(element)); - }); - } - }); + deepFilterFieldsArray.forEach((field) => { + if (filters[field]) { + filters[field].forEach((element) => { + filterFields = _.union(filterFields, extractFieldsFromFilters(element)); + }); + } + }); - 
deepFilterFieldsObject.forEach(field => { - if (filters[field]) { - filterFields = _.union(filterFields, extractFieldsFromFilters(filters[field])); - } - }); + deepFilterFieldsObject.forEach((field) => { + if (filters[field]) { + filterFields = _.union( + filterFields, + extractFieldsFromFilters(filters[field]) + ); + } + }); - return filterFields; + return filterFields; } export default extractFieldsFromFilters; diff --git a/lib/cache/lib/fieldProjectionIsExclusion.js b/lib/cache/lib/fieldProjectionIsExclusion.js index 1737ebe3..7e25e5df 100644 --- a/lib/cache/lib/fieldProjectionIsExclusion.js +++ b/lib/cache/lib/fieldProjectionIsExclusion.js @@ -1,5 +1,5 @@ export default (fields) => { - for (let value in fields) { - return fields[value] !== 1; - } -} \ No newline at end of file + for (let value in fields) { + return fields[value] !== 1; + } +}; diff --git a/lib/cache/lib/filterFieldsForFetching.js b/lib/cache/lib/filterFieldsForFetching.js index e9e418a3..1875c91e 100644 --- a/lib/cache/lib/filterFieldsForFetching.js +++ b/lib/cache/lib/filterFieldsForFetching.js @@ -3,73 +3,76 @@ * @param modifiedFields Array */ function filterAllowedFields(allowedFields, modifiedFields) { - let builder = {_id: 1}; + let builder = { _id: 1 }; - modifiedFields.forEach(modifiedField => { - for (let i = 0; i < allowedFields.length; i++) { - const allowedField = allowedFields[i]; + modifiedFields.forEach((modifiedField) => { + for (let i = 0; i < allowedFields.length; i++) { + const allowedField = allowedFields[i]; - // this should treat the case where modifiedField is a nest of allowedField like: - // modifiedField: 'profile.firstName' - // allowedField: 'profile' - // => modifiedField goes to builder - if (modifiedField === allowedField || modifiedField.indexOf(allowedField + '.') !== -1) { - builder[modifiedField] = 1; - return; - } + // this should treat the case where modifiedField is a nest of allowedField like: + // modifiedField: 'profile.firstName' + // allowedField: 'profile' + // => modifiedField goes to builder + if ( + modifiedField === allowedField || + modifiedField.indexOf(allowedField + ".") !== -1 + ) { + builder[modifiedField] = 1; + return; + } - // it should also treat the following case: - // modifiedField: 'address' - // allowedField: 'address.city' - // => allowedField goes to builder - if (allowedField.indexOf(modifiedField + '.') !== -1) { - builder[allowedField] = 1; - return; - } - } - }); + // it should also treat the following case: + // modifiedField: 'address' + // allowedField: 'address.city' + // => allowedField goes to builder + if (allowedField.indexOf(modifiedField + ".") !== -1) { + builder[allowedField] = 1; + return; + } + } + }); - return builder; + return builder; } function filterDisallowedFields(disallowedFields, modifiedFields) { - let builder = {_id: 1}; + let builder = { _id: 1 }; - modifiedFields.forEach(modifiedField => { - let isAllowed = true; - for (let i = 0; i < disallowedFields.length; i++) { - const disallowedField = disallowedFields[i]; + modifiedFields.forEach((modifiedField) => { + let isAllowed = true; + for (let i = 0; i < disallowedFields.length; i++) { + const disallowedField = disallowedFields[i]; - if (modifiedField === disallowedField) { - isAllowed = false; - break; - } + if (modifiedField === disallowedField) { + isAllowed = false; + break; + } - // modifiedField: profile - // disallowedField: profile.firstName - // => profile: 1, and field reprojection LocalCollection._ - // break - if (disallowedField.indexOf(modifiedField + '.') !== -1) 
{ - isAllowed = false; - builder[modifiedField] = 1; - } + // modifiedField: profile + // disallowedField: profile.firstName + // => profile: 1, and field reprojection LocalCollection._ + // break + if (disallowedField.indexOf(modifiedField + ".") !== -1) { + isAllowed = false; + builder[modifiedField] = 1; + } - // modifiedField: address.city - // disallowedField: address - // isAllowed => false - // break - if (modifiedField.indexOf(disallowedField + '.') !== -1) { - isAllowed = false; - break; - } - } + // modifiedField: address.city + // disallowedField: address + // isAllowed => false + // break + if (modifiedField.indexOf(disallowedField + ".") !== -1) { + isAllowed = false; + break; + } + } - if (isAllowed) { - builder[modifiedField] = 1; - } - }); + if (isAllowed) { + builder[modifiedField] = 1; + } + }); - return builder; + return builder; } -export { filterAllowedFields, filterDisallowedFields }; \ No newline at end of file +export { filterAllowedFields, filterDisallowedFields }; diff --git a/lib/cache/lib/getChannels.js b/lib/cache/lib/getChannels.js index 659836c1..c1729763 100644 --- a/lib/cache/lib/getChannels.js +++ b/lib/cache/lib/getChannels.js @@ -1,31 +1,34 @@ -import getChannelName from '../../utils/getChannelName'; +import getChannelName from "../../utils/getChannelName"; -export default (collectionName, {namespace, channel, namespaces, channels} = {}) => { - let channelStrings = []; +export default ( + collectionName, + { namespace, channel, namespaces, channels } = {} +) => { + let channelStrings = []; - if (namespaces) { - namespaces.forEach(name => { - channelStrings.push(`${name}::${collectionName}`) - }) - } + if (namespaces) { + namespaces.forEach((name) => { + channelStrings.push(`${name}::${collectionName}`); + }); + } - if (namespace) { - channelStrings.push(`${namespace}::${collectionName}`); - } + if (namespace) { + channelStrings.push(`${namespace}::${collectionName}`); + } - if (channels) { - channels.forEach(name => { - channelStrings.push(name) - }) - } + if (channels) { + channels.forEach((name) => { + channelStrings.push(name); + }); + } - if (channel) { - channelStrings.push(channel); - } + if (channel) { + channelStrings.push(channel); + } - if (channelStrings.length === 0) { - channelStrings.push(collectionName); - } + if (channelStrings.length === 0) { + channelStrings.push(collectionName); + } - return channelStrings.map(getChannelName); -} + return channelStrings.map(getChannelName); +}; diff --git a/lib/cache/lib/getSnapbackFields.js b/lib/cache/lib/getSnapbackFields.js index 8e113025..011c3d6f 100644 --- a/lib/cache/lib/getSnapbackFields.js +++ b/lib/cache/lib/getSnapbackFields.js @@ -1,24 +1,24 @@ export default (doc, fields) => { - let snapbacks = []; + let snapbacks = []; - fields.forEach(field => { - if (field.indexOf('.') !== -1) { - let parts = field.split('.'); - parts.pop(); + fields.forEach((field) => { + if (field.indexOf(".") !== -1) { + let parts = field.split("."); + parts.pop(); - if (isArray(doc, parts)) { - snapbacks.push(parts.join('.')); - } - } - }); + if (isArray(doc, parts)) { + snapbacks.push(parts.join(".")); + } + } + }); - return snapbacks; -} + return snapbacks; +}; const isArray = (doc, parts) => { - if (parts.length > 1) { - return isArray(doc[parts[0]], parts.slice(1)); - } else { - return Array.isArray(doc[parts[0]]); - } + if (parts.length > 1) { + return isArray(doc[parts[0]], parts.slice(1)); + } else { + return Array.isArray(doc[parts[0]]); + } }; diff --git a/lib/cache/lib/getTopLevelFields.js 
b/lib/cache/lib/getTopLevelFields.js index 6e5887b5..a87a0239 100644 --- a/lib/cache/lib/getTopLevelFields.js +++ b/lib/cache/lib/getTopLevelFields.js @@ -1,11 +1,11 @@ function getTopLevelFields(fields) { - let topLevel = []; + let topLevel = []; - fields.forEach(field => { - topLevel.push(field.split('.')[0]); - }); + fields.forEach((field) => { + topLevel.push(field.split(".")[0]); + }); - return topLevel; + return topLevel; } export default getTopLevelFields; diff --git a/lib/cache/mongoIdMap.js b/lib/cache/mongoIdMap.js index bf338167..c796e6d6 100644 --- a/lib/cache/mongoIdMap.js +++ b/lib/cache/mongoIdMap.js @@ -1,90 +1,90 @@ -import { MongoID } from 'meteor/mongo-id'; +import { MongoID } from "meteor/mongo-id"; export class MongoIDMap { - - constructor(idStringify, idParse) { - this._internal = new Map(); - this._idStringify = idStringify || MongoID.idStringify; - this._idParse = idParse || MongoID.idParse; - } - - get(id) { - const key = this._idStringify(id); - return this._internal.get(key); + constructor(idStringify, idParse) { + this._internal = new Map(); + this._idStringify = idStringify || MongoID.idStringify; + this._idParse = idParse || MongoID.idParse; + } + + get(id) { + const key = this._idStringify(id); + return this._internal.get(key); + } + + pop(id) { + const key = this._idStringify(id); + const ret = this._internal.get(key); + this._internal.delete(key); + return ret; + } + + set(id, value) { + const key = this._idStringify(id); + this._internal.set(key, value); + } + + setDefault(id, def) { + const key = this._idStringify(id); + if (this._internal.has(key)) { + return this._internal.get(key); } - - pop(id) { - const key = this._idStringify(id); - const ret = this._internal.get(key); - this._internal.delete(key); - return ret; + this._internal.set(key, def); + return def; + } + + remove(id) { + const key = this._idStringify(id); + this._internal.delete(key); + } + + has(id) { + const key = this._idStringify(id); + return this._internal.has(key); + } + + size() { + return this._internal.size; + } + + empty() { + return this._internal.size === 0; + } + + clear() { + this._internal.clear(); + } + + keys() { + return Array.from(this._internal.keys()).map((key) => this._idParse(key)); + } + + forEach(iterator) { + this._internal.forEach((value, key) => { + iterator.call(null, value, this._idParse(key)); + }); + } + + async compareWith(other, callbacks) { + // operate on the _internal maps to avoid overhead of parsing id's. 
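// Editor's sketch (not part of the patch): how the async compareWith() above is meant
// to be consumed. `oldStore`/`newStore` and the callback bodies are illustrative; only
// the { both, leftOnly, rightOnly } callback shape comes from the implementation here.
async function diffIdMaps(oldStore, newStore) {
  await oldStore.compareWith(newStore, {
    both: async (docId, oldDoc, newDoc) => {
      // docId exists in both maps -> candidate for a `changed` event
    },
    leftOnly: async (docId, oldDoc) => {
      // docId only in the old map -> a `removed` event
    },
    rightOnly: async (docId, newDoc) => {
      // docId only in the new map -> an `added` event
    },
  });
}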
+ const leftMap = this._internal; + const rightMap = other._internal; + + for (const [key, leftValue] of leftMap) { + const rightValue = rightMap.get(key); + if (rightValue != null) + callbacks.both && + (await callbacks.both(this._idParse(key), leftValue, rightValue)); + else + callbacks.leftOnly && + (await callbacks.leftOnly(this._idParse(key), leftValue)); } - set(id, value) { - const key = this._idStringify(id); - this._internal.set(key, value); - } - - setDefault(id, def) { - const key = this._idStringify(id); - if (this._internal.has(key)) { - return this._internal.get(key); - } - this._internal.set(key, def); - return def; - } - - remove(id) { - const key = this._idStringify(id); - this._internal.delete(key); - } - - has(id) { - const key = this._idStringify(id); - return this._internal.has(key); - } - - size() { - return this._internal.size; - } - - empty() { - return this._internal.size === 0; - } - - clear() { - this._internal.clear(); - } - - keys() { - return Array.from(this._internal.keys()).map(key => this._idParse(key)) - } - - forEach(iterator) { - this._internal.forEach((value, key) => { - iterator.call(null, value, this._idParse(key)); - }); - } - - async compareWith(other, callbacks) { - // operate on the _internal maps to avoid overhead of parsing id's. - const leftMap = this._internal; - const rightMap = other._internal; - - for (const [key, leftValue] of leftMap) { - const rightValue = rightMap.get(key); - if (rightValue != null) - callbacks.both && await callbacks.both(this._idParse(key), leftValue, rightValue); - else - callbacks.leftOnly && await callbacks.leftOnly(this._idParse(key), leftValue); - } - - if (callbacks.rightOnly) { - for (const [key ,rightValue] of rightMap) { - if (!leftMap.has(key)) - await callbacks.rightOnly(this._idParse(key), rightValue); - } - } - + if (callbacks.rightOnly) { + for (const [key, rightValue] of rightMap) { + if (!leftMap.has(key)) + await callbacks.rightOnly(this._idParse(key), rightValue); + } } + } } diff --git a/lib/config.js b/lib/config.js index dbcf4765..09b21a12 100644 --- a/lib/config.js +++ b/lib/config.js @@ -2,59 +2,54 @@ * In-Memory configuration storage */ let Config = { - isInitialized: false, - debug: false, - overridePublishFunction: true, - mutationDefaults: { - pushToRedis: true, - optimistic: true, + isInitialized: false, + debug: false, + overridePublishFunction: true, + mutationDefaults: { + pushToRedis: true, + optimistic: true, + }, + passConfigDown: false, + redis: { + port: 6379, + host: "127.0.0.1", + }, + globalRedisPrefix: "", + retryIntervalMs: 10000, + externalRedisPublisher: false, + redisExtras: { + retry_strategy: function (options) { + return Config.retryIntervalMs; + // reconnect after + // return Math.min(options.attempt * 100, 30000); }, - passConfigDown: false, - redis: { - port: 6379, - host: '127.0.0.1', - }, - globalRedisPrefix: '', - retryIntervalMs: 10000, - externalRedisPublisher: false, - redisExtras: { - retry_strategy: function(options) { - return Config.retryIntervalMs; - // reconnect after - // return Math.min(options.attempt * 100, 30000); - }, - events: { - end(err) { - console.error('RedisOplog - Connection to redis ended'); - }, - error(err) { - console.error( - `RedisOplog - An error occured: \n`, - JSON.stringify(err) - ); - }, - connect(err) { - if (!err) { - console.log( - 'RedisOplog - Established connection to redis.' 
- ); - } else { - console.error( - 'RedisOplog - There was an error when connecting to redis', - JSON.stringify(err) - ); - } - }, - reconnecting(err) { - if (err) { - console.error( - 'RedisOplog - There was an error when re-connecting to redis', - JSON.stringify(err) - ); - } - }, - }, + events: { + end(err) { + console.error("RedisOplog - Connection to redis ended"); + }, + error(err) { + console.error(`RedisOplog - An error occured: \n`, JSON.stringify(err)); + }, + connect(err) { + if (!err) { + console.log("RedisOplog - Established connection to redis."); + } else { + console.error( + "RedisOplog - There was an error when connecting to redis", + JSON.stringify(err) + ); + } + }, + reconnecting(err) { + if (err) { + console.error( + "RedisOplog - There was an error when re-connecting to redis", + JSON.stringify(err) + ); + } + }, }, + }, }; export default Config; diff --git a/lib/constants.js b/lib/constants.js index 6e09b4ef..5782dd6d 100644 --- a/lib/constants.js +++ b/lib/constants.js @@ -1,40 +1,35 @@ const RedisPipe = { - EVENT: 'e', - DOC: 'd', - FIELDS: 'f', - MODIFIER: 'm', - DOCUMENT_ID: 'id', - SYNTHETIC: 's', - UID: 'u', // this is the unique identity of a change request - MODIFIED_TOP_LEVEL_FIELDS: 'mt' + EVENT: "e", + DOC: "d", + FIELDS: "f", + MODIFIER: "m", + DOCUMENT_ID: "id", + SYNTHETIC: "s", + UID: "u", // this is the unique identity of a change request + MODIFIED_TOP_LEVEL_FIELDS: "mt", }; export default RedisPipe; const Events = { - INSERT: 'i', - UPDATE: 'u', - REMOVE: 'r' + INSERT: "i", + UPDATE: "u", + REMOVE: "r", }; const Strategy = { - DEFAULT: 'D', - DEDICATED_CHANNELS: 'DC', - LIMIT_SORT: 'LS' + DEFAULT: "D", + DEDICATED_CHANNELS: "DC", + LIMIT_SORT: "LS", }; const VentConstants = { - ID: 'i', - EVENT_VARIABLE: 'e', - PREFIX: '__vent', - getPrefix(id, name) { - return `${id}.${name}`; - } + ID: "i", + EVENT_VARIABLE: "e", + PREFIX: "__vent", + getPrefix(id, name) { + return `${id}.${name}`; + }, }; -export { - Events, - Strategy, - RedisPipe, - VentConstants -}; +export { Events, Strategy, RedisPipe, VentConstants }; diff --git a/lib/debug.js b/lib/debug.js index 7876c732..a52795e4 100644 --- a/lib/debug.js +++ b/lib/debug.js @@ -1,12 +1,12 @@ -import Config from './config'; +import Config from "./config"; export default (message, trace = false) => { - if (Config.debug) { - const timestamp = (new Date()).getTime(); - console.log(`[${timestamp}] - ` + message); + if (Config.debug) { + const timestamp = new Date().getTime(); + console.log(`[${timestamp}] - ` + message); - if (trace) { - console.log(trace); - } + if (trace) { + console.log(trace); } -} \ No newline at end of file + } +}; diff --git a/lib/init.js b/lib/init.js index 902a3efd..f001b4f5 100644 --- a/lib/init.js +++ b/lib/init.js @@ -1,41 +1,41 @@ // https://github.com/luin/ioredis#connect-to-redis -import Config from './config'; -import extendMongoCollection from './mongo/extendMongoCollection'; -import RedisSubscriptionManager from './redis/RedisSubscriptionManager'; -import PubSubManager from './redis/PubSubManager'; -import { getRedisListener } from './redis/getRedisClient'; -import deepExtend from 'deep-extend'; -import reload from './processors/actions/reload'; +import Config from "./config"; +import extendMongoCollection from "./mongo/extendMongoCollection"; +import RedisSubscriptionManager from "./redis/RedisSubscriptionManager"; +import PubSubManager from "./redis/PubSubManager"; +import { getRedisListener } from "./redis/getRedisClient"; +import deepExtend from "deep-extend"; +import 
reload from "./processors/actions/reload"; let isInitialized = false; export default (config = {}) => { - if (isInitialized) { - throw 'You cannot initialize RedisOplog twice.'; - } - - isInitialized = true; - - deepExtend(Config, config); - - Object.assign(Config, { - isInitialized: true, - oldPublish: Meteor.publish, - }); - - extendMongoCollection(); - - // this initializes the listener singleton with the proper onConnect functionality - getRedisListener({ - async onConnect() { - // this will be executed initially, but since there won't be any observable collections, nothing will happen - // PublicationFactory.reloadAll(); - for (const redisSubscriber of RedisSubscriptionManager.getAllRedisSubscribers()) { - await reload(redisSubscriber.observableCollection); - } - }, - }); - - RedisSubscriptionManager.init(); - Config.pubSubManager = new PubSubManager(); + if (isInitialized) { + throw "You cannot initialize RedisOplog twice."; + } + + isInitialized = true; + + deepExtend(Config, config); + + Object.assign(Config, { + isInitialized: true, + oldPublish: Meteor.publish, + }); + + extendMongoCollection(); + + // this initializes the listener singleton with the proper onConnect functionality + getRedisListener({ + async onConnect() { + // this will be executed initially, but since there won't be any observable collections, nothing will happen + // PublicationFactory.reloadAll(); + for (const redisSubscriber of RedisSubscriptionManager.getAllRedisSubscribers()) { + await reload(redisSubscriber.observableCollection); + } + }, + }); + + RedisSubscriptionManager.init(); + Config.pubSubManager = new PubSubManager(); }; diff --git a/lib/mongo/Mutator.js b/lib/mongo/Mutator.js index 7f336bc1..d6bb31de 100644 --- a/lib/mongo/Mutator.js +++ b/lib/mongo/Mutator.js @@ -1,403 +1,396 @@ -import { Meteor } from 'meteor/meteor'; -import { _ } from 'meteor/underscore'; +import { Meteor } from "meteor/meteor"; +import { _ } from "meteor/underscore"; import getMutationConfig from "./lib/getMutationConfig"; import getFields from "../utils/getFields"; import { - dispatchInsert, - dispatchUpdate, - dispatchRemove + dispatchInsert, + dispatchUpdate, + dispatchRemove, } from "./lib/dispatchers"; import Config from "../config"; import { Events } from "../constants"; function runCallbackInBackground(fn) { - Meteor.defer(Meteor.bindEnvironment(fn)); + Meteor.defer(Meteor.bindEnvironment(fn)); } function protectAgainstRaceConditions(collection) { - if (!collection._redisOplog) { - return true; - } - - return ( - collection._redisOplog && - collection._redisOplog.protectAgainstRaceConditions - ); + if (!collection._redisOplog) { + return true; + } + + return ( + collection._redisOplog && + collection._redisOplog.protectAgainstRaceConditions + ); } function shouldIncludePrevDocument(collection) { - return ( - collection._redisOplog && - collection._redisOplog.shouldIncludePrevDocument - ); + return ( + collection._redisOplog && collection._redisOplog.shouldIncludePrevDocument + ); } /** * The Mutator is the interface that does the required updates */ export default class Mutator { - static init() { - Mutator.passConfigDown = Config.passConfigDown; - - // regardless of your choice, these 2 packages must passConfigDown - // we do like this until we find a more elegant way - if ( - Package["aldeed:collection2"] !== undefined || - Package["aldeed:collection2-core"] !== undefined - ) { - Mutator.passConfigDown = true; - } + static init() { + Mutator.passConfigDown = Config.passConfigDown; + + // regardless of your choice, 
these 2 packages must passConfigDown + // we do like this until we find a more elegant way + if ( + Package["aldeed:collection2"] !== undefined || + Package["aldeed:collection2-core"] !== undefined + ) { + Mutator.passConfigDown = true; } - - static async insert(Originals, data, _config) { - const config = await getMutationConfig(this, _config, { - doc: data, - event: Events.INSERT - }); - - if (canUseOriginalMethod(config)) { - return Originals.insert.call( - this, - data, - _.isFunction(_config) ? _config : undefined - ); - } - - try { - const docId = await Originals.insert.call(this, data); - - // It's a callback - if (_.isFunction(_config)) { - const self = this; - runCallbackInBackground(function() { - _config.call(self, null, docId); - }); - } - - let doc = { _id: docId }; - - if (!protectAgainstRaceConditions(this)) { - doc = await Originals.findOne.call(this, docId); - } - - await dispatchInsert( - config.optimistic, - this._name, - config._channels, - doc - ); - - return docId; - } catch (e) { - if (_.isFunction(_config)) { - Meteor.defer(() => { - return _config.call(this, e); - }); - } else { - throw e; - } - } + } + + static async insert(Originals, data, _config) { + const config = await getMutationConfig(this, _config, { + doc: data, + event: Events.INSERT, + }); + + if (canUseOriginalMethod(config)) { + return Originals.insert.call( + this, + data, + _.isFunction(_config) ? _config : undefined + ); } - /** - * @param Originals - * @param selector - * @param modifier - * @param _config - * @param callback - * @returns {*} - */ - static async update(Originals, selector, modifier, _config, callback) { - if (_.isString(selector)) { - selector = { _id: selector }; - } - - if (_.isFunction(_config)) { - callback = _config; - _config = {}; - } + try { + const docId = await Originals.insert.call(this, data); - const config = await getMutationConfig(this, _config, { - event: Events.UPDATE, - selector, - modifier + // It's a callback + if (_.isFunction(_config)) { + const self = this; + runCallbackInBackground(function () { + _config.call(self, null, docId); }); - - if (canUseOriginalMethod(config)) { - return Originals.update.call( - this, - selector, - modifier, - _config, - callback - ); - } - - // searching the elements that will get updated by id - const findOptions = { fields: { _id: 1 }, transform: null }; - if (!config.multi) { - findOptions.limit = 1; - } - - let docs; - if (shouldIncludePrevDocument(this)) { - docs = await this.find(selector, { ...findOptions, fields: {} }).fetchAsync(); - } else { - docs = await this.find(selector, findOptions).fetchAsync(); - } - - let docIds = docs.map(doc => doc._id); - - if (config && config.upsert) { - return Mutator._handleUpsert.call( - this, - Originals, - selector, - modifier, - Object.assign({}, {_returnObject: false}, config), - callback, - docIds, - docs - ); - } - - // we do this because when we send to redis - // we need the exact _ids - // and we extend the selector, because if between finding the docIds and updating - // another matching insert sneaked in, it's update will not be pushed - const updateSelector = Object.assign({}, selector, { - _id: { $in: docIds } + } + + let doc = { _id: docId }; + + if (!protectAgainstRaceConditions(this)) { + doc = await Originals.findOne.call(this, docId); + } + + await dispatchInsert( + config.optimistic, + this._name, + config._channels, + doc + ); + + return docId; + } catch (e) { + if (_.isFunction(_config)) { + Meteor.defer(() => { + return _config.call(this, e); }); + } else { + 
throw e; + } + } + } + + /** + * @param Originals + * @param selector + * @param modifier + * @param _config + * @param callback + * @returns {*} + */ + static async update(Originals, selector, modifier, _config, callback) { + if (_.isString(selector)) { + selector = { _id: selector }; + } - try { - const result = await Originals.update.call( - this, - updateSelector, - modifier, - config - ); + if (_.isFunction(_config)) { + callback = _config; + _config = {}; + } - // phony callback emulation - if (callback) { - const self = this; - runCallbackInBackground(function() { - callback.call(self, null, result); - }); - } - - if (!protectAgainstRaceConditions(this)) { - docs = await this.find( - { _id: { $in: docIds } }, - { - ...findOptions, - fields: {} - } - ).fetchAsync(); - } + const config = await getMutationConfig(this, _config, { + event: Events.UPDATE, + selector, + modifier, + }); - const { fields } = getFields(modifier); + if (canUseOriginalMethod(config)) { + return Originals.update.call(this, selector, modifier, _config, callback); + } - await dispatchUpdate( - config.optimistic, - this._name, - config._channels, - docs, - fields - ); + // searching the elements that will get updated by id + const findOptions = { fields: { _id: 1 }, transform: null }; + if (!config.multi) { + findOptions.limit = 1; + } - return result; - } catch (e) { - if (callback) { - const self = this; - runCallbackInBackground(function() { - callback.call(self, e); - }); - } else { - throw e; - } - } + let docs; + if (shouldIncludePrevDocument(this)) { + docs = await this.find(selector, { + ...findOptions, + fields: {}, + }).fetchAsync(); + } else { + docs = await this.find(selector, findOptions).fetchAsync(); } - /** - * @param Originals - * @param selector - * @param modifier - * @param config - * @param callback - * @param docIds - */ - static async _handleUpsert( + let docIds = docs.map((doc) => doc._id); + + if (config && config.upsert) { + return Mutator._handleUpsert.call( + this, Originals, selector, modifier, - config, + Object.assign({}, { _returnObject: false }, config), callback, docIds, docs - ) { - try { - const data = await Originals.update.call( - this, - selector, - modifier, - Object.assign({}, config, { _returnObject: true }) - ); + ); + } - if (callback) { - const self = this; - runCallbackInBackground(function() { - callback.call(this, null, data); - }); - } - - if (config.pushToRedis) { - if (data.insertedId) { - let doc = { - _id: data.insertedId - }; - - if (!protectAgainstRaceConditions(this)) { - doc = await this.findOneAsync(doc._id); - } - - await dispatchInsert( - config.optimistic, - this._name, - config._channels, - doc - ); - } else { - // it means that we ran an upsert thinking there will be no docs - if ( - docIds.length === 0 || - data.numberAffected !== docIds.length - ) { - // there were no docs initially found matching the selector - // however a document sneeked in, resulting in a race-condition - // and if we look again for that document, we cannot retrieve it. - - // or a new document was added/modified to match selector before the actual update - console.warn( - "RedisOplog - Warning - A race condition occurred when running upsert." 
- ); - } else { - const { fields } = getFields(modifier); - - docs = await this.find(selector).fetchAsync(); - - await dispatchUpdate( - config.optimistic, - this._name, - config._channels, - docs, - fields - ); - } - } - } - - if(config._returnObject) { - return data; - } else { - return data.numberAffected; - } - } catch (e) { - if (callback) { - const self = this; - runCallbackInBackground(function() { - callback.call(self, e); - }); - } else { - throw e; - } - } + // we do this because when we send to redis + // we need the exact _ids + // and we extend the selector, because if between finding the docIds and updating + // another matching insert sneaked in, it's update will not be pushed + const updateSelector = Object.assign({}, selector, { + _id: { $in: docIds }, + }); + + try { + const result = await Originals.update.call( + this, + updateSelector, + modifier, + config + ); + + // phony callback emulation + if (callback) { + const self = this; + runCallbackInBackground(function () { + callback.call(self, null, result); + }); + } + + if (!protectAgainstRaceConditions(this)) { + docs = await this.find( + { _id: { $in: docIds } }, + { + ...findOptions, + fields: {}, + } + ).fetchAsync(); + } + + const { fields } = getFields(modifier); + + await dispatchUpdate( + config.optimistic, + this._name, + config._channels, + docs, + fields + ); + + return result; + } catch (e) { + if (callback) { + const self = this; + runCallbackInBackground(function () { + callback.call(self, e); + }); + } else { + throw e; + } } + } + + /** + * @param Originals + * @param selector + * @param modifier + * @param config + * @param callback + * @param docIds + */ + static async _handleUpsert( + Originals, + selector, + modifier, + config, + callback, + docIds, + docs + ) { + try { + const data = await Originals.update.call( + this, + selector, + modifier, + Object.assign({}, config, { _returnObject: true }) + ); - /** - * @param Originals - * @param selector - * @param _config - * @returns {*} - */ - static async remove(Originals, selector, _config) { - selector = Mongo.Collection._rewriteSelector(selector); - - const config = await getMutationConfig(this, _config, { - selector, - event: Events.REMOVE + if (callback) { + const self = this; + runCallbackInBackground(function () { + callback.call(this, null, data); }); + } + + if (config.pushToRedis) { + if (data.insertedId) { + let doc = { + _id: data.insertedId, + }; + + if (!protectAgainstRaceConditions(this)) { + doc = await this.findOneAsync(doc._id); + } + + await dispatchInsert( + config.optimistic, + this._name, + config._channels, + doc + ); + } else { + // it means that we ran an upsert thinking there will be no docs + if (docIds.length === 0 || data.numberAffected !== docIds.length) { + // there were no docs initially found matching the selector + // however a document sneeked in, resulting in a race-condition + // and if we look again for that document, we cannot retrieve it. + + // or a new document was added/modified to match selector before the actual update + console.warn( + "RedisOplog - Warning - A race condition occurred when running upsert." + ); + } else { + const { fields } = getFields(modifier); + + docs = await this.find(selector).fetchAsync(); - if (canUseOriginalMethod(config)) { - return Originals.remove.call( - this, - selector, - _.isFunction(_config) ? 
_config : undefined + await dispatchUpdate( + config.optimistic, + this._name, + config._channels, + docs, + fields ); + } } + } + + if (config._returnObject) { + return data; + } else { + return data.numberAffected; + } + } catch (e) { + if (callback) { + const self = this; + runCallbackInBackground(function () { + callback.call(self, e); + }); + } else { + throw e; + } + } + } + + /** + * @param Originals + * @param selector + * @param _config + * @returns {*} + */ + static async remove(Originals, selector, _config) { + selector = Mongo.Collection._rewriteSelector(selector); + + const config = await getMutationConfig(this, _config, { + selector, + event: Events.REMOVE, + }); + + if (canUseOriginalMethod(config)) { + return Originals.remove.call( + this, + selector, + _.isFunction(_config) ? _config : undefined + ); + } - const removeSelector = Object.assign({}, selector); - const removeOptions = { - fields: { _id: 1 }, - transform: null - }; + const removeSelector = Object.assign({}, selector); + const removeOptions = { + fields: { _id: 1 }, + transform: null, + }; - if (shouldIncludePrevDocument(this)) { - delete removeOptions.fields; - delete removeOptions.projection; - } + if (shouldIncludePrevDocument(this)) { + delete removeOptions.fields; + delete removeOptions.projection; + } - // TODO: optimization check if it has _id or _id with {$in} so we don't have to redo this. - const docs = await this.find(selector, removeOptions).fetchAsync(); - let docIds = docs.map(doc => doc._id); + // TODO: optimization check if it has _id or _id with {$in} so we don't have to redo this. + const docs = await this.find(selector, removeOptions).fetchAsync(); + let docIds = docs.map((doc) => doc._id); - if (!selector._id) { - removeSelector._id = { $in: docIds }; - } + if (!selector._id) { + removeSelector._id = { $in: docIds }; + } - try { - const result = await Originals.remove.call(this, removeSelector); - - if (_.isFunction(_config)) { - const self = this; - runCallbackInBackground(function() { - _config.call(self, null, result); - }); - } - - await dispatchRemove( - config.optimistic, - this._name, - config._channels, - docs - ); + try { + const result = await Originals.remove.call(this, removeSelector); - return result; - } catch (e) { - if (_.isFunction(_config)) { - const self = this; - runCallbackInBackground(function() { - _config.call(self, e); - }); - } else { - throw e; - } - } + if (_.isFunction(_config)) { + const self = this; + runCallbackInBackground(function () { + _config.call(self, null, result); + }); + } + + await dispatchRemove( + config.optimistic, + this._name, + config._channels, + docs + ); + + return result; + } catch (e) { + if (_.isFunction(_config)) { + const self = this; + runCallbackInBackground(function () { + _config.call(self, e); + }); + } else { + throw e; + } } + } } function canUseOriginalMethod(mutationConfig) { - // There are two cases where we can use the original mutators rather than - // our overriden ones: - // - // 1) The user set pushToRedis: false, indicating they don't need realtime - // updates at all. - // - // 2) The user is using an external redis publisher, so we don't need to - // figure out what to publish to redis, and this update doesn't need - // optimistic-ui processing, so we don't need to synchronously run - // observers. 
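// Editor's sketch (not part of the patch): the bypass rule described in the comment
// above, spelled out in isolation. Field names mirror the mutation config used
// throughout Mutator; the example values are illustrative only.
function bypassesRedisDispatch(mutationConfig, externalRedisPublisher) {
  return (
    !mutationConfig.pushToRedis ||
    (externalRedisPublisher && !mutationConfig.optimistic)
  );
}
// bypassesRedisDispatch({ pushToRedis: false, optimistic: true }, false); // true
// bypassesRedisDispatch({ pushToRedis: true, optimistic: false }, true);  // true
// bypassesRedisDispatch({ pushToRedis: true, optimistic: true }, true);   // false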
- return ( - !mutationConfig.pushToRedis || - (Config.externalRedisPublisher && !mutationConfig.optimistic) - ); + // There are two cases where we can use the original mutators rather than + // our overriden ones: + // + // 1) The user set pushToRedis: false, indicating they don't need realtime + // updates at all. + // + // 2) The user is using an external redis publisher, so we don't need to + // figure out what to publish to redis, and this update doesn't need + // optimistic-ui processing, so we don't need to synchronously run + // observers. + return ( + !mutationConfig.pushToRedis || + (Config.externalRedisPublisher && !mutationConfig.optimistic) + ); } diff --git a/lib/mongo/ObserveMultiplex.js b/lib/mongo/ObserveMultiplex.js index 3ebe6e6f..422a1bfc 100644 --- a/lib/mongo/ObserveMultiplex.js +++ b/lib/mongo/ObserveMultiplex.js @@ -1,291 +1,288 @@ // This code was started based on meteor/meteor github repository // This code is MIT and licensed to Meteor. -import { Meteor } from 'meteor/meteor'; -import { _ } from 'meteor/underscore'; -import { LocalCollection } from 'meteor/minimongo'; -import OptimisticInvocation from './OptimisticInvocation'; +import { Meteor } from "meteor/meteor"; +import { _ } from "meteor/underscore"; +import { LocalCollection } from "meteor/minimongo"; +import OptimisticInvocation from "./OptimisticInvocation"; export function ObserveMultiplexer(options) { + var self = this; + + if (!options || !_.has(options, "ordered")) + throw Error("must specified ordered"); + + Package["facts-base"] && + Package["facts-base"].Facts.incrementServerFact( + "mongo-livedata", + "observe-multiplexers", + 1 + ); + + self._ordered = options.ordered; + self._onStop = options.onStop || function () {}; + self._queue = new Meteor._AsynchronousQueue(); + self._handles = {}; + this._resolver = null; + this._readyPromise = new Promise((r) => (this._resolver = r)).then( + () => (this._isReady = true) + ); + self._cache = new LocalCollection._CachingChangeObserver({ + ordered: options.ordered, + }); + // Number of addHandleAndSendInitialAdds tasks scheduled but not yet + // running. removeHandle uses this to know if it's time to call the onStop + // callback. + self._addHandleTasksScheduledButNotPerformed = 0; + + _.each(self.callbackNames(), function (callbackName) { + self[callbackName] = function (/* ... */) { + self._applyCallback(callbackName, _.toArray(arguments)); + }; + }); +} + +Object.assign(ObserveMultiplexer.prototype, { + addHandleAndSendInitialAdds: async function (handle) { var self = this; - if (!options || !_.has(options, 'ordered')) - throw Error('must specified ordered'); - - Package['facts-base'] && - Package['facts-base'].Facts.incrementServerFact( - 'mongo-livedata', - 'observe-multiplexers', - 1 - ); - - self._ordered = options.ordered; - self._onStop = options.onStop || function() {}; - self._queue = new Meteor._AsynchronousQueue(); - self._handles = {}; - this._resolver = null; - this._readyPromise = new Promise(r => this._resolver = r).then(() => this._isReady = true); - self._cache = new LocalCollection._CachingChangeObserver({ - ordered: options.ordered, + ++self._addHandleTasksScheduledButNotPerformed; + + Package["facts-base"] && + Package["facts-base"].Facts.incrementServerFact( + "mongo-livedata", + "observe-handles", + 1 + ); + + self._queue.runTask(async function () { + self._handles[handle._id] = handle; + // Send out whatever adds we have so far (whether or not we the + // multiplexer is ready). 
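// Editor's sketch (not part of the patch): the ready-gating pattern this multiplexer
// now uses — a promise created up-front with its resolver stashed, awaited by
// addHandleAndSendInitialAdds and resolved by ready(). The class name is illustrative;
// the real ready() additionally throws if called twice.
class ReadyGate {
  constructor() {
    this._isReady = false;
    this._resolver = null;
    this._readyPromise = new Promise((r) => (this._resolver = r)).then(
      () => (this._isReady = true)
    );
  }
  ready() {
    this._resolver();
  }
  isReady() {
    return this._isReady;
  }
  async waitUntilReady() {
    await this._readyPromise;
  }
}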
+ await self._sendAdds(handle); + --self._addHandleTasksScheduledButNotPerformed; }); - // Number of addHandleAndSendInitialAdds tasks scheduled but not yet - // running. removeHandle uses this to know if it's time to call the onStop - // callback. - self._addHandleTasksScheduledButNotPerformed = 0; - - _.each(self.callbackNames(), function(callbackName) { - self[callbackName] = function(/* ... */) { - self._applyCallback(callbackName, _.toArray(arguments)); - }; + + // *outside* the task, since otherwise we'd deadlock + await this._readyPromise; + }, + + // Remove an observe handle. If it was the last observe handle, call the + // onStop callback; you cannot add any more observe handles after this. + // + // This is not synchronized with polls and handle additions: this means that + // you can safely call it from within an observe callback, but it also means + // that we have to be careful when we iterate over _handles. + removeHandle: async function (id) { + var self = this; + + // This should not be possible: you can only call removeHandle by having + // access to the ObserveHandle, which isn't returned to user code until the + // multiplex is ready. + if (!self._ready()) + throw new Error("Can't remove handles until the multiplex is ready"); + + delete self._handles[id]; + + Package["facts-base"] && + Package["facts-base"].Facts.incrementServerFact( + "mongo-livedata", + "observe-handles", + -1 + ); + + if ( + _.isEmpty(self._handles) && + self._addHandleTasksScheduledButNotPerformed === 0 + ) { + await self._stop(); + } + }, + _stop: async function (options) { + var self = this; + options = options || {}; + + // It shouldn't be possible for us to stop when all our handles still + // haven't been returned from observeChanges! + if (!self._ready() && !options.fromQueryError) + throw Error("surprising _stop: not ready"); + + // Call stop callback (which kills the underlying process which sends us + // callbacks and removes us from the connection's dictionary). + await self._onStop(); + Package["facts-base"] && + Package["facts-base"].Facts.incrementServerFact( + "mongo-livedata", + "observe-multiplexers", + -1 + ); + + // Cause future addHandleAndSendInitialAdds calls to throw (but the onStop + // callback should make our connection forget about us). + self._handles = null; + }, + + // Allows all addHandleAndSendInitialAdds calls to return, once all preceding + // adds have been processed. Does not block. + ready: function () { + var self = this; + self._queue.queueTask(function () { + if (self._ready()) + throw Error("can't make ObserveMultiplex ready twice!"); + + if (!self._resolver) { + throw new Error("Missing resolver"); + } + + self._resolver(); }); -} + }, + + // If trying to execute the query results in an error, call this. This is + // intended for permanent errors, not transient network errors that could be + // fixed. It should only be called before ready(), because if you called ready + // that meant that you managed to run the query once. It will stop this + // ObserveMultiplex and cause addHandleAndSendInitialAdds calls (and thus + // observeChanges calls) to throw the error. 
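// Editor's sketch (not part of the patch): how an observe driver is expected to report
// a permanent query error, mirroring the PollingObserveDriver usage later in this
// patch. `multiplexer` and `cursorDescription` are whatever the driver was constructed
// with; the message format copies the polling driver's.
async function reportPermanentQueryError(multiplexer, cursorDescription, err) {
  await multiplexer.queryError(
    new Error(
      "Exception while polling query " +
        JSON.stringify(cursorDescription) +
        ": " +
        err.message
    )
  );
}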
+ queryError: async function (err) { + var self = this; + await self._queue.runTask(async function () { + if (self._ready()) + throw Error("can't claim query has an error after it worked!"); -Object.assign(ObserveMultiplexer.prototype, { - addHandleAndSendInitialAdds: async function(handle) { - var self = this; - - ++self._addHandleTasksScheduledButNotPerformed; - - Package['facts-base'] && - Package['facts-base'].Facts.incrementServerFact( - 'mongo-livedata', - 'observe-handles', - 1 - ); - - self._queue.runTask(async function() { - self._handles[handle._id] = handle; - // Send out whatever adds we have so far (whether or not we the - // multiplexer is ready). - await self._sendAdds(handle); - --self._addHandleTasksScheduledButNotPerformed; - }); - - // *outside* the task, since otherwise we'd deadlock - await this._readyPromise; - }, - - // Remove an observe handle. If it was the last observe handle, call the - // onStop callback; you cannot add any more observe handles after this. - // - // This is not synchronized with polls and handle additions: this means that - // you can safely call it from within an observe callback, but it also means - // that we have to be careful when we iterate over _handles. - removeHandle: async function(id) { - var self = this; - - // This should not be possible: you can only call removeHandle by having - // access to the ObserveHandle, which isn't returned to user code until the - // multiplex is ready. - if (!self._ready()) - throw new Error( - "Can't remove handles until the multiplex is ready" - ); - - delete self._handles[id]; - - Package['facts-base'] && - Package['facts-base'].Facts.incrementServerFact( - 'mongo-livedata', - 'observe-handles', - -1 - ); - - if ( - _.isEmpty(self._handles) && - self._addHandleTasksScheduledButNotPerformed === 0 - ) { - await self._stop(); - } - }, - _stop: async function(options) { - var self = this; - options = options || {}; - - // It shouldn't be possible for us to stop when all our handles still - // haven't been returned from observeChanges! - if (!self._ready() && !options.fromQueryError) - throw Error('surprising _stop: not ready'); - - // Call stop callback (which kills the underlying process which sends us - // callbacks and removes us from the connection's dictionary). - await self._onStop(); - Package['facts-base'] && - Package['facts-base'].Facts.incrementServerFact( - 'mongo-livedata', - 'observe-multiplexers', - -1 - ); - - // Cause future addHandleAndSendInitialAdds calls to throw (but the onStop - // callback should make our connection forget about us). - self._handles = null; - }, - - // Allows all addHandleAndSendInitialAdds calls to return, once all preceding - // adds have been processed. Does not block. - ready: function() { - var self = this; - self._queue.queueTask(function() { - if (self._ready()) - throw Error("can't make ObserveMultiplex ready twice!"); - - if (!self._resolver) { - throw new Error("Missing resolver"); - } + await self._stop({ fromQueryError: true }); + throw err; + }); + }, - self._resolver(); - }); - }, - - // If trying to execute the query results in an error, call this. This is - // intended for permanent errors, not transient network errors that could be - // fixed. It should only be called before ready(), because if you called ready - // that meant that you managed to run the query once. It will stop this - // ObserveMultiplex and cause addHandleAndSendInitialAdds calls (and thus - // observeChanges calls) to throw the error. 
- queryError: async function(err) { - var self = this; - await self._queue.runTask(async function() { - if (self._ready()) - throw Error("can't claim query has an error after it worked!"); - - await self._stop({ fromQueryError: true }); - throw err; - }); - }, - - // Calls "cb" once the effects of all "ready", "addHandleAndSendInitialAdds" - // and observe callbacks which came before this call have been propagated to - // all handles. "ready" must have already been called on this multiplexer. - onFlush: function(cb) { - var self = this; - self._queue.queueTask(async function() { - if (!self._ready()) - throw Error( - 'only call onFlush on a multiplexer that will be ready' - ); - await cb(); - }); - }, - callbackNames: function() { - var self = this; - if (self._ordered) - return ['addedBefore', 'changed', 'movedBefore', 'removed']; - else return ['added', 'changed', 'removed']; - }, - _ready: function() { - return !!this._isReady; - }, - _applyCallback: async function(callbackName, args) { - var self = this; - - const isOptimistic = !!OptimisticInvocation.get(); - // TODO Add a debug message here - const runType = isOptimistic ? 'runTask' : 'queueTask'; - await self._queue[runType](async function() { - // If we stopped in the meantime, do nothing. - if (!self._handles) return; - - // First, apply the change to the cache. - // XXX We could make applyChange callbacks promise not to hang on to any - // state from their arguments (assuming that their supplied callbacks - // don't) and skip this clone. Currently 'changed' hangs on to state - // though. - await self._cache.applyChange[callbackName].apply( - null, - EJSON.clone(args) - ); - - // If we haven't finished the initial adds, then we should only be getting - // adds. - if ( - !self._ready() && - (callbackName !== 'added' && callbackName !== 'addedBefore') - ) { - throw new Error('Got ' + callbackName + ' during initial adds'); - } + // Calls "cb" once the effects of all "ready", "addHandleAndSendInitialAdds" + // and observe callbacks which came before this call have been propagated to + // all handles. "ready" must have already been called on this multiplexer. + onFlush: function (cb) { + var self = this; + self._queue.queueTask(async function () { + if (!self._ready()) + throw Error("only call onFlush on a multiplexer that will be ready"); + await cb(); + }); + }, + callbackNames: function () { + var self = this; + if (self._ordered) + return ["addedBefore", "changed", "movedBefore", "removed"]; + else return ["added", "changed", "removed"]; + }, + _ready: function () { + return !!this._isReady; + }, + _applyCallback: async function (callbackName, args) { + var self = this; - // Now multiplex the callbacks out to all observe handles. It's OK if - // these calls yield; since we're inside a task, no other use of our queue - // can continue until these are done. (But we do have to be careful to not - // use a handle that got removed, because removeHandle does not use the - // queue; thus, we iterate over an array of keys that we control.) 
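// Editor's sketch (not part of the patch): the fan-out step in isolation — iterate a
// snapshot of handle ids, re-check each handle still exists, and clone the arguments so
// callbacks may mutate them. The suppression of "removed non-existent" exceptions is
// omitted here; EJSON is the Meteor global also used by the surrounding code.
async function fanOutToHandles(handles, callbackName, args) {
  for (const handleId of Object.keys(handles)) {
    const handle = handles[handleId];
    if (!handle) continue; // handle was removed while we were iterating
    const callback = handle["_" + callbackName];
    if (callback) {
      await callback.apply(null, EJSON.clone(args));
    }
  }
}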
- for (const handleId of Object.keys(self._handles)) { - var handle = self._handles && self._handles[handleId]; - if (!handle) return; - var callback = handle['_' + callbackName]; - // clone arguments so that callbacks can mutate their arguments - - // We silence out removed exceptions - if (callback === 'removed') { - try { - await callback.apply(null, EJSON.clone(args)); - } catch (e) { - // Supressing `removed non-existent exceptions` - if (!isRemovedNonExistent(e)) { - throw e; - } - } - } else { - callback && await callback.apply(null, EJSON.clone(args)); - } + const isOptimistic = !!OptimisticInvocation.get(); + // TODO Add a debug message here + const runType = isOptimistic ? "runTask" : "queueTask"; + await self._queue[runType](async function () { + // If we stopped in the meantime, do nothing. + if (!self._handles) return; + + // First, apply the change to the cache. + // XXX We could make applyChange callbacks promise not to hang on to any + // state from their arguments (assuming that their supplied callbacks + // don't) and skip this clone. Currently 'changed' hangs on to state + // though. + await self._cache.applyChange[callbackName].apply( + null, + EJSON.clone(args) + ); + + // If we haven't finished the initial adds, then we should only be getting + // adds. + if ( + !self._ready() && + callbackName !== "added" && + callbackName !== "addedBefore" + ) { + throw new Error("Got " + callbackName + " during initial adds"); + } + + // Now multiplex the callbacks out to all observe handles. It's OK if + // these calls yield; since we're inside a task, no other use of our queue + // can continue until these are done. (But we do have to be careful to not + // use a handle that got removed, because removeHandle does not use the + // queue; thus, we iterate over an array of keys that we control.) + for (const handleId of Object.keys(self._handles)) { + var handle = self._handles && self._handles[handleId]; + if (!handle) return; + var callback = handle["_" + callbackName]; + // clone arguments so that callbacks can mutate their arguments + + // We silence out removed exceptions + if (callback === "removed") { + try { + await callback.apply(null, EJSON.clone(args)); + } catch (e) { + // Supressing `removed non-existent exceptions` + if (!isRemovedNonExistent(e)) { + throw e; } - }); - }, - - // Sends initial adds to a handle. It should only be called from within a task - // (the task that is processing the addHandleAndSendInitialAdds call). It - // synchronously invokes the handle's added or addedBefore; there's no need to - // flush the queue afterwards to ensure that the callbacks get out. - _sendAdds: async function(handle) { - var self = this; - if (!self._queue._runningOrRunScheduled) - throw Error('_sendAdds may only be called from within a task!'); - - var add = self._ordered ? handle._addedBefore : handle._added; - if (!add) return; - // note: docs may be an _IdMap or an OrderedDict - await self._cache.docs.forEachAsync(async function(doc, id) { - if (!_.has(self._handles, handle._id)) - throw Error('handle got removed before sending initial adds!'); - var fields = EJSON.clone(doc); - delete fields._id; - if (self._ordered) - await add(id, fields, null); - // we're going in order, so add at end - else - await add(id, fields); - }); - }, + } + } else { + callback && (await callback.apply(null, EJSON.clone(args))); + } + } + }); + }, + + // Sends initial adds to a handle. 
It should only be called from within a task + // (the task that is processing the addHandleAndSendInitialAdds call). It + // synchronously invokes the handle's added or addedBefore; there's no need to + // flush the queue afterwards to ensure that the callbacks get out. + _sendAdds: async function (handle) { + var self = this; + if (!self._queue._runningOrRunScheduled) + throw Error("_sendAdds may only be called from within a task!"); + + var add = self._ordered ? handle._addedBefore : handle._added; + if (!add) return; + // note: docs may be an _IdMap or an OrderedDict + await self._cache.docs.forEachAsync(async function (doc, id) { + if (!_.has(self._handles, handle._id)) + throw Error("handle got removed before sending initial adds!"); + var fields = EJSON.clone(doc); + delete fields._id; + if (self._ordered) await add(id, fields, null); + // we're going in order, so add at end + else await add(id, fields); + }); + }, }); var nextObserveHandleId = 1; export function ObserveHandle(multiplexer, callbacks) { - var self = this; - // The end user is only supposed to call stop(). The other fields are - // accessible to the multiplexer, though. - self._multiplexer = multiplexer; - _.each(multiplexer.callbackNames(), function(name) { - if (callbacks[name]) { - self['_' + name] = callbacks[name]; - } else if (name === 'addedBefore' && callbacks.added) { - // Special case: if you specify "added" and "movedBefore", you get an - // ordered observe where for some reason you don't get ordering data on - // the adds. I dunno, we wrote tests for it, there must have been a - // reason. - self._addedBefore = async function(id, fields, before) { - await callbacks.added(id, fields); - }; - } - }); - self._stopped = false; - self._id = nextObserveHandleId++; + var self = this; + // The end user is only supposed to call stop(). The other fields are + // accessible to the multiplexer, though. + self._multiplexer = multiplexer; + _.each(multiplexer.callbackNames(), function (name) { + if (callbacks[name]) { + self["_" + name] = callbacks[name]; + } else if (name === "addedBefore" && callbacks.added) { + // Special case: if you specify "added" and "movedBefore", you get an + // ordered observe where for some reason you don't get ordering data on + // the adds. I dunno, we wrote tests for it, there must have been a + // reason. 
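// Editor's sketch (not part of the patch): the consumer-facing contract these handles
// serve once the rest of this patch makes observeChanges async — callbacks are awaited
// and stop() returns a promise. The cursor, the exact entry-point name under Meteor 3,
// and the callback bodies are illustrative.
const handle = await cursor.observeChanges({
  added(id, fields) {
    /* initial adds arrive first */
  },
  changed(id, fields) {
    /* only the fields that changed */
  },
  removed(id) {
    /* document left the result set */
  },
});
// ...later, when the subscription goes away:
await handle.stop();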
+ self._addedBefore = async function (id, fields, before) { + await callbacks.added(id, fields); + }; + } + }); + self._stopped = false; + self._id = nextObserveHandleId++; } -ObserveHandle.prototype.stop = async function() { - var self = this; - if (self._stopped) return; - self._stopped = true; - await self._multiplexer.removeHandle(self._id); +ObserveHandle.prototype.stop = async function () { + var self = this; + if (self._stopped) return; + self._stopped = true; + await self._multiplexer.removeHandle(self._id); }; diff --git a/lib/mongo/PollingObserveDriver.js b/lib/mongo/PollingObserveDriver.js index 676514b6..573ac8ab 100644 --- a/lib/mongo/PollingObserveDriver.js +++ b/lib/mongo/PollingObserveDriver.js @@ -1,299 +1,298 @@ -import { _ } from 'meteor/underscore'; -import { LocalCollection } from 'meteor/minimongo'; +import { _ } from "meteor/underscore"; +import { LocalCollection } from "meteor/minimongo"; function listenAll(cursorDescription, listenCallback) { - var listeners = []; - forEachTrigger(cursorDescription, function(trigger) { - listeners.push( - DDPServer._InvalidationCrossbar.listen(trigger, listenCallback) - ); - }); - return { - stop: function() { - _.each(listeners, function(listener) { - listener.stop(); - }); - }, - }; + var listeners = []; + forEachTrigger(cursorDescription, function (trigger) { + listeners.push( + DDPServer._InvalidationCrossbar.listen(trigger, listenCallback) + ); + }); + return { + stop: function () { + _.each(listeners, function (listener) { + listener.stop(); + }); + }, + }; } function forEachTrigger(cursorDescription, triggerCallback) { - var key = { - collection: cursorDescription.collectionName, - }; + var key = { + collection: cursorDescription.collectionName, + }; + + var specificIds = LocalCollection._idsMatchedBySelector( + cursorDescription.selector + ); + + if (specificIds) { + _.each(specificIds, function (id) { + triggerCallback( + Object.assign( + { + id: id, + }, + key + ) + ); + }); - var specificIds = LocalCollection._idsMatchedBySelector( - cursorDescription.selector + triggerCallback( + Object.assign( + { + dropCollection: true, + id: null, + }, + key + ) ); + } else { + triggerCallback(key); + } // Everyone cares about the database being dropped. - if (specificIds) { - _.each(specificIds, function(id) { - triggerCallback( - Object.assign( - { - id: id, - }, - key - ) - ); - }); - - triggerCallback( - Object.assign( - { - dropCollection: true, - id: null, - }, - key - ) - ); - } else { - triggerCallback(key); - } // Everyone cares about the database being dropped. + triggerCallback({ + dropDatabase: true, + }); +} - triggerCallback({ - dropDatabase: true, +export default function PollingObserveDriver(options) { + var self = this; + self._cursorDescription = options.cursorDescription; + self._mongoHandle = options.mongoHandle; + self._ordered = options.ordered; + self._multiplexer = options.multiplexer; + self._stopCallbacks = []; + self._stopped = false; + self._synchronousCursor = self._mongoHandle._createSynchronousCursor( + self._cursorDescription + ); // previous results snapshot. on each poll cycle, diffs against + // results drives the callbacks. + + self._results = null; // The number of _pollMongo calls that have been added to self._taskQueue but + // have not started running. Used to make sure we never schedule more than one + // _pollMongo (other than possibly the one that is currently running). It's + // also used by _suspendPolling to pretend there's a poll scheduled. 
Usually, + // it's either 0 (for "no polls scheduled other than maybe one currently + // running") or 1 (for "a poll scheduled that isn't running yet"), but it can + // also be 2 if incremented by _suspendPolling. + + self._pollsScheduledButNotStarted = 0; + self._pendingWrites = []; // people to notify when polling completes + // Make sure to create a separately throttled function for each + // PollingObserveDriver object. + + self._ensurePollIsScheduled = _.throttle( + self._unthrottledEnsurePollIsScheduled, + self._cursorDescription.options.pollingThrottleMs || 50 + /* ms */ + ); // XXX figure out if we still need a queue + + self._taskQueue = new Meteor._AsynchronousQueue(); + // TODO: should this be async? + var listenersHandle = listenAll( + self._cursorDescription, + function (notification) { + // When someone does a transaction that might affect us, schedule a poll + // of the database. If that transaction happens inside of a write fence, + // block the fence until we've polled and notified observers. + var fence = DDPServer._CurrentWriteFence.get(); + + if (fence) self._pendingWrites.push(fence.beginWrite()); // Ensure a poll is scheduled... but if we already know that one is, + // don't hit the throttled _ensurePollIsScheduled function (which might + // lead to us calling it unnecessarily in ms). + + if (self._pollsScheduledButNotStarted === 0) + self._ensurePollIsScheduled(); + } + ); + + self._stopCallbacks.push(function () { + listenersHandle.stop(); + }); // every once and a while, poll even if we don't think we're dirty, for + // eventual consistency with database writes from outside the Meteor + // universe. + // + // For testing, there's an undocumented callback argument to observeChanges + // which disables time-based polling and gets called at the beginning of each + // poll. + + if (options._testOnlyPollCallback) { + self._testOnlyPollCallback = options._testOnlyPollCallback; + } else { + var pollingInterval = + self._cursorDescription.options.pollingIntervalMs || + self._cursorDescription.options._pollingInterval || // COMPAT with 1.2 + 10 * 1000; + var intervalHandle = Meteor.setInterval( + _.bind(self._ensurePollIsScheduled, self), + pollingInterval + ); + + self._stopCallbacks.push(function () { + Meteor.clearInterval(intervalHandle); }); + } + + // Make sure we actually poll soon! + self._unthrottledEnsurePollIsScheduled(); + + Package.facts && + Package.facts.Facts.incrementServerFact( + "mongo-livedata", + "observe-drivers-polling", + 1 + ); } -export default function PollingObserveDriver(options) { +Object.assign(PollingObserveDriver.prototype, { + // This is always called through _.throttle (except once at startup). + _unthrottledEnsurePollIsScheduled: function () { var self = this; - self._cursorDescription = options.cursorDescription; - self._mongoHandle = options.mongoHandle; - self._ordered = options.ordered; - self._multiplexer = options.multiplexer; - self._stopCallbacks = []; - self._stopped = false; - self._synchronousCursor = self._mongoHandle._createSynchronousCursor( - self._cursorDescription - ); // previous results snapshot. on each poll cycle, diffs against - // results drives the callbacks. - - self._results = null; // The number of _pollMongo calls that have been added to self._taskQueue but - // have not started running. Used to make sure we never schedule more than one - // _pollMongo (other than possibly the one that is currently running). It's - // also used by _suspendPolling to pretend there's a poll scheduled. 
Usually, - // it's either 0 (for "no polls scheduled other than maybe one currently - // running") or 1 (for "a poll scheduled that isn't running yet"), but it can - // also be 2 if incremented by _suspendPolling. - - self._pollsScheduledButNotStarted = 0; - self._pendingWrites = []; // people to notify when polling completes - // Make sure to create a separately throttled function for each - // PollingObserveDriver object. - - self._ensurePollIsScheduled = _.throttle( - self._unthrottledEnsurePollIsScheduled, - self._cursorDescription.options.pollingThrottleMs || 50 - /* ms */ - ); // XXX figure out if we still need a queue - - self._taskQueue = new Meteor._AsynchronousQueue(); - // TODO: should this be async? - var listenersHandle = listenAll(self._cursorDescription, function( - notification - ) { - // When someone does a transaction that might affect us, schedule a poll - // of the database. If that transaction happens inside of a write fence, - // block the fence until we've polled and notified observers. - var fence = DDPServer._CurrentWriteFence.get(); - - if (fence) self._pendingWrites.push(fence.beginWrite()); // Ensure a poll is scheduled... but if we already know that one is, - // don't hit the throttled _ensurePollIsScheduled function (which might - // lead to us calling it unnecessarily in ms). - - if (self._pollsScheduledButNotStarted === 0) - self._ensurePollIsScheduled(); + if (self._pollsScheduledButNotStarted > 0) return; + ++self._pollsScheduledButNotStarted; + + self._taskQueue.queueTask(async function () { + await self._pollMongo(); }); + }, + // test-only interface for controlling polling. + // + // _suspendPolling blocks until any currently running and scheduled polls are + // done, and prevents any further polls from being scheduled. (new + // ObserveHandles can be added and receive their initial added callbacks, + // though.) + // + // _resumePolling immediately polls, and allows further polls to occur. + _suspendPolling: async function () { + var self = this; // Pretend that there's another poll scheduled (which will prevent + // _ensurePollIsScheduled from queueing any more polls). + + ++self._pollsScheduledButNotStarted; // Now block until all currently running or scheduled polls are done. + + await self._taskQueue.runTask(function () {}); // Confirm that there is only one "poll" (the fake one we're pretending to + // have) scheduled. + + if (self._pollsScheduledButNotStarted !== 1) + throw new Error( + "_pollsScheduledButNotStarted is " + self._pollsScheduledButNotStarted + ); + }, + _resumePolling: async function () { + var self = this; // We should be in the same state as in the end of _suspendPolling. + + if (self._pollsScheduledButNotStarted !== 1) + throw new Error( + "_pollsScheduledButNotStarted is " + self._pollsScheduledButNotStarted + ); // Run a poll synchronously (which will counteract the + // ++_pollsScheduledButNotStarted from _suspendPolling). + + await self._taskQueue.runTask(async function () { + await self._pollMongo(); + }); + }, + _pollMongo: async function () { + var self = this; + --self._pollsScheduledButNotStarted; + if (self._stopped) return; + var first = false; + var newResults; + var oldResults = self._results; + + if (!oldResults) { + first = true; // XXX maybe use OrderedDict instead? + + oldResults = self._ordered ? 
[] : new LocalCollection._IdMap(); + } - self._stopCallbacks.push(function() { - listenersHandle.stop(); - }); // every once and a while, poll even if we don't think we're dirty, for - // eventual consistency with database writes from outside the Meteor - // universe. - // - // For testing, there's an undocumented callback argument to observeChanges - // which disables time-based polling and gets called at the beginning of each - // poll. - - if (options._testOnlyPollCallback) { - self._testOnlyPollCallback = options._testOnlyPollCallback; - } else { - var pollingInterval = - self._cursorDescription.options.pollingIntervalMs || - self._cursorDescription.options._pollingInterval || // COMPAT with 1.2 - 10 * 1000; - var intervalHandle = Meteor.setInterval( - _.bind(self._ensurePollIsScheduled, self), - pollingInterval + self._testOnlyPollCallback && (await self._testOnlyPollCallback()); // Save the list of pending writes which this round will commit. + + var writesForCycle = self._pendingWrites; + self._pendingWrites = []; // Get the new query results. (This yields.) + + try { + // TODO: should this be async? + newResults = await self._synchronousCursor.getRawObjects(self._ordered); + } catch (e) { + if (first && typeof e.code === "number") { + // This is an error document sent to us by mongod, not a connection + // error generated by the client. And we've never seen this query work + // successfully. Probably it's a bad selector or something, so we should + // NOT retry. Instead, we should halt the observe (which ends up calling + // `stop` on us). + self._multiplexer.queryError( + new Error( + "Exception while polling query " + + JSON.stringify(self._cursorDescription) + + ": " + + e.message + ) ); - self._stopCallbacks.push(function() { - Meteor.clearInterval(intervalHandle); - }); + return; + } // getRawObjects can throw if we're having trouble talking to the + // database. That's fine --- we will repoll later anyway. But we should + // make sure not to lose track of this cycle's writes. + // (It also can throw if there's just something invalid about this query; + // unfortunately the ObserveDriver API doesn't provide a good way to + // "cancel" the observe from the inside in this case. + + Array.prototype.push.apply(self._pendingWrites, writesForCycle); + + Meteor._debug( + "Exception while polling query " + + JSON.stringify(self._cursorDescription) + + ": " + + e.stack + ); + + return; + } // Run diffs. + + if (!self._stopped) { + LocalCollection._diffQueryChanges( + self._ordered, + oldResults, + newResults, + self._multiplexer + ); + } // Signals the multiplexer to allow all observeChanges calls that share this + // multiplexer to return. (This happens asynchronously, via the + // multiplexer's queue.) + + if (first) self._multiplexer.ready(); // Replace self._results atomically. (This assignment is what makes `first` + // stay through on the next cycle, so we've waited until after we've + // committed to ready-ing the multiplexer.) + + self._results = newResults; // Once the ObserveMultiplexer has processed everything we've done in this + // round, mark all the writes which existed before this call as + // commmitted. (If new writes have shown up in the meantime, there'll + // already be another _pollMongo task scheduled.) 
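// Editor's sketch (not part of the patch): the flush-then-commit step in isolation.
// Once the multiplexer has drained everything queued for this poll cycle, every write
// fence captured at the start of the cycle is released. `writesForCycle` holds the
// tokens returned by fence.beginWrite(), as collected above.
async function commitCycleWrites(multiplexer, writesForCycle) {
  await multiplexer.onFlush(async function () {
    for (const w of writesForCycle) {
      await w.committed();
    }
  });
}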
+ + await self._multiplexer.onFlush(async function () { + for (const w of writesForCycle) { + await w.committed(); + } + }); + }, + stop: async function () { + var self = this; + self._stopped = true; + + for (const c of self._stopCallbacks) { + await c(); } - // Make sure we actually poll soon! - self._unthrottledEnsurePollIsScheduled(); + // Release any write fences that are waiting on us. + for (const w of self._pendingWrites) { + await w.committed(); + } Package.facts && - Package.facts.Facts.incrementServerFact( - 'mongo-livedata', - 'observe-drivers-polling', - 1 - ); -} - -Object.assign(PollingObserveDriver.prototype, { - // This is always called through _.throttle (except once at startup). - _unthrottledEnsurePollIsScheduled: function() { - var self = this; - if (self._pollsScheduledButNotStarted > 0) return; - ++self._pollsScheduledButNotStarted; - - self._taskQueue.queueTask(async function() { - await self._pollMongo(); - }); - }, - // test-only interface for controlling polling. - // - // _suspendPolling blocks until any currently running and scheduled polls are - // done, and prevents any further polls from being scheduled. (new - // ObserveHandles can be added and receive their initial added callbacks, - // though.) - // - // _resumePolling immediately polls, and allows further polls to occur. - _suspendPolling: async function() { - var self = this; // Pretend that there's another poll scheduled (which will prevent - // _ensurePollIsScheduled from queueing any more polls). - - ++self._pollsScheduledButNotStarted; // Now block until all currently running or scheduled polls are done. - - await self._taskQueue.runTask(function() {}); // Confirm that there is only one "poll" (the fake one we're pretending to - // have) scheduled. - - if (self._pollsScheduledButNotStarted !== 1) - throw new Error( - '_pollsScheduledButNotStarted is ' + - self._pollsScheduledButNotStarted - ); - }, - _resumePolling: async function() { - var self = this; // We should be in the same state as in the end of _suspendPolling. - - if (self._pollsScheduledButNotStarted !== 1) - throw new Error( - '_pollsScheduledButNotStarted is ' + - self._pollsScheduledButNotStarted - ); // Run a poll synchronously (which will counteract the - // ++_pollsScheduledButNotStarted from _suspendPolling). - - await self._taskQueue.runTask(async function() { - await self._pollMongo(); - }); - }, - _pollMongo: async function() { - var self = this; - --self._pollsScheduledButNotStarted; - if (self._stopped) return; - var first = false; - var newResults; - var oldResults = self._results; - - if (!oldResults) { - first = true; // XXX maybe use OrderedDict instead? - - oldResults = self._ordered ? [] : new LocalCollection._IdMap(); - } - - self._testOnlyPollCallback && await self._testOnlyPollCallback(); // Save the list of pending writes which this round will commit. - - var writesForCycle = self._pendingWrites; - self._pendingWrites = []; // Get the new query results. (This yields.) - - try { - // TODO: should this be async? - newResults = await self._synchronousCursor.getRawObjects(self._ordered); - } catch (e) { - if (first && typeof e.code === 'number') { - // This is an error document sent to us by mongod, not a connection - // error generated by the client. And we've never seen this query work - // successfully. Probably it's a bad selector or something, so we should - // NOT retry. Instead, we should halt the observe (which ends up calling - // `stop` on us). 
- self._multiplexer.queryError( - new Error( - 'Exception while polling query ' + - JSON.stringify(self._cursorDescription) + - ': ' + - e.message - ) - ); - - return; - } // getRawObjects can throw if we're having trouble talking to the - // database. That's fine --- we will repoll later anyway. But we should - // make sure not to lose track of this cycle's writes. - // (It also can throw if there's just something invalid about this query; - // unfortunately the ObserveDriver API doesn't provide a good way to - // "cancel" the observe from the inside in this case. - - Array.prototype.push.apply(self._pendingWrites, writesForCycle); - - Meteor._debug( - 'Exception while polling query ' + - JSON.stringify(self._cursorDescription) + - ': ' + - e.stack - ); - - return; - } // Run diffs. - - if (!self._stopped) { - LocalCollection._diffQueryChanges( - self._ordered, - oldResults, - newResults, - self._multiplexer - ); - } // Signals the multiplexer to allow all observeChanges calls that share this - // multiplexer to return. (This happens asynchronously, via the - // multiplexer's queue.) - - if (first) self._multiplexer.ready(); // Replace self._results atomically. (This assignment is what makes `first` - // stay through on the next cycle, so we've waited until after we've - // committed to ready-ing the multiplexer.) - - self._results = newResults; // Once the ObserveMultiplexer has processed everything we've done in this - // round, mark all the writes which existed before this call as - // commmitted. (If new writes have shown up in the meantime, there'll - // already be another _pollMongo task scheduled.) - - await self._multiplexer.onFlush(async function() { - for (const w of writesForCycle) { - await w.committed(); - } - }); - }, - stop: async function() { - var self = this; - self._stopped = true; - - for (const c of self._stopCallbacks) { - await c(); - } - - // Release any write fences that are waiting on us. 
- for (const w of self._pendingWrites) { - await w.committed(); - } - - Package.facts && - Package.facts.Facts.incrementServerFact( - 'mongo-livedata', - 'observe-drivers-polling', - -1 - ); - }, + Package.facts.Facts.incrementServerFact( + "mongo-livedata", + "observe-drivers-polling", + -1 + ); + }, }); diff --git a/lib/mongo/RedisOplogObserveDriver.js b/lib/mongo/RedisOplogObserveDriver.js index 4540462c..77d0251c 100644 --- a/lib/mongo/RedisOplogObserveDriver.js +++ b/lib/mongo/RedisOplogObserveDriver.js @@ -1,118 +1,117 @@ // This code was started based on meteor/meteor github repository // This code is MIT and licensed to Meteor -import { Tracker } from 'meteor/tracker'; -import getStrategy from '../processors/getStrategy'; -import { Strategy } from '../constants'; -import RedisSubscriber from '../redis/RedisSubscriber'; -import ObservableCollection from '../cache/ObservableCollection'; +import { Tracker } from "meteor/tracker"; +import getStrategy from "../processors/getStrategy"; +import { Strategy } from "../constants"; +import RedisSubscriber from "../redis/RedisSubscriber"; +import ObservableCollection from "../cache/ObservableCollection"; let currentId = 0; export default class RedisOplogObserveDriver { - options = { - cursorDescription: null, - mongoHandle: null, - multiplexer: null, - ordered: null, - matcher: null, - }; - - constructor(options) { - this._id = currentId; - currentId++; - - this.options = options; - const { cursorDescription, multiplexer, matcher, sorter } = options; - - this._cursorDescription = options.cursorDescription; - this._multiplexer = options.multiplexer; - - this.strategy = getStrategy( - cursorDescription.selector, - cursorDescription.options - ); - - // TODO send by object - this.observableCollection = new ObservableCollection({ - multiplexer, - matcher, - sorter, - cursorDescription, - }); - - // Feels hackish to have it here, maybe move to ObservableCollections - if (this.strategy === Strategy.DEDICATED_CHANNELS) { - let oc = this.observableCollection; - if (oc.selector._id) { - oc.__containsOtherSelectorsThanId = - Object.keys(oc.selector).length > 1; - } - } - } - - async init() { - await this.observableCollection.setupCollection(); - - // This is to mitigate the issue when we run init the first time on a subscription - // And if you are using packages like reactive-publish - // Because inside here we do a .find().fetch(), and that's considered reactive - await Tracker.nonreactive(() => { - return this.observableCollection.init(); - }); - - this.redisSubscriber = new RedisSubscriber( - this.observableCollection, - this.strategy - ); - } - - stop() { - this.redisSubscriber.stop(); - - this.observableCollection = null; - this.redisSubscriber = null; - - Package['facts-base'] && - Package['facts-base'].Facts.incrementServerFact( - 'mongo-livedata', - 'observe-drivers-oplog', - -1 - ); + options = { + cursorDescription: null, + mongoHandle: null, + multiplexer: null, + ordered: null, + matcher: null, + }; + + constructor(options) { + this._id = currentId; + currentId++; + + this.options = options; + const { cursorDescription, multiplexer, matcher, sorter } = options; + + this._cursorDescription = options.cursorDescription; + this._multiplexer = options.multiplexer; + + this.strategy = getStrategy( + cursorDescription.selector, + cursorDescription.options + ); + + // TODO send by object + this.observableCollection = new ObservableCollection({ + multiplexer, + matcher, + sorter, + cursorDescription, + }); + + // Feels hackish to have it here, maybe 
move to ObservableCollections + if (this.strategy === Strategy.DEDICATED_CHANNELS) { + let oc = this.observableCollection; + if (oc.selector._id) { + oc.__containsOtherSelectorsThanId = Object.keys(oc.selector).length > 1; + } } - - static cursorSupported(cursorDescription, matcher) { - // First, check the options. - var options = cursorDescription.options; - - // Did the user say no explicitly? - // underscored version of the option is COMPAT with 1.2 - if (options.disableOplog || options._disableOplog) return false; - - // If a fields projection option is given check if it is supported by - // minimongo (some operators are not supported). - - var fields = options.projection || options.fields; - - if (fields) { - try { - LocalCollection._checkSupportedProjection(fields); - } catch (e) { - if (e.name === 'MinimongoError') { - return false; - } else { - throw e; - } - } + } + + async init() { + await this.observableCollection.setupCollection(); + + // This is to mitigate the issue when we run init the first time on a subscription + // And if you are using packages like reactive-publish + // Because inside here we do a .find().fetch(), and that's considered reactive + await Tracker.nonreactive(() => { + return this.observableCollection.init(); + }); + + this.redisSubscriber = new RedisSubscriber( + this.observableCollection, + this.strategy + ); + } + + stop() { + this.redisSubscriber.stop(); + + this.observableCollection = null; + this.redisSubscriber = null; + + Package["facts-base"] && + Package["facts-base"].Facts.incrementServerFact( + "mongo-livedata", + "observe-drivers-oplog", + -1 + ); + } + + static cursorSupported(cursorDescription, matcher) { + // First, check the options. + var options = cursorDescription.options; + + // Did the user say no explicitly? + // underscored version of the option is COMPAT with 1.2 + if (options.disableOplog || options._disableOplog) return false; + + // If a fields projection option is given check if it is supported by + // minimongo (some operators are not supported). + + var fields = options.projection || options.fields; + + if (fields) { + try { + LocalCollection._checkSupportedProjection(fields); + } catch (e) { + if (e.name === "MinimongoError") { + return false; + } else { + throw e; } - - // We don't allow the following selectors: - // - $where (not confident that we provide the same JS environment - // as Mongo, and can yield!) - // - $near (has "interesting" properties in MongoDB, like the possibility - // of returning an ID multiple times, though even polling maybe - // have a bug there) - // XXX: once we support it, we would need to think more on how we - // initialize the comparators when we create the driver. - return !matcher.hasWhere() && !matcher.hasGeoQuery(); + } } + + // We don't allow the following selectors: + // - $where (not confident that we provide the same JS environment + // as Mongo, and can yield!) + // - $near (has "interesting" properties in MongoDB, like the possibility + // of returning an ID multiple times, though even polling maybe + // have a bug there) + // XXX: once we support it, we would need to think more on how we + // initialize the comparators when we create the driver. 
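// Illustrative only (not part of the patch): selectors that the checks above
// reject, plus the explicit per-cursor escape hatch cursorSupported honours.
// Collection and field names are assumptions.
const needsPollingFallback = [
  { $where: 'this.total > 10' },   // $where runs arbitrary JS and may yield
  { loc: { $near: [0, 0] } },      // $near may return the same _id more than once
];
const redisFriendlySelector = { userId: 'abc', archived: false }; // plain selector
const forcedPollingOptions = { disableOplog: true };              // rejected above, so polling is used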
+ return !matcher.hasWhere() && !matcher.hasGeoQuery(); + } } diff --git a/lib/mongo/SyntheticMutator.js b/lib/mongo/SyntheticMutator.js index 1b03e273..0287f5e0 100644 --- a/lib/mongo/SyntheticMutator.js +++ b/lib/mongo/SyntheticMutator.js @@ -1,108 +1,108 @@ -import { Mongo } from 'meteor/mongo'; -import { Random } from 'meteor/random'; -import { getRedisPusher } from '../redis/getRedisClient'; -import { EJSON } from 'meteor/ejson'; -import getFields from '../utils/getFields'; -import { Events, RedisPipe } from '../constants'; -import containsOperators from '../mongo/lib/containsOperators'; -import getChannels from '../cache/lib/getChannels'; -import getDedicatedChannel from '../utils/getDedicatedChannel'; +import { Mongo } from "meteor/mongo"; +import { Random } from "meteor/random"; +import { getRedisPusher } from "../redis/getRedisClient"; +import { EJSON } from "meteor/ejson"; +import getFields from "../utils/getFields"; +import { Events, RedisPipe } from "../constants"; +import containsOperators from "../mongo/lib/containsOperators"; +import getChannels from "../cache/lib/getChannels"; +import getDedicatedChannel from "../utils/getDedicatedChannel"; /** * call(Mongo.Collection).insert(data) * @param channelOrCollection {Mongo.Collection|string} */ export default class SyntheticMutator { - /** - * @param channels - * @param data - */ - static async publish(channels, data) { - const client = getRedisPusher(); + /** + * @param channels + * @param data + */ + static async publish(channels, data) { + const client = getRedisPusher(); - for (const channel of channels) { - await client.publish(channel, EJSON.stringify(data)); - } + for (const channel of channels) { + await client.publish(channel, EJSON.stringify(data)); } + } - /** - * @param channels - * @param data - */ - static async insert(channels, data) { - channels = SyntheticMutator._extractChannels(channels, data._id); - - if (!data._id) { - data._id = Random.id(); - } + /** + * @param channels + * @param data + */ + static async insert(channels, data) { + channels = SyntheticMutator._extractChannels(channels, data._id); - await SyntheticMutator.publish(channels, { - [RedisPipe.EVENT]: Events.INSERT, - [RedisPipe.SYNTHETIC]: true, - [RedisPipe.DOC]: data, - }); + if (!data._id) { + data._id = Random.id(); } - /** - * @param channels - * @param _id - * @param modifier - */ - static async update(channels, _id, modifier) { - channels = SyntheticMutator._extractChannels(channels, _id); + await SyntheticMutator.publish(channels, { + [RedisPipe.EVENT]: Events.INSERT, + [RedisPipe.SYNTHETIC]: true, + [RedisPipe.DOC]: data, + }); + } - if (!containsOperators(modifier)) { - throw new Meteor.Error( - 'Synthetic update can only be done through MongoDB operators.' - ); - } + /** + * @param channels + * @param _id + * @param modifier + */ + static async update(channels, _id, modifier) { + channels = SyntheticMutator._extractChannels(channels, _id); + + if (!containsOperators(modifier)) { + throw new Meteor.Error( + "Synthetic update can only be done through MongoDB operators." 
+ ); + } - const { topLevelFields } = getFields(modifier); + const { topLevelFields } = getFields(modifier); - let message = { - [RedisPipe.EVENT]: Events.UPDATE, - [RedisPipe.SYNTHETIC]: true, - [RedisPipe.DOC]: { _id }, - [RedisPipe.MODIFIER]: modifier, - [RedisPipe.MODIFIED_TOP_LEVEL_FIELDS]: topLevelFields, - }; + let message = { + [RedisPipe.EVENT]: Events.UPDATE, + [RedisPipe.SYNTHETIC]: true, + [RedisPipe.DOC]: { _id }, + [RedisPipe.MODIFIER]: modifier, + [RedisPipe.MODIFIED_TOP_LEVEL_FIELDS]: topLevelFields, + }; - await SyntheticMutator.publish(channels, message); - } + await SyntheticMutator.publish(channels, message); + } - /** - * @param channels - * @param _id - */ - static async remove(channels, _id) { - channels = SyntheticMutator._extractChannels(channels, _id); + /** + * @param channels + * @param _id + */ + static async remove(channels, _id) { + channels = SyntheticMutator._extractChannels(channels, _id); - await SyntheticMutator.publish(channels, { - [RedisPipe.EVENT]: Events.REMOVE, - [RedisPipe.SYNTHETIC]: true, - [RedisPipe.DOC]: { _id }, - }); - } + await SyntheticMutator.publish(channels, { + [RedisPipe.EVENT]: Events.REMOVE, + [RedisPipe.SYNTHETIC]: true, + [RedisPipe.DOC]: { _id }, + }); + } - /** - * @param channels - * @param _id - * @returns {*} - * @private - */ - static _extractChannels(channels, _id) { - if (!Array.isArray(channels)) { - if (channels instanceof Mongo.Collection) { - const name = channels._name; - channels = getChannels(name); - if (_id) { - channels.push(getDedicatedChannel(name, _id)); - } - } else { - channels = [channels]; - } + /** + * @param channels + * @param _id + * @returns {*} + * @private + */ + static _extractChannels(channels, _id) { + if (!Array.isArray(channels)) { + if (channels instanceof Mongo.Collection) { + const name = channels._name; + channels = getChannels(name); + if (_id) { + channels.push(getDedicatedChannel(name, _id)); } - - return channels; + } else { + channels = [channels]; + } } + + return channels; + } } diff --git a/lib/mongo/allow-deny/docToValidate.js b/lib/mongo/allow-deny/docToValidate.js index 3d17c593..7f6eebf3 100644 --- a/lib/mongo/allow-deny/docToValidate.js +++ b/lib/mongo/allow-deny/docToValidate.js @@ -1,19 +1,19 @@ -import { EJSON } from 'meteor/ejson' +import { EJSON } from "meteor/ejson"; export default function docToValidate(validator, doc, generatedId) { - let ret = doc - if (validator.transform) { - ret = EJSON.clone(doc) - // If you set a server-side transform on your collection, then you don't get - // to tell the difference between "client specified the ID" and "server - // generated the ID", because transforms expect to get _id. If you want to - // do that check, you can do it with a specific - // `C.allow({insert: f, transform: null})` validator. - if (generatedId !== null) { - ret._id = generatedId - } - // TODO: should we accept async transform functions? - ret = validator.transform(ret) + let ret = doc; + if (validator.transform) { + ret = EJSON.clone(doc); + // If you set a server-side transform on your collection, then you don't get + // to tell the difference between "client specified the ID" and "server + // generated the ID", because transforms expect to get _id. If you want to + // do that check, you can do it with a specific + // `C.allow({insert: f, transform: null})` validator. + if (generatedId !== null) { + ret._id = generatedId; } - return ret + // TODO: should we accept async transform functions? 
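// Hedged example of the behaviour implemented above (the validator shape is an
// assumption): when the server generated the _id, the transform still sees it
// on the cloned document, while the caller's original doc is left untouched.
const exampleValidator = {
  transform(doc) {
    return { ...doc, title: doc.title.trim() };
  },
};
// docToValidate(exampleValidator, { title: ' hi ' }, 'abc123')
//   -> { title: 'hi', _id: 'abc123' }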
+ ret = validator.transform(ret); + } + return ret; } diff --git a/lib/mongo/allow-deny/transformDoc.js b/lib/mongo/allow-deny/transformDoc.js index 19fc589e..0d679fb3 100644 --- a/lib/mongo/allow-deny/transformDoc.js +++ b/lib/mongo/allow-deny/transformDoc.js @@ -1,4 +1,4 @@ export default function transformDoc(validator, doc) { - if (validator.transform) return validator.transform(doc) - return doc + if (validator.transform) return validator.transform(doc); + return doc; } diff --git a/lib/mongo/allow-deny/validatedInsert.js b/lib/mongo/allow-deny/validatedInsert.js index b675bbc1..91cd822f 100644 --- a/lib/mongo/allow-deny/validatedInsert.js +++ b/lib/mongo/allow-deny/validatedInsert.js @@ -1,23 +1,31 @@ -import { Meteor } from 'meteor/meteor' -import { _ } from 'meteor/underscore' -import docToValidate from './docToValidate' +import { Meteor } from "meteor/meteor"; +import { _ } from "meteor/underscore"; +import docToValidate from "./docToValidate"; export default async function validatedInsert(userId, doc, generatedId) { - // call user validators. - // Any deny returns true means denied. - if (_.any(this._validators.insert.deny, validator => - validator(userId, docToValidate(validator, doc, generatedId)))) { - throw new Meteor.Error(403, 'Access denied') - } - // Any allow returns true means proceed. Throw error if they all fail. - if (_.all(this._validators.insert.allow, validator => - !validator(userId, docToValidate(validator, doc, generatedId)))) { - throw new Meteor.Error(403, 'Access denied') - } + // call user validators. + // Any deny returns true means denied. + if ( + _.any(this._validators.insert.deny, (validator) => + validator(userId, docToValidate(validator, doc, generatedId)) + ) + ) { + throw new Meteor.Error(403, "Access denied"); + } + // Any allow returns true means proceed. Throw error if they all fail. + if ( + _.all( + this._validators.insert.allow, + (validator) => + !validator(userId, docToValidate(validator, doc, generatedId)) + ) + ) { + throw new Meteor.Error(403, "Access denied"); + } - // If we generated an ID above, insert it now: after the validation, but - // before actually inserting. - if (generatedId !== null) doc._id = generatedId + // If we generated an ID above, insert it now: after the validation, but + // before actually inserting. 
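// Minimal stand-alone sketch of the rule the validated mutators apply above and
// below: any deny callback returning true blocks the write, and at least one
// allow callback must return true for it to proceed. Names are illustrative,
// not part of this package.
function isAllowed(validators, ...args) {
  if (validators.deny.some((fn) => fn(...args))) return false;
  return validators.allow.some((fn) => fn(...args));
}
// isAllowed({ deny: [], allow: [(userId) => userId === 'admin'] }, 'admin') === true
// isAllowed({ deny: [], allow: [] }, 'admin') === false (no allow rule ever passes)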
+ if (generatedId !== null) doc._id = generatedId; - await this.insertAsync(doc, { optimistic: true }); + await this.insertAsync(doc, { optimistic: true }); } diff --git a/lib/mongo/allow-deny/validatedRemove.js b/lib/mongo/allow-deny/validatedRemove.js index 61e8da15..5b610a7f 100644 --- a/lib/mongo/allow-deny/validatedRemove.js +++ b/lib/mongo/allow-deny/validatedRemove.js @@ -1,39 +1,46 @@ /* eslint no-param-reassign: 0 no-underscore-dangle: 0 */ -import { Meteor } from 'meteor/meteor' -import { _ } from 'meteor/underscore' -import transformDoc from './transformDoc' +import { Meteor } from "meteor/meteor"; +import { _ } from "meteor/underscore"; +import transformDoc from "./transformDoc"; export default async function validatedRemove(userId, selector) { - const findOptions = {transform: null} - if (!this._validators.fetchAllFields) { - findOptions.fields = {} - findOptions.projection = {} - _.each(this._validators.fetch, fieldName => { - findOptions.fields[fieldName] = 1 - findOptions.projection[fieldName] = 1 - }) - } + const findOptions = { transform: null }; + if (!this._validators.fetchAllFields) { + findOptions.fields = {}; + findOptions.projection = {}; + _.each(this._validators.fetch, (fieldName) => { + findOptions.fields[fieldName] = 1; + findOptions.projection[fieldName] = 1; + }); + } - const doc = await this._collection.findOneAsync(selector, findOptions) - if (!doc) { - return 0 - } + const doc = await this._collection.findOneAsync(selector, findOptions); + if (!doc) { + return 0; + } - // call user validators. - // Any deny returns true means denied. - if (_.any(this._validators.remove.deny, validator => - validator(userId, transformDoc(validator, doc)))) { - throw new Meteor.Error(403, 'Access denied') - } - // Any allow returns true means proceed. Throw error if they all fail. - if (_.all(this._validators.remove.allow, validator => - !validator(userId, transformDoc(validator, doc)))) { - throw new Meteor.Error(403, 'Access denied') - } + // call user validators. + // Any deny returns true means denied. + if ( + _.any(this._validators.remove.deny, (validator) => + validator(userId, transformDoc(validator, doc)) + ) + ) { + throw new Meteor.Error(403, "Access denied"); + } + // Any allow returns true means proceed. Throw error if they all fail. + if ( + _.all( + this._validators.remove.allow, + (validator) => !validator(userId, transformDoc(validator, doc)) + ) + ) { + throw new Meteor.Error(403, "Access denied"); + } - // Back when we supported arbitrary client-provided selectors, we actually - // rewrote the selector to {_id: {$in: [ids that we found]}} before passing to - // Mongo to avoid races, but since selector is guaranteed to already just be - // an ID, we don't have to any more. - return this.removeAsync(selector, {optimistic: true}) + // Back when we supported arbitrary client-provided selectors, we actually + // rewrote the selector to {_id: {$in: [ids that we found]}} before passing to + // Mongo to avoid races, but since selector is guaranteed to already just be + // an ID, we don't have to any more. 
+ return this.removeAsync(selector, { optimistic: true }); } diff --git a/lib/mongo/allow-deny/validatedUpdate.js b/lib/mongo/allow-deny/validatedUpdate.js index 5c2a8fd7..f92337d2 100644 --- a/lib/mongo/allow-deny/validatedUpdate.js +++ b/lib/mongo/allow-deny/validatedUpdate.js @@ -1,9 +1,9 @@ /* eslint no-param-reassign: 0 no-underscore-dangle: 0 */ -import { Meteor } from 'meteor/meteor'; -import { _ } from 'meteor/underscore'; -import { LocalCollection } from 'meteor/minimongo'; -import { check } from 'meteor/check'; -import transformDoc from './transformDoc'; +import { Meteor } from "meteor/meteor"; +import { _ } from "meteor/underscore"; +import { LocalCollection } from "meteor/minimongo"; +import { check } from "meteor/check"; +import transformDoc from "./transformDoc"; // Only allow these operations in validated updates. Specifically // whitelist operations, rather than blacklist, so new complex @@ -12,122 +12,126 @@ import transformDoc from './transformDoc'; // field. For now this contains all update operations except '$rename'. // http://docs.mongodb.org/manual/reference/operators/#update const ALLOWED_UPDATE_OPERATIONS = { - $inc: 1, - $set: 1, - $unset: 1, - $addToSet: 1, - $pop: 1, - $pullAll: 1, - $pull: 1, - $pushAll: 1, - $push: 1, - $bit: 1, + $inc: 1, + $set: 1, + $unset: 1, + $addToSet: 1, + $pop: 1, + $pullAll: 1, + $pull: 1, + $pushAll: 1, + $push: 1, + $bit: 1, }; // Simulate a mongo `update` operation while validating that the access // control rules set by calls to `allow/deny` are satisfied. If all // pass, rewrite the mongo operation to use $in to set the list of // document ids to change ##ValidatedChange -export default async function validatedUpdate(userId, selector, mutator, options) { - check(mutator, Object); - options = _.clone(options) || {}; +export default async function validatedUpdate( + userId, + selector, + mutator, + options +) { + check(mutator, Object); + options = _.clone(options) || {}; - if (!LocalCollection._selectorIsIdPerhapsAsObject(selector)) { - throw new Error('validated update should be of a single ID'); - } + if (!LocalCollection._selectorIsIdPerhapsAsObject(selector)) { + throw new Error("validated update should be of a single ID"); + } - // We don't support upserts because they don't fit nicely into allow/deny - // rules. - if (options.upsert) { - throw new Meteor.Error( - 403, - 'Access denied. Upserts not ' + - 'allowed in a restricted collection.' - ); - } + // We don't support upserts because they don't fit nicely into allow/deny + // rules. + if (options.upsert) { + throw new Meteor.Error( + 403, + "Access denied. Upserts not " + "allowed in a restricted collection." + ); + } - const noReplaceError = - 'Access denied. In a restricted collection you can only' + - ' update documents, not replace them. Use a Mongo update operator, such ' + - "as '$set'."; + const noReplaceError = + "Access denied. In a restricted collection you can only" + + " update documents, not replace them. Use a Mongo update operator, such " + + "as '$set'."; - // compute modified fields - const fields = []; - if (_.isEmpty(mutator)) { - throw new Meteor.Error(403, noReplaceError); - } - _.each(mutator, (params, op) => { - if (op.charAt(0) !== '$') { - throw new Meteor.Error(403, noReplaceError); - } else if (!_.has(ALLOWED_UPDATE_OPERATIONS, op)) { - throw new Meteor.Error( - 403, - `Access denied. 
Operator ${op} not allowed in a restricted collection.` - ); - } else { - Object.keys(params).forEach(field => { - // treat dotted fields as if they are replacing their - // top-level part - if (field.indexOf('.') !== -1) { - field = field.substring(0, field.indexOf('.')); - } - - // record the field we are trying to change - if (!fields.includes(field)) { - fields.push(field); - } - }); + // compute modified fields + const fields = []; + if (_.isEmpty(mutator)) { + throw new Meteor.Error(403, noReplaceError); + } + _.each(mutator, (params, op) => { + if (op.charAt(0) !== "$") { + throw new Meteor.Error(403, noReplaceError); + } else if (!_.has(ALLOWED_UPDATE_OPERATIONS, op)) { + throw new Meteor.Error( + 403, + `Access denied. Operator ${op} not allowed in a restricted collection.` + ); + } else { + Object.keys(params).forEach((field) => { + // treat dotted fields as if they are replacing their + // top-level part + if (field.indexOf(".") !== -1) { + field = field.substring(0, field.indexOf(".")); } - }); - const findOptions = { transform: null }; - if (!this._validators.fetchAllFields) { - findOptions.fields = {}; - findOptions.projection = {}; - _.each(this._validators.fetch, fieldName => { - findOptions.fields[fieldName] = 1; - findOptions.projection[fieldName] = 1; - }); + // record the field we are trying to change + if (!fields.includes(field)) { + fields.push(field); + } + }); } + }); - const doc = await this._collection.findOneAsync(selector, findOptions); - if (!doc) { - // none satisfied! - return 0; - } + const findOptions = { transform: null }; + if (!this._validators.fetchAllFields) { + findOptions.fields = {}; + findOptions.projection = {}; + _.each(this._validators.fetch, (fieldName) => { + findOptions.fields[fieldName] = 1; + findOptions.projection[fieldName] = 1; + }); + } - // call user validators. - // Any deny returns true means denied. - if ( - _.any(this._validators.update.deny, validator => { - const factoriedDoc = transformDoc(validator, doc); - return validator(userId, factoriedDoc, fields, mutator); - }) - ) { - throw new Meteor.Error(403, 'Access denied'); - } - // Any allow returns true means proceed. Throw error if they all fail. - if ( - _.all(this._validators.update.allow, validator => { - const factoriedDoc = transformDoc(validator, doc); - return !validator(userId, factoriedDoc, fields, mutator); - }) - ) { - throw new Meteor.Error(403, 'Access denied'); - } + const doc = await this._collection.findOneAsync(selector, findOptions); + if (!doc) { + // none satisfied! + return 0; + } - options._forbidReplace = true; + // call user validators. + // Any deny returns true means denied. + if ( + _.any(this._validators.update.deny, (validator) => { + const factoriedDoc = transformDoc(validator, doc); + return validator(userId, factoriedDoc, fields, mutator); + }) + ) { + throw new Meteor.Error(403, "Access denied"); + } + // Any allow returns true means proceed. Throw error if they all fail. + if ( + _.all(this._validators.update.allow, (validator) => { + const factoriedDoc = transformDoc(validator, doc); + return !validator(userId, factoriedDoc, fields, mutator); + }) + ) { + throw new Meteor.Error(403, "Access denied"); + } - // Back when we supported arbitrary client-provided selectors, we actually - // rewrote the selector to include an _id clause before passing to Mongo to - // avoid races, but since selector is guaranteed to already just be an ID, we - // don't have to any more. 
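// Stand-alone sketch of the field handling above: reject operators outside the
// whitelist and collapse dotted paths to their top-level field, which is what
// ends up in the `fields` array handed to the allow/deny callbacks. The
// whitelist argument mirrors ALLOWED_UPDATE_OPERATIONS; the function name is assumed.
function modifiedTopLevelFields(mutator, allowedOperations) {
  const fields = new Set();
  for (const [op, params] of Object.entries(mutator)) {
    if (!op.startsWith('$') || !(op in allowedOperations)) {
      throw new Error(`Operator ${op} is not allowed here`);
    }
    for (const path of Object.keys(params)) {
      fields.add(path.split('.')[0]); // 'profile.name' counts as 'profile'
    }
  }
  return [...fields];
}
// modifiedTopLevelFields({ $set: { 'profile.name': 'x' } }, { $set: 1 }) -> ['profile']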
+ options._forbidReplace = true; - await this.updateAsync( - selector, - mutator, - Object.assign(options, { - optimistic: true, - }) - ); + // Back when we supported arbitrary client-provided selectors, we actually + // rewrote the selector to include an _id clause before passing to Mongo to + // avoid races, but since selector is guaranteed to already just be an ID, we + // don't have to any more. + + await this.updateAsync( + selector, + mutator, + Object.assign(options, { + optimistic: true, + }) + ); } diff --git a/lib/mongo/extendMongoCollection.js b/lib/mongo/extendMongoCollection.js index b3a3af93..d0420251 100644 --- a/lib/mongo/extendMongoCollection.js +++ b/lib/mongo/extendMongoCollection.js @@ -1,116 +1,116 @@ -import { Mongo } from 'meteor/mongo'; -import { _ } from 'meteor/underscore'; -import _validatedInsert from './allow-deny/validatedInsert'; -import _validatedUpdate from './allow-deny/validatedUpdate'; -import _validatedRemove from './allow-deny/validatedRemove'; -import Mutator from './Mutator'; -import extendObserveChanges from './extendObserveChanges'; +import { Mongo } from "meteor/mongo"; +import { _ } from "meteor/underscore"; +import _validatedInsert from "./allow-deny/validatedInsert"; +import _validatedUpdate from "./allow-deny/validatedUpdate"; +import _validatedRemove from "./allow-deny/validatedRemove"; +import Mutator from "./Mutator"; +import extendObserveChanges from "./extendObserveChanges"; export default () => { - const Originals = { - insert: Mongo.Collection.prototype.insertAsync, - update: Mongo.Collection.prototype.updateAsync, - remove: Mongo.Collection.prototype.removeAsync, - find: Mongo.Collection.prototype.find, - findOne: Mongo.Collection.prototype.findOneAsync, - }; + const Originals = { + insert: Mongo.Collection.prototype.insertAsync, + update: Mongo.Collection.prototype.updateAsync, + remove: Mongo.Collection.prototype.removeAsync, + find: Mongo.Collection.prototype.find, + findOne: Mongo.Collection.prototype.findOneAsync, + }; - Mutator.init(); + Mutator.init(); - extendObserveChanges(); + extendObserveChanges(); - Object.assign(Mongo.Collection.prototype, { - /** - * @param data - * @param config - * @returns {*} - */ - async insert(data, config) { - return Mutator.insert.call(this, Originals, data, config); - }, + Object.assign(Mongo.Collection.prototype, { + /** + * @param data + * @param config + * @returns {*} + */ + async insert(data, config) { + return Mutator.insert.call(this, Originals, data, config); + }, - async insertAsync(data, config) { - return Mutator.insert.call(this, Originals, data, config); - }, + async insertAsync(data, config) { + return Mutator.insert.call(this, Originals, data, config); + }, - /** - * @param selector - * @param modifier - * @param config - * @param callback - * @returns {*} - */ - async update(selector, modifier, config, callback) { - return Mutator.update.call( - this, - Originals, - selector, - modifier, - config, - callback - ); - }, + /** + * @param selector + * @param modifier + * @param config + * @param callback + * @returns {*} + */ + async update(selector, modifier, config, callback) { + return Mutator.update.call( + this, + Originals, + selector, + modifier, + config, + callback + ); + }, - async updateAsync(selector, modifier, config, callback) { - return Mutator.update.call( - this, - Originals, - selector, - modifier, - config, - callback - ); - }, + async updateAsync(selector, modifier, config, callback) { + return Mutator.update.call( + this, + Originals, + selector, + modifier, + 
config, + callback + ); + }, - /** - * @param selector - * @param config - * @returns {*} - */ - async remove(selector, config) { - return Mutator.remove.call(this, Originals, selector, config); - }, + /** + * @param selector + * @param config + * @returns {*} + */ + async remove(selector, config) { + return Mutator.remove.call(this, Originals, selector, config); + }, - async removeAsync(selector, config) { - return Mutator.remove.call(this, Originals, selector, config); - }, + async removeAsync(selector, config) { + return Mutator.remove.call(this, Originals, selector, config); + }, - _validatedInsert, - _validatedUpdate, - _validatedRemove, + _validatedInsert, + _validatedUpdate, + _validatedRemove, - /** - * Configure defaults for your collection - * - * @param {function} mutation - * @param {function} cursor - * @param {boolean} shouldIncludePrevDocument - */ - configureRedisOplog({ mutation, cursor, ...rest }) { - this._redisOplog = { - shouldIncludePrevDocument: false, - protectAgainstRaceConditions: true, - ...rest - }; + /** + * Configure defaults for your collection + * + * @param {function} mutation + * @param {function} cursor + * @param {boolean} shouldIncludePrevDocument + */ + configureRedisOplog({ mutation, cursor, ...rest }) { + this._redisOplog = { + shouldIncludePrevDocument: false, + protectAgainstRaceConditions: true, + ...rest, + }; - if (mutation) { - if (!_.isFunction(mutation)) { - throw new Meteor.Error( - 'To configure defaults for the collection, "mutation" needs to be a function' - ); - } + if (mutation) { + if (!_.isFunction(mutation)) { + throw new Meteor.Error( + 'To configure defaults for the collection, "mutation" needs to be a function' + ); + } - this._redisOplog.mutation = mutation; - } - if (cursor) { - if (!_.isFunction(cursor)) { - throw new Meteor.Error( - 'To configure defaults for the collection, "cursor" needs to be a function' - ); - } + this._redisOplog.mutation = mutation; + } + if (cursor) { + if (!_.isFunction(cursor)) { + throw new Meteor.Error( + 'To configure defaults for the collection, "cursor" needs to be a function' + ); + } - this._redisOplog.cursor = cursor; - } - }, - }); + this._redisOplog.cursor = cursor; + } + }, + }); }; diff --git a/lib/mongo/extendObserveChanges.js b/lib/mongo/extendObserveChanges.js index 22fc20e9..968d5719 100644 --- a/lib/mongo/extendObserveChanges.js +++ b/lib/mongo/extendObserveChanges.js @@ -1,6 +1,6 @@ -import { MongoInternals } from 'meteor/mongo'; -import observeChanges from './observeChanges'; +import { MongoInternals } from "meteor/mongo"; +import observeChanges from "./observeChanges"; -export default function() { - MongoInternals.Connection.prototype._observeChanges = observeChanges; +export default function () { + MongoInternals.Connection.prototype._observeChanges = observeChanges; } diff --git a/lib/mongo/lib/containsOperators.js b/lib/mongo/lib/containsOperators.js index 2f40bb93..20f6af81 100644 --- a/lib/mongo/lib/containsOperators.js +++ b/lib/mongo/lib/containsOperators.js @@ -1,5 +1,5 @@ export default function (modifier) { - return _.some(modifier, function (value, operator) { - return /^\$/.test(operator); - }); -}; + return _.some(modifier, function (value, operator) { + return /^\$/.test(operator); + }); +} diff --git a/lib/mongo/lib/dispatchers.js b/lib/mongo/lib/dispatchers.js index 7a3b09df..0645c7d8 100644 --- a/lib/mongo/lib/dispatchers.js +++ b/lib/mongo/lib/dispatchers.js @@ -1,90 +1,102 @@ -import { Meteor } from 'meteor/meteor'; -import { DDPServer } from 'meteor/ddp-server'; 
-import { EJSON } from 'meteor/ejson'; -import { Events, RedisPipe } from '../../constants'; -import RedisSubscriptionManager from '../../redis/RedisSubscriptionManager'; -import { getRedisPusher } from '../../redis/getRedisClient'; -import getDedicatedChannel from '../../utils/getDedicatedChannel'; -import Config from '../../config'; -import OptimisticInvocation from '../OptimisticInvocation'; +import { Meteor } from "meteor/meteor"; +import { DDPServer } from "meteor/ddp-server"; +import { EJSON } from "meteor/ejson"; +import { Events, RedisPipe } from "../../constants"; +import RedisSubscriptionManager from "../../redis/RedisSubscriptionManager"; +import { getRedisPusher } from "../../redis/getRedisClient"; +import getDedicatedChannel from "../../utils/getDedicatedChannel"; +import Config from "../../config"; +import OptimisticInvocation from "../OptimisticInvocation"; -const dispatchEvents = async function(optimistic, collectionName, channels, events) { - if (optimistic) { - await OptimisticInvocation.withValue(true, async () => { - for (const event of events) { - const docId = event[RedisPipe.DOC]._id; - const dedicatedChannel = getDedicatedChannel( - collectionName, - docId - ); - - await RedisSubscriptionManager.process(dedicatedChannel, event); - for (const channelName of channels) { - await RedisSubscriptionManager.process(channelName, event); - } - } - }); - } - - if (Config.externalRedisPublisher) { - return; - } +const dispatchEvents = async function ( + optimistic, + collectionName, + channels, + events +) { + if (optimistic) { + await OptimisticInvocation.withValue(true, async () => { + for (const event of events) { + const docId = event[RedisPipe.DOC]._id; + const dedicatedChannel = getDedicatedChannel(collectionName, docId); - Meteor.defer(async () => { - const client = getRedisPusher(); - for (const event of events) { - const message = EJSON.stringify(event); - for (const channelName of channels) { - await client.publish(channelName, message); - } - const docId = event[RedisPipe.DOC]._id; - const dedicatedChannel = getDedicatedChannel(collectionName, docId); - await client.publish(dedicatedChannel, message); + await RedisSubscriptionManager.process(dedicatedChannel, event); + for (const channelName of channels) { + await RedisSubscriptionManager.process(channelName, event); } + } }); + } + + if (Config.externalRedisPublisher) { + return; + } + + Meteor.defer(async () => { + const client = getRedisPusher(); + for (const event of events) { + const message = EJSON.stringify(event); + for (const channelName of channels) { + await client.publish(channelName, message); + } + const docId = event[RedisPipe.DOC]._id; + const dedicatedChannel = getDedicatedChannel(collectionName, docId); + await client.publish(dedicatedChannel, message); + } + }); }; -const dispatchUpdate = async function( - optimistic, - collectionName, - channels, - docs, - fields +const dispatchUpdate = async function ( + optimistic, + collectionName, + channels, + docs, + fields ) { - const uid = optimistic ? RedisSubscriptionManager.uid : null; + const uid = optimistic ? 
RedisSubscriptionManager.uid : null; - const events = docs.map(doc => ({ - [RedisPipe.EVENT]: Events.UPDATE, - [RedisPipe.FIELDS]: fields, - [RedisPipe.DOC]: doc, - [RedisPipe.UID]: uid, - })); + const events = docs.map((doc) => ({ + [RedisPipe.EVENT]: Events.UPDATE, + [RedisPipe.FIELDS]: fields, + [RedisPipe.DOC]: doc, + [RedisPipe.UID]: uid, + })); - await dispatchEvents(optimistic, collectionName, channels, events); + await dispatchEvents(optimistic, collectionName, channels, events); }; -const dispatchRemove = async function(optimistic, collectionName, channels, docs) { - const uid = optimistic ? RedisSubscriptionManager.uid : null; +const dispatchRemove = async function ( + optimistic, + collectionName, + channels, + docs +) { + const uid = optimistic ? RedisSubscriptionManager.uid : null; - const events = docs.map(doc => ({ - [RedisPipe.EVENT]: Events.REMOVE, - [RedisPipe.DOC]: doc, - [RedisPipe.UID]: uid, - })); + const events = docs.map((doc) => ({ + [RedisPipe.EVENT]: Events.REMOVE, + [RedisPipe.DOC]: doc, + [RedisPipe.UID]: uid, + })); - await dispatchEvents(optimistic, collectionName, channels, events); + await dispatchEvents(optimistic, collectionName, channels, events); }; -const dispatchInsert = async function(optimistic, collectionName, channels, doc) { - const uid = optimistic ? RedisSubscriptionManager.uid : null; +const dispatchInsert = async function ( + optimistic, + collectionName, + channels, + doc +) { + const uid = optimistic ? RedisSubscriptionManager.uid : null; - const event = { - [RedisPipe.EVENT]: Events.INSERT, - [RedisPipe.DOC]: doc, - [RedisPipe.UID]: uid, - }; + const event = { + [RedisPipe.EVENT]: Events.INSERT, + [RedisPipe.DOC]: doc, + [RedisPipe.UID]: uid, + }; - await dispatchEvents(optimistic, collectionName, channels, [event]); + await dispatchEvents(optimistic, collectionName, channels, [event]); }; export { dispatchInsert, dispatchUpdate, dispatchRemove }; diff --git a/lib/mongo/lib/getMutationConfig.js b/lib/mongo/lib/getMutationConfig.js index 51fc4a93..cf60f3ab 100644 --- a/lib/mongo/lib/getMutationConfig.js +++ b/lib/mongo/lib/getMutationConfig.js @@ -1,5 +1,5 @@ -import getChannels from '../../cache/lib/getChannels'; -import Config from '../../config'; +import getChannels from "../../cache/lib/getChannels"; +import Config from "../../config"; /** * @param collection @@ -7,33 +7,38 @@ import Config from '../../config'; * @param mutationObject */ export default async function (collection, _config, mutationObject) { - const collectionName = collection._name; + const collectionName = collection._name; - if (!_config || _.isFunction(_config)) { - _config = {}; - } + if (!_config || _.isFunction(_config)) { + _config = {}; + } - const defaultOverrides = {}; - if (!DDP._CurrentMethodInvocation.get()) { - // If we're not in a method, then we should never need to do optimistic - // ui processing. - // - // However, we allow users to really force it by explicitly passing - // optimistic: true if they want to use the local-dispatch code path - // rather than going through Redis. - defaultOverrides.optimistic = false; - } + const defaultOverrides = {}; + if (!DDP._CurrentMethodInvocation.get()) { + // If we're not in a method, then we should never need to do optimistic + // ui processing. + // + // However, we allow users to really force it by explicitly passing + // optimistic: true if they want to use the local-dispatch code path + // rather than going through Redis. 
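// Hedged sketch of the precedence this function ends up applying: package-wide
// mutationDefaults, then the "not inside a method" override, then the caller's
// per-mutation config. The concrete default values shown are assumptions.
const effectiveConfig = Object.assign(
  {},
  { optimistic: true, pushToRedis: true }, // Config.mutationDefaults (assumed values)
  { optimistic: false },                   // applied when no method invocation is active
  { optimistic: true }                     // explicit per-call config wins, e.g.
                                           // await Items.updateAsync(id, modifier, { optimistic: true })
);
// effectiveConfig -> { optimistic: true, pushToRedis: true }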
+ defaultOverrides.optimistic = false; + } - let config = Object.assign({}, Config.mutationDefaults, defaultOverrides, _config); + let config = Object.assign( + {}, + Config.mutationDefaults, + defaultOverrides, + _config + ); - if (collection._redisOplog) { - const { mutation } = collection._redisOplog; - if (mutation) { - await mutation.call(collection, config, mutationObject) - } + if (collection._redisOplog) { + const { mutation } = collection._redisOplog; + if (mutation) { + await mutation.call(collection, config, mutationObject); } + } - config._channels = getChannels(collectionName, config); + config._channels = getChannels(collectionName, config); - return config; -}; + return config; +} diff --git a/lib/mongo/mongoCollectionNames.js b/lib/mongo/mongoCollectionNames.js index 1a6cea6b..151e2979 100644 --- a/lib/mongo/mongoCollectionNames.js +++ b/lib/mongo/mongoCollectionNames.js @@ -3,23 +3,23 @@ const map = {}; const constructor = Mongo.Collection; const proto = Mongo.Collection.prototype; -const hook = function() { - let ret = constructor.apply(this, arguments); - map[arguments[0]] = this; - return ret; +const hook = function () { + let ret = constructor.apply(this, arguments); + map[arguments[0]] = this; + return ret; }; hook.__getCollectionByName = function (name) { - return map[name]; + return map[name]; }; hook.prototype = proto; hook.prototype.constructor = hook; for (let prop in constructor) { - if (constructor.hasOwnProperty(prop)) { - hook[prop] = constructor[prop]; - } + if (constructor.hasOwnProperty(prop)) { + hook[prop] = constructor[prop]; + } } Mongo.Collection = hook; diff --git a/lib/mongo/observeChanges.js b/lib/mongo/observeChanges.js index cc4919a2..b9c863dc 100644 --- a/lib/mongo/observeChanges.js +++ b/lib/mongo/observeChanges.js @@ -1,144 +1,133 @@ // This code was started based on meteor/meteor github repository // This code is MIT and licensed to Meteor. -import { _ } from 'meteor/underscore'; -import RedisOplogObserveDriver from './RedisOplogObserveDriver'; -import { ObserveMultiplexer, ObserveHandle } from './ObserveMultiplex'; -import PollingObserveDriver from './PollingObserveDriver'; - -export default async function(cursorDescription, ordered, callbacks) { - const self = this; - if (cursorDescription.options.tailable) { - // TODO: this is not blocking on current meteor 3.0 branch - return self._observeChangesTailable( - cursorDescription, - ordered, - callbacks - ); - } - - // You may not filter out _id when observing changes, because the id is a core - // part of the observeChanges API. - - const fields = cursorDescription.options.projection || cursorDescription.options.fields; - - if ( - fields && - (fields._id === 0 || fields._id === false) - ) { - throw Error('You may not observe a cursor with {projection: {_id: 0}}'); +import { _ } from "meteor/underscore"; +import RedisOplogObserveDriver from "./RedisOplogObserveDriver"; +import { ObserveMultiplexer, ObserveHandle } from "./ObserveMultiplex"; +import PollingObserveDriver from "./PollingObserveDriver"; + +export default async function (cursorDescription, ordered, callbacks) { + const self = this; + if (cursorDescription.options.tailable) { + // TODO: this is not blocking on current meteor 3.0 branch + return self._observeChangesTailable(cursorDescription, ordered, callbacks); + } + + // You may not filter out _id when observing changes, because the id is a core + // part of the observeChanges API. 
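// Illustrative only: projections as seen by the check below. Excluding _id from
// an observed cursor is rejected, because added/changed/removed callbacks are
// keyed by _id. Field names are assumptions.
const acceptedProjection = { fields: { title: 1 } };          // _id is kept implicitly
const rejectedProjection = { fields: { _id: 0, title: 1 } };  // throws in _observeChanges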
+ + const fields = + cursorDescription.options.projection || cursorDescription.options.fields; + + if (fields && (fields._id === 0 || fields._id === false)) { + throw Error("You may not observe a cursor with {projection: {_id: 0}}"); + } + + var observeKey = EJSON.stringify( + Object.assign( + { + ordered: ordered, + }, + cursorDescription + ) + ); + var multiplexer, observeDriver; + var firstHandle = false; // Find a matching ObserveMultiplexer, or create a new one. This next block is + // guaranteed to not yield (and it doesn't call anything that can observe a + // new query), so no other calls to this function can interleave with it. + + Meteor._noYieldsAllowed(function () { + if (_.has(self._observeMultiplexers, observeKey)) { + multiplexer = self._observeMultiplexers[observeKey]; + } else { + firstHandle = true; // Create a new ObserveMultiplexer. + + multiplexer = new ObserveMultiplexer({ + ordered: ordered, + onStop: async function () { + delete self._observeMultiplexers[observeKey]; + await observeDriver.stop(); + }, + }); + self._observeMultiplexers[observeKey] = multiplexer; } - - var observeKey = EJSON.stringify( - Object.assign( - { - ordered: ordered, - }, - cursorDescription - ) - ); - var multiplexer, observeDriver; - var firstHandle = false; // Find a matching ObserveMultiplexer, or create a new one. This next block is - // guaranteed to not yield (and it doesn't call anything that can observe a - // new query), so no other calls to this function can interleave with it. - - Meteor._noYieldsAllowed(function() { - if (_.has(self._observeMultiplexers, observeKey)) { - multiplexer = self._observeMultiplexers[observeKey]; - } else { - firstHandle = true; // Create a new ObserveMultiplexer. - - multiplexer = new ObserveMultiplexer({ - ordered: ordered, - onStop: async function() { - delete self._observeMultiplexers[observeKey]; - await observeDriver.stop(); - }, + }); + + var observeHandle = new ObserveHandle(multiplexer, callbacks); + + if (firstHandle) { + var matcher, sorter; + + var canUseOplog = _.all( + [ + function () { + // At a bare minimum, using the oplog requires us to have an oplog, to + // want unordered callbacks, and to not want a callback on the polls + // that won't happen. + return !ordered && !callbacks._testOnlyPollCallback; + }, + function () { + // We need to be able to compile the selector. Fall back to polling for + // some newfangled $selector that minimongo doesn't support yet. + try { + matcher = new Minimongo.Matcher(cursorDescription.selector); + return true; + } catch (e) { + // XXX make all compilation errors MinimongoError or something + // so that this doesn't ignore unrelated exceptions + return false; + } + }, + function () { + // ... and the selector itself needs to support oplog. + return RedisOplogObserveDriver.cursorSupported( + cursorDescription, + matcher + ); + }, + function () { + // And we need to be able to compile the sort, if any. eg, can't be + // {$natural: 1}. + if (!cursorDescription.options.sort) return true; + + try { + sorter = new Minimongo.Sorter(cursorDescription.options.sort, { + matcher: matcher, }); - self._observeMultiplexers[observeKey] = multiplexer; - } - }); - - var observeHandle = new ObserveHandle(multiplexer, callbacks); - - if (firstHandle) { - var matcher, sorter; - - var canUseOplog = _.all( - [ - function() { - // At a bare minimum, using the oplog requires us to have an oplog, to - // want unordered callbacks, and to not want a callback on the polls - // that won't happen. 
- return !ordered && !callbacks._testOnlyPollCallback; - }, - function() { - // We need to be able to compile the selector. Fall back to polling for - // some newfangled $selector that minimongo doesn't support yet. - try { - matcher = new Minimongo.Matcher( - cursorDescription.selector - ); - return true; - } catch (e) { - // XXX make all compilation errors MinimongoError or something - // so that this doesn't ignore unrelated exceptions - return false; - } - }, - function() { - // ... and the selector itself needs to support oplog. - return RedisOplogObserveDriver.cursorSupported( - cursorDescription, - matcher - ); - }, - function() { - // And we need to be able to compile the sort, if any. eg, can't be - // {$natural: 1}. - if (!cursorDescription.options.sort) return true; - - try { - sorter = new Minimongo.Sorter( - cursorDescription.options.sort, - { - matcher: matcher, - } - ); - return true; - } catch (e) { - // XXX make all compilation errors MinimongoError or something - // so that this doesn't ignore unrelated exceptions - return false; - } - }, - ], - function(f) { - return f(); - } - ); // invoke each function - - var driverClass = canUseOplog - ? RedisOplogObserveDriver - : PollingObserveDriver; - - observeDriver = new driverClass({ - cursorDescription: cursorDescription, - mongoHandle: self, - multiplexer: multiplexer, - ordered: ordered, - matcher: matcher, - // ignored by polling - sorter: sorter, - // ignored by polling - _testOnlyPollCallback: callbacks._testOnlyPollCallback, - }); // This field is only set for use in tests. - - await observeDriver.init(); - multiplexer._observeDriver = observeDriver; - } - - // Blocks until the initial adds have been sent. - await multiplexer.addHandleAndSendInitialAdds(observeHandle); - - return observeHandle; + return true; + } catch (e) { + // XXX make all compilation errors MinimongoError or something + // so that this doesn't ignore unrelated exceptions + return false; + } + }, + ], + function (f) { + return f(); + } + ); // invoke each function + + var driverClass = canUseOplog + ? RedisOplogObserveDriver + : PollingObserveDriver; + + observeDriver = new driverClass({ + cursorDescription: cursorDescription, + mongoHandle: self, + multiplexer: multiplexer, + ordered: ordered, + matcher: matcher, + // ignored by polling + sorter: sorter, + // ignored by polling + _testOnlyPollCallback: callbacks._testOnlyPollCallback, + }); // This field is only set for use in tests. + + await observeDriver.init(); + multiplexer._observeDriver = observeDriver; + } + + // Blocks until the initial adds have been sent. + await multiplexer.addHandleAndSendInitialAdds(observeHandle); + + return observeHandle; } diff --git a/lib/processors/actions/reload.js b/lib/processors/actions/reload.js index 45f2855b..e9951e59 100644 --- a/lib/processors/actions/reload.js +++ b/lib/processors/actions/reload.js @@ -1,5 +1,5 @@ -import { _ } from 'meteor/underscore'; -import { MongoIDMap } from '../../cache/mongoIdMap'; +import { _ } from "meteor/underscore"; +import { MongoIDMap } from "../../cache/mongoIdMap"; /** * Most likely used when redis connection resumes. 
@@ -8,23 +8,23 @@ import { MongoIDMap } from '../../cache/mongoIdMap'; * @param observableCollection */ export default async function (observableCollection) { - const { store, cursor } = observableCollection; + const { store, cursor } = observableCollection; - const freshData = await cursor.fetchAsync(); + const freshData = await cursor.fetchAsync(); - const newStore = new MongoIDMap(); - freshData.forEach((doc) => newStore.set(doc._id, doc)); + const newStore = new MongoIDMap(); + freshData.forEach((doc) => newStore.set(doc._id, doc)); - await store.compareWith(newStore, { - async both(docId, oldDoc, newDoc) { - const modifiedFields = _.union(Object.keys(oldDoc), Object.keys(newDoc)); - await observableCollection.change(newDoc, modifiedFields); - }, - async leftOnly(docId) { - await observableCollection.remove(docId); - }, - async rightOnly(docId, newDoc) { - await observableCollection.add(newDoc); - }, - }); + await store.compareWith(newStore, { + async both(docId, oldDoc, newDoc) { + const modifiedFields = _.union(Object.keys(oldDoc), Object.keys(newDoc)); + await observableCollection.change(newDoc, modifiedFields); + }, + async leftOnly(docId) { + await observableCollection.remove(docId); + }, + async rightOnly(docId, newDoc) { + await observableCollection.add(newDoc); + }, + }); } diff --git a/lib/processors/actions/requery.js b/lib/processors/actions/requery.js index 916333ab..5d2c6069 100644 --- a/lib/processors/actions/requery.js +++ b/lib/processors/actions/requery.js @@ -1,6 +1,6 @@ -import { EJSON } from 'meteor/ejson'; -import { Events } from '../../constants'; -import { MongoIDMap } from '../../cache/mongoIdMap'; +import { EJSON } from "meteor/ejson"; +import { Events } from "../../constants"; +import { MongoIDMap } from "../../cache/mongoIdMap"; /** * @param observableCollection @@ -8,37 +8,45 @@ import { MongoIDMap } from '../../cache/mongoIdMap'; * @param event * @param modifiedFields */ -export default async function (observableCollection, newCommer, event, modifiedFields) { - const { store, selector, options } = observableCollection; +export default async function ( + observableCollection, + newCommer, + event, + modifiedFields +) { + const { store, selector, options } = observableCollection; - const newStore = new MongoIDMap(); - const freshIds = await observableCollection.collection.find( - selector, { ...options, fields: { _id: 1 } }).fetchAsync(); + const newStore = new MongoIDMap(); + const freshIds = await observableCollection.collection + .find(selector, { ...options, fields: { _id: 1 } }) + .fetchAsync(); - freshIds.forEach(doc => newStore.set(doc._id, doc)); + freshIds.forEach((doc) => newStore.set(doc._id, doc)); - let added = false; - await store.compareWith(newStore, { - async leftOnly(docId) { - await observableCollection.remove(docId); - }, - async rightOnly(docId) { - if (newCommer && EJSON.equals(docId, newCommer._id)) { - added = true; - await observableCollection.add(newCommer); - } else { - await observableCollection.addById(docId); - } - } - }); + let added = false; + await store.compareWith(newStore, { + async leftOnly(docId) { + await observableCollection.remove(docId); + }, + async rightOnly(docId) { + if (newCommer && EJSON.equals(docId, newCommer._id)) { + added = true; + await observableCollection.add(newCommer); + } else { + await observableCollection.addById(docId); + } + }, + }); - // if we have an update, and we have a newcommer, that new commer may be inside the ids - // TODO: maybe refactor this in a separate action (?) 
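// Minimal stand-alone sketch of the compareWith pattern that reload and requery
// rely on above: classify ids as left-only, right-only, or present in both.
// Plain Maps stand in for MongoIDMap; the callback names mirror the usage
// above, everything else is assumed.
async function compareWith(left, right, { both, leftOnly, rightOnly }) {
  for (const [id, leftDoc] of left) {
    if (right.has(id)) {
      if (both) await both(id, leftDoc, right.get(id));
    } else if (leftOnly) {
      await leftOnly(id, leftDoc);
    }
  }
  for (const [id, rightDoc] of right) {
    if (!left.has(id) && rightOnly) await rightOnly(id, rightDoc);
  }
}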
- if (newCommer - && Events.UPDATE === event - && modifiedFields - && !added - && store.has(newCommer._id)) { - await observableCollection.change(newCommer, modifiedFields); - } + // if we have an update, and we have a newcommer, that new commer may be inside the ids + // TODO: maybe refactor this in a separate action (?) + if ( + newCommer && + Events.UPDATE === event && + modifiedFields && + !added && + store.has(newCommer._id) + ) { + await observableCollection.change(newCommer, modifiedFields); + } } diff --git a/lib/processors/default.js b/lib/processors/default.js index 2cecc72b..52545068 100644 --- a/lib/processors/default.js +++ b/lib/processors/default.js @@ -1,4 +1,4 @@ -import { Events } from '../constants'; +import { Events } from "../constants"; /** * @param observableCollection @@ -6,33 +6,38 @@ import { Events } from '../constants'; * @param doc * @param modifiedFields */ -export default async function(observableCollection, event, doc, modifiedFields) { - switch (event) { - case Events.INSERT: - await handleInsert(observableCollection, doc); - break; - case Events.UPDATE: - await handleUpdate(observableCollection, doc, modifiedFields); - break; - case Events.REMOVE: - await handleRemove(observableCollection, doc); - break; - default: - throw new Meteor.Error(`Invalid event specified: ${event}`); - } +export default async function ( + observableCollection, + event, + doc, + modifiedFields +) { + switch (event) { + case Events.INSERT: + await handleInsert(observableCollection, doc); + break; + case Events.UPDATE: + await handleUpdate(observableCollection, doc, modifiedFields); + break; + case Events.REMOVE: + await handleRemove(observableCollection, doc); + break; + default: + throw new Meteor.Error(`Invalid event specified: ${event}`); + } } /** * @param observableCollection * @param doc */ -const handleInsert = async function(observableCollection, doc) { - if ( - !observableCollection.contains(doc._id) && - observableCollection.isEligible(doc) - ) { - await observableCollection.add(doc); - } +const handleInsert = async function (observableCollection, doc) { + if ( + !observableCollection.contains(doc._id) && + observableCollection.isEligible(doc) + ) { + await observableCollection.add(doc); + } }; /** @@ -40,26 +45,30 @@ const handleInsert = async function(observableCollection, doc) { * @param doc * @param modifiedFields */ -const handleUpdate = async function(observableCollection, doc, modifiedFields) { - if (observableCollection.isEligible(doc)) { - if (observableCollection.contains(doc._id)) { - await observableCollection.change(doc, modifiedFields); - } else { - await observableCollection.add(doc); - } +const handleUpdate = async function ( + observableCollection, + doc, + modifiedFields +) { + if (observableCollection.isEligible(doc)) { + if (observableCollection.contains(doc._id)) { + await observableCollection.change(doc, modifiedFields); } else { - if (observableCollection.contains(doc._id)) { - await observableCollection.remove(doc._id); - } + await observableCollection.add(doc); } + } else { + if (observableCollection.contains(doc._id)) { + await observableCollection.remove(doc._id); + } + } }; /** * @param observableCollection * @param doc */ -const handleRemove = async function(observableCollection, doc) { - if (observableCollection.contains(doc._id)) { - await observableCollection.remove(doc._id); - } +const handleRemove = async function (observableCollection, doc) { + if (observableCollection.contains(doc._id)) { + await observableCollection.remove(doc._id); + 
} }; diff --git a/lib/processors/direct.js b/lib/processors/direct.js index 4561f8b3..78bfe318 100644 --- a/lib/processors/direct.js +++ b/lib/processors/direct.js @@ -1,4 +1,4 @@ -import { Events } from '../constants'; +import { Events } from "../constants"; /** * @param observableCollection @@ -6,33 +6,38 @@ import { Events } from '../constants'; * @param doc * @param modifiedFields */ -export default async function(observableCollection, event, doc, modifiedFields) { - switch (event) { - case Events.UPDATE: - await handleUpdate(observableCollection, doc, modifiedFields); - break; - case Events.REMOVE: - await handleRemove(observableCollection, doc); - break; - case Events.INSERT: - await handleInsert(observableCollection, doc); - break; - default: - throw new Meteor.Error(`Invalid event specified: ${event}`); - } +export default async function ( + observableCollection, + event, + doc, + modifiedFields +) { + switch (event) { + case Events.UPDATE: + await handleUpdate(observableCollection, doc, modifiedFields); + break; + case Events.REMOVE: + await handleRemove(observableCollection, doc); + break; + case Events.INSERT: + await handleInsert(observableCollection, doc); + break; + default: + throw new Meteor.Error(`Invalid event specified: ${event}`); + } } /** * @param observableCollection * @param doc */ -const handleInsert = async function(observableCollection, doc) { - if ( - !observableCollection.contains(doc._id) && - observableCollection.isEligible(doc) - ) { - await observableCollection.add(doc); - } +const handleInsert = async function (observableCollection, doc) { + if ( + !observableCollection.contains(doc._id) && + observableCollection.isEligible(doc) + ) { + await observableCollection.add(doc); + } }; /** @@ -40,34 +45,38 @@ const handleInsert = async function(observableCollection, doc) { * @param doc * @param modifiedFields */ -const handleUpdate = async function(observableCollection, doc, modifiedFields) { - const otherSelectors = observableCollection.__containsOtherSelectorsThanId; +const handleUpdate = async function ( + observableCollection, + doc, + modifiedFields +) { + const otherSelectors = observableCollection.__containsOtherSelectorsThanId; - if (otherSelectors) { - if (observableCollection.isEligible(doc)) { - if (observableCollection.contains(doc._id)) { - await observableCollection.change(doc, modifiedFields); - } else { - await observableCollection.add(doc); - } - } else { - if (observableCollection.contains(doc._id)) { - await observableCollection.remove(doc._id); - } - } + if (otherSelectors) { + if (observableCollection.isEligible(doc)) { + if (observableCollection.contains(doc._id)) { + await observableCollection.change(doc, modifiedFields); + } else { + await observableCollection.add(doc); + } + } else { + if (observableCollection.contains(doc._id)) { + await observableCollection.remove(doc._id); + } + } + } else { + if (observableCollection.contains(doc._id)) { + await observableCollection.change(doc, modifiedFields); } else { - if (observableCollection.contains(doc._id)) { - await observableCollection.change(doc, modifiedFields); - } else { - await observableCollection.add(doc); - } + await observableCollection.add(doc); } + } }; /** * @param observableCollection * @param doc */ -const handleRemove = async function(observableCollection, doc) { - await observableCollection.remove(doc._id); +const handleRemove = async function (observableCollection, doc) { + await observableCollection.remove(doc._id); }; diff --git a/lib/processors/getStrategy.js 
b/lib/processors/getStrategy.js index 6170e3b1..f147e48d 100644 --- a/lib/processors/getStrategy.js +++ b/lib/processors/getStrategy.js @@ -1,4 +1,4 @@ -import { Strategy } from '../constants'; +import { Strategy } from "../constants"; /** * @param selector @@ -6,18 +6,18 @@ import { Strategy } from '../constants'; * @returns {*} */ export default function getStrategy(selector = {}, options = {}) { - if (options.limit && !options.sort) { - options.sort = { _id: 1 }; - // throw new Meteor.Error(`Sorry, but you are not allowed to use "limit" without "sort" option.`); - } + if (options.limit && !options.sort) { + options.sort = { _id: 1 }; + // throw new Meteor.Error(`Sorry, but you are not allowed to use "limit" without "sort" option.`); + } - if (options.limit && options.sort) { - return Strategy.LIMIT_SORT; - } + if (options.limit && options.sort) { + return Strategy.LIMIT_SORT; + } - if (selector && selector._id) { - return Strategy.DEDICATED_CHANNELS; - } + if (selector && selector._id) { + return Strategy.DEDICATED_CHANNELS; + } - return Strategy.DEFAULT; + return Strategy.DEFAULT; } diff --git a/lib/processors/index.js b/lib/processors/index.js index 34805acc..3c85426e 100644 --- a/lib/processors/index.js +++ b/lib/processors/index.js @@ -1,22 +1,22 @@ -import { Strategy } from '../constants'; +import { Strategy } from "../constants"; -import defaultStrategy from './default'; -import directStrategy from './direct'; -import limitSortStrategy from './limit-sort'; -import getStrategy from './getStrategy'; +import defaultStrategy from "./default"; +import directStrategy from "./direct"; +import limitSortStrategy from "./limit-sort"; +import getStrategy from "./getStrategy"; const StrategyProcessorMap = { - [Strategy.LIMIT_SORT]: limitSortStrategy, - [Strategy.DEFAULT]: defaultStrategy, - [Strategy.DEDICATED_CHANNELS]: directStrategy + [Strategy.LIMIT_SORT]: limitSortStrategy, + [Strategy.DEFAULT]: defaultStrategy, + [Strategy.DEDICATED_CHANNELS]: directStrategy, }; -export { getStrategy } +export { getStrategy }; /** * @param strategy * @returns {*} */ export function getProcessor(strategy) { - return StrategyProcessorMap[strategy]; -} \ No newline at end of file + return StrategyProcessorMap[strategy]; +} diff --git a/lib/processors/lib/fieldsExist.js b/lib/processors/lib/fieldsExist.js index f96ee43a..650a1e3d 100644 --- a/lib/processors/lib/fieldsExist.js +++ b/lib/processors/lib/fieldsExist.js @@ -3,20 +3,22 @@ * @param fieldsArray {Array} ["profile", "roles.xx", "something" ] */ export function hasSortFields(fieldsObject, fieldsArray) { - const existingFields = Object.keys(fieldsObject); + const existingFields = Object.keys(fieldsObject); - for (let i = 0 ; i < fieldsArray.length ; i++) { - const field = fieldsArray[i]; - for (let j = 0; j < existingFields.length ; j++) { - const existingField = existingFields[j]; + for (let i = 0; i < fieldsArray.length; i++) { + const field = fieldsArray[i]; + for (let j = 0; j < existingFields.length; j++) { + const existingField = existingFields[j]; - if (existingField == field - || field.indexOf(existingField + '.') != -1 - || existingField.indexOf(field + '.') != -1) { - return true; - } - } + if ( + existingField == field || + field.indexOf(existingField + ".") != -1 || + existingField.indexOf(field + ".") != -1 + ) { + return true; + } } + } - return false; + return false; } diff --git a/lib/processors/limit-sort.js b/lib/processors/limit-sort.js index c78b5a3f..484fcf42 100644 --- a/lib/processors/limit-sort.js +++ 
b/lib/processors/limit-sort.js @@ -1,6 +1,6 @@ -import { Events } from '../constants'; -import { hasSortFields } from './lib/fieldsExist'; -import requery from './actions/requery'; +import { Events } from "../constants"; +import { hasSortFields } from "./lib/fieldsExist"; +import requery from "./actions/requery"; /** * @param observableCollection @@ -8,30 +8,35 @@ import requery from './actions/requery'; * @param doc * @param modifiedFields */ -export default async function(observableCollection, event, doc, modifiedFields) { - switch (event) { - case Events.INSERT: - await handleInsert(observableCollection, doc); - break; - case Events.UPDATE: - await handleUpdate(observableCollection, doc, modifiedFields); - break; - case Events.REMOVE: - await handleRemove(observableCollection, doc); - break; - default: - throw new Meteor.Error(`Invalid event specified: ${event}`); - } +export default async function ( + observableCollection, + event, + doc, + modifiedFields +) { + switch (event) { + case Events.INSERT: + await handleInsert(observableCollection, doc); + break; + case Events.UPDATE: + await handleUpdate(observableCollection, doc, modifiedFields); + break; + case Events.REMOVE: + await handleRemove(observableCollection, doc); + break; + default: + throw new Meteor.Error(`Invalid event specified: ${event}`); + } } /** * @param observableCollection * @param doc */ -const handleInsert = async function(observableCollection, doc) { - if (observableCollection.isEligible(doc)) { - await requery(observableCollection, doc); - } +const handleInsert = async function (observableCollection, doc) { + if (observableCollection.isEligible(doc)) { + await requery(observableCollection, doc); + } }; /** @@ -39,46 +44,38 @@ const handleInsert = async function(observableCollection, doc) { * @param doc * @param modifiedFields */ -const handleUpdate = async function(observableCollection, doc, modifiedFields) { - if (observableCollection.contains(doc._id)) { - if (observableCollection.isEligible(doc)) { - if ( - hasSortFields(observableCollection.options.sort, modifiedFields) - ) { - await requery( - observableCollection, - doc, - Events.UPDATE, - modifiedFields - ); - } else { - observableCollection.change(doc, modifiedFields); - } - } else { - await requery(observableCollection); - } +const handleUpdate = async function ( + observableCollection, + doc, + modifiedFields +) { + if (observableCollection.contains(doc._id)) { + if (observableCollection.isEligible(doc)) { + if (hasSortFields(observableCollection.options.sort, modifiedFields)) { + await requery(observableCollection, doc, Events.UPDATE, modifiedFields); + } else { + observableCollection.change(doc, modifiedFields); + } } else { - if (observableCollection.isEligible(doc)) { - await requery( - observableCollection, - doc, - Events.UPDATE, - modifiedFields - ); - } + await requery(observableCollection); } + } else { + if (observableCollection.isEligible(doc)) { + await requery(observableCollection, doc, Events.UPDATE, modifiedFields); + } + } }; /** * @param observableCollection * @param doc */ -const handleRemove = async function(observableCollection, doc) { - if (observableCollection.contains(doc._id)) { - await requery(observableCollection, doc); - } else { - if (observableCollection.options.skip) { - await requery(observableCollection, doc); - } +const handleRemove = async function (observableCollection, doc) { + if (observableCollection.contains(doc._id)) { + await requery(observableCollection, doc); + } else { + if (observableCollection.options.skip) 
{ + await requery(observableCollection, doc); } + } }; diff --git a/lib/processors/synthetic.js b/lib/processors/synthetic.js index fabdb110..5760d68a 100644 --- a/lib/processors/synthetic.js +++ b/lib/processors/synthetic.js @@ -1,4 +1,4 @@ -import { Events } from '../constants'; +import { Events } from "../constants"; /** * Synthetic processors processes virtual mutations that aren't actually persisted in the database @@ -10,20 +10,31 @@ import { Events } from '../constants'; * @param modifier * @param modifiedTopLevelFields */ -export default async function (observableCollection, event, doc, modifier, modifiedTopLevelFields) { - switch (event) { - case Events.INSERT: - await handleInsert(observableCollection, doc); - break; - case Events.UPDATE: - await handleUpdate(observableCollection, doc, modifier, modifiedTopLevelFields); - break; - case Events.REMOVE: - await handleRemove(observableCollection, doc); - break; - default: - throw new Meteor.Error(`Invalid event specified: ${event}`) - } +export default async function ( + observableCollection, + event, + doc, + modifier, + modifiedTopLevelFields +) { + switch (event) { + case Events.INSERT: + await handleInsert(observableCollection, doc); + break; + case Events.UPDATE: + await handleUpdate( + observableCollection, + doc, + modifier, + modifiedTopLevelFields + ); + break; + case Events.REMOVE: + await handleRemove(observableCollection, doc); + break; + default: + throw new Meteor.Error(`Invalid event specified: ${event}`); + } } /** @@ -31,9 +42,9 @@ export default async function (observableCollection, event, doc, modifier, modif * @param doc */ const handleInsert = async function (observableCollection, doc) { - if (observableCollection.isEligible(doc)) { - await observableCollection.add(doc, true); - } + if (observableCollection.isEligible(doc)) { + await observableCollection.add(doc, true); + } }; /** @@ -42,8 +53,17 @@ const handleInsert = async function (observableCollection, doc) { * @param modifier * @param modifiedTopLevelFields */ -const handleUpdate = async function (observableCollection, doc, modifier, modifiedTopLevelFields) { - await observableCollection.changeSynthetic(doc._id, modifier, modifiedTopLevelFields); +const handleUpdate = async function ( + observableCollection, + doc, + modifier, + modifiedTopLevelFields +) { + await observableCollection.changeSynthetic( + doc._id, + modifier, + modifiedTopLevelFields + ); }; /** @@ -51,7 +71,7 @@ const handleUpdate = async function (observableCollection, doc, modifier, modifi * @param doc */ const handleRemove = async function (observableCollection, doc) { - if (observableCollection.contains(doc._id)) { - await observableCollection.remove(doc._id); - } + if (observableCollection.contains(doc._id)) { + await observableCollection.remove(doc._id); + } }; diff --git a/lib/redis/PubSubManager.js b/lib/redis/PubSubManager.js index ae314f7a..24158f3e 100644 --- a/lib/redis/PubSubManager.js +++ b/lib/redis/PubSubManager.js @@ -1,97 +1,102 @@ -import { getRedisListener, getRedisPusher } from './getRedisClient'; +import { getRedisListener, getRedisPusher } from "./getRedisClient"; /** * Manages communication with Redis * Unifies all libraries that use this */ export default class PubSubManager { - constructor() { - this.channelHandlers = {}; - this.queue = new Meteor._AsynchronousQueue(); + constructor() { + this.channelHandlers = {}; + this.queue = new Meteor._AsynchronousQueue(); - this.listener = getRedisListener(); - this.pusher = getRedisPusher(); + this.listener = 
getRedisListener(); + this.pusher = getRedisPusher(); - this._initMessageListener(); - } + this._initMessageListener(); + } - /** - * Pushes to Redis - * @param {string} channel - * @param {object} message - */ - publish(channel, message) { - this.pusher.publish(channel, EJSON.stringify(message)); - } + /** + * Pushes to Redis + * @param {string} channel + * @param {object} message + */ + publish(channel, message) { + this.pusher.publish(channel, EJSON.stringify(message)); + } - /** - * @param {string} channel - * @param {function} handler - */ - subscribe(channel, handler) { - this.queue.queueTask(() => { - if (!this.channelHandlers[channel]) { - this._initChannel(channel); - } + /** + * @param {string} channel + * @param {function} handler + */ + subscribe(channel, handler) { + this.queue.queueTask(() => { + if (!this.channelHandlers[channel]) { + this._initChannel(channel); + } - this.channelHandlers[channel].push(handler); - }); - } + this.channelHandlers[channel].push(handler); + }); + } - /** - * @param {string} channel - * @param {function} handler - */ - unsubscribe(channel, handler) { - this.queue.queueTask(() => { - if (!this.channelHandlers[channel]) { - return; - } + /** + * @param {string} channel + * @param {function} handler + */ + unsubscribe(channel, handler) { + this.queue.queueTask(() => { + if (!this.channelHandlers[channel]) { + return; + } - this.channelHandlers[channel] = this.channelHandlers[channel].filter(_handler => { - return _handler !== handler; - }); + this.channelHandlers[channel] = this.channelHandlers[channel].filter( + (_handler) => { + return _handler !== handler; + } + ); - if (this.channelHandlers[channel].length === 0) { - this._destroyChannel(channel); - } - }) - } + if (this.channelHandlers[channel].length === 0) { + this._destroyChannel(channel); + } + }); + } - /** - * Initializes listening for redis messages - * @private - */ - _initMessageListener() { - const self = this; + /** + * Initializes listening for redis messages + * @private + */ + _initMessageListener() { + const self = this; - this.listener.on('message', Meteor.bindEnvironment(async function(channel, _message) { - if (self.channelHandlers[channel]) { - const message = EJSON.parse(_message); - for (const channelHandler of self.channelHandlers[channel]) { - await channelHandler(message); - } - } - })); - } + this.listener.on( + "message", + Meteor.bindEnvironment(async function (channel, _message) { + if (self.channelHandlers[channel]) { + const message = EJSON.parse(_message); + for (const channelHandler of self.channelHandlers[channel]) { + await channelHandler(message); + } + } + }) + ); + } - /** - * @param channel - * @private - */ - _initChannel(channel) { - this.listener.subscribe(channel); + /** + * @param channel + * @private + */ + _initChannel(channel) { + this.listener.subscribe(channel); - this.channelHandlers[channel] = []; - } + this.channelHandlers[channel] = []; + } - /** - * @param channel - * @private - */ - _destroyChannel(channel) { - this.listener.unsubscribe(channel); + /** + * @param channel + * @private + */ + _destroyChannel(channel) { + this.listener.unsubscribe(channel); - delete this.channelHandlers[channel]; - } + delete this.channelHandlers[channel]; + } } diff --git a/lib/redis/RedisSubscriber.js b/lib/redis/RedisSubscriber.js index 13883418..b8f0e113 100644 --- a/lib/redis/RedisSubscriber.js +++ b/lib/redis/RedisSubscriber.js @@ -1,94 +1,90 @@ -import { Strategy } from '../constants'; -import { getProcessor } from '../processors'; -import { Meteor } 
from 'meteor/meteor'; -import extractIdsFromSelector from '../utils/extractIdsFromSelector'; -import RedisSubscriptionManager from './RedisSubscriptionManager'; -import syntheticProcessor from '../processors/synthetic'; -import getDedicatedChannel from '../utils/getDedicatedChannel'; +import { Strategy } from "../constants"; +import { getProcessor } from "../processors"; +import { Meteor } from "meteor/meteor"; +import extractIdsFromSelector from "../utils/extractIdsFromSelector"; +import RedisSubscriptionManager from "./RedisSubscriptionManager"; +import syntheticProcessor from "../processors/synthetic"; +import getDedicatedChannel from "../utils/getDedicatedChannel"; export default class RedisSubscriber { - /** - * @param observableCollection - * @param strategy - */ - constructor(observableCollection, strategy) { - this.observableCollection = observableCollection; - this.strategy = strategy; - this.processor = getProcessor(strategy); + /** + * @param observableCollection + * @param strategy + */ + constructor(observableCollection, strategy) { + this.observableCollection = observableCollection; + this.strategy = strategy; + this.processor = getProcessor(strategy); - // We do this because we override the behavior of dedicated "_id" channels - this.channels = this.getChannels(this.observableCollection.channels); + // We do this because we override the behavior of dedicated "_id" channels + this.channels = this.getChannels(this.observableCollection.channels); - RedisSubscriptionManager.attach(this); - } + RedisSubscriptionManager.attach(this); + } - /** - * @param channels - * @returns {*} - */ - getChannels(channels) { - const collectionName = this.observableCollection.collectionName; + /** + * @param channels + * @returns {*} + */ + getChannels(channels) { + const collectionName = this.observableCollection.collectionName; - switch (this.strategy) { - case Strategy.DEFAULT: - case Strategy.LIMIT_SORT: - return channels; - case Strategy.DEDICATED_CHANNELS: - const ids = extractIdsFromSelector( - this.observableCollection.selector - ); + switch (this.strategy) { + case Strategy.DEFAULT: + case Strategy.LIMIT_SORT: + return channels; + case Strategy.DEDICATED_CHANNELS: + const ids = extractIdsFromSelector(this.observableCollection.selector); - return ids.map(id => getDedicatedChannel(collectionName, id)); - default: - throw new Meteor.Error( - `Strategy could not be found: ${this.strategy}` - ); - } + return ids.map((id) => getDedicatedChannel(collectionName, id)); + default: + throw new Meteor.Error(`Strategy could not be found: ${this.strategy}`); } + } - /** - * @param args - */ - async process(...args) { - await this.processor.call(null, this.observableCollection, ...args); - } + /** + * @param args + */ + async process(...args) { + await this.processor.call(null, this.observableCollection, ...args); + } - /** - * @param event - * @param doc - * @param modifier - * @param modifiedTopLevelFields - */ - async processSynthetic(event, doc, modifier, modifiedTopLevelFields) { - return syntheticProcessor( - this.observableCollection, - event, - doc, - modifier, - modifiedTopLevelFields - ); - } + /** + * @param event + * @param doc + * @param modifier + * @param modifiedTopLevelFields + */ + async processSynthetic(event, doc, modifier, modifiedTopLevelFields) { + return syntheticProcessor( + this.observableCollection, + event, + doc, + modifier, + modifiedTopLevelFields + ); + } - /** - * Detaches from RedisSubscriptionManager - */ - stop() { - try { - RedisSubscriptionManager.detach(this); - } 
catch (e) { - console.warn( - `[RedisSubscriber] Weird! There was an error while stopping the publication: `, - e - ); - } + /** + * Detaches from RedisSubscriptionManager + */ + stop() { + try { + RedisSubscriptionManager.detach(this); + } catch (e) { + console.warn( + `[RedisSubscriber] Weird! There was an error while stopping the publication: `, + e + ); } + } - /** - * Retrieves the fields that are used for matching the validity of the document - * - * @returns {array} - */ - getFieldsOfInterest() { - return this.observableCollection.fieldsOfInterest; - } + /** + * Retrieves the fields that are used for matching the validity of the document + * + * @returns {array} + */ + getFieldsOfInterest() { + return this.observableCollection.fieldsOfInterest; + } } diff --git a/lib/redis/RedisSubscriptionManager.js b/lib/redis/RedisSubscriptionManager.js index 78336cf7..adb8598f 100644 --- a/lib/redis/RedisSubscriptionManager.js +++ b/lib/redis/RedisSubscriptionManager.js @@ -1,214 +1,214 @@ -import { Meteor } from 'meteor/meteor'; -import { Random } from 'meteor/random'; -import { _ } from 'meteor/underscore'; -import debug from '../debug'; -import { RedisPipe, Events } from '../constants'; -import getFieldsOfInterestFromAll from './lib/getFieldsOfInterestFromAll'; -import Config from '../config'; +import { Meteor } from "meteor/meteor"; +import { Random } from "meteor/random"; +import { _ } from "meteor/underscore"; +import debug from "../debug"; +import { RedisPipe, Events } from "../constants"; +import getFieldsOfInterestFromAll from "./lib/getFieldsOfInterestFromAll"; +import Config from "../config"; class RedisSubscriptionManager { - init() { - if (this.isInitialized) { - return; - } - this.uid = Random.id(); - this.queue = new Meteor._AsynchronousQueue(); - this.store = {}; // {channel: [RedisSubscribers]} - this.channelHandlers = {}; // {channel: handler} - - this.isInitialized = true; + init() { + if (this.isInitialized) { + return; + } + this.uid = Random.id(); + this.queue = new Meteor._AsynchronousQueue(); + this.store = {}; // {channel: [RedisSubscribers]} + this.channelHandlers = {}; // {channel: handler} + + this.isInitialized = true; + } + + /** + * Returns all RedisSubscribers regardless of channel + */ + getAllRedisSubscribers() { + let redisSubscribers = []; + for (let channel in this.store) { + this.store[channel].forEach((_redisSubscriber) => + redisSubscribers.push(_redisSubscriber) + ); } - /** - * Returns all RedisSubscribers regardless of channel - */ - getAllRedisSubscribers() { - let redisSubscribers = []; - for (let channel in this.store) { - this.store[channel].forEach(_redisSubscriber => - redisSubscribers.push(_redisSubscriber) - ); + return redisSubscribers; + } + + /** + * @param redisSubscriber + */ + attach(redisSubscriber) { + this.queue.queueTask(() => { + _.each(redisSubscriber.channels, (channel) => { + if (!this.store[channel]) { + this.initializeChannel(channel); } - return redisSubscribers; - } - - /** - * @param redisSubscriber - */ - attach(redisSubscriber) { - this.queue.queueTask(() => { - _.each(redisSubscriber.channels, channel => { - if (!this.store[channel]) { - this.initializeChannel(channel); - } - - this.store[channel].push(redisSubscriber); - }); - }); - } + this.store[channel].push(redisSubscriber); + }); + }); + } + + /** + * @param redisSubscriber + */ + detach(redisSubscriber) { + this.queue.queueTask(() => { + _.each(redisSubscriber.channels, (channel) => { + if (!this.store[channel]) { + return debug( + "[RedisSubscriptionManager] Trying 
to detach a subscriber on a non existent channels." + ); + } else { + this.store[channel] = _.without(this.store[channel], redisSubscriber); - /** - * @param redisSubscriber - */ - detach(redisSubscriber) { - this.queue.queueTask(() => { - _.each(redisSubscriber.channels, channel => { - if (!this.store[channel]) { - return debug( - '[RedisSubscriptionManager] Trying to detach a subscriber on a non existent channels.' - ); - } else { - this.store[channel] = _.without( - this.store[channel], - redisSubscriber - ); - - if (this.store[channel].length === 0) { - this.destroyChannel(channel); - } - } - }); - }); + if (this.store[channel].length === 0) { + this.destroyChannel(channel); + } + } + }); + }); + } + + /** + * @param channel + */ + initializeChannel(channel) { + debug(`[RedisSubscriptionManager] Subscribing to channel: ${channel}`); + + // create the handler for this channel + const self = this; + const handler = function (message) { + self.queue.queueTask(() => { + return self.process(channel, message, true); + }); + }; + + this.channelHandlers[channel] = handler; + this.store[channel] = []; + + const { pubSubManager } = Config; + pubSubManager.subscribe(channel, handler); + } + + /** + * @param channel + */ + destroyChannel(channel) { + debug(`[RedisSubscriptionManager] Unsubscribing from channel: ${channel}`); + + const { pubSubManager } = Config; + pubSubManager.unsubscribe(channel, this.channelHandlers[channel]); + + delete this.store[channel]; + delete this.channelHandlers[channel]; + } + + /** + * @param channel + * @param data + * @param [fromRedis=false] + */ + async process(channel, data, fromRedis) { + // messages from redis that contain our uid were handled + // optimistically, so we can drop them. + if (fromRedis && data[RedisPipe.UID] === this.uid) { + return; } - /** - * @param channel - */ - initializeChannel(channel) { - debug(`[RedisSubscriptionManager] Subscribing to channel: ${channel}`); - - // create the handler for this channel - const self = this; - const handler = function(message) { - self.queue.queueTask(() => { - return self.process(channel, message, true); - }); - }; - - this.channelHandlers[channel] = handler; - this.store[channel] = []; - - const { pubSubManager } = Config; - pubSubManager.subscribe(channel, handler); + const subscribers = this.store[channel]; + if (!subscribers) { + return; } - /** - * @param channel - */ - destroyChannel(channel) { - debug( - `[RedisSubscriptionManager] Unsubscribing from channel: ${channel}` - ); + let isSynthetic = data[RedisPipe.SYNTHETIC]; - const { pubSubManager } = Config; - pubSubManager.unsubscribe(channel, this.channelHandlers[channel]); + debug( + `[RedisSubscriptionManager] Received ${ + isSynthetic ? "synthetic " : "" + }event: "${data[RedisPipe.EVENT]}" to "${channel}"` + ); - delete this.store[channel]; - delete this.channelHandlers[channel]; + if (subscribers.length === 0) { + return; } - /** - * @param channel - * @param data - * @param [fromRedis=false] - */ - async process(channel, data, fromRedis) { - // messages from redis that contain our uid were handled - // optimistically, so we can drop them. 
- if (fromRedis && data[RedisPipe.UID] === this.uid) { - return; - } - - const subscribers = this.store[channel]; - if (!subscribers) { - return; + if (!isSynthetic) { + const collection = subscribers[0].observableCollection.collection; + + let doc; + if (data[RedisPipe.EVENT] === Events.REMOVE) { + doc = data[RedisPipe.DOC]; + } else { + doc = await this.getDoc(collection, subscribers, data); + } + + // if by any chance it was deleted after it got dispatched + // doc will be undefined + if (!doc) { + return; + } + + for (const redisSubscriber of subscribers) { + try { + await redisSubscriber.process( + data[RedisPipe.EVENT], + doc, + data[RedisPipe.FIELDS] + ); + } catch (e) { + debug( + `[RedisSubscriptionManager] Exception while processing event: ${e.toString()}` + ); } - - let isSynthetic = data[RedisPipe.SYNTHETIC]; - - debug( - `[RedisSubscriptionManager] Received ${ - isSynthetic ? 'synthetic ' : '' - }event: "${data[RedisPipe.EVENT]}" to "${channel}"` - ); - - if (subscribers.length === 0) { - return; - } - - if (!isSynthetic) { - const collection = subscribers[0].observableCollection.collection; - - let doc; - if (data[RedisPipe.EVENT] === Events.REMOVE) { - doc = data[RedisPipe.DOC]; - } else { - doc = await this.getDoc(collection, subscribers, data); - } - - // if by any chance it was deleted after it got dispatched - // doc will be undefined - if (!doc) { - return; - } - - for (const redisSubscriber of subscribers) { - try { - await redisSubscriber.process( - data[RedisPipe.EVENT], - doc, - data[RedisPipe.FIELDS] - ); - } catch (e) { - debug( - `[RedisSubscriptionManager] Exception while processing event: ${e.toString()}` - ); - } - } - } else { - for (const redisSubscriber of subscribers) { - try { - await redisSubscriber.processSynthetic( - data[RedisPipe.EVENT], - data[RedisPipe.DOC], - data[RedisPipe.MODIFIER], - data[RedisPipe.MODIFIED_TOP_LEVEL_FIELDS] - ); - } catch (e) { - debug( - `[RedisSubscriptionManager] Exception while processing synthetic event: ${e.toString()}` - ); - } - } + } + } else { + for (const redisSubscriber of subscribers) { + try { + await redisSubscriber.processSynthetic( + data[RedisPipe.EVENT], + data[RedisPipe.DOC], + data[RedisPipe.MODIFIER], + data[RedisPipe.MODIFIED_TOP_LEVEL_FIELDS] + ); + } catch (e) { + debug( + `[RedisSubscriptionManager] Exception while processing synthetic event: ${e.toString()}` + ); } + } + } + } + + /** + * @param collection + * @param subscribers + * @param data + */ + async getDoc(collection, subscribers, data) { + let doc = data[RedisPipe.DOC]; + + if ( + collection._redisOplog && + !collection._redisOplog.protectAgainstRaceConditions + ) { + // If there's no protection against race conditions + // It means we have received the full doc in doc + + return doc; } - /** - * @param collection - * @param subscribers - * @param data - */ - async getDoc(collection, subscribers, data) { - let doc = data[RedisPipe.DOC]; - - if (collection._redisOplog && !collection._redisOplog.protectAgainstRaceConditions) { - // If there's no protection against race conditions - // It means we have received the full doc in doc - - return doc; - } - - const fieldsOfInterest = getFieldsOfInterestFromAll(subscribers); - - if (fieldsOfInterest === true) { - doc = await collection.findOneAsync(doc._id); - } else { - doc = await collection.findOneAsync(doc._id, { fields: fieldsOfInterest }); - } + const fieldsOfInterest = getFieldsOfInterestFromAll(subscribers); - return doc; + if (fieldsOfInterest === true) { + doc = await 
collection.findOneAsync(doc._id); + } else { + doc = await collection.findOneAsync(doc._id, { + fields: fieldsOfInterest, + }); } + + return doc; + } } export default new RedisSubscriptionManager(); diff --git a/lib/redis/getRedisClient.js b/lib/redis/getRedisClient.js index 9f4d5762..5289c3df 100644 --- a/lib/redis/getRedisClient.js +++ b/lib/redis/getRedisClient.js @@ -1,6 +1,6 @@ -import redis from 'redis'; -import Config from '../config'; -import { Meteor } from 'meteor/meteor'; +import redis from "redis"; +import Config from "../config"; +import { Meteor } from "meteor/meteor"; // Redis requires two connections for pushing and listening to data let redisPusher; @@ -12,13 +12,15 @@ let redisListener; * @returns {*} */ export function getRedisPusher() { - if (!redisPusher) { - redisPusher = redis.createClient(Object.assign({}, Config.redis, { - retry_strategy: getRetryStrategy() - })); - } + if (!redisPusher) { + redisPusher = redis.createClient( + Object.assign({}, Config.redis, { + retry_strategy: getRetryStrategy(), + }) + ); + } - return redisPusher; + return redisPusher; } /** @@ -27,17 +29,19 @@ export function getRedisPusher() { * @param onConnect * @returns {*} */ -export function getRedisListener({onConnect} = {}) { - if (!redisListener) { - redisListener = redis.createClient(Object.assign({}, Config.redis, { - retry_strategy: getRetryStrategy() - })); +export function getRedisListener({ onConnect } = {}) { + if (!redisListener) { + redisListener = redis.createClient( + Object.assign({}, Config.redis, { + retry_strategy: getRetryStrategy(), + }) + ); - // we only attach events here - attachEvents(redisListener, {onConnect}); - } + // we only attach events here + attachEvents(redisListener, { onConnect }); + } - return redisListener; + return redisListener; } /** @@ -45,20 +49,23 @@ export function getRedisListener({onConnect} = {}) { * @param client * @param onConnect */ -function attachEvents(client, {onConnect}) { - const functions = ['connect', 'reconnecting', 'error', 'end']; +function attachEvents(client, { onConnect }) { + const functions = ["connect", "reconnecting", "error", "end"]; - functions.forEach(fn => { - redisListener.on(fn, Meteor.bindEnvironment(async function (...args) { - if (fn === 'connect' && onConnect) { - await onConnect(); - } + functions.forEach((fn) => { + redisListener.on( + fn, + Meteor.bindEnvironment(async function (...args) { + if (fn === "connect" && onConnect) { + await onConnect(); + } - if (Config.redisExtras.events[fn]) { - return Config.redisExtras.events[fn](...args); - } - })) - }); + if (Config.redisExtras.events[fn]) { + return Config.redisExtras.events[fn](...args); + } + }) + ); + }); } /** @@ -66,9 +73,9 @@ function attachEvents(client, {onConnect}) { * @returns {Function} */ function getRetryStrategy() { - return function(...args) { - if (Config.redisExtras.retry_strategy) { - return Config.redisExtras.retry_strategy(...args); - } + return function (...args) { + if (Config.redisExtras.retry_strategy) { + return Config.redisExtras.retry_strategy(...args); } + }; } diff --git a/lib/redis/lib/getFieldsOfInterestFromAll.js b/lib/redis/lib/getFieldsOfInterestFromAll.js index c8b24702..e84a3bc5 100644 --- a/lib/redis/lib/getFieldsOfInterestFromAll.js +++ b/lib/redis/lib/getFieldsOfInterestFromAll.js @@ -1,32 +1,32 @@ function getFieldsOfInterestFromAll(subscribers) { - let allFields = []; - for (let i = 0; i < subscribers.length; i++) { - const subscriber = subscribers[i]; - let fields = subscriber.getFieldsOfInterest(); - - if (fields 
=== true) { - // end of story, there is an observableCollection that needs all fields - // therefore we will query for all fields - return true; - } else { - allFields = _.union(allFields, fields); - } + let allFields = []; + for (let i = 0; i < subscribers.length; i++) { + const subscriber = subscribers[i]; + let fields = subscriber.getFieldsOfInterest(); + + if (fields === true) { + // end of story, there is an observableCollection that needs all fields + // therefore we will query for all fields + return true; + } else { + allFields = _.union(allFields, fields); } + } - // this should not happen, but as a measure of safety - if (allFields.length === 0) { - return true; - } + // this should not happen, but as a measure of safety + if (allFields.length === 0) { + return true; + } - allFields = removeChildrenOfParents(allFields); + allFields = removeChildrenOfParents(allFields); - let fieldsObject = {}; + let fieldsObject = {}; - allFields.forEach(field => { - fieldsObject[field] = 1; - }); + allFields.forEach((field) => { + fieldsObject[field] = 1; + }); - return fieldsObject; + return fieldsObject; } /** @@ -34,20 +34,20 @@ function getFieldsOfInterestFromAll(subscribers) { * @return {array} array */ export function removeChildrenOfParents(array) { - let freshArray = []; - - array.forEach((element, idxe) => { - // add it to freshArray only if there's no field starting with {me} + '.' inside the array - const foundParent = array.find((subelement, idxs) => { - return idxe !== idxs && element.indexOf(`${subelement}.`) === 0; - }); + let freshArray = []; - if (!foundParent) { - freshArray.push(element); - } + array.forEach((element, idxe) => { + // add it to freshArray only if there's no field starting with {me} + '.' inside the array + const foundParent = array.find((subelement, idxs) => { + return idxe !== idxs && element.indexOf(`${subelement}.`) === 0; }); - return freshArray; + if (!foundParent) { + freshArray.push(element); + } + }); + + return freshArray; } export default getFieldsOfInterestFromAll; diff --git a/lib/utils/extractIdsFromSelector.js b/lib/utils/extractIdsFromSelector.js index 85ab3351..1c30e0a8 100644 --- a/lib/utils/extractIdsFromSelector.js +++ b/lib/utils/extractIdsFromSelector.js @@ -1,20 +1,20 @@ -import { _ } from 'meteor/underscore'; +import { _ } from "meteor/underscore"; -export default function(selector) { - const filter = selector._id; - let ids = []; +export default function (selector) { + const filter = selector._id; + let ids = []; - if (_.isObject(filter) && !filter._str) { - if (!filter.$in) { - throw new Meteor.Error( - `When you subscribe directly, you can't have other specified fields rather than $in` - ); - } - - ids = filter.$in; - } else { - ids.push(filter); + if (_.isObject(filter) && !filter._str) { + if (!filter.$in) { + throw new Meteor.Error( + `When you subscribe directly, you can't have other specified fields rather than $in` + ); } - return ids; + ids = filter.$in; + } else { + ids.push(filter); + } + + return ids; } diff --git a/lib/utils/getChannelName.js b/lib/utils/getChannelName.js index 52cbf57b..aee6f3ae 100644 --- a/lib/utils/getChannelName.js +++ b/lib/utils/getChannelName.js @@ -1,4 +1,4 @@ -import Config from '../config'; +import Config from "../config"; /** * Given a base channel name, applies the global prefix. 
@@ -7,5 +7,5 @@ import Config from '../config'; * @return {string} */ export default function getChannelName(baseChannelName) { - return (Config.globalRedisPrefix || '') + baseChannelName; + return (Config.globalRedisPrefix || "") + baseChannelName; } diff --git a/lib/utils/getDedicatedChannel.js b/lib/utils/getDedicatedChannel.js index 686ac854..bf800251 100644 --- a/lib/utils/getDedicatedChannel.js +++ b/lib/utils/getDedicatedChannel.js @@ -1,7 +1,7 @@ -import { MongoID } from 'meteor/mongo-id'; -import getChannelName from './getChannelName'; +import { MongoID } from "meteor/mongo-id"; +import getChannelName from "./getChannelName"; -export default function getDedicatedChannel(collectionName, docId){ +export default function getDedicatedChannel(collectionName, docId) { const channelName = `${collectionName}::${MongoID.idStringify(docId)}`; return getChannelName(channelName); } diff --git a/lib/utils/getFields.js b/lib/utils/getFields.js index 2457c0c7..a4fe6d8e 100644 --- a/lib/utils/getFields.js +++ b/lib/utils/getFields.js @@ -3,41 +3,41 @@ * @param mutator */ export default function getFields(mutator) { - // compute modified fields - var fields = []; - var topLevelFields = []; + // compute modified fields + var fields = []; + var topLevelFields = []; - Object.entries(mutator).forEach(function ([op, params]) { - if (op[0] == '$') { - Object.keys(params).forEach(function (field) { - // record the field we are trying to change - if (!fields.includes(field)) { - // fields.push(field); - // topLevelFields.push(field.split('.')[0]); + Object.entries(mutator).forEach(function ([op, params]) { + if (op[0] == "$") { + Object.keys(params).forEach(function (field) { + // record the field we are trying to change + if (!fields.includes(field)) { + // fields.push(field); + // topLevelFields.push(field.split('.')[0]); - // like { $set: { 'array.1.xx' } } - const specificPositionFieldMatch = (/\.[\d]+(\.)?/).exec(field); - if (specificPositionFieldMatch) { - fields.push(field.slice(0, specificPositionFieldMatch.index)); - } else { - if (field.indexOf('.$') !== -1) { - if (field.indexOf('.$.') !== -1) { - fields.push(field.split('.$.')[0]); - } else { - fields.push(field.split('.$')[0]); - } - } else { - fields.push(field); - } - } + // like { $set: { 'array.1.xx' } } + const specificPositionFieldMatch = /\.[\d]+(\.)?/.exec(field); + if (specificPositionFieldMatch) { + fields.push(field.slice(0, specificPositionFieldMatch.index)); + } else { + if (field.indexOf(".$") !== -1) { + if (field.indexOf(".$.") !== -1) { + fields.push(field.split(".$.")[0]); + } else { + fields.push(field.split(".$")[0]); + } + } else { + fields.push(field); + } + } - topLevelFields.push(field.split('.')[0]); - } - }); - } else { - fields.push(op) + topLevelFields.push(field.split(".")[0]); } - }); + }); + } else { + fields.push(op); + } + }); - return {fields, topLevelFields}; -}; + return { fields, topLevelFields }; +} diff --git a/lib/utils/isRemovedNonExistent.js b/lib/utils/isRemovedNonExistent.js index 26a0b6bd..c89ceae1 100644 --- a/lib/utils/isRemovedNonExistent.js +++ b/lib/utils/isRemovedNonExistent.js @@ -1,3 +1,3 @@ export default function isRemovedNonExistent(e) { - return e.toString().indexOf('Removed nonexistent document') !== -1; + return e.toString().indexOf("Removed nonexistent document") !== -1; } diff --git a/lib/vent/Vent.js b/lib/vent/Vent.js index f4c6258b..75b9f664 100644 --- a/lib/vent/Vent.js +++ b/lib/vent/Vent.js @@ -1,126 +1,130 @@ -import { VentConstants } from '../constants'; -import { Meteor } 
from 'meteor/meteor'; -import { _ } from 'meteor/underscore'; -import Config from '../config'; +import { VentConstants } from "../constants"; +import { Meteor } from "meteor/meteor"; +import { _ } from "meteor/underscore"; +import Config from "../config"; // TODO: // Unify listening of events with RedisSubscriptionManager export default class Vent { - /** - * @param name - * @param fn - * @returns {*|any|Observable} - */ - static publish(name, fn) { - // check initialization - if (!Config.isInitialized) { - throw new Meteor.Error('not-initialized', 'RedisOplog is not initialized at the time of defining this publish. Make sure you initialize it before'); - } - - if (_.isObject(name)) { - _.each(name, (fn, _name) => { - Vent.publish(_name, fn); - }); - - return; - } - - // validate if everything is in order - Vent._validateArguments(name, fn); - - // create the publication properly - return Vent._createPublishEndPoint(name, fn); + /** + * @param name + * @param fn + * @returns {*|any|Observable} + */ + static publish(name, fn) { + // check initialization + if (!Config.isInitialized) { + throw new Meteor.Error( + "not-initialized", + "RedisOplog is not initialized at the time of defining this publish. Make sure you initialize it before" + ); } - /** - * @param {string} channel - * @param {object} object - */ - static emit(channel, object) { - const {pubSubManager} = Config; + if (_.isObject(name)) { + _.each(name, (fn, _name) => { + Vent.publish(_name, fn); + }); - pubSubManager.publish(channel, object); + return; } - /** - * Creates the publish endpoint - * - * @param name - * @param fn - * @returns {*|any|Observable} - * @private - */ - static _createPublishEndPoint(name, fn) { - return Meteor.publish(name, async function (collectionId, ...args) { - Vent._extendPublishContext(this, name, collectionId); - - try { - await fn.call(this, ...args); - } catch (e) { - // we do this because the errors in here are silenced - console.error(e); - throw e; - } - - this.ready(); - }); - } - - /** - * @param context - * @param name - * @param collectionId - * @private - */ - static _extendPublishContext(context, name, collectionId) { - const channelHandlers = []; - const { pubSubManager } = Config; - - Object.assign(context, { - on(channel, redisEventHandler) { - // create the handler for this channel - const handler = async function(message) { - const data = await redisEventHandler.call(context, message); - - if (data) { - context._session.send({ - msg: 'changed', - [VentConstants.PREFIX]: '1', - id: VentConstants.getPrefix(collectionId, name), - [VentConstants.EVENT_VARIABLE]: data - }); - } - }; - channelHandlers.push({ channel, handler }); - pubSubManager.subscribe(channel, handler); - }, - }); - - context.onStop(function () { - channelHandlers.forEach(({ channel, handler }) => { - pubSubManager.unsubscribe(channel, handler); + // validate if everything is in order + Vent._validateArguments(name, fn); + + // create the publication properly + return Vent._createPublishEndPoint(name, fn); + } + + /** + * @param {string} channel + * @param {object} object + */ + static emit(channel, object) { + const { pubSubManager } = Config; + + pubSubManager.publish(channel, object); + } + + /** + * Creates the publish endpoint + * + * @param name + * @param fn + * @returns {*|any|Observable} + * @private + */ + static _createPublishEndPoint(name, fn) { + return Meteor.publish(name, async function (collectionId, ...args) { + Vent._extendPublishContext(this, name, collectionId); + + try { + await fn.call(this, 
...args); + } catch (e) { + // we do this because the errors in here are silenced + console.error(e); + throw e; + } + + this.ready(); + }); + } + + /** + * @param context + * @param name + * @param collectionId + * @private + */ + static _extendPublishContext(context, name, collectionId) { + const channelHandlers = []; + const { pubSubManager } = Config; + + Object.assign(context, { + on(channel, redisEventHandler) { + // create the handler for this channel + const handler = async function (message) { + const data = await redisEventHandler.call(context, message); + + if (data) { + context._session.send({ + msg: "changed", + [VentConstants.PREFIX]: "1", + id: VentConstants.getPrefix(collectionId, name), + [VentConstants.EVENT_VARIABLE]: data, }); - }); - } - - /** - * @param name - * @param fn - * @private - */ - static _validateArguments(name, fn) { - // validate arguments - if (!_.isString(name)) { - if (!_.isObject(name)) { - throw new Meteor.Error('invalid-definition', 'Argument is invalid') - } - - } else { - if (!_.isFunction(fn)) { - throw new Meteor.Error('invalid-definition', 'The second argument needs to be a function') - } - } + } + }; + channelHandlers.push({ channel, handler }); + pubSubManager.subscribe(channel, handler); + }, + }); + + context.onStop(function () { + channelHandlers.forEach(({ channel, handler }) => { + pubSubManager.unsubscribe(channel, handler); + }); + }); + } + + /** + * @param name + * @param fn + * @private + */ + static _validateArguments(name, fn) { + // validate arguments + if (!_.isString(name)) { + if (!_.isObject(name)) { + throw new Meteor.Error("invalid-definition", "Argument is invalid"); + } + } else { + if (!_.isFunction(fn)) { + throw new Meteor.Error( + "invalid-definition", + "The second argument needs to be a function" + ); + } } + } } - diff --git a/lib/vent/VentClient.js b/lib/vent/VentClient.js index cd71b691..f3fe5588 100644 --- a/lib/vent/VentClient.js +++ b/lib/vent/VentClient.js @@ -1,108 +1,110 @@ -import { VentConstants } from '../constants'; -import { Random } from 'meteor/random'; -import { DDPCommon } from 'meteor/ddp-common'; +import { VentConstants } from "../constants"; +import { Random } from "meteor/random"; +import { DDPCommon } from "meteor/ddp-common"; /** * Handles vents inside Meteor */ export default class VentClient { - constructor() { - this.store = {}; - this.listen(Meteor.connection); - } - - subscribe(name, ...args) { - console.log("8999999999") - const subscription = new VentClientSubscription(this, name); - this.add(subscription); - - return subscription.subscribe(...args); - } - - listen(ddpConnection) { - ddpConnection._stream.on('message', (raw_msg) => { - // avoid parsing unnecessary ddp events - const search = `{"msg":"changed","${VentConstants.PREFIX}":"1`; - if (raw_msg.substr(0, search.length) !== search) { - return; - } - - const msg = DDPCommon.parseDDP(raw_msg); - const subscription = this.store[msg.id]; - if (subscription) { - subscription.handle(msg[VentConstants.EVENT_VARIABLE]); - } - }); - } - - /** - * {VentClientSubscription} - * @param subscription - */ - add(subscription) { - this.store[subscription.id] = subscription; - } - - /** - * @param {VentClientSubscription} subscription - */ - remove(subscription) { - delete this.store[subscription.id]; - } + constructor() { + this.store = {}; + this.listen(Meteor.connection); + } + + subscribe(name, ...args) { + console.log("8999999999"); + const subscription = new VentClientSubscription(this, name); + this.add(subscription); + + return 
subscription.subscribe(...args); + } + + listen(ddpConnection) { + ddpConnection._stream.on("message", (raw_msg) => { + // avoid parsing unnecessary ddp events + const search = `{"msg":"changed","${VentConstants.PREFIX}":"1`; + if (raw_msg.substr(0, search.length) !== search) { + return; + } + + const msg = DDPCommon.parseDDP(raw_msg); + const subscription = this.store[msg.id]; + if (subscription) { + subscription.handle(msg[VentConstants.EVENT_VARIABLE]); + } + }); + } + + /** + * {VentClientSubscription} + * @param subscription + */ + add(subscription) { + this.store[subscription.id] = subscription; + } + + /** + * @param {VentClientSubscription} subscription + */ + remove(subscription) { + delete this.store[subscription.id]; + } } /** * Handles Vent subscription */ class VentClientSubscription { - constructor(client, name) { - this.client = client; - this._name = name; - this._id = Random.id(); - } - - get id() { - return VentConstants.getPrefix(this._id, this._name); - } - - /** - * Subscribes to Meteor - * - * @param args - * @returns {*} - */ - subscribe(...args) { - const self = this; - - const handler = Meteor.subscribe(this._name, this._id, ...args); - - const oldStop = handler.stop; - Object.assign(handler, { - listen(eventHandler) { - if (!_.isFunction(eventHandler)) { - throw new Meteor.Error('invalid-argument', 'You should pass a function to listen()'); - } - - self._eventHandler = eventHandler; - }, - stop() { - self.client.remove(self); - - return oldStop.call(handler); - } - }); - - return handler; - } - - /** - * Watches the incomming events - */ - handle(event) { - if (this._eventHandler) { - this._eventHandler(event); - } else { - + constructor(client, name) { + this.client = client; + this._name = name; + this._id = Random.id(); + } + + get id() { + return VentConstants.getPrefix(this._id, this._name); + } + + /** + * Subscribes to Meteor + * + * @param args + * @returns {*} + */ + subscribe(...args) { + const self = this; + + const handler = Meteor.subscribe(this._name, this._id, ...args); + + const oldStop = handler.stop; + Object.assign(handler, { + listen(eventHandler) { + if (!_.isFunction(eventHandler)) { + throw new Meteor.Error( + "invalid-argument", + "You should pass a function to listen()" + ); } + + self._eventHandler = eventHandler; + }, + stop() { + self.client.remove(self); + + return oldStop.call(handler); + }, + }); + + return handler; + } + + /** + * Watches the incomming events + */ + handle(event) { + if (this._eventHandler) { + this._eventHandler(event); + } else { } + } } diff --git a/package.js b/package.js index d7b8e398..74668b34 100644 --- a/package.js +++ b/package.js @@ -1,61 +1,61 @@ Package.describe({ - name: 'cultofcoders:redis-oplog', - version: '2.2.1', - // Brief, one-line summary of the package. - summary: "Replacement for Meteor's MongoDB oplog implementation", - // URL to the Git repository containing the source code for this package. - git: 'https://github.com/cult-of-coders/redis-oplog', - // By default, Meteor will default to using README.md for documentation. - // To avoid submitting documentation, set this field to null. - documentation: 'README.md' + name: "cultofcoders:redis-oplog", + version: "2.2.1", + // Brief, one-line summary of the package. + summary: "Replacement for Meteor's MongoDB oplog implementation", + // URL to the Git repository containing the source code for this package. + git: "https://github.com/cult-of-coders/redis-oplog", + // By default, Meteor will default to using README.md for documentation. 
+ // To avoid submitting documentation, set this field to null. + documentation: "README.md", }); Npm.depends({ - redis: '3.1.2', - 'deep-extend': '0.6.0', - 'lodash.clonedeep': '4.5.0' + redis: "3.1.2", + "deep-extend": "0.6.0", + "lodash.clonedeep": "4.5.0", }); -Package.onUse(function(api) { - api.versionsFrom(['1.12.2', '2.8.1', '2.12', '3.0-rc.0']); - api.use([ - 'underscore', - 'ecmascript', - 'ejson', - 'minimongo', - 'mongo', - 'random', - 'ddp-server', - 'diff-sequence', - 'id-map', - 'mongo-id', - 'tracker' - ]); - - api.mainModule('redis-oplog.js', 'server'); - api.mainModule('redis-oplog.client.js', 'client'); +Package.onUse(function (api) { + api.versionsFrom(["1.12.2", "2.8.1", "2.12", "3.0-rc.0"]); + api.use([ + "underscore", + "ecmascript", + "ejson", + "minimongo", + "mongo", + "random", + "ddp-server", + "diff-sequence", + "id-map", + "mongo-id", + "tracker", + ]); + + api.mainModule("redis-oplog.js", "server"); + api.mainModule("redis-oplog.client.js", "client"); }); -Package.onTest(function(api) { - api.use('cultofcoders:redis-oplog'); +Package.onTest(function (api) { + api.use("cultofcoders:redis-oplog"); - // extensions - api.use('aldeed:collection2@4.0.1'); - api.use('reywood:publish-composite@1.8.9'); + // extensions + api.use("aldeed:collection2@4.0.1"); + api.use("reywood:publish-composite@1.8.9"); - api.use('ecmascript'); - api.use('tracker'); - api.use('mongo'); - api.use('random'); - api.use('accounts-password'); - api.use('matb33:collection-hooks@1.3.1'); - api.use('alanning:roles@4.0.0-alpha.1'); + api.use("ecmascript"); + api.use("tracker"); + api.use("mongo"); + api.use("random"); + api.use("accounts-password"); + api.use("matb33:collection-hooks@1.3.1"); + api.use("alanning:roles@4.0.0-alpha.1"); - api.use(['meteortesting:mocha@3.0.3-beta300.0']); + api.use(["meteortesting:mocha@3.0.3-beta300.0"]); - api.mainModule('testing/main.server.js', 'server'); - api.addFiles('testing/publishComposite/boot.js', 'server'); - api.addFiles('testing/optimistic-ui/boot.js', 'server'); + api.mainModule("testing/main.server.js", "server"); + api.addFiles("testing/publishComposite/boot.js", "server"); + api.addFiles("testing/optimistic-ui/boot.js", "server"); - api.mainModule('testing/main.client.js', 'client'); + api.mainModule("testing/main.client.js", "client"); }); diff --git a/redis-oplog.client.js b/redis-oplog.client.js index 100108cd..16ec06c2 100644 --- a/redis-oplog.client.js +++ b/redis-oplog.client.js @@ -1,4 +1,4 @@ -import VentClient from './lib/vent/VentClient'; +import VentClient from "./lib/vent/VentClient"; const Vent = new VentClient(); diff --git a/redis-oplog.js b/redis-oplog.js index 4d688391..c725e802 100644 --- a/redis-oplog.js +++ b/redis-oplog.js @@ -1,39 +1,39 @@ -import './lib/mongo//mongoCollectionNames'; +import "./lib/mongo//mongoCollectionNames"; -import { RedisPipe, Events } from './lib/constants'; -import { Meteor } from 'meteor/meteor'; -import init from './lib/init'; -import Config from './lib/config'; -import { getRedisListener, getRedisPusher } from './lib/redis/getRedisClient'; -import SyntheticMutator from './lib/mongo/SyntheticMutator'; -import ObservableCollection from './lib/cache/ObservableCollection'; -import Vent from './lib/vent/Vent'; +import { RedisPipe, Events } from "./lib/constants"; +import { Meteor } from "meteor/meteor"; +import init from "./lib/init"; +import Config from "./lib/config"; +import { getRedisListener, getRedisPusher } from "./lib/redis/getRedisClient"; +import SyntheticMutator from 
"./lib/mongo/SyntheticMutator"; +import ObservableCollection from "./lib/cache/ObservableCollection"; +import Vent from "./lib/vent/Vent"; const RedisOplog = { - init, + init, }; // Warnings -Meteor.startup(function() { - if (Package['insecure']) { - console.log('RedisOplog does not support the insecure package.'); - } +Meteor.startup(function () { + if (Package["insecure"]) { + console.log("RedisOplog does not support the insecure package."); + } }); export { - RedisOplog, - SyntheticMutator, - ObservableCollection, - RedisPipe, - Config, - Events, - Vent, - getRedisListener, - getRedisPusher, + RedisOplog, + SyntheticMutator, + ObservableCollection, + RedisPipe, + Config, + Events, + Vent, + getRedisListener, + getRedisPusher, }; if (process.env.REDIS_OPLOG_SETTINGS) { - init(JSON.parse(process.env.REDIS_OPLOG_SETTINGS)); + init(JSON.parse(process.env.REDIS_OPLOG_SETTINGS)); } else if (Meteor.settings.redisOplog) { - init(Meteor.settings.redisOplog); + init(Meteor.settings.redisOplog); } From 94cd763ed8387628f8c81a884b6d55cd7752ac3f Mon Sep 17 00:00:00 2001 From: matheusccastro Date: Mon, 6 May 2024 16:43:51 -0300 Subject: [PATCH 05/33] feat: update tests and fixing issues found. --- lib/cache/ObservableCollection.js | 4 ++-- lib/mongo/ObserveMultiplex.js | 2 +- lib/mongo/PollingObserveDriver.js | 2 ++ lib/mongo/RedisOplogObserveDriver.js | 4 ++-- lib/redis/PubSubManager.js | 20 ++++++++++---------- lib/redis/RedisSubscriber.js | 2 +- lib/redis/RedisSubscriptionManager.js | 4 ++-- lib/vent/Vent.js | 5 +++-- lib/vent/VentClient.js | 1 - testing/accounts/server.js | 14 +++++++------- testing/boot.js | 6 ++++++ testing/collection-defaults/server.js | 6 +++--- testing/custom-publications/server.js | 14 +++++++------- testing/lib/helpers.js | 2 -- testing/object-id/server.js | 12 ++++++------ testing/optimistic-ui/boot.js | 3 +++ testing/polling/server.js | 12 ++++++------ testing/publishComposite/boot.js | 24 +++++++++++++++--------- testing/publishComposite/loadFixtures.js | 10 +++++----- testing/vent/boot.js | 4 ++-- 20 files changed, 83 insertions(+), 68 deletions(-) diff --git a/lib/cache/ObservableCollection.js b/lib/cache/ObservableCollection.js index e8a399f8..51ada6b6 100644 --- a/lib/cache/ObservableCollection.js +++ b/lib/cache/ObservableCollection.js @@ -111,8 +111,8 @@ export default class ObservableCollection { const context = DDP._CurrentPublicationInvocation.get(); await cursor.call( context, - cursorDescription.options, - cursorDescription.selector + this.cursorDescription.options, + this.cursorDescription.selector ); } } diff --git a/lib/mongo/ObserveMultiplex.js b/lib/mongo/ObserveMultiplex.js index 422a1bfc..d43c3aa4 100644 --- a/lib/mongo/ObserveMultiplex.js +++ b/lib/mongo/ObserveMultiplex.js @@ -55,7 +55,7 @@ Object.assign(ObserveMultiplexer.prototype, { 1 ); - self._queue.runTask(async function () { + await self._queue.runTask(async function () { self._handles[handle._id] = handle; // Send out whatever adds we have so far (whether or not we the // multiplexer is ready). diff --git a/lib/mongo/PollingObserveDriver.js b/lib/mongo/PollingObserveDriver.js index 573ac8ab..4d88aab8 100644 --- a/lib/mongo/PollingObserveDriver.js +++ b/lib/mongo/PollingObserveDriver.js @@ -146,6 +146,8 @@ export default function PollingObserveDriver(options) { } Object.assign(PollingObserveDriver.prototype, { + // Do nothing, this is just used to have compatibility with the init call when initializing it. + init() {}, // This is always called through _.throttle (except once at startup). 
_unthrottledEnsurePollIsScheduled: function () { var self = this; diff --git a/lib/mongo/RedisOplogObserveDriver.js b/lib/mongo/RedisOplogObserveDriver.js index 77d0251c..4945011e 100644 --- a/lib/mongo/RedisOplogObserveDriver.js +++ b/lib/mongo/RedisOplogObserveDriver.js @@ -55,8 +55,8 @@ export default class RedisOplogObserveDriver { // This is to mitigate the issue when we run init the first time on a subscription // And if you are using packages like reactive-publish // Because inside here we do a .find().fetch(), and that's considered reactive - await Tracker.nonreactive(() => { - return this.observableCollection.init(); + await Tracker.nonreactive(async () => { + await this.observableCollection.init(); }); this.redisSubscriber = new RedisSubscriber( diff --git a/lib/redis/PubSubManager.js b/lib/redis/PubSubManager.js index 24158f3e..04f68f87 100644 --- a/lib/redis/PubSubManager.js +++ b/lib/redis/PubSubManager.js @@ -20,8 +20,8 @@ export default class PubSubManager { * @param {string} channel * @param {object} message */ - publish(channel, message) { - this.pusher.publish(channel, EJSON.stringify(message)); + async publish(channel, message) { + await this.pusher.publish(channel, EJSON.stringify(message)); } /** @@ -29,9 +29,9 @@ export default class PubSubManager { * @param {function} handler */ subscribe(channel, handler) { - this.queue.queueTask(() => { + this.queue.queueTask(async () => { if (!this.channelHandlers[channel]) { - this._initChannel(channel); + await this._initChannel(channel); } this.channelHandlers[channel].push(handler); @@ -43,7 +43,7 @@ export default class PubSubManager { * @param {function} handler */ unsubscribe(channel, handler) { - this.queue.queueTask(() => { + this.queue.queueTask(async () => { if (!this.channelHandlers[channel]) { return; } @@ -55,7 +55,7 @@ export default class PubSubManager { ); if (this.channelHandlers[channel].length === 0) { - this._destroyChannel(channel); + await this._destroyChannel(channel); } }); } @@ -84,8 +84,8 @@ export default class PubSubManager { * @param channel * @private */ - _initChannel(channel) { - this.listener.subscribe(channel); + async _initChannel(channel) { + await this.listener.subscribe(channel); this.channelHandlers[channel] = []; } @@ -94,8 +94,8 @@ export default class PubSubManager { * @param channel * @private */ - _destroyChannel(channel) { - this.listener.unsubscribe(channel); + async _destroyChannel(channel) { + await this.listener.unsubscribe(channel); delete this.channelHandlers[channel]; } diff --git a/lib/redis/RedisSubscriber.js b/lib/redis/RedisSubscriber.js index b8f0e113..83849bdd 100644 --- a/lib/redis/RedisSubscriber.js +++ b/lib/redis/RedisSubscriber.js @@ -56,7 +56,7 @@ export default class RedisSubscriber { * @param modifiedTopLevelFields */ async processSynthetic(event, doc, modifier, modifiedTopLevelFields) { - return syntheticProcessor( + await syntheticProcessor( this.observableCollection, event, doc, diff --git a/lib/redis/RedisSubscriptionManager.js b/lib/redis/RedisSubscriptionManager.js index adb8598f..dc0ec191 100644 --- a/lib/redis/RedisSubscriptionManager.js +++ b/lib/redis/RedisSubscriptionManager.js @@ -78,8 +78,8 @@ class RedisSubscriptionManager { // create the handler for this channel const self = this; const handler = function (message) { - self.queue.queueTask(() => { - return self.process(channel, message, true); + self.queue.queueTask(async () => { + await self.process(channel, message, true); }); }; diff --git a/lib/vent/Vent.js b/lib/vent/Vent.js index 
75b9f664..12aa0258 100644 --- a/lib/vent/Vent.js +++ b/lib/vent/Vent.js @@ -40,10 +40,10 @@ export default class Vent { * @param {string} channel * @param {object} object */ - static emit(channel, object) { + static async emit(channel, object) { const { pubSubManager } = Config; - pubSubManager.publish(channel, object); + await pubSubManager.publish(channel, object); } /** @@ -95,6 +95,7 @@ export default class Vent { }); } }; + channelHandlers.push({ channel, handler }); pubSubManager.subscribe(channel, handler); }, diff --git a/lib/vent/VentClient.js b/lib/vent/VentClient.js index f3fe5588..61c4be98 100644 --- a/lib/vent/VentClient.js +++ b/lib/vent/VentClient.js @@ -12,7 +12,6 @@ export default class VentClient { } subscribe(name, ...args) { - console.log("8999999999"); const subscription = new VentClientSubscription(this, name); this.add(subscription); diff --git a/testing/accounts/server.js b/testing/accounts/server.js index ea3922e1..88896327 100644 --- a/testing/accounts/server.js +++ b/testing/accounts/server.js @@ -33,8 +33,8 @@ Meteor.publish('accounts_userData', function(options) { ); }); -Meteor.publish('accounts_usersAssoc', function() { - let _groups = Roles.getGroupsForUser(this.userId, 'subscribed'); +Meteor.publish('accounts_usersAssoc', async function() { + let _groups = await Roles.getScopesForUserAsync(this.userId, 'subscribed'); return Meteor.users.find( { @@ -56,15 +56,15 @@ Meteor.publish('accounts_usersAssoc', function() { }); Meteor.methods({ - accounts_createUser(data) { + async accounts_createUser(data) { const email = `${Random.id()}@x.com`; - const userId = Accounts.createUser({ + const userId = await Accounts.createUserAsync({ username: Random.id(), email, password: '12345', }); - Meteor.users.update(userId, { + await Meteor.users.updateAsync(userId, { $set: data, }); @@ -73,8 +73,8 @@ Meteor.methods({ email, }; }, - accounts_updateUser(filters, modifier) { - Meteor.users.update(filters, modifier, { + async accounts_updateUser(filters, modifier) { + await Meteor.users.updateAsync(filters, modifier, { optimistic: false, }); }, diff --git a/testing/boot.js b/testing/boot.js index 6812fe1a..765da103 100644 --- a/testing/boot.js +++ b/testing/boot.js @@ -49,12 +49,18 @@ export { Collections, opts, config }; if (Meteor.isServer) { _.each(Collections, (Collection, key) => { Collection.allow({ + insertAsync: () => true, + updateAsync: () => true, + removeAsync: () => true, insert: () => true, update: () => true, remove: () => true, }); Collection.deny({ + insertAsync: () => false, + updateAsync: () => false, + removeAsync: () => false, insert: () => false, update: () => false, remove: () => false, diff --git a/testing/collection-defaults/server.js b/testing/collection-defaults/server.js index 39283747..6db50e17 100644 --- a/testing/collection-defaults/server.js +++ b/testing/collection-defaults/server.js @@ -20,12 +20,12 @@ Items.configureRedisOplog({ Meteor.methods({ 'collection_defaults.items.insert'(...args) { - return Items.insert(...args); + return Items.insertAsync(...args); }, 'collection_defaults.items.update'(...args) { - return Items.update(...args); + return Items.updateAsync(...args); }, 'collection_defaults.items.remove'(...args) { - return Items.remove(...args); + return Items.removeAsync(...args); }, }); diff --git a/testing/custom-publications/server.js b/testing/custom-publications/server.js index 584d474b..d61b56a5 100644 --- a/testing/custom-publications/server.js +++ b/testing/custom-publications/server.js @@ -1,9 +1,9 @@ import { Meteor } from 
'meteor/meteor'; import { Items } from './collections'; -Meteor.publish('custom_publications', function () { +Meteor.publish('custom_publications', async function () { const cursor = Items.find(); - cursor.forEach(doc => { + await cursor.forEachAsync(doc => { this.added(Items._name, doc._id, doc); }); @@ -11,11 +11,11 @@ Meteor.publish('custom_publications', function () { }); Meteor.methods({ - 'custom_publications_boot'() { - Items.remove({}); + async 'custom_publications_boot'() { + await Items.removeAsync({}); - Items.insert({name: 'Item 1'}); - Items.insert({name: 'Item 2'}); - Items.insert({name: 'Item 3'}); + await Items.insertAsync({name: 'Item 1'}); + await Items.insertAsync({name: 'Item 2'}); + await Items.insertAsync({name: 'Item 3'}); } }); diff --git a/testing/lib/helpers.js b/testing/lib/helpers.js index 4af7ace7..10fde8a8 100644 --- a/testing/lib/helpers.js +++ b/testing/lib/helpers.js @@ -52,8 +52,6 @@ export default (suffix) => { }; const subscribe = (...args) => { - console.log("555555") - console.log(Meteor.subscribe.toString()) return Meteor.subscribe(`publication.${suffix}`, ...args); }; diff --git a/testing/object-id/server.js b/testing/object-id/server.js index afb62664..2b26353c 100644 --- a/testing/object-id/server.js +++ b/testing/object-id/server.js @@ -6,18 +6,18 @@ Meteor.publish('smart_ids', function(filters = {}) { }); Meteor.methods({ - smart_ids_reset(doc) { - SmartIds.remove({}); - const id1 = SmartIds.insert({ + async smart_ids_reset(doc) { + await SmartIds.removeAsync({}); + const id1 = await SmartIds.insertAsync({ text: 'John Doe' }); - const id2 = SmartIds.insert({ + const id2 = await SmartIds.insertAsync({ text: 'John Shmoe' }); return [id1, id2]; }, - smart_ids_insert(doc) { - SmartIds.insert(doc); + async smart_ids_insert(doc) { + await SmartIds.insertAsync(doc); } }); diff --git a/testing/optimistic-ui/boot.js b/testing/optimistic-ui/boot.js index cfb963da..deb40dcd 100644 --- a/testing/optimistic-ui/boot.js +++ b/testing/optimistic-ui/boot.js @@ -8,6 +8,9 @@ if (Meteor.isServer) { } Items.allow({ + insertAsync: () => true, + updateAsync: () => true, + removeAsync: () => true, insert: () => true, update: () => true, remove: () => true, diff --git a/testing/polling/server.js b/testing/polling/server.js index 09a23a11..65bc160c 100644 --- a/testing/polling/server.js +++ b/testing/polling/server.js @@ -10,16 +10,16 @@ Meteor.publish('campaign_search', function(search, pollingIntervalMs = 100) { }); Meteor.methods({ - campaign_search_reset(doc) { - Campaigns.remove({}); - Campaigns.insert({ + async campaign_search_reset(doc) { + await Campaigns.removeAsync({}); + await Campaigns.insertAsync({ text: 'John Doe' }); - Campaigns.insert({ + await Campaigns.insertAsync({ text: 'John Shmoe' }); }, - campaign_search_insert(doc) { - Campaigns.insert(doc); + async campaign_search_insert(doc) { + await Campaigns.insertAsync(doc); } }); diff --git a/testing/publishComposite/boot.js b/testing/publishComposite/boot.js index 3f57b224..ab137316 100644 --- a/testing/publishComposite/boot.js +++ b/testing/publishComposite/boot.js @@ -3,12 +3,18 @@ import { Items, Children } from './collections'; import loadFixtures from './loadFixtures'; Items.allow({ + insertAsync: () => true, + updateAsync: () => true, + removeAsync: () => true, insert: () => true, update: () => true, remove: () => true, }); Children.allow({ + insertAsync: () => true, + updateAsync: () => true, + removeAsync: () => true, insert: () => true, update: () => true, remove: () => true, @@ -23,32 +29,32 @@ 
Meteor.publishComposite('items_publish_composite', { children: [ { find(item) { - return Children.find({itemId: item._id}); + return Children.find({ itemId: item._id }); } } ] }); Meteor.methods({ - 'publish_composite.load_fixtures'() { - loadFixtures(); + async 'publish_composite.load_fixtures'() { + await loadFixtures(); }, 'publish_composite.items.insert'(...args) { - return Items.insert(...args) + return Items.insertAsync(...args) }, 'publish_composite.items.update'(...args) { - return Items.update(...args) + return Items.updateAsync(...args) }, 'publish_composite.items.remove'(...args) { - return Items.remove(...args) + return Items.removeAsync(...args) }, 'publish_composite.children.insert'(...args) { - return Children.insert(...args) + return Children.insertAsync(...args) }, 'publish_composite.children.update'(...args) { - return Children.update(...args) + return Children.updateAsync(...args) }, 'publish_composite.children.remove'(...args) { - return Children.remove(...args) + return Children.removeAsync(...args) } }); diff --git a/testing/publishComposite/loadFixtures.js b/testing/publishComposite/loadFixtures.js index f7428877..03d473bf 100644 --- a/testing/publishComposite/loadFixtures.js +++ b/testing/publishComposite/loadFixtures.js @@ -3,17 +3,17 @@ import { Items, Children } from './collections'; const ITEMS = 5; const CHILDREN_PER_ITEM = 5; -export default () => { - Items.remove({}); - Children.remove({}); +export default async () => { + await Items.removeAsync({}); + await Children.removeAsync({}); for (let i = 0; i < ITEMS; i++) { - const itemId = Items.insert({ + const itemId = await Items.insertAsync({ name: 'Name - ' + i }); for (let j = 0; j < CHILDREN_PER_ITEM; j++) { - Children.insert({ + await Children.insertAsync({ name: 'Child - ' + i + '- ' + j, itemId }) diff --git a/testing/vent/boot.js b/testing/vent/boot.js index 60b0c7c6..e2ef27db 100644 --- a/testing/vent/boot.js +++ b/testing/vent/boot.js @@ -16,9 +16,9 @@ Vent.publish({ }); Meteor.methods({ - 'vent_emit'({channel, object, times = 1}) { + async 'vent_emit'({channel, object, times = 1}) { for (let i = 0; i < times; i++) { - Vent.emit(channel, object) + await Vent.emit(channel, object) } } }); From 2be26227a6eb52302d40bb3dd10bcc25c4d97733 Mon Sep 17 00:00:00 2001 From: matheusccastro Date: Mon, 6 May 2024 17:27:58 -0300 Subject: [PATCH 06/33] feat: fix tests. --- lib/mongo/ObserveMultiplex.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/mongo/ObserveMultiplex.js b/lib/mongo/ObserveMultiplex.js index d43c3aa4..64510e02 100644 --- a/lib/mongo/ObserveMultiplex.js +++ b/lib/mongo/ObserveMultiplex.js @@ -36,8 +36,8 @@ export function ObserveMultiplexer(options) { self._addHandleTasksScheduledButNotPerformed = 0; _.each(self.callbackNames(), function (callbackName) { - self[callbackName] = function (/* ... */) { - self._applyCallback(callbackName, _.toArray(arguments)); + self[callbackName] = async function (/* ... */) { + await self._applyCallback(callbackName, _.toArray(arguments)); }; }); } From 803cb69d2b2b31faf4aa6d829f111356a8219d10 Mon Sep 17 00:00:00 2001 From: matheusccastro Date: Mon, 6 May 2024 19:02:26 -0300 Subject: [PATCH 07/33] feat: fix tests. 
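Concretely, this patch makes ObservableCollection startup two-phase: the constructor now only stores its options and resolves the collection instance, while the awaited setupCollection() applies the collection-level cursor hook and builds the cursor, the MongoIDMap store and the field projections. RedisOplogObserveDriver in turn creates the ObservableCollection inside init() rather than in its constructor, so the whole setup can be awaited. A rough sketch of the resulting startup order, abridged from the diff below (the Tracker.nonreactive wrapper and the Redis subscription wiring are omitted):

// Sketch only -- abridged from the diff below, not the full driver.
class RedisOplogObserveDriver {
  constructor(options) {
    this.options = options; // construction stays synchronous
  }

  async init() {
    this.observableCollection = new ObservableCollection(this.options);
    await this.observableCollection.setupCollection(); // may await the collection-level `cursor` hook
    await this.observableCollection.init(); // initial fetchAsync() + added() calls
  }
}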
--- lib/cache/ObservableCollection.js | 49 ++++++++++++++++------------ lib/mongo/RedisOplogObserveDriver.js | 17 +++------- 2 files changed, 34 insertions(+), 32 deletions(-) diff --git a/lib/cache/ObservableCollection.js b/lib/cache/ObservableCollection.js index 51ada6b6..0f9400e3 100644 --- a/lib/cache/ObservableCollection.js +++ b/lib/cache/ObservableCollection.js @@ -29,6 +29,7 @@ export default class ObservableCollection { constructor({ multiplexer, matcher, sorter, cursorDescription }) { this.multiplexer = multiplexer; this.matcher = matcher; + this.sorter = sorter; this.cursorDescription = cursorDescription; this.collectionName = this.cursorDescription.collectionName; @@ -36,6 +37,29 @@ export default class ObservableCollection { cursorDescription.collectionName ); + if (!this.collection) { + throw new Meteor.Error( + `We could not find the collection instance by name: "${ + this.collectionName + }", the cursor description was: ${JSON.stringify(cursorDescription)}` + ); + } + } + + async setupCollection() { + // Here we apply the logic of changing the cursor based on the collection-level configuration + if (this.collection._redisOplog) { + const { cursor } = this.collection._redisOplog; + if (cursor) { + const context = DDP._CurrentPublicationInvocation.get(); + await cursor.call( + context, + this.cursorDescription.options, + this.cursorDescription.selector + ); + } + } + if (!this.collection) { throw new Meteor.Error( "We could not properly identify the collection by its name: " + @@ -45,8 +69,8 @@ export default class ObservableCollection { } this.cursor = this.collection.find( - cursorDescription.selector, - cursorDescription.options + this.cursorDescription.selector, + this.cursorDescription.options ); this.store = new MongoIDMap(); @@ -92,9 +116,9 @@ export default class ObservableCollection { this._projectionFn = LocalCollection._compileProjection(projection); // Projection function, result of combining important fields for selector and // existing fields projection - this._sharedProjection = matcher.combineIntoProjection(projection); - if (sorter) { - this._sharedProjection = sorter.combineIntoProjection( + this._sharedProjection = this.matcher.combineIntoProjection(projection); + if (this.sorter) { + this._sharedProjection = this.sorter.combineIntoProjection( this._sharedProjection ); } @@ -103,21 +127,6 @@ export default class ObservableCollection { ); } - async setupCollection() { - // Here we apply the logic of changing the cursor based on the collection-level configuration - if (this.collection._redisOplog) { - const { cursor } = this.collection._redisOplog; - if (cursor) { - const context = DDP._CurrentPublicationInvocation.get(); - await cursor.call( - context, - this.cursorDescription.options, - this.cursorDescription.selector - ); - } - } - } - /** * Function that checks whether or not the doc matches our filters * diff --git a/lib/mongo/RedisOplogObserveDriver.js b/lib/mongo/RedisOplogObserveDriver.js index 4945011e..82a2d022 100644 --- a/lib/mongo/RedisOplogObserveDriver.js +++ b/lib/mongo/RedisOplogObserveDriver.js @@ -22,7 +22,7 @@ export default class RedisOplogObserveDriver { currentId++; this.options = options; - const { cursorDescription, multiplexer, matcher, sorter } = options; + const { cursorDescription } = options; this._cursorDescription = options.cursorDescription; this._multiplexer = options.multiplexer; @@ -31,14 +31,11 @@ export default class RedisOplogObserveDriver { cursorDescription.selector, cursorDescription.options ); + } - // TODO send by 
object - this.observableCollection = new ObservableCollection({ - multiplexer, - matcher, - sorter, - cursorDescription, - }); + async init() { + this.observableCollection = new ObservableCollection(this.options); + await this.observableCollection.setupCollection(); // Feels hackish to have it here, maybe move to ObservableCollections if (this.strategy === Strategy.DEDICATED_CHANNELS) { @@ -47,10 +44,6 @@ export default class RedisOplogObserveDriver { oc.__containsOtherSelectorsThanId = Object.keys(oc.selector).length > 1; } } - } - - async init() { - await this.observableCollection.setupCollection(); // This is to mitigate the issue when we run init the first time on a subscription // And if you are using packages like reactive-publish From 89922f41a23909fbb3232bc8b2fa0cb5f3d51fdf Mon Sep 17 00:00:00 2001 From: matheusccastro Date: Mon, 6 May 2024 19:07:13 -0300 Subject: [PATCH 08/33] chore: major version update and also fixing github workflows. --- .github/workflows/test.yml | 4 ++-- package.js | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 186e541c..5aca0114 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -7,7 +7,7 @@ jobs: runs-on: ubuntu-20.04 strategy: matrix: - meteor: [1.12.2, 2.6.1, 2.7.3, 2.8.1, 2.12] + meteor: ["3.0-rc.0"] redis-version: [4, 5, 6, 7] steps: @@ -26,7 +26,7 @@ jobs: run: | meteor create --release ${{ matrix.meteor }} --bare test cd test - meteor npm i --save puppeteer@1.18.1 simpl-schema chai + meteor npm i --save puppeteer@1.18.1 simpl-schema@3.4.6 chai@4.3.6 - name: Test working-directory: ./test run: METEOR_PACKAGE_DIRS="../" TEST_BROWSER_DRIVER=puppeteer meteor test-packages --raw-logs --once --driver-package meteortesting:mocha ../ diff --git a/package.js b/package.js index 74668b34..868e93a3 100644 --- a/package.js +++ b/package.js @@ -1,6 +1,6 @@ Package.describe({ name: "cultofcoders:redis-oplog", - version: "2.2.1", + version: "3.0-rc.0", // Brief, one-line summary of the package. summary: "Replacement for Meteor's MongoDB oplog implementation", // URL to the Git repository containing the source code for this package. @@ -17,7 +17,7 @@ Npm.depends({ }); Package.onUse(function (api) { - api.versionsFrom(["1.12.2", "2.8.1", "2.12", "3.0-rc.0"]); + api.versionsFrom(["3.0-rc.0"]); api.use([ "underscore", "ecmascript", From 8384400a291e323b93eacabdd5d61ab4464df3f4 Mon Sep 17 00:00:00 2001 From: Jan Dvorak Date: Thu, 18 Jul 2024 11:02:21 +0200 Subject: [PATCH 09/33] Prep 3.0-rc.2 release --- .github/workflows/test.yml | 2 +- package.js | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 5aca0114..222ac87c 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -7,7 +7,7 @@ jobs: runs-on: ubuntu-20.04 strategy: matrix: - meteor: ["3.0-rc.0"] + meteor: ["3.0"] redis-version: [4, 5, 6, 7] steps: diff --git a/package.js b/package.js index 868e93a3..e6174d27 100644 --- a/package.js +++ b/package.js @@ -1,6 +1,6 @@ Package.describe({ name: "cultofcoders:redis-oplog", - version: "3.0-rc.0", + version: "3.0.0-rc.2", // Brief, one-line summary of the package. summary: "Replacement for Meteor's MongoDB oplog implementation", // URL to the Git repository containing the source code for this package. 
@@ -17,7 +17,7 @@ Npm.depends({ }); Package.onUse(function (api) { - api.versionsFrom(["3.0-rc.0"]); + api.versionsFrom(["3.0"]); api.use([ "underscore", "ecmascript", @@ -40,7 +40,7 @@ Package.onTest(function (api) { api.use("cultofcoders:redis-oplog"); // extensions - api.use("aldeed:collection2@4.0.1"); + api.use("aldeed:collection2@4.0.3"); api.use("reywood:publish-composite@1.8.9"); api.use("ecmascript"); @@ -48,10 +48,10 @@ Package.onTest(function (api) { api.use("mongo"); api.use("random"); api.use("accounts-password"); - api.use("matb33:collection-hooks@1.3.1"); - api.use("alanning:roles@4.0.0-alpha.1"); + api.use("matb33:collection-hooks@1.3.2"); + api.use("alanning:roles@4.0.0-rc.1"); - api.use(["meteortesting:mocha@3.0.3-beta300.0"]); + api.use(["meteortesting:mocha@3.0.0-rc.1"]); api.mainModule("testing/main.server.js", "server"); api.addFiles("testing/publishComposite/boot.js", "server"); From 1dae6c1814549ab41d79e1ff55facb95329067af Mon Sep 17 00:00:00 2001 From: Jan Dvorak Date: Thu, 18 Jul 2024 11:06:35 +0200 Subject: [PATCH 10/33] Published cultofcoders:redis-oplog@3.0.0-rc.2. --- .versions | 138 +++++++++++++++++++++++++++-------------------------- package.js | 2 +- 2 files changed, 72 insertions(+), 68 deletions(-) diff --git a/.versions b/.versions index dfe4c52e..6bed471e 100644 --- a/.versions +++ b/.versions @@ -1,67 +1,71 @@ -accounts-base@2.2.8 -accounts-password@2.3.4 -alanning:roles@3.5.1 -aldeed:collection2@3.0.6 -allow-deny@1.1.1 -babel-compiler@7.10.4 -babel-runtime@1.5.1 -base64@1.0.12 -binary-heap@1.0.11 -boilerplate-generator@1.7.1 -callback-hook@1.5.1 -check@1.3.2 -cultofcoders:redis-oplog@2.2.1 -ddp@1.4.1 -ddp-client@2.6.1 -ddp-common@1.4.0 -ddp-rate-limiter@1.2.0 -ddp-server@2.6.2 -diff-sequence@1.1.2 -dynamic-import@0.7.3 -ecmascript@0.16.7 -ecmascript-runtime@0.8.1 -ecmascript-runtime-client@0.12.1 -ecmascript-runtime-server@0.11.0 -ejson@1.1.3 -email@2.2.5 -fetch@0.1.3 -geojson-utils@1.0.11 -id-map@1.1.1 -inter-process-messaging@0.1.1 -local-test:cultofcoders:redis-oplog@2.2.1 -localstorage@1.2.0 -logging@1.3.2 -matb33:collection-hooks@1.1.4 -meteor@1.11.3 -meteortesting:browser-tests@0.1.2 -meteortesting:mocha@0.4.4 -minimongo@1.9.3 -modern-browsers@0.1.9 -modules@0.19.0 -modules-runtime@0.13.1 -mongo@1.16.7 -mongo-decimal@0.1.3 -mongo-dev-server@1.1.0 -mongo-id@1.0.8 -npm-mongo@4.16.0 -ordered-dict@1.1.0 -practicalmeteor:mocha-core@1.0.1 -promise@0.12.2 -raix:eventemitter@0.1.3 -random@1.2.1 -rate-limit@1.1.1 -react-fast-refresh@0.2.7 -reactive-var@1.0.12 -reload@1.3.1 -retry@1.1.0 -reywood:publish-composite@1.7.3 -routepolicy@1.1.1 -sha@1.0.9 -socket-stream-client@0.5.1 -tmeasday:check-npm-versions@0.3.2 -tracker@1.3.2 -underscore@1.0.13 -url@1.3.2 -webapp@1.13.5 -webapp-hashing@1.1.1 -zodern:types@1.0.9 +accounts-base@3.0.0 +accounts-password@3.0.0 +alanning:roles@4.0.0-rc.1 +aldeed:collection2@4.0.3 +aldeed:simple-schema@1.13.1 +allow-deny@2.0.0 +babel-compiler@7.11.0 +babel-runtime@1.5.2 +base64@1.0.13 +binary-heap@1.0.12 +boilerplate-generator@2.0.0 +callback-hook@1.6.0 +check@1.4.2 +core-runtime@1.0.0 +cultofcoders:redis-oplog@3.0.0-rc.2 +ddp@1.4.2 +ddp-client@3.0.0 +ddp-common@1.4.3 +ddp-rate-limiter@1.2.2 +ddp-server@3.0.0 +diff-sequence@1.1.3 +dynamic-import@0.7.4 +ecmascript@0.16.9 +ecmascript-runtime@0.8.2 +ecmascript-runtime-client@0.12.2 +ecmascript-runtime-server@0.11.1 +ejson@1.1.4 +email@3.0.0 +facts-base@1.0.2 +fetch@0.1.5 +geojson-utils@1.0.12 +http@1.0.1 +id-map@1.2.0 +inter-process-messaging@0.1.2 
+local-test:cultofcoders:redis-oplog@3.0.0-rc.2 +localstorage@1.2.1 +logging@1.3.5 +matb33:collection-hooks@2.0.0-rc.2 +meteor@2.0.0 +meteortesting:browser-tests@1.7.0 +meteortesting:mocha@3.0.0-rc.1 +meteortesting:mocha-core@8.2.0 +minimongo@2.0.0 +modern-browsers@0.1.11 +modules@0.20.1 +modules-runtime@0.13.2 +mongo@2.0.0 +mongo-decimal@0.1.4-beta300.7 +mongo-dev-server@1.1.1 +mongo-id@1.0.9 +npm-mongo@4.16.2 +ordered-dict@1.2.0 +promise@1.0.0 +raix:eventemitter@1.0.0 +random@1.2.2 +rate-limit@1.1.2 +react-fast-refresh@0.2.9 +reactive-var@1.0.13 +reload@1.3.2 +retry@1.1.1 +reywood:publish-composite@1.8.9 +routepolicy@1.1.2 +sha@1.0.10 +socket-stream-client@0.5.3 +tracker@1.3.4 +typescript@5.4.3 +underscore@1.6.4 +url@1.3.3 +webapp@2.0.0 +webapp-hashing@1.1.2 +zodern:types@1.0.13 diff --git a/package.js b/package.js index e6174d27..81f3bab5 100644 --- a/package.js +++ b/package.js @@ -48,7 +48,7 @@ Package.onTest(function (api) { api.use("mongo"); api.use("random"); api.use("accounts-password"); - api.use("matb33:collection-hooks@1.3.2"); + api.use("matb33:collection-hooks@2.0.0-rc.2"); api.use("alanning:roles@4.0.0-rc.1"); api.use(["meteortesting:mocha@3.0.0-rc.1"]); From ea94507353144f452b568cf39bf71db3f517f1a8 Mon Sep 17 00:00:00 2001 From: Leonardo Venturini Date: Fri, 11 Oct 2024 12:35:34 -0400 Subject: [PATCH 11/33] fix version --- package.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.js b/package.js index 868e93a3..7aa784a4 100644 --- a/package.js +++ b/package.js @@ -1,6 +1,6 @@ Package.describe({ name: "cultofcoders:redis-oplog", - version: "3.0-rc.0", + version: "3.0.0-rc.2", // Brief, one-line summary of the package. summary: "Replacement for Meteor's MongoDB oplog implementation", // URL to the Git repository containing the source code for this package. From 24256f3aefba0151ba995431f9512f5da5e3343b Mon Sep 17 00:00:00 2001 From: Jan Dvorak Date: Tue, 12 Nov 2024 11:37:18 +0900 Subject: [PATCH 12/33] Update dependencies --- package.js | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/package.js b/package.js index 81f3bab5..7e64b54d 100644 --- a/package.js +++ b/package.js @@ -1,6 +1,6 @@ Package.describe({ name: "cultofcoders:redis-oplog", - version: "3.0.0-rc.2", + version: "3.0.0-rc.3", // Brief, one-line summary of the package. summary: "Replacement for Meteor's MongoDB oplog implementation", // URL to the Git repository containing the source code for this package. 
@@ -40,18 +40,18 @@ Package.onTest(function (api) { api.use("cultofcoders:redis-oplog"); // extensions - api.use("aldeed:collection2@4.0.3"); - api.use("reywood:publish-composite@1.8.9"); + api.use("aldeed:collection2@4.0.4"); + api.use("reywood:publish-composite@1.8.12"); api.use("ecmascript"); api.use("tracker"); api.use("mongo"); api.use("random"); api.use("accounts-password"); - api.use("matb33:collection-hooks@2.0.0-rc.2"); - api.use("alanning:roles@4.0.0-rc.1"); + api.use("matb33:collection-hooks@2.0.0-rc.4"); + api.use("alanning:roles@4.0.0"); - api.use(["meteortesting:mocha@3.0.0-rc.1"]); + api.use(["meteortesting:mocha@3.2.0"]); api.mainModule("testing/main.server.js", "server"); api.addFiles("testing/publishComposite/boot.js", "server"); From 4b2d68ad7ce29f8bc5e9f2ebd21e18cf539ada0e Mon Sep 17 00:00:00 2001 From: Leonardo Venturini Date: Thu, 21 Nov 2024 15:30:25 -0400 Subject: [PATCH 13/33] fix method name --- lib/mongo/PollingObserveDriver.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/mongo/PollingObserveDriver.js b/lib/mongo/PollingObserveDriver.js index 4d88aab8..34464291 100644 --- a/lib/mongo/PollingObserveDriver.js +++ b/lib/mongo/PollingObserveDriver.js @@ -1,5 +1,5 @@ -import { _ } from "meteor/underscore"; import { LocalCollection } from "meteor/minimongo"; +import { _ } from "meteor/underscore"; function listenAll(cursorDescription, listenCallback) { var listeners = []; @@ -64,7 +64,7 @@ export default function PollingObserveDriver(options) { self._multiplexer = options.multiplexer; self._stopCallbacks = []; self._stopped = false; - self._synchronousCursor = self._mongoHandle._createSynchronousCursor( + self._synchronousCursor = self._mongoHandle._createAsynchronousCursor( self._cursorDescription ); // previous results snapshot. on each poll cycle, diffs against // results drives the callbacks. 
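The one-line rename above ("fix method name") tracks the Meteor 3 mongo package, where the internal cursor factory is _createAsynchronousCursor; the old fiber-based _createSynchronousCursor is no longer available there. A later patch in this series ("make cursor backwards compatible") softens this into a tolerant lookup; a minimal sketch of that fallback, wrapped in a hypothetical helper (mongoHandle and cursorDescription stand in for the driver's existing fields):

// Version-tolerant cursor creation, as introduced later in this series.
// Falls back to the legacy helper when _createAsynchronousCursor does not exist.
function createPollingCursor(mongoHandle, cursorDescription) {
  return (
    mongoHandle._createAsynchronousCursor?.(cursorDescription) ??
    mongoHandle._createSynchronousCursor(cursorDescription)
  );
}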
From cecbbf93befd6ba2207e9c2601c4ba7f3a84de14 Mon Sep 17 00:00:00 2001 From: Leonardo Venturini Date: Thu, 21 Nov 2024 15:31:32 -0400 Subject: [PATCH 14/33] use local files instead of package --- testing/boot.js | 4 ++-- testing/vent/boot.js | 2 +- testing/vent/client.js | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/testing/boot.js b/testing/boot.js index 765da103..c2ebe8c4 100644 --- a/testing/boot.js +++ b/testing/boot.js @@ -1,6 +1,6 @@ import { Mongo } from 'meteor/mongo'; import { _ } from 'meteor/underscore'; -import { RedisOplog, SyntheticMutator } from 'meteor/cultofcoders:redis-oplog'; +import { RedisOplog, SyntheticMutator } from '../redis-oplog'; if (Meteor.isServer) { RedisOplog.init({ @@ -44,7 +44,7 @@ const config = { }, }; -export { Collections, opts, config }; +export { Collections, config, opts }; if (Meteor.isServer) { _.each(Collections, (Collection, key) => { diff --git a/testing/vent/boot.js b/testing/vent/boot.js index e2ef27db..e5ce9922 100644 --- a/testing/vent/boot.js +++ b/testing/vent/boot.js @@ -1,5 +1,5 @@ -import { Vent } from 'meteor/cultofcoders:redis-oplog'; import { Meteor } from 'meteor/meteor'; +import { Vent } from '../../redis-oplog'; Vent.publish({ 'threadMessage'({channel, shouldReturn = true}) { diff --git a/testing/vent/client.js b/testing/vent/client.js index 9e2575f8..efe21fb6 100644 --- a/testing/vent/client.js +++ b/testing/vent/client.js @@ -1,6 +1,6 @@ import { assert } from 'chai'; import { Random } from 'meteor/random'; -import { Vent } from 'meteor/cultofcoders:redis-oplog'; +import { Vent } from '../../redis-oplog'; describe('Vent', function () { it('Should receive the event accordingly', function (done) { From e720ed74552bd6d3e89a7648f7e62ce0ea62f010 Mon Sep 17 00:00:00 2001 From: Leonardo Venturini Date: Thu, 21 Nov 2024 15:32:20 -0400 Subject: [PATCH 15/33] adjust test matrix --- .github/workflows/test.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 5aca0114..50456d78 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -7,7 +7,7 @@ jobs: runs-on: ubuntu-20.04 strategy: matrix: - meteor: ["3.0-rc.0"] + meteor: ["3.0.4", "3.1"] redis-version: [4, 5, 6, 7] steps: @@ -29,4 +29,4 @@ jobs: meteor npm i --save puppeteer@1.18.1 simpl-schema@3.4.6 chai@4.3.6 - name: Test working-directory: ./test - run: METEOR_PACKAGE_DIRS="../" TEST_BROWSER_DRIVER=puppeteer meteor test-packages --raw-logs --once --driver-package meteortesting:mocha ../ + run: METEOR_PACKAGE_DIRS="../" TEST_BROWSER_DRIVER=puppeteer meteor test-packages --raw-logs --once --driver-package meteortesting:mocha --release ${{ matrix.meteor }} ../ From 6dc8498434e174b2d86708fe58b20e2098001069 Mon Sep 17 00:00:00 2001 From: Leonardo Venturini Date: Thu, 21 Nov 2024 15:33:27 -0400 Subject: [PATCH 16/33] adjust versions --- package.js | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/package.js b/package.js index 7aa784a4..9c030a3b 100644 --- a/package.js +++ b/package.js @@ -1,6 +1,6 @@ Package.describe({ name: "cultofcoders:redis-oplog", - version: "3.0.0-rc.2", + version: "3.1.0", // Brief, one-line summary of the package. summary: "Replacement for Meteor's MongoDB oplog implementation", // URL to the Git repository containing the source code for this package. 
@@ -17,7 +17,7 @@ Npm.depends({ }); Package.onUse(function (api) { - api.versionsFrom(["3.0-rc.0"]); + api.versionsFrom(["3.1"]); api.use([ "underscore", "ecmascript", @@ -37,21 +37,22 @@ Package.onUse(function (api) { }); Package.onTest(function (api) { - api.use("cultofcoders:redis-oplog"); - // extensions - api.use("aldeed:collection2@4.0.1"); - api.use("reywood:publish-composite@1.8.9"); + api.use("aldeed:collection2@4.0.4"); + api.use("reywood:publish-composite@1.8.12"); + api.use("underscore"); api.use("ecmascript"); api.use("tracker"); api.use("mongo"); api.use("random"); api.use("accounts-password"); api.use("matb33:collection-hooks@1.3.1"); - api.use("alanning:roles@4.0.0-alpha.1"); + api.use("alanning:roles@4.0.0"); + api.use("ddp-server"); + api.use("ejson"); - api.use(["meteortesting:mocha@3.0.3-beta300.0"]); + api.use("meteortesting:mocha"); api.mainModule("testing/main.server.js", "server"); api.addFiles("testing/publishComposite/boot.js", "server"); From 64dcff7469e56ff558bf51ae2b7aa6bede772fd6 Mon Sep 17 00:00:00 2001 From: Leonardo Venturini Date: Fri, 22 Nov 2024 14:25:38 -0400 Subject: [PATCH 17/33] use latest ubuntu --- .github/workflows/test.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 50456d78..12d6b764 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,10 +1,10 @@ name: Tests -on: [push, pull_request] +on: [pull_request] jobs: test: - runs-on: ubuntu-20.04 + runs-on: ubuntu-latest strategy: matrix: meteor: ["3.0.4", "3.1"] From 081d1f8f7d4dd471be599c6681e7d9d41633bef8 Mon Sep 17 00:00:00 2001 From: Leonardo Venturini Date: Fri, 22 Nov 2024 14:33:55 -0400 Subject: [PATCH 18/33] make cursor backwards compatible --- lib/mongo/PollingObserveDriver.js | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/lib/mongo/PollingObserveDriver.js b/lib/mongo/PollingObserveDriver.js index 34464291..07a8b415 100644 --- a/lib/mongo/PollingObserveDriver.js +++ b/lib/mongo/PollingObserveDriver.js @@ -64,11 +64,13 @@ export default function PollingObserveDriver(options) { self._multiplexer = options.multiplexer; self._stopCallbacks = []; self._stopped = false; - self._synchronousCursor = self._mongoHandle._createAsynchronousCursor( - self._cursorDescription - ); // previous results snapshot. on each poll cycle, diffs against - // results drives the callbacks. + self._synchronousCursor = + self._mongoHandle._createAsynchronousCursor?.(self._cursorDescription) ?? + self._mongoHandle._createSynchronousCursor(self._cursorDescription); + + // previous results snapshot. on each poll cycle, diffs against + // results drives the callbacks. self._results = null; // The number of _pollMongo calls that have been added to self._taskQueue but // have not started running. Used to make sure we never schedule more than one // _pollMongo (other than possibly the one that is currently running). 
It's From b7e2781e44558042f10426bed6e729a7437c28f6 Mon Sep 17 00:00:00 2001 From: Leonardo Venturini Date: Fri, 22 Nov 2024 14:48:54 -0400 Subject: [PATCH 19/33] add commands --- .envrc | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 .envrc diff --git a/.envrc b/.envrc new file mode 100644 index 00000000..d863f3ef --- /dev/null +++ b/.envrc @@ -0,0 +1,20 @@ +function @test() { + local meteor_version=${1:-"3.1"} + + echo "🚀 Testing Redis Oplog with Meteor ${meteor_version}" + + rm -rf test + meteor create --release ${meteor_version} --bare test + + cd test + meteor npm i --save puppeteer@1.18.1 simpl-schema@3.4.6 chai@4.3.6 + + METEOR_PACKAGE_DIRS="../" TEST_BROWSER_DRIVER=puppeteer meteor test-packages \ + --raw-logs \ + --once \ + --driver-package meteortesting:mocha \ + --release ${meteor_version} \ + ../ + + cd .. +} \ No newline at end of file From 5947b5432b8542babc04feb313d8db2f84c3fecb Mon Sep 17 00:00:00 2001 From: Leonardo Venturini Date: Fri, 22 Nov 2024 15:17:46 -0400 Subject: [PATCH 20/33] exclude arch --- .envrc | 1 + 1 file changed, 1 insertion(+) diff --git a/.envrc b/.envrc index d863f3ef..a121e457 100644 --- a/.envrc +++ b/.envrc @@ -14,6 +14,7 @@ function @test() { --once \ --driver-package meteortesting:mocha \ --release ${meteor_version} \ + --exclude-archs=web.browser.legacy \ ../ cd .. From 2415b807aafafbdc7d9f3c3855328d823d254029 Mon Sep 17 00:00:00 2001 From: Leonardo Venturini Date: Fri, 22 Nov 2024 15:40:53 -0400 Subject: [PATCH 21/33] finetune tested versions --- .github/workflows/test.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 12d6b764..4ba7adf8 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -7,8 +7,8 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - meteor: ["3.0.4", "3.1"] - redis-version: [4, 5, 6, 7] + meteor: ["3.0.3", "3.0.4", "3.1"] + redis-version: [7] steps: - uses: actions/checkout@v3 From d0345137899b62dc81c7652a0010d8ad27774138 Mon Sep 17 00:00:00 2001 From: Leonardo Venturini Date: Fri, 22 Nov 2024 15:42:21 -0400 Subject: [PATCH 22/33] adjust test --- testing/initial_add.js | 58 +++++++++++++++++++++--------------------- 1 file changed, 29 insertions(+), 29 deletions(-) diff --git a/testing/initial_add.js b/testing/initial_add.js index 23462d47..db840ea6 100644 --- a/testing/initial_add.js +++ b/testing/initial_add.js @@ -1,34 +1,34 @@ import { assert } from 'chai'; -import {Mongo} from 'meteor/mongo'; +import { Mongo } from 'meteor/mongo'; -const InitialAddCollection = new Mongo.Collection('initial_add') +const InitialAddCollection = new Mongo.Collection('initial_add'); describe('Initial Add', function () { - let lastDocId; - before(function () { - InitialAddCollection.remove({}); - for (let i = 0; i <= 10; i++) { - lastDocId = InitialAddCollection.insert({number: i}) - } - }); + let lastDocId; + before(async function () { + await InitialAddCollection.removeAsync({}); + for (let i = 0; i <= 10; i++) { + lastDocId = await InitialAddCollection.insertAsync({ number: i }); + } + }); - it('Should not crash on initial add', function (done) { - Meteor.defer(() => { - let err; - InitialAddCollection.find().observeChanges({ - added(_id, doc) { - if (err) return; - Meteor._sleepForMs(10) // simulate a more costly operation - try { - assert.isDefined(doc) - } catch (e) { - err = e - } - } - }); - done(err); - }); - Meteor.defer(() => { - InitialAddCollection.remove({_id: lastDocId}) - }) - }) + it('Should 
not crash on initial add', function (done) { + Meteor.defer(async () => { + let err; + await InitialAddCollection.find().observeChanges({ + async added(_id, doc) { + if (err) return; + await Meteor._sleepForMs(10); // simulate a more costly operation + try { + assert.isDefined(doc); + } catch (e) { + err = e; + } + }, + }); + done(err); + }); + Meteor.defer(async () => { + await InitialAddCollection.removeAsync({ _id: lastDocId }); + }); + }); }); From f282e4457a4d1ca2a301e21d38b871c2fb665f64 Mon Sep 17 00:00:00 2001 From: Leonardo Venturini Date: Fri, 22 Nov 2024 15:47:33 -0400 Subject: [PATCH 23/33] disable fail fast --- .github/workflows/test.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 4ba7adf8..77174417 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -6,6 +6,7 @@ jobs: test: runs-on: ubuntu-latest strategy: + fail-fast: false matrix: meteor: ["3.0.3", "3.0.4", "3.1"] redis-version: [7] From 5bbf612f5f2f9c472e414b0af4ec93b94a848624 Mon Sep 17 00:00:00 2001 From: Leonardo Venturini Date: Fri, 22 Nov 2024 16:02:36 -0400 Subject: [PATCH 24/33] fix tests --- package.js | 2 ++ testing/boot.js | 2 +- testing/vent/boot.js | 2 +- testing/vent/client.js | 2 +- 4 files changed, 5 insertions(+), 3 deletions(-) diff --git a/package.js b/package.js index 9c030a3b..0dd8c584 100644 --- a/package.js +++ b/package.js @@ -37,6 +37,8 @@ Package.onUse(function (api) { }); Package.onTest(function (api) { + api.use("cultofcoders:redis-oplog"); + // extensions api.use("aldeed:collection2@4.0.4"); api.use("reywood:publish-composite@1.8.12"); diff --git a/testing/boot.js b/testing/boot.js index c2ebe8c4..f6d41980 100644 --- a/testing/boot.js +++ b/testing/boot.js @@ -1,6 +1,6 @@ +import { RedisOplog, SyntheticMutator } from 'meteor/cultofcoders:redis-oplog'; import { Mongo } from 'meteor/mongo'; import { _ } from 'meteor/underscore'; -import { RedisOplog, SyntheticMutator } from '../redis-oplog'; if (Meteor.isServer) { RedisOplog.init({ diff --git a/testing/vent/boot.js b/testing/vent/boot.js index e5ce9922..e2ef27db 100644 --- a/testing/vent/boot.js +++ b/testing/vent/boot.js @@ -1,5 +1,5 @@ +import { Vent } from 'meteor/cultofcoders:redis-oplog'; import { Meteor } from 'meteor/meteor'; -import { Vent } from '../../redis-oplog'; Vent.publish({ 'threadMessage'({channel, shouldReturn = true}) { diff --git a/testing/vent/client.js b/testing/vent/client.js index efe21fb6..151a21ab 100644 --- a/testing/vent/client.js +++ b/testing/vent/client.js @@ -1,6 +1,6 @@ import { assert } from 'chai'; +import { Vent } from 'meteor/cultofcoders:redis-oplog'; import { Random } from 'meteor/random'; -import { Vent } from '../../redis-oplog'; describe('Vent', function () { it('Should receive the event accordingly', function (done) { From 39b510b1b5279a4a0bea4e1d81fab6bd9e8577d3 Mon Sep 17 00:00:00 2001 From: Leonardo Venturini Date: Fri, 22 Nov 2024 16:13:19 -0400 Subject: [PATCH 25/33] add log to test --- package.js | 2 +- testing/optimistic-ui/client.test.js | 9 ++++++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/package.js b/package.js index 0dd8c584..13a7f9e3 100644 --- a/package.js +++ b/package.js @@ -17,7 +17,7 @@ Npm.depends({ }); Package.onUse(function (api) { - api.versionsFrom(["3.1"]); + api.versionsFrom(["3.0.1", "3.1"]); api.use([ "underscore", "ecmascript", diff --git a/testing/optimistic-ui/client.test.js b/testing/optimistic-ui/client.test.js index f4f5b15e..cd43c527 100644 --- 
a/testing/optimistic-ui/client.test.js +++ b/testing/optimistic-ui/client.test.js @@ -1,9 +1,9 @@ -import { Meteor } from 'meteor/meteor'; import { assert } from 'chai'; -import { Items } from './collections'; -import { waitForHandleToBeReady, callWithPromise } from '../lib/sync_utils'; +import { Meteor } from 'meteor/meteor'; import { Random } from 'meteor/random'; +import { callWithPromise, waitForHandleToBeReady } from '../lib/sync_utils'; import './boot'; +import { Items } from './collections'; describe('Optimistic UI', () => { it('Should not cause a flicker with method calls', function (done) { @@ -63,9 +63,12 @@ describe('Optimistic UI', () => { let alreadyIn = 0; const observer = cursor.observeChanges({ changed(docId, doc) { + console.log('changed', alreadyIn, doc); + alreadyIn++; if (alreadyIn > 1) { done('A flicker was caused.'); + return; } assert.lengthOf(doc.liked, 2); From 64c2b6de07aa755008b7ff5a2178079fbdc38c02 Mon Sep 17 00:00:00 2001 From: Leonardo Venturini Date: Mon, 25 Nov 2024 09:20:09 -0400 Subject: [PATCH 26/33] run cd in subshell --- .envrc | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/.envrc b/.envrc index a121e457..b7cab704 100644 --- a/.envrc +++ b/.envrc @@ -6,16 +6,16 @@ function @test() { rm -rf test meteor create --release ${meteor_version} --bare test - cd test - meteor npm i --save puppeteer@1.18.1 simpl-schema@3.4.6 chai@4.3.6 - - METEOR_PACKAGE_DIRS="../" TEST_BROWSER_DRIVER=puppeteer meteor test-packages \ - --raw-logs \ - --once \ - --driver-package meteortesting:mocha \ - --release ${meteor_version} \ - --exclude-archs=web.browser.legacy \ - ../ - - cd .. + ( + cd test + meteor npm i --save puppeteer@1.18.1 simpl-schema@3.4.6 chai@4.3.6 + + METEOR_PACKAGE_DIRS="../" TEST_BROWSER_DRIVER=puppeteer meteor test-packages \ + --raw-logs \ + --once \ + --driver-package meteortesting:mocha \ + --release ${meteor_version} \ + --exclude-archs=web.browser.legacy \ + ../ + ) } \ No newline at end of file From e63b5e62f32137ddaf371c17caa2f47471708d8b Mon Sep 17 00:00:00 2001 From: Leonardo Venturini Date: Mon, 25 Nov 2024 09:22:24 -0400 Subject: [PATCH 27/33] allow test to run independent from setup --- .envrc | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/.envrc b/.envrc index b7cab704..0bf3a890 100644 --- a/.envrc +++ b/.envrc @@ -1,7 +1,7 @@ -function @test() { +function @setup-test() { local meteor_version=${1:-"3.1"} - echo "🚀 Testing Redis Oplog with Meteor ${meteor_version}" + echo "🚀 Setting up test environment for Meteor ${meteor_version}" rm -rf test meteor create --release ${meteor_version} --bare test @@ -9,7 +9,16 @@ function @test() { ( cd test meteor npm i --save puppeteer@1.18.1 simpl-schema@3.4.6 chai@4.3.6 - + ) +} + +function @test() { + local meteor_version=${1:-"3.1"} + + echo "🚀 Running tests with Meteor ${meteor_version}" + + ( + cd test METEOR_PACKAGE_DIRS="../" TEST_BROWSER_DRIVER=puppeteer meteor test-packages \ --raw-logs \ --once \ From baba36b1dc44bb63415f327e81546ee34a11f236 Mon Sep 17 00:00:00 2001 From: Leonardo Venturini Date: Mon, 25 Nov 2024 16:29:54 -0400 Subject: [PATCH 28/33] fix allow deny issue --- lib/mongo/extendMongoCollection.js | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/lib/mongo/extendMongoCollection.js b/lib/mongo/extendMongoCollection.js index d0420251..85214b0a 100644 --- a/lib/mongo/extendMongoCollection.js +++ b/lib/mongo/extendMongoCollection.js @@ -1,10 +1,10 @@ import { Mongo } from "meteor/mongo"; 
import { _ } from "meteor/underscore"; import _validatedInsert from "./allow-deny/validatedInsert"; -import _validatedUpdate from "./allow-deny/validatedUpdate"; import _validatedRemove from "./allow-deny/validatedRemove"; -import Mutator from "./Mutator"; +import _validatedUpdate from "./allow-deny/validatedUpdate"; import extendObserveChanges from "./extendObserveChanges"; +import Mutator from "./Mutator"; export default () => { const Originals = { @@ -79,6 +79,10 @@ export default () => { _validatedUpdate, _validatedRemove, + _validatedInsertAsync: _validatedInsert, + _validatedUpdateAsync: _validatedUpdate, + _validatedRemoveAsync: _validatedRemove, + /** * Configure defaults for your collection * From 396e576d49a53016859b1386a108abf39f6c6467 Mon Sep 17 00:00:00 2001 From: Leonardo Venturini Date: Mon, 25 Nov 2024 16:30:12 -0400 Subject: [PATCH 29/33] add command for checkout --- .envrc | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/.envrc b/.envrc index 0bf3a890..89af92d0 100644 --- a/.envrc +++ b/.envrc @@ -27,4 +27,16 @@ function @test() { --exclude-archs=web.browser.legacy \ ../ ) +} + +function @testd { + ( + cd test + METEOR_PACKAGE_DIRS="../" TEST_BROWSER_DRIVER=puppeteer meteord test-packages \ + --raw-logs \ + --once \ + --driver-package meteortesting:mocha \ + --exclude-archs=web.browser.legacy \ + ../ + ) } \ No newline at end of file From 0886f34c815b80b522ec8168a14b151b91e9ffbf Mon Sep 17 00:00:00 2001 From: Leonardo Venturini Date: Wed, 4 Dec 2024 11:20:15 -0400 Subject: [PATCH 30/33] add perf improvements --- lib/cache/ObservableCollection.js | 13 ++++++------- lib/mongo/ObserveMultiplex.js | 24 +++++++++++++++++------- 2 files changed, 23 insertions(+), 14 deletions(-) diff --git a/lib/cache/ObservableCollection.js b/lib/cache/ObservableCollection.js index 0f9400e3..bd806f43 100644 --- a/lib/cache/ObservableCollection.js +++ b/lib/cache/ObservableCollection.js @@ -1,13 +1,11 @@ import { DiffSequence } from "meteor/diff-sequence"; +import { EJSON } from "meteor/ejson"; +import { LocalCollection } from "meteor/minimongo"; import { _ } from "meteor/underscore"; -import { LocalCollection, Minimongo } from "meteor/minimongo"; +import extractFieldsFromFilters from "./lib/extractFieldsFromFilters"; import fieldProjectionIsExclusion from "./lib/fieldProjectionIsExclusion"; import getChannels from "./lib/getChannels"; -import extractFieldsFromFilters from "./lib/extractFieldsFromFilters"; import { MongoIDMap } from "./mongoIdMap"; -import { EJSON } from "meteor/ejson"; -import isRemovedNonExistent from "../utils/isRemovedNonExistent"; -import getStrategy from "../processors/getStrategy"; const allowedOptions = [ "limit", @@ -188,11 +186,12 @@ export default class ObservableCollection { * @param safe {Boolean} If this is set to true, it assumes that the object is cleaned */ async add(doc, safe = false) { - doc = EJSON.clone(doc); - if (!safe) { if (this.fieldsArray) { + // projection function clones the document already. doc = this.projectFieldsOnDoc(doc); + } else { + doc = EJSON.clone(doc); } } diff --git a/lib/mongo/ObserveMultiplex.js b/lib/mongo/ObserveMultiplex.js index 64510e02..b0417be3 100644 --- a/lib/mongo/ObserveMultiplex.js +++ b/lib/mongo/ObserveMultiplex.js @@ -2,8 +2,8 @@ // This code is MIT and licensed to Meteor. 
import { Meteor } from "meteor/meteor"; -import { _ } from "meteor/underscore"; import { LocalCollection } from "meteor/minimongo"; +import { _ } from "meteor/underscore"; import OptimisticInvocation from "./OptimisticInvocation"; export function ObserveMultiplexer(options) { @@ -42,6 +42,16 @@ export function ObserveMultiplexer(options) { }); } +// Helper function for strict object freezing +function strictFreeze(obj) { + return new Proxy(obj, { + set() { + throw new Error("Cannot mutate a frozen object"); + } + }); +} +const freezeObject = Meteor.isProduction ? Object.freeze : strictFreeze; + Object.assign(ObserveMultiplexer.prototype, { addHandleAndSendInitialAdds: async function (handle) { var self = this; @@ -210,16 +220,17 @@ Object.assign(ObserveMultiplexer.prototype, { // can continue until these are done. (But we do have to be careful to not // use a handle that got removed, because removeHandle does not use the // queue; thus, we iterate over an array of keys that we control.) + + const safeArgs = freezeObject(args); + for (const handleId of Object.keys(self._handles)) { var handle = self._handles && self._handles[handleId]; if (!handle) return; var callback = handle["_" + callbackName]; - // clone arguments so that callbacks can mutate their arguments - // We silence out removed exceptions if (callback === "removed") { try { - await callback.apply(null, EJSON.clone(args)); + await callback.apply(null, safeArgs); } catch (e) { // Supressing `removed non-existent exceptions` if (!isRemovedNonExistent(e)) { @@ -227,7 +238,7 @@ Object.assign(ObserveMultiplexer.prototype, { } } } else { - callback && (await callback.apply(null, EJSON.clone(args))); + callback && (await callback.apply(null, safeArgs)); } } }); @@ -248,8 +259,7 @@ Object.assign(ObserveMultiplexer.prototype, { await self._cache.docs.forEachAsync(async function (doc, id) { if (!_.has(self._handles, handle._id)) throw Error("handle got removed before sending initial adds!"); - var fields = EJSON.clone(doc); - delete fields._id; + const { _id, ...fields } = doc; if (self._ordered) await add(id, fields, null); // we're going in order, so add at end else await add(id, fields); From ef88b11db49af7ad7a3c4d1450c09b4af3de0298 Mon Sep 17 00:00:00 2001 From: Leonardo Venturini Date: Mon, 9 Dec 2024 09:26:51 -0400 Subject: [PATCH 31/33] change version --- package.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.js b/package.js index 590a43f3..a7212693 100644 --- a/package.js +++ b/package.js @@ -1,6 +1,6 @@ Package.describe({ name: "cultofcoders:redis-oplog", - version: "3.1.0", + version: "3.0.0", // Brief, one-line summary of the package. summary: "Replacement for Meteor's MongoDB oplog implementation", // URL to the Git repository containing the source code for this package. 
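One behavioural note before the final patches: with the "add perf improvements" change above, observer callbacks no longer receive their own EJSON.clone of the callback arguments; every handle is invoked with the same shallow-frozen argument list, so added/changed/removed handlers should treat the documents they are given as read-only. A condensed, self-contained sketch of the guard from that diff:

import { Meteor } from "meteor/meteor";

// Development builds wrap the shared argument list in a Proxy whose set trap throws,
// so accidental writes fail loudly; production uses the cheaper Object.freeze.
function strictFreeze(obj) {
  return new Proxy(obj, {
    set() {
      throw new Error("Cannot mutate a frozen object");
    },
  });
}

const freezeObject = Meteor.isProduction ? Object.freeze : strictFreeze;

// The multiplexer shares one such list across all handles instead of cloning per handle:
const safeArgs = freezeObject(["docId", { title: "hello" }]);
// safeArgs[0] = "otherId";       // caught: throws via the Proxy set trap in development
// safeArgs[1].title = "changed"; // NOT caught (the freeze is shallow) -- hence the read-only convention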
From f17a838d8cd327b8c49a903db542c338b28c2b0e Mon Sep 17 00:00:00 2001 From: Pierluigi Beato Date: Wed, 11 Dec 2024 16:12:40 +0100 Subject: [PATCH 32/33] fix(update): Fixes the mongo selector in the update call if invoked with ObjectID only --- lib/mongo/Mutator.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/mongo/Mutator.js b/lib/mongo/Mutator.js index d6bb31de..b3f6e81d 100644 --- a/lib/mongo/Mutator.js +++ b/lib/mongo/Mutator.js @@ -9,6 +9,7 @@ import { } from "./lib/dispatchers"; import Config from "../config"; import { Events } from "../constants"; +import { Mongo } from 'meteor/mongo'; function runCallbackInBackground(fn) { Meteor.defer(Meteor.bindEnvironment(fn)); @@ -107,7 +108,7 @@ export default class Mutator { * @returns {*} */ static async update(Originals, selector, modifier, _config, callback) { - if (_.isString(selector)) { + if (_.isString(selector) || selector instanceof Mongo.ObjectID) { selector = { _id: selector }; } From f47a223a3d979200e6bb1eefc7640ed83bc13374 Mon Sep 17 00:00:00 2001 From: Jan Dvorak Date: Thu, 12 Dec 2024 11:48:49 +0900 Subject: [PATCH 33/33] Published cultofcoders:redis-oplog@3.0.0. --- .versions | 125 +++++++++++---------------------------------------- package.json | 2 +- 2 files changed, 28 insertions(+), 99 deletions(-) diff --git a/.versions b/.versions index 9fa898b6..3593204e 100644 --- a/.versions +++ b/.versions @@ -1,55 +1,54 @@ -accounts-base@3.0.0 -accounts-password@3.0.0 -alanning:roles@4.0.0-rc.1 -aldeed:collection2@4.0.3 +accounts-base@3.0.3 +accounts-password@3.0.3 +alanning:roles@4.0.0 +aldeed:collection2@4.0.4 aldeed:simple-schema@1.13.1 allow-deny@2.0.0 -babel-compiler@7.11.0 +babel-compiler@7.11.2 babel-runtime@1.5.2 base64@1.0.13 binary-heap@1.0.12 boilerplate-generator@2.0.0 callback-hook@1.6.0 -check@1.4.2 +check@1.4.4 core-runtime@1.0.0 -cultofcoders:redis-oplog@3.0.0-rc.2 +cultofcoders:redis-oplog@3.0.0 ddp@1.4.2 -ddp-client@3.0.0 -ddp-common@1.4.3 +ddp-client@3.0.3 +ddp-common@1.4.4 ddp-rate-limiter@1.2.2 -ddp-server@3.0.0 +ddp-server@3.0.3 diff-sequence@1.1.3 dynamic-import@0.7.4 -ecmascript@0.16.9 -ecmascript-runtime@0.8.2 +ecmascript@0.16.10 +ecmascript-runtime@0.8.3 ecmascript-runtime-client@0.12.2 ecmascript-runtime-server@0.11.1 ejson@1.1.4 -email@3.0.0 +email@3.1.1 facts-base@1.0.2 fetch@0.1.5 geojson-utils@1.0.12 -http@1.0.1 id-map@1.2.0 inter-process-messaging@0.1.2 -local-test:cultofcoders:redis-oplog@3.0.0-rc.2 +local-test:cultofcoders:redis-oplog@3.0.0 localstorage@1.2.1 logging@1.3.5 -matb33:collection-hooks@2.0.0-rc.2 -meteor@2.0.0 -meteortesting:browser-tests@1.7.0 -meteortesting:mocha@3.0.0-rc.1 -meteortesting:mocha-core@8.2.0 -minimongo@2.0.0 +matb33:collection-hooks@2.0.0 +meteor@2.0.2 +meteortesting:browser-tests@0.1.2 +meteortesting:mocha@0.4.4 +minimongo@2.0.1 modern-browsers@0.1.11 -modules@0.20.1 +modules@0.20.3 modules-runtime@0.13.2 -mongo@2.0.0 -mongo-decimal@0.1.4-beta300.7 +mongo@2.0.2 +mongo-decimal@0.1.4 mongo-dev-server@1.1.1 mongo-id@1.0.9 -npm-mongo@4.16.2 +npm-mongo@4.17.4 ordered-dict@1.2.0 +practicalmeteor:mocha-core@1.0.1 promise@1.0.0 raix:eventemitter@1.0.0 random@1.2.2 @@ -58,84 +57,14 @@ react-fast-refresh@0.2.9 reactive-var@1.0.13 reload@1.3.2 retry@1.1.1 -reywood:publish-composite@1.8.9 +reywood:publish-composite@1.8.12 routepolicy@1.1.2 sha@1.0.10 socket-stream-client@0.5.3 tracker@1.3.4 typescript@5.4.3 underscore@1.6.4 -url@1.3.3 -webapp@2.0.0 +url@1.3.5 +webapp@2.0.4 webapp-hashing@1.1.2 zodern:types@1.0.13 -======= -accounts-base@2.2.11 
-accounts-password@2.4.0 -alanning:roles@3.5.1 -aldeed:collection2@3.0.6 -allow-deny@1.1.1 -babel-compiler@7.10.5 -babel-runtime@1.5.1 -base64@1.0.12 -binary-heap@1.0.11 -boilerplate-generator@1.7.2 -callback-hook@1.5.1 -check@1.4.1 -cultofcoders:redis-oplog@2.3.0 -ddp@1.4.1 -ddp-client@2.6.2 -ddp-common@1.4.1 -ddp-rate-limiter@1.2.1 -ddp-server@2.7.1 -diff-sequence@1.1.2 -dynamic-import@0.7.3 -ecmascript@0.16.8 -ecmascript-runtime@0.8.1 -ecmascript-runtime-client@0.12.1 -ecmascript-runtime-server@0.11.0 -ejson@1.1.3 -email@2.2.6 -fetch@0.1.4 -geojson-utils@1.0.11 -id-map@1.1.1 -inter-process-messaging@0.1.1 -local-test:cultofcoders:redis-oplog@2.3.0 -localstorage@1.2.0 -logging@1.3.4 -matb33:collection-hooks@1.1.4 -meteor@1.11.5 -meteortesting:browser-tests@0.1.2 -meteortesting:mocha@0.4.4 -minimongo@1.9.4 -modern-browsers@0.1.10 -modules@0.20.0 -modules-runtime@0.13.1 -mongo@1.16.10 -mongo-decimal@0.1.3 -mongo-dev-server@1.1.0 -mongo-id@1.0.8 -natestrauser:publish-performant-counts@0.1.2 -npm-mongo@4.17.2 -ordered-dict@1.1.0 -practicalmeteor:mocha-core@1.0.1 -promise@0.12.2 -raix:eventemitter@0.1.3 -random@1.2.1 -rate-limit@1.1.1 -react-fast-refresh@0.2.8 -reactive-var@1.0.12 -reload@1.3.1 -retry@1.1.0 -reywood:publish-composite@1.7.3 -routepolicy@1.1.1 -sha@1.0.9 -socket-stream-client@0.5.2 -tmeasday:check-npm-versions@0.3.2 -tracker@1.3.3 -typescript@4.9.5 -underscore@1.6.1 -url@1.3.2 -webapp@1.13.8 -webapp-hashing@1.1.1 -zodern:types@1.0.9 diff --git a/package.json b/package.json index 2dd2f09f..3ad20324 100644 --- a/package.json +++ b/package.json @@ -21,6 +21,6 @@ }, "homepage": "https://github.com/cult-of-coders/redis-oplog#readme", "devDependencies": { - "prettier": "^3.2.5" + "prettier": "^3.4.2" } }
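A closing usage note on the ObjectID fix ("fix(update)") above: Mutator.update() previously normalized only string selectors, so a call such as collection.updateAsync(someObjectId, modifier) flowed through with a bare Mongo.ObjectID as the selector; the added instanceof check now gives it the same { _id } treatment a string id already received. A small illustration of that normalization (the helper name is ours, the check is the one the patch adds):

import { Mongo } from "meteor/mongo";
import { _ } from "meteor/underscore";

// Mirrors the check "fix(update)" adds to Mutator.update: id-only selectors, whether a
// string or a Mongo.ObjectID, are turned into a proper { _id } selector; anything else
// passes through unchanged.
function normalizeSelector(selector) {
  if (_.isString(selector) || selector instanceof Mongo.ObjectID) {
    return { _id: selector };
  }
  return selector;
}

// normalizeSelector("WqkpmyLDjuSGvuEyx")  -> { _id: "WqkpmyLDjuSGvuEyx" }
// normalizeSelector(new Mongo.ObjectID()) -> { _id: ObjectID("...") }
// normalizeSelector({ status: "active" }) -> returned unchanged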