(function (root, factory) {
if (typeof define === 'function' && define.amd) {
// AMD
define([], factory);
} else if (typeof exports === 'object') {
// CommonJS
module.exports = factory();
} else {
// Browser globals
root.controldb = factory();
}
}(this, function () {
return (function () {
'use strict';
var hasOwnProperty = Object.prototype.hasOwnProperty;
/**
 * Recursively freezes an object graph in place (arrays and plain objects).
 * Primitives and functions fail both type checks below and are left alone.
 * Note: cycles are not detected; a cyclic graph will recurse until stack overflow.
 * @param {*} obj - value whose object graph should be frozen
 */
function deepFreeze(obj) {
  var prop, i;
  if (Array.isArray(obj)) {
    for (i = 0; i < obj.length; i++) {
      deepFreeze(obj[i]);
    }
    freeze(obj);
  } else if (obj !== null && (typeof obj === 'object')) {
    for (prop in obj) {
      // use the captured Object.prototype.hasOwnProperty (consistent with the
      // rest of this file) so objects that shadow 'hasOwnProperty' or have a
      // null prototype cannot break the scan
      if (hasOwnProperty.call(obj, prop)) {
        deepFreeze(obj[prop]);
      }
    }
    freeze(obj);
  }
}
/**
 * Freezes obj unless it is already frozen. (Object.freeze is technically
 * idempotent, but the isFrozen guard keeps the intent explicit.)
 * @param {object} obj - object to freeze
 */
function freeze(obj) {
  if (Object.isFrozen(obj)) {
    return;
  }
  Object.freeze(obj);
}
/**
 * Returns obj itself when it is not frozen; otherwise returns a shallow,
 * unfrozen copy so the caller can mutate the result safely.
 * @param {object} obj - possibly-frozen object
 * @returns {object} obj, or a shallow mutable clone of it
 */
function unFreeze(obj) {
  return Object.isFrozen(obj) ? clone(obj, 'shallow') : obj;
}
var Utils = {
  // Shallow-copies every enumerable property of src (own and inherited) onto dest.
  copyProperties: function (src, dest) {
    var key;
    for (key in src) {
      dest[key] = src[key];
    }
  },
  // Recursively scans a hierarchical transform step object, replacing any
  // string of the form "[%lktxp]name" with params[name]. Recursion depth is
  // capped at 10 levels to guard against cyclic step objects.
  resolveTransformObject: function (subObj, params, depth) {
    var key, paramName, value;
    if (typeof depth !== 'number') {
      depth = 0;
    }
    depth += 1;
    if (depth >= 10) {
      return subObj;
    }
    for (key in subObj) {
      value = subObj[key];
      if (typeof value === 'string' && value.indexOf("[%lktxp]") === 0) {
        paramName = value.substring(8);
        if (params.hasOwnProperty(paramName)) {
          subObj[key] = params[paramName];
        }
      } else if (typeof value === "object") {
        subObj[key] = Utils.resolveTransformObject(value, params, depth);
      }
    }
    return subObj;
  },
  // Top level utility to resolve an entire (single) transform (array of steps)
  // for parameter substitution. Steps are cloned first so substitution never
  // mutates the caller's transform definition.
  resolveTransformParams: function (transform, params) {
    var stepIdx, workingStep, resolved;
    if (typeof params === 'undefined') return transform;
    resolved = [];
    // iterate all steps in the transform array
    for (stepIdx = 0; stepIdx < transform.length; stepIdx++) {
      workingStep = clone(transform[stepIdx], "shallow-recurse-objects");
      resolved.push(Utils.resolveTransformObject(workingStep, params));
    }
    return resolved;
  },
  // Looks up `path` on `object`.
  //
  // With usingDotNotation falsy this is a plain `object[path]` lookup.
  //
  // With usingDotNotation truthy, path may be an array of field names or a
  // period-delimited string describing a nested path; the value is resolved
  // one segment at a time (object[path[0]][path[1]]...). Non-nested fields
  // still work in this mode.
  //
  // `usingDotNotation` is purely a performance hint: callers that know the
  // path is flat can skip the cost of string.split('.').
  //
  // examples:
  //   getIn({a: 1}, "a") => 1
  //   getIn({a: 1}, "a", true) => 1
  //   getIn({a: {b: 1}}, ["a", "b"], true) => 1
  //   getIn({a: {b: 1}}, "a.b", true) => 1
  getIn: function (object, path, usingDotNotation) {
    if (object == null) {
      return undefined;
    }
    if (!usingDotNotation) {
      return object[path];
    }
    if (typeof (path) === "string") {
      path = path.split(".");
    }
    if (!Array.isArray(path)) {
      throw new Error("path must be a string or array. Found " + typeof (path));
    }
    var step = 0;
    var steps = path.length;
    while (object != null && step < steps) {
      object = object[path[step]];
      step += 1;
    }
    // an empty path (steps === 0) intentionally yields undefined
    return (step !== 0 && step === steps) ? object : undefined;
  }
};
// wrapping in object to expose to default export for potential user override.
// warning: overriding these methods will override behavior for all control db instances in memory.
// warning: if you use binary indices these comparators should be the same for all inserts/updates/removes.
var Comparators = {};
Comparators.aeq = aeqHelper;
Comparators.lt = ltHelper;
Comparators.gt = gtHelper;
/** Helper function for determining 'control' abstract equality which is a little more abstract than ==
 * aeqHelper(5, '5') === true
 * aeqHelper(5.0, '5') === true
 * aeqHelper(new Date("1/1/2011"), new Date("1/1/2011")) === true
 * aeqHelper({a:1}, {z:4}) === true (all objects sorted equally)
 * aeqHelper([1, 2, 3], [1, 3]) === false
 * aeqHelper([1, 2, 3], [1, 2, 3]) === true
 * aeqHelper(undefined, null) === true
 */
function aeqHelper(prop1, prop2) {
  // Rank for 'edge' values only: undefined/null -> 1, false -> 3, true -> 4,
  // "" -> 5; NaN ranks 0 and every ordinary value ranks 9.
  function edgeRank(val) {
    if (val === undefined || val === null) return 1;
    if (val === false) return 3;
    if (val === true) return 4;
    if (val === "") return 5;
    return (val === val) ? 9 : 0;
  }
  var num1, num2;
  if (prop1 === prop2) return true;
  // 'falsy' and Boolean handling: when either side is falsy, a boolean, or
  // NaN, equality is decided purely by matching edge ranks.
  if (!prop1 || !prop2 || prop1 === true || prop2 === true || prop1 !== prop1 || prop2 !== prop2) {
    var rank1 = edgeRank(prop1);
    var rank2 = edgeRank(prop2);
    // one or both is an edge case
    if (rank1 !== 9 || rank2 !== 9) {
      return rank1 === rank2;
    }
  }
  // Handle 'Number-like' comparisons (covers numeric strings and Dates)
  num1 = Number(prop1);
  num2 = Number(prop2);
  // if one or both are 'number-like'...
  if (num1 === num1 || num2 === num2) {
    return num1 === num2;
  }
  // neither side is number-like, so these are mixed/object types:
  // compare their string forms loosely
  return prop1.toString() == prop2.toString();
}
/** Helper function for determining 'less-than' conditions for ops, sorting, and binary indices.
 * In the future we might want $lt and $gt ops to use their own functionality/helper.
 * Since binary indices on a property might need to index [12, NaN, new Date(), Infinity], we
 * need this function (as well as gtHelper) to always ensure one value is LT, GT, or EQ to another.
 * @param {*} prop1 - left operand
 * @param {*} prop2 - right operand
 * @param {boolean} equal - value to return when operands compare equal
 */
function ltHelper(prop1, prop2, equal) {
  // Rank for 'edge' values: undefined/null -> 1, false -> 3, true -> 4,
  // "" -> 5; NaN ranks 0 (sorts below even null) and 0/0.0 rank 9.
  function edgeRank(val) {
    if (val === undefined || val === null) return 1;
    if (val === false) return 3;
    if (val === true) return 4;
    if (val === "") return 5;
    return (val === val) ? 9 : 0;
  }
  var num1, num2;
  // if one of the params is falsy or strictly true or not equal to itself
  // (0, 0.0, "", NaN, null, undefined, not defined, false, true) compare by rank
  if (!prop1 || !prop2 || prop1 === true || prop2 === true || prop1 !== prop1 || prop2 !== prop2) {
    var rank1 = edgeRank(prop1);
    var rank2 = edgeRank(prop2);
    // one or both is an edge case
    if (rank1 !== 9 || rank2 !== 9) {
      return (rank1 === rank2) ? equal : (rank1 < rank2);
    }
  }
  // if both are numbers (string encoded or not), compare as numbers
  num1 = Number(prop1);
  num2 = Number(prop2);
  if (num1 === num1 && num2 === num2) {
    if (num1 < num2) return true;
    if (num1 > num2) return false;
    return equal;
  }
  // exactly one side is number-like: numbers sort before non-numbers
  if (num1 === num1) return true;
  if (num2 === num2) return false;
  // neither is number-like: try the native relational comparison first
  if (prop1 < prop2) return true;
  if (prop1 > prop2) return false;
  if (prop1 == prop2) return equal;
  // not strict equal nor less than nor gt so must be mixed types:
  // convert to string and use that to compare
  var str1 = prop1.toString();
  var str2 = prop2.toString();
  if (str1 < str2) return true;
  return (str1 == str2) ? equal : false;
}
/** Helper function for determining 'greater-than' conditions for ops, sorting,
 * and binary indices; the exact mirror of ltHelper and subject to the same
 * total-ordering requirement over mixed values.
 * @param {*} prop1 - left operand
 * @param {*} prop2 - right operand
 * @param {boolean} equal - value to return when operands compare equal
 */
function gtHelper(prop1, prop2, equal) {
  // Rank for 'edge' values: undefined/null -> 1, false -> 3, true -> 4,
  // "" -> 5; NaN ranks 0 and ordinary values rank 9.
  function edgeRank(val) {
    if (val === undefined || val === null) return 1;
    if (val === false) return 3;
    if (val === true) return 4;
    if (val === "") return 5;
    return (val === val) ? 9 : 0;
  }
  var num1, num2;
  // 'falsy' and Boolean handling
  if (!prop1 || !prop2 || prop1 === true || prop2 === true || prop1 !== prop1 || prop2 !== prop2) {
    var rank1 = edgeRank(prop1);
    var rank2 = edgeRank(prop2);
    // one or both is an edge case
    if (rank1 !== 9 || rank2 !== 9) {
      return (rank1 === rank2) ? equal : (rank1 > rank2);
    }
  }
  // if both are numbers (string encoded or not), compare as numbers
  num1 = Number(prop1);
  num2 = Number(prop2);
  if (num1 === num1 && num2 === num2) {
    if (num1 > num2) return true;
    if (num1 < num2) return false;
    return equal;
  }
  // exactly one side is number-like: numbers sort before non-numbers
  if (num1 === num1) return false;
  if (num2 === num2) return true;
  // neither is number-like: try the native relational comparison first
  if (prop1 > prop2) return true;
  if (prop1 < prop2) return false;
  if (prop1 == prop2) return equal;
  // not strict equal nor less than nor gt so must be dates or mixed types:
  // convert to string and use that to compare
  var str1 = prop1.toString();
  var str2 = prop2.toString();
  if (str1 > str2) return true;
  return (str1 == str2) ? equal : false;
}
/**
 * Three-way comparison used by sorts: 0 when abstractly equal, otherwise
 * -1/+1 in ascending terms (flipped when desc is truthy).
 * @param {*} prop1 - first value
 * @param {*} prop2 - second value
 * @param {boolean} desc - sort descending when truthy
 * @returns {number} -1, 0, or 1
 */
function sortHelper(prop1, prop2, desc) {
  if (Comparators.aeq(prop1, prop2)) return 0;
  if (Comparators.lt(prop1, prop2, false)) {
    return desc ? 1 : -1;
  }
  if (Comparators.gt(prop1, prop2, false)) {
    return desc ? -1 : 1;
  }
  // neither lt nor gt: implied equality (date compatible)
  return 0;
}
/**
 * compoundeval() - helper function for compoundsort(), performing individual object comparisons
 * across an ordered list of sort descriptors; the first non-equal property decides.
 *
 * @param {array} properties - array of [propertyName, isDescending] pairs, in priority order
 * @param {object} obj1 - first object to compare
 * @param {object} obj2 - second object to compare
 * @returns {integer} 0, -1, or 1 to designate if identical (sortwise) or which should be first
 */
function compoundeval(properties, obj1, obj2) {
  var idx, descriptor, field, firstVal, secondVal, order;
  for (idx = 0; idx < properties.length; idx++) {
    descriptor = properties[idx];
    field = descriptor[0];
    if (field.indexOf('.') !== -1) {
      // dotted field: resolve nested values on both sides
      var segments = field.split('.');
      firstVal = Utils.getIn(obj1, segments, true);
      secondVal = Utils.getIn(obj2, segments, true);
    } else {
      firstVal = obj1[field];
      secondVal = obj2[field];
    }
    order = sortHelper(firstVal, secondVal, descriptor[1]);
    if (order !== 0) {
      return order;
    }
  }
  return 0;
}
/**
 * dotSubScan - helper function used for dot notation queries; drills into
 * nested documents, descending into any arrays encountered along the path.
 *
 * @param {object} root - object to traverse
 * @param {array} paths - array of properties to drill into
 * @param {function} fun - evaluation function to test with
 * @param {any} value - comparative value to also pass to (compare) fun
 * @param {any} extra - extra arg to also pass to compare fun
 * @param {number} poffset - index of the item in 'paths' to start the sub-scan from
 * @returns {boolean} true when any reachable leaf satisfies fun
 */
function dotSubScan(root, paths, fun, value, extra, poffset) {
  var offset = poffset || 0;
  var segment = paths[offset];
  var element;
  if (root !== null && typeof root === 'object' && segment in root) {
    element = root[segment];
  }
  if (offset + 1 >= paths.length) {
    // dot notation is fully expanded: evaluate the test directly on the element
    return fun(element, value, extra);
  }
  if (Array.isArray(element)) {
    // scan each array member; first match wins
    for (var idx = 0; idx < element.length; idx++) {
      if (dotSubScan(element[idx], paths, fun, value, extra, offset + 1) === true) {
        return true;
      }
    }
    return false;
  }
  return dotSubScan(element, paths, fun, value, extra, offset + 1);
}
/**
 * Builds a membership-test closure for the $contains family of ops.
 * Strings and arrays test with indexOf; other non-null objects test own-key
 * membership. Returns null for values that support neither.
 * @param {*} a - the container value from the document
 * @returns {?function} predicate over a candidate member, or null
 */
function containsCheckFn(a) {
  var indexable = (typeof a === 'string') || Array.isArray(a);
  if (indexable) {
    return function (candidate) {
      return a.indexOf(candidate) !== -1;
    };
  }
  if (a !== null && typeof a === 'object') {
    return function (candidate) {
      return hasOwnProperty.call(a, candidate);
    };
  }
  // not a containable value
  return null;
}
/**
 * Applies a single-operator query object (e.g. {$gt: 5}) to a value.
 * Only the first own property of `op` is dispatched; an empty operator
 * object yields false.
 * @param {*} val - document value being tested
 * @param {object} op - single-key operator object
 * @param {object} record - full document, forwarded to ops that use it
 * @returns {boolean} result of the dispatched operator
 */
function doQueryOp(val, op, record) {
  var opName;
  for (opName in op) {
    if (hasOwnProperty.call(op, opName)) {
      return ControlOps[opName](val, op[opName], record);
    }
  }
  return false;
}
// ControlOps: implementations for every query operator.
// Convention: 'a' is the value found in the collection document, 'b' is the
// user-supplied query value, and 'record' (where accepted) is the full document.
var ControlOps = {
  // comparison operators
  // a is the value in the collection
  // b is the query value
  $eq: function (a, b) {
    return a === b;
  },
  // abstract/loose equality
  $aeq: function (a, b) {
    return a == b;
  },
  $ne: function (a, b) {
    // ecma 5 safe test for NaN
    if (b !== b) {
      // ecma 5 test value is not NaN
      return (a === a);
    }
    return a !== b;
  },
  // date equality / control abstract equality test
  $dteq: function (a, b) {
    return Comparators.aeq(a, b);
  },
  // control comparisons: return identical unindexed results as indexed comparisons
  $gt: function (a, b) {
    return Comparators.gt(a, b, false);
  },
  $gte: function (a, b) {
    return Comparators.gt(a, b, true);
  },
  $lt: function (a, b) {
    return Comparators.lt(a, b, false);
  },
  $lte: function (a, b) {
    return Comparators.lt(a, b, true);
  },
  // lightweight javascript comparisons
  $jgt: function (a, b) {
    return a > b;
  },
  $jgte: function (a, b) {
    return a >= b;
  },
  $jlt: function (a, b) {
    return a < b;
  },
  $jlte: function (a, b) {
    return a <= b;
  },
  // inclusive range check using control comparators
  // ex : coll.find({'orderCount': {$between: [10, 50]}});
  $between: function (a, vals) {
    if (a === undefined || a === null) return false;
    return (Comparators.gt(a, vals[0], true) && Comparators.lt(a, vals[1], true));
  },
  // inclusive range check using native javascript comparisons
  $jbetween: function (a, vals) {
    if (a === undefined || a === null) return false;
    return (a >= vals[0] && a <= vals[1]);
  },
  $in: function (a, b) {
    return b.indexOf(a) !== -1;
  },
  // membership test against anything exposing .has() (e.g. a Set)
  $inSet: function (a, b) {
    return b.has(a);
  },
  $nin: function (a, b) {
    return b.indexOf(a) === -1;
  },
  // document value used as a key into the query object b
  $keyin: function (a, b) {
    return a in b;
  },
  $nkeyin: function (a, b) {
    return !(a in b);
  },
  $definedin: function (a, b) {
    return b[a] !== undefined;
  },
  $undefinedin: function (a, b) {
    return b[a] === undefined;
  },
  // b is expected to be a RegExp instance
  $regex: function (a, b) {
    return b.test(a);
  },
  $containsString: function (a, b) {
    return (typeof a === 'string') && (a.indexOf(b) !== -1);
  },
  $containsNone: function (a, b) {
    return !ControlOps.$containsAny(a, b);
  },
  // true when container 'a' (string/array/object) holds ANY of the values in b
  $containsAny: function (a, b) {
    var checkFn = containsCheckFn(a);
    if (checkFn !== null) {
      return (Array.isArray(b)) ? (b.some(checkFn)) : (checkFn(b));
    }
    return false;
  },
  // true when container 'a' holds ALL of the values in b
  $contains: function (a, b) {
    var checkFn = containsCheckFn(a);
    if (checkFn !== null) {
      return (Array.isArray(b)) ? (b.every(checkFn)) : (checkFn(b));
    }
    return false;
  },
  // true when any element of array 'a' satisfies every clause of sub-query b
  $elemMatch: function (a, b) {
    if (Array.isArray(a)) {
      return a.some(function (item) {
        return Object.keys(b).every(function (property) {
          var filter = b[property];
          if (!(typeof filter === 'object' && filter)) {
            // normalize primitive shorthand (e.g. {age: 22}) into {$eq: 22}
            filter = { $eq: filter };
          }
          if (property.indexOf('.') !== -1) {
            // bugfix: pass the normalized 'filter' (previously the raw
            // b[property]) so primitive shorthand filters also work on
            // dotted paths instead of being handed to doQueryOp unwrapped
            return dotSubScan(item, property.split('.'), doQueryOp, filter, item);
          }
          return doQueryOp(item[property], filter, item);
        });
      });
    }
    return false;
  },
  // type test; b may itself be a nested operator object (e.g. {$in: [...]})
  $type: function (a, b, record) {
    var type = typeof a;
    if (type === 'object') {
      if (Array.isArray(a)) {
        type = 'array';
      } else if (a instanceof Date) {
        type = 'date';
      }
    }
    return (typeof b !== 'object') ? (type === b) : doQueryOp(type, b, record);
  },
  $finite: function (a, b) {
    return (b === isFinite(a));
  },
  // array length test; b may be a nested operator object
  $size: function (a, b, record) {
    if (Array.isArray(a)) {
      return (typeof b !== 'object') ? (a.length === b) : doQueryOp(a.length, b, record);
    }
    return false;
  },
  // string length test; b may be a nested operator object
  $len: function (a, b, record) {
    if (typeof a === 'string') {
      return (typeof b !== 'object') ? (a.length === b) : doQueryOp(a.length, b, record);
    }
    return false;
  },
  $where: function (a, b) {
    return b(a) === true;
  },
  // field-level logical operators
  // a is the value in the collection
  // b is the nested query operation (for '$not')
  // or an array of nested query operations (for '$and' and '$or')
  $not: function (a, b, record) {
    return !doQueryOp(a, b, record);
  },
  $and: function (a, b, record) {
    for (var idx = 0, len = b.length; idx < len; idx += 1) {
      if (!doQueryOp(a, b[idx], record)) {
        return false;
      }
    }
    return true;
  },
  $or: function (a, b, record) {
    for (var idx = 0, len = b.length; idx < len; idx += 1) {
      if (doQueryOp(a, b[idx], record)) {
        return true;
      }
    }
    return false;
  },
  $exists: function (a, b) {
    if (b) {
      return a !== undefined;
    } else {
      return a === undefined;
    }
  }
};
// ops that can be used with { $$op: 'column-name' } syntax, where the query
// value is resolved per-record: from another column (string spec) or from a
// function of the record.
var valueLevelOps = ['$eq', '$aeq', '$ne', '$dteq', '$gt', '$gte', '$lt', '$lte', '$jgt', '$jgte', '$jlt', '$jlte', '$type'];
valueLevelOps.forEach(function (op) {
  var baseOp = ControlOps[op];
  ControlOps['$' + op] = function (a, spec, record) {
    if (typeof spec === 'string') {
      // spec names another property on the same record
      return baseOp(a, record[spec]);
    }
    if (typeof spec === 'function') {
      // spec derives the comparison value from the record
      return baseOp(a, spec(record));
    }
    throw new Error('Invalid argument to $$ matcher');
  };
});
// if an op is registered in this object, our 'calculateRange' can use it with our binary indices.
// if the op is registered to a function, we will run that function/op as a 2nd pass filter on results.
// those 2nd pass filter functions should be similar to ControlOps functions, accepting 2 vals to compare.
var indexedOps = {
  $eq: ControlOps.$eq, // index narrows the range, then strict equality re-filters
  $aeq: true, // 'true' entries are resolvable purely by the binary index range
  $dteq: true,
  $gt: true,
  $gte: true,
  $lt: true,
  $lte: true,
  $in: true,
  $between: true
};
/**
 * Clones a value using the requested strategy.
 *
 * @param {object} data - value to clone; null/undefined both yield null
 * @param {string} [method='parse-stringify'] - one of:
 *   'parse-stringify' (deep; drops functions/undefined, stringifies Dates),
 *   'jquery-extend-deep' (requires a global jQuery),
 *   'shallow', 'shallow-assign' (top-level copy preserving prototype),
 *   'shallow-recurse-objects' (shallow, but recursing into plain-object and
 *   array properties).
 * @returns {object} the clone, or undefined for an unrecognized method
 */
function clone(data, method) {
  if (data === null || data === undefined) {
    return null;
  }
  var cloneMethod = method || 'parse-stringify',
    cloned;
  switch (cloneMethod) {
    case "parse-stringify":
      cloned = JSON.parse(JSON.stringify(data));
      break;
    case "jquery-extend-deep":
      cloned = jQuery.extend(true, {}, data);
      break;
    case "shallow":
      // more compatible method for older browsers
      cloned = Object.create(data.constructor.prototype);
      Object.keys(data).forEach(function (i) {
        cloned[i] = data[i];
      });
      break;
    case "shallow-assign":
      // should be supported by newer environments/browsers
      cloned = Object.create(data.constructor.prototype);
      Object.assign(cloned, data);
      break;
    case "shallow-recurse-objects":
      // shallow clone top level properties
      cloned = clone(data, "shallow");
      var keys = Object.keys(data);
      // for each top level property which is an array or a plain object
      // literal, recursively shallow copy
      keys.forEach(function (key) {
        var value = data[key];
        if (Array.isArray(value)) {
          cloned[key] = cloneObjectArray(value, "shallow-recurse-objects");
        } else if (value !== null && typeof value === "object" &&
          value.constructor && value.constructor.name === "Object") {
          // the null/constructor guards prevent the former TypeError on
          // null-valued or null-prototype properties
          cloned[key] = clone(value, "shallow-recurse-objects");
        }
      });
      break;
    default:
      break;
  }
  return cloned;
}
/**
 * Clones every element of an array with clone(); for 'parse-stringify' the
 * whole array is serialized in a single pass instead of per element.
 * @param {array} objarray - array of values to clone
 * @param {string} method - clone strategy, forwarded to clone()
 * @returns {array} new array of cloned elements
 */
function cloneObjectArray(objarray, method) {
  // loose comparison kept intentionally (matches historical behavior)
  if (method == "parse-stringify") {
    return clone(objarray, method);
  }
  var cloned = [];
  var count = objarray.length;
  for (var pos = 0; pos < count; pos++) {
    cloned.push(clone(objarray[pos], method));
  }
  return cloned;
}
/**
 * Safe feature-test for window.localStorage. Any throw — a ReferenceError
 * when no window exists (Node), or a SecurityError under strict privacy
 * settings — is treated as "unavailable".
 * @returns {boolean} truthy when window.localStorage is usable
 */
function localStorageAvailable() {
  try {
    return (window && window.localStorage !== undefined && window.localStorage !== null);
  } catch (ignored) {
    return false;
  }
}
/**
 * ControlDBEventEmitter is a minimalist version of EventEmitter. It enables any
 * constructor that inherits EventEmitter to emit events and trigger
 * listeners that have been added to the event through the on(event, callback) method
 *
 * @constructor ControlDBEventEmitter
 */
function ControlDBEventEmitter() {
  // give every instance its own listener map: with only the prototype-level
  // default below, on() would mutate a single hashmap shared by ALL emitters,
  // leaking listeners across unrelated instances
  this.events = {};
  this.asyncListeners = false;
}
/**
 * @prop {hashmap} events - a hashmap, with each property being an array of callbacks
 * (prototype-level default retained for objects created without running the constructor)
 * @memberof ControlDBEventEmitter
 */
ControlDBEventEmitter.prototype.events = {};
/**
 * @prop {boolean} asyncListeners - boolean determines whether or not the callbacks associated with each event
 * should happen in an async fashion or not
 * Default is false, which means events are synchronous
 * @memberof ControlDBEventEmitter
 */
ControlDBEventEmitter.prototype.asyncListeners = false;
/**
 * on(eventName, listener) - adds a listener to the queue of callbacks associated to an event
 * @param {string|string[]} eventName - the name(s) of the event(s) to listen to
 * @param {function} listener - callback function of listener to attach
 * @returns {function} the listener that was attached (useful for later removal)
 * @memberof ControlDBEventEmitter
 */
ControlDBEventEmitter.prototype.on = function (eventName, listener) {
  var self = this;
  // fan out when several event names are registered at once
  if (Array.isArray(eventName)) {
    eventName.forEach(function (name) {
      self.on(name, listener);
    });
    return listener;
  }
  var queue = this.events[eventName];
  if (!queue) {
    queue = [];
    this.events[eventName] = queue;
  }
  queue.push(listener);
  return listener;
};
/**
 * emit(eventName, data) - emits a particular event
 * with the option of passing optional parameters which are going to be processed by the callback
 * provided signatures match (i.e. if passing emit(event, arg0, arg1) the listener should take two parameters)
 * Throws when the event has never been registered.
 * @param {string} eventName - the name of the event
 * @param {object=} data - optional object passed with the event
 * @memberof ControlDBEventEmitter
 */
ControlDBEventEmitter.prototype.emit = function (eventName) {
  var self = this;
  var listeners = eventName ? this.events[eventName] : null;
  if (!listeners) {
    throw new Error('No event ' + eventName + ' defined');
  }
  if (listeners.length) {
    var callbackArgs = Array.prototype.slice.call(arguments, 1);
    listeners.forEach(function (listener) {
      if (self.asyncListeners) {
        // defer so the emitter never blocks on slow listeners
        setTimeout(function () {
          listener.apply(self, callbackArgs);
        }, 1);
      } else {
        listener.apply(self, callbackArgs);
      }
    });
  }
};
/**
 * Alias of ControlDBEventEmitter.prototype.on
 * addListener(eventName, listener) - adds a listener to the queue of callbacks associated to an event
 * @param {string|string[]} eventName - the name(s) of the event(s) to listen to
 * @param {function} listener - callback function of listener to attach
 * @returns {function} the listener that was attached (same value passed in)
 * @memberof ControlDBEventEmitter
 */
ControlDBEventEmitter.prototype.addListener = ControlDBEventEmitter.prototype.on;
/**
 * removeListener() - removes the given listener from the event 'eventName';
 * a no-op when the listener was never registered for that event.
 * @param {string|string[]} eventName - the name(s) of the event(s) which the listener is attached to
 * @param {function} listener - the listener callback function to remove from emitter
 * @memberof ControlDBEventEmitter
 */
ControlDBEventEmitter.prototype.removeListener = function (eventName, listener) {
  var self = this;
  if (Array.isArray(eventName)) {
    eventName.forEach(function (currentEventName) {
      self.removeListener(currentEventName, listener);
    });
    return;
  }
  if (this.events[eventName]) {
    var listeners = this.events[eventName];
    var idx = listeners.indexOf(listener);
    // guard against indexOf returning -1: splice(-1, 1) would wrongly
    // remove the LAST registered listener when the target is absent
    if (idx > -1) {
      listeners.splice(idx, 1);
    }
  }
};
/**
 * ControlDB: The main database class
 * @constructor ControlDB
 * @implements ControlDBEventEmitter
 * @param {string} filename - name of the file to be saved to
 * @param {object=} options - (Optional) config options object
 * @param {string} options.env - override environment detection as 'NODEJS', 'BROWSER', 'CORDOVA'
 * @param {boolean} [options.verbose=false] - enable console output
 * @param {boolean} [options.autosave=false] - enables autosave
 * @param {int} [options.autosaveInterval=5000] - time interval (in milliseconds) between saves (if dirty)
 * @param {boolean} [options.autoload=false] - enables autoload on control instantiation
 * @param {function} options.autoloadCallback - user callback called after database load
 * @param {adapter} options.adapter - an instance of a control persistence adapter
 * @param {string} [options.serializationMethod='normal'] - ['normal', 'pretty', 'destructured']
 * @param {string} options.destructureDelimiter - string delimiter used for destructured serialization
 * @param {boolean} [options.throttledSaves=true] - debounces multiple calls to saveDatabase reducing number of disk I/O operations
 and guaranteeing proper serialization of the calls.
 */
function ControlDB(filename, options) {
  this.filename = filename || 'controldb.db';
  this.collections = [];
  // persist version of code which created the database to the database.
  // could use for upgrade scenarios
  this.databaseVersion = 1.5;
  this.engineVersion = 1.5;
  // autosave support (disabled by default)
  // pass autosave: true, autosaveInterval: 6000 in options to set 6 second autosave
  this.autosave = false;
  this.autosaveInterval = 5000;
  this.autosaveHandle = null;
  this.throttledSaves = true;
  this.options = {};
  // currently keeping persistenceMethod and persistenceAdapter as control level properties that
  // will not or cannot be deserialized. You are required to configure persistence every time
  // you instantiate a control object (or use default environment detection) in order to load the database anyways.
  // persistenceMethod could be 'fs', 'localStorage', or 'adapter'
  // this is optional option param, otherwise environment detection will be used
  // if user passes their own adapter we will force this method to 'adapter' later, so no need to pass method option.
  this.persistenceMethod = null;
  // retain reference to optional (non-serializable) persistenceAdapter 'instance'
  this.persistenceAdapter = null;
  // flags used to throttle saves
  this.throttledSavePending = false;
  this.throttledCallbacks = [];
  // enable console output if verbose flag is set (disabled by default)
  this.verbose = options && options.hasOwnProperty('verbose') ? options.verbose : false;
  // listener queues for the events this database emits; replaces the
  // prototype-level default inherited from ControlDBEventEmitter
  this.events = {
    'init': [],
    'loaded': [],
    'flushChanges': [],
    'close': [],
    'changes': [],
    'warning': []
  };
  // detects the runtime environment; branch ORDER matters, since e.g.
  // node-webkit exposes both 'global' and 'window'
  var getENV = function () {
    if (typeof global !== 'undefined' && (global.android || global.NSObject)) {
      // If no adapter assume nativescript which needs adapter to be passed manually
      return 'NATIVESCRIPT'; //nativescript
    }
    if (typeof window === 'undefined') {
      // no window object at all: plain node
      return 'NODEJS';
    }
    if (typeof global !== 'undefined' && global.window && typeof process !== 'undefined') {
      return 'NODEJS'; //node-webkit
    }
    if (typeof document !== 'undefined') {
      // non-http(s) URL (e.g. file://) implies a packaged webview app
      if (document.URL.indexOf('http://') === -1 && document.URL.indexOf('https://') === -1) {
        return 'CORDOVA';
      }
      return 'BROWSER';
    }
    return 'CORDOVA';
  };
  // refactored environment detection due to invalid detection for browser environments.
  // if they do not specify an options.env we want to detect env rather than default to nodejs.
  // currently keeping two properties for similar thing (options.env and options.persistenceMethod)
  // might want to review whether we can consolidate.
  if (options && options.hasOwnProperty('env')) {
    this.ENV = options.env;
  } else {
    this.ENV = getENV();
  }
  // not sure if this is necessary now that i have refactored the line above
  // NOTE(review): this compares against the literal STRING 'undefined',
  // presumably guarding against a caller passing env: 'undefined' — confirm
  if (this.ENV === 'undefined') {
    this.ENV = 'NODEJS';
  }
  // wire up persistence/autosave/autoload from options (initialConfig = true)
  this.configureOptions(options, true);
  this.on('init', this.clearChanges);
}
// db class is an EventEmitter: inherit on/emit/addListener/removeListener
ControlDB.prototype = new ControlDBEventEmitter();
// restore the constructor reference clobbered by the prototype swap above
ControlDB.prototype.constructor = ControlDB;
// experimental support for browserify's abstract syntax scan to pick up dependency of indexed adapter.
// Hopefully, once this hits npm a browserify require of controldb should scan the main file and detect this indexed adapter reference.
ControlDB.prototype.getIndexedAdapter = function () {
  // only attempt the require when a CommonJS-style loader is present;
  // otherwise return undefined, matching the old uninitialized-var behavior
  if (typeof require !== 'function') {
    return undefined;
  }
  return require("./controldb-indexed-adapter.js");
};
/**
 * Allows reconfiguring database options
 *
 * @param {object} options - configuration options to apply to control db object
 * @param {string} options.env - override environment detection as 'NODEJS', 'BROWSER', 'CORDOVA'
 * @param {boolean} options.verbose - enable console output (default is 'false')
 * @param {boolean} options.autosave - enables autosave
 * @param {int} options.autosaveInterval - time interval (in milliseconds) between saves (if dirty)
 * @param {boolean} options.autoload - enables autoload on control instantiation
 * @param {function} options.autoloadCallback - user callback called after database load
 * @param {adapter} options.adapter - an instance of a control persistence adapter
 * @param {string} options.serializationMethod - ['normal', 'pretty', 'destructured']
 * @param {string} options.destructureDelimiter - string delimiter used for destructured serialization
 * @param {boolean} initialConfig - (internal) true is passed when control ctor is invoking
 * @memberof ControlDB
 */
ControlDB.prototype.configureOptions = function (options, initialConfig) {
  // per-environment default persistence choices and the adapter constructors
  // that back each named persistence method
  var defaultPersistence = {
    'NODEJS': 'fs',
    'BROWSER': 'localStorage',
    'CORDOVA': 'localStorage',
    'MEMORY': 'memory'
  },
    persistenceMethods = {
      'fs': ControlDBFsAdapter,
      'localStorage': ControlDBLocalStorageAdapter,
      'memory': ControlDBMemoryAdapter
    };
  // reset before re-applying: reconfiguration starts from a clean slate
  this.options = {};
  this.persistenceMethod = null;
  // retain reference to optional persistence adapter 'instance'
  // currently keeping outside options because it can't be serialized
  this.persistenceAdapter = null;
  // process the options
  if (typeof (options) !== 'undefined') {
    this.options = options;
    if (this.options.hasOwnProperty('persistenceMethod')) {
      // check if the specified persistence method is known
      if (typeof (persistenceMethods[options.persistenceMethod]) == 'function') {
        this.persistenceMethod = options.persistenceMethod;
        this.persistenceAdapter = new persistenceMethods[options.persistenceMethod]();
      }
      // NOTE(review): unknown persistenceMethod values are silently ignored
      // here — should we throw instead, or just fall back to defaults?
    }
    // if user passes adapter, set persistence mode to adapter and retain persistence adapter instance
    if (this.options.hasOwnProperty('adapter')) {
      this.persistenceMethod = 'adapter';
      this.persistenceAdapter = options.adapter;
      // null the options copy so the adapter is never serialized with options
      this.options.adapter = null;
      // if true, will keep track of dirty ids
      this.isIncremental = this.persistenceAdapter.mode === 'incremental';
    }
    // if they want to load database on control instantiation, now is a good time to load... after adapter set and before possible autosave initiation
    if (options.autoload && initialConfig) {
      // for autoload, let the constructor complete before firing callback
      var self = this;
      setTimeout(function () {
        self.loadDatabase(options, options.autoloadCallback);
      }, 1);
    }
    if (this.options.hasOwnProperty('autosaveInterval')) {
      // stop any running autosave timer before changing its interval
      this.autosaveDisable();
      this.autosaveInterval = parseInt(this.options.autosaveInterval, 10);
    }
    if (this.options.hasOwnProperty('autosave') && this.options.autosave) {
      // restart autosave so it picks up the (possibly new) interval
      this.autosaveDisable();
      this.autosave = true;
      if (this.options.hasOwnProperty('autosaveCallback')) {
        this.autosaveEnable(options, options.autosaveCallback);
      } else {
        this.autosaveEnable();
      }
    }
    if (this.options.hasOwnProperty('throttledSaves')) {
      this.throttledSaves = this.options.throttledSaves;
    }
  } // end of options processing
  // ensure defaults exists for options which were not set
  if (!this.options.hasOwnProperty('serializationMethod')) {
    this.options.serializationMethod = 'normal';
  }
  // ensure passed or default option exists
  if (!this.options.hasOwnProperty('destructureDelimiter')) {
    this.options.destructureDelimiter = '$<\n';
  }
  // if by now there is no adapter specified by user nor derived from persistenceMethod: use sensible defaults
  if (this.persistenceAdapter === null) {
    this.persistenceMethod = defaultPersistence[this.ENV];
    if (this.persistenceMethod) {
      this.persistenceAdapter = new persistenceMethods[this.persistenceMethod]();
    }
  }
};
/**
 * Copies 'this' database into a new ControlDB instance. Object references are shared to make lightweight.
 *
 * @param {object} options - apply or override collection level settings
 * @param {bool} options.removeNonSerializable - nulls properties not safe for serialization.
 * @returns {ControlDB} the new database copy
 * @memberof ControlDB
 */
ControlDB.prototype.copy = function (options) {
  // in case running in an environment without accurate environment detection, pass 'NA'
  var databaseCopy = new ControlDB(this.filename, { env: "NA" });
  options = options || {};
  // invert the work and let loadJSONObject rebuild the copy from ourselves
  databaseCopy.loadJSONObject(this, { retainDirtyFlags: true });
  // reference database adapters never pass through our JSON serializeReplacer,
  // so mimic its stripping of non-serializable properties here
  if (options.hasOwnProperty("removeNonSerializable") && options.removeNonSerializable === true) {
    databaseCopy.autosaveHandle = null;
    databaseCopy.persistenceAdapter = null;
    databaseCopy.collections.forEach(function (coll) {
      coll.constraints = null;
      coll.ttl = null;
    });
  }
  return databaseCopy;
};
/**
 * Adds a collection to the database; idempotent, returning the existing
 * collection when one with the same name is already registered.
 * @param {string} name - name of collection to add
 * @param {object=} options - (optional) options to configure collection with.
 * @param {array=} [options.unique=[]] - array of property names to define unique constraints for
 * @param {array=} [options.exact=[]] - array of property names to define exact constraints for
 * @param {array=} [options.indices=[]] - array property names to define binary indexes for
 * @param {boolean} [options.asyncListeners=false] - whether listeners are called asynchronously
 * @param {boolean} [options.disableMeta=false] - set to true to disable meta property on documents
 * @param {boolean} [options.disableChangesApi=true] - set to false to enable Changes Api
 * @param {boolean} [options.disableDeltaChangesApi=true] - set to false to enable Delta Changes API (requires Changes API, forces cloning)
 * @param {boolean} [options.autoupdate=false] - use Object.observe to update objects automatically
 * @param {boolean} [options.clone=false] - specify whether inserts and queries clone to/from user
 * @param {string} [options.cloneMethod='parse-stringify'] - 'parse-stringify', 'jquery-extend-deep', 'shallow, 'shallow-assign'
 * @param {int=} options.ttl - age of document (in ms.) before document is considered aged/stale.
 * @param {int=} options.ttlInterval - time interval for clearing out 'aged' documents; not set by default.
 * @returns {Collection} a reference to the collection which was just added
 * @memberof ControlDB
 */
ControlDB.prototype.addCollection = function (name, options) {
  // disabling per-document meta is incompatible with features that depend on it
  if (options && options.disableMeta === true) {
    if (options.disableChangesApi === false) {
      throw new Error("disableMeta option cannot be passed as true when disableChangesApi is passed as false");
    }
    if (options.disableDeltaChangesApi === false) {
      throw new Error("disableMeta option cannot be passed as true when disableDeltaChangesApi is passed as false");
    }
    if (typeof options.ttl === "number" && options.ttl > 0) {
      throw new Error("disableMeta option cannot be passed as true when ttl is enabled");
    }
  }
  // return the existing collection when the name is already taken
  for (var idx = 0; idx < this.collections.length; idx++) {
    if (this.collections[idx].name === name) {
      return this.collections[idx];
    }
  }
  var collection = new Collection(name, options);
  collection.isIncremental = this.isIncremental;
  this.collections.push(collection);
  if (this.verbose) {
    collection.controlConsoleWrapper = console;
  }
  return collection;
};
ControlDB.prototype.loadCollection = function (collection) {
  // a nameless collection could never be retrieved via getCollection, so reject it
  if (collection.name) {
    this.collections.push(collection);
    return;
  }
  throw new Error('Collection must have a name property to be loaded');
};
/**
* Retrieves reference to a collection by name.
* @param {string} collectionName - name of collection to look up
* @returns {Collection} Reference to collection in database by that name, or null if not found
* @memberof ControlDB
*/
ControlDB.prototype.getCollection = function (collectionName) {
  var idx;
  var count = this.collections.length;

  for (idx = 0; idx < count; idx += 1) {
    var candidate = this.collections[idx];
    if (candidate.name === collectionName) {
      return candidate;
    }
  }

  // no such collection : warn via event so callers can observe the miss
  this.emit('warning', 'collection ' + collectionName + ' not found');
  return null;
};
/**
* Renames an existing control collection
* @param {string} oldName - name of collection to rename
* @param {string} newName - new name of collection
* @returns {Collection} reference to the newly renamed collection
* @memberof ControlDB
*/
ControlDB.prototype.renameCollection = function (oldName, newName) {
  // getCollection emits a 'warning' event and yields null when oldName is absent
  var coll = this.getCollection(oldName);

  if (coll !== null) {
    coll.name = newName;
  }

  return coll;
};
/**
* Returns a list of collections in the database.
* @returns {object[]} array of objects containing 'name', 'type', and 'count' properties.
* @memberof ControlDB
*/
ControlDB.prototype.listCollections = function () {
  var summaries = [];
  var idx;

  // iterate from last to first, matching the original enumeration order
  for (idx = this.collections.length - 1; idx >= 0; idx--) {
    var coll = this.collections[idx];
    summaries.push({
      name: coll.name,
      type: coll.objType,
      count: coll.data.length
    });
  }

  return summaries;
};
/**
* Removes a collection from the database.
* @param {string} collectionName - name of collection to remove
* @memberof ControlDB
*/
ControlDB.prototype.removeCollection = function (collectionName) {
  var idx;
  var total = this.collections.length;

  for (idx = 0; idx < total; idx += 1) {
    if (this.collections[idx].name !== collectionName) {
      continue;
    }

    // reset the removed collection's own properties to a pristine state so any
    // lingering external references to it do not retain stale data/indexes
    var pristine = new Collection(collectionName, {});
    var found = this.collections[idx];
    for (var prop in found) {
      if (found.hasOwnProperty(prop) && pristine.hasOwnProperty(prop)) {
        found[prop] = pristine[prop];
      }
    }

    this.collections.splice(idx, 1);
    return;
  }
};
/**
 * Returns the name assigned to this database instance.
 * @returns {string} the database name
 * @memberof ControlDB
 */
ControlDB.prototype.getName = function () {
  return this.name;
};
/**
* serializeReplacer - used to prevent certain properties from being serialized
*
*/
ControlDB.prototype.serializeReplacer = function (key, value) {
  // runtime-only handles/state are nulled in the output; throttling
  // bookkeeping is omitted entirely (undefined drops the property)
  switch (key) {
    case 'autosaveHandle':
    case 'persistenceAdapter':
    case 'constraints':
    case 'ttl':
    case 'controlConsoleWrapper':
      return null;
    case 'throttledSavePending':
    case 'throttledCallbacks':
      return undefined;
    default:
      return value;
  }
};
/**
* Serialize database to a string which can be loaded via {@link ControlDB#loadJSON}
*
* @returns {string} Stringified representation of the control database.
* @memberof ControlDB
*/
ControlDB.prototype.serialize = function (options) {
  options = options || {};

  // fall back to the db-level setting when no explicit method was requested
  if (!options.hasOwnProperty("serializationMethod")) {
    options.serializationMethod = this.options.serializationMethod;
  }

  if (options.serializationMethod === "pretty") {
    return JSON.stringify(this, this.serializeReplacer, 2);
  }
  if (options.serializationMethod === "destructured") {
    return this.serializeDestructured(); // use default options
  }
  // "normal" and any unrecognized method produce compact JSON
  return JSON.stringify(this, this.serializeReplacer);
};
// alias of serialize
ControlDB.prototype.toJson = ControlDB.prototype.serialize;
/**
* Database level destructured JSON serialization routine to allow alternate serialization methods.
* Internally, ControlDB supports destructuring via the control 'serializationMethod' option and
* the optional ControlDBPartitioningAdapter class. It is also available if you wish to do
* your own structured persistence or data exchange.
*
* @param {object=} options - output format options for use externally to control
* @param {bool=} options.partitioned - (default: false) whether db and each collection are separate
* @param {int=} options.partition - can be used to only output an individual collection or db (-1)
* @param {bool=} options.delimited - (default: true) whether subitems are delimited or subarrays
* @param {string=} options.delimiter - override default delimiter
*
* @returns {string|array} A custom, restructured aggregation of independent serializations.
* @memberof ControlDB
*/
ControlDB.prototype.serializeDestructured = function (options) {
  var idx, sidx, result, resultlen;
  var reconstruct = [];
  var dbcopy;

  options = options || {};
  if (!options.hasOwnProperty("partitioned")) {
    options.partitioned = false;
  }
  if (!options.hasOwnProperty("delimited")) {
    options.delimited = true;
  }
  if (!options.hasOwnProperty("delimiter")) {
    options.delimiter = this.options.destructureDelimiter;
  }

  // 'partitioned' along with 'partition' of 0 or greater is a request for single collection serialization
  if (options.partitioned === true && options.hasOwnProperty("partition") && options.partition >= 0) {
    return this.serializeCollection({
      delimited: options.delimited,
      delimiter: options.delimiter,
      collectionIndex: options.partition
    });
  }

  // not just an individual collection, so we will need to serialize db container via shallow copy
  dbcopy = new ControlDB(this.filename);
  dbcopy.loadJSONObject(this);
  for (idx = 0; idx < dbcopy.collections.length; idx++) {
    dbcopy.collections[idx].data = [];
  }

  // if we -only- wanted the db container portion, return it now
  if (options.partitioned === true && options.partition === -1) {
    // since we are deconstructing, override serializationMethod to normal for here
    return dbcopy.serialize({
      serializationMethod: "normal"
    });
  }

  // at this point we must be deconstructing the entire database
  // start by pushing db serialization into first array element
  reconstruct.push(dbcopy.serialize({
    serializationMethod: "normal"
  }));
  dbcopy = null;

  // push collection data into subsequent elements
  for (idx = 0; idx < this.collections.length; idx++) {
    result = this.serializeCollection({
      delimited: options.delimited,
      delimiter: options.delimiter,
      collectionIndex: idx
    });

    // NDA : Non-Delimited Array : one iterable concatenated array with empty string collection partitions
    if (options.partitioned === false && options.delimited === false) {
      if (!Array.isArray(result)) {
        throw new Error("a nondelimited, non partitioned collection serialization did not return an expected array");
      }

      // Array.concat would probably duplicate memory overhead for copying strings.
      // Instead copy each individually, and clear old value after each copy.
      // Hopefully this will allow g.c. to reduce memory pressure, if needed.
      resultlen = result.length;
      for (sidx = 0; sidx < resultlen; sidx++) {
        reconstruct.push(result[sidx]);
        result[sidx] = null;
      }
      reconstruct.push("");
    } else {
      reconstruct.push(result);
    }
  }

  // Reconstruct / present results according to four combinations : D, DA, NDA, NDAA
  if (options.partitioned) {
    // DA : Delimited Array of strings [0] db [1] collection [n] collection { partitioned: true, delimited: true }
    // NDAA : Non-Delimited Array with subArrays. db at [0] and collection subarrays at [n] { partitioned: true, delimited : false }
    // both partitioned shapes return the array exactly as accumulated above
    return reconstruct;
  }

  // D : one big Delimited string { partitioned: false, delimited : true }
  // This is the method ControlDB will use internally if 'destructured'.
  if (options.delimited) {
    // indicate no more collections
    reconstruct.push("");
    return reconstruct.join(options.delimiter);
  }

  // NDA : Non-Delimited Array : one iterable array with empty string collection partitions { partitioned: false, delimited: false }
  // indicate no more collections
  reconstruct.push("");
  return reconstruct;
  // bug fix: removed unreachable trailing statements that referenced an
  // undefined variable 'delim' (every path above already returns)
};
/**
* Collection level utility method to serialize a collection in a 'destructured' format
*
* @param {object=} options - used to determine output of method
* @param {bool} options.delimited - whether to return single delimited string or an array
* @param {string} options.delimiter - (optional) if delimited, this is delimiter to use
* @param {int} options.collectionIndex - specify which collection to serialize data for
*
* @returns {string|array} A custom, restructured aggregation of independent serializations for a single collection.
* @memberof ControlDB
*/
ControlDB.prototype.serializeCollection = function (options) {
  var doccount,
    docidx,
    resultlines = [];

  options = options || {};
  if (!options.hasOwnProperty("delimited")) {
    options.delimited = true;
  }
  if (!options.hasOwnProperty("collectionIndex")) {
    throw new Error("serializeCollection called without 'collectionIndex' option");
  }

  // serialize each document individually so output can be paged or streamed
  // (fix: removed the redundant second initialization of resultlines)
  doccount = this.collections[options.collectionIndex].data.length;
  for (docidx = 0; docidx < doccount; docidx++) {
    resultlines.push(JSON.stringify(this.collections[options.collectionIndex].data[docidx]));
  }

  // D and DA formats
  if (options.delimited) {
    // indicate no more documents in collection (via empty delimited string)
    resultlines.push("");
    return resultlines.join(options.delimiter);
  }

  // NDAA and NDA formats
  return resultlines;
};
/**
* Database level destructured JSON deserialization routine to minimize memory overhead.
* Internally, ControlDB supports destructuring via the control 'serializationMethod' option and
* the optional ControlDBPartitioningAdapter class. It is also available if you wish to do
* your own structured persistence or data exchange.
*
* @param {string|array} destructuredSource - destructured json or array to deserialize from
* @param {object=} options - source format options
* @param {bool=} [options.partitioned=false] - whether db and each collection are separate
* @param {int=} options.partition - can be used to deserialize only a single partition
* @param {bool=} [options.delimited=true] - whether subitems are delimited or subarrays
* @param {string=} options.delimiter - override default delimiter
*
* @returns {object|array} An object representation of the deserialized database, not yet applied to 'this' db or document array
* @memberof ControlDB
*/
ControlDB.prototype.deserializeDestructured = function (destructuredSource, options) {
  var workarray = [];
  var len, cdb;
  var idx, collIndex = 0, collCount, lineIndex = 1, done = false;
  // NOTE(review): currLine is assigned below but never read; workarray[lineIndex] is re-read directly
  var currLine, currObject;

  options = options || {};

  // default to a non-partitioned, delimited source using the db's configured delimiter
  if (!options.hasOwnProperty("partitioned")) {
    options.partitioned = false;
  }
  if (!options.hasOwnProperty("delimited")) {
    options.delimited = true;
  }
  if (!options.hasOwnProperty("delimiter")) {
    options.delimiter = this.options.destructureDelimiter;
  }

  // Partitioned
  // DA : Delimited Array of strings [0] db [1] collection [n] collection { partitioned: true, delimited: true }
  // NDAA : Non-Delimited Array with subArrays. db at [0] and collection subarrays at [n] { partitioned: true, delimited : false }
  // -or- single partition
  if (options.partitioned) {
    // handle single partition
    if (options.hasOwnProperty('partition')) {
      // db only
      if (options.partition === -1) {
        cdb = JSON.parse(destructuredSource[0]);
        return cdb;
      }
      // single collection, return doc array
      return this.deserializeCollection(destructuredSource[options.partition + 1], options);
    }

    // Otherwise we are restoring an entire partitioned db
    cdb = JSON.parse(destructuredSource[0]);
    collCount = cdb.collections.length;
    for (collIndex = 0; collIndex < collCount; collIndex++) {
      // attach each collection docarray to container collection data, add 1 to collection array index since db is at 0
      cdb.collections[collIndex].data = this.deserializeCollection(destructuredSource[collIndex + 1], options);
    }

    return cdb;
  }

  // Non-Partitioned
  // D : one big Delimited string { partitioned: false, delimited : true }
  // NDA : Non-Delimited Array : one iterable array with empty string collection partitions { partitioned: false, delimited: false }
  // D
  if (options.delimited) {
    workarray = destructuredSource.split(options.delimiter);
    destructuredSource = null; // lower memory pressure
    len = workarray.length;

    if (len === 0) {
      return null;
    }
  }
  // NDA
  else {
    workarray = destructuredSource;
  }

  // first line is database and collection shells
  cdb = JSON.parse(workarray[0]);
  collCount = cdb.collections.length;
  workarray[0] = null;

  // scan remaining entries, appending parsed documents into each collection in
  // order; an empty-string entry advances to the next collection, and one more
  // empty entry past the final collection terminates the scan
  while (!done) {
    currLine = workarray[lineIndex];

    // empty string indicates either end of collection or end of file
    if (workarray[lineIndex] === "") {
      // if no more collections to load into, we are done
      if (++collIndex > collCount) {
        done = true;
      }
    }
    else {
      currObject = JSON.parse(workarray[lineIndex]);
      cdb.collections[collIndex].data.push(currObject);
    }

    // lower memory pressure and advance iterator
    workarray[lineIndex++] = null;
  }

  return cdb;
};
/**
* Collection level utility function to deserializes a destructured collection.
*
* @param {string|array} destructuredSource - destructured representation of collection to inflate
* @param {object=} options - used to describe format of destructuredSource input
* @param {bool=} [options.delimited=true] - whether source is delimited string or an array
* @param {string=} options.delimiter - if delimited, this is delimiter to use (if other than default)
*
* @returns {array} an array of documents to attach to collection.data.
* @memberof ControlDB
*/
ControlDB.prototype.deserializeCollection = function (destructuredSource, options) {
  var docs;
  var idx;

  options = options || {};
  if (!options.hasOwnProperty("partitioned")) {
    options.partitioned = false;
  }
  if (!options.hasOwnProperty("delimited")) {
    options.delimited = true;
  }
  if (!options.hasOwnProperty("delimiter")) {
    options.delimiter = this.options.destructureDelimiter;
  }

  if (options.delimited) {
    // delimited string : split into per-document strings and drop the trailing empty sentinel
    docs = destructuredSource.split(options.delimiter);
    docs.pop();
  } else {
    docs = destructuredSource;
  }

  // inflate each stringified document in place
  for (idx = 0; idx < docs.length; idx++) {
    docs[idx] = JSON.parse(docs[idx]);
  }

  return docs;
};
/**
* Inflates a control database from a serialized JSON string
*
* @param {string} serializedDb - a serialized control database string
* @param {object=} options - apply or override collection level settings
* @param {bool} options.retainDirtyFlags - whether collection dirty flags will be preserved
* @memberof ControlDB
*/
ControlDB.prototype.loadJSON = function (serializedDb, options) {
  var dbObject;

  if (serializedDb.length === 0) {
    dbObject = {};
  } else {
    // deserialization method comes from this instance's options,
    // not from whatever was recorded inside the serialized payload
    if (this.options.serializationMethod === "destructured") {
      dbObject = this.deserializeDestructured(serializedDb);
    } else {
      // "normal", "pretty", and any unrecognized method are plain JSON
      dbObject = JSON.parse(serializedDb);
    }
  }

  this.loadJSONObject(dbObject, options);
};
/**
* Inflates a control database from a JS object
*
* @param {object} dbObject - a serialized control database string
* @param {object=} options - apply or override collection level settings
* @param {bool} options.retainDirtyFlags - whether collection dirty flags will be preserved
* @memberof ControlDB
*/
ControlDB.prototype.loadJSONObject = function (dbObject, options) {
  var i = 0,
    len = dbObject.collections ? dbObject.collections.length : 0,
    coll,
    copyColl,
    clen,
    j,
    loader,
    collObj;

  this.name = dbObject.name;

  // restore save throttled boolean only if not defined in options
  // NOTE(review): when options is omitted entirely, the serialized value is not restored — confirm intended
  if (dbObject.hasOwnProperty('throttledSaves') && options && !options.hasOwnProperty('throttledSaves')) {
    this.throttledSaves = dbObject.throttledSaves;
  }

  // existing collections are discarded and rebuilt from dbObject
  this.collections = [];

  // builds the document inflater for a named collection from caller options:
  // either constructs options[name].proto instances (populated via
  // options[name].inflate or Utils.copyProperties) or returns the raw
  // options[name].inflate function
  function makeLoader(coll) {
    var collOptions = options[coll.name];
    var inflater;

    if (collOptions.proto) {
      inflater = collOptions.inflate || Utils.copyProperties;

      return function (data) {
        var collObj = new (collOptions.proto)();
        inflater(data, collObj);
        return collObj;
      };
    }

    return collOptions.inflate;
  }

  for (i; i < len; i += 1) {
    coll = dbObject.collections[i];

    copyColl = this.addCollection(coll.name, {
      disableChangesApi: coll.disableChangesApi,
      disableDeltaChangesApi: coll.disableDeltaChangesApi,
      disableMeta: coll.disableMeta,
      // serialized dbs predating freeze support default to freeze disabled
      disableFreeze: coll.hasOwnProperty('disableFreeze') ? coll.disableFreeze : true
    });

    // carry over per-collection settings from the serialized form
    copyColl.adaptiveBinaryIndices = coll.hasOwnProperty('adaptiveBinaryIndices') ? (coll.adaptiveBinaryIndices === true) : false;
    copyColl.transactional = coll.transactional;
    copyColl.asyncListeners = coll.asyncListeners;
    copyColl.cloneObjects = coll.cloneObjects;
    copyColl.cloneMethod = coll.cloneMethod || "parse-stringify";
    copyColl.autoupdate = coll.autoupdate;
    copyColl.changes = coll.changes;
    copyColl.dirtyIds = coll.dirtyIds || [];

    if (options && options.retainDirtyFlags === true) {
      copyColl.dirty = coll.dirty;
    }
    else {
      copyColl.dirty = false;
    }

    if (coll.getData) {
      // lazy-load path: data is materialized on first property access; this is
      // incompatible with custom inflation, freezing, and autoupdate
      if ((options && options.hasOwnProperty(coll.name)) || !copyColl.disableFreeze || copyColl.autoupdate) {
        throw new Error("this collection cannot be loaded lazily: " + coll.name);
      }
      copyColl.getData = coll.getData;
      Object.defineProperty(copyColl, 'data', {
        /* jshint loopfunc:true */
        get: function () {
          // materialize once, then redefine 'data' as a plain value property
          var data = this.getData();
          this.getData = null;
          Object.defineProperty(this, 'data', { value: data });
          return data;
        }
        /* jshint loopfunc:false */
      });
    } else {
      // load each element individually
      clen = coll.data.length;
      j = 0;
      if (options && options.hasOwnProperty(coll.name)) {
        // caller supplied per-collection inflation options (proto/inflate)
        loader = makeLoader(coll);

        for (j; j < clen; j++) {
          collObj = loader(coll.data[j]);
          copyColl.data[j] = collObj;
          copyColl.addAutoUpdateObserver(collObj);
          if (!copyColl.disableFreeze) {
            deepFreeze(copyColl.data[j]);
          }
        }
      } else {
        for (j; j < clen; j++) {
          copyColl.data[j] = coll.data[j];
          copyColl.addAutoUpdateObserver(copyColl.data[j]);
          if (!copyColl.disableFreeze) {
            deepFreeze(copyColl.data[j]);
          }
        }
      }
    }

    copyColl.maxId = (typeof coll.maxId === 'undefined') ? 0 : coll.maxId;
    if (typeof (coll.binaryIndices) !== 'undefined') {
      copyColl.binaryIndices = coll.binaryIndices;
    }
    if (typeof coll.transforms !== 'undefined') {
      copyColl.transforms = coll.transforms;
    }

    // regenerate unique indexes
    copyColl.uniqueNames = [];
    if (coll.hasOwnProperty("uniqueNames")) {
      copyColl.uniqueNames = coll.uniqueNames;
    }

    // in case they are loading a database created before we added dynamic views, handle undefined
    if (typeof (coll.DynamicViews) === 'undefined') continue;

    // reinflate DynamicViews and attached Resultsets
    for (var idx = 0; idx < coll.DynamicViews.length; idx++) {
      var colldv = coll.DynamicViews[idx];

      var dv = copyColl.addDynamicView(colldv.name, colldv.options);
      dv.resultdata = colldv.resultdata;
      dv.resultsdirty = colldv.resultsdirty;
      dv.filterPipeline = colldv.filterPipeline;
      dv.sortCriteriaSimple = colldv.sortCriteriaSimple;
      dv.sortCriteria = colldv.sortCriteria;
      // sort functions are not serializable; callers must re-apply after load
      dv.sortFunction = null;
      dv.sortDirty = colldv.sortDirty;
      if (!copyColl.disableFreeze) {
        deepFreeze(dv.filterPipeline);
        if (dv.sortCriteriaSimple) {
          deepFreeze(dv.sortCriteriaSimple);
        } else if (dv.sortCriteria) {
          deepFreeze(dv.sortCriteria);
        }
      }
      dv.resultset.filteredrows = colldv.resultset.filteredrows;
      dv.resultset.filterInitialized = colldv.resultset.filterInitialized;
      dv.rematerialize({
        removeWhereFilters: true
      });
    }

    // Upgrade Logic for binary index refactoring at version 1.5
    if (dbObject.databaseVersion < 1.5) {
      // rebuild all indices
      copyColl.ensureAllIndexes(true);
      copyColl.dirty = true;
    }
  }
};
/**
* Emits the close event. In autosave scenarios, if the database is dirty, this will save and disable timer.
* Does not actually destroy the db.
*
* @param {function=} callback - (Optional) if supplied will be registered with close event before emitting.
* @memberof ControlDB
*/
ControlDB.prototype.close = function (callback) {
  var pendingCallback = callback;

  // for autosave scenarios, we will let close perform final save (if dirty);
  // web apps might invoke this from window.onbeforeunload to flush pending changes
  if (this.autosave) {
    this.autosaveDisable();
    if (this.autosaveDirty()) {
      // hand the callback to saveDatabase rather than the close event
      this.saveDatabase(pendingCallback);
      pendingCallback = undefined;
    }
  }

  if (pendingCallback) {
    this.on('close', pendingCallback);
  }
  this.emit('close');
};
/**-------------------------+
| Changes API |
+--------------------------*/
/**
* The Changes API enables the tracking the changes occurred in the collections since the beginning of the session,
* so it's possible to create a differential dataset for synchronization purposes (possibly to a remote db)
*/
/**
* (Changes API) : takes all the changes stored in each
* collection and creates a single array for the entire database. If an array of names
* of collections is passed then only the included collections will be tracked.
*
* @param {array=} arrayOfCollectionNames - optional array of collection names. No arg means all collections are processed.
* @returns {array} array of changes
* @see private method createChange() in Collection
* @memberof ControlDB
*/
ControlDB.prototype.generateChangesNotification = function (arrayOfCollectionNames) {
  var changes = [];

  // default to every collection when no explicit list was provided
  var selectedNames = arrayOfCollectionNames || this.collections.map(function (coll) {
    return coll.name;
  });

  this.collections.forEach(function (coll) {
    if (selectedNames.indexOf(coll.name) !== -1) {
      changes = changes.concat(coll.getChanges());
    }
  });

  return changes;
};
/**
* (Changes API) - stringify changes for network transmission
* @returns {string} string representation of the changes
* @memberof ControlDB
*/
ControlDB.prototype.serializeChanges = function (collectionNamesArray) {
  // gather the change records first, then stringify for transmission
  var changes = this.generateChangesNotification(collectionNamesArray);
  return JSON.stringify(changes);
};
/**
* (Changes API) : clears all the changes in all collections.
* @memberof ControlDB
*/
ControlDB.prototype.clearChanges = function () {
  var idx;

  for (idx = 0; idx < this.collections.length; idx++) {
    var coll = this.collections[idx];
    // flushChanges may be absent on externally loaded collections
    if (coll.flushChanges) {
      coll.flushChanges();
    }
  }
};
/*------------------+
| PERSISTENCE |
-------------------*/
/** there are two build in persistence adapters for internal use
* fs for use in Nodejs type environments
* localStorage for use in browser environment
* defined as helper classes here so its easy and clean to use
*/
/**
* In in-memory persistence adapter for an in-memory database.
* This simple 'key/value' adapter is intended for unit testing and diagnostics.
*
* @param {object=} options - memory adapter options
* @param {boolean} [options.asyncResponses=false] - whether callbacks are invoked asynchronously
* @param {int} [options.asyncTimeout=50] - timeout in ms to queue callbacks
* @constructor ControlDBMemoryAdapter
*/
function ControlDBMemoryAdapter(options) {
  var opts = options || {};

  // callbacks are synchronous unless asyncResponses is explicitly enabled
  if (!opts.hasOwnProperty('asyncResponses')) {
    opts.asyncResponses = false;
  }
  // delay (in ms) used to schedule callbacks when asyncResponses is on
  if (!opts.hasOwnProperty('asyncTimeout')) {
    opts.asyncTimeout = 50; // 50 ms default
  }

  this.hashStore = {};
  this.options = opts;
}
/**
* Loads a serialized database from its in-memory store.
* (ControlDB persistence adapter interface function)
*
* @param {string} dbname - name of the database (filename/keyname)
* @param {function} callback - adapter callback to return load result to caller
* @memberof ControlDBMemoryAdapter
*/
ControlDBMemoryAdapter.prototype.loadDatabase = function (dbname, callback) {
  var self = this;

  // shared response logic for both sync and async paths
  function respond() {
    if (self.hashStore.hasOwnProperty(dbname)) {
      callback(self.hashStore[dbname].value);
    } else {
      // database doesn't exist, return falsy
      callback(null);
    }
  }

  if (this.options.asyncResponses) {
    setTimeout(respond, this.options.asyncTimeout);
  } else {
    respond();
  }
};
/**
* Saves a serialized database to its in-memory store.
* (ControlDB persistence adapter interface function)
*
* @param {string} dbname - name of the database (filename/keyname)
* @param {function} callback - adapter callback to return load result to caller
* @memberof ControlDBMemoryAdapter
*/
ControlDBMemoryAdapter.prototype.saveDatabase = function (dbname, dbstring, callback) {
  var self = this;

  // shared persistence logic for both sync and async paths
  function persist() {
    var priorCount = self.hashStore.hasOwnProperty(dbname) ? self.hashStore[dbname].savecount : 0;
    self.hashStore[dbname] = {
      savecount: priorCount + 1,
      lastsave: new Date(),
      value: dbstring
    };
    callback();
  }

  if (this.options.asyncResponses) {
    setTimeout(persist, this.options.asyncTimeout);
  } else {
    persist();
  }
};
/**
* Deletes a database from its in-memory store.
*
* @param {string} dbname - name of the database (filename/keyname)
* @param {function} callback - function to call when done
* @memberof ControlDBMemoryAdapter
*/
ControlDBMemoryAdapter.prototype.deleteDatabase = function (dbname, callback) {
  // deleting an absent key is a harmless no-op
  delete this.hashStore[dbname];

  if (typeof callback === "function") {
    callback();
  }
};
/**
* An adapter for adapters. Converts a non reference mode adapter into a reference mode adapter
* which can perform destructuring and partitioning. Each collection will be stored in its own key/save and
* only dirty collections will be saved. If you turn on paging with default page size of 25megs and save
* a 75 meg collection it should use up roughly 3 save slots (key/value pairs sent to inner adapter).
* A dirty collection that spans three pages will save all three pages again
* Paging mode was added mainly because Chrome has issues saving 'too large' of a string within a
* single indexeddb row. If a single document update causes the collection to be flagged as dirty, all
* of that collection's pages will be written on next save.
*
* @param {object} adapter - reference to a 'non-reference' mode control adapter instance.
* @param {object=} options - configuration options for partitioning and paging
* @param {bool} options.paging - (default: false) set to true to enable paging collection data.
* @param {int} options.pageSize - (default : 25MB) you can use this to limit size of strings passed to inner adapter.
* @param {string} options.delimiter - allows you to override the default delimiter
* @constructor ControlDBPartitioningAdapter
*/
function ControlDBPartitioningAdapter(adapter, options) {
  this.mode = "reference";
  this.adapter = null;
  this.options = options || {};
  this.dbref = null;
  this.dbname = "";
  this.pageIterator = {};

  // verify user passed an appropriate (non-reference mode) adapter
  if (!adapter) {
    throw new Error("ControlDBPartitioningAdapter requires a (non-reference mode) adapter on construction");
  }
  if (adapter.mode === "reference") {
    throw new Error("ControlDBPartitioningAdapter cannot be instantiated with a reference mode adapter");
  }
  this.adapter = adapter;

  // collection paging is opt-in
  if (!this.options.hasOwnProperty("paging")) {
    this.options.paging = false;
  }
  // default to page size of 25 megs (can be up to your largest serialized object size larger than this)
  if (!this.options.hasOwnProperty("pageSize")) {
    this.options.pageSize = 25 * 1024 * 1024;
  }
  if (!this.options.hasOwnProperty("delimiter")) {
    this.options.delimiter = '$<\n';
  }
}
/**
* Loads a database which was partitioned into several key/value saves.
* (ControlDB persistence adapter interface function)
*
* @param {string} dbname - name of the database (filename/keyname)
* @param {function} callback - adapter callback to return load result to caller
* @memberof ControlDBPartitioningAdapter
*/
ControlDBPartitioningAdapter.prototype.loadDatabase = function (dbname, callback) {
  var self = this;
  this.dbname = dbname;
  this.dbref = new ControlDB(dbname);

  // load the db container (without data)
  this.adapter.loadDatabase(dbname, function (result) {
    // empty database condition is for inner adapter return null/undefined/falsy
    if (!result) {
      // partition 0 not found so new database, no need to try to load other partitions.
      // return same falsy result to loadDatabase to signify no database exists (yet)
      callback(result);
      return;
    }
    if (typeof result !== "string") {
      callback(new Error("ControlDBPartitioningAdapter received an unexpected response from inner adapter loadDatabase()"));
      // bug fix: previously fell through here, calling the callback a second time
      // after attempting JSON.parse on the non-string result
      return;
    }

    // I will want to use control destructuring helper methods so i will inflate into typed instance
    var db = JSON.parse(result);
    self.dbref.loadJSONObject(db);
    db = null;

    // nothing more to load for a database with no collections
    if (self.dbref.collections.length === 0) {
      callback(self.dbref);
      return;
    }

    self.pageIterator = {
      collection: 0,
      pageIndex: 0
    };

    // load collection partitions sequentially, then hand back the inflated db
    self.loadNextPartition(0, function () {
      callback(self.dbref);
    });
  });
};
/**
* Used to sequentially load each collection partition, one at a time.
*
* @param {int} partition - ordinal collection position to load next
* @param {function} callback - adapter callback to return load result to caller
*/
ControlDBPartitioningAdapter.prototype.loadNextPartition = function (partition, callback) {
  var self = this;
  var keyname = this.dbname + "." + partition;

  // in paging mode each partition is itself split across pages; delegate to the pager
  if (this.options.paging === true) {
    this.pageIterator.pageIndex = 0;
    this.loadNextPage(callback);
    return;
  }

  this.adapter.loadDatabase(keyname, function (result) {
    self.dbref.collections[partition].data = self.dbref.deserializeCollection(result, {
      delimited: true,
      collectionIndex: partition
    });

    partition += 1;
    if (partition < self.dbref.collections.length) {
      // recurse until every collection partition has been loaded
      self.loadNextPartition(partition, callback);
    } else {
      callback();
    }
  });
};
/**
* Used to sequentially load the next page of collection partition, one at a time.
*
* @param {function} callback - adapter callback to return load result to caller
*/
ControlDBPartitioningAdapter.prototype.loadNextPage = function (callback) {
  var self = this;
  // calculate name for next saved page in sequence
  var keyname = this.dbname + "." + this.pageIterator.collection + "." + this.pageIterator.pageIndex;

  // load whatever page is next in sequence
  this.adapter.loadDatabase(keyname, function (result) {
    var data = result.split(self.options.delimiter);
    result = ""; // free up memory now that we have split it into array
    var idx;

    // a trailing empty string element marks the final page of this collection
    var isLastPage = (data[data.length - 1] === "");
    if (isLastPage) {
      data.pop();
      // empty collections are just a delimiter meaning two blank items
      if (data.length === 1 && data[data.length - 1] === "") {
        data.pop();
      }
    }

    // convert stringified array elements to object instances and push to collection data
    for (idx = 0; idx < data.length; idx++) {
      self.dbref.collections[self.pageIterator.collection].data.push(JSON.parse(data[idx]));
      data[idx] = null;
    }
    data = [];

    if (!isLastPage) {
      // more pages remain for this collection partition
      self.pageIterator.pageIndex++;
      self.loadNextPage(callback);
      return;
    }

    // last page : advance to the next collection partition, if any remain
    self.pageIterator.collection++;
    if (self.pageIterator.collection < self.dbref.collections.length) {
      self.loadNextPartition(self.pageIterator.collection, callback);
    } else {
      callback();
    }
  });
};
/**
* Saves a database by partitioning into separate key/value saves.
* (ControlDB 'reference mode' persistence adapter interface function)
*
* @param {string} dbname - name of the database (filename/keyname)
* @param {object} dbref - reference to database which we will partition and save.
* @param {function} callback - adapter callback to return save result to caller
*
* @memberof ControlDBPartitioningAdapter
*/
ControlDBPartitioningAdapter.prototype.exportDatabase = function (dbname, dbref, callback) {
  // retain references for the per-partition save chain
  var idx, clen = dbref.collections.length;
  this.dbref = dbref;
  this.dbname = dbname;
  // queue up dirty partitions to be saved; -1 represents the db container itself
  this.dirtyPartitions = [-1];
  for (idx = 0; idx < clen; idx++) {
    if (dbref.collections[idx].dirty) {
      this.dirtyPartitions.push(idx);
    }
  }
  // save partitions sequentially; callback receives the final (err) directly
  // (previous code wrapped callback in an identical pass-through closure and
  // declared an unused `self` variable)
  this.saveNextPartition(callback);
};
/**
* Helper method used internally to save each dirty collection, one at a time.
*
* @param {function} callback - adapter callback to return load result to caller
*/
ControlDBPartitioningAdapter.prototype.saveNextPartition = function (callback) {
  var self = this;
  var partition = this.dirtyPartitions.shift();
  // partition -1 is the database container itself (no ordinal suffix)
  var keyname = this.dbname + ((partition === -1) ? "" : ("." + partition));
  // if we are doing paging and this is a collection partition
  if (this.options.paging && partition !== -1) {
    this.pageIterator = {
      collection: partition,
      docIndex: 0,
      pageIndex: 0
    };
    // since saveNextPage recursively calls itself until done, our callback means this whole paged partition is finished
    this.saveNextPage(function (err) {
      // bug fix: abort on error instead of silently dropping it and saving
      // remaining partitions (now consistent with the non-paged branch below)
      if (err) {
        callback(err);
        return;
      }
      if (self.dirtyPartitions.length === 0) {
        callback(null);
      }
      else {
        self.saveNextPartition(callback);
      }
    });
    return;
  }
  // otherwise this is 'non-paged' partitioning...
  var result = this.dbref.serializeDestructured({
    partitioned: true,
    delimited: true,
    partition: partition
  });
  this.adapter.saveDatabase(keyname, result, function (err) {
    if (err) {
      callback(err);
      return;
    }
    if (self.dirtyPartitions.length === 0) {
      callback(null);
    }
    else {
      self.saveNextPartition(callback);
    }
  });
};
/**
* Helper method used internally to generate and save the next page of the current (dirty) partition.
*
* @param {function} callback - adapter callback to return load result to caller
*/
ControlDBPartitioningAdapter.prototype.saveNextPage = function (callback) {
  var self = this;
  var coll = this.dbref.collections[this.pageIterator.collection];
  var keyname = this.dbname + "." + this.pageIterator.collection + "." + this.pageIterator.pageIndex;
  var pageLen = 0,
    cdlen = coll.data.length,
    delimlen = this.options.delimiter.length;
  var serializedObject = "",
    pageBuilder = "";
  var doneWithPartition = false,
    doneWithPage = false;
  var pageSaveCallback = function (err) {
    pageBuilder = "";
    if (err) {
      callback(err);
      // bug fix: previously fell through after reporting the error, so the
      // callback was invoked a second time (or the save recursed) on failure
      return;
    }
    // update meta properties then continue process by invoking callback
    if (doneWithPartition) {
      callback(null);
    }
    else {
      self.pageIterator.pageIndex++;
      self.saveNextPage(callback);
    }
  };
  if (coll.data.length === 0) {
    doneWithPartition = true;
  }
  while (true) {
    if (!doneWithPartition) {
      // serialize object
      serializedObject = JSON.stringify(coll.data[this.pageIterator.docIndex]);
      pageBuilder += serializedObject;
      pageLen += serializedObject.length;
      // if no more documents in collection to add, we are done with partition
      if (++this.pageIterator.docIndex >= cdlen) doneWithPartition = true;
    }
    // if our current page is bigger than defined pageSize, we are done with page
    if (pageLen >= this.options.pageSize) doneWithPage = true;
    // if not done with current page, need delimiter before next item
    // if done with partition we also want a delimiter to indicate 'end of pages' final empty row
    if (!doneWithPage || doneWithPartition) {
      pageBuilder += this.options.delimiter;
      pageLen += delimlen;
    }
    // if we are done with page save it and pass off to next recursive call or callback
    if (doneWithPartition || doneWithPage) {
      this.adapter.saveDatabase(keyname, pageBuilder, pageSaveCallback);
      return;
    }
  }
};
/**
* A control persistence adapter which persists using node fs module
* @constructor ControlDBFsAdapter
*/
function ControlDBFsAdapter() {
  // the 'fs' module only resolves under node; any other environment
  // (e.g. a browser bundle) lands in the catch and leaves fs null
  var nodeFs = null;
  try {
    nodeFs = require('fs');
  } catch (e) {
    nodeFs = null;
  }
  this.fs = nodeFs;
}
/**
* loadDatabase() - Load data from file; the callback receives null if the file does not exist
* @param {string} dbname - the filename of the database to load
* @param {function} callback - the callback to handle the result
* @memberof ControlDBFsAdapter
*/
ControlDBFsAdapter.prototype.loadDatabase = function loadDatabase(dbname, callback) {
  var self = this;
  this.fs.stat(dbname, function (err, stats) {
    if (!err && stats.isFile()) {
      self.fs.readFile(dbname, {
        encoding: 'utf8'
      }, function readFileCallback(err, data) {
        if (err) {
          // pass the original Error through (preserves stack and err.code);
          // previously this was wrapped as new Error(err), which stringified
          // the message and discarded the fs error details
          callback(err);
        } else {
          callback(data);
        }
      });
    }
    else {
      // missing file (or failed stat): treated as a new/empty database by the loader
      callback(null);
    }
  });
};
/**
* saveDatabase() - save data to file, will throw an error if the file can't be saved
* might want to expand this to avoid dataloss on partial save
* @param {string} dbname - the filename of the database to load
* @param {function} callback - the callback to handle the result
* @memberof ControlDBFsAdapter
*/
ControlDBFsAdapter.prototype.saveDatabase = function saveDatabase(dbname, dbstring, callback) {
  var self = this;
  // write to a temp file first, then rename it over the target, so a partial
  // write cannot clobber the previous good database file
  var tmpdbname = dbname + '~';
  this.fs.writeFile(tmpdbname, dbstring, function writeFileCallback(err) {
    if (err) {
      // pass the original Error through (consistent with the rename path,
      // which already forwards the raw err to callback); previously this was
      // wrapped as new Error(err), stringifying the message
      callback(err);
    } else {
      self.fs.rename(tmpdbname, dbname, callback);
    }
  });
};
/**
* deleteDatabase() - delete the database file, will throw an error if the
* file can't be deleted
* @param {string} dbname - the filename of the database to delete
* @param {function} callback - the callback to handle the result
* @memberof ControlDBFsAdapter
*/
ControlDBFsAdapter.prototype.deleteDatabase = function deleteDatabase(dbname, callback) {
  this.fs.unlink(dbname, function deleteDatabaseCallback(err) {
    if (err) {
      // pass the original Error through rather than re-wrapping it
      // (preserves stack and err.code, matches the other fixed adapter paths)
      callback(err);
    } else {
      callback();
    }
  });
};
/**
* A control persistence adapter which persists to web browser's local storage object
* @constructor ControlDBLocalStorageAdapter
*/
function ControlDBLocalStorageAdapter() { }
/**
* loadDatabase() - Load data from localstorage
* @param {string} dbname - the name of the database to load
* @param {function} callback - the callback to handle the result
* @memberof ControlDBLocalStorageAdapter
*/
ControlDBLocalStorageAdapter.prototype.loadDatabase = function loadDatabase(dbname, callback) {
  // bail out early when the environment offers no localStorage
  if (!localStorageAvailable()) {
    callback(new Error('localStorage is not available'));
    return;
  }
  callback(localStorage.getItem(dbname));
};
/**
* saveDatabase() - save data to localstorage, will throw an error if the file can't be saved
* might want to expand this to avoid dataloss on partial save
* @param {string} dbname - the filename of the database to load
* @param {function} callback - the callback to handle the result
* @memberof ControlDBLocalStorageAdapter
*/
ControlDBLocalStorageAdapter.prototype.saveDatabase = function saveDatabase(dbname, dbstring, callback) {
  // bail out early when the environment offers no localStorage
  if (!localStorageAvailable()) {
    callback(new Error('localStorage is not available'));
    return;
  }
  localStorage.setItem(dbname, dbstring);
  callback(null);
};
/**
* deleteDatabase() - delete the database from localstorage, will throw an error if it
* can't be deleted
* @param {string} dbname - the filename of the database to delete
* @param {function} callback - the callback to handle the result
* @memberof ControlDBLocalStorageAdapter
*/
ControlDBLocalStorageAdapter.prototype.deleteDatabase = function deleteDatabase(dbname, callback) {
  // bail out early when the environment offers no localStorage
  if (!localStorageAvailable()) {
    callback(new Error('localStorage is not available'));
    return;
  }
  localStorage.removeItem(dbname);
  callback(null);
};
/**
* Wait for throttledSaves to complete and invoke your callback when drained or duration is met.
*
* @param {function} callback - callback to fire when save queue is drained, it is passed a success parameter value
* @param {object=} options - configuration options
* @param {boolean} options.recursiveWait - (default: true) if after queue is drained, another save was kicked off, wait for it
* @param {boolean} options.recursiveWaitLimit - (default: false) limit our recursive waiting to a duration
* @param {int} options.recursiveWaitLimitDuration - (default: 2000) cutoff in ms to stop recursively re-draining
* @memberof ControlDB
*/
ControlDB.prototype.throttledSaveDrain = function (callback, options) {
  var self = this;
  var now = (new Date()).getTime();
  // nothing to drain when throttled saves are disabled entirely
  if (!this.throttledSaves) {
    callback(true);
    // bug fix: previously fell through here and could invoke callback a
    // second time further down
    return;
  }
  options = options || {};
  if (!options.hasOwnProperty('recursiveWait')) {
    options.recursiveWait = true;
  }
  if (!options.hasOwnProperty('recursiveWaitLimit')) {
    options.recursiveWaitLimit = false;
  }
  if (!options.hasOwnProperty('recursiveWaitLimitDuration')) {
    options.recursiveWaitLimitDuration = 2000;
  }
  if (!options.hasOwnProperty('started')) {
    // remember when the very first drain attempt began, so the wait limit
    // spans recursive re-drains (options is passed back into recursion)
    options.started = (new Date()).getTime();
  }
  // if save is pending
  if (this.throttledSaves && this.throttledSavePending) {
    // if we want to wait until we are in a state where there are no pending saves at all
    if (options.recursiveWait) {
      // queue the following meta callback for when it completes
      this.throttledCallbacks.push(function () {
        // if there is now another save pending...
        if (self.throttledSavePending) {
          // if we wish to wait only so long and we have exceeded limit of our waiting, callback with false success value
          if (options.recursiveWaitLimit && (now - options.started > options.recursiveWaitLimitDuration)) {
            callback(false);
            return;
          }
          // it must be ok to wait on next queue drain
          self.throttledSaveDrain(callback, options);
          return;
        }
        // no pending saves so callback with true success
        callback(true);
        return;
      });
    }
    // just notify when current queue is depleted
    else {
      this.throttledCallbacks.push(callback);
      return;
    }
  }
  // no save pending, just callback
  else {
    callback(true);
  }
};
/**
* Internal load logic, decoupled from throttling/contention logic
*
* @param {object} options - not currently used (remove or allow overrides?)
* @param {function=} callback - (Optional) user supplied async callback / error handler
*/
ControlDB.prototype.loadDatabaseInternal = function (options, callback) {
// default handler rethrows when the caller supplied no callback
var cFun = callback || function (err, data) {
if (err) {
throw err;
}
},
self = this;
// the persistenceAdapter should be present if all is ok, but check to be sure.
if (this.persistenceAdapter !== null) {
this.persistenceAdapter.loadDatabase(this.filename, function loadDatabaseCallback(dbString) {
// a string result is a serialized database to be parsed
if (typeof (dbString) === 'string') {
var parseSuccess = false;
try {
self.loadJSON(dbString, options || {});
parseSuccess = true;
} catch (err) {
cFun(err);
}
// success path kept outside the try so a throwing callback/listener is
// not mistaken for a parse failure
if (parseSuccess) {
cFun(null);
self.emit('loaded', 'database ' + self.filename + ' loaded');
}
} else {
// falsy result means new database
if (!dbString) {
cFun(null);
self.emit('loaded', 'empty database ' + self.filename + ' loaded');
return;
}
// instanceof error means load faulted
if (dbString instanceof Error) {
cFun(dbString);
return;
}
// if adapter has returned an js object (other than null or error) attempt to load from JSON object
if (typeof (dbString) === "object") {
self.loadJSONObject(dbString, options || {});
cFun(null); // return null on success
self.emit('loaded', 'database ' + self.filename + ' loaded');
return;
}
// any other value (number, boolean, ...) violates the adapter contract
cFun("unexpected adapter response : " + dbString);
}
});
} else {
cFun(new Error('persistenceAdapter not configured'));
}
};
/**
* Handles manually loading from file system, local storage, or adapter (such as indexeddb)
* This method utilizes control configuration options (if provided) to determine which
* persistence method to use, or environment detection (if configuration was not provided).
* To avoid contention with any throttledSaves, we will drain the save queue first.
*
* If you are configured with autosave, you do not need to call this method yourself.
*
* @param {object} options - if throttling saves and loads, this controls how we drain save queue before loading
* @param {boolean} options.recursiveWait - (default: true) wait recursively until no saves are queued
* @param {boolean} options.recursiveWaitLimit - (default: false) limit our recursive waiting to a duration
* @param {int} options.recursiveWaitLimitDuration - (default: 2000) cutoff in ms to stop recursively re-draining
* @param {function=} callback - (Optional) user supplied async callback / error handler
* @memberof ControlDB
* @example
* db.loadDatabase({}, function(err) {
* if (err) {
* console.log("error : " + err);
* }
* else {
* console.log("database loaded.");
* }
* });
*/
ControlDB.prototype.loadDatabase = function (options, callback) {
  var self = this;
  // with throttling off there is no save queue to coordinate with
  if (!this.throttledSaves) {
    this.loadDatabaseInternal(options, callback);
    return;
  }
  // drain pending saves first so the load cannot race an in-flight save
  this.throttledSaveDrain(function (success) {
    if (!success) {
      if (typeof callback === 'function') {
        callback(new Error("Unable to pause save throttling long enough to read database"));
      }
      return;
    }
    // hold off further saves while the load is in progress
    self.throttledSavePending = true;
    self.loadDatabaseInternal(options, function (err) {
      if (self.throttledCallbacks.length === 0) {
        // no save requests arrived during the load; release the latch
        self.throttledSavePending = false;
      } else {
        // save requests queued up while loading; kick off a save to resume them
        self.saveDatabase();
      }
      if (typeof callback === 'function') {
        callback(err);
      }
    });
  }, options);
};
/**
* Internal save logic, decoupled from save throttling logic
*/
ControlDB.prototype.saveDatabaseInternal = function (callback) {
// default handler rethrows when the caller supplied no callback
var cFun = callback || function (err) {
if (err) {
throw err;
}
return;
};
var self = this;
// the persistenceAdapter should be present if all is ok, but check to be sure.
if (!this.persistenceAdapter) {
cFun(new Error('persistenceAdapter not configured'));
return;
}
// run incremental, reference, or normal mode adapters, depending on what's available
if (this.persistenceAdapter.mode === "incremental") {
// cachedDirty doubles as a 'copy already taken' guard and the rollback record
var cachedDirty;
// ignore autosave until we copy control (only then we can clear dirty flags,
// but if we don't do it now, autosave will be triggered a lot unnecessarily)
this.ignoreAutosave = true;
this.persistenceAdapter.saveDatabase(
this.filename,
function getControlCopy() {
self.ignoreAutosave = false;
// the adapter must request the copy at most once per save
if (cachedDirty) {
cFun(new Error('adapter error - getControlCopy called more than once'));
return;
}
var controlCopy = self.copy({ removeNonSerializable: true });
// remember and clear dirty ids -- we must do it before the save so that if
// and update occurs between here and callback, it will get saved later
cachedDirty = self.collections.map(function (collection) {
return [collection.dirty, collection.dirtyIds];
});
self.collections.forEach(function (col) {
col.dirty = false;
col.dirtyIds = [];
});
return controlCopy;
},
function exportDatabaseCallback(err) {
self.ignoreAutosave = false;
if (err && cachedDirty) {
// roll back dirty IDs to be saved later
self.collections.forEach(function (col, i) {
var cached = cachedDirty[i];
col.dirty = col.dirty || cached[0];
col.dirtyIds = col.dirtyIds.concat(cached[1]);
});
}
cFun(err);
});
} else if (this.persistenceAdapter.mode === "reference" && typeof this.persistenceAdapter.exportDatabase === "function") {
// TODO: dirty should be cleared here
// filename may seem redundant but loadDatabase will need to expect this same filename
this.persistenceAdapter.exportDatabase(this.filename, this.copy({ removeNonSerializable: true }), function exportDatabaseCallback(err) {
self.autosaveClearFlags();
cFun(err);
});
}
// otherwise just pass the serialized database to adapter
else {
// persistenceAdapter might be asynchronous, so we must clear `dirty` immediately
// or autosave won't work if an update occurs between here and the callback
// TODO: This should be stored and rolled back in case of DB save failure
this.autosaveClearFlags();
this.persistenceAdapter.saveDatabase(this.filename, this.serialize(), function saveDatabasecallback(err) {
cFun(err);
});
}
};
/**
* Handles manually saving to file system, local storage, or adapter (such as indexeddb)
* This method utilizes control configuration options (if provided) to determine which
* persistence method to use, or environment detection (if configuration was not provided).
*
* If you are configured with autosave, you do not need to call this method yourself.
*
* @param {function=} callback - (Optional) user supplied async callback / error handler
* @memberof ControlDB
* @example
* db.saveDatabase(function(err) {
* if (err) {
* console.log("error : " + err);
* }
* else {
* console.log("database saved.");
* }
* });
*/
ControlDB.prototype.saveDatabase = function (callback) {
  var self = this;
  // throttling disabled: save directly
  if (!this.throttledSaves) {
    this.saveDatabaseInternal(callback);
    return;
  }
  // a save is already running: park this request's callback for the next cycle
  if (this.throttledSavePending) {
    this.throttledCallbacks.push(callback);
    return;
  }
  // claim the currently queued callbacks (plus our own) and start a save
  var localCallbacks = this.throttledCallbacks;
  this.throttledCallbacks = [];
  localCallbacks.unshift(callback);
  this.throttledSavePending = true;
  this.saveDatabaseInternal(function (err) {
    self.throttledSavePending = false;
    localCallbacks.forEach(function (pcb) {
      if (typeof pcb === 'function') {
        // defer so callbacks run only after this method has fully returned
        setTimeout(function () {
          pcb(err);
        }, 1);
      }
    });
    // requests that arrived while saving trigger the next save cycle
    if (self.throttledCallbacks.length > 0) {
      self.saveDatabase();
    }
  });
};
// alias of saveDatabase()
ControlDB.prototype.save = ControlDB.prototype.saveDatabase;
/**
* Handles deleting a database from file system, local
* storage, or adapter (indexeddb)
* This method utilizes control configuration options (if provided) to determine which
* persistence method to use, or environment detection (if configuration was not provided).
*
* @param {function=} callback - (Optional) user supplied async callback / error handler
* @memberof ControlDB
*/
ControlDB.prototype.deleteDatabase = function (options, callback) {
  // default handler rethrows when the caller supplied no callback
  var cFun = callback || function (err, data) {
    if (err) {
      throw err;
    }
  };
  // options are unused, so tolerate deleteDatabase(callback) call syntax
  if (typeof options === 'function' && !callback) {
    cFun = options;
  }
  // the persistenceAdapter should be present if all is ok, but check to be sure
  if (this.persistenceAdapter === null) {
    cFun(new Error('persistenceAdapter not configured'));
    return;
  }
  this.persistenceAdapter.deleteDatabase(this.filename, function deleteDatabaseCallback(err) {
    cFun(err);
  });
};
/**
* autosaveDirty - check whether any collections are 'dirty' meaning we need to save (entire) database
*
* @returns {boolean} - true if database has changed since last autosave, false if not.
*/
ControlDB.prototype.autosaveDirty = function () {
  // the database is dirty if any one collection has unsaved changes
  return this.collections.some(function (coll) {
    return coll.dirty;
  });
};
/**
* autosaveClearFlags - resets dirty flags on all collections.
* Called from saveDatabase() after db is saved.
*
*/
ControlDB.prototype.autosaveClearFlags = function () {
  // mark every collection as persisted
  this.collections.forEach(function (coll) {
    coll.dirty = false;
  });
};
/**
* autosaveEnable - begin a javascript interval to periodically save the database.
*
* @param {object} options - not currently used (remove or allow overrides?)
* @param {function=} callback - (Optional) user supplied async callback
*/
ControlDB.prototype.autosaveEnable = function (options, callback) {
  this.autosave = true;
  var self = this;
  // honor a configured interval; otherwise poll every 5 seconds
  var delay = 5000;
  if (this.autosaveInterval !== undefined && this.autosaveInterval !== null) {
    delay = this.autosaveInterval;
  }
  this.autosaveHandle = setInterval(function autosaveHandleInterval() {
    // dirty state is tracked per collection; autosaveDirty() scans them all.
    // skip while ignoreAutosave is set (an incremental save is mid-copy).
    if (self.autosaveDirty() && !self.ignoreAutosave) {
      self.saveDatabase(callback);
    }
  }, delay);
};
/**
* autosaveDisable - stop the autosave interval timer.
*
*/
ControlDB.prototype.autosaveDisable = function () {
  var handle = this.autosaveHandle;
  // clear only if an interval timer is actually active
  if (handle !== undefined && handle !== null) {
    clearInterval(handle);
    this.autosaveHandle = null;
  }
};
/**
* Resultset class allowing chainable queries. Intended to be instanced internally.
* Collection.find(), Collection.where(), and Collection.chain() instantiate this.
*
* @example
* mycollection.chain()
* .find({ 'doors' : 4 })
* .where(function(obj) { return obj.name === 'Toyota' })
* .data();
*
* @constructor Resultset
* @param {Collection} collection - The collection which this Resultset will query against.
*/
function Resultset(collection, options) {
  options = options || {};
  // retain reference to collection we are querying against
  this.collection = collection;
  // ordinal positions (into collection.data) of currently matched documents
  this.filteredrows = [];
  // stays false until a filter operation populates filteredrows
  this.filterInitialized = false;
  return this;
}
/**
* reset() - Reset the resultset to its initial state.
*
* @returns {Resultset} Reference to this resultset, for future chain operations.
*/
Resultset.prototype.reset = function () {
  // discard accumulated matches (keep the existing empty array if untouched)
  if (this.filteredrows.length > 0) {
    this.filteredrows = [];
  }
  this.filterInitialized = false;
  return this;
};
/**
* toJSON() - Override of toJSON to avoid circular references
*
*/
Resultset.prototype.toJSON = function () {
  // serialize a detached copy so the collection back-reference cannot recurse
  var detached = this.copy();
  detached.collection = null;
  return detached;
};
/**
* Allows you to limit the number of documents passed to next chain operation.
* A resultset copy() is made to avoid altering original resultset.
*
* @param {int} qty - The number of documents to return.
* @returns {Resultset} Returns a copy of the resultset, limited by qty, for subsequent chain ops.
* @memberof Resultset
* // find the two oldest users
* var result = users.chain().simplesort("age", true).limit(2).data();
*/
Resultset.prototype.limit = function (qty) {
  // lazily materialize the full document index when no filter has run yet
  if (!this.filterInitialized && this.filteredrows.length === 0) {
    this.filteredrows = this.collection.prepareFullDocIndex();
  }
  // return a limited copy so the original resultset is left untouched
  var limited = new Resultset(this.collection);
  limited.filteredrows = this.filteredrows.slice(0, qty);
  limited.filterInitialized = true;
  return limited;
};
/**
* Used for skipping 'pos' number of documents in the resultset.
*
* @param {int} pos - Number of documents to skip; all preceding documents are filtered out.
* @returns {Resultset} Returns a copy of the resultset, containing docs starting at 'pos' for subsequent chain ops.
* @memberof Resultset
* // find everyone but the two oldest users
* var result = users.chain().simplesort("age", true).offset(2).data();
*/
Resultset.prototype.offset = function (pos) {
  // lazily materialize the full document index when no filter has run yet
  if (!this.filterInitialized && this.filteredrows.length === 0) {
    this.filteredrows = this.collection.prepareFullDocIndex();
  }
  // return a skipped copy so the original resultset is left untouched
  var skipped = new Resultset(this.collection);
  skipped.filteredrows = this.filteredrows.slice(pos);
  skipped.filterInitialized = true;
  return skipped;
};
/**
* copy() - To support reuse of resultset in branched query situations.
*
* @returns {Resultset} Returns a copy of the resultset (set) but the underlying document references will be the same.
* @memberof Resultset
*/
Resultset.prototype.copy = function () {
  // shallow-copy the row positions; underlying documents remain shared
  var duplicate = new Resultset(this.collection);
  if (this.filteredrows.length > 0) {
    duplicate.filteredrows = this.filteredrows.slice();
  }
  duplicate.filterInitialized = this.filterInitialized;
  return duplicate;
};
/**
 * Alias of copy() -- branches the resultset while sharing document references.
 * @memberof Resultset
 */
Resultset.prototype.branch = Resultset.prototype.copy;
/**
* transform() - executes a named collection transform or raw array of transform steps against the resultset.
*
* @param transform {(string|array)} - name of collection transform or raw transform array
* @param parameters {object=} - (Optional) object property hash of parameters, if the transform requires them.
* @returns {Resultset} either (this) resultset or a clone of of this resultset (depending on steps)
* @memberof Resultset
* @example
* users.addTransform('CountryFilter', [
* {
* type: 'find',
* value: {
* 'country': { $eq: '[%lktxp]Country' }
* }
* },
* {
* type: 'simplesort',
* property: 'age',
* options: { desc: false}
* }
* ]);
* var results = users.chain().transform("CountryFilter", { Country: 'fr' }).data();
*/
Resultset.prototype.transform = function (transform, parameters) {
var idx,
step,
rs = this;
// if transform is name, then do lookup first
if (typeof transform === 'string') {
if (this.collection.transforms.hasOwnProperty(transform)) {
transform = this.collection.transforms[transform];
}
}
// either they passed in raw transform array or we looked it up, so process
if (typeof transform !== 'object' || !Array.isArray(transform)) {
throw new Error("Invalid transform");
}
// substitute any '[%lktxp]' parameter placeholders before executing steps
if (typeof parameters !== 'undefined') {
transform = Utils.resolveTransformParams(transform, parameters);
}
// execute each step in order; some steps mutate rs in place, others
// (limit/offset/map/eqJoin/mapReduce) return a new object we must track
for (idx = 0; idx < transform.length; idx++) {
step = transform[idx];
switch (step.type) {
case "find":
rs.find(step.value);
break;
case "where":
rs.where(step.value);
break;
case "simplesort":
rs.simplesort(step.property, step.desc || step.options);
break;
case "compoundsort":
rs.compoundsort(step.value);
break;
case "sort":
rs.sort(step.value);
break;
case "limit":
rs = rs.limit(step.value);
break; // limit makes copy so update reference
case "offset":
rs = rs.offset(step.value);
break; // offset makes copy so update reference
case "map":
rs = rs.map(step.value, step.dataOptions);
break;
case "eqJoin":
rs = rs.eqJoin(step.joinData, step.leftJoinKey, step.rightJoinKey, step.mapFun, step.dataOptions);
break;
// following cases break chain by returning array data so make any of these last in transform steps
case "mapReduce":
rs = rs.mapReduce(step.mapFunction, step.reduceFunction);
break;
// following cases update documents in current filtered resultset (use carefully)
case "update":
rs.update(step.value);
break;
case "remove":
rs.remove();
break;
default:
// unknown step types are silently skipped
break;
}
}
return rs;
};
/**
* User supplied compare function is provided two documents to compare. (chainable)
* @example
* rslt.sort(function(obj1, obj2) {
* if (obj1.name === obj2.name) return 0;
* if (obj1.name > obj2.name) return 1;
* if (obj1.name < obj2.name) return -1;
* });
*
* @param {function} comparefun - A javascript compare function used for sorting.
* @returns {Resultset} Reference to this resultset, sorted, for future chain operations.
* @memberof Resultset
*/
Resultset.prototype.sort = function (comparefun) {
  // with no filters applied yet, sort over the full document index
  if (!this.filterInitialized && this.filteredrows.length === 0) {
    this.filteredrows = this.collection.prepareFullDocIndex();
  }
  var data = this.collection.data;
  // compare the underlying documents the row positions point at
  this.filteredrows.sort(function (a, b) {
    return comparefun(data[a], data[b]);
  });
  return this;
};
/**
* Simpler, loose evaluation for user to sort based on a property name. (chainable).
* Sorting based on the same lt/gt helper functions used for binary indices.
*
* @param {string} propname - name of property to sort by.
* @param {object|bool=} options - boolean to specify if isdescending, or options object
* @param {boolean} [options.desc=false] - whether to sort descending
* @param {boolean} [options.disableIndexIntersect=false] - whether we should explicitly not use array intersection.
* @param {boolean} [options.forceIndexIntersect=false] - force array intersection (if binary index exists).
* @param {boolean} [options.useJavascriptSorting=false] - whether results are sorted via basic javascript sort.
* @returns {Resultset} Reference to this resultset, sorted, for future chain operations.
* @memberof Resultset
* @example
* var results = users.chain().simplesort('age').data();
*/
Resultset.prototype.simplesort = function (propname, options) {
  var eff,
    targetEff = 10,
    dc = this.collection.data.length,
    frl = this.filteredrows.length,
    hasBinaryIndex = this.collection.binaryIndices.hasOwnProperty(propname);
  // normalize the legacy boolean 'isdescending' argument into an options object
  if (typeof (options) === 'undefined' || options === false) {
    options = { desc: false };
  }
  if (options === true) {
    options = { desc: true };
  }
  // if nothing in filtered rows array...
  if (frl === 0) {
    // if the filter is initialized to be empty resultset, do nothing
    if (this.filterInitialized) {
      return this;
    }
    // otherwise no filters applied implies all documents, so we need to populate filteredrows first
    // if we have a binary index, we can just use that instead of sorting (again)
    // (reuse hasBinaryIndex computed above rather than repeating the hasOwnProperty lookup)
    if (hasBinaryIndex) {
      // make sure index is up-to-date
      this.collection.ensureIndex(propname);
      // copy index values into filteredrows
      this.filteredrows = this.collection.binaryIndices[propname].values.slice(0);
      if (options.desc) {
        this.filteredrows.reverse();
      }
      // we are done, return this (resultset) for further chain ops
      return this;
    }
    // otherwise initialize array for sort below
    else {
      // build full document index (to be sorted subsequently)
      this.filteredrows = this.collection.prepareFullDocIndex();
    }
  }
  // otherwise we had results to begin with, see if we qualify for index intercept optimization
  else {
    // If already filtered, but we want to leverage binary index on sort.
    // This will use custom array intersection algorithm.
    if (!options.disableIndexIntersect && hasBinaryIndex) {
      // calculate filter efficiency
      eff = dc / frl;
      // when javascript sort fallback is enabled, you generally need more than ~17% of total docs in resultset
      // before array intersect is determined to be the faster algorithm, otherwise leave at 10% for control sort.
      if (options.useJavascriptSorting) {
        targetEff = 6;
      }
      // anything more than ratio of 10:1 (total documents/current results) should use old sort code path
      // So we will only use array intersection if you have more than 10% of total docs in your current resultset.
      if (eff <= targetEff || options.forceIndexIntersect) {
        var idx, fr = this.filteredrows;
        var io = {};
        // set up hashobject for simple 'inclusion test' with existing (filtered) results
        for (idx = 0; idx < frl; idx++) {
          io[fr[idx]] = true;
        }
        // grab full sorted binary index array
        var pv = this.collection.binaryIndices[propname].values;
        // filter by existing results
        this.filteredrows = pv.filter(function (n) { return io[n]; });
        if (options.desc) {
          this.filteredrows.reverse();
        }
        return this;
      }
    }
  }
  // at this point, we will not be able to leverage binary index so we will have to do an array sort
  // if we have opted to use simplified javascript comparison function...
  if (options.useJavascriptSorting) {
    return this.sort(function (obj1, obj2) {
      if (obj1[propname] === obj2[propname]) return 0;
      if (obj1[propname] > obj2[propname]) return 1;
      if (obj1[propname] < obj2[propname]) return -1;
    });
  }
  // otherwise use control sort which will return same results if column is indexed or not
  var wrappedComparer =
    (function (prop, desc, data) {
      var val1, val2, arr;
      return function (a, b) {
        // support dotted-path property names (e.g. 'address.city')
        if (~prop.indexOf('.')) {
          arr = prop.split('.');
          val1 = Utils.getIn(data[a], arr, true);
          val2 = Utils.getIn(data[b], arr, true);
        } else {
          val1 = data[a][prop];
          val2 = data[b][prop];
        }
        return sortHelper(val1, val2, desc);
      };
    })(propname, options.desc, this.collection.data);
  this.filteredrows.sort(wrappedComparer);
  return this;
};
/**
* Allows sorting a resultset based on multiple columns.
* @example
* // to sort by age and then name (both ascending)
* rs.compoundsort(['age', 'name']);
* // to sort by age (ascending) and then by name (descending)
* rs.compoundsort(['age', ['name', true]]);
*
* @param {array} properties - array of property names or subarray of [propertyname, isdesc] used evaluate sort order
* @returns {Resultset} Reference to this resultset, sorted, for future chain operations.
* @memberof Resultset
*/
Resultset.prototype.compoundsort = function (properties) {
  if (properties.length === 0) {
    throw new Error("Invalid call to compoundsort, need at least one property");
  }
  var prop;
  // single-property case degenerates to simplesort
  if (properties.length === 1) {
    prop = properties[0];
    if (Array.isArray(prop)) {
      return this.simplesort(prop[0], prop[1]);
    }
    return this.simplesort(prop, false);
  }
  // normalize into a local array of [propertyname, isdesc] pairs so we do not
  // mutate the caller-supplied 'properties' array (previous code rewrote its
  // entries in place)
  var normalized = properties.map(function (p) {
    return Array.isArray(p) ? p : [p, false];
  });
  // if this has no filters applied, we need to populate filteredrows first
  if (!this.filterInitialized && this.filteredrows.length === 0) {
    this.filteredrows = this.collection.prepareFullDocIndex();
  }
  var wrappedComparer =
    (function (props, data) {
      return function (a, b) {
        return compoundeval(props, data[a], data[b]);
      };
    })(normalized, this.collection.data);
  this.filteredrows.sort(wrappedComparer);
  return this;
};
/**
* findOr() - oversee the operation of OR'ed query expressions.
* OR'ed expression evaluation runs each expression individually against the full collection,
* and finally does a set OR on each expression's results.
* Each evaluation can utilize a binary index to prevent multiple linear array scans.
*
* @param {array} expressionArray - array of expressions
* @returns {Resultset} this resultset for further chain ops.
*/
Resultset.prototype.findOr = function (expressionArray) {
  var fr = null,
    fri = 0,
    frlen = 0,
    docset = [],
    idxset = [],
    idx = 0;
  // (fix: removed unused 'origCount' local and its needless this.count() call)
  // If filter is already initialized, then we query against only those items already in filter.
  // This means no index utilization for fields, so hopefully its filtered to a smallish filteredrows.
  for (var ei = 0, elen = expressionArray.length; ei < elen; ei++) {
    // we need to branch existing query to run each filter separately and combine results
    fr = this.branch().find(expressionArray[ei]).filteredrows;
    frlen = fr.length;
    // add any document 'hits', deduplicated via the idxset membership flags
    for (fri = 0; fri < frlen; fri++) {
      idx = fr[fri];
      if (idxset[idx] === undefined) {
        idxset[idx] = true;
        docset.push(idx);
      }
    }
  }
  this.filteredrows = docset;
  this.filterInitialized = true;
  return this;
};
Resultset.prototype.$or = Resultset.prototype.findOr;
// precompile recursively
// Recursively walks a query value and replaces every $regex argument with a
// compiled RegExp instance, so matching does not recompile per document.
// Accepts either a [pattern, flags] pair or a bare pattern; values that are
// already RegExp instances pass through untouched. Nested operator objects
// are rewritten in place and the (possibly replaced) value is returned.
function precompileQuery(operator, value) {
  if (operator === '$regex') {
    if (Array.isArray(value)) {
      // [pattern, flags] form
      return new RegExp(value[0], value[1]);
    }
    if (value instanceof RegExp) {
      return value;
    }
    return new RegExp(value);
  }
  if (typeof value === 'object') {
    for (var prop in value) {
      var nested = value[prop];
      if (prop === '$regex' || typeof nested === 'object') {
        value[prop] = precompileQuery(prop, nested);
      }
    }
  }
  return value;
}
/**
* findAnd() - oversee the operation of AND'ed query expressions.
* AND'ed expression evaluation runs each expression progressively against the full collection,
* internally utilizing existing chained resultset functionality.
* Only the first filter can utilize a binary index.
*
* @param {array} expressionArray - array of expressions
* @returns {Resultset} this resultset for further chain ops.
*/
Resultset.prototype.findAnd = function (expressionArray) {
  // Method chaining is already supported by this Resultset class, so each
  // AND term is simply applied as a successive find() filter.
  var termCount = expressionArray.length;
  for (var idx = 0; idx < termCount; idx++) {
    // nothing left to narrow down — stop early
    if (this.count() === 0) {
      return this;
    }
    this.find(expressionArray[idx]);
  }
  return this;
};
Resultset.prototype.$and = Resultset.prototype.findAnd;
/**
* Used for querying via a mongo-style query object.
*
* @param {object} query - A mongo-style query object used for filtering current results.
* @param {boolean=} firstOnly - (Optional) Used by collection.findOne()
* @returns {Resultset} this resultset for further chain ops.
* @memberof Resultset
* @example
* var over30 = users.chain().find({ age: { $gte: 30 } }).data();
*/
Resultset.prototype.find = function (query, firstOnly) {
// empty collection: trivially resolve to an empty, initialized filter
if (this.collection.data.length === 0) {
this.filteredrows = [];
this.filterInitialized = true;
return this;
}
var queryObject = query || 'getAll',
p,
property,
queryObjectOp,
obj,
operator,
value,
key,
searchByIndex = false,
result = [],
filters = [],
index = null;
// flag if this was invoked via findOne()
firstOnly = firstOnly || false;
if (typeof queryObject === 'object') {
// split the query object into one single-property filter per key
for (p in queryObject) {
obj = {};
obj[p] = queryObject[p];
filters.push(obj);
if (hasOwnProperty.call(queryObject, p)) {
property = p;
queryObjectOp = queryObject[p];
}
}
// if more than one expression in single query object,
// convert implicit $and to explicit $and
if (filters.length > 1) {
return this.find({ '$and': filters }, firstOnly);
}
}
// apply no filters if they want all
if (!property || queryObject === 'getAll') {
if (firstOnly) {
if (this.filterInitialized) {
this.filteredrows = this.filteredrows.slice(0, 1);
} else {
this.filteredrows = (this.collection.data.length > 0) ? [0] : [];
this.filterInitialized = true;
}
}
return this;
}
// injecting $and and $or expression tree evaluation here.
if (property === '$and' || property === '$or') {
// dispatches to Resultset.prototype.$and / .$or
this[property](queryObjectOp);
// for chained find with firstonly,
if (firstOnly && this.filteredrows.length > 1) {
this.filteredrows = this.filteredrows.slice(0, 1);
}
return this;
}
// see if query object is in shorthand mode (assuming eq operator)
if (queryObjectOp === null || (typeof queryObjectOp !== 'object' || queryObjectOp instanceof Date)) {
operator = '$eq';
value = queryObjectOp;
} else if (typeof queryObjectOp === 'object') {
// long form: the first own key of the operand object is the operator, e.g. { $gt: 5 }
for (key in queryObjectOp) {
if (hasOwnProperty.call(queryObjectOp, key)) {
operator = key;
value = queryObjectOp[key];
break;
}
}
} else {
throw new Error('Do not know what you want to do.');
}
// compile $regex arguments (including nested ones) into RegExp instances up front
if (operator === '$regex' || typeof value === 'object') {
value = precompileQuery(operator, value);
}
// if user is deep querying the object such as find('name.first': 'odin')
var usingDotNotation = (property.indexOf('.') !== -1);
// if an index exists for the property being queried against, use it
// for now only enabling where it is the first filter applied and prop is indexed
var doIndexCheck = !this.filterInitialized;
if (doIndexCheck && this.collection.binaryIndices[property] && indexedOps[operator]) {
// this is where our lazy index rebuilding will take place
// basically we will leave all indexes dirty until we need them
// so here we will rebuild only the index tied to this property
// ensureIndex() will only rebuild if flagged as dirty since we are not passing force=true param
if (this.collection.adaptiveBinaryIndices !== true) {
this.collection.ensureIndex(property);
}
searchByIndex = true;
index = this.collection.binaryIndices[property];
}
// opportunistically speed up $in searches from O(n*m) to O(n*log m)
if (!searchByIndex && operator === '$in' && Array.isArray(value) && typeof Set !== 'undefined') {
value = new Set(value);
operator = '$inSet';
}
// the comparison function
var fun = ControlOps[operator];
// "shortcut" for collection data
var t = this.collection.data;
// filter data length
var i = 0,
len = 0;
// Query executed differently depending on :
// - whether the property being queried has an index defined
// - if chained, we handle first pass differently for initial filteredrows[] population
//
// For performance reasons, each case has its own if block to minimize in-loop calculations
var filter, rowIdx = 0, record;
// If the filteredrows[] is already initialized, use it
if (this.filterInitialized) {
filter = this.filteredrows;
len = filter.length;
// currently supporting dot notation for non-indexed conditions only
if (usingDotNotation) {
property = property.split('.');
for (i = 0; i < len; i++) {
rowIdx = filter[i];
record = t[rowIdx];
if (dotSubScan(record, property, fun, value, record)) {
result.push(rowIdx);
if (firstOnly) {
this.filteredrows = result;
return this;
}
}
}
} else {
for (i = 0; i < len; i++) {
rowIdx = filter[i];
record = t[rowIdx];
if (fun(record[property], value, record)) {
result.push(rowIdx);
if (firstOnly) {
this.filteredrows = result;
return this;
}
}
}
}
}
// first chained query so work against data[] but put results in filteredrows
else {
// if not searching by index
if (!searchByIndex) {
len = t.length;
if (usingDotNotation) {
property = property.split('.');
for (i = 0; i < len; i++) {
record = t[i];
if (dotSubScan(record, property, fun, value, record)) {
result.push(i);
if (firstOnly) {
this.filteredrows = result;
this.filterInitialized = true;
return this;
}
}
}
} else {
for (i = 0; i < len; i++) {
record = t[i];
if (fun(record[property], value, record)) {
result.push(i);
if (firstOnly) {
this.filteredrows = result;
this.filterInitialized = true;
return this;
}
}
}
}
} else {
// search by index
var segm = this.collection.calculateRange(operator, property, value);
if (operator !== '$in') {
// segm is assumed to be an inclusive [first, last] range into index.values
for (i = segm[0]; i <= segm[1]; i++) {
if (indexedOps[operator] !== true) {
// must be a function, implying 2nd phase filtering of results from calculateRange
if (indexedOps[operator](Utils.getIn(t[index.values[i]], property, usingDotNotation), value)) {
result.push(index.values[i]);
if (firstOnly) {
this.filteredrows = result;
this.filterInitialized = true;
return this;
}
}
}
else {
result.push(index.values[i]);
if (firstOnly) {
this.filteredrows = result;
this.filterInitialized = true;
return this;
}
}
}
} else {
// for $in, segm is presumably an array of positions into index.values — TODO confirm against calculateRange
for (i = 0, len = segm.length; i < len; i++) {
result.push(index.values[segm[i]]);
if (firstOnly) {
this.filteredrows = result;
this.filterInitialized = true;
return this;
}
}
}
}
}
this.filteredrows = result;
this.filterInitialized = true; // next time work against filteredrows[]
return this;
};
/**
* where() - Used for filtering via a javascript filter function.
*
* @param {function} fun - A javascript function used for filtering current results by.
* @returns {Resultset} this resultset for further chain ops.
* @memberof Resultset
* @example
* var over30 = users.chain().where(function(obj) { return obj.age >= 30; }).data();
*/
Resultset.prototype.where = function (fun) {
  // (fix: removed a try/catch wrapper that only rethrew the caught error)
  var viewFunction,
    result = [];
  if ('function' === typeof fun) {
    viewFunction = fun;
  } else {
    throw new TypeError('Argument is not a stored view or a function');
  }
  // If the filteredrows[] is already initialized, use it
  if (this.filterInitialized) {
    var j = this.filteredrows.length;
    // note: iterates in reverse, preserving the historical result ordering
    while (j--) {
      if (viewFunction(this.collection.data[this.filteredrows[j]]) === true) {
        result.push(this.filteredrows[j]);
      }
    }
    this.filteredrows = result;
    return this;
  }
  // otherwise this is initial chained op, work against data, push into filteredrows[]
  var k = this.collection.data.length;
  while (k--) {
    if (viewFunction(this.collection.data[k]) === true) {
      result.push(k);
    }
  }
  this.filteredrows = result;
  this.filterInitialized = true;
  return this;
};
/**
* count() - returns the number of documents in the resultset.
*
* @returns {number} The number of documents in the resultset.
* @memberof Resultset
* @example
* var over30Count = users.chain().find({ age: { $gte: 30 } }).count();
*/
Resultset.prototype.count = function () {
  // an uninitialized (unfiltered) resultset defers to the collection's own count
  if (!this.filterInitialized) {
    return this.collection.count();
  }
  return this.filteredrows.length;
};
/**
* Terminates the chain and returns array of filtered documents
*
* @param {object=} options - allows specifying 'forceClones' and 'forceCloneMethod' options.
* @param {boolean} options.forceClones - Allows forcing the return of cloned objects even when
* the collection is not configured for clone object.
* @param {string} options.forceCloneMethod - Allows overriding the default or collection specified cloning method.
* Possible values include 'parse-stringify', 'jquery-extend-deep', 'shallow', 'shallow-assign'
* @param {bool} options.removeMeta - Will force clones and strip $ctrl and meta properties from documents
*
* @returns {array} Array of documents in the resultset
* @memberof Resultset
* @example
* var results = users.chain().find({ age: 34 }).data();
*/
Resultset.prototype.docs = function (options) {
  var opts = options || {};
  var data = this.collection.data;
  var out = [];
  var i, count, doc, cloneMethod;
  // stripping meta implies cloning; default to cheap 'shallow' clones unless overridden
  if (opts.removeMeta && !opts.forceClones) {
    opts.forceClones = true;
    opts.forceCloneMethod = opts.forceCloneMethod || 'shallow';
  }
  // delta change tracking (with freeze disabled) needs deep 'parse-stringify' clones;
  // when the collection is immutable, freeze/unFreeze handles cloning instead
  if (!this.collection.disableDeltaChangesApi && this.collection.disableFreeze) {
    opts.forceClones = true;
    opts.forceCloneMethod = 'parse-stringify';
  }
  var mustClone = this.collection.cloneObjects || opts.forceClones;
  // no filters applied yet: serve straight from collection.data
  if (!this.filterInitialized) {
    if (this.filteredrows.length === 0) {
      if (!mustClone) {
        // no cloning required; hand back a sliced array of live document references
        return data.slice();
      }
      cloneMethod = opts.forceCloneMethod || this.collection.cloneMethod;
      for (i = 0, count = data.length; i < count; i++) {
        doc = clone(data[i], cloneMethod);
        if (opts.removeMeta) {
          delete doc.$ctrl;
          delete doc.meta;
        }
        out.push(doc);
      }
      return out;
    }
    // filteredrows must have been set manually, so use it
    this.filterInitialized = true;
  }
  var rows = this.filteredrows;
  if (mustClone) {
    cloneMethod = opts.forceCloneMethod || this.collection.cloneMethod;
    for (i = 0, count = rows.length; i < count; i++) {
      doc = clone(data[rows[i]], cloneMethod);
      if (opts.removeMeta) {
        delete doc.$ctrl;
        delete doc.meta;
      }
      out.push(doc);
    }
  } else {
    // not cloning: results share object references with collection.data
    for (i = 0, count = rows.length; i < count; i++) {
      out.push(data[rows[i]]);
    }
  }
  return out;
};
/**
* Used to run an update operation on all documents currently in the resultset.
*
* @param {function} updateFunction - User supplied updateFunction(obj) will be executed for each document object.
* @returns {Resultset} this resultset for further chain ops.
* @memberof Resultset
* @example
* users.chain().find({ country: 'de' }).update(function(user) {
* user.phoneFormat = "+49 AAAA BBBBBB";
* });
*/
Resultset.prototype.update = function (updateFunction) {
  if (typeof (updateFunction) !== "function") {
    throw new TypeError('Argument is not a function');
  }
  // if this has no filters applied, we need to populate filteredrows first
  if (!this.filterInitialized && this.filteredrows.length === 0) {
    this.filteredrows = this.collection.prepareFullDocIndex();
  }
  var obj, len = this.filteredrows.length,
    rcd = this.collection.data;
  // pass in each document object currently in resultset to user supplied updateFunction
  for (var idx = 0; idx < len; idx++) {
    // if we have cloning option specified or are doing differential delta changes, clone object first.
    // Fix: the freeze flag lives on the collection (as checked everywhere else in this file);
    // the previous `this.disableFreeze` was always undefined on a Resultset and forced cloning.
    if (!this.collection.disableFreeze || this.collection.cloneObjects || !this.collection.disableDeltaChangesApi) {
      obj = clone(rcd[this.filteredrows[idx]], this.collection.cloneMethod);
      updateFunction(obj);
      this.collection.update(obj);
    }
    else {
      // no need to clone, so just perform update on collection data object instance
      updateFunction(rcd[this.filteredrows[idx]]);
      this.collection.update(rcd[this.filteredrows[idx]]);
    }
  }
  return this;
};
/**
* Removes all document objects which are currently in resultset from collection (as well as resultset)
*
* @returns {Resultset} this (empty) resultset for further chain ops.
* @memberof Resultset
* @example
* // remove users inactive since 1/1/2001
* users.chain().find({ lastActive: { $lte: new Date("1/1/2001").getTime() } }).remove();
*/
Resultset.prototype.remove = function () {
  // a virgin resultset means "everything": materialize the full doc index first
  if (!this.filterInitialized && this.filteredrows.length === 0) {
    this.filteredrows = this.collection.prepareFullDocIndex();
  }
  // batch-remove by position, then leave this resultset empty
  this.collection.removeBatchByPositions(this.filteredrows);
  this.filteredrows = [];
  return this;
};
/**
* data transformation via user supplied functions
*
* @param {function} mapFunction - this function accepts a single document for you to transform and return
* @param {function} reduceFunction - this function accepts many (array of map outputs) and returns single value
* @returns {value} The output of your reduceFunction
* @memberof Resultset
* @example
* var db = new controldb("order.db");
* var orders = db.addCollection("orders");
* orders.insert([{ qty: 4, unitCost: 100.00 }, { qty: 10, unitCost: 999.99 }, { qty: 2, unitCost: 49.99 }]);
*
* function mapfun (obj) { return obj.qty*obj.unitCost };
* function reducefun(array) {
* var grandTotal=0;
* array.forEach(function(orderTotal) { grandTotal += orderTotal; });
* return grandTotal;
* }
* var grandOrderTotal = orders.chain().mapReduce(mapfun, reducefun);
* console.log(grandOrderTotal);
*/
Resultset.prototype.mapReduce = function (mapFunction, reduceFunction) {
  // map every current document, then reduce the mapped array to a single value.
  // (fix: removed a try/catch that only rethrew — exceptions from the user
  // callbacks now propagate naturally, with identical observable behavior)
  return reduceFunction(this.docs().map(mapFunction));
};
/**
* eqJoin() - Left joining two sets of data. Join keys can be defined or calculated properties
* eqJoin expects the right join key values to be unique. Otherwise left data will be joined on the last joinData object with that key
* @param {Array|Resultset|Collection} joinData - Data array to join to.
* @param {(string|function)} leftJoinKey - Property name in this result set to join on or a function to produce a value to join on
* @param {(string|function)} rightJoinKey - Property name in the joinData to join on or a function to produce a value to join on
* @param {function=} mapFun - (Optional) A function that receives each matching pair and maps them into output objects - function(left,right){return joinedObject}
* @param {object=} dataOptions - options to data() before input to your map function
* @param {bool} dataOptions.removeMeta - allows removing meta before calling mapFun
* @param {boolean} dataOptions.forceClones - forcing the return of cloned objects to your map object
* @param {string} dataOptions.forceCloneMethod - Allows overriding the default or collection specified cloning method.
* @returns {Resultset} A resultset with data in the format [{left: leftObj, right: rightObj}]
* @memberof Resultset
* @example
* var db = new controldb('sandbox.db');
*
* var products = db.addCollection('products');
* var orders = db.addCollection('orders');
*
* products.insert({ productId: "100234", name: "flywheel energy storage", unitCost: 19999.99 });
* products.insert({ productId: "140491", name: "300F super capacitor", unitCost: 129.99 });
* products.insert({ productId: "271941", name: "fuel cell", unitCost: 3999.99 });
* products.insert({ productId: "174592", name: "390V 3AH lithium bank", unitCost: 4999.99 });
*
* orders.insert({ orderDate : new Date("12/1/2017").getTime(), prodId: "174592", qty: 2, customerId: 2 });
* orders.insert({ orderDate : new Date("4/15/2016").getTime(), prodId: "271941", qty: 1, customerId: 1 });
* orders.insert({ orderDate : new Date("3/12/2017").getTime(), prodId: "140491", qty: 4, customerId: 4 });
* orders.insert({ orderDate : new Date("7/31/2017").getTime(), prodId: "100234", qty: 7, customerId: 3 });
* orders.insert({ orderDate : new Date("8/3/2016").getTime(), prodId: "174592", qty: 3, customerId: 5 });
*
* var mapfun = function(left, right) {
* return {
* orderId: left.$ctrl,
* orderDate: new Date(left.orderDate) + '',
* customerId: left.customerId,
* qty: left.qty,
* productId: left.prodId,
* prodName: right.name,
* prodCost: right.unitCost,
* orderTotal: +((right.unitCost * left.qty).toFixed(2))
* };
* };
*
* // join orders with relevant product info via eqJoin
* var orderSummary = orders.chain().eqJoin(products, "prodId", "productId", mapfun).data();
*
* console.log(orderSummary);
*/
Resultset.prototype.eqJoin = function (joinData, leftJoinKey, rightJoinKey, mapFun, dataOptions) {
  var leftData = [],
    leftDataLength,
    rightData = [],
    rightDataLength,
    key,
    result = [],
    leftKeyisFunction = typeof leftJoinKey === 'function',
    rightKeyisFunction = typeof rightJoinKey === 'function',
    joinMap = {};
  //get the left data
  leftData = this.docs(dataOptions);
  leftDataLength = leftData.length;
  //get the right data
  if (joinData instanceof Collection) {
    // fix: terminate the chain via docs() — the terminal method defined on
    // Resultset in this file — rather than data(), for consistency with the
    // Resultset branch below
    rightData = joinData.chain().docs(dataOptions);
  } else if (joinData instanceof Resultset) {
    rightData = joinData.docs(dataOptions);
  } else if (Array.isArray(joinData)) {
    rightData = joinData;
  } else {
    throw new TypeError('joinData needs to be an array or result set');
  }
  rightDataLength = rightData.length;
  //construct a lookup table from right join key to right row
  //(right keys are expected unique; duplicate keys keep only the last row)
  for (var i = 0; i < rightDataLength; i++) {
    key = rightKeyisFunction ? rightJoinKey(rightData[i]) : rightData[i][rightJoinKey];
    joinMap[key] = rightData[i];
  }
  if (!mapFun) {
    // default pairing when no map function is supplied
    mapFun = function (left, right) {
      return {
        left: left,
        right: right
      };
    };
  }
  //Run map function over each object in the resultset; unmatched lefts join {}
  for (var j = 0; j < leftDataLength; j++) {
    key = leftKeyisFunction ? leftJoinKey(leftData[j]) : leftData[j][leftJoinKey];
    result.push(mapFun(leftData[j], joinMap[key] || {}));
  }
  //return a new resultset with no filters
  this.collection = new Collection('joinData');
  this.collection.insert(result);
  this.filteredrows = [];
  this.filterInitialized = false;
  return this;
};
/**
* Applies a map function into a new collection for further chaining.
* @param {function} mapFun - javascript map function
* @param {object=} dataOptions - options to data() before input to your map function
* @param {bool} dataOptions.removeMeta - allows removing meta before calling mapFun
* @param {boolean} dataOptions.forceClones - forcing the return of cloned objects to your map object
* @param {string} dataOptions.forceCloneMethod - Allows overriding the default or collection specified cloning method.
* @memberof Resultset
* @example
* var orders.chain().find({ productId: 32 }).map(function(obj) {
* return {
* orderId: $ctrl,
* productId: productId,
* quantity: qty
* };
* });
*/
Resultset.prototype.map = function (mapFun, dataOptions) {
  // materialize current docs, run the user's map function, and wrap the output
  // in a fresh anonymous collection so further chaining operates on mapped data
  var mapped = this.docs(dataOptions).map(mapFun);
  this.collection = new Collection('mappedData');
  this.collection.insert(mapped);
  // reset filters; the new resultset reflects the whole mapped collection
  this.filteredrows = [];
  this.filterInitialized = false;
  return this;
};
/**
* DynamicView class is a versatile 'live' view class which can have filters and sorts applied.
* Collection.addDynamicView(name) instantiates this DynamicView object and notifies it
* whenever documents are add/updated/removed so it can remain up-to-date. (chainable)
*
* @example
* var mydv = mycollection.addDynamicView('test'); // default is non-persistent
* mydv.applyFind({ 'doors' : 4 });
* mydv.applyWhere(function(obj) { return obj.name === 'Toyota'; });
* var results = mydv.data();
*
* @constructor DynamicView
* @implements ControlDBEventEmitter
* @param {Collection} collection - A reference to the collection to work against
* @param {string} name - The name of this dynamic view
* @param {object=} options - (Optional) Pass in object with 'persistent' and/or 'sortPriority' options.
* @param {boolean} [options.persistent=false] - indicates if view is to main internal results array in 'resultdata'
* @param {string} [options.sortPriority='passive'] - 'passive' (sorts performed on call to data) or 'active' (after updates)
* @param {number} options.minRebuildInterval - minimum rebuild interval (need clarification to docs here)
* @see {@link Collection#addDynamicView} to construct instances of DynamicView
*/
function DynamicView(collection, name, options) {
  var opts = options || {};
  this.collection = collection;
  this.name = name;
  this.rebuildPending = false;
  this.options = opts;
  // option defaults:
  // 'persistent' — whether results are maintained in an internal resultdata array
  if (!opts.hasOwnProperty('persistent')) {
    opts.persistent = false;
  }
  // 'sortPriority':
  // 'passive' defers the sort phase until data() is called (most efficient overall)
  // 'active' sorts async whenever next idle (prioritizes read speeds)
  if (!opts.hasOwnProperty('sortPriority')) {
    opts.sortPriority = 'passive';
  }
  if (!opts.hasOwnProperty('minRebuildInterval')) {
    opts.minRebuildInterval = 1;
  }
  this.resultset = new Resultset(collection);
  this.resultdata = [];
  this.resultsdirty = false;
  this.cachedresultset = null;
  // ordered filter pipeline; frozen when the collection is immutable
  this.filterPipeline = [];
  if (!this.collection.disableFreeze) {
    Object.freeze(this.filterPipeline);
  }
  // sorting state: only one active sort is supported, set via
  // applySort(), applySimpleSort() or applySortCriteria()
  this.sortFunction = null;
  this.sortCriteria = null;
  this.sortCriteriaSimple = null;
  this.sortDirty = false;
  // 'rebuild' fires when the lazy view is finally rebuilt;
  // 'filter' / 'sort' fire when the pipeline or sort changes
  this.events = {
    'rebuild': [],
    'filter': [],
    'sort': []
  };
}
DynamicView.prototype = new ControlDBEventEmitter();
DynamicView.prototype.constructor = DynamicView;
/**
* getSort() - used to get the current sort
*
* @returns function (sortFunction) or array (sortCriteria) or object (sortCriteriaSimple)
*/
DynamicView.prototype.getSort = function () {
  // at most one of the three sort mechanisms is active at a time
  if (this.sortFunction) {
    return this.sortFunction;
  }
  return this.sortCriteria || this.sortCriteriaSimple;
};
/**
* rematerialize() - internally used immediately after deserialization (loading)
* This will clear out and reapply filterPipeline ops, recreating the view.
* Since where filters do not persist correctly, this method allows
* restoring the view to state where user can re-apply those where filters.
*
* @param {Object=} options - (Optional) allows specification of 'removeWhereFilters' option
* @returns {DynamicView} This dynamic view for further chained ops.
* @memberof DynamicView
* @fires DynamicView.rebuild
*/
DynamicView.prototype.rematerialize = function (options) {
var fpl,
fpi,
idx;
options = options || {};
// discard materialized results and start from a fresh resultset
this.resultdata = [];
this.resultsdirty = true;
this.resultset = new Resultset(this.collection);
// any configured sort must be re-run against the rebuilt results
if (this.sortFunction || this.sortCriteria || this.sortCriteriaSimple) {
this.sortDirty = true;
}
var wasFrozen = Object.isFrozen(this.filterPipeline);
if (options.hasOwnProperty('removeWhereFilters')) {
// for each view see if it had any where filters applied... since they don't
// serialize those functions lets remove those invalid filters
if (wasFrozen) {
// thaw by copying; re-frozen after reapplication below
this.filterPipeline = this.filterPipeline.slice();
}
fpl = this.filterPipeline.length;
fpi = fpl;
while (fpi--) {
if (this.filterPipeline[fpi].type === 'where') {
// swap-remove: overwrite with the last element, then shrink the array
if (fpi !== this.filterPipeline.length - 1) {
this.filterPipeline[fpi] = this.filterPipeline[this.filterPipeline.length - 1];
}
this.filterPipeline.length--;
}
}
}
// back up old filter pipeline, clear filter pipeline, and reapply pipeline ops
var ofp = this.filterPipeline;
this.filterPipeline = [];
// now re-apply 'find' filterPipeline ops
fpl = ofp.length;
for (idx = 0; idx < fpl; idx++) {
this.applyFind(ofp[idx].val, ofp[idx].uid);
}
if (wasFrozen) {
Object.freeze(this.filterPipeline);
}
// during creation of unit tests, i will remove this forced refresh and leave lazy
this.docs();
// emit rebuild event in case user wants to be notified
this.emit('rebuild', this);
return this;
};
/**
* branchResultset() - Makes a copy of the internal resultset for branched queries.
* Unlike this dynamic view, the branched resultset will not be 'live' updated,
* so your branched query should be immediately resolved and not held for future evaluation.
*
* @param {(string|array=)} transform - Optional name of collection transform, or an array of transform steps
* @param {object=} parameters - optional parameters (if optional transform requires them)
* @returns {Resultset} A copy of the internal resultset for branched queries.
* @memberof DynamicView
* @example
* var db = new controldb('test');
* var coll = db.addCollection('mydocs');
* var dv = coll.addDynamicView('myview');
* var tx = [
* {
* type: 'offset',
* value: '[%lktxp]pageStart'
* },
* {
* type: 'limit',
* value: '[%lktxp]pageSize'
* }
* ];
* coll.addTransform('viewPaging', tx);
*
* // add some records
*
* var results = dv.branchResultset('viewPaging', { pageStart: 10, pageSize: 10 }).data();
*/
DynamicView.prototype.branchResultset = function (transform, parameters) {
  // branch off the internal resultset; the branch is NOT live-updated
  var branched = this.resultset.branch();
  if (typeof transform === 'undefined') {
    // no transform requested — hand back the raw branch
    return branched;
  }
  return branched.transform(transform, parameters);
};
/**
* toJSON() - Override of toJSON to avoid circular references
*
*/
DynamicView.prototype.toJSON = function () {
  // serialize into a fresh view shell to avoid circular references
  var serialized = new DynamicView(this.collection, this.name, this.options);
  serialized.resultset = this.resultset;
  // do not persist materialized data (keeps output small); flag dirty so it rebuilds on load
  serialized.resultdata = [];
  serialized.resultsdirty = true;
  serialized.filterPipeline = this.filterPipeline;
  serialized.sortFunction = this.sortFunction;
  serialized.sortCriteria = this.sortCriteria;
  serialized.sortCriteriaSimple = this.sortCriteriaSimple || null;
  serialized.sortDirty = this.sortDirty;
  // break the circular reference; db.loadJSON() reattaches the collection
  serialized.collection = null;
  return serialized;
};
/**
* removeFilters() - Used to clear pipeline and reset dynamic view to initial state.
* Existing options should be retained.
* @param {object=} options - configure removeFilter behavior
* @param {boolean=} options.queueSortPhase - (default: false) if true we will async rebuild view (maybe set default to true in future?)
* @memberof DynamicView
*/
DynamicView.prototype.removeFilters = function (options) {
  var opts = options || {};
  this.rebuildPending = false;
  this.resultset.reset();
  this.resultdata = [];
  this.resultsdirty = true;
  this.cachedresultset = null;
  var pipelineWasFrozen = Object.isFrozen(this.filterPipeline);
  var hadFilters = this.filterPipeline.length > 0;
  // reset to an empty (ordered) filter pipeline, re-freezing if it was frozen
  this.filterPipeline = [];
  if (pipelineWasFrozen) {
    Object.freeze(this.filterPipeline);
  }
  // clear sort state; only one active sort is supported at a time
  this.sortFunction = null;
  this.sortCriteria = null;
  this.sortCriteriaSimple = null;
  this.sortDirty = false;
  if (opts.queueSortPhase === true) {
    this.queueSortPhase();
  }
  // notify listeners only if filters were actually dropped
  if (hadFilters) {
    this.emit('filter');
  }
};
/**
* applySort() - Used to apply a sort to the dynamic view
* @example
* dv.applySort(function(obj1, obj2) {
* if (obj1.name === obj2.name) return 0;
* if (obj1.name > obj2.name) return 1;
* if (obj1.name < obj2.name) return -1;
* });
*
* @param {function} comparefun - a javascript compare function used for sorting
* @returns {DynamicView} this DynamicView object, for further chain ops.
* @memberof DynamicView
*/
DynamicView.prototype.applySort = function (comparefun) {
  // a compare function supersedes any criteria-based sorts
  this.sortFunction = comparefun;
  this.sortCriteria = null;
  this.sortCriteriaSimple = null;
  this.queueSortPhase();
  this.emit('sort');
  return this;
};
/**
* applySimpleSort() - Used to specify a property used for view translation.
* @example
* dv.applySimpleSort("name");
*
* @param {string} propname - Name of property by which to sort.
* @param {object|boolean=} options - boolean for sort descending or options object
* @param {boolean} [options.desc=false] - whether we should sort descending.
* @param {boolean} [options.disableIndexIntersect=false] - whether we should explicity not use array intersection.
* @param {boolean} [options.forceIndexIntersect=false] - force array intersection (if binary index exists).
* @param {boolean} [options.useJavascriptSorting=false] - whether results are sorted via basic javascript sort.
* @returns {DynamicView} this DynamicView object, for further chain ops.
* @memberof DynamicView
*/
DynamicView.prototype.applySimpleSort = function (propname, options) {
  var criteria = { propname: propname, options: options || false };
  // immutable collections keep sort criteria frozen as well
  if (!this.collection.disableFreeze) {
    deepFreeze(criteria);
  }
  this.sortCriteriaSimple = criteria;
  // a simple sort supersedes compound criteria and compare functions
  this.sortCriteria = null;
  this.sortFunction = null;
  this.queueSortPhase();
  this.emit('sort');
  return this;
};
/**
* applySortCriteria() - Allows sorting a resultset based on multiple columns.
* @example
* // to sort by age and then name (both ascending)
* dv.applySortCriteria(['age', 'name']);
* // to sort by age (ascending) and then by name (descending)
* dv.applySortCriteria(['age', ['name', true]]);
* // to sort by age (descending) and then by name (descending)
* dv.applySortCriteria([['age', true], ['name', true]]);
*
* @param {array} properties - array of property names or subarray of [propertyname, isdesc] used evaluate sort order
* @returns {DynamicView} Reference to this DynamicView, sorted, for future chain operations.
* @memberof DynamicView
*/
DynamicView.prototype.applySortCriteria = function (criteria) {
  // compound criteria supersede simple sorts and compare functions
  this.sortCriteria = criteria;
  // immutable collections keep sort criteria frozen as well
  if (!this.collection.disableFreeze) {
    deepFreeze(this.sortCriteria);
  }
  this.sortFunction = null;
  this.sortCriteriaSimple = null;
  this.queueSortPhase();
  this.emit('sort');
  return this;
};
/**
* startTransaction() - marks the beginning of a transaction.
*
* @returns {DynamicView} this DynamicView object, for further chain ops.
*/
DynamicView.prototype.startTransaction = function () {
  // snapshot the current resultset so rollback() can restore it
  this.cachedresultset = this.resultset.copy();
  return this;
};
/**
* commit() - commits a transaction.
*
* @returns {DynamicView} this DynamicView object, for further chain ops.
*/
DynamicView.prototype.commit = function () {
  // drop the rollback snapshot; the current resultset becomes permanent
  this.cachedresultset = null;
  return this;
};
/**
* rollback() - rolls back a transaction.
*
* @returns {DynamicView} this DynamicView object, for further chain ops.
*/
DynamicView.prototype.rollback = function () {
  // restore the resultset captured at startTransaction()
  this.resultset = this.cachedresultset;
  if (this.options.persistent) {
    // worst case for a persistent view (transaction rolled back): rebuild the
    // materialized data from the restored filters and notify listeners
    this.resultdata = this.resultset.docs();
    this.emit('rebuild', this);
  }
  return this;
};
/**
* Implementation detail.
* _indexOfFilterWithId() - Find the index of a filter in the pipeline, by that filter's ID.
*
* @param {(string|number)} uid - The unique ID of the filter.
* @returns {number}: index of the referenced filter in the pipeline; -1 if not found.
*/
DynamicView.prototype._indexOfFilterWithId = function (uid) {
  // only string/number uids are searchable; anything else can never match
  var searchable = (typeof uid === 'string' || typeof uid === 'number');
  if (searchable) {
    var pipeline = this.filterPipeline;
    for (var i = 0; i < pipeline.length; i += 1) {
      if (pipeline[i].uid === uid) {
        return i;
      }
    }
  }
  return -1;
};
/**
* Implementation detail.
* _addFilter() - Add the filter object to the end of view's filter pipeline and apply the filter to the resultset.
*
* @param {object} filter - The filter object. Refer to applyFilter() for extra details.
*/
DynamicView.prototype._addFilter = function (filter) {
  var pipelineFrozen = Object.isFrozen(this.filterPipeline);
  // a frozen pipeline is immutable: mutate a copy and re-freeze afterwards
  if (pipelineFrozen) {
    this.filterPipeline = this.filterPipeline.slice();
  }
  if (!this.collection.disableFreeze) {
    deepFreeze(filter);
  }
  this.filterPipeline.push(filter);
  if (pipelineFrozen) {
    Object.freeze(this.filterPipeline);
  }
  // apply the new filter to the current resultset immediately
  this.resultset[filter.type](filter.val);
};
/**
* reapplyFilters() - Reapply all the filters in the current pipeline.
*
* @returns {DynamicView} this DynamicView object, for further chain ops.
*/
DynamicView.prototype.reapplyFilters = function () {
  // start from scratch: reset the resultset and invalidate any transaction snapshot
  this.resultset.reset();
  this.cachedresultset = null;
  if (this.options.persistent) {
    this.resultdata = [];
    this.resultsdirty = true;
  }
  var oldPipeline = this.filterPipeline;
  var pipelineFrozen = Object.isFrozen(oldPipeline);
  // rebuild the pipeline by re-adding every existing filter, which also
  // re-applies each one to the freshly reset resultset
  this.filterPipeline = [];
  var count = oldPipeline.length;
  for (var i = 0; i < count; i += 1) {
    this._addFilter(oldPipeline[i]);
  }
  if (pipelineFrozen) {
    Object.freeze(this.filterPipeline);
  }
  // schedule a sort when any sort is configured, otherwise just a rebuild notice
  if (this.sortFunction || this.sortCriteria || this.sortCriteriaSimple) {
    this.queueSortPhase();
  } else {
    this.queueRebuildEvent();
  }
  this.emit('filter');
  return this;
};
/**
* applyFilter() - Adds or updates a filter in the DynamicView filter pipeline
*
* @param {object} filter - A filter object to add to the pipeline.
* The object is in the format { 'type': filter_type, 'val': filter_param, 'uid': optional_filter_id }
* @returns {DynamicView} this DynamicView object, for further chain ops.
* @memberof DynamicView
*/
DynamicView.prototype.applyFilter = function (filter) {
  var existingIdx = this._indexOfFilterWithId(filter.uid);
  // a filter with this uid already exists: replace it in place, then re-run the pipeline
  if (existingIdx >= 0) {
    var pipelineFrozen = Object.isFrozen(this.filterPipeline);
    if (pipelineFrozen) {
      this.filterPipeline = this.filterPipeline.slice();
    }
    this.filterPipeline[existingIdx] = filter;
    if (pipelineFrozen) {
      freeze(filter);
      Object.freeze(this.filterPipeline);
    }
    return this.reapplyFilters();
  }
  // brand new filter: invalidate cached state and append it to the pipeline
  this.cachedresultset = null;
  if (this.options.persistent) {
    this.resultdata = [];
    this.resultsdirty = true;
  }
  this._addFilter(filter);
  if (this.sortFunction || this.sortCriteria || this.sortCriteriaSimple) {
    this.queueSortPhase();
  } else {
    this.queueRebuildEvent();
  }
  this.emit('filter');
  return this;
};
/**
* applyFind() - Adds or updates a mongo-style query option in the DynamicView filter pipeline
*
* @param {object} query - A mongo-style query object to apply to pipeline
* @param {(string|number)=} uid - Optional: The unique ID of this filter, to reference it in the future.
* @returns {DynamicView} this DynamicView object, for further chain ops.
* @memberof DynamicView
*/
DynamicView.prototype.applyFind = function (query, uid) {
  // wrap the mongo-style query as a 'find' pipeline filter
  var filter = {
    type: 'find',
    val: query,
    uid: uid
  };
  this.applyFilter(filter);
  return this;
};
/**
* applyWhere() - Adds or updates a javascript filter function in the DynamicView filter pipeline
*
* @param {function} fun - A javascript filter function to apply to pipeline
* @param {(string|number)=} uid - Optional: The unique ID of this filter, to reference it in the future.
* @returns {DynamicView} this DynamicView object, for further chain ops.
* @memberof DynamicView
*/
DynamicView.prototype.applyWhere = function (fun, uid) {
  // wrap the javascript predicate as a 'where' pipeline filter
  var filter = {
    type: 'where',
    val: fun,
    uid: uid
  };
  this.applyFilter(filter);
  return this;
};
/**
* removeFilter() - Remove the specified filter from the DynamicView filter pipeline
*
* @param {(string|number)} uid - The unique ID of the filter to be removed.
* @returns {DynamicView} this DynamicView object, for further chain ops.
* @memberof DynamicView
*/
DynamicView.prototype.removeFilter = function (uid) {
  var pos = this._indexOfFilterWithId(uid);
  if (pos < 0) {
    throw new Error("Dynamic view does not contain a filter with ID: " + uid);
  }
  var pipelineFrozen = Object.isFrozen(this.filterPipeline);
  // frozen pipelines must be copied before mutation, then re-frozen
  if (pipelineFrozen) {
    this.filterPipeline = this.filterPipeline.slice();
  }
  this.filterPipeline.splice(pos, 1);
  if (pipelineFrozen) {
    Object.freeze(this.filterPipeline);
  }
  this.reapplyFilters();
  return this;
};
/**
* count() - returns the number of documents representing the current DynamicView contents.
*
* @returns {number} The number of documents representing the current DynamicView contents.
* @memberof DynamicView
*/
DynamicView.prototype.count = function () {
  // pay the minimum cost to be accurate: refresh stale resultdata, but do not
  // alter dv state management or fire a rebuild event.
  // recurring resultset data resolutions should know internally its already up to date.
  if (this.resultsdirty) {
    this.resultdata = this.resultset.docs();
  }
  return this.resultset.count();
};
/**
* docs() - resolves any pending filtering and sorting, then returns document array as result.
*
* @param {object=} options - optional parameters to pass to resultset.data() if non-persistent
* @param {boolean} options.forceClones - Allows forcing the return of cloned objects even when
* the collection is not configured for clone object.
* @param {string} options.forceCloneMethod - Allows overriding the default or collection specified cloning method.
* Possible values include 'parse-stringify', 'jquery-extend-deep', 'shallow', 'shallow-assign'
* @param {bool} options.removeMeta - Will force clones and strip $ctrl and meta properties from documents
* @returns {array} An array of documents representing the current DynamicView contents.
* @memberof DynamicView
*/
DynamicView.prototype.docs = function (options) {
  // the final sort phase doubles as a 'catch all' for use cases requiring full rebuild
  var needsRefresh = this.sortDirty || this.resultsdirty;
  if (needsRefresh) {
    this.performSortPhase({
      suppressRebuildEvent: true
    });
  }
  if (this.options.persistent) {
    return this.resultdata;
  }
  return this.resultset.docs(options);
};
/**
* queueRebuildEvent() - When the view is not sorted we may still wish to be notified of rebuild events.
* This event will throttle and queue a single rebuild event when batches of updates affect the view.
*/
DynamicView.prototype.queueRebuildEvent = function () {
  // throttle: at most one rebuild event is queued at a time
  if (this.rebuildPending) {
    return;
  }
  this.rebuildPending = true;
  var self = this;
  setTimeout(function () {
    if (!self.rebuildPending) {
      return;
    }
    self.rebuildPending = false;
    self.emit('rebuild', self);
  }, this.options.minRebuildInterval);
};
/**
* queueSortPhase : If the view is sorted we will throttle sorting to either :
* (1) passive - when the user calls data(), or
* (2) active - once they stop updating and yield js thread control
*/
DynamicView.prototype.queueSortPhase = function () {
  // a sort is already queued; exit without queuing again
  if (this.sortDirty) {
    return;
  }
  this.sortDirty = true;
  var self = this;
  if (this.options.sortPriority !== "active") {
    // passive sorting: the actual sort is deferred until a data call, but use
    // queueRebuildEvent to potentially notify the user that data has changed.
    this.queueRebuildEvent();
    return;
  }
  // active sorting: once the caller is done updating and yields the js thread,
  // run performSortPhase() asynchronously
  setTimeout(function () {
    self.performSortPhase();
  }, this.options.minRebuildInterval);
};
/**
* performSortPhase() - invoked synchronously or asynchronously to perform final sort phase (if needed)
*
*/
DynamicView.prototype.performSortPhase = function (options) {
  // an async invocation may have been pre-empted by a synchronous data call
  if (!this.sortDirty && !this.resultsdirty) {
    return;
  }
  options = options || {};
  if (this.sortDirty) {
    // exactly one sort mechanism is configured at any time
    if (this.sortFunction) {
      this.resultset.sort(this.sortFunction);
    } else if (this.sortCriteria) {
      this.resultset.compoundsort(this.sortCriteria);
    } else if (this.sortCriteriaSimple) {
      var simple = this.sortCriteriaSimple;
      this.resultset.simplesort(simple.propname, simple.options);
    }
    this.sortDirty = false;
  }
  if (this.options.persistent) {
    // persistent view: re-materialize the local resultdata array
    this.resultdata = this.resultset.docs();
    this.resultsdirty = false;
  }
  if (!options.suppressRebuildEvent) {
    this.emit('rebuild', this);
  }
};
/**
* evaluateDocument() - internal method for (re)evaluating document inclusion.
* Called by : collection.insert() and collection.update().
*
* @param {int} objIndex - index of document to (re)run through filter pipeline.
* @param {bool} isNew - true if the document was just added to the collection.
*/
DynamicView.prototype.evaluateDocument = function (objIndex, isNew) {
  // if no filter applied yet, the result 'set' should remain 'everything'
  if (!this.resultset.filterInitialized) {
    if (this.options.persistent) {
      this.resultdata = this.resultset.docs();
    }
    // need to re-sort to sort new document
    if (this.sortFunction || this.sortCriteria || this.sortCriteriaSimple) {
      this.queueSortPhase();
    } else {
      this.queueRebuildEvent();
    }
    return;
  }
  var ofr = this.resultset.filteredrows;
  // previous position of the document within filteredrows (-1 when new or absent)
  var oldPos = (isNew) ? (-1) : (ofr.indexOf(+objIndex));
  var oldlen = ofr.length;
  // creating a 1-element resultset to run filter chain ops on to see if that doc passes filters;
  // mostly efficient algorithm, slight stack overhead price (this function is called on inserts and updates)
  var evalResultset = new Resultset(this.collection);
  evalResultset.filteredrows = [objIndex];
  evalResultset.filterInitialized = true;
  var filter;
  for (var idx = 0, len = this.filterPipeline.length; idx < len; idx++) {
    filter = this.filterPipeline[idx];
    evalResultset[filter.type](filter.val);
  }
  // not a true position, but -1 if not pass our filter(s), 0 if passed filter(s)
  var newPos = (evalResultset.filteredrows.length === 0) ? -1 : 0;
  // wasn't in old, shouldn't be now... do nothing
  if (oldPos === -1 && newPos === -1) return;
  // wasn't in resultset, should be now... add
  if (oldPos === -1 && newPos !== -1) {
    ofr.push(objIndex);
    if (this.options.persistent) {
      // keep resultdata parallel to filteredrows
      this.resultdata.push(this.collection.data[objIndex]);
    }
    // need to re-sort to sort new document
    if (this.sortFunction || this.sortCriteria || this.sortCriteriaSimple) {
      this.queueSortPhase();
    } else {
      this.queueRebuildEvent();
    }
    return;
  }
  // was in resultset, shouldn't be now... delete
  if (oldPos !== -1 && newPos === -1) {
    if (oldPos < oldlen - 1) {
      // removing from the middle: splice out of both parallel arrays
      ofr.splice(oldPos, 1);
      if (this.options.persistent) {
        this.resultdata.splice(oldPos, 1);
      }
    } else {
      // removing the last element: truncating length is cheaper than splice
      ofr.length = oldlen - 1;
      if (this.options.persistent) {
        this.resultdata.length = oldlen - 1;
      }
    }
    // in case changes to data altered a sort column
    if (this.sortFunction || this.sortCriteria || this.sortCriteriaSimple) {
      this.queueSortPhase();
    } else {
      this.queueRebuildEvent();
    }
    return;
  }
  // was in resultset, should still be now... (update persistent only?)
  if (oldPos !== -1 && newPos !== -1) {
    if (this.options.persistent) {
      // in case document changed, replace persistent view data with the latest collection.data document
      this.resultdata[oldPos] = this.collection.data[objIndex];
    }
    // in case changes to data altered a sort column
    if (this.sortFunction || this.sortCriteria || this.sortCriteriaSimple) {
      this.queueSortPhase();
    } else {
      this.queueRebuildEvent();
    }
    return;
  }
};
/**
* removeDocument() - internal function called on collection.delete()
* @param {number|number[]} objIndex - index of document to (re)run through filter pipeline.
*/
DynamicView.prototype.removeDocument = function (objIndex) {
  // rxo: hash of data indices to remove; fxo: hash of filteredrows positions to remove
  var idx, rmidx, rmlen, rxo = {}, fxo = {};
  var adjels = [];
  var drs = this.resultset;
  var fr = this.resultset.filteredrows;
  var frlen = fr.length;
  // if no filter applied yet, the result 'set' should remain 'everything'
  if (!this.resultset.filterInitialized) {
    if (this.options.persistent) {
      this.resultdata = this.resultset.docs();
    }
    // in case changes to data altered a sort column
    if (this.sortFunction || this.sortCriteria || this.sortCriteriaSimple) {
      this.queueSortPhase();
    } else {
      this.queueRebuildEvent();
    }
    return;
  }
  // if passed single index, wrap in array
  if (!Array.isArray(objIndex)) {
    objIndex = [objIndex];
  }
  rmlen = objIndex.length;
  // create intersection object of data indices to remove
  for (rmidx = 0; rmidx < rmlen; rmidx++) {
    rxo[objIndex[rmidx]] = true;
  }
  // pivot remove data indices into remove filteredrows indices and dump in hashobject
  for (idx = 0; idx < frlen; idx++) {
    if (rxo[fr[idx]]) fxo[idx] = true;
  }
  // if any of the removed items were in our filteredrows...
  if (Object.keys(fxo).length > 0) {
    // remove them from filtered rows
    this.resultset.filteredrows = this.resultset.filteredrows.filter(function (di, idx) { return !fxo[idx]; });
    // if persistent...
    if (this.options.persistent) {
      // remove from resultdata (kept parallel to filteredrows)
      this.resultdata = this.resultdata.filter(function (obj, idx) { return !fxo[idx]; });
    }
    // and queue sorts
    if (this.sortFunction || this.sortCriteria || this.sortCriteriaSimple) {
      this.queueSortPhase();
    } else {
      this.queueRebuildEvent();
    }
  }
  // to remove holes, we need to 'shift down' indices, this filter function finds number of positions to shift
  var filt = function (idx) { return function (di) { return di < drs.filteredrows[idx]; }; };
  frlen = drs.filteredrows.length;
  for (idx = 0; idx < frlen; idx++) {
    // grab subset of removed elements where data index is less than current filtered row data index;
    // use this to determine how many positions iterated remaining data index needs to be 'shifted down'
    adjels = objIndex.filter(filt(idx));
    drs.filteredrows[idx] -= adjels.length;
  }
};
/**
* mapReduce() - data transformation via user supplied functions
*
* @param {function} mapFunction - this function accepts a single document for you to transform and return
* @param {function} reduceFunction - this function accepts many (array of map outputs) and returns single value
* @returns The output of your reduceFunction
* @memberof DynamicView
*/
DynamicView.prototype.mapReduce = function (mapFunction, reduceFunction) {
  // the previous try/catch merely rethrew the error unchanged; call directly
  // and let any exception from map/reduce propagate to the caller as before
  return reduceFunction(this.docs().map(mapFunction));
};
/**
* Collection class that handles documents of same type
* @constructor Collection
* @implements ControlDBEventEmitter
* @param {string} name - collection name
* @param {(array|object)=} options - (optional) array of property names to be indicized OR a configuration object
* @param {object=} [options.schema] - schema definition object, validates documents added to collection if defined
* @param {array=} [options.unique=[]] - array of property names to define unique constraints for
* @param {array=} [options.exact=[]] - array of property names to define exact constraints for
* @param {array=} [options.indices=[]] - array property names to define binary indexes for
* @param {boolean} [options.adaptiveBinaryIndices=true] - collection indices will be actively rebuilt rather than lazily
* @param {boolean} [options.asyncListeners=false] - whether listeners are invoked asynchronously
* @param {boolean} [options.disableMeta=false] - set to true to disable meta property on documents
* @param {boolean} [options.disableChangesApi=true] - set to false to enable Changes API
* @param {boolean} [options.disableDeltaChangesApi=true] - set to false to enable Delta Changes API (requires Changes API, forces cloning)
* @param {boolean} [options.autoupdate=false] - use Object.observe to update objects automatically
* @param {boolean} [options.clone=true] - specify whether inserts and queries clone to/from user
* @param {boolean} [options.serializableIndices=true] - converts date values on binary indexed properties to epoch time
* @param {boolean} [options.disableFreeze=true] - when false all docs are frozen
* @param {string} [options.cloneMethod='parse-stringify'] - 'parse-stringify', 'jquery-extend-deep', 'shallow', 'shallow-assign'
* @param {int=} options.ttl - age of document (in ms.) before document is considered aged/stale.
* @param {int=} options.ttlInterval - time interval for clearing out 'aged' documents; not set by default.
* @see {@link ControlDB#addCollection} for normal creation of collections
*/
function Collection(name, options) {
  // the name of the collection
  this.name = name;
  // the data held by the collection
  this.data = [];
  this.idIndex = null; // position->$ctrl index (built lazily)
  this.binaryIndices = {}; // user defined indexes
  this.constraints = {
    unique: {},
    exact: {}
  };
  // unique constraints contain duplicate object references, so they are not persisted.
  // we will keep track of properties which have unique constraint applied here, and regenerate lazily
  this.uniqueNames = [];
  // transforms will be used to store frequently used query chains as a series of steps
  // which itself can be stored along with the database.
  this.transforms = {};
  // the object type of the collection
  this.objType = name;
  // in autosave scenarios we will use collection level dirty flags to determine whether save is needed.
  // currently, if any collection is dirty we will autosave the whole database if autosave is configured.
  // defaulting to true since this is called from addCollection and adding a collection should trigger save
  this.dirty = true;
  // private holders for cached data
  this.cachedIndex = null;
  this.cachedBinaryIndex = null;
  this.cachedData = null;
  var self = this;
  /* OPTIONS */
  options = options || {};
  // exact match and unique constraints
  if (options.hasOwnProperty('unique')) {
    if (!Array.isArray(options.unique)) {
      options.unique = [options.unique];
    }
    // save names; actual index is built lazily
    options.unique.forEach(function (prop) {
      self.uniqueNames.push(prop);
    });
  }
  if (options.hasOwnProperty('exact')) {
    // accept a single property name as well as an array (consistent with 'unique' handling above)
    if (!Array.isArray(options.exact)) {
      options.exact = [options.exact];
    }
    options.exact.forEach(function (prop) {
      self.constraints.exact[prop] = new ExactIndex(prop);
    });
  }
  // schema definition object; documents are validated against it when defined
  this.schema = options.hasOwnProperty('schema') ? options.schema : null;
  // if set to true we will optimally keep indices 'fresh' during insert/update/remove ops (never dirty/never needs rebuild)
  // if you frequently intersperse insert/update/remove ops between find ops this will likely be significantly faster option.
  this.adaptiveBinaryIndices = options.hasOwnProperty('adaptiveBinaryIndices') ? options.adaptiveBinaryIndices : true;
  // is collection transactional
  this.transactional = options.hasOwnProperty('transactional') ? options.transactional : false;
  // options to clone objects when inserting them
  this.cloneObjects = options.hasOwnProperty('clone') ? options.clone : true;
  // default clone method (if enabled) is parse-stringify
  this.cloneMethod = options.hasOwnProperty('cloneMethod') ? options.cloneMethod : "parse-stringify";
  // option to make event listeners async, default is sync
  this.asyncListeners = options.hasOwnProperty('asyncListeners') ? options.asyncListeners : false;
  // if set to true we will not maintain a meta property for a document
  this.disableMeta = options.hasOwnProperty('disableMeta') ? options.disableMeta : false;
  // disable track changes
  this.disableChangesApi = options.hasOwnProperty('disableChangesApi') ? options.disableChangesApi : true;
  // disable delta update object style on changes
  this.disableDeltaChangesApi = options.hasOwnProperty('disableDeltaChangesApi') ? options.disableDeltaChangesApi : true;
  // invariant: the delta changes api requires the changes api to be enabled
  if (this.disableChangesApi) { this.disableDeltaChangesApi = true; }
  // option to observe objects and update them automatically, ignored if Object.observe is not supported
  this.autoupdate = options.hasOwnProperty('autoupdate') ? options.autoupdate : false;
  // by default, if you insert a document into a collection with binary indices, if those indexed properties contain
  // a DateTime we will convert to epoch time format so that (across serializations) its value position will be the
  // same 'after' serialization as it was 'before'.
  this.serializableIndices = options.hasOwnProperty('serializableIndices') ? options.serializableIndices : true;
  // option to deep freeze all documents
  this.disableFreeze = options.hasOwnProperty('disableFreeze') ? options.disableFreeze : true;
  // option to activate a cleaner daemon - clears "aged" documents at set intervals.
  this.ttl = {
    age: null,
    ttlInterval: null,
    daemon: null
  };
  this.setTTL(options.ttl || -1, options.ttlInterval);
  // currentMaxId - change manually at your own peril!
  this.maxId = 0;
  this.DynamicViews = [];
  // events
  this.events = {
    'insert': [],
    'update': [],
    'pre-insert': [],
    'pre-update': [],
    'close': [],
    'flushbuffer': [],
    'error': [],
    'delete': [],
    'warning': []
  };
  // changes are tracked by collection and aggregated by the db
  this.changes = [];
  // lightweight changes tracking (control IDs only) for optimized db saving
  this.dirtyIds = [];
  // initialize optional user-supplied indices array ['age', 'lname', 'zip']
  var indices = [];
  if (options.indices) {
    if (Object.prototype.toString.call(options.indices) === '[object Array]') {
      indices = options.indices;
    } else if (typeof options.indices === 'string') {
      indices = [options.indices];
    } else {
      throw new TypeError('Indices needs to be a string or an array of strings');
    }
  }
  for (var idx = 0; idx < indices.length; idx++) {
    this.ensureIndex(indices[idx]);
  }
  // Object.observe callback: funnels observed mutations back into update(),
  // detaching the observer from objects the collection no longer tracks (no $ctrl)
  function observerCallback(changes) {
    var changedObjects = typeof Set === 'function' ? new Set() : [];
    if (!changedObjects.add)
      changedObjects.add = function (object) {
        if (this.indexOf(object) === -1)
          this.push(object);
        return this;
      };
    changes.forEach(function (change) {
      changedObjects.add(change.object);
    });
    changedObjects.forEach(function (object) {
      if (!hasOwnProperty.call(object, '$ctrl'))
        return self.removeAutoUpdateObserver(object);
      try {
        self.update(object);
      } catch (err) { }
    });
  }
  this.observerCallback = observerCallback;
  // Compare changed object (which is a forced clone) with existing object and return the delta
  function getChangeDelta(obj, old) {
    if (old) {
      return getObjectDelta(old, obj);
    }
    else {
      return JSON.parse(JSON.stringify(obj));
    }
  }
  this.getChangeDelta = getChangeDelta;
  // Recursively computes the minimal difference between oldObject and newObject.
  // Returns undefined when identical; unique/$ctrl/meta properties are always copied.
  function getObjectDelta(oldObject, newObject) {
    var propertyNames = newObject !== null && typeof newObject === 'object' ? Object.keys(newObject) : null;
    if (propertyNames && propertyNames.length && ['string', 'boolean', 'number'].indexOf(typeof (newObject)) < 0) {
      var delta = {};
      for (var i = 0; i < propertyNames.length; i++) {
        var propertyName = propertyNames[i];
        if (newObject.hasOwnProperty(propertyName)) {
          if (!oldObject.hasOwnProperty(propertyName) || self.uniqueNames.indexOf(propertyName) >= 0 || propertyName === '$ctrl' || propertyName === 'meta') {
            delta[propertyName] = newObject[propertyName];
          }
          else {
            var propertyDelta = getObjectDelta(oldObject[propertyName], newObject[propertyName]);
            // fix: the old `propertyDelta != {}` comparison was effectively always true
            // (except when a value loosely equaled "[object Object]"); a plain
            // undefined check is the intended test
            if (typeof propertyDelta !== "undefined") {
              delta[propertyName] = propertyDelta;
            }
          }
        }
      }
      return Object.keys(delta).length === 0 ? undefined : delta;
    }
    else {
      return oldObject === newObject ? undefined : newObject;
    }
  }
  this.getObjectDelta = getObjectDelta;
  // clear all the changes
  function flushChanges() {
    self.changes = [];
  }
  this.getChanges = function () {
    return self.changes;
  };
  this.flushChanges = flushChanges;
  this.setChangesApi = function (enabled) {
    self.disableChangesApi = !enabled;
    // fix: disabling the changes api must also disable the delta changes api,
    // mirroring the constructor invariant above (previously this re-enabled it)
    if (!enabled) { self.disableDeltaChangesApi = true; }
  };
  this.on('delete', function deleteCallback(obj) {
    if (!self.disableChangesApi) {
      self.createChange(self.name, 'R', obj);
    }
  });
  this.on('warning', function (warning) {
    self.controlConsoleWrapper.warn(warning);
  });
  // for de-serialization purposes
  flushChanges();
}
// Collection inherits event emitter capability
Collection.prototype = new ControlDBEventEmitter();
// fix: property name was misspelled 'contructor', which left
// Collection.prototype.constructor pointing at ControlDBEventEmitter
Collection.prototype.constructor = Collection;
/*
* For ChangeAPI default to clone entire object, for delta changes create object with only differences (+ $ctrl and meta)
*/
Collection.prototype.createChange = function (name, op, obj, old) {
  // delta-enabled updates record only the differences; all other operations
  // record a deep JSON snapshot of the object
  var payload;
  if (op === 'U' && !this.disableDeltaChangesApi) {
    payload = this.getChangeDelta(obj, old);
  } else {
    payload = JSON.parse(JSON.stringify(obj));
  }
  this.changes.push({
    name: name,
    operation: op,
    obj: payload
  });
};
Collection.prototype.insertMeta = function (obj) {
  var i, count;
  if (this.disableMeta || !obj) {
    return;
  }
  // batch insert: stamp each document individually
  if (Array.isArray(obj)) {
    count = obj.length;
    for (i = 0; i < count; i++) {
      if (!obj[i].hasOwnProperty('meta')) {
        obj[i].meta = {};
      }
      obj[i].meta.created = (new Date()).getTime();
      obj[i].meta.revision = 0;
    }
    return;
  }
  // single document insert
  if (!obj.meta) {
    obj.meta = {};
  }
  obj.meta.created = (new Date()).getTime();
  obj.meta.revision = 0;
};
Collection.prototype.updateMeta = function (obj) {
  if (this.disableMeta || !obj) {
    return obj;
  }
  // when freezing is active, work on thawed (shallow-cloned) copies
  if (!this.disableFreeze) {
    obj = unFreeze(obj);
    obj.meta = unFreeze(obj.meta);
  }
  obj.meta.updated = (new Date()).getTime();
  obj.meta.revision += 1;
  return obj;
};
Collection.prototype.createInsertChange = function (obj) {
  // 'I' marks an insert operation in the changes log
  this.createChange(this.name, 'I', obj);
};
Collection.prototype.createUpdateChange = function (obj, old) {
  // 'U' marks an update operation; 'old' enables delta computation
  this.createChange(this.name, 'U', obj, old);
};
Collection.prototype.insertMetaWithChange = function (obj) {
  // stamp meta first so the recorded change snapshot includes it
  this.insertMeta(obj);
  this.createInsertChange(obj);
};
Collection.prototype.updateMetaWithChange = function (obj, old, objFrozen) {
  // note: updateMeta only inspects its first argument; objFrozen is forwarded
  // solely to keep this call-site's shape/signature unchanged
  obj = this.updateMeta(obj, objFrozen);
  this.createUpdateChange(obj, old);
  return obj;
};
// no-op console facade; host applications may overwrite these hooks to surface
// collection log/warn/error output
Collection.prototype.controlConsoleWrapper = {
  log: function () { },
  warn: function () { },
  error: function () { },
};
Collection.prototype.addAutoUpdateObserver = function (object) {
  // Object.observe never became standard; silently skip when unsupported or autoupdate is off
  if (!this.autoupdate || typeof Object.observe !== 'function') {
    return;
  }
  Object.observe(object, this.observerCallback, ['add', 'update', 'delete', 'reconfigure', 'setPrototype']);
};
Collection.prototype.removeAutoUpdateObserver = function (object) {
  // mirror of addAutoUpdateObserver(); no-op when observation is unavailable or disabled
  if (!this.autoupdate || typeof Object.observe !== 'function') {
    return;
  }
  Object.unobserve(object, this.observerCallback);
};
/**
* Adds a named collection transform to the collection
* @param {string} name - name to associate with transform
* @param {array} transform - an array of transformation 'step' objects to save into the collection
* @memberof Collection
* @example
* users.addTransform('progeny', [
* {
* type: 'find',
* value: {
* 'age': {'$lte': 40}
* }
* }
* ]);
*
* var results = users.chain('progeny').data();
*/
Collection.prototype.addTransform = function (name, transform) {
  // adding is strict: refuse to silently overwrite (use setTransform for that)
  if (this.transforms.hasOwnProperty(name)) {
    throw new Error("a transform by that name already exists");
  }
  this.transforms[name] = transform;
};
/**
* Retrieves a named transform from the collection.
* @param {string} name - name of the transform to lookup.
* @memberof Collection
*/
Collection.prototype.getTransform = function (name) {
  // undefined when no transform was registered under that name
  return this.transforms[name];
};
/**
* Updates a named collection transform to the collection
* @param {string} name - name to associate with transform
* @param {object} transform - a transformation object to save into collection
* @memberof Collection
*/
Collection.prototype.setTransform = function (name, transform) {
  // unconditional upsert, unlike addTransform which refuses to overwrite
  this.transforms[name] = transform;
};
/**
* Removes a named collection transform from the collection
* @param {string} name - name of collection transform to remove
* @memberof Collection
*/
Collection.prototype.removeTransform = function (name) {
  // silently succeeds even when no such transform exists
  delete this.transforms[name];
};
Collection.prototype.byExample = function (template) {
  // build one single-property clause per own property of the template,
  // AND-ed together into a mongo-style query
  var clauses = [];
  var key;
  for (key in template) {
    if (!template.hasOwnProperty(key)) continue;
    var clause = {};
    clause[key] = template[key];
    clauses.push(clause);
  }
  return {
    '$and': clauses
  };
};
Collection.prototype.findObject = function (template) {
  // first document matching every property of the template (or null)
  return this.findOne(this.byExample(template));
};
Collection.prototype.findObjects = function (template) {
  // all documents matching every property of the template
  return this.find(this.byExample(template));
};
/*----------------------------+
| TTL daemon |
+----------------------------*/
Collection.prototype.ttlDaemonFuncGen = function () {
  var collection = this;
  var age = this.ttl.age;
  // returns the interval callback which purges documents older than 'age' ms
  return function ttlDaemon() {
    var now = Date.now();
    var toRemove = collection.chain().where(function daemonFilter(member) {
      // last-touched time: prefer 'updated' over 'created'
      var timestamp = member.meta.updated || member.meta.created;
      return (now - timestamp) > age;
    });
    toRemove.remove();
  };
};
/**
* Updates or applies collection TTL settings.
* @param {int} age - age (in ms) to expire document from collection
* @param {int} interval - time (in ms) to clear collection of aged documents.
* @memberof Collection
*/
Collection.prototype.setTTL = function (age, interval) {
  // fix: always stop a previously running daemon first; repeated setTTL calls
  // used to leak the old interval and leave multiple daemons running
  if (this.ttl.daemon !== null) {
    clearInterval(this.ttl.daemon);
    this.ttl.daemon = null;
  }
  // a negative age disables the TTL daemon entirely
  if (age < 0) {
    return;
  }
  this.ttl.age = age;
  this.ttl.ttlInterval = interval;
  this.ttl.daemon = setInterval(this.ttlDaemonFuncGen(), interval);
};
/*----------------------------+
| INDEXING |
+----------------------------*/
/**
* create a row filter that covers all documents in the collection
*/
Collection.prototype.prepareFullDocIndex = function () {
  // identity mapping [0, 1, ..., n-1] covering every document position
  var count = this.data.length;
  var rows = new Array(count);
  for (var pos = 0; pos < count; pos += 1) {
    rows[pos] = pos;
  }
  return rows;
};
/**
* Will allow reconfiguring certain collection options.
* @param {boolean} options.adaptiveBinaryIndices - collection indices will be actively rebuilt rather than lazily
* @memberof Collection
*/
Collection.prototype.configureOptions = function (options) {
  options = options || {};
  if (!options.hasOwnProperty('adaptiveBinaryIndices')) {
    return;
  }
  this.adaptiveBinaryIndices = options.adaptiveBinaryIndices;
  // adaptive mode assumes indices are never dirty; rebuild any stale ones now
  if (this.adaptiveBinaryIndices) {
    this.ensureAllIndexes();
  }
};
/**
* Ensure binary index on a certain field
* @param {string} property - name of property to create binary index on
* @param {boolean=} force - (Optional) flag indicating whether to construct index immediately
* @memberof Collection
*/
Collection.prototype.ensureIndex = function (property, force) {
  // optional parameter to force rebuild whether flagged as dirty or not
  if (typeof (force) === 'undefined') {
    force = false;
  }
  if (property === null || property === undefined) {
    throw new Error('Attempting to set index without an associated property');
  }
  // an existing clean (not dirty) index needs no work unless forced
  if (this.binaryIndices[property] && !force) {
    if (!this.binaryIndices[property].dirty) return;
  }
  // if the index is already defined and we are using adaptiveBinaryIndices and we are not forcing a rebuild, return.
  if (this.adaptiveBinaryIndices === true && this.binaryIndices.hasOwnProperty(property) && !force) {
    return;
  }
  // start from a full identity index and sort it into place below
  var index = {
    'name': property,
    'dirty': true,
    'values': this.prepareFullDocIndex()
  };
  this.binaryIndices[property] = index;
  // comparator closes over the property (or dotted path) and the data array;
  // compares documents by their positions within 'data'
  var wrappedComparer =
    (function (prop, data) {
      var val1, val2;
      var propPath = ~prop.indexOf('.') ? prop.split('.') : false;
      return function (a, b) {
        if (propPath) {
          val1 = Utils.getIn(data[a], propPath, true);
          val2 = Utils.getIn(data[b], propPath, true);
        } else {
          val1 = data[a][prop];
          val2 = data[b][prop];
        }
        if (val1 !== val2) {
          if (Comparators.lt(val1, val2, false)) return -1;
          if (Comparators.gt(val1, val2, false)) return 1;
        }
        return 0;
      };
    })(property, this.data);
  index.values.sort(wrappedComparer);
  index.dirty = false;
  this.dirty = true; // for autosave scenarios
};
/**
* Perform checks to determine validity/consistency of all binary indices
* @param {object=} options - optional configuration object
* @param {boolean} [options.randomSampling=false] - whether (faster) random sampling should be used
* @param {number} [options.randomSamplingFactor=0.10] - percentage of total rows to randomly sample
* @param {boolean} [options.repair=false] - whether to fix problems if they are encountered
* @returns {string[]} array of index names where problems were found.
* @memberof Collection
* @example
* // check all indices on a collection, returns array of invalid index names
* var result = coll.checkAllIndexes({ repair: true, randomSampling: true, randomSamplingFactor: 0.15 });
* if (result.length > 0) {
* results.forEach(function(name) {
* console.log('problem encountered with index : ' + name);
* });
* }
*/
Collection.prototype.checkAllIndexes = function (options) {
  var bIndices = this.binaryIndices;
  var problems = [];
  var key;
  for (key in bIndices) {
    if (!hasOwnProperty.call(bIndices, key)) continue;
    // collect the names of indices that fail validation
    if (!this.checkIndex(key, options)) {
      problems.push(key);
    }
  }
  return problems;
};
/**
 * Perform checks to determine validity/consistency of a binary index
 * @param {string} property - name of the binary-indexed property to check
 * @param {object=} options - optional configuration object
 * @param {boolean} [options.randomSampling=false] - whether (faster) random sampling should be used
 * @param {number} [options.randomSamplingFactor=0.10] - percentage of total rows to randomly sample
 * @param {boolean} [options.repair=false] - whether to fix problems if they are encountered
 * @returns {boolean} whether the index was found to be valid (before optional correcting).
 * @throws {Error} when the named property has no binary index defined
 * @memberof Collection
 * @example
 * // full test
 * var valid = coll.checkIndex('name');
 * // full test with repair (if issues found)
 * valid = coll.checkIndex('name', { repair: true });
 * // random sampling (default is 10% of total document count)
 * valid = coll.checkIndex('name', { randomSampling: true });
 * // random sampling (sample 20% of total document count)
 * valid = coll.checkIndex('name', { randomSampling: true, randomSamplingFactor: 0.20 });
 * // random sampling (implied boolean)
 * valid = coll.checkIndex('name', { randomSamplingFactor: 0.20 });
 * // random sampling with repair (if issues found)
 * valid = coll.checkIndex('name', { repair: true, randomSampling: true });
 */
Collection.prototype.checkIndex = function (property, options) {
options = options || {};
// if 'randomSamplingFactor' specified but not 'randomSampling', assume true
if (options.randomSamplingFactor && options.randomSampling !== false) {
options.randomSampling = true;
}
// default/clamp sampling factor to 10% when absent or outside (0, 1]
options.randomSamplingFactor = options.randomSamplingFactor || 0.1;
if (options.randomSamplingFactor < 0 || options.randomSamplingFactor > 1) {
options.randomSamplingFactor = 0.1;
}
var valid = true, idx, iter, pos, len, biv;
// make sure we are passed a valid binary index name
if (!this.binaryIndices.hasOwnProperty(property)) {
throw new Error("called checkIndex on property without an index: " + property);
}
// if lazy indexing, rebuild only if flagged as dirty
if (!this.adaptiveBinaryIndices) {
this.ensureIndex(property);
}
biv = this.binaryIndices[property].values;
len = biv.length;
// if the index has an incorrect number of values
if (len !== this.data.length) {
if (options.repair) {
this.ensureIndex(property, true);
}
return false;
}
// an empty index over an empty collection is trivially valid
if (len === 0) {
return true;
}
var usingDotNotation = (property.indexOf('.') !== -1);
if (len === 1) {
// a single-entry index must reference data position 0
valid = (biv[0] === 0);
}
else {
if (options.randomSampling) {
// validate first and last
if (!ControlOps.$lte(Utils.getIn(this.data[biv[0]], property, usingDotNotation),
Utils.getIn(this.data[biv[1]], property, usingDotNotation))) {
valid = false;
}
if (!ControlOps.$lte(Utils.getIn(this.data[biv[len - 2]], property, usingDotNotation),
Utils.getIn(this.data[biv[len - 1]], property, usingDotNotation))) {
valid = false;
}
// if first and last positions are sorted correctly with their nearest neighbor,
// continue onto random sampling phase...
if (valid) {
// # random samplings = total count * sampling factor
iter = Math.floor((len - 1) * options.randomSamplingFactor);
// for each random sampling, validate that the binary index is sequenced properly
// with next higher value.
for (idx = 0; idx < iter - 1; idx++) {
// calculate random position
pos = Math.floor(Math.random() * (len - 1));
if (!ControlOps.$lte(Utils.getIn(this.data[biv[pos]], property, usingDotNotation),
Utils.getIn(this.data[biv[pos + 1]], property, usingDotNotation))) {
valid = false;
break;
}
}
}
}
else {
// validate that the binary index is sequenced properly
// (full scan of every adjacent pair)
for (idx = 0; idx < len - 1; idx++) {
if (!ControlOps.$lte(Utils.getIn(this.data[biv[idx]], property, usingDotNotation),
Utils.getIn(this.data[biv[idx + 1]], property, usingDotNotation))) {
valid = false;
break;
}
}
}
}
// if incorrectly sequenced and we are to fix problems, rebuild index
if (!valid && options.repair) {
this.ensureIndex(property, true);
}
return valid;
};
/**
 * Returns the indexed property values in the order maintained by a binary index.
 * @param {string} property - name of the binary-indexed property
 * @returns {Array} property values, ordered per the index
 */
Collection.prototype.getBinaryIndexValues = function (property) {
  var self = this;
  var positions = this.binaryIndices[property].values;
  return positions.map(function (dataPosition) {
    return Utils.getIn(self.data[dataPosition], property, true);
  });
};
/**
 * Returns a named unique index
 * @param {string} field - indexed field name
 * @param {boolean} force - if `true`, will rebuild index; otherwise, function may return null
 * @returns {UniqueIndex|undefined} existing index, a freshly built one (when forced), or a falsy value
 */
Collection.prototype.getUniqueIndex = function (field, force) {
  var existing = this.constraints.unique[field];
  if (existing || !force) {
    return existing;
  }
  // no index exists yet and caller asked us to build one
  return this.ensureUniqueIndex(field);
};
/**
 * (Re)builds the unique index for a field, registering the field name so the
 * index can be regenerated after a database (re)load.
 * @param {string} field - name of the property to build a unique index on
 * @returns {UniqueIndex} the rebuilt unique index, populated from current data
 */
Collection.prototype.ensureUniqueIndex = function (field) {
  var index = this.constraints.unique[field];
  if (!index) {
    // keep track of new unique index for regenerate after database (re)load.
    // (use strict comparison; '==' was a lint-level defect in the original)
    if (this.uniqueNames.indexOf(field) === -1) {
      this.uniqueNames.push(field);
    }
  }
  // if index already existed, (re)loading it will likely cause collisions, rebuild always
  this.constraints.unique[field] = index = new UniqueIndex(field);
  this.data.forEach(function (obj) {
    index.set(obj);
  });
  return index;
};
/**
 * Ensure all binary indices
 * @param {boolean} force - whether to force rebuild of existing lazy binary indices
 * @memberof Collection
 */
Collection.prototype.ensureAllIndexes = function (force) {
  var self = this;
  // rebuild (or lazily verify) each own binary index
  Object.keys(this.binaryIndices).forEach(function (name) {
    self.ensureIndex(name, force);
  });
};
/**
 * Internal method used to flag all lazy indices as dirty so they are
 * rebuilt on next use.
 */
Collection.prototype.flagBinaryIndexesDirty = function () {
  var indexMap = this.binaryIndices;
  Object.keys(indexMap).forEach(function (name) {
    indexMap[name].dirty = true;
  });
};
/**
 * Internal method used to flag a single lazy index as dirty.
 * No-op if no binary index exists under that name.
 * @param {string} index - name of the binary index to flag
 */
Collection.prototype.flagBinaryIndexDirty = function (index) {
  var bi = this.binaryIndices[index];
  if (bi) {
    bi.dirty = true;
  }
};
/**
 * Quickly determine number of documents in collection (or query)
 * @param {object=} query - (optional) query object to count results of
 * @returns {number} number of documents in the collection
 * @memberof Collection
 */
Collection.prototype.count = function (query) {
  // no query: total document count; otherwise count filtered rows
  return query ? this.chain().find(query).filteredrows.length : this.data.length;
};
/**
 * Rebuild idIndex (array of $ctrl ids parallel to data[]).
 * No-op when an idIndex already exists.
 */
Collection.prototype.ensureId = function () {
  if (this.idIndex) {
    return;
  }
  var docs = this.data;
  var total = docs.length;
  var ids = new Array(total);
  for (var pos = 0; pos < total; pos++) {
    ids[pos] = docs[pos].$ctrl;
  }
  this.idIndex = ids;
};
/**
 * Rebuild idIndex async with callback - useful for background syncing with a remote server
 * @param {function} callback - invoked after the async task completes
 */
Collection.prototype.ensureIdAsync = function (callback) {
// NOTE(review): the inner function's `this` depends on how Collection.prototype.async
// invokes it — confirm it is applied with the collection as context
this.async(function () {
this.ensureId();
}, callback);
};
/**
 * Add a dynamic view to the collection
 * @param {string} name - name of dynamic view to add
 * @param {object=} options - options to configure dynamic view with
 * @param {boolean} [options.persistent=false] - indicates if view is to main internal results array in 'resultdata'
 * @param {string} [options.sortPriority='passive'] - 'passive' (sorts performed on call to data) or 'active' (after updates)
 * @param {number} options.minRebuildInterval - minimum rebuild interval (need clarification to docs here)
 * @returns {DynamicView} reference to the dynamic view added
 * @memberof Collection
 * @example
 * var pview = users.addDynamicView('progeny');
 * pview.applyFind({'age': {'$lte': 40}});
 * pview.applySimpleSort('name');
 *
 * var results = pview.data();
 **/
Collection.prototype.addDynamicView = function (name, options) {
  var view = new DynamicView(this, name, options);
  this.DynamicViews.push(view);
  return view;
};
/**
 * Remove a dynamic view from the collection
 * @param {string} name - name of dynamic view to remove
 * @memberof Collection
 **/
Collection.prototype.removeDynamicView = function (name) {
  var kept = [];
  // keep every view whose name does not match
  for (var i = 0; i < this.DynamicViews.length; i++) {
    if (this.DynamicViews[i].name !== name) {
      kept.push(this.DynamicViews[i]);
    }
  }
  this.DynamicViews = kept;
};
/**
 * Look up dynamic view reference from within the collection
 * @param {string} name - name of dynamic view to retrieve reference of
 * @returns {DynamicView|null} A reference to the dynamic view with that name, or null if absent
 * @memberof Collection
 **/
Collection.prototype.getDynamicView = function (name) {
  var views = this.DynamicViews;
  var total = views.length;
  var i;
  for (i = 0; i < total; i++) {
    if (views[i].name === name) {
      return views[i];
    }
  }
  return null;
};
/**
 * Applies a 'mongo-like' find query object and passes all results to an update function.
 * For filter function querying you should migrate to [updateWhere()]{@link Collection#updateWhere}.
 *
 * @param {object|function} filterObject - 'mongo-like' query object (or deprecated filterFunction mode)
 * @param {function} updateFunction - update function to run against filtered documents
 * @memberof Collection
 */
Collection.prototype.findAndUpdate = function (filterObject, updateFunction) {
  // legacy path: a function argument is treated as a filter function
  if (typeof filterObject !== "function") {
    this.chain().find(filterObject).update(updateFunction);
  } else {
    this.updateWhere(filterObject, updateFunction);
  }
};
/**
 * Applies a 'mongo-like' find query object removes all documents which match that filter.
 *
 * @param {object} filterObject - 'mongo-like' query object
 * @memberof Collection
 */
Collection.prototype.findAndRemove = function (filterObject) {
  var matches = this.chain().find(filterObject);
  matches.remove();
};
/**
 * Adds object(s) to collection, ensure object(s) have meta properties, clone it if necessary, etc.
 * @param {(object|array)} doc - the document (or array of documents) to be inserted
 * @param {boolean=} overrideAdaptiveIndices - (optional) if `true`, adaptive indicies will be
 * temporarily disabled and then fully rebuilt after batch. This will be faster for
 * large inserts, but slower for small/medium inserts in large collections
 * @returns {(object|array)} document or documents inserted
 * @memberof Collection
 * @example
 * users.insert({
 * name: 'Odin',
 * age: 50,
 * address: 'Asgard'
 * });
 *
 * // alternatively, insert array of documents
 * users.insert([{ name: 'Thor', age: 35}, { name: 'ControlDB', age: 30}]);
 */
Collection.prototype.insert = function (doc, overrideAdaptiveIndices) {
// single-document inserts delegate directly to insertOne
if (!Array.isArray(doc)) {
return this.insertOne(doc);
}
// holder to the clone of the object inserted if collections is set to clone objects
var obj;
var results = [];
// if not cloning, disable adaptive binary indices for the duration of the batch insert,
// followed by lazy rebuild and re-enabling adaptive indices after batch insert.
var adaptiveBatchOverride = overrideAdaptiveIndices && !this.cloneObjects &&
this.adaptiveBinaryIndices && Object.keys(this.binaryIndices).length > 0;
if (adaptiveBatchOverride) {
this.adaptiveBinaryIndices = false;
}
try {
// batch-level 'pre-insert' fires once with the whole array;
// per-document emits are suppressed via the bulkInsert flag below
this.emit('pre-insert', doc);
for (var i = 0, len = doc.length; i < len; i++) {
obj = this.insertOne(doc[i], true);
if (!obj) {
// NOTE(review): bailing out mid-batch leaves earlier documents inserted and
// skips the batch 'insert' emit — confirm this is the intended contract
return undefined;
}
results.push(obj);
}
} finally {
// always restore adaptive indexing and rebuild indices touched by the batch
if (adaptiveBatchOverride) {
this.ensureAllIndexes();
this.adaptiveBinaryIndices = true;
}
}
// at the 'batch' level, if clone option is true then emitted docs are clones
this.emit('insert', results);
// if clone option is set, clone return values
results = this.cloneObjects ? clone(results, this.cloneMethod) : results;
return results.length === 1 ? results[0] : results;
};
/**
 * Adds a single object, ensures it has meta properties, clone it if necessary, etc.
 * @param {object} doc - the document to be inserted
 * @param {boolean} bulkInsert - quiet pre-insert and insert event emits
 * @returns {object} document or 'undefined' if there was a problem inserting it
 * @throws {TypeError} when doc is not an object or is null (also emitted as an 'error' event)
 */
Collection.prototype.insertOne = function (doc, bulkInsert) {
var err = null;
var returnObj;
// validate input; note typeof null === 'object', so the null case is the else-if
if (typeof doc !== 'object') {
err = new TypeError('Document needs to be an object');
} else if (doc === null) {
err = new TypeError('Object cannot be null');
}
if (err !== null) {
this.emit('error', err);
throw err;
}
// if configured to clone, do so now... otherwise just use same obj reference
var obj = this.cloneObjects ? clone(doc, this.cloneMethod) : doc;
// when freezing is enabled, work on an unfrozen copy so meta can be attached
if (!this.disableFreeze) {
obj = unFreeze(obj);
}
if (!this.disableMeta) {
// seed meta bookkeeping when the caller did not supply it
if (typeof obj.meta === 'undefined') {
obj.meta = {
revision: 0,
created: 0
};
} else if (!this.disableFreeze) {
obj.meta = unFreeze(obj.meta);
}
}
// both 'pre-insert' and 'insert' events are passed internal data reference even when cloning
// insert needs internal reference because that is where control itself listens to add meta
if (!bulkInsert) {
this.emit('pre-insert', obj);
}
// add() assigns $ctrl, updates indices/views; a falsy result aborts the insert
if (!this.add(obj)) {
return undefined;
}
// update meta and store changes if ChangesAPI is enabled
// (moved from "insert" event listener to allow internal reference to be used)
if (this.disableChangesApi) {
this.insertMeta(obj);
} else {
this.insertMetaWithChange(obj);
}
// re-freeze the stored document now that bookkeeping is complete
if (!this.disableFreeze) {
deepFreeze(obj);
}
// if cloning is enabled, emit insert event with clone of new object
returnObj = this.cloneObjects ? clone(obj, this.cloneMethod) : obj;
if (!bulkInsert) {
this.emit('insert', returnObj);
}
this.addAutoUpdateObserver(returnObj);
return returnObj;
};
/**
 * Empties the collection.
 * @param {object=} options - configure clear behavior
 * @param {bool=} [options.removeIndices=false] - whether to remove indices in addition to data
 * @memberof Collection
 */
Collection.prototype.clear = function (options) {
  var opts = options || {};
  // wipe documents and all cached/derived state
  this.data = [];
  this.idIndex = null;
  this.cachedIndex = null;
  this.cachedBinaryIndex = null;
  this.cachedData = null;
  this.maxId = 0;
  this.DynamicViews = [];
  this.dirty = true;
  this.constraints = {
    unique: {},
    exact: {}
  };
  if (opts.removeIndices === true) {
    // drop index definitions entirely
    this.binaryIndices = {};
    this.uniqueNames = [];
  } else {
    // keep index definitions in place but reset each one to an empty, clean state
    var indexMap = this.binaryIndices;
    Object.keys(indexMap).forEach(function (name) {
      indexMap[name].dirty = false;
      indexMap[name].values = [];
    });
  }
};
/**
 * Updates an object and notifies collection that the document has changed.
 * @param {object|object[]} doc - document (or array of documents) to update within the collection
 * @returns {object|undefined} the updated document (a clone when cloneObjects is enabled);
 * undefined in batch (array) mode
 * @throws {Error} when the document lacks $ctrl or is not present in the collection
 * @memberof Collection
 */
Collection.prototype.update = function (doc) {
var adaptiveBatchOverride, k, len;
if (Array.isArray(doc)) {
len = doc.length;
// if not cloning, disable adaptive binary indices for the duration of the batch update,
// followed by lazy rebuild and re-enabling adaptive indices after batch update.
adaptiveBatchOverride = !this.cloneObjects &&
this.adaptiveBinaryIndices && Object.keys(this.binaryIndices).length > 0;
if (adaptiveBatchOverride) {
this.adaptiveBinaryIndices = false;
}
try {
// recurse one document at a time
for (k = 0; k < len; k += 1) {
this.update(doc[k]);
}
}
finally {
// always restore adaptive indexing and rebuild touched indices
if (adaptiveBatchOverride) {
this.ensureAllIndexes();
this.adaptiveBinaryIndices = true;
}
}
return;
}
// verify object is a properly formed document
if (!hasOwnProperty.call(doc, '$ctrl')) {
throw new Error('Trying to update unsynced document. Please save the document first by using insert() or addMany()');
}
try {
this.startTransaction();
var arr = this.get(doc.$ctrl, true),
oldInternal, // ref to existing obj
newInternal, // ref to new internal obj
position,
self = this;
if (!arr) {
throw new Error('Trying to update a document not in collection.');
}
oldInternal = arr[0]; // -internal- obj ref
position = arr[1]; // position in data array
// if configured to clone, do so now... otherwise just use same obj reference
newInternal = this.cloneObjects || (!this.disableDeltaChangesApi && this.disableFreeze) ? clone(doc, this.cloneMethod) : doc;
this.emit('pre-update', doc);
if (this.schema) {
// NOTE(review): 'key' is declared further down via hoisted var, so it is
// undefined at this point — confirm validateSchema's third argument is optional
doc = validateSchema(doc, this.schema, key);
if (doc instanceof Error) {
throw doc;
}
}
// keep every unique index in sync with the new document values
this.uniqueNames.forEach(function (key) {
self.getUniqueIndex(key, true).update(oldInternal, newInternal);
});
// operate the update
this.data[position] = newInternal;
if (newInternal !== doc) {
this.addAutoUpdateObserver(doc);
}
// now that we can efficiently determine the data[] position of newly added document,
// submit it for all registered DynamicViews to evaluate for inclusion/exclusion
for (var idx = 0; idx < this.DynamicViews.length; idx++) {
this.DynamicViews[idx].evaluateDocument(position, false);
}
var key;
if (this.adaptiveBinaryIndices) {
// for each binary index defined in collection, immediately update rather than flag for lazy rebuild
var bIndices = this.binaryIndices;
for (key in bIndices) {
this.adaptiveBinaryIndexUpdate(position, key);
}
}
else {
this.flagBinaryIndexesDirty();
}
// keep idIndex entry in sync with the (possibly cloned) replacement document
this.idIndex[position] = newInternal.$ctrl;
//this.flagBinaryIndexesDirty();
if (this.isIncremental) {
this.dirtyIds.push(newInternal.$ctrl);
}
this.commit();
this.dirty = true; // for autosave scenarios
// update meta and store changes if ChangesAPI is enabled
if (this.disableChangesApi) {
newInternal = this.updateMeta(newInternal);
} else {
newInternal = this.updateMetaWithChange(newInternal, oldInternal);
}
// re-freeze the stored document after meta has been written
if (!this.disableFreeze) {
deepFreeze(newInternal);
}
var returnObj;
// if cloning is enabled, emit 'update' event and return with clone of new object
if (this.cloneObjects) {
returnObj = clone(newInternal, this.cloneMethod);
}
else {
returnObj = newInternal;
}
this.emit('update', returnObj, oldInternal);
return returnObj;
} catch (err) {
this.rollback();
this.controlConsoleWrapper.error(err.message);
this.emit('error', err);
throw (err); // re-throw error so user does not think it succeeded
}
};
/**
 * Add object to collection
 * Assigns a new $ctrl id, initializes meta versioning, maintains unique/binary
 * indices and dynamic views, and commits the transaction.
 * @param {object} obj - object to add (must not already carry a $ctrl id)
 * @returns {object} the added object (a clone when cloneObjects is enabled)
 * @throws {TypeError} when obj is not an object
 * @throws {Error} when obj already carries a $ctrl id
 */
Collection.prototype.add = function (obj) {
// if parameter isn't object exit with throw
if ('object' !== typeof obj) {
throw new TypeError('Object being added needs to be an object');
}
// if object you are adding already has id column it is either already in the collection
// or the object is carrying its own 'id' property. If it also has a meta property,
// then this is already in collection so throw error, otherwise rename to originalId and continue adding.
if (typeof (obj.$ctrl) !== 'undefined') {
throw new Error('Document is already in collection, please use update()');
}
/*
* try adding object to collection
*/
try {
this.startTransaction();
this.maxId++;
if (isNaN(this.maxId)) {
// recover a corrupted maxId by deriving from the last stored document
// NOTE(review): this throws when the collection is empty — confirm acceptable
this.maxId = (this.data[this.data.length - 1].$ctrl + 1);
}
var newId = this.maxId;
obj.$ctrl = newId;
if (!this.disableMeta) {
obj.meta.version = 0;
}
if (this.schema) {
// NOTE(review): 'key' is declared by the for-in loop below (hoisted var), so it is
// undefined at this point — confirm validateSchema's third argument is optional
obj = validateSchema(obj, this.schema, key);
if (obj instanceof Error) {
throw obj;
}
}
// register the new document with every unique index (force-building them if needed)
for (var i = 0, len = this.uniqueNames.length; i < len; i++) {
this.getUniqueIndex(this.uniqueNames[i], true).set(obj);
}
// only maintain idIndex if it has already been built
if (this.idIndex) {
this.idIndex.push(newId);
}
if (this.isIncremental) {
this.dirtyIds.push(newId);
}
// add the object
this.data.push(obj);
var addedPos = this.data.length - 1;
// now that we can efficiently determine the data[] position of newly added document,
// submit it for all registered DynamicViews to evaluate for inclusion/exclusion
var dvlen = this.DynamicViews.length;
for (i = 0; i < dvlen; i++) {
this.DynamicViews[i].evaluateDocument(addedPos, true);
}
if (this.adaptiveBinaryIndices) {
// for each binary index defined in collection, immediately update rather than flag for lazy rebuild
var bIndices = this.binaryIndices;
for (var key in bIndices) {
this.adaptiveBinaryIndexInsert(addedPos, key);
}
}
else {
this.flagBinaryIndexesDirty();
}
this.commit();
this.dirty = true; // for autosave scenarios
return (this.cloneObjects) ? (clone(obj, this.cloneMethod)) : (obj);
} catch (err) {
this.rollback();
this.controlConsoleWrapper.error(err.message);
this.emit('error', err);
throw (err); // re-throw error so user does not think it succeeded
}
};
/**
 * Applies a filter function and passes all results to an update function.
 *
 * @param {function} filterFunction - filter function whose results will execute update
 * @param {function} updateFunction - update function to run against filtered documents
 * @memberof Collection
 */
Collection.prototype.updateWhere = function (filterFunction, updateFunction) {
  var matches = this.where(filterFunction);
  try {
    for (var i = 0; i < matches.length; i++) {
      this.update(updateFunction(matches[i]));
    }
  } catch (err) {
    // on failure: roll back the transaction and log; error is not rethrown here
    this.rollback();
    this.controlConsoleWrapper.error(err.message);
  }
};
/**
 * Remove all documents matching supplied filter function.
 * For 'mongo-like' querying you should migrate to [findAndRemove()]{@link Collection#findAndRemove}.
 * @param {function|object} query - query object to filter on
 * @memberof Collection
 */
Collection.prototype.removeWhere = function (query) {
  if (typeof query !== 'function') {
    // 'mongo-like' query object path
    this.chain().find(query).remove();
    return;
  }
  // filter-function path: collect matches then batch-remove
  this.remove(this.data.filter(query));
};
/**
 * Removes every document from the collection while leaving index definitions intact.
 */
Collection.prototype.removeDataOnly = function () {
  var everything = this.data.slice();
  this.remove(everything);
};
/**
 * Internal method to remove a batch of documents from the collection.
 * Updates dynamic views, binary indices, unique indices, data[] and idIndex[] in one pass.
 * @param {number[]} positions - data/idIndex positions to remove
 * @returns {null|undefined} null on error (after rollback and 'error' emit), otherwise undefined
 */
Collection.prototype.removeBatchByPositions = function (positions) {
var len = positions.length;
var xo = {};
var dlen, didx, idx;
var bic = Object.keys(this.binaryIndices).length;
var uic = Object.keys(this.constraints.unique).length;
// batch removals rebuild all binary indices once at the end rather than adapting per-document
var adaptiveOverride = this.adaptiveBinaryIndices && Object.keys(this.binaryIndices).length > 0;
var doc, self = this;
try {
this.startTransaction();
// create hashobject for positional removal inclusion tests...
// all keys defined in this hashobject represent $ctrl ids of the documents to remove.
this.ensureId();
for (idx = 0; idx < len; idx++) {
xo[this.idIndex[positions[idx]]] = true;
}
// if we will need to notify dynamic views and/or binary indices to update themselves...
dlen = this.DynamicViews.length;
if ((dlen > 0) || (bic > 0) || (uic > 0)) {
if (dlen > 0) {
// notify dynamic views to remove relevant documents at data positions
for (didx = 0; didx < dlen; didx++) {
// notify dv of remove (passing batch/array of positions)
this.DynamicViews[didx].removeDocument(positions);
}
}
// notify binary indices to update
if (this.adaptiveBinaryIndices && !adaptiveOverride) {
// for each binary index defined in collection, immediately update rather than flag for lazy rebuild
var key, bIndices = this.binaryIndices;
for (key in bIndices) {
this.adaptiveBinaryIndexRemove(positions, key);
}
}
else {
this.flagBinaryIndexesDirty();
}
if (uic) {
// drop each removed document's value from every unique index
this.uniqueNames.forEach(function (key) {
var index = self.getUniqueIndex(key);
if (index) {
for (idx = 0; idx < len; idx++) {
doc = self.data[positions[idx]];
if (doc[key] !== null && doc[key] !== undefined) {
index.remove(doc[key]);
}
}
}
});
}
}
// emit 'delete' events only of listeners are attached.
// since data not removed yet, in future we can emit single delete event with array...
// for now that might be breaking change to put in potential 1.6 or ControlDB (controldb2) version
if (!this.disableChangesApi || this.events.delete.length > 1) {
for (idx = 0; idx < len; idx++) {
this.emit('delete', this.data[positions[idx]]);
}
}
// remove from data[] :
// filter collection data for items not in inclusion hashobject
this.data = this.data.filter(function (obj) {
return !xo[obj.$ctrl];
});
// record removed ids for incremental persistence
// (idIndex is deliberately still unfiltered here, so positions remain valid lookups)
if (this.isIncremental) {
for (idx = 0; idx < len; idx++) {
this.dirtyIds.push(this.idIndex[positions[idx]]);
}
}
// remove from idIndex[] :
// filter idIndex for items not in inclusion hashobject
this.idIndex = this.idIndex.filter(function (id) {
return !xo[id];
});
// deferred one-shot rebuild of all binary indices for the batch
if (this.adaptiveBinaryIndices && adaptiveOverride) {
this.adaptiveBinaryIndices = false;
this.ensureAllIndexes(true);
this.adaptiveBinaryIndices = true;
}
this.commit();
// flag collection as dirty for autosave
this.dirty = true;
}
catch (err) {
this.rollback();
// restore adaptive indexing if we had temporarily disabled it
if (adaptiveOverride) {
this.adaptiveBinaryIndices = true;
}
this.controlConsoleWrapper.error(err.message);
this.emit('error', err);
return null; // error is reported via 'error' event rather than rethrown
}
};
/**
 * Internal method called by remove()
 * @param {object[]|number[]} batch - array of documents or $ctrl ids to remove
 */
Collection.prototype.removeBatch = function (batch) {
  var idToPosition = {};
  var positions = [];
  var idx;
  // map every current $ctrl id to its data[] position
  for (idx = 0; idx < this.data.length; idx++) {
    idToPosition[this.data[idx].$ctrl] = idx;
  }
  // translate each batch entry (document object or raw $ctrl id) into a position
  for (idx = 0; idx < batch.length; idx++) {
    var entry = batch[idx];
    if (typeof entry === 'object') {
      positions.push(idToPosition[entry.$ctrl]);
    } else {
      positions.push(idToPosition[entry]);
    }
  }
  this.removeBatchByPositions(positions);
};
/**
 * Remove a document from the collection
 * @param {object|object[]|number} doc - document (or array of documents, or $ctrl id) to remove from collection
 * @returns {object|null|undefined} the removed document stripped of $ctrl/meta, null on error,
 * or undefined for array (batch) mode
 * @throws {Error} when doc is not an object or lacks a $ctrl id
 * @memberof Collection
 */
Collection.prototype.remove = function (doc) {
  // (removed unused local 'frozen' from original implementation)
  if (typeof doc === 'number') {
    // allow passing a raw $ctrl id
    doc = this.get(doc);
  }
  if ('object' !== typeof doc) {
    throw new Error('Parameter is not an object');
  }
  if (Array.isArray(doc)) {
    this.removeBatch(doc);
    return;
  }
  if (!hasOwnProperty.call(doc, '$ctrl')) {
    throw new Error('Object is not a document stored in the collection');
  }
  try {
    this.startTransaction();
    var arr = this.get(doc.$ctrl, true),
      // obj = arr[0],
      position = arr[1]; // position in data array
    var self = this;
    // remove this document's values from each unique index covering it
    this.uniqueNames.forEach(function (key) {
      if (doc[key] !== null && typeof doc[key] !== 'undefined') {
        var index = self.getUniqueIndex(key);
        if (index) {
          index.remove(doc[key]);
        }
      }
    });
    // notify all registered DynamicViews to remove this document by position
    for (var idx = 0; idx < this.DynamicViews.length; idx++) {
      this.DynamicViews[idx].removeDocument(position);
    }
    if (this.adaptiveBinaryIndices) {
      // for each binary index defined in collection, immediately update rather than flag for lazy rebuild
      var key, bIndices = this.binaryIndices;
      for (key in bIndices) {
        this.adaptiveBinaryIndexRemove(position, key);
      }
    }
    else {
      this.flagBinaryIndexesDirty();
    }
    this.data.splice(position, 1);
    this.removeAutoUpdateObserver(doc);
    // remove id from idIndex
    this.idIndex.splice(position, 1);
    if (this.isIncremental) {
      this.dirtyIds.push(doc.$ctrl);
    }
    this.commit();
    this.dirty = true; // for autosave scenarios
    this.emit('delete', arr[0]);
    // strip internal bookkeeping fields from the (possibly frozen) document before returning it
    if (!this.disableFreeze) {
      doc = unFreeze(doc);
    }
    delete doc.$ctrl;
    delete doc.meta;
    if (!this.disableFreeze) {
      freeze(doc);
    }
    return doc;
  } catch (err) {
    this.rollback();
    this.controlConsoleWrapper.error(err.message);
    this.emit('error', err);
    return null; // error is reported via 'error' event rather than rethrown
  }
};
/*---------------------+
| Finding methods |
+----------------------*/
/**
 * Get by Id - faster than other methods because of the searching algorithm
 * @param {int} id - $ctrl id of document you want to retrieve
 * @param {boolean} returnPosition - if 'true' we will return [object, position]
 * @returns {(object|array|null)} Object reference if document was found, null if not,
 * or an array if 'returnPosition' was passed.
 * @throws {TypeError} when id cannot be parsed as an integer
 * @memberof Collection
 */
Collection.prototype.get = function (id, returnPosition) {
// lazily (re)build the id index if needed
if (!this.idIndex) {
this.ensureId();
}
var retpos = returnPosition || false,
data = this.idIndex,
max = data.length - 1,
min = 0,
mid = (min + max) >> 1;
id = typeof id === 'number' ? id : parseInt(id, 10);
if (isNaN(id)) {
throw new TypeError('Passed id is not an integer');
}
// binary search over idIndex; ids are assigned monotonically in add(), keeping the index sorted
while (data[min] < data[max]) {
mid = (min + max) >> 1;
if (data[mid] < id) {
min = mid + 1;
} else {
max = mid;
}
}
// loop converges with min === max at the candidate slot; verify it actually holds the id
if (max === min && data[min] === id) {
if (retpos) {
return [this.data[min], min];
}
return this.data[min];
}
return null;
};
/**
 * Perform binary range lookup for the data[dataPosition][binaryIndexName] property value
 * Since multiple documents may contain the same value (which the index is sorted on),
 * we hone in on range and then linear scan range to find exact index array position.
 * @param {int} dataPosition : coll.data array index/position
 * @param {string} binaryIndexName : index to search for dataPosition in
 * @returns {int|null} position within the index's values array, or null when not found
 */
Collection.prototype.getBinaryIndexPosition = function (dataPosition, binaryIndexName) {
  var indexedValue = Utils.getIn(this.data[dataPosition], binaryIndexName, true);
  var indexPositions = this.binaryIndices[binaryIndexName].values;
  // i think calculateRange can probably be moved to collection
  // as it doesn't seem to need resultset. need to verify
  var range = this.calculateRange("$eq", binaryIndexName, indexedValue);
  if (range[0] === 0 && range[1] === -1) {
    // value not present anywhere in the index
    return null;
  }
  // linear scan the matching-value sub-segment for the exact data position
  for (var scan = range[0]; scan <= range[1]; scan++) {
    if (indexPositions[scan] === dataPosition) {
      return scan;
    }
  }
  // position not found within the matching range
  return null;
};
/**
 * Adaptively insert a selected item to the index.
 * Computes the sorted position for the document's indexed value and splices
 * its data position into the index there.
 * @param {int} dataPosition : coll.data array index/position
 * @param {string} binaryIndexName : index to search for dataPosition in
 */
Collection.prototype.adaptiveBinaryIndexInsert = function (dataPosition, binaryIndexName) {
var usingDotNotation = (binaryIndexName.indexOf('.') !== -1);
var index = this.binaryIndices[binaryIndexName].values;
var val = Utils.getIn(this.data[dataPosition], binaryIndexName, usingDotNotation);
// If you are inserting a javascript Date value into a binary index, convert to epoch time
if (this.serializableIndices === true && val instanceof Date) {
// NOTE(review): for a dot-notation index name this assignment writes a literal
// flat key (e.g. obj['a.b']) rather than the nested property, and the re-read
// below omits the usingDotNotation flag — confirm intended for nested indices
this.data[dataPosition][binaryIndexName] = val.getTime();
val = Utils.getIn(this.data[dataPosition], binaryIndexName);
}
// empty index short-circuits to position 0; otherwise search for the sorted insert position
var idxPos = (index.length === 0) ? 0 : this.calculateRangeStart(binaryIndexName, val, true, usingDotNotation);
// insert new data index into our binary index at the proper sorted location for relevant property calculated by idxPos.
// doing this after adjusting dataPositions so no clash with previous item at that position.
this.binaryIndices[binaryIndexName].values.splice(idxPos, 0, dataPosition);
};
/**
 * Adaptively update a selected item within an index: remove its stale entry,
 * then re-insert it at the freshly computed sorted position.
 * @param {int} dataPosition : coll.data array index/position
 * @param {string} binaryIndexName : index to search for dataPosition in
 */
Collection.prototype.adaptiveBinaryIndexUpdate = function (dataPosition, binaryIndexName) {
  var positions = this.binaryIndices[binaryIndexName].values;
  var count = positions.length;
  var found = 0;
  // linear scan with strict compare (previously measured faster than indexOf here)
  while (found < count && positions[found] !== dataPosition) {
    found++;
  }
  // drop the stale entry (no-op splice when not found, since found === count)
  positions.splice(found, 1);
  this.adaptiveBinaryIndexInsert(dataPosition, binaryIndexName);
};
/**
 * Adaptively remove a selected item from the index.
 * @param {number|number[]} dataPosition : coll.data array index/position
 * @param {string} binaryIndexName : index to search for dataPosition in
 * @param {boolean=} removedFromIndexOnly - when true (called from adaptiveBinaryIndexUpdate),
 * only the index entry is removed; data positions are not shifted down
 * @returns {null|undefined} null when the position could not be located in the index
 */
Collection.prototype.adaptiveBinaryIndexRemove = function (dataPosition, binaryIndexName, removedFromIndexOnly) {
var bi = this.binaryIndices[binaryIndexName];
var len, idx, rmidx, rmlen, rxo = {};
var curr, shift, idxPos;
if (Array.isArray(dataPosition)) {
// when called from chained remove, and only one document in array,
// it will be faster to use old algorithm
rmlen = dataPosition.length;
if (rmlen === 1) {
dataPosition = dataPosition[0];
}
// we were passed an array (batch) of documents so use this 'batch optimized' algorithm
else {
// build inclusion hash of data positions to strip from the index
for (rmidx = 0; rmidx < rmlen; rmidx++) {
rxo[dataPosition[rmidx]] = true;
}
// remove document from index (with filter function)
bi.values = bi.values.filter(function (di) { return !rxo[di]; });
// if we passed this optional flag parameter, we are calling from adaptiveBinaryIndexUpdate,
// in which case data positions stay the same.
if (removedFromIndexOnly === true) {
return;
}
var sortedPositions = dataPosition.slice();
sortedPositions.sort(function (a, b) { return a - b; });
// to remove holes, we need to 'shift down' the index's data array positions
// we need to adjust array positions -1 for each index data positions greater than removed positions
len = bi.values.length;
for (idx = 0; idx < len; idx++) {
curr = bi.values[idx];
shift = 0;
// count how many removed positions fall below this entry; that is its downshift
for (rmidx = 0; rmidx < rmlen && curr > sortedPositions[rmidx]; rmidx++) {
shift++;
}
bi.values[idx] -= shift;
}
// batch processed, bail out
return;
}
// not a batch so continue...
}
idxPos = this.getBinaryIndexPosition(dataPosition, binaryIndexName);
if (idxPos === null) {
// throw new Error('unable to determine binary index position');
return null;
}
// remove document from index (with splice)
bi.values.splice(idxPos, 1);
// if we passed this optional flag parameter, we are calling from adaptiveBinaryIndexUpdate,
// in which case data positions stay the same.
if (removedFromIndexOnly === true) {
return;
}
// since index stores data array positions, if we remove a document
// we need to adjust array positions -1 for all document positions greater than removed position
len = bi.values.length;
for (idx = 0; idx < len; idx++) {
if (bi.values[idx] > dataPosition) {
bi.values[idx]--;
}
}
};
/**
 * Internal method used for index maintenance and indexed searching.
 * Calculates the beginning of an index range for a given value.
 * For index maintenance (adaptive:true), we will return a valid index position to insert to.
 * For querying (adaptive:false/undefined), we will :
 *   return lower bound/index of range of that value (if found)
 *   return next lower index position if not found (hole)
 * If index is empty it is assumed to be handled at higher level, so
 * this method assumes there is at least 1 document in index.
 *
 * @param {string} prop - name of property which has binary index
 * @param {any} val - value to find within index
 * @param {bool?} adaptive - if true, we will return insert position
 * @param {bool?} usingDotNotation - whether prop is a dot-notated (nested) path
 * @returns {number} index position (or -1 when the index is empty)
 */
Collection.prototype.calculateRangeStart = function (prop, val, adaptive, usingDotNotation) {
  var rcd = this.data;
  var index = this.binaryIndices[prop].values;
  var min = 0;
  var max = index.length - 1;
  var mid = 0;

  if (index.length === 0) {
    return -1;
  }

  // binary search: hone in on start position of value
  // (unused minVal/maxVal locals from the previous version removed)
  while (min < max) {
    mid = (min + max) >> 1;

    if (Comparators.lt(Utils.getIn(rcd[index[mid]], prop, usingDotNotation), val, false)) {
      min = mid + 1;
    } else {
      max = mid;
    }
  }

  var lbound = min;

  // found it... return it
  if (Comparators.aeq(val, Utils.getIn(rcd[index[lbound]], prop, usingDotNotation))) {
    return lbound;
  }

  // if not in index and our value is less than the found one
  if (Comparators.lt(val, Utils.getIn(rcd[index[lbound]], prop, usingDotNotation), false)) {
    return adaptive ? lbound : lbound - 1;
  }

  // not in index and our value is greater than the found one
  return adaptive ? lbound + 1 : lbound;
};
/**
 * Internal method used for indexed $between. Given a prop (index name), and a value
 * (which may or may not yet exist) this will find the final position of that upper range value.
 * @param {string} prop - name of property which has binary index
 * @param {any} val - upper-range value to locate within the index
 * @param {bool?} usingDotNotation - whether prop is a dot-notated (nested) path
 * @returns {number} index position of the range end (or -1 when the index is empty)
 */
Collection.prototype.calculateRangeEnd = function (prop, val, usingDotNotation) {
  var rcd = this.data;
  var index = this.binaryIndices[prop].values;
  var min = 0;
  var max = index.length - 1;
  var mid = 0;

  if (index.length === 0) {
    return -1;
  }

  // binary search: hone in on end position of value
  // (unused minVal/maxVal locals from the previous version removed)
  while (min < max) {
    mid = (min + max) >> 1;

    if (Comparators.lt(val, Utils.getIn(rcd[index[mid]], prop, usingDotNotation), false)) {
      max = mid;
    } else {
      min = mid + 1;
    }
  }

  var ubound = max;

  // only eq if last element in array is our val
  if (Comparators.aeq(val, Utils.getIn(rcd[index[ubound]], prop, usingDotNotation))) {
    return ubound;
  }

  // if not in index and our value is less than the found one
  if (Comparators.gt(val, Utils.getIn(rcd[index[ubound]], prop, usingDotNotation), false)) {
    return ubound + 1;
  }

  // either hole or first nonmatch
  if (Comparators.aeq(val, Utils.getIn(rcd[index[ubound - 1]], prop, usingDotNotation))) {
    return ubound - 1;
  }

  // hole, so ubound if nearest gt than the val we were looking for
  return ubound;
};
/**
 * calculateRange() - Binary Search utility method to find range/segment of values matching criteria.
 * this is used for collection.find() and first find filter of resultset/dynview
 * slightly different than get() binary search in that get() hones in on 1 value,
 * but we have to hone in on many (range)
 * @param {string} op - operation, such as $eq
 * @param {string} prop - name of property to calculate range for
 * @param {object} val - value to use for range calculation.
 * @returns {array} [start, end] positions into the binary index's values array
 *   (not into coll.data). NOTE: for the '$in' operator the return value is instead
 *   a flat array of individual index positions.
 */
Collection.prototype.calculateRange = function (op, prop, val) {
var rcd = this.data;
var index = this.binaryIndices[prop].values;
var min = 0;
var max = index.length - 1;
var mid = 0;
var lbound, lval;
var ubound, uval;
// when no documents are in collection, return empty range condition
if (rcd.length === 0) {
return [0, -1];
}
var usingDotNotation = (prop.indexOf('.') !== -1);
// smallest and largest indexed values, used below for fast out-of-range rejection
var minVal = Utils.getIn(rcd[index[min]], prop, usingDotNotation);
var maxVal = Utils.getIn(rcd[index[max]], prop, usingDotNotation);
// if value falls outside of our range return [0, -1] to designate no results
switch (op) {
case '$eq':
case '$aeq':
if (Comparators.lt(val, minVal, false) || Comparators.gt(val, maxVal, false)) {
return [0, -1];
}
break;
case '$dteq':
if (Comparators.lt(val, minVal, false) || Comparators.gt(val, maxVal, false)) {
return [0, -1];
}
break;
case '$gt':
// none are within range
if (Comparators.gt(val, maxVal, true)) {
return [0, -1];
}
// all are within range
if (Comparators.gt(minVal, val, false)) {
return [min, max];
}
break;
case '$gte':
// none are within range
if (Comparators.gt(val, maxVal, false)) {
return [0, -1];
}
// all are within range
if (Comparators.gt(minVal, val, true)) {
return [min, max];
}
break;
case '$lt':
// none are within range
if (Comparators.lt(val, minVal, true)) {
return [0, -1];
}
// all are within range
if (Comparators.lt(maxVal, val, false)) {
return [min, max];
}
break;
case '$lte':
// none are within range
if (Comparators.lt(val, minVal, false)) {
return [0, -1];
}
// all are within range
if (Comparators.lt(maxVal, val, true)) {
return [min, max];
}
break;
case '$between':
// val is expected to be a two-element [low, high] array here
// none are within range (low range is greater)
if (Comparators.gt(val[0], maxVal, false)) {
return [0, -1];
}
// none are within range (high range lower)
if (Comparators.lt(val[1], minVal, false)) {
return [0, -1];
}
lbound = this.calculateRangeStart(prop, val[0], false, usingDotNotation);
ubound = this.calculateRangeEnd(prop, val[1], usingDotNotation);
// clamp bounds into the valid index range, then nudge them inward until
// both endpoints actually satisfy the inclusive between condition
if (lbound < 0) lbound++;
if (ubound > max) ubound--;
if (!Comparators.gt(Utils.getIn(rcd[index[lbound]], prop, usingDotNotation), val[0], true)) lbound++;
if (!Comparators.lt(Utils.getIn(rcd[index[ubound]], prop, usingDotNotation), val[1], true)) ubound--;
if (ubound < lbound) return [0, -1];
return ([lbound, ubound]);
case '$in':
var idxset = [],
segResult = [];
// query each value with the '$eq' operator and merge the segment results.
// idxset is a sparse seen-set keyed by index position to avoid duplicates.
for (var j = 0, len = val.length; j < len; j++) {
var seg = this.calculateRange('$eq', prop, val[j]);
for (var i = seg[0]; i <= seg[1]; i++) {
if (idxset[i] === undefined) {
idxset[i] = true;
segResult.push(i);
}
}
}
return segResult;
}
// determine lbound where needed
switch (op) {
case '$eq':
case '$aeq':
case '$dteq':
case '$gte':
case '$lt':
lbound = this.calculateRangeStart(prop, val, false, usingDotNotation);
lval = Utils.getIn(rcd[index[lbound]], prop, usingDotNotation);
break;
default: break;
}
// determine ubound where needed
switch (op) {
case '$eq':
case '$aeq':
case '$dteq':
case '$lte':
case '$gt':
ubound = this.calculateRangeEnd(prop, val, usingDotNotation);
uval = Utils.getIn(rcd[index[ubound]], prop, usingDotNotation);
break;
default: break;
}
// translate the raw lbound/ubound into the final inclusive [start, end] range
switch (op) {
case '$eq':
case '$aeq':
case '$dteq':
// if hole (not found)
if (!Comparators.aeq(lval, val)) {
return [0, -1];
}
return [lbound, ubound];
case '$gt':
// if hole (not found) ub position is already greater
if (!Comparators.aeq(Utils.getIn(rcd[index[ubound]], prop, usingDotNotation), val)) {
return [ubound, max];
}
// otherwise (found) so ubound is still equal, get next
return [ubound + 1, max];
case '$gte':
// if hole (not found) lb position marks left outside of range
if (!Comparators.aeq(Utils.getIn(rcd[index[lbound]], prop, usingDotNotation), val)) {
return [lbound + 1, max];
}
// otherwise (found) so lb is first position where its equal
return [lbound, max];
case '$lt':
// if hole (not found) position already is less than
if (!Comparators.aeq(Utils.getIn(rcd[index[lbound]], prop, usingDotNotation), val)) {
return [min, lbound];
}
// otherwise (found) so lb marks left inside of eq range, get previous
return [min, lbound - 1];
case '$lte':
// if hole (not found) ub position marks right outside so get previous
if (!Comparators.aeq(Utils.getIn(rcd[index[ubound]], prop, usingDotNotation), val)) {
return [min, ubound - 1];
}
// otherwise (found) so ub is last position where its still equal
return [min, ubound];
default:
return [0, rcd.length - 1];
}
};
/**
 * Checks if a document exists in the collection.
 * @param {object} query - query object used to perform search with
 * @returns {boolean} True if document exists, false if not.
 * @memberof Collection
 */
Collection.prototype.exists = function (query) {
  var firstMatch = this.find(query)[0];
  return Boolean(firstMatch);
};
/**
 * Allows you to limit the number of documents passed to next chain operation.
 * A resultset copy() is made to avoid altering original resultset.
 *
 * @param {int} qty - The number of documents to return.
 * @returns {Resultset} Returns a copy of the resultset, limited by qty, for subsequent chain ops.
 * @memberof Collection
 * @example
 * // find the two oldest users
 * var result = users.chain().simplesort("age", true).limit(2).data();
 */
Collection.prototype.limit = function (qty) {
return this.chain().limit(qty);
};
/**
 * Used for skipping 'pos' number of documents in the resultset.
 *
 * @param {int} pos - Number of documents to skip; all preceding documents are filtered out.
 * @returns {Resultset} Returns a copy of the resultset, containing docs starting at 'pos' for subsequent chain ops.
 * @memberof Collection
 * @example
 * // find everyone but the two oldest users
 * var result = users.chain().simplesort("age", true).offset(2).data();
 */
Collection.prototype.offset = function (pos) {
return this.chain().offset(pos);
};
/**
 * copy() - To support reuse of resultset in branched query situations.
 * Delegates to a new chained resultset on this collection.
 *
 * @returns {Resultset} Returns a copy of the resultset (set) but the underlying document references will be the same.
 * @memberof Collection
 */
Collection.prototype.copy = function () {
return this.chain().copy();
};
/**
 * transform() - executes a named collection transform or raw array of transform steps against the resultset.
 *
 * @param transform {(string|array)} - name of collection transform or raw transform array
 * @param parameters {object=} - (Optional) object property hash of parameters, if the transform requires them.
 * @returns {Resultset} either (this) resultset or a clone of this resultset (depending on steps)
 * @memberof Collection
 * @example
 * users.addTransform('CountryFilter', [
 *   {
 *     type: 'find',
 *     value: {
 *       'country': { $eq: '[%lktxp]Country' }
 *     }
 *   },
 *   {
 *     type: 'simplesort',
 *     property: 'age',
 *     options: { desc: false}
 *   }
 * ]);
 * var results = users.chain().transform("CountryFilter", { Country: 'fr' }).data();
 */
Collection.prototype.transform = function (transform, parameters) {
return this.chain().transform(transform, parameters);
};
/**
 * User supplied compare function is provided two documents to compare. (chainable)
 * @example
 * rslt.sort(function(obj1, obj2) {
 *   if (obj1.name === obj2.name) return 0;
 *   if (obj1.name > obj2.name) return 1;
 *   if (obj1.name < obj2.name) return -1;
 * });
 *
 * @param {function} comparefun - A javascript compare function used for sorting.
 * @returns {Resultset} Reference to a new chained resultset, sorted, for future chain operations.
 * @memberof Collection
 */
Collection.prototype.sort = function (comparefun) {
return this.chain().sort(comparefun);
};
/**
 * Simpler, loose evaluation for user to sort based on a property name. (chainable).
 * Sorting based on the same lt/gt helper functions used for binary indices.
 *
 * @param {string} propname - name of property to sort by.
 * @param {object|bool=} options - boolean to specify if isdescending, or options object
 * @param {boolean} [options.desc=false] - whether to sort descending
 * @param {boolean} [options.disableIndexIntersect=false] - whether we should explicitly not use array intersection.
 * @param {boolean} [options.forceIndexIntersect=false] - force array intersection (if binary index exists).
 * @param {boolean} [options.useJavascriptSorting=false] - whether results are sorted via basic javascript sort.
 * @returns {Resultset} Reference to a new chained resultset, sorted, for future chain operations.
 * @memberof Collection
 * @example
 * var results = users.chain().simplesort('age').data();
 */
Collection.prototype.simplesort = function (propname, options) {
return this.chain().simplesort(propname, options);
};
/**
 * Allows sorting a resultset based on multiple columns.
 * @example
 * // to sort by age and then name (both ascending)
 * rs.compoundsort(['age', 'name']);
 * // to sort by age (ascending) and then by name (descending)
 * rs.compoundsort(['age', ['name', true]]);
 *
 * @param {array} properties - array of property names or subarray of [propertyname, isdesc] used to evaluate sort order
 * @returns {Resultset} Reference to a new chained resultset, sorted, for future chain operations.
 * @memberof Collection
 */
Collection.prototype.compoundsort = function (properties) {
return this.chain().compoundsort(properties);
};
/**
 * findOr() - oversee the operation of OR'ed query expressions.
 * OR'ed expression evaluation runs each expression individually against the full collection,
 * and finally does a set OR on each expression's results.
 * Each evaluation can utilize a binary index to prevent multiple linear array scans.
 *
 * @param {array} expressionArray - array of expressions
 * @returns {Resultset} new chained resultset for further chain ops.
 * @memberof Collection
 */
Collection.prototype.findOr = function (expressionArray) {
return this.chain().findOr(expressionArray);
};
/**
 * findAnd() - oversee the operation of AND'ed query expressions.
 * AND'ed expression evaluation runs each expression progressively against the full collection,
 * internally utilizing existing chained resultset functionality.
 * Only the first filter can utilize a binary index.
 *
 * @param {array} expressionArray - array of expressions
 * @returns {Resultset} new chained resultset for further chain ops.
 * @memberof Collection
 */
Collection.prototype.findAnd = function (expressionArray) {
return this.chain().findAnd(expressionArray);
};
/**
 * Used for querying via a mongo-style query object.
 *
 * @param {object} query - A mongo-style query object used for filtering current results.
 * @param {boolean=} firstOnly - (Optional) Used by collection.findOne()
 * @returns {Resultset} new chained resultset for further chain ops.
 * @memberof Collection
 * @example
 * var over30 = users.chain().find({ age: { $gte: 30 } }).data();
 */
Collection.prototype.find = function (query, firstOnly) {
return this.chain().find(query, firstOnly);
};
/**
 * Query the collection by supplying a javascript filter function.
 * @example
 * var results = coll.where(function(obj) {
 *   return obj.legs === 8;
 * });
 *
 * @param {function} fun - filter function to run against all collection docs
 * @returns {array} all documents which pass your filter function
 *   (NOTE(review): the value actually comes from Resultset.where via chain();
 *   confirm against Resultset.where whether it returns an array or the resultset)
 * @memberof Collection
 */
Collection.prototype.where = function (fun) {
return this.chain().where(fun);
};
/**
 * Terminates the chain and returns array of filtered documents
 *
 * @param {object=} options - allows specifying 'forceClones' and 'forceCloneMethod' options.
 * @param {boolean} options.forceClones - Allows forcing the return of cloned objects even when
 * the collection is not configured for clone object.
 * @param {string} options.forceCloneMethod - Allows overriding the default or collection specified cloning method.
 * Possible values include 'parse-stringify', 'jquery-extend-deep', 'shallow', 'shallow-assign'
 * @param {bool} options.removeMeta - Will force clones and strip $ctrl and meta properties from documents
 *
 * @returns {array} Array of documents in the resultset
 * @memberof Collection
 * @example
 * var results = users.chain().find({ age: 34 }).data();
 */
Collection.prototype.docs = function (options) {
return this.chain().docs(options);
};
/**
 * Map Reduce operation
 *
 * @param {function} mapFunction - function to use as map function
 * @param {function} reduceFunction - function to use as reduce function
 * @returns {*} The result of your mapReduce operation
 * @memberof Collection
 */
Collection.prototype.mapReduce = function (mapFunction, reduceFunction) {
return this.chain().mapReduce(mapFunction, reduceFunction);
};
/**
 * Applies a map function into a new collection for further chaining.
 * @param {function} mapFun - javascript map function
 * @param {object=} dataOptions - options to data() before input to your map function
 * @param {bool} dataOptions.removeMeta - allows removing meta before calling mapFun
 * @param {boolean} dataOptions.forceClones - forcing the return of cloned objects to your map object
 * @param {string} dataOptions.forceCloneMethod - Allows overriding the default or collection specified cloning method.
 * @memberof Collection
 * @example
 * orders.chain().find({ productId: 32 }).map(function(obj) {
 *   return {
 *     orderId: $ctrl,
 *     productId: productId,
 *     quantity: qty
 *   };
 * });
 */
Collection.prototype.map = function (mapFun, dataOptions) {
return this.chain().map(mapFun, dataOptions);
};
/**
 * Retrieve doc by Unique index
 * @param {string} field - name of uniquely indexed property to use when doing lookup
 * @param {value} value - unique value to search for; omit to get a curried lookup function
 * @returns {object|function} document matching the value passed (possibly cloned),
 *   or a lookup function bound to 'field' when value is omitted
 * @memberof Collection
 */
Collection.prototype.by = function (field, value) {
  // curried form: users.by('name') returns a reusable function(value)
  if (value === undefined) {
    var self = this;
    return function (val) {
      return self.by(field, val);
    };
  }
  var doc = this.getUniqueIndex(field, true).get(value);
  // honor the collection's clone settings on the way out
  return this.cloneObjects ? clone(doc, this.cloneMethod) : doc;
};
/**
 * Find one object by index property, by property equal to value
 * @param {object} query - query object used to perform search with
 * @returns {(object|null)} First matching document, or null if none
 * @memberof Collection
 */
Collection.prototype.findOne = function (query) {
  // Instantiate Resultset and exec find op passing firstOnly = true param
  var docs = this.chain().find(query || {}, true).docs();
  if (Array.isArray(docs) && docs.length === 0) {
    return null;
  }
  // honor the collection's clone settings on the way out
  return this.cloneObjects ? clone(docs[0], this.cloneMethod) : docs[0];
};
/**
 * Chain method, used for beginning a series of chained find() and/or view() operations
 * on a collection.
 *
 * @param {string|array=} transform - named transform or array of transform steps
 * @param {object=} parameters - Object containing properties representing parameters to substitute
 * @returns {Resultset} a new resultset over this collection, transformed when a transform was supplied
 * @memberof Collection
 */
Collection.prototype.chain = function (transform, parameters) {
  var rs = new Resultset(this);
  return (typeof transform === 'undefined') ? rs : rs.transform(transform, parameters);
};
/**
 * Find object by unindexed field by property equal to value,
 * simply iterates and returns the first element matching the query.
 * NOTE(review): the scan runs from the END of the data array backwards, so with
 * duplicate values the match nearest the end wins — confirm this is intended.
 * @param {string} prop - property (dot-notation supported) to compare
 * @param {*} value - value the property must strictly equal
 * @returns {(object|null)} matching document, or null when none matches
 */
Collection.prototype.findOneUnindexed = function (prop, value) {
  for (var pos = this.data.length - 1; pos >= 0; pos--) {
    if (Utils.getIn(this.data[pos], prop, true) === value) {
      return this.data[pos];
    }
  }
  return null;
};
/**
 * Transaction methods
 */
/** start the transaction: snapshot data and indexes so rollback() can restore them */
Collection.prototype.startTransaction = function () {
  if (!this.transactional) {
    return;
  }
  this.cachedData = clone(this.data, this.cloneMethod);
  this.cachedIndex = this.idIndex;
  this.cachedBinaryIndex = this.binaryIndices;
  this.cachedDirtyIds = this.dirtyIds;
  // propagate startTransaction to dynamic views
  for (var i = 0; i < this.DynamicViews.length; i++) {
    this.DynamicViews[i].startTransaction();
  }
};
/** commit the transaction: discard the snapshot taken by startTransaction() */
Collection.prototype.commit = function () {
  if (!this.transactional) {
    return;
  }
  this.cachedData = null;
  this.cachedIndex = null;
  this.cachedBinaryIndex = null;
  this.cachedDirtyIds = null;
  // propagate commit to dynamic views
  for (var i = 0; i < this.DynamicViews.length; i++) {
    this.DynamicViews[i].commit();
  }
};
/** roll back the transaction: restore the snapshot taken by startTransaction() */
Collection.prototype.rollback = function () {
  if (!this.transactional) {
    return;
  }
  // only restore when a snapshot actually exists
  if (this.cachedData !== null && this.cachedIndex !== null) {
    this.data = this.cachedData;
    this.idIndex = this.cachedIndex;
    this.binaryIndices = this.cachedBinaryIndex;
    this.dirtyIds = this.cachedDirtyIds;
  }
  // propagate rollback to dynamic views
  for (var i = 0; i < this.DynamicViews.length; i++) {
    this.DynamicViews[i].rollback();
  }
};
/**
 * async executor. This is only to enable callbacks at the end of the execution.
 * Fix: 'fun' is now validated synchronously, so the TypeError is thrown where the
 * caller can catch it; previously it was thrown from inside the timer callback,
 * making it uncatchable.
 * @param {function} fun - function to execute asynchronously
 * @param {function} callback - invoked after fun() completes
 * @throws {TypeError} when fun is not a function
 */
Collection.prototype.async = function (fun, callback) {
  if (typeof fun !== 'function') {
    throw new TypeError('Argument passed for async execution is not a function');
  }
  setTimeout(function () {
    fun();
    callback();
  }, 0);
};
/**
 * Join two collections on specified properties
 *
 * @param {array|Resultset|Collection} joinData - array of documents to 'join' to this collection
 * @param {string} leftJoinProp - property name in collection
 * @param {string} rightJoinProp - property name in joinData
 * @param {function=} mapFun - (Optional) map function to use
 * @param {object=} dataOptions - options to data() before input to your map function
 * @param {bool} dataOptions.removeMeta - allows removing meta before calling mapFun
 * @param {boolean} dataOptions.forceClones - forcing the return of cloned objects to your map object
 * @param {string} dataOptions.forceCloneMethod - Allows overriding the default or collection specified cloning method.
 * @returns {Resultset} Result of the mapping operation
 * @memberof Collection
 */
Collection.prototype.eqJoin = function (joinData, leftJoinProp, rightJoinProp, mapFun, dataOptions) {
  // join logic lives in the Resultset class; start a fresh resultset over this collection
  var rs = new Resultset(this);
  return rs.eqJoin(joinData, leftJoinProp, rightJoinProp, mapFun, dataOptions);
};
/* ------ STAGING API -------- */
/**
 * stages: a map of uniquely identified 'stages', which hold copies of objects to be
 * manipulated without affecting the data in the original collection
 */
Collection.prototype.stages = {};
/**
 * (Staging API) create a stage and/or retrieve it
 * Fix: 'stages' previously lived only on the prototype, so every Collection
 * instance shared a single stages map; each collection now lazily shadows it
 * with its own instance-level map on first use.
 * @param {string} name - name of the stage to create or retrieve
 * @returns {object} map of staged document copies, keyed by $ctrl
 * @memberof Collection
 */
Collection.prototype.getStage = function (name) {
  // shadow the shared prototype map with an instance-owned one on first use
  if (!hasOwnProperty.call(this, 'stages')) {
    this.stages = {};
  }
  if (!this.stages[name]) {
    this.stages[name] = {};
  }
  return this.stages[name];
};
/**
 * a collection of objects recording the changes applied through a commitStage
 * NOTE(review): declared on the prototype, so by default this array is shared by
 * every Collection instance unless an instance shadows it with its own property.
 */
Collection.prototype.commitLog = [];
/**
 * (Staging API) create a copy of an object and insert it into a stage
 * @param {string} stageName - name of the stage to place the copy into
 * @param {object} obj - document to deep-copy (via JSON round-trip) into the stage
 * @returns {object} the staged copy
 * @memberof Collection
 */
Collection.prototype.stage = function (stageName, obj) {
var copy = JSON.parse(JSON.stringify(obj));
this.getStage(stageName)[obj.$ctrl] = copy;
return copy;
};
/**
 * (Staging API) re-attach all objects to the original collection, so indexes and views can be rebuilt
 * then create a message to be inserted in the commitlog
 * Fix: entries are now pushed to a per-instance commitLog; previously they went to
 * the prototype-level array shared by every Collection instance.
 * @param {string} stageName - name of stage
 * @param {string} message - message recorded alongside each committed object
 * @memberof Collection
 */
Collection.prototype.commitStage = function (stageName, message) {
  var stage = this.getStage(stageName);
  var timestamp = new Date().getTime();

  // shadow the shared prototype commitLog with an instance-owned array on first use
  if (!hasOwnProperty.call(this, 'commitLog')) {
    this.commitLog = [];
  }

  for (var prop in stage) {
    this.update(stage[prop]);
    this.commitLog.push({
      timestamp: timestamp,
      message: message,
      data: JSON.parse(JSON.stringify(stage[prop]))
    });
  }

  // clear the stage once everything has been committed
  this.stages[stageName] = {};
};
/**
 * Intentional no-operation; usable wherever a callback argument is required
 * but no work should be performed.
 */
Collection.prototype.no_op = function () {
return;
};
/**
 * Extracts the value of a field from every document in the collection.
 * @param {string} field - property name (dot-notation supported) to pull from each doc
 * @returns {array} field values, in data order
 * @memberof Collection
 */
Collection.prototype.extract = function (field) {
  var deep = isDeepProperty(field);
  var values = [];
  for (var pos = 0; pos < this.data.length; pos += 1) {
    values.push(deepProperty(this.data[pos], field, deep));
  }
  return values;
};
/**
 * Returns the maximum value of a field across all documents (via Math.max).
 * @param {string} field - property name (dot-notation supported)
 * @returns {number} maximum value of the field
 * @memberof Collection
 */
Collection.prototype.max = function (field) {
return Math.max.apply(null, this.extract(field));
};
/**
 * Returns the minimum value of a field across all documents (via Math.min).
 * @param {string} field - property name (dot-notation supported)
 * @returns {number} minimum value of the field
 * @memberof Collection
 */
Collection.prototype.min = function (field) {
return Math.min.apply(null, this.extract(field));
};
/**
 * Finds the document holding the maximum value of a field.
 * @param {string} field - property name (dot-notation supported)
 * @returns {object} { index: $ctrl id of the max document (0 when data is empty),
 *   value: the maximum field value (undefined when data is empty) }
 * @memberof Collection
 */
Collection.prototype.maxRecord = function (field) {
  var deep = isDeepProperty(field);
  var best = {
    index: 0,
    value: undefined
  };
  var bestVal;
  for (var pos = 0; pos < this.data.length; pos += 1) {
    var curVal = deepProperty(this.data[pos], field, deep);
    // first defined value seeds the running maximum
    if (bestVal === undefined || bestVal < curVal) {
      bestVal = curVal;
      best.index = this.data[pos].$ctrl;
    }
  }
  best.value = bestVal;
  return best;
};
/**
 * Finds the document holding the minimum value of a field.
 * @param {string} field - property name (dot-notation supported)
 * @returns {object} { index: $ctrl id of the min document (0 when data is empty),
 *   value: the minimum field value (undefined when data is empty) }
 * @memberof Collection
 */
Collection.prototype.minRecord = function (field) {
  var deep = isDeepProperty(field);
  var best = {
    index: 0,
    value: undefined
  };
  var bestVal;
  for (var pos = 0; pos < this.data.length; pos += 1) {
    var curVal = deepProperty(this.data[pos], field, deep);
    // first defined value seeds the running minimum
    if (bestVal === undefined || bestVal > curVal) {
      bestVal = curVal;
      best.index = this.data[pos].$ctrl;
    }
  }
  best.value = bestVal;
  return best;
};
/**
 * Extracts the numeric values of a field across all documents; non-numeric or
 * unparseable values are excluded.
 * Fix: the previous implementation piped through .filter(Number), which also
 * dropped legitimate 0 values (0 is falsy), skewing avg/stdDev/median; now only
 * NaN parse results are excluded.
 * @param {string} field - property name to extract numeric values for
 * @returns {number[]} parsed numeric values
 * @memberof Collection
 */
Collection.prototype.extractNumerical = function (field) {
  return this.extract(field).map(parseBase10).filter(function (n) {
    return !isNaN(n);
  });
};
/**
 * Calculates the average numerical value of a property
 * (non-numeric values are excluded via extractNumerical)
 *
 * @param {string} field - name of property in docs to average
 * @returns {number} average of property in all docs in the collection
 * @memberof Collection
 */
Collection.prototype.avg = function (field) {
return average(this.extractNumerical(field));
};
/**
 * Calculate standard deviation of a field
 * (population standard deviation over the field's numeric values)
 * @memberof Collection
 * @param {string} field
 * @returns {number} standard deviation of the field's numeric values
 */
Collection.prototype.stdDev = function (field) {
return standardDeviation(this.extractNumerical(field));
};
/**
 * Calculates the most frequently occurring value of a field (statistical mode).
 * Values are tallied by their string representation (object keys).
 * Fix: when a later value's count exceeded the current maximum, 'max' was not
 * updated along with 'mode', so any subsequent value whose count merely exceeded
 * the *first* value's count could wrongly take over as the mode.
 * @memberof Collection
 * @param {string} field - property name
 * @returns {(string|undefined)} most frequent value (stringified), or undefined for empty data
 */
Collection.prototype.mode = function (field) {
  var dict = {},
    data = this.extract(field);
  data.forEach(function (obj) {
    if (dict[obj]) {
      dict[obj] += 1;
    } else {
      dict[obj] = 1;
    }
  });
  var max,
    prop, mode;
  for (prop in dict) {
    if (max) {
      if (max < dict[prop]) {
        // keep max in sync with the new mode
        max = dict[prop];
        mode = prop;
      }
    } else {
      mode = prop;
      max = dict[prop];
    }
  }
  return mode;
};
/**
 * Computes the median of a numeric field.
 * @memberof Collection
 * @param {string} field - property name
 * @returns {number} median of the field's numeric values
 */
Collection.prototype.median = function (field) {
  var nums = this.extractNumerical(field);
  nums.sort(sub);
  var mid = Math.floor(nums.length / 2);
  // odd count: middle element; even count: mean of the two middle elements
  if (nums.length % 2) {
    return nums[mid];
  }
  return (nums[mid - 1] + nums[mid]) / 2.0;
};
/**
 * General utils, including statistical functions
 */
// true when 'field' is a dot-notation path (e.g. "address.city")
function isDeepProperty(field) {
  return field.indexOf('.') >= 0;
}
// parses a value as a decimal float; parseFloat accepts no radix argument,
// so the previous second argument (10) was ignored and has been removed
function parseBase10(num) {
  return parseFloat(num);
}
// predicate: true unless obj is strictly undefined (null passes)
function isNotUndefined(obj) {
return obj !== undefined;
}
// numeric sum reducer (used with Array.prototype.reduce)
function add(a, b) {
return a + b;
}
// ascending numeric sort comparator
function sub(a, b) {
return a - b;
}
/**
 * Returns the median of an array of numbers.
 * Fix: sorts a copy instead of mutating the caller's array in place.
 * @param {number[]} values - numeric values (not modified)
 * @returns {number} median value
 */
function median(values) {
  var sorted = values.slice().sort(function (a, b) { return a - b; });
  var half = Math.floor(sorted.length / 2);
  return (sorted.length % 2) ? sorted[half] : ((sorted[half - 1] + sorted[half]) / 2.0);
}
// arithmetic mean of the array; NaN for an empty array (0/0)
function average(array) {
  var total = 0;
  for (var i = 0; i < array.length; i++) {
    total += array[i];
  }
  return total / array.length;
}
// population standard deviation: sqrt of the mean of squared deviations from the mean
function standardDeviation(values) {
  var mean = average(values);
  var squaredDiffs = values.map(function (v) {
    var diff = v - mean;
    return diff * diff;
  });
  return Math.sqrt(average(squaredDiffs));
}
/**
 * Resolves a (possibly nested) property from obj.
 * @param {object} obj - object to read from
 * @param {string} property - plain key, or dot-notation path when isDeep is not false
 * @param {boolean} isDeep - false to treat property as a plain key without splitting
 * @returns {*} resolved value (throws if an intermediate path segment is missing)
 */
function deepProperty(obj, property, isDeep) {
  if (isDeep === false) {
    // pass without processing
    return obj[property];
  }
  var node = obj;
  var path = property.split('.');
  for (var i = 0; i < path.length; i++) {
    node = node[path[i]];
  }
  return node;
}
/**
 * Generic binary search over a sorted array.
 * @param {array} array - array sorted consistently with 'fun'
 * @param {*} item - value to locate
 * @param {function} fun - comparator fun(item, element) returning <0, 0, or >0
 * @returns {object} { found: boolean, index: match position, or insertion point when not found }
 */
function binarySearch(array, item, fun) {
  var low = 0;
  var high = array.length;
  while (low < high) {
    var mid = (low + high) >> 1;
    var cmp = fun(item, array[mid]);
    if (cmp === 0) {
      return {
        found: true,
        index: mid
      };
    }
    if (cmp < 0) {
      high = mid;
    } else {
      low = mid + 1;
    }
  }
  return {
    found: false,
    index: high
  };
}
// binds a comparator to binarySearch, yielding a (array, item) -> result function
function BSonSort(fun) {
  var comparator = fun;
  return function (array, item) {
    return binarySearch(array, item, comparator);
  };
}
/**
 * Validates (and normalizes) a document against a schema.
 * Schema values are either a type constructor (Number/String/Boolean/Object) or a
 * template object: { type, required, default, minlength, maxlength, min, max,
 * validation, enum, properties }. Defaults are written back into the doc;
 * '$ctrl' and 'meta' keys are always skipped.
 *
 * Fixes over the previous version:
 *  - top-level keys are deduplicated with a Set (a key present in both doc and
 *    schema was previously validated twice), mirroring the nested objKeys handling
 *  - min/max/minlength/maxlength bounds of 0 are now enforced; the old truthiness
 *    checks skipped falsy bounds entirely
 *
 * @param {object} doc - document to validate; validated/defaulted values are written back
 * @param {object} schema - map of property name -> type constructor or template object
 * @returns {object|Error} the validated doc, or an Error describing the first violation
 */
function validateSchema(doc, schema) {
  // validates one value against one template; returns the (possibly defaulted)
  // value, or an Error for the first violated constraint
  function validate(input, template, key) {
    if (input == null && template.required && template.default == null) {
      return new Error(`${key} is required.`);
    }
    if (input == null && template.default != null) {
      input = (typeof template.default === 'function') ? template.default() : template.default;
    }
    if (input != null && template.minlength != null) {
      if (input.length < template.minlength) {
        return new Error(`${key}: ${input} length is shorter than min length ${template.minlength}`);
      }
    }
    if (input != null && template.maxlength != null) {
      if (input.length > template.maxlength) {
        return new Error(`${key}: ${input} length is longer than max length ${template.maxlength}`);
      }
    }
    if (input != null && template.min != null) {
      if (input < template.min) {
        return new Error(`${key}: ${input} is less than min value ${template.min}`);
      }
    }
    if (input != null && template.max != null) {
      if (input > template.max) {
        return new Error(`${key}: ${input} is greater than max value ${template.max}`);
      }
    }
    if (input != null && template.validation) {
      // custom validator must return true; a string return becomes the error detail
      const result = template.validation(input);
      if (result !== true) {
        return new Error(`${key}: ${typeof result === 'string' ? result : 'validation failed' }`);
      }
    }
    if (input != null && template.enum) {
      if (!template.enum.includes(input)) {
        return new Error(`${key}: ${input} is not in enum values ${template.enum}`);
      }
    }
    if (input != null) {
      if (Array.isArray(template.type)) {
        // array schema: validate each element against the first element template
        if (!Array.isArray(input)) {
          return new Error(`${key}: ${input} must be an array.`);
        }
        for (let i = 0; i < input.length; i++) {
          const res = validate(input[i], template.type[0], key);
          if (res instanceof Error) {
            return res;
          }
          input[i] = res;
        }
      }
      else if (template.type.name === "Object") {
        // nested object schema: recurse over the union of actual and declared keys
        if (typeof input !== 'object') {
          return new Error(`${key}: ${input} must be an object.`);
        }
        if (template.properties == null || typeof template.properties !== 'object') {
          return new Error(`Object ${key} must have a properties object.`);
        }
        const objKeys = new Set([...Object.keys(input), ...Object.keys(template.properties)]);
        for (const objKey of objKeys) {
          if (template.properties.hasOwnProperty(objKey) === false) {
            return new Error(`${objKey} is not a valid property.`);
          }
          let objTemplate = template.properties[objKey];
          if (typeof objTemplate !== 'object') {
            objTemplate = { type: objTemplate };
          }
          const res = validate(input[objKey], objTemplate, objKey);
          if (res instanceof Error) {
            return res;
          }
          input[objKey] = res;
        }
      }
      else if (input.constructor.name === "Number" || input.constructor.name === "String" || input.constructor.name === "Boolean") {
        if (input.constructor.name !== template.type.name) {
          return new Error(`${key}: input must be of type ${template.type.name}.`);
        }
      }
      else {
        return new Error(`${key}: input must be one of the following types: Object, Number, String, Boolean.`);
      }
    }
    return input;
  }
  // union of actual and declared keys, deduplicated
  const keys = new Set([...Object.keys(doc), ...Object.keys(schema)]);
  for (const key of keys) {
    if (key === '$ctrl' || key === 'meta') {
      continue;
    }
    if (schema.hasOwnProperty(key) === false) {
      return new Error(`${key} is not a valid property.`);
    }
    let template = schema[key];
    if (typeof template !== 'object') {
      template = { type: template };
    }
    const res = validate(doc[key], template, key);
    if (res instanceof Error) {
      return res;
    }
    doc[key] = res;
  }
  return doc;
}
/**
 * Simple sorted key/value store backed by parallel keys/values arrays and
 * binary search.
 *
 * Fixes over the previous version:
 *  - keys/values are created per instance in the constructor; they previously
 *    lived only on the prototype, so every KeyValueStore shared one pair of arrays.
 *  - this.bs is initialized to a working bound searcher; the prototype default
 *    returned a search *function* rather than a search result, so set() silently
 *    misfiled keys unless setSort() had been called first.
 */
function KeyValueStore() {
  this.keys = [];
  this.values = [];
  this.bs = BSonSort(this.sort);
}
KeyValueStore.prototype = {
  // prototype-level defaults retained for backward compatibility;
  // instances shadow all of these in the constructor
  keys: [],
  values: [],
  // default comparator: natural ordering
  sort: function (a, b) {
    return (a < b) ? -1 : ((a > b) ? 1 : 0);
  },
  // install a custom comparator for subsequent set()/get() calls
  setSort: function (fun) {
    this.bs = BSonSort(fun);
  },
  bs: function () {
    return new BSonSort(this.sort);
  },
  // insert a new key (keeping keys sorted) or update an existing key's value
  set: function (key, value) {
    var pos = this.bs(this.keys, key);
    if (pos.found) {
      this.values[pos.index] = value;
    } else {
      this.keys.splice(pos.index, 0, key);
      this.values.splice(pos.index, 0, value);
    }
  },
  // look up a key's value (undefined when absent)
  get: function (key) {
    return this.values[binarySearch(this.keys, key, this.sort).index];
  }
};
/**
 * Index enforcing uniqueness of a single document field.
 *
 *   keyMap     : field value -> document object
 *   controlMap : document $ctrl id -> field value (reverse lookup)
 *
 * Both maps are created with a null prototype so indexed values can never
 * collide with Object.prototype members (e.g. "constructor").
 * The old shared `UniqueIndex.prototype.keyMap = {}` / `.controlMap = {}`
 * defaults were removed: they were always shadowed by the per-instance
 * maps below, and plain `{}` prototype state is both shared across
 * instances and open to prototype-key collisions.
 *
 * @param {string} uniqueField Name of the field to index.
 */
function UniqueIndex(uniqueField) {
  this.field = uniqueField;
  this.keyMap = Object.create(null);
  this.controlMap = Object.create(null);
}
/**
 * Adds a document to the index.
 * Documents whose field value is null/undefined are skipped (not indexed).
 * @param {Object} obj Document to index; expected to carry a `$ctrl` id.
 * @throws {Error} If another document already owns the same field value.
 */
UniqueIndex.prototype.set = function (obj) {
  var fieldValue = obj[this.field];
  if (fieldValue !== null && typeof (fieldValue) !== 'undefined') {
    if (this.keyMap[fieldValue]) {
      throw new Error('Duplicate key for property ' + this.field + ': ' + fieldValue);
    } else {
      this.keyMap[fieldValue] = obj;
      this.controlMap[obj.$ctrl] = fieldValue;
    }
  }
};
// Look up a document by its (unique) field value.
UniqueIndex.prototype.get = function (key) {
  return this.keyMap[key];
};
// Look up a document by its $ctrl id (two hops: id -> field value -> doc).
UniqueIndex.prototype.byId = function (id) {
  return this.keyMap[this.controlMap[id]];
};
/**
 * Updates a document's unique index given an updated object.
 * @param {Object} obj Original document object
 * @param {Object} doc New document object (likely the same as obj)
 */
UniqueIndex.prototype.update = function (obj, doc) {
  if (this.controlMap[obj.$ctrl] !== doc[this.field]) {
    var old = this.controlMap[obj.$ctrl];
    this.set(doc);
    // make the old key fail bool test, while avoiding the use of delete (mem-leak prone)
    this.keyMap[old] = undefined;
  } else {
    this.keyMap[obj[this.field]] = doc;
  }
};
/**
 * Removes the document stored under the given field value.
 * @param {*} key Field value to remove.
 * @throws {Error} If the key is not present in the index.
 */
UniqueIndex.prototype.remove = function (key) {
  var obj = this.keyMap[key];
  if (obj !== null && typeof obj !== 'undefined') {
    // avoid using `delete`
    this.keyMap[key] = undefined;
    this.controlMap[obj.$ctrl] = undefined;
  } else {
    throw new Error('Key is not in unique index: ' + this.field);
  }
};
// Drops all entries by replacing both maps with fresh null-prototype objects.
UniqueIndex.prototype.clear = function () {
  this.keyMap = Object.create(null);
  this.controlMap = Object.create(null);
};
/**
 * Exact-match index: maps a key to the array of values filed under it.
 * The backing map has a null prototype so arbitrary keys cannot collide
 * with Object.prototype members.
 * @param {string} exactField Name of the indexed field.
 */
function ExactIndex(exactField) {
  this.index = Object.create(null);
  this.field = exactField;
}
ExactIndex.prototype = {
  // add the value you want returned to the key in the index
  set: function add(key, val) {
    if (this.index[key]) {
      this.index[key].push(val);
    } else {
      this.index[key] = [val];
    }
  },
  // remove the value from the index, if the value was the last one, remove the key
  remove: function remove(key, val) {
    var idxSet = this.index[key];
    // Key was never indexed (or already emptied): nothing to do.
    // (Previously this fell through and threw on `idxSet.length`.)
    if (idxSet == null) {
      return;
    }
    // Iterate backwards so splicing never skips an element; the old
    // forward for..in loop missed adjacent duplicates.
    for (var i = idxSet.length - 1; i >= 0; i--) {
      if (idxSet[i] == val) {
        idxSet.splice(i, 1);
      }
    }
    if (idxSet.length < 1) {
      this.index[key] = undefined;
    }
  },
  // get the values related to the key, could be more than one
  get: function get(key) {
    return this.index[key];
  },
  // clear will zap the index
  clear: function clear(key) {
    // Match the constructor: a null-prototype map, not a plain `{}`.
    this.index = Object.create(null);
  }
};
/**
 * Sorted index over a single field: parallel `keys`/`values` arrays kept
 * ordered by `sort`, where values[i] is the array of all values filed
 * under keys[i]. Supports equality and range (lt/gt) queries.
 * @param {string} sortedField Name of the indexed field.
 */
function SortedIndex(sortedField) {
  this.field = sortedField;
  // Per-instance storage. These previously lived on the prototype,
  // which made every SortedIndex instance share the same two arrays.
  this.keys = [];
  this.values = [];
}
SortedIndex.prototype = {
  // set the default sort
  sort: function (a, b) {
    return (a < b) ? -1 : ((a > b) ? 1 : 0);
  },
  bs: function () {
    return new BSonSort(this.sort);
  },
  // and allow override of the default sort
  setSort: function (fun) {
    this.bs = new BSonSort(fun);
  },
  // add the value you want returned to the key in the index
  set: function (key, value) {
    var pos = binarySearch(this.keys, key, this.sort);
    if (pos.found) {
      this.values[pos.index].push(value);
    } else {
      this.keys.splice(pos.index, 0, key);
      this.values.splice(pos.index, 0, [value]);
    }
  },
  // get all values which have a key == the given key
  get: function (key) {
    var bsr = binarySearch(this.keys, key, this.sort);
    if (bsr.found) {
      return this.values[bsr.index];
    } else {
      return [];
    }
  },
  // get all values which have a key < the given key
  getLt: function (key) {
    // binarySearch's index is either the match position or the insertion
    // point; in both cases every key strictly below `key` sits in
    // [0, index), so index is already the exclusive upper bound.
    // (The old `if (found) pos--` wrongly dropped the largest smaller key.)
    var bsr = binarySearch(this.keys, key, this.sort);
    return this.getAll(key, 0, bsr.index);
  },
  // get all values which have a key > the given key
  getGt: function (key) {
    var bsr = binarySearch(this.keys, key, this.sort);
    var pos = bsr.index;
    // skip the exact match itself; a miss already points past smaller keys
    if (bsr.found) pos++;
    return this.getAll(key, pos, this.keys.length);
  },
  // get all vals from start to end (end exclusive)
  getAll: function (key, start, end) {
    var results = [];
    for (var i = start; i < end; i++) {
      results = results.concat(this.values[i]);
    }
    return results;
  },
  // just in case someone wants to do something smart with ranges
  getPos: function (key) {
    return binarySearch(this.keys, key, this.sort);
  },
  // remove the value from the index, if the value was the last one, remove the key
  remove: function (key, value) {
    var bsr = binarySearch(this.keys, key, this.sort);
    // Key not indexed: bail out. The old code ignored `found` and spliced
    // whatever bucket sat at the insertion point.
    if (!bsr.found) {
      return;
    }
    var pos = bsr.index;
    var idxSet = this.values[pos];
    // iterate backwards so splicing never skips an element
    for (var i = idxSet.length - 1; i >= 0; i--) {
      if (idxSet[i] == value) idxSet.splice(i, 1);
    }
    if (idxSet.length < 1) {
      this.keys.splice(pos, 1);
      this.values.splice(pos, 1);
    }
  },
  // clear will zap the index
  clear: function () {
    this.keys = [];
    this.values = [];
  }
};
// Attach the module's internal helpers and constructors as static members
// of the exported ControlDB factory, so consumers can reach them through
// the single UMD export.

// Object-freezing helpers (defined near the top of the module)
ControlDB.deepFreeze = deepFreeze;
ControlDB.freeze = freeze;
ControlDB.unFreeze = unFreeze;
// Core building blocks (defined elsewhere in this module)
ControlDB.ControlOps = ControlOps;
ControlDB.Collection = Collection;
ControlDB.DynamicView = DynamicView;
ControlDB.Resultset = Resultset;
ControlDB.KeyValueStore = KeyValueStore;
// Persistence adapters, exposed individually...
ControlDB.ControlDBMemoryAdapter = ControlDBMemoryAdapter;
ControlDB.ControlDBPartitioningAdapter = ControlDBPartitioningAdapter;
ControlDB.ControlDBLocalStorageAdapter = ControlDBLocalStorageAdapter;
ControlDB.ControlDBFsAdapter = ControlDBFsAdapter;
// ...and as a lookup table keyed by storage backend name
ControlDB.persistenceAdapters = {
  fs: ControlDBFsAdapter,
  localStorage: ControlDBLocalStorageAdapter
};
// Comparison helpers (aeq = abstract/loose equality; lt/gt = ordering)
ControlDB.aeq = aeqHelper;
ControlDB.lt = ltHelper;
ControlDB.gt = gtHelper;
ControlDB.Comparators = Comparators;
return ControlDB;
}());
}));