stuff
This commit is contained in:
204
buildfiles/app/node_modules/nedb/lib/cursor.js
generated
vendored
Executable file
204
buildfiles/app/node_modules/nedb/lib/cursor.js
generated
vendored
Executable file
@ -0,0 +1,204 @@
|
||||
/**
|
||||
* Manage access to data, be it to find, update or remove it
|
||||
*/
|
||||
var model = require('./model')
|
||||
, _ = require('underscore')
|
||||
;
|
||||
|
||||
|
||||
|
||||
/**
 * Create a new cursor bound to a collection.
 * @param {Datastore} db - Datastore this cursor reads from
 * @param {Query} query - Query this cursor will run (defaults to match-all)
 * @param {Function} execFn - Optional handler executed after the cursor has
 *   found its results and before the callback given to find/findOne/update/remove
 */
function Cursor (db, query, execFn) {
  this.db = db;
  this.query = query || {};
  if (execFn) { this.execFn = execFn; }
}
|
||||
|
||||
|
||||
/**
 * Cap the number of results this cursor will return.
 * @param {Number} limit - Maximum number of documents
 * @returns {Cursor} this, for chaining
 */
Cursor.prototype.limit = function (limit) {
  this._limit = limit;
  return this;
};
|
||||
|
||||
|
||||
/**
 * Skip the given number of results.
 * @param {Number} skip - Number of matched documents to pass over
 * @returns {Cursor} this, for chaining
 */
Cursor.prototype.skip = function (skip) {
  this._skip = skip;
  return this;
};
|
||||
|
||||
|
||||
/**
 * Sort the results of the query.
 * @param {SortQuery} sortQuery - { field: order } where field may use dot-notation,
 *   order is 1 for ascending and -1 for descending
 * @returns {Cursor} this, for chaining
 */
Cursor.prototype.sort = function (sortQuery) {
  this._sort = sortQuery;
  return this;
};
|
||||
|
||||
|
||||
/**
 * Declare a projection for this cursor.
 * @param {Object} projection - MongoDB-style projection. {} keeps all fields;
 *   { key1: 1, key2: 1 } keeps only key1 and key2; { key1: 0, key2: 0 } omits
 *   only key1 and key2. Apart from _id, keeps and omits cannot be mixed.
 * @returns {Cursor} this, for chaining
 */
Cursor.prototype.projection = function (projection) {
  this._projection = projection;
  return this;
};
|
||||
|
||||
|
||||
/**
 * Apply this cursor's projection to an array of matched documents.
 * Returns new document copies; the candidates themselves are not mutated
 * (except by model.modify semantics for the omit branch, as before).
 *
 * Fix: the previous implementation reassigned this._projection with the _id
 * key stripped out, so executing the same cursor a second time lost the
 * { _id: 0 } information. We now work on a local copy and leave
 * this._projection untouched.
 *
 * @param {Array} candidates - Documents matched by the query
 * @returns {Array} Projected documents
 * @throws {Error} When keep-type and omit-type fields are mixed (other than _id)
 */
Cursor.prototype.project = function (candidates) {
  var res = [];

  // No projection (or an empty one) means: return candidates untouched
  if (this._projection === undefined || Object.keys(this._projection).length === 0) {
    return candidates;
  }

  var keepId = this._projection._id === 0 ? false : true;
  // Local copy without _id — do NOT mutate this._projection
  var projection = _.omit(this._projection, '_id');

  // Consistency check: all remaining fields must share the same action
  var action, keys = Object.keys(projection);
  keys.forEach(function (k) {
    if (action !== undefined && projection[k] !== action) { throw new Error("Can't both keep and omit fields except for _id"); }
    action = projection[k];
  });

  // Do the actual projection
  candidates.forEach(function (candidate) {
    var toPush;
    if (action === 1) {   // pick-type projection
      toPush = { $set: {} };
      keys.forEach(function (k) {
        toPush.$set[k] = model.getDotValue(candidate, k);
        if (toPush.$set[k] === undefined) { delete toPush.$set[k]; }
      });
      toPush = model.modify({}, toPush);
    } else {   // omit-type projection
      toPush = { $unset: {} };
      keys.forEach(function (k) { toPush.$unset[k] = true });
      toPush = model.modify(candidate, toPush);
    }
    if (keepId) {
      toPush._id = candidate._id;
    } else {
      delete toPush._id;
    }
    res.push(toPush);
  });

  return res;
};
|
||||
|
||||
|
||||
/**
 * Get all matching elements.
 * Returns pointers to matched elements (shallow copies); returning full copies
 * is the role of find or findOne.
 * Internal function — use exec, which goes through the executor.
 *
 * @param {Function} _callback - Signature: err, results
 */
Cursor.prototype._exec = function (_callback) {
  var res = [], added = 0, skipped = 0, self = this
    , error = null
    , i, keys, key
    ;

  // Route results through execFn when one was supplied, otherwise straight
  // to the user callback
  function callback (error, res) {
    if (self.execFn) {
      return self.execFn(error, res, _callback);
    } else {
      return _callback(error, res);
    }
  }

  this.db.getCandidates(this.query, function (err, candidates) {
    if (err) { return callback(err); }

    try {
      for (i = 0; i < candidates.length; i += 1) {
        if (model.match(candidates[i], self.query)) {
          // With a sort defined, limit/skip can only be applied after sorting,
          // so collect every match for now
          if (!self._sort) {
            if (self._skip && self._skip > skipped) {
              skipped += 1;
            } else {
              res.push(candidates[i]);
              added += 1;
              if (self._limit && self._limit <= added) { break; }
            }
          } else {
            res.push(candidates[i]);
          }
        }
      }
    } catch (err) {
      return callback(err);
    }

    // Apply all sorts
    if (self._sort) {
      keys = Object.keys(self._sort);

      // Build the ordered list of sort criteria
      var criteria = [];
      for (i = 0; i < keys.length; i++) {
        key = keys[i];
        criteria.push({ key: key, direction: self._sort[key] });
      }
      res.sort(function (a, b) {
        var criterion, compare, i;
        for (i = 0; i < criteria.length; i++) {
          criterion = criteria[i];
          compare = criterion.direction * model.compareThings(model.getDotValue(a, criterion.key), model.getDotValue(b, criterion.key), self.db.compareStrings);
          if (compare !== 0) {
            return compare;
          }
        }
        return 0;
      });

      // Now that order is final, apply limit and skip
      var limit = self._limit || res.length
        , skip = self._skip || 0;

      res = res.slice(skip, skip + limit);
    }

    // Apply projection
    try {
      res = self.project(res);
    } catch (e) {
      error = e;
      res = undefined;
    }

    return callback(error, res);
  });
};
|
||||
|
||||
/**
 * Execute the cursor through the datastore's executor so operations stay serialized.
 */
Cursor.prototype.exec = function () {
  this.db.executor.push({ this: this, fn: this._exec, arguments: arguments });
};


// Interface
module.exports = Cursor;
|
22
buildfiles/app/node_modules/nedb/lib/customUtils.js
generated
vendored
Executable file
22
buildfiles/app/node_modules/nedb/lib/customUtils.js
generated
vendored
Executable file
@ -0,0 +1,22 @@
|
||||
var crypto = require('crypto')
|
||||
;
|
||||
|
||||
/**
 * Return a random alphanumerical string of length len.
 * There is a very small probability (less than 1/1,000,000) that the result is
 * shorter than len (if the base64 conversion yields too many pluses and
 * slashes), which is not an issue here.
 * Collision probability is extremely small (about 3*10^12 documents are needed
 * for a one-in-a-million chance of a collision).
 * See http://en.wikipedia.org/wiki/Birthday_problem
 * @param {Number} len - Desired identifier length
 * @returns {String}
 */
function uid (len) {
  // Draw twice as many random bytes as needed so that stripping the
  // non-alphanumeric base64 characters almost never shortens the result
  var raw = crypto.randomBytes(Math.ceil(Math.max(8, len * 2))).toString('base64');
  return raw.replace(/[+\/]/g, '').slice(0, len);
}
|
||||
|
||||
|
||||
// Public interface
module.exports.uid = uid;
|
||||
|
704
buildfiles/app/node_modules/nedb/lib/datastore.js
generated
vendored
Executable file
704
buildfiles/app/node_modules/nedb/lib/datastore.js
generated
vendored
Executable file
@ -0,0 +1,704 @@
|
||||
var customUtils = require('./customUtils')
|
||||
, model = require('./model')
|
||||
, async = require('async')
|
||||
, Executor = require('./executor')
|
||||
, Index = require('./indexes')
|
||||
, util = require('util')
|
||||
, _ = require('underscore')
|
||||
, Persistence = require('./persistence')
|
||||
, Cursor = require('./cursor')
|
||||
;
|
||||
|
||||
|
||||
/**
 * Create a new collection.
 * @param {String} options.filename Optional, datastore is in-memory only when absent
 * @param {Boolean} options.timestampData Optional, defaults to false. When true,
 *   createdAt and updatedAt are populated automatically (unless supplied by the user)
 * @param {Boolean} options.inMemoryOnly Optional, defaults to false
 * @param {String} options.nodeWebkitAppName Optional NW app name, makes options.filename
 *   relative to the directory where Node Webkit stores application data
 * @param {Boolean} options.autoload Optional, defaults to false
 * @param {Function} options.onload Optional; with autoload, called after loading with the
 *   error as parameter. Without it, any load error is thrown.
 * @param {Function} options.afterSerialization/options.beforeDeserialization Optional serialization hooks
 * @param {Number} options.corruptAlertThreshold Optional corruption-alert threshold
 * @param {Function} options.compareStrings Optional string comparison override for sorting
 *
 * Event Emitter - Events
 * * compaction.done - Fired whenever a compaction operation has finished
 */
function Datastore (options) {
  var filename;

  // Retrocompatibility: v0.6 and before accepted the filename directly
  if (typeof options === 'string') {
    filename = options;
    this.inMemoryOnly = false;   // Default
  } else {
    options = options || {};
    filename = options.filename;
    this.inMemoryOnly = options.inMemoryOnly || false;
    this.autoload = options.autoload || false;
    this.timestampData = options.timestampData || false;
  }

  // A missing or empty filename forces an in-memory datastore
  if (!filename || typeof filename !== 'string' || filename.length === 0) {
    this.filename = null;
    this.inMemoryOnly = true;
  } else {
    this.filename = filename;
  }

  // Optional custom string comparison used by sorts
  this.compareStrings = options.compareStrings;

  // Persistence handling
  this.persistence = new Persistence({ db: this, nodeWebkitAppName: options.nodeWebkitAppName
                                     , afterSerialization: options.afterSerialization
                                     , beforeDeserialization: options.beforeDeserialization
                                     , corruptAlertThreshold: options.corruptAlertThreshold
                                     });

  // The executor is ready right away without persistence; with persistence it
  // only becomes ready once loadDatabase has run
  this.executor = new Executor();
  if (this.inMemoryOnly) { this.executor.ready = true; }

  // Indexes, keyed by field name (dot notation allowed). _id is always indexed;
  // since _ids are random the underlying binary tree stays well-balanced.
  this.indexes = {};
  this.indexes._id = new Index({ fieldName: '_id', unique: true });
  this.ttlIndexes = {};

  // Autoload: queue a load right away. Without an onload handler a load error
  // is thrown, since no operation could succeed afterwards anyway.
  if (this.autoload) { this.loadDatabase(options.onload || function (err) {
    if (err) { throw err; }
  }); }
}

util.inherits(Datastore, require('events').EventEmitter);
|
||||
|
||||
|
||||
/**
 * Load the database from the datafile and trigger execution of any buffered commands.
 * Queued with priority (second argument true) so it runs before everything else.
 */
Datastore.prototype.loadDatabase = function () {
  this.executor.push({ this: this.persistence, fn: this.persistence.loadDatabase, arguments: arguments }, true);
};
|
||||
|
||||
|
||||
/**
 * Get an array of every document in the database (backed by the _id index).
 * @returns {Array}
 */
Datastore.prototype.getAllData = function () {
  return this.indexes._id.getAll();
};
|
||||
|
||||
|
||||
/**
 * Reset every currently defined index, optionally seeding them with newData.
 * @param {Array} newData - Optional replacement dataset
 */
Datastore.prototype.resetIndexes = function (newData) {
  var self = this;

  Object.keys(this.indexes).forEach(function (fieldName) {
    self.indexes[fieldName].reset(newData);
  });
};
|
||||
|
||||
|
||||
/**
 * Ensure an index exists for this field. Same parameters as lib/indexes.
 * Synchronous for now; the async API is kept for consistency with the rest of
 * the code and in case this needs to become truly asynchronous later.
 * @param {String} options.fieldName
 * @param {Boolean} options.unique
 * @param {Boolean} options.sparse
 * @param {Number} options.expireAfterSeconds - Optional; makes this a TTL index
 *   (works on Date fields only, not arrays of Dates)
 * @param {Function} cb Optional callback, signature: err
 */
Datastore.prototype.ensureIndex = function (options, cb) {
  var callback = cb || function () {};

  options = options || {};

  if (!options.fieldName) {
    var err = new Error("Cannot create an index without a fieldName");
    err.missingFieldName = true;
    return callback(err);
  }

  // Already indexed: nothing to do
  if (this.indexes[options.fieldName]) { return callback(null); }

  this.indexes[options.fieldName] = new Index(options);
  // Index creation isn't strictly necessary for TTL with this implementation,
  // but we mirror MongoDB's API
  if (options.expireAfterSeconds !== undefined) { this.ttlIndexes[options.fieldName] = options.expireAfterSeconds; }

  try {
    this.indexes[options.fieldName].insert(this.getAllData());
  } catch (e) {
    // Roll back the half-built index on failure (e.g. unique-constraint violation)
    delete this.indexes[options.fieldName];
    return callback(e);
  }

  // We may want to persist all options including defaults, not just the ones
  // passed to the index creation function
  this.persistence.persistNewState([{ $$indexCreated: options }], function (err) {
    if (err) { return callback(err); }
    return callback(null);
  });
};
|
||||
|
||||
|
||||
/**
 * Remove an index.
 * @param {String} fieldName
 * @param {Function} cb Optional callback, signature: err
 */
Datastore.prototype.removeIndex = function (fieldName, cb) {
  var callback = cb || function () {};

  delete this.indexes[fieldName];

  this.persistence.persistNewState([{ $$indexRemoved: fieldName }], function (err) {
    if (err) { return callback(err); }
    return callback(null);
  });
};
|
||||
|
||||
|
||||
/**
 * Add one or several document(s) to all indexes.
 * If any index rejects the document (e.g. unique violation), the insert is
 * rolled back on the indexes already updated and the error is rethrown.
 * @param {Object|Array} doc
 * @throws The first index error encountered
 */
Datastore.prototype.addToIndexes = function (doc) {
  var i, failingIndex, error
    , keys = Object.keys(this.indexes)
    ;

  for (i = 0; i < keys.length; i += 1) {
    try {
      this.indexes[keys[i]].insert(doc);
    } catch (e) {
      failingIndex = i;
      error = e;
      break;
    }
  }

  // On failure, undo the insert on every index updated before the failing one
  if (error) {
    for (i = 0; i < failingIndex; i += 1) {
      this.indexes[keys[i]].remove(doc);
    }

    throw error;
  }
};
|
||||
|
||||
|
||||
/**
 * Remove one or several document(s) from all indexes.
 * @param {Object|Array} doc
 */
Datastore.prototype.removeFromIndexes = function (doc) {
  var self = this;

  Object.keys(this.indexes).forEach(function (fieldName) {
    self.indexes[fieldName].remove(doc);
  });
};
|
||||
|
||||
|
||||
/**
 * Update one or several documents in all indexes.
 * To update multiple documents, oldDoc must be an array of { oldDoc, newDoc } pairs.
 * If one update violates a constraint, every change is rolled back and the
 * error is rethrown.
 * @param {Object|Array} oldDoc
 * @param {Object} newDoc - Unused when oldDoc is a pairs array
 * @throws The first index error encountered
 */
Datastore.prototype.updateIndexes = function (oldDoc, newDoc) {
  var i, failingIndex, error
    , keys = Object.keys(this.indexes)
    ;

  for (i = 0; i < keys.length; i += 1) {
    try {
      this.indexes[keys[i]].update(oldDoc, newDoc);
    } catch (e) {
      failingIndex = i;
      error = e;
      break;
    }
  }

  // On failure, revert the update on every index modified before the failing one
  if (error) {
    for (i = 0; i < failingIndex; i += 1) {
      this.indexes[keys[i]].revertUpdate(oldDoc, newDoc);
    }

    throw error;
  }
};
|
||||
|
||||
|
||||
/**
 * Return the list of candidates for a given query.
 * Crude implementation: we return the candidates from the first usable index,
 * trying query types in this order: basic match, $in match, comparison match.
 * Using multiple indexes when the first one returns too much data would be an
 * improvement for the future.
 *
 * Returned candidates are scanned to find and remove all expired documents,
 * unless dontExpireStaleDocs is set.
 *
 * @param {Query} query
 * @param {Boolean} dontExpireStaleDocs Optional, defaults to false. When true,
 *   stale docs are kept — useful for remove, which must not be affected by TTL expiry.
 * @param {Function} callback Signature: err, docs
 */
Datastore.prototype.getCandidates = function (query, dontExpireStaleDocs, callback) {
  var indexNames = Object.keys(this.indexes)
    , self = this
    ;

  if (typeof dontExpireStaleDocs === 'function') {
    callback = dontExpireStaleDocs;
    dontExpireStaleDocs = false;
  }

  // Collect the query keys whose value satisfies predicate and that are indexed
  function usableKeys (predicate) {
    var found = [];
    Object.keys(query).forEach(function (k) {
      if (predicate(query[k])) { found.push(k); }
    });
    return _.intersection(found, indexNames);
  }

  async.waterfall([
    // STEP 1: get the candidate list, checking indexes from most to least frequent use case
    function (cb) {
      // For a basic match
      var keys = usableKeys(function (v) {
        return typeof v === 'string' || typeof v === 'number' || typeof v === 'boolean' || util.isDate(v) || v === null;
      });
      if (keys.length > 0) {
        return cb(null, self.indexes[keys[0]].getMatching(query[keys[0]]));
      }

      // For a $in match
      keys = usableKeys(function (v) { return v && v.hasOwnProperty('$in'); });
      if (keys.length > 0) {
        return cb(null, self.indexes[keys[0]].getMatching(query[keys[0]].$in));
      }

      // For a comparison match
      keys = usableKeys(function (v) {
        return v && (v.hasOwnProperty('$lt') || v.hasOwnProperty('$lte') || v.hasOwnProperty('$gt') || v.hasOwnProperty('$gte'));
      });
      if (keys.length > 0) {
        return cb(null, self.indexes[keys[0]].getBetweenBounds(query[keys[0]]));
      }

      // By default, return all the DB data
      return cb(null, self.getAllData());
    }
    // STEP 2: remove all expired documents
  , function (docs) {
      if (dontExpireStaleDocs) { return callback(null, docs); }

      var expiredDocsIds = [], validDocs = [], ttlIndexesFieldNames = Object.keys(self.ttlIndexes);

      docs.forEach(function (doc) {
        var valid = true;
        ttlIndexesFieldNames.forEach(function (fieldName) {
          if (doc[fieldName] !== undefined && util.isDate(doc[fieldName]) && Date.now() > doc[fieldName].getTime() + self.ttlIndexes[fieldName] * 1000) {
            valid = false;
          }
        });
        if (valid) { validDocs.push(doc); } else { expiredDocsIds.push(doc._id); }
      });

      async.eachSeries(expiredDocsIds, function (_id, cb) {
        self._remove({ _id: _id }, {}, function (err) {
          if (err) { return callback(err); }
          return cb();
        });
      }, function (err) {
        return callback(null, validDocs);
      });
    }]);
};
|
||||
|
||||
|
||||
/**
 * Insert a new document.
 * @param {Object|Array} newDoc
 * @param {Function} cb Optional callback, signature: err, insertedDoc
 *
 * @api private Use Datastore.insert which has the same signature
 */
Datastore.prototype._insert = function (newDoc, cb) {
  var callback = cb || function () {}
    , preparedDoc
    ;

  // Prepare (copy, _id, timestamps) and add to in-memory indexes; both can
  // throw, in which case nothing has been persisted yet
  try {
    preparedDoc = this.prepareDocumentForInsertion(newDoc)
    this._insertInCache(preparedDoc);
  } catch (e) {
    return callback(e);
  }

  this.persistence.persistNewState(util.isArray(preparedDoc) ? preparedDoc : [preparedDoc], function (err) {
    if (err) { return callback(err); }
    // Hand back a deep copy so callers can't mutate the indexed document
    return callback(null, model.deepCopy(preparedDoc));
  });
};
|
||||
|
||||
/**
 * Create a new _id that is not already in use.
 * Retries on collision; as explained in customUtils the probability of one is
 * extremely small, so this is effectively O(1).
 * @returns {String}
 */
Datastore.prototype.createNewId = function () {
  var tentativeId = customUtils.uid(16);
  if (this.indexes._id.getMatching(tentativeId).length > 0) {
    tentativeId = this.createNewId();
  }
  return tentativeId;
};
|
||||
|
||||
/**
 * Prepare a document (or array of documents) for insertion.
 * Works on a copy of newDoc (no side effects on user input), adding _id and
 * createdAt/updatedAt timestamps where needed, then validates the copy.
 * @param {Object|Array} newDoc
 * @returns {Object|Array} The prepared copy (array in, array out)
 * @api private
 */
Datastore.prototype.prepareDocumentForInsertion = function (newDoc) {
  var self = this, preparedDoc;

  if (util.isArray(newDoc)) {
    // Recurse over each element of a bulk insert
    preparedDoc = newDoc.map(function (doc) { return self.prepareDocumentForInsertion(doc); });
  } else {
    preparedDoc = model.deepCopy(newDoc);
    if (preparedDoc._id === undefined) { preparedDoc._id = this.createNewId(); }
    var now = new Date();
    // Only fill in timestamps the user did not supply themselves
    if (this.timestampData && preparedDoc.createdAt === undefined) { preparedDoc.createdAt = now; }
    if (this.timestampData && preparedDoc.updatedAt === undefined) { preparedDoc.updatedAt = now; }
    model.checkObject(preparedDoc);
  }

  return preparedDoc;
};
|
||||
|
||||
/**
 * Insert a prepared document — or every document of a prepared array — into
 * the in-memory indexes.
 * @param {Object|Array} preparedDoc
 * @api private
 */
Datastore.prototype._insertInCache = function (preparedDoc) {
  if (util.isArray(preparedDoc)) {
    this._insertMultipleDocsInCache(preparedDoc);
  } else {
    this.addToIndexes(preparedDoc);
  }
};
|
||||
|
||||
/**
 * Insert several prepared documents into the in-memory indexes.
 * If one insertion fails (e.g. a unique constraint), every previous insert is
 * rolled back and the error is rethrown.
 * @param {Array} preparedDocs
 * @throws The first insertion error encountered
 * @api private
 */
Datastore.prototype._insertMultipleDocsInCache = function (preparedDocs) {
  var i, failingI, error;

  for (i = 0; i < preparedDocs.length; i += 1) {
    try {
      this.addToIndexes(preparedDocs[i]);
    } catch (e) {
      error = e;
      failingI = i;
      break;
    }
  }

  // Roll back documents inserted before the failing one
  if (error) {
    for (i = 0; i < failingI; i += 1) {
      this.removeFromIndexes(preparedDocs[i]);
    }

    throw error;
  }
};
|
||||
|
||||
// Public insert: serialize through the executor (see _insert for the work)
Datastore.prototype.insert = function () {
  this.executor.push({ this: this, fn: this._insert, arguments: arguments });
};
|
||||
|
||||
|
||||
/**
 * Count all documents matching the query.
 * Without a callback, returns the cursor so the caller can refine and exec it.
 * @param {Object} query MongoDB-style query
 * @param {Function} callback Optional, signature: err, count
 */
Datastore.prototype.count = function (query, callback) {
  var cursor = new Cursor(this, query, function (err, docs, callback) {
    if (err) { return callback(err); }
    return callback(null, docs.length);
  });

  if (typeof callback === 'function') {
    cursor.exec(callback);
  } else {
    return cursor;
  }
};
|
||||
|
||||
|
||||
/**
 * Find all documents matching the query.
 * If no callback is passed, the cursor is returned so the user can limit,
 * skip, sort and finally exec it.
 * @param {Object} query MongoDB-style query
 * @param {Object} projection MongoDB-style projection
 * @param {Function} callback Optional, signature: err, docs
 */
Datastore.prototype.find = function (query, projection, callback) {
  // Normalize the optional middle argument
  if (arguments.length === 1) {
    projection = {};
    // callback is undefined, a cursor will be returned
  } else if (arguments.length === 2 && typeof projection === 'function') {
    callback = projection;
    projection = {};
  } // Otherwise assume projection is an object and callback is undefined

  var cursor = new Cursor(this, query, function (err, docs, callback) {
    if (err) { return callback(err); }

    // Hand back deep copies so callers can't mutate indexed documents
    var copies = [], i;
    for (i = 0; i < docs.length; i += 1) {
      copies.push(model.deepCopy(docs[i]));
    }
    return callback(null, copies);
  });

  cursor.projection(projection);
  if (typeof callback === 'function') {
    cursor.exec(callback);
  } else {
    return cursor;
  }
};
|
||||
|
||||
|
||||
/**
 * Find one document matching the query.
 * If no callback is passed, the cursor is returned so the user can refine and exec it.
 * @param {Object} query MongoDB-style query
 * @param {Object} projection MongoDB-style projection
 * @param {Function} callback Optional, signature: err, doc (null when no match)
 */
Datastore.prototype.findOne = function (query, projection, callback) {
  // Normalize the optional middle argument
  if (arguments.length === 1) {
    projection = {};
    // callback is undefined, a cursor will be returned
  } else if (arguments.length === 2 && typeof projection === 'function') {
    callback = projection;
    projection = {};
  } // Otherwise assume projection is an object and callback is undefined

  var cursor = new Cursor(this, query, function (err, docs, callback) {
    if (err) { return callback(err); }
    if (docs.length === 1) {
      // Deep copy so callers can't mutate the indexed document
      return callback(null, model.deepCopy(docs[0]));
    } else {
      return callback(null, null);
    }
  });

  cursor.projection(projection).limit(1);
  if (typeof callback === 'function') {
    cursor.exec(callback);
  } else {
    return cursor;
  }
};
|
||||
|
||||
|
||||
/**
 * Update all docs matching query.
 * @param {Object} query
 * @param {Object} updateQuery
 * @param {Object} options Optional options
 *                 options.multi If true, can update multiple documents (defaults to false)
 *                 options.upsert If true, document is inserted if the query doesn't match anything
 *                 options.returnUpdatedDocs Defaults to false; if true, the third callback
 *                   argument is the array of updated matched documents (even if no change took place)
 * @param {Function} cb Optional callback, signature: (err, numAffected, affectedDocuments, upsert)
 *                      When the update was an upsert, the upsert flag is true.
 *                      affectedDocuments is one of:
 *                        * For an upsert: the upserted document
 *                        * returnUpdatedDocs false: null
 *                        * returnUpdatedDocs true, multi false: the updated document
 *                        * returnUpdatedDocs true, multi true: the array of updated documents
 *
 * WARNING: The API changed between v1.7.4 and v1.8 for consistency and readability.
 * Up to v1.7.4 the callback signature was (err, numAffected, updated), where updated was
 * the upserted document, or the array of updated documents when returnUpdatedDocs was true.
 * The type of affectedDocuments in a non-multi update therefore depended on whether an
 * upsert occurred, leaving only ugly ways for the user to detect an upsert — hence the
 * breaking change.
 *
 * @api private Use Datastore.update which has the same signature
 */
Datastore.prototype._update = function (query, updateQuery, options, cb) {
  var callback
    , self = this
    , numReplaced = 0
    , multi, upsert
    , i
    ;

  if (typeof options === 'function') { cb = options; options = {}; }
  callback = cb || function () {};
  multi = options.multi !== undefined ? options.multi : false;
  upsert = options.upsert !== undefined ? options.upsert : false;

  async.waterfall([
    function (cb) {   // With the upsert option, check whether an insert is needed
      if (!upsert) { return cb(); }

      // Use an internal cursor (not tied to the executor) to avoid a deadlock
      var cursor = new Cursor(self, query);
      cursor.limit(1)._exec(function (err, docs) {
        if (err) { return callback(err); }
        if (docs.length === 1) {
          // A match exists: fall through to a regular update
          return cb();
        } else {
          var toBeInserted;

          try {
            model.checkObject(updateQuery);
            // updateQuery is a plain object without modifiers: insert it as-is
            toBeInserted = updateQuery;
          } catch (e) {
            // updateQuery contains modifiers: start from the find query,
            // stripped of operators, and apply updateQuery to it
            try {
              toBeInserted = model.modify(model.deepCopy(query, true), updateQuery);
            } catch (err) {
              return callback(err);
            }
          }

          return self._insert(toBeInserted, function (err, newDoc) {
            if (err) { return callback(err); }
            return callback(null, 1, newDoc, true);
          });
        }
      });
    }
  , function () {   // Perform the update
      var modifiedDoc, modifications = [], createdAt;

      self.getCandidates(query, function (err, candidates) {
        if (err) { return callback(err); }

        // Prepare the update; an error thrown here affects neither the
        // datafile nor the in-memory indexes
        try {
          for (i = 0; i < candidates.length; i += 1) {
            if (model.match(candidates[i], query) && (multi || numReplaced === 0)) {
              numReplaced += 1;
              if (self.timestampData) { createdAt = candidates[i].createdAt; }
              modifiedDoc = model.modify(candidates[i], updateQuery);
              if (self.timestampData) {
                // createdAt never changes; updatedAt is refreshed
                modifiedDoc.createdAt = createdAt;
                modifiedDoc.updatedAt = new Date();
              }
              modifications.push({ oldDoc: candidates[i], newDoc: modifiedDoc });
            }
          }
        } catch (err) {
          return callback(err);
        }

        // Change the docs in memory
        try {
          self.updateIndexes(modifications);
        } catch (err) {
          return callback(err);
        }

        // Update the datafile
        var updatedDocs = _.pluck(modifications, 'newDoc');
        self.persistence.persistNewState(updatedDocs, function (err) {
          if (err) { return callback(err); }
          if (!options.returnUpdatedDocs) {
            return callback(null, numReplaced);
          } else {
            var updatedDocsDC = [];
            updatedDocs.forEach(function (doc) { updatedDocsDC.push(model.deepCopy(doc)); });
            if (! multi) { updatedDocsDC = updatedDocsDC[0]; }
            return callback(null, numReplaced, updatedDocsDC);
          }
        });
      });
    }]);
};
|
||||
|
||||
/**
 * Queue an update through the executor so that all datastore operations
 * run sequentially. Same signature as Datastore.prototype._update.
 */
Datastore.prototype.update = function () {
  var task = { this: this, fn: this._update, arguments: arguments };
  this.executor.push(task);
};
|
||||
|
||||
|
||||
/**
 * Remove all docs matching the query.
 * For now a very naive implementation (walks all candidates, similar to update).
 * @param {Object} query
 * @param {Object} options Optional options
 *                 options.multi If true, can remove multiple documents (defaults to false)
 * @param {Function} cb Optional callback, signature: err, numRemoved
 *
 * @api private Use Datastore.remove which has the same signature
 */
Datastore.prototype._remove = function (query, options, cb) {
  var self = this
    , removedDocs = []
    , numRemoved = 0
    , callback, multi, i, d
    ;

  // Support both remove(query, cb) and remove(query, options, cb)
  if (typeof options === 'function') { cb = options; options = {}; }
  callback = cb || function () {};
  multi = options.multi !== undefined ? options.multi : false;

  this.getCandidates(query, true, function (err, candidates) {
    if (err) { return callback(err); }

    try {
      for (i = 0; i < candidates.length; i += 1) {
        d = candidates[i];
        if (!model.match(d, query)) { continue; }
        // Without multi, only the first match is removed
        if (!multi && numRemoved > 0) { continue; }

        numRemoved += 1;
        // Deletions are persisted as { $$deleted: true } markers in the datafile
        removedDocs.push({ $$deleted: true, _id: d._id });
        self.removeFromIndexes(d);
      }
    } catch (err) { return callback(err); }

    self.persistence.persistNewState(removedDocs, function (err) {
      if (err) { return callback(err); }
      return callback(null, numRemoved);
    });
  });
};
|
||||
|
||||
/**
 * Queue a remove through the executor so that all datastore operations
 * run sequentially. Same signature as Datastore.prototype._remove.
 */
Datastore.prototype.remove = function () {
  var task = { this: this, fn: this._remove, arguments: arguments };
  this.executor.push(task);
};
|
||||
|
||||
|
||||
|
||||
module.exports = Datastore;
|
78
buildfiles/app/node_modules/nedb/lib/executor.js
generated
vendored
Executable file
78
buildfiles/app/node_modules/nedb/lib/executor.js
generated
vendored
Executable file
@ -0,0 +1,78 @@
|
||||
/**
|
||||
* Responsible for sequentially executing actions on the database
|
||||
*/
|
||||
|
||||
var async = require('async')
|
||||
;
|
||||
|
||||
/**
 * Executor constructor.
 * Wraps an async.queue with concurrency 1 so that all datastore commands run
 * strictly one after another, in the order they were pushed.
 */
function Executor () {
  this.buffer = [];    // Tasks received before the executor is ready (e.g. before the datafile is loaded)
  this.ready = false;  // Flipped to true by processBuffer()

  // This queue will execute all commands, one-by-one in order
  this.queue = async.queue(function (task, cb) {
    var newArguments = [];

    // task.arguments is an array-like object on which adding a new field doesn't work, so we transform it into a real array
    for (var i = 0; i < task.arguments.length; i += 1) { newArguments.push(task.arguments[i]); }
    var lastArg = task.arguments[task.arguments.length - 1];

    // Always tell the queue task is complete. Execute callback if any was given.
    if (typeof lastArg === 'function') {
      // Callback was supplied: wrap it so the queue is released first (on the
      // next tick), then the user callback runs with its original arguments
      newArguments[newArguments.length - 1] = function () {
        if (typeof setImmediate === 'function') {
          setImmediate(cb);
        } else {
          process.nextTick(cb);
        }
        lastArg.apply(null, arguments);
      };
    } else if (!lastArg && task.arguments.length !== 0) {
      // false/undefined/null supplied as callback: replace it with a queue release
      newArguments[newArguments.length - 1] = function () { cb(); };
    } else {
      // Nothing supplied as callback: append a queue release
      newArguments.push(function () { cb(); });
    }


    task.fn.apply(task.this, newArguments);
  }, 1);
}
|
||||
|
||||
|
||||
/**
 * If the executor is ready (or queuing is forced), queue the task for
 * sequential processing; otherwise buffer it for later via processBuffer().
 * @param {Object} task
 *        task.this - Object to use as this
 *        task.fn - Function to execute
 *        task.arguments - Array of arguments, IMPORTANT: only the last argument may be a function (the callback)
 *                         and the last argument cannot be false/undefined/null
 * @param {Boolean} forceQueuing Optional (defaults to false) force executor to queue task even if it is not ready
 */
Executor.prototype.push = function (task, forceQueuing) {
  var target = (this.ready || forceQueuing) ? this.queue : this.buffer;
  target.push(task);
};
|
||||
|
||||
|
||||
/**
 * Queue all buffered tasks (in the same order they came in) and mark the
 * executor as ready so future tasks bypass the buffer.
 */
Executor.prototype.processBuffer = function () {
  var pending = this.buffer;

  this.ready = true;
  this.buffer = [];
  pending.forEach(function (task) { this.queue.push(task); }, this);
};
|
||||
|
||||
|
||||
|
||||
// Interface
|
||||
module.exports = Executor;
|
294
buildfiles/app/node_modules/nedb/lib/indexes.js
generated
vendored
Executable file
294
buildfiles/app/node_modules/nedb/lib/indexes.js
generated
vendored
Executable file
@ -0,0 +1,294 @@
|
||||
var BinarySearchTree = require('binary-search-tree').AVLTree
|
||||
, model = require('./model')
|
||||
, _ = require('underscore')
|
||||
, util = require('util')
|
||||
;
|
||||
|
||||
/**
 * Equality check handed to the AVL tree: two indexed pointers are considered
 * equal only if they are the very same reference (or identical primitives).
 */
function checkValueEquality (valueA, valueB) {
  return valueA === valueB;
}
|
||||
|
||||
/**
 * Type-aware projection used to deduplicate array keys before indexing.
 * Primitives are prefixed with their type name so that, e.g., the number 5
 * and the string "5" do not collide. Arrays and objects are returned as-is
 * and therefore deduplicate by pointer equality only.
 */
function projectForUnique (elt) {
  if (elt === null) { return '$null'; }
  if (typeof elt === 'string') { return '$string' + elt; }
  if (typeof elt === 'boolean') { return '$boolean' + elt; }
  if (typeof elt === 'number') { return '$number' + elt; }
  // Bug fix: this branch must detect Dates, not arrays. Arrays have no
  // getTime() (the original util.isArray guard would throw); Dates project
  // to their timestamp. (instanceof is same-realm only, like the rest of
  // this check ladder.)
  if (elt instanceof Date) { return '$date' + elt.getTime(); }

  return elt; // Arrays and objects, will check for pointer equality
}
|
||||
|
||||
|
||||
/**
 * Create a new index backed by an AVL tree.
 * All methods on an index guarantee that either the whole operation succeeded
 * and the index changed, or it failed, an error was thrown, and the index is
 * unchanged.
 * @param {String} options.fieldName On which field should the index apply (can use dot notation to index on sub fields)
 * @param {Boolean} options.unique Optional, enforce a unique constraint (default: false)
 * @param {Boolean} options.sparse Optional, allow a sparse index (we can have documents for which fieldName is undefined) (default: false)
 */
function Index (options) {
  this.fieldName = options.fieldName;
  this.unique = options.unique || false;
  this.sparse = options.sparse || false;

  this.treeOptions = {
    unique: this.unique
  , compareKeys: model.compareThings
  , checkValueEquality: checkValueEquality
  };

  // Start empty; reset() builds the initial tree
  this.reset();
}
|
||||
|
||||
|
||||
/**
 * Reset the index by replacing its tree with a brand-new empty one.
 * @param {Document or Array of documents} newData Optional, data to initialize the index with
 *        If an error is thrown during insertion, the index is not modified
 */
Index.prototype.reset = function (newData) {
  this.tree = new BinarySearchTree(this.treeOptions);

  if (!newData) { return; }
  this.insert(newData);
};
|
||||
|
||||
|
||||
/**
 * Insert a new document in the index.
 * If an array is passed, all its elements are inserted (if one insertion
 * fails the index is not modified — see insertMultipleDocs).
 * If the indexed field holds an array, the doc is inserted once under each
 * distinct key; a unique-constraint failure rolls back the earlier inserts.
 * O(log(n))
 */
Index.prototype.insert = function (doc) {
  var key, self = this
    , keys, i, failingI, error
    ;

  if (util.isArray(doc)) { this.insertMultipleDocs(doc); return; }

  key = model.getDotValue(doc, this.fieldName);

  // We don't index documents that don't contain the field if the index is sparse
  if (key === undefined && this.sparse) { return; }

  if (!util.isArray(key)) {
    this.tree.insert(key, doc);
  } else {
    // If an insert fails due to a unique constraint, roll back all inserts before it
    // (keys are deduplicated first so the doc is stored at most once per distinct key)
    keys = _.uniq(key, projectForUnique);

    for (i = 0; i < keys.length; i += 1) {
      try {
        this.tree.insert(keys[i], doc);
      } catch (e) {
        error = e;
        failingI = i;
        break;
      }
    }

    if (error) {
      // Undo every insert that succeeded before the failing key, then rethrow
      for (i = 0; i < failingI; i += 1) {
        this.tree.delete(keys[i], doc);
      }

      throw error;
    }
  }
};
|
||||
|
||||
|
||||
/**
 * Insert an array of documents in the index.
 * If a constraint is violated, all changes are rolled back and the error
 * is rethrown, leaving the index unchanged.
 *
 * @API private
 */
Index.prototype.insertMultipleDocs = function (docs) {
  var caught, stopAt, idx;

  for (idx = 0; idx < docs.length; idx += 1) {
    try {
      this.insert(docs[idx]);
    } catch (e) {
      caught = e;
      stopAt = idx;
      break;
    }
  }

  if (caught) {
    // Undo every insert that succeeded before the failing one, then rethrow
    for (idx = 0; idx < stopAt; idx += 1) {
      this.remove(docs[idx]);
    }

    throw caught;
  }
};
|
||||
|
||||
|
||||
/**
 * Remove a document from the index.
 * If an array is passed, all its elements are removed independently.
 * The remove operation is safe with regards to the 'unique' constraint.
 * O(log(n))
 */
Index.prototype.remove = function (doc) {
  var self = this, key;

  // Batch form: remove each document one by one
  if (util.isArray(doc)) {
    doc.forEach(function (d) { self.remove(d); });
    return;
  }

  key = model.getDotValue(doc, this.fieldName);

  // A sparse index never stored docs missing the field, so nothing to do
  if (key === undefined && this.sparse) { return; }

  if (util.isArray(key)) {
    // Array-valued field: the doc was stored once per distinct key
    _.uniq(key, projectForUnique).forEach(function (k) {
      self.tree.delete(k, doc);
    });
  } else {
    this.tree.delete(key, doc);
  }
};
|
||||
|
||||
|
||||
/**
 * Update a document in the index.
 * If a constraint is violated, changes are rolled back and the error rethrown.
 * Naive remove-then-insert implementation, still O(log(n)).
 */
Index.prototype.update = function (oldDoc, newDoc) {
  // Pair-array form delegates to the bulk implementation
  if (util.isArray(oldDoc)) {
    this.updateMultipleDocs(oldDoc);
    return;
  }

  this.remove(oldDoc);

  try {
    this.insert(newDoc);
  } catch (e) {
    // Restore the old version so the index is left untouched on failure
    this.insert(oldDoc);
    throw e;
  }
};
|
||||
|
||||
|
||||
/**
 * Update multiple documents in the index.
 * All old versions are removed first, then all new versions are inserted;
 * if a constraint is violated, every change is rolled back and the error
 * is rethrown, leaving the index as it was.
 * @param {Array of oldDoc, newDoc pairs} pairs
 *
 * @API private
 */
Index.prototype.updateMultipleDocs = function (pairs) {
  var i, failingI, error;

  // Phase 1: remove every old version
  for (i = 0; i < pairs.length; i += 1) {
    this.remove(pairs[i].oldDoc);
  }

  // Phase 2: insert every new version, remembering where a failure occurs
  for (i = 0; i < pairs.length; i += 1) {
    try {
      this.insert(pairs[i].newDoc);
    } catch (e) {
      error = e;
      failingI = i;
      break;
    }
  }

  // If an error was raised, roll back changes in the inverse order
  if (error) {
    // Undo the new versions that made it in...
    for (i = 0; i < failingI; i += 1) {
      this.remove(pairs[i].newDoc);
    }

    // ...then restore every old version
    for (i = 0; i < pairs.length; i += 1) {
      this.insert(pairs[i].oldDoc);
    }

    throw error;
  }
};
|
||||
|
||||
|
||||
/**
 * Revert a previous update: apply the update in the opposite direction.
 * Accepts either a single (oldDoc, newDoc) pair or an array of
 * { oldDoc, newDoc } pairs (in which case newDoc is ignored).
 */
Index.prototype.revertUpdate = function (oldDoc, newDoc) {
  // Single-document form: just swap the arguments
  if (!util.isArray(oldDoc)) {
    this.update(newDoc, oldDoc);
    return;
  }

  // Batch form: swap each pair, then run a normal batch update
  var revert = [];
  oldDoc.forEach(function (pair) {
    revert.push({ oldDoc: pair.newDoc, newDoc: pair.oldDoc });
  });
  this.update(revert);
};
|
||||
|
||||
|
||||
/**
 * Get all documents in index whose key match value (if it is a Thing) or one
 * of the elements of value (if it is an array of Things).
 * For an array, the result is the union of matches, deduplicated by _id.
 * @param {Thing} value Value to match the key against
 * @return {Array of documents}
 */
Index.prototype.getMatching = function (value) {
  var self = this;

  if (!util.isArray(value)) { return this.tree.search(value); }

  // Collect matches keyed by _id so each doc appears at most once
  var byId = {};
  value.forEach(function (v) {
    self.getMatching(v).forEach(function (doc) {
      byId[doc._id] = doc;
    });
  });

  return Object.keys(byId).map(function (_id) { return byId[_id]; });
};
|
||||
|
||||
|
||||
/**
 * Get all documents in index whose key is between the bounds defined by query
 * (e.g. { $gt: ..., $lte: ... }), delegating entirely to the underlying tree.
 * Documents are returned sorted by key.
 * @param {Query} query
 * @return {Array of documents}
 */
Index.prototype.getBetweenBounds = function (query) {
  return this.tree.betweenBounds(query);
};
|
||||
|
||||
|
||||
/**
 * Get all elements in the index, by walking every tree node in order.
 * @return {Array of documents}
 */
Index.prototype.getAll = function () {
  var docs = [];

  this.tree.executeOnEveryNode(function (node) {
    node.data.forEach(function (d) { docs.push(d); });
  });

  return docs;
};
|
||||
|
||||
|
||||
|
||||
|
||||
// Interface
|
||||
module.exports = Index;
|
835
buildfiles/app/node_modules/nedb/lib/model.js
generated
vendored
Executable file
835
buildfiles/app/node_modules/nedb/lib/model.js
generated
vendored
Executable file
@ -0,0 +1,835 @@
|
||||
/**
|
||||
* Handle models (i.e. docs)
|
||||
* Serialization/deserialization
|
||||
* Copying
|
||||
* Querying, update
|
||||
*/
|
||||
|
||||
var util = require('util')
|
||||
, _ = require('underscore')
|
||||
, modifierFunctions = {}
|
||||
, lastStepModifierFunctions = {}
|
||||
, comparisonFunctions = {}
|
||||
, logicalOperators = {}
|
||||
, arrayComparisonFunctions = {}
|
||||
;
|
||||
|
||||
|
||||
/**
 * Check a key, throw an error if the key is not valid.
 * @param {String} k key
 * @param {Model} v value, needed to treat the Date edge case
 * Non-treatable edge cases here: if part of the object is of the form
 * { $$date: number } or { $$deleted: true }, its serialized-then-deserialized
 * version will be transformed into a Date object. But you really need to want
 * it to trigger such behaviour, even when warned not to use '$' at the
 * beginning of field names...
 */
function checkKey (k, v) {
  var key = typeof k === 'number' ? k.toString() : k
    , allowedDollarKey
    ;

  // A handful of internal markers are allowed to start with '$'
  allowedDollarKey = (key === '$$date' && typeof v === 'number') ||
                     (key === '$$deleted' && v === true) ||
                     key === '$$indexCreated' ||
                     key === '$$indexRemoved';

  if (key[0] === '$' && !allowedDollarKey) {
    throw new Error('Field names cannot begin with the $ character');
  }

  if (key.indexOf('.') !== -1) {
    throw new Error('Field names cannot contain a .');
  }
}
|
||||
|
||||
|
||||
/**
 * Check a DB object and throw an error if it's not valid.
 * Works by applying checkKey to every field, recursively.
 */
function checkObject (obj) {
  // Arrays: validate every element
  if (util.isArray(obj)) {
    obj.forEach(function (element) {
      checkObject(element);
    });
  }

  // Objects: validate every key, then recurse into every value
  if (obj !== null && typeof obj === 'object') {
    Object.keys(obj).forEach(function (key) {
      checkKey(key, obj[key]);
      checkObject(obj[key]);
    });
  }
}
|
||||
|
||||
|
||||
/**
 * Serialize an object to be persisted to a one-line string.
 * For serialization/deserialization, we use the native JSON parser and not
 * eval or Function — that gives us less freedom, but data entered in the
 * database may come from users so eval and the like are not safe.
 * Accepted primitive types: Number, String, Boolean, Date, null
 * Accepted secondary types: Objects, Arrays
 * Every key is validated via checkKey (throws on '$'-prefixed or dotted keys).
 */
function serialize (obj) {
  var res;

  res = JSON.stringify(obj, function (k, v) {
    checkKey(k, v);

    if (v === undefined) { return undefined; }
    if (v === null) { return null; }

    // Hackish way of checking if object is Date (this way it works between execution contexts in node-webkit).
    // We can't use value directly because for dates it is already string in this function (date.toJSON was already called), so we use this
    // (i.e. the holder object), whose property k is still the original Date.
    if (typeof this[k].getTime === 'function') { return { $$date: this[k].getTime() }; }

    return v;
  });

  return res;
}
|
||||
|
||||
|
||||
/**
 * From a one-line representation of an object generated by serialize,
 * rebuild and return the object itself (reviving $$date wrappers as Dates).
 */
function deserialize (rawData) {
  return JSON.parse(rawData, function (k, v) {
    // A key literally named $$date holds a timestamp: revive it as a Date
    if (k === '$$date') { return new Date(v); }

    // Primitives pass through untouched
    if (v === null || typeof v === 'string' || typeof v === 'number' || typeof v === 'boolean') { return v; }

    // A { $$date: Date } wrapper collapses to the Date revived above
    if (v && v.$$date) { return v.$$date; }

    return v;
  });
}
|
||||
|
||||
|
||||
/**
 * Deep copy a DB object.
 * The optional strictKeys flag (defaulting to false) indicates whether to copy
 * everything or only fields whose keys are valid, i.e. don't begin with $ and
 * don't contain a '.'.
 * Note: Dates are returned by reference, like other leaf values.
 */
function deepCopy (obj, strictKeys) {
  // Leaf values are returned as-is
  if (obj === null ||
      typeof obj === 'boolean' ||
      typeof obj === 'number' ||
      typeof obj === 'string' ||
      util.isDate(obj)) {
    return obj;
  }

  if (util.isArray(obj)) {
    return obj.map(function (item) { return deepCopy(item, strictKeys); });
  }

  if (typeof obj === 'object') {
    var copy = {};
    Object.keys(obj).forEach(function (key) {
      // In strict mode, silently drop keys that couldn't be persisted anyway
      if (strictKeys && (key[0] === '$' || key.indexOf('.') !== -1)) { return; }
      copy[key] = deepCopy(obj[key], strictKeys);
    });
    return copy;
  }

  return undefined; // For now everything else is undefined. We should probably throw an error instead
}
|
||||
|
||||
|
||||
/**
 * Tells if an object is a primitive type or a "real" object.
 * Dates and arrays are considered primitive here.
 */
function isPrimitiveType (obj) {
  if (obj === null) { return true; }

  var t = typeof obj;
  if (t === 'boolean' || t === 'number' || t === 'string') { return true; }

  return util.isDate(obj) || util.isArray(obj);
}
|
||||
|
||||
|
||||
/**
 * Utility comparator for numbers, strings and booleans.
 * Assumes type checking was already done (a and b have the same type).
 * Returns -1, 0 or 1.
 */
function compareNSB (a, b) {
  if (a < b) { return -1; }
  return a > b ? 1 : 0;
}
|
||||
|
||||
/**
 * Lexicographic comparison of two arrays: the first differing element decides;
 * if one array is a prefix of the other, the longer one is the greater.
 */
function compareArrays (a, b) {
  var shared = Math.min(a.length, b.length), idx, cmp;

  for (idx = 0; idx < shared; idx += 1) {
    cmp = compareThings(a[idx], b[idx]);
    if (cmp !== 0) { return cmp; }
  }

  // Common section was identical, longest one wins
  return compareNSB(a.length, b.length);
}
|
||||
|
||||
|
||||
/**
 * Compare { things U undefined }
 * Things are defined as any native types (string, number, boolean, null, date) and objects
 * We need to compare with undefined as it will be used in indexes
 * In the case of objects and arrays, we deep-compare
 * If two objects dont have the same type, the (arbitrary) type hierarchy is: undefined, null, number, strings, boolean, dates, arrays, objects
 * Return -1 if a < b, 1 if a > b and 0 if a = b (note that equality here is NOT the same as defined in areThingsEqual!)
 *
 * @param {Function} _compareStrings String comparing function, returning -1, 0 or 1, overriding default string comparison (useful for languages with accented letters)
 */
function compareThings (a, b, _compareStrings) {
  var aKeys, bKeys, comp, i
    , compareStrings = _compareStrings || compareNSB;

  // Each block below implements one rung of the type hierarchy: if a is of
  // that type it sorts before anything of a later type, and vice versa.

  // undefined
  if (a === undefined) { return b === undefined ? 0 : -1; }
  if (b === undefined) { return a === undefined ? 0 : 1; }

  // null
  if (a === null) { return b === null ? 0 : -1; }
  if (b === null) { return a === null ? 0 : 1; }

  // Numbers
  if (typeof a === 'number') { return typeof b === 'number' ? compareNSB(a, b) : -1; }
  if (typeof b === 'number') { return typeof a === 'number' ? compareNSB(a, b) : 1; }

  // Strings
  if (typeof a === 'string') { return typeof b === 'string' ? compareStrings(a, b) : -1; }
  if (typeof b === 'string') { return typeof a === 'string' ? compareStrings(a, b) : 1; }

  // Booleans
  if (typeof a === 'boolean') { return typeof b === 'boolean' ? compareNSB(a, b) : -1; }
  if (typeof b === 'boolean') { return typeof a === 'boolean' ? compareNSB(a, b) : 1; }

  // Dates (compared by timestamp)
  if (util.isDate(a)) { return util.isDate(b) ? compareNSB(a.getTime(), b.getTime()) : -1; }
  if (util.isDate(b)) { return util.isDate(a) ? compareNSB(a.getTime(), b.getTime()) : 1; }

  // Arrays (first element is most significant and so on)
  if (util.isArray(a)) { return util.isArray(b) ? compareArrays(a, b) : -1; }
  if (util.isArray(b)) { return util.isArray(a) ? compareArrays(a, b) : 1; }

  // Objects: compare value-by-value over sorted key lists; if the shared
  // prefix is equal, the object with fewer keys sorts first
  aKeys = Object.keys(a).sort();
  bKeys = Object.keys(b).sort();

  for (i = 0; i < Math.min(aKeys.length, bKeys.length); i += 1) {
    comp = compareThings(a[aKeys[i]], b[bKeys[i]]);

    if (comp !== 0) { return comp; }
  }

  return compareNSB(aKeys.length, bKeys.length);
}
|
||||
|
||||
|
||||
|
||||
// ==============================================================
|
||||
// Updating documents
|
||||
// ==============================================================
|
||||
|
||||
/**
|
||||
* The signature of modifier functions is as follows
|
||||
* Their structure is always the same: recursively follow the dot notation while creating
|
||||
* the nested documents if needed, then apply the "last step modifier"
|
||||
* @param {Object} obj The model to modify
|
||||
* @param {String} field Can contain dots, in that case that means we will set a subfield recursively
|
||||
* @param {Model} value
|
||||
*/
|
||||
|
||||
/**
 * $set: assign a new value to a field (last step of the dot-notation descent).
 * @param {Object} obj Target (sub-)document
 * @param {String} field Final field name (no dots at this stage)
 * @param {Model} value New value to store
 */
lastStepModifierFunctions.$set = function (obj, field, value) {
  obj[field] = value;
};
|
||||
|
||||
|
||||
/**
 * $unset: delete a field from the (sub-)document.
 * The value argument is ignored (MongoDB compatibility: any value works).
 */
lastStepModifierFunctions.$unset = function (obj, field, value) {
  delete obj[field];
};
|
||||
|
||||
|
||||
/**
 * Push an element to the end of an array field.
 * Optional modifier $each instead of value to push several values.
 * Optional modifier $slice to slice the resulting array, see https://docs.mongodb.org/manual/reference/operator/update/slice/
 * Difference with MongoDB: if $slice is specified and not $each, we act as if value is an empty array.
 */
lastStepModifierFunctions.$push = function (obj, field, value) {
  // Create the array if it doesn't exist
  if (!obj.hasOwnProperty(field)) { obj[field] = []; }

  if (!util.isArray(obj[field])) { throw new Error("Can't $push an element on non-array values"); }

  // $slice without $each: treat as pushing nothing, then slicing
  if (value !== null && typeof value === 'object' && value.$slice && value.$each === undefined) {
    value.$each = [];
  }

  if (value !== null && typeof value === 'object' && value.$each) {
    // Only $each and (optionally) $slice may appear alongside each other
    if (Object.keys(value).length >= 3 || (Object.keys(value).length === 2 && value.$slice === undefined)) { throw new Error("Can only use $slice in cunjunction with $each when $push to array"); }
    if (!util.isArray(value.$each)) { throw new Error("$each requires an array value"); }

    value.$each.forEach(function (v) {
      obj[field].push(v);
    });

    // No (numeric) $slice: done
    if (value.$slice === undefined || typeof value.$slice !== 'number') { return; }

    if (value.$slice === 0) {
      obj[field] = [];
    } else {
      var start, end, n = obj[field].length;
      // Negative $slice keeps the last |$slice| elements,
      // positive $slice keeps the first $slice elements
      if (value.$slice < 0) {
        start = Math.max(0, n + value.$slice);
        end = n;
      } else if (value.$slice > 0) {
        start = 0;
        end = Math.min(n, value.$slice);
      }
      obj[field] = obj[field].slice(start, end);
    }
  } else {
    // Plain value: push it directly
    obj[field].push(value);
  }
};
|
||||
|
||||
|
||||
/**
 * Add an element to an array field only if it is not already in it
 * (deep equality via compareThings). No modification if the element is
 * already present. Note that it doesn't check whether the original array
 * contains duplicates. Supports the { $each: [...] } batch form.
 */
lastStepModifierFunctions.$addToSet = function (obj, field, value) {
  // Create the array if it doesn't exist
  if (!obj.hasOwnProperty(field)) { obj[field] = []; }

  if (!util.isArray(obj[field])) { throw new Error("Can't $addToSet an element on non-array values"); }

  if (value !== null && typeof value === 'object' && value.$each) {
    // Batch form: $each must be the only key and must hold an array
    if (Object.keys(value).length > 1) { throw new Error("Can't use another field in conjunction with $each"); }
    if (!util.isArray(value.$each)) { throw new Error("$each requires an array value"); }

    value.$each.forEach(function (v) {
      lastStepModifierFunctions.$addToSet(obj, field, v);
    });
    return;
  }

  // Push only if no deep-equal element is already present
  var alreadyThere = obj[field].some(function (existing) {
    return compareThings(existing, value) === 0;
  });
  if (!alreadyThere) { obj[field].push(value); }
};
|
||||
|
||||
|
||||
/**
 * Remove the first (value < 0) or last (value > 0) element of an array.
 * A value of 0 is a no-op.
 */
lastStepModifierFunctions.$pop = function (obj, field, value) {
  if (!util.isArray(obj[field])) { throw new Error("Can't $pop an element from non-array values"); }
  if (typeof value !== 'number') { throw new Error(value + " isn't an integer, can't use it with $pop"); }

  if (value === 0) { return; }

  obj[field] = value > 0
    ? obj[field].slice(0, obj[field].length - 1)
    : obj[field].slice(1);
};
|
||||
|
||||
|
||||
/**
 * Removes all instances matching value (a literal or a query) from an
 * existing array field.
 */
lastStepModifierFunctions.$pull = function (obj, field, value) {
  if (!util.isArray(obj[field])) { throw new Error("Can't $pull an element from non-array values"); }

  var arr = obj[field], idx;

  // Iterate backwards so splicing doesn't shift yet-unvisited indices
  for (idx = arr.length - 1; idx >= 0; idx -= 1) {
    if (match(arr[idx], value)) {
      arr.splice(idx, 1);
    }
  }
};
|
||||
|
||||
|
||||
/**
 * Increment a numeric field's value.
 * A missing field is initialized to the increment; an existing non-number
 * field is an error.
 */
lastStepModifierFunctions.$inc = function (obj, field, value) {
  if (typeof value !== 'number') { throw new Error(value + " must be a number"); }

  if (typeof obj[field] === 'number') {
    obj[field] += value;
    return;
  }

  if (_.has(obj, field)) {
    throw new Error("Don't use the $inc modifier on non-number fields");
  }
  obj[field] = value;
};
|
||||
|
||||
/**
 * Updates the value of the field, only if the specified value is greater than
 * the current value of the field (or the field is missing).
 */
lastStepModifierFunctions.$max = function (obj, field, value) {
  var current = obj[field];

  if (typeof current === 'undefined' || value > current) {
    obj[field] = value;
  }
};
|
||||
|
||||
/**
 * Updates the value of the field, only if the specified value is smaller than
 * the current value of the field (or the field is missing).
 */
lastStepModifierFunctions.$min = function (obj, field, value) {
  var current = obj[field];

  if (typeof current === 'undefined' || value < current) {
    obj[field] = value;
  }
};
|
||||
|
||||
// Given its name, create the complete (dot-notation-aware) modifier function:
// it recursively follows the dotted field path, creating nested documents as
// needed, then applies the corresponding "last step" modifier.
function createModifierFunction (modifier) {
  return function (obj, field, value) {
    var fieldParts = typeof field === 'string' ? field.split('.') : field;

    // Base case: no dots left, apply the last-step modifier directly
    if (fieldParts.length === 1) {
      lastStepModifierFunctions[modifier](obj, field, value);
      return;
    }

    // Recursive case: descend one level, creating the intermediate object
    if (obj[fieldParts[0]] === undefined) {
      if (modifier === '$unset') { return; }   // $unset on a missing path is a no-op
      obj[fieldParts[0]] = {};
    }
    modifierFunctions[modifier](obj[fieldParts[0]], fieldParts.slice(1), value);
  };
}

// Register the dot-notation-aware wrapper for every last-step modifier
Object.keys(lastStepModifierFunctions).forEach(function (modifier) {
  modifierFunctions[modifier] = createModifierFunction(modifier);
});
|
||||
|
||||
|
||||
/**
 * Modify a DB object according to an update query.
 * Either replaces the whole document (no '$' modifier keys in updateQuery) or
 * applies each modifier in turn to a deep copy of obj. In both cases the _id
 * is preserved and the result is validated with checkObject before returning.
 */
function modify (obj, updateQuery) {
  var keys = Object.keys(updateQuery)
    , firstChars = _.map(keys, function (item) { return item[0]; })
    , dollarFirstChars = _.filter(firstChars, function (c) { return c === '$'; })
    , newDoc, modifiers
    ;

  if (keys.indexOf('_id') !== -1 && updateQuery._id !== obj._id) { throw new Error("You cannot change a document's _id"); }

  // Either all top-level keys are modifiers, or none are
  if (dollarFirstChars.length !== 0 && dollarFirstChars.length !== firstChars.length) {
    throw new Error("You cannot mix modifiers and normal fields");
  }

  if (dollarFirstChars.length === 0) {
    // Simply replace the object with the update query contents
    newDoc = deepCopy(updateQuery);
    newDoc._id = obj._id;
  } else {
    // Apply modifiers
    modifiers = _.uniq(keys);
    newDoc = deepCopy(obj);
    modifiers.forEach(function (m) {
      var keys;

      if (!modifierFunctions[m]) { throw new Error("Unknown modifier " + m); }

      // Can't rely on Object.keys throwing on non objects since ES6
      // Not 100% satisfying as non objects can be interpreted as objects but no false negatives so we can live with it
      if (typeof updateQuery[m] !== 'object') {
        throw new Error("Modifier " + m + "'s argument must be an object");
      }

      // Apply this modifier to every field listed under it
      keys = Object.keys(updateQuery[m]);
      keys.forEach(function (k) {
        modifierFunctions[m](newDoc, k, updateQuery[m][k]);
      });
    });
  }

  // Check result is valid and return it
  checkObject(newDoc);

  if (obj._id !== newDoc._id) { throw new Error("You can't change a document's _id"); }
  return newDoc;
};
|
||||
|
||||
|
||||
// ==============================================================
|
||||
// Finding documents
|
||||
// ==============================================================
|
||||
|
||||
/**
 * Get a value from object with dot notation.
 * When the path crosses an array, a numeric path part selects that element;
 * otherwise the remaining path is mapped over every element of the array.
 * @param {Object} obj
 * @param {String} field
 */
function getDotValue (obj, field) {
  var fieldParts = typeof field === 'string' ? field.split('.') : field;

  // No object to look into: nothing can match, so return undefined
  if (!obj) { return undefined; }

  if (fieldParts.length === 0) { return obj; }
  if (fieldParts.length === 1) { return obj[fieldParts[0]]; }

  var head = obj[fieldParts[0]];

  if (!Array.isArray(head)) {
    return getDotValue(head, fieldParts.slice(1));
  }

  // If the next field is an integer, descend into only that array element
  var idx = parseInt(fieldParts[1], 10);
  if (!isNaN(idx)) {
    return getDotValue(head[idx], fieldParts.slice(2));
  }

  // Otherwise, collect the value of the remaining path for every element
  return head.map(function (item) {
    return getDotValue(item, fieldParts.slice(1));
  });
}
|
||||
|
||||
|
||||
/**
 * Check whether 'things' are equal
 * Things are defined as any native types (string, number, boolean, null, date) and objects
 * In the case of object, we check deep equality
 * @param {*} a
 * @param {*} b
 * @returns {Boolean} true if they are equal, false otherwise
 */
function areThingsEqual (a, b) {
  var aKeys, bKeys, i;

  // Strings, booleans, numbers, null: strict equality
  if (a === null || typeof a === 'string' || typeof a === 'boolean' || typeof a === 'number' ||
      b === null || typeof b === 'string' || typeof b === 'boolean' || typeof b === 'number') { return a === b; }

  // Dates: equal iff both are Dates with the same timestamp
  // (instanceof replaces util.isDate, which is deprecated and removed in recent Node versions)
  if (a instanceof Date || b instanceof Date) { return a instanceof Date && b instanceof Date && a.getTime() === b.getTime(); }

  // Array vs non-array never matches (arrays are reserved for $in semantics)
  // undefined never matches (means the field doesn't exist and can't be serialized)
  if ((!(Array.isArray(a) && Array.isArray(b)) && (Array.isArray(a) || Array.isArray(b))) || a === undefined || b === undefined) { return false; }

  // General objects (check for deep equality)
  // a and b should be objects at this point; Object.keys throws on some exotic values, treat those as unequal
  try {
    aKeys = Object.keys(a);
    bKeys = Object.keys(b);
  } catch (e) {
    return false;
  }

  if (aKeys.length !== bKeys.length) { return false; }
  for (i = 0; i < aKeys.length; i += 1) {
    if (bKeys.indexOf(aKeys[i]) === -1) { return false; }
    if (!areThingsEqual(a[aKeys[i]], b[aKeys[i]])) { return false; }
  }
  return true;
}
|
||||
|
||||
|
||||
/**
 * Check that two values are comparable with </>= operators
 * Only strings, numbers and dates can be ordered; operands must also share the same typeof
 * @param {*} a
 * @param {*} b
 * @returns {Boolean}
 */
function areComparable (a, b) {
  // instanceof Date replaces util.isDate, which is deprecated and removed in recent Node versions
  if (typeof a !== 'string' && typeof a !== 'number' && !(a instanceof Date) &&
      typeof b !== 'string' && typeof b !== 'number' && !(b instanceof Date)) {
    return false;
  }

  // Mixed types (e.g. number vs string) are never comparable
  if (typeof a !== typeof b) { return false; }

  return true;
}
|
||||
|
||||
|
||||
/**
 * Arithmetic and comparison operators
 * Each returns false (no match) when the operands are not mutually comparable.
 * @param {Native value} a Value in the object
 * @param {Native value} b Value in the query
 */
comparisonFunctions.$lt = function (a, b) {
  if (!areComparable(a, b)) { return false; }
  return a < b;
};

comparisonFunctions.$lte = function (a, b) {
  if (!areComparable(a, b)) { return false; }
  return a <= b;
};

comparisonFunctions.$gt = function (a, b) {
  if (!areComparable(a, b)) { return false; }
  return a > b;
};

comparisonFunctions.$gte = function (a, b) {
  if (!areComparable(a, b)) { return false; }
  return a >= b;
};

comparisonFunctions.$ne = function (a, b) {
  // A missing field is always "not equal" to any query value
  if (a === undefined) { return true; }
  return !areThingsEqual(a, b);
};
|
||||
|
||||
/**
 * Membership test: true if a deep-equals at least one element of the query array b
 * @throws {Error} If b is not an array
 */
comparisonFunctions.$in = function (a, b) {
  var i;

  // Array.isArray replaces util.isArray, which is deprecated and removed in recent Node versions
  if (!Array.isArray(b)) { throw new Error("$in operator called with a non-array"); }

  for (i = 0; i < b.length; i += 1) {
    if (areThingsEqual(a, b[i])) { return true; }
  }

  return false;
};

/**
 * Negated membership test
 * @throws {Error} If b is not an array
 */
comparisonFunctions.$nin = function (a, b) {
  if (!Array.isArray(b)) { throw new Error("$nin operator called with a non-array"); }

  return !comparisonFunctions.$in(a, b);
};
|
||||
|
||||
/**
 * Regular-expression match; only string values can match
 * @throws {Error} If b is not a RegExp
 */
comparisonFunctions.$regex = function (a, b) {
  // instanceof replaces util.isRegExp, which is deprecated and removed in recent Node versions
  if (!(b instanceof RegExp)) { throw new Error("$regex operator called with non regular expression"); }

  if (typeof a !== 'string') {
    return false;
  } else {
    return b.test(a);
  }
};
|
||||
|
||||
/**
 * Test whether the document's value exists (undefined means "does not exist")
 * The query argument is coerced to a boolean the way Mongo does: every value
 * is truthy except false, null, undefined and 0 — with the quirk that the
 * empty string also counts as true.
 */
comparisonFunctions.$exists = function (value, exists) {
  // Strange coercion (we should only use true/false) but that's the way Mongo does it...
  var wantsExistence = !!(exists || exists === '');

  return value === undefined ? !wantsExistence : wantsExistence;
};
|
||||
|
||||
// Specific to arrays
/**
 * Match arrays whose length equals the given integer
 * Non-arrays never match; kept loose (==) equality on purpose to preserve legacy behavior
 * @throws {Error} If value is not an integer
 */
comparisonFunctions.$size = function (obj, value) {
  // Array.isArray replaces util.isArray, which is deprecated and removed in recent Node versions
  if (!Array.isArray(obj)) { return false; }
  if (value % 1 !== 0) { throw new Error("$size operator called without an integer"); }

  return (obj.length == value);
};
|
||||
/**
 * Match arrays containing at least one element that satisfies the sub-query
 * Non-arrays never match
 */
comparisonFunctions.$elemMatch = function (obj, value) {
  // Array.isArray replaces util.isArray, which is deprecated and removed in recent Node versions
  if (!Array.isArray(obj)) { return false; }

  // Early return replaces the original while/break + result-flag pattern
  var i = obj.length;
  while (i--) {
    if (match(obj[i], value)) { return true; }
  }
  return false;
};
|
||||
// Operators that apply to the array field as a whole, rather than to each of its elements
arrayComparisonFunctions.$size = true;
arrayComparisonFunctions.$elemMatch = true;
|
||||
|
||||
|
||||
/**
 * Match any of the subqueries
 * @param {Model} obj
 * @param {Array of Queries} query
 * @throws {Error} If query is not an array
 */
logicalOperators.$or = function (obj, query) {
  var i;

  // Array.isArray replaces util.isArray, which is deprecated and removed in recent Node versions
  if (!Array.isArray(query)) { throw new Error("$or operator used without an array"); }

  for (i = 0; i < query.length; i += 1) {
    if (match(obj, query[i])) { return true; }
  }

  return false;
};
|
||||
|
||||
|
||||
/**
 * Match all of the subqueries
 * @param {Model} obj
 * @param {Array of Queries} query
 * @throws {Error} If query is not an array
 */
logicalOperators.$and = function (obj, query) {
  var i;

  // Array.isArray replaces util.isArray, which is deprecated and removed in recent Node versions
  if (!Array.isArray(query)) { throw new Error("$and operator used without an array"); }

  for (i = 0; i < query.length; i += 1) {
    if (!match(obj, query[i])) { return false; }
  }

  return true;
};
|
||||
|
||||
|
||||
/**
 * Inverted match of the query
 * @param {Model} obj
 * @param {Query} query
 */
logicalOperators.$not = function (obj, query) {
  var matched = match(obj, query);
  return !matched;
};
|
||||
|
||||
|
||||
/**
 * Use a function to match: the function is called with the document as `this`
 * and must return a boolean
 * @param {Model} obj
 * @param {Function} fn
 * @throws {Error} If fn is not a function or does not return a boolean
 */
logicalOperators.$where = function (obj, fn) {
  var outcome;

  if (!_.isFunction(fn)) { throw new Error("$where operator used without a function"); }

  outcome = fn.call(obj);
  if (!_.isBoolean(outcome)) { throw new Error("$where function must return boolean"); }

  return outcome;
};
|
||||
|
||||
|
||||
/**
 * Tell if a given document matches a query
 * @param {Object} obj Document to check
 * @param {Object} query
 * @returns {Boolean}
 */
function match (obj, query) {
  var keys, k, fragment, idx;

  // Primitive query against a primitive type: wrap the value under a throwaway
  // key so the generic { key: value } matcher below can handle it
  if (isPrimitiveType(obj) || isPrimitiveType(query)) {
    return matchQueryPart({ needAKey: obj }, 'needAKey', query);
  }

  // Normal query: every top-level part must match
  keys = Object.keys(query);
  for (idx = 0; idx < keys.length; idx += 1) {
    k = keys[idx];
    fragment = query[k];

    if (k.charAt(0) === '$') {
      // Logical operator ($or, $and, $not, $where)
      if (!logicalOperators[k]) { throw new Error("Unknown logical operator " + k); }
      if (!logicalOperators[k](obj, fragment)) { return false; }
    } else if (!matchQueryPart(obj, k, fragment)) {
      return false;
    }
  }

  return true;
}
|
||||
|
||||
|
||||
/**
 * Match an object against a specific { key: value } part of a query
 * if the treatObjAsValue flag is set, don't try to match every part separately, but the array as a whole
 * @param {Object} obj - Document (or wrapped primitive) being tested
 * @param {String} queryKey - Dot-notation field name inside obj
 * @param {*} queryValue - Query fragment for that field (native value, operator object or RegExp)
 * @param {Boolean} treatObjAsValue - When truthy, compare an array field as a single value
 * @returns {Boolean} true if the document satisfies this query part
 * @throws {Error} On mixed operator/normal-field query objects or unknown comparison operators
 */
function matchQueryPart (obj, queryKey, queryValue, treatObjAsValue) {
  var objValue = getDotValue(obj, queryKey)
    , i, keys, firstChars, dollarFirstChars;

  // Check if the value is an array if we don't force a treatment as value
  if (util.isArray(objValue) && !treatObjAsValue) {
    // If the queryValue is an array, try to perform an exact match
    if (util.isArray(queryValue)) {
      return matchQueryPart(obj, queryKey, queryValue, true);
    }

    // Check if we are using an array-specific comparison function ($size, $elemMatch)
    if (queryValue !== null && typeof queryValue === 'object' && !util.isRegExp(queryValue)) {
      keys = Object.keys(queryValue);
      for (i = 0; i < keys.length; i += 1) {
        if (arrayComparisonFunctions[keys[i]]) { return matchQueryPart(obj, queryKey, queryValue, true); }
      }
    }

    // If not, treat it as an array of { obj, query } where there needs to be at least one match
    for (i = 0; i < objValue.length; i += 1) {
      if (matchQueryPart({ k: objValue[i] }, 'k', queryValue)) { return true; }   // k here could be any string
    }
    return false;
  }

  // queryValue is an actual object. Determine whether it contains comparison operators
  // or only normal fields. Mixed objects are not allowed
  if (queryValue !== null && typeof queryValue === 'object' && !util.isRegExp(queryValue) && !util.isArray(queryValue)) {
    keys = Object.keys(queryValue);
    firstChars = _.map(keys, function (item) { return item[0]; });
    dollarFirstChars = _.filter(firstChars, function (c) { return c === '$'; });

    if (dollarFirstChars.length !== 0 && dollarFirstChars.length !== firstChars.length) {
      throw new Error("You cannot mix operators and normal fields");
    }

    // queryValue is an object of this form: { $comparisonOperator1: value1, ... }
    // All comparison operators must pass
    if (dollarFirstChars.length > 0) {
      for (i = 0; i < keys.length; i += 1) {
        if (!comparisonFunctions[keys[i]]) { throw new Error("Unknown comparison function " + keys[i]); }

        if (!comparisonFunctions[keys[i]](objValue, queryValue[keys[i]])) { return false; }
      }
      return true;
    }
  }

  // Using regular expressions with basic querying
  if (util.isRegExp(queryValue)) { return comparisonFunctions.$regex(objValue, queryValue); }

  // queryValue is either a native value or a normal object
  // Basic matching is possible
  if (!areThingsEqual(objValue, queryValue)) { return false; }

  return true;
}
|
||||
|
||||
|
||||
// Interface: serialization, copying, validation, modification and matching helpers
module.exports.serialize = serialize;
module.exports.deserialize = deserialize;
module.exports.deepCopy = deepCopy;
module.exports.checkObject = checkObject;
module.exports.isPrimitiveType = isPrimitiveType;
module.exports.modify = modify;
module.exports.getDotValue = getDotValue;
module.exports.match = match;
module.exports.areThingsEqual = areThingsEqual;
module.exports.compareThings = compareThings;
|
314
buildfiles/app/node_modules/nedb/lib/persistence.js
generated
vendored
Executable file
314
buildfiles/app/node_modules/nedb/lib/persistence.js
generated
vendored
Executable file
@ -0,0 +1,314 @@
|
||||
/**
|
||||
* Handle every persistence-related task
|
||||
* The interface Datastore expects to be implemented is
|
||||
* * Persistence.loadDatabase(callback) and callback has signature err
|
||||
* * Persistence.persistNewState(newDocs, callback) where newDocs is an array of documents and callback has signature err
|
||||
*/
|
||||
|
||||
var storage = require('./storage')
|
||||
, path = require('path')
|
||||
, model = require('./model')
|
||||
, async = require('async')
|
||||
, customUtils = require('./customUtils')
|
||||
, Index = require('./indexes')
|
||||
;
|
||||
|
||||
|
||||
/**
 * Create a new Persistence object for database options.db
 * @param {Datastore} options.db
 * @param {Boolean} options.nodeWebkitAppName Optional, specify the name of your NW app if you want options.filename to be relative to the directory where
 *                                            Node Webkit stores application data such as cookies and local storage (the best place to store data in my opinion)
 * @param {Number} options.corruptAlertThreshold Optional, fraction of corrupt datafile lines tolerated on load (defaults to 0.1)
 * @param {Function} options.afterSerialization Optional hook applied to every serialized line before writing
 * @param {Function} options.beforeDeserialization Optional inverse hook applied to every line read from disk
 * @throws {Error} If the filename ends with '~', if only one of the two hooks is given,
 *                 or if the hooks fail the round-trip sanity check
 */
function Persistence (options) {
  var i, j, randomString;

  this.db = options.db;
  this.inMemoryOnly = this.db.inMemoryOnly;
  this.filename = this.db.filename;
  this.corruptAlertThreshold = options.corruptAlertThreshold !== undefined ? options.corruptAlertThreshold : 0.1;

  // The '~' suffix is reserved for the temporary file used by crash-safe writes
  if (!this.inMemoryOnly && this.filename && this.filename.charAt(this.filename.length - 1) === '~') {
    throw new Error("The datafile name can't end with a ~, which is reserved for crash safe backup files");
  }

  // After serialization and before deserialization hooks with some basic sanity checks
  if (options.afterSerialization && !options.beforeDeserialization) {
    throw new Error("Serialization hook defined but deserialization hook undefined, cautiously refusing to start NeDB to prevent dataloss");
  }
  if (!options.afterSerialization && options.beforeDeserialization) {
    throw new Error("Serialization hook undefined but deserialization hook defined, cautiously refusing to start NeDB to prevent dataloss");
  }
  this.afterSerialization = options.afterSerialization || function (s) { return s; };
  this.beforeDeserialization = options.beforeDeserialization || function (s) { return s; };
  // Probabilistic round-trip check: the hooks must be exact inverses on random strings
  // of lengths 1..29 (10 samples each), otherwise data written now could not be read back later
  for (i = 1; i < 30; i += 1) {
    for (j = 0; j < 10; j += 1) {
      randomString = customUtils.uid(i);
      if (this.beforeDeserialization(this.afterSerialization(randomString)) !== randomString) {
        throw new Error("beforeDeserialization is not the reverse of afterSerialization, cautiously refusing to start NeDB to prevent dataloss");
      }
    }
  }

  // For NW apps, store data in the same directory where NW stores application data
  if (this.filename && options.nodeWebkitAppName) {
    console.log("==================================================================");
    console.log("WARNING: The nodeWebkitAppName option is deprecated");
    console.log("To get the path to the directory where Node Webkit stores the data");
    console.log("for your app, use the internal nw.gui module like this");
    console.log("require('nw.gui').App.dataPath");
    console.log("See https://github.com/rogerwang/node-webkit/issues/500");
    console.log("==================================================================");
    this.filename = Persistence.getNWAppFilename(options.nodeWebkitAppName, this.filename);
  }
};
|
||||
|
||||
|
||||
/**
 * Check if a directory exists and create it on the fly if it is not the case
 * cb is optional, signature: err
 */
Persistence.ensureDirectoryExists = function (dir, cb) {
  var done = cb || function () {};

  storage.mkdirp(dir, function (err) {
    done(err);
  });
};
|
||||
|
||||
|
||||
|
||||
|
||||
/**
 * Return the path to the datafile if the given filename is relative to the directory where Node Webkit stores
 * data for this application. Probably the best place to store data
 * @param {String} appName - Name of the NW application
 * @param {String} relativeFilename - Filename relative to the NW data directory
 * @returns {String} Absolute path under <platform data dir>/<appName>/nedb-data/
 * @throws {Error} If the platform's base data directory can't be determined or the platform is unsupported
 */
Persistence.getNWAppFilename = function (appName, relativeFilename) {
  var home;

  switch (process.platform) {
    case 'win32':
    case 'win64':
      home = process.env.LOCALAPPDATA || process.env.APPDATA;
      if (!home) { throw new Error("Couldn't find the base application data folder"); }
      home = path.join(home, appName);
      break;
    case 'darwin':
      home = process.env.HOME;
      if (!home) { throw new Error("Couldn't find the base application data directory"); }
      home = path.join(home, 'Library', 'Application Support', appName);
      break;
    case 'linux':
      home = process.env.HOME;
      if (!home) { throw new Error("Couldn't find the base application data directory"); }
      home = path.join(home, '.config', appName);
      break;
    default:
      // throw terminates the case; the original's trailing break was unreachable
      throw new Error("Can't use the Node Webkit relative path for platform " + process.platform);
  }

  return path.join(home, 'nedb-data', relativeFilename);
};
|
||||
|
||||
|
||||
/**
 * Persist cached database
 * This serves as a compaction function since the cache always contains only the number of documents in the collection
 * while the data file is append-only so it may grow larger
 * @param {Function} cb Optional callback, signature: err
 */
Persistence.prototype.persistCachedDatabase = function (cb) {
  var done = cb || function () {}
    , self = this
    , payload = ''
    , docs, fieldNames, fieldName, idx, i
    ;

  if (this.inMemoryOnly) { return done(null); }

  // One serialized document per line
  docs = this.db.getAllData();
  for (i = 0; i < docs.length; i += 1) {
    payload += this.afterSerialization(model.serialize(docs[i])) + '\n';
  }

  // Persist the definition of every index except _id (managed by datastore.js) so they can be rebuilt on load
  fieldNames = Object.keys(this.db.indexes);
  for (idx = 0; idx < fieldNames.length; idx += 1) {
    fieldName = fieldNames[idx];
    if (fieldName != "_id") {
      payload += this.afterSerialization(model.serialize({ $$indexCreated: { fieldName: fieldName, unique: this.db.indexes[fieldName].unique, sparse: this.db.indexes[fieldName].sparse } })) + '\n';
    }
  }

  storage.crashSafeWriteFile(this.filename, payload, function (err) {
    if (err) { return done(err); }
    self.db.emit('compaction.done');
    return done(null);
  });
};
|
||||
|
||||
|
||||
/**
 * Queue a rewrite of the datafile
 * Goes through the executor so compaction is serialized with all other datastore operations
 * (the task object's `this` key is the call context the executor will apply `fn` with)
 */
Persistence.prototype.compactDatafile = function () {
  this.db.executor.push({ this: this, fn: this.persistCachedDatabase, arguments: [] });
};
|
||||
|
||||
|
||||
/**
 * Set automatic compaction every interval ms
 * @param {Number} interval in milliseconds, with an enforced minimum of 5 seconds
 */
Persistence.prototype.setAutocompactionInterval = function (interval) {
  var self = this;
  var period = Math.max(interval || 0, 5000);   // enforce the 5 s floor

  // Never run two autocompaction timers at once
  this.stopAutocompaction();

  this.autocompactionIntervalId = setInterval(function () {
    self.compactDatafile();
  }, period);
};
|
||||
|
||||
|
||||
/**
 * Stop autocompaction (do nothing if autocompaction was not running)
 */
Persistence.prototype.stopAutocompaction = function () {
  if (!this.autocompactionIntervalId) { return; }
  clearInterval(this.autocompactionIntervalId);
};
|
||||
|
||||
|
||||
/**
 * Persist new state for the given newDocs (can be insertion, update or removal)
 * Use an append-only format
 * @param {Array} newDocs Can be empty if no doc was updated/removed
 * @param {Function} cb Optional, signature: err
 */
Persistence.prototype.persistNewState = function (newDocs, cb) {
  var done = cb || function () {}
    , payload = ''
    , i
    ;

  // In-memory only datastore: nothing to write
  if (this.inMemoryOnly) { return done(null); }

  for (i = 0; i < newDocs.length; i += 1) {
    payload += this.afterSerialization(model.serialize(newDocs[i])) + '\n';
  }

  // Nothing changed, skip the disk touch entirely
  if (payload.length === 0) { return done(null); }

  storage.appendFile(this.filename, payload, 'utf8', function (err) {
    done(err);
  });
};
|
||||
|
||||
|
||||
/**
 * From a database's raw data, return the corresponding
 * machine understandable collection
 * @param {String} rawData - Full datafile contents, one serialized doc or index spec per line
 * @returns {Object} { data: Array of documents (last version of each _id wins),
 *                     indexes: map of fieldName -> index options }
 * @throws {Error} If more than corruptAlertThreshold of the lines fail to deserialize
 */
Persistence.prototype.treatRawData = function (rawData) {
  var data = rawData.split('\n')
    , dataById = {}
    , tdata = []
    , i
    , indexes = {}
    , corruptItems = -1 // Last line of every data file is usually blank so not really corrupt
    ;

  for (i = 0; i < data.length; i += 1) {
    var doc;

    try {
      doc = model.deserialize(this.beforeDeserialization(data[i]));
      if (doc._id) {
        // Regular document: a later $$deleted marker removes any earlier version of the same _id
        if (doc.$$deleted === true) {
          delete dataById[doc._id];
        } else {
          dataById[doc._id] = doc;
        }
      } else if (doc.$$indexCreated && doc.$$indexCreated.fieldName != undefined) {
        // Index creation record: remember its options so the index can be rebuilt
        indexes[doc.$$indexCreated.fieldName] = doc.$$indexCreated;
      } else if (typeof doc.$$indexRemoved === "string") {
        delete indexes[doc.$$indexRemoved];
      }
    } catch (e) {
      // Unparseable line: count it, keep going
      corruptItems += 1;
    }
  }

  // A bit lenient on corruption: only abort when the corrupt fraction exceeds the threshold
  if (data.length > 0 && corruptItems / data.length > this.corruptAlertThreshold) {
    throw new Error("More than " + Math.floor(100 * this.corruptAlertThreshold) + "% of the data file is corrupt, the wrong beforeDeserialization hook may be used. Cautiously refusing to start NeDB to prevent dataloss");
  }

  // Flatten the by-id map: only the latest surviving version of each document is kept
  Object.keys(dataById).forEach(function (k) {
    tdata.push(dataById[k]);
  });

  return { data: tdata, indexes: indexes };
};
|
||||
|
||||
|
||||
/**
 * Load the database
 * 1) Create all indexes
 * 2) Insert all data
 * 3) Compact the database
 * This means pulling data out of the data file or creating it if it doesn't exist
 * Also, all data is persisted right away, which has the effect of compacting the database file
 * This operation is very quick at startup for a big collection (60ms for ~10k docs)
 * @param {Function} cb Optional callback, signature: err
 */
Persistence.prototype.loadDatabase = function (cb) {
  var callback = cb || function () {}
    , self = this
    ;

  // Start from a clean slate: drop any in-memory data before reloading
  self.db.resetIndexes();

  // In-memory only datastore: nothing on disk to load
  if (self.inMemoryOnly) { return callback(null); }

  async.waterfall([
    function (cb) {
      // Order matters: ensure the directory exists, repair a half-written
      // datafile if a previous write crashed, then read and parse the contents
      Persistence.ensureDirectoryExists(path.dirname(self.filename), function (err) {
        storage.ensureDatafileIntegrity(self.filename, function (err) {
          storage.readFile(self.filename, 'utf8', function (err, rawData) {
            if (err) { return cb(err); }

            try {
              var treatedData = self.treatRawData(rawData);
            } catch (e) {
              return cb(e);
            }

            // Recreate all indexes in the datafile
            Object.keys(treatedData.indexes).forEach(function (key) {
              self.db.indexes[key] = new Index(treatedData.indexes[key]);
            });

            // Fill cached database (i.e. all indexes) with data
            try {
              self.db.resetIndexes(treatedData.data);
            } catch (e) {
              self.db.resetIndexes();   // Rollback any index which didn't fail
              return cb(e);
            }

            // Rewrite the datafile right away, so loading doubles as compaction
            self.db.persistence.persistCachedDatabase(cb);
          });
        });
      });
    }
  ], function (err) {
    if (err) { return callback(err); }

    // Database is ready: release any operations queued while loading
    self.db.executor.processBuffer();
    return callback(null);
  });
};
|
||||
|
||||
|
||||
// Interface: one Persistence instance handles all disk I/O for a Datastore
module.exports = Persistence;
|
136
buildfiles/app/node_modules/nedb/lib/storage.js
generated
vendored
Executable file
136
buildfiles/app/node_modules/nedb/lib/storage.js
generated
vendored
Executable file
@ -0,0 +1,136 @@
|
||||
/**
|
||||
* Way data is stored for this database
|
||||
* For a Node.js/Node Webkit database it's the file system
|
||||
* For a browser-side database it's localforage which chooses the best option depending on user browser (IndexedDB then WebSQL then localStorage)
|
||||
*
|
||||
* This version is the Node.js/Node Webkit version
|
||||
* It's essentially fs, mkdirp and crash safe write and read functions
|
||||
*/
|
||||
|
||||
var fs = require('fs')
|
||||
, mkdirp = require('mkdirp')
|
||||
, async = require('async')
|
||||
, path = require('path')
|
||||
, storage = {}
|
||||
;
|
||||
|
||||
// Expose the raw fs primitives behind the storage facade so another backend
// (e.g. the browser build) can substitute the same interface
storage.exists = fs.exists;        // NOTE(review): fs.exists is deprecated in Node — presumably kept for interface compatibility, confirm before replacing
storage.rename = fs.rename;
storage.writeFile = fs.writeFile;
storage.unlink = fs.unlink;
storage.appendFile = fs.appendFile;
storage.readFile = fs.readFile;
storage.mkdirp = mkdirp;
||||
|
||||
|
||||
/**
 * Remove the given file if it exists; succeed silently when it doesn't
 * @param {String} file
 * @param {Function} callback signature: err
 */
storage.ensureFileDoesntExist = function (file, callback) {
  storage.exists(file, function (exists) {
    // Nothing on disk: already in the desired state
    if (!exists) { return callback(null); }

    storage.unlink(file, function (err) {
      return callback(err);
    });
  });
};
|
||||
|
||||
|
||||
/**
 * Flush data in OS buffer to storage if corresponding option is set
 * @param {String} options.filename
 * @param {Boolean} options.isDir Optional, defaults to false
 * If options is a string, it is assumed that the flush of the file (not dir) called options was requested
 */
storage.flushToStorage = function (options, callback) {
  var filename, flags;

  if (typeof options === 'string') {
    // Shorthand form: flush a regular file
    filename = options;
    flags = 'r+';
  } else {
    filename = options.filename;
    flags = options.isDir ? 'r' : 'r+';
  }

  // Windows can't fsync (FlushFileBuffers) directories. We can live with this as it cannot cause 100% dataloss
  // except in the very rare event of the first time database is loaded and a crash happens
  if (flags === 'r' && (process.platform === 'win32' || process.platform === 'win64')) { return callback(null); }

  fs.open(filename, flags, function (err, fd) {
    if (err) { return callback(err); }
    fs.fsync(fd, function (errFS) {
      // Always close the descriptor, even when fsync failed
      fs.close(fd, function (errC) {
        if (!errFS && !errC) { return callback(null); }

        var flushError = new Error('Failed to flush to storage');
        flushError.errorOnFsync = errFS;
        flushError.errorOnClose = errC;
        return callback(flushError);
      });
    });
  });
};
|
||||
|
||||
|
||||
/**
 * Fully write or rewrite the datafile, immune to crashes during the write operation (data will not be lost)
 * The ordering is the crash-safety contract: flush dir -> flush old file -> write temp file ->
 * flush temp -> atomic rename over the original -> flush dir again
 * @param {String} filename
 * @param {String} data
 * @param {Function} cb Optional callback, signature: err
 */
storage.crashSafeWriteFile = function (filename, data, cb) {
  var callback = cb || function () {}
    , tempFilename = filename + '~';

  async.waterfall([
    async.apply(storage.flushToStorage, { filename: path.dirname(filename), isDir: true })
  , function (cb) {
      // Flush the current datafile (if any) so its contents are safely on disk before replacing it
      storage.exists(filename, function (exists) {
        if (exists) {
          storage.flushToStorage(filename, function (err) { return cb(err); });
        } else {
          return cb();
        }
      });
    }
  , function (cb) {
      // Write the new contents to the '~' temp file first, never to the live file
      storage.writeFile(tempFilename, data, function (err) { return cb(err); });
    }
  , async.apply(storage.flushToStorage, tempFilename)
  , function (cb) {
      // Rename is atomic on POSIX: readers see either the old or the new complete file
      storage.rename(tempFilename, filename, function (err) { return cb(err); });
    }
  , async.apply(storage.flushToStorage, { filename: path.dirname(filename), isDir: true })
  ], function (err) { return callback(err); })
};
|
||||
|
||||
|
||||
/**
 * Ensure the datafile contains all the data, even if there was a crash during a full file write
 * @param {String} filename
 * @param {Function} callback signature: err
 */
storage.ensureDatafileIntegrity = function (filename, callback) {
  var backupFilename = filename + '~';

  storage.exists(filename, function (datafileExists) {
    // The previous write completed: nothing to repair
    if (datafileExists) { return callback(null); }

    storage.exists(backupFilename, function (backupExists) {
      if (!backupExists) {
        // Brand new database: create an empty datafile
        return storage.writeFile(filename, '', 'utf8', function (err) { callback(err); });
      }

      // A full rewrite crashed mid-way: fall back to the crash-safe backup
      storage.rename(backupFilename, filename, function (err) { return callback(err); });
    });
  });
};
|
||||
|
||||
|
||||
|
||||
// Interface: file-system backend used by persistence.js
module.exports = storage;
|
Reference in New Issue
Block a user