decaffeinate: Convert DeleteQueueManager.coffee and 58 other files to JS
parent 249b7247b5
commit 1fa8882674
59 changed files with 9218 additions and 6972 deletions
--- DeleteQueueManager.coffee
+++ DeleteQueueManager.js
@@ -1,79 +1,102 @@
-Settings = require('settings-sharelatex')
-RedisManager = require "./RedisManager"
-ProjectManager = require "./ProjectManager"
-logger = require "logger-sharelatex"
-metrics = require "./Metrics"
-async = require "async"
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let DeleteQueueManager;
+const Settings = require('settings-sharelatex');
+const RedisManager = require("./RedisManager");
+const ProjectManager = require("./ProjectManager");
+const logger = require("logger-sharelatex");
+const metrics = require("./Metrics");
+const async = require("async");

-# Maintain a sorted set of project flushAndDelete requests, ordered by timestamp
-# (ZADD), and process them from oldest to newest. A flushAndDelete request comes
-# from real-time and is triggered when a user leaves a project.
-#
-# The aim is to remove the project from redis 5 minutes after the last request
-# if there has been no activity (document updates) in that time. If there is
-# activity we can expect a further flushAndDelete request when the editing user
-# leaves the project.
-#
-# If a new flushAndDelete request comes in while an existing request is already
-# in the queue we update the timestamp as we can postpone flushing further.
-#
-# Documents are processed by checking the queue, seeing if the first entry is
-# older than 5 minutes, and popping it from the queue in that case.
+// Maintain a sorted set of project flushAndDelete requests, ordered by timestamp
+// (ZADD), and process them from oldest to newest. A flushAndDelete request comes
+// from real-time and is triggered when a user leaves a project.
+//
+// The aim is to remove the project from redis 5 minutes after the last request
+// if there has been no activity (document updates) in that time. If there is
+// activity we can expect a further flushAndDelete request when the editing user
+// leaves the project.
+//
+// If a new flushAndDelete request comes in while an existing request is already
+// in the queue we update the timestamp as we can postpone flushing further.
+//
+// Documents are processed by checking the queue, seeing if the first entry is
+// older than 5 minutes, and popping it from the queue in that case.

-module.exports = DeleteQueueManager =
-  flushAndDeleteOldProjects: (options, callback) ->
-    startTime = Date.now()
-    cutoffTime = startTime - options.min_delete_age + 100 * (Math.random() - 0.5)
-    count = 0
+module.exports = (DeleteQueueManager = {
+  flushAndDeleteOldProjects(options, callback) {
+    const startTime = Date.now();
+    const cutoffTime = (startTime - options.min_delete_age) + (100 * (Math.random() - 0.5));
+    let count = 0;

-    flushProjectIfNotModified = (project_id, flushTimestamp, cb) ->
-      ProjectManager.getProjectDocsTimestamps project_id, (err, timestamps) ->
-        return callback(err) if err?
-        if timestamps.length == 0
-          logger.log {project_id}, "skipping flush of queued project - no timestamps"
-          return cb()
-        # are any of the timestamps newer than the time the project was flushed?
-        for timestamp in timestamps when timestamp > flushTimestamp
-          metrics.inc "queued-delete-skipped"
-          logger.debug {project_id, timestamps, flushTimestamp}, "found newer timestamp, will skip delete"
-          return cb()
-        logger.log {project_id, flushTimestamp}, "flushing queued project"
-        ProjectManager.flushAndDeleteProjectWithLocks project_id, {skip_history_flush: false}, (err) ->
-          if err?
-            logger.err {project_id, err}, "error flushing queued project"
-          metrics.inc "queued-delete-completed"
-          return cb(null, true)
+    const flushProjectIfNotModified = (project_id, flushTimestamp, cb) => ProjectManager.getProjectDocsTimestamps(project_id, function(err, timestamps) {
+      if (err != null) { return callback(err); }
+      if (timestamps.length === 0) {
+        logger.log({project_id}, "skipping flush of queued project - no timestamps");
+        return cb();
+      }
+      // are any of the timestamps newer than the time the project was flushed?
+      for (let timestamp of Array.from(timestamps)) {
+        if (timestamp > flushTimestamp) {
+          metrics.inc("queued-delete-skipped");
+          logger.debug({project_id, timestamps, flushTimestamp}, "found newer timestamp, will skip delete");
+          return cb();
+        }
+      }
+      logger.log({project_id, flushTimestamp}, "flushing queued project");
+      return ProjectManager.flushAndDeleteProjectWithLocks(project_id, {skip_history_flush: false}, function(err) {
+        if (err != null) {
+          logger.err({project_id, err}, "error flushing queued project");
+        }
+        metrics.inc("queued-delete-completed");
+        return cb(null, true);
+      });
+    });

-    flushNextProject = () ->
-      now = Date.now()
-      if now - startTime > options.timeout
-        logger.log "hit time limit on flushing old projects"
-        return callback(null, count)
-      if count > options.limit
-        logger.log "hit count limit on flushing old projects"
-        return callback(null, count)
-      RedisManager.getNextProjectToFlushAndDelete cutoffTime, (err, project_id, flushTimestamp, queueLength) ->
-        return callback(err) if err?
-        return callback(null, count) if !project_id?
-        logger.log {project_id, queueLength: queueLength}, "flushing queued project"
-        metrics.globalGauge "queued-flush-backlog", queueLength
-        flushProjectIfNotModified project_id, flushTimestamp, (err, flushed) ->
-          count++ if flushed
-          flushNextProject()
+    var flushNextProject = function() {
+      const now = Date.now();
+      if ((now - startTime) > options.timeout) {
+        logger.log("hit time limit on flushing old projects");
+        return callback(null, count);
+      }
+      if (count > options.limit) {
+        logger.log("hit count limit on flushing old projects");
+        return callback(null, count);
+      }
+      return RedisManager.getNextProjectToFlushAndDelete(cutoffTime, function(err, project_id, flushTimestamp, queueLength) {
+        if (err != null) { return callback(err); }
+        if ((project_id == null)) { return callback(null, count); }
+        logger.log({project_id, queueLength}, "flushing queued project");
+        metrics.globalGauge("queued-flush-backlog", queueLength);
+        return flushProjectIfNotModified(project_id, flushTimestamp, function(err, flushed) {
+          if (flushed) { count++; }
+          return flushNextProject();
+        });
+      });
+    };

-    flushNextProject()
+    return flushNextProject();
+  },

-  startBackgroundFlush: () ->
-    SHORT_DELAY = 10
-    LONG_DELAY = 1000
-    doFlush = () ->
-      if Settings.shuttingDown
-        logger.warn "discontinuing background flush due to shutdown"
-        return
-      DeleteQueueManager.flushAndDeleteOldProjects {
+  startBackgroundFlush() {
+    const SHORT_DELAY = 10;
+    const LONG_DELAY = 1000;
+    var doFlush = function() {
+      if (Settings.shuttingDown) {
+        logger.warn("discontinuing background flush due to shutdown");
+        return;
+      }
+      return DeleteQueueManager.flushAndDeleteOldProjects({
         timeout:1000,
         min_delete_age:3*60*1000,
-        limit:1000 # high value, to ensure we always flush enough projects
-      }, (err, flushed) ->
-        setTimeout doFlush, (if flushed > 10 then SHORT_DELAY else LONG_DELAY)
-    doFlush()
+        limit:1000 // high value, to ensure we always flush enough projects
+      }, (err, flushed) => setTimeout(doFlush, (flushed > 10 ? SHORT_DELAY : LONG_DELAY)));
+    };
+    return doFlush();
+  }
+});
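The queue described in the comments above maps onto two Redis operations. A minimal sketch of that pattern, using generic redis commands; the real logic lives in RedisManager, which is not part of this diff, so the key name and function names here are illustrative:

const QUEUE_KEY = "flush-and-delete-queue"; // hypothetical key name

// A flushAndDelete request upserts the project with the current timestamp;
// ZADD overwrites the score, which is what postpones an already-queued flush.
function queueFlushAndDeleteRequest(client, project_id, callback) {
  client.zadd(QUEUE_KEY, Date.now(), project_id, callback);
}

// Processing takes the oldest entry and pops it only if it is older than the
// cutoff (i.e. it was queued more than ~5 minutes ago).
function getNextProjectToFlushAndDelete(client, cutoffTime, callback) {
  client.zrangebyscore(QUEUE_KEY, 0, cutoffTime, "WITHSCORES", "LIMIT", 0, 1, function(err, members) {
    if (err != null) { return callback(err); }
    if (members.length === 0) { return callback(); } // nothing old enough yet
    const [project_id, flushTimestamp] = members;
    client.zrem(QUEUE_KEY, project_id, function(err) {
      if (err != null) { return callback(err); }
      return callback(null, project_id, parseInt(flushTimestamp, 10));
    });
  });
}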
--- DiffCodec.coffee
+++ DiffCodec.js
@@ -1,31 +1,48 @@
-diff_match_patch = require("../lib/diff_match_patch").diff_match_patch
-dmp = new diff_match_patch()
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let DiffCodec;
+const {
+  diff_match_patch
+} = require("../lib/diff_match_patch");
+const dmp = new diff_match_patch();

-module.exports = DiffCodec =
-  ADDED: 1
-  REMOVED: -1
-  UNCHANGED: 0
+module.exports = (DiffCodec = {
+  ADDED: 1,
+  REMOVED: -1,
+  UNCHANGED: 0,

-  diffAsShareJsOp: (before, after, callback = (error, ops) ->) ->
-    diffs = dmp.diff_main(before.join("\n"), after.join("\n"))
-    dmp.diff_cleanupSemantic(diffs)
+  diffAsShareJsOp(before, after, callback) {
+    if (callback == null) { callback = function(error, ops) {}; }
+    const diffs = dmp.diff_main(before.join("\n"), after.join("\n"));
+    dmp.diff_cleanupSemantic(diffs);

-    ops = []
-    position = 0
-    for diff in diffs
-      type = diff[0]
-      content = diff[1]
-      if type == @ADDED
-        ops.push
-          i: content
+    const ops = [];
+    let position = 0;
+    for (let diff of Array.from(diffs)) {
+      const type = diff[0];
+      const content = diff[1];
+      if (type === this.ADDED) {
+        ops.push({
+          i: content,
           p: position
-        position += content.length
-      else if type == @REMOVED
-        ops.push
-          d: content
+        });
+        position += content.length;
+      } else if (type === this.REMOVED) {
+        ops.push({
+          d: content,
           p: position
-      else if type == @UNCHANGED
-        position += content.length
-      else
-        throw "Unknown type"
-    callback null, ops
+        });
+      } else if (type === this.UNCHANGED) {
+        position += content.length;
+      } else {
+        throw "Unknown type";
+      }
+    }
+    return callback(null, ops);
+  }
+});
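For reference, a usage sketch of the converted module: both arguments are arrays of lines, which are joined with newlines before diffing, and the result is a list of ShareJS-style text ops (inserts as {i, p}, deletes as {d, p}, with p a character offset):

const DiffCodec = require("./DiffCodec");
DiffCodec.diffAsShareJsOp(["hello world"], ["hello brave world"], function(error, ops) {
  console.log(ops); // => [ { i: 'brave ', p: 6 } ]
});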
--- DispatchManager.coffee
+++ DispatchManager.js
@@ -1,55 +1,81 @@
-Settings = require('settings-sharelatex')
-logger = require('logger-sharelatex')
-Keys = require('./UpdateKeys')
-redis = require("redis-sharelatex")
-Errors = require("./Errors")
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS202: Simplify dynamic range loops
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let DispatchManager;
+const Settings = require('settings-sharelatex');
+const logger = require('logger-sharelatex');
+const Keys = require('./UpdateKeys');
+const redis = require("redis-sharelatex");
+const Errors = require("./Errors");

-UpdateManager = require('./UpdateManager')
-Metrics = require('./Metrics')
-RateLimitManager = require('./RateLimitManager')
+const UpdateManager = require('./UpdateManager');
+const Metrics = require('./Metrics');
+const RateLimitManager = require('./RateLimitManager');

-module.exports = DispatchManager =
-  createDispatcher: (RateLimiter) ->
-    client = redis.createClient(Settings.redis.documentupdater)
-    worker = {
-      client: client
-      _waitForUpdateThenDispatchWorker: (callback = (error) ->) ->
-        timer = new Metrics.Timer "worker.waiting"
-        worker.client.blpop "pending-updates-list", 0, (error, result) ->
-          logger.log("getting pending-updates-list", error, result)
-          timer.done()
-          return callback(error) if error?
-          return callback() if !result?
-          [list_name, doc_key] = result
-          [project_id, doc_id] = Keys.splitProjectIdAndDocId(doc_key)
-          # Dispatch this in the background
-          backgroundTask = (cb) ->
-            UpdateManager.processOutstandingUpdatesWithLock project_id, doc_id, (error) ->
-              # log everything except OpRangeNotAvailable errors, these are normal
-              if error?
-                # downgrade OpRangeNotAvailable and "Delete component" errors so they are not sent to sentry
-                logAsWarning = (error instanceof Errors.OpRangeNotAvailableError) || (error instanceof Errors.DeleteMismatchError)
-                if logAsWarning
-                  logger.warn err: error, project_id: project_id, doc_id: doc_id, "error processing update"
-                else
-                  logger.error err: error, project_id: project_id, doc_id: doc_id, "error processing update"
-              cb()
-          RateLimiter.run backgroundTask, callback
+module.exports = (DispatchManager = {
+  createDispatcher(RateLimiter) {
+    const client = redis.createClient(Settings.redis.documentupdater);
+    var worker = {
+      client,
+      _waitForUpdateThenDispatchWorker(callback) {
+        if (callback == null) { callback = function(error) {}; }
+        const timer = new Metrics.Timer("worker.waiting");
+        return worker.client.blpop("pending-updates-list", 0, function(error, result) {
+          logger.log("getting pending-updates-list", error, result);
+          timer.done();
+          if (error != null) { return callback(error); }
+          if ((result == null)) { return callback(); }
+          const [list_name, doc_key] = Array.from(result);
+          const [project_id, doc_id] = Array.from(Keys.splitProjectIdAndDocId(doc_key));
+          // Dispatch this in the background
+          const backgroundTask = cb => UpdateManager.processOutstandingUpdatesWithLock(project_id, doc_id, function(error) {
+            // log everything except OpRangeNotAvailable errors, these are normal
+            if (error != null) {
+              // downgrade OpRangeNotAvailable and "Delete component" errors so they are not sent to sentry
+              const logAsWarning = (error instanceof Errors.OpRangeNotAvailableError) || (error instanceof Errors.DeleteMismatchError);
+              if (logAsWarning) {
+                logger.warn({err: error, project_id, doc_id}, "error processing update");
+              } else {
+                logger.error({err: error, project_id, doc_id}, "error processing update");
+              }
+            }
+            return cb();
+          });
+          return RateLimiter.run(backgroundTask, callback);
+        });
+      },

-      run: () ->
-        return if Settings.shuttingDown
-        worker._waitForUpdateThenDispatchWorker (error) =>
-          if error?
-            logger.error err: error, "Error in worker process"
-            throw error
-          else
-            worker.run()
-    }
+      run() {
+        if (Settings.shuttingDown) { return; }
+        return worker._waitForUpdateThenDispatchWorker(error => {
+          if (error != null) {
+            logger.error({err: error}, "Error in worker process");
+            throw error;
+          } else {
+            return worker.run();
+          }
+        });
+      }
+    };

-    return worker
+    return worker;
+  },

-  createAndStartDispatchers: (number) ->
-    RateLimiter = new RateLimitManager(number)
-    for i in [1..number]
-      worker = DispatchManager.createDispatcher(RateLimiter)
-      worker.run()
+  createAndStartDispatchers(number) {
+    const RateLimiter = new RateLimitManager(number);
+    return (() => {
+      const result = [];
+      for (let i = 1, end = number, asc = 1 <= end; asc ? i <= end : i >= end; asc ? i++ : i--) {
+        const worker = DispatchManager.createDispatcher(RateLimiter);
+        result.push(worker.run());
+      }
+      return result;
+    })();
+  }
+});
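Each dispatcher above is a self-rescheduling blocking-pop loop: BLPOP with a timeout of 0 parks the redis connection until an update key arrives, the update is processed under the rate limiter, and the worker immediately goes back to waiting. Reduced to its basic shape (an illustrative sketch; error handling and rate limiting omitted):

function makeWorker(client, handleUpdate) {
  const worker = {
    run() {
      // blocks until "pending-updates-list" has an entry
      client.blpop("pending-updates-list", 0, function(error, result) {
        if (error != null) { throw error; }
        const [list_name, doc_key] = result; // doc_key combines project_id and doc_id
        handleUpdate(doc_key, function() {
          worker.run(); // loop: wait for the next queued update
        });
      });
    }
  };
  return worker;
}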
--- DocumentManager.coffee
+++ DocumentManager.js
@@ -1,243 +1,340 @@
-RedisManager = require "./RedisManager"
-ProjectHistoryRedisManager = require "./ProjectHistoryRedisManager"
-PersistenceManager = require "./PersistenceManager"
-DiffCodec = require "./DiffCodec"
-logger = require "logger-sharelatex"
-Metrics = require "./Metrics"
-HistoryManager = require "./HistoryManager"
-RealTimeRedisManager = require "./RealTimeRedisManager"
-Errors = require "./Errors"
-RangesManager = require "./RangesManager"
-async = require "async"
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let DocumentManager;
+const RedisManager = require("./RedisManager");
+const ProjectHistoryRedisManager = require("./ProjectHistoryRedisManager");
+const PersistenceManager = require("./PersistenceManager");
+const DiffCodec = require("./DiffCodec");
+const logger = require("logger-sharelatex");
+const Metrics = require("./Metrics");
+const HistoryManager = require("./HistoryManager");
+const RealTimeRedisManager = require("./RealTimeRedisManager");
+const Errors = require("./Errors");
+const RangesManager = require("./RangesManager");
+const async = require("async");

-MAX_UNFLUSHED_AGE = 300 * 1000 # 5 mins, document should be flushed to mongo this time after a change
+const MAX_UNFLUSHED_AGE = 300 * 1000; // 5 mins, document should be flushed to mongo this time after a change

-module.exports = DocumentManager =
-  getDoc: (project_id, doc_id, _callback = (error, lines, version, ranges, pathname, projectHistoryId, unflushedTime, alreadyLoaded) ->) ->
-    timer = new Metrics.Timer("docManager.getDoc")
-    callback = (args...) ->
-      timer.done()
-      _callback(args...)
+module.exports = (DocumentManager = {
+  getDoc(project_id, doc_id, _callback) {
+    if (_callback == null) { _callback = function(error, lines, version, ranges, pathname, projectHistoryId, unflushedTime, alreadyLoaded) {}; }
+    const timer = new Metrics.Timer("docManager.getDoc");
+    const callback = function(...args) {
+      timer.done();
+      return _callback(...Array.from(args || []));
+    };

-    RedisManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId, unflushedTime) ->
-      return callback(error) if error?
-      if !lines? or !version?
-        logger.log {project_id, doc_id}, "doc not in redis so getting from persistence API"
-        PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId, projectHistoryType) ->
-          return callback(error) if error?
-          logger.log {project_id, doc_id, lines, version, pathname, projectHistoryId, projectHistoryType}, "got doc from persistence API"
-          RedisManager.putDocInMemory project_id, doc_id, lines, version, ranges, pathname, projectHistoryId, (error) ->
-            return callback(error) if error?
-            RedisManager.setHistoryType doc_id, projectHistoryType, (error) ->
-              return callback(error) if error?
-              callback null, lines, version, ranges || {}, pathname, projectHistoryId, null, false
-      else
-        callback null, lines, version, ranges, pathname, projectHistoryId, unflushedTime, true
+    return RedisManager.getDoc(project_id, doc_id, function(error, lines, version, ranges, pathname, projectHistoryId, unflushedTime) {
+      if (error != null) { return callback(error); }
+      if ((lines == null) || (version == null)) {
+        logger.log({project_id, doc_id}, "doc not in redis so getting from persistence API");
+        return PersistenceManager.getDoc(project_id, doc_id, function(error, lines, version, ranges, pathname, projectHistoryId, projectHistoryType) {
+          if (error != null) { return callback(error); }
+          logger.log({project_id, doc_id, lines, version, pathname, projectHistoryId, projectHistoryType}, "got doc from persistence API");
+          return RedisManager.putDocInMemory(project_id, doc_id, lines, version, ranges, pathname, projectHistoryId, function(error) {
+            if (error != null) { return callback(error); }
+            return RedisManager.setHistoryType(doc_id, projectHistoryType, function(error) {
+              if (error != null) { return callback(error); }
+              return callback(null, lines, version, ranges || {}, pathname, projectHistoryId, null, false);
+            });
+          });
+        });
+      } else {
+        return callback(null, lines, version, ranges, pathname, projectHistoryId, unflushedTime, true);
+      }
+    });
+  },
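getDoc is a read-through cache: try redis, fall back to the persistence API on a miss, and write the fetched doc back into redis before returning. The final boolean argument (alreadyLoaded) tells callers which path was taken. The same shape in isolation, as a sketch (the real signatures carry many more fields):

function readThrough(cache, source, key, callback) {
  cache.get(key, function(error, value) {
    if (error != null) { return callback(error); }
    if (value != null) { return callback(null, value, /* alreadyLoaded */ true); }
    source.get(key, function(error, value) {
      if (error != null) { return callback(error); }
      cache.put(key, value, function(error) {
        if (error != null) { return callback(error); }
        return callback(null, value, /* alreadyLoaded */ false);
      });
    });
  });
}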

-  getDocAndRecentOps: (project_id, doc_id, fromVersion, _callback = (error, lines, version, ops, ranges, pathname, projectHistoryId) ->) ->
-    timer = new Metrics.Timer("docManager.getDocAndRecentOps")
-    callback = (args...) ->
-      timer.done()
-      _callback(args...)
+  getDocAndRecentOps(project_id, doc_id, fromVersion, _callback) {
+    if (_callback == null) { _callback = function(error, lines, version, ops, ranges, pathname, projectHistoryId) {}; }
+    const timer = new Metrics.Timer("docManager.getDocAndRecentOps");
+    const callback = function(...args) {
+      timer.done();
+      return _callback(...Array.from(args || []));
+    };

-    DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) ->
-      return callback(error) if error?
-      if fromVersion == -1
-        callback null, lines, version, [], ranges, pathname, projectHistoryId
-      else
-        RedisManager.getPreviousDocOps doc_id, fromVersion, version, (error, ops) ->
-          return callback(error) if error?
-          callback null, lines, version, ops, ranges, pathname, projectHistoryId
+    return DocumentManager.getDoc(project_id, doc_id, function(error, lines, version, ranges, pathname, projectHistoryId) {
+      if (error != null) { return callback(error); }
+      if (fromVersion === -1) {
+        return callback(null, lines, version, [], ranges, pathname, projectHistoryId);
+      } else {
+        return RedisManager.getPreviousDocOps(doc_id, fromVersion, version, function(error, ops) {
+          if (error != null) { return callback(error); }
+          return callback(null, lines, version, ops, ranges, pathname, projectHistoryId);
+        });
+      }
+    });
+  },

-  setDoc: (project_id, doc_id, newLines, source, user_id, undoing, _callback = (error) ->) ->
-    timer = new Metrics.Timer("docManager.setDoc")
-    callback = (args...) ->
-      timer.done()
-      _callback(args...)
+  setDoc(project_id, doc_id, newLines, source, user_id, undoing, _callback) {
+    if (_callback == null) { _callback = function(error) {}; }
+    const timer = new Metrics.Timer("docManager.setDoc");
+    const callback = function(...args) {
+      timer.done();
+      return _callback(...Array.from(args || []));
+    };

-    if !newLines?
-      return callback(new Error("No lines were provided to setDoc"))
+    if ((newLines == null)) {
+      return callback(new Error("No lines were provided to setDoc"));
+    }

-    UpdateManager = require "./UpdateManager"
-    DocumentManager.getDoc project_id, doc_id, (error, oldLines, version, ranges, pathname, projectHistoryId, unflushedTime, alreadyLoaded) ->
-      return callback(error) if error?
+    const UpdateManager = require("./UpdateManager");
+    return DocumentManager.getDoc(project_id, doc_id, function(error, oldLines, version, ranges, pathname, projectHistoryId, unflushedTime, alreadyLoaded) {
+      if (error != null) { return callback(error); }

-      if oldLines? and oldLines.length > 0 and oldLines[0].text?
-        logger.log doc_id: doc_id, project_id: project_id, oldLines: oldLines, newLines: newLines, "document is JSON so not updating"
-        return callback(null)
+      if ((oldLines != null) && (oldLines.length > 0) && (oldLines[0].text != null)) {
+        logger.log({doc_id, project_id, oldLines, newLines}, "document is JSON so not updating");
+        return callback(null);
+      }

-      logger.log doc_id: doc_id, project_id: project_id, oldLines: oldLines, newLines: newLines, "setting a document via http"
-      DiffCodec.diffAsShareJsOp oldLines, newLines, (error, op) ->
-        return callback(error) if error?
-        if undoing
-          for o in op or []
-            o.u = true # Turn on undo flag for each op for track changes
-        update =
-          doc: doc_id
-          op: op
-          v: version
-          meta:
-            type: "external"
-            source: source
-            user_id: user_id
-        UpdateManager.applyUpdate project_id, doc_id, update, (error) ->
-          return callback(error) if error?
-          # If the document was loaded already, then someone has it open
-          # in a project, and the usual flushing mechanism will happen.
-          # Otherwise we should remove it immediately since nothing else
-          # is using it.
-          if alreadyLoaded
-            DocumentManager.flushDocIfLoaded project_id, doc_id, (error) ->
-              return callback(error) if error?
-              callback null
-          else
-            DocumentManager.flushAndDeleteDoc project_id, doc_id, {}, (error) ->
-              # There is no harm in flushing project history if the previous
-              # call failed and sometimes it is required
-              HistoryManager.flushProjectChangesAsync project_id
+      logger.log({doc_id, project_id, oldLines, newLines}, "setting a document via http");
+      return DiffCodec.diffAsShareJsOp(oldLines, newLines, function(error, op) {
+        if (error != null) { return callback(error); }
+        if (undoing) {
+          for (let o of Array.from(op || [])) {
+            o.u = true;
+          } // Turn on undo flag for each op for track changes
+        }
+        const update = {
+          doc: doc_id,
+          op,
+          v: version,
+          meta: {
+            type: "external",
+            source,
+            user_id
+          }
+        };
+        return UpdateManager.applyUpdate(project_id, doc_id, update, function(error) {
+          if (error != null) { return callback(error); }
+          // If the document was loaded already, then someone has it open
+          // in a project, and the usual flushing mechanism will happen.
+          // Otherwise we should remove it immediately since nothing else
+          // is using it.
+          if (alreadyLoaded) {
+            return DocumentManager.flushDocIfLoaded(project_id, doc_id, function(error) {
+              if (error != null) { return callback(error); }
+              return callback(null);
+            });
+          } else {
+            return DocumentManager.flushAndDeleteDoc(project_id, doc_id, {}, function(error) {
+              // There is no harm in flushing project history if the previous
+              // call failed and sometimes it is required
+              HistoryManager.flushProjectChangesAsync(project_id);

-              return callback(error) if error?
-              callback null
+              if (error != null) { return callback(error); }
+              return callback(null);
+            });
+          }
+        });
+      });
+    });
+  },
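Note that setDoc never overwrites the document directly: it diffs the old lines against the new ones and feeds the result through the normal update path, so version history and tracked ranges stay consistent. The update object it constructs has this shape (field values illustrative):

const update = {
  doc: "doc-id",
  op: [{ i: "brave ", p: 6 }], // ops from DiffCodec.diffAsShareJsOp
  v: 42,                       // version the diff was computed against
  meta: {
    type: "external",          // marks a change made over HTTP, not by an editor
    source: "some-source",     // e.g. which integration set the doc
    user_id: "user-id"
  }
};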

-  flushDocIfLoaded: (project_id, doc_id, _callback = (error) ->) ->
-    timer = new Metrics.Timer("docManager.flushDocIfLoaded")
-    callback = (args...) ->
-      timer.done()
-      _callback(args...)
-    RedisManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId, unflushedTime, lastUpdatedAt, lastUpdatedBy) ->
-      return callback(error) if error?
-      if !lines? or !version?
-        logger.log project_id: project_id, doc_id: doc_id, "doc is not loaded so not flushing"
-        callback null # TODO: return a flag to bail out, as we go on to remove doc from memory?
-      else
-        logger.log project_id: project_id, doc_id: doc_id, version: version, "flushing doc"
-        PersistenceManager.setDoc project_id, doc_id, lines, version, ranges, lastUpdatedAt, lastUpdatedBy, (error) ->
-          return callback(error) if error?
-          RedisManager.clearUnflushedTime doc_id, callback
+  flushDocIfLoaded(project_id, doc_id, _callback) {
+    if (_callback == null) { _callback = function(error) {}; }
+    const timer = new Metrics.Timer("docManager.flushDocIfLoaded");
+    const callback = function(...args) {
+      timer.done();
+      return _callback(...Array.from(args || []));
+    };
+    return RedisManager.getDoc(project_id, doc_id, function(error, lines, version, ranges, pathname, projectHistoryId, unflushedTime, lastUpdatedAt, lastUpdatedBy) {
+      if (error != null) { return callback(error); }
+      if ((lines == null) || (version == null)) {
+        logger.log({project_id, doc_id}, "doc is not loaded so not flushing");
+        return callback(null); // TODO: return a flag to bail out, as we go on to remove doc from memory?
+      } else {
+        logger.log({project_id, doc_id, version}, "flushing doc");
+        return PersistenceManager.setDoc(project_id, doc_id, lines, version, ranges, lastUpdatedAt, lastUpdatedBy, function(error) {
+          if (error != null) { return callback(error); }
+          return RedisManager.clearUnflushedTime(doc_id, callback);
+        });
+      }
+    });
+  },

-  flushAndDeleteDoc: (project_id, doc_id, options, _callback) ->
-    timer = new Metrics.Timer("docManager.flushAndDeleteDoc")
-    callback = (args...) ->
-      timer.done()
-      _callback(args...)
+  flushAndDeleteDoc(project_id, doc_id, options, _callback) {
+    const timer = new Metrics.Timer("docManager.flushAndDeleteDoc");
+    const callback = function(...args) {
+      timer.done();
+      return _callback(...Array.from(args || []));
+    };

-    DocumentManager.flushDocIfLoaded project_id, doc_id, (error) ->
-      if error?
-        if options.ignoreFlushErrors
-          logger.warn {project_id: project_id, doc_id: doc_id, err: error}, "ignoring flush error while deleting document"
-        else
-          return callback(error)
+    return DocumentManager.flushDocIfLoaded(project_id, doc_id, function(error) {
+      if (error != null) {
+        if (options.ignoreFlushErrors) {
+          logger.warn({project_id, doc_id, err: error}, "ignoring flush error while deleting document");
+        } else {
+          return callback(error);
+        }
+      }

-      # Flush in the background since it requires a http request
-      HistoryManager.flushDocChangesAsync project_id, doc_id
+      // Flush in the background since it requires a http request
+      HistoryManager.flushDocChangesAsync(project_id, doc_id);

-      RedisManager.removeDocFromMemory project_id, doc_id, (error) ->
-        return callback(error) if error?
-        callback null
+      return RedisManager.removeDocFromMemory(project_id, doc_id, function(error) {
+        if (error != null) { return callback(error); }
+        return callback(null);
+      });
+    });
+  },

-  acceptChanges: (project_id, doc_id, change_ids = [], _callback = (error) ->) ->
-    timer = new Metrics.Timer("docManager.acceptChanges")
-    callback = (args...) ->
-      timer.done()
-      _callback(args...)
+  acceptChanges(project_id, doc_id, change_ids, _callback) {
+    if (change_ids == null) { change_ids = []; }
+    if (_callback == null) { _callback = function(error) {}; }
+    const timer = new Metrics.Timer("docManager.acceptChanges");
+    const callback = function(...args) {
+      timer.done();
+      return _callback(...Array.from(args || []));
+    };

-    DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges) ->
-      return callback(error) if error?
-      if !lines? or !version?
-        return callback(new Errors.NotFoundError("document not found: #{doc_id}"))
-      RangesManager.acceptChanges change_ids, ranges, (error, new_ranges) ->
-        return callback(error) if error?
-        RedisManager.updateDocument project_id, doc_id, lines, version, [], new_ranges, {}, (error) ->
-          return callback(error) if error?
-          callback()
+    return DocumentManager.getDoc(project_id, doc_id, function(error, lines, version, ranges) {
+      if (error != null) { return callback(error); }
+      if ((lines == null) || (version == null)) {
+        return callback(new Errors.NotFoundError(`document not found: ${doc_id}`));
+      }
+      return RangesManager.acceptChanges(change_ids, ranges, function(error, new_ranges) {
+        if (error != null) { return callback(error); }
+        return RedisManager.updateDocument(project_id, doc_id, lines, version, [], new_ranges, {}, function(error) {
+          if (error != null) { return callback(error); }
+          return callback();
+        });
+      });
+    });
+  },

-  deleteComment: (project_id, doc_id, comment_id, _callback = (error) ->) ->
-    timer = new Metrics.Timer("docManager.deleteComment")
-    callback = (args...) ->
-      timer.done()
-      _callback(args...)
+  deleteComment(project_id, doc_id, comment_id, _callback) {
+    if (_callback == null) { _callback = function(error) {}; }
+    const timer = new Metrics.Timer("docManager.deleteComment");
+    const callback = function(...args) {
+      timer.done();
+      return _callback(...Array.from(args || []));
+    };

-    DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges) ->
-      return callback(error) if error?
-      if !lines? or !version?
-        return callback(new Errors.NotFoundError("document not found: #{doc_id}"))
-      RangesManager.deleteComment comment_id, ranges, (error, new_ranges) ->
-        return callback(error) if error?
-        RedisManager.updateDocument project_id, doc_id, lines, version, [], new_ranges, {}, (error) ->
-          return callback(error) if error?
-          callback()
+    return DocumentManager.getDoc(project_id, doc_id, function(error, lines, version, ranges) {
+      if (error != null) { return callback(error); }
+      if ((lines == null) || (version == null)) {
+        return callback(new Errors.NotFoundError(`document not found: ${doc_id}`));
+      }
+      return RangesManager.deleteComment(comment_id, ranges, function(error, new_ranges) {
+        if (error != null) { return callback(error); }
+        return RedisManager.updateDocument(project_id, doc_id, lines, version, [], new_ranges, {}, function(error) {
+          if (error != null) { return callback(error); }
+          return callback();
+        });
+      });
+    });
+  },

-  renameDoc: (project_id, doc_id, user_id, update, projectHistoryId, _callback = (error) ->) ->
-    timer = new Metrics.Timer("docManager.updateProject")
-    callback = (args...) ->
-      timer.done()
-      _callback(args...)
+  renameDoc(project_id, doc_id, user_id, update, projectHistoryId, _callback) {
+    if (_callback == null) { _callback = function(error) {}; }
+    const timer = new Metrics.Timer("docManager.updateProject");
+    const callback = function(...args) {
+      timer.done();
+      return _callback(...Array.from(args || []));
+    };

-    RedisManager.renameDoc project_id, doc_id, user_id, update, projectHistoryId, callback
+    return RedisManager.renameDoc(project_id, doc_id, user_id, update, projectHistoryId, callback);
+  },

-  getDocAndFlushIfOld: (project_id, doc_id, callback = (error, doc) ->) ->
-    DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId, unflushedTime, alreadyLoaded) ->
-      return callback(error) if error?
-      # if doc was already loaded see if it needs to be flushed
-      if alreadyLoaded and unflushedTime? and (Date.now() - unflushedTime) > MAX_UNFLUSHED_AGE
-        DocumentManager.flushDocIfLoaded project_id, doc_id, (error) ->
-          return callback(error) if error?
-          callback(null, lines, version)
-      else
-        callback(null, lines, version)
+  getDocAndFlushIfOld(project_id, doc_id, callback) {
+    if (callback == null) { callback = function(error, doc) {}; }
+    return DocumentManager.getDoc(project_id, doc_id, function(error, lines, version, ranges, pathname, projectHistoryId, unflushedTime, alreadyLoaded) {
+      if (error != null) { return callback(error); }
+      // if doc was already loaded see if it needs to be flushed
+      if (alreadyLoaded && (unflushedTime != null) && ((Date.now() - unflushedTime) > MAX_UNFLUSHED_AGE)) {
+        return DocumentManager.flushDocIfLoaded(project_id, doc_id, function(error) {
+          if (error != null) { return callback(error); }
+          return callback(null, lines, version);
+        });
+      } else {
+        return callback(null, lines, version);
+      }
+    });
+  },

-  resyncDocContents: (project_id, doc_id, callback) ->
-    logger.log {project_id: project_id, doc_id: doc_id}, "start resyncing doc contents"
-    RedisManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) ->
-      return callback(error) if error?
+  resyncDocContents(project_id, doc_id, callback) {
+    logger.log({project_id, doc_id}, "start resyncing doc contents");
+    return RedisManager.getDoc(project_id, doc_id, function(error, lines, version, ranges, pathname, projectHistoryId) {
+      if (error != null) { return callback(error); }

-      if !lines? or !version?
-        logger.log {project_id: project_id, doc_id: doc_id}, "resyncing doc contents - not found in redis - retrieving from web"
-        PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) ->
-          if error?
-            logger.error {project_id: project_id, doc_id: doc_id, getDocError: error}, "resyncing doc contents - error retrieving from web"
-            return callback(error)
-          ProjectHistoryRedisManager.queueResyncDocContent project_id, projectHistoryId, doc_id, lines, version, pathname, callback
-      else
-        logger.log {project_id: project_id, doc_id: doc_id}, "resyncing doc contents - doc in redis - will queue in redis"
-        ProjectHistoryRedisManager.queueResyncDocContent project_id, projectHistoryId, doc_id, lines, version, pathname, callback
+      if ((lines == null) || (version == null)) {
+        logger.log({project_id, doc_id}, "resyncing doc contents - not found in redis - retrieving from web");
+        return PersistenceManager.getDoc(project_id, doc_id, function(error, lines, version, ranges, pathname, projectHistoryId) {
+          if (error != null) {
+            logger.error({project_id, doc_id, getDocError: error}, "resyncing doc contents - error retrieving from web");
+            return callback(error);
+          }
+          return ProjectHistoryRedisManager.queueResyncDocContent(project_id, projectHistoryId, doc_id, lines, version, pathname, callback);
+        });
+      } else {
+        logger.log({project_id, doc_id}, "resyncing doc contents - doc in redis - will queue in redis");
+        return ProjectHistoryRedisManager.queueResyncDocContent(project_id, projectHistoryId, doc_id, lines, version, pathname, callback);
+      }
+    });
+  },

-  getDocWithLock: (project_id, doc_id, callback = (error, lines, version) ->) ->
-    UpdateManager = require "./UpdateManager"
-    UpdateManager.lockUpdatesAndDo DocumentManager.getDoc, project_id, doc_id, callback
+  getDocWithLock(project_id, doc_id, callback) {
+    if (callback == null) { callback = function(error, lines, version) {}; }
+    const UpdateManager = require("./UpdateManager");
+    return UpdateManager.lockUpdatesAndDo(DocumentManager.getDoc, project_id, doc_id, callback);
+  },

-  getDocAndRecentOpsWithLock: (project_id, doc_id, fromVersion, callback = (error, lines, version, ops, ranges, pathname, projectHistoryId) ->) ->
-    UpdateManager = require "./UpdateManager"
-    UpdateManager.lockUpdatesAndDo DocumentManager.getDocAndRecentOps, project_id, doc_id, fromVersion, callback
+  getDocAndRecentOpsWithLock(project_id, doc_id, fromVersion, callback) {
+    if (callback == null) { callback = function(error, lines, version, ops, ranges, pathname, projectHistoryId) {}; }
+    const UpdateManager = require("./UpdateManager");
+    return UpdateManager.lockUpdatesAndDo(DocumentManager.getDocAndRecentOps, project_id, doc_id, fromVersion, callback);
+  },

-  getDocAndFlushIfOldWithLock: (project_id, doc_id, callback = (error, doc) ->) ->
-    UpdateManager = require "./UpdateManager"
-    UpdateManager.lockUpdatesAndDo DocumentManager.getDocAndFlushIfOld, project_id, doc_id, callback
+  getDocAndFlushIfOldWithLock(project_id, doc_id, callback) {
+    if (callback == null) { callback = function(error, doc) {}; }
+    const UpdateManager = require("./UpdateManager");
+    return UpdateManager.lockUpdatesAndDo(DocumentManager.getDocAndFlushIfOld, project_id, doc_id, callback);
+  },

-  setDocWithLock: (project_id, doc_id, lines, source, user_id, undoing, callback = (error) ->) ->
-    UpdateManager = require "./UpdateManager"
-    UpdateManager.lockUpdatesAndDo DocumentManager.setDoc, project_id, doc_id, lines, source, user_id, undoing, callback
+  setDocWithLock(project_id, doc_id, lines, source, user_id, undoing, callback) {
+    if (callback == null) { callback = function(error) {}; }
+    const UpdateManager = require("./UpdateManager");
+    return UpdateManager.lockUpdatesAndDo(DocumentManager.setDoc, project_id, doc_id, lines, source, user_id, undoing, callback);
+  },

-  flushDocIfLoadedWithLock: (project_id, doc_id, callback = (error) ->) ->
-    UpdateManager = require "./UpdateManager"
-    UpdateManager.lockUpdatesAndDo DocumentManager.flushDocIfLoaded, project_id, doc_id, callback
+  flushDocIfLoadedWithLock(project_id, doc_id, callback) {
+    if (callback == null) { callback = function(error) {}; }
+    const UpdateManager = require("./UpdateManager");
+    return UpdateManager.lockUpdatesAndDo(DocumentManager.flushDocIfLoaded, project_id, doc_id, callback);
+  },

-  flushAndDeleteDocWithLock: (project_id, doc_id, options, callback) ->
-    UpdateManager = require "./UpdateManager"
-    UpdateManager.lockUpdatesAndDo DocumentManager.flushAndDeleteDoc, project_id, doc_id, options, callback
+  flushAndDeleteDocWithLock(project_id, doc_id, options, callback) {
+    const UpdateManager = require("./UpdateManager");
+    return UpdateManager.lockUpdatesAndDo(DocumentManager.flushAndDeleteDoc, project_id, doc_id, options, callback);
+  },

-  acceptChangesWithLock: (project_id, doc_id, change_ids, callback = (error) ->) ->
-    UpdateManager = require "./UpdateManager"
-    UpdateManager.lockUpdatesAndDo DocumentManager.acceptChanges, project_id, doc_id, change_ids, callback
+  acceptChangesWithLock(project_id, doc_id, change_ids, callback) {
+    if (callback == null) { callback = function(error) {}; }
+    const UpdateManager = require("./UpdateManager");
+    return UpdateManager.lockUpdatesAndDo(DocumentManager.acceptChanges, project_id, doc_id, change_ids, callback);
+  },

-  deleteCommentWithLock: (project_id, doc_id, thread_id, callback = (error) ->) ->
-    UpdateManager = require "./UpdateManager"
-    UpdateManager.lockUpdatesAndDo DocumentManager.deleteComment, project_id, doc_id, thread_id, callback
+  deleteCommentWithLock(project_id, doc_id, thread_id, callback) {
+    if (callback == null) { callback = function(error) {}; }
+    const UpdateManager = require("./UpdateManager");
+    return UpdateManager.lockUpdatesAndDo(DocumentManager.deleteComment, project_id, doc_id, thread_id, callback);
+  },

-  renameDocWithLock: (project_id, doc_id, user_id, update, projectHistoryId, callback = (error) ->) ->
-    UpdateManager = require "./UpdateManager"
-    UpdateManager.lockUpdatesAndDo DocumentManager.renameDoc, project_id, doc_id, user_id, update, projectHistoryId, callback
+  renameDocWithLock(project_id, doc_id, user_id, update, projectHistoryId, callback) {
+    if (callback == null) { callback = function(error) {}; }
+    const UpdateManager = require("./UpdateManager");
+    return UpdateManager.lockUpdatesAndDo(DocumentManager.renameDoc, project_id, doc_id, user_id, update, projectHistoryId, callback);
+  },

-  resyncDocContentsWithLock: (project_id, doc_id, callback = (error) ->) ->
-    UpdateManager = require "./UpdateManager"
-    UpdateManager.lockUpdatesAndDo DocumentManager.resyncDocContents, project_id, doc_id, callback
+  resyncDocContentsWithLock(project_id, doc_id, callback) {
+    if (callback == null) { callback = function(error) {}; }
+    const UpdateManager = require("./UpdateManager");
+    return UpdateManager.lockUpdatesAndDo(DocumentManager.resyncDocContents, project_id, doc_id, callback);
+  }
+});
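All the *WithLock variants above share one shape: UpdateManager.lockUpdatesAndDo takes the per-document lock, applies any queued updates, runs the wrapped method, and releases the lock. A reduced sketch of such a wrapper (illustrative only; the real implementation lives in UpdateManager, outside this diff):

function lockAndDo(lockManager, method, project_id, doc_id, args, callback) {
  lockManager.getLock(doc_id, function(error, lockValue) {
    if (error != null) { return callback(error); }
    method(project_id, doc_id, ...args, function(...results) {
      // release before surfacing the wrapped method's result
      lockManager.releaseLock(doc_id, lockValue, function() {
        callback(...results);
      });
    });
  });
}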
--- Errors.coffee
+++ Errors.js
@@ -1,33 +1,39 @@
-NotFoundError = (message) ->
-  error = new Error(message)
-  error.name = "NotFoundError"
-  error.__proto__ = NotFoundError.prototype
-  return error
-NotFoundError.prototype.__proto__ = Error.prototype
+let Errors;
+var NotFoundError = function(message) {
+  const error = new Error(message);
+  error.name = "NotFoundError";
+  error.__proto__ = NotFoundError.prototype;
+  return error;
+};
+NotFoundError.prototype.__proto__ = Error.prototype;

-OpRangeNotAvailableError = (message) ->
-  error = new Error(message)
-  error.name = "OpRangeNotAvailableError"
-  error.__proto__ = OpRangeNotAvailableError.prototype
-  return error
-OpRangeNotAvailableError.prototype.__proto__ = Error.prototype
+var OpRangeNotAvailableError = function(message) {
+  const error = new Error(message);
+  error.name = "OpRangeNotAvailableError";
+  error.__proto__ = OpRangeNotAvailableError.prototype;
+  return error;
+};
+OpRangeNotAvailableError.prototype.__proto__ = Error.prototype;

-ProjectStateChangedError = (message) ->
-  error = new Error(message)
-  error.name = "ProjectStateChangedError"
-  error.__proto__ = ProjectStateChangedError.prototype
-  return error
-ProjectStateChangedError.prototype.__proto__ = Error.prototype
+var ProjectStateChangedError = function(message) {
+  const error = new Error(message);
+  error.name = "ProjectStateChangedError";
+  error.__proto__ = ProjectStateChangedError.prototype;
+  return error;
+};
+ProjectStateChangedError.prototype.__proto__ = Error.prototype;

-DeleteMismatchError = (message) ->
-  error = new Error(message)
-  error.name = "DeleteMismatchError"
-  error.__proto__ = DeleteMismatchError.prototype
-  return error
-DeleteMismatchError.prototype.__proto__ = Error.prototype
+var DeleteMismatchError = function(message) {
+  const error = new Error(message);
+  error.name = "DeleteMismatchError";
+  error.__proto__ = DeleteMismatchError.prototype;
+  return error;
+};
+DeleteMismatchError.prototype.__proto__ = Error.prototype;

-module.exports = Errors =
-  NotFoundError: NotFoundError
-  OpRangeNotAvailableError: OpRangeNotAvailableError
-  ProjectStateChangedError: ProjectStateChangedError
-  DeleteMismatchError: DeleteMismatchError
+module.exports = (Errors = {
+  NotFoundError,
+  OpRangeNotAvailableError,
+  ProjectStateChangedError,
+  DeleteMismatchError
+});
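The factory-plus-__proto__ pattern above predates ES2015 classes; it exists so that instanceof works both for the specific error and for Error, which DispatchManager's logAsWarning check relies on. For comparison, the class-based equivalent (a sketch of the modern alternative, not what this commit does):

class NotFoundError extends Error {
  constructor(message) {
    super(message);
    this.name = "NotFoundError";
  }
}
// new NotFoundError("x") instanceof NotFoundError  => true
// new NotFoundError("x") instanceof Error          => true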
@ -1,107 +1,144 @@
|
|||
async = require "async"
|
||||
logger = require "logger-sharelatex"
|
||||
request = require "request"
|
||||
Settings = require "settings-sharelatex"
|
||||
HistoryRedisManager = require "./HistoryRedisManager"
|
||||
ProjectHistoryRedisManager = require "./ProjectHistoryRedisManager"
|
||||
RedisManager = require "./RedisManager"
|
||||
metrics = require "./Metrics"
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS103: Rewrite code to no longer use __guard__
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
let HistoryManager;
|
||||
const async = require("async");
|
||||
const logger = require("logger-sharelatex");
|
||||
const request = require("request");
|
||||
const Settings = require("settings-sharelatex");
|
||||
const HistoryRedisManager = require("./HistoryRedisManager");
|
||||
const ProjectHistoryRedisManager = require("./ProjectHistoryRedisManager");
|
||||
const RedisManager = require("./RedisManager");
|
||||
const metrics = require("./Metrics");
|
||||
|
||||
module.exports = HistoryManager =
|
||||
flushDocChangesAsync: (project_id, doc_id) ->
|
||||
if !Settings.apis?.trackchanges?
|
||||
logger.warn { doc_id }, "track changes API is not configured, so not flushing"
|
||||
return
|
||||
RedisManager.getHistoryType doc_id, (err, projectHistoryType) ->
|
||||
if err?
|
||||
logger.warn {err, doc_id}, "error getting history type"
|
||||
# if there's an error continue and flush to track-changes for safety
|
||||
if Settings.disableDoubleFlush and projectHistoryType is "project-history"
|
||||
logger.debug {doc_id, projectHistoryType}, "skipping track-changes flush"
|
||||
else
|
||||
metrics.inc 'history-flush', 1, { status: 'track-changes'}
|
||||
url = "#{Settings.apis.trackchanges.url}/project/#{project_id}/doc/#{doc_id}/flush"
|
||||
logger.log { project_id, doc_id, url, projectHistoryType }, "flushing doc in track changes api"
|
||||
request.post url, (error, res, body)->
|
||||
if error?
|
||||
logger.error { error, doc_id, project_id}, "track changes doc to track changes api"
|
||||
else if res.statusCode < 200 and res.statusCode >= 300
|
||||
logger.error { doc_id, project_id }, "track changes api returned a failure status code: #{res.statusCode}"
|
||||
module.exports = (HistoryManager = {
|
||||
flushDocChangesAsync(project_id, doc_id) {
|
||||
if (((Settings.apis != null ? Settings.apis.trackchanges : undefined) == null)) {
|
||||
logger.warn({ doc_id }, "track changes API is not configured, so not flushing");
|
||||
return;
|
||||
}
|
||||
return RedisManager.getHistoryType(doc_id, function(err, projectHistoryType) {
|
||||
if (err != null) {
|
||||
logger.warn({err, doc_id}, "error getting history type");
|
||||
}
|
||||
// if there's an error continue and flush to track-changes for safety
|
||||
if (Settings.disableDoubleFlush && (projectHistoryType === "project-history")) {
|
||||
return logger.debug({doc_id, projectHistoryType}, "skipping track-changes flush");
|
||||
} else {
|
||||
metrics.inc('history-flush', 1, { status: 'track-changes'});
|
||||
const url = `${Settings.apis.trackchanges.url}/project/${project_id}/doc/${doc_id}/flush`;
|
||||
logger.log({ project_id, doc_id, url, projectHistoryType }, "flushing doc in track changes api");
|
||||
return request.post(url, function(error, res, body){
|
||||
if (error != null) {
|
||||
return logger.error({ error, doc_id, project_id}, "track changes doc to track changes api");
|
||||
} else if ((res.statusCode < 200) && (res.statusCode >= 300)) {
|
||||
return logger.error({ doc_id, project_id }, `track changes api returned a failure status code: ${res.statusCode}`);
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
# flush changes in the background
|
||||
flushProjectChangesAsync: (project_id) ->
|
||||
return if !Settings.apis?.project_history?.enabled
|
||||
HistoryManager.flushProjectChanges project_id, {background:true}, ->
|
||||
// flush changes in the background
|
||||
flushProjectChangesAsync(project_id) {
|
||||
if (!__guard__(Settings.apis != null ? Settings.apis.project_history : undefined, x => x.enabled)) { return; }
|
||||
return HistoryManager.flushProjectChanges(project_id, {background:true}, function() {});
|
||||
},
|
||||
|
||||
# flush changes and callback (for when we need to know the queue is flushed)
|
||||
flushProjectChanges: (project_id, options, callback = (error) ->) ->
|
||||
return callback() if !Settings.apis?.project_history?.enabled
|
||||
if options.skip_history_flush
|
||||
logger.log {project_id}, "skipping flush of project history"
|
||||
return callback()
|
||||
metrics.inc 'history-flush', 1, { status: 'project-history'}
|
||||
url = "#{Settings.apis.project_history.url}/project/#{project_id}/flush"
|
||||
qs = {}
|
||||
qs.background = true if options.background # pass on the background flush option if present
|
||||
logger.log { project_id, url, qs }, "flushing doc in project history api"
|
||||
request.post {url: url, qs: qs}, (error, res, body)->
|
||||
if error?
|
||||
logger.error { error, project_id}, "project history doc to track changes api"
|
||||
return callback(error)
|
||||
else if res.statusCode < 200 and res.statusCode >= 300
|
||||
logger.error { project_id }, "project history api returned a failure status code: #{res.statusCode}"
|
||||
return callback(error)
|
||||
else
|
||||
return callback()
|
||||
// flush changes and callback (for when we need to know the queue is flushed)
|
||||
flushProjectChanges(project_id, options, callback) {
|
||||
if (callback == null) { callback = function(error) {}; }
|
||||
if (!__guard__(Settings.apis != null ? Settings.apis.project_history : undefined, x => x.enabled)) { return callback(); }
|
||||
if (options.skip_history_flush) {
|
||||
logger.log({project_id}, "skipping flush of project history");
|
||||
return callback();
|
||||
}
|
||||
metrics.inc('history-flush', 1, { status: 'project-history'});
|
||||
const url = `${Settings.apis.project_history.url}/project/${project_id}/flush`;
|
||||
const qs = {};
|
||||
if (options.background) { qs.background = true; } // pass on the background flush option if present
|
||||
logger.log({ project_id, url, qs }, "flushing doc in project history api");
|
||||
return request.post({url, qs}, function(error, res, body){
|
||||
if (error != null) {
|
||||
logger.error({ error, project_id}, "project history doc to track changes api");
|
||||
return callback(error);
|
||||
} else if ((res.statusCode < 200) && (res.statusCode >= 300)) {
|
||||
logger.error({ project_id }, `project history api returned a failure status code: ${res.statusCode}`);
|
||||
return callback(error);
|
||||
} else {
|
||||
return callback();
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
FLUSH_DOC_EVERY_N_OPS: 100
|
||||
FLUSH_PROJECT_EVERY_N_OPS: 500
|
||||
FLUSH_DOC_EVERY_N_OPS: 100,
|
||||
FLUSH_PROJECT_EVERY_N_OPS: 500,
|
||||
|
||||
recordAndFlushHistoryOps: (project_id, doc_id, ops = [], doc_ops_length, project_ops_length, callback = (error) ->) ->
|
||||
if ops.length == 0
|
||||
return callback()
|
||||
recordAndFlushHistoryOps(project_id, doc_id, ops, doc_ops_length, project_ops_length, callback) {
|
||||
if (ops == null) { ops = []; }
|
||||
if (callback == null) { callback = function(error) {}; }
|
||||
if (ops.length === 0) {
|
||||
return callback();
|
||||
}
|
||||
|
||||
# record updates for project history
|
||||
if Settings.apis?.project_history?.enabled
|
||||
if HistoryManager.shouldFlushHistoryOps(project_ops_length, ops.length, HistoryManager.FLUSH_PROJECT_EVERY_N_OPS)
|
||||
# Do this in the background since it uses HTTP and so may be too
|
||||
# slow to wait for when processing a doc update.
|
||||
logger.log { project_ops_length, project_id }, "flushing project history api"
|
||||
HistoryManager.flushProjectChangesAsync project_id
|
||||
// record updates for project history
|
||||
if (__guard__(Settings.apis != null ? Settings.apis.project_history : undefined, x => x.enabled)) {
|
||||
if (HistoryManager.shouldFlushHistoryOps(project_ops_length, ops.length, HistoryManager.FLUSH_PROJECT_EVERY_N_OPS)) {
|
||||
// Do this in the background since it uses HTTP and so may be too
|
||||
// slow to wait for when processing a doc update.
|
||||
logger.log({ project_ops_length, project_id }, "flushing project history api");
|
||||
HistoryManager.flushProjectChangesAsync(project_id);
|
||||
}
|
||||
}
|
||||
|
||||
# if the doc_ops_length is undefined it means the project is not using track-changes
|
||||
# so we can bail out here
|
||||
if typeof(doc_ops_length) is 'undefined'
|
||||
logger.debug { project_id, doc_id}, "skipping flush to track-changes, only using project-history"
|
||||
return callback()
|
||||
// if the doc_ops_length is undefined it means the project is not using track-changes
|
||||
// so we can bail out here
|
||||
if (typeof(doc_ops_length) === 'undefined') {
|
||||
logger.debug({ project_id, doc_id}, "skipping flush to track-changes, only using project-history");
|
||||
return callback();
|
||||
}
|
||||
|
||||
# record updates for track-changes
|
||||
HistoryRedisManager.recordDocHasHistoryOps project_id, doc_id, ops, (error) ->
|
||||
return callback(error) if error?
|
||||
if HistoryManager.shouldFlushHistoryOps(doc_ops_length, ops.length, HistoryManager.FLUSH_DOC_EVERY_N_OPS)
|
||||
# Do this in the background since it uses HTTP and so may be too
|
||||
# slow to wait for when processing a doc update.
|
||||
logger.log { doc_ops_length, doc_id, project_id }, "flushing track changes api"
|
||||
HistoryManager.flushDocChangesAsync project_id, doc_id
|
||||
callback()
|
||||
// record updates for track-changes
|
||||
return HistoryRedisManager.recordDocHasHistoryOps(project_id, doc_id, ops, function(error) {
|
||||
if (error != null) { return callback(error); }
|
||||
if (HistoryManager.shouldFlushHistoryOps(doc_ops_length, ops.length, HistoryManager.FLUSH_DOC_EVERY_N_OPS)) {
|
||||
// Do this in the background since it uses HTTP and so may be too
|
||||
// slow to wait for when processing a doc update.
|
||||
logger.log({ doc_ops_length, doc_id, project_id }, "flushing track changes api");
|
||||
HistoryManager.flushDocChangesAsync(project_id, doc_id);
|
||||
}
|
||||
return callback();
|
||||
});
|
||||
},
|
||||
|
||||
shouldFlushHistoryOps: (length, ops_length, threshold) ->
|
||||
return false if !length # don't flush unless we know the length
|
||||
# We want to flush every 100 ops, i.e. 100, 200, 300, etc
|
||||
# Find out which 'block' (i.e. 0-99, 100-199) we were in before and after pushing these
|
||||
# ops. If we've changed, then we've gone over a multiple of 100 and should flush.
|
||||
# (Most of the time, we will only hit 100 and then flushing will put us back to 0)
|
||||
previousLength = length - ops_length
|
||||
prevBlock = Math.floor(previousLength / threshold)
|
||||
newBlock = Math.floor(length / threshold)
|
||||
return newBlock != prevBlock
|
||||
shouldFlushHistoryOps(length, ops_length, threshold) {
|
||||
if (!length) { return false; } // don't flush unless we know the length
|
||||
// We want to flush every 100 ops, i.e. 100, 200, 300, etc
|
||||
// Find out which 'block' (i.e. 0-99, 100-199) we were in before and after pushing these
|
||||
// ops. If we've changed, then we've gone over a multiple of 100 and should flush.
|
||||
// (Most of the time, we will only hit 100 and then flushing will put us back to 0)
|
||||
const previousLength = length - ops_length;
|
||||
const prevBlock = Math.floor(previousLength / threshold);
|
||||
const newBlock = Math.floor(length / threshold);
|
||||
return newBlock !== prevBlock;
|
||||
},
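// Worked example (illustrative, not part of this commit): with threshold = 100,
// pushing 60 ops onto a history length of 90 gives previousLength = 90 (block 0)
// and length = 150 (block 1); the block changed, so this returns true and we flush.
// Pushing 10 more ops (150 -> 160) stays inside block 1 and returns false.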
|
||||
|
||||
MAX_PARALLEL_REQUESTS: 4
|
||||
MAX_PARALLEL_REQUESTS: 4,
|
||||
|
||||
resyncProjectHistory: (project_id, projectHistoryId, docs, files, callback) ->
|
||||
ProjectHistoryRedisManager.queueResyncProjectStructure project_id, projectHistoryId, docs, files, (error) ->
|
||||
return callback(error) if error?
|
||||
DocumentManager = require "./DocumentManager"
|
||||
resyncDoc = (doc, cb) ->
|
||||
DocumentManager.resyncDocContentsWithLock project_id, doc.doc, cb
|
||||
async.eachLimit docs, HistoryManager.MAX_PARALLEL_REQUESTS, resyncDoc, callback
|
||||
resyncProjectHistory(project_id, projectHistoryId, docs, files, callback) {
|
||||
return ProjectHistoryRedisManager.queueResyncProjectStructure(project_id, projectHistoryId, docs, files, function(error) {
|
||||
if (error != null) { return callback(error); }
|
||||
const DocumentManager = require("./DocumentManager");
|
||||
const resyncDoc = (doc, cb) => DocumentManager.resyncDocContentsWithLock(project_id, doc.doc, cb);
|
||||
return async.eachLimit(docs, HistoryManager.MAX_PARALLEL_REQUESTS, resyncDoc, callback);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
function __guard__(value, transform) {
|
||||
return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined;
|
||||
}
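// Usage note (illustrative): __guard__(obj, x => x.prop) is decaffeinate's stand-in
// for CoffeeScript's soak operator, e.g. __guard__(Settings.apis, x => x.project_history)
// returns Settings.apis.project_history when Settings.apis is defined and undefined
// otherwise -- the same behaviour that optional chaining gives natively in later
// JavaScript versions.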
|
|
@ -1,13 +1,26 @@
|
|||
Settings = require('settings-sharelatex')
|
||||
rclient = require("redis-sharelatex").createClient(Settings.redis.history)
|
||||
Keys = Settings.redis.history.key_schema
|
||||
logger = require('logger-sharelatex')
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
let HistoryRedisManager;
|
||||
const Settings = require('settings-sharelatex');
|
||||
const rclient = require("redis-sharelatex").createClient(Settings.redis.history);
|
||||
const Keys = Settings.redis.history.key_schema;
|
||||
const logger = require('logger-sharelatex');
|
||||
|
||||
module.exports = HistoryRedisManager =
|
||||
recordDocHasHistoryOps: (project_id, doc_id, ops = [], callback = (error) ->) ->
|
||||
if ops.length == 0
|
||||
return callback(new Error("cannot push no ops")) # This should never be called with no ops, but protect against a redis error if we sent an empty array to rpush
|
||||
logger.log project_id: project_id, doc_id: doc_id, "marking doc in project for history ops"
|
||||
rclient.sadd Keys.docsWithHistoryOps({project_id}), doc_id, (error) ->
|
||||
return callback(error) if error?
|
||||
callback()
|
||||
module.exports = (HistoryRedisManager = {
|
||||
recordDocHasHistoryOps(project_id, doc_id, ops, callback) {
|
||||
if (ops == null) { ops = []; }
|
||||
if (callback == null) { callback = function(error) {}; }
|
||||
if (ops.length === 0) {
|
||||
return callback(new Error("cannot push no ops")); // This should never be called with no ops, but protect against a redis error if we sent an empty array to rpush
|
||||
}
|
||||
logger.log({project_id, doc_id}, "marking doc in project for history ops");
|
||||
return rclient.sadd(Keys.docsWithHistoryOps({project_id}), doc_id, function(error) {
|
||||
if (error != null) { return callback(error); }
|
||||
return callback();
|
||||
});
|
||||
}
|
||||
});
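// Usage sketch (hypothetical ids, for illustration only): callers mark a doc as
// having pending history ops before flushing, e.g.
//   HistoryRedisManager.recordDocHasHistoryOps("<project_id>", "<doc_id>", ops, function(error) {
//     if (error) { /* handle the redis failure */ }
//   });
// which SADDs the doc id into the project's docsWithHistoryOps set.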
|
||||
|
|
|
@ -1,231 +1,336 @@
|
|||
DocumentManager = require "./DocumentManager"
|
||||
HistoryManager = require "./HistoryManager"
|
||||
ProjectManager = require "./ProjectManager"
|
||||
Errors = require "./Errors"
|
||||
logger = require "logger-sharelatex"
|
||||
Metrics = require "./Metrics"
|
||||
ProjectFlusher = require("./ProjectFlusher")
|
||||
DeleteQueueManager = require("./DeleteQueueManager")
|
||||
async = require "async"
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS103: Rewrite code to no longer use __guard__
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
let HttpController;
|
||||
const DocumentManager = require("./DocumentManager");
|
||||
const HistoryManager = require("./HistoryManager");
|
||||
const ProjectManager = require("./ProjectManager");
|
||||
const Errors = require("./Errors");
|
||||
const logger = require("logger-sharelatex");
|
||||
const Metrics = require("./Metrics");
|
||||
const ProjectFlusher = require("./ProjectFlusher");
|
||||
const DeleteQueueManager = require("./DeleteQueueManager");
|
||||
const async = require("async");
|
||||
|
||||
TWO_MEGABYTES = 2 * 1024 * 1024
|
||||
const TWO_MEGABYTES = 2 * 1024 * 1024;
|
||||
|
||||
module.exports = HttpController =
|
||||
getDoc: (req, res, next = (error) ->) ->
|
||||
doc_id = req.params.doc_id
|
||||
project_id = req.params.project_id
|
||||
logger.log project_id: project_id, doc_id: doc_id, "getting doc via http"
|
||||
timer = new Metrics.Timer("http.getDoc")
|
||||
module.exports = (HttpController = {
|
||||
getDoc(req, res, next) {
|
||||
let fromVersion;
|
||||
if (next == null) { next = function(error) {}; }
|
||||
const {
|
||||
doc_id
|
||||
} = req.params;
|
||||
const {
|
||||
project_id
|
||||
} = req.params;
|
||||
logger.log({project_id, doc_id}, "getting doc via http");
|
||||
const timer = new Metrics.Timer("http.getDoc");
|
||||
|
||||
if req.query?.fromVersion?
|
||||
fromVersion = parseInt(req.query.fromVersion, 10)
|
||||
else
|
||||
fromVersion = -1
|
||||
if ((req.query != null ? req.query.fromVersion : undefined) != null) {
|
||||
fromVersion = parseInt(req.query.fromVersion, 10);
|
||||
} else {
|
||||
fromVersion = -1;
|
||||
}
|
||||
|
||||
DocumentManager.getDocAndRecentOpsWithLock project_id, doc_id, fromVersion, (error, lines, version, ops, ranges, pathname) ->
|
||||
timer.done()
|
||||
return next(error) if error?
|
||||
logger.log project_id: project_id, doc_id: doc_id, "got doc via http"
|
||||
if !lines? or !version?
|
||||
return next(new Errors.NotFoundError("document not found"))
|
||||
res.json
|
||||
id: doc_id
|
||||
lines: lines
|
||||
version: version
|
||||
ops: ops
|
||||
ranges: ranges
|
||||
pathname: pathname
|
||||
return DocumentManager.getDocAndRecentOpsWithLock(project_id, doc_id, fromVersion, function(error, lines, version, ops, ranges, pathname) {
|
||||
timer.done();
|
||||
if (error != null) { return next(error); }
|
||||
logger.log({project_id, doc_id}, "got doc via http");
|
||||
if ((lines == null) || (version == null)) {
|
||||
return next(new Errors.NotFoundError("document not found"));
|
||||
}
|
||||
return res.json({
|
||||
id: doc_id,
|
||||
lines,
|
||||
version,
|
||||
ops,
|
||||
ranges,
|
||||
pathname
|
||||
});
|
||||
});
|
||||
},
|
||||
|
||||
_getTotalSizeOfLines: (lines) ->
|
||||
size = 0
|
||||
for line in lines
|
||||
size += (line.length + 1)
|
||||
return size
|
||||
_getTotalSizeOfLines(lines) {
|
||||
let size = 0;
|
||||
for (let line of Array.from(lines)) {
|
||||
size += (line.length + 1);
|
||||
}
|
||||
return size;
|
||||
},
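// Worked example (illustrative): _getTotalSizeOfLines(["hello", "world!"]) returns
// 6 + 7 = 13 -- each line contributes its length plus one character for the newline,
// which is the size that setDoc below compares against TWO_MEGABYTES.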
|
||||
|
||||
getProjectDocsAndFlushIfOld: (req, res, next = (error) ->) ->
|
||||
project_id = req.params.project_id
|
||||
projectStateHash = req.query?.state
|
||||
# exclude is string of existing docs "id:version,id:version,..."
|
||||
excludeItems = req.query?.exclude?.split(',') or []
|
||||
logger.log project_id: project_id, exclude: excludeItems, "getting docs via http"
|
||||
timer = new Metrics.Timer("http.getAllDocs")
|
||||
excludeVersions = {}
|
||||
for item in excludeItems
|
||||
[id,version] = item?.split(':')
|
||||
excludeVersions[id] = version
|
||||
logger.log {project_id: project_id, projectStateHash: projectStateHash, excludeVersions: excludeVersions}, "excluding versions"
|
||||
ProjectManager.getProjectDocsAndFlushIfOld project_id, projectStateHash, excludeVersions, (error, result) ->
|
||||
timer.done()
|
||||
if error instanceof Errors.ProjectStateChangedError
|
||||
res.sendStatus 409 # conflict
|
||||
else if error?
|
||||
return next(error)
|
||||
else
|
||||
logger.log project_id: project_id, result: ("#{doc._id}:#{doc.v}" for doc in result), "got docs via http"
|
||||
res.send result
|
||||
getProjectDocsAndFlushIfOld(req, res, next) {
|
||||
if (next == null) { next = function(error) {}; }
|
||||
const {
|
||||
project_id
|
||||
} = req.params;
|
||||
const projectStateHash = req.query != null ? req.query.state : undefined;
|
||||
// exclude is string of existing docs "id:version,id:version,..."
|
||||
const excludeItems = __guard__(req.query != null ? req.query.exclude : undefined, x => x.split(',')) || [];
|
||||
logger.log({project_id, exclude: excludeItems}, "getting docs via http");
|
||||
const timer = new Metrics.Timer("http.getAllDocs");
|
||||
const excludeVersions = {};
|
||||
for (let item of Array.from(excludeItems)) {
|
||||
const [id, version] = Array.from(item != null ? item.split(':') : []);
|
||||
excludeVersions[id] = version;
|
||||
}
|
||||
logger.log({project_id, projectStateHash, excludeVersions}, "excluding versions");
|
||||
return ProjectManager.getProjectDocsAndFlushIfOld(project_id, projectStateHash, excludeVersions, function(error, result) {
|
||||
timer.done();
|
||||
if (error instanceof Errors.ProjectStateChangedError) {
|
||||
return res.sendStatus(409); // conflict
|
||||
} else if (error != null) {
|
||||
return next(error);
|
||||
} else {
|
||||
logger.log({project_id, result: ((Array.from(result).map((doc) => `${doc._id}:${doc.v}`)))}, "got docs via http");
|
||||
return res.send(result);
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
clearProjectState: (req, res, next = (error) ->) ->
|
||||
project_id = req.params.project_id
|
||||
timer = new Metrics.Timer("http.clearProjectState")
|
||||
logger.log project_id: project_id, "clearing project state via http"
|
||||
ProjectManager.clearProjectState project_id, (error) ->
|
||||
timer.done()
|
||||
if error?
|
||||
return next(error)
|
||||
else
|
||||
res.sendStatus 200
|
||||
clearProjectState(req, res, next) {
|
||||
if (next == null) { next = function(error) {}; }
|
||||
const {
|
||||
project_id
|
||||
} = req.params;
|
||||
const timer = new Metrics.Timer("http.clearProjectState");
|
||||
logger.log({project_id}, "clearing project state via http");
|
||||
return ProjectManager.clearProjectState(project_id, function(error) {
|
||||
timer.done();
|
||||
if (error != null) {
|
||||
return next(error);
|
||||
} else {
|
||||
return res.sendStatus(200);
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
setDoc: (req, res, next = (error) ->) ->
|
||||
doc_id = req.params.doc_id
|
||||
project_id = req.params.project_id
|
||||
{lines, source, user_id, undoing} = req.body
|
||||
lineSize = HttpController._getTotalSizeOfLines(lines)
|
||||
if lineSize > TWO_MEGABYTES
|
||||
logger.log {project_id, doc_id, source, lineSize, user_id}, "document too large, returning 406 response"
|
||||
return res.sendStatus 406
|
||||
logger.log {project_id, doc_id, lines, source, user_id, undoing}, "setting doc via http"
|
||||
timer = new Metrics.Timer("http.setDoc")
|
||||
DocumentManager.setDocWithLock project_id, doc_id, lines, source, user_id, undoing, (error) ->
|
||||
timer.done()
|
||||
return next(error) if error?
|
||||
logger.log project_id: project_id, doc_id: doc_id, "set doc via http"
|
||||
res.sendStatus 204 # No Content
|
||||
setDoc(req, res, next) {
|
||||
if (next == null) { next = function(error) {}; }
|
||||
const {
|
||||
doc_id
|
||||
} = req.params;
|
||||
const {
|
||||
project_id
|
||||
} = req.params;
|
||||
const {lines, source, user_id, undoing} = req.body;
|
||||
const lineSize = HttpController._getTotalSizeOfLines(lines);
|
||||
if (lineSize > TWO_MEGABYTES) {
|
||||
logger.log({project_id, doc_id, source, lineSize, user_id}, "document too large, returning 406 response");
|
||||
return res.sendStatus(406);
|
||||
}
|
||||
logger.log({project_id, doc_id, lines, source, user_id, undoing}, "setting doc via http");
|
||||
const timer = new Metrics.Timer("http.setDoc");
|
||||
return DocumentManager.setDocWithLock(project_id, doc_id, lines, source, user_id, undoing, function(error) {
|
||||
timer.done();
|
||||
if (error != null) { return next(error); }
|
||||
logger.log({project_id, doc_id}, "set doc via http");
|
||||
return res.sendStatus(204); // No Content
|
||||
});
|
||||
},
|
||||
|
||||
|
||||
flushDocIfLoaded: (req, res, next = (error) ->) ->
|
||||
doc_id = req.params.doc_id
|
||||
project_id = req.params.project_id
|
||||
logger.log project_id: project_id, doc_id: doc_id, "flushing doc via http"
|
||||
timer = new Metrics.Timer("http.flushDoc")
|
||||
DocumentManager.flushDocIfLoadedWithLock project_id, doc_id, (error) ->
|
||||
timer.done()
|
||||
return next(error) if error?
|
||||
logger.log project_id: project_id, doc_id: doc_id, "flushed doc via http"
|
||||
res.sendStatus 204 # No Content
|
||||
flushDocIfLoaded(req, res, next) {
|
||||
if (next == null) { next = function(error) {}; }
|
||||
const {
|
||||
doc_id
|
||||
} = req.params;
|
||||
const {
|
||||
project_id
|
||||
} = req.params;
|
||||
logger.log({project_id, doc_id}, "flushing doc via http");
|
||||
const timer = new Metrics.Timer("http.flushDoc");
|
||||
return DocumentManager.flushDocIfLoadedWithLock(project_id, doc_id, function(error) {
|
||||
timer.done();
|
||||
if (error != null) { return next(error); }
|
||||
logger.log({project_id, doc_id}, "flushed doc via http");
|
||||
return res.sendStatus(204); // No Content
|
||||
});
|
||||
},
|
||||
|
||||
deleteDoc: (req, res, next = (error) ->) ->
|
||||
doc_id = req.params.doc_id
|
||||
project_id = req.params.project_id
|
||||
ignoreFlushErrors = req.query.ignore_flush_errors == 'true'
|
||||
timer = new Metrics.Timer("http.deleteDoc")
|
||||
logger.log project_id: project_id, doc_id: doc_id, "deleting doc via http"
|
||||
DocumentManager.flushAndDeleteDocWithLock project_id, doc_id, { ignoreFlushErrors: ignoreFlushErrors }, (error) ->
|
||||
timer.done()
|
||||
# There is no harm in flushing project history if the previous call
|
||||
# failed and sometimes it is required
|
||||
HistoryManager.flushProjectChangesAsync project_id
|
||||
deleteDoc(req, res, next) {
|
||||
if (next == null) { next = function(error) {}; }
|
||||
const {
|
||||
doc_id
|
||||
} = req.params;
|
||||
const {
|
||||
project_id
|
||||
} = req.params;
|
||||
const ignoreFlushErrors = req.query.ignore_flush_errors === 'true';
|
||||
const timer = new Metrics.Timer("http.deleteDoc");
|
||||
logger.log({project_id, doc_id}, "deleting doc via http");
|
||||
return DocumentManager.flushAndDeleteDocWithLock(project_id, doc_id, { ignoreFlushErrors }, function(error) {
|
||||
timer.done();
|
||||
// There is no harm in flushing project history if the previous call
|
||||
// failed and sometimes it is required
|
||||
HistoryManager.flushProjectChangesAsync(project_id);
|
||||
|
||||
return next(error) if error?
|
||||
logger.log project_id: project_id, doc_id: doc_id, "deleted doc via http"
|
||||
res.sendStatus 204 # No Content
|
||||
if (error != null) { return next(error); }
|
||||
logger.log({project_id, doc_id}, "deleted doc via http");
|
||||
return res.sendStatus(204); // No Content
|
||||
});
|
||||
},
|
||||
|
||||
flushProject: (req, res, next = (error) ->) ->
|
||||
project_id = req.params.project_id
|
||||
logger.log project_id: project_id, "flushing project via http"
|
||||
timer = new Metrics.Timer("http.flushProject")
|
||||
ProjectManager.flushProjectWithLocks project_id, (error) ->
|
||||
timer.done()
|
||||
return next(error) if error?
|
||||
logger.log project_id: project_id, "flushed project via http"
|
||||
res.sendStatus 204 # No Content
|
||||
flushProject(req, res, next) {
|
||||
if (next == null) { next = function(error) {}; }
|
||||
const {
|
||||
project_id
|
||||
} = req.params;
|
||||
logger.log({project_id}, "flushing project via http");
|
||||
const timer = new Metrics.Timer("http.flushProject");
|
||||
return ProjectManager.flushProjectWithLocks(project_id, function(error) {
|
||||
timer.done();
|
||||
if (error != null) { return next(error); }
|
||||
logger.log({project_id}, "flushed project via http");
|
||||
return res.sendStatus(204); // No Content
|
||||
});
|
||||
},
|
||||
|
||||
deleteProject: (req, res, next = (error) ->) ->
|
||||
project_id = req.params.project_id
|
||||
logger.log project_id: project_id, "deleting project via http"
|
||||
options = {}
|
||||
options.background = true if req.query?.background # allow non-urgent flushes to be queued
|
||||
options.skip_history_flush = true if req.query?.shutdown # don't flush history when realtime shuts down
|
||||
if req.query?.background
|
||||
ProjectManager.queueFlushAndDeleteProject project_id, (error) ->
|
||||
return next(error) if error?
|
||||
logger.log project_id: project_id, "queue delete of project via http"
|
||||
res.sendStatus 204 # No Content
|
||||
else
|
||||
timer = new Metrics.Timer("http.deleteProject")
|
||||
ProjectManager.flushAndDeleteProjectWithLocks project_id, options, (error) ->
|
||||
timer.done()
|
||||
return next(error) if error?
|
||||
logger.log project_id: project_id, "deleted project via http"
|
||||
res.sendStatus 204 # No Content
|
||||
deleteProject(req, res, next) {
|
||||
if (next == null) { next = function(error) {}; }
|
||||
const {
|
||||
project_id
|
||||
} = req.params;
|
||||
logger.log({project_id}, "deleting project via http");
|
||||
const options = {};
|
||||
if (req.query != null ? req.query.background : undefined) { options.background = true; } // allow non-urgent flushes to be queued
|
||||
if (req.query != null ? req.query.shutdown : undefined) { options.skip_history_flush = true; } // don't flush history when realtime shuts down
|
||||
if (req.query != null ? req.query.background : undefined) {
|
||||
return ProjectManager.queueFlushAndDeleteProject(project_id, function(error) {
|
||||
if (error != null) { return next(error); }
|
||||
logger.log({project_id}, "queue delete of project via http");
|
||||
return res.sendStatus(204); // No Content
|
||||
});
|
||||
} else {
|
||||
const timer = new Metrics.Timer("http.deleteProject");
|
||||
return ProjectManager.flushAndDeleteProjectWithLocks(project_id, options, function(error) {
|
||||
timer.done();
|
||||
if (error != null) { return next(error); }
|
||||
logger.log({project_id}, "deleted project via http");
|
||||
return res.sendStatus(204); // No Content
|
||||
});
|
||||
}
|
||||
},
|
||||
|
||||
deleteMultipleProjects: (req, res, next = (error) ->) ->
|
||||
project_ids = req.body?.project_ids || []
|
||||
logger.log project_ids: project_ids, "deleting multiple projects via http"
|
||||
async.eachSeries project_ids, (project_id, cb) ->
|
||||
logger.log project_id: project_id, "queue delete of project via http"
|
||||
ProjectManager.queueFlushAndDeleteProject project_id, cb
|
||||
, (error) ->
|
||||
return next(error) if error?
|
||||
res.sendStatus 204 # No Content
|
||||
deleteMultipleProjects(req, res, next) {
|
||||
if (next == null) { next = function(error) {}; }
|
||||
const project_ids = (req.body != null ? req.body.project_ids : undefined) || [];
|
||||
logger.log({project_ids}, "deleting multiple projects via http");
|
||||
return async.eachSeries(project_ids, function(project_id, cb) {
|
||||
logger.log({project_id}, "queue delete of project via http");
|
||||
return ProjectManager.queueFlushAndDeleteProject(project_id, cb);
|
||||
}
|
||||
, function(error) {
|
||||
if (error != null) { return next(error); }
|
||||
return res.sendStatus(204); // No Content
|
||||
});
|
||||
},
|
||||
|
||||
acceptChanges: (req, res, next = (error) ->) ->
|
||||
{project_id, doc_id} = req.params
|
||||
change_ids = req.body?.change_ids
|
||||
if !change_ids?
|
||||
change_ids = [ req.params.change_id ]
|
||||
logger.log {project_id, doc_id}, "accepting #{ change_ids.length } changes via http"
|
||||
timer = new Metrics.Timer("http.acceptChanges")
|
||||
DocumentManager.acceptChangesWithLock project_id, doc_id, change_ids, (error) ->
|
||||
timer.done()
|
||||
return next(error) if error?
|
||||
logger.log {project_id, doc_id}, "accepted #{ change_ids.length } changes via http"
|
||||
res.sendStatus 204 # No Content
|
||||
acceptChanges(req, res, next) {
|
||||
if (next == null) { next = function(error) {}; }
|
||||
const {project_id, doc_id} = req.params;
|
||||
let change_ids = req.body != null ? req.body.change_ids : undefined;
|
||||
if (change_ids == null) {
|
||||
change_ids = [ req.params.change_id ];
|
||||
}
|
||||
logger.log({project_id, doc_id}, `accepting ${ change_ids.length } changes via http`);
|
||||
const timer = new Metrics.Timer("http.acceptChanges");
|
||||
return DocumentManager.acceptChangesWithLock(project_id, doc_id, change_ids, function(error) {
|
||||
timer.done();
|
||||
if (error != null) { return next(error); }
|
||||
logger.log({project_id, doc_id}, `accepted ${ change_ids.length } changes via http`);
|
||||
return res.sendStatus(204); // No Content
|
||||
});
|
||||
},
|
||||
|
||||
deleteComment: (req, res, next = (error) ->) ->
|
||||
{project_id, doc_id, comment_id} = req.params
|
||||
logger.log {project_id, doc_id, comment_id}, "deleting comment via http"
|
||||
timer = new Metrics.Timer("http.deleteComment")
|
||||
DocumentManager.deleteCommentWithLock project_id, doc_id, comment_id, (error) ->
|
||||
timer.done()
|
||||
return next(error) if error?
|
||||
logger.log {project_id, doc_id, comment_id}, "deleted comment via http"
|
||||
res.sendStatus 204 # No Content
|
||||
deleteComment(req, res, next) {
|
||||
if (next == null) { next = function(error) {}; }
|
||||
const {project_id, doc_id, comment_id} = req.params;
|
||||
logger.log({project_id, doc_id, comment_id}, "deleting comment via http");
|
||||
const timer = new Metrics.Timer("http.deleteComment");
|
||||
return DocumentManager.deleteCommentWithLock(project_id, doc_id, comment_id, function(error) {
|
||||
timer.done();
|
||||
if (error != null) { return next(error); }
|
||||
logger.log({project_id, doc_id, comment_id}, "deleted comment via http");
|
||||
return res.sendStatus(204); // No Content
|
||||
});
|
||||
},
|
||||
|
||||
updateProject: (req, res, next = (error) ->) ->
|
||||
timer = new Metrics.Timer("http.updateProject")
|
||||
project_id = req.params.project_id
|
||||
{projectHistoryId, userId, docUpdates, fileUpdates, version} = req.body
|
||||
logger.log {project_id, docUpdates, fileUpdates, version}, "updating project via http"
|
||||
updateProject(req, res, next) {
|
||||
if (next == null) { next = function(error) {}; }
|
||||
const timer = new Metrics.Timer("http.updateProject");
|
||||
const {
|
||||
project_id
|
||||
} = req.params;
|
||||
const {projectHistoryId, userId, docUpdates, fileUpdates, version} = req.body;
|
||||
logger.log({project_id, docUpdates, fileUpdates, version}, "updating project via http");
|
||||
|
||||
ProjectManager.updateProjectWithLocks project_id, projectHistoryId, userId, docUpdates, fileUpdates, version, (error) ->
|
||||
timer.done()
|
||||
return next(error) if error?
|
||||
logger.log project_id: project_id, "updated project via http"
|
||||
res.sendStatus 204 # No Content
|
||||
return ProjectManager.updateProjectWithLocks(project_id, projectHistoryId, userId, docUpdates, fileUpdates, version, function(error) {
|
||||
timer.done();
|
||||
if (error != null) { return next(error); }
|
||||
logger.log({project_id}, "updated project via http");
|
||||
return res.sendStatus(204); // No Content
|
||||
});
|
||||
},
|
||||
|
||||
resyncProjectHistory: (req, res, next = (error) ->) ->
|
||||
project_id = req.params.project_id
|
||||
{projectHistoryId, docs, files} = req.body
|
||||
resyncProjectHistory(req, res, next) {
|
||||
if (next == null) { next = function(error) {}; }
|
||||
const {
|
||||
project_id
|
||||
} = req.params;
|
||||
const {projectHistoryId, docs, files} = req.body;
|
||||
|
||||
logger.log {project_id, docs, files}, "queuing project history resync via http"
|
||||
HistoryManager.resyncProjectHistory project_id, projectHistoryId, docs, files, (error) ->
|
||||
return next(error) if error?
|
||||
logger.log {project_id}, "queued project history resync via http"
|
||||
res.sendStatus 204
|
||||
logger.log({project_id, docs, files}, "queuing project history resync via http");
|
||||
return HistoryManager.resyncProjectHistory(project_id, projectHistoryId, docs, files, function(error) {
|
||||
if (error != null) { return next(error); }
|
||||
logger.log({project_id}, "queued project history resync via http");
|
||||
return res.sendStatus(204);
|
||||
});
|
||||
},
|
||||
|
||||
flushAllProjects: (req, res, next = (error)-> )->
|
||||
res.setTimeout(5 * 60 * 1000)
|
||||
options =
|
||||
limit : req.query.limit || 1000
|
||||
concurrency : req.query.concurrency || 5
|
||||
flushAllProjects(req, res, next) {
|
||||
if (next == null) { next = function(error){}; }
|
||||
res.setTimeout(5 * 60 * 1000);
|
||||
const options = {
|
||||
limit : req.query.limit || 1000,
|
||||
concurrency : req.query.concurrency || 5,
|
||||
dryRun : req.query.dryRun || false
|
||||
ProjectFlusher.flushAllProjects options, (err, project_ids)->
|
||||
if err?
|
||||
logger.err err:err, "error bulk flushing projects"
|
||||
res.sendStatus 500
|
||||
else
|
||||
res.send project_ids
|
||||
};
|
||||
return ProjectFlusher.flushAllProjects(options, function(err, project_ids){
|
||||
if (err != null) {
|
||||
logger.err({err}, "error bulk flushing projects");
|
||||
return res.sendStatus(500);
|
||||
} else {
|
||||
return res.send(project_ids);
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
flushQueuedProjects: (req, res, next = (error) ->) ->
|
||||
res.setTimeout(10 * 60 * 1000)
|
||||
options =
|
||||
limit : req.query.limit || 1000
|
||||
timeout: 5 * 60 * 1000
|
||||
min_delete_age: req.query.min_delete_age || 5 * 60 * 1000
|
||||
DeleteQueueManager.flushAndDeleteOldProjects options, (err, flushed)->
|
||||
if err?
|
||||
logger.err err:err, "error flushing old projects"
|
||||
res.sendStatus 500
|
||||
else
|
||||
logger.log {flushed: flushed}, "flush of queued projects completed"
|
||||
res.send {flushed: flushed}
|
||||
flushQueuedProjects(req, res, next) {
|
||||
if (next == null) { next = function(error) {}; }
|
||||
res.setTimeout(10 * 60 * 1000);
|
||||
const options = {
|
||||
limit : req.query.limit || 1000,
|
||||
timeout: 5 * 60 * 1000,
|
||||
min_delete_age: req.query.min_delete_age || (5 * 60 * 1000)
|
||||
};
|
||||
return DeleteQueueManager.flushAndDeleteOldProjects(options, function(err, flushed){
|
||||
if (err != null) {
|
||||
logger.err({err}, "error flushing old projects");
|
||||
return res.sendStatus(500);
|
||||
} else {
|
||||
logger.log({flushed}, "flush of queued projects completed");
|
||||
return res.send({flushed});
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
function __guard__(value, transform) {
|
||||
return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined;
|
||||
}
|
|
@ -1,102 +1,131 @@
|
|||
metrics = require('./Metrics')
|
||||
Settings = require('settings-sharelatex')
|
||||
redis = require("redis-sharelatex")
|
||||
rclient = redis.createClient(Settings.redis.lock)
|
||||
keys = Settings.redis.lock.key_schema
|
||||
logger = require "logger-sharelatex"
|
||||
os = require "os"
|
||||
crypto = require "crypto"
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
let LockManager;
|
||||
const metrics = require('./Metrics');
|
||||
const Settings = require('settings-sharelatex');
|
||||
const redis = require("redis-sharelatex");
|
||||
const rclient = redis.createClient(Settings.redis.lock);
|
||||
const keys = Settings.redis.lock.key_schema;
|
||||
const logger = require("logger-sharelatex");
|
||||
const os = require("os");
|
||||
const crypto = require("crypto");
|
||||
|
||||
Profiler = require "./Profiler"
|
||||
const Profiler = require("./Profiler");
|
||||
|
||||
HOST = os.hostname()
|
||||
PID = process.pid
|
||||
RND = crypto.randomBytes(4).toString('hex')
|
||||
COUNT = 0
|
||||
const HOST = os.hostname();
|
||||
const PID = process.pid;
|
||||
const RND = crypto.randomBytes(4).toString('hex');
|
||||
let COUNT = 0;
|
||||
|
||||
MAX_REDIS_REQUEST_LENGTH = 5000 # 5 seconds
|
||||
const MAX_REDIS_REQUEST_LENGTH = 5000; // 5 seconds
|
||||
|
||||
module.exports = LockManager =
|
||||
LOCK_TEST_INTERVAL: 50 # 50ms between each test of the lock
|
||||
MAX_TEST_INTERVAL: 1000 # back off to 1s between each test of the lock
|
||||
MAX_LOCK_WAIT_TIME: 10000 # 10s maximum time to spend trying to get the lock
|
||||
LOCK_TTL: 30 # seconds. Time until lock auto expires in redis.
|
||||
module.exports = (LockManager = {
|
||||
LOCK_TEST_INTERVAL: 50, // 50ms between each test of the lock
|
||||
MAX_TEST_INTERVAL: 1000, // back off to 1s between each test of the lock
|
||||
MAX_LOCK_WAIT_TIME: 10000, // 10s maximum time to spend trying to get the lock
|
||||
LOCK_TTL: 30, // seconds. Time until lock auto expires in redis.
|
||||
|
||||
# Use a signed lock value as described in
|
||||
# http://redis.io/topics/distlock#correct-implementation-with-a-single-instance
|
||||
# to prevent accidental unlocking by multiple processes
|
||||
randomLock : () ->
|
||||
time = Date.now()
|
||||
return "locked:host=#{HOST}:pid=#{PID}:random=#{RND}:time=#{time}:count=#{COUNT++}"
|
||||
// Use a signed lock value as described in
|
||||
// http://redis.io/topics/distlock#correct-implementation-with-a-single-instance
|
||||
// to prevent accidental unlocking by multiple processes
|
||||
randomLock() {
|
||||
const time = Date.now();
|
||||
return `locked:host=${HOST}:pid=${PID}:random=${RND}:time=${time}:count=${COUNT++}`;
|
||||
},
|
||||
|
||||
unlockScript: 'if redis.call("get", KEYS[1]) == ARGV[1] then return redis.call("del", KEYS[1]) else return 0 end';
|
||||
unlockScript: 'if redis.call("get", KEYS[1]) == ARGV[1] then return redis.call("del", KEYS[1]) else return 0 end',
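// The Lua script above implements the compare-and-delete step of the single-instance
// distlock pattern linked in the comment: the key is deleted only if it still holds
// this process's lockValue. Illustrative call (mirrors releaseLock below):
//   rclient.eval(LockManager.unlockScript, 1, key, lockValue, (err, result) => { /* ... */ });
// where result is 1 when our lock was released and 0 when it had already expired.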
|
||||
|
||||
tryLock : (doc_id, callback = (err, isFree)->)->
|
||||
lockValue = LockManager.randomLock()
|
||||
key = keys.blockingKey(doc_id:doc_id)
|
||||
profile = new Profiler("tryLock", {doc_id, key, lockValue})
|
||||
rclient.set key, lockValue, "EX", @LOCK_TTL, "NX", (err, gotLock)->
|
||||
return callback(err) if err?
|
||||
if gotLock == "OK"
|
||||
metrics.inc "doc-not-blocking"
|
||||
timeTaken = profile.log("got lock").end()
|
||||
if timeTaken > MAX_REDIS_REQUEST_LENGTH
|
||||
# took too long, so try to free the lock
|
||||
LockManager.releaseLock doc_id, lockValue, (err, result) ->
|
||||
return callback(err) if err? # error freeing lock
|
||||
callback null, false # tell caller they didn't get the lock
|
||||
else
|
||||
callback null, true, lockValue
|
||||
else
|
||||
metrics.inc "doc-blocking"
|
||||
profile.log("doc is locked").end()
|
||||
callback null, false
|
||||
tryLock(doc_id, callback){
|
||||
if (callback == null) { callback = function(err, isFree){}; }
|
||||
const lockValue = LockManager.randomLock();
|
||||
const key = keys.blockingKey({doc_id});
|
||||
const profile = new Profiler("tryLock", {doc_id, key, lockValue});
|
||||
return rclient.set(key, lockValue, "EX", this.LOCK_TTL, "NX", function(err, gotLock){
|
||||
if (err != null) { return callback(err); }
|
||||
if (gotLock === "OK") {
|
||||
metrics.inc("doc-not-blocking");
|
||||
const timeTaken = profile.log("got lock").end();
|
||||
if (timeTaken > MAX_REDIS_REQUEST_LENGTH) {
|
||||
// took too long, so try to free the lock
|
||||
return LockManager.releaseLock(doc_id, lockValue, function(err, result) {
|
||||
if (err != null) { return callback(err); } // error freeing lock
|
||||
return callback(null, false);
|
||||
}); // tell caller they didn't get the lock
|
||||
} else {
|
||||
return callback(null, true, lockValue);
|
||||
}
|
||||
} else {
|
||||
metrics.inc("doc-blocking");
|
||||
profile.log("doc is locked").end();
|
||||
return callback(null, false);
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
getLock: (doc_id, callback = (error, lockValue) ->) ->
|
||||
startTime = Date.now()
|
||||
testInterval = LockManager.LOCK_TEST_INTERVAL
|
||||
profile = new Profiler("getLock", {doc_id})
|
||||
do attempt = () ->
|
||||
if Date.now() - startTime > LockManager.MAX_LOCK_WAIT_TIME
|
||||
e = new Error("Timeout")
|
||||
e.doc_id = doc_id
|
||||
profile.log("timeout").end()
|
||||
return callback(e)
|
||||
getLock(doc_id, callback) {
|
||||
let attempt;
|
||||
if (callback == null) { callback = function(error, lockValue) {}; }
|
||||
const startTime = Date.now();
|
||||
let testInterval = LockManager.LOCK_TEST_INTERVAL;
|
||||
const profile = new Profiler("getLock", {doc_id});
|
||||
return (attempt = function() {
|
||||
if ((Date.now() - startTime) > LockManager.MAX_LOCK_WAIT_TIME) {
|
||||
const e = new Error("Timeout");
|
||||
e.doc_id = doc_id;
|
||||
profile.log("timeout").end();
|
||||
return callback(e);
|
||||
}
|
||||
|
||||
LockManager.tryLock doc_id, (error, gotLock, lockValue) ->
|
||||
return callback(error) if error?
|
||||
profile.log("tryLock")
|
||||
if gotLock
|
||||
profile.end()
|
||||
callback(null, lockValue)
|
||||
else
|
||||
setTimeout attempt, testInterval
|
||||
# back off when the lock is taken to avoid overloading
|
||||
testInterval = Math.min(testInterval * 2, LockManager.MAX_TEST_INTERVAL)
|
||||
return LockManager.tryLock(doc_id, function(error, gotLock, lockValue) {
|
||||
if (error != null) { return callback(error); }
|
||||
profile.log("tryLock");
|
||||
if (gotLock) {
|
||||
profile.end();
|
||||
return callback(null, lockValue);
|
||||
} else {
|
||||
setTimeout(attempt, testInterval);
|
||||
// back off when the lock is taken to avoid overloading
|
||||
return testInterval = Math.min(testInterval * 2, LockManager.MAX_TEST_INTERVAL);
|
||||
}
|
||||
});
|
||||
})();
|
||||
},
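// Timing sketch (illustrative): with LOCK_TEST_INTERVAL = 50 and MAX_TEST_INTERVAL = 1000,
// a contended lock is retried after waits of 50, 100, 200, 400, 800, then 1000ms,
// and getLock gives up with a "Timeout" error once MAX_LOCK_WAIT_TIME (10s) has elapsed.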
|
||||
|
||||
checkLock: (doc_id, callback = (err, isFree)->)->
|
||||
key = keys.blockingKey(doc_id:doc_id)
|
||||
rclient.exists key, (err, exists) ->
|
||||
return callback(err) if err?
|
||||
exists = parseInt exists
|
||||
if exists == 1
|
||||
metrics.inc "doc-blocking"
|
||||
callback null, false
|
||||
else
|
||||
metrics.inc "doc-not-blocking"
|
||||
callback null, true
|
||||
checkLock(doc_id, callback){
|
||||
if (callback == null) { callback = function(err, isFree){}; }
|
||||
const key = keys.blockingKey({doc_id});
|
||||
return rclient.exists(key, function(err, exists) {
|
||||
if (err != null) { return callback(err); }
|
||||
exists = parseInt(exists);
|
||||
if (exists === 1) {
|
||||
metrics.inc("doc-blocking");
|
||||
return callback(null, false);
|
||||
} else {
|
||||
metrics.inc("doc-not-blocking");
|
||||
return callback(null, true);
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
releaseLock: (doc_id, lockValue, callback)->
|
||||
key = keys.blockingKey(doc_id:doc_id)
|
||||
profile = new Profiler("releaseLock", {doc_id, key, lockValue})
|
||||
rclient.eval LockManager.unlockScript, 1, key, lockValue, (err, result) ->
|
||||
if err?
|
||||
return callback(err)
|
||||
else if result? and result isnt 1 # successful unlock should release exactly one key
|
||||
profile.log("unlockScript:expired-lock").end()
|
||||
logger.error {doc_id:doc_id, key:key, lockValue:lockValue, redis_err:err, redis_result:result}, "unlocking error"
|
||||
metrics.inc "unlock-error"
|
||||
return callback(new Error("tried to release timed out lock"))
|
||||
else
|
||||
profile.log("unlockScript:ok").end()
|
||||
callback(null,result)
|
||||
releaseLock(doc_id, lockValue, callback){
|
||||
const key = keys.blockingKey({doc_id});
|
||||
const profile = new Profiler("releaseLock", {doc_id, key, lockValue});
|
||||
return rclient.eval(LockManager.unlockScript, 1, key, lockValue, function(err, result) {
|
||||
if (err != null) {
|
||||
return callback(err);
|
||||
} else if ((result != null) && (result !== 1)) { // successful unlock should release exactly one key
|
||||
profile.log("unlockScript:expired-lock").end();
|
||||
logger.error({doc_id, key, lockValue, redis_err:err, redis_result:result}, "unlocking error");
|
||||
metrics.inc("unlock-error");
|
||||
return callback(new Error("tried to release timed out lock"));
|
||||
} else {
|
||||
profile.log("unlockScript:ok").end();
|
||||
return callback(null,result);
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
|
|
|
@ -1,25 +1,41 @@
|
|||
_ = require('lodash')
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS103: Rewrite code to no longer use __guard__
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const _ = require('lodash');
|
||||
|
||||
showLength = (thing) ->
|
||||
if thing?.length then thing.length else thing
|
||||
const showLength = function(thing) {
|
||||
if ((thing != null ? thing.length : undefined)) { return thing.length; } else { return thing; }
|
||||
};
|
||||
|
||||
showUpdateLength = (update) ->
|
||||
if update?.op instanceof Array
|
||||
copy = _.cloneDeep(update)
|
||||
copy.op.forEach (element, index) ->
|
||||
copy.op[index].i = element.i.length if element?.i?.length?
|
||||
copy.op[index].d = element.d.length if element?.d?.length?
|
||||
copy.op[index].c = element.c.length if element?.c?.length?
|
||||
copy
|
||||
else
|
||||
update
|
||||
const showUpdateLength = function(update) {
|
||||
if ((update != null ? update.op : undefined) instanceof Array) {
|
||||
const copy = _.cloneDeep(update);
|
||||
copy.op.forEach(function(element, index) {
|
||||
if (__guard__(element != null ? element.i : undefined, x => x.length) != null) { copy.op[index].i = element.i.length; }
|
||||
if (__guard__(element != null ? element.d : undefined, x1 => x1.length) != null) { copy.op[index].d = element.d.length; }
|
||||
if (__guard__(element != null ? element.c : undefined, x2 => x2.length) != null) { return copy.op[index].c = element.c.length; }
|
||||
});
|
||||
return copy;
|
||||
} else {
|
||||
return update;
|
||||
}
|
||||
};
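// Worked example (illustrative): logging an update of
//   { op: [{ i: "hello", p: 0 }] }
// through this serializer yields { op: [{ i: 5, p: 0 }] } -- insert, delete and
// comment payloads are replaced by their lengths so large edits don't bloat the logs.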
|
||||
|
||||
module.exports =
|
||||
# replace long values with their length
|
||||
lines: showLength
|
||||
oldLines: showLength
|
||||
newLines: showLength
|
||||
docLines: showLength
|
||||
newDocLines: showLength
|
||||
ranges: showLength
|
||||
module.exports = {
|
||||
// replace long values with their length
|
||||
lines: showLength,
|
||||
oldLines: showLength,
|
||||
newLines: showLength,
|
||||
docLines: showLength,
|
||||
newDocLines: showLength,
|
||||
ranges: showLength,
|
||||
update: showUpdateLength
|
||||
};
|
||||
|
||||
function __guard__(value, transform) {
|
||||
return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined;
|
||||
}
|
|
@ -1 +1 @@
|
|||
module.exports = require "metrics-sharelatex"
|
||||
module.exports = require("metrics-sharelatex");
|
|
@ -1,100 +1,134 @@
|
|||
Settings = require "settings-sharelatex"
|
||||
Errors = require "./Errors"
|
||||
Metrics = require "./Metrics"
|
||||
logger = require "logger-sharelatex"
|
||||
request = (require("requestretry")).defaults({
|
||||
maxAttempts: 2
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS205: Consider reworking code to avoid use of IIFEs
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
let PersistenceManager;
|
||||
const Settings = require("settings-sharelatex");
|
||||
const Errors = require("./Errors");
|
||||
const Metrics = require("./Metrics");
|
||||
const logger = require("logger-sharelatex");
|
||||
const request = (require("requestretry")).defaults({
|
||||
maxAttempts: 2,
|
||||
retryDelay: 10
|
||||
})
|
||||
});
|
||||
|
||||
# We have to be quick with HTTP calls because we're holding a lock that
|
||||
# expires after 30 seconds. We can't let any errors in the rest of the stack
|
||||
# hold us up, and need to bail out quickly if there is a problem.
|
||||
MAX_HTTP_REQUEST_LENGTH = 5000 # 5 seconds
|
||||
// We have to be quick with HTTP calls because we're holding a lock that
|
||||
// expires after 30 seconds. We can't let any errors in the rest of the stack
|
||||
// hold us up, and need to bail out quickly if there is a problem.
|
||||
const MAX_HTTP_REQUEST_LENGTH = 5000; // 5 seconds
|
||||
|
||||
updateMetric = (method, error, response) ->
|
||||
# find the status, with special handling for connection timeouts
|
||||
# https://github.com/request/request#timeouts
|
||||
status = if error?.connect is true
|
||||
"#{error.code} (connect)"
|
||||
else if error?
|
||||
error.code
|
||||
else if response?
|
||||
response.statusCode
|
||||
Metrics.inc method, 1, {status: status}
|
||||
if error?.attempts > 1
|
||||
Metrics.inc "#{method}-retries", 1, {status: 'error'}
|
||||
if response?.attempts > 1
|
||||
Metrics.inc "#{method}-retries", 1, {status: 'success'}
|
||||
const updateMetric = function(method, error, response) {
|
||||
// find the status, with special handling for connection timeouts
|
||||
// https://github.com/request/request#timeouts
|
||||
const status = (() => {
|
||||
if ((error != null ? error.connect : undefined) === true) {
|
||||
return `${error.code} (connect)`;
|
||||
} else if (error != null) {
|
||||
return error.code;
|
||||
} else if (response != null) {
|
||||
return response.statusCode;
|
||||
}
|
||||
})();
|
||||
Metrics.inc(method, 1, {status});
|
||||
if ((error != null ? error.attempts : undefined) > 1) {
|
||||
Metrics.inc(`${method}-retries`, 1, {status: 'error'});
|
||||
}
|
||||
if ((response != null ? response.attempts : undefined) > 1) {
|
||||
return Metrics.inc(`${method}-retries`, 1, {status: 'success'});
|
||||
}
|
||||
};
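// Usage sketch (illustrative values): updateMetric('getDoc', null, { statusCode: 200 })
// increments the getDoc metric with status 200, while a connect timeout such as
// { code: 'ETIMEDOUT', connect: true } is counted as "ETIMEDOUT (connect)"; an
// error or response with attempts > 1 additionally bumps the getDoc-retries metric.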
|
||||
|
||||
module.exports = PersistenceManager =
|
||||
getDoc: (project_id, doc_id, _callback = (error, lines, version, ranges, pathname, projectHistoryId, projectHistoryType) ->) ->
|
||||
timer = new Metrics.Timer("persistenceManager.getDoc")
|
||||
callback = (args...) ->
|
||||
timer.done()
|
||||
_callback(args...)
|
||||
module.exports = (PersistenceManager = {
|
||||
getDoc(project_id, doc_id, _callback) {
|
||||
if (_callback == null) { _callback = function(error, lines, version, ranges, pathname, projectHistoryId, projectHistoryType) {}; }
|
||||
const timer = new Metrics.Timer("persistenceManager.getDoc");
|
||||
const callback = function(...args) {
|
||||
timer.done();
|
||||
return _callback(...args);
|
||||
};
|
||||
|
||||
url = "#{Settings.apis.web.url}/project/#{project_id}/doc/#{doc_id}"
|
||||
request {
|
||||
url: url
|
||||
method: "GET"
|
||||
headers:
|
||||
const url = `${Settings.apis.web.url}/project/${project_id}/doc/${doc_id}`;
|
||||
return request({
|
||||
url,
|
||||
method: "GET",
|
||||
headers: {
|
||||
"accept": "application/json"
|
||||
auth:
|
||||
user: Settings.apis.web.user
|
||||
pass: Settings.apis.web.pass
|
||||
},
|
||||
auth: {
|
||||
user: Settings.apis.web.user,
|
||||
pass: Settings.apis.web.pass,
|
||||
sendImmediately: true
|
||||
jar: false
|
||||
},
|
||||
jar: false,
|
||||
timeout: MAX_HTTP_REQUEST_LENGTH
|
||||
}, (error, res, body) ->
|
||||
updateMetric('getDoc', error, res)
|
||||
return callback(error) if error?
|
||||
if res.statusCode >= 200 and res.statusCode < 300
|
||||
try
|
||||
body = JSON.parse body
|
||||
catch e
|
||||
return callback(e)
|
||||
if !body.lines?
|
||||
return callback(new Error("web API response had no doc lines"))
|
||||
if !body.version? or typeof body.version isnt 'number'
|
||||
return callback(new Error("web API response had no valid doc version"))
|
||||
if !body.pathname?
|
||||
return callback(new Error("web API response had no valid doc pathname"))
|
||||
return callback null, body.lines, body.version, body.ranges, body.pathname, body.projectHistoryId, body.projectHistoryType
|
||||
else if res.statusCode == 404
|
||||
return callback(new Errors.NotFoundError("doc not found: #{url}"))
|
||||
else
|
||||
return callback(new Error("error accessing web API: #{url} #{res.statusCode}"))
|
||||
}, function(error, res, body) {
|
||||
updateMetric('getDoc', error, res);
|
||||
if (error != null) { return callback(error); }
|
||||
if ((res.statusCode >= 200) && (res.statusCode < 300)) {
|
||||
try {
|
||||
body = JSON.parse(body);
|
||||
} catch (e) {
|
||||
return callback(e);
|
||||
}
|
||||
if ((body.lines == null)) {
|
||||
return callback(new Error("web API response had no doc lines"));
|
||||
}
|
||||
if (body.version == null || typeof body.version !== 'number') {
|
||||
return callback(new Error("web API response had no valid doc version"));
|
||||
}
|
||||
if ((body.pathname == null)) {
|
||||
return callback(new Error("web API response had no valid doc pathname"));
|
||||
}
|
||||
return callback(null, body.lines, body.version, body.ranges, body.pathname, body.projectHistoryId, body.projectHistoryType);
|
||||
} else if (res.statusCode === 404) {
|
||||
return callback(new Errors.NotFoundError(`doc not found: ${url}`));
|
||||
} else {
|
||||
return callback(new Error(`error accessing web API: ${url} ${res.statusCode}`));
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
setDoc: (project_id, doc_id, lines, version, ranges, lastUpdatedAt, lastUpdatedBy,_callback = (error) ->) ->
|
||||
timer = new Metrics.Timer("persistenceManager.setDoc")
|
||||
callback = (args...) ->
|
||||
timer.done()
|
||||
_callback(args...)
|
||||
setDoc(project_id, doc_id, lines, version, ranges, lastUpdatedAt, lastUpdatedBy,_callback) {
|
||||
if (_callback == null) { _callback = function(error) {}; }
|
||||
const timer = new Metrics.Timer("persistenceManager.setDoc");
|
||||
const callback = function(...args) {
|
||||
timer.done();
|
||||
return _callback(...args);
|
||||
};
|
||||
|
||||
url = "#{Settings.apis.web.url}/project/#{project_id}/doc/#{doc_id}"
|
||||
request {
|
||||
url: url
|
||||
method: "POST"
|
||||
json:
|
||||
lines: lines
|
||||
ranges: ranges
|
||||
version: version
|
||||
lastUpdatedBy: lastUpdatedBy
|
||||
lastUpdatedAt: lastUpdatedAt
|
||||
auth:
|
||||
user: Settings.apis.web.user
|
||||
pass: Settings.apis.web.pass
|
||||
const url = `${Settings.apis.web.url}/project/${project_id}/doc/${doc_id}`;
|
||||
return request({
|
||||
url,
|
||||
method: "POST",
|
||||
json: {
|
||||
lines,
|
||||
ranges,
|
||||
version,
|
||||
lastUpdatedBy,
|
||||
lastUpdatedAt
|
||||
},
|
||||
auth: {
|
||||
user: Settings.apis.web.user,
|
||||
pass: Settings.apis.web.pass,
|
||||
sendImmediately: true
|
||||
jar: false
|
||||
},
|
||||
jar: false,
|
||||
timeout: MAX_HTTP_REQUEST_LENGTH
|
||||
}, (error, res, body) ->
|
||||
updateMetric('setDoc', error, res)
|
||||
return callback(error) if error?
|
||||
if res.statusCode >= 200 and res.statusCode < 300
|
||||
return callback null
|
||||
else if res.statusCode == 404
|
||||
return callback(new Errors.NotFoundError("doc not found: #{url}"))
|
||||
else
|
||||
return callback(new Error("error accessing web API: #{url} #{res.statusCode}"))
|
||||
}, function(error, res, body) {
|
||||
updateMetric('setDoc', error, res);
|
||||
if (error != null) { return callback(error); }
|
||||
if ((res.statusCode >= 200) && (res.statusCode < 300)) {
|
||||
return callback(null);
|
||||
} else if (res.statusCode === 404) {
|
||||
return callback(new Errors.NotFoundError(`doc not found: ${url}`));
|
||||
} else {
|
||||
return callback(new Error(`error accessing web API: ${url} ${res.statusCode}`));
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
|
|
|
@ -1,34 +1,56 @@
|
|||
Settings = require('settings-sharelatex')
|
||||
logger = require('logger-sharelatex')
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS206: Consider reworking classes to avoid initClass
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
let Profiler;
|
||||
const Settings = require('settings-sharelatex');
|
||||
const logger = require('logger-sharelatex');
|
||||
|
||||
deltaMs = (ta, tb) ->
|
||||
nanoSeconds = (ta[0]-tb[0])*1e9 + (ta[1]-tb[1])
|
||||
milliSeconds = Math.floor(nanoSeconds*1e-6)
|
||||
return milliSeconds
|
||||
const deltaMs = function(ta, tb) {
|
||||
const nanoSeconds = ((ta[0]-tb[0])*1e9) + (ta[1]-tb[1]);
|
||||
const milliSeconds = Math.floor(nanoSeconds*1e-6);
|
||||
return milliSeconds;
|
||||
};
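// Worked example (illustrative): process.hrtime() tuples are [seconds, nanoseconds],
// so deltaMs([2, 500000000], [1, 0]) = floor(1.5e9 * 1e-6) = 1500ms.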
|
||||
|
||||
module.exports = class Profiler
|
||||
LOG_CUTOFF_TIME: 1000
|
||||
module.exports = (Profiler = (function() {
|
||||
Profiler = class Profiler {
|
||||
static initClass() {
|
||||
this.prototype.LOG_CUTOFF_TIME = 1000;
|
||||
}
|
||||
|
||||
constructor: (@name, @args) ->
|
||||
@t0 = @t = process.hrtime()
|
||||
@start = new Date()
|
||||
@updateTimes = []
|
||||
constructor(name, args) {
|
||||
this.name = name;
|
||||
this.args = args;
|
||||
this.t0 = (this.t = process.hrtime());
|
||||
this.start = new Date();
|
||||
this.updateTimes = [];
|
||||
}
|
||||
|
||||
log: (label) ->
|
||||
t1 = process.hrtime()
|
||||
dtMilliSec = deltaMs(t1, @t)
|
||||
@t = t1
|
||||
@updateTimes.push [label, dtMilliSec] # timings in ms
|
||||
return @ # make it chainable
|
||||
log(label) {
|
||||
const t1 = process.hrtime();
|
||||
const dtMilliSec = deltaMs(t1, this.t);
|
||||
this.t = t1;
|
||||
this.updateTimes.push([label, dtMilliSec]); // timings in ms
|
||||
return this; // make it chainable
|
||||
}
|
||||
|
||||
end: (message) ->
|
||||
totalTime = deltaMs(@t, @t0)
|
||||
if totalTime > @LOG_CUTOFF_TIME # log anything greater than cutoff
|
||||
args = {}
|
||||
for k,v of @args
|
||||
args[k] = v
|
||||
args.updateTimes = @updateTimes
|
||||
args.start = @start
|
||||
args.end = new Date()
|
||||
logger.log args, @name
|
||||
return totalTime
|
||||
end(message) {
|
||||
const totalTime = deltaMs(this.t, this.t0);
|
||||
if (totalTime > this.LOG_CUTOFF_TIME) { // log anything greater than cutoff
|
||||
const args = {};
|
||||
for (let k in this.args) {
|
||||
const v = this.args[k];
|
||||
args[k] = v;
|
||||
}
|
||||
args.updateTimes = this.updateTimes;
|
||||
args.start = this.start;
|
||||
args.end = new Date();
|
||||
logger.log(args, this.name);
|
||||
}
|
||||
return totalTime;
|
||||
}
|
||||
};
|
||||
Profiler.initClass();
|
||||
return Profiler;
|
||||
})());
|
||||
|
|
|
@ -1,73 +1,101 @@
request = require("request")
Settings = require('settings-sharelatex')
RedisManager = require("./RedisManager")
rclient = RedisManager.rclient
docUpdaterKeys = Settings.redis.documentupdater.key_schema
async = require("async")
ProjectManager = require("./ProjectManager")
_ = require("lodash")
logger = require("logger-sharelatex")

/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS205: Consider reworking code to avoid use of IIFEs
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const request = require("request");
const Settings = require('settings-sharelatex');
const RedisManager = require("./RedisManager");
const {
  rclient
} = RedisManager;
const docUpdaterKeys = Settings.redis.documentupdater.key_schema;
const async = require("async");
const ProjectManager = require("./ProjectManager");
const _ = require("lodash");
const logger = require("logger-sharelatex");

ProjectFlusher =
var ProjectFlusher = {

  # iterate over keys asynchronously using redis scan (non-blocking)
  # handle all the cluster nodes or single redis server
  _getKeys: (pattern, limit, callback) ->
    nodes = rclient.nodes?('master') || [ rclient ];
    doKeyLookupForNode = (node, cb) ->
      ProjectFlusher._getKeysFromNode node, pattern, limit, cb
    async.concatSeries nodes, doKeyLookupForNode, callback

  // iterate over keys asynchronously using redis scan (non-blocking)
  // handle all the cluster nodes or single redis server
  _getKeys(pattern, limit, callback) {
    const nodes = (typeof rclient.nodes === 'function' ? rclient.nodes('master') : undefined) || [ rclient ];
    const doKeyLookupForNode = (node, cb) => ProjectFlusher._getKeysFromNode(node, pattern, limit, cb);
    return async.concatSeries(nodes, doKeyLookupForNode, callback);
  },

  _getKeysFromNode: (node, pattern, limit = 1000, callback) ->
    cursor = 0 # redis iterator
    keySet = {} # use hash to avoid duplicate results
    batchSize = if limit? then Math.min(limit, 1000) else 1000
    # scan over all keys looking for pattern
    doIteration = (cb) ->
      node.scan cursor, "MATCH", pattern, "COUNT", batchSize, (error, reply) ->
        return callback(error) if error?
        [cursor, keys] = reply
        for key in keys
          keySet[key] = true
        keys = Object.keys(keySet)
        noResults = cursor == "0" # redis returns string results not numeric
        limitReached = (limit? && keys.length >= limit)
        if noResults || limitReached
          return callback(null, keys)
        else
          setTimeout doIteration, 10 # avoid hitting redis too hard
    doIteration()

  _getKeysFromNode(node, pattern, limit, callback) {
    if (limit == null) { limit = 1000; }
    let cursor = 0; // redis iterator
    const keySet = {}; // use hash to avoid duplicate results
    const batchSize = (limit != null) ? Math.min(limit, 1000) : 1000;
    // scan over all keys looking for pattern
    var doIteration = cb => // avoid hitting redis too hard
      node.scan(cursor, "MATCH", pattern, "COUNT", batchSize, function(error, reply) {
        let keys;
        if (error != null) { return callback(error); }
        [cursor, keys] = Array.from(reply);
        for (let key of Array.from(keys)) {
          keySet[key] = true;
        }
        keys = Object.keys(keySet);
        const noResults = cursor === "0"; // redis returns string results not numeric
        const limitReached = ((limit != null) && (keys.length >= limit));
        if (noResults || limitReached) {
          return callback(null, keys);
        } else {
          return setTimeout(doIteration, 10);
        }
      });
    return doIteration();
  },

  # extract ids from keys like DocsWithHistoryOps:57fd0b1f53a8396d22b2c24b
  # or docsInProject:{57fd0b1f53a8396d22b2c24b} (for redis cluster)
  _extractIds: (keyList) ->
    ids = for key in keyList
      m = key.match(/:\{?([0-9a-f]{24})\}?/) # extract object id
      m[1]
    return ids

  // extract ids from keys like DocsWithHistoryOps:57fd0b1f53a8396d22b2c24b
  // or docsInProject:{57fd0b1f53a8396d22b2c24b} (for redis cluster)
  _extractIds(keyList) {
    const ids = (() => {
      const result = [];
      for (let key of Array.from(keyList)) {
        const m = key.match(/:\{?([0-9a-f]{24})\}?/); // extract object id
        result.push(m[1]);
      }
      return result;
    })();
    return ids;
  },

  flushAllProjects: (options, callback)->
    logger.log options:options, "flushing all projects"
    ProjectFlusher._getKeys docUpdaterKeys.docsInProject({project_id:"*"}), options.limit, (error, project_keys) ->
      if error?
        logger.err err:error, "error getting keys for flushing"
        return callback(error)
      project_ids = ProjectFlusher._extractIds(project_keys)
      if options.dryRun
        return callback(null, project_ids)
      jobs = _.map project_ids, (project_id)->
        return (cb)->
          ProjectManager.flushAndDeleteProjectWithLocks project_id, {background:true}, cb
      async.parallelLimit async.reflectAll(jobs), options.concurrency, (error, results)->
        success = []
        failure = []
        _.each results, (result, i)->
          if result.error?
            failure.push(project_ids[i])
          else
            success.push(project_ids[i])
        logger.log success:success, failure:failure, "finished flushing all projects"
        return callback(error, {success:success, failure:failure})

  flushAllProjects(options, callback){
    logger.log({options}, "flushing all projects");
    return ProjectFlusher._getKeys(docUpdaterKeys.docsInProject({project_id:"*"}), options.limit, function(error, project_keys) {
      if (error != null) {
        logger.err({err:error}, "error getting keys for flushing");
        return callback(error);
      }
      const project_ids = ProjectFlusher._extractIds(project_keys);
      if (options.dryRun) {
        return callback(null, project_ids);
      }
      const jobs = _.map(project_ids, project_id => cb => ProjectManager.flushAndDeleteProjectWithLocks(project_id, {background:true}, cb));
      return async.parallelLimit(async.reflectAll(jobs), options.concurrency, function(error, results){
        const success = [];
        const failure = [];
        _.each(results, function(result, i){
          if (result.error != null) {
            return failure.push(project_ids[i]);
          } else {
            return success.push(project_ids[i]);
          }
        });
        logger.log({success, failure}, "finished flushing all projects");
        return callback(error, {success, failure});
      });
    });
  }
};

module.exports = ProjectFlusher
module.exports = ProjectFlusher;
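The cursor loop in _getKeysFromNode is easier to see in isolation. A minimal sketch of the same SCAN pattern, written against ioredis (the client choice is an assumption; the service actually wraps its client in redis-sharelatex):

// Sketch under assumptions; mirrors the dedupe-and-stop logic above.
const Redis = require("ioredis");
const redis = new Redis();

async function getKeys(pattern, limit = 1000) {
  const keySet = new Set(); // SCAN may return the same key twice, so dedupe
  let cursor = "0";
  do {
    // SCAN returns [nextCursor, keys]; a "0" cursor means iteration finished
    const [next, keys] = await redis.scan(cursor, "MATCH", pattern, "COUNT", 1000);
    cursor = next;
    keys.forEach(k => keySet.add(k));
  } while (cursor !== "0" && keySet.size < limit);
  return [...keySet];
}

Unlike KEYS, this never blocks the Redis server on a large keyspace, which is why the flusher can run against production nodes.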
@@ -1,79 +1,111 @@
Settings = require('settings-sharelatex')
projectHistoryKeys = Settings.redis?.project_history?.key_schema
rclient = require("redis-sharelatex").createClient(Settings.redis.project_history)
logger = require('logger-sharelatex')
metrics = require('./Metrics')

/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS103: Rewrite code to no longer use __guard__
 * DS201: Simplify complex destructure assignments
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let ProjectHistoryRedisManager;
const Settings = require('settings-sharelatex');
const projectHistoryKeys = __guard__(Settings.redis != null ? Settings.redis.project_history : undefined, x => x.key_schema);
const rclient = require("redis-sharelatex").createClient(Settings.redis.project_history);
const logger = require('logger-sharelatex');
const metrics = require('./Metrics');

module.exports = ProjectHistoryRedisManager =
  queueOps: (project_id, ops..., callback = (error, projectUpdateCount) ->) ->
    # Record metric for ops pushed onto queue
    for op in ops
      metrics.summary "redis.projectHistoryOps", op.length, {status: "push"}
    multi = rclient.multi()
    # Push the ops onto the project history queue
    multi.rpush projectHistoryKeys.projectHistoryOps({project_id}), ops...
    # To record the age of the oldest op on the queue set a timestamp if not
    # already present (SETNX).
    multi.setnx projectHistoryKeys.projectHistoryFirstOpTimestamp({project_id}), Date.now()
    multi.exec (error, result) ->
      return callback(error) if error?
      # return the number of entries pushed onto the project history queue
      callback null, result[0]

module.exports = (ProjectHistoryRedisManager = {
  queueOps(project_id, ...rest) {
    // Record metric for ops pushed onto queue
    const adjustedLength = Math.max(rest.length, 1), ops = rest.slice(0, adjustedLength - 1), val = rest[adjustedLength - 1], callback = val != null ? val : function(error, projectUpdateCount) {};
    for (let op of Array.from(ops)) {
      metrics.summary("redis.projectHistoryOps", op.length, {status: "push"});
    }
    const multi = rclient.multi();
    // Push the ops onto the project history queue
    multi.rpush(projectHistoryKeys.projectHistoryOps({project_id}), ...Array.from(ops));
    // To record the age of the oldest op on the queue set a timestamp if not
    // already present (SETNX).
    multi.setnx(projectHistoryKeys.projectHistoryFirstOpTimestamp({project_id}), Date.now());
    return multi.exec(function(error, result) {
      if (error != null) { return callback(error); }
      // return the number of entries pushed onto the project history queue
      return callback(null, result[0]);});
  },

  queueRenameEntity: (project_id, projectHistoryId, entity_type, entity_id, user_id, projectUpdate, callback) ->
    projectUpdate =
      pathname: projectUpdate.pathname
      new_pathname: projectUpdate.newPathname
      meta:
        user_id: user_id
  queueRenameEntity(project_id, projectHistoryId, entity_type, entity_id, user_id, projectUpdate, callback) {
    projectUpdate = {
      pathname: projectUpdate.pathname,
      new_pathname: projectUpdate.newPathname,
      meta: {
        user_id,
        ts: new Date()
      version: projectUpdate.version
      projectHistoryId: projectHistoryId
    projectUpdate[entity_type] = entity_id
      },
      version: projectUpdate.version,
      projectHistoryId
    };
    projectUpdate[entity_type] = entity_id;

    logger.log {project_id, projectUpdate}, "queue rename operation to project-history"
    jsonUpdate = JSON.stringify(projectUpdate)
    logger.log({project_id, projectUpdate}, "queue rename operation to project-history");
    const jsonUpdate = JSON.stringify(projectUpdate);

    ProjectHistoryRedisManager.queueOps project_id, jsonUpdate, callback
    return ProjectHistoryRedisManager.queueOps(project_id, jsonUpdate, callback);
  },

  queueAddEntity: (project_id, projectHistoryId, entity_type, entitiy_id, user_id, projectUpdate, callback = (error) ->) ->
    projectUpdate =
      pathname: projectUpdate.pathname
      docLines: projectUpdate.docLines
      url: projectUpdate.url
      meta:
        user_id: user_id
  queueAddEntity(project_id, projectHistoryId, entity_type, entitiy_id, user_id, projectUpdate, callback) {
    if (callback == null) { callback = function(error) {}; }
    projectUpdate = {
      pathname: projectUpdate.pathname,
      docLines: projectUpdate.docLines,
      url: projectUpdate.url,
      meta: {
        user_id,
        ts: new Date()
      version: projectUpdate.version
      projectHistoryId: projectHistoryId
    projectUpdate[entity_type] = entitiy_id
      },
      version: projectUpdate.version,
      projectHistoryId
    };
    projectUpdate[entity_type] = entitiy_id;

    logger.log {project_id, projectUpdate}, "queue add operation to project-history"
    jsonUpdate = JSON.stringify(projectUpdate)
    logger.log({project_id, projectUpdate}, "queue add operation to project-history");
    const jsonUpdate = JSON.stringify(projectUpdate);

    ProjectHistoryRedisManager.queueOps project_id, jsonUpdate, callback
    return ProjectHistoryRedisManager.queueOps(project_id, jsonUpdate, callback);
  },

  queueResyncProjectStructure: (project_id, projectHistoryId, docs, files, callback) ->
    logger.log {project_id, docs, files}, "queue project structure resync"
    projectUpdate =
      resyncProjectStructure: { docs, files }
      projectHistoryId: projectHistoryId
      meta:
  queueResyncProjectStructure(project_id, projectHistoryId, docs, files, callback) {
    logger.log({project_id, docs, files}, "queue project structure resync");
    const projectUpdate = {
      resyncProjectStructure: { docs, files },
      projectHistoryId,
      meta: {
        ts: new Date()
    jsonUpdate = JSON.stringify projectUpdate
    ProjectHistoryRedisManager.queueOps project_id, jsonUpdate, callback
      }
    };
    const jsonUpdate = JSON.stringify(projectUpdate);
    return ProjectHistoryRedisManager.queueOps(project_id, jsonUpdate, callback);
  },

  queueResyncDocContent: (project_id, projectHistoryId, doc_id, lines, version, pathname, callback) ->
    logger.log {project_id, doc_id, lines, version, pathname}, "queue doc content resync"
    projectUpdate =
      resyncDocContent:
  queueResyncDocContent(project_id, projectHistoryId, doc_id, lines, version, pathname, callback) {
    logger.log({project_id, doc_id, lines, version, pathname}, "queue doc content resync");
    const projectUpdate = {
      resyncDocContent: {
        content: lines.join("\n"),
        version: version
      projectHistoryId: projectHistoryId
      path: pathname
      doc: doc_id
      meta:
        version
      },
      projectHistoryId,
      path: pathname,
      doc: doc_id,
      meta: {
        ts: new Date()
    jsonUpdate = JSON.stringify projectUpdate
    ProjectHistoryRedisManager.queueOps project_id, jsonUpdate, callback
      }
    };
    const jsonUpdate = JSON.stringify(projectUpdate);
    return ProjectHistoryRedisManager.queueOps(project_id, jsonUpdate, callback);
  }
});

function __guard__(value, transform) {
  return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined;
}
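queueOps pairs RPUSH with SETNX so the timestamp records when the queue first became non-empty: SETNX only writes if the key is absent, so later pushes never overwrite the age of the oldest op. A reduced sketch of that pairing, using ioredis and assumed key shapes (both are assumptions, not this service's actual client or key schema):

// Sketch under assumptions; same RPUSH + SETNX idea as queueOps above.
const Redis = require("ioredis");
const redis = new Redis();

async function queueOps(projectId, ...ops) {
  const results = await redis
    .multi()
    .rpush(`ProjectHistory:Ops:{${projectId}}`, ...ops)                 // key shape assumed
    .setnx(`ProjectHistory:FirstOpTimestamp:{${projectId}}`, Date.now()) // only set on first op
    .exec();
  return results[0][1]; // queue length after the push, as queueOps reports
}

A consumer can then read the FirstOpTimestamp key to decide how stale the queue is without scanning its contents.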
@@ -1,168 +1,225 @@
RedisManager = require "./RedisManager"
ProjectHistoryRedisManager = require "./ProjectHistoryRedisManager"
DocumentManager = require "./DocumentManager"
HistoryManager = require "./HistoryManager"
async = require "async"
logger = require "logger-sharelatex"
Metrics = require "./Metrics"
Errors = require "./Errors"

/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let ProjectManager;
const RedisManager = require("./RedisManager");
const ProjectHistoryRedisManager = require("./ProjectHistoryRedisManager");
const DocumentManager = require("./DocumentManager");
const HistoryManager = require("./HistoryManager");
const async = require("async");
const logger = require("logger-sharelatex");
const Metrics = require("./Metrics");
const Errors = require("./Errors");

module.exports = ProjectManager =
  flushProjectWithLocks: (project_id, _callback = (error) ->) ->
    timer = new Metrics.Timer("projectManager.flushProjectWithLocks")
    callback = (args...) ->
      timer.done()
      _callback(args...)

module.exports = (ProjectManager = {
  flushProjectWithLocks(project_id, _callback) {
    if (_callback == null) { _callback = function(error) {}; }
    const timer = new Metrics.Timer("projectManager.flushProjectWithLocks");
    const callback = function(...args) {
      timer.done();
      return _callback(...Array.from(args || []));
    };

    RedisManager.getDocIdsInProject project_id, (error, doc_ids) ->
      return callback(error) if error?
      jobs = []
      errors = []
      for doc_id in (doc_ids or [])
        do (doc_id) ->
          jobs.push (callback) ->
            DocumentManager.flushDocIfLoadedWithLock project_id, doc_id, (error) ->
              if error? and error instanceof Errors.NotFoundError
                logger.warn err: error, project_id: project_id, doc_id: doc_id, "found deleted doc when flushing"
                callback()
              else if error?
                logger.error err: error, project_id: project_id, doc_id: doc_id, "error flushing doc"
                errors.push(error)
                callback()
              else
                callback()

    return RedisManager.getDocIdsInProject(project_id, function(error, doc_ids) {
      if (error != null) { return callback(error); }
      const jobs = [];
      const errors = [];
      for (let doc_id of Array.from((doc_ids || []))) {
        ((doc_id => jobs.push(callback => DocumentManager.flushDocIfLoadedWithLock(project_id, doc_id, function(error) {
          if ((error != null) && error instanceof Errors.NotFoundError) {
            logger.warn({err: error, project_id, doc_id}, "found deleted doc when flushing");
            return callback();
          } else if (error != null) {
            logger.error({err: error, project_id, doc_id}, "error flushing doc");
            errors.push(error);
            return callback();
          } else {
            return callback();
          }
        }))))(doc_id);
      }

      logger.log project_id: project_id, doc_ids: doc_ids, "flushing docs"
      async.series jobs, () ->
        if errors.length > 0
          callback new Error("Errors flushing docs. See log for details")
        else
          callback(null)
      logger.log({project_id, doc_ids}, "flushing docs");
      return async.series(jobs, function() {
        if (errors.length > 0) {
          return callback(new Error("Errors flushing docs. See log for details"));
        } else {
          return callback(null);
        }
      });
    });
  },

  flushAndDeleteProjectWithLocks: (project_id, options, _callback = (error) ->) ->
    timer = new Metrics.Timer("projectManager.flushAndDeleteProjectWithLocks")
    callback = (args...) ->
      timer.done()
      _callback(args...)
  flushAndDeleteProjectWithLocks(project_id, options, _callback) {
    if (_callback == null) { _callback = function(error) {}; }
    const timer = new Metrics.Timer("projectManager.flushAndDeleteProjectWithLocks");
    const callback = function(...args) {
      timer.done();
      return _callback(...Array.from(args || []));
    };

    RedisManager.getDocIdsInProject project_id, (error, doc_ids) ->
      return callback(error) if error?
      jobs = []
      errors = []
      for doc_id in (doc_ids or [])
        do (doc_id) ->
          jobs.push (callback) ->
            DocumentManager.flushAndDeleteDocWithLock project_id, doc_id, {}, (error) ->
              if error?
                logger.error err: error, project_id: project_id, doc_id: doc_id, "error deleting doc"
                errors.push(error)
              callback()
    return RedisManager.getDocIdsInProject(project_id, function(error, doc_ids) {
      if (error != null) { return callback(error); }
      const jobs = [];
      const errors = [];
      for (let doc_id of Array.from((doc_ids || []))) {
        ((doc_id => jobs.push(callback => DocumentManager.flushAndDeleteDocWithLock(project_id, doc_id, {}, function(error) {
          if (error != null) {
            logger.error({err: error, project_id, doc_id}, "error deleting doc");
            errors.push(error);
          }
          return callback();
        }))))(doc_id);
      }

      logger.log project_id: project_id, doc_ids: doc_ids, "deleting docs"
      async.series jobs, () ->
        # When deleting the project here we want to ensure that project
        # history is completely flushed because the project may be
        # deleted in web after this call completes, and so further
        # attempts to flush would fail after that.
        HistoryManager.flushProjectChanges project_id, options, (error) ->
          if errors.length > 0
            callback new Error("Errors deleting docs. See log for details")
          else if error?
            callback(error)
          else
            callback(null)
      logger.log({project_id, doc_ids}, "deleting docs");
      return async.series(jobs, () => // When deleting the project here we want to ensure that project
        // history is completely flushed because the project may be
        // deleted in web after this call completes, and so further
        // attempts to flush would fail after that.
        HistoryManager.flushProjectChanges(project_id, options, function(error) {
          if (errors.length > 0) {
            return callback(new Error("Errors deleting docs. See log for details"));
          } else if (error != null) {
            return callback(error);
          } else {
            return callback(null);
          }
        }));
    });
  },
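Both flush methods above use the same shape: every per-doc job swallows its error into a shared errors array and always calls back cleanly, so one bad doc cannot abort the series. Stripped of the locking and logging, the pattern looks like this (docIds and flushOne are placeholders for illustration):

// Reduced sketch of the collect-errors-but-keep-going pattern above.
const async = require("async");

function flushAll(docIds, flushOne, callback) {
  const errors = [];
  const jobs = docIds.map(docId => cb =>
    flushOne(docId, err => {
      if (err != null) { errors.push(err); } // record the failure
      cb(); // never pass the error on, so the series continues
    })
  );
  async.series(jobs, () => {
    if (errors.length > 0) {
      return callback(new Error("Errors flushing docs. See log for details"));
    }
    callback(null);
  });
}

The caller still learns that something failed, but only after every doc has had its chance to flush.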

  queueFlushAndDeleteProject: (project_id, callback = (error) ->) ->
    RedisManager.queueFlushAndDeleteProject project_id, (error) ->
      if error?
        logger.error {project_id: project_id, error:error}, "error adding project to flush and delete queue"
        return callback(error)
      Metrics.inc "queued-delete"
      callback()
  queueFlushAndDeleteProject(project_id, callback) {
    if (callback == null) { callback = function(error) {}; }
    return RedisManager.queueFlushAndDeleteProject(project_id, function(error) {
      if (error != null) {
        logger.error({project_id, error}, "error adding project to flush and delete queue");
        return callback(error);
      }
      Metrics.inc("queued-delete");
      return callback();
    });
  },

  getProjectDocsTimestamps: (project_id, callback = (error) ->) ->
    RedisManager.getDocIdsInProject project_id, (error, doc_ids) ->
      return callback(error) if error?
      return callback(null, []) if !doc_ids?.length
      RedisManager.getDocTimestamps doc_ids, (error, timestamps) ->
        return callback(error) if error?
        callback(null, timestamps)
  getProjectDocsTimestamps(project_id, callback) {
    if (callback == null) { callback = function(error) {}; }
    return RedisManager.getDocIdsInProject(project_id, function(error, doc_ids) {
      if (error != null) { return callback(error); }
      if (!(doc_ids != null ? doc_ids.length : undefined)) { return callback(null, []); }
      return RedisManager.getDocTimestamps(doc_ids, function(error, timestamps) {
        if (error != null) { return callback(error); }
        return callback(null, timestamps);
      });
    });
  },

  getProjectDocsAndFlushIfOld: (project_id, projectStateHash, excludeVersions = {}, _callback = (error, docs) ->) ->
    timer = new Metrics.Timer("projectManager.getProjectDocsAndFlushIfOld")
    callback = (args...) ->
      timer.done()
      _callback(args...)
  getProjectDocsAndFlushIfOld(project_id, projectStateHash, excludeVersions, _callback) {
    if (excludeVersions == null) { excludeVersions = {}; }
    if (_callback == null) { _callback = function(error, docs) {}; }
    const timer = new Metrics.Timer("projectManager.getProjectDocsAndFlushIfOld");
    const callback = function(...args) {
      timer.done();
      return _callback(...Array.from(args || []));
    };

    RedisManager.checkOrSetProjectState project_id, projectStateHash, (error, projectStateChanged) ->
      if error?
        logger.error err: error, project_id: project_id, "error getting/setting project state in getProjectDocsAndFlushIfOld"
        return callback(error)
      # we can't return docs if project structure has changed
      if projectStateChanged
        return callback Errors.ProjectStateChangedError("project state changed")
      # project structure hasn't changed, return doc content from redis
      RedisManager.getDocIdsInProject project_id, (error, doc_ids) ->
        if error?
          logger.error err: error, project_id: project_id, "error getting doc ids in getProjectDocs"
          return callback(error)
        jobs = []
        for doc_id in doc_ids or []
          do (doc_id) ->
            jobs.push (cb) ->
              # get the doc lines from redis
              DocumentManager.getDocAndFlushIfOldWithLock project_id, doc_id, (err, lines, version) ->
                if err?
                  logger.error err:err, project_id: project_id, doc_id: doc_id, "error getting project doc lines in getProjectDocsAndFlushIfOld"
                  return cb(err)
                doc = {_id:doc_id, lines:lines, v:version} # create a doc object to return
                cb(null, doc)
        async.series jobs, (error, docs) ->
          return callback(error) if error?
          callback(null, docs)

    return RedisManager.checkOrSetProjectState(project_id, projectStateHash, function(error, projectStateChanged) {
      if (error != null) {
        logger.error({err: error, project_id}, "error getting/setting project state in getProjectDocsAndFlushIfOld");
        return callback(error);
      }
      // we can't return docs if project structure has changed
      if (projectStateChanged) {
        return callback(Errors.ProjectStateChangedError("project state changed"));
      }
      // project structure hasn't changed, return doc content from redis
      return RedisManager.getDocIdsInProject(project_id, function(error, doc_ids) {
        if (error != null) {
          logger.error({err: error, project_id}, "error getting doc ids in getProjectDocs");
          return callback(error);
        }
        const jobs = [];
        for (let doc_id of Array.from(doc_ids || [])) {
          ((doc_id => jobs.push(cb => // get the doc lines from redis
            DocumentManager.getDocAndFlushIfOldWithLock(project_id, doc_id, function(err, lines, version) {
              if (err != null) {
                logger.error({err, project_id, doc_id}, "error getting project doc lines in getProjectDocsAndFlushIfOld");
                return cb(err);
              }
              const doc = {_id:doc_id, lines, v:version}; // create a doc object to return
              return cb(null, doc);
            }))))(doc_id);
        }
        return async.series(jobs, function(error, docs) {
          if (error != null) { return callback(error); }
          return callback(null, docs);
        });
      });
    });
  },

  clearProjectState: (project_id, callback = (error) ->) ->
    RedisManager.clearProjectState project_id, callback
  clearProjectState(project_id, callback) {
    if (callback == null) { callback = function(error) {}; }
    return RedisManager.clearProjectState(project_id, callback);
  },

  updateProjectWithLocks: (project_id, projectHistoryId, user_id, docUpdates, fileUpdates, version, _callback = (error) ->) ->
    timer = new Metrics.Timer("projectManager.updateProject")
    callback = (args...) ->
      timer.done()
      _callback(args...)
  updateProjectWithLocks(project_id, projectHistoryId, user_id, docUpdates, fileUpdates, version, _callback) {
    if (_callback == null) { _callback = function(error) {}; }
    const timer = new Metrics.Timer("projectManager.updateProject");
    const callback = function(...args) {
      timer.done();
      return _callback(...Array.from(args || []));
    };

    project_version = version
    project_subversion = 0 # project versions can have multiple operations
    const project_version = version;
    let project_subversion = 0; // project versions can have multiple operations

    project_ops_length = 0
    let project_ops_length = 0;

    handleDocUpdate = (projectUpdate, cb) ->
      doc_id = projectUpdate.id
      projectUpdate.version = "#{project_version}.#{project_subversion++}"
      if projectUpdate.docLines?
        ProjectHistoryRedisManager.queueAddEntity project_id, projectHistoryId, 'doc', doc_id, user_id, projectUpdate, (error, count) ->
          project_ops_length = count
          cb(error)
      else
        DocumentManager.renameDocWithLock project_id, doc_id, user_id, projectUpdate, projectHistoryId, (error, count) ->
          project_ops_length = count
          cb(error)
    const handleDocUpdate = function(projectUpdate, cb) {
      const doc_id = projectUpdate.id;
      projectUpdate.version = `${project_version}.${project_subversion++}`;
      if (projectUpdate.docLines != null) {
        return ProjectHistoryRedisManager.queueAddEntity(project_id, projectHistoryId, 'doc', doc_id, user_id, projectUpdate, function(error, count) {
          project_ops_length = count;
          return cb(error);
        });
      } else {
        return DocumentManager.renameDocWithLock(project_id, doc_id, user_id, projectUpdate, projectHistoryId, function(error, count) {
          project_ops_length = count;
          return cb(error);
        });
      }
    };

    handleFileUpdate = (projectUpdate, cb) ->
      file_id = projectUpdate.id
      projectUpdate.version = "#{project_version}.#{project_subversion++}"
      if projectUpdate.url?
        ProjectHistoryRedisManager.queueAddEntity project_id, projectHistoryId, 'file', file_id, user_id, projectUpdate, (error, count) ->
          project_ops_length = count
          cb(error)
      else
        ProjectHistoryRedisManager.queueRenameEntity project_id, projectHistoryId, 'file', file_id, user_id, projectUpdate, (error, count) ->
          project_ops_length = count
          cb(error)
    const handleFileUpdate = function(projectUpdate, cb) {
      const file_id = projectUpdate.id;
      projectUpdate.version = `${project_version}.${project_subversion++}`;
      if (projectUpdate.url != null) {
        return ProjectHistoryRedisManager.queueAddEntity(project_id, projectHistoryId, 'file', file_id, user_id, projectUpdate, function(error, count) {
          project_ops_length = count;
          return cb(error);
        });
      } else {
        return ProjectHistoryRedisManager.queueRenameEntity(project_id, projectHistoryId, 'file', file_id, user_id, projectUpdate, function(error, count) {
          project_ops_length = count;
          return cb(error);
        });
      }
    };

    async.eachSeries docUpdates, handleDocUpdate, (error) ->
      return callback(error) if error?
      async.eachSeries fileUpdates, handleFileUpdate, (error) ->
        return callback(error) if error?
        if HistoryManager.shouldFlushHistoryOps(project_ops_length, docUpdates.length + fileUpdates.length, HistoryManager.FLUSH_PROJECT_EVERY_N_OPS)
          HistoryManager.flushProjectChangesAsync project_id
        callback()
    return async.eachSeries(docUpdates, handleDocUpdate, function(error) {
      if (error != null) { return callback(error); }
      return async.eachSeries(fileUpdates, handleFileUpdate, function(error) {
        if (error != null) { return callback(error); }
        if (HistoryManager.shouldFlushHistoryOps(project_ops_length, docUpdates.length + fileUpdates.length, HistoryManager.FLUSH_PROJECT_EVERY_N_OPS)) {
          HistoryManager.flushProjectChangesAsync(project_id);
        }
        return callback();
      });
    });
  }
});
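The version.subversion stamp in updateProjectWithLocks lets several structure operations share one project version while still being totally ordered. A compact illustration of just that numbering (the update objects here are placeholders):

// Illustration only: three ops under project version 42 get 42.0, 42.1, 42.2.
let project_subversion = 0;
const project_version = 42;
const updates = [{}, {}, {}];
for (const update of updates) {
  update.version = `${project_version}.${project_subversion++}`;
}
console.log(updates.map(u => u.version)); // [ '42.0', '42.1', '42.2' ]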
@@ -1,76 +1,112 @@
RangesTracker = require "./RangesTracker"
logger = require "logger-sharelatex"
_ = require "lodash"

/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let RangesManager;
const RangesTracker = require("./RangesTracker");
const logger = require("logger-sharelatex");
const _ = require("lodash");

module.exports = RangesManager =
  MAX_COMMENTS: 500
  MAX_CHANGES: 2000

module.exports = (RangesManager = {
  MAX_COMMENTS: 500,
  MAX_CHANGES: 2000,

  applyUpdate: (project_id, doc_id, entries = {}, updates = [], newDocLines, callback = (error, new_entries, ranges_were_collapsed) ->) ->
    {changes, comments} = _.cloneDeep(entries)
    rangesTracker = new RangesTracker(changes, comments)
    emptyRangeCountBefore = RangesManager._emptyRangesCount(rangesTracker)
    for update in updates
      rangesTracker.track_changes = !!update.meta.tc
      if !!update.meta.tc
        rangesTracker.setIdSeed(update.meta.tc)
      for op in update.op
        try
          rangesTracker.applyOp(op, { user_id: update.meta?.user_id })
        catch error
          return callback(error)

  applyUpdate(project_id, doc_id, entries, updates, newDocLines, callback) {
    let error;
    if (entries == null) { entries = {}; }
    if (updates == null) { updates = []; }
    if (callback == null) { callback = function(error, new_entries, ranges_were_collapsed) {}; }
    const {changes, comments} = _.cloneDeep(entries);
    const rangesTracker = new RangesTracker(changes, comments);
    const emptyRangeCountBefore = RangesManager._emptyRangesCount(rangesTracker);
    for (let update of Array.from(updates)) {
      rangesTracker.track_changes = !!update.meta.tc;
      if (!!update.meta.tc) {
        rangesTracker.setIdSeed(update.meta.tc);
      }
      for (let op of Array.from(update.op)) {
        try {
          rangesTracker.applyOp(op, { user_id: (update.meta != null ? update.meta.user_id : undefined) });
        } catch (error1) {
          error = error1;
          return callback(error);
        }
      }
    }

    if rangesTracker.changes?.length > RangesManager.MAX_CHANGES or rangesTracker.comments?.length > RangesManager.MAX_COMMENTS
      return callback new Error("too many comments or tracked changes")
    if (((rangesTracker.changes != null ? rangesTracker.changes.length : undefined) > RangesManager.MAX_CHANGES) || ((rangesTracker.comments != null ? rangesTracker.comments.length : undefined) > RangesManager.MAX_COMMENTS)) {
      return callback(new Error("too many comments or tracked changes"));
    }

    try
      # This is a consistency check that all of our ranges and
      # comments still match the corresponding text
      rangesTracker.validate(newDocLines.join("\n"))
    catch error
      logger.error {err: error, project_id, doc_id, newDocLines, updates}, "error validating ranges"
      return callback(error)
    try {
      // This is a consistency check that all of our ranges and
      // comments still match the corresponding text
      rangesTracker.validate(newDocLines.join("\n"));
    } catch (error2) {
      error = error2;
      logger.error({err: error, project_id, doc_id, newDocLines, updates}, "error validating ranges");
      return callback(error);
    }

    emptyRangeCountAfter = RangesManager._emptyRangesCount(rangesTracker)
    rangesWereCollapsed = emptyRangeCountAfter > emptyRangeCountBefore
    response = RangesManager._getRanges rangesTracker
    logger.log {project_id, doc_id, changesCount: response.changes?.length, commentsCount: response.comments?.length, rangesWereCollapsed}, "applied updates to ranges"
    callback null, response, rangesWereCollapsed
    const emptyRangeCountAfter = RangesManager._emptyRangesCount(rangesTracker);
    const rangesWereCollapsed = emptyRangeCountAfter > emptyRangeCountBefore;
    const response = RangesManager._getRanges(rangesTracker);
    logger.log({project_id, doc_id, changesCount: (response.changes != null ? response.changes.length : undefined), commentsCount: (response.comments != null ? response.comments.length : undefined), rangesWereCollapsed}, "applied updates to ranges");
    return callback(null, response, rangesWereCollapsed);
  },

  acceptChanges: (change_ids, ranges, callback = (error, ranges) ->) ->
    {changes, comments} = ranges
    logger.log "accepting #{ change_ids.length } changes in ranges"
    rangesTracker = new RangesTracker(changes, comments)
    rangesTracker.removeChangeIds(change_ids)
    response = RangesManager._getRanges(rangesTracker)
    callback null, response
  acceptChanges(change_ids, ranges, callback) {
    if (callback == null) { callback = function(error, ranges) {}; }
    const {changes, comments} = ranges;
    logger.log(`accepting ${ change_ids.length } changes in ranges`);
    const rangesTracker = new RangesTracker(changes, comments);
    rangesTracker.removeChangeIds(change_ids);
    const response = RangesManager._getRanges(rangesTracker);
    return callback(null, response);
  },

  deleteComment: (comment_id, ranges, callback = (error, ranges) ->) ->
    {changes, comments} = ranges
    logger.log {comment_id}, "deleting comment in ranges"
    rangesTracker = new RangesTracker(changes, comments)
    rangesTracker.removeCommentId(comment_id)
    response = RangesManager._getRanges(rangesTracker)
    callback null, response
  deleteComment(comment_id, ranges, callback) {
    if (callback == null) { callback = function(error, ranges) {}; }
    const {changes, comments} = ranges;
    logger.log({comment_id}, "deleting comment in ranges");
    const rangesTracker = new RangesTracker(changes, comments);
    rangesTracker.removeCommentId(comment_id);
    const response = RangesManager._getRanges(rangesTracker);
    return callback(null, response);
  },

  _getRanges: (rangesTracker) ->
    # Return the minimal data structure needed, since most documents won't have any
    # changes or comments
    response = {}
    if rangesTracker.changes?.length > 0
      response ?= {}
      response.changes = rangesTracker.changes
    if rangesTracker.comments?.length > 0
      response ?= {}
      response.comments = rangesTracker.comments
    return response
  _getRanges(rangesTracker) {
    // Return the minimal data structure needed, since most documents won't have any
    // changes or comments
    let response = {};
    if ((rangesTracker.changes != null ? rangesTracker.changes.length : undefined) > 0) {
      if (response == null) { response = {}; }
      response.changes = rangesTracker.changes;
    }
    if ((rangesTracker.comments != null ? rangesTracker.comments.length : undefined) > 0) {
      if (response == null) { response = {}; }
      response.comments = rangesTracker.comments;
    }
    return response;
  },

  _emptyRangesCount: (ranges) ->
    count = 0
    for comment in (ranges.comments or [])
      if comment.op.c == ""
        count++
    for change in (ranges.changes or []) when change.op.i?
      if change.op.i == ""
        count++
    return count
  _emptyRangesCount(ranges) {
    let count = 0;
    for (let comment of Array.from((ranges.comments || []))) {
      if (comment.op.c === "") {
        count++;
      }
    }
    for (let change of Array.from((ranges.changes || []))) {
      if (change.op.i != null) {
        if (change.op.i === "") {
          count++;
        }
      }
    }
    return count;
  }
});
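applyUpdate detects collapsed ranges by counting empty comments and empty tracked inserts before and after the ops are applied; a rise means some range's text was deleted out from under it. A reduced sketch of that comparison, using the same shapes as _emptyRangesCount above (the sample ranges are made-up data):

// Count ranges whose text has become empty.
function emptyRangesCount(ranges) {
  let count = 0;
  for (const comment of ranges.comments || []) {
    if (comment.op.c === "") { count++; }
  }
  for (const change of ranges.changes || []) {
    if (change.op.i != null && change.op.i === "") { count++; }
  }
  return count;
}

const before = emptyRangesCount({ comments: [{ op: { c: "note" } }], changes: [] });
const after = emptyRangesCount({ comments: [{ op: { c: "" } }], changes: [] });
console.log(after > before); // true: the comment's range was collapsed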
File diff suppressed because it is too large
@@ -1,39 +1,58 @@
Settings = require('settings-sharelatex')
logger = require('logger-sharelatex')
Metrics = require('./Metrics')

/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let RateLimiter;
const Settings = require('settings-sharelatex');
const logger = require('logger-sharelatex');
const Metrics = require('./Metrics');

module.exports = class RateLimiter
module.exports = (RateLimiter = class RateLimiter {

  constructor: (number = 10) ->
    @ActiveWorkerCount = 0
    @CurrentWorkerLimit = number
    @BaseWorkerCount = number
  constructor(number) {
    if (number == null) { number = 10; }
    this.ActiveWorkerCount = 0;
    this.CurrentWorkerLimit = number;
    this.BaseWorkerCount = number;
  }

  _adjustLimitUp: () ->
    @CurrentWorkerLimit += 0.1 # allow target worker limit to increase gradually
    Metrics.gauge "currentLimit", Math.ceil(@CurrentWorkerLimit)
  _adjustLimitUp() {
    this.CurrentWorkerLimit += 0.1; // allow target worker limit to increase gradually
    return Metrics.gauge("currentLimit", Math.ceil(this.CurrentWorkerLimit));
  }

  _adjustLimitDown: () ->
    @CurrentWorkerLimit = Math.max @BaseWorkerCount, (@CurrentWorkerLimit * 0.9)
    logger.log {currentLimit: Math.ceil(@CurrentWorkerLimit)}, "reducing rate limit"
    Metrics.gauge "currentLimit", Math.ceil(@CurrentWorkerLimit)
  _adjustLimitDown() {
    this.CurrentWorkerLimit = Math.max(this.BaseWorkerCount, (this.CurrentWorkerLimit * 0.9));
    logger.log({currentLimit: Math.ceil(this.CurrentWorkerLimit)}, "reducing rate limit");
    return Metrics.gauge("currentLimit", Math.ceil(this.CurrentWorkerLimit));
  }

  _trackAndRun: (task, callback = () ->) ->
    @ActiveWorkerCount++
    Metrics.gauge "processingUpdates", @ActiveWorkerCount
    task (err) =>
      @ActiveWorkerCount--
      Metrics.gauge "processingUpdates", @ActiveWorkerCount
      callback(err)
  _trackAndRun(task, callback) {
    if (callback == null) { callback = function() {}; }
    this.ActiveWorkerCount++;
    Metrics.gauge("processingUpdates", this.ActiveWorkerCount);
    return task(err => {
      this.ActiveWorkerCount--;
      Metrics.gauge("processingUpdates", this.ActiveWorkerCount);
      return callback(err);
    });
  }

  run: (task, callback) ->
    if @ActiveWorkerCount < @CurrentWorkerLimit
      @_trackAndRun task # below the limit, just put the task in the background
      callback() # return immediately
      if @CurrentWorkerLimit > @BaseWorkerCount
        @_adjustLimitDown()
    else
      logger.log {active: @ActiveWorkerCount, currentLimit: Math.ceil(@CurrentWorkerLimit)}, "hit rate limit"
      @_trackAndRun task, (err) =>
        @_adjustLimitUp() if !err? # don't increment rate limit if there was an error
        callback(err) # only return after task completes
  run(task, callback) {
    if (this.ActiveWorkerCount < this.CurrentWorkerLimit) {
      this._trackAndRun(task); // below the limit, just put the task in the background
      callback(); // return immediately
      if (this.CurrentWorkerLimit > this.BaseWorkerCount) {
        return this._adjustLimitDown();
      }
    } else {
      logger.log({active: this.ActiveWorkerCount, currentLimit: Math.ceil(this.CurrentWorkerLimit)}, "hit rate limit");
      return this._trackAndRun(task, err => {
        if ((err == null)) { this._adjustLimitUp(); } // don't increment rate limit if there was an error
        return callback(err);
      }); // only return after task completes
    }
  }
});
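The limiter above is adaptive: below the limit, tasks run in the background and the caller returns immediately; at the limit, the caller is held until the task finishes, and each error-free over-limit completion nudges CurrentWorkerLimit up by 0.1, while spare capacity decays it by 10% back toward the base. A short usage sketch inside this service (the task and timings are made up):

// Hypothetical usage: 15 tasks through a limiter with a base of 10.
// The first 10 start immediately; the rest are rate-limited and only
// raise CurrentWorkerLimit when they complete without error.
const RateLimiter = require("./RateLimiter");
const limiter = new RateLimiter(10);

const task = done => setTimeout(() => done(null), 50); // pretend work

for (let i = 0; i < 15; i++) {
  limiter.run(task, err => {
    if (err != null) { console.error("task failed", err); }
  });
}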
@@ -1,52 +1,73 @@
Settings = require('settings-sharelatex')
rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater)
pubsubClient = require("redis-sharelatex").createClient(Settings.redis.pubsub)
Keys = Settings.redis.documentupdater.key_schema
logger = require('logger-sharelatex')
os = require "os"
crypto = require "crypto"
metrics = require('./Metrics')

/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let RealTimeRedisManager;
const Settings = require('settings-sharelatex');
const rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater);
const pubsubClient = require("redis-sharelatex").createClient(Settings.redis.pubsub);
const Keys = Settings.redis.documentupdater.key_schema;
const logger = require('logger-sharelatex');
const os = require("os");
const crypto = require("crypto");
const metrics = require('./Metrics');

HOST = os.hostname()
RND = crypto.randomBytes(4).toString('hex') # generate a random key for this process
COUNT = 0
const HOST = os.hostname();
const RND = crypto.randomBytes(4).toString('hex'); // generate a random key for this process
let COUNT = 0;

MAX_OPS_PER_ITERATION = 8 # process a limited number of ops for safety
const MAX_OPS_PER_ITERATION = 8; // process a limited number of ops for safety

module.exports = RealTimeRedisManager =
  getPendingUpdatesForDoc : (doc_id, callback)->
    multi = rclient.multi()
    multi.lrange Keys.pendingUpdates({doc_id}), 0, (MAX_OPS_PER_ITERATION-1)
    multi.ltrim Keys.pendingUpdates({doc_id}), MAX_OPS_PER_ITERATION, -1
    multi.exec (error, replys) ->
      return callback(error) if error?
      jsonUpdates = replys[0]
      for jsonUpdate in jsonUpdates
        # record metric for each update removed from queue
        metrics.summary "redis.pendingUpdates", jsonUpdate.length, {status: "pop"}
      updates = []
      for jsonUpdate in jsonUpdates
        try
          update = JSON.parse jsonUpdate
        catch e
          return callback e
        updates.push update
      callback error, updates

module.exports = (RealTimeRedisManager = {
  getPendingUpdatesForDoc(doc_id, callback){
    const multi = rclient.multi();
    multi.lrange(Keys.pendingUpdates({doc_id}), 0, (MAX_OPS_PER_ITERATION-1));
    multi.ltrim(Keys.pendingUpdates({doc_id}), MAX_OPS_PER_ITERATION, -1);
    return multi.exec(function(error, replys) {
      let jsonUpdate;
      if (error != null) { return callback(error); }
      const jsonUpdates = replys[0];
      for (jsonUpdate of Array.from(jsonUpdates)) {
        // record metric for each update removed from queue
        metrics.summary("redis.pendingUpdates", jsonUpdate.length, {status: "pop"});
      }
      const updates = [];
      for (jsonUpdate of Array.from(jsonUpdates)) {
        var update;
        try {
          update = JSON.parse(jsonUpdate);
        } catch (e) {
          return callback(e);
        }
        updates.push(update);
      }
      return callback(error, updates);
    });
  },

  getUpdatesLength: (doc_id, callback)->
    rclient.llen Keys.pendingUpdates({doc_id}), callback
  getUpdatesLength(doc_id, callback){
    return rclient.llen(Keys.pendingUpdates({doc_id}), callback);
  },

  sendData: (data) ->
    # create a unique message id using a counter
    message_id = "doc:#{HOST}:#{RND}-#{COUNT++}"
    data?._id = message_id
  sendData(data) {
    // create a unique message id using a counter
    const message_id = `doc:${HOST}:${RND}-${COUNT++}`;
    if (data != null) {
      data._id = message_id;
    }

    blob = JSON.stringify(data)
    metrics.summary "redis.publish.applied-ops", blob.length
    const blob = JSON.stringify(data);
    metrics.summary("redis.publish.applied-ops", blob.length);

    # publish on separate channels for individual projects and docs when
    # configured (needs realtime to be configured for this too).
    if Settings.publishOnIndividualChannels
      pubsubClient.publish "applied-ops:#{data.doc_id}", blob
    else
      pubsubClient.publish "applied-ops", blob
    // publish on separate channels for individual projects and docs when
    // configured (needs realtime to be configured for this too).
    if (Settings.publishOnIndividualChannels) {
      return pubsubClient.publish(`applied-ops:${data.doc_id}`, blob);
    } else {
      return pubsubClient.publish("applied-ops", blob);
    }
  }
});
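getPendingUpdatesForDoc pops at most MAX_OPS_PER_ITERATION entries by pairing LRANGE and LTRIM inside one MULTI, so no update can slip in between the read and the trim. The same atomic batch-pop in isolation, sketched with ioredis and an assumed key shape (neither is what this service literally uses):

// Sketch under assumptions; same LRANGE + LTRIM batch-pop as above.
const Redis = require("ioredis");
const redis = new Redis();
const MAX_OPS_PER_ITERATION = 8; // process a limited number of ops for safety

async function popPendingUpdates(docId) {
  const key = `PendingUpdates:{${docId}}`; // key shape assumed
  const [[, jsonUpdates]] = await redis
    .multi()
    .lrange(key, 0, MAX_OPS_PER_ITERATION - 1) // read the head of the list
    .ltrim(key, MAX_OPS_PER_ITERATION, -1)     // drop what we just read
    .exec();
  return jsonUpdates.map(u => JSON.parse(u));
}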
@@ -1,376 +1,484 @@
Settings = require('settings-sharelatex')
rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater)
logger = require('logger-sharelatex')
metrics = require('./Metrics')
Errors = require "./Errors"
crypto = require "crypto"
async = require "async"
ProjectHistoryRedisManager = require "./ProjectHistoryRedisManager"

/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS103: Rewrite code to no longer use __guard__
 * DS201: Simplify complex destructure assignments
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let RedisManager;
const Settings = require('settings-sharelatex');
const rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater);
const logger = require('logger-sharelatex');
const metrics = require('./Metrics');
const Errors = require("./Errors");
const crypto = require("crypto");
const async = require("async");
const ProjectHistoryRedisManager = require("./ProjectHistoryRedisManager");

# Sometimes Redis calls take an unexpectedly long time. We have to be
# quick with Redis calls because we're holding a lock that expires
# after 30 seconds. We can't let any errors in the rest of the stack
# hold us up, and need to bail out quickly if there is a problem.
MAX_REDIS_REQUEST_LENGTH = 5000 # 5 seconds
// Sometimes Redis calls take an unexpectedly long time. We have to be
// quick with Redis calls because we're holding a lock that expires
// after 30 seconds. We can't let any errors in the rest of the stack
// hold us up, and need to bail out quickly if there is a problem.
const MAX_REDIS_REQUEST_LENGTH = 5000; // 5 seconds

# Make times easy to read
minutes = 60 # seconds for Redis expire
// Make times easy to read
const minutes = 60; // seconds for Redis expire

logHashErrors = Settings.documentupdater?.logHashErrors
logHashReadErrors = logHashErrors?.read
const logHashErrors = Settings.documentupdater != null ? Settings.documentupdater.logHashErrors : undefined;
const logHashReadErrors = logHashErrors != null ? logHashErrors.read : undefined;

MEGABYTES = 1024 * 1024
MAX_RANGES_SIZE = 3 * MEGABYTES
const MEGABYTES = 1024 * 1024;
const MAX_RANGES_SIZE = 3 * MEGABYTES;

keys = Settings.redis.documentupdater.key_schema
historyKeys = Settings.redis.history.key_schema # note: this is track changes, not project-history
const keys = Settings.redis.documentupdater.key_schema;
const historyKeys = Settings.redis.history.key_schema; // note: this is track changes, not project-history

module.exports = RedisManager =
  rclient: rclient
module.exports = (RedisManager = {
  rclient,

  putDocInMemory : (project_id, doc_id, docLines, version, ranges, pathname, projectHistoryId, _callback)->
    timer = new metrics.Timer("redis.put-doc")
    callback = (error) ->
      timer.done()
      _callback(error)
    docLines = JSON.stringify(docLines)
    if docLines.indexOf("\u0000") != -1
      error = new Error("null bytes found in doc lines")
      # this check was added to catch memory corruption in JSON.stringify.
      # It sometimes returned null bytes at the end of the string.
      logger.error {err: error, doc_id: doc_id, docLines: docLines}, error.message
      return callback(error)
    docHash = RedisManager._computeHash(docLines)
    # record bytes sent to redis
    metrics.summary "redis.docLines", docLines.length, {status: "set"}
    logger.log {project_id, doc_id, version, docHash, pathname, projectHistoryId}, "putting doc in redis"
    RedisManager._serializeRanges ranges, (error, ranges) ->
      if error?
        logger.error {err: error, doc_id, project_id}, error.message
        return callback(error)
      multi = rclient.multi()
      multi.set keys.docLines(doc_id:doc_id), docLines
      multi.set keys.projectKey({doc_id:doc_id}), project_id
      multi.set keys.docVersion(doc_id:doc_id), version
      multi.set keys.docHash(doc_id:doc_id), docHash
      if ranges?
        multi.set keys.ranges(doc_id:doc_id), ranges
      else
        multi.del keys.ranges(doc_id:doc_id)
      multi.set keys.pathname(doc_id:doc_id), pathname
      multi.set keys.projectHistoryId(doc_id:doc_id), projectHistoryId
      multi.exec (error, result) ->
        return callback(error) if error?
        # update docsInProject set
        rclient.sadd keys.docsInProject(project_id:project_id), doc_id, callback

  putDocInMemory(project_id, doc_id, docLines, version, ranges, pathname, projectHistoryId, _callback){
    const timer = new metrics.Timer("redis.put-doc");
    const callback = function(error) {
      timer.done();
      return _callback(error);
    };
    docLines = JSON.stringify(docLines);
    if (docLines.indexOf("\u0000") !== -1) {
      const error = new Error("null bytes found in doc lines");
      // this check was added to catch memory corruption in JSON.stringify.
      // It sometimes returned null bytes at the end of the string.
      logger.error({err: error, doc_id, docLines}, error.message);
      return callback(error);
    }
    const docHash = RedisManager._computeHash(docLines);
    // record bytes sent to redis
    metrics.summary("redis.docLines", docLines.length, {status: "set"});
    logger.log({project_id, doc_id, version, docHash, pathname, projectHistoryId}, "putting doc in redis");
    return RedisManager._serializeRanges(ranges, function(error, ranges) {
      if (error != null) {
        logger.error({err: error, doc_id, project_id}, error.message);
        return callback(error);
      }
      const multi = rclient.multi();
      multi.set(keys.docLines({doc_id}), docLines);
      multi.set(keys.projectKey({doc_id}), project_id);
      multi.set(keys.docVersion({doc_id}), version);
      multi.set(keys.docHash({doc_id}), docHash);
      if (ranges != null) {
        multi.set(keys.ranges({doc_id}), ranges);
      } else {
        multi.del(keys.ranges({doc_id}));
      }
      multi.set(keys.pathname({doc_id}), pathname);
      multi.set(keys.projectHistoryId({doc_id}), projectHistoryId);
      return multi.exec(function(error, result) {
        if (error != null) { return callback(error); }
        // update docsInProject set
        return rclient.sadd(keys.docsInProject({project_id}), doc_id, callback);
      });
    });
  },
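putDocInMemory stores a content hash alongside the lines so a later read can detect corruption. The exact digest used by _computeHash is not shown in this excerpt, but assuming it is a SHA-1 over the serialized lines, the write/read check behaves like this:

// Sketch under an assumption about _computeHash's digest.
const crypto = require("crypto");

function computeHash(docLines) {
  return crypto.createHash("sha1").update(docLines, "utf8").digest("hex");
}

const docLines = JSON.stringify(["Hello", "world"]);
const storedHash = computeHash(docLines); // written next to docLines on put
// ...later, when the doc is read back...
if (computeHash(docLines) !== storedHash) {
  console.error("hash mismatch on retrieved document");
}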
  removeDocFromMemory : (project_id, doc_id, _callback)->
    logger.log project_id:project_id, doc_id:doc_id, "removing doc from redis"
    callback = (err) ->
      if err?
        logger.err project_id:project_id, doc_id:doc_id, err:err, "error removing doc from redis"
        _callback(err)
      else
        logger.log project_id:project_id, doc_id:doc_id, "removed doc from redis"
        _callback()
  removeDocFromMemory(project_id, doc_id, _callback){
    logger.log({project_id, doc_id}, "removing doc from redis");
    const callback = function(err) {
      if (err != null) {
        logger.err({project_id, doc_id, err}, "error removing doc from redis");
        return _callback(err);
      } else {
        logger.log({project_id, doc_id}, "removed doc from redis");
        return _callback();
      }
    };

    multi = rclient.multi()
    multi.strlen keys.docLines(doc_id:doc_id)
    multi.del keys.docLines(doc_id:doc_id)
    multi.del keys.projectKey(doc_id:doc_id)
    multi.del keys.docVersion(doc_id:doc_id)
    multi.del keys.docHash(doc_id:doc_id)
    multi.del keys.ranges(doc_id:doc_id)
    multi.del keys.pathname(doc_id:doc_id)
    multi.del keys.projectHistoryId(doc_id:doc_id)
    multi.del keys.projectHistoryType(doc_id:doc_id)
    multi.del keys.unflushedTime(doc_id:doc_id)
    multi.del keys.lastUpdatedAt(doc_id: doc_id)
    multi.del keys.lastUpdatedBy(doc_id: doc_id)
    multi.exec (error, response) ->
      return callback(error) if error?
      length = response?[0]
      if length > 0
        # record bytes freed in redis
        metrics.summary "redis.docLines", length, {status: "del"}
      multi = rclient.multi()
      multi.srem keys.docsInProject(project_id:project_id), doc_id
      multi.del keys.projectState(project_id:project_id)
      multi.exec callback

    let multi = rclient.multi();
    multi.strlen(keys.docLines({doc_id}));
    multi.del(keys.docLines({doc_id}));
    multi.del(keys.projectKey({doc_id}));
    multi.del(keys.docVersion({doc_id}));
    multi.del(keys.docHash({doc_id}));
    multi.del(keys.ranges({doc_id}));
    multi.del(keys.pathname({doc_id}));
    multi.del(keys.projectHistoryId({doc_id}));
    multi.del(keys.projectHistoryType({doc_id}));
    multi.del(keys.unflushedTime({doc_id}));
    multi.del(keys.lastUpdatedAt({doc_id}));
    multi.del(keys.lastUpdatedBy({doc_id}));
    return multi.exec(function(error, response) {
      if (error != null) { return callback(error); }
      const length = response != null ? response[0] : undefined;
      if (length > 0) {
        // record bytes freed in redis
        metrics.summary("redis.docLines", length, {status: "del"});
      }
      multi = rclient.multi();
      multi.srem(keys.docsInProject({project_id}), doc_id);
      multi.del(keys.projectState({project_id}));
      return multi.exec(callback);
    });
  },

  checkOrSetProjectState: (project_id, newState, callback = (error, stateChanged) ->) ->
    multi = rclient.multi()
    multi.getset keys.projectState(project_id:project_id), newState
    multi.expire keys.projectState(project_id:project_id), 30 * minutes
    multi.exec (error, response) ->
      return callback(error) if error?
      logger.log project_id: project_id, newState:newState, oldState: response[0], "checking project state"
      callback(null, response[0] isnt newState)
  checkOrSetProjectState(project_id, newState, callback) {
    if (callback == null) { callback = function(error, stateChanged) {}; }
    const multi = rclient.multi();
    multi.getset(keys.projectState({project_id}), newState);
    multi.expire(keys.projectState({project_id}), 30 * minutes);
    return multi.exec(function(error, response) {
      if (error != null) { return callback(error); }
      logger.log({project_id, newState, oldState: response[0]}, "checking project state");
      return callback(null, response[0] !== newState);
    });
  },
|
||||
clearProjectState: (project_id, callback = (error) ->) ->
|
||||
rclient.del keys.projectState(project_id:project_id), callback
|
||||
clearProjectState(project_id, callback) {
|
||||
if (callback == null) { callback = function(error) {}; }
|
||||
return rclient.del(keys.projectState({project_id}), callback);
|
||||
},
|
||||
|
||||
getDoc : (project_id, doc_id, callback = (error, lines, version, ranges, pathname, projectHistoryId, unflushedTime) ->)->
|
||||
timer = new metrics.Timer("redis.get-doc")
|
||||
multi = rclient.multi()
|
||||
multi.get keys.docLines(doc_id:doc_id)
|
||||
multi.get keys.docVersion(doc_id:doc_id)
|
||||
multi.get keys.docHash(doc_id:doc_id)
|
||||
multi.get keys.projectKey(doc_id:doc_id)
|
||||
multi.get keys.ranges(doc_id:doc_id)
|
||||
multi.get keys.pathname(doc_id:doc_id)
|
||||
multi.get keys.projectHistoryId(doc_id:doc_id)
|
||||
multi.get keys.unflushedTime(doc_id:doc_id)
|
||||
multi.get keys.lastUpdatedAt(doc_id: doc_id)
|
||||
multi.get keys.lastUpdatedBy(doc_id: doc_id)
|
||||
multi.exec (error, [docLines, version, storedHash, doc_project_id, ranges, pathname, projectHistoryId, unflushedTime, lastUpdatedAt, lastUpdatedBy])->
|
||||
timeSpan = timer.done()
|
||||
return callback(error) if error?
|
||||
# check if request took too long and bail out. only do this for
|
||||
# get, because it is the first call in each update, so if this
|
||||
# passes we'll assume others have a reasonable chance to succeed.
|
||||
if timeSpan > MAX_REDIS_REQUEST_LENGTH
|
||||
error = new Error("redis getDoc exceeded timeout")
|
||||
return callback(error)
|
||||
# record bytes loaded from redis
|
||||
if docLines?
|
||||
metrics.summary "redis.docLines", docLines.length, {status: "get"}
|
||||
# check sha1 hash value if present
|
||||
if docLines? and storedHash?
|
||||
computedHash = RedisManager._computeHash(docLines)
|
||||
if logHashReadErrors and computedHash isnt storedHash
|
||||
logger.error project_id: project_id, doc_id: doc_id, doc_project_id: doc_project_id, computedHash: computedHash, storedHash: storedHash, docLines:docLines, "hash mismatch on retrieved document"
|
||||
getDoc(project_id, doc_id, callback){
|
||||
if (callback == null) { callback = function(error, lines, version, ranges, pathname, projectHistoryId, unflushedTime) {}; }
|
||||
const timer = new metrics.Timer("redis.get-doc");
|
||||
const multi = rclient.multi();
|
||||
multi.get(keys.docLines({doc_id}));
|
||||
multi.get(keys.docVersion({doc_id}));
|
||||
multi.get(keys.docHash({doc_id}));
|
||||
multi.get(keys.projectKey({doc_id}));
|
||||
multi.get(keys.ranges({doc_id}));
|
||||
multi.get(keys.pathname({doc_id}));
|
||||
multi.get(keys.projectHistoryId({doc_id}));
|
||||
multi.get(keys.unflushedTime({doc_id}));
|
||||
multi.get(keys.lastUpdatedAt({doc_id}));
|
||||
multi.get(keys.lastUpdatedBy({doc_id}));
|
||||
return multi.exec(function(error, ...rest){
|
||||
let [docLines, version, storedHash, doc_project_id, ranges, pathname, projectHistoryId, unflushedTime, lastUpdatedAt, lastUpdatedBy] = Array.from(rest[0]);
|
||||
const timeSpan = timer.done();
|
||||
if (error != null) { return callback(error); }
|
||||
// check if request took too long and bail out. only do this for
|
||||
// get, because it is the first call in each update, so if this
|
||||
// passes we'll assume others have a reasonable chance to succeed.
|
||||
if (timeSpan > MAX_REDIS_REQUEST_LENGTH) {
|
||||
error = new Error("redis getDoc exceeded timeout");
|
||||
return callback(error);
|
||||
}
|
||||
// record bytes loaded from redis
|
||||
if (docLines != null) {
|
||||
metrics.summary("redis.docLines", docLines.length, {status: "get"});
|
||||
}
|
||||
// check sha1 hash value if present
|
||||
if ((docLines != null) && (storedHash != null)) {
|
||||
const computedHash = RedisManager._computeHash(docLines);
|
||||
if (logHashReadErrors && (computedHash !== storedHash)) {
|
||||
logger.error({project_id, doc_id, doc_project_id, computedHash, storedHash, docLines}, "hash mismatch on retrieved document");
|
||||
}
|
||||
}
|
||||
|
||||
try
|
||||
docLines = JSON.parse docLines
|
||||
ranges = RedisManager._deserializeRanges(ranges)
|
||||
catch e
|
||||
return callback(e)
|
||||
try {
|
||||
docLines = JSON.parse(docLines);
|
||||
ranges = RedisManager._deserializeRanges(ranges);
|
||||
} catch (e) {
|
||||
return callback(e);
|
||||
}
|
||||
|
||||
version = parseInt(version or 0, 10)
|
||||
# check doc is in requested project
|
||||
if doc_project_id? and doc_project_id isnt project_id
|
||||
logger.error project_id: project_id, doc_id: doc_id, doc_project_id: doc_project_id, "doc not in project"
|
||||
return callback(new Errors.NotFoundError("document not found"))
|
||||
version = parseInt(version || 0, 10);
|
||||
// check doc is in requested project
|
||||
if ((doc_project_id != null) && (doc_project_id !== project_id)) {
|
||||
logger.error({project_id, doc_id, doc_project_id}, "doc not in project");
|
||||
return callback(new Errors.NotFoundError("document not found"));
|
||||
}
|
||||
|
||||
if projectHistoryId?
|
||||
projectHistoryId = parseInt(projectHistoryId)
|
||||
if (projectHistoryId != null) {
|
||||
projectHistoryId = parseInt(projectHistoryId);
|
||||
}
|
||||
|
||||
# doc is not in redis, bail out
|
||||
if !docLines?
|
||||
return callback null, docLines, version, ranges, pathname, projectHistoryId, unflushedTime, lastUpdatedAt, lastUpdatedBy
|
||||
// doc is not in redis, bail out
|
||||
if ((docLines == null)) {
|
||||
return callback(null, docLines, version, ranges, pathname, projectHistoryId, unflushedTime, lastUpdatedAt, lastUpdatedBy);
|
||||
}
|
||||
|
||||
# doc should be in project set, check if missing (workaround for missing docs from putDoc)
|
||||
rclient.sadd keys.docsInProject(project_id:project_id), doc_id, (error, result) ->
|
||||
return callback(error) if error?
|
||||
if result isnt 0 # doc should already be in set
|
||||
logger.error project_id: project_id, doc_id: doc_id, doc_project_id: doc_project_id, "doc missing from docsInProject set"
|
||||
callback null, docLines, version, ranges, pathname, projectHistoryId, unflushedTime, lastUpdatedAt, lastUpdatedBy
|
||||
// doc should be in project set, check if missing (workaround for missing docs from putDoc)
|
||||
return rclient.sadd(keys.docsInProject({project_id}), doc_id, function(error, result) {
|
||||
if (error != null) { return callback(error); }
|
||||
if (result !== 0) { // doc should already be in set
|
||||
logger.error({project_id, doc_id, doc_project_id}, "doc missing from docsInProject set");
|
||||
}
|
||||
return callback(null, docLines, version, ranges, pathname, projectHistoryId, unflushedTime, lastUpdatedAt, lastUpdatedBy);
|
||||
});
|
||||
});
|
||||
},

getDocVersion: (doc_id, callback = (error, version, projectHistoryType) ->) ->
rclient.mget keys.docVersion(doc_id: doc_id), keys.projectHistoryType(doc_id:doc_id), (error, result) ->
return callback(error) if error?
[version, projectHistoryType] = result || []
version = parseInt(version, 10)
callback null, version, projectHistoryType
getDocVersion(doc_id, callback) {
if (callback == null) { callback = function(error, version, projectHistoryType) {}; }
return rclient.mget(keys.docVersion({doc_id}), keys.projectHistoryType({doc_id}), function(error, result) {
if (error != null) { return callback(error); }
let [version, projectHistoryType] = Array.from(result || []);
version = parseInt(version, 10);
return callback(null, version, projectHistoryType);
});
},

getDocLines: (doc_id, callback = (error, version) ->) ->
rclient.get keys.docLines(doc_id: doc_id), (error, docLines) ->
return callback(error) if error?
callback null, docLines
getDocLines(doc_id, callback) {
if (callback == null) { callback = function(error, version) {}; }
return rclient.get(keys.docLines({doc_id}), function(error, docLines) {
if (error != null) { return callback(error); }
return callback(null, docLines);
});
},

getPreviousDocOps: (doc_id, start, end, callback = (error, jsonOps) ->) ->
timer = new metrics.Timer("redis.get-prev-docops")
rclient.llen keys.docOps(doc_id: doc_id), (error, length) ->
return callback(error) if error?
rclient.get keys.docVersion(doc_id: doc_id), (error, version) ->
return callback(error) if error?
version = parseInt(version, 10)
first_version_in_redis = version - length
getPreviousDocOps(doc_id, start, end, callback) {
if (callback == null) { callback = function(error, jsonOps) {}; }
const timer = new metrics.Timer("redis.get-prev-docops");
return rclient.llen(keys.docOps({doc_id}), function(error, length) {
if (error != null) { return callback(error); }
return rclient.get(keys.docVersion({doc_id}), function(error, version) {
if (error != null) { return callback(error); }
version = parseInt(version, 10);
const first_version_in_redis = version - length;

if start < first_version_in_redis or end > version
error = new Errors.OpRangeNotAvailableError("doc ops range is not loaded in redis")
logger.warn {err: error, doc_id, length, version, start, end}, "doc ops range is not loaded in redis"
return callback(error)
if ((start < first_version_in_redis) || (end > version)) {
error = new Errors.OpRangeNotAvailableError("doc ops range is not loaded in redis");
logger.warn({err: error, doc_id, length, version, start, end}, "doc ops range is not loaded in redis");
return callback(error);
}

start = start - first_version_in_redis
if end > -1
end = end - first_version_in_redis
start = start - first_version_in_redis;
if (end > -1) {
end = end - first_version_in_redis;
}

if isNaN(start) or isNaN(end)
error = new Error("inconsistent version or lengths")
logger.error {err: error, doc_id, length, version, start, end}, "inconsistent version or length"
return callback(error)
if (isNaN(start) || isNaN(end)) {
error = new Error("inconsistent version or lengths");
logger.error({err: error, doc_id, length, version, start, end}, "inconsistent version or length");
return callback(error);
}

rclient.lrange keys.docOps(doc_id: doc_id), start, end, (error, jsonOps) ->
return callback(error) if error?
try
ops = jsonOps.map (jsonOp) -> JSON.parse jsonOp
catch e
return callback(e)
timeSpan = timer.done()
if timeSpan > MAX_REDIS_REQUEST_LENGTH
error = new Error("redis getPreviousDocOps exceeded timeout")
return callback(error)
callback null, ops
return rclient.lrange(keys.docOps({doc_id}), start, end, function(error, jsonOps) {
let ops;
if (error != null) { return callback(error); }
try {
ops = jsonOps.map(jsonOp => JSON.parse(jsonOp));
} catch (e) {
return callback(e);
}
const timeSpan = timer.done();
if (timeSpan > MAX_REDIS_REQUEST_LENGTH) {
error = new Error("redis getPreviousDocOps exceeded timeout");
return callback(error);
}
return callback(null, ops);
});
});
});
},

getHistoryType: (doc_id, callback = (error, projectHistoryType) ->) ->
rclient.get keys.projectHistoryType(doc_id:doc_id), (error, projectHistoryType) ->
return callback(error) if error?
callback null, projectHistoryType
getHistoryType(doc_id, callback) {
if (callback == null) { callback = function(error, projectHistoryType) {}; }
return rclient.get(keys.projectHistoryType({doc_id}), function(error, projectHistoryType) {
if (error != null) { return callback(error); }
return callback(null, projectHistoryType);
});
},

setHistoryType: (doc_id, projectHistoryType, callback = (error) ->) ->
rclient.set keys.projectHistoryType(doc_id:doc_id), projectHistoryType, callback
setHistoryType(doc_id, projectHistoryType, callback) {
if (callback == null) { callback = function(error) {}; }
return rclient.set(keys.projectHistoryType({doc_id}), projectHistoryType, callback);
},

DOC_OPS_TTL: 60 * minutes
DOC_OPS_MAX_LENGTH: 100
updateDocument : (project_id, doc_id, docLines, newVersion, appliedOps = [], ranges, updateMeta, callback = (error) ->)->
RedisManager.getDocVersion doc_id, (error, currentVersion, projectHistoryType) ->
return callback(error) if error?
if currentVersion + appliedOps.length != newVersion
error = new Error("Version mismatch. '#{doc_id}' is corrupted.")
logger.error {err: error, doc_id, currentVersion, newVersion, opsLength: appliedOps.length}, "version mismatch"
return callback(error)
DOC_OPS_TTL: 60 * minutes,
DOC_OPS_MAX_LENGTH: 100,
updateDocument(project_id, doc_id, docLines, newVersion, appliedOps, ranges, updateMeta, callback){
if (appliedOps == null) { appliedOps = []; }
if (callback == null) { callback = function(error) {}; }
return RedisManager.getDocVersion(doc_id, function(error, currentVersion, projectHistoryType) {
if (error != null) { return callback(error); }
if ((currentVersion + appliedOps.length) !== newVersion) {
error = new Error(`Version mismatch. '${doc_id}' is corrupted.`);
logger.error({err: error, doc_id, currentVersion, newVersion, opsLength: appliedOps.length}, "version mismatch");
return callback(error);
}

jsonOps = appliedOps.map (op) -> JSON.stringify op
for op in jsonOps
if op.indexOf("\u0000") != -1
error = new Error("null bytes found in jsonOps")
# this check was added to catch memory corruption in JSON.stringify
logger.error {err: error, doc_id: doc_id, jsonOps: jsonOps}, error.message
return callback(error)
const jsonOps = appliedOps.map(op => JSON.stringify(op));
for (let op of Array.from(jsonOps)) {
if (op.indexOf("\u0000") !== -1) {
error = new Error("null bytes found in jsonOps");
// this check was added to catch memory corruption in JSON.stringify
logger.error({err: error, doc_id, jsonOps}, error.message);
return callback(error);
}
}

newDocLines = JSON.stringify(docLines)
if newDocLines.indexOf("\u0000") != -1
error = new Error("null bytes found in doc lines")
# this check was added to catch memory corruption in JSON.stringify
logger.error {err: error, doc_id: doc_id, newDocLines: newDocLines}, error.message
return callback(error)
newHash = RedisManager._computeHash(newDocLines)
const newDocLines = JSON.stringify(docLines);
if (newDocLines.indexOf("\u0000") !== -1) {
error = new Error("null bytes found in doc lines");
// this check was added to catch memory corruption in JSON.stringify
logger.error({err: error, doc_id, newDocLines}, error.message);
return callback(error);
}
const newHash = RedisManager._computeHash(newDocLines);

opVersions = appliedOps.map (op) -> op?.v
logger.log doc_id: doc_id, version: newVersion, hash: newHash, op_versions: opVersions, "updating doc in redis"
# record bytes sent to redis in update
metrics.summary "redis.docLines", newDocLines.length, {status: "update"}
RedisManager._serializeRanges ranges, (error, ranges) ->
if error?
logger.error {err: error, doc_id}, error.message
return callback(error)
if ranges? and ranges.indexOf("\u0000") != -1
error = new Error("null bytes found in ranges")
# this check was added to catch memory corruption in JSON.stringify
logger.error err: error, doc_id: doc_id, ranges: ranges, error.message
return callback(error)
multi = rclient.multi()
multi.set keys.docLines(doc_id:doc_id), newDocLines # index 0
multi.set keys.docVersion(doc_id:doc_id), newVersion # index 1
multi.set keys.docHash(doc_id:doc_id), newHash # index 2
multi.ltrim keys.docOps(doc_id: doc_id), -RedisManager.DOC_OPS_MAX_LENGTH, -1 # index 3
if ranges?
multi.set keys.ranges(doc_id:doc_id), ranges # index 4
else
multi.del keys.ranges(doc_id:doc_id) # also index 4
# push the ops last so we can get the lengths at fixed index position 7
if jsonOps.length > 0
multi.rpush keys.docOps(doc_id: doc_id), jsonOps... # index 5
# expire must come after rpush since before it will be a no-op if the list is empty
multi.expire keys.docOps(doc_id: doc_id), RedisManager.DOC_OPS_TTL # index 6
if projectHistoryType is "project-history"
metrics.inc 'history-queue', 1, {status: 'skip-track-changes'}
logger.log {doc_id}, "skipping push of uncompressed ops for project using project-history"
else
# project is using old track-changes history service
metrics.inc 'history-queue', 1, {status: 'track-changes'}
multi.rpush historyKeys.uncompressedHistoryOps(doc_id: doc_id), jsonOps... # index 7
# Set the unflushed timestamp to the current time if the doc
# hasn't been modified before (the content in mongo has been
# valid up to this point). Otherwise leave it alone ("NX" flag).
multi.set keys.unflushedTime(doc_id: doc_id), Date.now(), "NX"
multi.set keys.lastUpdatedAt(doc_id: doc_id), Date.now() # index 8
if updateMeta?.user_id
multi.set keys.lastUpdatedBy(doc_id: doc_id), updateMeta.user_id # index 9
else
multi.del keys.lastUpdatedBy(doc_id: doc_id) # index 9
multi.exec (error, result) ->
return callback(error) if error?
const opVersions = appliedOps.map(op => op != null ? op.v : undefined);
logger.log({doc_id, version: newVersion, hash: newHash, op_versions: opVersions}, "updating doc in redis");
// record bytes sent to redis in update
metrics.summary("redis.docLines", newDocLines.length, {status: "update"});
return RedisManager._serializeRanges(ranges, function(error, ranges) {
if (error != null) {
logger.error({err: error, doc_id}, error.message);
return callback(error);
}
if ((ranges != null) && (ranges.indexOf("\u0000") !== -1)) {
error = new Error("null bytes found in ranges");
// this check was added to catch memory corruption in JSON.stringify
logger.error({err: error, doc_id, ranges}, error.message);
return callback(error);
}
const multi = rclient.multi();
multi.set(keys.docLines({doc_id}), newDocLines); // index 0
multi.set(keys.docVersion({doc_id}), newVersion); // index 1
multi.set(keys.docHash({doc_id}), newHash); // index 2
multi.ltrim(keys.docOps({doc_id}), -RedisManager.DOC_OPS_MAX_LENGTH, -1); // index 3
if (ranges != null) {
multi.set(keys.ranges({doc_id}), ranges); // index 4
} else {
multi.del(keys.ranges({doc_id})); // also index 4
}
// push the ops last so we can get the lengths at fixed index position 7
if (jsonOps.length > 0) {
multi.rpush(keys.docOps({doc_id}), ...Array.from(jsonOps)); // index 5
// expire must come after rpush since before it will be a no-op if the list is empty
multi.expire(keys.docOps({doc_id}), RedisManager.DOC_OPS_TTL); // index 6
if (projectHistoryType === "project-history") {
metrics.inc('history-queue', 1, {status: 'skip-track-changes'});
logger.log({doc_id}, "skipping push of uncompressed ops for project using project-history");
} else {
// project is using old track-changes history service
metrics.inc('history-queue', 1, {status: 'track-changes'});
multi.rpush(historyKeys.uncompressedHistoryOps({doc_id}), ...Array.from(jsonOps)); // index 7
}
// Set the unflushed timestamp to the current time if the doc
// hasn't been modified before (the content in mongo has been
// valid up to this point). Otherwise leave it alone ("NX" flag).
multi.set(keys.unflushedTime({doc_id}), Date.now(), "NX");
multi.set(keys.lastUpdatedAt({doc_id}), Date.now()); // index 8
if ((updateMeta != null ? updateMeta.user_id : undefined)) {
multi.set(keys.lastUpdatedBy({doc_id}), updateMeta.user_id); // index 9
} else {
multi.del(keys.lastUpdatedBy({doc_id})); // index 9
}
}
return multi.exec(function(error, result) {
let docUpdateCount;
if (error != null) { return callback(error); }

if projectHistoryType is 'project-history'
docUpdateCount = undefined # only using project history, don't bother with track-changes
else
# project is using old track-changes history service
docUpdateCount = result[7] # length of uncompressedHistoryOps queue (index 7)
if (projectHistoryType === 'project-history') {
docUpdateCount = undefined; // only using project history, don't bother with track-changes
} else {
// project is using old track-changes history service
docUpdateCount = result[7]; // length of uncompressedHistoryOps queue (index 7)
}

if jsonOps.length > 0 && Settings.apis?.project_history?.enabled
metrics.inc 'history-queue', 1, {status: 'project-history'}
ProjectHistoryRedisManager.queueOps project_id, jsonOps..., (error, projectUpdateCount) ->
callback null, docUpdateCount, projectUpdateCount
else
callback null, docUpdateCount
if ((jsonOps.length > 0) && __guard__(Settings.apis != null ? Settings.apis.project_history : undefined, x => x.enabled)) {
metrics.inc('history-queue', 1, {status: 'project-history'});
return ProjectHistoryRedisManager.queueOps(project_id, ...Array.from(jsonOps), (error, projectUpdateCount) => callback(null, docUpdateCount, projectUpdateCount));
} else {
return callback(null, docUpdateCount);
}
});
});
});
},
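The `// index N` comments above are load-bearing: with a node_redis-style client, multi.exec hands back one reply per queued command, in command order, which is why the ops are pushed last and their queue length is read back at a fixed position (result[7]). A minimal illustrative sketch, with hypothetical keys rather than the real ones:

// Sketch only: replies arrive in the same order the commands were queued.
const demo = rclient.multi();
demo.set("docLines:example", "[...]");          // reply index 0
demo.rpush("historyOps:example", "op1", "op2"); // reply index 1 = new list length
demo.exec(function(error, replies) {
    if (error != null) { return; }
    const queueLength = replies[1]; // fixed index, like result[7] above
});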

renameDoc: (project_id, doc_id, user_id, update, projectHistoryId, callback = (error) ->) ->
RedisManager.getDoc project_id, doc_id, (error, lines, version) ->
return callback(error) if error?
renameDoc(project_id, doc_id, user_id, update, projectHistoryId, callback) {
if (callback == null) { callback = function(error) {}; }
return RedisManager.getDoc(project_id, doc_id, function(error, lines, version) {
if (error != null) { return callback(error); }

if lines? and version?
rclient.set keys.pathname(doc_id:doc_id), update.newPathname, (error) ->
return callback(error) if error?
ProjectHistoryRedisManager.queueRenameEntity project_id, projectHistoryId, 'doc', doc_id, user_id, update, callback
else
ProjectHistoryRedisManager.queueRenameEntity project_id, projectHistoryId, 'doc', doc_id, user_id, update, callback
if ((lines != null) && (version != null)) {
return rclient.set(keys.pathname({doc_id}), update.newPathname, function(error) {
if (error != null) { return callback(error); }
return ProjectHistoryRedisManager.queueRenameEntity(project_id, projectHistoryId, 'doc', doc_id, user_id, update, callback);
});
} else {
return ProjectHistoryRedisManager.queueRenameEntity(project_id, projectHistoryId, 'doc', doc_id, user_id, update, callback);
}
});
},

clearUnflushedTime: (doc_id, callback = (error) ->) ->
rclient.del keys.unflushedTime(doc_id:doc_id), callback
clearUnflushedTime(doc_id, callback) {
if (callback == null) { callback = function(error) {}; }
return rclient.del(keys.unflushedTime({doc_id}), callback);
},

getDocIdsInProject: (project_id, callback = (error, doc_ids) ->) ->
rclient.smembers keys.docsInProject(project_id: project_id), callback
getDocIdsInProject(project_id, callback) {
if (callback == null) { callback = function(error, doc_ids) {}; }
return rclient.smembers(keys.docsInProject({project_id}), callback);
},

getDocTimestamps: (doc_ids, callback = (error, result) ->) ->
# get lastupdatedat timestamps for an array of doc_ids
async.mapSeries doc_ids, (doc_id, cb) ->
rclient.get keys.lastUpdatedAt(doc_id: doc_id), cb
, callback
getDocTimestamps(doc_ids, callback) {
// get lastupdatedat timestamps for an array of doc_ids
if (callback == null) { callback = function(error, result) {}; }
return async.mapSeries(doc_ids, (doc_id, cb) => rclient.get(keys.lastUpdatedAt({doc_id}), cb)
, callback);
},

queueFlushAndDeleteProject: (project_id, callback) ->
# store the project id in a sorted set ordered by time with a random offset to smooth out spikes
SMOOTHING_OFFSET = if Settings.smoothingOffset > 0 then Math.round(Settings.smoothingOffset * Math.random()) else 0
rclient.zadd keys.flushAndDeleteQueue(), Date.now() + SMOOTHING_OFFSET, project_id, callback
queueFlushAndDeleteProject(project_id, callback) {
// store the project id in a sorted set ordered by time with a random offset to smooth out spikes
const SMOOTHING_OFFSET = Settings.smoothingOffset > 0 ? Math.round(Settings.smoothingOffset * Math.random()) : 0;
return rclient.zadd(keys.flushAndDeleteQueue(), Date.now() + SMOOTHING_OFFSET, project_id, callback);
},
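To make the smoothing concrete, a small sketch (the value 1000 for Settings.smoothingOffset is hypothetical, not taken from the diff):

// Sketch: with Settings.smoothingOffset = 1000 (ms), projects queued in the
// same instant get scores spread across ~1s, so the sweeper sees a trickle
// instead of a spike. ZADD also re-scores an existing member, which is what
// postpones the flush when the same project is queued again.
const now = Date.now();
rclient.zadd(keys.flushAndDeleteQueue(), now + Math.round(1000 * Math.random()), "project-a");
rclient.zadd(keys.flushAndDeleteQueue(), now + Math.round(1000 * Math.random()), "project-b");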

getNextProjectToFlushAndDelete: (cutoffTime, callback = (error, key, timestamp)->) ->
# find the oldest queued flush that is before the cutoff time
rclient.zrangebyscore keys.flushAndDeleteQueue(), 0, cutoffTime, "WITHSCORES", "LIMIT", 0, 1, (err, reply) ->
return callback(err) if err?
return callback() if !reply?.length # return if no projects ready to be processed
# pop the oldest entry (get and remove in a multi)
multi = rclient.multi()
# Poor man's version of ZPOPMIN, which is only available in Redis 5.
multi.zrange keys.flushAndDeleteQueue(), 0, 0, "WITHSCORES"
multi.zremrangebyrank keys.flushAndDeleteQueue(), 0, 0
multi.zcard keys.flushAndDeleteQueue() # the total length of the queue (for metrics)
multi.exec (err, reply) ->
return callback(err) if err?
return callback() if !reply?.length
[key, timestamp] = reply[0]
queueLength = reply[2]
callback(null, key, timestamp, queueLength)
getNextProjectToFlushAndDelete(cutoffTime, callback) {
// find the oldest queued flush that is before the cutoff time
if (callback == null) { callback = function(error, key, timestamp){}; }
return rclient.zrangebyscore(keys.flushAndDeleteQueue(), 0, cutoffTime, "WITHSCORES", "LIMIT", 0, 1, function(err, reply) {
if (err != null) { return callback(err); }
if (!(reply != null ? reply.length : undefined)) { return callback(); } // return if no projects ready to be processed
// pop the oldest entry (get and remove in a multi)
const multi = rclient.multi();
// Poor man's version of ZPOPMIN, which is only available in Redis 5.
multi.zrange(keys.flushAndDeleteQueue(), 0, 0, "WITHSCORES");
multi.zremrangebyrank(keys.flushAndDeleteQueue(), 0, 0);
multi.zcard(keys.flushAndDeleteQueue()); // the total length of the queue (for metrics)
return multi.exec(function(err, reply) {
if (err != null) { return callback(err); }
if (!(reply != null ? reply.length : undefined)) { return callback(); }
const [key, timestamp] = Array.from(reply[0]);
const queueLength = reply[2];
return callback(null, key, timestamp, queueLength);
});
});
},
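On Redis 5 or later, the ZRANGE/ZREMRANGEBYRANK pair above could collapse into a single atomic ZPOPMIN. A hedged sketch, assuming a client that exposes the command and returns the usual flat [member, score] reply:

// Sketch only (Redis >= 5, client with zpopmin support; not the code above):
rclient.zpopmin(keys.flushAndDeleteQueue(), function(err, reply) {
    if (err != null) { return callback(err); }
    if (!reply || !reply.length) { return callback(); } // queue empty
    const [key, timestamp] = reply; // ZPOPMIN replies with [member, score]
    return callback(null, key, timestamp);
});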

_serializeRanges: (ranges, callback = (error, serializedRanges) ->) ->
jsonRanges = JSON.stringify(ranges)
if jsonRanges? and jsonRanges.length > MAX_RANGES_SIZE
return callback new Error("ranges are too large")
if jsonRanges == '{}'
# Most docs will have empty ranges so don't fill redis with lots of '{}' keys
jsonRanges = null
return callback null, jsonRanges
_serializeRanges(ranges, callback) {
if (callback == null) { callback = function(error, serializedRanges) {}; }
let jsonRanges = JSON.stringify(ranges);
if ((jsonRanges != null) && (jsonRanges.length > MAX_RANGES_SIZE)) {
return callback(new Error("ranges are too large"));
}
if (jsonRanges === '{}') {
// Most docs will have empty ranges so don't fill redis with lots of '{}' keys
jsonRanges = null;
}
return callback(null, jsonRanges);
},

_deserializeRanges: (ranges) ->
if !ranges? or ranges == ""
return {}
else
return JSON.parse(ranges)
_deserializeRanges(ranges) {
if ((ranges == null) || (ranges === "")) {
return {};
} else {
return JSON.parse(ranges);
}
},

_computeHash: (docLines) ->
# use sha1 checksum of doclines to detect data corruption.
#
# note: must specify 'utf8' encoding explicitly, as the default is
# binary in node < v5
return crypto.createHash('sha1').update(docLines, 'utf8').digest('hex')
_computeHash(docLines) {
// use sha1 checksum of doclines to detect data corruption.
//
// note: must specify 'utf8' encoding explicitly, as the default is
// binary in node < v5
return crypto.createHash('sha1').update(docLines, 'utf8').digest('hex');
}
});

function __guard__(value, transform) {
return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined;
}
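The generated __guard__ helper is just a spelled-out null-safe property access; on Node versions with optional chaining the one call site above reduces to a single expression:

// Sketch: the __guard__ call in updateDocument,
//   __guard__(Settings.apis != null ? Settings.apis.project_history : undefined, x => x.enabled)
// is equivalent to the modern form
//   Settings.apis?.project_history?.enabled
// (hypothetical shape shown for illustration):
const Settings = {apis: {project_history: {enabled: true}}};
const viaGuard = __guard__(Settings.apis != null ? Settings.apis.project_history : undefined, x => x.enabled);
const viaChaining = Settings.apis?.project_history?.enabled; // same value: true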

@@ -1,44 +1,64 @@
Keys = require('./UpdateKeys')
RedisManager = require "./RedisManager"
Errors = require "./Errors"
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let ShareJsDB;
const Keys = require('./UpdateKeys');
const RedisManager = require("./RedisManager");
const Errors = require("./Errors");

module.exports = class ShareJsDB
constructor: (@project_id, @doc_id, @lines, @version) ->
@appliedOps = {}
# ShareJS calls this detached from the instance, so we need to
# bind it to keep our context that can access @appliedOps
@writeOp = @_writeOp.bind(@)
module.exports = (ShareJsDB = class ShareJsDB {
constructor(project_id, doc_id, lines, version) {
this.project_id = project_id;
this.doc_id = doc_id;
this.lines = lines;
this.version = version;
this.appliedOps = {};
// ShareJS calls this detached from the instance, so we need to
// bind it to keep our context that can access @appliedOps
this.writeOp = this._writeOp.bind(this);
}

getOps: (doc_key, start, end, callback) ->
if start == end
return callback null, []
getOps(doc_key, start, end, callback) {
if (start === end) {
return callback(null, []);
}

# In redis, lrange values are inclusive.
if end?
end--
else
end = -1
// In redis, lrange values are inclusive.
if (end != null) {
end--;
} else {
end = -1;
}

[project_id, doc_id] = Keys.splitProjectIdAndDocId(doc_key)
RedisManager.getPreviousDocOps doc_id, start, end, callback
const [project_id, doc_id] = Array.from(Keys.splitProjectIdAndDocId(doc_key));
return RedisManager.getPreviousDocOps(doc_id, start, end, callback);
}
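The end-- above bridges ShareJS's half-open [start, end) version range and Redis's inclusive LRANGE bounds; a short sketch of the mapping:

// Sketch: ShareJS asks getOps(key, 5, 8), meaning versions 5, 6 and 7.
// LRANGE is inclusive at both ends, so end is decremented: fetch 5..7.
// When end is null ("up to the latest"), -1 is Redis's "last element"
// index, which getPreviousDocOps passes straight through to LRANGE.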

_writeOp: (doc_key, opData, callback) ->
@appliedOps[doc_key] ?= []
@appliedOps[doc_key].push opData
callback()
_writeOp(doc_key, opData, callback) {
if (this.appliedOps[doc_key] == null) { this.appliedOps[doc_key] = []; }
this.appliedOps[doc_key].push(opData);
return callback();
}

getSnapshot: (doc_key, callback) ->
if doc_key != Keys.combineProjectIdAndDocId(@project_id, @doc_id)
return callback(new Errors.NotFoundError("unexpected doc_key #{doc_key}, expected #{Keys.combineProjectIdAndDocId(@project_id, @doc_id)}"))
else
return callback null, {
snapshot: @lines.join("\n")
v: parseInt(@version, 10)
getSnapshot(doc_key, callback) {
if (doc_key !== Keys.combineProjectIdAndDocId(this.project_id, this.doc_id)) {
return callback(new Errors.NotFoundError(`unexpected doc_key ${doc_key}, expected ${Keys.combineProjectIdAndDocId(this.project_id, this.doc_id)}`));
} else {
return callback(null, {
snapshot: this.lines.join("\n"),
v: parseInt(this.version, 10),
type: "text"
}
});
}
}

# To be able to remove a doc from the ShareJS memory
# we need to call Model::delete, which calls this
# method on the database. However, we will handle removing
# it from Redis ourselves
delete: (docName, dbMeta, callback) -> callback()
// To be able to remove a doc from the ShareJS memory
// we need to call Model::delete, which calls this
// method on the database. However, we will handle removing
// it from Redis ourselves
delete(docName, dbMeta, callback) { return callback(); }
});

@@ -1,80 +1,102 @@
ShareJsModel = require "./sharejs/server/model"
ShareJsDB = require "./ShareJsDB"
logger = require "logger-sharelatex"
Settings = require('settings-sharelatex')
Keys = require "./UpdateKeys"
{EventEmitter} = require "events"
util = require "util"
RealTimeRedisManager = require "./RealTimeRedisManager"
crypto = require "crypto"
metrics = require('./Metrics')
Errors = require("./Errors")
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let ShareJsUpdateManager;
const ShareJsModel = require("./sharejs/server/model");
const ShareJsDB = require("./ShareJsDB");
const logger = require("logger-sharelatex");
const Settings = require('settings-sharelatex');
const Keys = require("./UpdateKeys");
const {EventEmitter} = require("events");
const util = require("util");
const RealTimeRedisManager = require("./RealTimeRedisManager");
const crypto = require("crypto");
const metrics = require('./Metrics');
const Errors = require("./Errors");

ShareJsModel:: = {}
util.inherits ShareJsModel, EventEmitter
ShareJsModel.prototype = {};
util.inherits(ShareJsModel, EventEmitter);

MAX_AGE_OF_OP = 80
const MAX_AGE_OF_OP = 80;

module.exports = ShareJsUpdateManager =
getNewShareJsModel: (project_id, doc_id, lines, version) ->
db = new ShareJsDB(project_id, doc_id, lines, version)
model = new ShareJsModel(db, maxDocLength: Settings.max_doc_length, maximumAge: MAX_AGE_OF_OP)
model.db = db
return model
module.exports = (ShareJsUpdateManager = {
getNewShareJsModel(project_id, doc_id, lines, version) {
const db = new ShareJsDB(project_id, doc_id, lines, version);
const model = new ShareJsModel(db, {maxDocLength: Settings.max_doc_length, maximumAge: MAX_AGE_OF_OP});
model.db = db;
return model;
},

applyUpdate: (project_id, doc_id, update, lines, version, callback = (error, updatedDocLines) ->) ->
logger.log project_id: project_id, doc_id: doc_id, update: update, "applying sharejs updates"
jobs = []
# record the update version before it is modified
incomingUpdateVersion = update.v
# We could use a global model for all docs, but we're hitting issues with the
# internal state of ShareJS not being accessible for clearing caches, and
# getting stuck due to queued callbacks (line 260 of sharejs/server/model.coffee)
# This adds a small but hopefully acceptable overhead (~12ms per 1000 updates on
# my 2009 MBP).
model = @getNewShareJsModel(project_id, doc_id, lines, version)
@_listenForOps(model)
doc_key = Keys.combineProjectIdAndDocId(project_id, doc_id)
model.applyOp doc_key, update, (error) ->
if error?
if error == "Op already submitted"
metrics.inc "sharejs.already-submitted"
logger.warn {project_id, doc_id, update}, "op has already been submitted"
update.dup = true
ShareJsUpdateManager._sendOp(project_id, doc_id, update)
else if /^Delete component/.test(error)
metrics.inc "sharejs.delete-mismatch"
logger.warn {project_id, doc_id, update, shareJsErr: error}, "sharejs delete does not match"
error = new Errors.DeleteMismatchError("Delete component does not match")
return callback(error)
else
metrics.inc "sharejs.other-error"
return callback(error)
logger.log project_id: project_id, doc_id: doc_id, error: error, "applied update"
model.getSnapshot doc_key, (error, data) =>
return callback(error) if error?
# only check hash when present and no other updates have been applied
if update.hash? and incomingUpdateVersion == version
ourHash = ShareJsUpdateManager._computeHash(data.snapshot)
if ourHash != update.hash
metrics.inc "sharejs.hash-fail"
return callback(new Error("Invalid hash"))
else
metrics.inc "sharejs.hash-pass", 0.001
docLines = data.snapshot.split(/\r\n|\n|\r/)
callback(null, docLines, data.v, model.db.appliedOps[doc_key] or [])
applyUpdate(project_id, doc_id, update, lines, version, callback) {
if (callback == null) { callback = function(error, updatedDocLines) {}; }
logger.log({project_id, doc_id, update}, "applying sharejs updates");
const jobs = [];
// record the update version before it is modified
const incomingUpdateVersion = update.v;
// We could use a global model for all docs, but we're hitting issues with the
// internal state of ShareJS not being accessible for clearing caches, and
// getting stuck due to queued callbacks (line 260 of sharejs/server/model.coffee)
// This adds a small but hopefully acceptable overhead (~12ms per 1000 updates on
// my 2009 MBP).
const model = this.getNewShareJsModel(project_id, doc_id, lines, version);
this._listenForOps(model);
const doc_key = Keys.combineProjectIdAndDocId(project_id, doc_id);
return model.applyOp(doc_key, update, function(error) {
if (error != null) {
if (error === "Op already submitted") {
metrics.inc("sharejs.already-submitted");
logger.warn({project_id, doc_id, update}, "op has already been submitted");
update.dup = true;
ShareJsUpdateManager._sendOp(project_id, doc_id, update);
} else if (/^Delete component/.test(error)) {
metrics.inc("sharejs.delete-mismatch");
logger.warn({project_id, doc_id, update, shareJsErr: error}, "sharejs delete does not match");
error = new Errors.DeleteMismatchError("Delete component does not match");
return callback(error);
} else {
metrics.inc("sharejs.other-error");
return callback(error);
}
}
logger.log({project_id, doc_id, error}, "applied update");
return model.getSnapshot(doc_key, (error, data) => {
if (error != null) { return callback(error); }
// only check hash when present and no other updates have been applied
if ((update.hash != null) && (incomingUpdateVersion === version)) {
const ourHash = ShareJsUpdateManager._computeHash(data.snapshot);
if (ourHash !== update.hash) {
metrics.inc("sharejs.hash-fail");
return callback(new Error("Invalid hash"));
} else {
metrics.inc("sharejs.hash-pass", 0.001);
}
}
const docLines = data.snapshot.split(/\r\n|\n|\r/);
return callback(null, docLines, data.v, model.db.appliedOps[doc_key] || []);
});
});
},

_listenForOps: (model) ->
model.on "applyOp", (doc_key, opData) ->
[project_id, doc_id] = Keys.splitProjectIdAndDocId(doc_key)
ShareJsUpdateManager._sendOp(project_id, doc_id, opData)
_listenForOps(model) {
return model.on("applyOp", function(doc_key, opData) {
const [project_id, doc_id] = Array.from(Keys.splitProjectIdAndDocId(doc_key));
return ShareJsUpdateManager._sendOp(project_id, doc_id, opData);
});
},

_sendOp: (project_id, doc_id, op) ->
RealTimeRedisManager.sendData {project_id, doc_id, op}
_sendOp(project_id, doc_id, op) {
return RealTimeRedisManager.sendData({project_id, doc_id, op});
},

_computeHash: (content) ->
_computeHash(content) {
return crypto.createHash('sha1')
.update("blob " + content.length + "\x00")
.update(content, 'utf8')
.digest('hex')
.digest('hex');
}
});
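The "blob " + length + "\x00" prefix makes this the same SHA-1 that git computes for a blob object, so a git-aware client can hash its copy of the content and compare. One caveat worth noting: content.length counts UTF-16 code units, so the prefix only matches git's byte length for ASCII content. A small sketch:

// Sketch: for the 6-byte content "hello\n" this matches `git hash-object`:
// sha1("blob 6\x00hello\n") === "ce013625030ba8dba906f756967f9e9ca394464a"
const crypto = require("crypto");
const content = "hello\n";
const hash = crypto.createHash('sha1')
    .update("blob " + content.length + "\x00")
    .update(content, 'utf8')
    .digest('hex');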

@@ -1,42 +1,62 @@
{db, ObjectId} = require "./mongojs"
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let SnapshotManager;
const {db, ObjectId} = require("./mongojs");

module.exports = SnapshotManager =
recordSnapshot: (project_id, doc_id, version, pathname, lines, ranges, callback) ->
try
project_id = ObjectId(project_id)
doc_id = ObjectId(doc_id)
catch error
return callback(error)
db.docSnapshots.insert {
module.exports = (SnapshotManager = {
recordSnapshot(project_id, doc_id, version, pathname, lines, ranges, callback) {
try {
project_id = ObjectId(project_id);
doc_id = ObjectId(doc_id);
} catch (error) {
return callback(error);
}
return db.docSnapshots.insert({
project_id, doc_id, version, lines, pathname,
ranges: SnapshotManager.jsonRangesToMongo(ranges),
ts: new Date()
}, callback
# Suggested indexes:
# db.docSnapshots.createIndex({project_id:1, doc_id:1})
# db.docSnapshots.createIndex({ts:1}, {expireAfterSeconds: 30*24*3600}) # expires after 30 days
}, callback);
},
// Suggested indexes:
// db.docSnapshots.createIndex({project_id:1, doc_id:1})
// db.docSnapshots.createIndex({ts:1}, {expireAfterSeconds: 30*24*3600}) // expires after 30 days

jsonRangesToMongo: (ranges) ->
return null if !ranges?
jsonRangesToMongo(ranges) {
if ((ranges == null)) { return null; }

updateMetadata = (metadata) ->
if metadata?.ts?
metadata.ts = new Date(metadata.ts)
if metadata?.user_id?
metadata.user_id = SnapshotManager._safeObjectId(metadata.user_id)
const updateMetadata = function(metadata) {
if ((metadata != null ? metadata.ts : undefined) != null) {
metadata.ts = new Date(metadata.ts);
}
if ((metadata != null ? metadata.user_id : undefined) != null) {
return metadata.user_id = SnapshotManager._safeObjectId(metadata.user_id);
}
};

for change in ranges.changes or []
change.id = SnapshotManager._safeObjectId(change.id)
updateMetadata(change.metadata)
for comment in ranges.comments or []
comment.id = SnapshotManager._safeObjectId(comment.id)
if comment.op?.t?
comment.op.t = SnapshotManager._safeObjectId(comment.op.t)
updateMetadata(comment.metadata)
return ranges
for (let change of Array.from(ranges.changes || [])) {
change.id = SnapshotManager._safeObjectId(change.id);
updateMetadata(change.metadata);
}
for (let comment of Array.from(ranges.comments || [])) {
comment.id = SnapshotManager._safeObjectId(comment.id);
if ((comment.op != null ? comment.op.t : undefined) != null) {
comment.op.t = SnapshotManager._safeObjectId(comment.op.t);
}
updateMetadata(comment.metadata);
}
return ranges;
},

_safeObjectId: (data) ->
try
return ObjectId(data)
catch error
return data
_safeObjectId(data) {
try {
return ObjectId(data);
} catch (error) {
return data;
}
}
});
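Spelling out the suggested indexes from the comment above as runnable mongo-shell statements (note that MongoDB's TTL option is expireAfterSeconds):

// Sketch (mongo shell): lookup index plus a TTL index that expires
// snapshots 30 days after their `ts` timestamp.
db.docSnapshots.createIndex({project_id: 1, doc_id: 1})
db.docSnapshots.createIndex({ts: 1}, {expireAfterSeconds: 30 * 24 * 3600})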

@@ -1,3 +1,4 @@
module.exports =
combineProjectIdAndDocId: (project_id, doc_id) -> "#{project_id}:#{doc_id}"
splitProjectIdAndDocId: (project_and_doc_id) -> project_and_doc_id.split(":")
module.exports = {
combineProjectIdAndDocId(project_id, doc_id) { return `${project_id}:${doc_id}`; },
splitProjectIdAndDocId(project_and_doc_id) { return project_and_doc_id.split(":"); }
};
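A usage sketch with hypothetical ids; the round trip is safe because ObjectId-style hex ids never contain the ":" separator:

// Sketch with hypothetical ids:
const Keys = require("./UpdateKeys");
const doc_key = Keys.combineProjectIdAndDocId("5a0b1c", "2d3e4f"); // "5a0b1c:2d3e4f"
const [project_id, doc_id] = Keys.splitProjectIdAndDocId(doc_key); // ["5a0b1c", "2d3e4f"]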

@@ -1,170 +1,232 @@
LockManager = require "./LockManager"
RedisManager = require "./RedisManager"
RealTimeRedisManager = require "./RealTimeRedisManager"
ShareJsUpdateManager = require "./ShareJsUpdateManager"
HistoryManager = require "./HistoryManager"
Settings = require('settings-sharelatex')
_ = require("lodash")
async = require("async")
logger = require('logger-sharelatex')
Metrics = require "./Metrics"
Errors = require "./Errors"
DocumentManager = require "./DocumentManager"
RangesManager = require "./RangesManager"
SnapshotManager = require "./SnapshotManager"
Profiler = require "./Profiler"
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS201: Simplify complex destructure assignments
* DS205: Consider reworking code to avoid use of IIFEs
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let UpdateManager;
const LockManager = require("./LockManager");
const RedisManager = require("./RedisManager");
const RealTimeRedisManager = require("./RealTimeRedisManager");
const ShareJsUpdateManager = require("./ShareJsUpdateManager");
const HistoryManager = require("./HistoryManager");
const Settings = require('settings-sharelatex');
const _ = require("lodash");
const async = require("async");
const logger = require('logger-sharelatex');
const Metrics = require("./Metrics");
const Errors = require("./Errors");
const DocumentManager = require("./DocumentManager");
const RangesManager = require("./RangesManager");
const SnapshotManager = require("./SnapshotManager");
const Profiler = require("./Profiler");

module.exports = UpdateManager =
processOutstandingUpdates: (project_id, doc_id, callback = (error) ->) ->
timer = new Metrics.Timer("updateManager.processOutstandingUpdates")
UpdateManager.fetchAndApplyUpdates project_id, doc_id, (error) ->
timer.done()
return callback(error) if error?
callback()
module.exports = (UpdateManager = {
processOutstandingUpdates(project_id, doc_id, callback) {
if (callback == null) { callback = function(error) {}; }
const timer = new Metrics.Timer("updateManager.processOutstandingUpdates");
return UpdateManager.fetchAndApplyUpdates(project_id, doc_id, function(error) {
timer.done();
if (error != null) { return callback(error); }
return callback();
});
},

processOutstandingUpdatesWithLock: (project_id, doc_id, callback = (error) ->) ->
profile = new Profiler("processOutstandingUpdatesWithLock", {project_id, doc_id})
LockManager.tryLock doc_id, (error, gotLock, lockValue) =>
return callback(error) if error?
return callback() if !gotLock
profile.log("tryLock")
UpdateManager.processOutstandingUpdates project_id, doc_id, (error) ->
return UpdateManager._handleErrorInsideLock(doc_id, lockValue, error, callback) if error?
profile.log("processOutstandingUpdates")
LockManager.releaseLock doc_id, lockValue, (error) =>
return callback(error) if error?
profile.log("releaseLock").end()
UpdateManager.continueProcessingUpdatesWithLock project_id, doc_id, callback
processOutstandingUpdatesWithLock(project_id, doc_id, callback) {
if (callback == null) { callback = function(error) {}; }
const profile = new Profiler("processOutstandingUpdatesWithLock", {project_id, doc_id});
return LockManager.tryLock(doc_id, (error, gotLock, lockValue) => {
if (error != null) { return callback(error); }
if (!gotLock) { return callback(); }
profile.log("tryLock");
return UpdateManager.processOutstandingUpdates(project_id, doc_id, function(error) {
if (error != null) { return UpdateManager._handleErrorInsideLock(doc_id, lockValue, error, callback); }
profile.log("processOutstandingUpdates");
return LockManager.releaseLock(doc_id, lockValue, error => {
if (error != null) { return callback(error); }
profile.log("releaseLock").end();
return UpdateManager.continueProcessingUpdatesWithLock(project_id, doc_id, callback);
});
});
});
},

continueProcessingUpdatesWithLock: (project_id, doc_id, callback = (error) ->) ->
RealTimeRedisManager.getUpdatesLength doc_id, (error, length) =>
return callback(error) if error?
if length > 0
UpdateManager.processOutstandingUpdatesWithLock project_id, doc_id, callback
else
callback()
continueProcessingUpdatesWithLock(project_id, doc_id, callback) {
if (callback == null) { callback = function(error) {}; }
return RealTimeRedisManager.getUpdatesLength(doc_id, (error, length) => {
if (error != null) { return callback(error); }
if (length > 0) {
return UpdateManager.processOutstandingUpdatesWithLock(project_id, doc_id, callback);
} else {
return callback();
}
});
},

fetchAndApplyUpdates: (project_id, doc_id, callback = (error) ->) ->
profile = new Profiler("fetchAndApplyUpdates", {project_id, doc_id})
RealTimeRedisManager.getPendingUpdatesForDoc doc_id, (error, updates) =>
return callback(error) if error?
logger.log {project_id: project_id, doc_id: doc_id, count: updates.length}, "processing updates"
if updates.length == 0
return callback()
profile.log("getPendingUpdatesForDoc")
doUpdate = (update, cb)->
UpdateManager.applyUpdate project_id, doc_id, update, (err) ->
profile.log("applyUpdate")
cb(err)
finalCallback = (err) ->
profile.log("async done").end()
callback(err)
async.eachSeries updates, doUpdate, finalCallback
fetchAndApplyUpdates(project_id, doc_id, callback) {
if (callback == null) { callback = function(error) {}; }
const profile = new Profiler("fetchAndApplyUpdates", {project_id, doc_id});
return RealTimeRedisManager.getPendingUpdatesForDoc(doc_id, (error, updates) => {
if (error != null) { return callback(error); }
logger.log({project_id, doc_id, count: updates.length}, "processing updates");
if (updates.length === 0) {
return callback();
}
profile.log("getPendingUpdatesForDoc");
const doUpdate = (update, cb) => UpdateManager.applyUpdate(project_id, doc_id, update, function(err) {
profile.log("applyUpdate");
return cb(err);
});
const finalCallback = function(err) {
profile.log("async done").end();
return callback(err);
};
return async.eachSeries(updates, doUpdate, finalCallback);
});
},

applyUpdate: (project_id, doc_id, update, _callback = (error) ->) ->
callback = (error) ->
if error?
RealTimeRedisManager.sendData {project_id, doc_id, error: error.message || error}
profile.log("sendData")
profile.end()
_callback(error)
applyUpdate(project_id, doc_id, update, _callback) {
if (_callback == null) { _callback = function(error) {}; }
const callback = function(error) {
if (error != null) {
RealTimeRedisManager.sendData({project_id, doc_id, error: error.message || error});
profile.log("sendData");
}
profile.end();
return _callback(error);
};

profile = new Profiler("applyUpdate", {project_id, doc_id})
UpdateManager._sanitizeUpdate update
profile.log("sanitizeUpdate")
DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) ->
profile.log("getDoc")
return callback(error) if error?
if !lines? or !version?
return callback(new Errors.NotFoundError("document not found: #{doc_id}"))
previousVersion = version
ShareJsUpdateManager.applyUpdate project_id, doc_id, update, lines, version, (error, updatedDocLines, version, appliedOps) ->
profile.log("sharejs.applyUpdate")
return callback(error) if error?
RangesManager.applyUpdate project_id, doc_id, ranges, appliedOps, updatedDocLines, (error, new_ranges, ranges_were_collapsed) ->
UpdateManager._addProjectHistoryMetadataToOps(appliedOps, pathname, projectHistoryId, lines)
profile.log("RangesManager.applyUpdate")
return callback(error) if error?
RedisManager.updateDocument project_id, doc_id, updatedDocLines, version, appliedOps, new_ranges, update.meta, (error, doc_ops_length, project_ops_length) ->
profile.log("RedisManager.updateDocument")
return callback(error) if error?
HistoryManager.recordAndFlushHistoryOps project_id, doc_id, appliedOps, doc_ops_length, project_ops_length, (error) ->
profile.log("recordAndFlushHistoryOps")
return callback(error) if error?
if ranges_were_collapsed
logger.log {project_id, doc_id, previousVersion, lines, ranges, update}, "update collapsed some ranges, snapshotting previous content"
# Do this last, since it's a mongo call, and so potentially longest running
# If it overruns the lock, it's ok, since all of our redis work is done
SnapshotManager.recordSnapshot project_id, doc_id, previousVersion, pathname, lines, ranges, (error) ->
if error?
logger.error {err: error, project_id, doc_id, version, lines, ranges}, "error recording snapshot"
return callback(error)
else
callback()
else
callback()
var profile = new Profiler("applyUpdate", {project_id, doc_id});
UpdateManager._sanitizeUpdate(update);
profile.log("sanitizeUpdate");
return DocumentManager.getDoc(project_id, doc_id, function(error, lines, version, ranges, pathname, projectHistoryId) {
profile.log("getDoc");
if (error != null) { return callback(error); }
if ((lines == null) || (version == null)) {
return callback(new Errors.NotFoundError(`document not found: ${doc_id}`));
}
const previousVersion = version;
return ShareJsUpdateManager.applyUpdate(project_id, doc_id, update, lines, version, function(error, updatedDocLines, version, appliedOps) {
profile.log("sharejs.applyUpdate");
if (error != null) { return callback(error); }
return RangesManager.applyUpdate(project_id, doc_id, ranges, appliedOps, updatedDocLines, function(error, new_ranges, ranges_were_collapsed) {
UpdateManager._addProjectHistoryMetadataToOps(appliedOps, pathname, projectHistoryId, lines);
profile.log("RangesManager.applyUpdate");
if (error != null) { return callback(error); }
return RedisManager.updateDocument(project_id, doc_id, updatedDocLines, version, appliedOps, new_ranges, update.meta, function(error, doc_ops_length, project_ops_length) {
profile.log("RedisManager.updateDocument");
if (error != null) { return callback(error); }
return HistoryManager.recordAndFlushHistoryOps(project_id, doc_id, appliedOps, doc_ops_length, project_ops_length, function(error) {
profile.log("recordAndFlushHistoryOps");
if (error != null) { return callback(error); }
if (ranges_were_collapsed) {
logger.log({project_id, doc_id, previousVersion, lines, ranges, update}, "update collapsed some ranges, snapshotting previous content");
// Do this last, since it's a mongo call, and so potentially longest running
// If it overruns the lock, it's ok, since all of our redis work is done
return SnapshotManager.recordSnapshot(project_id, doc_id, previousVersion, pathname, lines, ranges, function(error) {
if (error != null) {
logger.error({err: error, project_id, doc_id, version, lines, ranges}, "error recording snapshot");
return callback(error);
} else {
return callback();
}
});
} else {
return callback();
}
});
});
});
});
});
},

lockUpdatesAndDo: (method, project_id, doc_id, args..., callback) ->
profile = new Profiler("lockUpdatesAndDo", {project_id, doc_id})
LockManager.getLock doc_id, (error, lockValue) ->
profile.log("getLock")
return callback(error) if error?
UpdateManager.processOutstandingUpdates project_id, doc_id, (error) ->
return UpdateManager._handleErrorInsideLock(doc_id, lockValue, error, callback) if error?
profile.log("processOutstandingUpdates")
method project_id, doc_id, args..., (error, response_args...) ->
|
||||
return UpdateManager._handleErrorInsideLock(doc_id, lockValue, error, callback) if error?
|
||||
profile.log("method")
|
||||
LockManager.releaseLock doc_id, lockValue, (error) ->
|
||||
return callback(error) if error?
|
||||
profile.log("releaseLock").end()
|
||||
callback null, response_args...
|
||||
# We held the lock for a while so updates might have queued up
|
||||
UpdateManager.continueProcessingUpdatesWithLock project_id, doc_id
|
||||
lockUpdatesAndDo(method, project_id, doc_id, ...rest) {
|
||||
const adjustedLength = Math.max(rest.length, 1), args = rest.slice(0, adjustedLength - 1), callback = rest[adjustedLength - 1];
|
||||
const profile = new Profiler("lockUpdatesAndDo", {project_id, doc_id});
|
||||
return LockManager.getLock(doc_id, function(error, lockValue) {
|
||||
profile.log("getLock");
|
||||
if (error != null) { return callback(error); }
|
||||
return UpdateManager.processOutstandingUpdates(project_id, doc_id, function(error) {
|
||||
if (error != null) { return UpdateManager._handleErrorInsideLock(doc_id, lockValue, error, callback); }
|
||||
profile.log("processOutstandingUpdates");
|
||||
return method(project_id, doc_id, ...Array.from(args), function(error, ...response_args) {
|
||||
if (error != null) { return UpdateManager._handleErrorInsideLock(doc_id, lockValue, error, callback); }
|
||||
profile.log("method");
|
||||
return LockManager.releaseLock(doc_id, lockValue, function(error) {
|
||||
if (error != null) { return callback(error); }
|
||||
profile.log("releaseLock").end();
|
||||
callback(null, ...Array.from(response_args));
|
||||
// We held the lock for a while so updates might have queued up
|
||||
return UpdateManager.continueProcessingUpdatesWithLock(project_id, doc_id);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
},
|
||||
|
||||
_handleErrorInsideLock: (doc_id, lockValue, original_error, callback = (error) ->) ->
|
||||
LockManager.releaseLock doc_id, lockValue, (lock_error) ->
|
||||
callback(original_error)
|
||||
_handleErrorInsideLock(doc_id, lockValue, original_error, callback) {
|
||||
if (callback == null) { callback = function(error) {}; }
|
||||
return LockManager.releaseLock(doc_id, lockValue, lock_error => callback(original_error));
|
||||
},
|
||||
|
||||
_sanitizeUpdate: (update) ->
|
||||
# In JavaScript, characters are 16 bits wide; the language does not treat surrogate pairs as single characters.
|
||||
#
|
||||
# From Wikipedia (http://en.wikipedia.org/wiki/Plane_(Unicode)#Basic_Multilingual_Plane):
|
||||
# "The High Surrogates (U+D800–U+DBFF) and Low Surrogate (U+DC00–U+DFFF) codes are reserved
|
||||
# for encoding non-BMP characters in UTF-16 by using a pair of 16-bit codes: one High Surrogate
|
||||
# and one Low Surrogate. A single surrogate code point will never be assigned a character."
|
||||
#
|
||||
# The main offender seems to be \uD835 as a standalone character, which would be the first
|
||||
# 16-bit character of a blackboard bold character (http://www.fileformat.info/info/unicode/char/1d400/index.htm).
|
||||
# Something must be going on client side that is screwing up the encoding and splitting the
|
||||
# two 16-bit characters so that \uD835 is standalone.
|
||||
for op in update.op or []
|
||||
if op.i?
|
||||
# Replace high and low surrogate characters with 'replacement character' (\uFFFD)
|
||||
op.i = op.i.replace(/[\uD800-\uDFFF]/g, "\uFFFD")
|
||||
return update
|
||||
_sanitizeUpdate(update) {
|
||||
// In JavaScript, characters are 16 bits wide; the language does not treat surrogate pairs as single characters.
|
||||
//
|
||||
// From Wikipedia (http://en.wikipedia.org/wiki/Plane_(Unicode)#Basic_Multilingual_Plane):
|
||||
// "The High Surrogates (U+D800–U+DBFF) and Low Surrogate (U+DC00–U+DFFF) codes are reserved
|
||||
// for encoding non-BMP characters in UTF-16 by using a pair of 16-bit codes: one High Surrogate
|
||||
// and one Low Surrogate. A single surrogate code point will never be assigned a character."
|
||||
//
|
||||
// The main offender seems to be \uD835 as a standalone character, which would be the first
|
||||
// 16-bit character of a blackboard bold character (http://www.fileformat.info/info/unicode/char/1d400/index.htm).
|
||||
// Something must be going on client side that is screwing up the encoding and splitting the
|
||||
// two 16-bit characters so that \uD835 is standalone.
|
||||
for (let op of Array.from(update.op || [])) {
|
||||
if (op.i != null) {
|
||||
// Replace high and low surrogate characters with 'replacement character' (\uFFFD)
|
||||
op.i = op.i.replace(/[\uD800-\uDFFF]/g, "\uFFFD");
|
||||
}
|
||||
}
|
||||
return update;
|
||||
},
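A quick, illustrative way to exercise the sanitizer above (not part of this commit; it assumes UpdateManager is this module's export):

// A lone high surrogate inside an insert op is replaced with U+FFFD.
const update = { op: [{ i: "bold \uD835 here", p: 0 }] };
UpdateManager._sanitizeUpdate(update);
console.log(update.op[0].i); // "bold \uFFFD here"
// The character class matches individual UTF-16 code units, so both halves
// of a *valid* surrogate pair are replaced as well; the filter trades
// astral-plane characters for well-formed strings.
console.log("\uD835\uDC00".replace(/[\uD800-\uDFFF]/g, "\uFFFD")); // "\uFFFD\uFFFD"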
|
||||
|
||||
_addProjectHistoryMetadataToOps: (updates, pathname, projectHistoryId, lines) ->
|
||||
doc_length = _.reduce lines,
|
||||
(chars, line) -> chars + line.length,
|
||||
0
|
||||
doc_length += lines.length - 1 # count newline characters
|
||||
updates.forEach (update) ->
|
||||
update.projectHistoryId = projectHistoryId
|
||||
update.meta ||= {}
|
||||
update.meta.pathname = pathname
|
||||
update.meta.doc_length = doc_length
|
||||
# Each update may contain multiple ops, i.e.
|
||||
# [{
|
||||
# ops: [{i: "foo", p: 4}, {d: "bar", p:8}]
|
||||
# }, {
|
||||
# ops: [{d: "baz", p: 40}, {i: "qux", p:8}]
|
||||
# }]
|
||||
# We want to include the doc_length at the start of each update,
|
||||
# before its ops are applied. However, we need to track any
|
||||
# changes to it for the next update.
|
||||
for op in update.op
|
||||
if op.i?
|
||||
doc_length += op.i.length
|
||||
if op.d?
|
||||
doc_length -= op.d.length
|
||||
_addProjectHistoryMetadataToOps(updates, pathname, projectHistoryId, lines) {
|
||||
let doc_length = _.reduce(lines,
|
||||
(chars, line) => chars + line.length,
|
||||
0);
|
||||
doc_length += lines.length - 1; // count newline characters
|
||||
return updates.forEach(function(update) {
|
||||
update.projectHistoryId = projectHistoryId;
|
||||
if (!update.meta) { update.meta = {}; }
|
||||
update.meta.pathname = pathname;
|
||||
update.meta.doc_length = doc_length;
|
||||
// Each update may contain multiple ops, i.e.
|
||||
// [{
|
||||
// ops: [{i: "foo", p: 4}, {d: "bar", p:8}]
|
||||
// }, {
|
||||
// ops: [{d: "baz", p: 40}, {i: "qux", p:8}]
|
||||
// }]
|
||||
// We want to include the doc_length at the start of each update,
|
||||
// before its ops are applied. However, we need to track any
|
||||
// changes to it for the next update.
|
||||
return (() => {
|
||||
const result = [];
|
||||
for (let op of Array.from(update.op)) {
|
||||
if (op.i != null) {
|
||||
doc_length += op.i.length;
|
||||
}
|
||||
if (op.d != null) {
|
||||
result.push(doc_length -= op.d.length);
|
||||
} else {
|
||||
result.push(undefined);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
})();
|
||||
});
|
||||
}
|
||||
});
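To make the doc_length bookkeeping above concrete, here is an illustrative walk-through with hypothetical values (the pathname and projectHistoryId are made up):

// lines ["hello", "world"] -> doc_length = 5 + 5 + 1 newline = 11
const updates = [
  { op: [{ i: "foo", p: 0 }] },    // inserts 3 characters
  { op: [{ d: "world", p: 9 }] }   // deletes 5 characters
];
UpdateManager._addProjectHistoryMetadataToOps(updates, "/main.tex", "history-id", ["hello", "world"]);
console.log(updates[0].meta.doc_length); // 11, the length before any of its ops apply
console.log(updates[1].meta.doc_length); // 14, includes the 3 chars the first update inserted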
|
||||
|
|
|
@ -1,12 +1,21 @@
|
|||
Settings = require "settings-sharelatex"
|
||||
mongojs = require "mongojs"
|
||||
db = mongojs(Settings.mongo.url, ["docSnapshots"])
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const Settings = require("settings-sharelatex");
|
||||
const mongojs = require("mongojs");
|
||||
const db = mongojs(Settings.mongo.url, ["docSnapshots"]);
|
||||
|
||||
module.exports =
|
||||
db: db
|
||||
ObjectId: mongojs.ObjectId
|
||||
healthCheck: (callback) ->
|
||||
db.runCommand {ping: 1}, (err, res) ->
|
||||
return callback(err) if err?
|
||||
return callback(new Error("failed mongo ping")) if !res.ok
|
||||
callback()
|
||||
module.exports = {
|
||||
db,
|
||||
ObjectId: mongojs.ObjectId,
|
||||
healthCheck(callback) {
|
||||
return db.runCommand({ping: 1}, function(err, res) {
|
||||
if (err != null) { return callback(err); }
|
||||
if (!res.ok) { return callback(new Error("failed mongo ping")); }
|
||||
return callback();
|
||||
});
|
||||
}
|
||||
};
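Hypothetical usage of the health check above (the require path is an assumption):

const { healthCheck } = require("./mongojs");
healthCheck(function(err) {
  if (err != null) { return console.error("mongo not reachable", err); }
  console.log("mongo ok");
});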
|
||||
|
|
|
@ -1,22 +1,30 @@
|
|||
# This is a simple type used for testing other OT code. Each op is [expectedSnapshot, increment]
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
// This is a simple type used for testing other OT code. Each op is [expectedSnapshot, increment]
|
||||
|
||||
exports.name = 'count'
|
||||
exports.create = -> 1
|
||||
exports.name = 'count';
|
||||
exports.create = () => 1;
|
||||
|
||||
exports.apply = (snapshot, op) ->
|
||||
[v, inc] = op
|
||||
throw new Error "Op #{v} != snapshot #{snapshot}" unless snapshot == v
|
||||
snapshot + inc
|
||||
exports.apply = function(snapshot, op) {
|
||||
const [v, inc] = Array.from(op);
|
||||
if (snapshot !== v) { throw new Error(`Op ${v} != snapshot ${snapshot}`); }
|
||||
return snapshot + inc;
|
||||
};
|
||||
|
||||
# transform op1 by op2. Return transformed version of op1.
|
||||
exports.transform = (op1, op2) ->
|
||||
throw new Error "Op1 #{op1[0]} != op2 #{op2[0]}" unless op1[0] == op2[0]
|
||||
[op1[0] + op2[1], op1[1]]
|
||||
// transform op1 by op2. Return transformed version of op1.
|
||||
exports.transform = function(op1, op2) {
|
||||
if (op1[0] !== op2[0]) { throw new Error(`Op1 ${op1[0]} != op2 ${op2[0]}`); }
|
||||
return [op1[0] + op2[1], op1[1]];
|
||||
};
|
||||
|
||||
exports.compose = (op1, op2) ->
|
||||
throw new Error "Op1 #{op1} + 1 != op2 #{op2}" unless op1[0] + op1[1] == op2[0]
|
||||
[op1[0], op1[1] + op2[1]]
|
||||
exports.compose = function(op1, op2) {
|
||||
if ((op1[0] + op1[1]) !== op2[0]) { throw new Error(`Op1 ${op1} + 1 != op2 ${op2}`); }
|
||||
return [op1[0], op1[1] + op2[1]];
|
||||
};
|
||||
|
||||
exports.generateRandomOp = (doc) ->
|
||||
[[doc, 1], doc + 1]
|
||||
exports.generateRandomOp = doc => [[doc, 1], doc + 1];
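A worked example of the count type's invariants (illustrative REPL session):

const count = require("./count");
let snapshot = count.create();             // 1
snapshot = count.apply(snapshot, [1, 2]);  // op expected snapshot 1, adds 2 -> 3
// transform: [3, 1] and [3, 2] were made against the same snapshot; once
// [3, 2] has applied, [3, 1] must expect snapshot 3 + 2 = 5:
console.log(count.transform([3, 1], [3, 2])); // [5, 1]
// compose: [3, 1] then [4, 2] collapse into a single increment of 3:
console.log(count.compose([3, 1], [4, 2]));   // [3, 3]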
|
||||
|
||||
|
|
|
@ -1,65 +1,87 @@
|
|||
# These methods let you build a transform function from a transformComponent function
|
||||
# for OT types like text and JSON in which operations are lists of components
|
||||
# and transforming them requires N^2 work.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
// These methods let you build a transform function from a transformComponent function
|
||||
// for OT types like text and JSON in which operations are lists of components
|
||||
// and transforming them requires N^2 work.
|
||||
|
||||
# Add transform and transformX functions for an OT type which has transformComponent defined.
|
||||
# transformComponent(destination array, component, other component, side)
|
||||
exports['_bt'] = bootstrapTransform = (type, transformComponent, checkValidOp, append) ->
|
||||
transformComponentX = (left, right, destLeft, destRight) ->
|
||||
transformComponent destLeft, left, right, 'left'
|
||||
transformComponent destRight, right, left, 'right'
|
||||
// Add transform and transformX functions for an OT type which has transformComponent defined.
|
||||
// transformComponent(destination array, component, other component, side)
|
||||
let bootstrapTransform;
|
||||
exports['_bt'] = (bootstrapTransform = function(type, transformComponent, checkValidOp, append) {
|
||||
let transformX;
|
||||
const transformComponentX = function(left, right, destLeft, destRight) {
|
||||
transformComponent(destLeft, left, right, 'left');
|
||||
return transformComponent(destRight, right, left, 'right');
|
||||
};
|
||||
|
||||
# Transforms leftOp and rightOp against each other. Returns [leftOp', rightOp'].
|
||||
type.transformX = type['transformX'] = transformX = (leftOp, rightOp) ->
|
||||
checkValidOp leftOp
|
||||
checkValidOp rightOp
|
||||
// Transforms leftOp and rightOp against each other. Returns [leftOp', rightOp'].
|
||||
type.transformX = (type['transformX'] = (transformX = function(leftOp, rightOp) {
|
||||
checkValidOp(leftOp);
|
||||
checkValidOp(rightOp);
|
||||
|
||||
newRightOp = []
|
||||
const newRightOp = [];
|
||||
|
||||
for rightComponent in rightOp
|
||||
# Generate newLeftOp by transforming leftOp against rightComponent
|
||||
newLeftOp = []
|
||||
for (let rightComponent of Array.from(rightOp)) {
|
||||
// Generate newLeftOp by transforming leftOp against rightComponent
|
||||
const newLeftOp = [];
|
||||
|
||||
k = 0
|
||||
while k < leftOp.length
|
||||
nextC = []
|
||||
transformComponentX leftOp[k], rightComponent, newLeftOp, nextC
|
||||
k++
|
||||
let k = 0;
|
||||
while (k < leftOp.length) {
|
||||
var l;
|
||||
const nextC = [];
|
||||
transformComponentX(leftOp[k], rightComponent, newLeftOp, nextC);
|
||||
k++;
|
||||
|
||||
if nextC.length == 1
|
||||
rightComponent = nextC[0]
|
||||
else if nextC.length == 0
|
||||
append newLeftOp, l for l in leftOp[k..]
|
||||
rightComponent = null
|
||||
break
|
||||
else
|
||||
# Recurse.
|
||||
[l_, r_] = transformX leftOp[k..], nextC
|
||||
append newLeftOp, l for l in l_
|
||||
append newRightOp, r for r in r_
|
||||
rightComponent = null
|
||||
break
|
||||
if (nextC.length === 1) {
|
||||
rightComponent = nextC[0];
|
||||
} else if (nextC.length === 0) {
|
||||
for (l of Array.from(leftOp.slice(k))) { append(newLeftOp, l); }
|
||||
rightComponent = null;
|
||||
break;
|
||||
} else {
|
||||
// Recurse.
|
||||
const [l_, r_] = Array.from(transformX(leftOp.slice(k), nextC));
|
||||
for (l of Array.from(l_)) { append(newLeftOp, l); }
|
||||
for (let r of Array.from(r_)) { append(newRightOp, r); }
|
||||
rightComponent = null;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
append newRightOp, rightComponent if rightComponent?
|
||||
leftOp = newLeftOp
|
||||
if (rightComponent != null) { append(newRightOp, rightComponent); }
|
||||
leftOp = newLeftOp;
|
||||
}
|
||||
|
||||
[leftOp, newRightOp]
|
||||
return [leftOp, newRightOp];
|
||||
}));
|
||||
|
||||
# Transforms op with specified type ('left' or 'right') by otherOp.
|
||||
type.transform = type['transform'] = (op, otherOp, type) ->
|
||||
throw new Error "type must be 'left' or 'right'" unless type == 'left' or type == 'right'
|
||||
// Transforms op with specified type ('left' or 'right') by otherOp.
|
||||
return type.transform = (type['transform'] = function(op, otherOp, type) {
|
||||
let _;
|
||||
if ((type !== 'left') && (type !== 'right')) { throw new Error("type must be 'left' or 'right'"); }
|
||||
|
||||
return op if otherOp.length == 0
|
||||
if (otherOp.length === 0) { return op; }
|
||||
|
||||
# TODO: Benchmark with and without this line. I _think_ it'll make a big difference...?
|
||||
return transformComponent [], op[0], otherOp[0], type if op.length == 1 and otherOp.length == 1
|
||||
// TODO: Benchmark with and without this line. I _think_ it'll make a big difference...?
|
||||
if ((op.length === 1) && (otherOp.length === 1)) { return transformComponent([], op[0], otherOp[0], type); }
|
||||
|
||||
if type == 'left'
|
||||
[left, _] = transformX op, otherOp
|
||||
left
|
||||
else
|
||||
[_, right] = transformX otherOp, op
|
||||
right
|
||||
if (type === 'left') {
|
||||
let left;
|
||||
[left, _] = Array.from(transformX(op, otherOp));
|
||||
return left;
|
||||
} else {
|
||||
let right;
|
||||
[_, right] = Array.from(transformX(otherOp, op));
|
||||
return right;
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
if typeof WEB is 'undefined'
|
||||
exports.bootstrapTransform = bootstrapTransform
|
||||
if (typeof WEB === 'undefined') {
|
||||
exports.bootstrapTransform = bootstrapTransform;
|
||||
}
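A sketch of the semantics this helper provides, shown through the text type (which builds its transform from bootstrapTransform); the positions are illustrative:

const text = require("./text");
const mine = [{ i: "x", p: 5 }];    // insert "x" at position 5
const theirs = [{ i: "yy", p: 0 }]; // concurrent insert of 2 chars at position 0
// theirs landed before my position, so my insert shifts right by 2:
console.log(text.transform(mine, theirs, "left")); // [{ i: "x", p: 7 }]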
|
||||
|
|
|
@ -1,15 +1,21 @@
|
|||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
|
||||
register = (file) ->
|
||||
type = require file
|
||||
exports[type.name] = type
|
||||
try require "#{file}-api"
|
||||
const register = function(file) {
|
||||
const type = require(file);
|
||||
exports[type.name] = type;
|
||||
try { return require(`${file}-api`); } catch (error) {}
|
||||
};
|
||||
|
||||
# Import all the built-in types.
|
||||
register './simple'
|
||||
register './count'
|
||||
// Import all the built-in types.
|
||||
register('./simple');
|
||||
register('./count');
|
||||
|
||||
register './text'
|
||||
register './text-composable'
|
||||
register './text-tp2'
|
||||
register('./text');
|
||||
register('./text-composable');
|
||||
register('./text-tp2');
|
||||
|
||||
register './json'
|
||||
register('./json');
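After registration each type is addressable by its name attribute (hypothetical usage; the registry's filename is assumed):

const types = require("./index");
console.log(types["count"].create()); // 1
console.log(types["json"].create());  // null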
|
||||
|
|
|
@ -1,180 +1,273 @@
|
|||
# API for JSON OT
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS205: Consider reworking code to avoid use of IIFEs
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
// API for JSON OT
|
||||
|
||||
json = require './json' if typeof WEB is 'undefined'
|
||||
let json;
|
||||
if (typeof WEB === 'undefined') { json = require('./json'); }
|
||||
|
||||
if WEB?
|
||||
extendDoc = exports.extendDoc
|
||||
exports.extendDoc = (name, fn) ->
|
||||
SubDoc::[name] = fn
|
||||
extendDoc name, fn
|
||||
if (typeof WEB !== 'undefined' && WEB !== null) {
|
||||
const {
|
||||
extendDoc
|
||||
} = exports;
|
||||
exports.extendDoc = function(name, fn) {
|
||||
SubDoc.prototype[name] = fn;
|
||||
return extendDoc(name, fn);
|
||||
};
|
||||
}
|
||||
|
||||
depath = (path) ->
|
||||
if path.length == 1 and path[0].constructor == Array
|
||||
path[0]
|
||||
else path
|
||||
const depath = function(path) {
|
||||
if ((path.length === 1) && (path[0].constructor === Array)) {
|
||||
return path[0];
|
||||
} else { return path; }
|
||||
};
|
||||
|
||||
class SubDoc
|
||||
constructor: (@doc, @path) ->
|
||||
at: (path...) -> @doc.at @path.concat depath path
|
||||
get: -> @doc.getAt @path
|
||||
# for objects and lists
|
||||
set: (value, cb) -> @doc.setAt @path, value, cb
|
||||
# for strings and lists.
|
||||
insert: (pos, value, cb) -> @doc.insertAt @path, pos, value, cb
|
||||
# for strings
|
||||
del: (pos, length, cb) -> @doc.deleteTextAt @path, length, pos, cb
|
||||
# for objects and lists
|
||||
remove: (cb) -> @doc.removeAt @path, cb
|
||||
push: (value, cb) -> @insert @get().length, value, cb
|
||||
move: (from, to, cb) -> @doc.moveAt @path, from, to, cb
|
||||
add: (amount, cb) -> @doc.addAt @path, amount, cb
|
||||
on: (event, cb) -> @doc.addListener @path, event, cb
|
||||
removeListener: (l) -> @doc.removeListener l
|
||||
class SubDoc {
|
||||
constructor(doc, path) {
|
||||
this.doc = doc;
|
||||
this.path = path;
|
||||
}
|
||||
at(...path) { return this.doc.at(this.path.concat(depath(path))); }
|
||||
get() { return this.doc.getAt(this.path); }
|
||||
// for objects and lists
|
||||
set(value, cb) { return this.doc.setAt(this.path, value, cb); }
|
||||
// for strings and lists.
|
||||
insert(pos, value, cb) { return this.doc.insertAt(this.path, pos, value, cb); }
|
||||
// for strings
|
||||
del(pos, length, cb) { return this.doc.deleteTextAt(this.path, length, pos, cb); }
|
||||
// for objects and lists
|
||||
remove(cb) { return this.doc.removeAt(this.path, cb); }
|
||||
push(value, cb) { return this.insert(this.get().length, value, cb); }
|
||||
move(from, to, cb) { return this.doc.moveAt(this.path, from, to, cb); }
|
||||
add(amount, cb) { return this.doc.addAt(this.path, amount, cb); }
|
||||
on(event, cb) { return this.doc.addListener(this.path, event, cb); }
|
||||
removeListener(l) { return this.doc.removeListener(l); }
|
||||
|
||||
# text API compatibility
|
||||
getLength: -> @get().length
|
||||
getText: -> @get()
|
||||
// text API compatibility
|
||||
getLength() { return this.get().length; }
|
||||
getText() { return this.get(); }
|
||||
}
|
||||
|
||||
traverse = (snapshot, path) ->
|
||||
container = data:snapshot
|
||||
key = 'data'
|
||||
elem = container
|
||||
for p in path
|
||||
elem = elem[key]
|
||||
key = p
|
||||
throw new Error 'bad path' if typeof elem == 'undefined'
|
||||
{elem, key}
|
||||
const traverse = function(snapshot, path) {
|
||||
const container = {data:snapshot};
|
||||
let key = 'data';
|
||||
let elem = container;
|
||||
for (let p of Array.from(path)) {
|
||||
elem = elem[key];
|
||||
key = p;
|
||||
if (typeof elem === 'undefined') { throw new Error('bad path'); }
|
||||
}
|
||||
return {elem, key};
|
||||
};
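The local traverse helper resolves a path to the parent container plus the final key, so callers can both read and assign through it; for example (hypothetical snapshot):

const snapshot = { todos: ["buy milk"] };
const { elem, key } = traverse(snapshot, ["todos", 0]);
console.log(elem, key); // ['buy milk'] 0, so elem[key] === 'buy milk'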
|
||||
|
||||
pathEquals = (p1, p2) ->
|
||||
return false if p1.length != p2.length
|
||||
for e,i in p1
|
||||
return false if e != p2[i]
|
||||
true
|
||||
const pathEquals = function(p1, p2) {
|
||||
if (p1.length !== p2.length) { return false; }
|
||||
for (let i = 0; i < p1.length; i++) {
|
||||
const e = p1[i];
|
||||
if (e !== p2[i]) { return false; }
|
||||
}
|
||||
return true;
|
||||
};
|
||||
|
||||
json.api =
|
||||
provides: {json:true}
|
||||
json.api = {
|
||||
provides: {json:true},
|
||||
|
||||
at: (path...) -> new SubDoc this, depath path
|
||||
at(...path) { return new SubDoc(this, depath(path)); },
|
||||
|
||||
get: -> @snapshot
|
||||
set: (value, cb) -> @setAt [], value, cb
|
||||
get() { return this.snapshot; },
|
||||
set(value, cb) { return this.setAt([], value, cb); },
|
||||
|
||||
getAt: (path) ->
|
||||
{elem, key} = traverse @snapshot, path
|
||||
return elem[key]
|
||||
getAt(path) {
|
||||
const {elem, key} = traverse(this.snapshot, path);
|
||||
return elem[key];
|
||||
},
|
||||
|
||||
setAt: (path, value, cb) ->
|
||||
{elem, key} = traverse @snapshot, path
|
||||
op = {p:path}
|
||||
if elem.constructor == Array
|
||||
op.li = value
|
||||
op.ld = elem[key] if typeof elem[key] != 'undefined'
|
||||
else if typeof elem == 'object'
|
||||
op.oi = value
|
||||
op.od = elem[key] if typeof elem[key] != 'undefined'
|
||||
else throw new Error 'bad path'
|
||||
@submitOp [op], cb
|
||||
setAt(path, value, cb) {
|
||||
const {elem, key} = traverse(this.snapshot, path);
|
||||
const op = {p:path};
|
||||
if (elem.constructor === Array) {
|
||||
op.li = value;
|
||||
if (typeof elem[key] !== 'undefined') { op.ld = elem[key]; }
|
||||
} else if (typeof elem === 'object') {
|
||||
op.oi = value;
|
||||
if (typeof elem[key] !== 'undefined') { op.od = elem[key]; }
|
||||
} else { throw new Error('bad path'); }
|
||||
return this.submitOp([op], cb);
|
||||
},
|
||||
|
||||
removeAt: (path, cb) ->
|
||||
{elem, key} = traverse @snapshot, path
|
||||
throw new Error 'no element at that path' unless typeof elem[key] != 'undefined'
|
||||
op = {p:path}
|
||||
if elem.constructor == Array
|
||||
op.ld = elem[key]
|
||||
else if typeof elem == 'object'
|
||||
op.od = elem[key]
|
||||
else throw new Error 'bad path'
|
||||
@submitOp [op], cb
|
||||
removeAt(path, cb) {
|
||||
const {elem, key} = traverse(this.snapshot, path);
|
||||
if (typeof elem[key] === 'undefined') { throw new Error('no element at that path'); }
|
||||
const op = {p:path};
|
||||
if (elem.constructor === Array) {
|
||||
op.ld = elem[key];
|
||||
} else if (typeof elem === 'object') {
|
||||
op.od = elem[key];
|
||||
} else { throw new Error('bad path'); }
|
||||
return this.submitOp([op], cb);
|
||||
},
|
||||
|
||||
insertAt: (path, pos, value, cb) ->
|
||||
{elem, key} = traverse @snapshot, path
|
||||
op = {p:path.concat pos}
|
||||
if elem[key].constructor == Array
|
||||
op.li = value
|
||||
else if typeof elem[key] == 'string'
|
||||
op.si = value
|
||||
@submitOp [op], cb
|
||||
insertAt(path, pos, value, cb) {
|
||||
const {elem, key} = traverse(this.snapshot, path);
|
||||
const op = {p:path.concat(pos)};
|
||||
if (elem[key].constructor === Array) {
|
||||
op.li = value;
|
||||
} else if (typeof elem[key] === 'string') {
|
||||
op.si = value;
|
||||
}
|
||||
return this.submitOp([op], cb);
|
||||
},
|
||||
|
||||
moveAt: (path, from, to, cb) ->
|
||||
op = [{p:path.concat(from), lm:to}]
|
||||
@submitOp op, cb
|
||||
moveAt(path, from, to, cb) {
|
||||
const op = [{p:path.concat(from), lm:to}];
|
||||
return this.submitOp(op, cb);
|
||||
},
|
||||
|
||||
addAt: (path, amount, cb) ->
|
||||
op = [{p:path, na:amount}]
|
||||
@submitOp op, cb
|
||||
addAt(path, amount, cb) {
|
||||
const op = [{p:path, na:amount}];
|
||||
return this.submitOp(op, cb);
|
||||
},
|
||||
|
||||
deleteTextAt: (path, length, pos, cb) ->
|
||||
{elem, key} = traverse @snapshot, path
|
||||
op = [{p:path.concat(pos), sd:elem[key][pos...(pos + length)]}]
|
||||
@submitOp op, cb
|
||||
deleteTextAt(path, length, pos, cb) {
|
||||
const {elem, key} = traverse(this.snapshot, path);
|
||||
const op = [{p:path.concat(pos), sd:elem[key].slice(pos, (pos + length))}];
|
||||
return this.submitOp(op, cb);
|
||||
},
|
||||
|
||||
addListener: (path, event, cb) ->
|
||||
l = {path, event, cb}
|
||||
@_listeners.push l
|
||||
l
|
||||
removeListener: (l) ->
|
||||
i = @_listeners.indexOf l
|
||||
return false if i < 0
|
||||
@_listeners.splice i, 1
|
||||
return true
|
||||
_register: ->
|
||||
@_listeners = []
|
||||
@on 'change', (op) ->
|
||||
for c in op
|
||||
if c.na != undefined or c.si != undefined or c.sd != undefined
|
||||
# no change to structure
|
||||
continue
|
||||
to_remove = []
|
||||
for l, i in @_listeners
|
||||
# Transform a dummy op by the incoming op to work out what
|
||||
# should happen to the listener.
|
||||
dummy = {p:l.path, na:0}
|
||||
xformed = @type.transformComponent [], dummy, c, 'left'
|
||||
if xformed.length == 0
|
||||
# The op was transformed to noop, so we should delete the listener.
|
||||
to_remove.push i
|
||||
else if xformed.length == 1
|
||||
# The op remained, so grab its new path into the listener.
|
||||
l.path = xformed[0].p
|
||||
else
|
||||
throw new Error "Bad assumption in json-api: xforming an 'si' op will always result in 0 or 1 components."
|
||||
to_remove.sort (a, b) -> b - a
|
||||
for i in to_remove
|
||||
@_listeners.splice i, 1
|
||||
@on 'remoteop', (op) ->
|
||||
for c in op
|
||||
match_path = if c.na == undefined then c.p[...c.p.length-1] else c.p
|
||||
for {path, event, cb} in @_listeners
|
||||
if pathEquals path, match_path
|
||||
switch event
|
||||
when 'insert'
|
||||
if c.li != undefined and c.ld == undefined
|
||||
cb(c.p[c.p.length-1], c.li)
|
||||
else if c.oi != undefined and c.od == undefined
|
||||
cb(c.p[c.p.length-1], c.oi)
|
||||
else if c.si != undefined
|
||||
cb(c.p[c.p.length-1], c.si)
|
||||
when 'delete'
|
||||
if c.li == undefined and c.ld != undefined
|
||||
cb(c.p[c.p.length-1], c.ld)
|
||||
else if c.oi == undefined and c.od != undefined
|
||||
cb(c.p[c.p.length-1], c.od)
|
||||
else if c.sd != undefined
|
||||
cb(c.p[c.p.length-1], c.sd)
|
||||
when 'replace'
|
||||
if c.li != undefined and c.ld != undefined
|
||||
cb(c.p[c.p.length-1], c.ld, c.li)
|
||||
else if c.oi != undefined and c.od != undefined
|
||||
cb(c.p[c.p.length-1], c.od, c.oi)
|
||||
when 'move'
|
||||
if c.lm != undefined
|
||||
cb(c.p[c.p.length-1], c.lm)
|
||||
when 'add'
|
||||
if c.na != undefined
|
||||
cb(c.na)
|
||||
else if (common = @type.commonPath match_path, path)?
|
||||
if event == 'child op'
|
||||
if match_path.length == path.length == common
|
||||
throw new Error "paths match length and have commonality, but aren't equal?"
|
||||
child_path = c.p[common+1..]
|
||||
cb(child_path, c)
|
||||
addListener(path, event, cb) {
|
||||
const l = {path, event, cb};
|
||||
this._listeners.push(l);
|
||||
return l;
|
||||
},
|
||||
removeListener(l) {
|
||||
const i = this._listeners.indexOf(l);
|
||||
if (i < 0) { return false; }
|
||||
this._listeners.splice(i, 1);
|
||||
return true;
|
||||
},
|
||||
_register() {
|
||||
this._listeners = [];
|
||||
this.on('change', function(op) {
|
||||
return (() => {
|
||||
const result = [];
|
||||
for (let c of Array.from(op)) {
|
||||
var i;
|
||||
if ((c.na !== undefined) || (c.si !== undefined) || (c.sd !== undefined)) {
|
||||
// no change to structure
|
||||
continue;
|
||||
}
|
||||
var to_remove = [];
|
||||
for (i = 0; i < this._listeners.length; i++) {
|
||||
// Transform a dummy op by the incoming op to work out what
|
||||
// should happen to the listener.
|
||||
const l = this._listeners[i];
|
||||
const dummy = {p:l.path, na:0};
|
||||
const xformed = this.type.transformComponent([], dummy, c, 'left');
|
||||
if (xformed.length === 0) {
|
||||
// The op was transformed to noop, so we should delete the listener.
|
||||
to_remove.push(i);
|
||||
} else if (xformed.length === 1) {
|
||||
// The op remained, so grab its new path into the listener.
|
||||
l.path = xformed[0].p;
|
||||
} else {
|
||||
throw new Error("Bad assumption in json-api: xforming an 'si' op will always result in 0 or 1 components.");
|
||||
}
|
||||
}
|
||||
to_remove.sort((a, b) => b - a);
|
||||
result.push((() => {
|
||||
const result1 = [];
|
||||
for (i of Array.from(to_remove)) {
|
||||
result1.push(this._listeners.splice(i, 1));
|
||||
}
|
||||
return result1;
|
||||
})());
|
||||
}
|
||||
return result;
|
||||
})();
|
||||
});
|
||||
return this.on('remoteop', function(op) {
|
||||
return (() => {
|
||||
const result = [];
|
||||
for (var c of Array.from(op)) {
|
||||
var match_path = c.na === undefined ? c.p.slice(0, c.p.length-1) : c.p;
|
||||
result.push((() => {
|
||||
const result1 = [];
|
||||
for (let {path, event, cb} of Array.from(this._listeners)) {
|
||||
var common;
|
||||
if (pathEquals(path, match_path)) {
|
||||
switch (event) {
|
||||
case 'insert':
|
||||
if ((c.li !== undefined) && (c.ld === undefined)) {
|
||||
result1.push(cb(c.p[c.p.length-1], c.li));
|
||||
} else if ((c.oi !== undefined) && (c.od === undefined)) {
|
||||
result1.push(cb(c.p[c.p.length-1], c.oi));
|
||||
} else if (c.si !== undefined) {
|
||||
result1.push(cb(c.p[c.p.length-1], c.si));
|
||||
} else {
|
||||
result1.push(undefined);
|
||||
}
|
||||
break;
|
||||
case 'delete':
|
||||
if ((c.li === undefined) && (c.ld !== undefined)) {
|
||||
result1.push(cb(c.p[c.p.length-1], c.ld));
|
||||
} else if ((c.oi === undefined) && (c.od !== undefined)) {
|
||||
result1.push(cb(c.p[c.p.length-1], c.od));
|
||||
} else if (c.sd !== undefined) {
|
||||
result1.push(cb(c.p[c.p.length-1], c.sd));
|
||||
} else {
|
||||
result1.push(undefined);
|
||||
}
|
||||
break;
|
||||
case 'replace':
|
||||
if ((c.li !== undefined) && (c.ld !== undefined)) {
|
||||
result1.push(cb(c.p[c.p.length-1], c.ld, c.li));
|
||||
} else if ((c.oi !== undefined) && (c.od !== undefined)) {
|
||||
result1.push(cb(c.p[c.p.length-1], c.od, c.oi));
|
||||
} else {
|
||||
result1.push(undefined);
|
||||
}
|
||||
break;
|
||||
case 'move':
|
||||
if (c.lm !== undefined) {
|
||||
result1.push(cb(c.p[c.p.length-1], c.lm));
|
||||
} else {
|
||||
result1.push(undefined);
|
||||
}
|
||||
break;
|
||||
case 'add':
|
||||
if (c.na !== undefined) {
|
||||
result1.push(cb(c.na));
|
||||
} else {
|
||||
result1.push(undefined);
|
||||
}
|
||||
break;
|
||||
default:
|
||||
result1.push(undefined);
|
||||
}
|
||||
} else if ((common = this.type.commonPath(match_path, path)) != null) {
|
||||
if (event === 'child op') {
|
||||
if (match_path.length === path.length && path.length === common) {
|
||||
throw new Error("paths match length and have commonality, but aren't equal?");
|
||||
}
|
||||
const child_path = c.p.slice(common+1);
|
||||
result1.push(cb(child_path, c));
|
||||
} else {
|
||||
result1.push(undefined);
|
||||
}
|
||||
} else {
|
||||
result1.push(undefined);
|
||||
}
|
||||
}
|
||||
return result1;
|
||||
})());
|
||||
}
|
||||
return result;
|
||||
})();
|
||||
});
|
||||
}
|
||||
};
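A hedged sketch of how this API reads once mixed into a ShareJS document object (doc, its snapshot, and the callback cb are hypothetical; submitOp is supplied by the client Doc):

// doc.snapshot = { todos: ["buy milk"] }
const todos = doc.at("todos");            // SubDoc bound to path ['todos']
todos.push("write tests", cb);            // submits {p: ['todos', 1], li: 'write tests'}
doc.at("todos", 0).insert(4, "oat ", cb); // submits {p: ['todos', 0, 4], si: 'oat '}
// snapshot afterwards: { todos: ["buy oat milk", "write tests"] }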
|
||||
|
|
|
@ -1,441 +1,534 @@
|
|||
# This is the implementation of the JSON OT type.
|
||||
#
|
||||
# Spec is here: https://github.com/josephg/ShareJS/wiki/JSON-Operations
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
// This is the implementation of the JSON OT type.
|
||||
//
|
||||
// Spec is here: https://github.com/josephg/ShareJS/wiki/JSON-Operations
|
||||
|
||||
if WEB?
|
||||
text = exports.types.text
|
||||
else
|
||||
text = require './text'
|
||||
let text;
|
||||
if (typeof WEB !== 'undefined' && WEB !== null) {
|
||||
({
|
||||
text
|
||||
} = exports.types);
|
||||
} else {
|
||||
text = require('./text');
|
||||
}
|
||||
|
||||
json = {}
|
||||
const json = {};
|
||||
|
||||
json.name = 'json'
|
||||
json.name = 'json';
|
||||
|
||||
json.create = -> null
|
||||
json.create = () => null;
|
||||
|
||||
json.invertComponent = (c) ->
|
||||
c_ = {p: c.p}
|
||||
c_.sd = c.si if c.si != undefined
|
||||
c_.si = c.sd if c.sd != undefined
|
||||
c_.od = c.oi if c.oi != undefined
|
||||
c_.oi = c.od if c.od != undefined
|
||||
c_.ld = c.li if c.li != undefined
|
||||
c_.li = c.ld if c.ld != undefined
|
||||
c_.na = -c.na if c.na != undefined
|
||||
if c.lm != undefined
|
||||
c_.lm = c.p[c.p.length-1]
|
||||
c_.p = c.p[0...c.p.length - 1].concat([c.lm])
|
||||
c_
|
||||
json.invertComponent = function(c) {
|
||||
const c_ = {p: c.p};
|
||||
if (c.si !== undefined) { c_.sd = c.si; }
|
||||
if (c.sd !== undefined) { c_.si = c.sd; }
|
||||
if (c.oi !== undefined) { c_.od = c.oi; }
|
||||
if (c.od !== undefined) { c_.oi = c.od; }
|
||||
if (c.li !== undefined) { c_.ld = c.li; }
|
||||
if (c.ld !== undefined) { c_.li = c.ld; }
|
||||
if (c.na !== undefined) { c_.na = -c.na; }
|
||||
if (c.lm !== undefined) {
|
||||
c_.lm = c.p[c.p.length-1];
|
||||
c_.p = c.p.slice(0, c.p.length - 1).concat([c.lm]);
|
||||
}
|
||||
return c_;
|
||||
};
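Illustrative inversions (the values are made up):

console.log(json.invertComponent({ p: [2], li: "x" }));  // { p: [2], ld: 'x' }
console.log(json.invertComponent({ p: ["n"], na: 5 }));  // { p: ['n'], na: -5 }
// a list move inverts by swapping source and destination indices:
console.log(json.invertComponent({ p: [0, 3], lm: 1 })); // { p: [0, 1], lm: 3 }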
|
||||
|
||||
json.invert = (op) -> json.invertComponent c for c in op.slice().reverse()
|
||||
json.invert = op => Array.from(op.slice().reverse()).map((c) => json.invertComponent(c));
|
||||
|
||||
json.checkValidOp = (op) ->
|
||||
json.checkValidOp = function(op) {};
|
||||
|
||||
isArray = (o) -> Object.prototype.toString.call(o) == '[object Array]'
|
||||
json.checkList = (elem) ->
|
||||
throw new Error 'Referenced element not a list' unless isArray(elem)
|
||||
const isArray = o => Object.prototype.toString.call(o) === '[object Array]';
|
||||
json.checkList = function(elem) {
|
||||
if (!isArray(elem)) { throw new Error('Referenced element not a list'); }
|
||||
};
|
||||
|
||||
json.checkObj = (elem) ->
|
||||
throw new Error "Referenced element not an object (it was #{JSON.stringify elem})" unless elem.constructor is Object
|
||||
json.checkObj = function(elem) {
|
||||
if (elem.constructor !== Object) { throw new Error(`Referenced element not an object (it was ${JSON.stringify(elem)})`); }
|
||||
};
|
||||
|
||||
json.apply = (snapshot, op) ->
|
||||
json.checkValidOp op
|
||||
op = clone op
|
||||
json.apply = function(snapshot, op) {
|
||||
json.checkValidOp(op);
|
||||
op = clone(op);
|
||||
|
||||
container = {data: clone snapshot}
|
||||
const container = {data: clone(snapshot)};
|
||||
|
||||
try
|
||||
for c, i in op
|
||||
parent = null
|
||||
parentkey = null
|
||||
elem = container
|
||||
key = 'data'
|
||||
try {
|
||||
for (let i = 0; i < op.length; i++) {
|
||||
const c = op[i];
|
||||
let parent = null;
|
||||
let parentkey = null;
|
||||
let elem = container;
|
||||
let key = 'data';
|
||||
|
||||
for p in c.p
|
||||
parent = elem
|
||||
parentkey = key
|
||||
elem = elem[key]
|
||||
key = p
|
||||
for (let p of Array.from(c.p)) {
|
||||
parent = elem;
|
||||
parentkey = key;
|
||||
elem = elem[key];
|
||||
key = p;
|
||||
|
||||
throw new Error 'Path invalid' unless parent?
|
||||
if (parent == null) { throw new Error('Path invalid'); }
|
||||
}
|
||||
|
||||
if c.na != undefined
|
||||
# Number add
|
||||
throw new Error 'Referenced element not a number' unless typeof elem[key] is 'number'
|
||||
elem[key] += c.na
|
||||
if (c.na !== undefined) {
|
||||
// Number add
|
||||
if (typeof elem[key] !== 'number') { throw new Error('Referenced element not a number'); }
|
||||
elem[key] += c.na;
|
||||
|
||||
else if c.si != undefined
|
||||
# String insert
|
||||
throw new Error "Referenced element not a string (it was #{JSON.stringify elem})" unless typeof elem is 'string'
|
||||
parent[parentkey] = elem[...key] + c.si + elem[key..]
|
||||
else if c.sd != undefined
|
||||
# String delete
|
||||
throw new Error 'Referenced element not a string' unless typeof elem is 'string'
|
||||
throw new Error 'Deleted string does not match' unless elem[key...key + c.sd.length] == c.sd
|
||||
parent[parentkey] = elem[...key] + elem[key + c.sd.length..]
|
||||
} else if (c.si !== undefined) {
|
||||
// String insert
|
||||
if (typeof elem !== 'string') { throw new Error(`Referenced element not a string (it was ${JSON.stringify(elem)})`); }
|
||||
parent[parentkey] = elem.slice(0, key) + c.si + elem.slice(key);
|
||||
} else if (c.sd !== undefined) {
|
||||
// String delete
|
||||
if (typeof elem !== 'string') { throw new Error('Referenced element not a string'); }
|
||||
if (elem.slice(key, key + c.sd.length) !== c.sd) { throw new Error('Deleted string does not match'); }
|
||||
parent[parentkey] = elem.slice(0, key) + elem.slice(key + c.sd.length);
|
||||
|
||||
else if c.li != undefined && c.ld != undefined
|
||||
# List replace
|
||||
json.checkList elem
|
||||
} else if ((c.li !== undefined) && (c.ld !== undefined)) {
|
||||
// List replace
|
||||
json.checkList(elem);
|
||||
|
||||
# Should check the list element matches c.ld
|
||||
elem[key] = c.li
|
||||
else if c.li != undefined
|
||||
# List insert
|
||||
json.checkList elem
|
||||
// Should check the list element matches c.ld
|
||||
elem[key] = c.li;
|
||||
} else if (c.li !== undefined) {
|
||||
// List insert
|
||||
json.checkList(elem);
|
||||
|
||||
elem.splice key, 0, c.li
|
||||
else if c.ld != undefined
|
||||
# List delete
|
||||
json.checkList elem
|
||||
elem.splice(key, 0, c.li);
|
||||
} else if (c.ld !== undefined) {
|
||||
// List delete
|
||||
json.checkList(elem);
|
||||
|
||||
# Should check the list element matches c.ld here too.
|
||||
elem.splice key, 1
|
||||
else if c.lm != undefined
|
||||
# List move
|
||||
json.checkList elem
|
||||
if c.lm != key
|
||||
e = elem[key]
|
||||
# Remove it...
|
||||
elem.splice key, 1
|
||||
# And insert it back.
|
||||
elem.splice c.lm, 0, e
|
||||
// Should check the list element matches c.ld here too.
|
||||
elem.splice(key, 1);
|
||||
} else if (c.lm !== undefined) {
|
||||
// List move
|
||||
json.checkList(elem);
|
||||
if (c.lm !== key) {
|
||||
const e = elem[key];
|
||||
// Remove it...
|
||||
elem.splice(key, 1);
|
||||
// And insert it back.
|
||||
elem.splice(c.lm, 0, e);
|
||||
}
|
||||
|
||||
else if c.oi != undefined
|
||||
# Object insert / replace
|
||||
json.checkObj elem
|
||||
} else if (c.oi !== undefined) {
|
||||
// Object insert / replace
|
||||
json.checkObj(elem);
|
||||
|
||||
# Should check that elem[key] == c.od
|
||||
elem[key] = c.oi
|
||||
else if c.od != undefined
|
||||
# Object delete
|
||||
json.checkObj elem
|
||||
// Should check that elem[key] == c.od
|
||||
elem[key] = c.oi;
|
||||
} else if (c.od !== undefined) {
|
||||
// Object delete
|
||||
json.checkObj(elem);
|
||||
|
||||
# Should check that elem[key] == c.od
|
||||
delete elem[key]
|
||||
else
|
||||
throw new Error 'invalid / missing instruction in op'
|
||||
catch error
|
||||
# TODO: Roll back all already applied changes. Write tests before implementing this code.
|
||||
throw error
|
||||
// Should check that elem[key] == c.od
|
||||
delete elem[key];
|
||||
} else {
|
||||
throw new Error('invalid / missing instruction in op');
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
// TODO: Roll back all already applied changes. Write tests before implementing this code.
|
||||
throw error;
|
||||
}
|
||||
|
||||
container.data
|
||||
return container.data;
|
||||
};
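A worked application of two components against a hypothetical snapshot:

let snap = { title: "hello", tags: ["a", "c"] };
snap = json.apply(snap, [{ p: ["title", 5], si: " world" }]); // string insert at offset 5
snap = json.apply(snap, [{ p: ["tags", 1], li: "b" }]);       // list insert at index 1
console.log(snap); // { title: 'hello world', tags: ['a', 'b', 'c'] }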
|
||||
|
||||
# Checks whether two paths, p1 and p2, match.
|
||||
json.pathMatches = (p1, p2, ignoreLast) ->
|
||||
return false unless p1.length == p2.length
|
||||
// Checks whether two paths, p1 and p2, match.
|
||||
json.pathMatches = function(p1, p2, ignoreLast) {
|
||||
if (p1.length !== p2.length) { return false; }
|
||||
|
||||
for p, i in p1
|
||||
return false if p != p2[i] and (!ignoreLast or i != p1.length - 1)
|
||||
for (let i = 0; i < p1.length; i++) {
|
||||
const p = p1[i];
|
||||
if ((p !== p2[i]) && (!ignoreLast || (i !== (p1.length - 1)))) { return false; }
|
||||
}
|
||||
|
||||
true
|
||||
return true;
|
||||
};
|
||||
|
||||
json.append = (dest, c) ->
|
||||
c = clone c
|
||||
if dest.length != 0 and json.pathMatches c.p, (last = dest[dest.length - 1]).p
|
||||
if last.na != undefined and c.na != undefined
|
||||
dest[dest.length - 1] = { p: last.p, na: last.na + c.na }
|
||||
else if last.li != undefined and c.li == undefined and c.ld == last.li
|
||||
# insert immediately followed by delete becomes a noop.
|
||||
if last.ld != undefined
|
||||
# leave the delete part of the replace
|
||||
delete last.li
|
||||
else
|
||||
dest.pop()
|
||||
else if last.od != undefined and last.oi == undefined and
|
||||
c.oi != undefined and c.od == undefined
|
||||
last.oi = c.oi
|
||||
else if c.lm != undefined and c.p[c.p.length-1] == c.lm
|
||||
null # don't do anything
|
||||
else
|
||||
dest.push c
|
||||
else
|
||||
dest.push c
|
||||
json.append = function(dest, c) {
|
||||
let last;
|
||||
c = clone(c);
|
||||
if ((dest.length !== 0) && json.pathMatches(c.p, (last = dest[dest.length - 1]).p)) {
|
||||
if ((last.na !== undefined) && (c.na !== undefined)) {
|
||||
return dest[dest.length - 1] = { p: last.p, na: last.na + c.na };
|
||||
} else if ((last.li !== undefined) && (c.li === undefined) && (c.ld === last.li)) {
|
||||
// insert immediately followed by delete becomes a noop.
|
||||
if (last.ld !== undefined) {
|
||||
// leave the delete part of the replace
|
||||
return delete last.li;
|
||||
} else {
|
||||
return dest.pop();
|
||||
}
|
||||
} else if ((last.od !== undefined) && (last.oi === undefined) &&
|
||||
(c.oi !== undefined) && (c.od === undefined)) {
|
||||
return last.oi = c.oi;
|
||||
} else if ((c.lm !== undefined) && (c.p[c.p.length-1] === c.lm)) {
|
||||
return null; // don't do anything
|
||||
} else {
|
||||
return dest.push(c);
|
||||
}
|
||||
} else {
|
||||
return dest.push(c);
|
||||
}
|
||||
};
|
||||
|
||||
json.compose = (op1, op2) ->
|
||||
json.checkValidOp op1
|
||||
json.checkValidOp op2
|
||||
json.compose = function(op1, op2) {
|
||||
json.checkValidOp(op1);
|
||||
json.checkValidOp(op2);
|
||||
|
||||
newOp = clone op1
|
||||
json.append newOp, c for c in op2
|
||||
const newOp = clone(op1);
|
||||
for (let c of Array.from(op2)) { json.append(newOp, c); }
|
||||
|
||||
newOp
|
||||
return newOp;
|
||||
};
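compose (via append) canonicalises adjacent components, as two illustrative cases show:

// two adds at the same path merge into one:
console.log(json.compose([{ p: ["n"], na: 2 }], [{ p: ["n"], na: 3 }]));
// -> [{ p: ['n'], na: 5 }]
// a list insert immediately followed by its own delete cancels out:
console.log(json.compose([{ p: [0], li: "x" }], [{ p: [0], ld: "x" }]));
// -> []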
|
||||
|
||||
json.normalize = (op) ->
|
||||
newOp = []
|
||||
json.normalize = function(op) {
|
||||
const newOp = [];
|
||||
|
||||
op = [op] unless isArray op
|
||||
if (!isArray(op)) { op = [op]; }
|
||||
|
||||
for c in op
|
||||
c.p ?= []
|
||||
json.append newOp, c
|
||||
for (let c of Array.from(op)) {
|
||||
if (c.p == null) { c.p = []; }
|
||||
json.append(newOp, c);
|
||||
}
|
||||
|
||||
newOp
|
||||
return newOp;
|
||||
};
|
||||
|
||||
# hax, copied from test/types/json. Apparently this is still the fastest way to deep clone an object, assuming
|
||||
# we have browser support for JSON.
|
||||
# http://jsperf.com/cloning-an-object/12
|
||||
clone = (o) -> JSON.parse(JSON.stringify o)
|
||||
// hax, copied from test/types/json. Apparently this is still the fastest way to deep clone an object, assuming
|
||||
// we have browser support for JSON.
|
||||
// http://jsperf.com/cloning-an-object/12
|
||||
var clone = o => JSON.parse(JSON.stringify(o));
|
||||
|
||||
json.commonPath = (p1, p2) ->
|
||||
p1 = p1.slice()
|
||||
p2 = p2.slice()
|
||||
p1.unshift('data')
|
||||
p2.unshift('data')
|
||||
p1 = p1[...p1.length-1]
|
||||
p2 = p2[...p2.length-1]
|
||||
return -1 if p2.length == 0
|
||||
i = 0
|
||||
while p1[i] == p2[i] && i < p1.length
|
||||
i++
|
||||
if i == p2.length
|
||||
return i-1
|
||||
return
|
||||
json.commonPath = function(p1, p2) {
|
||||
p1 = p1.slice();
|
||||
p2 = p2.slice();
|
||||
p1.unshift('data');
|
||||
p2.unshift('data');
|
||||
p1 = p1.slice(0, p1.length-1);
|
||||
p2 = p2.slice(0, p2.length-1);
|
||||
if (p2.length === 0) { return -1; }
|
||||
let i = 0;
|
||||
while ((p1[i] === p2[i]) && (i < p1.length)) {
|
||||
i++;
|
||||
if (i === p2.length) {
|
||||
return i-1;
|
||||
}
|
||||
}
|
||||
};
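Note that commonPath indexes into the paths after 'data' has been prepended and the final operand position dropped; two illustrative calls:

console.log(json.commonPath([0, "text", 5], [0, "text", 9])); // 2 (they share ['data', 0, 'text'])
console.log(json.commonPath(["a"], ["b"]));                   // 0 (both operate on the root object)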
|
||||
|
||||
# transform c so it applies to a document with otherC applied.
|
||||
json.transformComponent = (dest, c, otherC, type) ->
|
||||
c = clone c
|
||||
c.p.push(0) if c.na != undefined
|
||||
otherC.p.push(0) if otherC.na != undefined
|
||||
// transform c so it applies to a document with otherC applied.
|
||||
json.transformComponent = function(dest, c, otherC, type) {
|
||||
let oc;
|
||||
c = clone(c);
|
||||
if (c.na !== undefined) { c.p.push(0); }
|
||||
if (otherC.na !== undefined) { otherC.p.push(0); }
|
||||
|
||||
common = json.commonPath c.p, otherC.p
|
||||
common2 = json.commonPath otherC.p, c.p
|
||||
const common = json.commonPath(c.p, otherC.p);
|
||||
const common2 = json.commonPath(otherC.p, c.p);
|
||||
|
||||
cplength = c.p.length
|
||||
otherCplength = otherC.p.length
|
||||
const cplength = c.p.length;
|
||||
const otherCplength = otherC.p.length;
|
||||
|
||||
c.p.pop() if c.na != undefined # hax
|
||||
otherC.p.pop() if otherC.na != undefined
|
||||
if (c.na !== undefined) { c.p.pop(); } // hax
|
||||
if (otherC.na !== undefined) { otherC.p.pop(); }
|
||||
|
||||
if otherC.na
|
||||
if common2? && otherCplength >= cplength && otherC.p[common2] == c.p[common2]
|
||||
if c.ld != undefined
|
||||
oc = clone otherC
|
||||
oc.p = oc.p[cplength..]
|
||||
c.ld = json.apply clone(c.ld), [oc]
|
||||
else if c.od != undefined
|
||||
oc = clone otherC
|
||||
oc.p = oc.p[cplength..]
|
||||
c.od = json.apply clone(c.od), [oc]
|
||||
json.append dest, c
|
||||
return dest
|
||||
if (otherC.na) {
|
||||
if ((common2 != null) && (otherCplength >= cplength) && (otherC.p[common2] === c.p[common2])) {
|
||||
if (c.ld !== undefined) {
|
||||
oc = clone(otherC);
|
||||
oc.p = oc.p.slice(cplength);
|
||||
c.ld = json.apply(clone(c.ld), [oc]);
|
||||
} else if (c.od !== undefined) {
|
||||
oc = clone(otherC);
|
||||
oc.p = oc.p.slice(cplength);
|
||||
c.od = json.apply(clone(c.od), [oc]);
|
||||
}
|
||||
}
|
||||
json.append(dest, c);
|
||||
return dest;
|
||||
}
|
||||
|
||||
if common2? && otherCplength > cplength && c.p[common2] == otherC.p[common2]
|
||||
# transform based on c
|
||||
if c.ld != undefined
|
||||
oc = clone otherC
|
||||
oc.p = oc.p[cplength..]
|
||||
c.ld = json.apply clone(c.ld), [oc]
|
||||
else if c.od != undefined
|
||||
oc = clone otherC
|
||||
oc.p = oc.p[cplength..]
|
||||
c.od = json.apply clone(c.od), [oc]
|
||||
if ((common2 != null) && (otherCplength > cplength) && (c.p[common2] === otherC.p[common2])) {
|
||||
// transform based on c
|
||||
if (c.ld !== undefined) {
|
||||
oc = clone(otherC);
|
||||
oc.p = oc.p.slice(cplength);
|
||||
c.ld = json.apply(clone(c.ld), [oc]);
|
||||
} else if (c.od !== undefined) {
|
||||
oc = clone(otherC);
|
||||
oc.p = oc.p.slice(cplength);
|
||||
c.od = json.apply(clone(c.od), [oc]);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if common?
|
||||
commonOperand = cplength == otherCplength
|
||||
# transform based on otherC
|
||||
if otherC.na != undefined
|
||||
# this case is handled above due to icky path hax
|
||||
else if otherC.si != undefined || otherC.sd != undefined
|
||||
# String op vs string op - pass through to text type
|
||||
if c.si != undefined || c.sd != undefined
|
||||
throw new Error("must be a string?") unless commonOperand
|
||||
if (common != null) {
|
||||
let from, p, to;
|
||||
const commonOperand = cplength === otherCplength;
|
||||
// transform based on otherC
|
||||
if (otherC.na !== undefined) {
|
||||
// this case is handled above due to icky path hax
|
||||
} else if ((otherC.si !== undefined) || (otherC.sd !== undefined)) {
|
||||
// String op vs string op - pass through to text type
|
||||
if ((c.si !== undefined) || (c.sd !== undefined)) {
|
||||
if (!commonOperand) { throw new Error("must be a string?"); }
|
||||
|
||||
# Convert an op component to a text op component
|
||||
convert = (component) ->
|
||||
newC = p:component.p[component.p.length - 1]
|
||||
if component.si
|
||||
newC.i = component.si
|
||||
else
|
||||
newC.d = component.sd
|
||||
newC
|
||||
// Convert an op component to a text op component
|
||||
const convert = function(component) {
|
||||
const newC = {p:component.p[component.p.length - 1]};
|
||||
if (component.si) {
|
||||
newC.i = component.si;
|
||||
} else {
|
||||
newC.d = component.sd;
|
||||
}
|
||||
return newC;
|
||||
};
|
||||
|
||||
tc1 = convert c
|
||||
tc2 = convert otherC
|
||||
const tc1 = convert(c);
|
||||
const tc2 = convert(otherC);
|
||||
|
||||
res = []
|
||||
text._tc res, tc1, tc2, type
|
||||
for tc in res
|
||||
jc = { p: c.p[...common] }
|
||||
jc.p.push(tc.p)
|
||||
jc.si = tc.i if tc.i?
|
||||
jc.sd = tc.d if tc.d?
|
||||
json.append dest, jc
|
||||
return dest
|
||||
    else if otherC.li != undefined && otherC.ld != undefined
      if otherC.p[common] == c.p[common]
        # noop
        if !commonOperand
          # we're below the deleted element, so -> noop
          return dest
        else if c.ld != undefined
          # we're trying to delete the same element, -> noop
          if c.li != undefined and type == 'left'
            # we're both replacing one element with another. only one can
            # survive!
            c.ld = clone otherC.li
          else
            return dest
    else if otherC.li != undefined
      if c.li != undefined and c.ld == undefined and commonOperand and c.p[common] == otherC.p[common]
        # in li vs. li, left wins.
        if type == 'right'
          c.p[common]++
      else if otherC.p[common] <= c.p[common]
        c.p[common]++
        const res = [];
        text._tc(res, tc1, tc2, type);
        for (let tc of Array.from(res)) {
          const jc = { p: c.p.slice(0, common) };
          jc.p.push(tc.p);
          if (tc.i != null) { jc.si = tc.i; }
          if (tc.d != null) { jc.sd = tc.d; }
          json.append(dest, jc);
        }
        return dest;
      }
    } else if ((otherC.li !== undefined) && (otherC.ld !== undefined)) {
      if (otherC.p[common] === c.p[common]) {
        // noop
        if (!commonOperand) {
          // we're below the deleted element, so -> noop
          return dest;
        } else if (c.ld !== undefined) {
          // we're trying to delete the same element, -> noop
          if ((c.li !== undefined) && (type === 'left')) {
            // we're both replacing one element with another. only one can
            // survive!
            c.ld = clone(otherC.li);
          } else {
            return dest;
          }
        }
      }
    } else if (otherC.li !== undefined) {
      if ((c.li !== undefined) && (c.ld === undefined) && commonOperand && (c.p[common] === otherC.p[common])) {
        // in li vs. li, left wins.
        if (type === 'right') {
          c.p[common]++;
        }
      } else if (otherC.p[common] <= c.p[common]) {
        c.p[common]++;
      }

      if c.lm != undefined
        if commonOperand
          # otherC edits the same list we edit
          if otherC.p[common] <= c.lm
            c.lm++
          # changing c.from is handled above.
    else if otherC.ld != undefined
      if c.lm != undefined
        if commonOperand
          if otherC.p[common] == c.p[common]
            # they deleted the thing we're trying to move
            return dest
          # otherC edits the same list we edit
          p = otherC.p[common]
          from = c.p[common]
          to = c.lm
          if p < to || (p == to && from < to)
            c.lm--
      if (c.lm !== undefined) {
        if (commonOperand) {
          // otherC edits the same list we edit
          if (otherC.p[common] <= c.lm) {
            c.lm++;
          }
        }
      }
      // changing c.from is handled above.
    } else if (otherC.ld !== undefined) {
      if (c.lm !== undefined) {
        if (commonOperand) {
          if (otherC.p[common] === c.p[common]) {
            // they deleted the thing we're trying to move
            return dest;
          }
          // otherC edits the same list we edit
          p = otherC.p[common];
          from = c.p[common];
          to = c.lm;
          if ((p < to) || ((p === to) && (from < to))) {
            c.lm--;
          }
        }
      }

      if otherC.p[common] < c.p[common]
        c.p[common]--
      else if otherC.p[common] == c.p[common]
        if otherCplength < cplength
          # we're below the deleted element, so -> noop
          return dest
        else if c.ld != undefined
          if c.li != undefined
            # we're replacing, they're deleting. we become an insert.
            delete c.ld
          else
            # we're trying to delete the same element, -> noop
            return dest
    else if otherC.lm != undefined
      if c.lm != undefined and cplength == otherCplength
        # lm vs lm, here we go!
        from = c.p[common]
        to = c.lm
        otherFrom = otherC.p[common]
        otherTo = otherC.lm
        if otherFrom != otherTo
          # if otherFrom == otherTo, we don't need to change our op.
      if (otherC.p[common] < c.p[common]) {
        c.p[common]--;
      } else if (otherC.p[common] === c.p[common]) {
        if (otherCplength < cplength) {
          // we're below the deleted element, so -> noop
          return dest;
        } else if (c.ld !== undefined) {
          if (c.li !== undefined) {
            // we're replacing, they're deleting. we become an insert.
            delete c.ld;
          } else {
            // we're trying to delete the same element, -> noop
            return dest;
          }
        }
      }
    } else if (otherC.lm !== undefined) {
      if ((c.lm !== undefined) && (cplength === otherCplength)) {
        // lm vs lm, here we go!
        from = c.p[common];
        to = c.lm;
        const otherFrom = otherC.p[common];
        const otherTo = otherC.lm;
        if (otherFrom !== otherTo) {
          // if otherFrom == otherTo, we don't need to change our op.

          # where did my thing go?
          if from == otherFrom
            # they moved it! tie break.
            if type == 'left'
              c.p[common] = otherTo
              if from == to # ugh
                c.lm = otherTo
            else
              return dest
          else
            # they moved around it
            if from > otherFrom
              c.p[common]--
            if from > otherTo
              c.p[common]++
            else if from == otherTo
              if otherFrom > otherTo
                c.p[common]++
                if from == to # ugh, again
                  c.lm++
          // where did my thing go?
          if (from === otherFrom) {
            // they moved it! tie break.
            if (type === 'left') {
              c.p[common] = otherTo;
              if (from === to) { // ugh
                c.lm = otherTo;
              }
            } else {
              return dest;
            }
          } else {
            // they moved around it
            if (from > otherFrom) {
              c.p[common]--;
            }
            if (from > otherTo) {
              c.p[common]++;
            } else if (from === otherTo) {
              if (otherFrom > otherTo) {
                c.p[common]++;
                if (from === to) { // ugh, again
                  c.lm++;
                }
              }
            }

            # step 2: where am i going to put it?
            if to > otherFrom
              c.lm--
            else if to == otherFrom
              if to > from
                c.lm--
            if to > otherTo
              c.lm++
            else if to == otherTo
              # if we're both moving in the same direction, tie break
              if (otherTo > otherFrom and to > from) or
                  (otherTo < otherFrom and to < from)
                if type == 'right'
                  c.lm++
              else
                if to > from
                  c.lm++
                else if to == otherFrom
                  c.lm--
      else if c.li != undefined and c.ld == undefined and commonOperand
        # li
        from = otherC.p[common]
        to = otherC.lm
        p = c.p[common]
        if p > from
          c.p[common]--
        if p > to
          c.p[common]++
      else
        # ld, ld+li, si, sd, na, oi, od, oi+od, any li on an element beneath
        # the lm
        #
        # i.e. things care about where their item is after the move.
        from = otherC.p[common]
        to = otherC.lm
        p = c.p[common]
        if p == from
          c.p[common] = to
        else
          if p > from
            c.p[common]--
          if p > to
            c.p[common]++
          else if p == to
            if from > to
              c.p[common]++
    else if otherC.oi != undefined && otherC.od != undefined
      if c.p[common] == otherC.p[common]
        if c.oi != undefined and commonOperand
          # we inserted where someone else replaced
          if type == 'right'
            # left wins
            return dest
          else
            # we win, make our op replace what they inserted
            c.od = otherC.oi
        else
          # -> noop if the other component is deleting the same object (or any
          # parent)
          return dest
    else if otherC.oi != undefined
      if c.oi != undefined and c.p[common] == otherC.p[common]
        # left wins if we try to insert at the same place
        if type == 'left'
          json.append dest, {p:c.p, od:otherC.oi}
        else
          return dest
    else if otherC.od != undefined
      if c.p[common] == otherC.p[common]
        return dest if !commonOperand
        if c.oi != undefined
          delete c.od
        else
          return dest
            // step 2: where am i going to put it?
            if (to > otherFrom) {
              c.lm--;
            } else if (to === otherFrom) {
              if (to > from) {
                c.lm--;
              }
            }
            if (to > otherTo) {
              c.lm++;
            } else if (to === otherTo) {
              // if we're both moving in the same direction, tie break
              if (((otherTo > otherFrom) && (to > from)) ||
                  ((otherTo < otherFrom) && (to < from))) {
                if (type === 'right') {
                  c.lm++;
                }
              } else {
                if (to > from) {
                  c.lm++;
                } else if (to === otherFrom) {
                  c.lm--;
                }
              }
            }
          }
        }
      } else if ((c.li !== undefined) && (c.ld === undefined) && commonOperand) {
        // li
        from = otherC.p[common];
        to = otherC.lm;
        p = c.p[common];
        if (p > from) {
          c.p[common]--;
        }
        if (p > to) {
          c.p[common]++;
        }
      } else {
        // ld, ld+li, si, sd, na, oi, od, oi+od, any li on an element beneath
        // the lm
        //
        // i.e. things care about where their item is after the move.
        from = otherC.p[common];
        to = otherC.lm;
        p = c.p[common];
        if (p === from) {
          c.p[common] = to;
        } else {
          if (p > from) {
            c.p[common]--;
          }
          if (p > to) {
            c.p[common]++;
          } else if (p === to) {
            if (from > to) {
              c.p[common]++;
            }
          }
        }
      }
    } else if ((otherC.oi !== undefined) && (otherC.od !== undefined)) {
      if (c.p[common] === otherC.p[common]) {
        if ((c.oi !== undefined) && commonOperand) {
          // we inserted where someone else replaced
          if (type === 'right') {
            // left wins
            return dest;
          } else {
            // we win, make our op replace what they inserted
            c.od = otherC.oi;
          }
        } else {
          // -> noop if the other component is deleting the same object (or any
          // parent)
          return dest;
        }
      }
    } else if (otherC.oi !== undefined) {
      if ((c.oi !== undefined) && (c.p[common] === otherC.p[common])) {
        // left wins if we try to insert at the same place
        if (type === 'left') {
          json.append(dest, {p:c.p, od:otherC.oi});
        } else {
          return dest;
        }
      }
    } else if (otherC.od !== undefined) {
      if (c.p[common] === otherC.p[common]) {
        if (!commonOperand) { return dest; }
        if (c.oi !== undefined) {
          delete c.od;
        } else {
          return dest;
        }
      }
    }
  }

  json.append dest, c
  return dest
  json.append(dest, c);
  return dest;
};

if WEB?
  exports.types ||= {}
if (typeof WEB !== 'undefined' && WEB !== null) {
  if (!exports.types) { exports.types = {}; }

  # This is kind of awful - come up with a better way to hook this helper code up.
  exports._bt(json, json.transformComponent, json.checkValidOp, json.append)
  // This is kind of awful - come up with a better way to hook this helper code up.
  exports._bt(json, json.transformComponent, json.checkValidOp, json.append);

  # [] is used to prevent closure from renaming types.text
  exports.types.json = json
else
  module.exports = json
  // [] is used to prevent closure from renaming types.text
  exports.types.json = json;
} else {
  module.exports = json;

  require('./helpers').bootstrapTransform(json, json.transformComponent, json.checkValidOp, json.append)
  require('./helpers').bootstrapTransform(json, json.transformComponent, json.checkValidOp, json.append);
}
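A minimal sketch of the li-vs-li tie-break implemented above, assuming the converted type is exported as `json` and exposes the standard ShareJS `transform(op, otherOp, side)` entry point (not part of the commit):

    // Two clients insert different elements at list index 0.
    const left  = [{p: [0], li: 'a'}];
    const right = [{p: [0], li: 'b'}];
    json.transform(right, left, 'right'); // => [{p: [1], li: 'b'}] — shifted past the left insert
    json.transform(left, right, 'left');  // => [{p: [0], li: 'a'}] — left wins the slot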
Two file diffs suppressed because they are too large.
@@ -1,42 +1,52 @@
# A synchronous processing queue. The queue calls process on the arguments,
# ensuring that process() is only executing once at a time.
#
# process(data, callback) _MUST_ eventually call its callback.
#
# Example:
#
# queue = require 'syncqueue'
#
# fn = queue (data, callback) ->
#   asyncthing data, ->
#     callback(321)
#
# fn(1)
# fn(2)
# fn(3, (result) -> console.log(result))
#
# ^--- async thing will only be running once at any time.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
// A synchronous processing queue. The queue calls process on the arguments,
// ensuring that process() is only executing once at a time.
//
// process(data, callback) _MUST_ eventually call its callback.
//
// Example:
//
// queue = require 'syncqueue'
//
// fn = queue (data, callback) ->
//   asyncthing data, ->
//     callback(321)
//
// fn(1)
// fn(2)
// fn(3, (result) -> console.log(result))
//
// ^--- async thing will only be running once at any time.

module.exports = (process) ->
  throw new Error('process is not a function') unless typeof process == 'function'
  queue = []
module.exports = function(process) {
  if (typeof process !== 'function') { throw new Error('process is not a function'); }
  const queue = [];

  enqueue = (data, callback) ->
    queue.push [data, callback]
    flush()
  const enqueue = function(data, callback) {
    queue.push([data, callback]);
    return flush();
  };

  enqueue.busy = false
  enqueue.busy = false;

  flush = ->
    return if enqueue.busy or queue.length == 0
  var flush = function() {
    if (enqueue.busy || (queue.length === 0)) { return; }

    enqueue.busy = true
    [data, callback] = queue.shift()
    process data, (result...) -> # TODO: Make this not use varargs - varargs are really slow.
      enqueue.busy = false
      # This is called after busy = false so a user can check if enqueue.busy is set in the callback.
      callback.apply null, result if callback
      flush()
    enqueue.busy = true;
    const [data, callback] = Array.from(queue.shift());
    return process(data, function(...result) { // TODO: Make this not use varargs - varargs are really slow.
      enqueue.busy = false;
      // This is called after busy = false so a user can check if enqueue.busy is set in the callback.
      if (callback) { callback.apply(null, result); }
      return flush();
    });
  };

  enqueue
  return enqueue;
};
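The CoffeeScript example from the header, as it would look against the converted JS module (a sketch; the timeout stands in for any async work):

    const queue = require('./syncqueue');
    const fn = queue(function(data, callback) {
      setTimeout(() => callback(data * 2), 10); // only one of these runs at a time
    });
    fn(1);
    fn(2);
    fn(3, (result) => console.log(result)); // logs 6 once the queue reaches it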
@@ -1,38 +1,48 @@
# This is a really simple OT type. It's not compiled with the web client, but it could be.
#
# It's mostly included for demonstration purposes and it's used in a lot of unit tests.
#
# This defines a really simple text OT type which only allows inserts. (No deletes.)
#
# Ops look like:
#   {position:#, text:"asdf"}
#
# Document snapshots look like:
#   {str:string}
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
// This is a really simple OT type. It's not compiled with the web client, but it could be.
//
// It's mostly included for demonstration purposes and it's used in a lot of unit tests.
//
// This defines a really simple text OT type which only allows inserts. (No deletes.)
//
// Ops look like:
//   {position:#, text:"asdf"}
//
// Document snapshots look like:
//   {str:string}

module.exports =
  # The name of the OT type. The type is stored in types[type.name]. The name can be
  # used in place of the actual type in all the API methods.
  name: 'simple'
module.exports = {
  // The name of the OT type. The type is stored in types[type.name]. The name can be
  // used in place of the actual type in all the API methods.
  name: 'simple',

  # Create a new document snapshot
  create: -> {str:""}
  // Create a new document snapshot
  create() { return {str:""}; },

  # Apply the given op to the document snapshot. Returns the new snapshot.
  #
  # The original snapshot should not be modified.
  apply: (snapshot, op) ->
    throw new Error 'Invalid position' unless 0 <= op.position <= snapshot.str.length
  // Apply the given op to the document snapshot. Returns the new snapshot.
  //
  // The original snapshot should not be modified.
  apply(snapshot, op) {
    if (!(0 <= op.position && op.position <= snapshot.str.length)) { throw new Error('Invalid position'); }

    str = snapshot.str
    str = str.slice(0, op.position) + op.text + str.slice(op.position)
    {str}
    let {
      str
    } = snapshot;
    str = str.slice(0, op.position) + op.text + str.slice(op.position);
    return {str};
  },

  # transform op1 by op2. Return transformed version of op1.
  # sym describes the symmetry of the op. It's 'left' or 'right' depending on whether the
  # op being transformed comes from the client or the server.
  transform: (op1, op2, sym) ->
    pos = op1.position
    pos += op2.text.length if op2.position < pos or (op2.position == pos and sym is 'left')
  // transform op1 by op2. Return transformed version of op1.
  // sym describes the symmetry of the op. It's 'left' or 'right' depending on whether the
  // op being transformed comes from the client or the server.
  transform(op1, op2, sym) {
    let pos = op1.position;
    if ((op2.position < pos) || ((op2.position === pos) && (sym === 'left'))) { pos += op2.text.length; }

    return {position:pos, text:op1.text}
    return {position:pos, text:op1.text};
  }
};
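A quick sketch of the 'simple' type in action, assuming it is required directly from this file (not part of the commit):

    const simple = require('./simple');
    let doc = simple.create();                             // {str: ""}
    doc = simple.apply(doc, {position: 0, text: 'hello'}); // {str: "hello"}
    // Concurrent inserts at the same position: the op transformed with
    // sym === 'left' is shifted past the other insert; the 'right' one stays put.
    simple.transform({position: 0, text: 'A'}, {position: 0, text: 'B'}, 'left');  // {position: 1, text: 'A'}
    simple.transform({position: 0, text: 'B'}, {position: 0, text: 'A'}, 'right'); // {position: 0, text: 'B'}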
@@ -1,42 +1,52 @@
# A synchronous processing queue. The queue calls process on the arguments,
# ensuring that process() is only executing once at a time.
#
# process(data, callback) _MUST_ eventually call its callback.
#
# Example:
#
# queue = require 'syncqueue'
#
# fn = queue (data, callback) ->
#   asyncthing data, ->
#     callback(321)
#
# fn(1)
# fn(2)
# fn(3, (result) -> console.log(result))
#
# ^--- async thing will only be running once at any time.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
// A synchronous processing queue. The queue calls process on the arguments,
// ensuring that process() is only executing once at a time.
//
// process(data, callback) _MUST_ eventually call its callback.
//
// Example:
//
// queue = require 'syncqueue'
//
// fn = queue (data, callback) ->
//   asyncthing data, ->
//     callback(321)
//
// fn(1)
// fn(2)
// fn(3, (result) -> console.log(result))
//
// ^--- async thing will only be running once at any time.

module.exports = (process) ->
  throw new Error('process is not a function') unless typeof process == 'function'
  queue = []
module.exports = function(process) {
  if (typeof process !== 'function') { throw new Error('process is not a function'); }
  const queue = [];

  enqueue = (data, callback) ->
    queue.push [data, callback]
    flush()
  const enqueue = function(data, callback) {
    queue.push([data, callback]);
    return flush();
  };

  enqueue.busy = false
  enqueue.busy = false;

  flush = ->
    return if enqueue.busy or queue.length == 0
  var flush = function() {
    if (enqueue.busy || (queue.length === 0)) { return; }

    enqueue.busy = true
    [data, callback] = queue.shift()
    process data, (result...) -> # TODO: Make this not use varargs - varargs are really slow.
      enqueue.busy = false
      # This is called after busy = false so a user can check if enqueue.busy is set in the callback.
      callback.apply null, result if callback
      flush()
    enqueue.busy = true;
    const [data, callback] = Array.from(queue.shift());
    return process(data, function(...result) { // TODO: Make this not use varargs - varargs are really slow.
      enqueue.busy = false;
      // This is called after busy = false so a user can check if enqueue.busy is set in the callback.
      if (callback) { callback.apply(null, result); }
      return flush();
    });
  };

  enqueue
  return enqueue;
};
@@ -1,32 +1,44 @@
# Text document API for text
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
// Text document API for text

text = require './text' if typeof WEB is 'undefined'
let text;
if (typeof WEB === 'undefined') { text = require('./text'); }

text.api =
  provides: {text:true}
text.api = {
  provides: {text:true},

  # The number of characters in the string
  getLength: -> @snapshot.length
  // The number of characters in the string
  getLength() { return this.snapshot.length; },

  # Get the text contents of a document
  getText: -> @snapshot
  // Get the text contents of a document
  getText() { return this.snapshot; },

  insert: (pos, text, callback) ->
    op = [{p:pos, i:text}]
  insert(pos, text, callback) {
    const op = [{p:pos, i:text}];

    @submitOp op, callback
    op
    this.submitOp(op, callback);
    return op;
  },

  del: (pos, length, callback) ->
    op = [{p:pos, d:@snapshot[pos...(pos + length)]}]
  del(pos, length, callback) {
    const op = [{p:pos, d:this.snapshot.slice(pos, (pos + length))}];

    @submitOp op, callback
    op
    this.submitOp(op, callback);
    return op;
  },

  _register: ->
    @on 'remoteop', (op) ->
      for component in op
        if component.i != undefined
          @emit 'insert', component.p, component.i
        else
          @emit 'delete', component.p, component.d
  _register() {
    return this.on('remoteop', function(op) {
      return Array.from(op).map((component) =>
        component.i !== undefined ?
          this.emit('insert', component.p, component.i)
        :
          this.emit('delete', component.p, component.d));
    });
  }
};
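For reference, the ops this mixin submits (a sketch; `doc` is a hypothetical ShareJS document object that has text.api mixed in, with a snapshot of 'hello world'):

    doc.insert(11, '!'); // submits [{p: 11, i: '!'}]
    doc.del(0, 6);       // submits [{p: 0, d: 'hello '}]
    doc.on('insert', (pos, text) => console.log('remote insert at', pos, text));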
@@ -1,43 +1,64 @@
# Text document API for text
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS205: Consider reworking code to avoid use of IIFEs
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
// Text document API for text

if WEB?
  type = exports.types['text-composable']
else
  type = require './text-composable'
let type;
if (typeof WEB !== 'undefined' && WEB !== null) {
  type = exports.types['text-composable'];
} else {
  type = require('./text-composable');
}

type.api =
  provides: {'text':true}
type.api = {
  provides: {'text':true},

  # The number of characters in the string
  'getLength': -> @snapshot.length
  // The number of characters in the string
  'getLength'() { return this.snapshot.length; },

  # Get the text contents of a document
  'getText': -> @snapshot
  // Get the text contents of a document
  'getText'() { return this.snapshot; },

  'insert': (pos, text, callback) ->
    op = type.normalize [pos, 'i':text, (@snapshot.length - pos)]
  'insert'(pos, text, callback) {
    const op = type.normalize([pos, {'i':text}, (this.snapshot.length - pos)]);

    @submitOp op, callback
    op
    this.submitOp(op, callback);
    return op;
  },

  'del': (pos, length, callback) ->
    op = type.normalize [pos, 'd':@snapshot[pos...(pos + length)], (@snapshot.length - pos - length)]
  'del'(pos, length, callback) {
    const op = type.normalize([pos, {'d':this.snapshot.slice(pos, (pos + length))}, (this.snapshot.length - pos - length)]);

    @submitOp op, callback
    op
    this.submitOp(op, callback);
    return op;
  },

  _register: ->
    @on 'remoteop', (op) ->
      pos = 0
      for component in op
        if typeof component is 'number'
          pos += component
        else if component.i != undefined
          @emit 'insert', pos, component.i
          pos += component.i.length
        else
          # delete
          @emit 'delete', pos, component.d
          # We don't increment pos, because the position
          # specified is after the delete has happened.
  _register() {
    return this.on('remoteop', function(op) {
      let pos = 0;
      return (() => {
        const result = [];
        for (let component of Array.from(op)) {
          if (typeof component === 'number') {
            result.push(pos += component);
          } else if (component.i !== undefined) {
            this.emit('insert', pos, component.i);
            result.push(pos += component.i.length);
          } else {
            // delete
            // We don't increment pos, because the position
            // specified is after the delete has happened.
            result.push(this.emit('delete', pos, component.d));
          }
        }
        return result;
      })();
    });
  }
};
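The ops built by 'insert' and 'del' above traverse the whole document; a sketch against a 5-character snapshot 'abcde':

    type.normalize([2, {'i': 'XY'}, 3]); // insert('XY') at pos 2 => [2, {i: 'XY'}, 3]
    type.normalize([1, {'d': 'bc'}, 2]); // del(1, 2)            => [1, {d: 'bc'}, 2]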
@@ -1,261 +1,315 @@
# An alternate composable implementation for text. This is much closer
# to the implementation used by google wave.
#
# Ops are lists of components which iterate over the whole document.
# Components are either:
#   A number N: Skip N characters in the original document
#   {i:'str'}: Insert 'str' at the current position in the document
#   {d:'str'}: Delete 'str', which appears at the current position in the document
#
# Eg: [3, {i:'hi'}, 5, {d:'internet'}]
#
# Snapshots are strings.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS205: Consider reworking code to avoid use of IIFEs
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
// An alternate composable implementation for text. This is much closer
// to the implementation used by google wave.
//
// Ops are lists of components which iterate over the whole document.
// Components are either:
//   A number N: Skip N characters in the original document
//   {i:'str'}: Insert 'str' at the current position in the document
//   {d:'str'}: Delete 'str', which appears at the current position in the document
//
// Eg: [3, {i:'hi'}, 5, {d:'internet'}]
//
// Snapshots are strings.

p = -> #require('util').debug
i = -> #require('util').inspect
let makeAppend;
const p = function() {}; //require('util').debug
const i = function() {}; //require('util').inspect

exports = if WEB? then {} else module.exports
const exports = (typeof WEB !== 'undefined' && WEB !== null) ? {} : module.exports;

exports.name = 'text-composable'
exports.name = 'text-composable';

exports.create = -> ''
exports.create = () => '';

# -------- Utility methods
// -------- Utility methods

checkOp = (op) ->
  throw new Error('Op must be an array of components') unless Array.isArray(op)
  last = null
  for c in op
    if typeof(c) == 'object'
      throw new Error("Invalid op component: #{i c}") unless (c.i? && c.i.length > 0) or (c.d? && c.d.length > 0)
    else
      throw new Error('Op components must be objects or numbers') unless typeof(c) == 'number'
      throw new Error('Skip components must be a positive number') unless c > 0
      throw new Error('Adjacent skip components should be added') if typeof(last) == 'number'
const checkOp = function(op) {
  if (!Array.isArray(op)) { throw new Error('Op must be an array of components'); }
  let last = null;
  return (() => {
    const result = [];
    for (let c of Array.from(op)) {
      if (typeof(c) === 'object') {
        if (((c.i == null) || !(c.i.length > 0)) && ((c.d == null) || !(c.d.length > 0))) { throw new Error(`Invalid op component: ${i(c)}`); }
      } else {
        if (typeof(c) !== 'number') { throw new Error('Op components must be objects or numbers'); }
        if (!(c > 0)) { throw new Error('Skip components must be a positive number'); }
        if (typeof(last) === 'number') { throw new Error('Adjacent skip components should be added'); }
      }

    last = c
      result.push(last = c);
    }
    return result;
  })();
};

# Makes a function for appending components to a given op.
# Exported for the randomOpGenerator.
exports._makeAppend = makeAppend = (op) -> (component) ->
  if component == 0 || component.i == '' || component.d == ''
    return
  else if op.length == 0
    op.push component
  else if typeof(component) == 'number' && typeof(op[op.length - 1]) == 'number'
    op[op.length - 1] += component
  else if component.i? && op[op.length - 1].i?
    op[op.length - 1].i += component.i
  else if component.d? && op[op.length - 1].d?
    op[op.length - 1].d += component.d
  else
    op.push component
// Makes a function for appending components to a given op.
// Exported for the randomOpGenerator.
exports._makeAppend = (makeAppend = op => (function(component) {
  if ((component === 0) || (component.i === '') || (component.d === '')) {
    return;
  } else if (op.length === 0) {
    return op.push(component);
  } else if ((typeof(component) === 'number') && (typeof(op[op.length - 1]) === 'number')) {
    return op[op.length - 1] += component;
  } else if ((component.i != null) && (op[op.length - 1].i != null)) {
    return op[op.length - 1].i += component.i;
  } else if ((component.d != null) && (op[op.length - 1].d != null)) {
    return op[op.length - 1].d += component.d;
  } else {
    return op.push(component);
  }
}));

# checkOp op
// checkOp op

# Makes 2 functions for taking components from the start of an op, and for peeking
# at the next op that could be taken.
makeTake = (op) ->
  # The index of the next component to take
  idx = 0
  # The offset into the component
  offset = 0
// Makes 2 functions for taking components from the start of an op, and for peeking
// at the next op that could be taken.
const makeTake = function(op) {
  // The index of the next component to take
  let idx = 0;
  // The offset into the component
  let offset = 0;

  # Take up to length n from the front of op. If n is null, take the next
  # op component. If indivisableField == 'd', delete components won't be separated.
  # If indivisableField == 'i', insert components won't be separated.
  take = (n, indivisableField) ->
    return null if idx == op.length
    #assert.notStrictEqual op.length, i, 'The op is too short to traverse the document'
  // Take up to length n from the front of op. If n is null, take the next
  // op component. If indivisableField == 'd', delete components won't be separated.
  // If indivisableField == 'i', insert components won't be separated.
  const take = function(n, indivisableField) {
    let c;
    if (idx === op.length) { return null; }
    //assert.notStrictEqual op.length, i, 'The op is too short to traverse the document'

    if typeof(op[idx]) == 'number'
      if !n? or op[idx] - offset <= n
        c = op[idx] - offset
        ++idx; offset = 0
        c
      else
        offset += n
        n
    else
      # Take from the string
      field = if op[idx].i then 'i' else 'd'
      c = {}
      if !n? or op[idx][field].length - offset <= n or field == indivisableField
        c[field] = op[idx][field][offset..]
        ++idx; offset = 0
      else
        c[field] = op[idx][field][offset...(offset + n)]
        offset += n
      c
    if (typeof(op[idx]) === 'number') {
      if ((n == null) || ((op[idx] - offset) <= n)) {
        c = op[idx] - offset;
        ++idx; offset = 0;
        return c;
      } else {
        offset += n;
        return n;
      }
    } else {
      // Take from the string
      const field = op[idx].i ? 'i' : 'd';
      c = {};
      if ((n == null) || ((op[idx][field].length - offset) <= n) || (field === indivisableField)) {
        c[field] = op[idx][field].slice(offset);
        ++idx; offset = 0;
      } else {
        c[field] = op[idx][field].slice(offset, (offset + n));
        offset += n;
      }
      return c;
    }
  };

  peekType = () ->
    op[idx]
  const peekType = () => op[idx];

  [take, peekType]
  return [take, peekType];
};

# Find and return the length of an op component
componentLength = (component) ->
  if typeof(component) == 'number'
    component
  else if component.i?
    component.i.length
  else
    component.d.length
// Find and return the length of an op component
const componentLength = function(component) {
  if (typeof(component) === 'number') {
    return component;
  } else if (component.i != null) {
    return component.i.length;
  } else {
    return component.d.length;
  }
};

# Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate
# adjacent inserts and deletes.
exports.normalize = (op) ->
  newOp = []
  append = makeAppend newOp
  append component for component in op
  newOp
// Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate
// adjacent inserts and deletes.
exports.normalize = function(op) {
  const newOp = [];
  const append = makeAppend(newOp);
  for (let component of Array.from(op)) { append(component); }
  return newOp;
};

# Apply the op to the string. Returns the new string.
exports.apply = (str, op) ->
  p "Applying #{i op} to '#{str}'"
  throw new Error('Snapshot should be a string') unless typeof(str) == 'string'
  checkOp op
// Apply the op to the string. Returns the new string.
exports.apply = function(str, op) {
  p(`Applying ${i(op)} to '${str}'`);
  if (typeof(str) !== 'string') { throw new Error('Snapshot should be a string'); }
  checkOp(op);

  pos = 0
  newDoc = []
  const pos = 0;
  const newDoc = [];

  for component in op
    if typeof(component) == 'number'
      throw new Error('The op is too long for this document') if component > str.length
      newDoc.push str[...component]
      str = str[component..]
    else if component.i?
      newDoc.push component.i
    else
      throw new Error("The deleted text '#{component.d}' doesn't match the next characters in the document '#{str[...component.d.length]}'") unless component.d == str[...component.d.length]
      str = str[component.d.length..]
  for (let component of Array.from(op)) {
    if (typeof(component) === 'number') {
      if (component > str.length) { throw new Error('The op is too long for this document'); }
      newDoc.push(str.slice(0, component));
      str = str.slice(component);
    } else if (component.i != null) {
      newDoc.push(component.i);
    } else {
      if (component.d !== str.slice(0, component.d.length)) { throw new Error(`The deleted text '${component.d}' doesn't match the next characters in the document '${str.slice(0, component.d.length)}'`); }
      str = str.slice(component.d.length);
    }
  }

  throw new Error("The applied op doesn't traverse the entire document") unless '' == str
  if ('' !== str) { throw new Error("The applied op doesn't traverse the entire document"); }

  newDoc.join ''
  return newDoc.join('');
};
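A sketch of apply on the component format described in the header, assuming the module is required as ./text-composable (not part of the commit):

    const type = require('./text-composable');
    // Skip 6, insert, then skip the remaining 5 — the op must traverse the whole string:
    type.apply('hello world', [6, {i: 'brave new '}, 5]); // => 'hello brave new world'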
# transform op1 by op2. Return transformed version of op1.
# op1 and op2 are unchanged by transform.
exports.transform = (op, otherOp, side) ->
  throw new Error "side (#{side}) must be 'left' or 'right'" unless side == 'left' or side == 'right'
// transform op1 by op2. Return transformed version of op1.
// op1 and op2 are unchanged by transform.
exports.transform = function(op, otherOp, side) {
  let component;
  if ((side !== 'left') && (side !== 'right')) { throw new Error(`side (${side}) must be 'left' or 'right'`); }

  checkOp op
  checkOp otherOp
  newOp = []
  checkOp(op);
  checkOp(otherOp);
  const newOp = [];

  append = makeAppend newOp
  [take, peek] = makeTake op
  const append = makeAppend(newOp);
  const [take, peek] = Array.from(makeTake(op));

  for component in otherOp
    if typeof(component) == 'number' # Skip
      length = component
      while length > 0
        chunk = take(length, 'i')
        throw new Error('The op traverses more elements than the document has') unless chunk != null
  for (component of Array.from(otherOp)) {
    var chunk, length;
    if (typeof(component) === 'number') { // Skip
      length = component;
      while (length > 0) {
        chunk = take(length, 'i');
        if (chunk === null) { throw new Error('The op traverses more elements than the document has'); }

        append chunk
        length -= componentLength chunk unless typeof(chunk) == 'object' && chunk.i?
    else if component.i? # Insert
      if side == 'left'
        # The left insert should go first.
        o = peek()
        append take() if o?.i
        append(chunk);
        if ((typeof(chunk) !== 'object') || (chunk.i == null)) { length -= componentLength(chunk); }
      }
    } else if (component.i != null) { // Insert
      if (side === 'left') {
        // The left insert should go first.
        const o = peek();
        if (o != null ? o.i : undefined) { append(take()); }
      }

      # Otherwise, skip the inserted text.
      append(component.i.length)
    else # Delete.
      #assert.ok component.d
      length = component.d.length
      while length > 0
        chunk = take(length, 'i')
        throw new Error('The op traverses more elements than the document has') unless chunk != null
      // Otherwise, skip the inserted text.
      append(component.i.length);
    } else { // Delete.
      //assert.ok component.d
      ({
        length
      } = component.d);
      while (length > 0) {
        chunk = take(length, 'i');
        if (chunk === null) { throw new Error('The op traverses more elements than the document has'); }

        if typeof(chunk) == 'number'
          length -= chunk
        else if chunk.i?
          append(chunk)
        else
          #assert.ok chunk.d
          # The delete is unnecessary now.
          length -= chunk.d.length
        if (typeof(chunk) === 'number') {
          length -= chunk;
        } else if (chunk.i != null) {
          append(chunk);
        } else {
          //assert.ok chunk.d
          // The delete is unnecessary now.
          length -= chunk.d.length;
        }
      }
    }
  }

  # Append extras from op1
  while (component = take())
    throw new Error "Remaining fragments in the op: #{i component}" unless component?.i?
    append component
  // Append extras from op1
  while (component = take()) {
    if ((component != null ? component.i : undefined) == null) { throw new Error(`Remaining fragments in the op: ${i(component)}`); }
    append(component);
  }

  newOp
  return newOp;
};
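A worked sketch of transform for two concurrent inserts into a 3-character document:

    const mine   = [{i: 'A'}, 3]; // insert 'A' at position 0
    const theirs = [3, {i: 'B'}]; // insert 'B' at position 3
    type.transform(mine, theirs, 'left');
    // => [{i: 'A'}, 4] — my insert keeps position 0 and now also skips their 'B'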
# Compose 2 ops into 1 op.
exports.compose = (op1, op2) ->
  p "COMPOSE #{i op1} + #{i op2}"
  checkOp op1
  checkOp op2
// Compose 2 ops into 1 op.
exports.compose = function(op1, op2) {
  let component;
  p(`COMPOSE ${i(op1)} + ${i(op2)}`);
  checkOp(op1);
  checkOp(op2);

  result = []
  const result = [];

  append = makeAppend result
  [take, _] = makeTake op1
  const append = makeAppend(result);
  const [take, _] = Array.from(makeTake(op1));

  for component in op2
    if typeof(component) == 'number' # Skip
      length = component
      while length > 0
        chunk = take(length, 'd')
        throw new Error('The op traverses more elements than the document has') unless chunk != null
  for (component of Array.from(op2)) {
    var chunk, length;
    if (typeof(component) === 'number') { // Skip
      length = component;
      while (length > 0) {
        chunk = take(length, 'd');
        if (chunk === null) { throw new Error('The op traverses more elements than the document has'); }

        append chunk
        length -= componentLength chunk unless typeof(chunk) == 'object' && chunk.d?
        append(chunk);
        if ((typeof(chunk) !== 'object') || (chunk.d == null)) { length -= componentLength(chunk); }
      }

    else if component.i? # Insert
      append {i:component.i}
    } else if (component.i != null) { // Insert
      append({i:component.i});

    else # Delete
      offset = 0
      while offset < component.d.length
        chunk = take(component.d.length - offset, 'd')
        throw new Error('The op traverses more elements than the document has') unless chunk != null
    } else { // Delete
      let offset = 0;
      while (offset < component.d.length) {
        chunk = take(component.d.length - offset, 'd');
        if (chunk === null) { throw new Error('The op traverses more elements than the document has'); }

        # If it's a delete, append it. If it's a skip, drop it and decrease length. If it's an insert, check the strings match, drop it and decrease length.
        if typeof(chunk) == 'number'
          append {d:component.d[offset...(offset + chunk)]}
          offset += chunk
        else if chunk.i?
          throw new Error("The deleted text doesn't match the inserted text") unless component.d[offset...(offset + chunk.i.length)] == chunk.i
          offset += chunk.i.length
          # The ops cancel each other out.
        else
          # Delete
          append chunk
        // If it's a delete, append it. If it's a skip, drop it and decrease length. If it's an insert, check the strings match, drop it and decrease length.
        if (typeof(chunk) === 'number') {
          append({d:component.d.slice(offset, (offset + chunk))});
          offset += chunk;
        } else if (chunk.i != null) {
          if (component.d.slice(offset, (offset + chunk.i.length)) !== chunk.i) { throw new Error("The deleted text doesn't match the inserted text"); }
          offset += chunk.i.length;
          // The ops cancel each other out.
        } else {
          // Delete
          append(chunk);
        }
      }
    }
  }

  # Append extras from op1
  while (component = take())
    throw new Error "Trailing stuff in op1 #{i component}" unless component?.d?
    append component
  // Append extras from op1
  while (component = take()) {
    if ((component != null ? component.d : undefined) == null) { throw new Error(`Trailing stuff in op1 ${i(component)}`); }
    append(component);
  }

  result
  return result;
};


invertComponent = (c) ->
  if typeof(c) == 'number'
    c
  else if c.i?
    {d:c.i}
  else
    {i:c.d}
const invertComponent = function(c) {
  if (typeof(c) === 'number') {
    return c;
  } else if (c.i != null) {
    return {d:c.i};
  } else {
    return {i:c.d};
  }
};

# Invert an op
exports.invert = (op) ->
  result = []
  append = makeAppend result
// Invert an op
exports.invert = function(op) {
  const result = [];
  const append = makeAppend(result);

  append(invertComponent component) for component in op
  for (let component of Array.from(op)) { append(invertComponent(component)); }

  result
  return result;
};

if window?
  window.ot ||= {}
  window.ot.types ||= {}
  window.ot.types.text = exports
if (typeof window !== 'undefined' && window !== null) {
  if (!window.ot) { window.ot = {}; }
  if (!window.ot.types) { window.ot.types = {}; }
  window.ot.types.text = exports;
}
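Sketches of compose and invert on the same component format:

    // Inserting 'ab' and then deleting the 'b' composes to inserting just 'a':
    type.compose([{i: 'ab'}], [1, {d: 'b'}]); // => [{i: 'a'}]
    // invert flips inserts and deletes, yielding the undo op:
    type.invert([6, {i: 'brave new '}, 5]);   // => [6, {d: 'brave new '}, 5]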
@@ -1,89 +1,118 @@
# Text document API for text-tp2
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS205: Consider reworking code to avoid use of IIFEs
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
// Text document API for text-tp2

if WEB?
  type = exports.types['text-tp2']
else
  type = require './text-tp2'
let type;
if (typeof WEB !== 'undefined' && WEB !== null) {
  type = exports.types['text-tp2'];
} else {
  type = require('./text-tp2');
}

{_takeDoc:takeDoc, _append:append} = type
const {_takeDoc:takeDoc, _append:append} = type;

appendSkipChars = (op, doc, pos, maxlength) ->
  while (maxlength == undefined || maxlength > 0) and pos.index < doc.data.length
    part = takeDoc doc, pos, maxlength, true
    maxlength -= part.length if maxlength != undefined and typeof part is 'string'
    append op, (part.length || part)
const appendSkipChars = (op, doc, pos, maxlength) => (() => {
  const result = [];
  while (((maxlength === undefined) || (maxlength > 0)) && (pos.index < doc.data.length)) {
    const part = takeDoc(doc, pos, maxlength, true);
    if ((maxlength !== undefined) && (typeof part === 'string')) { maxlength -= part.length; }
    result.push(append(op, (part.length || part)));
  }
  return result;
})();

type['api'] =
  'provides': {'text':true}
type['api'] = {
  'provides': {'text':true},

  # The number of characters in the string
  'getLength': -> @snapshot.charLength
  // The number of characters in the string
  'getLength'() { return this.snapshot.charLength; },

  # Flatten a document into a string
  'getText': ->
    strings = (elem for elem in @snapshot.data when typeof elem is 'string')
    strings.join ''
  // Flatten a document into a string
  'getText'() {
    const strings = (Array.from(this.snapshot.data).filter((elem) => typeof elem === 'string'));
    return strings.join('');
  },

  'insert': (pos, text, callback) ->
    pos = 0 if pos == undefined
  'insert'(pos, text, callback) {
    if (pos === undefined) { pos = 0; }

    op = []
    docPos = {index:0, offset:0}
    const op = [];
    const docPos = {index:0, offset:0};

    appendSkipChars op, @snapshot, docPos, pos
    append op, {'i':text}
    appendSkipChars op, @snapshot, docPos
    appendSkipChars(op, this.snapshot, docPos, pos);
    append(op, {'i':text});
    appendSkipChars(op, this.snapshot, docPos);

    @submitOp op, callback
    op
    this.submitOp(op, callback);
    return op;
  },

  'del': (pos, length, callback) ->
    op = []
    docPos = {index:0, offset:0}
  'del'(pos, length, callback) {
    const op = [];
    const docPos = {index:0, offset:0};

    appendSkipChars op, @snapshot, docPos, pos
    appendSkipChars(op, this.snapshot, docPos, pos);

    while length > 0
      part = takeDoc @snapshot, docPos, length, true
      if typeof part is 'string'
        append op, {'d':part.length}
        length -= part.length
      else
        append op, part
    while (length > 0) {
      const part = takeDoc(this.snapshot, docPos, length, true);
      if (typeof part === 'string') {
        append(op, {'d':part.length});
        length -= part.length;
      } else {
        append(op, part);
      }
    }

    appendSkipChars op, @snapshot, docPos
    appendSkipChars(op, this.snapshot, docPos);

    @submitOp op, callback
    op
    this.submitOp(op, callback);
    return op;
  },

  '_register': ->
    # Interpret received ops + generate more detailed events for them
    @on 'remoteop', (op, snapshot) ->
      textPos = 0
      docPos = {index:0, offset:0}
  '_register'() {
    // Interpret received ops + generate more detailed events for them
    return this.on('remoteop', function(op, snapshot) {
      let textPos = 0;
      const docPos = {index:0, offset:0};

      for component in op
        if typeof component is 'number'
          # Skip
          remainder = component
          while remainder > 0
            part = takeDoc snapshot, docPos, remainder
            if typeof part is 'string'
              textPos += part.length
            remainder -= part.length || part
        else if component.i != undefined
          # Insert
          if typeof component.i is 'string'
            @emit 'insert', textPos, component.i
            textPos += component.i.length
        else
          # Delete
          remainder = component.d
          while remainder > 0
            part = takeDoc snapshot, docPos, remainder
            if typeof part is 'string'
              @emit 'delete', textPos, part
            remainder -= part.length || part
      for (let component of Array.from(op)) {
        var part, remainder;
        if (typeof component === 'number') {
          // Skip
          remainder = component;
          while (remainder > 0) {
            part = takeDoc(snapshot, docPos, remainder);
            if (typeof part === 'string') {
              textPos += part.length;
            }
            remainder -= part.length || part;
          }
        } else if (component.i !== undefined) {
          // Insert
          if (typeof component.i === 'string') {
            this.emit('insert', textPos, component.i);
            textPos += component.i.length;
          }
        } else {
          // Delete
          remainder = component.d;
          while (remainder > 0) {
            part = takeDoc(snapshot, docPos, remainder);
            if (typeof part === 'string') {
              this.emit('delete', textPos, part);
            }
            remainder -= part.length || part;
          }
        }
      }

      return
    });
  }
};
@@ -1,322 +1,398 @@
# A TP2 implementation of text, following this spec:
# http://code.google.com/p/lightwave/source/browse/trunk/experimental/ot/README
#
# A document is made up of a string and a set of tombstones inserted throughout
# the string. For example, 'some ', (2 tombstones), 'string'.
#
# This is encoded in a document as: {s:'some string', t:[5, -2, 6]}
#
# Ops are lists of components which iterate over the whole document.
# Components are either:
#   N: Skip N characters in the original document
#   {i:'str'}: Insert 'str' at the current position in the document
#   {i:N}: Insert N tombstones at the current position in the document
#   {d:N}: Delete (tombstone) N characters at the current position in the document
#
# Eg: [3, {i:'hi'}, 5, {d:8}]
#
# Snapshots are lists with characters and tombstones. Characters are stored in strings
# and adjacent tombstones are flattened into numbers.
#
# Eg, the document: 'Hello .....world' ('.' denotes tombstoned (deleted) characters)
# would be represented by a document snapshot of ['Hello ', 5, 'world']
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS103: Rewrite code to no longer use __guard__
 * DS205: Consider reworking code to avoid use of IIFEs
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
// A TP2 implementation of text, following this spec:
// http://code.google.com/p/lightwave/source/browse/trunk/experimental/ot/README
//
// A document is made up of a string and a set of tombstones inserted throughout
// the string. For example, 'some ', (2 tombstones), 'string'.
//
// This is encoded in a document as: {s:'some string', t:[5, -2, 6]}
//
// Ops are lists of components which iterate over the whole document.
// Components are either:
//   N: Skip N characters in the original document
//   {i:'str'}: Insert 'str' at the current position in the document
//   {i:N}: Insert N tombstones at the current position in the document
//   {d:N}: Delete (tombstone) N characters at the current position in the document
//
// Eg: [3, {i:'hi'}, 5, {d:8}]
//
// Snapshots are lists with characters and tombstones. Characters are stored in strings
// and adjacent tombstones are flattened into numbers.
//
// Eg, the document: 'Hello .....world' ('.' denotes tombstoned (deleted) characters)
// would be represented by a document snapshot of ['Hello ', 5, 'world']

type =
  name: 'text-tp2'
  tp2: true
  create: -> {charLength:0, totalLength:0, positionCache:[], data:[]}
  serialize: (doc) ->
    throw new Error 'invalid doc snapshot' unless doc.data
    doc.data
  deserialize: (data) ->
    doc = type.create()
    doc.data = data
let append, appendDoc, takeDoc;
var type = {
  name: 'text-tp2',
  tp2: true,
  create() { return {charLength:0, totalLength:0, positionCache:[], data:[]}; },
  serialize(doc) {
    if (!doc.data) { throw new Error('invalid doc snapshot'); }
    return doc.data;
  },
  deserialize(data) {
    const doc = type.create();
    doc.data = data;

    for component in data
      if typeof component is 'string'
        doc.charLength += component.length
        doc.totalLength += component.length
      else
        doc.totalLength += component
    for (let component of Array.from(data)) {
      if (typeof component === 'string') {
        doc.charLength += component.length;
        doc.totalLength += component.length;
      } else {
        doc.totalLength += component;
      }
    }

    doc
    return doc;
  }
};
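A sketch of the snapshot bookkeeping deserialize performs, using the example document from the header:

    const doc = type.deserialize(['Hello ', 5, 'world']);
    doc.charLength;  // => 11 — visible characters ('Hello ' + 'world')
    doc.totalLength; // => 16 — visible characters plus the 5 tombstones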
checkOp = (op) ->
|
||||
throw new Error('Op must be an array of components') unless Array.isArray(op)
|
||||
last = null
|
||||
for c in op
|
||||
if typeof(c) == 'object'
|
||||
if c.i != undefined
|
||||
throw new Error('Inserts must insert a string or a +ive number') unless (typeof(c.i) == 'string' and c.i.length > 0) or (typeof(c.i) == 'number' and c.i > 0)
|
||||
else if c.d != undefined
|
||||
throw new Error('Deletes must be a +ive number') unless typeof(c.d) == 'number' and c.d > 0
|
||||
else
|
||||
throw new Error('Operation component must define .i or .d')
|
||||
else
|
||||
throw new Error('Op components must be objects or numbers') unless typeof(c) == 'number'
|
||||
throw new Error('Skip components must be a positive number') unless c > 0
|
||||
throw new Error('Adjacent skip components should be combined') if typeof(last) == 'number'
|
||||
const checkOp = function(op) {
|
||||
if (!Array.isArray(op)) { throw new Error('Op must be an array of components'); }
|
||||
let last = null;
|
||||
return (() => {
|
||||
const result = [];
|
||||
for (let c of Array.from(op)) {
|
||||
if (typeof(c) === 'object') {
|
||||
if (c.i !== undefined) {
|
||||
if (((typeof(c.i) !== 'string') || !(c.i.length > 0)) && ((typeof(c.i) !== 'number') || !(c.i > 0))) { throw new Error('Inserts must insert a string or a +ive number'); }
|
||||
} else if (c.d !== undefined) {
|
||||
if ((typeof(c.d) !== 'number') || !(c.d > 0)) { throw new Error('Deletes must be a +ive number'); }
|
||||
} else {
|
||||
throw new Error('Operation component must define .i or .d');
|
||||
}
|
||||
} else {
|
||||
if (typeof(c) !== 'number') { throw new Error('Op components must be objects or numbers'); }
|
||||
if (!(c > 0)) { throw new Error('Skip components must be a positive number'); }
|
||||
if (typeof(last) === 'number') { throw new Error('Adjacent skip components should be combined'); }
|
||||
}
|
||||
|
||||
last = c
|
||||
result.push(last = c);
|
||||
}
|
||||
return result;
|
||||
})();
|
||||
};

# Take the next part from the specified position in a document snapshot.
# position = {index, offset}. It will be updated.
type._takeDoc = takeDoc = (doc, position, maxlength, tombsIndivisible) ->
throw new Error 'Operation goes past the end of the document' if position.index >= doc.data.length
// Take the next part from the specified position in a document snapshot.
// position = {index, offset}. It will be updated.
type._takeDoc = (takeDoc = function(doc, position, maxlength, tombsIndivisible) {
if (position.index >= doc.data.length) { throw new Error('Operation goes past the end of the document'); }

part = doc.data[position.index]
# peel off data[0]
result = if typeof(part) == 'string'
if maxlength != undefined
part[position.offset...(position.offset + maxlength)]
else
part[position.offset...]
else
if maxlength == undefined or tombsIndivisible
const part = doc.data[position.index];
// peel off data[0]
const result = typeof(part) === 'string' ?
maxlength !== undefined ?
part.slice(position.offset, (position.offset + maxlength))
:
part.slice(position.offset)
:
(maxlength === undefined) || tombsIndivisible ?
part - position.offset
else
Math.min(maxlength, part - position.offset)
:
Math.min(maxlength, part - position.offset);

resultLen = result.length || result
const resultLen = result.length || result;

if (part.length || part) - position.offset > resultLen
position.offset += resultLen
else
position.index++
position.offset = 0
if (((part.length || part) - position.offset) > resultLen) {
position.offset += resultLen;
} else {
position.index++;
position.offset = 0;
}

result
return result;
});

# Append a part to the end of a document
type._appendDoc = appendDoc = (doc, p) ->
return if p == 0 or p == ''
// Append a part to the end of a document
type._appendDoc = (appendDoc = function(doc, p) {
if ((p === 0) || (p === '')) { return; }

if typeof p is 'string'
doc.charLength += p.length
doc.totalLength += p.length
else
doc.totalLength += p
if (typeof p === 'string') {
doc.charLength += p.length;
doc.totalLength += p.length;
} else {
doc.totalLength += p;
}

data = doc.data
if data.length == 0
data.push p
else if typeof(data[data.length - 1]) == typeof(p)
data[data.length - 1] += p
else
data.push p
return
const {
data
} = doc;
if (data.length === 0) {
data.push(p);
} else if (typeof(data[data.length - 1]) === typeof(p)) {
data[data.length - 1] += p;
} else {
data.push(p);
}
});

# Apply the op to the document. The document is not modified in the process.
type.apply = (doc, op) ->
unless doc.totalLength != undefined and doc.charLength != undefined and doc.data.length != undefined
throw new Error('Snapshot is invalid')
// Apply the op to the document. The document is not modified in the process.
type.apply = function(doc, op) {
if ((doc.totalLength === undefined) || (doc.charLength === undefined) || (doc.data.length === undefined)) {
throw new Error('Snapshot is invalid');
}

checkOp op
checkOp(op);

newDoc = type.create()
position = {index:0, offset:0}
const newDoc = type.create();
const position = {index:0, offset:0};

for component in op
if typeof(component) is 'number'
remainder = component
while remainder > 0
part = takeDoc doc, position, remainder
for (let component of Array.from(op)) {
var part, remainder;
if (typeof(component) === 'number') {
remainder = component;
while (remainder > 0) {
part = takeDoc(doc, position, remainder);

appendDoc newDoc, part
remainder -= part.length || part
appendDoc(newDoc, part);
remainder -= part.length || part;
}

else if component.i != undefined
appendDoc newDoc, component.i
else if component.d != undefined
remainder = component.d
while remainder > 0
part = takeDoc doc, position, remainder
remainder -= part.length || part
appendDoc newDoc, component.d
} else if (component.i !== undefined) {
appendDoc(newDoc, component.i);
} else if (component.d !== undefined) {
remainder = component.d;
while (remainder > 0) {
part = takeDoc(doc, position, remainder);
remainder -= part.length || part;
}
appendDoc(newDoc, component.d);
}
}

newDoc
return newDoc;
};
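A hedged usage sketch of type.apply: in tp2, deleting replaces characters with a numeric tombstone instead of removing them, so totalLength is preserved while charLength shrinks. The snapshot shape assumed here is the one apply validates above ({charLength, totalLength, data}).

// Illustrative sketch: applying [skip 2, delete 2, skip 1] to a 5-char doc.
const before = {charLength: 5, totalLength: 5, data: ['hello']};
const after = type.apply(before, [2, {d: 2}, 1]);
// after.data === ['he', 2, 'o']  (the deleted 'll' became a 2-char tombstone)
// after.charLength === 3, after.totalLength === 5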

# Append an op component to the end of the specified op.
# Exported for the randomOpGenerator.
type._append = append = (op, component) ->
if component == 0 || component.i == '' || component.i == 0 || component.d == 0
return
else if op.length == 0
op.push component
else
last = op[op.length - 1]
if typeof(component) == 'number' && typeof(last) == 'number'
op[op.length - 1] += component
else if component.i != undefined && last.i? && typeof(last.i) == typeof(component.i)
last.i += component.i
else if component.d != undefined && last.d?
last.d += component.d
else
op.push component
// Append an op component to the end of the specified op.
// Exported for the randomOpGenerator.
type._append = (append = function(op, component) {
if ((component === 0) || (component.i === '') || (component.i === 0) || (component.d === 0)) {
return;
} else if (op.length === 0) {
return op.push(component);
} else {
const last = op[op.length - 1];
if ((typeof(component) === 'number') && (typeof(last) === 'number')) {
return op[op.length - 1] += component;
} else if ((component.i !== undefined) && (last.i != null) && (typeof(last.i) === typeof(component.i))) {
return last.i += component.i;
} else if ((component.d !== undefined) && (last.d != null)) {
return last.d += component.d;
} else {
return op.push(component);
}
}
});

# Makes 2 functions for taking components from the start of an op, and for peeking
# at the next op that could be taken.
makeTake = (op) ->
# The index of the next component to take
index = 0
# The offset into the component
offset = 0
// Makes 2 functions for taking components from the start of an op, and for peeking
// at the next op that could be taken.
const makeTake = function(op) {
// The index of the next component to take
let index = 0;
// The offset into the component
let offset = 0;

# Take up to length maxlength from the op. If maxlength is not defined, there is no max.
# If insertsIndivisible is true, inserts (& insert tombstones) won't be separated.
#
# Returns null when op is fully consumed.
take = (maxlength, insertsIndivisible) ->
return null if index == op.length
// Take up to length maxlength from the op. If maxlength is not defined, there is no max.
// If insertsIndivisible is true, inserts (& insert tombstones) won't be separated.
//
// Returns null when op is fully consumed.
const take = function(maxlength, insertsIndivisible) {
let current;
if (index === op.length) { return null; }

e = op[index]
if typeof((current = e)) == 'number' or typeof((current = e.i)) == 'number' or (current = e.d) != undefined
if !maxlength? or current - offset <= maxlength or (insertsIndivisible and e.i != undefined)
# Return the rest of the current element.
c = current - offset
++index; offset = 0
else
offset += maxlength
c = maxlength
if e.i != undefined then {i:c} else if e.d != undefined then {d:c} else c
else
# Take from the inserted string
if !maxlength? or e.i.length - offset <= maxlength or insertsIndivisible
result = {i:e.i[offset..]}
++index; offset = 0
else
result = {i:e.i[offset...offset + maxlength]}
offset += maxlength
result
const e = op[index];
if ((typeof((current = e)) === 'number') || (typeof((current = e.i)) === 'number') || ((current = e.d) !== undefined)) {
let c;
if ((maxlength == null) || ((current - offset) <= maxlength) || (insertsIndivisible && (e.i !== undefined))) {
// Return the rest of the current element.
c = current - offset;
++index; offset = 0;
} else {
offset += maxlength;
c = maxlength;
}
if (e.i !== undefined) { return {i:c}; } else if (e.d !== undefined) { return {d:c}; } else { return c; }
} else {
// Take from the inserted string
let result;
if ((maxlength == null) || ((e.i.length - offset) <= maxlength) || insertsIndivisible) {
result = {i:e.i.slice(offset)};
++index; offset = 0;
} else {
result = {i:e.i.slice(offset, offset + maxlength)};
offset += maxlength;
}
return result;
}
};

peekType = -> op[index]
const peekType = () => op[index];

[take, peekType]
return [take, peekType];
};
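A small sketch (illustrative, not from the diff) of the take/peek pair returned by makeTake, consuming an op incrementally:

// Consuming [3, {i:'ab'}] one piece at a time.
const [take, peek] = makeTake([3, {i: 'ab'}]);
take(1);  // 1          (first unit of the skip)
take(5);  // 2          (rest of the skip; take never crosses a component)
peek();   // {i: 'ab'}
take();   // {i: 'ab'}  (no maxlength: the whole insert)
take();   // null       (op fully consumed)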

# Find and return the length of an op component
componentLength = (component) ->
if typeof(component) == 'number'
component
else if typeof(component.i) == 'string'
component.i.length
else
# This should work because c.d and c.i must be +ive.
component.d or component.i
// Find and return the length of an op component
const componentLength = function(component) {
if (typeof(component) === 'number') {
return component;
} else if (typeof(component.i) === 'string') {
return component.i.length;
} else {
// This should work because c.d and c.i must be +ive.
return component.d || component.i;
}
};

# Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate
# adjacent inserts and deletes.
type.normalize = (op) ->
newOp = []
append newOp, component for component in op
newOp
// Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate
// adjacent inserts and deletes.
type.normalize = function(op) {
const newOp = [];
for (let component of Array.from(op)) { append(newOp, component); }
return newOp;
};
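For example (a sketch, using the append semantics above): normalize merges adjacent skips and drops empty inserts.

type.normalize([1, 1, {i: ''}]);  // [2]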

# This is a helper method to transform and prune. goForwards is true for transform, false for prune.
transformer = (op, otherOp, goForwards, side) ->
checkOp op
checkOp otherOp
newOp = []
// This is a helper method to transform and prune. goForwards is true for transform, false for prune.
const transformer = function(op, otherOp, goForwards, side) {
let component;
checkOp(op);
checkOp(otherOp);
const newOp = [];

[take, peek] = makeTake op
const [take, peek] = Array.from(makeTake(op));

for component in otherOp
length = componentLength component
for (component of Array.from(otherOp)) {
var chunk;
let length = componentLength(component);

if component.i != undefined # Insert text or tombs
if goForwards # transform - insert skips over inserted parts
if side == 'left'
# The left insert should go first.
append newOp, take() while peek()?.i != undefined
if (component.i !== undefined) { // Insert text or tombs
if (goForwards) { // transform - insert skips over inserted parts
if (side === 'left') {
// The left insert should go first.
while (__guard__(peek(), x => x.i) !== undefined) { append(newOp, take()); }
}

# In any case, skip the inserted text.
append newOp, length
// In any case, skip the inserted text.
append(newOp, length);

else # Prune. Remove skips for inserts.
while length > 0
chunk = take length, true
} else { // Prune. Remove skips for inserts.
while (length > 0) {
chunk = take(length, true);

throw new Error 'The transformed op is invalid' unless chunk != null
throw new Error 'The transformed op deletes locally inserted characters - it cannot be purged of the insert.' if chunk.d != undefined
if (chunk === null) { throw new Error('The transformed op is invalid'); }
if (chunk.d !== undefined) { throw new Error('The transformed op deletes locally inserted characters - it cannot be purged of the insert.'); }

if typeof chunk is 'number'
length -= chunk
else
append newOp, chunk
if (typeof chunk === 'number') {
length -= chunk;
} else {
append(newOp, chunk);
}
}
}

else # Skip or delete
while length > 0
chunk = take length, true
throw new Error('The op traverses more elements than the document has') unless chunk != null
} else { // Skip or delete
while (length > 0) {
chunk = take(length, true);
if (chunk === null) { throw new Error('The op traverses more elements than the document has'); }

append newOp, chunk
length -= componentLength chunk unless chunk.i
append(newOp, chunk);
if (!chunk.i) { length -= componentLength(chunk); }
}
}
}

# Append extras from op1
while (component = take())
throw new Error "Remaining fragments in the op: #{component}" unless component.i != undefined
append newOp, component
// Append extras from op1
while (component = take()) {
if (component.i === undefined) { throw new Error(`Remaining fragments in the op: ${component}`); }
append(newOp, component);
}

newOp
return newOp;
};

# transform op1 by op2. Return transformed version of op1.
# op1 and op2 are unchanged by transform.
# side should be 'left' or 'right', depending on if op1.id <> op2.id. 'left' == client op.
type.transform = (op, otherOp, side) ->
throw new Error "side (#{side}) should be 'left' or 'right'" unless side == 'left' or side == 'right'
transformer op, otherOp, true, side
// transform op1 by op2. Return transformed version of op1.
// op1 and op2 are unchanged by transform.
// side should be 'left' or 'right', depending on if op1.id <> op2.id. 'left' == client op.
type.transform = function(op, otherOp, side) {
if ((side !== 'left') && (side !== 'right')) { throw new Error(`side (${side}) should be 'left' or 'right'`); }
return transformer(op, otherOp, true, side);
};
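A hedged example of transform: skips in the transformed op grow over text the other op inserted. Note that both ops must span the same document (here, 5 characters), or the leftover-fragment check throws.

// Illustrative sketch: transforming [skip 5] past an insert of 'ab' at the front.
type.transform([5], [{i: 'ab'}, 5], 'right');  // [7]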

# Prune is the inverse of transform.
type.prune = (op, otherOp) -> transformer op, otherOp, false
// Prune is the inverse of transform.
type.prune = (op, otherOp) => transformer(op, otherOp, false);

# Compose 2 ops into 1 op.
type.compose = (op1, op2) ->
return op2 if op1 == null or op1 == undefined
// Compose 2 ops into 1 op.
type.compose = function(op1, op2) {
let component;
if ((op1 === null) || (op1 === undefined)) { return op2; }

checkOp op1
checkOp op2
checkOp(op1);
checkOp(op2);

result = []
const result = [];

[take, _] = makeTake op1
const [take, _] = Array.from(makeTake(op1));

for component in op2
for (component of Array.from(op2)) {

if typeof(component) == 'number' # Skip
# Just copy from op1.
length = component
while length > 0
chunk = take length
throw new Error('The op traverses more elements than the document has') unless chunk != null
var chunk, length;
if (typeof(component) === 'number') { // Skip
// Just copy from op1.
length = component;
while (length > 0) {
chunk = take(length);
if (chunk === null) { throw new Error('The op traverses more elements than the document has'); }

append result, chunk
length -= componentLength chunk
append(result, chunk);
length -= componentLength(chunk);
}

else if component.i != undefined # Insert
append result, {i:component.i}
} else if (component.i !== undefined) { // Insert
append(result, {i:component.i});

else # Delete
length = component.d
while length > 0
chunk = take length
throw new Error('The op traverses more elements than the document has') unless chunk != null
} else { // Delete
length = component.d;
while (length > 0) {
chunk = take(length);
if (chunk === null) { throw new Error('The op traverses more elements than the document has'); }

chunkLength = componentLength chunk
if chunk.i != undefined
append result, {i:chunkLength}
else
append result, {d:chunkLength}
const chunkLength = componentLength(chunk);
if (chunk.i !== undefined) {
append(result, {i:chunkLength});
} else {
append(result, {d:chunkLength});
}

length -= chunkLength
length -= chunkLength;
}
}
}

# Append extras from op1
while (component = take())
throw new Error "Remaining fragments in op1: #{component}" unless component.i != undefined
append result, component
// Append extras from op1
while (component = take()) {
if (component.i === undefined) { throw new Error(`Remaining fragments in op1: ${component}`); }
append(result, component);
}

result
return result;
};
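A small compose sketch (illustrative): adjacent string inserts produced by the two ops are merged by append.

// op1 inserts 'ab'; op2 keeps both characters and appends 'c'.
type.compose([{i: 'ab'}], [2, {i: 'c'}]);  // [{i: 'abc'}]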

if WEB?
exports.types['text-tp2'] = type
else
module.exports = type
if (typeof WEB !== 'undefined' && WEB !== null) {
exports.types['text-tp2'] = type;
} else {
module.exports = type;
}


function __guard__(value, transform) {
return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined;
}
@ -1,209 +1,245 @@
# A simple text implementation
#
# Operations are lists of components.
# Each component either inserts or deletes at a specified position in the document.
#
# Components are either:
# {i:'str', p:100}: Insert 'str' at position 100 in the document
# {d:'str', p:100}: Delete 'str' at position 100 in the document
#
# Components in an operation are executed sequentially, so the position of components
# assumes previous components have already executed.
#
# Eg: This op:
# [{i:'abc', p:0}]
# is equivalent to this op:
# [{i:'a', p:0}, {i:'b', p:1}, {i:'c', p:2}]
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
// A simple text implementation
//
// Operations are lists of components.
// Each component either inserts or deletes at a specified position in the document.
//
// Components are either:
// {i:'str', p:100}: Insert 'str' at position 100 in the document
// {d:'str', p:100}: Delete 'str' at position 100 in the document
//
// Components in an operation are executed sequentially, so the position of components
// assumes previous components have already executed.
//
// Eg: This op:
// [{i:'abc', p:0}]
// is equivalent to this op:
// [{i:'a', p:0}, {i:'b', p:1}, {i:'c', p:2}]

# NOTE: The global scope here is shared with other sharejs files when built with closure.
# Be careful what ends up in your namespace.
// NOTE: The global scope here is shared with other sharejs files when built with closure.
// Be careful what ends up in your namespace.

text = {}
let append, transformComponent;
const text = {};

text.name = 'text'
text.name = 'text';

text.create = -> ''
text.create = () => '';

strInject = (s1, pos, s2) -> s1[...pos] + s2 + s1[pos..]
const strInject = (s1, pos, s2) => s1.slice(0, pos) + s2 + s1.slice(pos);

checkValidComponent = (c) ->
throw new Error 'component missing position field' if typeof c.p != 'number'
const checkValidComponent = function(c) {
if (typeof c.p !== 'number') { throw new Error('component missing position field'); }

i_type = typeof c.i
d_type = typeof c.d
throw new Error 'component needs an i or d field' unless (i_type == 'string') ^ (d_type == 'string')
const i_type = typeof c.i;
const d_type = typeof c.d;
if (!((i_type === 'string') ^ (d_type === 'string'))) { throw new Error('component needs an i or d field'); }

throw new Error 'position cannot be negative' unless c.p >= 0
if (!(c.p >= 0)) { throw new Error('position cannot be negative'); }
};

checkValidOp = (op) ->
checkValidComponent(c) for c in op
true
const checkValidOp = function(op) {
for (let c of Array.from(op)) { checkValidComponent(c); }
return true;
};

text.apply = (snapshot, op) ->
checkValidOp op
for component in op
if component.i?
snapshot = strInject snapshot, component.p, component.i
else
deleted = snapshot[component.p...(component.p + component.d.length)]
throw new Error "Delete component '#{component.d}' does not match deleted text '#{deleted}'" unless component.d == deleted
snapshot = snapshot[...component.p] + snapshot[(component.p + component.d.length)..]
text.apply = function(snapshot, op) {
checkValidOp(op);
for (let component of Array.from(op)) {
if (component.i != null) {
snapshot = strInject(snapshot, component.p, component.i);
} else {
const deleted = snapshot.slice(component.p, (component.p + component.d.length));
if (component.d !== deleted) { throw new Error(`Delete component '${component.d}' does not match deleted text '${deleted}'`); }
snapshot = snapshot.slice(0, component.p) + snapshot.slice((component.p + component.d.length));
}
}

snapshot
return snapshot;
};
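A usage sketch of the plain text type (illustrative): positions are absolute within the snapshot, and deletes must quote the exact text they remove.

let snapshot = text.apply('abc', [{i: 'X', p: 1}]);  // 'aXbc'
snapshot = text.apply(snapshot, [{d: 'Xb', p: 1}]);  // 'ac'
text.apply('abc', [{d: 'zz', p: 0}]);  // throws: deleted text does not match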


# Exported for use by the random op generator.
#
# For simplicity, this version of append does not compress adjacent inserts and deletes of
# the same text. It would be nice to change that at some stage.
text._append = append = (newOp, c) ->
return if c.i == '' or c.d == ''
if newOp.length == 0
newOp.push c
else
last = newOp[newOp.length - 1]
// Exported for use by the random op generator.
//
// For simplicity, this version of append does not compress adjacent inserts and deletes of
// the same text. It would be nice to change that at some stage.
text._append = (append = function(newOp, c) {
if ((c.i === '') || (c.d === '')) { return; }
if (newOp.length === 0) {
return newOp.push(c);
} else {
const last = newOp[newOp.length - 1];

# Compose the insert into the previous insert if possible
if last.i? && c.i? and last.p <= c.p <= (last.p + last.i.length)
newOp[newOp.length - 1] = {i:strInject(last.i, c.p - last.p, c.i), p:last.p}
else if last.d? && c.d? and c.p <= last.p <= (c.p + c.d.length)
newOp[newOp.length - 1] = {d:strInject(c.d, last.p - c.p, last.d), p:c.p}
else
newOp.push c
// Compose the insert into the previous insert if possible
if ((last.i != null) && (c.i != null) && (last.p <= c.p && c.p <= (last.p + last.i.length))) {
return newOp[newOp.length - 1] = {i:strInject(last.i, c.p - last.p, c.i), p:last.p};
} else if ((last.d != null) && (c.d != null) && (c.p <= last.p && last.p <= (c.p + c.d.length))) {
return newOp[newOp.length - 1] = {d:strInject(c.d, last.p - c.p, last.d), p:c.p};
} else {
return newOp.push(c);
}
}
});

text.compose = (op1, op2) ->
checkValidOp op1
checkValidOp op2
text.compose = function(op1, op2) {
checkValidOp(op1);
checkValidOp(op2);

newOp = op1.slice()
append newOp, c for c in op2
const newOp = op1.slice();
for (let c of Array.from(op2)) { append(newOp, c); }

newOp
return newOp;
};
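For instance (a sketch): composing two single-character inserts that touch yields one merged component, via the append above.

text.compose([{i: 'a', p: 0}], [{i: 'b', p: 1}]);  // [{i: 'ab', p: 0}]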

# Attempt to compress the op components together 'as much as possible'.
# This implementation preserves order and preserves create/delete pairs.
text.compress = (op) -> text.compose [], op
// Attempt to compress the op components together 'as much as possible'.
// This implementation preserves order and preserves create/delete pairs.
text.compress = op => text.compose([], op);

text.normalize = (op) ->
newOp = []
text.normalize = function(op) {
const newOp = [];

# Normalize should allow ops which are a single (unwrapped) component:
# {i:'asdf', p:23}.
# There's no good way to test if something is an array:
# http://perfectionkills.com/instanceof-considered-harmful-or-how-to-write-a-robust-isarray/
# so this is probably the least bad solution.
op = [op] if op.i? or op.p?
// Normalize should allow ops which are a single (unwrapped) component:
// {i:'asdf', p:23}.
// There's no good way to test if something is an array:
// http://perfectionkills.com/instanceof-considered-harmful-or-how-to-write-a-robust-isarray/
// so this is probably the least bad solution.
if ((op.i != null) || (op.p != null)) { op = [op]; }

for c in op
c.p ?= 0
append newOp, c
for (let c of Array.from(op)) {
if (c.p == null) { c.p = 0; }
append(newOp, c);
}

newOp
return newOp;
};
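For example (a sketch): a bare component is wrapped and given a default position.

text.normalize({i: 'x'});  // [{i: 'x', p: 0}]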

# This helper method transforms a position by an op component.
#
# If c is an insert, insertAfter specifies whether the transform
# is pushed after the insert (true) or before it (false).
#
# insertAfter is optional for deletes.
transformPosition = (pos, c, insertAfter) ->
if c.i?
if c.p < pos || (c.p == pos && insertAfter)
pos + c.i.length
else
pos
else
# I think this could also be written as: Math.min(c.p, Math.min(c.p - otherC.p, otherC.d.length))
# but I think its harder to read that way, and it compiles using ternary operators anyway
# so its no slower written like this.
if pos <= c.p
pos
else if pos <= c.p + c.d.length
c.p
else
pos - c.d.length
// This helper method transforms a position by an op component.
//
// If c is an insert, insertAfter specifies whether the transform
// is pushed after the insert (true) or before it (false).
//
// insertAfter is optional for deletes.
const transformPosition = function(pos, c, insertAfter) {
if (c.i != null) {
if ((c.p < pos) || ((c.p === pos) && insertAfter)) {
return pos + c.i.length;
} else {
return pos;
}
} else {
// I think this could also be written as: Math.min(c.p, Math.min(c.p - otherC.p, otherC.d.length))
// but I think its harder to read that way, and it compiles using ternary operators anyway
// so its no slower written like this.
if (pos <= c.p) {
return pos;
} else if (pos <= (c.p + c.d.length)) {
return c.p;
} else {
return pos - c.d.length;
}
}
};

# Helper method to transform a cursor position as a result of an op.
#
# Like transformPosition above, if c is an insert, insertAfter specifies whether the cursor position
# is pushed after an insert (true) or before it (false).
text.transformCursor = (position, op, side) ->
insertAfter = side == 'right'
position = transformPosition position, c, insertAfter for c in op
position
// Helper method to transform a cursor position as a result of an op.
//
// Like transformPosition above, if c is an insert, insertAfter specifies whether the cursor position
// is pushed after an insert (true) or before it (false).
text.transformCursor = function(position, op, side) {
const insertAfter = side === 'right';
for (let c of Array.from(op)) { position = transformPosition(position, c, insertAfter); }
return position;
};
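A cursor sketch (illustrative): side decides whether a cursor sitting exactly at an insertion point stays put ('left') or is pushed after the inserted text ('right').

// An insert of 'ab' at position 2.
text.transformCursor(2, [{i: 'ab', p: 2}], 'left');   // 2
text.transformCursor(2, [{i: 'ab', p: 2}], 'right');  // 4
text.transformCursor(5, [{i: 'ab', p: 2}], 'left');   // 7 (past the insert either way)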

# Transform an op component by another op component. Asymmetric.
# The result will be appended to destination.
#
# exported for use in JSON type
text._tc = transformComponent = (dest, c, otherC, side) ->
checkValidOp [c]
checkValidOp [otherC]
// Transform an op component by another op component. Asymmetric.
// The result will be appended to destination.
//
// exported for use in JSON type
text._tc = (transformComponent = function(dest, c, otherC, side) {
checkValidOp([c]);
checkValidOp([otherC]);

if c.i?
append dest, {i:c.i, p:transformPosition(c.p, otherC, side == 'right')}
if (c.i != null) {
append(dest, {i:c.i, p:transformPosition(c.p, otherC, side === 'right')});

else # Delete
if otherC.i? # delete vs insert
s = c.d
if c.p < otherC.p
append dest, {d:s[...otherC.p - c.p], p:c.p}
s = s[(otherC.p - c.p)..]
if s != ''
append dest, {d:s, p:c.p + otherC.i.length}
} else { // Delete
if (otherC.i != null) { // delete vs insert
let s = c.d;
if (c.p < otherC.p) {
append(dest, {d:s.slice(0, otherC.p - c.p), p:c.p});
s = s.slice((otherC.p - c.p));
}
if (s !== '') {
append(dest, {d:s, p:c.p + otherC.i.length});
}

else # Delete vs delete
if c.p >= otherC.p + otherC.d.length
append dest, {d:c.d, p:c.p - otherC.d.length}
else if c.p + c.d.length <= otherC.p
append dest, c
else
# They overlap somewhere.
newC = {d:'', p:c.p}
if c.p < otherC.p
newC.d = c.d[...(otherC.p - c.p)]
if c.p + c.d.length > otherC.p + otherC.d.length
newC.d += c.d[(otherC.p + otherC.d.length - c.p)..]
} else { // Delete vs delete
if (c.p >= (otherC.p + otherC.d.length)) {
append(dest, {d:c.d, p:c.p - otherC.d.length});
} else if ((c.p + c.d.length) <= otherC.p) {
append(dest, c);
} else {
// They overlap somewhere.
const newC = {d:'', p:c.p};
if (c.p < otherC.p) {
newC.d = c.d.slice(0, (otherC.p - c.p));
}
if ((c.p + c.d.length) > (otherC.p + otherC.d.length)) {
newC.d += c.d.slice(((otherC.p + otherC.d.length) - c.p));
}

# This is entirely optional - just for a check that the deleted
# text in the two ops matches
intersectStart = Math.max c.p, otherC.p
intersectEnd = Math.min c.p + c.d.length, otherC.p + otherC.d.length
cIntersect = c.d[intersectStart - c.p...intersectEnd - c.p]
otherIntersect = otherC.d[intersectStart - otherC.p...intersectEnd - otherC.p]
throw new Error 'Delete ops delete different text in the same region of the document' unless cIntersect == otherIntersect
// This is entirely optional - just for a check that the deleted
// text in the two ops matches
const intersectStart = Math.max(c.p, otherC.p);
const intersectEnd = Math.min(c.p + c.d.length, otherC.p + otherC.d.length);
const cIntersect = c.d.slice(intersectStart - c.p, intersectEnd - c.p);
const otherIntersect = otherC.d.slice(intersectStart - otherC.p, intersectEnd - otherC.p);
if (cIntersect !== otherIntersect) { throw new Error('Delete ops delete different text in the same region of the document'); }

if newC.d != ''
# This could be rewritten similarly to insert v delete, above.
newC.p = transformPosition newC.p, otherC
append dest, newC
if (newC.d !== '') {
// This could be rewritten similarly to insert v delete, above.
newC.p = transformPosition(newC.p, otherC);
append(dest, newC);
}
}
}
}

dest
return dest;
});
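The same left/right asymmetry shows up in the full transform built from this component (a sketch; text.transform itself is wired up by bootstrapTransform, as the hookup code below shows).

// Two concurrent inserts at position 0.
text.transform([{i: 'x', p: 0}], [{i: 'ab', p: 0}], 'right');  // [{i: 'x', p: 2}]
text.transform([{i: 'x', p: 0}], [{i: 'ab', p: 0}], 'left');   // [{i: 'x', p: 0}]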

invertComponent = (c) ->
if c.i?
{d:c.i, p:c.p}
else
{i:c.d, p:c.p}
const invertComponent = function(c) {
if (c.i != null) {
return {d:c.i, p:c.p};
} else {
return {i:c.d, p:c.p};
}
};

# No need to use append for invert, because the components won't be able to
# cancel with one another.
text.invert = (op) -> (invertComponent c for c in op.slice().reverse())
// No need to use append for invert, because the components won't be able to
// cancel with one another.
text.invert = op => Array.from(op.slice().reverse()).map((c) => invertComponent(c));
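For example (a sketch): invert swaps i/d and reverses component order.

text.invert([{i: 'a', p: 0}, {d: 'b', p: 5}]);  // [{i: 'b', p: 5}, {d: 'a', p: 0}]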


if WEB?
exports.types ||= {}
if (typeof WEB !== 'undefined' && WEB !== null) {
if (!exports.types) { exports.types = {}; }

# This is kind of awful - come up with a better way to hook this helper code up.
bootstrapTransform(text, transformComponent, checkValidOp, append)
// This is kind of awful - come up with a better way to hook this helper code up.
bootstrapTransform(text, transformComponent, checkValidOp, append);

# [] is used to prevent closure from renaming types.text
exports.types.text = text
else
module.exports = text
// [] is used to prevent closure from renaming types.text
exports.types.text = text;
} else {
module.exports = text;

# The text type really shouldn't need this - it should be possible to define
# an efficient transform function by making a sort of transform map and passing each
# op component through it.
require('./helpers').bootstrapTransform(text, transformComponent, checkValidOp, append)
// The text type really shouldn't need this - it should be possible to define
// an efficient transform function by making a sort of transform map and passing each
// op component through it.
require('./helpers').bootstrapTransform(text, transformComponent, checkValidOp, append);
}
@ -1,22 +1,30 @@
# This is a simple type used for testing other OT code. Each op is [expectedSnapshot, increment]
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
// This is a simple type used for testing other OT code. Each op is [expectedSnapshot, increment]

exports.name = 'count'
exports.create = -> 1
exports.name = 'count';
exports.create = () => 1;

exports.apply = (snapshot, op) ->
[v, inc] = op
throw new Error "Op #{v} != snapshot #{snapshot}" unless snapshot == v
snapshot + inc
exports.apply = function(snapshot, op) {
const [v, inc] = Array.from(op);
if (snapshot !== v) { throw new Error(`Op ${v} != snapshot ${snapshot}`); }
return snapshot + inc;
};

# transform op1 by op2. Return transformed version of op1.
exports.transform = (op1, op2) ->
throw new Error "Op1 #{op1[0]} != op2 #{op2[0]}" unless op1[0] == op2[0]
[op1[0] + op2[1], op1[1]]
// transform op1 by op2. Return transformed version of op1.
exports.transform = function(op1, op2) {
if (op1[0] !== op2[0]) { throw new Error(`Op1 ${op1[0]} != op2 ${op2[0]}`); }
return [op1[0] + op2[1], op1[1]];
};

exports.compose = (op1, op2) ->
throw new Error "Op1 #{op1} + 1 != op2 #{op2}" unless op1[0] + op1[1] == op2[0]
[op1[0], op1[1] + op2[1]]
exports.compose = function(op1, op2) {
if ((op1[0] + op1[1]) !== op2[0]) { throw new Error(`Op1 ${op1} + 1 != op2 ${op2}`); }
return [op1[0], op1[1] + op2[1]];
};

exports.generateRandomOp = (doc) ->
[[doc, 1], doc + 1]
exports.generateRandomOp = doc => [[doc, 1], doc + 1];
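A quick sketch of the count type's contract: each op carries the snapshot it expects, which is how the test harness catches divergence.

exports.apply(3, [3, 2]);           // 5
exports.apply(4, [3, 2]);           // throws: 'Op 3 != snapshot 4'
exports.transform([2, 3], [2, 5]);  // [7, 3]
exports.compose([2, 3], [5, 4]);    // [2, 7]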
@ -1,65 +1,87 @@
# These methods let you build a transform function from a transformComponent function
# for OT types like text and JSON in which operations are lists of components
# and transforming them requires N^2 work.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
// These methods let you build a transform function from a transformComponent function
// for OT types like text and JSON in which operations are lists of components
// and transforming them requires N^2 work.

# Add transform and transformX functions for an OT type which has transformComponent defined.
# transformComponent(destination array, component, other component, side)
exports['_bt'] = bootstrapTransform = (type, transformComponent, checkValidOp, append) ->
transformComponentX = (left, right, destLeft, destRight) ->
transformComponent destLeft, left, right, 'left'
transformComponent destRight, right, left, 'right'
// Add transform and transformX functions for an OT type which has transformComponent defined.
// transformComponent(destination array, component, other component, side)
let bootstrapTransform;
exports['_bt'] = (bootstrapTransform = function(type, transformComponent, checkValidOp, append) {
let transformX;
const transformComponentX = function(left, right, destLeft, destRight) {
transformComponent(destLeft, left, right, 'left');
return transformComponent(destRight, right, left, 'right');
};

# Transforms rightOp by leftOp. Returns ['rightOp', clientOp']
type.transformX = type['transformX'] = transformX = (leftOp, rightOp) ->
checkValidOp leftOp
checkValidOp rightOp
// Transforms rightOp by leftOp. Returns ['rightOp', clientOp']
type.transformX = (type['transformX'] = (transformX = function(leftOp, rightOp) {
checkValidOp(leftOp);
checkValidOp(rightOp);

newRightOp = []
const newRightOp = [];

for rightComponent in rightOp
# Generate newLeftOp by composing leftOp by rightComponent
newLeftOp = []
for (let rightComponent of Array.from(rightOp)) {
// Generate newLeftOp by composing leftOp by rightComponent
const newLeftOp = [];

k = 0
while k < leftOp.length
nextC = []
transformComponentX leftOp[k], rightComponent, newLeftOp, nextC
k++
let k = 0;
while (k < leftOp.length) {
var l;
const nextC = [];
transformComponentX(leftOp[k], rightComponent, newLeftOp, nextC);
k++;

if nextC.length == 1
rightComponent = nextC[0]
else if nextC.length == 0
append newLeftOp, l for l in leftOp[k..]
rightComponent = null
break
else
# Recurse.
[l_, r_] = transformX leftOp[k..], nextC
append newLeftOp, l for l in l_
append newRightOp, r for r in r_
rightComponent = null
break
if (nextC.length === 1) {
rightComponent = nextC[0];
} else if (nextC.length === 0) {
for (l of Array.from(leftOp.slice(k))) { append(newLeftOp, l); }
rightComponent = null;
break;
} else {
// Recurse.
const [l_, r_] = Array.from(transformX(leftOp.slice(k), nextC));
for (l of Array.from(l_)) { append(newLeftOp, l); }
for (let r of Array.from(r_)) { append(newRightOp, r); }
rightComponent = null;
break;
}
}

append newRightOp, rightComponent if rightComponent?
leftOp = newLeftOp
if (rightComponent != null) { append(newRightOp, rightComponent); }
leftOp = newLeftOp;
}

[leftOp, newRightOp]
return [leftOp, newRightOp];
}));

# Transforms op with specified type ('left' or 'right') by otherOp.
type.transform = type['transform'] = (op, otherOp, type) ->
throw new Error "type must be 'left' or 'right'" unless type == 'left' or type == 'right'
// Transforms op with specified type ('left' or 'right') by otherOp.
return type.transform = (type['transform'] = function(op, otherOp, type) {
let _;
if ((type !== 'left') && (type !== 'right')) { throw new Error("type must be 'left' or 'right'"); }

return op if otherOp.length == 0
if (otherOp.length === 0) { return op; }

# TODO: Benchmark with and without this line. I _think_ it'll make a big difference...?
return transformComponent [], op[0], otherOp[0], type if op.length == 1 and otherOp.length == 1
// TODO: Benchmark with and without this line. I _think_ it'll make a big difference...?
if ((op.length === 1) && (otherOp.length === 1)) { return transformComponent([], op[0], otherOp[0], type); }

if type == 'left'
[left, _] = transformX op, otherOp
left
else
[_, right] = transformX otherOp, op
right
if (type === 'left') {
let left;
[left, _] = Array.from(transformX(op, otherOp));
return left;
} else {
let right;
[_, right] = Array.from(transformX(otherOp, op));
return right;
}
});
});

if typeof WEB is 'undefined'
exports.bootstrapTransform = bootstrapTransform
if (typeof WEB === 'undefined') {
exports.bootstrapTransform = bootstrapTransform;
}
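A sketch of what the bootstrapped functions give a component-based type, assuming the text type from the previous file has had bootstrapTransform applied to it as shown there:

// transformX transforms both sides at once; the left op's positions stand.
const [left, right] = text.transformX([{i: 'x', p: 0}], [{i: 'ab', p: 0}]);
// left  === [{i: 'x', p: 0}]
// right === [{i: 'ab', p: 1}]  (the right insert is pushed past 'x')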
@ -1,15 +1,21 @@
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/

register = (file) ->
type = require file
exports[type.name] = type
try require "#{file}-api"
const register = function(file) {
const type = require(file);
exports[type.name] = type;
try { return require(`${file}-api`); } catch (error) {}
};

# Import all the built-in types.
register './simple'
register './count'
// Import all the built-in types.
register('./simple');
register('./count');

register './text'
register './text-composable'
register './text-tp2'
register('./text');
register('./text-composable');
register('./text-tp2');

register './json'
register('./json');
@ -1,180 +1,273 @@
# API for JSON OT
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS205: Consider reworking code to avoid use of IIFEs
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
// API for JSON OT

json = require './json' if typeof WEB is 'undefined'
let json;
if (typeof WEB === 'undefined') { json = require('./json'); }

if WEB?
extendDoc = exports.extendDoc
exports.extendDoc = (name, fn) ->
SubDoc::[name] = fn
extendDoc name, fn
if (typeof WEB !== 'undefined' && WEB !== null) {
const {
extendDoc
} = exports;
exports.extendDoc = function(name, fn) {
SubDoc.prototype[name] = fn;
return extendDoc(name, fn);
};
}

depath = (path) ->
if path.length == 1 and path[0].constructor == Array
path[0]
else path
const depath = function(path) {
if ((path.length === 1) && (path[0].constructor === Array)) {
return path[0];
} else { return path; }
};

class SubDoc
constructor: (@doc, @path) ->
at: (path...) -> @doc.at @path.concat depath path
get: -> @doc.getAt @path
# for objects and lists
set: (value, cb) -> @doc.setAt @path, value, cb
# for strings and lists.
insert: (pos, value, cb) -> @doc.insertAt @path, pos, value, cb
# for strings
del: (pos, length, cb) -> @doc.deleteTextAt @path, length, pos, cb
# for objects and lists
remove: (cb) -> @doc.removeAt @path, cb
push: (value, cb) -> @insert @get().length, value, cb
move: (from, to, cb) -> @doc.moveAt @path, from, to, cb
add: (amount, cb) -> @doc.addAt @path, amount, cb
on: (event, cb) -> @doc.addListener @path, event, cb
removeListener: (l) -> @doc.removeListener l
class SubDoc {
constructor(doc, path) {
this.doc = doc;
this.path = path;
}
at(...path) { return this.doc.at(this.path.concat(depath(path))); }
get() { return this.doc.getAt(this.path); }
// for objects and lists
set(value, cb) { return this.doc.setAt(this.path, value, cb); }
// for strings and lists.
insert(pos, value, cb) { return this.doc.insertAt(this.path, pos, value, cb); }
// for strings
del(pos, length, cb) { return this.doc.deleteTextAt(this.path, length, pos, cb); }
// for objects and lists
remove(cb) { return this.doc.removeAt(this.path, cb); }
push(value, cb) { return this.insert(this.get().length, value, cb); }
move(from, to, cb) { return this.doc.moveAt(this.path, from, to, cb); }
add(amount, cb) { return this.doc.addAt(this.path, amount, cb); }
on(event, cb) { return this.doc.addListener(this.path, event, cb); }
removeListener(l) { return this.doc.removeListener(l); }

# text API compatibility
getLength: -> @get().length
getText: -> @get()
// text API compatibility
getLength() { return this.get().length; }
getText() { return this.get(); }
}

traverse = (snapshot, path) ->
container = data:snapshot
key = 'data'
elem = container
for p in path
elem = elem[key]
key = p
throw new Error 'bad path' if typeof elem == 'undefined'
{elem, key}
const traverse = function(snapshot, path) {
const container = {data:snapshot};
let key = 'data';
let elem = container;
for (let p of Array.from(path)) {
elem = elem[key];
key = p;
if (typeof elem === 'undefined') { throw new Error('bad path'); }
}
return {elem, key};
};
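A sketch of traverse's cursor convention: it returns the parent container and the final key, so elem[key] is the addressed value.

const {elem, key} = traverse({list: ['a', 'b']}, ['list', 1]);
// elem === ['a', 'b'], key === 1, so elem[key] === 'b'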

pathEquals = (p1, p2) ->
return false if p1.length != p2.length
for e,i in p1
return false if e != p2[i]
true
const pathEquals = function(p1, p2) {
if (p1.length !== p2.length) { return false; }
for (let i = 0; i < p1.length; i++) {
const e = p1[i];
if (e !== p2[i]) { return false; }
}
return true;
};

json.api =
provides: {json:true}
json.api = {
provides: {json:true},

at: (path...) -> new SubDoc this, depath path
at(...path) { return new SubDoc(this, depath(path)); },

get: -> @snapshot
set: (value, cb) -> @setAt [], value, cb
get() { return this.snapshot; },
set(value, cb) { return this.setAt([], value, cb); },

getAt: (path) ->
{elem, key} = traverse @snapshot, path
return elem[key]
getAt(path) {
const {elem, key} = traverse(this.snapshot, path);
return elem[key];
},

setAt: (path, value, cb) ->
{elem, key} = traverse @snapshot, path
op = {p:path}
if elem.constructor == Array
op.li = value
op.ld = elem[key] if typeof elem[key] != 'undefined'
else if typeof elem == 'object'
op.oi = value
op.od = elem[key] if typeof elem[key] != 'undefined'
else throw new Error 'bad path'
@submitOp [op], cb
setAt(path, value, cb) {
const {elem, key} = traverse(this.snapshot, path);
const op = {p:path};
if (elem.constructor === Array) {
op.li = value;
if (typeof elem[key] !== 'undefined') { op.ld = elem[key]; }
} else if (typeof elem === 'object') {
op.oi = value;
if (typeof elem[key] !== 'undefined') { op.od = elem[key]; }
} else { throw new Error('bad path'); }
return this.submitOp([op], cb);
},

removeAt: (path, cb) ->
{elem, key} = traverse @snapshot, path
throw new Error 'no element at that path' unless typeof elem[key] != 'undefined'
op = {p:path}
if elem.constructor == Array
op.ld = elem[key]
else if typeof elem == 'object'
op.od = elem[key]
else throw new Error 'bad path'
@submitOp [op], cb
removeAt(path, cb) {
const {elem, key} = traverse(this.snapshot, path);
if (typeof elem[key] === 'undefined') { throw new Error('no element at that path'); }
const op = {p:path};
if (elem.constructor === Array) {
op.ld = elem[key];
} else if (typeof elem === 'object') {
op.od = elem[key];
} else { throw new Error('bad path'); }
return this.submitOp([op], cb);
},

insertAt: (path, pos, value, cb) ->
{elem, key} = traverse @snapshot, path
op = {p:path.concat pos}
if elem[key].constructor == Array
op.li = value
else if typeof elem[key] == 'string'
op.si = value
@submitOp [op], cb
insertAt(path, pos, value, cb) {
const {elem, key} = traverse(this.snapshot, path);
const op = {p:path.concat(pos)};
if (elem[key].constructor === Array) {
op.li = value;
} else if (typeof elem[key] === 'string') {
op.si = value;
}
return this.submitOp([op], cb);
},

moveAt: (path, from, to, cb) ->
op = [{p:path.concat(from), lm:to}]
@submitOp op, cb
moveAt(path, from, to, cb) {
const op = [{p:path.concat(from), lm:to}];
return this.submitOp(op, cb);
},

addAt: (path, amount, cb) ->
op = [{p:path, na:amount}]
@submitOp op, cb
addAt(path, amount, cb) {
const op = [{p:path, na:amount}];
return this.submitOp(op, cb);
},

deleteTextAt: (path, length, pos, cb) ->
{elem, key} = traverse @snapshot, path
op = [{p:path.concat(pos), sd:elem[key][pos...(pos + length)]}]
@submitOp op, cb
deleteTextAt(path, length, pos, cb) {
const {elem, key} = traverse(this.snapshot, path);
const op = [{p:path.concat(pos), sd:elem[key].slice(pos, (pos + length))}];
return this.submitOp(op, cb);
},

addListener: (path, event, cb) ->
l = {path, event, cb}
@_listeners.push l
l
removeListener: (l) ->
i = @_listeners.indexOf l
return false if i < 0
@_listeners.splice i, 1
return true
_register: ->
@_listeners = []
@on 'change', (op) ->
for c in op
if c.na != undefined or c.si != undefined or c.sd != undefined
# no change to structure
continue
to_remove = []
for l, i in @_listeners
# Transform a dummy op by the incoming op to work out what
# should happen to the listener.
dummy = {p:l.path, na:0}
xformed = @type.transformComponent [], dummy, c, 'left'
if xformed.length == 0
# The op was transformed to noop, so we should delete the listener.
to_remove.push i
else if xformed.length == 1
# The op remained, so grab its new path into the listener.
l.path = xformed[0].p
else
throw new Error "Bad assumption in json-api: xforming an 'si' op will always result in 0 or 1 components."
to_remove.sort (a, b) -> b - a
for i in to_remove
@_listeners.splice i, 1
@on 'remoteop', (op) ->
for c in op
match_path = if c.na == undefined then c.p[...c.p.length-1] else c.p
for {path, event, cb} in @_listeners
if pathEquals path, match_path
switch event
when 'insert'
if c.li != undefined and c.ld == undefined
cb(c.p[c.p.length-1], c.li)
else if c.oi != undefined and c.od == undefined
cb(c.p[c.p.length-1], c.oi)
else if c.si != undefined
cb(c.p[c.p.length-1], c.si)
when 'delete'
if c.li == undefined and c.ld != undefined
cb(c.p[c.p.length-1], c.ld)
else if c.oi == undefined and c.od != undefined
cb(c.p[c.p.length-1], c.od)
else if c.sd != undefined
cb(c.p[c.p.length-1], c.sd)
when 'replace'
if c.li != undefined and c.ld != undefined
cb(c.p[c.p.length-1], c.ld, c.li)
else if c.oi != undefined and c.od != undefined
cb(c.p[c.p.length-1], c.od, c.oi)
when 'move'
if c.lm != undefined
cb(c.p[c.p.length-1], c.lm)
when 'add'
if c.na != undefined
cb(c.na)
else if (common = @type.commonPath match_path, path)?
if event == 'child op'
if match_path.length == path.length == common
throw new Error "paths match length and have commonality, but aren't equal?"
child_path = c.p[common+1..]
cb(child_path, c)
addListener(path, event, cb) {
const l = {path, event, cb};
this._listeners.push(l);
return l;
},
removeListener(l) {
const i = this._listeners.indexOf(l);
if (i < 0) { return false; }
this._listeners.splice(i, 1);
return true;
},
_register() {
this._listeners = [];
this.on('change', function(op) {
return (() => {
const result = [];
for (let c of Array.from(op)) {
var i;
if ((c.na !== undefined) || (c.si !== undefined) || (c.sd !== undefined)) {
// no change to structure
continue;
}
var to_remove = [];
for (i = 0; i < this._listeners.length; i++) {
// Transform a dummy op by the incoming op to work out what
// should happen to the listener.
const l = this._listeners[i];
const dummy = {p:l.path, na:0};
const xformed = this.type.transformComponent([], dummy, c, 'left');
if (xformed.length === 0) {
// The op was transformed to noop, so we should delete the listener.
to_remove.push(i);
} else if (xformed.length === 1) {
// The op remained, so grab its new path into the listener.
l.path = xformed[0].p;
} else {
throw new Error("Bad assumption in json-api: xforming an 'si' op will always result in 0 or 1 components.");
}
}
to_remove.sort((a, b) => b - a);
result.push((() => {
const result1 = [];
for (i of Array.from(to_remove)) {
result1.push(this._listeners.splice(i, 1));
}
return result1;
})());
}
return result;
})();
});
return this.on('remoteop', function(op) {
return (() => {
const result = [];
for (var c of Array.from(op)) {
var match_path = c.na === undefined ? c.p.slice(0, c.p.length-1) : c.p;
result.push((() => {
const result1 = [];
for (let {path, event, cb} of Array.from(this._listeners)) {
var common;
if (pathEquals(path, match_path)) {
switch (event) {
case 'insert':
if ((c.li !== undefined) && (c.ld === undefined)) {
result1.push(cb(c.p[c.p.length-1], c.li));
} else if ((c.oi !== undefined) && (c.od === undefined)) {
result1.push(cb(c.p[c.p.length-1], c.oi));
} else if (c.si !== undefined) {
result1.push(cb(c.p[c.p.length-1], c.si));
} else {
result1.push(undefined);
}
break;
case 'delete':
if ((c.li === undefined) && (c.ld !== undefined)) {
result1.push(cb(c.p[c.p.length-1], c.ld));
} else if ((c.oi === undefined) && (c.od !== undefined)) {
result1.push(cb(c.p[c.p.length-1], c.od));
} else if (c.sd !== undefined) {
result1.push(cb(c.p[c.p.length-1], c.sd));
} else {
result1.push(undefined);
}
break;
case 'replace':
if ((c.li !== undefined) && (c.ld !== undefined)) {
result1.push(cb(c.p[c.p.length-1], c.ld, c.li));
} else if ((c.oi !== undefined) && (c.od !== undefined)) {
result1.push(cb(c.p[c.p.length-1], c.od, c.oi));
} else {
result1.push(undefined);
}
break;
case 'move':
if (c.lm !== undefined) {
result1.push(cb(c.p[c.p.length-1], c.lm));
} else {
result1.push(undefined);
}
break;
case 'add':
if (c.na !== undefined) {
result1.push(cb(c.na));
} else {
result1.push(undefined);
}
break;
default:
result1.push(undefined);
}
} else if ((common = this.type.commonPath(match_path, path)) != null) {
if (event === 'child op') {
if (match_path.length === path.length && path.length === common) {
throw new Error("paths match length and have commonality, but aren't equal?");
}
const child_path = c.p.slice(common+1);
result1.push(cb(child_path, c));
} else {
result1.push(undefined);
}
} else {
result1.push(undefined);
}
}
return result1;
})());
}
return result;
})();
});
}
};
@ -1,441 +1,534 @@
|
|||
# This is the implementation of the JSON OT type.
|
||||
#
|
||||
# Spec is here: https://github.com/josephg/ShareJS/wiki/JSON-Operations
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
// This is the implementation of the JSON OT type.
|
||||
//
|
||||
// Spec is here: https://github.com/josephg/ShareJS/wiki/JSON-Operations
|
||||
|
||||
if WEB?
|
||||
text = exports.types.text
|
||||
else
|
||||
text = require './text'
|
||||
let text;
|
||||
if (typeof WEB !== 'undefined' && WEB !== null) {
|
||||
({
|
||||
text
|
||||
} = exports.types);
|
||||
} else {
|
||||
text = require('./text');
|
||||
}
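Not part of the diff: for orientation, json0 ops are arrays of components, each addressed by a path p. A few illustrative op shapes, paraphrased from the spec linked above, against a made-up snapshot {tags: ['a'], count: 1}:

// [{p: ['tags', 1], li: 'b'}]   - list insert: tags becomes ['a', 'b']
// [{p: ['count'], na: 2}]       - number add: count becomes 3
// [{p: ['title'], oi: 'Hello'}] - object insert: adds a title key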
|
||||
|
||||
json = {}
|
||||
const json = {};
|
||||
|
||||
json.name = 'json'
|
||||
json.name = 'json';
|
||||
|
||||
json.create = -> null
|
||||
json.create = () => null;
|
||||
|
||||
json.invertComponent = (c) ->
|
||||
c_ = {p: c.p}
|
||||
c_.sd = c.si if c.si != undefined
|
||||
c_.si = c.sd if c.sd != undefined
|
||||
c_.od = c.oi if c.oi != undefined
|
||||
c_.oi = c.od if c.od != undefined
|
||||
c_.ld = c.li if c.li != undefined
|
||||
c_.li = c.ld if c.ld != undefined
|
||||
c_.na = -c.na if c.na != undefined
|
||||
if c.lm != undefined
|
||||
c_.lm = c.p[c.p.length-1]
|
||||
c_.p = c.p[0...c.p.length - 1].concat([c.lm])
|
||||
c_
|
||||
json.invertComponent = function(c) {
|
||||
const c_ = {p: c.p};
|
||||
if (c.si !== undefined) { c_.sd = c.si; }
|
||||
if (c.sd !== undefined) { c_.si = c.sd; }
|
||||
if (c.oi !== undefined) { c_.od = c.oi; }
|
||||
if (c.od !== undefined) { c_.oi = c.od; }
|
||||
if (c.li !== undefined) { c_.ld = c.li; }
|
||||
if (c.ld !== undefined) { c_.li = c.ld; }
|
||||
if (c.na !== undefined) { c_.na = -c.na; }
|
||||
if (c.lm !== undefined) {
|
||||
c_.lm = c.p[c.p.length-1];
|
||||
c_.p = c.p.slice(0, c.p.length - 1).concat([c.lm]);
|
||||
}
|
||||
return c_;
|
||||
};
|
||||
|
||||
json.invert = (op) -> json.invertComponent c for c in op.slice().reverse()
|
||||
json.invert = op => Array.from(op.slice().reverse()).map((c) => json.invertComponent(c));
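Concretely, inversion swaps each "insert" field with its "delete" twin; two hand-checked examples (not part of the diff):

json.invertComponent({p: ['n'], na: 5});
// => {p: ['n'], na: -5}
json.invertComponent({p: ['x'], oi: 'new', od: 'old'});
// => {p: ['x'], oi: 'old', od: 'new'}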
|
||||
|
||||
json.checkValidOp = (op) ->
|
||||
json.checkValidOp = function(op) {};
|
||||
|
||||
isArray = (o) -> Object.prototype.toString.call(o) == '[object Array]'
|
||||
json.checkList = (elem) ->
|
||||
throw new Error 'Referenced element not a list' unless isArray(elem)
|
||||
const isArray = o => Object.prototype.toString.call(o) === '[object Array]';
|
||||
json.checkList = function(elem) {
|
||||
if (!isArray(elem)) { throw new Error('Referenced element not a list'); }
|
||||
};
|
||||
|
||||
json.checkObj = (elem) ->
|
||||
throw new Error "Referenced element not an object (it was #{JSON.stringify elem})" unless elem.constructor is Object
|
||||
json.checkObj = function(elem) {
|
||||
if (elem.constructor !== Object) { throw new Error(`Referenced element not an object (it was ${JSON.stringify(elem)})`); }
|
||||
};
|
||||
|
||||
json.apply = (snapshot, op) ->
|
||||
json.checkValidOp op
|
||||
op = clone op
|
||||
json.apply = function(snapshot, op) {
|
||||
json.checkValidOp(op);
|
||||
op = clone(op);
|
||||
|
||||
container = {data: clone snapshot}
|
||||
const container = {data: clone(snapshot)};
|
||||
|
||||
try
|
||||
for c, i in op
|
||||
parent = null
|
||||
parentkey = null
|
||||
elem = container
|
||||
key = 'data'
|
||||
try {
|
||||
for (let i = 0; i < op.length; i++) {
|
||||
const c = op[i];
|
||||
let parent = null;
|
||||
let parentkey = null;
|
||||
let elem = container;
|
||||
let key = 'data';
|
||||
|
||||
for p in c.p
|
||||
parent = elem
|
||||
parentkey = key
|
||||
elem = elem[key]
|
||||
key = p
|
||||
for (let p of Array.from(c.p)) {
|
||||
parent = elem;
|
||||
parentkey = key;
|
||||
elem = elem[key];
|
||||
key = p;
|
||||
|
||||
throw new Error 'Path invalid' unless parent?
|
||||
if (parent == null) { throw new Error('Path invalid'); }
|
||||
}
|
||||
|
||||
if c.na != undefined
|
||||
# Number add
|
||||
throw new Error 'Referenced element not a number' unless typeof elem[key] is 'number'
|
||||
elem[key] += c.na
|
||||
if (c.na !== undefined) {
|
||||
// Number add
|
||||
if (typeof elem[key] !== 'number') { throw new Error('Referenced element not a number'); }
|
||||
elem[key] += c.na;
|
||||
|
||||
else if c.si != undefined
|
||||
# String insert
|
||||
throw new Error "Referenced element not a string (it was #{JSON.stringify elem})" unless typeof elem is 'string'
|
||||
parent[parentkey] = elem[...key] + c.si + elem[key..]
|
||||
else if c.sd != undefined
|
||||
# String delete
|
||||
throw new Error 'Referenced element not a string' unless typeof elem is 'string'
|
||||
throw new Error 'Deleted string does not match' unless elem[key...key + c.sd.length] == c.sd
|
||||
parent[parentkey] = elem[...key] + elem[key + c.sd.length..]
|
||||
} else if (c.si !== undefined) {
|
||||
// String insert
|
||||
if (typeof elem !== 'string') { throw new Error(`Referenced element not a string (it was ${JSON.stringify(elem)})`); }
|
||||
parent[parentkey] = elem.slice(0, key) + c.si + elem.slice(key);
|
||||
} else if (c.sd !== undefined) {
|
||||
// String delete
|
||||
if (typeof elem !== 'string') { throw new Error('Referenced element not a string'); }
|
||||
if (elem.slice(key, key + c.sd.length) !== c.sd) { throw new Error('Deleted string does not match'); }
|
||||
parent[parentkey] = elem.slice(0, key) + elem.slice(key + c.sd.length);
|
||||
|
||||
else if c.li != undefined && c.ld != undefined
|
||||
# List replace
|
||||
json.checkList elem
|
||||
} else if ((c.li !== undefined) && (c.ld !== undefined)) {
|
||||
// List replace
|
||||
json.checkList(elem);
|
||||
|
||||
# Should check the list element matches c.ld
|
||||
elem[key] = c.li
|
||||
else if c.li != undefined
|
||||
# List insert
|
||||
json.checkList elem
|
||||
// Should check the list element matches c.ld
|
||||
elem[key] = c.li;
|
||||
} else if (c.li !== undefined) {
|
||||
// List insert
|
||||
json.checkList(elem);
|
||||
|
||||
elem.splice key, 0, c.li
|
||||
else if c.ld != undefined
|
||||
# List delete
|
||||
json.checkList elem
|
||||
elem.splice(key, 0, c.li);
|
||||
} else if (c.ld !== undefined) {
|
||||
// List delete
|
||||
json.checkList(elem);
|
||||
|
||||
# Should check the list element matches c.ld here too.
|
||||
elem.splice key, 1
|
||||
else if c.lm != undefined
|
||||
# List move
|
||||
json.checkList elem
|
||||
if c.lm != key
|
||||
e = elem[key]
|
||||
# Remove it...
|
||||
elem.splice key, 1
|
||||
# And insert it back.
|
||||
elem.splice c.lm, 0, e
|
||||
// Should check the list element matches c.ld here too.
|
||||
elem.splice(key, 1);
|
||||
} else if (c.lm !== undefined) {
|
||||
// List move
|
||||
json.checkList(elem);
|
||||
if (c.lm !== key) {
|
||||
const e = elem[key];
|
||||
// Remove it...
|
||||
elem.splice(key, 1);
|
||||
// And insert it back.
|
||||
elem.splice(c.lm, 0, e);
|
||||
}
|
||||
|
||||
else if c.oi != undefined
|
||||
# Object insert / replace
|
||||
json.checkObj elem
|
||||
} else if (c.oi !== undefined) {
|
||||
// Object insert / replace
|
||||
json.checkObj(elem);
|
||||
|
||||
# Should check that elem[key] == c.od
|
||||
elem[key] = c.oi
|
||||
else if c.od != undefined
|
||||
# Object delete
|
||||
json.checkObj elem
|
||||
// Should check that elem[key] == c.od
|
||||
elem[key] = c.oi;
|
||||
} else if (c.od !== undefined) {
|
||||
// Object delete
|
||||
json.checkObj(elem);
|
||||
|
||||
# Should check that elem[key] == c.od
|
||||
delete elem[key]
|
||||
else
|
||||
throw new Error 'invalid / missing instruction in op'
|
||||
catch error
|
||||
# TODO: Roll back all already applied changes. Write tests before implementing this code.
|
||||
throw error
|
||||
// Should check that elem[key] == c.od
|
||||
delete elem[key];
|
||||
} else {
|
||||
throw new Error('invalid / missing instruction in op');
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
// TODO: Roll back all already applied changes. Write tests before implementing this code.
|
||||
throw error;
|
||||
}
|
||||
|
||||
container.data
|
||||
return container.data;
|
||||
};
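A usage sketch for the converted apply, hand-evaluated against the code above (the snapshot values are made up):

let snapshot = {list: ['a', 'c'], n: 1};
snapshot = json.apply(snapshot, [{p: ['list', 1], li: 'b'}]);
// => {list: ['a', 'b', 'c'], n: 1}
snapshot = json.apply(snapshot, [{p: ['n'], na: 4}]);
// => {list: ['a', 'b', 'c'], n: 5}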
|
||||
|
||||
# Checks if two paths, p1 and p2 match.
|
||||
json.pathMatches = (p1, p2, ignoreLast) ->
|
||||
return false unless p1.length == p2.length
|
||||
// Checks if two paths, p1 and p2 match.
|
||||
json.pathMatches = function(p1, p2, ignoreLast) {
|
||||
if (p1.length !== p2.length) { return false; }
|
||||
|
||||
for p, i in p1
|
||||
return false if p != p2[i] and (!ignoreLast or i != p1.length - 1)
|
||||
for (let i = 0; i < p1.length; i++) {
|
||||
const p = p1[i];
|
||||
if ((p !== p2[i]) && (!ignoreLast || (i !== (p1.length - 1)))) { return false; }
|
||||
}
|
||||
|
||||
true
|
||||
return true;
|
||||
};
|
||||
|
||||
json.append = (dest, c) ->
|
||||
c = clone c
|
||||
if dest.length != 0 and json.pathMatches c.p, (last = dest[dest.length - 1]).p
|
||||
if last.na != undefined and c.na != undefined
|
||||
dest[dest.length - 1] = { p: last.p, na: last.na + c.na }
|
||||
else if last.li != undefined and c.li == undefined and c.ld == last.li
|
||||
# insert immediately followed by delete becomes a noop.
|
||||
if last.ld != undefined
|
||||
# leave the delete part of the replace
|
||||
delete last.li
|
||||
else
|
||||
dest.pop()
|
||||
else if last.od != undefined and last.oi == undefined and
|
||||
c.oi != undefined and c.od == undefined
|
||||
last.oi = c.oi
|
||||
else if c.lm != undefined and c.p[c.p.length-1] == c.lm
|
||||
null # don't do anything
|
||||
else
|
||||
dest.push c
|
||||
else
|
||||
dest.push c
|
||||
json.append = function(dest, c) {
|
||||
let last;
|
||||
c = clone(c);
|
||||
if ((dest.length !== 0) && json.pathMatches(c.p, (last = dest[dest.length - 1]).p)) {
|
||||
if ((last.na !== undefined) && (c.na !== undefined)) {
|
||||
return dest[dest.length - 1] = { p: last.p, na: last.na + c.na };
|
||||
} else if ((last.li !== undefined) && (c.li === undefined) && (c.ld === last.li)) {
|
||||
// insert immediately followed by delete becomes a noop.
|
||||
if (last.ld !== undefined) {
|
||||
// leave the delete part of the replace
|
||||
return delete last.li;
|
||||
} else {
|
||||
return dest.pop();
|
||||
}
|
||||
} else if ((last.od !== undefined) && (last.oi === undefined) &&
|
||||
(c.oi !== undefined) && (c.od === undefined)) {
|
||||
return last.oi = c.oi;
|
||||
} else if ((c.lm !== undefined) && (c.p[c.p.length-1] === c.lm)) {
|
||||
return null; // don't do anything
|
||||
} else {
|
||||
return dest.push(c);
|
||||
}
|
||||
} else {
|
||||
return dest.push(c);
|
||||
}
|
||||
};
|
||||
|
||||
json.compose = (op1, op2) ->
|
||||
json.checkValidOp op1
|
||||
json.checkValidOp op2
|
||||
json.compose = function(op1, op2) {
|
||||
json.checkValidOp(op1);
|
||||
json.checkValidOp(op2);
|
||||
|
||||
newOp = clone op1
|
||||
json.append newOp, c for c in op2
|
||||
const newOp = clone(op1);
|
||||
for (let c of Array.from(op2)) { json.append(newOp, c); }
|
||||
|
||||
newOp
|
||||
return newOp;
|
||||
};
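compose relies on append's coalescing, so adjacent components on the same path can merge; a small hand-checked example:

json.compose([{p: ['n'], na: 1}], [{p: ['n'], na: 2}]);
// => [{p: ['n'], na: 3}] - the two number-adds merge into one component
json.compose([{p: ['a'], oi: 1}], [{p: ['b'], oi: 2}]);
// => [{p: ['a'], oi: 1}, {p: ['b'], oi: 2}] - unrelated paths stay separate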
|
||||
|
||||
json.normalize = (op) ->
|
||||
newOp = []
|
||||
json.normalize = function(op) {
|
||||
const newOp = [];
|
||||
|
||||
op = [op] unless isArray op
|
||||
if (!isArray(op)) { op = [op]; }
|
||||
|
||||
for c in op
|
||||
c.p ?= []
|
||||
json.append newOp, c
|
||||
for (let c of Array.from(op)) {
|
||||
if (c.p == null) { c.p = []; }
|
||||
json.append(newOp, c);
|
||||
}
|
||||
|
||||
newOp
|
||||
return newOp;
|
||||
};
|
||||
|
||||
# hax, copied from test/types/json. Apparently this is still the fastest way to deep clone an object, assuming
|
||||
# we have browser support for JSON.
|
||||
# http://jsperf.com/cloning-an-object/12
|
||||
clone = (o) -> JSON.parse(JSON.stringify o)
|
||||
// hax, copied from test/types/json. Apparently this is still the fastest way to deep clone an object, assuming
|
||||
// we have browser support for JSON.
|
||||
// http://jsperf.com/cloning-an-object/12
|
||||
var clone = o => JSON.parse(JSON.stringify(o));
|
||||
|
||||
json.commonPath = (p1, p2) ->
|
||||
p1 = p1.slice()
|
||||
p2 = p2.slice()
|
||||
p1.unshift('data')
|
||||
p2.unshift('data')
|
||||
p1 = p1[...p1.length-1]
|
||||
p2 = p2[...p2.length-1]
|
||||
return -1 if p2.length == 0
|
||||
i = 0
|
||||
while p1[i] == p2[i] && i < p1.length
|
||||
i++
|
||||
if i == p2.length
|
||||
return i-1
|
||||
return
|
||||
json.commonPath = function(p1, p2) {
|
||||
p1 = p1.slice();
|
||||
p2 = p2.slice();
|
||||
p1.unshift('data');
|
||||
p2.unshift('data');
|
||||
p1 = p1.slice(0, p1.length-1);
|
||||
p2 = p2.slice(0, p2.length-1);
|
||||
if (p2.length === 0) { return -1; }
|
||||
let i = 0;
|
||||
while ((p1[i] === p2[i]) && (i < p1.length)) {
|
||||
i++;
|
||||
if (i === p2.length) {
|
||||
return i-1;
|
||||
}
|
||||
}
|
||||
};
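The return value is the index of the path element at which the two ops sit in the same container (counting the implicit 'data' root), or undefined when they share no container; hand-traced examples:

json.commonPath(['list', 0], ['list', 1]); // => 1 - siblings in the same list
json.commonPath(['a', 'x'], ['b', 'y']);   // => undefined - no common container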
|
||||
|
||||
# transform c so it applies to a document with otherC applied.
|
||||
json.transformComponent = (dest, c, otherC, type) ->
|
||||
c = clone c
|
||||
c.p.push(0) if c.na != undefined
|
||||
otherC.p.push(0) if otherC.na != undefined
|
||||
// transform c so it applies to a document with otherC applied.
|
||||
json.transformComponent = function(dest, c, otherC, type) {
|
||||
let oc;
|
||||
c = clone(c);
|
||||
if (c.na !== undefined) { c.p.push(0); }
|
||||
if (otherC.na !== undefined) { otherC.p.push(0); }
|
||||
|
||||
common = json.commonPath c.p, otherC.p
|
||||
common2 = json.commonPath otherC.p, c.p
|
||||
const common = json.commonPath(c.p, otherC.p);
|
||||
const common2 = json.commonPath(otherC.p, c.p);
|
||||
|
||||
cplength = c.p.length
|
||||
otherCplength = otherC.p.length
|
||||
const cplength = c.p.length;
|
||||
const otherCplength = otherC.p.length;
|
||||
|
||||
c.p.pop() if c.na != undefined # hax
|
||||
otherC.p.pop() if otherC.na != undefined
|
||||
if (c.na !== undefined) { c.p.pop(); } // hax
|
||||
if (otherC.na !== undefined) { otherC.p.pop(); }
|
||||
|
||||
if otherC.na
|
||||
if common2? && otherCplength >= cplength && otherC.p[common2] == c.p[common2]
|
||||
if c.ld != undefined
|
||||
oc = clone otherC
|
||||
oc.p = oc.p[cplength..]
|
||||
c.ld = json.apply clone(c.ld), [oc]
|
||||
else if c.od != undefined
|
||||
oc = clone otherC
|
||||
oc.p = oc.p[cplength..]
|
||||
c.od = json.apply clone(c.od), [oc]
|
||||
json.append dest, c
|
||||
return dest
|
||||
if (otherC.na) {
|
||||
if ((common2 != null) && (otherCplength >= cplength) && (otherC.p[common2] === c.p[common2])) {
|
||||
if (c.ld !== undefined) {
|
||||
oc = clone(otherC);
|
||||
oc.p = oc.p.slice(cplength);
|
||||
c.ld = json.apply(clone(c.ld), [oc]);
|
||||
} else if (c.od !== undefined) {
|
||||
oc = clone(otherC);
|
||||
oc.p = oc.p.slice(cplength);
|
||||
c.od = json.apply(clone(c.od), [oc]);
|
||||
}
|
||||
}
|
||||
json.append(dest, c);
|
||||
return dest;
|
||||
}
|
||||
|
||||
if common2? && otherCplength > cplength && c.p[common2] == otherC.p[common2]
|
||||
# transform based on c
|
||||
if c.ld != undefined
|
||||
oc = clone otherC
|
||||
oc.p = oc.p[cplength..]
|
||||
c.ld = json.apply clone(c.ld), [oc]
|
||||
else if c.od != undefined
|
||||
oc = clone otherC
|
||||
oc.p = oc.p[cplength..]
|
||||
c.od = json.apply clone(c.od), [oc]
|
||||
if ((common2 != null) && (otherCplength > cplength) && (c.p[common2] === otherC.p[common2])) {
|
||||
// transform based on c
|
||||
if (c.ld !== undefined) {
|
||||
oc = clone(otherC);
|
||||
oc.p = oc.p.slice(cplength);
|
||||
c.ld = json.apply(clone(c.ld), [oc]);
|
||||
} else if (c.od !== undefined) {
|
||||
oc = clone(otherC);
|
||||
oc.p = oc.p.slice(cplength);
|
||||
c.od = json.apply(clone(c.od), [oc]);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if common?
|
||||
commonOperand = cplength == otherCplength
|
||||
# transform based on otherC
|
||||
if otherC.na != undefined
|
||||
# this case is handled above due to icky path hax
|
||||
else if otherC.si != undefined || otherC.sd != undefined
|
||||
# String op vs string op - pass through to text type
|
||||
if c.si != undefined || c.sd != undefined
|
||||
throw new Error("must be a string?") unless commonOperand
|
||||
if (common != null) {
|
||||
let from, p, to;
|
||||
const commonOperand = cplength === otherCplength;
|
||||
// transform based on otherC
|
||||
if (otherC.na !== undefined) {
|
||||
// this case is handled above due to icky path hax
|
||||
} else if ((otherC.si !== undefined) || (otherC.sd !== undefined)) {
|
||||
// String op vs string op - pass through to text type
|
||||
if ((c.si !== undefined) || (c.sd !== undefined)) {
|
||||
if (!commonOperand) { throw new Error("must be a string?"); }
|
||||
|
||||
# Convert an op component to a text op component
|
||||
convert = (component) ->
|
||||
newC = p:component.p[component.p.length - 1]
|
||||
if component.si
|
||||
newC.i = component.si
|
||||
else
|
||||
newC.d = component.sd
|
||||
newC
|
||||
// Convert an op component to a text op component
|
||||
const convert = function(component) {
|
||||
const newC = {p:component.p[component.p.length - 1]};
|
||||
if (component.si) {
|
||||
newC.i = component.si;
|
||||
} else {
|
||||
newC.d = component.sd;
|
||||
}
|
||||
return newC;
|
||||
};
|
||||
|
||||
tc1 = convert c
|
||||
tc2 = convert otherC
|
||||
const tc1 = convert(c);
|
||||
const tc2 = convert(otherC);
|
||||
|
||||
res = []
|
||||
text._tc res, tc1, tc2, type
|
||||
for tc in res
|
||||
jc = { p: c.p[...common] }
|
||||
jc.p.push(tc.p)
|
||||
jc.si = tc.i if tc.i?
|
||||
jc.sd = tc.d if tc.d?
|
||||
json.append dest, jc
|
||||
return dest
|
||||
else if otherC.li != undefined && otherC.ld != undefined
|
||||
if otherC.p[common] == c.p[common]
|
||||
# noop
|
||||
if !commonOperand
|
||||
# we're below the deleted element, so -> noop
|
||||
return dest
|
||||
else if c.ld != undefined
|
||||
# we're trying to delete the same element, -> noop
|
||||
if c.li != undefined and type == 'left'
|
||||
# we're both replacing one element with another. only one can
|
||||
# survive!
|
||||
c.ld = clone otherC.li
|
||||
else
|
||||
return dest
|
||||
else if otherC.li != undefined
|
||||
if c.li != undefined and c.ld == undefined and commonOperand and c.p[common] == otherC.p[common]
|
||||
# in li vs. li, left wins.
|
||||
if type == 'right'
|
||||
c.p[common]++
|
||||
else if otherC.p[common] <= c.p[common]
|
||||
c.p[common]++
|
||||
const res = [];
|
||||
text._tc(res, tc1, tc2, type);
|
||||
for (let tc of Array.from(res)) {
|
||||
const jc = { p: c.p.slice(0, common) };
|
||||
jc.p.push(tc.p);
|
||||
if (tc.i != null) { jc.si = tc.i; }
|
||||
if (tc.d != null) { jc.sd = tc.d; }
|
||||
json.append(dest, jc);
|
||||
}
|
||||
return dest;
|
||||
}
|
||||
} else if ((otherC.li !== undefined) && (otherC.ld !== undefined)) {
|
||||
if (otherC.p[common] === c.p[common]) {
|
||||
// noop
|
||||
if (!commonOperand) {
|
||||
// we're below the deleted element, so -> noop
|
||||
return dest;
|
||||
} else if (c.ld !== undefined) {
|
||||
// we're trying to delete the same element, -> noop
|
||||
if ((c.li !== undefined) && (type === 'left')) {
|
||||
// we're both replacing one element with another. only one can
|
||||
// survive!
|
||||
c.ld = clone(otherC.li);
|
||||
} else {
|
||||
return dest;
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (otherC.li !== undefined) {
|
||||
if ((c.li !== undefined) && (c.ld === undefined) && commonOperand && (c.p[common] === otherC.p[common])) {
|
||||
// in li vs. li, left wins.
|
||||
if (type === 'right') {
|
||||
c.p[common]++;
|
||||
}
|
||||
} else if (otherC.p[common] <= c.p[common]) {
|
||||
c.p[common]++;
|
||||
}
|
||||
|
||||
if c.lm != undefined
|
||||
if commonOperand
|
||||
# otherC edits the same list we edit
|
||||
if otherC.p[common] <= c.lm
|
||||
c.lm++
|
||||
# changing c.from is handled above.
|
||||
else if otherC.ld != undefined
|
||||
if c.lm != undefined
|
||||
if commonOperand
|
||||
if otherC.p[common] == c.p[common]
|
||||
# they deleted the thing we're trying to move
|
||||
return dest
|
||||
# otherC edits the same list we edit
|
||||
p = otherC.p[common]
|
||||
from = c.p[common]
|
||||
to = c.lm
|
||||
if p < to || (p == to && from < to)
|
||||
c.lm--
|
||||
if (c.lm !== undefined) {
|
||||
if (commonOperand) {
|
||||
// otherC edits the same list we edit
|
||||
if (otherC.p[common] <= c.lm) {
|
||||
c.lm++;
|
||||
}
|
||||
}
|
||||
}
|
||||
// changing c.from is handled above.
|
||||
} else if (otherC.ld !== undefined) {
|
||||
if (c.lm !== undefined) {
|
||||
if (commonOperand) {
|
||||
if (otherC.p[common] === c.p[common]) {
|
||||
// they deleted the thing we're trying to move
|
||||
return dest;
|
||||
}
|
||||
// otherC edits the same list we edit
|
||||
p = otherC.p[common];
|
||||
from = c.p[common];
|
||||
to = c.lm;
|
||||
if ((p < to) || ((p === to) && (from < to))) {
|
||||
c.lm--;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if otherC.p[common] < c.p[common]
|
||||
c.p[common]--
|
||||
else if otherC.p[common] == c.p[common]
|
||||
if otherCplength < cplength
|
||||
# we're below the deleted element, so -> noop
|
||||
return dest
|
||||
else if c.ld != undefined
|
||||
if c.li != undefined
|
||||
# we're replacing, they're deleting. we become an insert.
|
||||
delete c.ld
|
||||
else
|
||||
# we're trying to delete the same element, -> noop
|
||||
return dest
|
||||
else if otherC.lm != undefined
|
||||
if c.lm != undefined and cplength == otherCplength
|
||||
# lm vs lm, here we go!
|
||||
from = c.p[common]
|
||||
to = c.lm
|
||||
otherFrom = otherC.p[common]
|
||||
otherTo = otherC.lm
|
||||
if otherFrom != otherTo
|
||||
# if otherFrom == otherTo, we don't need to change our op.
|
||||
if (otherC.p[common] < c.p[common]) {
|
||||
c.p[common]--;
|
||||
} else if (otherC.p[common] === c.p[common]) {
|
||||
if (otherCplength < cplength) {
|
||||
// we're below the deleted element, so -> noop
|
||||
return dest;
|
||||
} else if (c.ld !== undefined) {
|
||||
if (c.li !== undefined) {
|
||||
// we're replacing, they're deleting. we become an insert.
|
||||
delete c.ld;
|
||||
} else {
|
||||
// we're trying to delete the same element, -> noop
|
||||
return dest;
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (otherC.lm !== undefined) {
|
||||
if ((c.lm !== undefined) && (cplength === otherCplength)) {
|
||||
// lm vs lm, here we go!
|
||||
from = c.p[common];
|
||||
to = c.lm;
|
||||
const otherFrom = otherC.p[common];
|
||||
const otherTo = otherC.lm;
|
||||
if (otherFrom !== otherTo) {
|
||||
// if otherFrom == otherTo, we don't need to change our op.
|
||||
|
||||
# where did my thing go?
|
||||
if from == otherFrom
|
||||
# they moved it! tie break.
|
||||
if type == 'left'
|
||||
c.p[common] = otherTo
|
||||
if from == to # ugh
|
||||
c.lm = otherTo
|
||||
else
|
||||
return dest
|
||||
else
|
||||
# they moved around it
|
||||
if from > otherFrom
|
||||
c.p[common]--
|
||||
if from > otherTo
|
||||
c.p[common]++
|
||||
else if from == otherTo
|
||||
if otherFrom > otherTo
|
||||
c.p[common]++
|
||||
if from == to # ugh, again
|
||||
c.lm++
|
||||
// where did my thing go?
|
||||
if (from === otherFrom) {
|
||||
// they moved it! tie break.
|
||||
if (type === 'left') {
|
||||
c.p[common] = otherTo;
|
||||
if (from === to) { // ugh
|
||||
c.lm = otherTo;
|
||||
}
|
||||
} else {
|
||||
return dest;
|
||||
}
|
||||
} else {
|
||||
// they moved around it
|
||||
if (from > otherFrom) {
|
||||
c.p[common]--;
|
||||
}
|
||||
if (from > otherTo) {
|
||||
c.p[common]++;
|
||||
} else if (from === otherTo) {
|
||||
if (otherFrom > otherTo) {
|
||||
c.p[common]++;
|
||||
if (from === to) { // ugh, again
|
||||
c.lm++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# step 2: where am i going to put it?
|
||||
if to > otherFrom
|
||||
c.lm--
|
||||
else if to == otherFrom
|
||||
if to > from
|
||||
c.lm--
|
||||
if to > otherTo
|
||||
c.lm++
|
||||
else if to == otherTo
|
||||
# if we're both moving in the same direction, tie break
|
||||
if (otherTo > otherFrom and to > from) or
|
||||
(otherTo < otherFrom and to < from)
|
||||
if type == 'right'
|
||||
c.lm++
|
||||
else
|
||||
if to > from
|
||||
c.lm++
|
||||
else if to == otherFrom
|
||||
c.lm--
|
||||
else if c.li != undefined and c.ld == undefined and commonOperand
|
||||
# li
|
||||
from = otherC.p[common]
|
||||
to = otherC.lm
|
||||
p = c.p[common]
|
||||
if p > from
|
||||
c.p[common]--
|
||||
if p > to
|
||||
c.p[common]++
|
||||
else
|
||||
# ld, ld+li, si, sd, na, oi, od, oi+od, any li on an element beneath
|
||||
# the lm
|
||||
#
|
||||
# i.e. things care about where their item is after the move.
|
||||
from = otherC.p[common]
|
||||
to = otherC.lm
|
||||
p = c.p[common]
|
||||
if p == from
|
||||
c.p[common] = to
|
||||
else
|
||||
if p > from
|
||||
c.p[common]--
|
||||
if p > to
|
||||
c.p[common]++
|
||||
else if p == to
|
||||
if from > to
|
||||
c.p[common]++
|
||||
else if otherC.oi != undefined && otherC.od != undefined
|
||||
if c.p[common] == otherC.p[common]
|
||||
if c.oi != undefined and commonOperand
|
||||
# we inserted where someone else replaced
|
||||
if type == 'right'
|
||||
# left wins
|
||||
return dest
|
||||
else
|
||||
# we win, make our op replace what they inserted
|
||||
c.od = otherC.oi
|
||||
else
|
||||
# -> noop if the other component is deleting the same object (or any
|
||||
# parent)
|
||||
return dest
|
||||
else if otherC.oi != undefined
|
||||
if c.oi != undefined and c.p[common] == otherC.p[common]
|
||||
# left wins if we try to insert at the same place
|
||||
if type == 'left'
|
||||
json.append dest, {p:c.p, od:otherC.oi}
|
||||
else
|
||||
return dest
|
||||
else if otherC.od != undefined
|
||||
if c.p[common] == otherC.p[common]
|
||||
return dest if !commonOperand
|
||||
if c.oi != undefined
|
||||
delete c.od
|
||||
else
|
||||
return dest
|
||||
// step 2: where am i going to put it?
|
||||
if (to > otherFrom) {
|
||||
c.lm--;
|
||||
} else if (to === otherFrom) {
|
||||
if (to > from) {
|
||||
c.lm--;
|
||||
}
|
||||
}
|
||||
if (to > otherTo) {
|
||||
c.lm++;
|
||||
} else if (to === otherTo) {
|
||||
// if we're both moving in the same direction, tie break
|
||||
if (((otherTo > otherFrom) && (to > from)) ||
|
||||
((otherTo < otherFrom) && (to < from))) {
|
||||
if (type === 'right') {
|
||||
c.lm++;
|
||||
}
|
||||
} else {
|
||||
if (to > from) {
|
||||
c.lm++;
|
||||
} else if (to === otherFrom) {
|
||||
c.lm--;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if ((c.li !== undefined) && (c.ld === undefined) && commonOperand) {
|
||||
// li
|
||||
from = otherC.p[common];
|
||||
to = otherC.lm;
|
||||
p = c.p[common];
|
||||
if (p > from) {
|
||||
c.p[common]--;
|
||||
}
|
||||
if (p > to) {
|
||||
c.p[common]++;
|
||||
}
|
||||
} else {
|
||||
// ld, ld+li, si, sd, na, oi, od, oi+od, any li on an element beneath
|
||||
// the lm
|
||||
//
|
||||
// i.e. things care about where their item is after the move.
|
||||
from = otherC.p[common];
|
||||
to = otherC.lm;
|
||||
p = c.p[common];
|
||||
if (p === from) {
|
||||
c.p[common] = to;
|
||||
} else {
|
||||
if (p > from) {
|
||||
c.p[common]--;
|
||||
}
|
||||
if (p > to) {
|
||||
c.p[common]++;
|
||||
} else if (p === to) {
|
||||
if (from > to) {
|
||||
c.p[common]++;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if ((otherC.oi !== undefined) && (otherC.od !== undefined)) {
|
||||
if (c.p[common] === otherC.p[common]) {
|
||||
if ((c.oi !== undefined) && commonOperand) {
|
||||
// we inserted where someone else replaced
|
||||
if (type === 'right') {
|
||||
// left wins
|
||||
return dest;
|
||||
} else {
|
||||
// we win, make our op replace what they inserted
|
||||
c.od = otherC.oi;
|
||||
}
|
||||
} else {
|
||||
// -> noop if the other component is deleting the same object (or any
|
||||
// parent)
|
||||
return dest;
|
||||
}
|
||||
}
|
||||
} else if (otherC.oi !== undefined) {
|
||||
if ((c.oi !== undefined) && (c.p[common] === otherC.p[common])) {
|
||||
// left wins if we try to insert at the same place
|
||||
if (type === 'left') {
|
||||
json.append(dest, {p:c.p, od:otherC.oi});
|
||||
} else {
|
||||
return dest;
|
||||
}
|
||||
}
|
||||
} else if (otherC.od !== undefined) {
|
||||
if (c.p[common] === otherC.p[common]) {
|
||||
if (!commonOperand) { return dest; }
|
||||
if (c.oi !== undefined) {
|
||||
delete c.od;
|
||||
} else {
|
||||
return dest;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
json.append dest, c
|
||||
return dest
|
||||
json.append(dest, c);
|
||||
return dest;
|
||||
};
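End to end, the converted transform behaves like this for two concurrent list inserts at the same index (hand-traced through the branches above):

json.transformComponent([], {p: ['list', 0], li: 'A'}, {p: ['list', 0], li: 'B'}, 'left');
// => [{p: ['list', 0], li: 'A'}] - in li vs. li, left wins and stays put
json.transformComponent([], {p: ['list', 0], li: 'A'}, {p: ['list', 0], li: 'B'}, 'right');
// => [{p: ['list', 1], li: 'A'}] - the right op is shifted past the left insert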
|
||||
|
||||
if WEB?
|
||||
exports.types ||= {}
|
||||
if (typeof WEB !== 'undefined' && WEB !== null) {
|
||||
if (!exports.types) { exports.types = {}; }
|
||||
|
||||
# This is kind of awful - come up with a better way to hook this helper code up.
|
||||
exports._bt(json, json.transformComponent, json.checkValidOp, json.append)
|
||||
// This is kind of awful - come up with a better way to hook this helper code up.
|
||||
exports._bt(json, json.transformComponent, json.checkValidOp, json.append);
|
||||
|
||||
# [] is used to prevent closure from renaming types.text
|
||||
exports.types.json = json
|
||||
else
|
||||
module.exports = json
|
||||
// [] is used to prevent closure from renaming types.text
|
||||
exports.types.json = json;
|
||||
} else {
|
||||
module.exports = json;
|
||||
|
||||
require('./helpers').bootstrapTransform(json, json.transformComponent, json.checkValidOp, json.append)
|
||||
require('./helpers').bootstrapTransform(json, json.transformComponent, json.checkValidOp, json.append);
|
||||
}
|
||||
|
||||
|
|
File diff suppressed because it is too large
|
@ -1,38 +1,48 @@
# This is a really simple OT type. Its not compiled with the web client, but it could be.
#
# Its mostly included for demonstration purposes and its used in a lot of unit tests.
#
# This defines a really simple text OT type which only allows inserts. (No deletes).
#
# Ops look like:
#   {position:#, text:"asdf"}
#
# Document snapshots look like:
#   {str:string}
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
// This is a really simple OT type. It's not compiled with the web client, but it could be.
//
// It's mostly included for demonstration purposes and it's used in a lot of unit tests.
//
// This defines a really simple text OT type which only allows inserts. (No deletes).
//
// Ops look like:
//   {position:#, text:"asdf"}
//
// Document snapshots look like:
//   {str:string}

module.exports =
  # The name of the OT type. The type is stored in types[type.name]. The name can be
  # used in place of the actual type in all the API methods.
  name: 'simple'
module.exports = {
  // The name of the OT type. The type is stored in types[type.name]. The name can be
  // used in place of the actual type in all the API methods.
  name: 'simple',

  # Create a new document snapshot
  create: -> {str:""}
  // Create a new document snapshot
  create() { return {str:""}; },

  # Apply the given op to the document snapshot. Returns the new snapshot.
  #
  # The original snapshot should not be modified.
  apply: (snapshot, op) ->
    throw new Error 'Invalid position' unless 0 <= op.position <= snapshot.str.length
  // Apply the given op to the document snapshot. Returns the new snapshot.
  //
  // The original snapshot should not be modified.
  apply(snapshot, op) {
    if (!(0 <= op.position && op.position <= snapshot.str.length)) { throw new Error('Invalid position'); }

    str = snapshot.str
    str = str.slice(0, op.position) + op.text + str.slice(op.position)
    {str}
    let {
      str
    } = snapshot;
    str = str.slice(0, op.position) + op.text + str.slice(op.position);
    return {str};
  },

  # transform op1 by op2. Return transformed version of op1.
  # sym describes the symmetry of the op. Its 'left' or 'right' depending on whether the
  # op being transformed comes from the client or the server.
  transform: (op1, op2, sym) ->
    pos = op1.position
    pos += op2.text.length if op2.position < pos or (op2.position == pos and sym is 'left')
  // transform op1 by op2. Return transformed version of op1.
  // sym describes the symmetry of the op. It's 'left' or 'right' depending on whether the
  // op being transformed comes from the client or the server.
  transform(op1, op2, sym) {
    let pos = op1.position;
    if ((op2.position < pos) || ((op2.position === pos) && (sym === 'left'))) { pos += op2.text.length; }

    return {position:pos, text:op1.text}
    return {position:pos, text:op1.text};
  }
};
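A small usage sketch for the converted type (not part of the diff; the require path is illustrative):

const simple = require('./simple');
let doc = simple.create();                           // {str: ""}
doc = simple.apply(doc, {position: 0, text: 'abc'}); // {str: "abc"}
// Two concurrent inserts at position 0:
simple.transform({position: 0, text: 'xyz'}, {position: 0, text: 'abc'}, 'left');
// => {position: 3, text: 'xyz'} - shifted past the other insert
simple.transform({position: 0, text: 'xyz'}, {position: 0, text: 'abc'}, 'right');
// => {position: 0, text: 'xyz'} - the right side keeps its position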
@ -1,42 +1,52 @@
# A synchronous processing queue. The queue calls process on the arguments,
# ensuring that process() is only executing once at a time.
#
# process(data, callback) _MUST_ eventually call its callback.
#
# Example:
#
#   queue = require 'syncqueue'
#
#   fn = queue (data, callback) ->
#     asyncthing data, ->
#       callback(321)
#
#   fn(1)
#   fn(2)
#   fn(3, (result) -> console.log(result))
#
#   ^--- async thing will only be running once at any time.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
// A synchronous processing queue. The queue calls process on the arguments,
// ensuring that process() is only executing once at a time.
//
// process(data, callback) _MUST_ eventually call its callback.
//
// Example:
//
//   queue = require 'syncqueue'
//
//   fn = queue (data, callback) ->
//     asyncthing data, ->
//       callback(321)
//
//   fn(1)
//   fn(2)
//   fn(3, (result) -> console.log(result))
//
//   ^--- async thing will only be running once at any time.

module.exports = (process) ->
  throw new Error('process is not a function') unless typeof process == 'function'
  queue = []
module.exports = function(process) {
  if (typeof process !== 'function') { throw new Error('process is not a function'); }
  const queue = [];

  enqueue = (data, callback) ->
    queue.push [data, callback]
    flush()
  const enqueue = function(data, callback) {
    queue.push([data, callback]);
    return flush();
  };

  enqueue.busy = false
  enqueue.busy = false;

  flush = ->
    return if enqueue.busy or queue.length == 0
  var flush = function() {
    if (enqueue.busy || (queue.length === 0)) { return; }

    enqueue.busy = true
    [data, callback] = queue.shift()
    process data, (result...) -> # TODO: Make this not use varargs - varargs are really slow.
      enqueue.busy = false
      # This is called after busy = false so a user can check if enqueue.busy is set in the callback.
      callback.apply null, result if callback
      flush()
    enqueue.busy = true;
    const [data, callback] = Array.from(queue.shift());
    return process(data, function(...result) { // TODO: Make this not use varargs - varargs are really slow.
      enqueue.busy = false;
      // This is called after busy = false so a user can check if enqueue.busy is set in the callback.
      if (callback) { callback.apply(null, result); }
      return flush();
    });
  };

  enqueue
  return enqueue;
};
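The CoffeeScript example in the header, restated against the converted module (a sketch, not part of the diff; setTimeout stands in for any async call):

const queue = require('./syncqueue');
const fn = queue(function(data, callback) {
  setTimeout(() => callback(data * 10), 10); // only one of these runs at a time
});
fn(1);
fn(2);
fn(3, result => console.log(result)); // logs 30 once the queue drains to it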
@ -1,32 +1,44 @@
# Text document API for text
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
// Text document API for text

text = require './text' if typeof WEB is 'undefined'
let text;
if (typeof WEB === 'undefined') { text = require('./text'); }

text.api =
  provides: {text:true}
text.api = {
  provides: {text:true},

  # The number of characters in the string
  getLength: -> @snapshot.length
  // The number of characters in the string
  getLength() { return this.snapshot.length; },

  # Get the text contents of a document
  getText: -> @snapshot
  // Get the text contents of a document
  getText() { return this.snapshot; },

  insert: (pos, text, callback) ->
    op = [{p:pos, i:text}]
  insert(pos, text, callback) {
    const op = [{p:pos, i:text}];

    @submitOp op, callback
    op
    this.submitOp(op, callback);
    return op;
  },

  del: (pos, length, callback) ->
    op = [{p:pos, d:@snapshot[pos...(pos + length)]}]
  del(pos, length, callback) {
    const op = [{p:pos, d:this.snapshot.slice(pos, (pos + length))}];

    @submitOp op, callback
    op
    this.submitOp(op, callback);
    return op;
  },

  _register: ->
    @on 'remoteop', (op) ->
      for component in op
        if component.i != undefined
          @emit 'insert', component.p, component.i
        else
          @emit 'delete', component.p, component.d
  _register() {
    return this.on('remoteop', function(op) {
      return Array.from(op).map((component) =>
        component.i !== undefined ?
          this.emit('insert', component.p, component.i)
        :
          this.emit('delete', component.p, component.d));
    });
  }
};
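For reference, the op shapes this API submits, hand-evaluated (the snapshot 'abcdef' is illustrative):

// insert(3, 'hi')                  -> [{p: 3, i: 'hi'}]
// del(3, 2) with snapshot 'abcdef' -> [{p: 3, d: 'de'}]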
@ -1,43 +1,64 @@
# Text document API for text
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS205: Consider reworking code to avoid use of IIFEs
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
// Text document API for text

if WEB?
  type = exports.types['text-composable']
else
  type = require './text-composable'
let type;
if (typeof WEB !== 'undefined' && WEB !== null) {
  type = exports.types['text-composable'];
} else {
  type = require('./text-composable');
}

type.api =
  provides: {'text':true}
type.api = {
  provides: {'text':true},

  # The number of characters in the string
  'getLength': -> @snapshot.length
  // The number of characters in the string
  'getLength'() { return this.snapshot.length; },

  # Get the text contents of a document
  'getText': -> @snapshot
  // Get the text contents of a document
  'getText'() { return this.snapshot; },

  'insert': (pos, text, callback) ->
    op = type.normalize [pos, 'i':text, (@snapshot.length - pos)]
  'insert'(pos, text, callback) {
    const op = type.normalize([pos, {'i':text}, (this.snapshot.length - pos)]);

    @submitOp op, callback
    op
    this.submitOp(op, callback);
    return op;
  },

  'del': (pos, length, callback) ->
    op = type.normalize [pos, 'd':@snapshot[pos...(pos + length)], (@snapshot.length - pos - length)]
  'del'(pos, length, callback) {
    const op = type.normalize([pos, {'d':this.snapshot.slice(pos, (pos + length))}, (this.snapshot.length - pos - length)]);

    @submitOp op, callback
    op
    this.submitOp(op, callback);
    return op;
  },

  _register: ->
    @on 'remoteop', (op) ->
      pos = 0
      for component in op
        if typeof component is 'number'
          pos += component
        else if component.i != undefined
          @emit 'insert', pos, component.i
          pos += component.i.length
        else
          # delete
          @emit 'delete', pos, component.d
          # We don't increment pos, because the position
          # specified is after the delete has happened.
  _register() {
    return this.on('remoteop', function(op) {
      let pos = 0;
      return (() => {
        const result = [];
        for (let component of Array.from(op)) {
          if (typeof component === 'number') {
            result.push(pos += component);
          } else if (component.i !== undefined) {
            this.emit('insert', pos, component.i);
            result.push(pos += component.i.length);
          } else {
            // delete
            result.push(this.emit('delete', pos, component.d));
          }
        }
        return result;
      })();
    });
  }
};
// We don't increment pos, because the position
// specified is after the delete has happened.
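Unlike the plain text API above, the composable form pads each op so it traverses the whole document; hand-evaluated against a 10-character snapshot 'abcdefghij':

// insert(3, 'hi') -> type.normalize([3, {'i': 'hi'}, 7])
// del(3, 2)       -> type.normalize([3, {'d': 'de'}, 5])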
@ -1,261 +1,315 @@
|
|||
# An alternate composable implementation for text. This is much closer
|
||||
# to the implementation used by Google Wave.
|
||||
#
|
||||
# Ops are lists of components which iterate over the whole document.
|
||||
# Components are either:
|
||||
# A number N: Skip N characters in the original document
|
||||
# {i:'str'}: Insert 'str' at the current position in the document
|
||||
# {d:'str'}: Delete 'str', which appears at the current position in the document
|
||||
#
|
||||
# Eg: [3, {i:'hi'}, 5, {d:'internet'}]
|
||||
#
|
||||
# Snapshots are strings.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS205: Consider reworking code to avoid use of IIFEs
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
// An alternate composable implementation for text. This is much closer
|
||||
// to the implementation used by Google Wave.
|
||||
//
|
||||
// Ops are lists of components which iterate over the whole document.
|
||||
// Components are either:
|
||||
// A number N: Skip N characters in the original document
|
||||
// {i:'str'}: Insert 'str' at the current position in the document
|
||||
// {d:'str'}: Delete 'str', which appears at the current position in the document
|
||||
//
|
||||
// Eg: [3, {i:'hi'}, 5, {d:'internet'}]
|
||||
//
|
||||
// Snapshots are strings.
|
||||
|
||||
p = -> #require('util').debug
|
||||
i = -> #require('util').inspect
|
||||
let makeAppend;
|
||||
const p = function() {}; //require('util').debug
|
||||
const i = function() {}; //require('util').inspect
|
||||
|
||||
exports = if WEB? then {} else module.exports
|
||||
const exports = (typeof WEB !== 'undefined' && WEB !== null) ? {} : module.exports;
|
||||
|
||||
exports.name = 'text-composable'
|
||||
exports.name = 'text-composable';
|
||||
|
||||
exports.create = -> ''
|
||||
exports.create = () => '';
|
||||
|
||||
# -------- Utility methods
|
||||
// -------- Utility methods
|
||||
|
||||
checkOp = (op) ->
|
||||
throw new Error('Op must be an array of components') unless Array.isArray(op)
|
||||
last = null
|
||||
for c in op
|
||||
if typeof(c) == 'object'
|
||||
throw new Error("Invalid op component: #{i c}") unless (c.i? && c.i.length > 0) or (c.d? && c.d.length > 0)
|
||||
else
|
||||
throw new Error('Op components must be objects or numbers') unless typeof(c) == 'number'
|
||||
throw new Error('Skip components must be a positive number') unless c > 0
|
||||
throw new Error('Adjacent skip components should be added') if typeof(last) == 'number'
|
||||
const checkOp = function(op) {
|
||||
if (!Array.isArray(op)) { throw new Error('Op must be an array of components'); }
|
||||
let last = null;
|
||||
return (() => {
|
||||
const result = [];
|
||||
for (let c of Array.from(op)) {
|
||||
if (typeof(c) === 'object') {
|
||||
if (((c.i == null) || !(c.i.length > 0)) && ((c.d == null) || !(c.d.length > 0))) { throw new Error(`Invalid op component: ${i(c)}`); }
|
||||
} else {
|
||||
if (typeof(c) !== 'number') { throw new Error('Op components must be objects or numbers'); }
|
||||
if (!(c > 0)) { throw new Error('Skip components must be a positive number'); }
|
||||
if (typeof(last) === 'number') { throw new Error('Adjacent skip components should be added'); }
|
||||
}
|
||||
|
||||
last = c
|
||||
result.push(last = c);
|
||||
}
|
||||
return result;
|
||||
})();
|
||||
};
|
||||
|
||||
# Makes a function for appending components to a given op.
|
||||
# Exported for the randomOpGenerator.
|
||||
exports._makeAppend = makeAppend = (op) -> (component) ->
|
||||
if component == 0 || component.i == '' || component.d == ''
|
||||
return
|
||||
else if op.length == 0
|
||||
op.push component
|
||||
else if typeof(component) == 'number' && typeof(op[op.length - 1]) == 'number'
|
||||
op[op.length - 1] += component
|
||||
else if component.i? && op[op.length - 1].i?
|
||||
op[op.length - 1].i += component.i
|
||||
else if component.d? && op[op.length - 1].d?
|
||||
op[op.length - 1].d += component.d
|
||||
else
|
||||
op.push component
|
||||
// Makes a function for appending components to a given op.
|
||||
// Exported for the randomOpGenerator.
|
||||
exports._makeAppend = (makeAppend = op => (function(component) {
|
||||
if ((component === 0) || (component.i === '') || (component.d === '')) {
|
||||
return;
|
||||
} else if (op.length === 0) {
|
||||
return op.push(component);
|
||||
} else if ((typeof(component) === 'number') && (typeof(op[op.length - 1]) === 'number')) {
|
||||
return op[op.length - 1] += component;
|
||||
} else if ((component.i != null) && (op[op.length - 1].i != null)) {
|
||||
return op[op.length - 1].i += component.i;
|
||||
} else if ((component.d != null) && (op[op.length - 1].d != null)) {
|
||||
return op[op.length - 1].d += component.d;
|
||||
} else {
|
||||
return op.push(component);
|
||||
}
|
||||
}));
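append's coalescing in action, hand-checked against the converted code (not part of the diff):

const op = [];
const append = exports._makeAppend(op);
append(3); append(2);                  // adjacent skips are summed
append({i: 'he'}); append({i: 'llo'}); // adjacent inserts are concatenated
// op is now [5, {i: 'hello'}]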
|
||||
|
||||
# checkOp op
|
||||
// checkOp op
|
||||
|
||||
# Makes 2 functions for taking components from the start of an op, and for peeking
|
||||
# at the next op that could be taken.
|
||||
makeTake = (op) ->
|
||||
# The index of the next component to take
|
||||
idx = 0
|
||||
# The offset into the component
|
||||
offset = 0
|
||||
// Makes 2 functions for taking components from the start of an op, and for peeking
|
||||
// at the next op that could be taken.
|
||||
const makeTake = function(op) {
|
||||
// The index of the next component to take
|
||||
let idx = 0;
|
||||
// The offset into the component
|
||||
let offset = 0;
|
||||
|
||||
# Take up to length n from the front of op. If n is null, take the next
|
||||
# op component. If indivisableField == 'd', delete components won't be separated.
|
||||
# If indivisableField == 'i', insert components won't be separated.
|
||||
take = (n, indivisableField) ->
|
||||
return null if idx == op.length
|
||||
#assert.notStrictEqual op.length, i, 'The op is too short to traverse the document'
|
||||
// Take up to length n from the front of op. If n is null, take the next
|
||||
// op component. If indivisableField == 'd', delete components won't be separated.
|
||||
// If indivisableField == 'i', insert components won't be separated.
|
||||
const take = function(n, indivisableField) {
|
||||
let c;
|
||||
if (idx === op.length) { return null; }
|
||||
//assert.notStrictEqual op.length, i, 'The op is too short to traverse the document'
|
||||
|
||||
if typeof(op[idx]) == 'number'
|
||||
if !n? or op[idx] - offset <= n
|
||||
c = op[idx] - offset
|
||||
++idx; offset = 0
|
||||
c
|
||||
else
|
||||
offset += n
|
||||
n
|
||||
else
|
||||
# Take from the string
|
||||
field = if op[idx].i then 'i' else 'd'
|
||||
c = {}
|
||||
if !n? or op[idx][field].length - offset <= n or field == indivisableField
|
||||
c[field] = op[idx][field][offset..]
|
||||
++idx; offset = 0
|
||||
else
|
||||
c[field] = op[idx][field][offset...(offset + n)]
|
||||
offset += n
|
||||
c
|
||||
if (typeof(op[idx]) === 'number') {
|
||||
if ((n == null) || ((op[idx] - offset) <= n)) {
|
||||
c = op[idx] - offset;
|
||||
++idx; offset = 0;
|
||||
return c;
|
||||
} else {
|
||||
offset += n;
|
||||
return n;
|
||||
}
|
||||
} else {
|
||||
// Take from the string
|
||||
const field = op[idx].i ? 'i' : 'd';
|
||||
c = {};
|
||||
if ((n == null) || ((op[idx][field].length - offset) <= n) || (field === indivisableField)) {
|
||||
c[field] = op[idx][field].slice(offset);
|
||||
++idx; offset = 0;
|
||||
} else {
|
||||
c[field] = op[idx][field].slice(offset, (offset + n));
|
||||
offset += n;
|
||||
}
|
||||
return c;
|
||||
}
|
||||
};
|
||||
|
||||
peekType = () ->
|
||||
op[idx]
|
||||
const peekType = () => op[idx];
|
||||
|
||||
[take, peekType]
|
||||
return [take, peekType];
|
||||
};
|
||||
|
||||
# Find and return the length of an op component
|
||||
componentLength = (component) ->
|
||||
if typeof(component) == 'number'
|
||||
component
|
||||
else if component.i?
|
||||
component.i.length
|
||||
else
|
||||
component.d.length
|
||||
// Find and return the length of an op component
|
||||
const componentLength = function(component) {
|
||||
if (typeof(component) === 'number') {
|
||||
return component;
|
||||
} else if (component.i != null) {
|
||||
return component.i.length;
|
||||
} else {
|
||||
return component.d.length;
|
||||
}
|
||||
};
|
||||
|
||||
# Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate
|
||||
# adjacent inserts and deletes.
|
||||
exports.normalize = (op) ->
|
||||
newOp = []
|
||||
append = makeAppend newOp
|
||||
append component for component in op
|
||||
newOp
|
||||
// Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate
|
||||
// adjacent inserts and deletes.
|
||||
exports.normalize = function(op) {
|
||||
const newOp = [];
|
||||
const append = makeAppend(newOp);
|
||||
for (let component of Array.from(op)) { append(component); }
|
||||
return newOp;
|
||||
};
|
||||
|
||||
# Apply the op to the string. Returns the new string.
|
||||
exports.apply = (str, op) ->
|
||||
p "Applying #{i op} to '#{str}'"
|
||||
throw new Error('Snapshot should be a string') unless typeof(str) == 'string'
|
||||
checkOp op
|
||||
// Apply the op to the string. Returns the new string.
|
||||
exports.apply = function(str, op) {
|
||||
p(`Applying ${i(op)} to '${str}'`);
|
||||
if (typeof(str) !== 'string') { throw new Error('Snapshot should be a string'); }
|
||||
checkOp(op);
|
||||
|
||||
pos = 0
|
||||
newDoc = []
|
||||
const pos = 0;
|
||||
const newDoc = [];
|
||||
|
||||
for component in op
|
||||
if typeof(component) == 'number'
|
||||
throw new Error('The op is too long for this document') if component > str.length
|
||||
newDoc.push str[...component]
|
||||
str = str[component..]
|
||||
else if component.i?
|
||||
newDoc.push component.i
|
||||
else
|
||||
throw new Error("The deleted text '#{component.d}' doesn't match the next characters in the document '#{str[...component.d.length]}'") unless component.d == str[...component.d.length]
|
||||
str = str[component.d.length..]
|
||||
for (let component of Array.from(op)) {
|
||||
if (typeof(component) === 'number') {
|
||||
if (component > str.length) { throw new Error('The op is too long for this document'); }
|
||||
newDoc.push(str.slice(0, component));
|
||||
str = str.slice(component);
|
||||
} else if (component.i != null) {
|
||||
newDoc.push(component.i);
|
||||
} else {
|
||||
if (component.d !== str.slice(0, component.d.length)) { throw new Error(`The deleted text '${component.d}' doesn't match the next characters in the document '${str.slice(0, component.d.length)}'`); }
|
||||
str = str.slice(component.d.length);
|
||||
}
|
||||
}
|
||||
|
||||
throw new Error("The applied op doesn't traverse the entire document") unless '' == str
|
||||
if ('' !== str) { throw new Error("The applied op doesn't traverse the entire document"); }
|
||||
|
||||
newDoc.join ''
|
||||
return newDoc.join('');
|
||||
};
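Applying the example op from the header comment, hand-traced through the loop above (the document string is made up so the delete matches):

exports.apply('abcdefghinternet', [3, {i: 'hi'}, 5, {d: 'internet'}]);
// => 'abchidefgh' - skip 3, insert 'hi', skip 5, delete 'internet'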
|
||||
|
||||
# transform op1 by op2. Return transformed version of op1.
|
||||
# op1 and op2 are unchanged by transform.
|
||||
exports.transform = (op, otherOp, side) ->
|
||||
throw new Error "side (#{side} must be 'left' or 'right'" unless side == 'left' or side == 'right'
|
||||
// transform op1 by op2. Return transformed version of op1.
|
||||
// op1 and op2 are unchanged by transform.
|
||||
exports.transform = function(op, otherOp, side) {
|
||||
let component;
|
||||
if ((side !== 'left') && (side !== 'right')) { throw new Error(`side (${side}) must be 'left' or 'right'`); }
|
||||
|
||||
checkOp op
|
||||
checkOp otherOp
|
||||
newOp = []
|
||||
checkOp(op);
|
||||
checkOp(otherOp);
|
||||
const newOp = [];
|
||||
|
||||
append = makeAppend newOp
|
||||
[take, peek] = makeTake op
|
||||
const append = makeAppend(newOp);
|
||||
const [take, peek] = Array.from(makeTake(op));
|
||||
|
||||
for component in otherOp
|
||||
if typeof(component) == 'number' # Skip
|
||||
length = component
|
||||
while length > 0
|
||||
chunk = take(length, 'i')
|
||||
throw new Error('The op traverses more elements than the document has') unless chunk != null
|
||||
for (component of Array.from(otherOp)) {
|
||||
var chunk, length;
|
||||
if (typeof(component) === 'number') { // Skip
|
||||
length = component;
|
||||
while (length > 0) {
|
||||
chunk = take(length, 'i');
|
||||
if (chunk === null) { throw new Error('The op traverses more elements than the document has'); }
|
||||
|
||||
append chunk
|
||||
length -= componentLength chunk unless typeof(chunk) == 'object' && chunk.i?
|
||||
else if component.i? # Insert
|
||||
if side == 'left'
|
||||
# The left insert should go first.
|
||||
o = peek()
|
||||
append take() if o?.i
|
||||
append(chunk);
|
||||
if ((typeof(chunk) !== 'object') || (chunk.i == null)) { length -= componentLength(chunk); }
|
||||
}
|
||||
} else if (component.i != null) { // Insert
|
||||
if (side === 'left') {
|
||||
// The left insert should go first.
|
||||
const o = peek();
|
||||
if (o != null ? o.i : undefined) { append(take()); }
|
||||
}
|
||||
|
||||
# Otherwise, skip the inserted text.
|
||||
append(component.i.length)
|
||||
else # Delete.
|
||||
#assert.ok component.d
|
||||
length = component.d.length
|
||||
while length > 0
|
||||
chunk = take(length, 'i')
|
||||
throw new Error('The op traverses more elements than the document has') unless chunk != null
|
||||
// Otherwise, skip the inserted text.
|
||||
append(component.i.length);
|
||||
} else { // Delete.
|
||||
//assert.ok component.d
|
||||
({
|
||||
length
|
||||
} = component.d);
|
||||
while (length > 0) {
|
||||
chunk = take(length, 'i');
|
||||
if (chunk === null) { throw new Error('The op traverses more elements than the document has'); }
|
||||
|
||||
if typeof(chunk) == 'number'
|
||||
length -= chunk
|
||||
else if chunk.i?
|
||||
append(chunk)
|
||||
else
|
||||
#assert.ok chunk.d
|
||||
# The delete is unnecessary now.
|
||||
length -= chunk.d.length
|
||||
if (typeof(chunk) === 'number') {
|
||||
length -= chunk;
|
||||
} else if (chunk.i != null) {
|
||||
append(chunk);
|
||||
} else {
|
||||
//assert.ok chunk.d
|
||||
// The delete is unnecessary now.
|
||||
length -= chunk.d.length;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Append extras from op1
|
||||
while (component = take())
|
||||
throw new Error "Remaining fragments in the op: #{i component}" unless component?.i?
|
||||
append component
|
||||
// Append extras from op1
|
||||
while (component = take()) {
|
||||
if ((component != null ? component.i : undefined) == null) { throw new Error(`Remaining fragments in the op: ${i(component)}`); }
|
||||
append(component);
|
||||
}
|
||||
|
||||
newOp
|
||||
return newOp;
|
||||
};
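
// A minimal usage sketch (added for illustration; the require path and the
// node-style export are assumptions — in the browser this type is reached via
// window.ot.types.text instead):
const type = require('./types/text'); // hypothetical path for this file
const op1 = [{i:'X'}, 3];             // insert 'X' at position 0, then skip 'abc'
const op2 = [3, {i:'Y'}];             // skip 'abc', then insert 'Y' at the end
type.transform(op1, op2, 'left');  // => [{i:'X'}, 4]
type.transform(op2, op1, 'right'); // => [4, {i:'Y'}]
// Both application orders converge on 'XabcY' for a starting document 'abc'.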
|
||||
|
||||
|
||||
# Compose 2 ops into 1 op.
|
||||
exports.compose = (op1, op2) ->
|
||||
p "COMPOSE #{i op1} + #{i op2}"
|
||||
checkOp op1
|
||||
checkOp op2
|
||||
// Compose 2 ops into 1 op.
|
||||
exports.compose = function(op1, op2) {
|
||||
let component;
|
||||
p(`COMPOSE ${i(op1)} + ${i(op2)}`);
|
||||
checkOp(op1);
|
||||
checkOp(op2);
|
||||
|
||||
result = []
|
||||
const result = [];
|
||||
|
||||
append = makeAppend result
|
||||
[take, _] = makeTake op1
|
||||
const append = makeAppend(result);
|
||||
const [take, _] = Array.from(makeTake(op1));
|
||||
|
||||
for component in op2
|
||||
if typeof(component) == 'number' # Skip
|
||||
length = component
|
||||
while length > 0
|
||||
chunk = take(length, 'd')
|
||||
throw new Error('The op traverses more elements than the document has') unless chunk != null
|
||||
for (component of Array.from(op2)) {
|
||||
var chunk, length;
|
||||
if (typeof(component) === 'number') { // Skip
|
||||
length = component;
|
||||
while (length > 0) {
|
||||
chunk = take(length, 'd');
|
||||
if (chunk === null) { throw new Error('The op traverses more elements than the document has'); }
|
||||
|
||||
append chunk
|
||||
length -= componentLength chunk unless typeof(chunk) == 'object' && chunk.d?
|
||||
append(chunk);
|
||||
if ((typeof(chunk) !== 'object') || (chunk.d == null)) { length -= componentLength(chunk); }
|
||||
}
|
||||
|
||||
else if component.i? # Insert
|
||||
append {i:component.i}
|
||||
} else if (component.i != null) { // Insert
|
||||
append({i:component.i});
|
||||
|
||||
else # Delete
|
||||
offset = 0
|
||||
while offset < component.d.length
|
||||
chunk = take(component.d.length - offset, 'd')
|
||||
throw new Error('The op traverses more elements than the document has') unless chunk != null
|
||||
} else { // Delete
|
||||
let offset = 0;
|
||||
while (offset < component.d.length) {
|
||||
chunk = take(component.d.length - offset, 'd');
|
||||
if (chunk === null) { throw new Error('The op traverses more elements than the document has'); }
|
||||
|
||||
# If it's a delete, append it. If it's a skip, drop it and decrease length. If it's an insert, check the strings match, drop it and decrease length.
|
||||
if typeof(chunk) == 'number'
|
||||
append {d:component.d[offset...(offset + chunk)]}
|
||||
offset += chunk
|
||||
else if chunk.i?
|
||||
throw new Error("The deleted text doesn't match the inserted text") unless component.d[offset...(offset + chunk.i.length)] == chunk.i
|
||||
offset += chunk.i.length
|
||||
# The ops cancel each other out.
|
||||
else
|
||||
# Delete
|
||||
append chunk
|
||||
// If it's a delete, append it. If it's a skip, drop it and decrease length. If it's an insert, check the strings match, drop it and decrease length.
|
||||
if (typeof(chunk) === 'number') {
|
||||
append({d:component.d.slice(offset, (offset + chunk))});
|
||||
offset += chunk;
|
||||
} else if (chunk.i != null) {
|
||||
if (component.d.slice(offset, (offset + chunk.i.length)) !== chunk.i) { throw new Error("The deleted text doesn't match the inserted text"); }
|
||||
offset += chunk.i.length;
|
||||
// The ops cancel each other out.
|
||||
} else {
|
||||
// Delete
|
||||
append(chunk);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Append extras from op1
|
||||
while (component = take())
|
||||
throw new Error "Trailing stuff in op1 #{i component}" unless component?.d?
|
||||
append component
|
||||
// Append extras from op1
|
||||
while (component = take()) {
|
||||
if ((component != null ? component.d : undefined) == null) { throw new Error(`Trailing stuff in op1 ${i(component)}`); }
|
||||
append(component);
|
||||
}
|
||||
|
||||
result
|
||||
return result;
|
||||
};
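
// Sketch, continuing the illustration above: compose folds two sequential ops
// into one, merging the touching inserts via append.
const a = [{i:'ab'}];   // '' -> 'ab'
const b = [2, {i:'c'}]; // 'ab' -> 'abc'
type.compose(a, b);     // => [{i:'abc'}]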
|
||||
|
||||
|
||||
invertComponent = (c) ->
|
||||
if typeof(c) == 'number'
|
||||
c
|
||||
else if c.i?
|
||||
{d:c.i}
|
||||
else
|
||||
{i:c.d}
|
||||
const invertComponent = function(c) {
|
||||
if (typeof(c) === 'number') {
|
||||
return c;
|
||||
} else if (c.i != null) {
|
||||
return {d:c.i};
|
||||
} else {
|
||||
return {i:c.d};
|
||||
}
|
||||
};
|
||||
|
||||
# Invert an op
|
||||
exports.invert = (op) ->
|
||||
result = []
|
||||
append = makeAppend result
|
||||
// Invert an op
|
||||
exports.invert = function(op) {
|
||||
const result = [];
|
||||
const append = makeAppend(result);
|
||||
|
||||
append(invertComponent component) for component in op
|
||||
for (let component of Array.from(op)) { append(invertComponent(component)); }
|
||||
|
||||
result
|
||||
return result;
|
||||
};
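
// Sketch: invert flips inserts and deletes component-by-component (skips are
// unchanged), so applying an op and then its inverse round-trips the document.
type.invert([3, {i:'hi'}, {d:'x'}]); // => [3, {d:'hi'}, {i:'x'}]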
|
||||
|
||||
if window?
|
||||
window.ot ||= {}
|
||||
window.ot.types ||= {}
|
||||
window.ot.types.text = exports
|
||||
if (typeof window !== 'undefined' && window !== null) {
|
||||
if (!window.ot) { window.ot = {}; }
|
||||
if (!window.ot.types) { window.ot.types = {}; }
|
||||
window.ot.types.text = exports;
|
||||
}
|
||||
|
||||
|
|
|
@ -1,89 +1,118 @@
|
|||
# Text document API for text-tp2
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS205: Consider reworking code to avoid use of IIFEs
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
// Text document API for text-tp2
|
||||
|
||||
if WEB?
|
||||
type = exports.types['text-tp2']
|
||||
else
|
||||
type = require './text-tp2'
|
||||
let type;
|
||||
if (typeof WEB !== 'undefined' && WEB !== null) {
|
||||
type = exports.types['text-tp2'];
|
||||
} else {
|
||||
type = require('./text-tp2');
|
||||
}
|
||||
|
||||
{_takeDoc:takeDoc, _append:append} = type
|
||||
const {_takeDoc:takeDoc, _append:append} = type;
|
||||
|
||||
appendSkipChars = (op, doc, pos, maxlength) ->
|
||||
while (maxlength == undefined || maxlength > 0) and pos.index < doc.data.length
|
||||
part = takeDoc doc, pos, maxlength, true
|
||||
maxlength -= part.length if maxlength != undefined and typeof part is 'string'
|
||||
append op, (part.length || part)
|
||||
const appendSkipChars = (op, doc, pos, maxlength) => (() => {
|
||||
const result = [];
|
||||
while (((maxlength === undefined) || (maxlength > 0)) && (pos.index < doc.data.length)) {
|
||||
const part = takeDoc(doc, pos, maxlength, true);
|
||||
if ((maxlength !== undefined) && (typeof part === 'string')) { maxlength -= part.length; }
|
||||
result.push(append(op, (part.length || part)));
|
||||
}
|
||||
return result;
|
||||
})();
|
||||
|
||||
type['api'] =
|
||||
'provides': {'text':true}
|
||||
type['api'] = {
|
||||
'provides': {'text':true},
|
||||
|
||||
# The number of characters in the string
|
||||
'getLength': -> @snapshot.charLength
|
||||
// The number of characters in the string
|
||||
'getLength'() { return this.snapshot.charLength; },
|
||||
|
||||
# Flatten a document into a string
|
||||
'getText': ->
|
||||
strings = (elem for elem in @snapshot.data when typeof elem is 'string')
|
||||
strings.join ''
|
||||
// Flatten a document into a string
|
||||
'getText'() {
|
||||
const strings = (Array.from(this.snapshot.data).filter((elem) => typeof elem === 'string'));
|
||||
return strings.join('');
|
||||
},
|
||||
|
||||
'insert': (pos, text, callback) ->
|
||||
pos = 0 if pos == undefined
|
||||
'insert'(pos, text, callback) {
|
||||
if (pos === undefined) { pos = 0; }
|
||||
|
||||
op = []
|
||||
docPos = {index:0, offset:0}
|
||||
const op = [];
|
||||
const docPos = {index:0, offset:0};
|
||||
|
||||
appendSkipChars op, @snapshot, docPos, pos
|
||||
append op, {'i':text}
|
||||
appendSkipChars op, @snapshot, docPos
|
||||
appendSkipChars(op, this.snapshot, docPos, pos);
|
||||
append(op, {'i':text});
|
||||
appendSkipChars(op, this.snapshot, docPos);
|
||||
|
||||
@submitOp op, callback
|
||||
op
|
||||
this.submitOp(op, callback);
|
||||
return op;
|
||||
},
|
||||
|
||||
'del': (pos, length, callback) ->
|
||||
op = []
|
||||
docPos = {index:0, offset:0}
|
||||
'del'(pos, length, callback) {
|
||||
const op = [];
|
||||
const docPos = {index:0, offset:0};
|
||||
|
||||
appendSkipChars op, @snapshot, docPos, pos
|
||||
appendSkipChars(op, this.snapshot, docPos, pos);
|
||||
|
||||
while length > 0
|
||||
part = takeDoc @snapshot, docPos, length, true
|
||||
if typeof part is 'string'
|
||||
append op, {'d':part.length}
|
||||
length -= part.length
|
||||
else
|
||||
append op, part
|
||||
while (length > 0) {
|
||||
const part = takeDoc(this.snapshot, docPos, length, true);
|
||||
if (typeof part === 'string') {
|
||||
append(op, {'d':part.length});
|
||||
length -= part.length;
|
||||
} else {
|
||||
append(op, part);
|
||||
}
|
||||
}
|
||||
|
||||
appendSkipChars op, @snapshot, docPos
|
||||
appendSkipChars(op, this.snapshot, docPos);
|
||||
|
||||
@submitOp op, callback
|
||||
op
|
||||
this.submitOp(op, callback);
|
||||
return op;
|
||||
},
|
||||
|
||||
'_register': ->
|
||||
# Interpret received ops + generate more detailed events for them
|
||||
@on 'remoteop', (op, snapshot) ->
|
||||
textPos = 0
|
||||
docPos = {index:0, offset:0}
|
||||
'_register'() {
|
||||
// Interpret received ops + generate more detailed events for them
|
||||
return this.on('remoteop', function(op, snapshot) {
|
||||
let textPos = 0;
|
||||
const docPos = {index:0, offset:0};
|
||||
|
||||
for component in op
|
||||
if typeof component is 'number'
|
||||
# Skip
|
||||
remainder = component
|
||||
while remainder > 0
|
||||
part = takeDoc snapshot, docPos, remainder
|
||||
if typeof part is 'string'
|
||||
textPos += part.length
|
||||
remainder -= part.length || part
|
||||
else if component.i != undefined
|
||||
# Insert
|
||||
if typeof component.i is 'string'
|
||||
@emit 'insert', textPos, component.i
|
||||
textPos += component.i.length
|
||||
else
|
||||
# Delete
|
||||
remainder = component.d
|
||||
while remainder > 0
|
||||
part = takeDoc snapshot, docPos, remainder
|
||||
if typeof part is 'string'
|
||||
@emit 'delete', textPos, part
|
||||
remainder -= part.length || part
|
||||
for (let component of Array.from(op)) {
|
||||
var part, remainder;
|
||||
if (typeof component === 'number') {
|
||||
// Skip
|
||||
remainder = component;
|
||||
while (remainder > 0) {
|
||||
part = takeDoc(snapshot, docPos, remainder);
|
||||
if (typeof part === 'string') {
|
||||
textPos += part.length;
|
||||
}
|
||||
remainder -= part.length || part;
|
||||
}
|
||||
} else if (component.i !== undefined) {
|
||||
// Insert
|
||||
if (typeof component.i === 'string') {
|
||||
this.emit('insert', textPos, component.i);
|
||||
textPos += component.i.length;
|
||||
}
|
||||
} else {
|
||||
// Delete
|
||||
remainder = component.d;
|
||||
while (remainder > 0) {
|
||||
part = takeDoc(snapshot, docPos, remainder);
|
||||
if (typeof part === 'string') {
|
||||
this.emit('delete', textPos, part);
|
||||
}
|
||||
remainder -= part.length || part;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
|
|
|
@ -1,322 +1,398 @@
|
|||
# A TP2 implementation of text, following this spec:
|
||||
# http://code.google.com/p/lightwave/source/browse/trunk/experimental/ot/README
|
||||
#
|
||||
# A document is made up of a string and a set of tombstones inserted throughout
|
||||
# the string. For example, 'some ', (2 tombstones), 'string'.
|
||||
#
|
||||
# This is encoded in a document as: {s:'some string', t:[5, -2, 6]}
|
||||
#
|
||||
# Ops are lists of components which iterate over the whole document.
|
||||
# Components are either:
|
||||
# N: Skip N characters in the original document
|
||||
# {i:'str'}: Insert 'str' at the current position in the document
|
||||
# {i:N}: Insert N tombstones at the current position in the document
|
||||
# {d:N}: Delete (tombstone) N characters at the current position in the document
|
||||
#
|
||||
# Eg: [3, {i:'hi'}, 5, {d:8}]
|
||||
#
|
||||
# Snapshots are lists with characters and tombstones. Characters are stored in strings
|
||||
# and adjacent tombstones are flattened into numbers.
|
||||
#
|
||||
# Eg, the document: 'Hello .....world' ('.' denotes tombstoned (deleted) characters)
|
||||
# would be represented by a document snapshot of ['Hello ', 5, 'world']
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS103: Rewrite code to no longer use __guard__
|
||||
* DS205: Consider reworking code to avoid use of IIFEs
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
// A TP2 implementation of text, following this spec:
|
||||
// http://code.google.com/p/lightwave/source/browse/trunk/experimental/ot/README
|
||||
//
|
||||
// A document is made up of a string and a set of tombstones inserted throughout
|
||||
// the string. For example, 'some ', (2 tombstones), 'string'.
|
||||
//
|
||||
// This is encoded in a document as: {s:'some string', t:[5, -2, 6]}
|
||||
//
|
||||
// Ops are lists of components which iterate over the whole document.
|
||||
// Components are either:
|
||||
// N: Skip N characters in the original document
|
||||
// {i:'str'}: Insert 'str' at the current position in the document
|
||||
// {i:N}: Insert N tombstones at the current position in the document
|
||||
// {d:N}: Delete (tombstone) N characters at the current position in the document
|
||||
//
|
||||
// Eg: [3, {i:'hi'}, 5, {d:8}]
|
||||
//
|
||||
// Snapshots are lists with characters and tombstones. Characters are stored in strings
|
||||
// and adjacent tombstones are flattened into numbers.
|
||||
//
|
||||
// Eg, the document: 'Hello .....world' ('.' denotes tombstoned (deleted) characters)
|
||||
// would be represented by a document snapshot of ['Hello ', 5, 'world']
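
// A small numeric sketch of the representation above (the require path is the
// one the text-tp2 API file uses; deserialize is defined further down):
const type = require('./text-tp2');
const doc = type.deserialize(['Hello ', 5, 'world']); // 'Hello .....world'
doc.charLength;  // => 11 — visible characters only
doc.totalLength; // => 16 — characters plus the 5 tombstones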
|
||||
|
||||
type =
|
||||
name: 'text-tp2'
|
||||
tp2: true
|
||||
create: -> {charLength:0, totalLength:0, positionCache:[], data:[]}
|
||||
serialize: (doc) ->
|
||||
throw new Error 'invalid doc snapshot' unless doc.data
|
||||
doc.data
|
||||
deserialize: (data) ->
|
||||
doc = type.create()
|
||||
doc.data = data
|
||||
let append, appendDoc, takeDoc;
|
||||
var type = {
|
||||
name: 'text-tp2',
|
||||
tp2: true,
|
||||
create() { return {charLength:0, totalLength:0, positionCache:[], data:[]}; },
|
||||
serialize(doc) {
|
||||
if (!doc.data) { throw new Error('invalid doc snapshot'); }
|
||||
return doc.data;
|
||||
},
|
||||
deserialize(data) {
|
||||
const doc = type.create();
|
||||
doc.data = data;
|
||||
|
||||
for component in data
|
||||
if typeof component is 'string'
|
||||
doc.charLength += component.length
|
||||
doc.totalLength += component.length
|
||||
else
|
||||
doc.totalLength += component
|
||||
for (let component of Array.from(data)) {
|
||||
if (typeof component === 'string') {
|
||||
doc.charLength += component.length;
|
||||
doc.totalLength += component.length;
|
||||
} else {
|
||||
doc.totalLength += component;
|
||||
}
|
||||
}
|
||||
|
||||
doc
|
||||
return doc;
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
checkOp = (op) ->
|
||||
throw new Error('Op must be an array of components') unless Array.isArray(op)
|
||||
last = null
|
||||
for c in op
|
||||
if typeof(c) == 'object'
|
||||
if c.i != undefined
|
||||
throw new Error('Inserts must insert a string or a +ive number') unless (typeof(c.i) == 'string' and c.i.length > 0) or (typeof(c.i) == 'number' and c.i > 0)
|
||||
else if c.d != undefined
|
||||
throw new Error('Deletes must be a +ive number') unless typeof(c.d) == 'number' and c.d > 0
|
||||
else
|
||||
throw new Error('Operation component must define .i or .d')
|
||||
else
|
||||
throw new Error('Op components must be objects or numbers') unless typeof(c) == 'number'
|
||||
throw new Error('Skip components must be a positive number') unless c > 0
|
||||
throw new Error('Adjacent skip components should be combined') if typeof(last) == 'number'
|
||||
const checkOp = function(op) {
|
||||
if (!Array.isArray(op)) { throw new Error('Op must be an array of components'); }
|
||||
let last = null;
|
||||
return (() => {
|
||||
const result = [];
|
||||
for (let c of Array.from(op)) {
|
||||
if (typeof(c) === 'object') {
|
||||
if (c.i !== undefined) {
|
||||
if (((typeof(c.i) !== 'string') || !(c.i.length > 0)) && ((typeof(c.i) !== 'number') || !(c.i > 0))) { throw new Error('Inserts must insert a string or a +ive number'); }
|
||||
} else if (c.d !== undefined) {
|
||||
if ((typeof(c.d) !== 'number') || !(c.d > 0)) { throw new Error('Deletes must be a +ive number'); }
|
||||
} else {
|
||||
throw new Error('Operation component must define .i or .d');
|
||||
}
|
||||
} else {
|
||||
if (typeof(c) !== 'number') { throw new Error('Op components must be objects or numbers'); }
|
||||
if (!(c > 0)) { throw new Error('Skip components must be a positive number'); }
|
||||
if (typeof(last) === 'number') { throw new Error('Adjacent skip components should be combined'); }
|
||||
}
|
||||
|
||||
last = c
|
||||
result.push(last = c);
|
||||
}
|
||||
return result;
|
||||
})();
|
||||
};
|
||||
|
||||
# Take the next part from the specified position in a document snapshot.
|
||||
# position = {index, offset}. It will be updated.
|
||||
type._takeDoc = takeDoc = (doc, position, maxlength, tombsIndivisible) ->
|
||||
throw new Error 'Operation goes past the end of the document' if position.index >= doc.data.length
|
||||
// Take the next part from the specified position in a document snapshot.
|
||||
// position = {index, offset}. It will be updated.
|
||||
type._takeDoc = (takeDoc = function(doc, position, maxlength, tombsIndivisible) {
|
||||
if (position.index >= doc.data.length) { throw new Error('Operation goes past the end of the document'); }
|
||||
|
||||
part = doc.data[position.index]
|
||||
# peel off data[0]
|
||||
result = if typeof(part) == 'string'
|
||||
if maxlength != undefined
|
||||
part[position.offset...(position.offset + maxlength)]
|
||||
else
|
||||
part[position.offset...]
|
||||
else
|
||||
if maxlength == undefined or tombsIndivisible
|
||||
const part = doc.data[position.index];
|
||||
// peel off data[0]
|
||||
const result = typeof(part) === 'string' ?
|
||||
maxlength !== undefined ?
|
||||
part.slice(position.offset, (position.offset + maxlength))
|
||||
:
|
||||
part.slice(position.offset)
|
||||
:
|
||||
(maxlength === undefined) || tombsIndivisible ?
|
||||
part - position.offset
|
||||
else
|
||||
Math.min(maxlength, part - position.offset)
|
||||
:
|
||||
Math.min(maxlength, part - position.offset);
|
||||
|
||||
resultLen = result.length || result
|
||||
const resultLen = result.length || result;
|
||||
|
||||
if (part.length || part) - position.offset > resultLen
|
||||
position.offset += resultLen
|
||||
else
|
||||
position.index++
|
||||
position.offset = 0
|
||||
if (((part.length || part) - position.offset) > resultLen) {
|
||||
position.offset += resultLen;
|
||||
} else {
|
||||
position.index++;
|
||||
position.offset = 0;
|
||||
}
|
||||
|
||||
result
|
||||
return result;
|
||||
});
|
||||
|
||||
# Append a part to the end of a document
|
||||
type._appendDoc = appendDoc = (doc, p) ->
|
||||
return if p == 0 or p == ''
|
||||
// Append a part to the end of a document
|
||||
type._appendDoc = (appendDoc = function(doc, p) {
|
||||
if ((p === 0) || (p === '')) { return; }
|
||||
|
||||
if typeof p is 'string'
|
||||
doc.charLength += p.length
|
||||
doc.totalLength += p.length
|
||||
else
|
||||
doc.totalLength += p
|
||||
if (typeof p === 'string') {
|
||||
doc.charLength += p.length;
|
||||
doc.totalLength += p.length;
|
||||
} else {
|
||||
doc.totalLength += p;
|
||||
}
|
||||
|
||||
data = doc.data
|
||||
if data.length == 0
|
||||
data.push p
|
||||
else if typeof(data[data.length - 1]) == typeof(p)
|
||||
data[data.length - 1] += p
|
||||
else
|
||||
data.push p
|
||||
return
|
||||
const {
|
||||
data
|
||||
} = doc;
|
||||
if (data.length === 0) {
|
||||
data.push(p);
|
||||
} else if (typeof(data[data.length - 1]) === typeof(p)) {
|
||||
data[data.length - 1] += p;
|
||||
} else {
|
||||
data.push(p);
|
||||
}
|
||||
});
|
||||
|
||||
# Apply the op to the document. The document is not modified in the process.
|
||||
type.apply = (doc, op) ->
|
||||
unless doc.totalLength != undefined and doc.charLength != undefined and doc.data.length != undefined
|
||||
throw new Error('Snapshot is invalid')
|
||||
// Apply the op to the document. The document is not modified in the process.
|
||||
type.apply = function(doc, op) {
|
||||
if ((doc.totalLength === undefined) || (doc.charLength === undefined) || (doc.data.length === undefined)) {
|
||||
throw new Error('Snapshot is invalid');
|
||||
}
|
||||
|
||||
checkOp op
|
||||
checkOp(op);
|
||||
|
||||
newDoc = type.create()
|
||||
position = {index:0, offset:0}
|
||||
const newDoc = type.create();
|
||||
const position = {index:0, offset:0};
|
||||
|
||||
for component in op
|
||||
if typeof(component) is 'number'
|
||||
remainder = component
|
||||
while remainder > 0
|
||||
part = takeDoc doc, position, remainder
|
||||
for (let component of Array.from(op)) {
|
||||
var part, remainder;
|
||||
if (typeof(component) === 'number') {
|
||||
remainder = component;
|
||||
while (remainder > 0) {
|
||||
part = takeDoc(doc, position, remainder);
|
||||
|
||||
appendDoc newDoc, part
|
||||
remainder -= part.length || part
|
||||
appendDoc(newDoc, part);
|
||||
remainder -= part.length || part;
|
||||
}
|
||||
|
||||
else if component.i != undefined
|
||||
appendDoc newDoc, component.i
|
||||
else if component.d != undefined
|
||||
remainder = component.d
|
||||
while remainder > 0
|
||||
part = takeDoc doc, position, remainder
|
||||
remainder -= part.length || part
|
||||
appendDoc newDoc, component.d
|
||||
} else if (component.i !== undefined) {
|
||||
appendDoc(newDoc, component.i);
|
||||
} else if (component.d !== undefined) {
|
||||
remainder = component.d;
|
||||
while (remainder > 0) {
|
||||
part = takeDoc(doc, position, remainder);
|
||||
remainder -= part.length || part;
|
||||
}
|
||||
appendDoc(newDoc, component.d);
|
||||
}
|
||||
}
|
||||
|
||||
newDoc
|
||||
return newDoc;
|
||||
};
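
// Sketch: applying an op to the tombstone document from the earlier example.
// Note the op must traverse all 16 positions, tombstones included.
const before = type.deserialize(['Hello ', 5, 'world']);
const after = type.apply(before, [6, {i:'there '}, 10]);
after.data; // => ['Hello there ', 5, 'world']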
|
||||
|
||||
# Append an op component to the end of the specified op.
|
||||
# Exported for the randomOpGenerator.
|
||||
type._append = append = (op, component) ->
|
||||
if component == 0 || component.i == '' || component.i == 0 || component.d == 0
|
||||
return
|
||||
else if op.length == 0
|
||||
op.push component
|
||||
else
|
||||
last = op[op.length - 1]
|
||||
if typeof(component) == 'number' && typeof(last) == 'number'
|
||||
op[op.length - 1] += component
|
||||
else if component.i != undefined && last.i? && typeof(last.i) == typeof(component.i)
|
||||
last.i += component.i
|
||||
else if component.d != undefined && last.d?
|
||||
last.d += component.d
|
||||
else
|
||||
op.push component
|
||||
// Append an op component to the end of the specified op.
|
||||
// Exported for the randomOpGenerator.
|
||||
type._append = (append = function(op, component) {
|
||||
if ((component === 0) || (component.i === '') || (component.i === 0) || (component.d === 0)) {
|
||||
return;
|
||||
} else if (op.length === 0) {
|
||||
return op.push(component);
|
||||
} else {
|
||||
const last = op[op.length - 1];
|
||||
if ((typeof(component) === 'number') && (typeof(last) === 'number')) {
|
||||
return op[op.length - 1] += component;
|
||||
} else if ((component.i !== undefined) && (last.i != null) && (typeof(last.i) === typeof(component.i))) {
|
||||
return last.i += component.i;
|
||||
} else if ((component.d !== undefined) && (last.d != null)) {
|
||||
return last.d += component.d;
|
||||
} else {
|
||||
return op.push(component);
|
||||
}
|
||||
}
|
||||
});
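
// Sketch: _append merges compatible neighbouring components.
const op = [];
type._append(op, 3); type._append(op, 2);             // op => [5]
type._append(op, {i:'a'}); type._append(op, {i:'b'}); // op => [5, {i:'ab'}]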
|
||||
|
||||
# Makes 2 functions for taking components from the start of an op, and for peeking
|
||||
# at the next op that could be taken.
|
||||
makeTake = (op) ->
|
||||
# The index of the next component to take
|
||||
index = 0
|
||||
# The offset into the component
|
||||
offset = 0
|
||||
// Makes 2 functions for taking components from the start of an op, and for peeking
|
||||
// at the next op that could be taken.
|
||||
const makeTake = function(op) {
|
||||
// The index of the next component to take
|
||||
let index = 0;
|
||||
// The offset into the component
|
||||
let offset = 0;
|
||||
|
||||
# Take up to length maxlength from the op. If maxlength is not defined, there is no max.
|
||||
# If insertsIndivisible is true, inserts (& insert tombstones) won't be separated.
|
||||
#
|
||||
# Returns null when op is fully consumed.
|
||||
take = (maxlength, insertsIndivisible) ->
|
||||
return null if index == op.length
|
||||
// Take up to length maxlength from the op. If maxlength is not defined, there is no max.
|
||||
// If insertsIndivisible is true, inserts (& insert tombstones) won't be separated.
|
||||
//
|
||||
// Returns null when op is fully consumed.
|
||||
const take = function(maxlength, insertsIndivisible) {
|
||||
let current;
|
||||
if (index === op.length) { return null; }
|
||||
|
||||
e = op[index]
|
||||
if typeof((current = e)) == 'number' or typeof((current = e.i)) == 'number' or (current = e.d) != undefined
|
||||
if !maxlength? or current - offset <= maxlength or (insertsIndivisible and e.i != undefined)
|
||||
# Return the rest of the current element.
|
||||
c = current - offset
|
||||
++index; offset = 0
|
||||
else
|
||||
offset += maxlength
|
||||
c = maxlength
|
||||
if e.i != undefined then {i:c} else if e.d != undefined then {d:c} else c
|
||||
else
|
||||
# Take from the inserted string
|
||||
if !maxlength? or e.i.length - offset <= maxlength or insertsIndivisible
|
||||
result = {i:e.i[offset..]}
|
||||
++index; offset = 0
|
||||
else
|
||||
result = {i:e.i[offset...offset + maxlength]}
|
||||
offset += maxlength
|
||||
result
|
||||
const e = op[index];
|
||||
if ((typeof((current = e)) === 'number') || (typeof((current = e.i)) === 'number') || ((current = e.d) !== undefined)) {
|
||||
let c;
|
||||
if ((maxlength == null) || ((current - offset) <= maxlength) || (insertsIndivisible && (e.i !== undefined))) {
|
||||
// Return the rest of the current element.
|
||||
c = current - offset;
|
||||
++index; offset = 0;
|
||||
} else {
|
||||
offset += maxlength;
|
||||
c = maxlength;
|
||||
}
|
||||
if (e.i !== undefined) { return {i:c}; } else if (e.d !== undefined) { return {d:c}; } else { return c; }
|
||||
} else {
|
||||
// Take from the inserted string
|
||||
let result;
|
||||
if ((maxlength == null) || ((e.i.length - offset) <= maxlength) || insertsIndivisible) {
|
||||
result = {i:e.i.slice(offset)};
|
||||
++index; offset = 0;
|
||||
} else {
|
||||
result = {i:e.i.slice(offset, offset + maxlength)};
|
||||
offset += maxlength;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
};
|
||||
|
||||
peekType = -> op[index]
|
||||
const peekType = () => op[index];
|
||||
|
||||
[take, peekType]
|
||||
return [take, peekType];
|
||||
};
|
||||
|
||||
# Find and return the length of an op component
|
||||
componentLength = (component) ->
|
||||
if typeof(component) == 'number'
|
||||
component
|
||||
else if typeof(component.i) == 'string'
|
||||
component.i.length
|
||||
else
|
||||
# This should work because c.d and c.i must be +ive.
|
||||
component.d or component.i
|
||||
// Find and return the length of an op component
|
||||
const componentLength = function(component) {
|
||||
if (typeof(component) === 'number') {
|
||||
return component;
|
||||
} else if (typeof(component.i) === 'string') {
|
||||
return component.i.length;
|
||||
} else {
|
||||
// This should work because c.d and c.i must be +ive.
|
||||
return component.d || component.i;
|
||||
}
|
||||
};
|
||||
|
||||
# Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate
|
||||
# adjacent inserts and deletes.
|
||||
type.normalize = (op) ->
|
||||
newOp = []
|
||||
append newOp, component for component in op
|
||||
newOp
|
||||
// Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate
|
||||
// adjacent inserts and deletes.
|
||||
type.normalize = function(op) {
|
||||
const newOp = [];
|
||||
for (let component of Array.from(op)) { append(newOp, component); }
|
||||
return newOp;
|
||||
};
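
// Sketch: normalize drops empty components and merges adjacent ones.
type.normalize([0, 1, 1, {i:'a'}, {i:'bc'}]); // => [2, {i:'abc'}]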
|
||||
|
||||
# This is a helper method to transform and prune. goForwards is true for transform, false for prune.
|
||||
transformer = (op, otherOp, goForwards, side) ->
|
||||
checkOp op
|
||||
checkOp otherOp
|
||||
newOp = []
|
||||
// This is a helper method to transform and prune. goForwards is true for transform, false for prune.
|
||||
const transformer = function(op, otherOp, goForwards, side) {
|
||||
let component;
|
||||
checkOp(op);
|
||||
checkOp(otherOp);
|
||||
const newOp = [];
|
||||
|
||||
[take, peek] = makeTake op
|
||||
const [take, peek] = Array.from(makeTake(op));
|
||||
|
||||
for component in otherOp
|
||||
length = componentLength component
|
||||
for (component of Array.from(otherOp)) {
|
||||
var chunk;
|
||||
let length = componentLength(component);
|
||||
|
||||
if component.i != undefined # Insert text or tombs
|
||||
if goForwards # transform - insert skips over inserted parts
|
||||
if side == 'left'
|
||||
# The left insert should go first.
|
||||
append newOp, take() while peek()?.i != undefined
|
||||
if (component.i !== undefined) { // Insert text or tombs
|
||||
if (goForwards) { // transform - insert skips over inserted parts
|
||||
if (side === 'left') {
|
||||
// The left insert should go first.
|
||||
while (__guard__(peek(), x => x.i) !== undefined) { append(newOp, take()); }
|
||||
}
|
||||
|
||||
# In any case, skip the inserted text.
|
||||
append newOp, length
|
||||
// In any case, skip the inserted text.
|
||||
append(newOp, length);
|
||||
|
||||
else # Prune. Remove skips for inserts.
|
||||
while length > 0
|
||||
chunk = take length, true
|
||||
} else { // Prune. Remove skips for inserts.
|
||||
while (length > 0) {
|
||||
chunk = take(length, true);
|
||||
|
||||
throw new Error 'The transformed op is invalid' unless chunk != null
|
||||
throw new Error 'The transformed op deletes locally inserted characters - it cannot be purged of the insert.' if chunk.d != undefined
|
||||
if (chunk === null) { throw new Error('The transformed op is invalid'); }
|
||||
if (chunk.d !== undefined) { throw new Error('The transformed op deletes locally inserted characters - it cannot be purged of the insert.'); }
|
||||
|
||||
if typeof chunk is 'number'
|
||||
length -= chunk
|
||||
else
|
||||
append newOp, chunk
|
||||
if (typeof chunk === 'number') {
|
||||
length -= chunk;
|
||||
} else {
|
||||
append(newOp, chunk);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
else # Skip or delete
|
||||
while length > 0
|
||||
chunk = take length, true
|
||||
throw new Error('The op traverses more elements than the document has') unless chunk != null
|
||||
} else { // Skip or delete
|
||||
while (length > 0) {
|
||||
chunk = take(length, true);
|
||||
if (chunk === null) { throw new Error('The op traverses more elements than the document has'); }
|
||||
|
||||
append newOp, chunk
|
||||
length -= componentLength chunk unless chunk.i
|
||||
append(newOp, chunk);
|
||||
if (!chunk.i) { length -= componentLength(chunk); }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Append extras from op1
|
||||
while (component = take())
|
||||
throw new Error "Remaining fragments in the op: #{component}" unless component.i != undefined
|
||||
append newOp, component
|
||||
// Append extras from op1
|
||||
while (component = take()) {
|
||||
if (component.i === undefined) { throw new Error(`Remaining fragments in the op: ${component}`); }
|
||||
append(newOp, component);
|
||||
}
|
||||
|
||||
newOp
|
||||
return newOp;
|
||||
};
|
||||
|
||||
# transform op1 by op2. Return transformed version of op1.
|
||||
# op1 and op2 are unchanged by transform.
|
||||
# side should be 'left' or 'right', depending on if op1.id <> op2.id. 'left' == client op.
|
||||
type.transform = (op, otherOp, side) ->
|
||||
throw new Error "side (#{side}) should be 'left' or 'right'" unless side == 'left' or side == 'right'
|
||||
transformer op, otherOp, true, side
|
||||
// transform op1 by op2. Return transformed version of op1.
|
||||
// op1 and op2 are unchanged by transform.
|
||||
// side should be 'left' or 'right', depending on if op1.id <> op2.id. 'left' == client op.
|
||||
type.transform = function(op, otherOp, side) {
|
||||
if ((side !== 'left') && (side !== 'right')) { throw new Error(`side (${side}) should be 'left' or 'right'`); }
|
||||
return transformer(op, otherOp, true, side);
|
||||
};
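
// Sketch: transforming two concurrent ops made against the 16-element
// document ['Hello ', 5, 'world'] from the earlier example.
const opA = [6, {i:'A'}, 10]; // insert 'A' after 'Hello '
const opB = [16, {i:'B'}];    // append 'B' at the very end
type.transform(opA, opB, 'left');  // => [6, {i:'A'}, 11]
type.transform(opB, opA, 'right'); // => [17, {i:'B'}]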
|
||||
|
||||
# Prune is the inverse of transform.
|
||||
type.prune = (op, otherOp) -> transformer op, otherOp, false
|
||||
// Prune is the inverse of transform.
|
||||
type.prune = (op, otherOp) => transformer(op, otherOp, false);
|
||||
|
||||
# Compose 2 ops into 1 op.
|
||||
type.compose = (op1, op2) ->
|
||||
return op2 if op1 == null or op1 == undefined
|
||||
// Compose 2 ops into 1 op.
|
||||
type.compose = function(op1, op2) {
|
||||
let component;
|
||||
if ((op1 === null) || (op1 === undefined)) { return op2; }
|
||||
|
||||
checkOp op1
|
||||
checkOp op2
|
||||
checkOp(op1);
|
||||
checkOp(op2);
|
||||
|
||||
result = []
|
||||
const result = [];
|
||||
|
||||
[take, _] = makeTake op1
|
||||
const [take, _] = Array.from(makeTake(op1));
|
||||
|
||||
for component in op2
|
||||
for (component of Array.from(op2)) {
|
||||
|
||||
if typeof(component) == 'number' # Skip
|
||||
# Just copy from op1.
|
||||
length = component
|
||||
while length > 0
|
||||
chunk = take length
|
||||
throw new Error('The op traverses more elements than the document has') unless chunk != null
|
||||
var chunk, length;
|
||||
if (typeof(component) === 'number') { // Skip
|
||||
// Just copy from op1.
|
||||
length = component;
|
||||
while (length > 0) {
|
||||
chunk = take(length);
|
||||
if (chunk === null) { throw new Error('The op traverses more elements than the document has'); }
|
||||
|
||||
append result, chunk
|
||||
length -= componentLength chunk
|
||||
append(result, chunk);
|
||||
length -= componentLength(chunk);
|
||||
}
|
||||
|
||||
else if component.i != undefined # Insert
|
||||
append result, {i:component.i}
|
||||
} else if (component.i !== undefined) { // Insert
|
||||
append(result, {i:component.i});
|
||||
|
||||
else # Delete
|
||||
length = component.d
|
||||
while length > 0
|
||||
chunk = take length
|
||||
throw new Error('The op traverses more elements than the document has') unless chunk != null
|
||||
} else { // Delete
|
||||
length = component.d;
|
||||
while (length > 0) {
|
||||
chunk = take(length);
|
||||
if (chunk === null) { throw new Error('The op traverses more elements than the document has'); }
|
||||
|
||||
chunkLength = componentLength chunk
|
||||
if chunk.i != undefined
|
||||
append result, {i:chunkLength}
|
||||
else
|
||||
append result, {d:chunkLength}
|
||||
const chunkLength = componentLength(chunk);
|
||||
if (chunk.i !== undefined) {
|
||||
append(result, {i:chunkLength});
|
||||
} else {
|
||||
append(result, {d:chunkLength});
|
||||
}
|
||||
|
||||
length -= chunkLength
|
||||
length -= chunkLength;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Append extras from op1
|
||||
while (component = take())
|
||||
throw new Error "Remaining fragments in op1: #{component}" unless component.i != undefined
|
||||
append result, component
|
||||
// Append extras from op1
|
||||
while (component = take()) {
|
||||
if (component.i === undefined) { throw new Error(`Remaining fragments in op1: ${component}`); }
|
||||
append(result, component);
|
||||
}
|
||||
|
||||
result
|
||||
return result;
|
||||
};
|
||||
|
||||
if WEB?
|
||||
exports.types['text-tp2'] = type
|
||||
else
|
||||
module.exports = type
|
||||
if (typeof WEB !== 'undefined' && WEB !== null) {
|
||||
exports.types['text-tp2'] = type;
|
||||
} else {
|
||||
module.exports = type;
|
||||
}
|
||||
|
||||
|
||||
function __guard__(value, transform) {
|
||||
return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined;
|
||||
}
|
|
@ -1,263 +1,305 @@
|
|||
# A simple text implementation
|
||||
#
|
||||
# Operations are lists of components.
|
||||
# Each component either inserts or deletes at a specified position in the document.
|
||||
#
|
||||
# Components are either:
|
||||
# {i:'str', p:100}: Insert 'str' at position 100 in the document
|
||||
# {d:'str', p:100}: Delete 'str' at position 100 in the document
|
||||
#
|
||||
# Components in an operation are executed sequentially, so the position of components
|
||||
# assumes previous components have already executed.
|
||||
#
|
||||
# Eg: This op:
|
||||
# [{i:'abc', p:0}]
|
||||
# is equivalent to this op:
|
||||
# [{i:'a', p:0}, {i:'b', p:1}, {i:'c', p:2}]
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
// A simple text implementation
|
||||
//
|
||||
// Operations are lists of components.
|
||||
// Each component either inserts or deletes at a specified position in the document.
|
||||
//
|
||||
// Components are either:
|
||||
// {i:'str', p:100}: Insert 'str' at position 100 in the document
|
||||
// {d:'str', p:100}: Delete 'str' at position 100 in the document
|
||||
//
|
||||
// Components in an operation are executed sequentially, so the position of components
|
||||
// assumes previous components have already executed.
|
||||
//
|
||||
// Eg: This op:
|
||||
// [{i:'abc', p:0}]
|
||||
// is equivalent to this op:
|
||||
// [{i:'a', p:0}, {i:'b', p:1}, {i:'c', p:2}]
|
||||
|
||||
# NOTE: The global scope here is shared with other sharejs files when built with closure.
|
||||
# Be careful what ends up in your namespace.
|
||||
// NOTE: The global scope here is shared with other sharejs files when built with closure.
|
||||
// Be careful what ends up in your namespace.
|
||||
|
||||
text = {}
|
||||
let append, transformComponent;
|
||||
const text = {};
|
||||
|
||||
text.name = 'text'
|
||||
text.name = 'text';
|
||||
|
||||
text.create = -> ''
|
||||
text.create = () => '';
|
||||
|
||||
strInject = (s1, pos, s2) -> s1[...pos] + s2 + s1[pos..]
|
||||
const strInject = (s1, pos, s2) => s1.slice(0, pos) + s2 + s1.slice(pos);
|
||||
|
||||
checkValidComponent = (c) ->
|
||||
throw new Error 'component missing position field' if typeof c.p != 'number'
|
||||
const checkValidComponent = function(c) {
|
||||
if (typeof c.p !== 'number') { throw new Error('component missing position field'); }
|
||||
|
||||
i_type = typeof c.i
|
||||
d_type = typeof c.d
|
||||
c_type = typeof c.c
|
||||
throw new Error 'component needs an i, d or c field' unless (i_type == 'string') ^ (d_type == 'string') ^ (c_type == 'string')
|
||||
const i_type = typeof c.i;
|
||||
const d_type = typeof c.d;
|
||||
const c_type = typeof c.c;
|
||||
if (!((i_type === 'string') ^ (d_type === 'string') ^ (c_type === 'string'))) { throw new Error('component needs an i, d or c field'); }
|
||||
|
||||
throw new Error 'position cannot be negative' unless c.p >= 0
|
||||
if (!(c.p >= 0)) { throw new Error('position cannot be negative'); }
|
||||
};
|
||||
|
||||
checkValidOp = (op) ->
|
||||
checkValidComponent(c) for c in op
|
||||
true
|
||||
const checkValidOp = function(op) {
|
||||
for (let c of Array.from(op)) { checkValidComponent(c); }
|
||||
return true;
|
||||
};
|
||||
|
||||
text.apply = (snapshot, op) ->
|
||||
checkValidOp op
|
||||
for component in op
|
||||
if component.i?
|
||||
snapshot = strInject snapshot, component.p, component.i
|
||||
else if component.d?
|
||||
deleted = snapshot[component.p...(component.p + component.d.length)]
|
||||
throw new Error "Delete component '#{component.d}' does not match deleted text '#{deleted}'" unless component.d == deleted
|
||||
snapshot = snapshot[...component.p] + snapshot[(component.p + component.d.length)..]
|
||||
else if component.c?
|
||||
comment = snapshot[component.p...(component.p + component.c.length)]
|
||||
throw new Error "Comment component '#{component.c}' does not match commented text '#{comment}'" unless component.c == comment
|
||||
else
|
||||
throw new Error "Unknown op type"
|
||||
snapshot
|
||||
text.apply = function(snapshot, op) {
|
||||
checkValidOp(op);
|
||||
for (let component of Array.from(op)) {
|
||||
if (component.i != null) {
|
||||
snapshot = strInject(snapshot, component.p, component.i);
|
||||
} else if (component.d != null) {
|
||||
const deleted = snapshot.slice(component.p, (component.p + component.d.length));
|
||||
if (component.d !== deleted) { throw new Error(`Delete component '${component.d}' does not match deleted text '${deleted}'`); }
|
||||
snapshot = snapshot.slice(0, component.p) + snapshot.slice((component.p + component.d.length));
|
||||
} else if (component.c != null) {
|
||||
const comment = snapshot.slice(component.p, (component.p + component.c.length));
|
||||
if (component.c !== comment) { throw new Error(`Comment component '${component.c}' does not match commented text '${comment}'`); }
|
||||
} else {
|
||||
throw new Error("Unknown op type");
|
||||
}
|
||||
}
|
||||
return snapshot;
|
||||
};
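
// A usage sketch (the require path is an assumption for the example):
const text = require('./text');
text.apply('world', [{i:'hello ', p:0}]);       // => 'hello world'
text.apply('hello world', [{d:'hello ', p:0}]); // => 'world'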
|
||||
|
||||
|
||||
# Exported for use by the random op generator.
|
||||
#
|
||||
# For simplicity, this version of append does not compress adjacent inserts and deletes of
|
||||
# the same text. It would be nice to change that at some stage.
|
||||
text._append = append = (newOp, c) ->
|
||||
return if c.i == '' or c.d == ''
|
||||
if newOp.length == 0
|
||||
newOp.push c
|
||||
else
|
||||
last = newOp[newOp.length - 1]
|
||||
// Exported for use by the random op generator.
|
||||
//
|
||||
// For simplicity, this version of append does not compress adjacent inserts and deletes of
|
||||
// the same text. It would be nice to change that at some stage.
|
||||
text._append = (append = function(newOp, c) {
|
||||
if ((c.i === '') || (c.d === '')) { return; }
|
||||
if (newOp.length === 0) {
|
||||
return newOp.push(c);
|
||||
} else {
|
||||
const last = newOp[newOp.length - 1];
|
||||
|
||||
# Compose the insert into the previous insert if possible
|
||||
if last.i? && c.i? and last.p <= c.p <= (last.p + last.i.length)
|
||||
newOp[newOp.length - 1] = {i:strInject(last.i, c.p - last.p, c.i), p:last.p}
|
||||
else if last.d? && c.d? and c.p <= last.p <= (c.p + c.d.length)
|
||||
newOp[newOp.length - 1] = {d:strInject(c.d, last.p - c.p, last.d), p:c.p}
|
||||
else
|
||||
newOp.push c
|
||||
// Compose the insert into the previous insert if possible
|
||||
if ((last.i != null) && (c.i != null) && (last.p <= c.p && c.p <= (last.p + last.i.length))) {
|
||||
return newOp[newOp.length - 1] = {i:strInject(last.i, c.p - last.p, c.i), p:last.p};
|
||||
} else if ((last.d != null) && (c.d != null) && (c.p <= last.p && last.p <= (c.p + c.d.length))) {
|
||||
return newOp[newOp.length - 1] = {d:strInject(c.d, last.p - c.p, last.d), p:c.p};
|
||||
} else {
|
||||
return newOp.push(c);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
text.compose = (op1, op2) ->
|
||||
checkValidOp op1
|
||||
checkValidOp op2
|
||||
text.compose = function(op1, op2) {
|
||||
checkValidOp(op1);
|
||||
checkValidOp(op2);
|
||||
|
||||
newOp = op1.slice()
|
||||
append newOp, c for c in op2
|
||||
const newOp = op1.slice();
|
||||
for (let c of Array.from(op2)) { append(newOp, c); }
|
||||
|
||||
newOp
|
||||
return newOp;
|
||||
};
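
// Sketch: compose folds op2 into a copy of op1; here append merges the
// touching inserts into a single component.
text.compose([{i:'a', p:0}], [{i:'b', p:1}]); // => [{i:'ab', p:0}]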
|
||||
|
||||
# Attempt to compress the op components together 'as much as possible'.
|
||||
# This implementation preserves order and preserves create/delete pairs.
|
||||
text.compress = (op) -> text.compose [], op
|
||||
// Attempt to compress the op components together 'as much as possible'.
|
||||
// This implementation preserves order and preserves create/delete pairs.
|
||||
text.compress = op => text.compose([], op);
|
||||
|
||||
text.normalize = (op) ->
|
||||
newOp = []
|
||||
text.normalize = function(op) {
|
||||
const newOp = [];
|
||||
|
||||
# Normalize should allow ops which are a single (unwrapped) component:
|
||||
# {i:'asdf', p:23}.
|
||||
# There's no good way to test if something is an array:
|
||||
# http://perfectionkills.com/instanceof-considered-harmful-or-how-to-write-a-robust-isarray/
|
||||
# so this is probably the least bad solution.
|
||||
op = [op] if op.i? or op.p?
|
||||
// Normalize should allow ops which are a single (unwrapped) component:
|
||||
// {i:'asdf', p:23}.
|
||||
// There's no good way to test if something is an array:
|
||||
// http://perfectionkills.com/instanceof-considered-harmful-or-how-to-write-a-robust-isarray/
|
||||
// so this is probably the least bad solution.
|
||||
if ((op.i != null) || (op.p != null)) { op = [op]; }
|
||||
|
||||
for c in op
|
||||
c.p ?= 0
|
||||
append newOp, c
|
||||
for (let c of Array.from(op)) {
|
||||
if (c.p == null) { c.p = 0; }
|
||||
append(newOp, c);
|
||||
}
|
||||
|
||||
newOp
|
||||
return newOp;
|
||||
};
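
// Sketch: normalize also accepts a single bare component and fills in a
// missing position.
text.normalize({i:'asdf', p:23}); // => [{i:'asdf', p:23}]
text.normalize([{i:'a'}]);        // => [{i:'a', p:0}]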
|
||||
|
||||
# This helper method transforms a position by an op component.
|
||||
#
|
||||
# If c is an insert, insertAfter specifies whether the transform
|
||||
# is pushed after the insert (true) or before it (false).
|
||||
#
|
||||
# insertAfter is optional for deletes.
|
||||
transformPosition = (pos, c, insertAfter) ->
|
||||
if c.i?
|
||||
if c.p < pos || (c.p == pos && insertAfter)
|
||||
pos + c.i.length
|
||||
else
|
||||
pos
|
||||
else if c.d?
|
||||
# I think this could also be written as: Math.min(c.p, Math.min(c.p - otherC.p, otherC.d.length))
|
||||
# but I think it's harder to read that way, and it compiles using ternary operators anyway
|
||||
# so it's no slower written like this.
|
||||
if pos <= c.p
|
||||
pos
|
||||
else if pos <= c.p + c.d.length
|
||||
c.p
|
||||
else
|
||||
pos - c.d.length
|
||||
else if c.c?
|
||||
pos
|
||||
else
|
||||
throw new Error("unknown op type")
|
||||
// This helper method transforms a position by an op component.
|
||||
//
|
||||
// If c is an insert, insertAfter specifies whether the transform
|
||||
// is pushed after the insert (true) or before it (false).
|
||||
//
|
||||
// insertAfter is optional for deletes.
|
||||
const transformPosition = function(pos, c, insertAfter) {
|
||||
if (c.i != null) {
|
||||
if ((c.p < pos) || ((c.p === pos) && insertAfter)) {
|
||||
return pos + c.i.length;
|
||||
} else {
|
||||
return pos;
|
||||
}
|
||||
} else if (c.d != null) {
|
||||
// I think this could also be written as: Math.min(c.p, Math.min(c.p - otherC.p, otherC.d.length))
|
||||
// but I think it's harder to read that way, and it compiles using ternary operators anyway
|
||||
// so it's no slower written like this.
|
||||
if (pos <= c.p) {
|
||||
return pos;
|
||||
} else if (pos <= (c.p + c.d.length)) {
|
||||
return c.p;
|
||||
} else {
|
||||
return pos - c.d.length;
|
||||
}
|
||||
} else if (c.c != null) {
|
||||
return pos;
|
||||
} else {
|
||||
throw new Error("unknown op type");
|
||||
}
|
||||
};
|
||||
|
||||
# Helper method to transform a cursor position as a result of an op.
|
||||
#
|
||||
# Like transformPosition above, if c is an insert, insertAfter specifies whether the cursor position
|
||||
# is pushed after an insert (true) or before it (false).
|
||||
text.transformCursor = (position, op, side) ->
|
||||
insertAfter = side == 'right'
|
||||
position = transformPosition position, c, insertAfter for c in op
|
||||
position
|
||||
// Helper method to transform a cursor position as a result of an op.
|
||||
//
|
||||
// Like transformPosition above, if c is an insert, insertAfter specifies whether the cursor position
|
||||
// is pushed after an insert (true) or before it (false).
|
||||
text.transformCursor = function(position, op, side) {
|
||||
const insertAfter = side === 'right';
|
||||
for (let c of Array.from(op)) { position = transformPosition(position, c, insertAfter); }
|
||||
return position;
|
||||
};
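
// Sketch: a cursor sitting after the insert point is pushed right; one sitting
// exactly at the insert point moves only for side 'right'.
text.transformCursor(5, [{i:'abc', p:2}], 'right'); // => 8
text.transformCursor(2, [{i:'abc', p:2}], 'left');  // => 2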
|
||||
|
||||
# Transform an op component by another op component. Asymmetric.
|
||||
# The result will be appended to destination.
|
||||
#
|
||||
# exported for use in JSON type
|
||||
text._tc = transformComponent = (dest, c, otherC, side) ->
|
||||
checkValidOp [c]
|
||||
checkValidOp [otherC]
|
||||
// Transform an op component by another op component. Asymmetric.
|
||||
// The result will be appended to destination.
|
||||
//
|
||||
// exported for use in JSON type
|
||||
text._tc = (transformComponent = function(dest, c, otherC, side) {
|
||||
let cIntersect, intersectEnd, intersectStart, newC, otherIntersect;
|
||||
checkValidOp([c]);
|
||||
checkValidOp([otherC]);
|
||||
|
||||
if c.i?
|
||||
append dest, {i:c.i, p:transformPosition(c.p, otherC, side == 'right')}
|
||||
if (c.i != null) {
|
||||
append(dest, {i:c.i, p:transformPosition(c.p, otherC, side === 'right')});
|
||||
|
||||
else if c.d? # Delete
|
||||
if otherC.i? # delete vs insert
|
||||
s = c.d
|
||||
if c.p < otherC.p
|
||||
append dest, {d:s[...otherC.p - c.p], p:c.p}
|
||||
s = s[(otherC.p - c.p)..]
|
||||
if s != ''
|
||||
append dest, {d:s, p:c.p + otherC.i.length}
|
||||
} else if (c.d != null) { // Delete
|
||||
if (otherC.i != null) { // delete vs insert
|
||||
let s = c.d;
|
||||
if (c.p < otherC.p) {
|
||||
append(dest, {d:s.slice(0, otherC.p - c.p), p:c.p});
|
||||
s = s.slice((otherC.p - c.p));
|
||||
}
|
||||
if (s !== '') {
|
||||
append(dest, {d:s, p:c.p + otherC.i.length});
|
||||
}
|
||||
|
||||
else if otherC.d? # Delete vs delete
|
||||
if c.p >= otherC.p + otherC.d.length
|
||||
append dest, {d:c.d, p:c.p - otherC.d.length}
|
||||
else if c.p + c.d.length <= otherC.p
|
||||
append dest, c
|
||||
else
|
||||
# They overlap somewhere.
|
||||
newC = {d:'', p:c.p}
|
||||
if c.p < otherC.p
|
||||
newC.d = c.d[...(otherC.p - c.p)]
|
||||
if c.p + c.d.length > otherC.p + otherC.d.length
|
||||
newC.d += c.d[(otherC.p + otherC.d.length - c.p)..]
|
||||
} else if (otherC.d != null) { // Delete vs delete
|
||||
if (c.p >= (otherC.p + otherC.d.length)) {
|
||||
append(dest, {d:c.d, p:c.p - otherC.d.length});
|
||||
} else if ((c.p + c.d.length) <= otherC.p) {
|
||||
append(dest, c);
|
||||
} else {
|
||||
// They overlap somewhere.
|
||||
newC = {d:'', p:c.p};
|
||||
if (c.p < otherC.p) {
|
||||
newC.d = c.d.slice(0, (otherC.p - c.p));
|
||||
}
|
||||
if ((c.p + c.d.length) > (otherC.p + otherC.d.length)) {
|
||||
newC.d += c.d.slice(((otherC.p + otherC.d.length) - c.p));
|
||||
}
|
||||
|
||||
# This is entirely optional - just for a check that the deleted
|
||||
# text in the two ops matches
|
||||
intersectStart = Math.max c.p, otherC.p
|
||||
intersectEnd = Math.min c.p + c.d.length, otherC.p + otherC.d.length
|
||||
cIntersect = c.d[intersectStart - c.p...intersectEnd - c.p]
|
||||
otherIntersect = otherC.d[intersectStart - otherC.p...intersectEnd - otherC.p]
|
||||
throw new Error 'Delete ops delete different text in the same region of the document' unless cIntersect == otherIntersect
|
||||
// This is entirely optional - just for a check that the deleted
|
||||
// text in the two ops matches
|
||||
intersectStart = Math.max(c.p, otherC.p);
|
||||
intersectEnd = Math.min(c.p + c.d.length, otherC.p + otherC.d.length);
|
||||
cIntersect = c.d.slice(intersectStart - c.p, intersectEnd - c.p);
|
||||
otherIntersect = otherC.d.slice(intersectStart - otherC.p, intersectEnd - otherC.p);
|
||||
if (cIntersect !== otherIntersect) { throw new Error('Delete ops delete different text in the same region of the document'); }
|
||||
|
||||
if newC.d != ''
|
||||
# This could be rewritten similarly to insert v delete, above.
|
||||
newC.p = transformPosition newC.p, otherC
|
||||
append dest, newC
|
||||
if (newC.d !== '') {
|
||||
// This could be rewritten similarly to insert v delete, above.
|
||||
newC.p = transformPosition(newC.p, otherC);
|
||||
append(dest, newC);
|
||||
}
|
||||
}
|
||||
|
||||
else if otherC.c?
|
||||
append dest, c
|
||||
} else if (otherC.c != null) {
|
||||
append(dest, c);
|
||||
|
||||
else
|
||||
throw new Error("unknown op type")
|
||||
} else {
|
||||
throw new Error("unknown op type");
|
||||
}
|
||||
|
||||
else if c.c? # Comment
|
||||
if otherC.i?
|
||||
if c.p < otherC.p < c.p + c.c.length
|
||||
offset = otherC.p - c.p
|
||||
new_c = (c.c[0..(offset-1)] + otherC.i + c.c[offset...])
|
||||
append dest, {c:new_c, p:c.p, t: c.t}
|
||||
else
|
||||
append dest, {c:c.c, p:transformPosition(c.p, otherC, true), t: c.t}
|
||||
} else if (c.c != null) { // Comment
|
||||
if (otherC.i != null) {
|
||||
if (c.p < otherC.p && otherC.p < c.p + c.c.length) {
|
||||
const offset = otherC.p - c.p;
|
||||
const new_c = (c.c.slice(0, +(offset-1) + 1 || undefined) + otherC.i + c.c.slice(offset));
|
||||
append(dest, {c:new_c, p:c.p, t: c.t});
|
||||
} else {
|
||||
append(dest, {c:c.c, p:transformPosition(c.p, otherC, true), t: c.t});
|
||||
}
|
||||
|
||||
else if otherC.d?
|
||||
if c.p >= otherC.p + otherC.d.length
|
||||
append dest, {c:c.c, p:c.p - otherC.d.length, t: c.t}
|
||||
else if c.p + c.c.length <= otherC.p
|
||||
append dest, c
|
||||
else # Delete overlaps comment
|
||||
# They overlap somewhere.
|
||||
newC = {c:'', p:c.p, t: c.t}
|
||||
if c.p < otherC.p
|
||||
newC.c = c.c[...(otherC.p - c.p)]
|
||||
if c.p + c.c.length > otherC.p + otherC.d.length
|
||||
newC.c += c.c[(otherC.p + otherC.d.length - c.p)..]
|
||||
} else if (otherC.d != null) {
|
||||
if (c.p >= (otherC.p + otherC.d.length)) {
|
||||
append(dest, {c:c.c, p:c.p - otherC.d.length, t: c.t});
|
||||
} else if ((c.p + c.c.length) <= otherC.p) {
|
||||
append(dest, c);
|
||||
} else { // Delete overlaps comment
|
||||
// They overlap somewhere.
|
||||
newC = {c:'', p:c.p, t: c.t};
|
||||
if (c.p < otherC.p) {
|
||||
newC.c = c.c.slice(0, (otherC.p - c.p));
|
||||
}
|
||||
if ((c.p + c.c.length) > (otherC.p + otherC.d.length)) {
|
||||
newC.c += c.c.slice(((otherC.p + otherC.d.length) - c.p));
|
||||
}
|
||||
|
||||
# This is entirely optional - just for a check that the deleted
|
||||
# text in the two ops matches
|
||||
intersectStart = Math.max c.p, otherC.p
|
||||
intersectEnd = Math.min c.p + c.c.length, otherC.p + otherC.d.length
|
||||
cIntersect = c.c[intersectStart - c.p...intersectEnd - c.p]
|
||||
otherIntersect = otherC.d[intersectStart - otherC.p...intersectEnd - otherC.p]
|
||||
throw new Error 'Delete ops delete different text in the same region of the document' unless cIntersect == otherIntersect
|
||||
// This is entirely optional - just for a check that the deleted
|
||||
// text in the two ops matches
|
||||
intersectStart = Math.max(c.p, otherC.p);
|
||||
intersectEnd = Math.min(c.p + c.c.length, otherC.p + otherC.d.length);
|
||||
cIntersect = c.c.slice(intersectStart - c.p, intersectEnd - c.p);
|
||||
otherIntersect = otherC.d.slice(intersectStart - otherC.p, intersectEnd - otherC.p);
|
||||
if (cIntersect !== otherIntersect) { throw new Error('Delete ops delete different text in the same region of the document'); }
|
||||
|
||||
newC.p = transformPosition newC.p, otherC
|
||||
append dest, newC
|
||||
newC.p = transformPosition(newC.p, otherC);
|
||||
append(dest, newC);
|
||||
}
|
||||
|
||||
else if otherC.c?
|
||||
append dest, c
|
||||
} else if (otherC.c != null) {
|
||||
append(dest, c);
|
||||
|
||||
else
|
||||
throw new Error("unknown op type")
|
||||
} else {
|
||||
throw new Error("unknown op type");
|
||||
}
|
||||
}
|
||||
|
||||
dest
|
||||
return dest;
|
||||
});
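
// Sketch: two concurrent inserts at the same position, resolved through the
// transform that bootstrapTransform builds on top of _tc (the
// transform(op, otherOp, side) signature is assumed here).
const opL = [{i:'a', p:1}];
const opR = [{i:'b', p:1}];
text.transform(opL, opR, 'left');  // => [{i:'a', p:1}] — the left insert stays put
text.transform(opR, opL, 'right'); // => [{i:'b', p:2}] — pushed past the left insert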
|
||||
|
||||
invertComponent = (c) ->
|
||||
if c.i?
|
||||
{d:c.i, p:c.p}
|
||||
else
|
||||
{i:c.d, p:c.p}
|
||||
const invertComponent = function(c) {
|
||||
if (c.i != null) {
|
||||
return {d:c.i, p:c.p};
|
||||
} else {
|
||||
return {i:c.d, p:c.p};
|
||||
}
|
||||
};
|
||||
|
||||
# No need to use append for invert, because the components won't be able to
|
||||
# cancel with one another.
|
||||
text.invert = (op) -> (invertComponent c for c in op.slice().reverse())
|
||||
// No need to use append for invert, because the components won't be able to
|
||||
// cancel with one another.
|
||||
text.invert = op => Array.from(op.slice().reverse()).map((c) => invertComponent(c));
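
// Sketch: components are inverted in reverse order so positions remain valid
// when the inverse is applied.
text.invert([{i:'abc', p:0}, {d:'x', p:5}]); // => [{i:'x', p:5}, {d:'abc', p:0}]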
|
||||
|
||||
|
||||
if WEB?
|
||||
exports.types ||= {}
|
||||
if (typeof WEB !== 'undefined' && WEB !== null) {
|
||||
if (!exports.types) { exports.types = {}; }
|
||||
|
||||
# This is kind of awful - come up with a better way to hook this helper code up.
|
||||
bootstrapTransform(text, transformComponent, checkValidOp, append)
|
||||
// This is kind of awful - come up with a better way to hook this helper code up.
|
||||
bootstrapTransform(text, transformComponent, checkValidOp, append);
|
||||
|
||||
# [] is used to prevent closure from renaming types.text
|
||||
exports.types.text = text
|
||||
else
|
||||
module.exports = text
|
||||
// [] is used to prevent closure from renaming types.text
|
||||
exports.types.text = text;
|
||||
} else {
|
||||
module.exports = text;
|
||||
|
||||
# The text type really shouldn't need this - it should be possible to define
|
||||
# an efficient transform function by making a sort of transform map and passing each
|
||||
# op component through it.
|
||||
require('./helpers').bootstrapTransform(text, transformComponent, checkValidOp, append)
|
||||
// The text type really shouldn't need this - it should be possible to define
|
||||
// an efficient transform function by making a sort of transform map and passing each
|
||||
// op component through it.
|
||||
require('./helpers').bootstrapTransform(text, transformComponent, checkValidOp, append);
|
||||
}
|
||||
|
||||
|
|
|
@ -1,11 +1,11 @@
# This is included at the top of each compiled type file for the web.
// This is included at the top of each compiled type file for the web.

`/**
/**
@const
@type {boolean}
*/
var WEB = true;
`
const WEB = true;

exports = window['sharejs']
const exports = window['sharejs'];