Merge pull request #31 from sharelatex/bg-use-redis-sharelatex-cluster

use redis sharelatex v1.0.0

Commit 5fb4a7fdc9: 6 changed files with 1401 additions and 888 deletions
@@ -1,14 +1,12 @@
 Settings = require "settings-sharelatex"
 redis = require("redis-sharelatex")
-rclient = redis.createClient(Settings.redis.web)
-
-rawUpdatesKey = (doc_id) -> "UncompressedHistoryOps:#{doc_id}"
-docsWithHistoryOpsKey = (project_id) -> "DocsWithHistoryOps:#{project_id}"
+rclient = redis.createClient(Settings.redis.history)
+Keys = Settings.redis.history.key_schema
 
 module.exports = RedisManager =
 
 	getOldestDocUpdates: (doc_id, batchSize, callback = (error, jsonUpdates) ->) ->
-		key = rawUpdatesKey(doc_id)
+		key = Keys.uncompressedHistoryOps({doc_id})
 		rclient.lrange key, 0, batchSize - 1, callback
 
 	expandDocUpdates: (jsonUpdates, callback = (error, rawUpdates) ->) ->
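The core of this first hunk is that key construction moves out of module-local helper functions and into a key_schema supplied by settings, so RedisManager no longer hard-codes how doc and project ids map to Redis keys. A minimal sketch of that indirection, using the two schema functions shown in the diff; the cluster variant with {...} hash tags is an assumption about how a Redis Cluster deployment might keep all of a document's keys in one hash slot, not something defined in this PR:

	# The key_schema is just a map of functions from an id object to a key string,
	# so an alternative schema can be swapped in via settings without touching RedisManager.
	keySchema =
		uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}"
		docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}"

	# Hypothetical cluster-friendly variant (not in this PR): wrapping the id in {...}
	# makes it a Redis cluster hash tag, so all keys for one doc or project share a slot.
	clusterKeySchema =
		uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:{#{doc_id}}"
		docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:{#{project_id}}"

	console.log keySchema.uncompressedHistoryOps({doc_id: "doc-id-123"})
	# "UncompressedHistoryOps:doc-id-123"
	console.log clusterKeySchema.docsWithHistoryOps({project_id: "project-id-123"})
	# "DocsWithHistoryOps:{project-id-123}"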
@@ -22,16 +20,17 @@ module.exports = RedisManager =
 		multi = rclient.multi()
 		# Delete all the updates which have been applied (exact match)
 		for update in docUpdates or []
-			multi.lrem rawUpdatesKey(doc_id), 0, update
-		# It's ok to delete the doc_id from the set here. Even though the list
-		# of updates may not be empty, we will continue to process it until it is.
-		multi.srem docsWithHistoryOpsKey(project_id), doc_id
-		multi.exec (error, results) ->
-			return callback(error) if error?
-			callback null
+			multi.lrem Keys.uncompressedHistoryOps({doc_id}), 0, update
+		multi.exec (error, results) ->
+			return callback(error) if error?
+			# It's ok to delete the doc_id from the set here. Even though the list
+			# of updates may not be empty, we will continue to process it until it is.
+			rclient.srem Keys.docsWithHistoryOps({project_id}), doc_id, (error) ->
+				return callback(error) if error?
+				callback null
 
 	getDocIdsWithHistoryOps: (project_id, callback = (error, doc_ids) ->) ->
-		rclient.smembers docsWithHistoryOpsKey(project_id), callback
+		rclient.smembers Keys.docsWithHistoryOps({project_id}), callback
 
 	# iterate over keys asynchronously using redis scan (non-blocking)
 	_getKeys: (pattern, callback) ->
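Besides switching to Keys.*, this hunk also pulls the project-set srem out of the multi: the transaction now only touches the document's update list, and the set member is removed afterwards in its own command. The diff does not say why, but a plausible reason, given the cluster theme of the branch, is that Redis Cluster rejects a MULTI/EXEC whose keys hash to different slots (the CROSSSLOT error), and UncompressedHistoryOps:<doc_id> and DocsWithHistoryOps:<project_id> will generally not share a slot. A compressed sketch of the new flow, restated from the hunk above (the wrapper name is mine; rclient and Keys are the module-level values from the first hunk):

	deleteAppliedDocUpdatesSketch = (project_id, doc_id, docUpdates, callback = (error) ->) ->
		multi = rclient.multi()
		# every key in the transaction belongs to this one doc
		for update in docUpdates or []
			multi.lrem Keys.uncompressedHistoryOps({doc_id}), 0, update
		multi.exec (error, results) ->
			return callback(error) if error?
			# the project-level set is touched outside the transaction
			rclient.srem Keys.docsWithHistoryOps({project_id}), doc_id, callback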
@@ -57,16 +56,18 @@ module.exports = RedisManager =
 
 	# this will only work on single node redis, not redis cluster
 	getProjectIdsWithHistoryOps: (callback = (error, project_ids) ->) ->
-		RedisManager._getKeys docsWithHistoryOpsKey("*"), (error, project_keys) ->
+		return callback(new Error("not supported")) if rclient.nodes?
+		RedisManager._getKeys Keys.docsWithHistoryOps({project_id:"*"}), (error, project_keys) ->
 			return callback(error) if error?
 			project_ids = RedisManager._extractIds project_keys
 			callback(error, project_ids)
 
 	# this will only work on single node redis, not redis cluster
 	getAllDocIdsWithHistoryOps: (callback = (error, doc_ids) ->) ->
+		return callback(new Error("not supported")) if rclient.nodes?
 		# return all the docids, to find dangling history entries after
 		# everything is flushed.
-		RedisManager._getKeys rawUpdatesKey("*"), (error, doc_keys) ->
+		RedisManager._getKeys Keys.uncompressedHistoryOps({doc_id:"*"}), (error, doc_keys) ->
 			return callback(error) if error?
 			doc_ids = RedisManager._extractIds doc_keys
 			callback(error, doc_ids)
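The two remaining methods scan the keyspace by pattern, which has no good equivalent once keys are spread over a cluster, so both now bail out early when the client looks like a cluster client. Detecting that via a nodes property comes straight from the diff; how the matched keys are turned back into ids does not, so the extraction helper below is a hypothetical stand-in for RedisManager._extractIds:

	# Hedged sketch of the single-node path only; _extractIdsSketch is hypothetical.
	_extractIdsSketch = (keys) ->
		key.split(":")[1] for key in keys

	getProjectIdsWithHistoryOpsSketch = (callback = (error, project_ids) ->) ->
		# refuse on a cluster client (detected via the nodes property, as in the diff)
		return callback(new Error("not supported")) if rclient.nodes?
		RedisManager._getKeys Keys.docsWithHistoryOps({project_id: "*"}), (error, project_keys) ->
			return callback(error) if error?
			callback(null, _extractIdsSketch(project_keys))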
@@ -22,6 +22,13 @@ module.exports =
 			host: "localhost"
 			port: 6379
 			pass: ""
+		history:
+			port:"6379"
+			host:"localhost"
+			password:""
+			key_schema:
+				uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}"
+				docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}"
 
 	trackchanges:
 		s3:
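This settings hunk is what the new Settings.redis.history lookups resolve against: connection details plus the key_schema functions used throughout the RedisManager hunks above. A short sketch of the consuming side, built only from calls that appear elsewhere in this PR; a deployment's own settings file could override the history block (host, password, or key_schema) without touching application code:

	Settings = require "settings-sharelatex"
	redis = require "redis-sharelatex"

	# the same two lines appear at the top of the RedisManager hunk above
	rclient = redis.createClient(Settings.redis.history)
	Keys = Settings.redis.history.key_schema

	# list the docs in a project that still have uncompressed history ops queued
	rclient.smembers Keys.docsWithHistoryOps({project_id: "project-id-123"}), (error, doc_ids) ->
		throw error if error?
		console.log "docs with pending history ops:", doc_ids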
services/track-changes/npm-shrinkwrap.json (generated, 2619 lines): file diff suppressed because it is too large.
@@ -19,7 +19,7 @@
   "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.7.1",
   "request": "~2.33.0",
   "requestretry": "^1.12.0",
-  "redis-sharelatex": "~0.0.9",
+  "redis-sharelatex": "git+https://github.com/sharelatex/redis-sharelatex.git#v1.0.0",
   "redis": "~0.10.1",
   "underscore": "~1.7.0",
   "mongo-uri": "^0.1.2",
@@ -1,9 +1,10 @@
 async = require 'async'
 zlib = require 'zlib'
 request = require "request"
-rclient = require("redis").createClient() # Only works locally for now
-{db, ObjectId} = require "../../../../app/js/mongojs"
+Settings = require "settings-sharelatex"
+rclient = require("redis-sharelatex").createClient(Settings.redis.history) # Only works locally for now
+Keys = Settings.redis.history.key_schema
+{db, ObjectId} = require "../../../../app/js/mongojs"
 
 module.exports = TrackChangesClient =
 	flushAndGetCompressedUpdates: (project_id, doc_id, callback = (error, updates) ->) ->
@@ -49,9 +50,9 @@ module.exports = TrackChangesClient =
 		}, callback
 
 	pushRawUpdates: (project_id, doc_id, updates, callback = (error) ->) ->
-		rclient.sadd "DocsWithHistoryOps:#{project_id}", doc_id, (error) ->
+		rclient.sadd Keys.docsWithHistoryOps({project_id}), doc_id, (error) ->
 			return callback(error) if error?
-			rclient.rpush "UncompressedHistoryOps:#{doc_id}", (JSON.stringify(u) for u in updates)..., callback
+			rclient.rpush Keys.uncompressedHistoryOps({doc_id}), (JSON.stringify(u) for u in updates)..., callback
 
 	getDiff: (project_id, doc_id, from, to, callback = (error, diff) ->) ->
 		request.get {
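For context, a hypothetical acceptance-test call to the helper above; the shape of each update object (op, v, meta) is an assumption about the track-changes update format and is not defined anywhere in this diff:

	# assumed update payload shape, for illustration only
	updates = [
		{ op: [{ i: "hello world", p: 0 }], v: 1, meta: { user_id: "user-id-1", ts: Date.now() } }
	]
	TrackChangesClient.pushRawUpdates "project-id-123", "doc-id-123", updates, (error) ->
		throw error if error?
		# the updates are now queued under the keys produced by Keys.* above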
@@ -14,7 +14,10 @@ describe "RedisManager", ->
 				multi: () => @rclient
 			"settings-sharelatex":
 				redis:
-					web:{}
+					history:
+						key_schema:
+							uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}"
+							docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}"
 		@doc_id = "doc-id-123"
 		@project_id = "project-id-123"
 		@batchSize = 100
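Because the stubbed settings now carry a key_schema, unit tests can assert against exact key strings without a real settings file or Redis. A hedged sketch of such an expectation, assuming the usual sinon-stubbed @rclient and chai should style of the ShareLaTeX test suites; the concrete test body is not part of this diff:

	# assumes sinon, chai's should, @RedisManager, and @callback are set up in beforeEach
	it "should read the oldest updates from the doc's history list", ->
		@rclient.lrange = sinon.stub().callsArgWith(3, null, [])
		@RedisManager.getOldestDocUpdates @doc_id, @batchSize, @callback
		@rclient.lrange
			.calledWith("UncompressedHistoryOps:#{@doc_id}", 0, @batchSize - 1)
			.should.equal true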