Merge pull request #31 from sharelatex/bg-use-redis-sharelatex-cluster

use redis sharelatex v1.0.0
This commit is contained in:
Brian Gough 2017-05-02 15:00:17 +01:00 committed by GitHub
commit 5fb4a7fdc9
6 changed files with 1401 additions and 888 deletions

View file

@@ -1,14 +1,12 @@
Settings = require "settings-sharelatex" Settings = require "settings-sharelatex"
redis = require("redis-sharelatex") redis = require("redis-sharelatex")
rclient = redis.createClient(Settings.redis.web) rclient = redis.createClient(Settings.redis.history)
Keys = Settings.redis.history.key_schema
rawUpdatesKey = (doc_id) -> "UncompressedHistoryOps:#{doc_id}"
docsWithHistoryOpsKey = (project_id) -> "DocsWithHistoryOps:#{project_id}"
module.exports = RedisManager = module.exports = RedisManager =
getOldestDocUpdates: (doc_id, batchSize, callback = (error, jsonUpdates) ->) -> getOldestDocUpdates: (doc_id, batchSize, callback = (error, jsonUpdates) ->) ->
key = rawUpdatesKey(doc_id) key = Keys.uncompressedHistoryOps({doc_id})
rclient.lrange key, 0, batchSize - 1, callback rclient.lrange key, 0, batchSize - 1, callback
expandDocUpdates: (jsonUpdates, callback = (error, rawUpdates) ->) -> expandDocUpdates: (jsonUpdates, callback = (error, rawUpdates) ->) ->
@@ -22,16 +20,17 @@ module.exports = RedisManager =
multi = rclient.multi() multi = rclient.multi()
# Delete all the updates which have been applied (exact match) # Delete all the updates which have been applied (exact match)
for update in docUpdates or [] for update in docUpdates or []
multi.lrem rawUpdatesKey(doc_id), 0, update multi.lrem Keys.uncompressedHistoryOps({doc_id}), 0, update
multi.exec (error, results) ->
return callback(error) if error?
# It's ok to delete the doc_id from the set here. Even though the list # It's ok to delete the doc_id from the set here. Even though the list
# of updates may not be empty, we will continue to process it until it is. # of updates may not be empty, we will continue to process it until it is.
multi.srem docsWithHistoryOpsKey(project_id), doc_id rclient.srem Keys.docsWithHistoryOps({project_id}), doc_id, (error) ->
multi.exec (error, results) ->
return callback(error) if error? return callback(error) if error?
callback null callback null
getDocIdsWithHistoryOps: (project_id, callback = (error, doc_ids) ->) -> getDocIdsWithHistoryOps: (project_id, callback = (error, doc_ids) ->) ->
rclient.smembers docsWithHistoryOpsKey(project_id), callback rclient.smembers Keys.docsWithHistoryOps({project_id}), callback
# iterate over keys asynchronously using redis scan (non-blocking) # iterate over keys asynchronously using redis scan (non-blocking)
_getKeys: (pattern, callback) -> _getKeys: (pattern, callback) ->
@@ -57,16 +56,18 @@ module.exports = RedisManager =
# this will only work on single node redis, not redis cluster # this will only work on single node redis, not redis cluster
getProjectIdsWithHistoryOps: (callback = (error, project_ids) ->) -> getProjectIdsWithHistoryOps: (callback = (error, project_ids) ->) ->
RedisManager._getKeys docsWithHistoryOpsKey("*"), (error, project_keys) -> return callback(new Error("not supported")) if rclient.nodes?
RedisManager._getKeys Keys.docsWithHistoryOps({project_id:"*"}), (error, project_keys) ->
return callback(error) if error? return callback(error) if error?
project_ids = RedisManager._extractIds project_keys project_ids = RedisManager._extractIds project_keys
callback(error, project_ids) callback(error, project_ids)
# this will only work on single node redis, not redis cluster # this will only work on single node redis, not redis cluster
getAllDocIdsWithHistoryOps: (callback = (error, doc_ids) ->) -> getAllDocIdsWithHistoryOps: (callback = (error, doc_ids) ->) ->
return callback(new Error("not supported")) if rclient.nodes?
# return all the docids, to find dangling history entries after # return all the docids, to find dangling history entries after
# everything is flushed. # everything is flushed.
RedisManager._getKeys rawUpdatesKey("*"), (error, doc_keys) -> RedisManager._getKeys Keys.uncompressedHistoryOps({doc_id:"*"}), (error, doc_keys) ->
return callback(error) if error? return callback(error) if error?
doc_ids = RedisManager._extractIds doc_keys doc_ids = RedisManager._extractIds doc_keys
callback(error, doc_ids) callback(error, doc_ids)

View file

@@ -22,6 +22,13 @@ module.exports =
host: "localhost" host: "localhost"
port: 6379 port: 6379
pass: "" pass: ""
history:
port:"6379"
host:"localhost"
password:""
key_schema:
uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}"
docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}"
trackchanges: trackchanges:
s3: s3:

File diff suppressed because it is too large Load diff

View file

@@ -19,7 +19,7 @@
"metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.7.1", "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.7.1",
"request": "~2.33.0", "request": "~2.33.0",
"requestretry": "^1.12.0", "requestretry": "^1.12.0",
"redis-sharelatex": "~0.0.9", "redis-sharelatex": "git+https://github.com/sharelatex/redis-sharelatex.git#v1.0.0",
"redis": "~0.10.1", "redis": "~0.10.1",
"underscore": "~1.7.0", "underscore": "~1.7.0",
"mongo-uri": "^0.1.2", "mongo-uri": "^0.1.2",

View file

@@ -1,9 +1,10 @@
async = require 'async' async = require 'async'
zlib = require 'zlib' zlib = require 'zlib'
request = require "request" request = require "request"
rclient = require("redis").createClient() # Only works locally for now
{db, ObjectId} = require "../../../../app/js/mongojs"
Settings = require "settings-sharelatex" Settings = require "settings-sharelatex"
rclient = require("redis-sharelatex").createClient(Settings.redis.history) # Only works locally for now
Keys = Settings.redis.history.key_schema
{db, ObjectId} = require "../../../../app/js/mongojs"
module.exports = TrackChangesClient = module.exports = TrackChangesClient =
flushAndGetCompressedUpdates: (project_id, doc_id, callback = (error, updates) ->) -> flushAndGetCompressedUpdates: (project_id, doc_id, callback = (error, updates) ->) ->
@@ -49,9 +50,9 @@ module.exports = TrackChangesClient =
}, callback }, callback
pushRawUpdates: (project_id, doc_id, updates, callback = (error) ->) -> pushRawUpdates: (project_id, doc_id, updates, callback = (error) ->) ->
rclient.sadd "DocsWithHistoryOps:#{project_id}", doc_id, (error) -> rclient.sadd Keys.docsWithHistoryOps({project_id}), doc_id, (error) ->
return callback(error) if error? return callback(error) if error?
rclient.rpush "UncompressedHistoryOps:#{doc_id}", (JSON.stringify(u) for u in updates)..., callback rclient.rpush Keys.uncompressedHistoryOps({doc_id}), (JSON.stringify(u) for u in updates)..., callback
getDiff: (project_id, doc_id, from, to, callback = (error, diff) ->) -> getDiff: (project_id, doc_id, from, to, callback = (error, diff) ->) ->
request.get { request.get {

View file

@@ -14,7 +14,10 @@ describe "RedisManager", ->
multi: () => @rclient multi: () => @rclient
"settings-sharelatex": "settings-sharelatex":
redis: redis:
web:{} history:
key_schema:
uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}"
docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}"
@doc_id = "doc-id-123" @doc_id = "doc-id-123"
@project_id = "project-id-123" @project_id = "project-id-123"
@batchSize = 100 @batchSize = 100