mirror of
https://github.com/overleaf/overleaf.git
synced 2025-04-09 05:16:02 +00:00
Archive the docChanges list to S3
This commit is contained in:
parent
ae047ecf76
commit
028fe2fa03
6 changed files with 67 additions and 25 deletions
|
@ -27,6 +27,9 @@ app.post "/project/:project_id/doc/:doc_id/version/:version/restore", HttpContro
|
|||
|
||||
# Pack a single doc's history on demand.
app.post "/doc/:doc_id/pack", HttpController.packDoc

# The archive endpoint only makes sense when an S3 backend is configured.
if Settings.filestore?.backend == "s3"
	app.get '/project/:project_id/archive', HttpController.archiveProject

packWorker = null # use a single packing worker
|
||||
|
||||
app.post "/pack", (req, res, next) ->
|
||||
|
|
49
services/track-changes/app/coffee/DocArchiveManager.coffee
Normal file
49
services/track-changes/app/coffee/DocArchiveManager.coffee
Normal file
|
@ -0,0 +1,49 @@
|
|||
MongoManager = require "./MongoManager"
|
||||
logger = require "logger-sharelatex"
|
||||
_ = require "underscore"
|
||||
async = require "async"
|
||||
settings = require("settings-sharelatex")
|
||||
request = require("request")
|
||||
crypto = require("crypto")
|
||||
thirtySeconds = 30 * 1000
|
||||
|
||||
module.exports = DocArchiveManager =

	# Archive the change history of every doc in a project to S3.
	# Docs are archived one at a time (async.series) to limit concurrent
	# S3 uploads; the callback fires on the first error or when all done.
	archiveAllDocsChanges: (project_id, callback = (error, docs) ->) ->
		MongoManager.getProjectsDocs project_id, (error, docs) ->
			if error?
				return callback(error)
			else if !docs?
				return callback new Error("No docs for project #{project_id}")
			jobs = _.map docs, (doc) ->
				(cb) -> DocArchiveManager.archiveDocChanges project_id, doc._id, cb
			async.series jobs, callback

	# Upload a single doc's change history to S3 at "<project_id>/changes-<doc_id>"
	# and verify the upload by comparing S3's ETag (md5 of the stored object)
	# against an md5 of the JSON we sent.
	archiveDocChanges: (project_id, doc_id, callback)->
		MongoManager.getDocChanges doc_id, (error, docChanges) ->
			# bug fix: the mongo error was previously ignored, which would
			# upload "undefined" changes or crash further down
			return callback(error) if error?
			logger.log project_id: project_id, doc_id: doc_id, "sending doc changes to s3"
			options = DocArchiveManager.buildS3Options(docChanges, project_id+"/changes-"+doc_id)
			request.put options, (err, res)->
				# bug fix: check err before touching res — on a transport
				# failure res is undefined and res.headers would throw
				if err? || res.statusCode != 200
					logger.err err:err, res:res, "something went wrong archiving doc changes in aws"
					return callback new Error("Error in S3 request")
				md5lines = crypto.createHash("md5").update(JSON.stringify(docChanges)).digest("hex")
				md5response = res.headers.etag.toString().replace(/\"/g, '')
				if md5lines != md5response
					logger.err responseMD5:md5response, linesMD5:md5lines, "error in response md5 from s3"
					return callback new Error("Error in S3 md5 response")
				#MongoManager.markDocAsArchived doc._id, doc.rev, (error) ->
				#	return callback(error) if error?
				callback()

	# Build request options for a signed S3 call storing `content` (JSON)
	# under `key` in the user_files bucket; 30s timeout.
	buildS3Options: (content, key)->
		return {
			aws:
				key: settings.filestore.s3.key
				secret: settings.filestore.s3.secret
				bucket: settings.filestore.stores.user_files
			timeout: thirtySeconds
			json: content
			uri:"https://#{settings.filestore.stores.user_files}.s3.amazonaws.com/#{key}"
		}
|
|
@ -1,24 +0,0 @@
|
|||
MongoManager = require "./MongoManager"
|
||||
Errors = require "./Errors"
|
||||
logger = require "logger-sharelatex"
|
||||
_ = require "underscore"
|
||||
async = require "async"
|
||||
settings = require("settings-sharelatex")
|
||||
request = require("request")
|
||||
crypto = require("crypto")
|
||||
thirtySeconds = 30 * 1000
|
||||
|
||||
module.exports = DocArchiveManager =
|
||||
|
||||
buildS3Options: (content, key)->
|
||||
return {
|
||||
aws:
|
||||
key: settings.filestore.s3.key
|
||||
secret: settings.filestore.s3.secret
|
||||
bucket: settings.filestore.stores.user_files
|
||||
timeout: thirtySeconds
|
||||
json: content
|
||||
#headers:
|
||||
# 'content-md5': crypto.createHash("md5").update(JSON.stringify(content)).digest("hex")
|
||||
uri:"https://#{settings.filestore.stores.user_files}.s3.amazonaws.com/#{key}"
|
||||
}
|
|
@ -3,6 +3,7 @@ DiffManager = require "./DiffManager"
|
|||
PackManager = require "./PackManager"
|
||||
RestoreManager = require "./RestoreManager"
|
||||
logger = require "logger-sharelatex"
|
||||
DocArchiveManager = require "./DocArchiveManager"
|
||||
|
||||
module.exports = HttpController =
|
||||
flushDoc: (req, res, next = (error) ->) ->
|
||||
|
@ -66,3 +67,10 @@ module.exports = HttpController =
|
|||
RestoreManager.restoreToBeforeVersion project_id, doc_id, version, user_id, (error) ->
|
||||
return next(error) if error?
|
||||
res.send 204
|
||||
|
||||
archiveProject: (req, res, next = (error) ->) ->
|
||||
project_id = req.params.project_id
|
||||
logger.log project_id: project_id, "archiving all track changes"
|
||||
DocArchiveManager.archiveAllDocsChanges project_id, (error) ->
|
||||
return next(error) if error?
|
||||
res.send 204
|
|
@ -126,3 +126,9 @@ module.exports = MongoManager =
|
|||
# For finding documents which need packing
|
||||
db.docHistoryStats.ensureIndex { doc_id: 1 }, { background: true }
|
||||
db.docHistoryStats.ensureIndex { updates: -1, doc_id: 1 }, { background: true }
|
||||
|
||||
getProjectsDocs: (project_id, callback)->
|
||||
db.docs.find project_id: ObjectId(project_id.toString()), {}, callback
|
||||
|
||||
getDocChanges: (doc_id, callback)->
|
||||
db.docHistory.find doc_id: ObjectId(doc_id.toString()), {}, callback
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
Settings = require "settings-sharelatex"
mongojs = require "mongojs"
# defect fixed: the scraped diff collapsed the pre- and post-change `db =`
# assignments into two consecutive statements; only the post-change one is
# kept. "docs" was added so DocArchiveManager can enumerate a project's docs.
db = mongojs.connect(Settings.mongo.url, ["docHistory", "projectHistoryMetaData", "docHistoryStats", "docs"])
module.exports =
	db: db
	ObjectId: mongojs.ObjectId
||||
|
|
Loading…
Add table
Reference in a new issue