settings = require "settings-sharelatex"
logger = require "logger-sharelatex"
AWS = require 'aws-sdk'
S3S = require 's3-streams'
{db, ObjectId} = require "./mongojs"
JSONStream = require "JSONStream"
ReadlineStream = require "readline-stream"
BSON = db.bson.BSON
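
# MongoAWS archives the change history of a single doc out of the
# docHistory collection into S3, and restores it again on demand.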
module.exports = MongoAWS =

	MAX_SIZE: 1024*1024 # flush a bulk write once buffered ops reach ~1MB of BSON
	MAX_COUNT: 1024 # flush a bulk write once 1024 ops are buffered

	archiveDocHistory: (project_id, doc_id, _callback = (error) ->) ->
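
		# call the supplied callback at most once, even if both the
		# stream 'error' and 'finish' events fire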
		callback = (args...) ->
			_callback(args...)
			_callback = () ->

		query = {
			doc_id: ObjectId(doc_id)
			expiresAt: {$exists: false}
		}
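
		# reuse the filestore S3 credentials; the archive lives in the
		# user_files bucket under <project_id>/changes-<doc_id>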
		AWS.config.update {
			accessKeyId: settings.filestore.s3.key
			secretAccessKey: settings.filestore.s3.secret
		}

		upload = S3S.WriteStream new AWS.S3(), {
			"Bucket": settings.filestore.stores.user_files,
			"Key": project_id + "/changes-" + doc_id
		}
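
		# stream the doc history out of mongo, stringify it as a JSON
		# array and pipe it straight into the S3 upload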
		db.docHistory.find(query)
			.on 'error', (err) ->
				callback(err)
			.pipe JSONStream.stringify()
			.pipe upload
			.on 'error', (err) ->
				callback(err)
			.on 'finish', () ->
				return callback(null)

	unArchiveDocHistory: (project_id, doc_id, _callback = (error) ->) ->
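
		# call the supplied callback at most once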
		callback = (args...) ->
			_callback(args...)
			_callback = () ->

		AWS.config.update {
			accessKeyId: settings.filestore.s3.key
			secretAccessKey: settings.filestore.s3.secret
		}

		download = S3S.ReadStream new AWS.S3(), {
			"Bucket": settings.filestore.stores.user_files,
			"Key": project_id + "/changes-" + doc_id
		}, {
			encoding: "utf8"
		}

		lineStream = new ReadlineStream()
		ops = [] # ops buffered for the next bulk write
		sz = 0   # accumulated BSON size of the buffered ops
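
		# read the archive line by line, buffering ops until the count or
		# size threshold is reached, then flush them to mongo in bulk
		# while the download is paused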
		download
			.on 'open', (obj) ->
				return 1
			.on 'error', (err) ->
				callback(err)
			.pipe lineStream
			.on 'data', (line) ->
				# skip the '[', ',' and ']' lines that JSONStream.stringify
				# wraps around and between the serialized ops
				if line.length > 2
					ops.push(JSON.parse(line))
					sz += BSON.calculateObjectSize(ops[ops.length - 1])
					if ops.length >= MongoAWS.MAX_COUNT || sz >= MongoAWS.MAX_SIZE
						download.pause()
						# hand a copy to handleBulk, since ops is cleared below
						# before the bulk write completes
						MongoAWS.handleBulk ops.slice(0), () ->
							download.resume()
						ops.splice(0, ops.length)
						sz = 0
			.on 'end', () ->
				# flush any ops left in the buffer
				MongoAWS.handleBulk ops, callback
			.on 'error', (err) ->
				return callback(err)

	handleBulk: (ops, cb) ->
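		# upsert each op by _id so that re-running an unarchive is
		# idempotent and never duplicates history entries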
		bulk = db.docHistory.initializeUnorderedBulkOp()

		# the ids were serialized to plain strings on the way into S3;
		# convert them back to ObjectIds before writing to mongo
		for op in ops
			op._id = ObjectId(op._id)
			op.doc_id = ObjectId(op.doc_id)
			op.project_id = ObjectId(op.project_id)
			bulk.find({_id: op._id}).upsert().updateOne(op)

		if ops.length > 0
			bulk.execute (err, result) ->
				if err?
					logger.error err: err, "error bulking ReadlineStream"
				else
					logger.log count: ops.length, result: result, size: BSON.calculateObjectSize(ops), "bulked ReadlineStream"
				cb(err)
		else
			cb()