split MongoAWS files
commit d2b1243701 (parent 0c16fbed88)
2 changed files with 123 additions and 118 deletions
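This commit splits the external archive path out of MongoAWS.coffee: the mongoexport/mongoimport wrappers and the S3 up/down streaming helpers are deleted there and re-added in a new MongoAWSexternal.coffee, with archiveDocHistoryExternal and unArchiveDocHistoryExternal renamed to archiveDocHistory and unArchiveDocHistory. A usage sketch follows the diff below.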
services/track-changes/app/coffee/MongoAWS.coffee

@@ -1,9 +1,6 @@
 settings = require "settings-sharelatex"
-child_process = require "child_process"
-mongoUri = require "mongo-uri";
 logger = require "logger-sharelatex"
 AWS = require 'aws-sdk'
-fs = require 'fs'
 S3S = require 's3-streams'
 {db, ObjectId} = require "./mongojs"
 JSONStream = require "JSONStream"
@@ -82,118 +79,3 @@ module.exports = MongoAWS =
 		else
 			logger.log count:ops.length, result:result, "bulked ReadlineStream"
 		cb(err)
-
-
-	archiveDocHistoryExternal: (project_id, doc_id, callback = (error) ->) ->
-		MongoAWS.mongoExportDocHistory doc_id, (error, filepath) ->
-			MongoAWS.s3upStream project_id, doc_id, filepath, callback
-			#delete temp file?
-
-
-	unArchiveDocHistoryExternal: (project_id, doc_id, callback = (error) ->) ->
-		MongoAWS.s3downStream project_id, doc_id, (error, filepath) ->
-			if error == null
-				MongoAWS.mongoImportDocHistory filepath, callback
-				#delete temp file?
-			else
-				callback
-
-	mongoExportDocHistory: (doc_id, callback = (error, filepath) ->) ->
-		uriData = mongoUri.parse(settings.mongo.url);
-		filepath = settings.path.dumpFolder + '/' + doc_id + '.jsonUp'
-
-		args = []
-		args.push '-h'
-		args.push uriData.hosts[0]
-		args.push '-d'
-		args.push uriData.database
-		args.push '-c'
-		args.push 'docHistory'
-		args.push '-q'
-		args.push "{doc_id: ObjectId('#{doc_id}') , expiresAt: {$exists : false} }"
-		args.push '-o'
-		args.push filepath
-
-		proc = child_process.spawn "mongoexport", args
-
-		proc.on "error", callback
-
-		stderr = ""
-		proc.stderr.on "data", (chunk) -> stderr += chunk.toString()
-
-		proc.on "close", (code) ->
-			if code == 0
-				return callback(null,filepath)
-			else
-				return callback(new Error("mongodump failed: #{stderr}"),null)
-
-	mongoImportDocHistory: (filepath, callback = (error) ->) ->
-
-		uriData = mongoUri.parse(settings.mongo.url);
-
-		args = []
-		args.push '-h'
-		args.push uriData.hosts[0]
-		args.push '-d'
-		args.push uriData.database
-		args.push '-c'
-		args.push 'docHistory'
-		args.push '--file'
-		args.push filepath
-
-		proc = child_process.spawn "mongoimport", args
-
-		proc.on "error", callback
-
-		stderr = ""
-		proc.stderr.on "data", (chunk) -> stderr += chunk.toString()
-
-		proc.on "close", (code) ->
-			if code == 0
-				return callback(null,filepath)
-			else
-				return callback(new Error("mongodump failed: #{stderr}"),null)
-
-	s3upStream: (project_id, doc_id, filepath, callback = (error) ->) ->
-
-		AWS.config.update {
-			accessKeyId: settings.filestore.s3.key
-			secretAccessKey: settings.filestore.s3.secret
-		}
-
-		upload = S3S.WriteStream new AWS.S3(), {
-			"Bucket": settings.filestore.stores.user_files,
-			"Key": project_id+"/changes-"+doc_id
-		}
-
-		fs.createReadStream(filepath)
-			.on 'open', (obj) ->
-				return 1
-			.pipe(upload)
-			.on 'finish', () ->
-				return callback(null)
-			.on 'error', (err) ->
-				return callback(err)
-
-	s3downStream: (project_id, doc_id, callback = (error, filepath) ->) ->
-
-		filepath = settings.path.dumpFolder + '/' + doc_id + '.jsonDown'
-
-		AWS.config.update {
-			accessKeyId: settings.filestore.s3.key
-			secretAccessKey: settings.filestore.s3.secret
-		}
-
-		download = S3S.ReadStream new AWS.S3(), {
-			"Bucket": settings.filestore.stores.user_files,
-			"Key": project_id+"/changes-"+doc_id
-		}
-
-		download
-			.on 'open', (obj) ->
-				return 1
-			.pipe(fs.createWriteStream(filepath))
-			.on 'finish', () ->
-				return callback(null, filepath)
-			.on 'error', (err) ->
-				return callback(err, null)
-
services/track-changes/app/coffee/MongoAWSexternal.coffee (new file, 123 additions)
@@ -0,0 +1,123 @@
+settings = require "settings-sharelatex"
+child_process = require "child_process"
+mongoUri = require "mongo-uri";
+logger = require "logger-sharelatex"
+AWS = require 'aws-sdk'
+fs = require 'fs'
+S3S = require 's3-streams'
+
+module.exports = MongoAWSexternal =
+
+	archiveDocHistory: (project_id, doc_id, callback = (error) ->) ->
+		MongoAWS.mongoExportDocHistory doc_id, (error, filepath) ->
+			MongoAWS.s3upStream project_id, doc_id, filepath, callback
+			#delete temp file?
+
+
+	unArchiveDocHistory: (project_id, doc_id, callback = (error) ->) ->
+		MongoAWS.s3downStream project_id, doc_id, (error, filepath) ->
+			if error == null
+				MongoAWS.mongoImportDocHistory filepath, callback
+				#delete temp file?
+			else
+				callback
+
+	mongoExportDocHistory: (doc_id, callback = (error, filepath) ->) ->
+		uriData = mongoUri.parse(settings.mongo.url);
+		filepath = settings.path.dumpFolder + '/' + doc_id + '.jsonUp'
+
+		args = []
+		args.push '-h'
+		args.push uriData.hosts[0]
+		args.push '-d'
+		args.push uriData.database
+		args.push '-c'
+		args.push 'docHistory'
+		args.push '-q'
+		args.push "{doc_id: ObjectId('#{doc_id}') , expiresAt: {$exists : false} }"
+		args.push '-o'
+		args.push filepath
+
+		proc = child_process.spawn "mongoexport", args
+
+		proc.on "error", callback
+
+		stderr = ""
+		proc.stderr.on "data", (chunk) -> stderr += chunk.toString()
+
+		proc.on "close", (code) ->
+			if code == 0
+				return callback(null,filepath)
+			else
+				return callback(new Error("mongodump failed: #{stderr}"),null)
+
+	mongoImportDocHistory: (filepath, callback = (error) ->) ->
+
+		uriData = mongoUri.parse(settings.mongo.url);
+
+		args = []
+		args.push '-h'
+		args.push uriData.hosts[0]
+		args.push '-d'
+		args.push uriData.database
+		args.push '-c'
+		args.push 'docHistory'
+		args.push '--file'
+		args.push filepath
+
+		proc = child_process.spawn "mongoimport", args
+
+		proc.on "error", callback
+
+		stderr = ""
+		proc.stderr.on "data", (chunk) -> stderr += chunk.toString()
+
+		proc.on "close", (code) ->
+			if code == 0
+				return callback(null,filepath)
+			else
+				return callback(new Error("mongodump failed: #{stderr}"),null)
+
+	s3upStream: (project_id, doc_id, filepath, callback = (error) ->) ->
+
+		AWS.config.update {
+			accessKeyId: settings.filestore.s3.key
+			secretAccessKey: settings.filestore.s3.secret
+		}
+
+		upload = S3S.WriteStream new AWS.S3(), {
+			"Bucket": settings.filestore.stores.user_files,
+			"Key": project_id+"/changes-"+doc_id
+		}
+
+		fs.createReadStream(filepath)
+			.on 'open', (obj) ->
+				return 1
+			.pipe(upload)
+			.on 'finish', () ->
+				return callback(null)
+			.on 'error', (err) ->
+				return callback(err)
+
+	s3downStream: (project_id, doc_id, callback = (error, filepath) ->) ->
+
+		filepath = settings.path.dumpFolder + '/' + doc_id + '.jsonDown'
+
+		AWS.config.update {
+			accessKeyId: settings.filestore.s3.key
+			secretAccessKey: settings.filestore.s3.secret
+		}
+
+		download = S3S.ReadStream new AWS.S3(), {
+			"Bucket": settings.filestore.stores.user_files,
+			"Key": project_id+"/changes-"+doc_id
+		}
+
+		download
+			.on 'open', (obj) ->
+				return 1
+			.pipe(fs.createWriteStream(filepath))
+			.on 'finish', () ->
+				return callback(null, filepath)
+			.on 'error', (err) ->
+				return callback(err, null)
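Note that the new module's methods still call their siblings through the old MongoAWS name, which MongoAWSexternal.coffee neither defines nor requires. Below is a minimal usage sketch, assuming it runs inside the track-changes service with settings.filestore.s3 and settings.path.dumpFolder configured; the require path, the global alias, and the example ids are illustrative assumptions, not part of the commit.

# Illustrative wiring only -- not part of this commit.
MongoAWSexternal = require "./MongoAWSexternal"

# As committed, the methods reference `MongoAWS.*`, a name the new file never
# requires; pointing the global at the module itself lets those lookups
# resolve at runtime (an assumed workaround, not from the diff).
global.MongoAWS = MongoAWSexternal

project_id = "53d2f1a80000000000000001"   # hypothetical ObjectId strings
doc_id     = "53d2f1a80000000000000002"

# mongoexport the doc history to a temp file, then stream it up to S3.
MongoAWSexternal.archiveDocHistory project_id, doc_id, (error) ->
	return console.error "archive failed:", error if error?
	# Stream the dump back down from S3 and mongoimport it.
	MongoAWSexternal.unArchiveDocHistory project_id, doc_id, (error) ->
		return console.error "restore failed:", error if error?
		console.log "round trip complete for doc", doc_id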