increase logging on s3 operations

Brian Gough 2016-01-12 10:36:00 +00:00
parent ca1f1dc944
commit 6199532d08
2 changed files with 7 additions and 2 deletions


@@ -78,6 +78,7 @@ module.exports = DocArchiveManager =
 		MongoManager.getArchivedDocChanges doc_id, (error, count) ->
 			return callback(error) if error?
 			if count == 0
+				logger.log {project_id, doc_id}, "no changes marked as in s3, not unarchiving"
 				return callback()
 			else
 				MongoAWS.unArchiveDocHistory project_id, doc_id, (error) ->
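
The added line follows the structured-logging convention used throughout these services: the first argument to logger.log is a metadata object (here project_id and doc_id) and the second is the message, so the identifiers become queryable fields on the log entry rather than being interpolated into the string. A minimal sketch of that pattern, assuming the bunyan-based logger-sharelatex module these services typically require (the helper name and the second message below are hypothetical):

logger = require "logger-sharelatex"   # assumed logger module, as used across ShareLaTeX services

# Hypothetical helper illustrating the metadata-first calling convention of the added lines.
logUnarchiveDecision = (project_id, doc_id, count) ->
	if count == 0
		logger.log {project_id, doc_id}, "no changes marked as in s3, not unarchiving"
	else
		logger.log {project_id, doc_id, count}, "unarchiving doc changes from s3"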


@@ -27,7 +27,9 @@ module.exports = MongoAWS =
 			accessKeyId: settings.filestore.s3.key
 			secretAccessKey: settings.filestore.s3.secret
 		}
+		logger.log {project_id, doc_id}, "uploading data to s3"
 		upload = S3S.WriteStream new AWS.S3(), {
 			"Bucket": settings.filestore.stores.user_files,
 			"Key": project_id+"/changes-"+doc_id
@@ -53,7 +55,9 @@ module.exports = MongoAWS =
 			accessKeyId: settings.filestore.s3.key
 			secretAccessKey: settings.filestore.s3.secret
 		}
+		logger.log {project_id, doc_id}, "downloading data from s3"
 		download = S3S.ReadStream new AWS.S3(), {
 			"Bucket": settings.filestore.stores.user_files,
 			"Key": project_id+"/changes-"+doc_id