increase logging on s3 operations

This commit is contained in:
Brian Gough 2016-01-12 10:36:00 +00:00
parent ca1f1dc944
commit 6199532d08
2 changed files with 7 additions and 2 deletions

View file

@@ -78,6 +78,7 @@ module.exports = DocArchiveManager =
 		MongoManager.getArchivedDocChanges doc_id, (error, count) ->
 			return callback(error) if error?
 			if count == 0
+				logger.log {project_id, doc_id}, "no changes marked as in s3, not unarchiving"
 				return callback()
 			else
 				MongoAWS.unArchiveDocHistory project_id, doc_id, (error) ->

View file

@@ -28,6 +28,8 @@ module.exports = MongoAWS =
 			secretAccessKey: settings.filestore.s3.secret
 		}
+		logger.log {project_id, doc_id}, "uploading data to s3"
 		upload = S3S.WriteStream new AWS.S3(), {
 			"Bucket": settings.filestore.stores.user_files,
 			"Key": project_id+"/changes-"+doc_id
@@ -54,6 +56,8 @@ module.exports = MongoAWS =
 			secretAccessKey: settings.filestore.s3.secret
 		}
+		logger.log {project_id, doc_id}, "downloading data from s3"
 		download = S3S.ReadStream new AWS.S3(), {
 			"Bucket": settings.filestore.stores.user_files,
 			"Key": project_id+"/changes-"+doc_id