Mirror of https://github.com/overleaf/overleaf.git

commit aa66c5ee8c (parent 3f712c452a)

    improve size function

2 changed files with 7 additions and 9 deletions
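In short: instead of measuring each parsed op with BSON.calculateObjectSize, the stream handler now accumulates the raw line length into sz, and that running total is threaded through handleBulk as a new size argument so the bulk logger can report it without recomputing.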
MongoAWS module:

@@ -5,7 +5,6 @@ S3S = require 's3-streams'
 {db, ObjectId} = require "./mongojs"
 JSONStream = require "JSONStream"
 ReadlineStream = require "readline-stream"
-BSON=db.bson.BSON

 module.exports = MongoAWS =

@@ -74,19 +73,19 @@ module.exports = MongoAWS =
 		.on 'data', (line) ->
 			if line.length > 2
 				ops.push(JSON.parse(line))
-				sz += BSON.calculateObjectSize(ops[ops.length-1])
+				sz += line.length
 				if ops.length >= MongoAWS.MAX_COUNT || sz >= MongoAWS.MAX_SIZE
 					download.pause()
-					MongoAWS.handleBulk ops.slice(0), () ->
+					MongoAWS.handleBulk ops.slice(0), sz, () ->
 						download.resume()
 						ops.splice(0,ops.length)
 						sz = 0
 		.on 'end', () ->
-			MongoAWS.handleBulk ops, callback
+			MongoAWS.handleBulk ops, sz, callback
 		.on 'error', (err) ->
 			return callback(err)

-	handleBulk: (ops, cb) ->
+	handleBulk: (ops, size, cb) ->
 		bulk = db.docHistory.initializeUnorderedBulkOp();

 		for op in ops
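Note the semantic shift in the size check: line.length counts the characters of the raw JSON line as it comes off the stream, a cheaper proxy for the op's size than BSON.calculateObjectSize, and sufficient for deciding when a batch has grown past MAX_SIZE.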
@@ -100,7 +99,7 @@ module.exports = MongoAWS =
 			if err?
 				logger.error err:err, "error bulking ReadlineStream"
 			else
-				logger.log count:ops.length, result:result, size: BSON.calculateObjectSize(ops), "bulked ReadlineStream"
+				logger.log count:ops.length, result:result, size: size, "bulked ReadlineStream"
 			cb(err)
 		else
 			cb()

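To see the whole pattern in one place, here is a minimal CoffeeScript sketch of the batch-and-flush loop after this change, with the Mongo and S3 plumbing stubbed out. MAX_COUNT, MAX_SIZE, and the console-logging handleBulk are illustrative stand-ins, not the module's real values:

# illustrative limits; the real module defines its own MAX_COUNT/MAX_SIZE
MAX_COUNT = 1000
MAX_SIZE  = 1024 * 1024

ops = []
sz  = 0

# stand-in for the real handleBulk, which issues a bulk Mongo write;
# size is only reported, never used for the write itself
handleBulk = (ops, size, cb) ->
  console.log "bulked #{ops.length} ops, ~#{size} chars"
  cb()

onData = (line, resume) ->
  return resume() unless line.length > 2
  ops.push JSON.parse(line)
  sz += line.length                    # raw line length, not BSON.calculateObjectSize
  if ops.length >= MAX_COUNT or sz >= MAX_SIZE
    handleBulk ops.slice(0), sz, ->    # pass the running size along for logging
      ops.splice(0, ops.length)        # reset batch and counter after the flush
      sz = 0
      resume()
  else
    resume()

onEnd = (callback) ->
  handleBulk ops, sz, callback         # flush the final partial batch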
MongoAWS unit tests:

@@ -21,11 +21,10 @@ describe "MongoAWS", ->
 			"aws-sdk": @awssdk = {}
 			"fs": @fs = {}
 			"s3-streams": @s3streams = {}
-			"./mongojs" : { db: @db = { bson: { BSON:{} } }, ObjectId: ObjectId }
+			"./mongojs" : { db: @db = {}, ObjectId: ObjectId }
 			"JSONStream": @JSONStream = {}
 			"readline-stream": @readline = sinon.stub()

-		@db.bson.BSON.calculateObjectSize = sinon.stub().returns true
 		@project_id = ObjectId().toString()
 		@doc_id = ObjectId().toString()
 		@callback = sinon.stub()

@@ -102,7 +101,7 @@ describe "MongoAWS", ->
 				execute: sinon.stub().callsArgWith(0, null, {})
 			@db.docHistory = {}
 			@db.docHistory.initializeUnorderedBulkOp = sinon.stub().returns @bulk
-			@MongoAWS.handleBulk @bulkOps, @callback
+			@MongoAWS.handleBulk @bulkOps, @bulkOps.length, @callback

 		it "should call updateOne for each operation", ->
 			@bulk.find.calledWith({_id:@bulkOps[0]._id}).should.equal true
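With the stubbed BSON helper gone from the test setup, the handleBulk call just needs some number for the new size argument; @bulkOps.length serves, since handleBulk only forwards the value to the logger.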