[track-changes] Ignore duplicated packs when zipping SL history (#8353)

* [track-changes] Ignore duplicated packs when zipping SL history

* Fix incorrect `ObjectId` import in unit tests

GitOrigin-RevId: 165fcaa0c7bc57d383d01eedaf6dd3b078aff2aa
Miguel Serrano 2022-06-10 10:20:57 +02:00 committed by Copybot
parent c2cfa96983
commit bb04841504
5 changed files with 83 additions and 0 deletions

package-lock.json

@@ -34447,6 +34447,7 @@
"heap": "^0.2.6",
"JSONStream": "^1.3.5",
"line-reader": "^0.4.0",
"lodash": "^4.17.21",
"mongo-uri": "^0.1.2",
"mongodb": "^3.6.0",
"redis": "~0.10.1",
@@ -42165,6 +42166,7 @@
"heap": "^0.2.6",
"JSONStream": "^1.3.5",
"line-reader": "^0.4.0",
"lodash": "*",
"memorystream": "0.3.1",
"mocha": "^8.4.0",
"mongo-uri": "^0.1.2",

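Both the lock file and the service's package.json (further down) gain lodash; it is pulled in for `_.isEqual`, which the new PackUtils helper below uses for deep comparison of pack metadata and operations. A trivial, self-contained illustration (the values are examples only):

const _ = require('lodash')

// Deep equality compares nested structure, not object identity.
const metaA = { start_ts: 1399130007228, end_ts: 1399130007228 }
const metaB = { start_ts: 1399130007228, end_ts: 1399130007228 }
console.log(_.isEqual(metaA, metaB)) // true
console.log(metaA === metaB) // false: different object references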

@@ -11,6 +11,7 @@ const stream = require('stream')
const fs = require('fs')
const os = require('os')
const Path = require('path')
const { packsAreDuplicated } = require('./util/PackUtils')
const streamPipeline = util.promisify(stream.pipeline)
@@ -85,8 +86,13 @@ async function rewindDoc(projectId, docId, zipfile) {
  let content = latestContent
  let v = version
  let update = lastUpdate
  let previousUpdate = null

  while (update) {
    if (packsAreDuplicated(update, previousUpdate)) {
      // skip duplicated packs, but still advance to the next update
      update = await getUpdate()
      continue
    }

    const updatePath = `${id}/updates/${update.v}`
    zipfile.addBuffer(Buffer.from(JSON.stringify(update)), updatePath, {
@@ -107,6 +113,7 @@ async function rewindDoc(projectId, docId, zipfile) {
      ts: update.meta.start_ts,
      doc_length: content.length,
    })

    previousUpdate = update
    update = await getUpdate()
  }

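The loop above follows a skip-duplicates pattern: remember the last pack that was written and, when the next pack compares as a duplicate, advance to the following update without writing anything. A minimal standalone sketch of that pattern, where `fetchNext` and `write` are hypothetical placeholders for the real track-changes plumbing:

const { packsAreDuplicated } = require('./util/PackUtils')

// Writes each distinct pack once, skipping packs that duplicate the one
// written immediately before them. `fetchNext` yields the next pack (or a
// falsy value when exhausted) and `write` persists a pack; both are
// placeholders for this sketch.
async function writeDeduplicated(fetchNext, write) {
  let previous = null
  let current = await fetchNext()
  while (current) {
    if (packsAreDuplicated(current, previous)) {
      current = await fetchNext()
      continue
    }
    await write(current)
    previous = current
    current = await fetchNext()
  }
}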

@@ -0,0 +1,18 @@
const _ = require('lodash')

/**
 * Returns true when two packs are deeply equal, ignoring their _id fields.
 */
function packsAreDuplicated(pack1, pack2) {
  return Boolean(
    pack1 &&
      pack2 &&
      pack1.v === pack2.v &&
      _.isEqual(pack1.meta, pack2.meta) &&
      _.isEqual(pack1.op, pack2.op)
  )
}

module.exports = {
  packsAreDuplicated,
}

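To illustrate the semantics above: two packs that share the same `v`, `meta` and `op` count as duplicates even when their `_id` values differ (the literals below are made up for the example):

const { packsAreDuplicated } = require('./util/PackUtils')

const packA = { _id: 'aaaaaaaaaaaaaaaaaaaaaaaa', v: 12, meta: { start_ts: 1 }, op: [{ p: 0, i: 'x' }] }
const packB = { _id: 'bbbbbbbbbbbbbbbbbbbbbbbb', v: 12, meta: { start_ts: 1 }, op: [{ p: 0, i: 'x' }] }

console.log(packsAreDuplicated(packA, packB)) // true: only _id differs
console.log(packsAreDuplicated(packA, undefined)) // false: a missing pack is never a duplicate
console.log(packsAreDuplicated(packA, { ...packB, v: 13 })) // false: different version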

@@ -31,6 +31,7 @@
"heap": "^0.2.6",
"JSONStream": "^1.3.5",
"line-reader": "^0.4.0",
"lodash": "^4.17.21",
"mongo-uri": "^0.1.2",
"mongodb": "^3.6.0",
"redis": "~0.10.1",


@@ -0,0 +1,55 @@
const { expect } = require('chai')
const { ObjectId } = require('mongodb')
const { packsAreDuplicated } = require('../../../../app/js/util/PackUtils')

const examplePack = {
  v: 12,
  meta: {
    user_id: '525e6018b53de7a920002545',
    start_ts: 1399130007228,
    end_ts: 1399130007228,
  },
  op: [
    {
      p: 2372,
      d: 'Test for a Subsection',
    },
    {
      p: 2372,
      i: 'Reviews and review terminology',
    },
  ],
}

const objectId1 = ObjectId('53650ba27e62ca78520d9814')
const objectId2 = ObjectId('0b5a814a27e678520d92c536')

describe('PackUtils', function () {
  describe('packsAreDuplicated()', function () {
    it('returns `false` when any of the packs is undefined', function () {
      const pack = { ...examplePack, _id: objectId1 }
      expect(packsAreDuplicated(pack, undefined)).to.be.false
      expect(packsAreDuplicated(undefined, pack)).to.be.false
      expect(packsAreDuplicated(undefined, undefined)).to.be.false
    })

    it('returns `true` for identical packs with the same `_id`', function () {
      const pack1 = { ...examplePack, _id: objectId1 }
      const pack2 = { ...examplePack, _id: objectId1 }
      expect(packsAreDuplicated(pack1, pack2)).to.be.true
    })

    it('returns `true` for identical packs with different `_id`', function () {
      const pack1 = { ...examplePack, _id: objectId1 }
      const pack2 = { ...examplePack, _id: objectId2 }
      expect(packsAreDuplicated(pack1, pack2)).to.be.true
    })

    it('returns `false` for packs with different nested properties', function () {
      const pack1 = { ...examplePack, _id: objectId1 }
      const pack2 = { ...examplePack, _id: 1 }
      pack2.op = [...pack2.op, { p: 2800, i: 'char' }]
      expect(packsAreDuplicated(pack1, pack2)).to.be.false
    })
  })
})