Mirror of https://github.com/overleaf/overleaf.git, synced 2025-04-19 18:25:22 +00:00

Merge pull request #24275 from overleaf/bg-fix-pending-change-timestamp

fix pending change timestamp

GitOrigin-RevId: 9297a4b57ea718e6a2e1ca62388919c62911af6c

parent f134746c9c · commit 3f7c88108c

6 changed files with 158 additions and 25 deletions
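Summary of the change, as reflected in the hunks below: previously the backend stamped overleaf.backup.pendingChangeAt from the chunk's end timestamp when a chunk was created or updated. This PR threads a new optional earliestChangeTimestamp argument from persistChanges through chunkStore.create/update and the backends' confirmCreate/confirmUpdate down to updateProjectRecord, so the pending-change marker records when the first not-yet-backed-up change was made. A minimal sketch of the resulting fallback chain (plain JavaScript, not the repo's module layout):

// Mirrors the value written via $min to overleaf.backup.pendingChangeAt:
// prefer the earliest change's timestamp, then the chunk end timestamp,
// then "now".
function pendingChangeAtCandidate(earliestChangeTimestamp, chunkEndTimestamp) {
  return earliestChangeTimestamp || chunkEndTimestamp || new Date()
}

// Example: a batch of changes made at 10:00 and 10:05.
const first = new Date('2025-01-01T10:00:00Z')
const last = new Date('2025-01-01T10:05:00Z')
console.log(pendingChangeAtCandidate(first, last)) // 10:00, the first change
console.log(pendingChangeAtCandidate(null, last))  // falls back to the chunk end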
File 1 of 6 (chunk store: create / update):

@@ -155,15 +155,22 @@ async function loadAtTimestamp(projectId, timestamp) {
  *
  * @param {string} projectId
  * @param {Chunk} chunk
+ * @param {Date} [earliestChangeTimestamp]
  * @return {Promise.<number>} for the chunkId of the inserted chunk
  */
-async function create(projectId, chunk) {
+async function create(projectId, chunk, earliestChangeTimestamp) {
   assert.projectId(projectId, 'bad projectId')
   assert.instance(chunk, Chunk, 'bad chunk')
+  assert.maybe.date(earliestChangeTimestamp, 'bad timestamp')
 
   const backend = getBackend(projectId)
   const chunkId = await uploadChunk(projectId, chunk)
-  await backend.confirmCreate(projectId, chunk, chunkId)
+  await backend.confirmCreate(
+    projectId,
+    chunk,
+    chunkId,
+    earliestChangeTimestamp
+  )
 }
 
 /**

@@ -195,18 +202,31 @@ async function uploadChunk(projectId, chunk) {
  * @param {string} projectId
  * @param {number} oldEndVersion
  * @param {Chunk} newChunk
+ * @param {Date} [earliestChangeTimestamp]
  * @return {Promise}
  */
-async function update(projectId, oldEndVersion, newChunk) {
+async function update(
+  projectId,
+  oldEndVersion,
+  newChunk,
+  earliestChangeTimestamp
+) {
   assert.projectId(projectId, 'bad projectId')
   assert.integer(oldEndVersion, 'bad oldEndVersion')
   assert.instance(newChunk, Chunk, 'bad newChunk')
+  assert.maybe.date(earliestChangeTimestamp, 'bad timestamp')
 
   const backend = getBackend(projectId)
   const oldChunkId = await getChunkIdForVersion(projectId, oldEndVersion)
   const newChunkId = await uploadChunk(projectId, newChunk)
 
-  await backend.confirmUpdate(projectId, oldChunkId, newChunk, newChunkId)
+  await backend.confirmUpdate(
+    projectId,
+    oldChunkId,
+    newChunk,
+    newChunkId,
+    earliestChangeTimestamp
+  )
 }
 
 /**
File 2 of 6 (Mongo backend: confirmCreate / updateProjectRecord / confirmUpdate):

@@ -199,7 +199,13 @@ async function insertPendingChunk(projectId, chunk) {
 /**
  * Record that a new chunk was created.
  */
-async function confirmCreate(projectId, chunk, chunkId, mongoOpts = {}) {
+async function confirmCreate(
+  projectId,
+  chunk,
+  chunkId,
+  earliestChangeTimestamp,
+  mongoOpts = {}
+) {
   assert.mongoId(projectId, 'bad projectId')
   assert.instance(chunk, Chunk, 'bad chunk')
   assert.mongoId(chunkId, 'bad chunkId')

@@ -228,13 +234,23 @@ async function confirmCreate(projectId, chunk, chunkId, mongoOpts = {}) {
   if (result.matchedCount === 0) {
     throw new OError('pending chunk not found', { projectId, chunkId })
   }
-  await updateProjectRecord(projectId, chunk, mongoOpts)
+  await updateProjectRecord(
+    projectId,
+    chunk,
+    earliestChangeTimestamp,
+    mongoOpts
+  )
 }
 
 /**
  * Write the metadata to the project record
  */
-async function updateProjectRecord(projectId, chunk, mongoOpts = {}) {
+async function updateProjectRecord(
+  projectId,
+  chunk,
+  earliestChangeTimestamp,
+  mongoOpts = {}
+) {
   // record the end version against the project
   await mongodb.projects.updateOne(
     {

@@ -251,7 +267,7 @@ async function updateProjectRecord(projectId, chunk, mongoOpts = {}) {
       // be cleared every time a backup is completed.
       $min: {
         'overleaf.backup.pendingChangeAt':
-          chunk.getEndTimestamp() || new Date(),
+          earliestChangeTimestamp || chunk.getEndTimestamp() || new Date(),
       },
     },
     mongoOpts

@@ -261,7 +277,13 @@ async function updateProjectRecord(projectId, chunk, mongoOpts = {}) {
 /**
  * Record that a chunk was replaced by a new one.
  */
-async function confirmUpdate(projectId, oldChunkId, newChunk, newChunkId) {
+async function confirmUpdate(
+  projectId,
+  oldChunkId,
+  newChunk,
+  newChunkId,
+  earliestChangeTimestamp
+) {
   assert.mongoId(projectId, 'bad projectId')
   assert.mongoId(oldChunkId, 'bad oldChunkId')
   assert.instance(newChunk, Chunk, 'bad newChunk')

@@ -271,7 +293,13 @@ async function confirmUpdate(projectId, oldChunkId, newChunk, newChunkId) {
   try {
     await session.withTransaction(async () => {
       await deleteChunk(projectId, oldChunkId, { session })
-      await confirmCreate(projectId, newChunk, newChunkId, { session })
+      await confirmCreate(
+        projectId,
+        newChunk,
+        newChunkId,
+        earliestChangeTimestamp,
+        { session }
+      )
     })
   } finally {
     await session.endSession()
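The fix hinges on the $min update above: MongoDB only writes the field when the candidate value is lower than the stored one (or the field is unset), so overleaf.backup.pendingChangeAt always keeps the earliest change that has not yet been backed up and is never pushed forward by later batches; per the comment in the hunk, it is cleared when a backup completes. A minimal sketch of that behaviour with the Node MongoDB driver (the filter shown is illustrative, not the repo's exact query):

// `projects` is a mongodb driver Collection; `timestamp` is the candidate
// pending-change time (earliest change in the batch, or a fallback).
async function recordPendingChange(projects, projectId, timestamp) {
  // $min leaves an existing earlier pendingChangeAt untouched; it only
  // lowers the value, or sets it when the field is missing.
  await projects.updateOne(
    { _id: projectId }, // illustrative filter
    { $min: { 'overleaf.backup.pendingChangeAt': timestamp } }
  )
}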
File 3 of 6 (Postgres backend: confirmCreate / confirmUpdate):

@@ -193,7 +193,12 @@ async function insertPendingChunk(projectId, chunk) {
 /**
  * Record that a new chunk was created.
  */
-async function confirmCreate(projectId, chunk, chunkId) {
+async function confirmCreate(
+  projectId,
+  chunk,
+  chunkId,
+  earliestChangeTimestamp
+) {
   assert.postgresId(projectId, `bad projectId ${projectId}`)
   projectId = parseInt(projectId, 10)
 

@@ -202,14 +207,20 @@ async function confirmCreate(projectId, chunk, chunkId) {
       _deletePendingChunk(tx, projectId, chunkId),
       _insertChunk(tx, projectId, chunk, chunkId),
     ])
-    await updateProjectRecord(projectId, chunk)
+    await updateProjectRecord(projectId, chunk, earliestChangeTimestamp)
   })
 }
 
 /**
  * Record that a chunk was replaced by a new one.
  */
-async function confirmUpdate(projectId, oldChunkId, newChunk, newChunkId) {
+async function confirmUpdate(
+  projectId,
+  oldChunkId,
+  newChunk,
+  newChunkId,
+  earliestChangeTimestamp
+) {
   assert.postgresId(projectId, `bad projectId ${projectId}`)
   projectId = parseInt(projectId, 10)
 

@@ -219,7 +230,7 @@ async function confirmUpdate(projectId, oldChunkId, newChunk, newChunkId) {
       _deletePendingChunk(tx, projectId, newChunkId),
       _insertChunk(tx, projectId, newChunk, newChunkId),
     ])
-    await updateProjectRecord(projectId, newChunk)
+    await updateProjectRecord(projectId, newChunk, earliestChangeTimestamp)
   })
 }
File 4 of 6 (persistChanges):

@@ -65,6 +65,9 @@ async function persistChanges(projectId, allChanges, limits, clientEndVersion) {
 
   const blobStore = new BlobStore(projectId)
 
+  const earliestChangeTimestamp =
+    allChanges.length > 0 ? allChanges[0].getTimestamp() : null
+
   let currentChunk
 
   /**

@@ -220,7 +223,12 @@ async function persistChanges(projectId, allChanges, limits, clientEndVersion) {
 
     checkElapsedTime(timer)
 
-    await chunkStore.update(projectId, originalEndVersion, currentChunk)
+    await chunkStore.update(
+      projectId,
+      originalEndVersion,
+      currentChunk,
+      earliestChangeTimestamp
+    )
   }
 
   async function createNewChunksAsNeeded() {

@@ -234,7 +242,7 @@ async function persistChanges(projectId, allChanges, limits, clientEndVersion) {
     if (changesPushed) {
       checkElapsedTime(timer)
       currentChunk = chunk
-      await chunkStore.create(projectId, chunk)
+      await chunkStore.create(projectId, chunk, earliestChangeTimestamp)
     } else {
       throw new Error('failed to fill empty chunk')
     }
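persistChanges applies the batch in order, so allChanges[0] carries the earliest timestamp; an empty batch yields null and the backends fall back to the chunk end timestamp (see the $min hunk above). A small self-contained sketch of that derivation, with plain objects standing in for Change instances and their getTimestamp accessor:

function earliestChangeTimestampOf(allChanges) {
  // Same ternary as the hunk above: first change's timestamp, or null.
  return allChanges.length > 0 ? allChanges[0].getTimestamp() : null
}

// Example:
const batch = [
  { getTimestamp: () => new Date('2025-01-01T10:00:00Z') },
  { getTimestamp: () => new Date('2025-01-01T10:05:00Z') },
]
console.log(earliestChangeTimestampOf(batch)) // 2025-01-01T10:00:00.000Z
console.log(earliestChangeTimestampOf([]))    // null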
File 5 of 6 (backup script test):

@@ -201,7 +201,12 @@ describe('backup script', function () {
       textOperation: [newContentString.length, ' even more'], // Keep existing content, append ' even more'
     })
     const additionalEditOp = Operation.editFile('main.tex', additionalTextOp)
-    const additionalChange = new Change([additionalEditOp], new Date(), [])
+    const firstTimestamp = new Date()
+    const additionalChange = new Change(
+      [additionalEditOp],
+      firstTimestamp,
+      []
+    )
 
     // add the nonbmp file
     const blobStore = new BlobStore(historyId)

@@ -222,7 +227,12 @@ describe('backup script', function () {
       'non_bmp.txt',
       File.fromHash(testFiles.NON_BMP_TXT_HASH)
     )
-    const additionalChange2 = new Change([addNonBmpFileOp], new Date(), [])
+    const secondTimestamp = new Date()
+    const additionalChange2 = new Change(
+      [addNonBmpFileOp],
+      secondTimestamp,
+      []
+    )
 
     await persistChanges(
       historyId,

@@ -242,10 +252,11 @@ describe('backup script', function () {
     expect(afterChangeResult.backupStatus.lastBackedUpAt)
       .to.be.an.instanceOf(Date)
       .and.to.deep.equal(result1.backupStatus.lastBackedUpAt)
-    // but it should update the pendingChangeAt timestamp
+    // but it should update the pendingChangeAt timestamp to the timestamp of the
+    // first change which modified the project
     expect(afterChangeResult.backupStatus.pendingChangeAt)
       .to.be.an.instanceOf(Date)
-      .and.to.be.greaterThan(result1.backupStatus.lastBackedUpAt)
+      .and.to.deep.equal(firstTimestamp)
 
     // Second backup
     const { stdout: stdout2 } = await runBackupScript([

@@ -410,12 +421,18 @@ describe('backup script', function () {
   })
 
   describe('with complex project content', function () {
+    let beforeInitializationTimestamp
+    let afterInitializationTimestamp
+
     beforeEach(async function () {
       // Create initial project
      await projectsCollection.insertOne(project)
 
       // Initialize project in chunk store
+      // bracket the initialisation with two timestamps to check the pendingChangeAt field
+      beforeInitializationTimestamp = new Date()
       await ChunkStore.initializeProject(historyId)
+      afterInitializationTimestamp = new Date()
 
       const blobStore = new BlobStore(historyId)
 

@@ -528,6 +545,14 @@ describe('backup script', function () {
       )
     })
 
+    it('persistChanges should set the pendingChangeAt field to the time of snapshot initialisation', async function () {
+      const result = await getBackupStatus(projectId)
+      expect(result.backupStatus.pendingChangeAt).to.be.an.instanceOf(Date)
+      expect(result.backupStatus.pendingChangeAt)
+        .to.be.greaterThan(beforeInitializationTimestamp)
+        .and.to.be.lessThan(afterInitializationTimestamp)
+    })
+
     it('should backup all chunks and blobs from a complex project history', async function () {
       // Run backup script
       const { stdout } = await runBackupScript(['--projectId', projectId])
File 6 of 6 (chunkStore test):

@@ -58,12 +58,42 @@ describe('chunkStore', function () {
        expect(chunk.getEndTimestamp()).not.to.exist
      })
 
+     describe('creating a chunk', async function () {
+       const pendingChangeTimestamp = new Date('2014-01-01T00:00:00')
+       const lastChangeTimestamp = new Date('2015-01-01T00:00:00')
+       beforeEach(async function () {
+         const chunk = makeChunk(
+           [
+             makeChange(
+               Operation.addFile('main.tex', File.fromString('abc')),
+               lastChangeTimestamp
+             ),
+           ],
+           1
+         )
+         await chunkStore.create(projectId, chunk, pendingChangeTimestamp)
+       })
+       it('creates a chunk and inserts the pending change timestamp', async function () {
+         const project = await projects.findOne({
+           _id: new ObjectId(projectRecord.insertedId),
+         })
+         expect(project.overleaf.history.currentEndVersion).to.equal(2)
+         expect(project.overleaf.history.currentEndTimestamp).to.deep.equal(
+           lastChangeTimestamp
+         )
+         expect(project.overleaf.backup.pendingChangeAt).to.deep.equal(
+           pendingChangeTimestamp
+         )
+       })
+     })
+
      describe('adding and editing a blank file', function () {
        const testPathname = 'foo.txt'
        const testTextOperation = TextOperation.fromJSON({
          textOperation: ['a'],
        }) // insert an a
        let lastChangeTimestamp
+       const pendingChangeTimestamp = new Date()
 
        beforeEach(async function () {
          const chunk = await chunkStore.loadLatest(projectId)

@@ -74,7 +104,12 @@ describe('chunkStore', function () {
          ]
          lastChangeTimestamp = changes[1].getTimestamp()
          chunk.pushChanges(changes)
-         await chunkStore.update(projectId, oldEndVersion, chunk)
+         await chunkStore.update(
+           projectId,
+           oldEndVersion,
+           chunk,
+           pendingChangeTimestamp
+         )
        })
 
        it('records the correct metadata in db readOnly=false', async function () {

@@ -132,13 +167,14 @@ describe('chunkStore', function () {
            lastChangeTimestamp
          )
          expect(project.overleaf.backup.pendingChangeAt).to.deep.equal(
-           lastChangeTimestamp
+           pendingChangeTimestamp
          )
        })
      })
 
     describe('multiple chunks', async function () {
       // Two chunks are 1 year apart
+      const pendingChangeTimestamp = new Date('2014-01-01T00:00:00')
       const firstChunkTimestamp = new Date('2015-01-01T00:00:00')
       const secondChunkTimestamp = new Date('2016-01-01T00:00:00')
       const thirdChunkTimestamp = new Date('2017-01-01T00:00:00')

@@ -158,7 +194,12 @@ describe('chunkStore', function () {
        ],
        0
      )
-     await chunkStore.update(projectId, 0, firstChunk)
+     await chunkStore.update(
+       projectId,
+       0,
+       firstChunk,
+       pendingChangeTimestamp
+     )
      firstChunk = await chunkStore.loadLatest(projectId)
 
      secondChunk = makeChunk(

@@ -268,7 +309,7 @@ describe('chunkStore', function () {
          _id: new ObjectId(projectRecord.insertedId),
        })
        expect(project.overleaf.backup.pendingChangeAt).to.deep.equal(
-         firstChunkTimestamp
+         pendingChangeTimestamp
        )
      })

@@ -322,7 +363,7 @@ describe('chunkStore', function () {
          _id: new ObjectId(projectRecord.insertedId),
        })
        expect(project.overleaf.backup.pendingChangeAt).to.deep.equal(
-         firstChunkTimestamp
+         pendingChangeTimestamp
        )
      })
    })