Merge pull request #24224 from overleaf/em-resync-on-flush-failure

Immediately attempt a resync when a history flush fails

GitOrigin-RevId: 098a0a7edf55c0ed47c48e0a3c080e3562cdceaa
Eric Mc Sween 2025-03-17 08:38:26 -04:00 committed by Copybot
parent c5b584e3d8
commit 35dc7faab6
10 changed files with 369 additions and 235 deletions
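In outline, the change makes a failed history flush record the failure and, when that failure is the first hard failure since the last successful flush, start a hard resync immediately instead of waiting for the periodic retry. A minimal, promise-based sketch of that flow; the committed code is callback-based and spread across UpdatesProcessor, ErrorRecorder, RetryManager and SyncManager, and the wiring and queue-size handling below are simplified:

// Sketch only, not the committed implementation.
async function flushAndMaybeResync(projectId, { flush, errorRecorder, retryManager, resync }) {
  try {
    const queueSize = await flush(projectId)
    // Successful flush: drop any stale failure record.
    await errorRecorder.clearError(projectId)
    return queueSize
  } catch (flushError) {
    // record() now resolves with the stored failure record (error, attempts, ...).
    // -1 stands in for the queue size here; the real callback receives it from
    // _countAndProcessUpdates.
    const failure = await errorRecorder.record(projectId, -1, flushError)
    if (retryManager.isFirstFailure(failure) && retryManager.isHardFailure(failure)) {
      // First hard failure since the last successful flush: resync right away.
      await resync(projectId)
      return
    }
    throw flushError
  }
}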


@ -1,54 +1,57 @@
// @ts-check
import { callbackify } from 'node:util'
import logger from '@overleaf/logger'
import metrics from '@overleaf/metrics'
import OError from '@overleaf/o-error'
import { db } from './mongodb.js'
/**
* @import { ProjectHistoryFailure } from './mongo-types'
*/
/**
* @param {string} projectId
* @param {number} queueSize
* @param {Error} error
* @return {Promise<ProjectHistoryFailure>} the failure record
*/
async function record(projectId, queueSize, error) {
if (error != null) {
const errorRecord = {
queueSize,
error: error.toString(),
stack: error.stack,
ts: new Date(),
}
logger.debug(
{ projectId, errorRecord },
'recording failed attempt to process updates'
)
try {
await db.projectHistoryFailures.updateOne(
{ project_id: projectId },
{
$set: errorRecord,
$inc: { attempts: 1 },
$push: {
history: {
$each: [errorRecord],
$position: 0,
$slice: 10,
},
}, // only keep recent failures
},
{ upsert: true }
)
} catch (mongoError) {
logger.error(
{ projectId, mongoError },
'failed to change project statues in mongo'
)
}
throw error
} else {
try {
await db.projectHistoryFailures.deleteOne({ project_id: projectId })
} catch (mongoError) {
logger.error(
{ projectId, mongoError },
'failed to change project statues in mongo'
)
}
return queueSize
const errorRecord = {
queueSize,
error: error.toString(),
stack: error.stack ?? '',
ts: new Date(),
}
logger.debug(
{ projectId, errorRecord },
'recording failed attempt to process updates'
)
const result = await db.projectHistoryFailures.findOneAndUpdate(
{ project_id: projectId },
{
$set: errorRecord,
$inc: { attempts: 1 },
$push: {
history: {
$each: [errorRecord],
$position: 0,
// only keep recent failures
$slice: 10,
},
},
},
{ upsert: true, returnDocument: 'after', includeResultMetadata: true }
)
if (result.value == null) {
// Since we upsert, the result should always have a value
throw new OError('no value returned when recording an error', { projectId })
}
return result.value
}
async function clearError(projectId) {
await db.projectHistoryFailures.deleteOne({ project_id: projectId })
}
async function setForceDebug(projectId, state) {
@ -85,7 +88,6 @@ async function recordSyncStart(projectId) {
/**
* @param projectId
* @return {Promise<{error: string, forceDebug?: boolean}|null>}
*/
async function getFailureRecord(projectId) {
return await db.projectHistoryFailures.findOne({ project_id: projectId })
@ -238,6 +240,7 @@ const getFailureRecordCb = callbackify(getFailureRecord)
const getFailuresCb = callbackify(getFailures)
const getLastFailureCb = callbackify(getLastFailure)
const recordCb = callbackify(record)
const clearErrorCb = callbackify(clearError)
const recordSyncStartCb = callbackify(recordSyncStart)
const setForceDebugCb = callbackify(setForceDebug)
@ -247,6 +250,7 @@ export {
getLastFailureCb as getLastFailure,
getFailuresCb as getFailures,
recordCb as record,
clearErrorCb as clearError,
recordSyncStartCb as recordSyncStart,
setForceDebugCb as setForceDebug,
}
@ -257,6 +261,7 @@ export const promises = {
getLastFailure,
getFailures,
record,
clearError,
recordSyncStart,
setForceDebug,
}
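After this change, record() always expects an Error, upserts the failure via findOneAndUpdate with upsert and returnDocument: 'after', and resolves with the stored ProjectHistoryFailure document; removing a record is now the separate clearError() call. A hedged usage sketch from a caller's perspective, with an illustrative import path and queue size:

import { promises as ErrorRecorder } from './ErrorRecorder.js' // path illustrative

async function handleFlushResult(projectId, flushError) {
  if (flushError) {
    // Resolves with the upserted record, e.g.
    // { project_id, error, stack, queueSize, attempts, history, ts }
    const failure = await ErrorRecorder.record(projectId, 42, flushError)
    return failure.attempts // 1 on the first failure since the last success
  }
  // Previously callers wrote record(projectId, 0, null) to clear the record.
  await ErrorRecorder.clearError(projectId)
  return 0
}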


@ -604,9 +604,7 @@ export function deleteProject(req, res, next) {
if (err) {
return next(err)
}
// The third parameter to the following call is the error. Calling it
// with null will remove any failure record for this project.
ErrorRecorder.record(projectId, 0, null, err => {
ErrorRecorder.clearError(projectId, err => {
if (err) {
return next(err)
}


@ -73,11 +73,11 @@ function isTemporaryFailure(failure) {
return TEMPORARY_FAILURES.includes(failure.error)
}
function isHardFailure(failure) {
export function isHardFailure(failure) {
return HARD_FAILURES.includes(failure.error)
}
function isFirstFailure(failure) {
export function isFirstFailure(failure) {
return failure.attempts <= 1
}
@ -147,7 +147,7 @@ async function resyncProject(projectId, options = {}) {
try {
if (!/^[0-9a-f]{24}$/.test(projectId)) {
logger.debug({ projectId }, 'clearing bad project id')
await ErrorRecorder.promises.record(projectId, 0, null)
await ErrorRecorder.promises.clearError(projectId)
return
}
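isHardFailure and isFirstFailure are exported so that UpdatesProcessor can classify a freshly recorded failure. A hypothetical helper combining them; in the commit the check is inlined in processUpdatesForProject:

import { isFirstFailure, isHardFailure } from './RetryManager.js' // path illustrative

function shouldResyncImmediately(failure) {
  // Only the first hard failure after a successful flush triggers an
  // immediate resync; later failures go through the normal retry cycle.
  return isFirstFailure(failure) && isHardFailure(failure)
}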


@ -58,10 +58,8 @@ async function startResync(projectId, options = {}) {
)
} catch (error) {
// record error in starting sync ("sync ongoing")
try {
if (error instanceof Error) {
await ErrorRecorder.promises.record(projectId, -1, error)
} catch (err) {
// swallow any error thrown by ErrorRecorder.record()
}
throw error
}
@ -81,7 +79,9 @@ async function startHardResync(projectId, options = {}) {
)
} catch (error) {
// record error in starting sync ("sync ongoing")
await ErrorRecorder.promises.record(projectId, -1, error)
if (error instanceof Error) {
await ErrorRecorder.promises.record(projectId, -1, error)
}
throw error
}
}
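Both resync entry points now guard the recording call with error instanceof Error instead of swallowing recording failures, since record() reads error.toString() and error.stack and no longer accepts null. A simplified sketch of that guard; the -1 queue size mirrors the calls in the diff:

async function recordStartResyncFailure(projectId, error, errorRecorder) {
  if (error instanceof Error) {
    await errorRecorder.record(projectId, -1, error)
  }
  throw error // the original "sync ongoing" error is always re-thrown
}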


@ -16,6 +16,7 @@ import * as SyncManager from './SyncManager.js'
import * as Versions from './Versions.js'
import * as Errors from './Errors.js'
import * as Metrics from './Metrics.js'
import * as RetryManager from './RetryManager.js'
import { Profiler } from './Profiler.js'
const keys = Settings.redis.lock.key_schema
@ -84,11 +85,29 @@ export function startResyncAndProcessUpdatesUnderLock(
})
})
},
(error, queueSize) => {
if (error) {
OError.tag(error)
(flushError, queueSize) => {
if (flushError) {
OError.tag(flushError)
ErrorRecorder.record(projectId, queueSize, flushError, recordError => {
if (recordError) {
logger.error(
{ err: recordError, projectId },
'failed to record error'
)
}
callback(flushError)
})
} else {
ErrorRecorder.clearError(projectId, clearError => {
if (clearError) {
logger.error(
{ err: clearError, projectId },
'failed to clear error'
)
}
callback()
})
}
ErrorRecorder.record(projectId, queueSize, error, callback)
if (queueSize > 0) {
const duration = (Date.now() - startTimeMs) / 1000
Metrics.historyFlushDurationSeconds.observe(duration)
@ -113,11 +132,44 @@ export function processUpdatesForProject(projectId, callback) {
releaseLock
)
},
(error, queueSize) => {
if (error) {
OError.tag(error)
(flushError, queueSize) => {
if (flushError) {
OError.tag(flushError)
ErrorRecorder.record(
projectId,
queueSize,
flushError,
(recordError, failure) => {
if (recordError) {
logger.error(
{ err: recordError, projectId },
'failed to record error'
)
callback(recordError)
} else if (
RetryManager.isFirstFailure(failure) &&
RetryManager.isHardFailure(failure)
) {
// This is the first failed flush since the last successful flush.
// Immediately attempt a resync.
logger.warn({ projectId }, 'Flush failed, attempting resync')
resyncProject(projectId, callback)
} else {
callback(flushError)
}
}
)
} else {
ErrorRecorder.clearError(projectId, clearError => {
if (clearError) {
logger.error(
{ err: clearError, projectId },
'failed to clear error'
)
}
callback()
})
}
ErrorRecorder.record(projectId, queueSize, error, callback)
if (queueSize > 0) {
const duration = (Date.now() - startTimeMs) / 1000
Metrics.historyFlushDurationSeconds.observe(duration)
@ -129,6 +181,57 @@ export function processUpdatesForProject(projectId, callback) {
)
}
export function resyncProject(projectId, callback) {
SyncManager.startHardResync(projectId, {}, error => {
if (error != null) {
return callback(OError.tag(error))
}
// Flush the sync operations; this will not loop indefinitely
// because any failure won't be the first failure anymore.
LockManager.runWithLock(
keys.projectHistoryLock({ project_id: projectId }),
(extendLock, releaseLock) => {
_countAndProcessUpdates(
projectId,
extendLock,
REDIS_READ_BATCH_SIZE,
releaseLock
)
},
(flushError, queueSize) => {
if (flushError) {
ErrorRecorder.record(
projectId,
queueSize,
flushError,
(recordError, failure) => {
if (OError.tag(recordError)) {
logger.error(
{ err: recordError, projectId },
'failed to record error'
)
callback(OError.tag(recordError))
} else {
callback(OError.tag(flushError))
}
}
)
} else {
ErrorRecorder.clearError(projectId, clearError => {
if (clearError) {
logger.error(
{ err: clearError, projectId },
'failed to clear error'
)
}
callback()
})
}
}
)
})
}
export function processUpdatesForProjectUsingBisect(
projectId,
amountToProcess,
@ -144,21 +247,29 @@ export function processUpdatesForProjectUsingBisect(
releaseLock
)
},
(error, queueSize) => {
(flushError, queueSize) => {
if (amountToProcess === 0 || queueSize === 0) {
// no further processing possible
if (error != null) {
if (flushError != null) {
ErrorRecorder.record(
projectId,
queueSize,
OError.tag(error),
callback
OError.tag(flushError),
recordError => {
if (recordError) {
logger.error(
{ err: recordError, projectId },
'failed to record error'
)
}
callback(flushError)
}
)
} else {
callback()
}
} else {
if (error != null) {
if (flushError != null) {
// decrease the batch size when we hit an error
processUpdatesForProjectUsingBisect(
projectId,
@ -187,13 +298,31 @@ export function processSingleUpdateForProject(projectId, callback) {
) => {
_countAndProcessUpdates(projectId, extendLock, 1, releaseLock)
},
(
error,
queueSize // no need to clear the flush marker when single stepping
) => {
(flushError, queueSize) => {
// no need to clear the flush marker when single stepping
// it will be cleared up on the next background flush if
// the queue is empty
ErrorRecorder.record(projectId, queueSize, error, callback)
if (flushError) {
ErrorRecorder.record(projectId, queueSize, flushError, recordError => {
if (recordError) {
logger.error(
{ err: recordError, projectId },
'failed to record error'
)
}
callback(flushError)
})
} else {
ErrorRecorder.clearError(projectId, clearError => {
if (clearError) {
logger.error(
{ err: clearError, projectId },
'failed to clear error'
)
}
callback()
})
}
}
)
}
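The comment in resyncProject ("this will not loop indefinitely because any failure won't be the first failure anymore") relies on ErrorRecorder.record() incrementing attempts on every failed flush and clearError() running only after a successful one. An illustrative trace of that argument, using stand-in stubs rather than the real modules:

let attempts = 0
const record = () => ({ attempts: ++attempts }) // stub for ErrorRecorder.record
const isFirstFailure = failure => failure.attempts <= 1 // as in RetryManager

const first = record() // first failed flush
console.log(isFirstFailure(first)) // true  -> immediate resync is attempted
const second = record() // the resync's own flush also fails
console.log(isFirstFailure(second)) // false -> no second resync; the retry cycle takes over
// Any successful flush calls clearError(), removing the record and resetting the counter.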


@ -0,0 +1,22 @@
import { ObjectId } from 'mongodb-legacy'
export type ProjectHistoryFailure = {
_id: ObjectId
project_id: string
attempts: number
resyncAttempts: number
resyncStartedAt: Date
requestCount?: number
history: (ErrorRecord | SyncStartRecord)[]
} & ErrorRecord
type ErrorRecord = {
error: string
stack: string
queueSize: number
ts: Date
}
type SyncStartRecord = {
resyncStartedAt: Date
}


@ -3,6 +3,10 @@ import Settings from '@overleaf/settings'
import mongodb from 'mongodb-legacy'
const { MongoClient, ObjectId } = mongodb
/**
* @import { ProjectHistoryFailure } from './mongo-types.ts'
*/
export { ObjectId }
export const mongoClient = new MongoClient(
@ -16,6 +20,7 @@ Metrics.mongodb.monitor(mongoClient)
export const db = {
deletedProjects: mongoDb.collection('deletedProjects'),
projects: mongoDb.collection('projects'),
/** @type {mongodb.Collection<ProjectHistoryFailure>} */
projectHistoryFailures: mongoDb.collection('projectHistoryFailures'),
projectHistoryLabels: mongoDb.collection('projectHistoryLabels'),
projectHistorySyncState: mongoDb.collection('projectHistorySyncState'),


@ -1,5 +1,4 @@
import sinon from 'sinon'
import { expect } from 'chai'
import { strict as esmock } from 'esmock'
import tk from 'timekeeper'
@ -12,7 +11,9 @@ describe('ErrorRecorder', function () {
this.db = {
projectHistoryFailures: {
deleteOne: sinon.stub().resolves(),
updateOne: sinon.stub().resolves(),
findOneAndUpdate: sinon
.stub()
.resolves({ value: { failure: 'record' } }),
},
}
this.mongodb = { db: this.db }
@ -31,75 +32,65 @@ describe('ErrorRecorder', function () {
})
describe('record', function () {
describe('with an error', function () {
beforeEach(async function () {
this.error = new Error('something bad')
await expect(
this.ErrorRecorder.promises.record(
this.project_id,
this.queueSize,
this.error
)
).to.be.rejected
})
it('should record the error to mongo', function () {
this.db.projectHistoryFailures.updateOne
.calledWithMatch(
{
project_id: this.project_id,
},
{
$set: {
queueSize: this.queueSize,
error: this.error.toString(),
stack: this.error.stack,
ts: this.now,
},
$inc: {
attempts: 1,
},
$push: {
history: {
$each: [
{
queueSize: this.queueSize,
error: this.error.toString(),
stack: this.error.stack,
ts: this.now,
},
],
$position: 0,
$slice: 10,
},
},
},
{
upsert: true,
}
)
.should.equal(true)
})
beforeEach(async function () {
this.error = new Error('something bad')
await this.ErrorRecorder.promises.record(
this.project_id,
this.queueSize,
this.error
)
})
describe('without an error', function () {
beforeEach(async function () {
this.result = await this.ErrorRecorder.promises.record(
this.project_id,
this.queueSize,
this.error
it('should record the error to mongo', function () {
this.db.projectHistoryFailures.findOneAndUpdate
.calledWithMatch(
{
project_id: this.project_id,
},
{
$set: {
queueSize: this.queueSize,
error: this.error.toString(),
stack: this.error.stack,
ts: this.now,
},
$inc: {
attempts: 1,
},
$push: {
history: {
$each: [
{
queueSize: this.queueSize,
error: this.error.toString(),
stack: this.error.stack,
ts: this.now,
},
],
$position: 0,
$slice: 10,
},
},
},
{
upsert: true,
}
)
})
.should.equal(true)
})
})
it('should remove any error from mongo', function () {
this.db.projectHistoryFailures.deleteOne
.calledWithMatch({ project_id: this.project_id })
.should.equal(true)
})
describe('clearError', function () {
beforeEach(async function () {
this.result = await this.ErrorRecorder.promises.clearError(
this.project_id
)
})
it('should return the queue size', function () {
expect(this.result).to.equal(this.queueSize)
})
it('should remove any error from mongo', function () {
this.db.projectHistoryFailures.deleteOne
.calledWithMatch({ project_id: this.project_id })
.should.equal(true)
})
})
})


@ -40,7 +40,7 @@ describe('HttpController', function () {
clearCachedHistoryId: sinon.stub().yields(),
}
this.ErrorRecorder = {
record: sinon.stub().yields(),
clearError: sinon.stub().yields(),
}
this.LabelsManager = {
createLabel: sinon.stub(),
@ -567,11 +567,7 @@ describe('HttpController', function () {
})
it('should clear any failure record', function () {
this.ErrorRecorder.record.should.have.been.calledWith(
this.projectId,
0,
null
)
this.ErrorRecorder.clearError.should.have.been.calledWith(this.projectId)
})
})
})


@ -1,17 +1,3 @@
/* eslint-disable
mocha/no-nested-tests,
no-return-assign,
no-undef,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
import sinon from 'sinon'
import { expect } from 'chai'
import { strict as esmock } from 'esmock'
@ -53,7 +39,11 @@ describe('UpdatesProcessor', function () {
}
this.ErrorRecorder = {
getLastFailure: sinon.stub(),
record: sinon.stub().yields(),
record: sinon.stub().yields(null, { attempts: 1 }),
}
this.RetryManager = {
isFirstFailure: sinon.stub().returns(true),
isHardFailure: sinon.stub().returns(false),
}
this.Profiler = {
Profiler: class {
@ -101,6 +91,7 @@ describe('UpdatesProcessor', function () {
'../../../../app/js/SyncManager.js': this.SyncManager,
'../../../../app/js/ErrorRecorder.js': this.ErrorRecorder,
'../../../../app/js/Profiler.js': this.Profiler,
'../../../../app/js/RetryManager.js': this.RetryManager,
'../../../../app/js/Errors.js': Errors,
'@overleaf/metrics': this.Metrics,
'@overleaf/settings': this.Settings,
@ -109,7 +100,7 @@ describe('UpdatesProcessor', function () {
this.project_id = 'project-id-123'
this.ol_project_id = 'ol-project-id-234'
this.callback = sinon.stub()
return (this.temporary = 'temp-mock')
this.temporary = 'temp-mock'
})
describe('processUpdatesForProject', function () {
@ -124,20 +115,20 @@ describe('UpdatesProcessor', function () {
describe('when there is no existing error', function () {
beforeEach(function (done) {
this.ErrorRecorder.getLastFailure.yields()
return this.UpdatesProcessor.processUpdatesForProject(
this.project_id,
done
)
this.UpdatesProcessor.processUpdatesForProject(this.project_id, err => {
expect(err).to.equal(this.error)
done()
})
})
it('processes updates', function () {
return this.UpdatesProcessor._mocks._countAndProcessUpdates
this.UpdatesProcessor._mocks._countAndProcessUpdates
.calledWith(this.project_id)
.should.equal(true)
})
return it('records errors', function () {
return this.ErrorRecorder.record
it('records errors', function () {
this.ErrorRecorder.record
.calledWith(this.project_id, this.queueSize, this.error)
.should.equal(true)
})
@ -154,14 +145,14 @@ describe('UpdatesProcessor', function () {
this.WebApiManager.getHistoryId.yields(null)
})
return it('returns null', function (done) {
return this.UpdatesProcessor._getHistoryId(
it('returns null', function (done) {
this.UpdatesProcessor._getHistoryId(
this.project_id,
this.updates,
(error, projectHistoryId) => {
expect(error).to.be.null
expect(projectHistoryId).to.be.null
return done()
done()
}
)
})
@ -169,102 +160,102 @@ describe('UpdatesProcessor', function () {
describe('projectHistoryId is not present in updates', function () {
beforeEach(function () {
return (this.updates = [
this.updates = [
{ p: 0, i: 'a' },
{ p: 1, i: 's' },
])
]
})
it('returns the id from web', function (done) {
this.projectHistoryId = '1234'
this.WebApiManager.getHistoryId.yields(null, this.projectHistoryId)
return this.UpdatesProcessor._getHistoryId(
this.UpdatesProcessor._getHistoryId(
this.project_id,
this.updates,
(error, projectHistoryId) => {
expect(error).to.be.null
expect(projectHistoryId).equal(this.projectHistoryId)
return done()
done()
}
)
})
return it('returns errors from web', function (done) {
it('returns errors from web', function (done) {
this.error = new Error('oh no!')
this.WebApiManager.getHistoryId.yields(this.error)
return this.UpdatesProcessor._getHistoryId(
this.UpdatesProcessor._getHistoryId(
this.project_id,
this.updates,
error => {
expect(error).to.equal(this.error)
return done()
done()
}
)
})
})
return describe('projectHistoryId is present in some updates', function () {
describe('projectHistoryId is present in some updates', function () {
beforeEach(function () {
this.projectHistoryId = '1234'
return (this.updates = [
this.updates = [
{ p: 0, i: 'a' },
{ p: 1, i: 's', projectHistoryId: this.projectHistoryId },
{ p: 2, i: 'd', projectHistoryId: this.projectHistoryId },
])
]
})
it('returns an error if the id is inconsistent between updates', function (done) {
this.updates[1].projectHistoryId = 2345
return this.UpdatesProcessor._getHistoryId(
this.UpdatesProcessor._getHistoryId(
this.project_id,
this.updates,
error => {
expect(error.message).to.equal(
'inconsistent project history id between updates'
)
return done()
done()
}
)
})
it('returns an error if the id is inconsistent between updates and web', function (done) {
this.WebApiManager.getHistoryId.yields(null, 2345)
return this.UpdatesProcessor._getHistoryId(
this.UpdatesProcessor._getHistoryId(
this.project_id,
this.updates,
error => {
expect(error.message).to.equal(
'inconsistent project history id between updates and web'
)
return done()
done()
}
)
})
it('returns the id if it is consistent between updates and web', function (done) {
this.WebApiManager.getHistoryId.yields(null, this.projectHistoryId)
return this.UpdatesProcessor._getHistoryId(
this.UpdatesProcessor._getHistoryId(
this.project_id,
this.updates,
(error, projectHistoryId) => {
expect(error).to.be.null
expect(projectHistoryId).equal(this.projectHistoryId)
return done()
done()
}
)
})
return it('returns the id if it is consistent between updates but unavaiable in web', function (done) {
it('returns the id if it is consistent between updates but unavaiable in web', function (done) {
this.WebApiManager.getHistoryId.yields(new Error('oh no!'))
return this.UpdatesProcessor._getHistoryId(
this.UpdatesProcessor._getHistoryId(
this.project_id,
this.updates,
(error, projectHistoryId) => {
expect(error).to.be.null
expect(projectHistoryId).equal(this.projectHistoryId)
return done()
done()
}
)
})
@ -332,21 +323,21 @@ describe('UpdatesProcessor', function () {
})
it('should get the latest version id', function () {
return this.HistoryStoreManager.getMostRecentVersion.should.have.been.calledWith(
this.HistoryStoreManager.getMostRecentVersion.should.have.been.calledWith(
this.project_id,
this.ol_project_id
)
})
it('should skip updates when resyncing', function () {
return this.SyncManager.skipUpdatesDuringSync.should.have.been.calledWith(
this.SyncManager.skipUpdatesDuringSync.should.have.been.calledWith(
this.project_id,
this.rawUpdates
)
})
it('should expand sync updates', function () {
return this.SyncManager.expandSyncUpdates.should.have.been.calledWith(
this.SyncManager.expandSyncUpdates.should.have.been.calledWith(
this.project_id,
this.ol_project_id,
this.mostRecentChunk,
@ -356,13 +347,13 @@ describe('UpdatesProcessor', function () {
})
it('should compress updates', function () {
return this.UpdateCompressor.compressRawUpdates.should.have.been.calledWith(
this.UpdateCompressor.compressRawUpdates.should.have.been.calledWith(
this.expandedUpdates
)
})
it('should create any blobs for the updates', function () {
return this.BlobManager.createBlobsForUpdates.should.have.been.calledWith(
this.BlobManager.createBlobsForUpdates.should.have.been.calledWith(
this.project_id,
this.ol_project_id,
this.compressedUpdates
@ -370,14 +361,14 @@ describe('UpdatesProcessor', function () {
})
it('should convert the updates into a change requests', function () {
return this.UpdateTranslator.convertToChanges.should.have.been.calledWith(
this.UpdateTranslator.convertToChanges.should.have.been.calledWith(
this.project_id,
this.updatesWithBlobs
)
})
it('should send the change request to the history store', function () {
return this.HistoryStoreManager.sendChanges.should.have.been.calledWith(
this.HistoryStoreManager.sendChanges.should.have.been.calledWith(
this.project_id,
this.ol_project_id,
['change']
@ -385,14 +376,14 @@ describe('UpdatesProcessor', function () {
})
it('should set the sync state', function () {
return this.SyncManager.setResyncState.should.have.been.calledWith(
this.SyncManager.setResyncState.should.have.been.calledWith(
this.project_id,
this.newSyncState
)
})
it('should call the callback with no error', function () {
return this.callback.should.have.been.called
this.callback.should.have.been.called
})
})
@ -420,7 +411,7 @@ describe('UpdatesProcessor', function () {
})
})
return describe('_skipAlreadyAppliedUpdates', function () {
describe('_skipAlreadyAppliedUpdates', function () {
before(function () {
this.UpdateTranslator.isProjectStructureUpdate.callsFake(
update => update.version != null
@ -436,16 +427,15 @@ describe('UpdatesProcessor', function () {
{ doc: 'id', v: 3 },
{ doc: 'id', v: 4 },
]
return (this.updatesToApply =
this.UpdatesProcessor._skipAlreadyAppliedUpdates(
this.project_id,
this.updates,
{ docs: {} }
))
this.updatesToApply = this.UpdatesProcessor._skipAlreadyAppliedUpdates(
this.project_id,
this.updates,
{ docs: {} }
)
})
return it('should return the original updates', function () {
return expect(this.updatesToApply).to.eql(this.updates)
it('should return the original updates', function () {
expect(this.updatesToApply).to.eql(this.updates)
})
})
@ -457,16 +447,15 @@ describe('UpdatesProcessor', function () {
{ version: 3 },
{ version: 4 },
]
return (this.updatesToApply =
this.UpdatesProcessor._skipAlreadyAppliedUpdates(
this.project_id,
this.updates,
{ docs: {} }
))
this.updatesToApply = this.UpdatesProcessor._skipAlreadyAppliedUpdates(
this.project_id,
this.updates,
{ docs: {} }
)
})
return it('should return the original updates', function () {
return expect(this.updatesToApply).to.eql(this.updates)
it('should return the original updates', function () {
expect(this.updatesToApply).to.eql(this.updates)
})
})
@ -486,16 +475,15 @@ describe('UpdatesProcessor', function () {
{ version: 3 },
{ version: 4 },
]
return (this.updatesToApply =
this.UpdatesProcessor._skipAlreadyAppliedUpdates(
this.project_id,
this.updates,
{ docs: {} }
))
this.updatesToApply = this.UpdatesProcessor._skipAlreadyAppliedUpdates(
this.project_id,
this.updates,
{ docs: {} }
)
})
return it('should return the original updates', function () {
return expect(this.updatesToApply).to.eql(this.updates)
it('should return the original updates', function () {
expect(this.updatesToApply).to.eql(this.updates)
})
})
@ -512,25 +500,25 @@ describe('UpdatesProcessor', function () {
'_skipAlreadyAppliedUpdates'
)
try {
return (this.updatesToApply =
this.updatesToApply =
this.UpdatesProcessor._skipAlreadyAppliedUpdates(
this.project_id,
this.updates,
{ docs: {} }
))
)
} catch (error) {}
})
after(function () {
return this.skipFn.restore()
this.skipFn.restore()
})
return it('should throw an exception', function () {
return this.skipFn.threw('OpsOutOfOrderError').should.equal(true)
it('should throw an exception', function () {
this.skipFn.threw('OpsOutOfOrderError').should.equal(true)
})
})
return describe('with project ops out of order', function () {
describe('with project ops out of order', function () {
before(function () {
this.updates = [
{ version: 1 },
@ -543,21 +531,21 @@ describe('UpdatesProcessor', function () {
'_skipAlreadyAppliedUpdates'
)
try {
return (this.updatesToApply =
this.updatesToApply =
this.UpdatesProcessor._skipAlreadyAppliedUpdates(
this.project_id,
this.updates,
{ docs: {} }
))
)
} catch (error) {}
})
after(function () {
return this.skipFn.restore()
this.skipFn.restore()
})
return it('should throw an exception', function () {
return this.skipFn.threw('OpsOutOfOrderError').should.equal(true)
it('should throw an exception', function () {
this.skipFn.threw('OpsOutOfOrderError').should.equal(true)
})
})
})