Merge branch 'overleaf:main' into main

This commit is contained in:
Damian Fajfer 2024-04-30 22:29:40 +02:00 committed by GitHub
commit 4a94693c67
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
11 changed files with 918 additions and 509 deletions

16
package-lock.json generated
View file

@ -41549,7 +41549,7 @@
"url-loader": "^4.1.1", "url-loader": "^4.1.1",
"webpack": "^5.81.0", "webpack": "^5.81.0",
"webpack-cli": "^5.0.2", "webpack-cli": "^5.0.2",
"webpack-dev-middleware": "^6.0.2", "webpack-dev-middleware": "^6.1.2",
"webpack-hot-middleware": "^2.25.1", "webpack-hot-middleware": "^2.25.1",
"webpack-manifest-plugin": "^5.0.0" "webpack-manifest-plugin": "^5.0.0"
}, },
@ -42612,9 +42612,9 @@
} }
}, },
"services/latexqc/node_modules/webpack-dev-middleware": { "services/latexqc/node_modules/webpack-dev-middleware": {
"version": "6.1.1", "version": "6.1.2",
"resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-6.1.1.tgz", "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-6.1.2.tgz",
"integrity": "sha512-y51HrHaFeeWir0YO4f0g+9GwZawuigzcAdRNon6jErXy/SqV/+O6eaVAzDqE6t3e3NpGeR5CS+cCDaTC+V3yEQ==", "integrity": "sha512-Wu+EHmX326YPYUpQLKmKbTyZZJIB8/n6R09pTmB03kJmnMsVPTo9COzHZFr01txwaCAuZvfBJE4ZCHRcKs5JaQ==",
"dev": true, "dev": true,
"dependencies": { "dependencies": {
"colorette": "^2.0.10", "colorette": "^2.0.10",
@ -67255,7 +67255,7 @@
"url-loader": "^4.1.1", "url-loader": "^4.1.1",
"webpack": "^5.81.0", "webpack": "^5.81.0",
"webpack-cli": "^5.0.2", "webpack-cli": "^5.0.2",
"webpack-dev-middleware": "^6.0.2", "webpack-dev-middleware": "^6.1.2",
"webpack-hot-middleware": "^2.25.1", "webpack-hot-middleware": "^2.25.1",
"webpack-manifest-plugin": "^5.0.0" "webpack-manifest-plugin": "^5.0.0"
}, },
@ -68067,9 +68067,9 @@
} }
}, },
"webpack-dev-middleware": { "webpack-dev-middleware": {
"version": "6.1.1", "version": "6.1.2",
"resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-6.1.1.tgz", "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-6.1.2.tgz",
"integrity": "sha512-y51HrHaFeeWir0YO4f0g+9GwZawuigzcAdRNon6jErXy/SqV/+O6eaVAzDqE6t3e3NpGeR5CS+cCDaTC+V3yEQ==", "integrity": "sha512-Wu+EHmX326YPYUpQLKmKbTyZZJIB8/n6R09pTmB03kJmnMsVPTo9COzHZFr01txwaCAuZvfBJE4ZCHRcKs5JaQ==",
"dev": true, "dev": true,
"requires": { "requires": {
"colorette": "^2.0.10", "colorette": "^2.0.10",

View file

@ -80,6 +80,13 @@ COPY server-ce/config/custom-environment-variables.json /overleaf/services/histo
ADD server-ce/bin/grunt /usr/local/bin/grunt ADD server-ce/bin/grunt /usr/local/bin/grunt
RUN chmod +x /usr/local/bin/grunt RUN chmod +x /usr/local/bin/grunt
# Copy history helper scripts
# ---------------------------
ADD server-ce/bin/flush-history-queues /overleaf/bin/flush-history-queues
RUN chmod +x /overleaf/bin/flush-history-queues
ADD server-ce/bin/force-history-resyncs /overleaf/bin/force-history-resyncs
RUN chmod +x /overleaf/bin/force-history-resyncs
# File that controls open|closed status of the site # File that controls open|closed status of the site
# ------------------------------------------------- # -------------------------------------------------
ENV SITE_MAINTENANCE_FILE "/etc/overleaf/site_status" ENV SITE_MAINTENANCE_FILE "/etc/overleaf/site_status"

View file

@ -0,0 +1,8 @@
#!/bin/bash
# Flush all pending project-history updates for every project.
# Helper script installed into the Server CE container; sources the container
# environment before delegating to the project-history flush script.
set -euo pipefail
source /etc/container_environment.sh
source /etc/overleaf/env.sh
cd /overleaf/services/project-history
# 100000 is the maximum number of queues to flush in one run
node scripts/flush_all.js 100000

View file

@ -0,0 +1,8 @@
#!/bin/bash
# Force a full history resync for projects with history failures.
# Helper script installed into the Server CE container; sources the container
# environment before delegating to the project-history resync script.
set -euo pipefail
source /etc/container_environment.sh
source /etc/overleaf/env.sh
cd /overleaf/services/project-history
# 1000 is the batch limit; 'force' resyncs even previously failed projects
node scripts/force_resync.js 1000 force

View file

@ -170,10 +170,11 @@ _mocks._countAndProcessUpdates = (
_processUpdatesBatch(projectId, updates, extendLock, cb) _processUpdatesBatch(projectId, updates, extendLock, cb)
}, },
error => { error => {
if (error) { // Unconventional callback signature. The caller needs the queue size
return callback(error) // even when an error is thrown in order to record the queue size in
} // the projectHistoryFailures collection. We'll have to find another
callback(null, queueSize) // way to achieve this when we promisify.
callback(error, queueSize)
} }
) )
} else { } else {

View file

@ -9,81 +9,77 @@ const ClsiManager = require('./ClsiManager')
const Metrics = require('@overleaf/metrics') const Metrics = require('@overleaf/metrics')
const { RateLimiter } = require('../../infrastructure/RateLimiter') const { RateLimiter } = require('../../infrastructure/RateLimiter')
const UserAnalyticsIdCache = require('../Analytics/UserAnalyticsIdCache') const UserAnalyticsIdCache = require('../Analytics/UserAnalyticsIdCache')
const {
callbackify,
callbackifyMultiResult,
} = require('@overleaf/promise-utils')
module.exports = CompileManager = { function instrumentWithTimer(fn, key) {
compile(projectId, userId, options = {}, _callback) { return async (...args) => {
const timer = new Metrics.Timer('editor.compile') const timer = new Metrics.Timer(key)
const callback = function (...args) { try {
return await fn(...args)
} finally {
timer.done() timer.done()
_callback(...args) }
}
} }
CompileManager._checkIfRecentlyCompiled( async function compile(projectId, userId, options = {}) {
const recentlyCompiled = await CompileManager._checkIfRecentlyCompiled(
projectId, projectId,
userId, userId
function (error, recentlyCompiled) { )
if (error) {
return callback(error)
}
if (recentlyCompiled) { if (recentlyCompiled) {
return callback(null, 'too-recently-compiled', []) return { status: 'too-recently-compiled', outputFiles: [] }
} }
CompileManager._checkIfAutoCompileLimitHasBeenHit( try {
const canCompile = await CompileManager._checkIfAutoCompileLimitHasBeenHit(
options.isAutoCompile, options.isAutoCompile,
'everyone', 'everyone'
function (err, canCompile) { )
if (err || !canCompile) { if (!canCompile) {
return callback(null, 'autocompile-backoff', []) return { status: 'autocompile-backoff', outputFiles: [] }
}
} catch (error) {
return { status: 'autocompile-backoff', outputFiles: [] }
} }
ProjectRootDocManager.ensureRootDocumentIsSet( await ProjectRootDocManager.promises.ensureRootDocumentIsSet(projectId)
projectId,
function (error) { const limits =
if (error) { await CompileManager.promises.getProjectCompileLimits(projectId)
return callback(error)
}
CompileManager.getProjectCompileLimits(
projectId,
function (error, limits) {
if (error) {
return callback(error)
}
for (const key in limits) { for (const key in limits) {
const value = limits[key] const value = limits[key]
options[key] = value options[key] = value
} }
// Put a lower limit on autocompiles for free users, based on compileGroup
CompileManager._checkCompileGroupAutoCompileLimit( try {
const canCompile = await CompileManager._checkCompileGroupAutoCompileLimit(
options.isAutoCompile, options.isAutoCompile,
limits.compileGroup, limits.compileGroup
function (err, canCompile) { )
if (err || !canCompile) { if (!canCompile) {
return callback(null, 'autocompile-backoff', []) return { status: 'autocompile-backoff', outputFiles: [] }
} }
} catch (error) {
return { message: 'autocompile-backoff', outputFiles: [] }
}
// only pass userId down to clsi if this is a per-user compile // only pass userId down to clsi if this is a per-user compile
const compileAsUser = Settings.disablePerUserCompiles const compileAsUser = Settings.disablePerUserCompiles ? undefined : userId
? undefined const {
: userId
ClsiManager.sendRequest(
projectId,
compileAsUser,
options,
function (
error,
status, status,
outputFiles, outputFiles,
clsiServerId, clsiServerId,
validationProblems, validationProblems,
stats, stats,
timings, timings,
outputUrlPrefix outputUrlPrefix,
) { } = await ClsiManager.promises.sendRequest(projectId, compileAsUser, options)
if (error) {
return callback(error) return {
}
callback(
null,
status, status,
outputFiles, outputFiles,
clsiServerId, clsiServerId,
@ -91,172 +87,141 @@ module.exports = CompileManager = {
validationProblems, validationProblems,
stats, stats,
timings, timings,
outputUrlPrefix outputUrlPrefix,
)
} }
)
} }
)
}
)
}
)
}
)
}
)
},
stopCompile(projectId, userId, callback) { const instrumentedCompile = instrumentWithTimer(compile, 'editor.compile')
CompileManager.getProjectCompileLimits(projectId, function (error, limits) {
if (error) { async function getProjectCompileLimits(projectId) {
return callback(error) const project = await ProjectGetter.promises.getProject(projectId, {
} owner_ref: 1,
ClsiManager.stopCompile(projectId, userId, limits, callback)
}) })
},
deleteAuxFiles(projectId, userId, clsiserverid, callback) { const owner = await UserGetter.promises.getUser(project.owner_ref, {
CompileManager.getProjectCompileLimits(projectId, function (error, limits) {
if (error) {
return callback(error)
}
ClsiManager.deleteAuxFiles(
projectId,
userId,
limits,
clsiserverid,
callback
)
})
},
getProjectCompileLimits(projectId, callback) {
ProjectGetter.getProject(
projectId,
{ owner_ref: 1 },
function (error, project) {
if (error) {
return callback(error)
}
UserGetter.getUser(
project.owner_ref,
{
_id: 1, _id: 1,
alphaProgram: 1, alphaProgram: 1,
analyticsId: 1, analyticsId: 1,
betaProgram: 1, betaProgram: 1,
features: 1, features: 1,
}, })
function (err, owner) {
if (err) {
return callback(err)
}
const ownerFeatures = (owner && owner.features) || {} const ownerFeatures = (owner && owner.features) || {}
// put alpha users into their own compile group // put alpha users into their own compile group
if (owner && owner.alphaProgram) { if (owner && owner.alphaProgram) {
ownerFeatures.compileGroup = 'alpha' ownerFeatures.compileGroup = 'alpha'
} }
UserAnalyticsIdCache.callbacks.get( const analyticsId = await UserAnalyticsIdCache.get(owner._id)
owner._id,
function (err, analyticsId) {
if (err) {
return callback(err)
}
const compileGroup = const compileGroup =
ownerFeatures.compileGroup || ownerFeatures.compileGroup || Settings.defaultFeatures.compileGroup
Settings.defaultFeatures.compileGroup
const limits = { const limits = {
timeout: timeout:
ownerFeatures.compileTimeout || ownerFeatures.compileTimeout || Settings.defaultFeatures.compileTimeout,
Settings.defaultFeatures.compileTimeout,
compileGroup, compileGroup,
compileBackendClass: compileBackendClass: compileGroup === 'standard' ? 'n2d' : 'c2d',
compileGroup === 'standard' ? 'n2d' : 'c2d',
ownerAnalyticsId: analyticsId, ownerAnalyticsId: analyticsId,
} }
callback(null, limits) return limits
} }
)
}
)
}
)
},
COMPILE_DELAY: 1, // seconds async function wordCount(projectId, userId, file, clsiserverid) {
_checkIfRecentlyCompiled(projectId, userId, callback) { const limits =
const key = `compile:${projectId}:${userId}` await CompileManager.promises.getProjectCompileLimits(projectId)
rclient.set( return await ClsiManager.promises.wordCount(
key,
true,
'EX',
this.COMPILE_DELAY,
'NX',
function (error, ok) {
if (error) {
return callback(error)
}
if (ok === 'OK') {
callback(null, false)
} else {
callback(null, true)
}
}
)
},
_checkCompileGroupAutoCompileLimit(isAutoCompile, compileGroup, callback) {
if (!isAutoCompile) {
return callback(null, true)
}
if (compileGroup === 'standard') {
// apply extra limits to the standard compile group
CompileManager._checkIfAutoCompileLimitHasBeenHit(
isAutoCompile,
compileGroup,
callback
)
} else {
Metrics.inc(`auto-compile-${compileGroup}`)
callback(null, true)
}
}, // always allow priority group users to compile
_checkIfAutoCompileLimitHasBeenHit(isAutoCompile, compileGroup, callback) {
if (!isAutoCompile) {
return callback(null, true)
}
Metrics.inc(`auto-compile-${compileGroup}`)
const rateLimiter = getAutoCompileRateLimiter(compileGroup)
rateLimiter
.consume('global', 1, { method: 'global' })
.then(() => {
callback(null, true)
})
.catch(() => {
// Don't differentiate between errors and rate limits. Silently trigger
// the rate limit if there's an error consuming the points.
Metrics.inc(`auto-compile-${compileGroup}-limited`)
callback(null, false)
})
},
wordCount(projectId, userId, file, clsiserverid, callback) {
CompileManager.getProjectCompileLimits(projectId, function (error, limits) {
if (error) {
return callback(error)
}
ClsiManager.wordCount(
projectId, projectId,
userId, userId,
file, file,
limits, limits,
clsiserverid, clsiserverid
callback
) )
}) }
async function stopCompile(projectId, userId) {
const limits =
await CompileManager.promises.getProjectCompileLimits(projectId)
return await ClsiManager.promises.stopCompile(projectId, userId, limits)
}
async function deleteAuxFiles(projectId, userId, clsiserverid) {
const limits =
await CompileManager.promises.getProjectCompileLimits(projectId)
return await ClsiManager.promises.deleteAuxFiles(
projectId,
userId,
limits,
clsiserverid
)
}
module.exports = CompileManager = {
promises: {
compile: instrumentedCompile,
deleteAuxFiles,
getProjectCompileLimits,
stopCompile,
wordCount,
}, },
compile: callbackifyMultiResult(instrumentedCompile, [
'status',
'outputFiles',
'clsiServerId',
'limits',
'validationProblems',
'stats',
'timings',
'outputUrlPrefix',
]),
stopCompile: callbackify(stopCompile),
deleteAuxFiles: callbackify(deleteAuxFiles),
getProjectCompileLimits: callbackify(getProjectCompileLimits),
COMPILE_DELAY: 1, // seconds
async _checkIfRecentlyCompiled(projectId, userId) {
const key = `compile:${projectId}:${userId}`
const ok = await rclient.set(key, true, 'EX', this.COMPILE_DELAY, 'NX')
return ok !== 'OK'
},
async _checkCompileGroupAutoCompileLimit(isAutoCompile, compileGroup) {
if (!isAutoCompile) {
return true
}
if (compileGroup === 'standard') {
// apply extra limits to the standard compile group
return await CompileManager._checkIfAutoCompileLimitHasBeenHit(
isAutoCompile,
compileGroup
)
} else {
Metrics.inc(`auto-compile-${compileGroup}`)
return true
}
}, // always allow priority group users to compile
async _checkIfAutoCompileLimitHasBeenHit(isAutoCompile, compileGroup) {
if (!isAutoCompile) {
return true
}
Metrics.inc(`auto-compile-${compileGroup}`)
const rateLimiter = getAutoCompileRateLimiter(compileGroup)
try {
await rateLimiter.consume('global', 1, { method: 'global' })
return true
} catch (e) {
// Don't differentiate between errors and rate limits. Silently trigger
// the rate limit if there's an error consuming the points.
Metrics.inc(`auto-compile-${compileGroup}-limited`)
return false
}
},
wordCount: callbackify(wordCount),
} }
const autoCompileRateLimiters = new Map() const autoCompileRateLimiters = new Map()

View file

@ -34,7 +34,8 @@ const main = async ({ COMMIT, SKIP_COUNT }) => {
const nModified = await batchedUpdate( const nModified = await batchedUpdate(
'users', 'users',
{ 'features.compileTimeout': { $lte: 60, $ne: 20 } }, { 'features.compileTimeout': { $lte: 60, $ne: 20 } },
{ $set: { 'features.compileTimeout': 20 } } // NOTE: Always update featuresUpdatedAt to ensure the user's features synced with BigQuery
{ $set: { 'features.compileTimeout': 20, featuresUpdatedAt: new Date() } }
) )
console.log(`Updated ${nModified} records`) console.log(`Updated ${nModified} records`)
} }

View file

@ -0,0 +1,65 @@
#!/usr/bin/env node

// Backfill `featuresUpdatedAt` for users whose compileTimeout is 20s but whose
// timestamp is missing or predates the original migration, so downstream
// feature syncing (e.g. BigQuery) picks them up.
//
// Usage: node <script> [--commit] [--skip-count]
// Without --commit this is a dry run that only reports the affected count.
const minimist = require('minimist')
const {
  db,
  READ_PREFERENCE_SECONDARY,
  waitForDb,
} = require('../app/src/infrastructure/mongodb')
const { batchedUpdate } = require('./helpers/batchedUpdate')

// A few seconds after the previous migration script was run
const FEATURES_UPDATED_AT = new Date('2024-04-16T12:41:00Z')

// Users on the 20s timeout whose featuresUpdatedAt is absent or stale.
const query = {
  'features.compileTimeout': 20,
  $or: [
    { featuresUpdatedAt: { $exists: false } },
    { featuresUpdatedAt: { $lt: FEATURES_UPDATED_AT } },
  ],
}

// Report how many users match, reading from a secondary to spare the primary.
async function reportCandidateCount() {
  const usersToUpdate = await db.users.countDocuments(query, {
    readPreference: READ_PREFERENCE_SECONDARY,
  })
  console.log(
    `Found ${usersToUpdate} users needing their featuresUpdatedAt updated`
  )
}

async function main({ COMMIT, SKIP_COUNT }) {
  console.time('Script Duration')
  await waitForDb()
  if (!SKIP_COUNT) {
    await reportCandidateCount()
  }
  if (COMMIT) {
    const nModified = await batchedUpdate('users', query, {
      $set: { featuresUpdatedAt: FEATURES_UPDATED_AT },
    })
    console.log(`Updated ${nModified} records`)
  }
  console.timeEnd('Script Duration')
}

// Parse CLI flags; warn when running without --commit (dry-run mode).
function parseArgs() {
  const argv = minimist(process.argv.slice(2))
  const COMMIT = argv.commit !== undefined
  const SKIP_COUNT = argv['skip-count'] !== undefined
  if (!COMMIT) {
    console.warn('Doing dry run. Add --commit to commit changes')
  }
  return { COMMIT, SKIP_COUNT }
}

main(parseArgs())
  .catch(err => {
    console.error(err)
    process.exit(1)
  })
  .then(() => process.exit(0))

View file

@ -0,0 +1,63 @@
#!/usr/bin/env node

// Remove the backfilled `featuresUpdatedAt` from users who signed up AFTER the
// migration timestamp: the previous fixup stamped them incorrectly, since they
// were never affected by the 60s-to-20s migration.
//
// Usage: node <script> [--commit] [--skip-count]
// Without --commit this is a dry run that only reports the affected count.
const minimist = require('minimist')
const {
  db,
  READ_PREFERENCE_SECONDARY,
  waitForDb,
} = require('../app/src/infrastructure/mongodb')
const { batchedUpdate } = require('./helpers/batchedUpdate')

// A few seconds after the previous migration script was run
const FEATURES_UPDATED_AT = new Date('2024-04-16T12:41:00Z')

// 20s-timeout users carrying the backfilled timestamp despite signing up later.
const query = {
  'features.compileTimeout': 20,
  featuresUpdatedAt: FEATURES_UPDATED_AT,
  signUpDate: { $gt: FEATURES_UPDATED_AT },
}

// Report how many users match, reading from a secondary to spare the primary.
async function countAffectedUsers() {
  const usersToUpdate = await db.users.countDocuments(query, {
    readPreference: READ_PREFERENCE_SECONDARY,
  })
  console.log(
    `Found ${usersToUpdate} users needing their featuresUpdatedAt removed`
  )
}

async function main({ COMMIT, SKIP_COUNT }) {
  console.time('Script Duration')
  await waitForDb()
  if (!SKIP_COUNT) {
    await countAffectedUsers()
  }
  if (COMMIT) {
    const nModified = await batchedUpdate('users', query, {
      $unset: { featuresUpdatedAt: 1 },
    })
    console.log(`Updated ${nModified} records`)
  }
  console.timeEnd('Script Duration')
}

// Parse CLI flags; warn when running without --commit (dry-run mode).
function parseArgs() {
  const argv = minimist(process.argv.slice(2))
  const COMMIT = argv.commit !== undefined
  const SKIP_COUNT = argv['skip-count'] !== undefined
  if (!COMMIT) {
    console.warn('Doing dry run. Add --commit to commit changes')
  }
  return { COMMIT, SKIP_COUNT }
}

main(parseArgs())
  .catch(err => {
    console.error(err)
    process.exit(1)
  })
  .then(() => process.exit(0))

View file

@ -17,16 +17,66 @@ async function runScript(args = []) {
} }
} }
// Run one of the migration fixup scripts in a child process and return its
// { stdout, stderr }. Failures are logged and rethrown so tests surface them.
// Factored out because the two runners below differed only in the script path.
async function runMigrationScript(scriptPath, args) {
  try {
    return await promisify(exec)(['node', scriptPath, ...args].join(' '))
  } catch (error) {
    logger.error({ error }, 'script failed')
    throw error
  }
}

// Run the featuresUpdatedAt backfill fixup script with the given CLI args.
async function runFixupScript(args = []) {
  return await runMigrationScript(
    'scripts/migration_compile_timeout_60s_to_20s_fixup_features_updated_at.js',
    args
  )
}

// Run the new-users featuresUpdatedAt removal fixup script with the given args.
async function runSecondFixupScript(args = []) {
  return await runMigrationScript(
    'scripts/migration_compile_timeout_60s_to_20s_fixup_new_users.js',
    args
  )
}
describe('MigrateUserFeatureTimeoutTests', function () { describe('MigrateUserFeatureTimeoutTests', function () {
describe('initial script', function () {
const usersInput = { const usersInput = {
noFeatures: {}, noFeatures: {},
noFeatureTimeout: { features: {} }, noFeatureTimeout: { features: {} },
timeout10s: { features: { compileTimeout: 10, other: 'val1' }, bar: '1' }, timeout10s: {
features: { compileTimeout: 10, other: 'val1' },
bar: '1',
featuresUpdatedAt: new Date('2020-01-01'),
},
timeout20s: { features: { compileTimeout: 20, other: 'val2' }, bar: '2' }, timeout20s: { features: { compileTimeout: 20, other: 'val2' }, bar: '2' },
timeout30s: { features: { compileTimeout: 30, other: 'val3' }, bar: '3' }, timeout30s: {
timeout60s: { features: { compileTimeout: 60, other: 'val4' }, bar: '4' }, features: { compileTimeout: 30, other: 'val3' },
timeout120s: { features: { compileTimeout: 120, other: 'val5' }, bar: '5' }, bar: '3',
timeout180s: { features: { compileTimeout: 180, other: 'val6' }, bar: '6' }, featuresUpdatedAt: new Date('2025-01-01'),
},
timeout60s: {
features: { compileTimeout: 60, other: 'val4' },
bar: '4',
featuresUpdatedAt: new Date(),
},
timeout120s: {
features: { compileTimeout: 120, other: 'val5' },
bar: '5',
},
timeout180s: {
features: { compileTimeout: 180, other: 'val6' },
bar: '6',
featuresUpdatedAt: new Date('2020-01-01'),
},
} }
const usersKeys = Object.keys(usersInput) const usersKeys = Object.keys(usersInput)
@ -44,6 +94,10 @@ describe('MigrateUserFeatureTimeoutTests', function () {
) )
}) })
afterEach('clear users', async function () {
await db.users.deleteMany({})
})
it('gives correct counts in dry mode', async function () { it('gives correct counts in dry mode', async function () {
const users = await db.users.find().toArray() const users = await db.users.find().toArray()
expect(users).to.have.lengthOf(usersKeys.length) expect(users).to.have.lengthOf(usersKeys.length)
@ -56,7 +110,9 @@ describe('MigrateUserFeatureTimeoutTests', function () {
expect(result.stdout).to.contain( expect(result.stdout).to.contain(
'Found 3 users with compileTimeout <= 60s && != 20s' 'Found 3 users with compileTimeout <= 60s && != 20s'
) )
expect(result.stdout).to.contain('Found 1 users with compileTimeout == 20s') expect(result.stdout).to.contain(
'Found 1 users with compileTimeout == 20s'
)
expect(result.stdout).not.to.contain('Updated') expect(result.stdout).not.to.contain('Updated')
const usersAfter = await db.users.find().toArray() const usersAfter = await db.users.find().toArray()
@ -72,12 +128,16 @@ describe('MigrateUserFeatureTimeoutTests', function () {
expect(result.stdout).to.contain( expect(result.stdout).to.contain(
'Found 3 users with compileTimeout <= 60s && != 20s' 'Found 3 users with compileTimeout <= 60s && != 20s'
) )
expect(result.stdout).to.contain('Found 1 users with compileTimeout == 20s') expect(result.stdout).to.contain(
'Found 1 users with compileTimeout == 20s'
)
expect(result.stdout).to.contain('Updated 3 records') expect(result.stdout).to.contain('Updated 3 records')
const usersAfter = await db.users.find().toArray() const usersAfter = await db.users.find().toArray()
expect(usersAfter).to.deep.equal([ expect(
usersAfter.map(({ featuresUpdatedAt, ...rest }) => rest)
).to.deep.equal([
{ _id: userIds.noFeatures, email: 'noFeatures@example.com' }, { _id: userIds.noFeatures, email: 'noFeatures@example.com' },
{ {
_id: userIds.noFeatureTimeout, _id: userIds.noFeatureTimeout,
@ -122,6 +182,14 @@ describe('MigrateUserFeatureTimeoutTests', function () {
}, },
]) ])
expect(usersAfter[0].featuresUpdatedAt).to.be.undefined
expect(usersAfter[1].featuresUpdatedAt).to.be.undefined
expect(usersAfter[2].featuresUpdatedAt).to.be.instanceOf(Date)
expect(usersAfter[3].featuresUpdatedAt).to.be.undefined // was already 20s
expect(usersAfter[4].featuresUpdatedAt).to.be.instanceOf(Date)
expect(usersAfter[5].featuresUpdatedAt).to.be.instanceOf(Date)
expect(usersAfter[6].featuresUpdatedAt).to.be.undefined
const result2 = await runScript([]) const result2 = await runScript([])
expect(result2.stdout).to.contain( expect(result2.stdout).to.contain(
@ -132,3 +200,248 @@ describe('MigrateUserFeatureTimeoutTests', function () {
) )
}) })
}) })
const FEATURES_UPDATED_AT = new Date('2024-04-16T12:41:00Z')
// Integration tests for the featuresUpdatedAt backfill fixup script.
// Fixtures cover 20s users (eligible) and 240s users (ineligible), each with
// no timestamp, a pre-migration timestamp, and a post-migration timestamp.
describe('fixup script', function () {
  const usersInput = {
    // no featuresUpdatedAt: should be backfilled
    timeout20s1: {
      features: { compileTimeout: 20 },
    },
    // stale (pre-migration) featuresUpdatedAt: should be overwritten
    timeout20s2: {
      features: { compileTimeout: 20 },
      featuresUpdatedAt: new Date('2023-01-01'),
    },
    // newer than the migration timestamp: should be left alone
    timeout20s3: {
      features: { compileTimeout: 20 },
      featuresUpdatedAt: new Date('2025-01-01'),
    },
    // 240s users are out of scope regardless of their timestamp
    timeout240s1: {
      features: { compileTimeout: 240 },
    },
    timeout240s2: {
      features: { compileTimeout: 240 },
      featuresUpdatedAt: new Date('2023-01-01'),
    },
    timeout240s3: {
      features: { compileTimeout: 240 },
      featuresUpdatedAt: new Date('2025-01-01'),
    },
  }
  const usersKeys = Object.keys(usersInput)
  // fixture key -> inserted Mongo _id, filled in by beforeEach
  const userIds = {}
  beforeEach('insert users', async function () {
    const usersInsertedValues = await db.users.insertMany(
      usersKeys.map(key => ({
        ...usersInput[key],
        email: `${key}@example.com`,
      }))
    )
    usersKeys.forEach(
      (key, index) => (userIds[key] = usersInsertedValues.insertedIds[index])
    )
  })
  afterEach('clear users', async function () {
    await db.users.deleteMany({})
  })
  it('gives correct counts in dry mode', async function () {
    const users = await db.users.find().toArray()
    expect(users).to.have.lengthOf(usersKeys.length)
    const result = await runFixupScript([])
    expect(result.stderr).to.contain(
      'Doing dry run. Add --commit to commit changes'
    )
    expect(result.stdout).to.contain(
      'Found 2 users needing their featuresUpdatedAt updated'
    )
    expect(result.stdout).not.to.contain('Updated 2 records')
    // a dry run must not modify any document
    const usersAfter = await db.users.find().toArray()
    expect(usersAfter).to.deep.equal(users)
  })
  it("updates users featuresUpdatedAt when '--commit' is set", async function () {
    const users = await db.users.find().toArray()
    expect(users).to.have.lengthOf(usersKeys.length)
    const result = await runFixupScript(['--commit'])
    expect(result.stdout).to.contain(
      'Found 2 users needing their featuresUpdatedAt updated'
    )
    expect(result.stdout).to.contain('Updated 2 records')
    // deep-equal relies on insertion order being preserved by find()
    const usersAfter = await db.users.find().toArray()
    expect(usersAfter).to.deep.equal([
      {
        _id: userIds.timeout20s1,
        email: 'timeout20s1@example.com',
        features: { compileTimeout: 20 },
        featuresUpdatedAt: FEATURES_UPDATED_AT,
      },
      {
        _id: userIds.timeout20s2,
        email: 'timeout20s2@example.com',
        features: { compileTimeout: 20 },
        featuresUpdatedAt: FEATURES_UPDATED_AT,
      },
      {
        _id: userIds.timeout20s3,
        email: 'timeout20s3@example.com',
        features: { compileTimeout: 20 },
        featuresUpdatedAt: new Date('2025-01-01'),
      },
      {
        _id: userIds.timeout240s1,
        email: 'timeout240s1@example.com',
        features: { compileTimeout: 240 },
      },
      {
        _id: userIds.timeout240s2,
        email: 'timeout240s2@example.com',
        features: { compileTimeout: 240 },
        featuresUpdatedAt: new Date('2023-01-01'),
      },
      {
        _id: userIds.timeout240s3,
        email: 'timeout240s3@example.com',
        features: { compileTimeout: 240 },
        featuresUpdatedAt: new Date('2025-01-01'),
      },
    ])
    // a second run should find nothing left to fix (idempotence)
    const result2 = await runFixupScript([])
    expect(result2.stdout).to.contain(
      'Found 0 users needing their featuresUpdatedAt updated'
    )
  })
})
// Integration tests for the second fixup script, which removes the backfilled
// featuresUpdatedAt from users who signed up after the migration timestamp
// (they were stamped incorrectly by the first fixup).
describe('fixup recent users', function () {
  const usersInput = {
    timeout20sNewerUser: {
      features: { compileTimeout: 20 },
      signUpDate: new Date('2026-01-01'),
    },
    // only this user should get updated
    timeout20sNewUser: {
      features: { compileTimeout: 20 },
      signUpDate: new Date('2025-01-01'),
      featuresUpdatedAt: FEATURES_UPDATED_AT,
    },
    timeout20sOldUser: {
      features: { compileTimeout: 20 },
      signUpDate: new Date('2023-01-01'),
      featuresUpdatedAt: FEATURES_UPDATED_AT,
    },
    timeout240sNewerUser: {
      features: { compileTimeout: 240 },
      signUpDate: new Date('2026-01-01'),
    },
    // We didn't produce such mismatch (featuresUpdatedAt < signUpDate) on premium users.
    // But we should still test that the script doesn't update them.
    timeout240sNewUser: {
      features: { compileTimeout: 240 },
      signUpDate: new Date('2025-01-01'),
      featuresUpdatedAt: FEATURES_UPDATED_AT,
    },
    timeout240sOldUser: {
      features: { compileTimeout: 240 },
      signUpDate: new Date('2023-01-01'),
      featuresUpdatedAt: FEATURES_UPDATED_AT,
    },
  }
  const usersKeys = Object.keys(usersInput)
  // fixture key -> inserted Mongo _id, filled in by beforeEach
  const userIds = {}
  beforeEach('insert users', async function () {
    const usersInsertedValues = await db.users.insertMany(
      usersKeys.map(key => ({
        ...usersInput[key],
        email: `${key}@example.com`,
      }))
    )
    usersKeys.forEach(
      (key, index) => (userIds[key] = usersInsertedValues.insertedIds[index])
    )
  })
  afterEach('clear users', async function () {
    await db.users.deleteMany({})
  })
  it('gives correct counts in dry mode', async function () {
    const users = await db.users.find().toArray()
    expect(users).to.have.lengthOf(usersKeys.length)
    const result = await runSecondFixupScript([])
    expect(result.stderr).to.contain(
      'Doing dry run. Add --commit to commit changes'
    )
    expect(result.stdout).to.contain(
      'Found 1 users needing their featuresUpdatedAt removed'
    )
    expect(result.stdout).not.to.contain('Updated 1 records')
    // a dry run must not modify any document
    const usersAfter = await db.users.find().toArray()
    expect(usersAfter).to.deep.equal(users)
  })
  it("removes users featuresUpdatedAt when '--commit' is set", async function () {
    const users = await db.users.find().toArray()
    expect(users).to.have.lengthOf(usersKeys.length)
    const result = await runSecondFixupScript(['--commit'])
    expect(result.stdout).to.contain(
      'Found 1 users needing their featuresUpdatedAt removed'
    )
    expect(result.stdout).to.contain('Updated 1 records')
    // only timeout20sNewUser loses its featuresUpdatedAt; everyone else is untouched
    const usersAfter = await db.users.find().toArray()
    expect(usersAfter).to.deep.equal([
      {
        _id: userIds.timeout20sNewerUser,
        email: 'timeout20sNewerUser@example.com',
        features: { compileTimeout: 20 },
        signUpDate: new Date('2026-01-01'),
      },
      {
        _id: userIds.timeout20sNewUser,
        email: 'timeout20sNewUser@example.com',
        features: { compileTimeout: 20 },
        signUpDate: new Date('2025-01-01'),
      },
      {
        _id: userIds.timeout20sOldUser,
        email: 'timeout20sOldUser@example.com',
        features: { compileTimeout: 20 },
        featuresUpdatedAt: FEATURES_UPDATED_AT,
        signUpDate: new Date('2023-01-01'),
      },
      {
        _id: userIds.timeout240sNewerUser,
        email: 'timeout240sNewerUser@example.com',
        features: { compileTimeout: 240 },
        signUpDate: new Date('2026-01-01'),
      },
      {
        _id: userIds.timeout240sNewUser,
        email: 'timeout240sNewUser@example.com',
        features: { compileTimeout: 240 },
        featuresUpdatedAt: FEATURES_UPDATED_AT,
        signUpDate: new Date('2025-01-01'),
      },
      {
        _id: userIds.timeout240sOldUser,
        email: 'timeout240sOldUser@example.com',
        features: { compileTimeout: 240 },
        featuresUpdatedAt: FEATURES_UPDATED_AT,
        signUpDate: new Date('2023-01-01'),
      },
    ])
    // a second run should find nothing left to fix (idempotence)
    const result2 = await runSecondFixupScript([])
    expect(result2.stdout).to.contain(
      'Found 0 users needing their featuresUpdatedAt removed'
    )
  })
})
})

View file

@ -29,18 +29,21 @@ describe('CompileManager', function () {
rateLimit: { autoCompile: {} }, rateLimit: { autoCompile: {} },
}), }),
'../../infrastructure/RedisWrapper': { '../../infrastructure/RedisWrapper': {
client: () => (this.rclient = { auth() {} }), client: () =>
(this.rclient = {
auth() {},
}),
}, },
'../Project/ProjectRootDocManager': (this.ProjectRootDocManager = {}), '../Project/ProjectRootDocManager': (this.ProjectRootDocManager = {
'../Project/ProjectGetter': (this.ProjectGetter = {}), promises: {},
'../User/UserGetter': (this.UserGetter = {}), }),
'./ClsiManager': (this.ClsiManager = {}), '../Project/ProjectGetter': (this.ProjectGetter = { promises: {} }),
'../User/UserGetter': (this.UserGetter = { promises: {} }),
'./ClsiManager': (this.ClsiManager = { promises: {} }),
'../../infrastructure/RateLimiter': this.RateLimiter, '../../infrastructure/RateLimiter': this.RateLimiter,
'@overleaf/metrics': this.Metrics, '@overleaf/metrics': this.Metrics,
'../Analytics/UserAnalyticsIdCache': (this.UserAnalyticsIdCache = { '../Analytics/UserAnalyticsIdCache': (this.UserAnalyticsIdCache = {
callbacks: { get: sinon.stub().resolves('abc'),
get: sinon.stub().yields(null, 'abc'),
},
}), }),
}, },
}) })
@ -57,36 +60,42 @@ describe('CompileManager', function () {
beforeEach(function () { beforeEach(function () {
this.CompileManager._checkIfRecentlyCompiled = sinon this.CompileManager._checkIfRecentlyCompiled = sinon
.stub() .stub()
.callsArgWith(2, null, false) .resolves(false)
this.ProjectRootDocManager.ensureRootDocumentIsSet = sinon this.ProjectRootDocManager.promises.ensureRootDocumentIsSet = sinon
.stub() .stub()
.callsArgWith(1, null) .resolves()
this.CompileManager.getProjectCompileLimits = sinon this.CompileManager.promises.getProjectCompileLimits = sinon
.stub() .stub()
.callsArgWith(1, null, this.limits) .resolves(this.limits)
this.ClsiManager.sendRequest = sinon this.ClsiManager.promises.sendRequest = sinon.stub().resolves({
.stub() status: (this.status = 'mock-status'),
.callsArgWith( outputFiles: (this.outputFiles = []),
3, clsiServerId: (this.output = 'mock output'),
null, })
(this.status = 'mock-status'),
(this.outputFiles = 'mock output files'),
(this.output = 'mock output')
)
}) })
describe('succesfully', function () { describe('succesfully', function () {
beforeEach(function () { let result
this.CompileManager._checkIfAutoCompileLimitHasBeenHit = ( beforeEach(async function () {
this.CompileManager._checkIfAutoCompileLimitHasBeenHit = async (
isAutoCompile, isAutoCompile,
compileGroup, compileGroup
cb ) => true
) => cb(null, true) this.ProjectGetter.promises.getProject = sinon
this.CompileManager.compile( .stub()
.resolves(
(this.project = { owner_ref: (this.owner_id = 'owner-id-123') })
)
this.UserGetter.promises.getUser = sinon.stub().resolves(
(this.user = {
features: { compileTimeout: '20s', compileGroup: 'standard' },
analyticsId: 'abc',
})
)
result = await this.CompileManager.promises.compile(
this.project_id, this.project_id,
this.user_id, this.user_id,
{}, {}
this.callback
) )
}) })
@ -97,19 +106,19 @@ describe('CompileManager', function () {
}) })
it('should ensure that the root document is set', function () { it('should ensure that the root document is set', function () {
this.ProjectRootDocManager.ensureRootDocumentIsSet this.ProjectRootDocManager.promises.ensureRootDocumentIsSet
.calledWith(this.project_id) .calledWith(this.project_id)
.should.equal(true) .should.equal(true)
}) })
it('should get the project compile limits', function () { it('should get the project compile limits', function () {
this.CompileManager.getProjectCompileLimits this.CompileManager.promises.getProjectCompileLimits
.calledWith(this.project_id) .calledWith(this.project_id)
.should.equal(true) .should.equal(true)
}) })
it('should run the compile with the compile limits', function () { it('should run the compile with the compile limits', function () {
this.ClsiManager.sendRequest this.ClsiManager.promises.sendRequest
.calledWith(this.project_id, this.user_id, { .calledWith(this.project_id, this.user_id, {
timeout: this.limits.timeout, timeout: this.limits.timeout,
compileGroup: 'standard', compileGroup: 'standard',
@ -117,10 +126,10 @@ describe('CompileManager', function () {
.should.equal(true) .should.equal(true)
}) })
it('should call the callback with the output', function () { it('should resolve with the output', function () {
this.callback expect(result).to.haveOwnProperty('status', this.status)
.calledWith(null, this.status, this.outputFiles, this.output) expect(result).to.haveOwnProperty('clsiServerId', this.output)
.should.equal(true) expect(result).to.haveOwnProperty('outputFiles', this.outputFiles)
}) })
it('should time the compile', function () { it('should time the compile', function () {
@ -130,26 +139,24 @@ describe('CompileManager', function () {
describe('when the project has been recently compiled', function () { describe('when the project has been recently compiled', function () {
it('should return', function (done) { it('should return', function (done) {
this.CompileManager._checkIfAutoCompileLimitHasBeenHit = ( this.CompileManager._checkIfAutoCompileLimitHasBeenHit = async (
isAutoCompile, isAutoCompile,
compileGroup, compileGroup
cb ) => true
) => cb(null, true)
this.CompileManager._checkIfRecentlyCompiled = sinon this.CompileManager._checkIfRecentlyCompiled = sinon
.stub() .stub()
.callsArgWith(2, null, true) .resolves(true)
this.CompileManager.compile( this.CompileManager.promises
this.project_id, .compile(this.project_id, this.user_id, {})
this.user_id, .then(({ status }) => {
{},
(err, status) => {
if (err) {
return done(err)
}
status.should.equal('too-recently-compiled') status.should.equal('too-recently-compiled')
done() done()
} })
) .catch(error => {
// Catch any errors and fail the test
true.should.equal(false)
done(error)
})
}) })
}) })
@ -157,60 +164,51 @@ describe('CompileManager', function () {
it('should return', function (done) { it('should return', function (done) {
this.CompileManager._checkIfAutoCompileLimitHasBeenHit = sinon this.CompileManager._checkIfAutoCompileLimitHasBeenHit = sinon
.stub() .stub()
.callsArgWith(2, null, false) .resolves(false)
this.CompileManager.compile( this.CompileManager.promises
this.project_id, .compile(this.project_id, this.user_id, {})
this.user_id, .then(({ status }) => {
{}, expect(status).to.equal('autocompile-backoff')
(err, status) => {
if (err) {
return done(err)
}
status.should.equal('autocompile-backoff')
done() done()
} })
) .catch(err => done(err))
}) })
}) })
}) })
describe('getProjectCompileLimits', function () { describe('getProjectCompileLimits', function () {
beforeEach(function (done) { beforeEach(async function () {
this.features = { this.features = {
compileTimeout: (this.timeout = 42), compileTimeout: (this.timeout = 42),
compileGroup: (this.group = 'priority'), compileGroup: (this.group = 'priority'),
} }
this.ProjectGetter.getProject = sinon this.ProjectGetter.promises.getProject = sinon
.stub() .stub()
.callsArgWith( .resolves(
2,
null,
(this.project = { owner_ref: (this.owner_id = 'owner-id-123') }) (this.project = { owner_ref: (this.owner_id = 'owner-id-123') })
) )
this.UserGetter.getUser = sinon this.UserGetter.promises.getUser = sinon
.stub() .stub()
.callsArgWith( .resolves((this.user = { features: this.features, analyticsId: 'abc' }))
2, try {
null, const result =
(this.user = { features: this.features, analyticsId: 'abc' }) await this.CompileManager.promises.getProjectCompileLimits(
this.project_id
) )
this.CompileManager.getProjectCompileLimits( this.callback(null, result)
this.project_id, } catch (error) {
(err, res) => { this.callback(error)
this.callback(err, res)
done()
} }
)
}) })
it('should look up the owner of the project', function () { it('should look up the owner of the project', function () {
this.ProjectGetter.getProject this.ProjectGetter.promises.getProject
.calledWith(this.project_id, { owner_ref: 1 }) .calledWith(this.project_id, { owner_ref: 1 })
.should.equal(true) .should.equal(true)
}) })
it("should look up the owner's features", function () { it("should look up the owner's features", function () {
this.UserGetter.getUser this.UserGetter.promises.getUser
.calledWith(this.project.owner_ref, { .calledWith(this.project.owner_ref, {
_id: 1, _id: 1,
alphaProgram: 1, alphaProgram: 1,
@ -239,12 +237,12 @@ describe('CompileManager', function () {
compileTimeout: 42, compileTimeout: 42,
compileGroup: 'standard', compileGroup: 'standard',
} }
this.ProjectGetter.getProject = sinon this.ProjectGetter.promises.getProject = sinon
.stub() .stub()
.yields(null, { owner_ref: 'owner-id-123' }) .resolves({ owner_ref: 'owner-id-123' })
this.UserGetter.getUser = sinon this.UserGetter.promises.getUser = sinon
.stub() .stub()
.yields(null, { features: this.features, analyticsId: 'abc' }) .resolves({ features: this.features, analyticsId: 'abc' })
}) })
describe('with priority compile', function () { describe('with priority compile', function () {
@ -265,47 +263,45 @@ describe('CompileManager', function () {
}) })
describe('deleteAuxFiles', function () { describe('deleteAuxFiles', function () {
beforeEach(function () { let result
this.CompileManager.getProjectCompileLimits = sinon
beforeEach(async function () {
this.CompileManager.promises.getProjectCompileLimits = sinon
.stub() .stub()
.callsArgWith( .resolves((this.limits = { compileGroup: 'mock-compile-group' }))
1, this.ClsiManager.promises.deleteAuxFiles = sinon.stub().resolves('test')
null, result = await this.CompileManager.promises.deleteAuxFiles(
(this.limits = { compileGroup: 'mock-compile-group' })
)
this.ClsiManager.deleteAuxFiles = sinon.stub().callsArg(3)
this.CompileManager.deleteAuxFiles(
this.project_id, this.project_id,
this.user_id, this.user_id
this.callback
) )
}) })
it('should look up the compile group to use', function () { it('should look up the compile group to use', function () {
this.CompileManager.getProjectCompileLimits this.CompileManager.promises.getProjectCompileLimits
.calledWith(this.project_id) .calledWith(this.project_id)
.should.equal(true) .should.equal(true)
}) })
it('should delete the aux files', function () { it('should delete the aux files', function () {
this.ClsiManager.deleteAuxFiles this.ClsiManager.promises.deleteAuxFiles
.calledWith(this.project_id, this.user_id, this.limits) .calledWith(this.project_id, this.user_id, this.limits)
.should.equal(true) .should.equal(true)
}) })
it('should call the callback', function () { it('should resolve', function () {
this.callback.called.should.equal(true) expect(result).not.to.be.undefined
}) })
}) })
describe('_checkIfRecentlyCompiled', function () { describe('_checkIfRecentlyCompiled', function () {
describe('when the key exists in redis', function () { describe('when the key exists in redis', function () {
beforeEach(function () { let result
this.rclient.set = sinon.stub().callsArgWith(5, null, null)
this.CompileManager._checkIfRecentlyCompiled( beforeEach(async function () {
this.rclient.set = sinon.stub().resolves(null)
result = await this.CompileManager._checkIfRecentlyCompiled(
this.project_id, this.project_id,
this.user_id, this.user_id
this.callback
) )
}) })
@ -321,18 +317,19 @@ describe('CompileManager', function () {
.should.equal(true) .should.equal(true)
}) })
it('should call the callback with true', function () { it('should resolve with true', function () {
this.callback.calledWith(null, true).should.equal(true) result.should.equal(true)
}) })
}) })
describe('when the key does not exist in redis', function () { describe('when the key does not exist in redis', function () {
beforeEach(function () { let result
this.rclient.set = sinon.stub().callsArgWith(5, null, 'OK')
this.CompileManager._checkIfRecentlyCompiled( beforeEach(async function () {
this.rclient.set = sinon.stub().resolves('OK')
result = await this.CompileManager._checkIfRecentlyCompiled(
this.project_id, this.project_id,
this.user_id, this.user_id
this.callback
) )
}) })
@ -348,105 +345,86 @@ describe('CompileManager', function () {
.should.equal(true) .should.equal(true)
}) })
it('should call the callback with false', function () { it('should resolve with false', function () {
this.callback.calledWith(null, false).should.equal(true) result.should.equal(false)
}) })
}) })
}) })
describe('_checkIfAutoCompileLimitHasBeenHit', function () { describe('_checkIfAutoCompileLimitHasBeenHit', function () {
it('should be able to compile if it is not an autocompile', function (done) { it('should be able to compile if it is not an autocompile', async function () {
this.CompileManager._checkIfAutoCompileLimitHasBeenHit( const canCompile =
await this.CompileManager._checkIfAutoCompileLimitHasBeenHit(
false, false,
'everyone', 'everyone'
(err, canCompile) => {
if (err) {
return done(err)
}
canCompile.should.equal(true)
done()
}
) )
expect(canCompile).to.equal(true)
}) })
it('should be able to compile if rate limit has remaining', function (done) { it('should be able to compile if rate limit has remaining', async function () {
this.CompileManager._checkIfAutoCompileLimitHasBeenHit( const canCompile =
await this.CompileManager._checkIfAutoCompileLimitHasBeenHit(
true, true,
'everyone', 'everyone'
(err, canCompile) => { )
if (err) {
return done(err)
}
expect(this.rateLimiter.consume).to.have.been.calledWith('global') expect(this.rateLimiter.consume).to.have.been.calledWith('global')
canCompile.should.equal(true) expect(canCompile).to.equal(true)
done()
}
)
}) })
it('should be not able to compile if rate limit has no remianing', function (done) { it('should be not able to compile if rate limit has no remianing', async function () {
this.rateLimiter.consume.rejects({ remainingPoints: 0 }) this.rateLimiter.consume.rejects({ remainingPoints: 0 })
this.CompileManager._checkIfAutoCompileLimitHasBeenHit( const canCompile =
await this.CompileManager._checkIfAutoCompileLimitHasBeenHit(
true, true,
'everyone', 'everyone'
(err, canCompile) => {
if (err) {
return done(err)
}
canCompile.should.equal(false)
done()
}
) )
expect(canCompile).to.equal(false)
}) })
it('should return false if there is an error in the rate limit', function (done) { it('should return false if there is an error in the rate limit', async function () {
this.rateLimiter.consume.rejects(new Error('BOOM!')) this.rateLimiter.consume.rejects(new Error('BOOM!'))
this.CompileManager._checkIfAutoCompileLimitHasBeenHit( const canCompile =
await this.CompileManager._checkIfAutoCompileLimitHasBeenHit(
true, true,
'everyone', 'everyone'
(err, canCompile) => {
if (err) {
return done(err)
}
canCompile.should.equal(false)
done()
}
) )
expect(canCompile).to.equal(false)
}) })
}) })
describe('wordCount', function () { describe('wordCount', function () {
beforeEach(function () { let result
this.CompileManager.getProjectCompileLimits = sinon const wordCount = 1
beforeEach(async function () {
this.CompileManager.promises.getProjectCompileLimits = sinon
.stub() .stub()
.callsArgWith( .resolves((this.limits = { compileGroup: 'mock-compile-group' }))
1, this.ClsiManager.promises.wordCount = sinon.stub().resolves(wordCount)
null, result = await this.CompileManager.promises.wordCount(
(this.limits = { compileGroup: 'mock-compile-group' })
)
this.ClsiManager.wordCount = sinon.stub().callsArg(4)
this.CompileManager.wordCount(
this.project_id, this.project_id,
this.user_id, this.user_id,
false, false
this.callback
) )
}) })
it('should look up the compile group to use', function () { it('should look up the compile group to use', function () {
this.CompileManager.getProjectCompileLimits this.CompileManager.promises.getProjectCompileLimits
.calledWith(this.project_id) .calledWith(this.project_id)
.should.equal(true) .should.equal(true)
}) })
it('should call wordCount for project', function () { it('should call wordCount for project', function () {
this.ClsiManager.wordCount this.ClsiManager.promises.wordCount
.calledWith(this.project_id, this.user_id, false, this.limits) .calledWith(this.project_id, this.user_id, false, this.limits)
.should.equal(true) .should.equal(true)
}) })
it('should call the callback', function () { it('should resolve with the wordCount from the ClsiManager', function () {
this.callback.called.should.equal(true) expect(result).to.equal(wordCount)
}) })
}) })
}) })