Merge branch 'overleaf:main' into main

Damian Fajfer 2024-04-30 22:29:40 +02:00, committed by GitHub
commit 4a94693c67
GPG key ID: B5690EEEBB952194 (no known key found for this signature in the database)
11 changed files with 918 additions and 509 deletions

package-lock.json generated

@@ -41549,7 +41549,7 @@
"url-loader": "^4.1.1",
"webpack": "^5.81.0",
"webpack-cli": "^5.0.2",
"webpack-dev-middleware": "^6.0.2",
"webpack-dev-middleware": "^6.1.2",
"webpack-hot-middleware": "^2.25.1",
"webpack-manifest-plugin": "^5.0.0"
},
@@ -42612,9 +42612,9 @@
}
},
"services/latexqc/node_modules/webpack-dev-middleware": {
"version": "6.1.1",
"resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-6.1.1.tgz",
"integrity": "sha512-y51HrHaFeeWir0YO4f0g+9GwZawuigzcAdRNon6jErXy/SqV/+O6eaVAzDqE6t3e3NpGeR5CS+cCDaTC+V3yEQ==",
"version": "6.1.2",
"resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-6.1.2.tgz",
"integrity": "sha512-Wu+EHmX326YPYUpQLKmKbTyZZJIB8/n6R09pTmB03kJmnMsVPTo9COzHZFr01txwaCAuZvfBJE4ZCHRcKs5JaQ==",
"dev": true,
"dependencies": {
"colorette": "^2.0.10",
@@ -67255,7 +67255,7 @@
"url-loader": "^4.1.1",
"webpack": "^5.81.0",
"webpack-cli": "^5.0.2",
"webpack-dev-middleware": "^6.0.2",
"webpack-dev-middleware": "^6.1.2",
"webpack-hot-middleware": "^2.25.1",
"webpack-manifest-plugin": "^5.0.0"
},
@@ -68067,9 +68067,9 @@
}
},
"webpack-dev-middleware": {
"version": "6.1.1",
"resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-6.1.1.tgz",
"integrity": "sha512-y51HrHaFeeWir0YO4f0g+9GwZawuigzcAdRNon6jErXy/SqV/+O6eaVAzDqE6t3e3NpGeR5CS+cCDaTC+V3yEQ==",
"version": "6.1.2",
"resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-6.1.2.tgz",
"integrity": "sha512-Wu+EHmX326YPYUpQLKmKbTyZZJIB8/n6R09pTmB03kJmnMsVPTo9COzHZFr01txwaCAuZvfBJE4ZCHRcKs5JaQ==",
"dev": true,
"requires": {
"colorette": "^2.0.10",


@@ -80,6 +80,13 @@ COPY server-ce/config/custom-environment-variables.json /overleaf/services/histo
ADD server-ce/bin/grunt /usr/local/bin/grunt
RUN chmod +x /usr/local/bin/grunt
# Copy history helper scripts
# ---------------------------
ADD server-ce/bin/flush-history-queues /overleaf/bin/flush-history-queues
RUN chmod +x /overleaf/bin/flush-history-queues
ADD server-ce/bin/force-history-resyncs /overleaf/bin/force-history-resyncs
RUN chmod +x /overleaf/bin/force-history-resyncs
# File that controls open|closed status of the site
# -------------------------------------------------
ENV SITE_MAINTENANCE_FILE "/etc/overleaf/site_status"


@@ -0,0 +1,8 @@
#!/bin/bash
set -euo pipefail
source /etc/container_environment.sh
source /etc/overleaf/env.sh
cd /overleaf/services/project-history
node scripts/flush_all.js 100000


@@ -0,0 +1,8 @@
#!/bin/bash
set -euo pipefail
source /etc/container_environment.sh
source /etc/overleaf/env.sh
cd /overleaf/services/project-history
node scripts/force_resync.js 1000 force


@@ -170,10 +170,11 @@ _mocks._countAndProcessUpdates = (
_processUpdatesBatch(projectId, updates, extendLock, cb)
},
error => {
if (error) {
return callback(error)
}
callback(null, queueSize)
// Unconventional callback signature. The caller needs the queue size
// even when an error is thrown in order to record the queue size in
// the projectHistoryFailures collection. We'll have to find another
// way to achieve this when we promisify.
callback(error, queueSize)
}
)
} else {
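
The comment above leaves the promisification question open: the caller needs the queue size even when the batch fails, so it can be recorded in the projectHistoryFailures collection. A minimal sketch of one possible approach (hypothetical, not part of this commit) is to attach the queue size to the error before rethrowing:

// Hypothetical sketch: once this path is promisified, the queue size could
// travel on the error instead of being passed as a second callback argument.
async function countAndProcessUpdates(projectId, extendLock, batchSize) {
  const { updates, queueSize } = await getPendingUpdates(projectId, batchSize) // assumed helper
  try {
    await processUpdatesBatch(projectId, updates, extendLock) // assumed helper
    return queueSize
  } catch (error) {
    error.queueSize = queueSize // the caller reads this before recording the failure
    throw error
  }
}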


@@ -9,81 +9,77 @@ const ClsiManager = require('./ClsiManager')
const Metrics = require('@overleaf/metrics')
const { RateLimiter } = require('../../infrastructure/RateLimiter')
const UserAnalyticsIdCache = require('../Analytics/UserAnalyticsIdCache')
const {
callbackify,
callbackifyMultiResult,
} = require('@overleaf/promise-utils')
module.exports = CompileManager = {
compile(projectId, userId, options = {}, _callback) {
const timer = new Metrics.Timer('editor.compile')
const callback = function (...args) {
function instrumentWithTimer(fn, key) {
return async (...args) => {
const timer = new Metrics.Timer(key)
try {
return await fn(...args)
} finally {
timer.done()
_callback(...args)
}
}
}
CompileManager._checkIfRecentlyCompiled(
async function compile(projectId, userId, options = {}) {
const recentlyCompiled = await CompileManager._checkIfRecentlyCompiled(
projectId,
userId,
function (error, recentlyCompiled) {
if (error) {
return callback(error)
}
userId
)
if (recentlyCompiled) {
return callback(null, 'too-recently-compiled', [])
return { status: 'too-recently-compiled', outputFiles: [] }
}
CompileManager._checkIfAutoCompileLimitHasBeenHit(
try {
const canCompile = await CompileManager._checkIfAutoCompileLimitHasBeenHit(
options.isAutoCompile,
'everyone',
function (err, canCompile) {
if (err || !canCompile) {
return callback(null, 'autocompile-backoff', [])
'everyone'
)
if (!canCompile) {
return { status: 'autocompile-backoff', outputFiles: [] }
}
} catch (error) {
return { status: 'autocompile-backoff', outputFiles: [] }
}
ProjectRootDocManager.ensureRootDocumentIsSet(
projectId,
function (error) {
if (error) {
return callback(error)
}
CompileManager.getProjectCompileLimits(
projectId,
function (error, limits) {
if (error) {
return callback(error)
}
await ProjectRootDocManager.promises.ensureRootDocumentIsSet(projectId)
const limits =
await CompileManager.promises.getProjectCompileLimits(projectId)
for (const key in limits) {
const value = limits[key]
options[key] = value
}
// Put a lower limit on autocompiles for free users, based on compileGroup
CompileManager._checkCompileGroupAutoCompileLimit(
try {
const canCompile = await CompileManager._checkCompileGroupAutoCompileLimit(
options.isAutoCompile,
limits.compileGroup,
function (err, canCompile) {
if (err || !canCompile) {
return callback(null, 'autocompile-backoff', [])
limits.compileGroup
)
if (!canCompile) {
return { status: 'autocompile-backoff', outputFiles: [] }
}
} catch (error) {
return { status: 'autocompile-backoff', outputFiles: [] }
}
// only pass userId down to clsi if this is a per-user compile
const compileAsUser = Settings.disablePerUserCompiles
? undefined
: userId
ClsiManager.sendRequest(
projectId,
compileAsUser,
options,
function (
error,
const compileAsUser = Settings.disablePerUserCompiles ? undefined : userId
const {
status,
outputFiles,
clsiServerId,
validationProblems,
stats,
timings,
outputUrlPrefix
) {
if (error) {
return callback(error)
}
callback(
null,
outputUrlPrefix,
} = await ClsiManager.promises.sendRequest(projectId, compileAsUser, options)
return {
status,
outputFiles,
clsiServerId,
@@ -91,172 +87,141 @@ module.exports = CompileManager = {
validationProblems,
stats,
timings,
outputUrlPrefix
)
outputUrlPrefix,
}
)
}
)
}
)
}
)
}
)
}
)
},
}
stopCompile(projectId, userId, callback) {
CompileManager.getProjectCompileLimits(projectId, function (error, limits) {
if (error) {
return callback(error)
}
ClsiManager.stopCompile(projectId, userId, limits, callback)
const instrumentedCompile = instrumentWithTimer(compile, 'editor.compile')
async function getProjectCompileLimits(projectId) {
const project = await ProjectGetter.promises.getProject(projectId, {
owner_ref: 1,
})
},
deleteAuxFiles(projectId, userId, clsiserverid, callback) {
CompileManager.getProjectCompileLimits(projectId, function (error, limits) {
if (error) {
return callback(error)
}
ClsiManager.deleteAuxFiles(
projectId,
userId,
limits,
clsiserverid,
callback
)
})
},
getProjectCompileLimits(projectId, callback) {
ProjectGetter.getProject(
projectId,
{ owner_ref: 1 },
function (error, project) {
if (error) {
return callback(error)
}
UserGetter.getUser(
project.owner_ref,
{
const owner = await UserGetter.promises.getUser(project.owner_ref, {
_id: 1,
alphaProgram: 1,
analyticsId: 1,
betaProgram: 1,
features: 1,
},
function (err, owner) {
if (err) {
return callback(err)
}
})
const ownerFeatures = (owner && owner.features) || {}
// put alpha users into their own compile group
if (owner && owner.alphaProgram) {
ownerFeatures.compileGroup = 'alpha'
}
UserAnalyticsIdCache.callbacks.get(
owner._id,
function (err, analyticsId) {
if (err) {
return callback(err)
}
const analyticsId = await UserAnalyticsIdCache.get(owner._id)
const compileGroup =
ownerFeatures.compileGroup ||
Settings.defaultFeatures.compileGroup
ownerFeatures.compileGroup || Settings.defaultFeatures.compileGroup
const limits = {
timeout:
ownerFeatures.compileTimeout ||
Settings.defaultFeatures.compileTimeout,
ownerFeatures.compileTimeout || Settings.defaultFeatures.compileTimeout,
compileGroup,
compileBackendClass:
compileGroup === 'standard' ? 'n2d' : 'c2d',
compileBackendClass: compileGroup === 'standard' ? 'n2d' : 'c2d',
ownerAnalyticsId: analyticsId,
}
callback(null, limits)
}
)
}
)
}
)
},
return limits
}
COMPILE_DELAY: 1, // seconds
_checkIfRecentlyCompiled(projectId, userId, callback) {
const key = `compile:${projectId}:${userId}`
rclient.set(
key,
true,
'EX',
this.COMPILE_DELAY,
'NX',
function (error, ok) {
if (error) {
return callback(error)
}
if (ok === 'OK') {
callback(null, false)
} else {
callback(null, true)
}
}
)
},
_checkCompileGroupAutoCompileLimit(isAutoCompile, compileGroup, callback) {
if (!isAutoCompile) {
return callback(null, true)
}
if (compileGroup === 'standard') {
// apply extra limits to the standard compile group
CompileManager._checkIfAutoCompileLimitHasBeenHit(
isAutoCompile,
compileGroup,
callback
)
} else {
Metrics.inc(`auto-compile-${compileGroup}`)
callback(null, true)
}
}, // always allow priority group users to compile
_checkIfAutoCompileLimitHasBeenHit(isAutoCompile, compileGroup, callback) {
if (!isAutoCompile) {
return callback(null, true)
}
Metrics.inc(`auto-compile-${compileGroup}`)
const rateLimiter = getAutoCompileRateLimiter(compileGroup)
rateLimiter
.consume('global', 1, { method: 'global' })
.then(() => {
callback(null, true)
})
.catch(() => {
// Don't differentiate between errors and rate limits. Silently trigger
// the rate limit if there's an error consuming the points.
Metrics.inc(`auto-compile-${compileGroup}-limited`)
callback(null, false)
})
},
wordCount(projectId, userId, file, clsiserverid, callback) {
CompileManager.getProjectCompileLimits(projectId, function (error, limits) {
if (error) {
return callback(error)
}
ClsiManager.wordCount(
async function wordCount(projectId, userId, file, clsiserverid) {
const limits =
await CompileManager.promises.getProjectCompileLimits(projectId)
return await ClsiManager.promises.wordCount(
projectId,
userId,
file,
limits,
clsiserverid,
callback
clsiserverid
)
})
}
async function stopCompile(projectId, userId) {
const limits =
await CompileManager.promises.getProjectCompileLimits(projectId)
return await ClsiManager.promises.stopCompile(projectId, userId, limits)
}
async function deleteAuxFiles(projectId, userId, clsiserverid) {
const limits =
await CompileManager.promises.getProjectCompileLimits(projectId)
return await ClsiManager.promises.deleteAuxFiles(
projectId,
userId,
limits,
clsiserverid
)
}
module.exports = CompileManager = {
promises: {
compile: instrumentedCompile,
deleteAuxFiles,
getProjectCompileLimits,
stopCompile,
wordCount,
},
compile: callbackifyMultiResult(instrumentedCompile, [
'status',
'outputFiles',
'clsiServerId',
'limits',
'validationProblems',
'stats',
'timings',
'outputUrlPrefix',
]),
stopCompile: callbackify(stopCompile),
deleteAuxFiles: callbackify(deleteAuxFiles),
getProjectCompileLimits: callbackify(getProjectCompileLimits),
COMPILE_DELAY: 1, // seconds
async _checkIfRecentlyCompiled(projectId, userId) {
const key = `compile:${projectId}:${userId}`
const ok = await rclient.set(key, true, 'EX', this.COMPILE_DELAY, 'NX')
return ok !== 'OK'
},
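// Note on the call above: SET key value 'EX' COMPILE_DELAY 'NX' doubles as a
// per-project, per-user debounce. The first compile creates the key (reply
// 'OK') and is allowed through; any compile within the next COMPILE_DELAY
// seconds sees NX fail (null reply), so this resolves true and the caller
// reports 'too-recently-compiled'.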
async _checkCompileGroupAutoCompileLimit(isAutoCompile, compileGroup) {
if (!isAutoCompile) {
return true
}
if (compileGroup === 'standard') {
// apply extra limits to the standard compile group
return await CompileManager._checkIfAutoCompileLimitHasBeenHit(
isAutoCompile,
compileGroup
)
} else {
Metrics.inc(`auto-compile-${compileGroup}`)
return true
}
}, // always allow priority group users to compile
async _checkIfAutoCompileLimitHasBeenHit(isAutoCompile, compileGroup) {
if (!isAutoCompile) {
return true
}
Metrics.inc(`auto-compile-${compileGroup}`)
const rateLimiter = getAutoCompileRateLimiter(compileGroup)
try {
await rateLimiter.consume('global', 1, { method: 'global' })
return true
} catch (e) {
// Don't differentiate between errors and rate limits. Silently trigger
// the rate limit if there's an error consuming the points.
Metrics.inc(`auto-compile-${compileGroup}-limited`)
return false
}
},
wordCount: callbackify(wordCount),
}
const autoCompileRateLimiters = new Map()
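
The legacy callback API is preserved by wrapping the new async functions: callbackify keeps single-result signatures, while callbackifyMultiResult spreads named properties of the resolved object back into positional callback arguments, matching the old callback(null, status, outputFiles, ...) shape. A rough illustration of that wrapper (a sketch of the behaviour, not the @overleaf/promise-utils source):

// Illustration only: roughly what a multi-result callbackify wrapper does.
function callbackifyMultiResultSketch(fn, resultNames) {
  return (...args) => {
    const callback = args.pop()
    fn(...args)
      .then(result => {
        // e.g. callback(null, status, outputFiles, clsiServerId, ...)
        callback(null, ...resultNames.map(key => result[key]))
      })
      .catch(callback)
  }
}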


@@ -34,7 +34,8 @@ const main = async ({ COMMIT, SKIP_COUNT }) => {
const nModified = await batchedUpdate(
'users',
{ 'features.compileTimeout': { $lte: 60, $ne: 20 } },
{ $set: { 'features.compileTimeout': 20 } }
// NOTE: Always update featuresUpdatedAt to ensure the user's features are synced with BigQuery
{ $set: { 'features.compileTimeout': 20, featuresUpdatedAt: new Date() } }
)
console.log(`Updated ${nModified} records`)
}
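
batchedUpdate walks the users collection in chunks, so the effective operation is the single update below (an equivalent one-shot form, shown for clarity rather than taken from the script):

// Equivalent single update; the script applies it in batches via batchedUpdate.
await db.users.updateMany(
  { 'features.compileTimeout': { $lte: 60, $ne: 20 } },
  // featuresUpdatedAt is bumped on every matched user so the change is picked
  // up by the BigQuery features sync.
  { $set: { 'features.compileTimeout': 20, featuresUpdatedAt: new Date() } }
)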


@@ -0,0 +1,65 @@
#!/usr/bin/env node
const minimist = require('minimist')
const {
db,
READ_PREFERENCE_SECONDARY,
waitForDb,
} = require('../app/src/infrastructure/mongodb')
const { batchedUpdate } = require('./helpers/batchedUpdate')
// A few seconds after the previous migration script was run
const FEATURES_UPDATED_AT = new Date('2024-04-16T12:41:00Z')
const query = {
'features.compileTimeout': 20,
$or: [
{ featuresUpdatedAt: { $exists: false } },
{ featuresUpdatedAt: { $lt: FEATURES_UPDATED_AT } },
],
}
async function logCount() {
const usersToUpdate = await db.users.countDocuments(query, {
readPreference: READ_PREFERENCE_SECONDARY,
})
console.log(
`Found ${usersToUpdate} users needing their featuresUpdatedAt updated`
)
}
const main = async ({ COMMIT, SKIP_COUNT }) => {
console.time('Script Duration')
await waitForDb()
if (!SKIP_COUNT) {
await logCount()
}
if (COMMIT) {
const nModified = await batchedUpdate('users', query, {
$set: { featuresUpdatedAt: FEATURES_UPDATED_AT },
})
console.log(`Updated ${nModified} records`)
}
console.timeEnd('Script Duration')
}
const setup = () => {
const argv = minimist(process.argv.slice(2))
const COMMIT = argv.commit !== undefined
const SKIP_COUNT = argv['skip-count'] !== undefined
if (!COMMIT) {
console.warn('Doing dry run. Add --commit to commit changes')
}
return { COMMIT, SKIP_COUNT }
}
main(setup())
.catch(err => {
console.error(err)
process.exit(1)
})
.then(() => process.exit(0))


@@ -0,0 +1,63 @@
#!/usr/bin/env node
const minimist = require('minimist')
const {
db,
READ_PREFERENCE_SECONDARY,
waitForDb,
} = require('../app/src/infrastructure/mongodb')
const { batchedUpdate } = require('./helpers/batchedUpdate')
// A few seconds after the previous migration script was run
const FEATURES_UPDATED_AT = new Date('2024-04-16T12:41:00Z')
const query = {
'features.compileTimeout': 20,
featuresUpdatedAt: FEATURES_UPDATED_AT,
signUpDate: { $gt: FEATURES_UPDATED_AT },
}
async function logCount() {
const usersToUpdate = await db.users.countDocuments(query, {
readPreference: READ_PREFERENCE_SECONDARY,
})
console.log(
`Found ${usersToUpdate} users needing their featuresUpdatedAt removed`
)
}
const main = async ({ COMMIT, SKIP_COUNT }) => {
console.time('Script Duration')
await waitForDb()
if (!SKIP_COUNT) {
await logCount()
}
if (COMMIT) {
const nModified = await batchedUpdate('users', query, {
$unset: { featuresUpdatedAt: 1 },
})
console.log(`Updated ${nModified} records`)
}
console.timeEnd('Script Duration')
}
const setup = () => {
const argv = minimist(process.argv.slice(2))
const COMMIT = argv.commit !== undefined
const SKIP_COUNT = argv['skip-count'] !== undefined
if (!COMMIT) {
console.warn('Doing dry run. Add --commit to commit changes')
}
return { COMMIT, SKIP_COUNT }
}
main(setup())
.catch(err => {
console.error(err)
process.exit(1)
})
.then(() => process.exit(0))


@@ -17,16 +17,66 @@ async function runScript(args = []) {
}
}
async function runFixupScript(args = []) {
try {
return await promisify(exec)(
[
'node',
'scripts/migration_compile_timeout_60s_to_20s_fixup_features_updated_at.js',
...args,
].join(' ')
)
} catch (error) {
logger.error({ error }, 'script failed')
throw error
}
}
async function runSecondFixupScript(args = []) {
try {
return await promisify(exec)(
[
'node',
'scripts/migration_compile_timeout_60s_to_20s_fixup_new_users.js',
...args,
].join(' ')
)
} catch (error) {
logger.error({ error }, 'script failed')
throw error
}
}
describe('MigrateUserFeatureTimeoutTests', function () {
describe('initial script', function () {
const usersInput = {
noFeatures: {},
noFeatureTimeout: { features: {} },
timeout10s: { features: { compileTimeout: 10, other: 'val1' }, bar: '1' },
timeout10s: {
features: { compileTimeout: 10, other: 'val1' },
bar: '1',
featuresUpdatedAt: new Date('2020-01-01'),
},
timeout20s: { features: { compileTimeout: 20, other: 'val2' }, bar: '2' },
timeout30s: { features: { compileTimeout: 30, other: 'val3' }, bar: '3' },
timeout60s: { features: { compileTimeout: 60, other: 'val4' }, bar: '4' },
timeout120s: { features: { compileTimeout: 120, other: 'val5' }, bar: '5' },
timeout180s: { features: { compileTimeout: 180, other: 'val6' }, bar: '6' },
timeout30s: {
features: { compileTimeout: 30, other: 'val3' },
bar: '3',
featuresUpdatedAt: new Date('2025-01-01'),
},
timeout60s: {
features: { compileTimeout: 60, other: 'val4' },
bar: '4',
featuresUpdatedAt: new Date(),
},
timeout120s: {
features: { compileTimeout: 120, other: 'val5' },
bar: '5',
},
timeout180s: {
features: { compileTimeout: 180, other: 'val6' },
bar: '6',
featuresUpdatedAt: new Date('2020-01-01'),
},
}
const usersKeys = Object.keys(usersInput)
@@ -44,6 +94,10 @@ describe('MigrateUserFeatureTimeoutTests', function () {
)
})
afterEach('clear users', async function () {
await db.users.deleteMany({})
})
it('gives correct counts in dry mode', async function () {
const users = await db.users.find().toArray()
expect(users).to.have.lengthOf(usersKeys.length)
@@ -56,7 +110,9 @@ describe('MigrateUserFeatureTimeoutTests', function () {
expect(result.stdout).to.contain(
'Found 3 users with compileTimeout <= 60s && != 20s'
)
expect(result.stdout).to.contain('Found 1 users with compileTimeout == 20s')
expect(result.stdout).to.contain(
'Found 1 users with compileTimeout == 20s'
)
expect(result.stdout).not.to.contain('Updated')
const usersAfter = await db.users.find().toArray()
@@ -72,12 +128,16 @@ describe('MigrateUserFeatureTimeoutTests', function () {
expect(result.stdout).to.contain(
'Found 3 users with compileTimeout <= 60s && != 20s'
)
expect(result.stdout).to.contain('Found 1 users with compileTimeout == 20s')
expect(result.stdout).to.contain(
'Found 1 users with compileTimeout == 20s'
)
expect(result.stdout).to.contain('Updated 3 records')
const usersAfter = await db.users.find().toArray()
expect(usersAfter).to.deep.equal([
expect(
usersAfter.map(({ featuresUpdatedAt, ...rest }) => rest)
).to.deep.equal([
{ _id: userIds.noFeatures, email: 'noFeatures@example.com' },
{
_id: userIds.noFeatureTimeout,
@@ -122,6 +182,14 @@ describe('MigrateUserFeatureTimeoutTests', function () {
},
])
expect(usersAfter[0].featuresUpdatedAt).to.be.undefined
expect(usersAfter[1].featuresUpdatedAt).to.be.undefined
expect(usersAfter[2].featuresUpdatedAt).to.be.instanceOf(Date)
expect(usersAfter[3].featuresUpdatedAt).to.be.undefined // was already 20s
expect(usersAfter[4].featuresUpdatedAt).to.be.instanceOf(Date)
expect(usersAfter[5].featuresUpdatedAt).to.be.instanceOf(Date)
expect(usersAfter[6].featuresUpdatedAt).to.be.undefined
const result2 = await runScript([])
expect(result2.stdout).to.contain(
@@ -131,4 +199,249 @@ describe('MigrateUserFeatureTimeoutTests', function () {
'Found 4 users with compileTimeout == 20s'
)
})
})
const FEATURES_UPDATED_AT = new Date('2024-04-16T12:41:00Z')
describe('fixup script', function () {
const usersInput = {
timeout20s1: {
features: { compileTimeout: 20 },
},
timeout20s2: {
features: { compileTimeout: 20 },
featuresUpdatedAt: new Date('2023-01-01'),
},
timeout20s3: {
features: { compileTimeout: 20 },
featuresUpdatedAt: new Date('2025-01-01'),
},
timeout240s1: {
features: { compileTimeout: 240 },
},
timeout240s2: {
features: { compileTimeout: 240 },
featuresUpdatedAt: new Date('2023-01-01'),
},
timeout240s3: {
features: { compileTimeout: 240 },
featuresUpdatedAt: new Date('2025-01-01'),
},
}
const usersKeys = Object.keys(usersInput)
const userIds = {}
beforeEach('insert users', async function () {
const usersInsertedValues = await db.users.insertMany(
usersKeys.map(key => ({
...usersInput[key],
email: `${key}@example.com`,
}))
)
usersKeys.forEach(
(key, index) => (userIds[key] = usersInsertedValues.insertedIds[index])
)
})
afterEach('clear users', async function () {
await db.users.deleteMany({})
})
it('gives correct counts in dry mode', async function () {
const users = await db.users.find().toArray()
expect(users).to.have.lengthOf(usersKeys.length)
const result = await runFixupScript([])
expect(result.stderr).to.contain(
'Doing dry run. Add --commit to commit changes'
)
expect(result.stdout).to.contain(
'Found 2 users needing their featuresUpdatedAt updated'
)
expect(result.stdout).not.to.contain('Updated 2 records')
const usersAfter = await db.users.find().toArray()
expect(usersAfter).to.deep.equal(users)
})
it("updates users featuresUpdatedAt when '--commit' is set", async function () {
const users = await db.users.find().toArray()
expect(users).to.have.lengthOf(usersKeys.length)
const result = await runFixupScript(['--commit'])
expect(result.stdout).to.contain(
'Found 2 users needing their featuresUpdatedAt updated'
)
expect(result.stdout).to.contain('Updated 2 records')
const usersAfter = await db.users.find().toArray()
expect(usersAfter).to.deep.equal([
{
_id: userIds.timeout20s1,
email: 'timeout20s1@example.com',
features: { compileTimeout: 20 },
featuresUpdatedAt: FEATURES_UPDATED_AT,
},
{
_id: userIds.timeout20s2,
email: 'timeout20s2@example.com',
features: { compileTimeout: 20 },
featuresUpdatedAt: FEATURES_UPDATED_AT,
},
{
_id: userIds.timeout20s3,
email: 'timeout20s3@example.com',
features: { compileTimeout: 20 },
featuresUpdatedAt: new Date('2025-01-01'),
},
{
_id: userIds.timeout240s1,
email: 'timeout240s1@example.com',
features: { compileTimeout: 240 },
},
{
_id: userIds.timeout240s2,
email: 'timeout240s2@example.com',
features: { compileTimeout: 240 },
featuresUpdatedAt: new Date('2023-01-01'),
},
{
_id: userIds.timeout240s3,
email: 'timeout240s3@example.com',
features: { compileTimeout: 240 },
featuresUpdatedAt: new Date('2025-01-01'),
},
])
const result2 = await runFixupScript([])
expect(result2.stdout).to.contain(
'Found 0 users needing their featuresUpdatedAt updated'
)
})
})
describe('fixup recent users', function () {
const usersInput = {
timeout20sNewerUser: {
features: { compileTimeout: 20 },
signUpDate: new Date('2026-01-01'),
},
// only this user should get updated
timeout20sNewUser: {
features: { compileTimeout: 20 },
signUpDate: new Date('2025-01-01'),
featuresUpdatedAt: FEATURES_UPDATED_AT,
},
timeout20sOldUser: {
features: { compileTimeout: 20 },
signUpDate: new Date('2023-01-01'),
featuresUpdatedAt: FEATURES_UPDATED_AT,
},
timeout240sNewerUser: {
features: { compileTimeout: 240 },
signUpDate: new Date('2026-01-01'),
},
// We didn't produce such mismatch (featuresUpdatedAt < signUpDate) on premium users.
// But we should still test that the script doesn't update them.
timeout240sNewUser: {
features: { compileTimeout: 240 },
signUpDate: new Date('2025-01-01'),
featuresUpdatedAt: FEATURES_UPDATED_AT,
},
timeout240sOldUser: {
features: { compileTimeout: 240 },
signUpDate: new Date('2023-01-01'),
featuresUpdatedAt: FEATURES_UPDATED_AT,
},
}
const usersKeys = Object.keys(usersInput)
const userIds = {}
beforeEach('insert users', async function () {
const usersInsertedValues = await db.users.insertMany(
usersKeys.map(key => ({
...usersInput[key],
email: `${key}@example.com`,
}))
)
usersKeys.forEach(
(key, index) => (userIds[key] = usersInsertedValues.insertedIds[index])
)
})
afterEach('clear users', async function () {
await db.users.deleteMany({})
})
it('gives correct counts in dry mode', async function () {
const users = await db.users.find().toArray()
expect(users).to.have.lengthOf(usersKeys.length)
const result = await runSecondFixupScript([])
expect(result.stderr).to.contain(
'Doing dry run. Add --commit to commit changes'
)
expect(result.stdout).to.contain(
'Found 1 users needing their featuresUpdatedAt removed'
)
expect(result.stdout).not.to.contain('Updated 1 records')
const usersAfter = await db.users.find().toArray()
expect(usersAfter).to.deep.equal(users)
})
it("removes users featuresUpdatedAt when '--commit' is set", async function () {
const users = await db.users.find().toArray()
expect(users).to.have.lengthOf(usersKeys.length)
const result = await runSecondFixupScript(['--commit'])
expect(result.stdout).to.contain(
'Found 1 users needing their featuresUpdatedAt removed'
)
expect(result.stdout).to.contain('Updated 1 records')
const usersAfter = await db.users.find().toArray()
expect(usersAfter).to.deep.equal([
{
_id: userIds.timeout20sNewerUser,
email: 'timeout20sNewerUser@example.com',
features: { compileTimeout: 20 },
signUpDate: new Date('2026-01-01'),
},
{
_id: userIds.timeout20sNewUser,
email: 'timeout20sNewUser@example.com',
features: { compileTimeout: 20 },
signUpDate: new Date('2025-01-01'),
},
{
_id: userIds.timeout20sOldUser,
email: 'timeout20sOldUser@example.com',
features: { compileTimeout: 20 },
featuresUpdatedAt: FEATURES_UPDATED_AT,
signUpDate: new Date('2023-01-01'),
},
{
_id: userIds.timeout240sNewerUser,
email: 'timeout240sNewerUser@example.com',
features: { compileTimeout: 240 },
signUpDate: new Date('2026-01-01'),
},
{
_id: userIds.timeout240sNewUser,
email: 'timeout240sNewUser@example.com',
features: { compileTimeout: 240 },
featuresUpdatedAt: FEATURES_UPDATED_AT,
signUpDate: new Date('2025-01-01'),
},
{
_id: userIds.timeout240sOldUser,
email: 'timeout240sOldUser@example.com',
features: { compileTimeout: 240 },
featuresUpdatedAt: FEATURES_UPDATED_AT,
signUpDate: new Date('2023-01-01'),
},
])
const result2 = await runSecondFixupScript([])
expect(result2.stdout).to.contain(
'Found 0 users needing their featuresUpdatedAt removed'
)
})
})
})


@@ -29,18 +29,21 @@ describe('CompileManager', function () {
rateLimit: { autoCompile: {} },
}),
'../../infrastructure/RedisWrapper': {
client: () => (this.rclient = { auth() {} }),
client: () =>
(this.rclient = {
auth() {},
}),
},
'../Project/ProjectRootDocManager': (this.ProjectRootDocManager = {}),
'../Project/ProjectGetter': (this.ProjectGetter = {}),
'../User/UserGetter': (this.UserGetter = {}),
'./ClsiManager': (this.ClsiManager = {}),
'../Project/ProjectRootDocManager': (this.ProjectRootDocManager = {
promises: {},
}),
'../Project/ProjectGetter': (this.ProjectGetter = { promises: {} }),
'../User/UserGetter': (this.UserGetter = { promises: {} }),
'./ClsiManager': (this.ClsiManager = { promises: {} }),
'../../infrastructure/RateLimiter': this.RateLimiter,
'@overleaf/metrics': this.Metrics,
'../Analytics/UserAnalyticsIdCache': (this.UserAnalyticsIdCache = {
callbacks: {
get: sinon.stub().yields(null, 'abc'),
},
get: sinon.stub().resolves('abc'),
}),
},
})
@@ -57,36 +60,42 @@ describe('CompileManager', function () {
beforeEach(function () {
this.CompileManager._checkIfRecentlyCompiled = sinon
.stub()
.callsArgWith(2, null, false)
this.ProjectRootDocManager.ensureRootDocumentIsSet = sinon
.resolves(false)
this.ProjectRootDocManager.promises.ensureRootDocumentIsSet = sinon
.stub()
.callsArgWith(1, null)
this.CompileManager.getProjectCompileLimits = sinon
.resolves()
this.CompileManager.promises.getProjectCompileLimits = sinon
.stub()
.callsArgWith(1, null, this.limits)
this.ClsiManager.sendRequest = sinon
.stub()
.callsArgWith(
3,
null,
(this.status = 'mock-status'),
(this.outputFiles = 'mock output files'),
(this.output = 'mock output')
)
.resolves(this.limits)
this.ClsiManager.promises.sendRequest = sinon.stub().resolves({
status: (this.status = 'mock-status'),
outputFiles: (this.outputFiles = []),
clsiServerId: (this.output = 'mock output'),
})
})
describe('successfully', function () {
beforeEach(function () {
this.CompileManager._checkIfAutoCompileLimitHasBeenHit = (
let result
beforeEach(async function () {
this.CompileManager._checkIfAutoCompileLimitHasBeenHit = async (
isAutoCompile,
compileGroup,
cb
) => cb(null, true)
this.CompileManager.compile(
compileGroup
) => true
this.ProjectGetter.promises.getProject = sinon
.stub()
.resolves(
(this.project = { owner_ref: (this.owner_id = 'owner-id-123') })
)
this.UserGetter.promises.getUser = sinon.stub().resolves(
(this.user = {
features: { compileTimeout: '20s', compileGroup: 'standard' },
analyticsId: 'abc',
})
)
result = await this.CompileManager.promises.compile(
this.project_id,
this.user_id,
{},
this.callback
{}
)
})
@@ -97,19 +106,19 @@ describe('CompileManager', function () {
})
it('should ensure that the root document is set', function () {
this.ProjectRootDocManager.ensureRootDocumentIsSet
this.ProjectRootDocManager.promises.ensureRootDocumentIsSet
.calledWith(this.project_id)
.should.equal(true)
})
it('should get the project compile limits', function () {
this.CompileManager.getProjectCompileLimits
this.CompileManager.promises.getProjectCompileLimits
.calledWith(this.project_id)
.should.equal(true)
})
it('should run the compile with the compile limits', function () {
this.ClsiManager.sendRequest
this.ClsiManager.promises.sendRequest
.calledWith(this.project_id, this.user_id, {
timeout: this.limits.timeout,
compileGroup: 'standard',
@@ -117,10 +126,10 @@
.should.equal(true)
})
it('should call the callback with the output', function () {
this.callback
.calledWith(null, this.status, this.outputFiles, this.output)
.should.equal(true)
it('should resolve with the output', function () {
expect(result).to.haveOwnProperty('status', this.status)
expect(result).to.haveOwnProperty('clsiServerId', this.output)
expect(result).to.haveOwnProperty('outputFiles', this.outputFiles)
})
it('should time the compile', function () {
@@ -130,26 +139,24 @@
describe('when the project has been recently compiled', function () {
it('should return', function (done) {
this.CompileManager._checkIfAutoCompileLimitHasBeenHit = (
this.CompileManager._checkIfAutoCompileLimitHasBeenHit = async (
isAutoCompile,
compileGroup,
cb
) => cb(null, true)
compileGroup
) => true
this.CompileManager._checkIfRecentlyCompiled = sinon
.stub()
.callsArgWith(2, null, true)
this.CompileManager.compile(
this.project_id,
this.user_id,
{},
(err, status) => {
if (err) {
return done(err)
}
.resolves(true)
this.CompileManager.promises
.compile(this.project_id, this.user_id, {})
.then(({ status }) => {
status.should.equal('too-recently-compiled')
done()
}
)
})
.catch(error => {
// Catch any errors and fail the test
true.should.equal(false)
done(error)
})
})
})
@@ -157,60 +164,51 @@
it('should return', function (done) {
this.CompileManager._checkIfAutoCompileLimitHasBeenHit = sinon
.stub()
.callsArgWith(2, null, false)
this.CompileManager.compile(
this.project_id,
this.user_id,
{},
(err, status) => {
if (err) {
return done(err)
}
status.should.equal('autocompile-backoff')
.resolves(false)
this.CompileManager.promises
.compile(this.project_id, this.user_id, {})
.then(({ status }) => {
expect(status).to.equal('autocompile-backoff')
done()
}
)
})
.catch(err => done(err))
})
})
})
describe('getProjectCompileLimits', function () {
beforeEach(function (done) {
beforeEach(async function () {
this.features = {
compileTimeout: (this.timeout = 42),
compileGroup: (this.group = 'priority'),
}
this.ProjectGetter.getProject = sinon
this.ProjectGetter.promises.getProject = sinon
.stub()
.callsArgWith(
2,
null,
.resolves(
(this.project = { owner_ref: (this.owner_id = 'owner-id-123') })
)
this.UserGetter.getUser = sinon
this.UserGetter.promises.getUser = sinon
.stub()
.callsArgWith(
2,
null,
(this.user = { features: this.features, analyticsId: 'abc' })
.resolves((this.user = { features: this.features, analyticsId: 'abc' }))
try {
const result =
await this.CompileManager.promises.getProjectCompileLimits(
this.project_id
)
this.CompileManager.getProjectCompileLimits(
this.project_id,
(err, res) => {
this.callback(err, res)
done()
this.callback(null, result)
} catch (error) {
this.callback(error)
}
)
})
it('should look up the owner of the project', function () {
this.ProjectGetter.getProject
this.ProjectGetter.promises.getProject
.calledWith(this.project_id, { owner_ref: 1 })
.should.equal(true)
})
it("should look up the owner's features", function () {
this.UserGetter.getUser
this.UserGetter.promises.getUser
.calledWith(this.project.owner_ref, {
_id: 1,
alphaProgram: 1,
@@ -239,12 +237,12 @@ describe('CompileManager', function () {
compileTimeout: 42,
compileGroup: 'standard',
}
this.ProjectGetter.getProject = sinon
this.ProjectGetter.promises.getProject = sinon
.stub()
.yields(null, { owner_ref: 'owner-id-123' })
this.UserGetter.getUser = sinon
.resolves({ owner_ref: 'owner-id-123' })
this.UserGetter.promises.getUser = sinon
.stub()
.yields(null, { features: this.features, analyticsId: 'abc' })
.resolves({ features: this.features, analyticsId: 'abc' })
})
describe('with priority compile', function () {
@@ -265,47 +263,45 @@
})
describe('deleteAuxFiles', function () {
beforeEach(function () {
this.CompileManager.getProjectCompileLimits = sinon
let result
beforeEach(async function () {
this.CompileManager.promises.getProjectCompileLimits = sinon
.stub()
.callsArgWith(
1,
null,
(this.limits = { compileGroup: 'mock-compile-group' })
)
this.ClsiManager.deleteAuxFiles = sinon.stub().callsArg(3)
this.CompileManager.deleteAuxFiles(
.resolves((this.limits = { compileGroup: 'mock-compile-group' }))
this.ClsiManager.promises.deleteAuxFiles = sinon.stub().resolves('test')
result = await this.CompileManager.promises.deleteAuxFiles(
this.project_id,
this.user_id,
this.callback
this.user_id
)
})
it('should look up the compile group to use', function () {
this.CompileManager.getProjectCompileLimits
this.CompileManager.promises.getProjectCompileLimits
.calledWith(this.project_id)
.should.equal(true)
})
it('should delete the aux files', function () {
this.ClsiManager.deleteAuxFiles
this.ClsiManager.promises.deleteAuxFiles
.calledWith(this.project_id, this.user_id, this.limits)
.should.equal(true)
})
it('should call the callback', function () {
this.callback.called.should.equal(true)
it('should resolve', function () {
expect(result).not.to.be.undefined
})
})
describe('_checkIfRecentlyCompiled', function () {
describe('when the key exists in redis', function () {
beforeEach(function () {
this.rclient.set = sinon.stub().callsArgWith(5, null, null)
this.CompileManager._checkIfRecentlyCompiled(
let result
beforeEach(async function () {
this.rclient.set = sinon.stub().resolves(null)
result = await this.CompileManager._checkIfRecentlyCompiled(
this.project_id,
this.user_id,
this.callback
this.user_id
)
})
@@ -321,18 +317,19 @@
.should.equal(true)
})
it('should call the callback with true', function () {
this.callback.calledWith(null, true).should.equal(true)
it('should resolve with true', function () {
result.should.equal(true)
})
})
describe('when the key does not exist in redis', function () {
beforeEach(function () {
this.rclient.set = sinon.stub().callsArgWith(5, null, 'OK')
this.CompileManager._checkIfRecentlyCompiled(
let result
beforeEach(async function () {
this.rclient.set = sinon.stub().resolves('OK')
result = await this.CompileManager._checkIfRecentlyCompiled(
this.project_id,
this.user_id,
this.callback
this.user_id
)
})
@@ -348,105 +345,86 @@
.should.equal(true)
})
it('should call the callback with false', function () {
this.callback.calledWith(null, false).should.equal(true)
it('should resolve with false', function () {
result.should.equal(false)
})
})
})
describe('_checkIfAutoCompileLimitHasBeenHit', function () {
it('should be able to compile if it is not an autocompile', function (done) {
this.CompileManager._checkIfAutoCompileLimitHasBeenHit(
it('should be able to compile if it is not an autocompile', async function () {
const canCompile =
await this.CompileManager._checkIfAutoCompileLimitHasBeenHit(
false,
'everyone',
(err, canCompile) => {
if (err) {
return done(err)
}
canCompile.should.equal(true)
done()
}
'everyone'
)
expect(canCompile).to.equal(true)
})
it('should be able to compile if rate limit has remaining', function (done) {
this.CompileManager._checkIfAutoCompileLimitHasBeenHit(
it('should be able to compile if rate limit has remaining', async function () {
const canCompile =
await this.CompileManager._checkIfAutoCompileLimitHasBeenHit(
true,
'everyone',
(err, canCompile) => {
if (err) {
return done(err)
}
'everyone'
)
expect(this.rateLimiter.consume).to.have.been.calledWith('global')
canCompile.should.equal(true)
done()
}
)
expect(canCompile).to.equal(true)
})
it('should be not able to compile if rate limit has no remianing', function (done) {
it('should not be able to compile if the rate limit has no points remaining', async function () {
this.rateLimiter.consume.rejects({ remainingPoints: 0 })
this.CompileManager._checkIfAutoCompileLimitHasBeenHit(
const canCompile =
await this.CompileManager._checkIfAutoCompileLimitHasBeenHit(
true,
'everyone',
(err, canCompile) => {
if (err) {
return done(err)
}
canCompile.should.equal(false)
done()
}
'everyone'
)
expect(canCompile).to.equal(false)
})
it('should return false if there is an error in the rate limit', function (done) {
it('should return false if there is an error in the rate limit', async function () {
this.rateLimiter.consume.rejects(new Error('BOOM!'))
this.CompileManager._checkIfAutoCompileLimitHasBeenHit(
const canCompile =
await this.CompileManager._checkIfAutoCompileLimitHasBeenHit(
true,
'everyone',
(err, canCompile) => {
if (err) {
return done(err)
}
canCompile.should.equal(false)
done()
}
'everyone'
)
expect(canCompile).to.equal(false)
})
})
describe('wordCount', function () {
beforeEach(function () {
this.CompileManager.getProjectCompileLimits = sinon
let result
const wordCount = 1
beforeEach(async function () {
this.CompileManager.promises.getProjectCompileLimits = sinon
.stub()
.callsArgWith(
1,
null,
(this.limits = { compileGroup: 'mock-compile-group' })
)
this.ClsiManager.wordCount = sinon.stub().callsArg(4)
this.CompileManager.wordCount(
.resolves((this.limits = { compileGroup: 'mock-compile-group' }))
this.ClsiManager.promises.wordCount = sinon.stub().resolves(wordCount)
result = await this.CompileManager.promises.wordCount(
this.project_id,
this.user_id,
false,
this.callback
false
)
})
it('should look up the compile group to use', function () {
this.CompileManager.getProjectCompileLimits
this.CompileManager.promises.getProjectCompileLimits
.calledWith(this.project_id)
.should.equal(true)
})
it('should call wordCount for project', function () {
this.ClsiManager.wordCount
this.ClsiManager.promises.wordCount
.calledWith(this.project_id, this.user_id, false, this.limits)
.should.equal(true)
})
it('should call the callback', function () {
this.callback.called.should.equal(true)
it('should resolve with the wordCount from the ClsiManager', function () {
expect(result).to.equal(wordCount)
})
})
})