diff --git a/services/web/scripts/history/upgrade_none_without_conversion_if_no_sl_history.js b/services/web/scripts/history/upgrade_none_without_conversion_if_no_sl_history.js
index f7d29c1049..bead4cbb59 100644
--- a/services/web/scripts/history/upgrade_none_without_conversion_if_no_sl_history.js
+++ b/services/web/scripts/history/upgrade_none_without_conversion_if_no_sl_history.js
@@ -3,43 +3,59 @@ const VERBOSE_LOGGING = process.env.VERBOSE_LOGGING === 'true'
 const WRITE_CONCURRENCY = parseInt(process.env.WRITE_CONCURRENCY, 10) || 10
 const BATCH_SIZE = parseInt(process.env.BATCH_SIZE, 10) || 100
 const DRY_RUN = process.env.DRY_RUN !== 'false'
-const MAX_PROJECT_UPGRADES =
-  parseInt(process.env.MAX_PROJECT_UPGRADES, 10) || false
+const MAX_UPGRADES_TO_ATTEMPT =
+  parseInt(process.env.MAX_UPGRADES_TO_ATTEMPT, 10) || false
 // persist fallback in order to keep batchedUpdate in-sync
 process.env.BATCH_SIZE = BATCH_SIZE
+// raise mongo timeout to 1hr if otherwise unspecified
+process.env.MONGO_SOCKET_TIMEOUT =
+  parseInt(process.env.MONGO_SOCKET_TIMEOUT, 10) || 3600000
 
 const { ReadPreference } = require('mongodb')
 const { db } = require('../../app/src/infrastructure/mongodb')
 const { promiseMapWithLimit } = require('../../app/src/util/promises')
 const { batchedUpdate } = require('../helpers/batchedUpdate')
 const ProjectHistoryHandler = require('../../app/src/Features/Project/ProjectHistoryHandler')
+const HistoryManager = require('../../app/src/Features/History/HistoryManager')
 
 console.log({
   DRY_RUN,
   VERBOSE_LOGGING,
   WRITE_CONCURRENCY,
   BATCH_SIZE,
-  MAX_PROJECT_UPGRADES,
+  MAX_UPGRADES_TO_ATTEMPT,
 })
 
 const RESULT = {
   DRY_RUN,
+  attempted: 0,
   projectsUpgraded: 0,
+  failed: 0,
 }
 
+let INTERRUPT = false
+
 async function processBatch(_, projects) {
-  if (MAX_PROJECT_UPGRADES && RESULT.projectsUpgraded >= MAX_PROJECT_UPGRADES) {
+  if (MAX_UPGRADES_TO_ATTEMPT && RESULT.attempted >= MAX_UPGRADES_TO_ATTEMPT) {
     console.log(
-      `MAX_PROJECT_UPGRADES limit (${MAX_PROJECT_UPGRADES}) reached. Stopping.`
+      `MAX_UPGRADES_TO_ATTEMPT limit (${MAX_UPGRADES_TO_ATTEMPT}) reached. Stopping.`
     )
     process.exit(0)
   } else {
     await promiseMapWithLimit(WRITE_CONCURRENCY, projects, processProject)
     console.log(RESULT)
+    if (INTERRUPT) {
+      // ctrl+c
+      console.log('Terminated by SIGINT')
+      process.exit(0)
+    }
   }
 }
 
 async function processProject(project) {
+  if (INTERRUPT) {
+    return
+  }
   const anyDocHistory = await anyDocHistoryExists(project)
   if (anyDocHistory) {
     return
@@ -52,22 +68,49 @@ async function processProject(project) {
 }
 
 async function doUpgradeForNoneWithoutConversion(project) {
+  if (MAX_UPGRADES_TO_ATTEMPT && RESULT.attempted >= MAX_UPGRADES_TO_ATTEMPT) {
+    return
+  } else {
+    RESULT.attempted += 1
+  }
   const projectId = project._id
   if (!DRY_RUN) {
     // ensureHistoryExistsForProject resyncs project
     // Change to 'peek'ing the doc when resyncing should
-    // be rolled out prior to using this script,
-    //
-    // Alternatively: do we need to resync now?
-    // Probably a lot of dead projects - could we set a flag somehow
-    // to resync later/if they ever become active (but for now just
-    // make sure they have a history id?)
+    // be rolled out prior to using this script
     try {
-      await ProjectHistoryHandler.promises.ensureHistoryExistsForProject(
+      // Logic originally from ProjectHistoryHandler.ensureHistoryExistsForProject
+      // However sends a force resync project to project history instead
+      // of a resync request to doc-updater
+      const historyId = await ProjectHistoryHandler.promises.getHistoryId(
         projectId
       )
+      if (historyId != null) {
+        return
+      }
+      const history = await HistoryManager.promises.initializeProject()
+      if (history && history.overleaf_id) {
+        await ProjectHistoryHandler.promises.setHistoryId(
+          projectId,
+          history.overleaf_id
+        )
+        await HistoryManager.promises.resyncProject(projectId, {
+          force: true,
+          origin: { kind: 'history-migration' },
+        })
+        await HistoryManager.promises.flushProject(projectId)
+      }
     } catch (err) {
-      console.log('error setting up history:', err)
+      RESULT.failed += 1
+      console.error(`project ${project._id} FAILED with error: `, err)
+      await db.projects.updateOne(
+        { _id: project._id },
+        {
+          $set: {
+            'overleaf.history.upgradeFailed': true,
+          },
+        }
+      )
       return
     }
     await db.projects.updateOne(
@@ -126,6 +169,14 @@ async function main() {
   console.log(RESULT)
 }
 
+// Upgrading history is not atomic, if we quit out mid-initialisation
+// then history could get into a broken state
+// Instead, skip any unprocessed projects and exit() at end of the batch.
+process.on('SIGINT', function () {
+  console.log('Caught SIGINT, waiting for in process upgrades to complete')
+  INTERRUPT = true
+})
+
 main()
   .then(() => {
     console.error('Done.')