Merge pull request #21097 from overleaf/ls-scripts-to-esm-1

Migrate scripts folder to esm 1/x

GitOrigin-RevId: 4a4bc9a161f144fdb40ce3f2a0a9313b36c6df81
Liangjun Song 2024-10-18 19:04:57 +08:00 committed by Copybot
parent 8bc374c916
commit 26f3f3e2e2
94 changed files with 916 additions and 847 deletions

View file

@@ -12,12 +12,12 @@ echo "closed" > "${SITE_MAINTENANCE_FILE}"
 sleep 5
 # giving a grace period of 5 seconds for users before disconnecting them and start shutting down
-cd /overleaf/services/web && node scripts/disconnect_all_users.js --delay-in-seconds=5 >> /var/log/overleaf/web.log 2>&1
+cd /overleaf/services/web && node scripts/disconnect_all_users.mjs --delay-in-seconds=5 >> /var/log/overleaf/web.log 2>&1
 EXIT_CODE="$?"
 if [ $EXIT_CODE -ne 0 ]
 then
-echo "scripts/disconnect_all_users.js failed with exit code $EXIT_CODE"
+echo "scripts/disconnect_all_users.mjs failed with exit code $EXIT_CODE"
 exit 1
 fi

View file

@@ -425,7 +425,7 @@ lint_flag_res_send_usage:
 lint: lint_overleafModuleImports
 lint_overleafModuleImports:
-node scripts/check_overleafModuleImports.js
+node scripts/check_overleafModuleImports.mjs
 lint: typecheck_frontend
 typecheck_frontend:

View file

@@ -1,4 +1,4 @@
-import updateStringDates from '../scripts/confirmed_at_to_dates.js'
+import updateStringDates from '../scripts/confirmed_at_to_dates.mjs'
 const tags = ['saas']

View file

@@ -1,4 +1,4 @@
-import updateStringDates from '../scripts/confirmed_at_to_dates.js'
+import updateStringDates from '../scripts/confirmed_at_to_dates.mjs'
 const tags = ['saas']

View file

@@ -1,4 +1,4 @@
-import runScript from '../scripts/back_fill_deleted_files.js'
+import runScript from '../scripts/back_fill_deleted_files.mjs'
 const tags = ['server-ce', 'server-pro', 'saas']

View file

@@ -1,4 +1,4 @@
-import runScript from '../scripts/back_fill_doc_name_for_deleted_docs.js'
+import runScript from '../scripts/back_fill_doc_name_for_deleted_docs.mjs'
 const tags = ['server-ce', 'server-pro', 'saas']

View file

@@ -1,4 +1,4 @@
-import runScript from '../scripts/back_fill_dummy_doc_meta.js'
+import runScript from '../scripts/back_fill_dummy_doc_meta.mjs'
 const tags = ['server-ce', 'server-pro', 'saas']

View file

@@ -1,4 +1,4 @@
-import runScript from '../scripts/migrate_audit_logs.js'
+import runScript from '../scripts/migrate_audit_logs.mjs'
 const tags = ['server-ce', 'server-pro', 'saas']

View file

@@ -1,4 +1,4 @@
-import runScript from '../scripts/convert_archived_state.js'
+import runScript from '../scripts/convert_archived_state.mjs'
 const tags = ['server-ce', 'server-pro']

View file

@@ -1,4 +1,4 @@
-import BatchedUpdateScript from '../scripts/helpers/batchedUpdate.js'
+import BatchedUpdateScript from '../scripts/helpers/batchedUpdate.mjs'
 const { batchedUpdate } = BatchedUpdateScript
 const tags = ['saas']
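
Note on the import pattern above: batchedUpdate.mjs exposes its helpers on a default export rather than as named exports, so callers import the module object and destructure it. A minimal sketch of that consumer-side contract (the export shape is inferred from the call sites in this commit, not shown directly in the diff); the same two-step appears below for CJS packages such as 'mongodb-legacy', where a default import is the interop-safe way to consume a CommonJS module from ESM:

import BatchedUpdateScript from '../scripts/helpers/batchedUpdate.mjs'
import mongodb from 'mongodb-legacy'

// destructure the default-export objects into the names used locally
const { batchedUpdate, batchedUpdateWithResultHandling } = BatchedUpdateScript
const { ObjectId } = mongodb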

View file

@@ -1,4 +1,4 @@
-import runScript from '../scripts/back_fill_doc_rev.js'
+import runScript from '../scripts/back_fill_doc_rev.mjs'
 const tags = ['server-ce', 'server-pro']

View file

@@ -1,4 +1,4 @@
-import runScript from '../scripts/remove_deleted_users_from_token_access_refs.js'
+import runScript from '../scripts/remove_deleted_users_from_token_access_refs.mjs'
 const tags = ['server-ce', 'server-pro', 'saas']

View file

@@ -1,7 +1,7 @@
 /* eslint-disable no-unused-vars */
 import Helpers from './lib/helpers.mjs'
-import runScript from '../scripts/backfill_project_invites_token_hmac.js'
+import runScript from '../scripts/backfill_project_invites_token_hmac.mjs'
 const tags = ['server-ce', 'server-pro', 'saas']

View file

@@ -1,4 +1,4 @@
-import BatchedUpdateScript from '../scripts/helpers/batchedUpdate.js'
+import BatchedUpdateScript from '../scripts/helpers/batchedUpdate.mjs'
 const { batchedUpdate } = BatchedUpdateScript
 const tags = ['server-ce', 'server-pro', 'saas']

View file

@@ -10,7 +10,7 @@
 //
 // Usage:
 //
-// $ node scripts/add_feature_override.js --commit --note 'text description' --expires 2022-01-01 --override JSONFILE --ids IDFILE
+// $ node scripts/add_feature_override.mjs --commit --note 'text description' --expires 2022-01-01 --override JSONFILE --ids IDFILE
 //
 // --commit do the update, remove this option for dry-run testing
 // --note text description [optional]
@@ -23,14 +23,15 @@
 // The feature override is specified with JSON to allow types to be set as string/number/boolean.
 // It is contained in a file to avoid any issues with shell quoting.
-const minimist = require('minimist')
-const fs = require('fs')
-const { ObjectId, waitForDb } = require('../app/src/infrastructure/mongodb')
-const pLimit = require('p-limit')
-const FeaturesUpdater = require('../app/src/Features/Subscription/FeaturesUpdater')
-const FeaturesHelper = require('../app/src/Features/Subscription/FeaturesHelper')
-const UserFeaturesUpdater = require('../app/src/Features/Subscription/UserFeaturesUpdater')
-const UserGetter = require('../app/src/Features/User/UserGetter')
+import minimist from 'minimist'
+
+import fs from 'fs'
+import { ObjectId, waitForDb } from '../app/src/infrastructure/mongodb.js'
+import pLimit from 'p-limit'
+import FeaturesUpdater from '../app/src/Features/Subscription/FeaturesUpdater.js'
+import FeaturesHelper from '../app/src/Features/Subscription/FeaturesHelper.js'
+import UserFeaturesUpdater from '../app/src/Features/Subscription/UserFeaturesUpdater.js'
+import UserGetter from '../app/src/Features/User/UserGetter.js'
 const processLogger = {
 failed: [],
@@ -180,4 +181,4 @@ async function processUsers(userIds) {
 process.exit()
 }
-processUsers(userIds)
+await processUsers(userIds)
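
The change from processUsers(userIds) to await processUsers(userIds) works because ES modules allow await at the top level of a module. The same transformation recurs throughout this commit: trailing promise chains become plain awaited calls in a try/catch. A minimal sketch of the recurring before/after, using a hypothetical main():

// before (CommonJS): promise chain at module scope
main()
  .then(() => process.exit(0))
  .catch(error => {
    console.error({ error })
    process.exit(1)
  })

// after (ESM): top-level await, valid only in an ES module
try {
  await main()
  process.exit(0)
} catch (error) {
  console.error({ error })
  process.exit(1)
}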

View file

@@ -1,13 +1,13 @@
-const fs = require('node:fs')
-const minimist = require('minimist')
-const { parse } = require('csv')
-const Stream = require('stream/promises')
-const { ObjectId, waitForDb } = require('../app/src/infrastructure/mongodb')
-const { Subscription } = require('../app/src/models/Subscription')
+import fs from 'node:fs'
+import minimist from 'minimist'
+import { parse } from 'csv'
+import Stream from 'stream/promises'
+import { ObjectId, waitForDb } from '../app/src/infrastructure/mongodb.js'
+import { Subscription } from '../app/src/models/Subscription.js'
 function usage() {
 console.log(
-'Usage: node add_salesforce_data_to_subscriptions.js -f <filename> [options]'
+'Usage: node add_salesforce_data_to_subscriptions.mjs -f <filename> [options]'
 )
 console.log(
 'Updates the subscriptions collection with external IDs for determining the Salesforce account that goes with the subscription. The file should be a CSV and have columns account_id, v1_id and subscription_id. The account_id column is the Salesforce account ID, the v1_id column is the V1 account ID, and the subscription_id column is the subscription ID.'
@@ -201,7 +201,6 @@ if (!commit) {
 console.log('Committing changes to the database')
 }
-main().then(() => {
+await main()
 showStats()
 process.exit()
-})

View file

@@ -1,8 +1,11 @@
-const { batchedUpdate } = require('./helpers/batchedUpdate')
-const { promiseMapWithLimit, promisify } = require('@overleaf/promise-utils')
-const { db } = require('../app/src/infrastructure/mongodb')
+import BatchedUpdateModule from './helpers/batchedUpdate.mjs'
+import { promiseMapWithLimit, promisify } from '@overleaf/promise-utils'
+import { db } from '../app/src/infrastructure/mongodb.js'
+import _ from 'lodash'
+import { fileURLToPath } from 'url'
+const { batchedUpdate } = BatchedUpdateModule
 const sleep = promisify(setTimeout)
-const _ = require('lodash')
 async function main(options) {
 if (!options) {
@@ -118,15 +121,14 @@ async function letUserDoubleCheckInputs(options) {
 await sleep(options.letUserDoubleCheckInputsFor)
 }
-module.exports = main
+export default main
-if (require.main === module) {
-main()
-.then(() => {
+if (fileURLToPath(import.meta.url) === process.argv[1]) {
+try {
+await main()
 process.exit(0)
-})
-.catch(error => {
+} catch (error) {
 console.error({ error })
 process.exit(1)
-})
+}
 }
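
The guard above is the ESM replacement for require.main === module: comparing the current module's resolved file path against process.argv[1] lets the script run main() when executed directly but stay inert when imported (e.g. by the migrations that wrap these scripts). A self-contained sketch of the pattern; note it assumes the script is invoked by its real path (a symlinked entry point would make the two paths differ):

import { fileURLToPath } from 'url'

async function main() {
  // script body goes here
}

// run only when executed directly, e.g. `node scripts/example.mjs`
if (fileURLToPath(import.meta.url) === process.argv[1]) {
  await main()
}

export default main // still importable from migrations and tests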

View file

@@ -1,8 +1,11 @@
-const { batchedUpdate } = require('./helpers/batchedUpdate')
-const { promiseMapWithLimit, promisify } = require('@overleaf/promise-utils')
-const { db } = require('../app/src/infrastructure/mongodb')
+import BatchedUpdateModule from './helpers/batchedUpdate.mjs'
+import { promiseMapWithLimit, promisify } from '@overleaf/promise-utils'
+import { db } from '../app/src/infrastructure/mongodb.js'
+import { fileURLToPath } from 'url'
+import _ from 'lodash'
 const sleep = promisify(setTimeout)
-const _ = require('lodash')
+const { batchedUpdate } = BatchedUpdateModule
 async function main(options) {
 if (!options) {
@@ -77,15 +80,14 @@ async function letUserDoubleCheckInputs(options) {
 await sleep(options.letUserDoubleCheckInputsFor)
 }
-module.exports = main
+export default main
-if (require.main === module) {
-main()
-.then(() => {
+if (fileURLToPath(import.meta.url) === process.argv[1]) {
+try {
+await main()
 process.exit(0)
-})
-.catch(error => {
+} catch (error) {
 console.error({ error })
 process.exit(1)
-})
+}
 }

View file

@@ -1,5 +1,8 @@
-const { db } = require('../app/src/infrastructure/mongodb')
-const { batchedUpdate } = require('./helpers/batchedUpdate')
+import { db } from '../app/src/infrastructure/mongodb.js'
+import BatchedUpdateModule from './helpers/batchedUpdate.mjs'
+import { fileURLToPath } from 'url'
+const { batchedUpdate } = BatchedUpdateModule
 const DRY_RUN = !process.argv.includes('--dry-run=false')
 const LOG_EVERY_IN_S = parseInt(process.env.LOG_EVERY_IN_S, 10) || 5
@@ -44,16 +47,15 @@ async function main(DRY_RUN) {
 logProgress()
 }
-module.exports = main
+export default main
-if (require.main === module) {
-main(DRY_RUN)
-.then(() => {
+if (fileURLToPath(import.meta.url) === process.argv[1]) {
+try {
+await main(DRY_RUN)
 console.log('Done.')
 process.exit(0)
-})
-.catch(error => {
+} catch (error) {
 console.error({ error })
 process.exit(1)
-})
+}
 }

View file

@@ -1,12 +1,16 @@
-const { promisify } = require('util')
-const { ObjectId } = require('mongodb-legacy')
-const {
+import { promisify } from 'util'
+import mongodb from 'mongodb-legacy'
+import {
 db,
 waitForDb,
 READ_PREFERENCE_SECONDARY,
-} = require('../app/src/infrastructure/mongodb')
+} from '../app/src/infrastructure/mongodb.js'
+import _ from 'lodash'
+import LRUCache from 'lru-cache'
+import { fileURLToPath } from 'url'
+const { ObjectId } = mongodb
 const sleep = promisify(setTimeout)
-const _ = require('lodash')
 const NOW_IN_S = Date.now() / 1000
 const ONE_WEEK_IN_S = 60 * 60 * 24 * 7
@@ -15,7 +19,6 @@ const TEN_SECONDS = 10 * 1000
 const DUMMY_NAME = 'unknown.tex'
 const DUMMY_TIME = new Date('2021-04-12T00:00:00.000Z')
-const LRUCache = require('lru-cache')
 let deletedProjectsCache = null
 function getSecondsFromObjectId(id) {
@@ -147,16 +150,15 @@ async function letUserDoubleCheckInputs(options) {
 await sleep(options.letUserDoubleCheckInputsFor)
 }
-module.exports = main
+export default main
-if (require.main === module) {
-main()
-.then(() => {
+if (fileURLToPath(import.meta.url) === process.argv[1]) {
+try {
+await main()
 console.error('Done.')
 process.exit(0)
-})
-.catch(error => {
+} catch (error) {
 console.error({ error })
 process.exit(1)
-})
+}
 }

View file

@@ -1,9 +1,9 @@
-const {
+import {
 db,
 waitForDb,
 READ_PREFERENCE_SECONDARY,
-} = require('../app/src/infrastructure/mongodb')
-const UserSessionsManager = require('../app/src/Features/User/UserSessionsManager')
+} from '../app/src/infrastructure/mongodb.js'
+import UserSessionsManager from '../app/src/Features/User/UserSessionsManager.js'
 const COMMIT = process.argv.includes('--commit')
 const KEEP_SESSIONS = process.argv.includes('--keep-sessions')
@@ -85,12 +85,11 @@ async function main() {
 }
 }
-main()
-.then(() => {
+try {
+await main()
 console.error('Done.')
 process.exit(0)
-})
-.catch(error => {
+} catch (error) {
 console.error({ error })
 process.exit(1)
-})
+}

View file

@@ -1,7 +1,8 @@
-const NotificationsBuilder = require('../app/src/Features/Notifications/NotificationsBuilder')
-const { db, waitForDb } = require('../app/src/infrastructure/mongodb')
-const { batchedUpdate } = require('./helpers/batchedUpdate')
+import NotificationsBuilder from '../app/src/Features/Notifications/NotificationsBuilder.js'
+import { db, waitForDb } from '../app/src/infrastructure/mongodb.js'
+import BatchedUpdateModule from './helpers/batchedUpdate.mjs'
+const { batchedUpdate } = BatchedUpdateModule
 const DRY_RUN = !process.argv.includes('--dry-run=false')
 if (DRY_RUN) {
@@ -63,11 +64,10 @@ async function main() {
 })
 }
-main()
-.then(() => {
+try {
+await main()
 process.exit(0)
-})
-.catch(err => {
-console.error(err)
+} catch (error) {
+console.error(error)
 process.exit(1)
-})
+}

View file

@@ -1,15 +1,15 @@
+import '../app/src/models/User.js'
+import BatchedUpdateModule from './helpers/batchedUpdate.mjs'
+import { promiseMapWithLimit } from '@overleaf/promise-utils'
+import { getQueue } from '../app/src/infrastructure/Queues.js'
+import SubscriptionLocator from '../app/src/Features/Subscription/SubscriptionLocator.js'
+import PlansLocator from '../app/src/Features/Subscription/PlansLocator.js'
+import FeaturesHelper from '../app/src/Features/Subscription/FeaturesHelper.js'
+const { batchedUpdateWithResultHandling } = BatchedUpdateModule
 const WRITE_CONCURRENCY = parseInt(process.env.WRITE_CONCURRENCY, 10) || 10
-require('../app/src/models/User')
-const { batchedUpdateWithResultHandling } = require('./helpers/batchedUpdate')
-const { promiseMapWithLimit } = require('@overleaf/promise-utils')
-const { getQueue } = require('../app/src/infrastructure/Queues')
-const SubscriptionLocator = require('../app/src/Features/Subscription/SubscriptionLocator')
-const PlansLocator = require('../app/src/Features/Subscription/PlansLocator')
-const FeaturesHelper = require('../app/src/Features/Subscription/FeaturesHelper')
 const mixpanelSinkQueue = getQueue('analytics-mixpanel-sink')
 async function processUser(user) {

View file

@@ -1,5 +1,7 @@
-const minimist = require('minimist')
-const { batchedUpdateWithResultHandling } = require('./helpers/batchedUpdate')
+import minimist from 'minimist'
+import BatchedUpdateModule from './helpers/batchedUpdate.mjs'
+const { batchedUpdateWithResultHandling } = BatchedUpdateModule
 const argv = minimist(process.argv.slice(2))
 const commit = argv.commit !== undefined
@@ -7,7 +9,7 @@ let imageName = argv._[0]
 function usage() {
 console.log(
-'Usage: node backfill_project_image_name.js --commit <texlive_docker_image>'
+'Usage: node backfill_project_image_name.mjs --commit <texlive_docker_image>'
 )
 console.log(
 'Argument <texlive_docker_image> is not required when TEX_LIVE_DOCKER_IMAGE is set.'

View file

@@ -1,7 +1,10 @@
-const { db, waitForDb } = require('../app/src/infrastructure/mongodb')
-const { batchedUpdate } = require('./helpers/batchedUpdate')
-const minimist = require('minimist')
-const CollaboratorsInviteHelper = require('../app/src/Features/Collaborators/CollaboratorsInviteHelper')
+import { db, waitForDb } from '../app/src/infrastructure/mongodb.js'
+import BatchedUpdateModule from './helpers/batchedUpdate.mjs'
+import minimist from 'minimist'
+import CollaboratorsInviteHelper from '../app/src/Features/Collaborators/CollaboratorsInviteHelper.js'
+import { fileURLToPath } from 'url'
+const { batchedUpdate } = BatchedUpdateModule
 const argv = minimist(process.argv.slice(2), {
 boolean: ['dry-run', 'help'],
@@ -54,11 +57,11 @@ async function main(DRY_RUN) {
 await addTokenHmacField(DRY_RUN)
 }
-module.exports = main
+export default main
-if (require.main === module) {
+if (fileURLToPath(import.meta.url) === process.argv[1]) {
 if (argv.help || argv._.length > 1) {
-console.error(`Usage: node scripts/backfill_project_invites_token_hmac.js
+console.error(`Usage: node scripts/backfill_project_invites_token_hmac.mjs
 Adds a "tokenHmac" field (which is a hashed version of the token) to each project invite record.
 Options:
@@ -68,13 +71,12 @@ if (require.main === module) {
 process.exit(1)
 }
-main(DRY_RUN)
-.then(() => {
+try {
+await main(DRY_RUN)
 console.error('Done')
 process.exit(0)
-})
-.catch(err => {
-console.error(err)
+} catch (error) {
+console.error(error)
 process.exit(1)
-})
+}
 }

View file

@@ -1,11 +1,13 @@
-const WRITE_CONCURRENCY = parseInt(process.env.WRITE_CONCURRENCY, 10) || 10
+import BatchedUpdateModule from './helpers/batchedUpdate.mjs'
+import { promiseMapWithLimit } from '@overleaf/promise-utils'
+import SubscriptionLocator from '../app/src/Features/Subscription/SubscriptionLocator.js'
+import PlansLocator from '../app/src/Features/Subscription/PlansLocator.js'
+import FeaturesHelper from '../app/src/Features/Subscription/FeaturesHelper.js'
+import AnalyticsManager from '../app/src/Features/Analytics/AnalyticsManager.js'
-const { batchedUpdateWithResultHandling } = require('./helpers/batchedUpdate')
-const { promiseMapWithLimit } = require('@overleaf/promise-utils')
-const SubscriptionLocator = require('../app/src/Features/Subscription/SubscriptionLocator')
-const PlansLocator = require('../app/src/Features/Subscription/PlansLocator')
-const FeaturesHelper = require('../app/src/Features/Subscription/FeaturesHelper')
-const AnalyticsManager = require('../app/src/Features/Analytics/AnalyticsManager')
+const { batchedUpdateWithResultHandling } = BatchedUpdateModule
+const WRITE_CONCURRENCY = parseInt(process.env.WRITE_CONCURRENCY, 10) || 10
 async function getGroupSubscriptionPlanCode(userId) {
 const subscriptions =
@@ -47,7 +49,7 @@ async function processUser(user) {
 }
 }
-async function processBatch(_, users) {
+async function processBatch(users) {
 await promiseMapWithLimit(WRITE_CONCURRENCY, users, async user => {
 await processUser(user)
 })
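
Beyond the import rewrite, note the processBatch signature change above: the old helpers/batchedUpdate.js evidently invoked the handler with a leading argument before the batch (hence the discarded _), while batchedUpdate.mjs appears to pass the batch alone. A hedged sketch of the new callback contract as these call sites suggest:

// assumed contract of the .mjs batched-update helpers: the handler
// receives just the batch of documents, e.g. handler(users)
async function processBatch(users) {
  for (const user of users) {
    await processUser(user) // processUser as defined in this script
  }
}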

View file

@@ -1,4 +1,4 @@
-const SAMLUserIdAttributeBatchHandler = require('../modules/saas-authentication/app/src/SAML/SAMLUserIdAttributeBatchHandler')
+import SAMLUserIdAttributeBatchHandler from '../modules/saas-authentication/app/src/SAML/SAMLUserIdAttributeBatchHandler.js'
 const startInstitutionId = parseInt(process.argv[2])
 const endInstitutionId = parseInt(process.argv[3])
@@ -20,12 +20,14 @@ console.log(
 endInstitutionId || 'none provided, will go to end of ordered list.'
 )
-SAMLUserIdAttributeBatchHandler.check(startInstitutionId, endInstitutionId)
-.then(result => {
+try {
+const result = await SAMLUserIdAttributeBatchHandler.check(
+startInstitutionId,
+endInstitutionId
+)
 console.log(result)
 process.exit(0)
-})
-.catch(error => {
+} catch (error) {
 console.error(error)
 process.exit(1)
-})
+}

View file

@@ -1,23 +1,14 @@
-process.env.MONGO_SOCKET_TIMEOUT = '300000'
-// Run all the mongo queries on secondaries
-process.env.MONGO_CONNECTION_STRING =
-process.env.READ_ONLY_MONGO_CONNECTION_STRING
-const { waitForDb } = require('../app/src/infrastructure/mongodb')
-const InstitutionsManager = require('../app/src/Features/Institutions/InstitutionsManager')
+import { waitForDb } from '../app/src/infrastructure/mongodb.js'
+import InstitutionsManager from '../app/src/Features/Institutions/InstitutionsManager.js'
+import { ensureRunningOnMongoSecondaryWithTimeout } from './helpers/env_variable_helper.mjs'
+ensureRunningOnMongoSecondaryWithTimeout(300000)
 const institutionId = parseInt(process.argv[2])
 if (isNaN(institutionId)) throw new Error('No institution id')
 console.log('Checking users of institution', institutionId)
 const emitNonProUserIds = process.argv.includes('--emit-non-pro-user-ids')
-waitForDb()
-.then(main)
-.catch(err => {
-console.error(err)
-process.exit(1)
-})
 async function main() {
 const usersSummary = await InstitutionsManager.promises.checkInstitutionUsers(
 institutionId,
@@ -26,3 +17,11 @@ async function main() {
 console.log(usersSummary)
 process.exit()
 }
+try {
+await waitForDb()
+await main()
+} catch (error) {
+console.error(error)
+process.exit(1)
+}
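
The old version of this script mutated process.env before the require calls that read those variables. That ordering cannot be reproduced in an ES module: import declarations are hoisted and evaluated before any statement in the module body, so by the time the body runs, mongodb.js has already been loaded. The new env_variable_helper.mjs therefore has to validate the environment and fail fast rather than set it. Its implementation is not part of this excerpt; a plausible sketch, assuming it throws with instructions when the caller did not prepare the environment (names match the call sites in this commit, the bodies are assumptions):

// hypothetical scripts/helpers/env_variable_helper.mjs
export function ensureMongoTimeout(timeoutInMs) {
  if (process.env.MONGO_SOCKET_TIMEOUT !== timeoutInMs.toString()) {
    throw new Error(
      `please rerun with MONGO_SOCKET_TIMEOUT=${timeoutInMs} set in the environment`
    )
  }
}

export function ensureRunningOnMongoSecondaryWithTimeout(timeoutInMs) {
  ensureMongoTimeout(timeoutInMs)
  // the old script pointed MONGO_CONNECTION_STRING at the read-only replica itself
  if (
    !process.env.MONGO_CONNECTION_STRING ||
    process.env.MONGO_CONNECTION_STRING !==
      process.env.READ_ONLY_MONGO_CONNECTION_STRING
  ) {
    throw new Error(
      'please rerun with MONGO_CONNECTION_STRING set to the read-only (secondary) connection string'
    )
  }
}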

View file

@@ -1,11 +1,11 @@
+import CE_CONFIG from '../config/settings.defaults.js'
+import PRO_CONFIG from '../config/settings.overrides.server-pro.js'
+import SAAS_CONFIG from '../config/settings.webpack.js'
 function getOverleafModuleImports(settings) {
 return Object.keys(settings.overleafModuleImports).sort().join(',')
 }
-const CE_CONFIG = require('../config/settings.defaults')
-const PRO_CONFIG = require('../config/settings.overrides.server-pro')
-const SAAS_CONFIG = require('../config/settings.webpack')
 function main() {
 const CE = getOverleafModuleImports(CE_CONFIG)
 const PRO = getOverleafModuleImports(CE_CONFIG.mergeWith(PRO_CONFIG))

View file

@@ -1,19 +0,0 @@
-process.env.MONGO_SOCKET_TIMEOUT = '300000'
-// Run all the mongo queries on secondaries
-process.env.MONGO_CONNECTION_STRING =
-process.env.READ_ONLY_MONGO_CONNECTION_STRING
-const SAMLEmailBatchCheck = require('../modules/saas-authentication/app/src/SAML/SAMLEmailBatchCheck')
-const startInstitutionId = parseInt(process.argv[2])
-const emitDetailedData = process.argv.includes('--detailed-data')
-SAMLEmailBatchCheck.promises
-.checkEmails(startInstitutionId, emitDetailedData)
-.then(result => {
-console.table(result)
-process.exit()
-})
-.catch(err => {
-console.error(err)
-process.exit(1)
-})

View file

@@ -0,0 +1,19 @@
+import SAMLEmailBatchCheck from '../modules/saas-authentication/app/src/SAML/SAMLEmailBatchCheck.js'
+import { ensureRunningOnMongoSecondaryWithTimeout } from './helpers/env_variable_helper.mjs'
+ensureRunningOnMongoSecondaryWithTimeout(300000)
+const startInstitutionId = parseInt(process.argv[2])
+const emitDetailedData = process.argv.includes('--detailed-data')
+try {
+const result = await SAMLEmailBatchCheck.promises.checkEmails(
+startInstitutionId,
+emitDetailedData
+)
+console.table(result)
+process.exit()
+} catch (error) {
+console.error(error)
+process.exit(1)
+}

View file

@@ -1,9 +1,9 @@
-const {
+import {
 db,
 waitForDb,
 READ_PREFERENCE_SECONDARY,
-} = require('../app/src/infrastructure/mongodb')
-const UserSessionsManager = require('../app/src/Features/User/UserSessionsManager')
+} from '../app/src/infrastructure/mongodb.js'
+import UserSessionsManager from '../app/src/Features/User/UserSessionsManager.js'
 const COMMIT = process.argv.includes('--commit')
 const LOG_SESSIONS = !process.argv.includes('--log-sessions=false')
@@ -59,12 +59,11 @@ async function main() {
 }
 }
-main()
-.then(() => {
+try {
+await main()
 console.error('Done.')
 process.exit(0)
-})
-.catch(error => {
+} catch (error) {
 console.error({ error })
 process.exit(1)
-})
+}

View file

@@ -5,7 +5,8 @@
 * DRY_RUN=false node scripts/clear_feedback_collection.js 2022-11-01 # deletion mode
 */
-const { db, ObjectId, waitForDb } = require('../app/src/infrastructure/mongodb')
+import { db, ObjectId, waitForDb } from '../app/src/infrastructure/mongodb.js'
+import { fileURLToPath } from 'url'
 const runScript = async (timestamp, dryRun) => {
 await waitForDb()
@@ -26,7 +27,7 @@ const runScript = async (timestamp, dryRun) => {
 }
 }
-if (!module.parent) {
+if (fileURLToPath(import.meta.url) === process.argv[1]) {
 // we are in the root module, which means that we're running as a script
 const timestamp = process.env.CUTOFF_TIMESTAMP || process.argv[2]
 const dryRun = process.env.DRY_RUN !== 'false'
@@ -38,4 +39,4 @@ if (!module.parent) {
 })
 }
-module.exports = runScript
+export default runScript

View file

@@ -1,5 +1,6 @@
-const { promisify } = require('util')
-const InstitutionsManager = require('../app/src/Features/Institutions/InstitutionsManager')
+import { promisify } from 'util'
+import InstitutionsManager from '../app/src/Features/Institutions/InstitutionsManager.js'
+import { fileURLToPath } from 'url'
 const sleep = promisify(setTimeout)
 async function main() {
@@ -36,14 +37,13 @@ async function main() {
 console.log('---------------')
 }
-if (require.main === module) {
-main()
-.then(() => {
+if (fileURLToPath(import.meta.url) === process.argv[1]) {
+try {
+await main()
 console.log('Done.')
 process.exit(0)
-})
-.catch(err => {
-console.error(err)
+} catch (error) {
+console.error(error)
 process.exit(1)
-})
+}
 }

View file

@@ -1,5 +1,5 @@
-const { waitForDb } = require('../app/src/infrastructure/mongodb')
-const ProjectDetailsHandler = require('../app/src/Features/Project/ProjectDetailsHandler')
+import { waitForDb } from '../app/src/infrastructure/mongodb.js'
+import ProjectDetailsHandler from '../app/src/Features/Project/ProjectDetailsHandler.js'
 const projectId = process.argv[2]
 if (!/^(?=[a-f\d]{24}$)(\d+[a-f]|[a-f]+\d)/.test(projectId)) {
@@ -7,13 +7,6 @@ if (!/^(?=[a-f\d]{24}$)(\d+[a-f]|[a-f]+\d)/.test(projectId)) {
 process.exit(1)
 }
-waitForDb()
-.then(main)
-.catch(err => {
-console.error(err)
-process.exit(1)
-})
 function main() {
 ProjectDetailsHandler.clearTokens(projectId, err => {
 if (err) {
@@ -27,3 +20,11 @@ function main() {
 process.exit(0)
 })
 }
+try {
+await waitForDb()
+await main()
+} catch (error) {
+console.error(error)
+process.exit(1)
+}

View file

@@ -1,7 +1,10 @@
-const { promisify, promiseMapWithLimit } = require('@overleaf/promise-utils')
-const rClient = require('../app/src/Features/User/UserSessionsRedis').client()
-const args = require('minimist')(process.argv.slice(2))
+import { promisify, promiseMapWithLimit } from '@overleaf/promise-utils'
+import UserSessionsRedis from '../app/src/Features/User/UserSessionsRedis.js'
+import minimist from 'minimist'
+const rClient = UserSessionsRedis.client()
+const args = minimist(process.argv.slice(2))
 const CURSOR = args.cursor
 const COMMIT = args.commit === 'true'
 const CONCURRENCY = parseInt(args.concurrency, 10) || 50
@@ -72,7 +75,9 @@ async function main() {
 await rClient.disconnect()
 }
-main().catch(err => {
-console.error(err)
+try {
+await main()
+} catch (error) {
+console.error(error)
 process.exit(1)
-})
+}

View file

@@ -1,8 +1,8 @@
-const fs = require('fs')
-const { ObjectId, waitForDb } = require('../app/src/infrastructure/mongodb')
-const async = require('async')
-const UserUpdater = require('../app/src/Features/User/UserUpdater')
-const UserSessionsManager = require('../app/src/Features/User/UserSessionsManager')
+import fs from 'fs'
+import { ObjectId, waitForDb } from '../app/src/infrastructure/mongodb.js'
+import async from 'async'
+import UserUpdater from '../app/src/Features/User/UserUpdater.js'
+import UserSessionsManager from '../app/src/Features/User/UserSessionsManager.js'
 const ASYNC_LIMIT = 10

View file

@@ -1,4 +1,5 @@
-const { db, waitForDb } = require('../app/src/infrastructure/mongodb')
+import { db, waitForDb } from '../app/src/infrastructure/mongodb.js'
+import { fileURLToPath } from 'url'
 async function updateStringDates() {
 await waitForDb()
@@ -40,15 +41,14 @@ async function updateStringDates() {
 console.log(`Updated ${count} confirmedAt strings to dates!`)
 }
-if (require.main === module) {
-updateStringDates()
-.then(() => {
+if (fileURLToPath(import.meta.url) === process.argv[1]) {
+try {
+await updateStringDates()
 process.exit(0)
-})
-.catch(error => {
+} catch (error) {
 console.error(error)
 process.exit(1)
-})
+}
 }
-module.exports = updateStringDates
+export default updateStringDates

View file

@@ -1,12 +1,14 @@
-const _ = require('lodash')
+import _ from 'lodash'
+import { db } from '../app/src/infrastructure/mongodb.js'
+import BatchedUpdateModule from './helpers/batchedUpdate.mjs'
+import { promiseMapWithLimit } from '@overleaf/promise-utils'
+import { fileURLToPath } from 'url'
+const { batchedUpdate } = BatchedUpdateModule
 const WRITE_CONCURRENCY = parseInt(process.env.WRITE_CONCURRENCY, 10) || 10
-const { db } = require('../app/src/infrastructure/mongodb')
-const { batchedUpdate } = require('./helpers/batchedUpdate')
-const { promiseMapWithLimit } = require('@overleaf/promise-utils')
-// $ node scripts/convert_archived_state.js FIRST,SECOND
+// $ node scripts/convert_archived_state.mjs FIRST,SECOND
 async function main(STAGE) {
 for (const FIELD of ['archived', 'trashed']) {
@@ -55,19 +57,6 @@ async function main(STAGE) {
 }
 }
-module.exports = main
-if (require.main === module) {
-main(process.argv.pop())
-.then(() => {
-process.exit(0)
-})
-.catch(error => {
-console.error({ error })
-process.exit(1)
-})
-}
 async function upgradeFieldToArray({ project, FIELD }) {
 return db.projects.updateOne(
 { _id: project._id },
@@ -92,3 +81,15 @@ function _objectIdEquals(firstVal, secondVal) {
 // For use as a comparator for unionWith
 return firstVal.toString() === secondVal.toString()
 }
+if (fileURLToPath(import.meta.url) === process.argv[1]) {
+try {
+await main(process.argv.pop())
+process.exit(0)
+} catch (error) {
+console.error({ error })
+process.exit(1)
+}
+}
+export default main

View file

@@ -1,7 +1,7 @@
-const minimist = require('minimist')
-const { waitForDb, ObjectId } = require('../app/src/infrastructure/mongodb')
-const ProjectEntityUpdateHandler = require('../app/src/Features/Project/ProjectEntityUpdateHandler')
-const Errors = require('../app/src/Features/Errors/Errors')
+import minimist from 'minimist'
+import { waitForDb, ObjectId } from '../app/src/infrastructure/mongodb.js'
+import ProjectEntityUpdateHandler from '../app/src/Features/Project/ProjectEntityUpdateHandler.js'
+import Errors from '../app/src/Features/Errors/Errors.js'
 async function main() {
 const argv = minimist(process.argv.slice(2))
@@ -35,12 +35,11 @@ async function main() {
 }
 }
-main()
-.then(() => {
+try {
+await main()
 console.log('Done.')
 process.exit(0)
-})
-.catch(err => {
-console.error(err)
+} catch (error) {
+console.error(error)
 process.exit(1)
-})
+}

View file

@@ -1,16 +1,16 @@
-const TEN_MINUTES = 1000 * 60 * 10
-process.env.MONGO_SOCKET_TIMEOUT =
-process.env.MONGO_SOCKET_TIMEOUT || TEN_MINUTES.toString()
-const {
+import {
 db,
 waitForDb,
 READ_PREFERENCE_SECONDARY,
-} = require('../app/src/infrastructure/mongodb')
-const _ = require('lodash')
-const {
-formatTokenUsageStats,
-} = require('@overleaf/access-token-encryptor/scripts/helpers/format-usage-stats')
+} from '../app/src/infrastructure/mongodb.js'
+import _ from 'lodash'
+import { formatTokenUsageStats } from '@overleaf/access-token-encryptor/scripts/helpers/format-usage-stats.js'
+import { ensureMongoTimeout } from './helpers/env_variable_helper.mjs'
+if (!process.env.MONGO_SOCKET_TIMEOUT) {
+const TEN_MINUTES = 1000 * 60 * 10
+ensureMongoTimeout(TEN_MINUTES)
+}
 const CASES = {
 users: {
@@ -67,11 +67,10 @@ async function main() {
 formatTokenUsageStats(STATS)
 }
-main()
-.then(() => {
+try {
+await main()
 process.exit(0)
-})
-.catch(err => {
-console.error(err)
+} catch (error) {
+console.error(error)
 process.exit(1)
-})
+}

View file

@@ -1,10 +1,8 @@
-const readline = require('readline')
-const { waitForDb } = require('../app/src/infrastructure/mongodb')
-const ProjectEntityHandler = require('../app/src/Features/Project/ProjectEntityHandler')
-const ProjectGetter = require('../app/src/Features/Project/ProjectGetter')
-const Errors = require('../app/src/Features/Errors/Errors')
-/* eslint-disable no-console */
+import readline from 'readline'
+import { waitForDb } from '../app/src/infrastructure/mongodb.js'
+import ProjectEntityHandler from '../app/src/Features/Project/ProjectEntityHandler.js'
+import ProjectGetter from '../app/src/Features/Project/ProjectGetter.js'
+import Errors from '../app/src/Features/Errors/Errors.js'
 async function countFiles() {
 const rl = readline.createInterface({
@@ -36,12 +34,11 @@ async function countFiles() {
 }
 }
-waitForDb()
-.then(countFiles)
-.then(() => {
+try {
+await waitForDb()
+await countFiles()
 process.exit(0)
-})
-.catch(err => {
-console.log('Aiee, something went wrong!', err)
+} catch (error) {
+console.log('Aiee, something went wrong!', error)
 process.exit(1)
-})
+}

View file

@@ -1,9 +1,9 @@
-const {
+import {
 db,
 waitForDb,
 READ_PREFERENCE_SECONDARY,
-} = require('../app/src/infrastructure/mongodb')
-const { extname } = require('node:path')
+} from '../app/src/infrastructure/mongodb.js'
+import { extname } from 'node:path'
 const FILE_TYPES = [
 '.jpg',
@@ -72,11 +72,10 @@ function countFiles(folder, result) {
 return result
 }
-main()
-.then(() => {
+try {
+await main()
 process.exit(0)
-})
-.catch(err => {
-console.error(err)
+} catch (error) {
+console.error(error)
 process.exit(1)
-})
+}

View file

@@ -1,17 +1,15 @@
-const readline = require('readline')
-const { waitForDb, ObjectId, db } = require('../app/src/infrastructure/mongodb')
-const ProjectEntityHandler = require('../app/src/Features/Project/ProjectEntityHandler')
-const ProjectGetter = require('../app/src/Features/Project/ProjectGetter')
-const Errors = require('../app/src/Features/Errors/Errors')
-const FileStoreHandler = require('../app/src/Features/FileStore/FileStoreHandler')
-/* eslint-disable no-console */
+import readline from 'readline'
+import { waitForDb, ObjectId, db } from '../app/src/infrastructure/mongodb.js'
+import ProjectEntityHandler from '../app/src/Features/Project/ProjectEntityHandler.js'
+import ProjectGetter from '../app/src/Features/Project/ProjectGetter.js'
+import Errors from '../app/src/Features/Errors/Errors.js'
+import FileStoreHandler from '../app/src/Features/FileStore/FileStoreHandler.js'
 // Handles a list of project IDs from stdin, one per line, and outputs the count of files and docs
 // in the project, along with the aggregated size in bytes for all files and docs.
 //
 // It outputs to stderr, so that the logging junk can be piped elsewhere - e.g., running like:
-// node scripts/count_project_size.js < /tmp/project_ids.txt /dev/null 2> /tmp/output.txt
+// node scripts/count_project_size.mjs < /tmp/project_ids.txt /dev/null 2> /tmp/output.txt
 //
 // The output format is line-per-project with data separated by a single space, containing:
 // - projectId
@@ -128,12 +126,11 @@ async function countDocsSizes(docs) {
 return totalDocSize
 }
-waitForDb()
-.then(countProjectFiles)
-.then(() => {
+try {
+await waitForDb()
+await countProjectFiles()
 process.exit(0)
-})
-.catch(err => {
-console.log('Aiee, something went wrong!', err)
+} catch (error) {
+console.log('Aiee, something went wrong!', error)
 process.exit(1)
-})
+}

View file

@@ -1,10 +1,11 @@
 // Script to create a Personal Access Token for a given user
 // Example:
-// node scripts/create_oauth_personal_access_token.js --user-id=643e5b240dc50c83b5bf1127
+// node scripts/create_oauth_personal_access_token.mjs --user-id=643e5b240dc50c83b5bf1127
-const parseArgs = require('minimist')
-const { waitForDb } = require('../app/src/infrastructure/mongodb')
-const OAuthPersonalAccessTokenManager = require('../modules/oauth2-server/app/src/OAuthPersonalAccessTokenManager')
+import parseArgs from 'minimist'
+import { waitForDb } from '../app/src/infrastructure/mongodb.js'
+import OAuthPersonalAccessTokenManager from '../modules/oauth2-server/app/src/OAuthPersonalAccessTokenManager.js'
 const argv = parseArgs(process.argv.slice(2), {
 string: ['user-id'],
@@ -23,11 +24,10 @@ async function createPersonalAccessToken() {
 console.log('Personal Access Token: ' + accessToken)
 }
-createPersonalAccessToken()
-.then(() => {
+try {
+await createPersonalAccessToken()
 process.exit()
-})
-.catch(err => {
-console.error(err)
+} catch (error) {
+console.error(error)
 process.exit(1)
-})

View file

@@ -2,17 +2,21 @@
 // Example:
 // node scripts/create_project.js --user-id=5dca84e11e71ae002ff73bd4 --name="My Test Project" --old-history
-const fs = require('fs')
-const path = require('path')
-const _ = require('lodash')
-const parseArgs = require('minimist')
-const OError = require('@overleaf/o-error')
-const { waitForDb } = require('../app/src/infrastructure/mongodb')
-const { User } = require('../app/src/models/User')
-const ProjectCreationHandler = require('../app/src/Features/Project/ProjectCreationHandler')
-const ProjectEntityUpdateHandler = require('../app/src/Features/Project/ProjectEntityUpdateHandler')
-const ProjectEntityHandler = require('../app/src/Features/Project/ProjectEntityHandler')
-const EditorController = require('../app/src/Features/Editor/EditorController')
+import fs from 'fs'
+import path from 'path'
+import _ from 'lodash'
+import parseArgs from 'minimist'
+import OError from '@overleaf/o-error'
+import { waitForDb } from '../app/src/infrastructure/mongodb.js'
+import { User } from '../app/src/models/User.js'
+import ProjectCreationHandler from '../app/src/Features/Project/ProjectCreationHandler.js'
+import ProjectEntityUpdateHandler from '../app/src/Features/Project/ProjectEntityUpdateHandler.js'
+import ProjectEntityHandler from '../app/src/Features/Project/ProjectEntityHandler.js'
+import EditorController from '../app/src/Features/Editor/EditorController.js'
+import { fileURLToPath } from 'url'
+const __dirname = path.dirname(fileURLToPath(import.meta.url))
 const argv = parseArgs(process.argv.slice(2), {
 string: ['user-id', 'name', 'random-operations', 'extend-project-id'],
@@ -226,12 +230,11 @@ async function createProject() {
 return projectId
 }
-createProject()
-.then(projectId => {
+try {
+const projectId = await createProject()
 console.log('Created project', projectId)
 process.exit()
-})
-.catch(err => {
-console.error(err)
+} catch (error) {
+console.error(error)
 process.exit(1)
-})
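
create_project also needs the classic __dirname, which ES modules do not define; the new code derives it from import.meta.url. A short self-contained sketch of the idiom (the fixture path is a hypothetical example, not from this diff):

import path from 'path'
import { fileURLToPath } from 'url'

// ESM defines neither __filename nor __dirname; rebuild them from the module URL
const __filename = fileURLToPath(import.meta.url)
const __dirname = path.dirname(__filename)

// e.g. resolving a file shipped next to the script
const fixturePath = path.join(__dirname, 'fixtures', 'main.tex')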

View file

@@ -2,14 +2,16 @@
 * This script deletes dangling doc and file refs in projects
 */
-const minimist = require('minimist')
-const { ObjectId } = require('mongodb-legacy')
-const { db, waitForDb } = require('../app/src/infrastructure/mongodb')
-const Errors = require('../app/src/Features/Errors/Errors')
-const FileStoreHandler = require('../app/src/Features/FileStore/FileStoreHandler')
-const ProjectEntityMongoUpdateHandler = require('../app/src/Features/Project/ProjectEntityMongoUpdateHandler')
-const { iterablePaths } = require('../app/src/Features/Project/IterablePath')
+import minimist from 'minimist'
+import mongodb from 'mongodb-legacy'
+import { db, waitForDb } from '../app/src/infrastructure/mongodb.js'
+import Errors from '../app/src/Features/Errors/Errors.js'
+import FileStoreHandler from '../app/src/Features/FileStore/FileStoreHandler.js'
+import ProjectEntityMongoUpdateHandler from '../app/src/Features/Project/ProjectEntityMongoUpdateHandler.js'
+import { iterablePaths } from '../app/src/Features/Project/IterablePath.js'
+const { ObjectId } = mongodb
 const OPTIONS = parseArgs()
@@ -119,11 +121,10 @@ async function deleteFile(projectId, fileId) {
 }
 }
-main()
-.then(() => {
+try {
+await main()
 process.exit(0)
-})
-.catch(error => {
+} catch (error) {
 console.error({ error })
 process.exit(1)
-})
+}

View file

@@ -1,3 +1,14 @@
+import mongodb from 'mongodb-legacy'
+import { promiseMapWithLimit } from '@overleaf/promise-utils'
+import BatchedUpdateModule from './helpers/batchedUpdate.mjs'
+import ChatApiHandler from '../app/src/Features/Chat/ChatApiHandler.js'
+import DeleteOrphanedDataHelper from './delete_orphaned_data_helper.mjs'
+import { ensureMongoTimeout } from './helpers/env_variable_helper.mjs'
+const { batchedUpdate } = BatchedUpdateModule
+const { ObjectId } = mongodb
+const { getHardDeletedProjectIds } = DeleteOrphanedDataHelper
 const READ_CONCURRENCY_SECONDARY =
 parseInt(process.env.READ_CONCURRENCY_SECONDARY, 10) || 1000
 const READ_CONCURRENCY_PRIMARY =
@@ -9,15 +20,10 @@ const MAX_CHATS_TO_DESTROY =
 parseInt(process.env.MAX_CHATS_TO_DESTROY, 10) || false
 // persist fallback in order to keep batchedUpdate in-sync
 process.env.BATCH_SIZE = BATCH_SIZE
-// raise mongo timeout to 10mins if otherwise unspecified
-process.env.MONGO_SOCKET_TIMEOUT =
-parseInt(process.env.MONGO_SOCKET_TIMEOUT, 10) || 600000
-const { ObjectId } = require('mongodb-legacy')
-const { promiseMapWithLimit } = require('@overleaf/promise-utils')
-const { batchedUpdate } = require('./helpers/batchedUpdate')
-const ChatApiHandler = require('../app/src/Features/Chat/ChatApiHandler')
-const { getHardDeletedProjectIds } = require('./delete_orphaned_data_helper')
+// ensure set mongo timeout to 10mins if otherwise unspecified
+if (!process.env.MONGO_SOCKET_TIMEOUT) {
+ensureMongoTimeout(600000)
+}
 console.log({
 DRY_RUN,

View file

@@ -1,9 +1,9 @@
-const {
+import {
 db,
 READ_PREFERENCE_PRIMARY,
 READ_PREFERENCE_SECONDARY,
-} = require('../app/src/infrastructure/mongodb')
-const { promiseMapWithLimit } = require('@overleaf/promise-utils')
+} from '../app/src/infrastructure/mongodb.js'
+import { promiseMapWithLimit } from '@overleaf/promise-utils'
 async function getDeletedProject(projectId, readPreference) {
 return await db.deletedProjects.findOne(
@@ -108,6 +108,6 @@ async function getHardDeletedProjectIds({
 return hardDeletedProjectIds
 }
-module.exports = {
+export default {
 getHardDeletedProjectIds,
 }

View file

@@ -1,13 +1,12 @@
+import minimist from 'minimist'
+import { waitForDb } from '../app/src/infrastructure/mongodb.js'
+import ChatApiHandler from '../app/src/Features/Chat/ChatApiHandler.js'
+import DocstoreManager from '../app/src/Features/Docstore/DocstoreManager.js'
+import DocumentUpdaterHandler from '../app/src/Features/DocumentUpdater/DocumentUpdaterHandler.js'
+import { promiseMapWithLimit } from '@overleaf/promise-utils'
 const WRITE_CONCURRENCY = parseInt(process.env.WRITE_CONCURRENCY, 10) || 10
-const minimist = require('minimist')
-const { waitForDb } = require('../app/src/infrastructure/mongodb')
-const ChatApiHandler = require('../app/src/Features/Chat/ChatApiHandler')
-const DocstoreManager = require('../app/src/Features/Docstore/DocstoreManager')
-const DocumentUpdaterHandler = require('../app/src/Features/DocumentUpdater/DocumentUpdaterHandler')
-const { promiseMapWithLimit } = require('@overleaf/promise-utils')
 /**
 * Remove doc comment ranges that are "orphaned" as they do have matching chat
 * threads. This can happen when adding comments and the HTTP request fails, but
@@ -46,12 +45,11 @@ async function main() {
 await DocumentUpdaterHandler.promises.flushDocToMongo(projectId, docId)
 }
-main()
-.then(() => {
+try {
+await main()
 console.log('Done.')
 process.exit(0)
-})
-.catch(error => {
+} catch (error) {
 console.error({ error })
 process.exit(1)
-})
+}

View file

@@ -1,15 +1,18 @@
-const DocstoreManager = require('../app/src/Features/Docstore/DocstoreManager')
-const { promisify } = require('util')
-const { ObjectId } = require('mongodb-legacy')
-const {
+import DocstoreManager from '../app/src/Features/Docstore/DocstoreManager.js'
+import { promisify } from 'util'
+import mongodb from 'mongodb-legacy'
+import {
 db,
 waitForDb,
 READ_PREFERENCE_PRIMARY,
 READ_PREFERENCE_SECONDARY,
-} = require('../app/src/infrastructure/mongodb')
-const { promiseMapWithLimit } = require('@overleaf/promise-utils')
-const { getHardDeletedProjectIds } = require('./delete_orphaned_data_helper')
+} from '../app/src/infrastructure/mongodb.js'
+import { promiseMapWithLimit } from '@overleaf/promise-utils'
+import DeleteOrphanedDataHelper from './delete_orphaned_data_helper.mjs'
+const { ObjectId } = mongodb
 const sleep = promisify(setTimeout)
+const { getHardDeletedProjectIds } = DeleteOrphanedDataHelper
 const NOW_IN_S = Date.now() / 1000
 const ONE_WEEK_IN_S = 60 * 60 * 24 * 7
@@ -169,12 +172,11 @@ async function letUserDoubleCheckInputs() {
 await sleep(LET_USER_DOUBLE_CHECK_INPUTS_FOR)
 }
-main()
-.then(() => {
+try {
+await main()
 console.error('Done.')
 process.exit(0)
-})
-.catch(error => {
+} catch (error) {
 console.error({ error })
 process.exit(1)
-})
+}

View file

@@ -1,9 +1,10 @@
-const { Subscription } = require('../app/src/models/Subscription')
-const SubscriptionUpdater = require('../app/src/Features/Subscription/SubscriptionUpdater')
-const minimist = require('minimist')
-const { waitForDb } = require('../app/src/infrastructure/mongodb')
-const { ObjectId } = require('mongodb-legacy')
+import { Subscription } from '../app/src/models/Subscription.js'
+import SubscriptionUpdater from '../app/src/Features/Subscription/SubscriptionUpdater.js'
+import minimist from 'minimist'
+import { waitForDb } from '../app/src/infrastructure/mongodb.js'
+import mongodb from 'mongodb-legacy'
+const { ObjectId } = mongodb
 const run = async () => {
 for (const id of ids) {

View file

@@ -1,21 +1,24 @@
-const { promisify } = require('util')
-const Settings = require('@overleaf/settings')
-const AdminController = require('../app/src/Features/ServerAdmin/AdminController')
+import { promisify } from 'util'
+import Settings from '@overleaf/settings'
+import AdminController from '../app/src/Features/ServerAdmin/AdminController.js'
+import minimist from 'minimist'
+import { fileURLToPath } from 'url'
-const args = require('minimist')(process.argv.slice(2), {
+const args = minimist(process.argv.slice(2), {
 string: ['confirm-site-url', 'delay-in-seconds'],
 default: {
 'delay-in-seconds': 10,
 'confirm-site-url': '',
 },
 })
 const sleep = promisify(setTimeout)
 async function main() {
 if (args.help) {
 console.error()
 console.error(
-' usage: node disconnect_all_users.js [--delay-in-seconds=10] --confirm-site-url=https://www....\n'
+' usage: node disconnect_all_users.mjs [--delay-in-seconds=10] --confirm-site-url=https://www....\n'
 )
 process.exit(1)
 }
@@ -55,14 +58,13 @@ async function main() {
 await AdminController._sendDisconnectAllUsersMessage(delay)
 }
-if (require.main === module) {
-main()
-.then(() => {
+if (fileURLToPath(import.meta.url) === process.argv[1]) {
+try {
+await main()
 console.error('Done.')
 process.exit(0)
-})
-.catch(err => {
-console.error('Error', err)
+} catch (error) {
+console.error('Error', error)
 process.exit(1)
-})
+}
 }

View file

@@ -1,8 +1,10 @@
-const { waitForDb } = require('../app/src/infrastructure/mongodb')
-const { User } = require('../app/src/models/User')
-const UserController = require('../app/src/Features/User/UserController')
-require('@overleaf/logger').logger.level('error')
-const pLimit = require('p-limit')
+import { waitForDb } from '../app/src/infrastructure/mongodb.js'
+import { User } from '../app/src/models/User.js'
+import UserController from '../app/src/Features/User/UserController.js'
+import Logger from '@overleaf/logger'
+import pLimit from 'p-limit'
+Logger.logger.level('error')
 const CONCURRENCY = 10
 const failure = []
 const success = []
@@ -42,12 +44,11 @@ async function run() {
 }
 }
-waitForDb()
-.then(run)
-.then(() => {
+try {
+await waitForDb()
+await run()
 process.exit()
-})
-.catch(error => {
+} catch (error) {
 console.log(error)
 process.exit(1)
-})
+}

View file

@@ -1,6 +1,6 @@
-const fs = require('fs')
-const path = require('path')
-const minimist = require('minimist')
+import fs from 'fs'
+import path from 'path'
+import minimist from 'minimist'
 const APP_CODE_PATH = ['app', 'modules', 'migrations', 'scripts', 'test']

View file

@@ -5,7 +5,7 @@ import {
 ObjectId,
 } from '../app/src/infrastructure/mongodb.js'
 import minimist from 'minimist'
-import InstitutionHubsController from '../modules/metrics/app/src/InstitutionHubsController.js'
+import InstitutionHubsController from '../modules/metrics/app/src/InstitutionHubsController.mjs'
 function usage() {
 console.log(

View file

@@ -1,8 +1,8 @@
-const {
+import {
 db,
 waitForDb,
 READ_PREFERENCE_SECONDARY,
-} = require('../app/src/infrastructure/mongodb')
+} from '../app/src/infrastructure/mongodb.js'
 async function main() {
 await waitForDb()
@@ -109,11 +109,10 @@ function findBadPaths(folder) {
 return result
 }
-main()
-.then(() => {
+try {
+await main()
 process.exit(0)
-})
-.catch(err => {
-console.error(err)
+} catch (error) {
+console.error(error)
 process.exit(1)
-})
+}

View file

@@ -1,7 +1,8 @@
-const DRY_RUN = process.env.DRY_RUN !== 'false'
-const { db, waitForDb } = require('../app/src/infrastructure/mongodb')
-const { batchedUpdate } = require('./helpers/batchedUpdate')
+import { db, waitForDb } from '../app/src/infrastructure/mongodb.js'
+import BatchedUpdateModule from './helpers/batchedUpdate.mjs'
+const { batchedUpdate } = BatchedUpdateModule
+const DRY_RUN = process.env.DRY_RUN !== 'false'
 console.log({
 DRY_RUN,
@@ -59,12 +60,11 @@ async function main() {
 await batchedUpdate('subscriptions', query, processBatch, projection)
 }
-main()
-.then(() => {
+try {
+await main()
 console.error('Done.')
 process.exit(0)
-})
-.catch(error => {
+} catch (error) {
 console.error({ error })
 process.exit(1)
-})
+}


@@ -5,9 +5,11 @@
  * This is the output format of each line in the find_malformed_filetrees.js
  * script.
  */
-const { ObjectId } = require('mongodb-legacy')
-const { db, waitForDb } = require('../app/src/infrastructure/mongodb')
-const ProjectLocator = require('../app/src/Features/Project/ProjectLocator')
+import mongodb from 'mongodb-legacy'
+import { db, waitForDb } from '../app/src/infrastructure/mongodb.js'
+import ProjectLocator from '../app/src/Features/Project/ProjectLocator.js'
+
+const { ObjectId } = mongodb
 
 async function main() {
   const { projectId, mongoPath } = parseArgs()
@@ -172,11 +174,10 @@ function findUniqueName(existingFilenames) {
   return filename
 }
 
-main()
-  .then(() => {
-    process.exit(0)
-  })
-  .catch(err => {
-    console.error(err)
-    process.exit(1)
-  })
+try {
+  await main()
+  process.exit(0)
+} catch (error) {
+  console.error(error)
+  process.exit(1)
+}


@@ -1,13 +1,13 @@
-const fs = require('fs')
-const minimist = require('minimist')
-const { waitForDb, ObjectId } = require('../app/src/infrastructure/mongodb')
-const DocstoreManager = require('../app/src/Features/Docstore/DocstoreManager')
-const FileStoreHandler = require('../app/src/Features/FileStore/FileStoreHandler')
-const FileWriter = require('../app/src/infrastructure/FileWriter')
-const ProjectEntityMongoUpdateHandler = require('../app/src/Features/Project/ProjectEntityMongoUpdateHandler')
-const ProjectLocator = require('../app/src/Features/Project/ProjectLocator')
-const RedisWrapper = require('@overleaf/redis-wrapper')
-const Settings = require('@overleaf/settings')
+import fs from 'fs'
+import minimist from 'minimist'
+import { waitForDb, ObjectId } from '../app/src/infrastructure/mongodb.js'
+import DocstoreManager from '../app/src/Features/Docstore/DocstoreManager.js'
+import FileStoreHandler from '../app/src/Features/FileStore/FileStoreHandler.js'
+import FileWriter from '../app/src/infrastructure/FileWriter.js'
+import ProjectEntityMongoUpdateHandler from '../app/src/Features/Project/ProjectEntityMongoUpdateHandler.js'
+import ProjectLocator from '../app/src/Features/Project/ProjectLocator.js'
+import RedisWrapper from '@overleaf/redis-wrapper'
+import Settings from '@overleaf/settings'
 
 const opts = parseArgs()
 const redis = RedisWrapper.createClient(Settings.redis.web)
@@ -154,11 +154,10 @@ async function deleteDocFromRedis(projectId, docId) {
   await redis.srem(`DocsIn:{${projectId}}`, projectId)
 }
 
-main()
-  .then(() => {
-    process.exit(0)
-  })
-  .catch(err => {
-    console.error(err)
-    process.exit(1)
-  })
+try {
+  await main()
+  process.exit(0)
+} catch (error) {
+  console.error(error)
+  process.exit(1)
+}


@@ -1,7 +1,8 @@
-const { ObjectId } = require('mongodb-legacy')
-const { db, waitForDb } = require('../app/src/infrastructure/mongodb')
-const DocumentUpdaterHandler = require('../app/src/Features/DocumentUpdater/DocumentUpdaterHandler')
+import mongodb from 'mongodb-legacy'
+import { db, waitForDb } from '../app/src/infrastructure/mongodb.js'
+import DocumentUpdaterHandler from '../app/src/Features/DocumentUpdater/DocumentUpdaterHandler.js'
+
+const { ObjectId } = mongodb
 
 const PROJECT_ID = process.env.PROJECT_ID
 const DOC_ID = process.env.DOC_ID
 const VERBOSE_LOGGING = process.env.VERBOSE_LOGGING === 'true'
@@ -67,12 +68,11 @@ function getDocument() {
   })
 }
 
-main()
-  .then(() => {
-    console.error('Done.')
-    process.exit(0)
-  })
-  .catch(error => {
-    console.error({ error })
-    process.exit(1)
-  })
+try {
+  await main()
+  console.error('Done.')
+  process.exit(0)
+} catch (error) {
+  console.error({ error })
+  process.exit(1)
+}


@@ -1,9 +1,11 @@
-const { ObjectId } = require('mongodb-legacy')
-const {
+import mongodb from 'mongodb-legacy'
+import {
   db,
   waitForDb,
   READ_PREFERENCE_SECONDARY,
-} = require('../../app/src/infrastructure/mongodb')
+} from '../../app/src/infrastructure/mongodb.js'
+
+const { ObjectId } = mongodb
 
 const ONE_MONTH_IN_MS = 1000 * 60 * 60 * 24 * 31
 let ID_EDGE_PAST
@@ -195,7 +197,7 @@ function batchedUpdateWithResultHandling(
     })
 }
 
-module.exports = {
+export default {
   batchedUpdate,
   batchedUpdateWithResultHandling,
 }
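With `batchedUpdate.mjs` exposing a single default export, the helpers are properties of that object rather than named exports, so every consumer in this diff imports in two steps. A minimal sketch, assuming the path as seen from `scripts/`:

import BatchedUpdateModule from './helpers/batchedUpdate.mjs'

// The helpers hang off the default-exported object; `import { batchedUpdate }`
// would fail because the module has no named exports.
const { batchedUpdate, batchedUpdateWithResultHandling } = BatchedUpdateModule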


@@ -0,0 +1,31 @@
+/**
+ * Ensures that the specific MongoDB connection timeout is set.
+ *
+ * @param {number} timeoutInMS
+ * @returns {void}
+ */
+export function ensureMongoTimeout(timeoutInMS) {
+  if (process.env.MONGO_SOCKET_TIMEOUT !== timeoutInMS.toString()) {
+    throw new Error(
+      `must run with higher mongo timeout: MONGO_SOCKET_TIMEOUT=${timeoutInMS} node ${process.argv[1]}`
+    )
+  }
+}
+
+/**
+ * Ensures MongoDB queries are running on secondary and the specific connection timeout is set.
+ *
+ * @param {number} timeoutInMS
+ * @returns {void}
+ */
+export function ensureRunningOnMongoSecondaryWithTimeout(timeoutInMS) {
+  if (
+    process.env.MONGO_SOCKET_TIMEOUT !== timeoutInMS.toString() ||
+    process.env.MONGO_CONNECTION_STRING !==
+      process.env.READ_ONLY_MONGO_CONNECTION_STRING
+  ) {
+    throw new Error(
+      `must run on secondary with higher mongo timeout: MONGO_SOCKET_TIMEOUT=${timeoutInMS} MONGO_CONNECTION_STRING="$READ_ONLY_MONGO_CONNECTION_STRING" node ${process.argv[1]}`
+    )
+  }
+}
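The new helper asserts preconditions instead of mutating `process.env`, so a script fails fast when launched without the expected environment. A usage sketch matching the SAML scripts further down (script name hypothetical):

import { ensureMongoTimeout } from './helpers/env_variable_helper.mjs'

// Throws unless the process was started as, e.g.:
//   MONGO_SOCKET_TIMEOUT=300000 node scripts/example_script.mjs
ensureMongoTimeout(300000)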


@@ -1,53 +0,0 @@
-const { db, ObjectId, waitForDb } = require('../app/src/infrastructure/mongodb')
-const minimist = require('minimist')
-
-const argv = minimist(process.argv.slice(2))
-const commit = argv.commit !== undefined
-const projectIds = argv._.map(x => {
-  return new ObjectId(x)
-})
-if (!commit) {
-  console.log('Doing dry run without --commit')
-}
-console.log('checking', projectIds.length, 'projects')
-waitForDb().then(async () => {
-  const affectedProjects = await db.projects
-    .find(
-      { _id: { $in: projectIds } },
-      {
-        projection: {
-          _id: 1,
-          owner_ref: 1,
-          tokenAccessReadOnly_refs: 1,
-          tokenAccessReadAndWrite_refs: 1,
-        },
-      }
-    )
-    .toArray()
-  console.log('Found ' + affectedProjects.length + ' affected projects')
-  affectedProjects.forEach(project => {
-    console.log(JSON.stringify(project))
-  })
-  if (!commit) {
-    console.log('dry run, not updating')
-    process.exit(0)
-  } else {
-    try {
-      const result = await db.projects.updateMany(
-        { _id: { $in: affectedProjects.map(project => project._id) } },
-        {
-          $set: {
-            publicAccesLevel: 'private', // note the spelling in the db is publicAccesLevel (with one 's')
-            tokenAccessReadOnly_refs: [],
-            tokenAccessReadAndWrite_refs: [],
-          },
-        }
-      )
-      console.log('result', JSON.stringify(result))
-      process.exit(0)
-    } catch (err) {
-      console.error('err', err)
-      process.exit(1)
-    }
-  }
-})


@@ -0,0 +1,54 @@
+import { db, ObjectId, waitForDb } from '../app/src/infrastructure/mongodb.js'
+import minimist from 'minimist'
+
+const argv = minimist(process.argv.slice(2))
+const commit = argv.commit !== undefined
+const projectIds = argv._.map(x => {
+  return new ObjectId(x)
+})
+
+if (!commit) {
+  console.log('Doing dry run without --commit')
+}
+console.log('checking', projectIds.length, 'projects')
+
+await waitForDb()
+const affectedProjects = await db.projects
+  .find(
+    { _id: { $in: projectIds } },
+    {
+      projection: {
+        _id: 1,
+        owner_ref: 1,
+        tokenAccessReadOnly_refs: 1,
+        tokenAccessReadAndWrite_refs: 1,
+      },
+    }
+  )
+  .toArray()
+console.log('Found ' + affectedProjects.length + ' affected projects')
+affectedProjects.forEach(project => {
+  console.log(JSON.stringify(project))
+})
+if (!commit) {
+  console.log('dry run, not updating')
+  process.exit(0)
+} else {
+  try {
+    const result = await db.projects.updateMany(
+      { _id: { $in: affectedProjects.map(project => project._id) } },
+      {
+        $set: {
+          publicAccesLevel: 'private', // note the spelling in the db is publicAccesLevel (with one 's')
+          tokenAccessReadOnly_refs: [],
+          tokenAccessReadAndWrite_refs: [],
+        },
+      }
+    )
+    console.log('result', JSON.stringify(result))
+    process.exit(0)
+  } catch (err) {
+    console.error('err', err)
+    process.exit(1)
+  }
+}
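The deletion above and this addition are the same script: the CommonJS file is removed wholesale and recreated as an `.mjs` module, with the `waitForDb().then(async () => { ... })` wrapper flattened into a top-level `await waitForDb()`. The body, including the deliberate `publicAccesLevel` spelling, is otherwise unchanged.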


@@ -1,8 +1,11 @@
-const { batchedUpdate } = require('./helpers/batchedUpdate')
-const { promiseMapWithLimit, promisify } = require('@overleaf/promise-utils')
-const { db, ObjectId, waitForDb } = require('../app/src/infrastructure/mongodb')
+import BatchedUpdateModule from './helpers/batchedUpdate.mjs'
+import { promiseMapWithLimit, promisify } from '@overleaf/promise-utils'
+import { db, ObjectId, waitForDb } from '../app/src/infrastructure/mongodb.js'
+import _ from 'lodash'
+import { fileURLToPath } from 'url'
 
 const sleep = promisify(setTimeout)
-const _ = require('lodash')
+const { batchedUpdate } = BatchedUpdateModule
 
 async function main(options) {
   if (!options) {
@@ -146,17 +149,16 @@ async function letUserDoubleCheckInputs(options) {
   await sleep(options.letUserDoubleCheckInputsFor)
 }
 
-module.exports = main
+export default main
 
-if (require.main === module) {
-  waitForDb()
-    .then(main)
-    .then(() => {
-      console.log('Done.')
-      process.exit(0)
-    })
-    .catch(error => {
-      console.error({ error })
-      process.exit(1)
-    })
+if (fileURLToPath(import.meta.url) === process.argv[1]) {
+  try {
+    await waitForDb()
+    await main()
+    console.log('Done.')
+    process.exit(0)
+  } catch (error) {
+    console.error({ error })
+    process.exit(1)
+  }
 }
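Scripts that are consumed elsewhere keep both `export default main` and the entry-point guard, so importing the module stays free of side effects. A sketch of the consumer side, with both file names hypothetical:

// some_migration.mjs (hypothetical consumer)
import runScript from '../scripts/some_script.mjs'

// The guard means nothing ran at import time;
// the consumer decides when to execute the script.
await runScript()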


@@ -6,7 +6,7 @@ import FeaturesUpdater from '../app/src/Features/Subscription/FeaturesUpdater.js'
 import FeaturesHelper from '../app/src/Features/Subscription/FeaturesHelper.js'
 import UserFeaturesUpdater from '../app/src/Features/Subscription/UserFeaturesUpdater.js'
 import AnalyticsManager from '../app/src/Features/Analytics/AnalyticsManager.js'
-import DropboxHandler from '../modules/dropbox/app/src/DropboxHandler.js'
+import DropboxHandler from '../modules/dropbox/app/src/DropboxHandler.mjs'
 import { OError } from '../app/src/Features/Errors/Errors.js'
 
 const ScriptLogger = {


@@ -1,17 +1,18 @@
+import {
+  db,
+  READ_PREFERENCE_SECONDARY,
+} from '../app/src/infrastructure/mongodb.js'
+import { promiseMapWithLimit } from '@overleaf/promise-utils'
+import TokenGenerator from '../app/src/Features/TokenGenerator/TokenGenerator.js'
+import BatchedUpdateModule from './helpers/batchedUpdate.mjs'
+
+const { batchedUpdate } = BatchedUpdateModule
 const VERBOSE_LOGGING = process.env.VERBOSE_LOGGING === 'true'
 const WRITE_CONCURRENCY = parseInt(process.env.WRITE_CONCURRENCY, 10) || 10
 const BATCH_SIZE = parseInt(process.env.BATCH_SIZE, 10) || 100
 // persist fallback in order to keep batchedUpdate in-sync
 process.env.BATCH_SIZE = BATCH_SIZE
-const {
-  db,
-  READ_PREFERENCE_SECONDARY,
-} = require('../app/src/infrastructure/mongodb')
-const { promiseMapWithLimit } = require('@overleaf/promise-utils')
-const TokenGenerator = require('../app/src/Features/TokenGenerator/TokenGenerator')
-const { batchedUpdate } = require('./helpers/batchedUpdate')
 
 async function rewriteDuplicates(duplicateReferralIds) {
   // duplicateReferralIds contains at least one duplicate.
   // Find out which is the duplicate in parallel and update


@@ -1,11 +1,17 @@
-const { db, waitForDb } = require('../app/src/infrastructure/mongodb')
-const { batchedUpdate } = require('./helpers/batchedUpdate')
-const { ObjectId } = require('mongodb-legacy')
-const minimist = require('minimist')
-const CollaboratorsHandler = require('../app/src/Features/Collaborators/CollaboratorsHandler')
-const {
+import {
+  db,
+  waitForDb,
   READ_PREFERENCE_SECONDARY,
-} = require('../app/src/infrastructure/mongodb')
+} from '../app/src/infrastructure/mongodb.js'
+import BatchedUpdateModule from './helpers/batchedUpdate.mjs'
+import mongodb from 'mongodb-legacy'
+import minimist from 'minimist'
+import CollaboratorsHandler from '../app/src/Features/Collaborators/CollaboratorsHandler.js'
+import { fileURLToPath } from 'url'
+
+const { ObjectId } = mongodb
+const { batchedUpdate } = BatchedUpdateModule
 
 const argv = minimist(process.argv.slice(2), {
   string: ['projects'],
@@ -190,9 +196,9 @@ async function main(DRY_RUN, PROJECTS_LIST) {
   await fixProjectsWithInvalidTokenAccessRefsIds(DRY_RUN, PROJECTS_LIST)
 }
 
-module.exports = main
+export default main
 
-if (require.main === module) {
+if (fileURLToPath(import.meta.url) === process.argv[1]) {
   if (argv.help || argv._.length > 1) {
     console.error(`Usage: node scripts/remove_deleted_users_from_token_access_refs.js [OPTS]
       Finds or removes deleted user ids from token access fields
@@ -208,13 +214,12 @@ if (require.main === module) {
     process.exit(1)
   }
 
-  main(DRY_RUN, PROJECTS_LIST)
-    .then(() => {
-      console.error('Done')
-      process.exit(0)
-    })
-    .catch(err => {
-      console.error(err)
-      process.exit(1)
-    })
+  try {
+    await main(DRY_RUN, PROJECTS_LIST)
+    console.error('Done')
+    process.exit(0)
+  } catch (error) {
+    console.error(error)
+    process.exit(1)
+  }
 }


@@ -1,17 +1,6 @@
-const { ObjectId, waitForDb } = require('../app/src/infrastructure/mongodb')
-const UserUpdater = require('../app/src/Features/User/UserUpdater')
-const UserGetter = require('../app/src/Features/User/UserGetter')
-
-waitForDb()
-  .then(removeEmail)
-  .catch(error => {
-    console.error(error)
-    process.exit(1)
-  })
-  .then(() => {
-    console.log('Done.')
-    process.exit()
-  })
+import { ObjectId, waitForDb } from '../app/src/infrastructure/mongodb.js'
+import UserUpdater from '../app/src/Features/User/UserUpdater.js'
+import UserGetter from '../app/src/Features/User/UserGetter.js'
 
 async function removeEmail() {
   const userId = process.argv[2]
@@ -63,3 +52,12 @@ async function removeEmail() {
     skipParseEmail
   )
 }
+
+try {
+  await waitForDb()
+  await removeEmail()
+  console.log('Done.')
+  process.exit()
+} catch (error) {
+  console.error(error)
+  process.exit(1)
+}


@@ -1,7 +1,7 @@
-const { OauthApplication } = require('../app/src/models/OauthApplication')
-const parseArgs = require('minimist')
-const OError = require('@overleaf/o-error')
-const { waitForDb } = require('../app/src/infrastructure/mongodb')
+import { OauthApplication } from '../app/src/models/OauthApplication.js'
+import parseArgs from 'minimist'
+import OError from '@overleaf/o-error'
+import { waitForDb } from '../app/src/infrastructure/mongodb.js'
 
 async function _removeOauthApplication(appId) {
   if (!appId) {
@@ -26,14 +26,11 @@ async function main() {
   await _removeOauthApplication(appId)
 }
 
-if (require.main === module) {
-  main()
-    .then(() => {
-      console.log('Done')
-      process.exit(0)
-    })
-    .catch(err => {
-      console.error(err)
-      process.exit(1)
-    })
-}
+try {
+  await main()
+  console.log('Done')
+  process.exit(0)
+} catch (error) {
+  console.error(error)
+  process.exit(1)
+}


@@ -1,7 +1,7 @@
-const { waitForDb } = require('../app/src/infrastructure/mongodb')
-const ProjectEntityRestoreHandler = require('../app/src/Features/Project/ProjectEntityRestoreHandler')
-const ProjectEntityHandler = require('../app/src/Features/Project/ProjectEntityHandler')
-const DocstoreManager = require('../app/src/Features/Docstore/DocstoreManager')
+import { waitForDb } from '../app/src/infrastructure/mongodb.js'
+import ProjectEntityRestoreHandler from '../app/src/Features/Project/ProjectEntityRestoreHandler.js'
+import ProjectEntityHandler from '../app/src/Features/Project/ProjectEntityHandler.js'
+import DocstoreManager from '../app/src/Features/Docstore/DocstoreManager.js'
 
 const ARGV = process.argv.slice(2)
 const DEVELOPER_USER_ID = ARGV.shift()
@@ -35,12 +35,11 @@ async function main() {
   }
 }
 
-waitForDb()
-  .then(main)
-  .then(() => {
-    process.exit(0)
-  })
-  .catch(error => {
-    console.error(error)
-    process.exit(1)
-  })
+try {
+  await waitForDb()
+  await main()
+  process.exit(0)
+} catch (error) {
+  console.error(error)
+  process.exit(1)
+}


@@ -1,6 +1,6 @@
-const { waitForDb } = require('../app/src/infrastructure/mongodb')
-const ProjectEntityRestoreHandler = require('../app/src/Features/Project/ProjectEntityRestoreHandler')
-const DocstoreManager = require('../app/src/Features/Docstore/DocstoreManager')
+import { waitForDb } from '../app/src/infrastructure/mongodb.js'
+import ProjectEntityRestoreHandler from '../app/src/Features/Project/ProjectEntityRestoreHandler.js'
+import DocstoreManager from '../app/src/Features/Docstore/DocstoreManager.js'
 
 const ARGV = process.argv.slice(2)
 const DEVELOPER_USER_ID = ARGV.shift()
@@ -24,12 +24,11 @@ async function main() {
   }
 }
 
-waitForDb()
-  .then(main)
-  .then(() => {
-    process.exit(0)
-  })
-  .catch(error => {
-    console.error(error)
-    process.exit(1)
-  })
+try {
+  await waitForDb()
+  await main()
+  process.exit(0)
+} catch (error) {
+  console.error(error)
+  process.exit(1)
+}


@@ -1,6 +1,8 @@
-const Settings = require('@overleaf/settings')
-const { ObjectId } = require('mongodb-legacy')
-const { Project } = require('../app/src/models/Project')
+import Settings from '@overleaf/settings'
+import mongodb from 'mongodb-legacy'
+import { Project } from '../app/src/models/Project.js'
+
+const { ObjectId } = mongodb
 
 async function main() {
   const { image, projectIds } = parseArgs()
@@ -58,11 +60,10 @@ async function updateImage(image, projectIds) {
   console.log(`Modified ${res.modifiedCount} projects`)
 }
 
-main()
-  .then(() => {
-    process.exit()
-  })
-  .catch(err => {
-    console.error(err)
-    process.exit(1)
-  })
+try {
+  await main()
+  process.exit()
+} catch (error) {
+  console.error(error)
+  process.exit(1)
+}


@@ -1,6 +1,6 @@
-const minimist = require('minimist')
-const { waitForDb } = require('../app/src/infrastructure/mongodb')
-const ProjectDeleter = require('../app/src/Features/Project/ProjectDeleter')
+import minimist from 'minimist'
+import { waitForDb } from '../app/src/infrastructure/mongodb.js'
+import ProjectDeleter from '../app/src/Features/Project/ProjectDeleter.js'
 
 async function main() {
   const argv = minimist(process.argv.slice(2))
@@ -16,12 +16,11 @@ async function main() {
   await ProjectDeleter.promises.deleteProject(projectId)
 }
 
-main()
-  .then(() => {
-    console.log('Done.')
-    process.exit(0)
-  })
-  .catch(err => {
-    console.error(err)
-    process.exit(1)
-  })
+try {
+  await main()
+  console.log('Done.')
+  process.exit(0)
+} catch (error) {
+  console.error(error)
+  process.exit(1)
+}


@@ -1,4 +1,5 @@
-const { db, waitForDb } = require('../app/src/infrastructure/mongodb')
+import { db, waitForDb } from '../app/src/infrastructure/mongodb.js'
+import { fileURLToPath } from 'url'
 
 async function updateStringDates() {
   await waitForDb()
@@ -33,15 +34,14 @@ async function updateStringDates() {
   console.log(`Updated ${count} assignedAt strings to dates!`)
 }
 
-if (!module.parent) {
-  updateStringDates()
-    .then(() => {
-      process.exit(0)
-    })
-    .catch(error => {
-      console.error(error)
-      process.exit(1)
-    })
+if (fileURLToPath(import.meta.url) === process.argv[1]) {
+  try {
+    await updateStringDates()
+    process.exit(0)
+  } catch (error) {
+    console.error(error)
+    process.exit(1)
+  }
 }
 
-module.exports = updateStringDates
+export default updateStringDates


@@ -1,8 +1,11 @@
-const { db, waitForDb } = require('../app/src/infrastructure/mongodb')
-const { batchedUpdate } = require('./helpers/batchedUpdate')
-const { ObjectId } = require('mongodb-legacy')
-const fs = require('fs')
+import { db, waitForDb } from '../app/src/infrastructure/mongodb.js'
+import BatchedUpdateModule from './helpers/batchedUpdate.mjs'
+import mongodb from 'mongodb-legacy'
+import fs from 'fs'
+import { fileURLToPath } from 'url'
+
+const { ObjectId } = mongodb
+const { batchedUpdate } = BatchedUpdateModule
 
 const CHUNK_SIZE = 1000
 
 // Function to chunk the array
@@ -45,15 +48,14 @@ async function main() {
   }
 }
 
-module.exports = main
+export default main
 
-if (require.main === module) {
-  main()
-    .then(() => {
-      process.exit(0)
-    })
-    .catch(error => {
-      console.error({ error })
-      process.exit(1)
-    })
+if (fileURLToPath(import.meta.url) === process.argv[1]) {
+  try {
+    await main()
+    process.exit(0)
+  } catch (error) {
+    console.error({ error })
+    process.exit(1)
+  }
 }


@@ -1,9 +1,8 @@
-process.env.MONGO_SOCKET_TIMEOUT = '300000'
-process.env.MONGO_CONNECTION_STRING =
-  process.env.READ_ONLY_MONGO_CONNECTION_STRING
-const { waitForDb } = require('../app/src/infrastructure/mongodb')
-const SAMLUserIdMigrationHandler = require('../modules/saas-authentication/app/src/SAML/SAMLUserIdMigrationHandler')
+import { waitForDb } from '../app/src/infrastructure/mongodb.js'
+import SAMLUserIdMigrationHandler from '../modules/saas-authentication/app/src/SAML/SAMLUserIdMigrationHandler.js'
+import { ensureRunningOnMongoSecondaryWithTimeout } from './helpers/env_variable_helper.mjs'
+
+ensureRunningOnMongoSecondaryWithTimeout(300000)
 
 const institutionId = parseInt(process.argv[2])
 if (isNaN(institutionId)) throw new Error('No institution id')


@@ -1,6 +1,8 @@
-process.env.MONGO_SOCKET_TIMEOUT = '300000'
-const { waitForDb } = require('../app/src/infrastructure/mongodb')
-const SAMLUserIdMigrationHandler = require('../modules/saas-authentication/app/src/SAML/SAMLUserIdMigrationHandler')
+import { waitForDb } from '../app/src/infrastructure/mongodb.js'
+import SAMLUserIdMigrationHandler from '../modules/saas-authentication/app/src/SAML/SAMLUserIdMigrationHandler.js'
+import { ensureMongoTimeout } from './helpers/env_variable_helper.mjs'
+
+ensureMongoTimeout(300000)
 
 const institutionId = parseInt(process.argv[2])
 if (isNaN(institutionId)) throw new Error('No institution id')
@@ -11,13 +13,6 @@ console.log(
   institutionId
 )
 
-waitForDb()
-  .then(main)
-  .catch(error => {
-    console.error(error)
-    process.exit(1)
-  })
-
 async function main() {
   const result =
     await SAMLUserIdMigrationHandler.promises.removeNotMigrated(institutionId)
@@ -34,3 +29,11 @@ async function main() {
 
   process.exit()
 }
+
+try {
+  await waitForDb()
+  await main()
+} catch (error) {
+  console.error(error)
+  process.exit(1)
+}
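Note the behavioural change in these two SAML scripts: both previously forced the Mongo settings from inside the process, whereas they now throw at startup unless the caller has already exported the expected values, for example (invocation hypothetical):

// MONGO_SOCKET_TIMEOUT=300000 \
//   MONGO_CONNECTION_STRING="$READ_ONLY_MONGO_CONNECTION_STRING" \
//   node scripts/the_script.mjs 123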


@@ -1,39 +1,40 @@
-const minimist = require('minimist')
-const settings = require('@overleaf/settings')
-const ProjectDetailsHandler = require('../app/src/Features/Project/ProjectDetailsHandler')
-const mongodb = require('../app/src/infrastructure/mongodb')
-const mongoose = require('../app/src/infrastructure/Mongoose')
-const fs = require('fs')
-const path = require('path')
-const crypto = require('crypto')
-const fetch = require('node-fetch')
-const http = require('http')
+import minimist from 'minimist'
+import settings from '@overleaf/settings'
+import ProjectDetailsHandler from '../app/src/Features/Project/ProjectDetailsHandler.js'
+import mongodb from '../app/src/infrastructure/mongodb.js'
+import mongoose from '../app/src/infrastructure/Mongoose.js'
+import fs from 'fs'
+import path from 'path'
+import crypto from 'crypto'
+import fetch from 'node-fetch'
+import http from 'http'
+import _ from 'lodash'
 
 const { ObjectId } = mongodb
-const _ = require('lodash')
 
 // Examples:
 //
 // Simple usage:
-// node stress_test.js --project-id=ID -n 100 --download-zip # download 100 zips from history-v1
-// node stress_test.js --project-id=ID -n 100 --create-blob # create 100 blobs in history-v1
-// node stress_test.js --project-id=ID -n 100 --fetch-blob # create blob and fetch it 100 times from history-v1
-// node stress_test.js --project-id=ID -n 100 --upload-file # upload 100 files to filestore
-// node stress_test.js --project-id=ID -n 100 --download-file # create file in filestore and download it 100 times
+// node stress_test.mjs --project-id=ID -n 100 --download-zip # download 100 zips from history-v1
+// node stress_test.mjs --project-id=ID -n 100 --create-blob # create 100 blobs in history-v1
+// node stress_test.mjs --project-id=ID -n 100 --fetch-blob # create blob and fetch it 100 times from history-v1
+// node stress_test.mjs --project-id=ID -n 100 --upload-file # upload 100 files to filestore
+// node stress_test.mjs --project-id=ID -n 100 --download-file # create file in filestore and download it 100 times
 //
 // Delay between requests:
-// node stress_test.js --project-id=ID -n 100 --download-zip --sleep=0.1 # download 100 zips from history-v1 with 0.1s sleep
+// node stress_test.mjs --project-id=ID -n 100 --download-zip --sleep=0.1 # download 100 zips from history-v1 with 0.1s sleep
 //
 // Abort requests at random times:
-// node stress_test.js --project-id=ID -n 100 --download-zip --abort # download 100 zips from history-v1 with aborts
+// node stress_test.mjs --project-id=ID -n 100 --download-zip --abort # download 100 zips from history-v1 with aborts
 //
 // Parallel workers:
-// node stress_test.js --project-id=ID -n 1000 -j 10 --upload-file # upload 1000 files in 10 parallel workers
+// node stress_test.mjs --project-id=ID -n 1000 -j 10 --upload-file # upload 1000 files in 10 parallel workers
 //
 // Fixed file size:
-// node stress_test.js --project-id=ID -n 1000 --size 1000000 --upload-file # upload 1000 files of 1MB in 10 parallel workers
+// node stress_test.mjs --project-id=ID -n 1000 --size 1000000 --upload-file # upload 1000 files of 1MB in 10 parallel workers
 //
 // Random file size:
-// node stress_test.js --project-id=ID -n 1000 --size-min 1024 --size-max 10000000 --upload-file # upload 1000 files of 1KB to 10MB in 10 parallel workers
+// node stress_test.mjs --project-id=ID -n 1000 --size-min 1024 --size-max 10000000 --upload-file # upload 1000 files of 1KB to 10MB in 10 parallel workers
 
 const argv = minimist(process.argv.slice(2), {
   string: ['n', 'j', 'project-id', 'sleep', 'size', 'size-min', 'size-max'],
@@ -58,7 +59,7 @@ const argv = minimist(process.argv.slice(2), {
 const projectId = argv['project-id']
 if (!projectId) {
   console.error(
-    'Usage: node stress_test.js --project-id ID -n COUNT -j CONCURRENCY --sleep T --size BYTES --use-file --[create-blob|fetch-blob|download-zip|upload-file|download-file]'
+    'Usage: node stress_test.mjs --project-id ID -n COUNT -j CONCURRENCY --sleep T --size BYTES --use-file --[create-blob|fetch-blob|download-zip|upload-file|download-file]'
   )
   process.exit(1)
 }
@@ -430,13 +431,12 @@ async function run() {
   log('Stress test done')
 }
 
-Promise.all([mongodb.waitForDb(), mongoose.connectionPromise])
-  .then(() => run())
-  .then(() => {
-    log('Completed')
-    process.exit(0)
-  })
-  .catch(error => {
-    console.error(error)
-    process.exit(1)
-  })
+try {
+  await Promise.all([mongodb.waitForDb(), mongoose.connectionPromise])
+  await run()
+  log('Completed')
+  process.exit(0)
+} catch (error) {
+  console.error(error)
+  process.exit(1)
+}


@@ -1,6 +1,6 @@
-const minimist = require('minimist')
-const { waitForDb } = require('../app/src/infrastructure/mongodb')
-const ProjectDeleter = require('../app/src/Features/Project/ProjectDeleter')
+import minimist from 'minimist'
+import { waitForDb } from '../app/src/infrastructure/mongodb.js'
+import ProjectDeleter from '../app/src/Features/Project/ProjectDeleter.js'
 
 async function main() {
   const argv = minimist(process.argv.slice(2))
@@ -15,12 +15,11 @@ async function main() {
   await ProjectDeleter.promises.undeleteProject(projectId, { userId })
 }
 
-main()
-  .then(() => {
-    console.log('Done.')
-    process.exit(0)
-  })
-  .catch(err => {
-    console.error(err)
-    process.exit(1)
-  })
+try {
+  await main()
+  console.log('Done.')
+  process.exit(0)
+} catch (error) {
+  console.error(error)
+  process.exit(1)
+}


@@ -1,7 +1,7 @@
-const { waitForDb } = require('../app/src/infrastructure/mongodb')
-const minimist = require('minimist')
-const ThirdPartyIdentityManager = require('../app/src/Features/User/ThirdPartyIdentityManager')
-const UserGetter = require('../app/src/Features/User/UserGetter')
+import { waitForDb } from '../app/src/infrastructure/mongodb.js'
+import minimist from 'minimist'
+import ThirdPartyIdentityManager from '../app/src/Features/User/ThirdPartyIdentityManager.js'
+import UserGetter from '../app/src/Features/User/UserGetter.js'
 
 /**
  * This script is used to remove a linked third party identity from a user account.
@@ -14,9 +14,9 @@ const UserGetter = require('../app/src/Features/User/UserGetter')
  * Usage:
  *
  * - dry run:
- *   node scripts/unlink_third_party_id.js --providerId=google --userId=${SOME_USER_ID}
+ *   node scripts/unlink_third_party_id.mjs --providerId=google --userId=${SOME_USER_ID}
  * - commit:
- *   node scripts/unlink_third_party_id.js --providerId=google --userId=${SOME_USER_ID} --commit
+ *   node scripts/unlink_third_party_id.mjs --providerId=google --userId=${SOME_USER_ID} --commit
  */
 
 let COMMIT = false
@@ -81,11 +81,10 @@ async function main() {
 
 setup()
 
-main()
-  .then(() => {
-    process.exit(0)
-  })
-  .catch(err => {
-    console.error(err)
-    process.exit(1)
-  })
+try {
+  await main()
+  process.exit(0)
+} catch (error) {
+  console.error(error)
+  process.exit(1)
+}


@@ -1,5 +1,6 @@
-const { batchedUpdate } = require('./helpers/batchedUpdate')
+import BatchedUpdateModule from './helpers/batchedUpdate.mjs'
+
+const { batchedUpdate } = BatchedUpdateModule
 
 const oldImage = process.argv[2]
 const newImage = process.argv[3]
@@ -31,16 +32,15 @@ if (!process.env.ALL_TEX_LIVE_DOCKER_IMAGES.split(',').includes(newImage)) {
   process.exit(1)
 }
 
-batchedUpdate(
-  'projects',
-  { imageName: oldImage },
-  { $set: { imageName: newImage } }
-)
-  .then(() => {
-    console.log('Done')
-    process.exit(0)
-  })
-  .catch(error => {
-    console.error(error)
-    process.exit(1)
-  })
+try {
+  await batchedUpdate(
+    'projects',
+    { imageName: oldImage },
+    { $set: { imageName: newImage } }
+  )
+  console.log('Done')
+  process.exit(0)
+} catch (error) {
+  console.error(error)
+  process.exit(1)
+}


@@ -1,7 +1,13 @@
-const { batchedUpdateWithResultHandling } = require('./helpers/batchedUpdate')
+import BatchedUpdateModule from './helpers/batchedUpdate.mjs'
+
+const { batchedUpdateWithResultHandling } = BatchedUpdateModule
 
 const MODEL_NAME = process.argv.pop()
-const Model = require(`../app/src/models/${MODEL_NAME}`)[MODEL_NAME]
+// Todo: handle mjs file once models have been converted to ES module
+const { [MODEL_NAME]: Model } = await import(
+  `../app/src/models/${MODEL_NAME}.js`
+)
 
 function processBatch(batch) {
   for (const doc of batch) {
@@ -16,7 +22,7 @@ function processBatch(batch) {
 batchedUpdateWithResultHandling(
   Model.collection.name,
   {},
-  async (_, nextBatch) => {
+  async nextBatch => {
     await processBatch(nextBatch)
   },
   {}
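A static `import` declaration cannot take a computed specifier, so the model lookup becomes a top-level `await import()`. A minimal sketch of the pattern, with the model name supplied on the command line:

const MODEL_NAME = process.argv.pop()

// import() accepts a runtime expression and resolves to the module
// namespace; the model is picked out via a computed property name.
const { [MODEL_NAME]: Model } = await import(`../app/src/models/${MODEL_NAME}.js`)

The second hunk's callback change, `async (_, nextBatch)` to `async nextBatch`, tracks the migrated helper, which appears to no longer pass a leading argument to the batch handler.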


@@ -78,7 +78,7 @@ describe('BackFillDeletedFiles', function () {
     try {
       result = await promisify(exec)(
         ['LET_USER_DOUBLE_CHECK_INPUTS_FOR=1', 'VERBOSE_LOGGING=true']
-          .concat(['node', 'scripts/back_fill_deleted_files'])
+          .concat(['node', 'scripts/back_fill_deleted_files.mjs'])
           .concat(args)
           .join(' ')
       )


@@ -67,7 +67,7 @@ describe('BackFillDocNameForDeletedDocs', function () {
     try {
       result = await promisify(exec)(
         ['LET_USER_DOUBLE_CHECK_INPUTS_FOR=1']
-          .concat(['node', 'scripts/back_fill_doc_name_for_deleted_docs'])
+          .concat(['node', 'scripts/back_fill_doc_name_for_deleted_docs.mjs'])
          .concat(args)
          .join(' ')
       )


@@ -24,7 +24,7 @@ describe('BackFillDocRevTests', function () {
       [
         'VERBOSE_LOGGING=true',
         'node',
-        'scripts/back_fill_doc_rev',
+        'scripts/back_fill_doc_rev.mjs',
         dryRun,
       ].join(' ')
     )


@@ -121,7 +121,7 @@ describe('BackFillDummyDocMeta', function () {
     result = await promisify(exec)(
       Object.entries(options)
         .map(([key, value]) => `${key}=${value}`)
-        .concat(['node', 'scripts/back_fill_dummy_doc_meta.js'])
+        .concat(['node', 'scripts/back_fill_dummy_doc_meta.mjs'])
         .join(' ')
     )
   } catch (error) {


@@ -22,8 +22,9 @@ describe('BatchedUpdateTests', function () {
   })
 
   spawnSync(process.argv0, [
+    '--input-type=module',
     '-e',
-    'require("./scripts/helpers/batchedUpdate").batchedUpdateWithResultHandling("systemmessages", { content: { $ne: "42" }}, { $set: { content: "42" } })',
+    'import BatchedUpdateModule from "./scripts/helpers/batchedUpdate.mjs"; BatchedUpdateModule.batchedUpdateWithResultHandling("systemmessages", { content: { $ne: "42" }}, { $set: { content: "42" } })',
   ])
 
   await expect(
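`node -e` evaluates its payload as CommonJS by default, so the test adds `--input-type=module` before the inline `import` statement will parse. A trimmed sketch of the shape, with the inline script shortened:

import { spawnSync } from 'child_process'

// --input-type=module makes the -e payload an ES module.
spawnSync(process.argv0, [
  '--input-type=module',
  '-e',
  'import BatchedUpdateModule from "./scripts/helpers/batchedUpdate.mjs"; /* ... */',
])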


@@ -120,7 +120,7 @@ describe('ConvertArchivedState', function () {
   beforeEach(function (done) {
     exec(
-      'CONNECT_DELAY=1 node scripts/convert_archived_state.js FIRST,SECOND',
+      'CONNECT_DELAY=1 node scripts/convert_archived_state.mjs FIRST,SECOND',
       error => {
         if (error) {
           return done(error)


@@ -121,7 +121,7 @@ describe('DeleteOrphanedDocsOnlineCheck', function () {
         // Hide deprecation warnings for calling `db.collection.count`
         'NODE_OPTIONS=--no-deprecation',
       ])
-      .concat(['node', 'scripts/delete_orphaned_docs_online_check.js'])
+      .concat(['node', 'scripts/delete_orphaned_docs_online_check.mjs'])
       .join(' ')
     )
   } catch (error) {


@@ -96,7 +96,7 @@ describe('RegenerateDuplicateReferralIds', function () {
         // actual command
         'node',
-        'scripts/regenerate_duplicate_referral_ids',
+        'scripts/regenerate_duplicate_referral_ids.mjs',
       ].join(' ')
     )
   } catch (err) {


@@ -54,7 +54,7 @@ describe('RemoveDeletedUsersFromTokenAccessRefsTests', function () {
       [
        'VERBOSE_LOGGING=true',
        'node',
-       'scripts/remove_deleted_users_from_token_access_refs',
+       'scripts/remove_deleted_users_from_token_access_refs.mjs',
        dryRun,
        projectsList,
      ].join(' ')