overleaf/services/web/scripts/split_writefull_disabled_from_unset.mjs
Jakob Ackermann a7517eefcb Merge pull request #21427 from overleaf/jpa-populate-db-on-import
[web] populate db with collections on import, ahead of waitForDb() call

GitOrigin-RevId: 7eb4cd61c2052187acd9947d7060f54d9822d314
2024-11-01 09:05:15 +00:00

60 lines
1.6 KiB
JavaScript

import { db } from '../app/src/infrastructure/mongodb.js'
import BatchedUpdateModule from './helpers/batchedUpdate.mjs'
import mongodb from 'mongodb-legacy'
import fs from 'fs'
import { fileURLToPath } from 'url'
const { ObjectId } = mongodb
const { batchedUpdate } = BatchedUpdateModule
// Upper bound on the number of user ids placed in a single `$in` query
// when restoring explicit opt-outs (keeps each updateMany bounded).
const CHUNK_SIZE = 1000
/**
 * Split an array into consecutive chunks of at most `size` elements.
 * The final chunk may be shorter; an empty input yields an empty result.
 *
 * @param {Array} array - items to partition
 * @param {number} size - maximum chunk length
 * @returns {Array[]} list of chunks, in original order
 */
function chunkArray(array, size) {
  const chunks = []
  let start = 0
  while (start < array.length) {
    chunks.push(array.slice(start, start + size))
    start += size
  }
  return chunks
}
/**
 * Migration: treat users whose `writefull.enabled` is explicitly `false` as
 * "unset" (`null`), except for users listed in the opt-out file, whose
 * explicit `false` is restored afterwards.
 *
 * Usage: node split_writefull_disabled_from_unset.mjs <opt-out-file>
 * where <opt-out-file> is a newline-separated list of user ObjectId strings.
 */
async function main() {
  // Read the file of users who already explicitly opted out; their opt-out
  // must survive the blanket reset below.
  const optOutPath = process.argv[2]
  const optedOutFile = fs.readFileSync(optOutPath, 'utf8')
  // Trim and drop blank lines (e.g. a trailing newline or CRLF endings) —
  // `new ObjectId('')` throws and would abort the whole migration.
  const optedOutList = optedOutFile
    .split('\n')
    .map(line => line.trim())
    .filter(line => line.length > 0)
    .map(id => new ObjectId(id))
  console.log(`preserving opt-outs of ${optedOutList.length} users`)
  // Step 1: flip every explicit `writefull.enabled: false` to null ("unset").
  await batchedUpdate(
    'users',
    { 'writefull.enabled': false }, // and is false
    { $set: { 'writefull.enabled': null } }
  )
  // Step 2: restore explicit `false` for the users who actually opted out,
  // in chunks so each `$in` query stays bounded.
  const chunks = chunkArray(optedOutList, CHUNK_SIZE)
  for (const chunkedIds of chunks) {
    console.log('batch update started')
    await db.users.updateMany(
      { _id: { $in: chunkedIds } },
      { $set: { 'writefull.enabled': false } }
    )
    console.log('batch completed')
  }
}
export default main

// When invoked directly (node <script> <opt-out-file>) rather than imported,
// run the migration and exit 0 on success, 1 on failure.
if (fileURLToPath(import.meta.url) === process.argv[1]) {
  main()
    .then(() => {
      process.exit(0)
    })
    .catch(error => {
      console.error({ error })
      process.exit(1)
    })
}