mirror of
https://github.com/overleaf/overleaf.git
synced 2025-04-21 03:13:42 +00:00
Merge pull request #23820 from overleaf/jdt-transition-wf-accounts-script
Script to transition WF users from auto-load to auto-create

GitOrigin-RevId: 538ff04dece52e4c46a4a592cb5089d20eb5aa7a
This commit is contained in:
parent
18c2b4108d
commit
8a81b0777a
5 changed files with 107 additions and 13 deletions
12
services/web/scripts/helpers/chunkArray.mjs
Normal file
12
services/web/scripts/helpers/chunkArray.mjs
Normal file
|
@ -0,0 +1,12 @@
|
|||
const CHUNK_SIZE = 1000

/**
 * Split an array into consecutive chunks of at most `size` elements.
 *
 * @param {Array} array - the array to split (not mutated)
 * @param {number} [size=CHUNK_SIZE] - maximum chunk length; must be a
 *   positive integer
 * @returns {Array<Array>} the chunks in order; the last chunk may be
 *   shorter; an empty input yields an empty result
 * @throws {Error} if `size` is not a positive integer (the unguarded
 *   loop would otherwise never terminate for size <= 0)
 */
export function chunkArray(array, size = CHUNK_SIZE) {
  if (!Number.isInteger(size) || size <= 0) {
    throw new Error(`chunkArray: size must be a positive integer, got ${size}`)
  }
  const result = []
  for (let i = 0; i < array.length; i += size) {
    result.push(array.slice(i, i + size))
  }
  return result
}

export default { chunkArray }
|
|
@ -1,4 +1,4 @@
|
|||
import { db } from '../app/src/infrastructure/mongodb.js'
|
||||
import { db } from '../../app/src/infrastructure/mongodb.js'
|
||||
import { batchedUpdate } from '@overleaf/mongo-utils/batchedUpdate.js'
|
||||
|
||||
async function main() {
|
|
@ -0,0 +1,46 @@
|
|||
import { db } from '../../app/src/infrastructure/mongodb.js'
|
||||
import mongodb from 'mongodb-legacy'
|
||||
import fs from 'node:fs'
|
||||
import { fileURLToPath } from 'node:url'
|
||||
import { chunkArray } from '../helpers/chunkArray.mjs'
|
||||
|
||||
const { ObjectId } = mongodb
|
||||
|
||||
async function main() {
|
||||
// search for file of users to transition
|
||||
const userIdsPath = process.argv[2]
|
||||
const userIdsFile = fs.readFileSync(userIdsPath, 'utf8')
|
||||
let userIdsList = userIdsFile
|
||||
|
||||
userIdsList = userIdsList
|
||||
.split('\n')
|
||||
.filter(id => id?.length)
|
||||
.map(id => new ObjectId(id))
|
||||
|
||||
const chunks = chunkArray(userIdsList)
|
||||
console.log(
|
||||
`transitioning ${userIdsList.length} users to auto-account-created state in ${chunks.length} chunks`
|
||||
)
|
||||
|
||||
// Iterate over each chunk and update their autoAccountCreated flag
|
||||
for (const chunkedIds of chunks) {
|
||||
console.log('batch update started')
|
||||
await db.users.updateMany(
|
||||
{ _id: { $in: chunkedIds } },
|
||||
{ $set: { 'writefull.autoCreatedAccount': true } }
|
||||
)
|
||||
console.log('batch completed')
|
||||
}
|
||||
}
|
||||
|
||||
export default main
|
||||
|
||||
if (fileURLToPath(import.meta.url) === process.argv[1]) {
|
||||
try {
|
||||
await main()
|
||||
process.exit(0)
|
||||
} catch (error) {
|
||||
console.error({ error })
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
|
@ -0,0 +1,45 @@
|
|||
import { db } from '../../app/src/infrastructure/mongodb.js'
|
||||
import mongodb from 'mongodb-legacy'
|
||||
import fs from 'node:fs'
|
||||
import { fileURLToPath } from 'node:url'
|
||||
|
||||
const { ObjectId } = mongodb
|
||||
|
||||
async function main() {
|
||||
// search for file of users to force into auto load
|
||||
const userIdsPath = process.argv[2]
|
||||
const userIdsFile = fs.readFileSync(userIdsPath, 'utf8')
|
||||
let userIdsList = userIdsFile
|
||||
userIdsList = userIdsList
|
||||
.split('\n')
|
||||
.filter(id => id?.length)
|
||||
.map(id => new ObjectId(id))
|
||||
|
||||
console.log(
|
||||
`enabling writefull with autoCreatedAccount:false for ${userIdsList.length} users`
|
||||
)
|
||||
|
||||
// set them to writefull.enabled true, and autoCreatedAccount false which is the same state an auto-load account is placed in after their first load
|
||||
// not this does NOT call writefull's first load function for the user's account
|
||||
await db.users.updateMany(
|
||||
{ _id: { $in: userIdsList } },
|
||||
{
|
||||
$set: {
|
||||
'writefull.enabled': true,
|
||||
'writefull.autoCreatedAccount': false,
|
||||
},
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
export default main
|
||||
|
||||
if (fileURLToPath(import.meta.url) === process.argv[1]) {
|
||||
try {
|
||||
await main()
|
||||
process.exit(0)
|
||||
} catch (error) {
|
||||
console.error({ error })
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
|
@ -1,20 +1,11 @@
|
|||
import { db } from '../app/src/infrastructure/mongodb.js'
|
||||
import { db } from '../../app/src/infrastructure/mongodb.js'
|
||||
import { batchedUpdate } from '@overleaf/mongo-utils/batchedUpdate.js'
|
||||
import mongodb from 'mongodb-legacy'
|
||||
import fs from 'node:fs'
|
||||
import { fileURLToPath } from 'node:url'
|
||||
import { chunkArray } from '../helpers/chunkArray.mjs'
|
||||
|
||||
const { ObjectId } = mongodb
|
||||
const CHUNK_SIZE = 1000
|
||||
|
||||
// Function to chunk the array
|
||||
function chunkArray(array, size) {
|
||||
const result = []
|
||||
for (let i = 0; i < array.length; i += size) {
|
||||
result.push(array.slice(i, i + size))
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
async function main() {
|
||||
// search for file of users who already explicitly opted out first
|
||||
|
@ -32,7 +23,7 @@ async function main() {
|
|||
{ $set: { 'writefull.enabled': null } }
|
||||
)
|
||||
|
||||
const chunks = chunkArray(optedOutList, CHUNK_SIZE)
|
||||
const chunks = chunkArray(optedOutList)
|
||||
|
||||
// then reset any explicit false back to being false
|
||||
// Iterate over each chunk and perform the query
|
Loading…
Add table
Reference in a new issue