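// Repair duplicate `referal_id` values on the `users` collection: walk the
// collection in batches and, whenever a referral id is shared by more than
// one user, regenerate a fresh id for every user holding it.
//
// Behaviour is tuned via the environment variables read below:
// VERBOSE_LOGGING, WRITE_CONCURRENCY and BATCH_SIZE.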
const VERBOSE_LOGGING = process.env.VERBOSE_LOGGING === 'true'
const WRITE_CONCURRENCY = parseInt(process.env.WRITE_CONCURRENCY, 10) || 10
const BATCH_SIZE = parseInt(process.env.BATCH_SIZE, 10) || 100
// persist the fallback value so that batchedUpdate picks up the same batch size
process.env.BATCH_SIZE = BATCH_SIZE
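// Example invocation (the filename below is only illustrative; substitute the
// actual path of this script in the repository):
//   WRITE_CONCURRENCY=20 BATCH_SIZE=500 VERBOSE_LOGGING=true \
//     node scripts/fix_duplicate_referal_ids.js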
const { ReadPreference } = require('mongodb')
const { db } = require('../app/src/infrastructure/mongodb')
const { promiseMapWithLimit } = require('../app/src/util/promises')
const TokenGenerator = require('../app/src/Features/TokenGenerator/TokenGenerator')
const { batchedUpdate } = require('./helpers/batchedUpdate')

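/**
 * Given the unique referral ids from a batch that was flagged as containing
 * duplicates, look up every user behind each id (reading from a secondary)
 * and give each user of a duplicated id a freshly generated referral id.
 * Writes are throttled to WRITE_CONCURRENCY concurrent updates.
 */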
async function rewriteDuplicates(duplicateReferralIds) {
  // duplicateReferralIds contains at least one id held by more than one user.
  // Work out, in parallel, which ids are actually duplicated and update
  // the affected users where necessary.
  await promiseMapWithLimit(
    WRITE_CONCURRENCY,
    duplicateReferralIds,
    async referralId => {
      try {
        const users = await db.users
          .find(
            { referal_id: referralId },
            {
              projection: { _id: 1 },
              readPreference: ReadPreference.SECONDARY,
            }
          )
          .toArray()

        if (users.length === 1) {
          // Only one user holds this referral id; it was swept up with the
          // rest of a flagged batch. Keep the write load low and skip the
          // update.
          return
        }
        if (VERBOSE_LOGGING) {
          console.log('Found duplicate:', referralId)
        }

        for (const user of users) {
          const newReferralId = TokenGenerator.generateReferralId()
          await db.users.updateOne(
            { _id: user._id },
            {
              $set: {
                referal_id: newReferralId,
              },
            }
          )
        }
      } catch (error) {
        console.error(
          { err: error },
          `Failed to generate new referral ID for duplicate ID: ${referralId}`
        )
      }
    }
  )
}

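/**
 * Called by batchedUpdate with one batch of users (projected to _id and
 * referal_id). Duplicates are detected in two stages: first within the batch
 * itself by comparing the number of unique referal_id values to the batch
 * size, then across the whole collection by re-querying those ids on a
 * secondary and comparing the number of matching users.
 */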
async function processBatch(_, users) {
  const uniqueReferalIdsInBatch = Array.from(
    new Set(users.map(user => user.referal_id))
  )
  if (uniqueReferalIdsInBatch.length !== users.length) {
    if (VERBOSE_LOGGING) {
      console.log('Got duplicates from looking at batch.')
    }
    await rewriteDuplicates(uniqueReferalIdsInBatch)
    return
  }
  const matches = await db.users
    .find(
      { referal_id: { $in: uniqueReferalIdsInBatch } },
      {
        readPreference: ReadPreference.SECONDARY,
        projection: { _id: true },
      }
    )
    .toArray()
  if (matches.length !== uniqueReferalIdsInBatch.length) {
    if (VERBOSE_LOGGING) {
      console.log('Got duplicates from running count.')
    }
    await rewriteDuplicates(uniqueReferalIdsInBatch)
  }
}

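/**
 * Entry point: batchedUpdate (./helpers/batchedUpdate) walks the users
 * collection in BATCH_SIZE chunks, selecting users that have a referal_id
 * and handing each batch (projected to _id and referal_id) to processBatch.
 */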
async function main() {
  await batchedUpdate(
    'users',
    { referal_id: { $exists: true } },
    processBatch,
    { _id: 1, referal_id: 1 }
  )
}

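// Run the migration; exit 0 on success and 1 on any unhandled error.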
main()
  .then(() => {
    console.error('Done.')
    process.exit(0)
  })
  .catch(error => {
    console.error({ error })
    process.exit(1)
  })