const { exec } = require('child_process')
const { promisify } = require('util')
const { expect } = require('chai')
const logger = require('logger-sharelatex')
const { filterOutput } = require('./helpers/settings')
const { db } = require('../../../app/src/infrastructure/mongodb')

const BATCH_SIZE = 100
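// Also passed to the script under test via the BATCH_SIZE environment variable
// below, so each seeded batch lines up with one batch processed by the script.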

let n = 0
function getUniqueReferralId() {
  return `unique_${n++}`
}

function getUserWithReferralId(referralId) {
  const email = `${Math.random()}@example.com`
  return {
    referal_id: referralId,
    // Make the unique indexes happy.
    email,
    emails: [{ email }],
  }
}
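
// Fetch the user _ids of the 1-indexed batch `batchCounter`, paging with the
// same BATCH_SIZE skip/limit window that the seeded data is inserted in.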
async function getBatch(batchCounter) {
  return (
    await db.users
      .find(
        {},
        {
          projection: { _id: 1 },
          skip: BATCH_SIZE * --batchCounter,
          limit: BATCH_SIZE,
        }
      )
      .toArray()
  ).map(user => user._id)
}
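
// Acceptance test for scripts/regenerate_duplicate_referral_ids: seed four
// BATCH_SIZE-sized batches of users (all duplicates; duplicates plus one
// unique id; a duplicate shared with the following batch; no new duplicates),
// run the script, then assert on its output and the resulting referal_ids.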
describe('RegenerateDuplicateReferralIds', function () {
  let firstBatch, secondBatch, thirdBatch, forthBatch, duplicateAcrossBatch
  beforeEach('insert duplicates', async function () {
    // full batch of duplicates
    await db.users.insertMany(
      Array(BATCH_SIZE)
        .fill(0)
        .map(() => {
          return getUserWithReferralId('duplicate1')
        })
    )
    firstBatch = await getBatch(1)

    // batch of BATCH_SIZE - 1 duplicates and 1 unique
    await db.users.insertMany(
      Array(BATCH_SIZE - 1)
        .fill(0)
        .map(() => {
          return getUserWithReferralId('duplicate2')
        })
        .concat([getUserWithReferralId(getUniqueReferralId())])
    )
    secondBatch = await getBatch(2)

    // one referral id duplicated across this batch and the next
    duplicateAcrossBatch = getUniqueReferralId()
    await db.users.insertMany(
      Array(BATCH_SIZE - 1)
        .fill(0)
        .map(() => {
          return getUserWithReferralId(getUniqueReferralId())
        })
        .concat([getUserWithReferralId(duplicateAcrossBatch)])
    )
    thirdBatch = await getBatch(3)

    // no new duplicates onwards
    await db.users.insertMany(
      Array(BATCH_SIZE - 1)
        .fill(0)
        .map(() => {
          return getUserWithReferralId(getUniqueReferralId())
        })
        .concat([getUserWithReferralId(duplicateAcrossBatch)])
    )
    forthBatch = await getBatch(4)
  })

  let result
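  // Run scripts/regenerate_duplicate_referral_ids in a child process; `result`
  // captures its stdout and stderr for the assertions below.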
  beforeEach('run script', async function () {
    try {
      result = await promisify(exec)(
        [
          // set low BATCH_SIZE
          `BATCH_SIZE=${BATCH_SIZE}`,
          // log details on duplicate matching
          'VERBOSE_LOGGING=true',
          // disable verbose logging from logger-sharelatex
          'LOG_LEVEL=ERROR',

          // actual command
          'node',
          'scripts/regenerate_duplicate_referral_ids',
        ].join(' ')
      )
    } catch (err) {
      // dump details like exit code, stdErr and stdOut
      logger.error({ err }, 'script failed')
      throw err
    }
  })

  it('should do the correct operations', function () {
    let { stderr: stdErr, stdout: stdOut } = result
    stdErr = stdErr.split('\n').filter(filterOutput)
    stdOut = stdOut.split('\n').filter(filterOutput)
    expect(stdErr).to.deep.equal([
      `Completed batch ending ${firstBatch[BATCH_SIZE - 1]}`,
      `Completed batch ending ${secondBatch[BATCH_SIZE - 1]}`,
      `Completed batch ending ${thirdBatch[BATCH_SIZE - 1]}`,
      `Completed batch ending ${forthBatch[BATCH_SIZE - 1]}`,
      'Done.',
      '',
    ])
    expect(stdOut.filter(filterOutput)).to.deep.equal([
      // only duplicates
      `Running update on batch with ids ${JSON.stringify(firstBatch)}`,
      'Got duplicates from looking at batch.',
      'Found duplicate: duplicate1',

      // duplicate in batch
      `Running update on batch with ids ${JSON.stringify(secondBatch)}`,
      'Got duplicates from looking at batch.',
      'Found duplicate: duplicate2',

      // duplicate with next batch
      `Running update on batch with ids ${JSON.stringify(thirdBatch)}`,
      'Got duplicates from running count.',
      `Found duplicate: ${duplicateAcrossBatch}`,

      // no new duplicates
      `Running update on batch with ids ${JSON.stringify(forthBatch)}`,
      '',
    ])
  })
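
  // Every user across the four seeded batches should end up with a distinct
  // referal_id once the script has run.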
  it('should give all users a unique referal_id', async function () {
    const users = await db.users
      .find({}, { projection: { referal_id: 1 } })
      .toArray()
    const uniqueReferralIds = Array.from(
      new Set(users.map(user => user.referal_id))
    )
    expect(users).to.have.length(4 * BATCH_SIZE)
    expect(uniqueReferralIds).to.have.length(users.length)
  })
})