Merge pull request #3958 from overleaf/jpa-script-delete-orphaned-docs-online

[scripts] add a script for deleting orphaned docs
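
A sketch of how the script might be invoked (BATCH_LAST_ID is a placeholder
ObjectId marking where the sweep should resume; DRY_RUN=true only reports
what would be deleted; batch size, window increment and concurrency can be
tuned via the environment variables listed at the top of the script):

  DRY_RUN=true \
  BATCH_LAST_ID=000000000000000000000000 \
  node scripts/delete_orphaned_docs_online_check.js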

GitOrigin-RevId: f3b16d900cc162852491bd7a83fe1ba61a0579d8
Jakob Ackermann 2021-04-28 10:48:15 +02:00 committed by Copybot
parent 48edcb0127
commit cf496e1fd2
3 changed files with 526 additions and 1 deletion

View file

@@ -0,0 +1,257 @@
const DocstoreManager = require('../app/src/Features/Docstore/DocstoreManager')
const { promisify } = require('util')
const { ObjectId, ReadPreference } = require('mongodb')
const { db, waitForDb } = require('../app/src/infrastructure/mongodb')
const { promiseMapWithLimit } = require('../app/src/util/promises')
const sleep = promisify(setTimeout)
const NOW_IN_S = Date.now() / 1000
const ONE_WEEK_IN_S = 60 * 60 * 24 * 7
const TEN_SECONDS = 10 * 1000
const DRY_RUN = process.env.DRY_RUN === 'true'
if (!process.env.BATCH_LAST_ID) {
console.error('Set BATCH_LAST_ID and re-run.')
process.exit(1)
}
const BATCH_LAST_ID = ObjectId(process.env.BATCH_LAST_ID)
const INCREMENT_BY_S = parseInt(process.env.INCREMENT_BY_S, 10) || ONE_WEEK_IN_S
const BATCH_SIZE = parseInt(process.env.BATCH_SIZE, 10) || 1000
const READ_CONCURRENCY_SECONDARY =
parseInt(process.env.READ_CONCURRENCY_SECONDARY, 10) || 1000
const READ_CONCURRENCY_PRIMARY =
parseInt(process.env.READ_CONCURRENCY_PRIMARY, 10) || 500
const STOP_AT_S = parseInt(process.env.STOP_AT_S, 10) || NOW_IN_S
const WRITE_CONCURRENCY = parseInt(process.env.WRITE_CONCURRENCY, 10) || 10
const LET_USER_DOUBLE_CHECK_INPUTS_FOR =
parseInt(process.env.LET_USER_DOUBLE_CHECK_INPUTS_FOR, 10) || TEN_SECONDS
function getSecondsFromObjectId(id) {
return id.getTimestamp().getTime() / 1000
}
async function main() {
await letUserDoubleCheckInputs()
await waitForDb()
let lowerProjectId = BATCH_LAST_ID
let nProjectsProcessedTotal = 0
let nProjectsWithOrphanedDocsTotal = 0
let nDeletedDocsTotal = 0
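// Sweep the docs collection in project_id windows of INCREMENT_BY_S seconds,
// starting just after BATCH_LAST_ID and stopping once the window start
// passes STOP_AT_S.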
while (getSecondsFromObjectId(lowerProjectId) <= STOP_AT_S) {
const upperTime = getSecondsFromObjectId(lowerProjectId) + INCREMENT_BY_S
let upperProjectId = ObjectId.createFromTime(upperTime)
const query = {
project_id: {
// exclude edge
$gt: lowerProjectId,
// include edge
$lte: upperProjectId,
},
}
const docs = await db.docs
.find(query, { readPreference: ReadPreference.SECONDARY })
.project({ project_id: 1 })
.sort({ project_id: 1 })
.limit(BATCH_SIZE)
.toArray()
if (docs.length) {
const projectIds = Array.from(
new Set(docs.map(doc => doc.project_id.toString()))
).map(ObjectId)
console.log('Checking projects', JSON.stringify(projectIds))
const { nProjectsWithOrphanedDocs, nDeletedDocs } = await processBatch(
projectIds
)
nProjectsProcessedTotal += projectIds.length
nProjectsWithOrphanedDocsTotal += nProjectsWithOrphanedDocs
nDeletedDocsTotal += nDeletedDocs
if (docs.length === BATCH_SIZE) {
// The batch was truncated, so this window -- and possibly the last
// project in it -- may contain more docs than BATCH_SIZE.
const lastDoc = docs[docs.length - 1]
// processBatch handles all docs of a project at once, so it is safe to
// resume from just after this projectId.
upperProjectId = lastDoc.project_id
}
}
console.error(
'Processed %d projects ' +
'(%d projects with orphaned docs/%d docs deleted) ' +
'until %s',
nProjectsProcessedTotal,
nProjectsWithOrphanedDocsTotal,
nDeletedDocsTotal,
upperProjectId
)
lowerProjectId = upperProjectId
}
}
async function getDeletedProject(projectId, readPreference) {
return await db.deletedProjects.findOne(
{ 'deleterData.deletedProjectId': projectId },
{
// There is no index on .project. Pull down something small.
projection: { 'project._id': 1 },
readPreference,
}
)
}
async function getProject(projectId, readPreference) {
return await db.projects.findOne(
{ _id: projectId },
{
// Pulling down an empty object is enough to distinguish an existing project (truthy) from null.
projection: { _id: 0 },
readPreference,
}
)
}
async function getProjectDocs(projectId) {
return await db.docs
.find(
{ project_id: projectId },
{
projection: { _id: 1 },
readPreference: ReadPreference.PRIMARY,
}
)
.toArray()
}
async function checkProjectExistsWithReadPreference(projectId, readPreference) {
// NOTE: Possible race conditions!
// There are two processes racing with our queries:
// 1. project deletion
// 2. project restoration
// For 1. we check the projects collection before deletedProjects.
// If a project were deleted at this very moment, we should still see the
// soft-deleted entry, which is created before the projects entry is removed.
// For 2. we check the projects collection again after deletedProjects.
// If a project were restored at this very moment, we are very likely to see
// the projects entry again.
// Unlikely edge case: restore + deletion in rapid succession.
// We could add locking to the ProjectDeleter to rule this out.
if (await getProject(projectId, readPreference)) {
// The project is live.
return true
}
const deletedProject = await getDeletedProject(projectId, readPreference)
if (deletedProject && deletedProject.project) {
// The project is registered for hard-deletion.
return true
}
if (await getProject(projectId, readPreference)) {
// The project was just restored.
return true
}
// The project does not exist.
return false
}
async function checkProjectExistsOnPrimary(projectId) {
return await checkProjectExistsWithReadPreference(
projectId,
ReadPreference.PRIMARY
)
}
async function checkProjectExistsOnSecondary(projectId) {
return await checkProjectExistsWithReadPreference(
projectId,
ReadPreference.SECONDARY
)
}
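// Check each project on a secondary first (cheap reads), then double-check
// suspected orphans on the primary before destroying their docs in docstore.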
async function processBatch(projectIds) {
const doubleCheckProjectIdsOnPrimary = []
let nDeletedDocs = 0
async function checkProjectOnSecondary(projectId) {
if (await checkProjectExistsOnSecondary(projectId)) {
// Finding a project with secondary confidence is sufficient.
return
}
// At this point, the secondaries deem this project's docs to be orphaned.
doubleCheckProjectIdsOnPrimary.push(projectId)
}
const projectsWithOrphanedDocs = []
async function checkProjectOnPrimary(projectId) {
if (await checkProjectExistsOnPrimary(projectId)) {
// The project is actually live.
return
}
projectsWithOrphanedDocs.push(projectId)
const docs = await getProjectDocs(projectId)
nDeletedDocs += docs.length
console.log(
'Deleted project %s has %s orphaned docs: %s',
projectId,
docs.length,
JSON.stringify(docs.map(doc => doc._id))
)
}
await promiseMapWithLimit(
READ_CONCURRENCY_SECONDARY,
projectIds,
checkProjectOnSecondary
)
await promiseMapWithLimit(
READ_CONCURRENCY_PRIMARY,
doubleCheckProjectIdsOnPrimary,
checkProjectOnPrimary
)
if (!DRY_RUN) {
await promiseMapWithLimit(
WRITE_CONCURRENCY,
projectsWithOrphanedDocs,
DocstoreManager.promises.destroyProject
)
}
const nProjectsWithOrphanedDocs = projectsWithOrphanedDocs.length
return { nProjectsWithOrphanedDocs, nDeletedDocs }
}
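// Print the resolved options and pause briefly so the operator can abort
// (Ctrl-C) if anything looks wrong.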
async function letUserDoubleCheckInputs() {
console.error(
'Options:',
JSON.stringify(
{
BATCH_LAST_ID,
BATCH_SIZE,
DRY_RUN,
INCREMENT_BY_S,
STOP_AT_S,
READ_CONCURRENCY_SECONDARY,
READ_CONCURRENCY_PRIMARY,
WRITE_CONCURRENCY,
LET_USER_DOUBLE_CHECK_INPUTS_FOR,
},
null,
2
)
)
console.error(
'Waiting for you to double check inputs for',
LET_USER_DOUBLE_CHECK_INPUTS_FOR,
'ms'
)
await sleep(LET_USER_DOUBLE_CHECK_INPUTS_FOR)
}
main()
.then(() => {
console.error('Done.')
process.exit(0)
})
.catch(error => {
console.error({ error })
process.exit(1)
})

View file

@@ -0,0 +1,266 @@
const { exec } = require('child_process')
const { promisify } = require('util')
const { expect } = require('chai')
const logger = require('logger-sharelatex')
const { db, ObjectId } = require('../../../app/src/infrastructure/mongodb')
const ONE_DAY_IN_S = 60 * 60 * 24
const BATCH_SIZE = 3
function getSecondsFromObjectId(id) {
return id.getTimestamp().getTime() / 1000
}
function getObjectIdFromDate(date) {
const seconds = new Date(date).getTime() / 1000
return ObjectId.createFromTime(seconds)
}
describe('DeleteOrphanedDocsOnlineCheck', function () {
let docIds
let projectIds
let stopAtSeconds
let BATCH_LAST_ID
beforeEach('create docs', async function () {
BATCH_LAST_ID = getObjectIdFromDate('2021-03-31T00:00:00.000Z')
docIds = []
docIds[0] = getObjectIdFromDate('2021-04-01T00:00:00.000Z')
docIds[1] = getObjectIdFromDate('2021-04-02T00:00:00.000Z')
docIds[2] = getObjectIdFromDate('2021-04-11T00:00:00.000Z')
docIds[3] = getObjectIdFromDate('2021-04-12T00:00:00.000Z')
docIds[4] = getObjectIdFromDate('2021-04-13T00:00:00.000Z')
docIds[5] = getObjectIdFromDate('2021-04-14T00:00:00.000Z')
docIds[6] = getObjectIdFromDate('2021-04-15T00:00:00.000Z')
docIds[7] = getObjectIdFromDate('2021-04-16T00:01:00.000Z')
docIds[8] = getObjectIdFromDate('2021-04-16T00:02:00.000Z')
docIds[9] = getObjectIdFromDate('2021-04-16T00:03:00.000Z')
docIds[10] = getObjectIdFromDate('2021-04-16T00:04:00.000Z')
docIds[11] = getObjectIdFromDate('2021-04-16T00:05:00.000Z')
projectIds = []
projectIds[0] = getObjectIdFromDate('2021-04-01T00:00:00.000Z')
projectIds[1] = getObjectIdFromDate('2021-04-02T00:00:00.000Z')
projectIds[2] = getObjectIdFromDate('2021-04-11T00:00:00.000Z')
projectIds[3] = getObjectIdFromDate('2021-04-12T00:00:00.000Z')
projectIds[4] = getObjectIdFromDate('2021-04-13T00:00:00.000Z')
projectIds[5] = getObjectIdFromDate('2021-04-14T00:00:00.000Z')
projectIds[6] = getObjectIdFromDate('2021-04-15T00:00:00.000Z')
projectIds[7] = getObjectIdFromDate('2021-04-16T00:01:00.000Z')
projectIds[8] = getObjectIdFromDate('2021-04-16T00:02:00.000Z')
projectIds[9] = getObjectIdFromDate('2021-04-16T00:03:00.000Z')
// two docs in the same project
projectIds[10] = projectIds[9]
projectIds[11] = projectIds[4]
stopAtSeconds = new Date('2021-04-17T00:00:00.000Z').getTime() / 1000
})
beforeEach('create doc stubs', async function () {
await db.docs.insertMany([
// orphaned
{ _id: docIds[0], project_id: projectIds[0] },
{ _id: docIds[1], project_id: projectIds[1] },
{ _id: docIds[2], project_id: projectIds[2] },
{ _id: docIds[3], project_id: projectIds[3] },
// orphaned, failed hard deletion
{ _id: docIds[4], project_id: projectIds[4] },
// not orphaned, live
{ _id: docIds[5], project_id: projectIds[5] },
// not orphaned, pending hard deletion
{ _id: docIds[6], project_id: projectIds[6] },
// multiple in a single batch
{ _id: docIds[7], project_id: projectIds[7] },
{ _id: docIds[8], project_id: projectIds[8] },
{ _id: docIds[9], project_id: projectIds[9] },
// two docs in one project
{ _id: docIds[10], project_id: projectIds[10] },
{ _id: docIds[11], project_id: projectIds[11] },
])
})
beforeEach('create project stubs', async function () {
await db.projects.insertMany([
// live
{ _id: projectIds[5] },
])
})
beforeEach('create deleted project stubs', async function () {
await db.deletedProjects.insertMany([
// hard-deleted
{ deleterData: { deletedProjectId: projectIds[4] } },
// soft-deleted
{
deleterData: { deletedProjectId: projectIds[6] },
project: { _id: projectIds[6] },
},
])
})
let options
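// Run the script in a child process with test-friendly options and assert
// on its complete stdout/stderr output.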
async function runScript(dryRun) {
options = {
BATCH_LAST_ID,
BATCH_SIZE,
DRY_RUN: dryRun,
INCREMENT_BY_S: ONE_DAY_IN_S,
STOP_AT_S: stopAtSeconds,
// Lower concurrency to 1 for a strict sequence of log messages.
READ_CONCURRENCY_SECONDARY: 1,
READ_CONCURRENCY_PRIMARY: 1,
WRITE_CONCURRENCY: 1,
// start right away
LET_USER_DOUBLE_CHECK_INPUTS_FOR: 1,
}
let result
try {
result = await promisify(exec)(
Object.entries(options)
.map(([key, value]) => `${key}=${value}`)
.concat([
// Hide verbose log messages like `calling destroy for project in docstore`
'LOG_LEVEL=error',
// Hide deprecation warnings for calling `db.collection.count`
'NODE_OPTIONS=--no-deprecation',
])
.concat(['node', 'scripts/delete_orphaned_docs_online_check.js'])
.join(' ')
)
} catch (error) {
// dump details like exit code, stdErr and stdOut
logger.error({ error }, 'script failed')
throw error
}
let { stderr: stdErr, stdout: stdOut } = result
stdErr = stdErr.split('\n')
stdOut = stdOut
.split('\n')
.filter(line => !line.includes('Using settings from'))
const oneDayFromProjectId9InSeconds =
getSecondsFromObjectId(projectIds[9]) + ONE_DAY_IN_S
const oneDayFromProjectId9AsObjectId = getObjectIdFromDate(
1000 * oneDayFromProjectId9InSeconds
)
expect(stdOut).to.deep.equal([
`Checking projects ["${projectIds[0]}"]`,
`Deleted project ${projectIds[0]} has 1 orphaned docs: ["${docIds[0]}"]`,
`Checking projects ["${projectIds[1]}"]`,
`Deleted project ${projectIds[1]} has 1 orphaned docs: ["${docIds[1]}"]`,
`Checking projects ["${projectIds[2]}"]`,
`Deleted project ${projectIds[2]} has 1 orphaned docs: ["${docIds[2]}"]`,
`Checking projects ["${projectIds[3]}"]`,
`Deleted project ${projectIds[3]} has 1 orphaned docs: ["${docIds[3]}"]`,
// Two docs in the same project
`Checking projects ["${projectIds[4]}"]`,
`Deleted project ${projectIds[4]} has 2 orphaned docs: ["${docIds[4]}","${docIds[11]}"]`,
// Project 5 is live
`Checking projects ["${projectIds[5]}"]`,
// Project 6 is soft-deleted
`Checking projects ["${projectIds[6]}"]`,
// 7,8,9,10 are on the same day, but exceed the batch size of 3, so only 7,8,9 fit into this batch
`Checking projects ["${projectIds[7]}","${projectIds[8]}","${projectIds[9]}"]`,
`Deleted project ${projectIds[7]} has 1 orphaned docs: ["${docIds[7]}"]`,
`Deleted project ${projectIds[8]} has 1 orphaned docs: ["${docIds[8]}"]`,
// Two docs in the same project
`Deleted project ${projectIds[9]} has 2 orphaned docs: ["${docIds[9]}","${docIds[10]}"]`,
'',
])
expect(stdErr).to.deep.equal([
...`Options: ${JSON.stringify(options, null, 2)}`.split('\n'),
'Waiting for you to double check inputs for 1 ms',
`Processed 1 projects (1 projects with orphaned docs/1 docs deleted) until ${getObjectIdFromDate(
'2021-04-01T00:00:00.000Z'
)}`,
`Processed 2 projects (2 projects with orphaned docs/2 docs deleted) until ${getObjectIdFromDate(
'2021-04-02T00:00:00.000Z'
)}`,
`Processed 2 projects (2 projects with orphaned docs/2 docs deleted) until ${getObjectIdFromDate(
'2021-04-03T00:00:00.000Z'
)}`,
`Processed 2 projects (2 projects with orphaned docs/2 docs deleted) until ${getObjectIdFromDate(
'2021-04-04T00:00:00.000Z'
)}`,
`Processed 2 projects (2 projects with orphaned docs/2 docs deleted) until ${getObjectIdFromDate(
'2021-04-05T00:00:00.000Z'
)}`,
`Processed 2 projects (2 projects with orphaned docs/2 docs deleted) until ${getObjectIdFromDate(
'2021-04-06T00:00:00.000Z'
)}`,
`Processed 2 projects (2 projects with orphaned docs/2 docs deleted) until ${getObjectIdFromDate(
'2021-04-07T00:00:00.000Z'
)}`,
`Processed 2 projects (2 projects with orphaned docs/2 docs deleted) until ${getObjectIdFromDate(
'2021-04-08T00:00:00.000Z'
)}`,
`Processed 2 projects (2 projects with orphaned docs/2 docs deleted) until ${getObjectIdFromDate(
'2021-04-09T00:00:00.000Z'
)}`,
`Processed 2 projects (2 projects with orphaned docs/2 docs deleted) until ${getObjectIdFromDate(
'2021-04-10T00:00:00.000Z'
)}`,
`Processed 3 projects (3 projects with orphaned docs/3 docs deleted) until ${getObjectIdFromDate(
'2021-04-11T00:00:00.000Z'
)}`,
`Processed 4 projects (4 projects with orphaned docs/4 docs deleted) until ${getObjectIdFromDate(
'2021-04-12T00:00:00.000Z'
)}`,
`Processed 5 projects (5 projects with orphaned docs/6 docs deleted) until ${getObjectIdFromDate(
'2021-04-13T00:00:00.000Z'
)}`,
`Processed 6 projects (5 projects with orphaned docs/6 docs deleted) until ${getObjectIdFromDate(
'2021-04-14T00:00:00.000Z'
)}`,
`Processed 7 projects (5 projects with orphaned docs/6 docs deleted) until ${getObjectIdFromDate(
'2021-04-15T00:00:00.000Z'
)}`,
`Processed 7 projects (5 projects with orphaned docs/6 docs deleted) until ${getObjectIdFromDate(
'2021-04-16T00:00:00.000Z'
)}`,
// 7,8,9,10 are on the same day, but exceed the batch size of 3
// Project 9 has two docs.
`Processed 10 projects (8 projects with orphaned docs/10 docs deleted) until ${projectIds[9]}`,
// 10 has already been processed as part of the last batch -- same project_id as 9.
`Processed 10 projects (8 projects with orphaned docs/10 docs deleted) until ${oneDayFromProjectId9AsObjectId}`,
'Done.',
'',
])
}
describe('DRY_RUN=true', function () {
beforeEach('run script', async function () {
await runScript(true)
})
it('should leave docs as is', async function () {
const docs = await db.docs.find({}).toArray()
expect(docs).to.deep.equal([
{ _id: docIds[0], project_id: projectIds[0] },
{ _id: docIds[1], project_id: projectIds[1] },
{ _id: docIds[2], project_id: projectIds[2] },
{ _id: docIds[3], project_id: projectIds[3] },
{ _id: docIds[4], project_id: projectIds[4] },
{ _id: docIds[5], project_id: projectIds[5] },
{ _id: docIds[6], project_id: projectIds[6] },
{ _id: docIds[7], project_id: projectIds[7] },
{ _id: docIds[8], project_id: projectIds[8] },
{ _id: docIds[9], project_id: projectIds[9] },
{ _id: docIds[10], project_id: projectIds[10] },
{ _id: docIds[11], project_id: projectIds[11] },
])
})
})
describe('DRY_RUN=false', function () {
beforeEach('run script', async function () {
await runScript(false)
})
it('should delete all docs except those from live/soft-deleted projects', async function () {
const docs = await db.docs.find({}).toArray()
expect(docs).to.deep.equal([
// not orphaned, live
{ _id: docIds[5], project_id: projectIds[5] },
// not orphaned, pending hard deletion
{ _id: docIds[6], project_id: projectIds[6] },
])
})
})
})

View file

@@ -1,3 +1,4 @@
const { db, ObjectId } = require('../../../../app/src/infrastructure/mongodb')
const AbstractMockApi = require('./AbstractMockApi')
class MockDocstoreApi extends AbstractMockApi {
@@ -87,9 +88,10 @@ class MockDocstoreApi extends AbstractMockApi {
}
})
this.app.post('/project/:projectId/destroy', (req, res) => {
this.app.post('/project/:projectId/destroy', async (req, res) => {
const { projectId } = req.params
delete this.docs[projectId]
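// Also remove the docs from mongo, so acceptance tests that seed db.docs
// directly can observe the deletion.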
await db.docs.deleteMany({ project_id: ObjectId(projectId) })
res.sendStatus(204)
})
}