Merge pull request #3225 from overleaf/jpa-scripts-cleanup

[misc] delete unneeded scripts

GitOrigin-RevId: 30b1c97dfde8d2e93b40c0df4cb8be9a963580ad
Jakob Ackermann 2020-10-07 15:17:28 +02:00 committed by Copybot
parent 370ee79a30
commit cec854de89
17 changed files with 0 additions and 939 deletions

View file

@@ -1,25 +0,0 @@
const { db } = require('../app/src/infrastructure/mongojs')
const logger = require('logger-sharelatex')
logger.logger.level('error')

logger.log({}, 'Updating users in mongo')
db.users.update(
  {
    'features.github': true
  },
  {
    $set: { 'features.gitBridge': true }
  },
  // multi is required so that every matching user is updated, not just the
  // first one found
  { multi: true },
  function(err, result) {
    if (err) {
      logger.err({ err: err, result: result }, 'Error updating users in mongo')
      return
    }
    logger.log(
      { result: result },
      'Updated users who have github to have gitBridge too'
    )
    process.exit(0)
  }
)

View file

@@ -1,63 +0,0 @@
const { db } = require('../app/src/infrastructure/mongojs')
const async = require('async')
const minimist = require('minimist')

const updateUser = function(user, callback) {
  console.log(`Updating user ${user._id}`)
  const update = {
    $set: {
      emails: [
        {
          email: user.email,
          createdAt: new Date()
        }
      ]
    }
  }
  db.users.update({ _id: user._id }, update, callback)
}

const updateUsers = (users, callback) =>
  async.eachLimit(users, ASYNC_LIMIT, updateUser, function(error) {
    if (error) {
      callback(error)
      return
    }
    counter += users.length
    console.log(`${counter} users updated`)
    loopForUsers(callback)
  })

var loopForUsers = callback =>
  db.users
    .find({ emails: { $exists: false } }, { email: 1 })
    .limit(FETCH_LIMIT, function(error, users) {
      if (error) {
        callback(error)
        return
      }
      if (users.length === 0) {
        console.log(`DONE (${counter} users updated)`)
        return callback()
      }
      updateUsers(users, callback)
    })

var counter = 0
var run = () =>
  loopForUsers(function(error) {
    if (error) {
      throw error
    }
    process.exit()
  })

let FETCH_LIMIT, ASYNC_LIMIT
var setup = function() {
  let args = minimist(process.argv.slice(2))
  // --fetch N  get N users each time (default 100)
  FETCH_LIMIT = args.fetch ? args.fetch : 100
  // --async M  run M updates in parallel (default 10)
  ASYNC_LIMIT = args.async ? args.async : 10
}

setup()
run()
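
For reference: this email backfill, like the compile-timeout script further down, walks the collection in batches (fetch up to FETCH_LIMIT users, update them with bounded concurrency, loop). A minimal async/await sketch of the same pattern, assuming a connected handle from the official MongoDB driver plus the p-limit package used elsewhere in this commit; the collection and field names come from the script above, the helper itself is illustrative:

// Sketch only: the same fetch-a-batch / update-with-bounded-concurrency loop
// in async/await form. Assumes a connected `db` from the official MongoDB
// driver and the `p-limit` package; limits mirror the script's defaults.
const pLimit = require('p-limit')

async function backfillEmails(db, fetchLimit = 100, asyncLimit = 10) {
  const limit = pLimit(asyncLimit)
  let counter = 0
  while (true) {
    // each $set removes the user from this filter, so the loop terminates
    const users = await db
      .collection('users')
      .find({ emails: { $exists: false } }, { projection: { email: 1 } })
      .limit(fetchLimit)
      .toArray()
    if (users.length === 0) return counter
    await Promise.all(
      users.map(user =>
        limit(() =>
          db.collection('users').updateOne(
            { _id: user._id },
            { $set: { emails: [{ email: user.email, createdAt: new Date() }] } }
          )
        )
      )
    )
    counter += users.length
    console.log(`${counter} users updated`)
  }
}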

View file

@@ -1,63 +0,0 @@
const { DeletedProject } = require('../app/src/models/DeletedProject')
const Async = require('async')

DeletedProject.find({}, (error, deletedProjects) => {
  if (error) {
    throw error
  }
  Async.eachLimit(
    deletedProjects,
    10,
    (deletedProject, cb) => {
      if (deletedProject.project) {
        const src = deletedProject.project
        let values = {
          'deleterData.deletedProjectId': src._id,
          'deleterData.deletedProjectOwnerId': src.owner_ref,
          'deleterData.deletedProjectCollaboratorIds': src.collaberator_refs,
          'deleterData.deletedProjectReadOnlyIds': src.readOnly_refs,
          'deleterData.deletedProjectReadWriteToken': src.tokens
            ? src.tokens.readAndWrite
            : undefined,
          'deleterData.deletedProjectOverleafId': src.overleaf
            ? src.overleaf.id
            : undefined,
          'deleterData.deletedProjectOverleafHistoryId':
            src.overleaf && src.overleaf.history
              ? src.overleaf.history.id
              : undefined,
          'deleterData.deletedProjectReadOnlyToken': src.tokens
            ? src.tokens.readOnly
            : undefined,
          // readAndWrite refs go to the ReadWrite field and readOnly refs to
          // the ReadOnly field, matching the deleted-docs backfill script below
          'deleterData.deletedProjectReadWriteTokenAccessIds':
            src.tokenAccessReadAndWrite_refs,
          'deleterData.deletedProjectReadOnlyTokenAccessIds':
            src.tokenAccessReadOnly_refs,
          'deleterData.deletedProjectLastUpdatedAt': src.lastUpdated
        }
        Object.keys(values).forEach(
          key => (values[key] === undefined ? delete values[key] : '')
        )
        DeletedProject.findOneAndUpdate(
          { _id: deletedProject._id },
          {
            $set: values
          },
          cb
        )
      } else {
        cb()
      }
    },
    err => {
      if (err) {
        throw err
      }
      process.exit(0)
    }
  )
})

View file

@@ -1,97 +0,0 @@
const { DeletedProject } = require('../app/src/models/DeletedProject')
const { DeletedUser } = require('../app/src/models/DeletedUser')
const { db } = require('../app/src/infrastructure/mongojs')
const pLimit = require('p-limit')

const CONCURRENCY = 10

function getCollectionContents(collection) {
  return new Promise((resolve, reject) => {
    collection.find({}).toArray((error, contents) => {
      if (error) {
        reject(error)
      } else {
        resolve(contents)
      }
    })
  })
}

function deleteCollectionItem(collection, id) {
  return new Promise((resolve, reject) => {
    collection.remove({ _id: id }, error => {
      if (error) {
        reject(error)
      } else {
        resolve()
      }
    })
  })
}

async function backfillUser(user) {
  await DeletedUser.create({
    user: user,
    deleterData: {
      deletedAt: new Date(),
      deletedUserId: user._id,
      deletedUserLastLoggedIn: user.lastLoggedIn,
      deletedUserSignUpDate: user.signUpDate,
      deletedUserLoginCount: user.loginCount,
      deletedUserReferralId: user.referal_id,
      deletedUserReferredUsers: user.refered_users,
      deletedUserReferredUserCount: user.refered_user_count,
      deletedUserOverleafId: user.overleaf ? user.overleaf.id : undefined
    }
  })
  await deleteCollectionItem(db.usersDeletedByMigration, user._id)
}

async function backfillProject(project) {
  await DeletedProject.create({
    project: project,
    deleterData: {
      deletedAt: new Date(),
      deletedProjectId: project._id,
      deletedProjectOwnerId: project.owner_ref,
      deletedProjectCollaboratorIds: project.collaberator_refs,
      deletedProjectReadOnlyIds: project.readOnly_refs,
      deletedProjectReadWriteTokenAccessIds:
        project.tokenAccessReadAndWrite_refs,
      deletedProjectReadOnlyTokenAccessIds: project.tokenAccessReadOnly_refs,
      deletedProjectReadWriteToken: project.tokens
        ? project.tokens.readAndWrite
        : undefined,
      deletedProjectReadOnlyToken: project.tokens
        ? project.tokens.readOnly
        : undefined,
      deletedProjectLastUpdatedAt: project.lastUpdated
    }
  })
  await deleteCollectionItem(db.projectsDeletedByMigration, project._id)
}

async function backfillUsers() {
  const limit = pLimit(CONCURRENCY)
  const migrationUsers = await getCollectionContents(db.usersDeletedByMigration)
  console.log('Found ' + migrationUsers.length + ' users')
  await Promise.all(migrationUsers.map(user => limit(() => backfillUser(user))))
}

async function backfillProjects() {
  const limit = pLimit(CONCURRENCY)
  const migrationProjects = await getCollectionContents(
    db.projectsDeletedByMigration
  )
  console.log('Found ' + migrationProjects.length + ' projects')
  await Promise.all(
    migrationProjects.map(project => limit(() => backfillProject(project)))
  )
}

Promise.all([backfillProjects(), backfillUsers()])
  .then(() => {
    console.log('Finished')
    process.exit(0)
  })
  // without a catch, a failed backfill would be silently swallowed as an
  // unhandled rejection
  .catch(err => {
    console.error(err)
    process.exit(1)
  })
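
As an aside, the hand-rolled promise wrappers above could also be produced with Node's util.promisify; a sketch under the same mongojs assumptions:

// Sketch: equivalent wrappers via util.promisify (Node 8+). The collections
// are the same mongojs handles used above.
const { promisify } = require('util')

function getCollectionContents(collection) {
  const cursor = collection.find({})
  return promisify(cursor.toArray.bind(cursor))()
}

function deleteCollectionItem(collection, id) {
  return promisify(collection.remove.bind(collection))({ _id: id })
}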

View file

@@ -1,40 +0,0 @@
const { db } = require('../app/src/infrastructure/mongojs')
const Async = require('async')
const minimist = require('minimist')

const argv = minimist(process.argv.slice(2))
const limit = argv.limit
if (!limit) {
  console.log('Please supply an async limit with --limit')
  process.exit(1)
}

db.users.find(
  { hashedPassword: { $exists: 1 }, sharelatexHashedPassword: { $exists: 0 } },
  { hashedPassword: 1 },
  (err, users) => {
    if (err) {
      throw err
    }
    Async.eachLimit(
      users,
      limit,
      (user, cb) => {
        db.users.update(
          { _id: user._id },
          { $set: { sharelatexHashedPassword: user.hashedPassword } },
          cb
        )
      },
      err => {
        if (err) {
          throw err
        }
        console.log('finished')
        process.exit(0)
      }
    )
  }
)

View file

@@ -1,41 +0,0 @@
const readline = require('readline')
const { Project } = require('../app/src/models/Project')
const FileStoreHandler = require('../app/src/Features/FileStore/FileStoreHandler')
const { DeletedProject } = require('../app/src/models/DeletedProject')

/* eslint-disable no-console */

async function deleteFiles() {
  const rl = readline.createInterface({
    input: process.stdin
  })
  for await (const projectId of rl) {
    try {
      const projectCount = await Project.count({ _id: projectId }).exec()
      if (projectCount > 0) {
        throw new Error('found an existing project - refusing')
      }
      const count = await DeletedProject.count({
        'deleterData.deletedProjectId': projectId,
        project: { $ne: null }
      }).exec()
      if (count > 0) {
        throw new Error('found an existing deleted project - refusing')
      }
      await FileStoreHandler.promises.deleteProject(projectId)
      console.error(projectId, 'OK')
    } catch (err) {
      console.error(projectId, 'ERROR', err.name, err.message)
    }
  }
}

deleteFiles()
  .then(() => {
    process.exit(0)
  })
  .catch(err => {
    console.log('Aiee, something went wrong!', err)
    process.exit(1)
  })
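
The script consumes one project id per line on stdin and refuses to touch anything that still exists as a live or soft-deleted project. A hypothetical invocation, since the script's real path is not preserved in this diff:

// Hypothetical usage; the script's real file name is not preserved here:
//   node scripts/delete_orphaned_file_store_data.js < orphaned-project-ids.txt
// Each id is echoed to stderr with OK or ERROR, so failures can be grepped
// out of the log and retried.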

View file

@@ -1,92 +0,0 @@
const { db } = require('../app/src/infrastructure/mongojs')
const async = require('async')
const minimist = require('minimist')

const newTimeout = 240
const oldTimeoutLimits = { $gt: 60, $lt: 240 }

const updateUser = function(user, callback) {
  console.log(`Updating user ${user._id}`)
  const update = {
    $set: {
      'features.compileTimeout': newTimeout
    }
  }
  db.users.update(
    {
      _id: user._id,
      'features.compileTimeout': oldTimeoutLimits
    },
    update,
    callback
  )
}

const updateUsers = (users, callback) =>
  async.eachLimit(users, ASYNC_LIMIT, updateUser, function(error) {
    if (error) {
      callback(error)
      return
    }
    counter += users.length
    console.log(`${counter} users updated`)
    if (DO_ALL) {
      return loopForUsers(callback)
    } else {
      console.log('*** run again to continue updating ***')
      return callback()
    }
  })

var loopForUsers = callback =>
  db.users
    .find(
      { 'features.compileTimeout': oldTimeoutLimits },
      { 'features.compileTimeout': 1 }
    )
    .limit(FETCH_LIMIT, function(error, users) {
      if (error) {
        callback(error)
        return
      }
      if (users.length === 0) {
        console.log(`DONE (${counter} users updated)`)
        return callback()
      }
      updateUsers(users, callback)
    })

var counter = 0
var run = () =>
  loopForUsers(function(error) {
    if (error) {
      throw error
    }
    process.exit()
  })

let FETCH_LIMIT, ASYNC_LIMIT, DO_ALL
var setup = function() {
  let args = minimist(process.argv.slice(2))
  // --fetch N  get N users each time
  FETCH_LIMIT = args.fetch ? args.fetch : 100
  // --async M  run M updates in parallel
  ASYNC_LIMIT = args.async ? args.async : 10
  // --all      means run to completion
  if (args.all) {
    if (args.fetch) {
      console.error('error: do not use --fetch with --all')
      process.exit(1)
    } else {
      DO_ALL = true
      // if we are updating for all users then ignore the fetch limit.
      FETCH_LIMIT = 0
      // A limit() value of 0 (i.e. .limit(0)) is equivalent to setting
      // no limit.
      // https://docs.mongodb.com/manual/reference/method/cursor.limit
    }
  }
}

setup()
run()

View file

@@ -1,22 +0,0 @@
const { db } = require('../../app/src/infrastructure/mongojs')

const keys = { 'tokens.readAndWritePrefix': 1 }
const opts = {
  unique: true,
  partialFilterExpression: {
    'tokens.readAndWritePrefix': { $exists: true }
  },
  background: true
}

console.log(
  `>> Creating index on ${JSON.stringify(keys)}, ${JSON.stringify(opts)}`
)
db.projects.createIndex(keys, opts, err => {
  if (err) {
    throw err
  }
  console.log('>> done')
  process.exit(0)
})
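
For context, a partial unique index like this exists to support lookups by the numeric prefix of a project's read-and-write token. A hypothetical lookup reusing the field name from the index definition above (the helper itself is not part of this commit):

// Illustrative only: resolve a read-and-write token to a project via its
// numeric prefix. The field name comes from the index above; the helper
// itself is hypothetical.
function findProjectByReadAndWriteToken(db, token, callback) {
  const prefixMatch = token.match(/^(\d+)/)
  if (!prefixMatch) {
    return callback(new Error('token has no numeric prefix'))
  }
  db.projects.findOne(
    { 'tokens.readAndWritePrefix': prefixMatch[1] },
    (err, project) => {
      if (err) return callback(err)
      // the caller still has to compare the full token before granting access
      callback(null, project)
    }
  )
}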

View file

@@ -1,30 +0,0 @@
const { db, ObjectId } = require('../../app/src/infrastructure/mongojs')
const Async = require('async')

const projectIds = [
  // put ids here
]

Async.eachLimit(
  projectIds,
  5,
  (projectId, cb) => {
    db.projects.update(
      { _id: ObjectId(projectId) },
      {
        $unset: { tokens: 1 },
        // (sic) 'publicAccesLevel' matches the spelling in the Project schema
        $set: { publicAccesLevel: 'private' }
      },
      err => {
        if (err) return cb(err)
        console.log(`Deactivated tokens for ${projectId}`)
        cb()
      }
    )
  },
  err => {
    if (err) throw err
    console.log('>> Done')
    process.exit(0)
  }
)

View file

@@ -1,43 +0,0 @@
const { db } = require('../../app/src/infrastructure/mongojs')
const Async = require('async')

db.projects.find(
  {
    'tokens.readAndWrite': { $exists: true },
    'tokens.readAndWritePrefix': { $exists: false }
  },
  { tokens: 1 },
  (err, projects) => {
    if (err) {
      throw err
    }
    console.log(`>> Updating ${projects.length} projects`)
    Async.eachLimit(
      projects,
      5,
      (project, cb) => {
        const rwToken = project.tokens.readAndWrite
        const prefixMatch = rwToken.match(/^(\d+).*$/)
        if (!prefixMatch) {
          const err = new Error(`no prefix on token: ${project._id}, ${rwToken}`)
          console.log(`>> Error, ${err.message}`)
          return cb(err)
        }
        db.projects.update(
          { _id: project._id },
          { $set: { 'tokens.readAndWritePrefix': prefixMatch[1] } },
          cb
        )
      },
      err => {
        if (err) {
          throw err
        }
        console.log('>> done')
        process.exit(0)
      }
    )
  }
)

View file

@@ -1,39 +0,0 @@
const ProjectDetailsHandler = require('../../app/src/Features/Project/ProjectDetailsHandler')
const Async = require('async')

const projectIds = [
  // put ids here
]

Async.eachLimit(
  projectIds,
  5,
  (projectId, cb) => {
    ProjectDetailsHandler.setPublicAccessLevel(projectId, 'tokenBased', err => {
      if (err) {
        return cb(err)
      }
      console.log(
        `>> Set public-access-level to tokenBased for project ${projectId}`
      )
      ProjectDetailsHandler.ensureTokensArePresent(projectId, (err, tokens) => {
        if (err) {
          return cb(err)
        }
        console.log(
          `>> Re-generated tokens for project ${projectId}, ${JSON.stringify(
            tokens
          )}`
        )
        cb()
      })
    })
  },
  err => {
    if (err) {
      throw err
    }
    console.log('>> Done')
    process.exit(0)
  }
)

View file

@@ -1,77 +0,0 @@
//
// Remove the brandVariationId attribute from project documents that have
// that attribute, where its value matches the one given.
//
// node scripts/remove_brand_variation_ids.js 3
//   gives a report of project documents that have a brandVariationId
//   attribute with value "3"
//
// node scripts/remove_brand_variation_ids.js 3 --commit true
//   actually removes the brandVariationId attribute from project documents
//   that have a brandVariationId attribute with value "3"
//
const { db } = require('../app/src/infrastructure/mongojs')
const async = require('async')
const minimist = require('minimist')

const argv = minimist(process.argv.slice(2))
const bvId = argv._[0]
const commit = argv.commit !== undefined
const maxParallel = 4

// guard against a missing id, which would otherwise crash on toString()
if (bvId === undefined) {
  console.error(
    'Usage: node scripts/remove_brand_variation_ids.js <id> [--commit true]'
  )
  process.exit(1)
}

console.log(
  (commit ? 'Remove' : 'Dry run for remove') +
    ' brandVariationId from projects that have { brandVariationId: ' +
    bvId +
    ' }'
)

var count = 0
db.projects.find(
  { brandVariationId: bvId.toString() },
  { _id: 1, name: 1 },
  processRemovals
)

function processRemovals(err, projects) {
  if (err) throw err
  async.eachLimit(
    projects,
    maxParallel,
    function(project, cb) {
      count += 1
      console.log(
        (commit ? 'Removing' : 'Would remove') +
          ' brandVariationId on project ' +
          project._id +
          ', name: "' +
          project.name +
          '"'
      )
      if (commit) {
        db.projects.update(
          { _id: project._id },
          { $unset: { brandVariationId: '' } },
          cb
        )
      } else {
        async.setImmediate(cb)
      }
    },
    function(err) {
      if (err) {
        console.log('There was a problem: ', err)
      }
      console.log(
        'BrandVariationId ' +
          (commit ? 'removed' : 'would be removed') +
          ' from ' +
          count +
          ' projects'
      )
      process.exit()
    }
  )
}

View file

@@ -1,45 +0,0 @@
const { db } = require('../app/src/infrastructure/mongojs')
const minimist = require('minimist')

const argv = minimist(process.argv.slice(2))
const commit = argv.commit !== undefined
if (!commit) {
  console.log('DOING DRY RUN. TO SAVE CHANGES PASS --commit')
}

function main(callback) {
  const query = { 'overleaf.id': { $exists: true } }
  db.users.count(query, (err, result) => {
    if (err) {
      return callback(err)
    }
    console.log(`>> Count: ${result}`)
    if (!commit) {
      return callback()
    }
    db.users.update(
      query,
      { $unset: { hashedPassword: 1 } },
      { multi: true },
      (err, result) => {
        if (err) {
          return callback(err)
        }
        console.log(`>> Updated users: ${JSON.stringify(result)}`)
        return callback()
      }
    )
  })
}

if (require.main === module) {
  main(err => {
    if (err) {
      console.error(err)
      return process.exit(1)
    }
    console.log('>> done')
    process.exit(0)
  })
}

View file

@@ -1,76 +0,0 @@
// Run like this:
// node ... --projectIds ./path/to/file/with/one/projectId/in/each/line
const fs = require('fs')
const { db, ObjectId } = require('../app/src/infrastructure/mongojs')
const minimist = require('minimist')

const argv = minimist(process.argv.slice(2))
const commit = argv.commit !== undefined
if (!argv.projectIds) {
  console.error('--projectIds flag is missing')
  process.exit(100)
}
if (!commit) {
  console.log('DOING DRY RUN. TO SAVE CHANGES PASS --commit')
}

const languages = [
  'am',
  'hy',
  'bn',
  'gu',
  'he',
  'hi',
  'hu',
  'is',
  'kn',
  'ml',
  'mr',
  'or',
  'ss',
  'ta',
  'te',
  'uk',
  'uz',
  'zu',
  'fi'
]

const projectIds = fs
  .readFileSync(argv.projectIds, { encoding: 'utf-8' })
  .split('\n')
  .filter(Boolean)

function main(callback) {
  const query = {
    _id: { $in: projectIds.map(ObjectId) },
    spellCheckLanguage: { $in: languages }
  }
  if (!commit) {
    // dry run: only report how many projects would be updated; previously the
    // update ran unconditionally and the --commit flag was ignored
    return db.projects.count(query, (err, result) => {
      if (err) {
        return callback(err)
      }
      console.log(`>> Would update ${result} projects`)
      return callback()
    })
  }
  db.projects.update(
    query,
    { $set: { spellCheckLanguage: '' } },
    { multi: true },
    (err, result) => {
      if (err) {
        return callback(err)
      }
      console.log(`>> Updated projects: ${JSON.stringify(result)}`)
      return callback()
    }
  )
}

if (require.main === module) {
  main(err => {
    if (err) {
      console.error(err)
      return process.exit(1)
    }
    console.log('>> done')
    process.exit(0)
  })
}

View file

@@ -1,53 +0,0 @@
const { db } = require('../../app/src/infrastructure/mongojs')
const async = require('async')

db.deletedSubscriptions.aggregate(
  { $match: { 'subscription.teamName': /(Work From Home|Work from Home)/ } },
  { $unwind: '$subscription.member_ids' },
  {
    $group: { _id: null, memberIds: { $addToSet: '$subscription.member_ids' } }
  },
  function(err, results) {
    if (err) {
      console.error(err)
      process.exit(1)
    }
    if (!results.length) {
      console.error('No users found')
      process.exit(1)
    }
    const userIds = results[0].memberIds
    console.log('Id,First Name,Last Name,Sign Up Date,Emails')
    async.eachLimit(
      userIds,
      10,
      function(userId, callback) {
        db.users.findOne(userId, function(err, user) {
          if (user) {
            const emails = user.emails.map(email => email.email)
            console.log(
              `${user._id},${user.first_name || ''},${user.last_name || ''},${
                user.signUpDate
              },${emails.join(',')}`
            )
          } else {
            console.error('A group user was not found')
          }
          callback(err)
        })
      },
      function(err) {
        if (err) {
          console.error(err)
          process.exit(1)
        }
        process.exit(0)
      }
    )
  }
)
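
One caveat with the report above: fields are joined with bare commas, so a name containing a comma or quote would corrupt its row. A minimal RFC 4180 quoting helper, illustrative and not part of the original script:

// Illustrative CSV quoting: wrap every field in double quotes and double any
// embedded quotes, per RFC 4180. Not part of the original script.
function csvField(value) {
  const str = value == null ? '' : String(value)
  return '"' + str.replace(/"/g, '""') + '"'
}

// usage sketch:
//   console.log([user._id, user.first_name, user.last_name].map(csvField).join(','))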

View file

@@ -1,46 +0,0 @@
const NotificationsHandler = require('../../app/src/Features/Notifications/NotificationsHandler')
const { db } = require('../../app/src/infrastructure/mongojs')
const async = require('async')

const templateKey = 'wfh_2020_upgrade_offer'
const key = 'wfh-2020-upgrade-2020-06-01'

db.subscriptions.aggregate(
  { $match: { teamName: /(Work From Home|Work from Home)/ } },
  { $unwind: '$member_ids' },
  { $group: { _id: null, memberIds: { $addToSet: '$member_ids' } } },
  function(err, results) {
    if (err) {
      throw err
    }
    // guard against an empty aggregation result, which would otherwise throw
    if (!results.length) {
      console.error('No users found')
      process.exit(1)
    }
    const userIds = results[0].memberIds
    async.eachLimit(
      userIds,
      10,
      function(userId, callback) {
        NotificationsHandler.createNotification(
          userId,
          key,
          templateKey,
          {},
          null,
          true,
          function(err) {
            if (err) {
              return callback(err)
            }
            console.log('Notification created for user ' + userId)
            callback()
          }
        )
      },
      function(err) {
        // surface failures instead of silently printing Done
        if (err) {
          throw err
        }
        console.log('Done')
        process.exit(0)
      }
    )
  }
)

View file

@@ -1,87 +0,0 @@
const NotificationsHandler = require('../../app/src/Features/Notifications/NotificationsHandler')
const { db } = require('../../app/src/infrastructure/mongojs')
const async = require('async')

const templateKey = 'wfh_2020_upgrade_offer'
const oldKey = 'wfh-2020-upgrade-2020-06-01'
const key = 'wfh-2020-upgrade-2020-06-18'

db.subscriptions.aggregate(
  { $match: { teamName: /(Work From Home|Work from Home)/ } },
  { $unwind: '$member_ids' },
  { $group: { _id: null, memberIds: { $addToSet: '$member_ids' } } },
  function(err, results) {
    if (err) {
      throw err
    }
    // guard against an empty aggregation result, which would otherwise throw
    if (!results.length) {
      console.error('No users found')
      process.exit(1)
    }
    const userIds = results[0].memberIds
    async.eachLimit(
      userIds,
      10,
      function(userId, callback) {
        async.series(
          [
            function(cb) {
              // clear the stale template fields from the June 1st notification
              // before creating the June 18th one
              db.notifications.findOne(
                { user_id: userId, key: oldKey },
                function(err, notification) {
                  if (err) {
                    console.log('Error finding notification for ' + userId)
                    cb(err)
                  } else if (!notification) {
                    cb()
                  } else {
                    if (notification.templateKey && notification.messageOpts) {
                      db.notifications.update(
                        {
                          _id: notification._id
                        },
                        {
                          $unset: { templateKey: true, messageOpts: true }
                        },
                        cb
                      )
                    } else {
                      cb()
                    }
                  }
                }
              )
            },
            function(cb) {
              NotificationsHandler.createNotification(
                userId,
                key,
                templateKey,
                {},
                null,
                true,
                cb
              )
            }
          ],
          function(err) {
            if (err) {
              callback(err)
            } else {
              console.log('Notification created for user ' + userId)
              callback()
            }
          }
        )
      },
      function(err) {
        if (err) {
          console.log(err)
          process.exit(1)
        } else {
          console.log('Done')
          process.exit(0)
        }
      }
    )
  }
)
)