Merge pull request #910 from overleaf/sk-remove-migrations-1
Remove migrations
commit baccf2e0d5
17 changed files with 0 additions and 1211 deletions

@@ -12,7 +12,6 @@ ENV SHARELATEX_CONFIG /etc/sharelatex/settings.coffee
 # -------------------------
 ADD ${baseDir}/bin /var/www/sharelatex/bin
 ADD ${baseDir}/doc /var/www/sharelatex/doc
-ADD ${baseDir}/migrations /var/www/sharelatex/migrations
 ADD ${baseDir}/tasks /var/www/sharelatex/tasks
 ADD ${baseDir}/Gruntfile.coffee /var/www/sharelatex/Gruntfile.coffee
 ADD ${baseDir}/package.json /var/www/sharelatex/package.json

@@ -38,20 +38,6 @@ module.exports = (grunt) ->
     options:
       limit: SERVICES.length
       logConcurrentOutput: true
-  coffee:
-    migrate:
-      expand: true,
-      flatten: false,
-      cwd: './',
-      src: ['./migrations/*.coffee'],
-      dest: './',
-      ext: '.js'
-      options:
-        bare:true
-
-  shell:
-    migrate:
-      command: "./node_modules/east/bin/east migrate --adapter east-mongo --url #{settings?.mongo?.url}"
-
   availabletasks:
     tasks:

@@ -115,8 +101,6 @@ module.exports = (grunt) ->
   grunt.registerTask "check:make", "Check that make is installed", () ->
     Helpers.checkMake @async()
 
-  grunt.registerTask 'migrate', "compile migrations and run them", ["coffee:migrate", 'shell:migrate']
-
   Helpers =
     installService: (service, callback = (error) ->) ->

@@ -1,8 +0,0 @@
-#!/bin/sh
-set -e
-
-which node
-which grunt
-ls -al /var/www/sharelatex/migrations
-cd /var/www/sharelatex && grunt migrate -v
-echo "All migrations finished"

@@ -1,109 +0,0 @@
-const Settings = require('settings-sharelatex')
-const Async = require('async')
-const mongojs = require('mongojs')
-const db = mongojs(Settings.mongo.url, ['users'])
-
-const indexKeys = { 'tokens.readAndWritePrefix': 1 }
-const indexOpts = {
-  unique: true,
-  partialFilterExpression: {
-    'tokens.readAndWritePrefix': { $exists: true }
-  },
-  background: true
-}
-
-// Index on Prefix
-const addReadAndWritePrefixIndex = (db, callback) => {
-  db.projects.ensureIndex(indexKeys, indexOpts, callback)
-}
-
-const removeReadAndWritePrefixIndex = (db, callback) => {
-  db.projects.dropIndex(indexKeys, callback)
-}
-
-// Extract prefix data
-const extractPrefix = (db, callback) => {
-  db.projects.find(
-    {
-      'tokens.readAndWrite': { $exists: true },
-      'tokens.readAndWritePrefix': { $exists: false }
-    },
-    { tokens: 1 },
-    (err, projects) => {
-      if (err) {
-        return callback(err)
-      }
-      console.log(`>> Updating ${projects.length} projects`)
-      Async.eachLimit(
-        projects,
-        5,
-        (project, cb) => {
-          const rwToken = project.tokens.readAndWrite
-          const prefixMatch = rwToken.match(/^(\d+).*$/)
-          if (!prefixMatch) {
-            const err = new Error(
-              `no prefix on token: ${project._id}, ${rwToken}`
-            )
-            console.log(`>> Error, ${err.message}`)
-            return cb(err)
-          }
-          db.projects.update(
-            { _id: project._id },
-            { $set: { 'tokens.readAndWritePrefix': prefixMatch[1] } },
-            cb
-          )
-        },
-        err => {
-          if (err) {
-            return callback(err)
-          }
-          console.log('>> done')
-          callback()
-        }
-      )
-    }
-  )
-}
-
-const erasePrefix = (db, callback) => {
-  db.projects.update({$unset: 'tokens.readAndWritePrefix'}, callback)
-}
-
-// Migrations
-
-exports.migrate = (client, done) => {
-  console.log(`>> Adding index to projects: ${JSON.stringify(indexKeys)}, with options: ${JSON.stringify(indexOpts)}`)
-  addReadAndWritePrefixIndex(db, (err) => {
-    if(err) {
-      console.log(">> Error while adding index")
-      return done(err)
-    }
-    console.log(">> Extracting tokens.readAndWritePrefix field for existing projects")
-    extractPrefix(db, (err) => {
-      if(err) {
-        console.log(">> Error while extracting prefix data")
-        return done(err)
-      }
-      done()
-    })
-  })
-}
-
-exports.rollback = (client, done) => {
-  console.log(`>> Dropping index on projects: ${JSON.stringify(indexKeys)}`)
-  removeReadAndWritePrefixIndex(db, (err) => {
-    if(err) {
-      console.log(">> Error while dropping index")
-      return done(err)
-    }
-    console.log(">> Erasing tokens.readAndWritePrefix field for existing projects")
-    erasePrefix(db, (err) => {
-      if(err) {
-        console.log(">> Error while erasing prefix data")
-        return done(err)
-      }
-      done()
-    })
-  })
-}

@@ -1,23 +0,0 @@
-const Settings = require('settings-sharelatex')
-const mongojs = require('mongojs')
-const db = mongojs(Settings.mongo.url, ['projects'])
-
-exports.migrate = (client, done) => {
-  console.log(`>> Setting 'imageName' in projects`)
-
-  if (!Settings.currentImageName) {
-    console.log(`>> 'currentImageName' is not defined, no projects updated`)
-    return done()
-  }
-
-  console.log(`>> Setting 'imageName' = ${Settings.currentImageName}`)
-
-  db.projects.update(
-    { imageName: { $exists: false } },
-    { $set: { imageName: Settings.currentImageName } },
-    { multi: true },
-    done
-  )
-}
-
-exports.rollback = (client, done) => done()

@@ -1,24 +0,0 @@
-// Internal ticket: https://github.com/overleaf/issues/issues/4094
-
-const Settings = require('settings-sharelatex')
-const mongojs = require('mongojs')
-const db = mongojs(Settings.mongo.url, ['deletedFiles'])
-
-const INDEX_NAME = 'projectId_1'
-const INDEX_KEYS = { projectId: 1 }
-const INDEX_OPTIONS = {
-  name: INDEX_NAME,
-  background: 1
-}
-
-exports.migrate = (client, done) => {
-  db.deletedFiles.ensureIndex(
-    INDEX_KEYS,
-    INDEX_OPTIONS,
-    done
-  )
-}
-
-exports.rollback = (client, done) => {
-  db.deletedFiles.dropIndex(INDEX_NAME, done)
-}

@@ -1,28 +0,0 @@
-// Internal ticket: https://github.com/overleaf/issues/issues/4211
-
-const Settings = require('settings-sharelatex')
-const mongojs = require('mongojs')
-const db = mongojs(Settings.mongo.url, ['docs'])
-
-const INDEX_NAME = 'project_id_deleted_deletedAt_1'
-const INDEX_KEYS = {
-  project_id: 1,
-  deleted: 1,
-  deletedAt: -1
-}
-const INDEX_OPTIONS = {
-  name: INDEX_NAME,
-  background: 1
-}
-
-exports.migrate = (client, done) => {
-  db.docs.ensureIndex(
-    INDEX_KEYS,
-    INDEX_OPTIONS,
-    done
-  )
-}
-
-exports.rollback = (client, done) => {
-  db.docs.dropIndex(INDEX_NAME, done)
-}

@@ -1,164 +0,0 @@
-Settings = require "settings-sharelatex"
-bson = require('bson')
-BSON = new bson()
-fs = require("fs")
-mongojs = require("mongojs")
-ObjectId = mongojs.ObjectId
-console.log Settings.mongo.url
-db = mongojs(Settings.mongo.url, ['projects', 'docs'])
-_ = require("lodash")
-async = require("async")
-exec = require("child_process").exec
-
-finished_projects_path = "/tmp/finished-projects"
-all_projects_path = "/tmp/all-projects"
-project_too_large_path = "/tmp/large_projects"
-
-
-printProgress = ->
-  exec "wc #{finished_projects_path}", (error, results) ->
-    setTimeout printProgress, 1000 * 30
-
-checkIfFileHasBeenProcessed = (project_id, callback)->
-  exec "grep #{project_id} #{finished_projects_path}", (error, results) ->
-    hasBeenProcessed = _.include(results, project_id)
-    callback(error, hasBeenProcessed)
-
-loadProjectIds = (callback)->
-  console.log "loading project ids from #{all_projects_path}"
-  fs.readFile all_projects_path, "utf-8", (err, data)->
-    ids = data.split("\n")
-    ids = _.filter ids, (id)-> id? and id.length == 24
-    console.log "loaded #{ids.length} project ids from #{all_projects_path}"
-    callback err, ids
-
-getAndWriteProjectids = (callback)->
-  console.log "finding all project id's - #{new Date().toString()}"
-  db.projects.find {}, {_id:1}, (err, ids)->
-    console.log "total found projects in mongo #{ids.length} - #{new Date().toString()}"
-    ids = _.pluck ids, '_id'
-    ids = _.filter ids, (id)-> id?
-    fileData = ids.join("\n")
-    fs.writeFile all_projects_path, fileData, ->
-      callback(err, ids)
-
-markProjectAsToLargeAndFinished = (project_id, callback)->
-  console.log "#{project_id} too large"
-  markProjectAsProcessed project_id, (err)->
-    fs.appendFile project_too_large_path, "#{project_id}\n", callback
-
-getProjectIds = (callback)->
-  exists = fs.existsSync all_projects_path
-  if exists
-    loadProjectIds callback
-  else
-    getAndWriteProjectids callback
-
-markProjectAsProcessed = (project_id, callback)->
-  fs.appendFile finished_projects_path, "#{project_id}\n", callback
-
-getAllDocs = (project_id, callback = (error, docs) ->) ->
-  db.projects.findOne _id:ObjectId(project_id), (error, project) ->
-    return callback(error) if error?
-    if !project?
-      console.log "no such project #{project_id}"
-      return callback()
-    size = BSON.calculateObjectSize(project)
-    if size > 12000000 #12mb
-      return markProjectAsToLargeAndFinished project_id, callback
-    findAllDocsInProject project, (error, docs) ->
-      return callback(error) if error?
-      return callback null, docs
-
-findAllDocsInProject = (project, callback = (error, docs) ->) ->
-  callback null, _findAllDocsInFolder project.rootFolder[0]
-
-_findDocInFolder = (folder = {}, doc_id, currentPath) ->
-  for doc, i in folder.docs or []
-    if doc?._id? and doc._id.toString() == doc_id.toString()
-      return {
-        doc: doc
-        mongoPath: "#{currentPath}.docs.#{i}"
-      }
-
-  for childFolder, i in folder.folders or []
-    result = _findDocInFolder childFolder, doc_id, "#{currentPath}.folders.#{i}"
-    return result if result?
-
-  return null
-
-_findAllDocsInFolder = (folder = {}) ->
-  docs = folder.docs or []
-  for childFolder in folder.folders or []
-    docs = docs.concat _findAllDocsInFolder childFolder
-  return docs
-
-insertDocIntoDocCollection = (project_id, doc_id, lines, oldRev, callback)->
-  if !project_id?
-    return callback("no project id")
-  if !doc_id?
-    return callback()
-  if !lines?
-    lines = [""]
-  update = {}
-  update["_id"] = ObjectId(doc_id.toString())
-  update["lines"] = lines
-  update["project_id"] = ObjectId(project_id)
-  update["rev"] = oldRev || 0
-  db.docs.insert update, callback
-
-saveDocsIntoMongo = (project_id, docs, callback)->
-  jobs = _.map docs, (doc)->
-    (cb)->
-      if !doc?
-        console.error "null doc in project #{project_id}" #just skip it, not a big deal
-        return cb()
-      insertDocIntoDocCollection project_id, doc._id, doc.lines, doc.rev, (err)->
-        if err?.code == 11000 #duplicate key, doc already in there so its not a problem.
-          err = undefined
-        if err?
-          console.log "error inserting doc into doc collection", err
-        cb(err)
-
-
-  async.series jobs, callback
-
-
-processNext = (project_id, callback)->
-  checkIfFileHasBeenProcessed project_id, (err, hasBeenProcessed)->
-    if hasBeenProcessed
-      console.log "#{project_id} already processed, skipping"
-      return callback()
-    console.log "#{project_id} processing"
-    getAllDocs project_id, (err, docs)->
-      if err?
-        console.error err, project_id, "could not get all docs"
-        return callback(err)
-      else
-        saveDocsIntoMongo project_id, docs, (err)->
-          if err?
-            console.error err, project_id, "could not save docs into mongo"
-            return callback(err)
-          markProjectAsProcessed project_id, (err)->
-            setTimeout(
-              -> callback(err)
-            ,0)
-
-
-
-exports.migrate = (client, done = ->)->
-  getProjectIds (err, ids)->
-    printProgress()
-    jobs = _.map ids, (id)->
-      return (cb)->
-        processNext(id, cb)
-    async.series jobs, (err)->
-      if err?
-        console.error err, "at end of jobs"
-      else
-        console.log "finished"
-      done(err)
-
-
-exports.rollback = (next)->
-  next()

@@ -1,185 +0,0 @@
-Settings = require "settings-sharelatex"
-fs = require("fs")
-mongojs = require("mongojs")
-ObjectId = mongojs.ObjectId
-db = mongojs(Settings.mongo.url, ['projects', 'docs'])
-_ = require("lodash")
-async = require("async")
-exec = require("child_process").exec
-
-finished_projects_path = "/tmp/finished-projects-2"
-all_projects_path = "/tmp/all-projects-2"
-unmigrated_docs_path = "/tmp/unmigrated-2"
-
-
-printProgress = ->
-  exec "wc #{finished_projects_path}", (error, results) ->
-    setTimeout printProgress, 1000 * 30
-
-checkIfFileHasBeenProcessed = (project_id, callback)->
-  exec "grep #{project_id} #{finished_projects_path}", (error, results) ->
-    hasBeenProcessed = _.include(results, project_id)
-    callback(error, hasBeenProcessed)
-
-loadProjectIds = (callback)->
-  console.log "loading project ids from #{all_projects_path}"
-  fs.readFile all_projects_path, "utf-8", (err, data)->
-    ids = data.split("\n")
-    ids = _.filter ids, (id)-> id? and id.length == 24
-    console.log "loaded #{ids.length} project ids from #{all_projects_path}"
-    callback err, ids
-
-getAndWriteProjectids = (callback)->
-  console.log "finding all project id's - #{new Date().toString()}"
-  db.projects.find {}, {_id:1}, (err, ids)->
-    console.log "total found projects in mongo #{ids.length} - #{new Date().toString()}"
-    ids = _.pluck ids, '_id'
-    ids = _.filter ids, (id)-> id?
-    fileData = ids.join("\n")
-    fs.writeFile all_projects_path, fileData, ->
-      callback(err, ids)
-
-markDocAsUnmigrated = (project_id, doc_id, callback)->
-  console.log "#{project_id} #{doc_id} unmigrated"
-  markProjectAsProcessed project_id, (err)->
-    fs.appendFile unmigrated_docs_path, "#{project_id} #{doc_id}\n", callback
-
-markUnmigratedDocs = (project_id, docs, callback)->
-  console.log docs.length, project_id, "unmigrated"
-  jobs = _.map docs, (doc)->
-    (cb)->
-      markDocAsUnmigrated project_id, doc._id, cb
-  async.series jobs, callback
-
-getProjectIds = (callback)->
-  exists = fs.existsSync all_projects_path
-  if exists
-    loadProjectIds callback
-  else
-    getAndWriteProjectids callback
-
-markProjectAsProcessed = (project_id, callback)->
-  fs.appendFile finished_projects_path, "#{project_id}\n", callback
-
-getAllDocs = (project_id, callback = (error, docs) ->) ->
-  excludes = {}
-  for i in [0..12]
-    excludes["rootFolder#{Array(i).join(".folders")}.docs.lines"] = 0
-  db.projects.findOne _id: ObjectId(project_id.toString()), excludes, (error, project) ->
-    return callback(error) if error?
-    if !project?
-      console.log "no such project #{project_id}"
-      return callback()
-    findAllDocsInProject project, (error, docs) ->
-      return callback(error) if error?
-      return callback null, docs, project
-
-findAllDocsInProject = (project, callback = (error, docs) ->) ->
-  callback null, _findAllDocsInFolder project.rootFolder[0]
-
-findDocInProject = (project, doc_id, callback = (error, doc, mongoPath) ->) ->
-  result = _findDocInFolder project.rootFolder[0], doc_id, "rootFolder.0"
-  if result?
-    callback null, result.doc, result.mongoPath
-  else
-    callback null, null, null
-
-_findDocInFolder = (folder = {}, doc_id, currentPath) ->
-  for doc, i in folder.docs or []
-    if doc?._id? and doc._id.toString() == doc_id.toString()
-      return {
-        doc: doc
-        mongoPath: "#{currentPath}.docs.#{i}"
-      }
-  for childFolder, i in folder.folders or []
-    result = _findDocInFolder childFolder, doc_id, "#{currentPath}.folders.#{i}"
-    return result if result?
-
-  return null
-
-_findAllDocsInFolder = (folder = {}) ->
-  docs = folder.docs or []
-  for childFolder in folder.folders or []
-    docs = docs.concat _findAllDocsInFolder childFolder
-  return docs
-
-isDocInDocCollection = (doc, callback)->
-  if !doc?._id? or doc._id.length == 0
-    return callback(null, true)
-  db.docs.find({_id: ObjectId(doc._id+"")}, {_id: 1}).limit 1, (err, foundDocs)->
-    exists = foundDocs.length > 0
-    callback err, exists
-
-getWhichDocsCanBeDeleted = (docs, callback = (err, docsToBeDeleted, unmigratedDocs)->)->
-  docsToBeDeleted = []
-  unmigratedDocs = []
-
-  jobs = _.map docs, (doc)->
-    return (cb)->
-      isDocInDocCollection doc, (err, exists)->
-        if exists
-          docsToBeDeleted.push doc
-        else
-          unmigratedDocs.push doc
-        cb(err)
-  async.series jobs, (err)->
-    callback err, docsToBeDeleted, unmigratedDocs
-
-wipeDocLines = (project_id, mongoPath, callback)->
-  update =
-    $unset: {}
-  update.$unset["#{mongoPath}.lines"] = ""
-  update.$unset["#{mongoPath}.rev"] = ""
-  db.projects.update _id: ObjectId(project_id+''), update, callback
-
-
-removeDocLinesFromProject = (docs, project, callback)->
-  jobs = _.map docs, (doc)->
-    (cb)->
-      findDocInProject project, doc._id, (err, doc, mongoPath)->
-        wipeDocLines project._id, mongoPath, cb
-  async.parallelLimit jobs, 5, callback
-
-processNext = (project_id, callback)->
-  if !project_id? or project_id.length == 0
-    return callback()
-  checkIfFileHasBeenProcessed project_id, (err, hasBeenProcessed)->
-    if hasBeenProcessed
-      console.log "#{project_id} already processed, skipping"
-      return callback()
-    console.log "#{project_id} processing"
-    getAllDocs project_id, (err, docs, project)->
-      if err?
-        console.error err, project_id, "could not get all docs"
-        return callback(err)
-      else
-        getWhichDocsCanBeDeleted docs, (err, docsToBeDeleted, unmigratedDocs)->
-          if err?
-            console.error err, project_id, "could not save docs into mongo"
-            return callback(err)
-          markUnmigratedDocs project_id, unmigratedDocs, (err)->
-            removeDocLinesFromProject docsToBeDeleted, project, (err)->
-              if err?
-                return callback(err)
-              markProjectAsProcessed project_id, (err)->
-                setTimeout(
-                  -> callback(err)
-                ,0)
-
-exports.migrate = (client, done = ->)->
-  getProjectIds (err, ids)->
-    printProgress()
-    jobs = _.map ids, (id)->
-      return (cb)->
-        processNext(id, cb)
-    async.series jobs, (err)->
-      if err?
-        console.error err, "at end of jobs"
-      else
-        console.log "finished"
-      done(err)
-
-
-exports.rollback = (next)->
-  next()

@@ -1,342 +0,0 @@
-Settings = require "settings-sharelatex"
-fs = require("fs")
-mongojs = require("mongojs")
-ObjectId = mongojs.ObjectId
-db = mongojs(Settings.mongo.url, ['docs','docHistory', 'docHistoryStats'])
-_ = require("underscore")
-async = require("async")
-exec = require("child_process").exec
-bson = require('bson')
-BSON = new bson()
-
-logger = {
-  log: ->
-  err: ->
-}
-
-needToExit = false
-handleExit = () ->
-  needToExit = true
-  console.log('Got signal. Shutting down.')
-
-process.on 'SIGINT', handleExit
-process.on 'SIGHUP', handleExit
-
-finished_docs_path = "/tmp/finished-docs-3"
-all_docs_path = "/tmp/all-docs-3"
-unmigrated_docs_path = "/tmp/unmigrated-docs-3"
-
-finished_docs = {}
-if fs.existsSync(finished_docs_path)
-  for id in fs.readFileSync(finished_docs_path,'utf-8').split("\n")
-    finished_docs[id] = true
-
-getAndWriteDocids = (callback)->
-  console.log "finding all doc id's - #{new Date().toString()}"
-  db.docs.find {}, {_id:1}, (err, ids)->
-    console.log "total found docs in mongo #{ids.length} - #{new Date().toString()}"
-    ids = _.pluck ids, '_id'
-    ids = _.filter ids, (id)-> id?
-    fileData = ids.join("\n")
-    fs.writeFileSync all_docs_path + ".tmp", fileData
-    fs.renameSync all_docs_path + ".tmp", all_docs_path
-    callback(err, ids)
-
-loadDocIds = (callback)->
-  console.log "loading doc ids from #{all_docs_path}"
-  data = fs.readFileSync all_docs_path, "utf-8"
-  ids = data.split("\n")
-  console.log "loaded #{ids.length} doc ids from #{all_docs_path}"
-  callback null, ids
-
-getDocIds = (callback)->
-  exists = fs.existsSync all_docs_path
-  if exists
-    loadDocIds callback
-  else
-    getAndWriteDocids callback
-
-markDocAsProcessed = (doc_id, callback)->
-  finished_docs[doc_id] = true
-  fs.appendFile finished_docs_path, "#{doc_id}\n", callback
-
-markDocAsUnmigrated = (doc_id, callback)->
-  console.log "#{doc_id} unmigrated"
-  markDocAsProcessed doc_id, (err)->
-    fs.appendFile unmigrated_docs_path, "#{doc_id}\n", callback
-
-checkIfDocHasBeenProcessed = (doc_id, callback)->
-  callback(null, finished_docs[doc_id])
-
-processNext = (doc_id, callback)->
-  if !doc_id? or doc_id.length == 0
-    return callback()
-  if needToExit
-    return callback(new Error("graceful shutdown"))
-  checkIfDocHasBeenProcessed doc_id, (err, hasBeenProcessed)->
-    if hasBeenProcessed
-      console.log "#{doc_id} already processed, skipping"
-      return callback()
-    PackManager._packDocHistory doc_id, {}, (err) ->
-      if err?
-        console.log "error processing #{doc_id}"
-        markDocAsUnmigrated doc_id, callback
-      else
-        markDocAsProcessed doc_id, callback
-
-updateIndexes = (callback) ->
-  async.series [
-    (cb) ->
-      console.log "create index"
-      db.docHistory.ensureIndex { project_id: 1, "meta.end_ts": 1, "meta.start_ts": -1 }, { background: true }, cb
-    (cb) ->
-      console.log "drop index"
-      db.docHistory.dropIndex { project_id: 1, "meta.end_ts": 1 }, cb
-    (cb) ->
-      console.log "drop index"
-      db.docHistory.dropIndex { project_id: 1, "pack.0.meta.end_ts": 1, "meta.end_ts": 1}, cb
-  ], (err, results) ->
-    console.log "all done"
-    callback(err)
-
-exports.migrate = (client, done = ->)->
-  getDocIds (err, ids)->
-    totalDocCount = ids.length
-    alreadyFinishedCount = Object.keys(finished_docs).length
-    t0 = Date.now()
-    printProgress = () ->
-      count = Object.keys(finished_docs).length
-      processedFraction = (count-alreadyFinishedCount)/totalDocCount
-      remainingFraction = (totalDocCount-count)/totalDocCount
-      t = Date.now()
-      dt = (t-t0)*remainingFraction/processedFraction
-      estFinishTime = new Date(t + dt)
-      console.log "completed #{count}/#{totalDocCount} processed=#{processedFraction.toFixed(2)} remaining=#{remainingFraction.toFixed(2)} elapsed=#{(t-t0)/1000} est Finish=#{estFinishTime}"
-    interval = setInterval printProgress, 3*1000
-
-    nextId = null
-
-    testFn = () ->
-      return false if needToExit
-      id = ids.shift()
-      while id? and finished_docs[id] # skip finished
-        id = ids.shift()
-      nextId = id
-      return nextId?
-
-    executeFn = (cb) ->
-      processNext nextId, cb
-
-    async.whilst testFn, executeFn, (err)->
-      if err?
-        console.error err, "at end of jobs"
-      else
-        console.log "finished at #{new Date}"
-      clearInterval interval
-      done(err)
-
-exports.rollback = (client, done)->
-  done()
-
-# process.nextTick () ->
-# exports.migrate () ->
-# console.log "done"
-
-DAYS = 24 * 3600 * 1000 # one day in milliseconds
-
-# copied from track-changes/app/coffee/PackManager.coffee
-
-PackManager =
-  MAX_SIZE: 1024*1024 # make these configurable parameters
-  MAX_COUNT: 512
-
-  convertDocsToPacks: (docs, callback) ->
-    packs = []
-    top = null
-    docs.forEach (d,i) ->
-      # skip existing packs
-      if d.pack?
-        top = null
-        return
-      sz = BSON.calculateObjectSize(d)
-      # decide if this doc can be added to the current pack
-      validLength = top? && (top.pack.length < PackManager.MAX_COUNT)
-      validSize = top? && (top.sz + sz < PackManager.MAX_SIZE)
-      bothPermanent = top? && (top.expiresAt? is false) && (d.expiresAt? is false)
-      bothTemporary = top? && (top.expiresAt? is true) && (d.expiresAt? is true)
-      within1Day = bothTemporary && (d.meta.start_ts - top.meta.start_ts < 24 * 3600 * 1000)
-      if top? && validLength && validSize && (bothPermanent || (bothTemporary && within1Day))
-        top.pack = top.pack.concat {v: d.v, meta: d.meta, op: d.op, _id: d._id}
-        top.sz += sz
-        top.n += 1
-        top.v_end = d.v
-        top.meta.end_ts = d.meta.end_ts
-        top.expiresAt = d.expiresAt if top.expiresAt?
-        return
-      else
-        # create a new pack
-        top = _.clone(d)
-        top.pack = [ {v: d.v, meta: d.meta, op: d.op, _id: d._id} ]
-        top.meta = { start_ts: d.meta.start_ts, end_ts: d.meta.end_ts }
-        top.sz = sz
-        top.n = 1
-        top.v_end = d.v
-        delete top.op
-        delete top._id
-        packs.push top
-
-    callback(null, packs)
-
-  checkHistory: (docs, callback) ->
-    errors = []
-    prev = null
-    error = (args...) ->
-      errors.push args
-    docs.forEach (d,i) ->
-      if d.pack?
-        n = d.pack.length
-        last = d.pack[n-1]
-        error('bad pack v_end', d) if d.v_end != last.v
-        error('bad pack start_ts', d) if d.meta.start_ts != d.pack[0].meta.start_ts
-        error('bad pack end_ts', d) if d.meta.end_ts != last.meta.end_ts
-        d.pack.forEach (p, i) ->
-          prev = v
-          v = p.v
-          error('bad version', v, 'in', p) if v <= prev
-          #error('expired op', p, 'in pack') if p.expiresAt?
-      else
-        prev = v
-        v = d.v
-        error('bad version', v, 'in', d) if v <= prev
-    if errors.length
-      callback(errors)
-    else
-      callback()
-
-  insertPack: (packObj, callback) ->
-    bulk = db.docHistory.initializeOrderedBulkOp()
-    doc_id = packObj.doc_id
-    expect_nInserted = 1
-    expect_nRemoved = packObj.pack.length
-    logger.log {doc_id: doc_id}, "adding pack, removing #{expect_nRemoved} ops"
-    bulk.insert packObj
-    ids = (op._id for op in packObj.pack)
-    bulk.find({_id:{$in:ids}}).remove()
-    bulk.execute (err, result) ->
-      if err?
-        logger.error {doc_id: doc_id}, "error adding pack"
-        callback(err, result)
-      else if result.nInserted != expect_nInserted or result.nRemoved != expect_nRemoved
-        logger.error {doc_id: doc_id, result}, "unexpected result adding pack"
-        callback(new Error(
-          msg: 'unexpected result'
-          expected: {expect_nInserted, expect_nRemoved}
-        ), result)
-      else
-        db.docHistoryStats.update {doc_id:doc_id}, {
-          $inc:{update_count:-expect_nRemoved},
-          $currentDate:{last_packed:true}
-        }, {upsert:true}, () ->
-          callback(err, result)
-
-  # retrieve document ops/packs and check them
-  getDocHistory: (doc_id, callback) ->
-    db.docHistory.find({doc_id:ObjectId(doc_id)}).sort {v:1}, (err, docs) ->
-      return callback(err) if err?
-      # for safety, do a consistency check of the history
-      logger.log {doc_id}, "checking history for document"
-      PackManager.checkHistory docs, (err) ->
-        return callback(err) if err?
-        callback(err, docs)
-        #PackManager.deleteExpiredPackOps docs, (err) ->
-        # return callback(err) if err?
-        # callback err, docs
-
-  packDocHistory: (doc_id, options, callback) ->
-    if typeof callback == "undefined" and typeof options == 'function'
-      callback = options
-      options = {}
-    LockManager.runWithLock(
-      "HistoryLock:#{doc_id}",
-      (releaseLock) ->
-        PackManager._packDocHistory(doc_id, options, releaseLock)
-      , callback
-    )
-
-  _packDocHistory: (doc_id, options, callback) ->
-    logger.log {doc_id},"starting pack operation for document history"
-
-    PackManager.getDocHistory doc_id, (err, docs) ->
-      return callback(err) if err?
-      origDocs = 0
-      origPacks = 0
-      for d in docs
-        if d.pack? then origPacks++ else origDocs++
-      PackManager.convertDocsToPacks docs, (err, packs) ->
-        return callback(err) if err?
-        total = 0
-        for p in packs
-          total = total + p.pack.length
-        logger.log {doc_id, origDocs, origPacks, newPacks: packs.length, totalOps: total}, "document stats"
-        if packs.length
-          if options['dry-run']
-            logger.log {doc_id}, 'dry-run, skipping write packs'
-            return callback()
-          PackManager.savePacks packs, (err) ->
-            return callback(err) if err?
-            # check the history again
-            PackManager.getDocHistory doc_id, callback
-        else
-          logger.log {doc_id}, "no packs to write"
-          # keep a record that we checked this one to avoid rechecking it
-          db.docHistoryStats.update {doc_id:doc_id}, {
-            $currentDate:{last_checked:true}
-          }, {upsert:true}, () ->
-            callback null, null
-
-  DB_WRITE_DELAY: 100
-
-  savePacks: (packs, callback) ->
-    async.eachSeries packs, PackManager.safeInsert, (err, result) ->
-      if err?
-        logger.log {err, result}, "error writing packs"
-        callback err, result
-      else
-        callback()
-
-  safeInsert: (packObj, callback) ->
-    PackManager.insertPack packObj, (err, result) ->
-      setTimeout () ->
-        callback(err,result)
-      , PackManager.DB_WRITE_DELAY
-
-  deleteExpiredPackOps: (docs, callback) ->
-    now = Date.now()
-    toRemove = []
-    toUpdate = []
-    docs.forEach (d,i) ->
-      if d.pack?
-        newPack = d.pack.filter (op) ->
-          if op.expiresAt? then op.expiresAt > now else true
-        if newPack.length == 0
-          toRemove.push d
-        else if newPack.length < d.pack.length
-          # adjust the pack properties
-          d.pack = newPack
-          first = d.pack[0]
-          last = d.pack[d.pack.length - 1]
-          d.v_end = last.v
-          d.meta.start_ts = first.meta.start_ts
-          d.meta.end_ts = last.meta.end_ts
-          toUpdate.push d
-    if toRemove.length or toUpdate.length
-      bulk = db.docHistory.initializeOrderedBulkOp()
-      toRemove.forEach (pack) ->
-        console.log "would remove", pack
-        #bulk.find({_id:pack._id}).removeOne()
-      toUpdate.forEach (pack) ->
-        console.log "would update", pack
-        #bulk.find({_id:pack._id}).updateOne(pack);
-      bulk.execute callback
-    else
-      callback()

@@ -1,38 +0,0 @@
-Settings = require "settings-sharelatex"
-fs = require("fs")
-mongojs = require("mongojs")
-ObjectId = mongojs.ObjectId
-db = mongojs(Settings.mongo.url, ['users'])
-_ = require("underscore")
-
-
-handleExit = () ->
-  console.log('Got signal. Shutting down.')
-
-
-process.on 'SIGINT', handleExit
-process.on 'SIGHUP', handleExit
-
-
-exports.migrate = (client, done=()->) ->
-  patch = {
-    $set: {
-      features: {
-        collaborators: -1
-        dropbox: true
-        versioning: true
-        references: true
-        templates: true
-        compileTimeout: 180
-        compileGroup: "standard"
-      }
-    }
-  }
-  console.log ">> updating all user features: ", patch
-  db.users.update {}, patch, {multi: true}, (err) ->
-    console.log "finished updating all user features"
-    return done(err)
-
-
-exports.rollback = (client, done) ->
-  done()

@@ -1,103 +0,0 @@
-Settings = require "settings-sharelatex"
-mongojs = require("mongojs")
-ObjectId = mongojs.ObjectId
-db = mongojs(Settings.mongo.url, ['users', 'projects', 'subscriptions'])
-async = require "async"
-
-module.exports = HoldingAccountMigration =
-  DRY_RUN: true
-
-  findHoldingAccounts: (callback = (error, users) ->) ->
-    db.users.find({holdingAccount: true, hashedPassword: { $exists: false }}, {holdingAccount: 1, email: 1}, callback)
-
-  deleteUserProjects: (user_id, callback = (error) ->) ->
-    # Holding accounts can't own projects, so only remove from
-    # collaberator_refs and readOnly_refs
-    console.log "[Removing user from projects]", user_id
-    db.projects.find {
-      $or: [
-        {collaberator_refs: user_id},
-        {readOnly_refs: user_id}
-      ]
-    }, { collaberator_refs: 1, readOnly_refs: 1 }, (error, projects = []) ->
-      return callback(error) if error?
-      jobs = projects.map (project) ->
-        (cb) ->
-          console.log "[Removing user from project]", user_id, JSON.stringify(project)
-          if !project?._id?
-            throw new Error("no project id")
-
-          if !HoldingAccountMigration.DRY_RUN
-            db.projects.update {
-              _id: project._id
-            }, {
-              $pull: {
-                collaberator_refs: user_id,
-                readOnly_refs: user_id
-              }
-            }, (error, result) ->
-              return cb(error) if error?
-              console.log "[Removed user from project]", user_id, project._id, result
-              cb()
-          else
-            console.log "[Would have removed user from project]", user_id, project._id
-            cb()
-
-      async.series jobs, callback
-
-  deleteUser: (user_id, callback = (error) ->) ->
-    if !user_id?
-      throw new Error("must have user_id")
-    if !HoldingAccountMigration.DRY_RUN
-      db.users.remove {_id: user_id, holdingAccount: true}, (error, result) ->
-        return callback(error) if error?
-        console.log "[Removed user]", user_id, result
-        if result.n != 1
-          return callback(new Error("failed to remove user as expected"))
-        callback()
-    else
-      console.log "[Would have removed user]", user_id
-      callback()
-
-  migrateGroupInvites: (user_id, email, callback = (error) ->) ->
-    if !user_id?
-      throw new Error("must have user_id")
-    if !HoldingAccountMigration.DRY_RUN
-      db.subscriptions.update {member_ids: user_id}, {
-        $pull: { member_ids: user_id },
-        $addToSet : { invited_emails: email }
-      }, { multi : true }, (error, result) ->
-        return callback(error) if error?
-        console.log "[Migrated user in group accounts]", user_id, email, result
-        callback()
-    else
-      console.log "[Would have migrated user in group accounts]", user_id, email
-      callback()
-
-  run: (done = () ->) ->
-    console.log "[Getting list of holding accounts]"
-    HoldingAccountMigration.findHoldingAccounts (error, users) ->
-      throw error if error?
-      console.log "[Got #{users.length} holding accounts]"
-      i = 0
-      jobs = users.map (u) ->
-        (cb) ->
-          console.log "[Removing user #{i++}/#{users.length}]"
-          HoldingAccountMigration.migrateGroupInvites u._id, u.email, (error) ->
-            return cb(error) if error?
-            HoldingAccountMigration.deleteUser u._id, (error) ->
-              return cb(error) if error?
-              HoldingAccountMigration.deleteUserProjects u._id, (error) ->
-                return cb(error) if error?
-                setTimeout cb, 50 # Small delay to not hammer DB
-      async.series jobs, (error) ->
-        throw error if error?
-        console.log "[FINISHED]"
-        done()
-
-  migrate: (client, done=()->) ->
-    HoldingAccountMigration.DRY_RUN = false
-    HoldingAccountMigration.run(done)
-
-  rollback: (client, done) ->
-    done()

@@ -1,30 +0,0 @@
-Settings = require "settings-sharelatex"
-fs = require("fs")
-mongojs = require("mongojs")
-ObjectId = mongojs.ObjectId
-db = mongojs(Settings.mongo.url, ['users'])
-_ = require("underscore")
-
-
-handleExit = () ->
-  console.log('Got signal. Shutting down.')
-
-
-process.on 'SIGINT', handleExit
-process.on 'SIGHUP', handleExit
-
-
-exports.migrate = (client, done=()->) ->
-  patch = {
-    $set: {
-      'features.trackChanges': true
-    }
-  }
-  console.log ">> enabling trackChanges feature: ", patch
-  db.users.update {}, patch, {multi: true}, (err) ->
-    console.log "finished enabling trackChanges feature"
-    return done(err)
-
-
-exports.rollback = (client, done) ->
-  done()

@@ -1,51 +0,0 @@
-Settings = require "settings-sharelatex"
-fs = require("fs")
-mongojs = require("mongojs")
-ObjectId = mongojs.ObjectId
-db = mongojs(Settings.mongo.url, ['docs','docHistory', 'docHistoryStats'])
-_ = require("underscore")
-async = require("async")
-exec = require("child_process").exec
-bson = require('bson')
-BSON = new bson()
-
-
-handleExit = () ->
-  console.log('Got signal. Shutting down.')
-
-
-exports.migrate = (client, done=()->) ->
-  console.log ">> Adding indexes for token-based project access: "
-  db.projects.ensureIndex {'tokens.readAndWrite': 1}, {
-    partialFilterExpression: { 'tokens.readAndWrite': { $exists: true } },
-    unique: true,
-    background: true
-  }, (err) ->
-    if err?
-      return done(err)
-    db.projects.ensureIndex {'tokens.readOnly': 1}, {
-      partialFilterExpression: { 'tokens.readOnly': { $exists: true } },
-      unique: true,
-      background: true
-    }, (err) ->
-      if err?
-        return done(err)
-      db.projects.ensureIndex {tokenAccessReadAndWrite_refs: 1}, {
-        background: true
-      }, (err) ->
-        if err?
-          return done(err)
-        db.projects.ensureIndex {tokenAccessOnly_refs: 1}, {
-          background: true
-        }, (err) ->
-          console.log ">> done adding indexes for token-based project access"
-          done()
-
-
-exports.rollback = (client, done) ->
-  done()
-
-
-process.on 'SIGINT', handleExit
-process.on 'SIGHUP', handleExit

@@ -1,31 +0,0 @@
-#This is needed because we forgot to add track changes into the default settings
-Settings = require "settings-sharelatex"
-fs = require("fs")
-mongojs = require("mongojs")
-ObjectId = mongojs.ObjectId
-db = mongojs(Settings.mongo.url, ['users'])
-_ = require("underscore")
-
-
-handleExit = () ->
-  console.log('Got signal. Shutting down.')
-
-
-process.on 'SIGINT', handleExit
-process.on 'SIGHUP', handleExit
-
-
-exports.migrate = (client, done=()->) ->
-  patch = {
-    $set: {
-      'features.trackChanges': true
-    }
-  }
-  console.log ">> enabling trackChanges feature: ", patch
-  db.users.update {}, patch, {multi: true}, (err) ->
-    console.log "finished enabling trackChanges feature"
-    return done(err)
-
-
-exports.rollback = (client, done) ->
-  done()

@@ -1,49 +0,0 @@
-const Settings = require('settings-sharelatex')
-const mongojs = require('mongojs')
-const db = mongojs(Settings.mongo.url, ['users'])
-const async = require('async')
-
-const handleExit = () => console.log('Got signal. Shutting down.')
-process.on('SIGINT', handleExit)
-process.on('SIGHUP', handleExit)
-
-const initUserEmailsAttribute = (user, callback) => {
-  const update = {
-    $set: {
-      emails: [
-        {
-          email: user.email,
-          createdAt: new Date()
-        }
-      ]
-    }
-  }
-  db.users.update({ _id: user._id }, update, callback)
-}
-
-const updateAllUsersEmailsAttribute = (users, callback) => {
-  console.log(`updating ${users.length} users`)
-  async.eachSeries(users, initUserEmailsAttribute, callback)
-}
-
-exports.migrate = (client, done) =>
-  db.users.find(
-    { emails: { $exists: false } },
-    { email: 1 },
-    (error, users) => {
-      if (error) {
-        callback(error)
-      } else {
-        updateAllUsersEmailsAttribute(users, done)
-      }
-    }
-  )
-
-exports.rollback = (client, done) => {
-  const update = {
-    $unset: {
-      emails: 1
-    }
-  }
-  db.users.update({ emails: { $exists: true } }, update, done)
-}

@@ -1,9 +0,0 @@
-If a migration is stopped midway, it will start again from the beginning the next time it is run.
-
-To see which migrations have run, use db.getCollection('_migrations').find(); note that db._migrations.find() does not work.
-
-When testing, to roll back a migration run:
-
-```
-./node_modules/east/bin/east rollback 5 --adapter east-mongo --url mongodb://localhost:27017/sharelatex
-```

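For context, the migrations removed in this commit were driven through Grunt and the `east` CLI. Below is a minimal sketch of the commands involved, assuming the /var/www/sharelatex layout and the east-mongo adapter used in the removed files; the `5` in the rollback example above is just an illustrative migration number.

```
# Compile the CoffeeScript migrations and apply any that have not run yet
# (this is what the removed bin script and the "grunt migrate" task wrapped).
cd /var/www/sharelatex && grunt migrate -v

# Equivalent direct invocation, as in the removed shell:migrate Grunt task:
./node_modules/east/bin/east migrate --adapter east-mongo --url mongodb://localhost:27017/sharelatex

# Applied migrations are recorded in the _migrations collection; from the mongo shell:
# db.getCollection('_migrations').find()
```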