Mirror of https://github.com/overleaf/overleaf.git, synced 2024-11-21 20:47:08 -05:00

Commit bfe4dd018b (parent 8d3a57bd4b): added migrations with east package

4 changed files with 64 additions and 35 deletions
Gruntfile (modified):

@@ -7,6 +7,8 @@ semver = require "semver"
 knox = require "knox"
 crypto = require "crypto"
 async = require "async"
+settings = require("settings-sharelatex")
+

 SERVICES = [{
   name: "web"
@@ -56,6 +58,7 @@ module.exports = (grunt) ->
   grunt.loadNpmTasks 'grunt-available-tasks'
   grunt.loadNpmTasks 'grunt-concurrent'
   grunt.loadNpmTasks "grunt-contrib-coffee"
+  grunt.loadNpmTasks "grunt-shell"


   execute = {}
@@ -83,7 +86,9 @@ module.exports = (grunt) ->
       options:
         bare:true

+    shell:
+      migrate:
+        command: "./node_modules/east/bin/east migrate --adapter east-mongo --url #{settings.mongo.url}"
+
     availabletasks:
       tasks:
@@ -162,7 +167,7 @@ module.exports = (grunt) ->
     Helpers.buildUpstartScripts()


-  grunt.registerTask 'migrate', 'run migrations', ['coffee:migrate']
+  grunt.registerTask 'migrate', "compile migrations and run them", ['coffee:migrate', 'shell:migrate']


   Helpers =
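Taken together, the Gruntfile hunks above wire the east CLI into grunt: a grunt-shell task shells out to east with the east-mongo adapter and the app's Mongo URL, and the 'migrate' alias now compiles the CoffeeScript migrations before running them. A condensed sketch of the resulting configuration, assuming the surrounding grunt.initConfig wrapper and the coffee:migrate task definition, which are not shown in the diff:

settings = require("settings-sharelatex")

module.exports = (grunt) ->
  grunt.loadNpmTasks "grunt-shell"

  grunt.initConfig
    shell:
      migrate:
        # run the compiled migrations via the east CLI against the app's Mongo instance
        command: "./node_modules/east/bin/east migrate --adapter east-mongo --url #{settings.mongo.url}"

  # 'grunt migrate' first runs the coffee compile step, then applies the compiled migrations with east
  grunt.registerTask 'migrate', "compile migrations and run them", ['coffee:migrate', 'shell:migrate']

With this in place the migrations are run with 'grunt migrate' rather than by invoking the script directly.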
Migration script (moves project doc lines into the docs collection), modified:

@@ -1,8 +1,8 @@
+Settings = require "settings-sharelatex"
 fs = require("fs")
 mongojs = require("mongojs")
 ObjectId = mongojs.ObjectId
-db = mongojs('sharelatex', ['projects', 'docs'])
+db = mongojs(Settings.mongo.url, ['projects', 'docs'])
 _ = require("lodash")
 async = require("async")
 exec = require("child_process").exec
@@ -19,13 +19,13 @@ checkIfFileHasBeenProccessed = (project_id, callback)->
   exec "grep #{project_id} #{finished_projects_path}", (error, results) ->
     hasBeenProcessed = _.include(results, project_id)
     #console.log hasBeenProcessed, project_id
-    callback(null, hasBeenProcessed)
+    callback(error, hasBeenProcessed)

 loadProjectIds = (callback)->
+  console.log "loading project ids from #{all_projects_path}"
   fs.readFile all_projects_path, "utf-8", (err, data)->
-    console.log data.length
     ids = data.split("\n")
-    console.log ids.length
+    console.log "loaded #{ids.length} project ids from #{all_projects_path}"
     callback err, ids

 getAndWriteProjectids = (callback)->
@@ -52,7 +52,6 @@ getAllDocs = (project_id, callback = (error, docs) ->) ->
   db.projects.findOne _id:ObjectId(project_id), (error, project) ->
     return callback(error) if error?
     if !project?
-      console.error("No such project: #{project_id}")
       return callback("no such project #{project_id}")
     findAllDocsInProject project, (error, docs) ->
       return callback(error) if error?
@@ -82,17 +81,34 @@ _findAllDocsInFolder = (folder = {}) ->
   return docs

 insertDocIntoDocCollection = (project_id, doc_id, lines, oldRev, callback)->
+  if !project_id?
+    return callback("no project id")
+  if !doc_id?
+    return callback("no doc id. project=#{project_id}")
+  if !lines?
+    return callback("no lines")
   update = {}
-  update["_id"] = ObjectId(doc_id)
+  update["_id"] = ObjectId(doc_id.toString())
   update["lines"] = lines
   update["project_id"] = ObjectId(project_id)
-  update["rev"] = oldRev
-  db.docs.insert _id: ObjectId(doc_id), callback
+  update["rev"] = oldRev || 0
+  # console.log update
+  db.docs.insert update, callback

 saveDocsIntoMongo = (project_id, docs, callback)->
   jobs = _.map docs, (doc)->
     (cb)->
-      insertDocIntoDocCollection project_id, doc._id, project_id.lines, doc.rev, cb
+      if !doc?
+        console.error "null doc in project #{project_id}"
+        return cb()
+      insertDocIntoDocCollection project_id, doc._id, doc.lines, doc.rev, (err)->
+        if err?.code == 11000 #duplicate key, doc already in there so its not a problem.
+          err = undefined
+        if err?
+          console.log "error inserting doc into doc collection", err
+        cb(err)


   async.series jobs, callback
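The hunk above also makes the per-doc insert safe to re-run: the old call passed project_id.lines (a bug, since the lines live on the doc) and inserted only a bare _id stub, while the new code inserts the full update document and treats a Mongo duplicate-key error as success. A minimal sketch of that "insert, but tolerate re-runs" pattern in isolation; the helper name is illustrative, not from the commit:

# insert a document, but treat "already there" as success so an interrupted
# migration can be restarted without failing on rows it has already written
# usage: insertIgnoringDuplicates db.docs, update, cb
insertIgnoringDuplicates = (collection, doc, callback) ->
  collection.insert doc, (err) ->
    # 11000 is Mongo's duplicate-key error code: the document was written by an
    # earlier run, so clear the error and carry on
    err = undefined if err?.code == 11000
    callback(err)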
@@ -101,33 +117,36 @@ processNext = (project_id, callback)->
   if hasBeenProcessed
     console.log "#{project_id} already procssed, skipping"
     return callback()
+  console.log "#{project_id} processing"
   getAllDocs project_id, (err, docs)->
     if err?
       console.error err, project_id, "could not get all docs"
-      return callback()
-    saveDocsIntoMongo project_id, docs, ->
-      if err?
-        console.error err, project_id, "could not save docs into mongo"
-        return callback()
-      markProjectAsProcessed project_id, ->
-        callback()
+      return callback(err)
+    else
+      saveDocsIntoMongo project_id, docs, (err)->
+        if err?
+          console.error err, project_id, "could not save docs into mongo"
+          return callback(err)
+        markProjectAsProcessed project_id, (err)->
+          setTimeout(
+            -> callback(err)
+          ,100)


-getProjectIds (err, ids)->
-  printProgress()
-  jobs = _.map ids, (id)->
-    return (cb)->
-      processNext(id, cb)
-  async.series jobs, (err)->
-    if err?
-      console.error err, "at end of jobs"
-    else
-      console.log "finished"
-    process.exit()
-
-exports.up = (next)->
-  next()
-
-exports.down = (next)->
+exports.migrate = (client, done)->
+  getProjectIds (err, ids)->
+    printProgress()
+    jobs = _.map ids, (id)->
+      return (cb)->
+        processNext(id, cb)
+    async.series jobs, (err)->
+      if err?
+        console.error err, "at end of jobs"
+      else
+        console.log "finished"
+      done(err)
+
+exports.rollback = (next)->
   next()
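The new exports are the interface the east runner calls. A minimal sketch of the module shape as used above; the client argument comes from the east-mongo adapter and is ignored here because the script opens its own mongojs connection, and rollback is left as a no-op, exactly as in the diff:

# skeleton of an east migration module, mirroring the shapes used in this commit
exports.migrate = (client, done) ->
  # do the work, then signal completion; pass an error to abort the run
  done()

exports.rollback = (next) ->
  # nothing to undo for this migration
  next()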
migrations/about_migrations.md (new file, 2 lines):

@@ -0,0 +1,2 @@
+* if migration is stopped mid way it will start at the beginging next time
+* to see the run migrations do db.getCollection('_migrations').find() you can't do db._migrations.find()
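The second note is about the mongo shell, which does not resolve a collection name starting with an underscore as a property, hence the explicit getCollection lookup. From application code the same check can be done with mongojs, along the lines of this sketch; the '_migrations' collection name is taken from the note above and is assumed to be where east-mongo keeps its bookkeeping:

# list the migrations that east has already applied
mongojs = require "mongojs"
Settings = require "settings-sharelatex"

db = mongojs(Settings.mongo.url)
db.collection("_migrations").find (err, applied) ->
  console.error err if err?
  console.log applied unless err?
  db.close()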
package.json (modified):

@@ -4,6 +4,9 @@
   "description": "An online collaborative LaTeX editor",
   "dependencies": {
     "async": "^0.9.0",
+    "east": "^0.2.3",
+    "east-mongo": "^0.1.2",
+    "grunt-shell": "^1.1.1",
     "lodash": "^3.0.0",
     "mongojs": "^0.18.1",
     "rimraf": "~2.2.6",