Mirror of https://github.com/overleaf/overleaf.git (synced 2024-11-21 20:47:08 -05:00).
Merge pull request #114 from overleaf/ae-remove-coffee: "Delete CoffeeScript scripts".
This commit is contained in commit 93b87d5a00.
2 changed files with 0 additions and 110 deletions.
|
@ -1,56 +0,0 @@
|
|||
# Benchmark fixture: build a synthetic workload of documents and history
# updates to push through the track-changes pipeline.
request = require "request"
rclient = require("redis").createClient()
async = require "async"
{ObjectId} = require("./app/js/mongojs")

# Size of the synthetic workload.
NO_OF_DOCS = 100
NO_OF_UPDATES = 200

# Every update is attributed to the same fake user.
user_id = ObjectId().toString()

# NO_OF_UPDATES single-character inserts at position 0, with strictly
# increasing version numbers.
updates = for i in [1..NO_OF_UPDATES]
  {
    op: { i: "a", p: 0 }
    v: i
    meta: ts: new Date(), user_id: user_id
  }
# Pre-serialise once; the same payloads are pushed onto every doc's queue.
jsonUpdates = (JSON.stringify(u) for u in updates)

# One synthetic doc id per document in the workload.
doc_ids = (ObjectId().toString() for i in [1..NO_OF_DOCS])
# Seed Redis with the synthetic history queue for every document, one
# queue at a time.  callback(error) fires once all pushes are done.
populateRedis = (callback = (error) ->) ->
  console.log "Populating Redis queues..."
  # .map gives each task its own doc_id binding, replacing the classic
  # `for ... do (doc_id) ->` closure dance.
  tasks = doc_ids.map (doc_id) ->
    (cb) ->
      rclient.rpush "UncompressedHistoryOps:#{doc_id}", jsonUpdates..., cb
  async.series tasks, (error) ->
    return callback(error) if error?
    console.log "Done."
    callback()
||||
# Ask the track-changes service on localhost:3014 to flush every doc's
# history, all in parallel, logging how many flushes are in flight.
# callback(error) fires once every request has completed.
flushDocs = (callback = (error) ->) ->
  console.log "Flushing docs..."
  inProgress = 0
  # One flush task per doc id; .map keeps each doc_id in its own closure.
  tasks = doc_ids.map (doc_id) ->
    (cb) ->
      inProgress++
      request.post "http://localhost:3014/doc/#{doc_id}/flush", (error) ->
        inProgress--
        console.log Date.now(), "In progress: #{inProgress}"
        cb(error)
  async.parallel tasks, (error) ->
    return callback(error) if error?
    console.log "Done."
    callback()
# Entry point: seed the Redis queues, then flush them all, then exit.
populateRedis (error) ->
  throw error if error?
  flushDocs (flushError) ->
    throw flushError if flushError?
    process.exit(0)
# fixdangling: command-line tool to re-flush docs whose history got stuck.
# Usage: fixdangling -p PORT [-f] file_of_doc_ids
Settings = require "settings-sharelatex"
logger = require "logger-sharelatex"
TrackChangesLogger = logger.initialize("track-changes").logger
async = require "async"
fs = require "fs"
request = require "request"
cli = require "cli"

mongojs = require "mongojs"
bson = require "bson"
db = mongojs(Settings.mongo.url, ["docs"])
ObjectId = mongojs.ObjectId

# -p/--port: track-changes port; -f/--force: actually POST the flush
# (without it the tool is a dry run that only prints the URLs).
options = cli.parse({
  port: ['p', 'port number for track changes', 'number'],
  force: ['f', 'actually make the fix']
});

# The single positional argument is a file of doc ids, one per line.
if cli.args.length < 1
  console.log "fixdangling -p PORT file_of_doc_ids"
  process.exit()

file = cli.args.pop()
doc_ids = fs.readFileSync(file).toString().trim().split("\n")

# Tallies reported when the run completes.
missing = 0
errored = 0
success = 0
# Re-flush one "dangling" doc: look up its project id in Mongo, then ask
# the track-changes service to flush the doc's history.  Outcomes are
# tallied in the module-level `missing` / `errored` / `success` counters;
# the callback is always invoked without an error so the surrounding
# eachSeries keeps processing the remaining doc ids.
fixDangling = (doc_id, callback) ->
  # look up project id from doc id
  db.docs.find {_id: ObjectId(doc_id)}, {project_id: 1}, (err, result) ->
    if err?
      errored++
      return callback()
    if !result? or result.length == 0
      missing++
      return callback()
    project_id = result[0].project_id
    console.log "found project_id", project_id, "for doc_id", doc_id
    url = "http://localhost:#{options.port}/project/#{project_id}/doc/#{doc_id}/flush"
    if options.force
      request.post url, (err, response, body) ->
        # Fix: a completed request is only a success if the service
        # returned a non-error status; previously a 4xx/5xx flush
        # failure was still counted as `success`.
        if err? or response.statusCode >= 300 then errored++ else success++
        callback()
    else
      # Dry run: show what would be flushed, without POSTing.
      console.log "URL:", url
      success++
      callback()
# Process the doc ids strictly one at a time, then report the totals and
# release the Mongo connection.
async.eachSeries doc_ids, fixDangling, (err) ->
  console.log "final result", err, "missing", missing, "errored", errored, "success", success
  db.close()
Loading…
Reference in a new issue