Mirror of https://github.com/overleaf/overleaf.git

add db queue file for global db query queues
commit 759988401b
parent 1080c2c428
3 changed files with 46 additions and 31 deletions

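UrlCache previously created its own async.queue for serialising database work. This commit moves that queue into a new shared module, DbQueue.coffee, and has both UrlCache and ProjectPersistenceManager push their database jobs onto the single global queue instead.
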
services/clsi/app/coffee/DbQueue.coffee (new file)

@@ -0,0 +1,13 @@
+async = require "async"
+
+queue = async.queue((task, cb)->
+  console.log("running task")
+  task(cb)
+, 1)
+
+queue.drain = ()->
+  console.log('HI all items have been processed')
+
+module.exports =
+  queue: queue

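DbQueue.coffee wraps async.queue with a concurrency of 1, so queued jobs run one at a time, and its drain handler logs once the queue empties. The hunks below all follow the same calling pattern, sketched here for reference. The sketch is not part of the commit: updateSomething and SomeModel are hypothetical names, and db stands in for the service's existing database module.

# Illustrative sketch only; not part of this commit.
dbQueue = require "./DbQueue"
db = require "./db"

updateSomething = (id, callback = (error) ->) ->
  # The job receives the queue worker's cb and must call it when the
  # database work finishes, which releases the queue for the next job.
  job = (cb) ->
    db.SomeModel.find(where: {id: id})
      .then((record) -> cb null, record)
      .error cb
  # async invokes `callback` once the job has called cb.
  dbQueue.queue.push job, callback
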
services/clsi/app/coffee/ProjectPersistenceManager.coffee

@@ -1,6 +1,7 @@
 UrlCache = require "./UrlCache"
 CompileManager = require "./CompileManager"
 db = require "./db"
+dbQueue = require "./DbQueue"
 async = require "async"
 logger = require "logger-sharelatex"
 oneDay = 24 * 60 * 60 * 1000

@@ -11,15 +12,18 @@ module.exports = ProjectPersistenceManager =
   EXPIRY_TIMEOUT: Settings.project_cache_length_ms || oneDay * 2.5

   markProjectAsJustAccessed: (project_id, callback = (error) ->) ->
+    job = (cb)->
       console.log("markProjectAsJustAccessed")
       db.Project.findOrCreate(where: {project_id: project_id})
         .spread(
           (project, created) ->
             project.updateAttributes(lastAccessed: new Date())
-            .then(() -> callback())
+            .then(() -> cb())
-            .error callback
+            .error cb
         )
-        .error callback
+        .error cb
+    dbQueue.queue.push(job, callback)

   clearExpiredProjects: (callback = (error) ->) ->
     ProjectPersistenceManager._findExpiredProjectIds (error, project_ids) ->

@@ -54,16 +58,22 @@ module.exports = ProjectPersistenceManager =
       callback()

   _clearProjectFromDatabase: (project_id, callback = (error) ->) ->
+    job = (cb)->
       console.log("_clearProjectFromDatabase")
       db.Project.destroy(where: {project_id: project_id})
         .then(() -> callback())
         .error callback
+    dbQueue.queue.push(job, callback)

   _findExpiredProjectIds: (callback = (error, project_ids) ->) ->
+    job = (cb)->
       console.log("_findExpiredProjectIds")
       db.Project.findAll(where: ["lastAccessed < ?", new Date(Date.now() - ProjectPersistenceManager.EXPIRY_TIMEOUT)])
         .then((projects) ->
           callback null, projects.map((project) -> project.project_id)
         ).error callback
+    dbQueue.queue.push(job, callback)

 logger.log {EXPIRY_TIMEOUT: ProjectPersistenceManager.EXPIRY_TIMEOUT}, "project assets kept timeout"

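Note on the hunks above: the completion callback passed to dbQueue.queue.push only runs after the job calls the worker's cb, which is why markProjectAsJustAccessed switches its promise handlers from callback to cb. _clearProjectFromDatabase and _findExpiredProjectIds, by contrast, still invoke the caller's callback directly inside their jobs in this commit.
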
services/clsi/app/coffee/UrlCache.coffee

@@ -1,4 +1,5 @@
 db = require("./db")
+dbQueue = require "./DbQueue"
 UrlFetcher = require("./UrlFetcher")
 Settings = require("settings-sharelatex")
 crypto = require("crypto")

@@ -6,15 +7,6 @@ fs = require("fs")
 logger = require "logger-sharelatex"
 async = require "async"

-queue = async.queue((task, cb)->
-  console.log("running task")
-  task(cb)
-, 1)
-
-console.log("hi there queue")
-queue.drain = ()->
-  console.log('HI all items have been processed')
-
 module.exports = UrlCache =
   downloadUrlToFile: (project_id, url, destPath, lastModified, callback = (error) ->) ->
     UrlCache._ensureUrlIsInCache project_id, url, lastModified, (error, pathToCachedUrl) =>

@@ -108,7 +100,7 @@ module.exports = UrlCache =
       db.UrlCache.find(where: { url: url, project_id: project_id })
         .then((urlDetails) -> cb null, urlDetails)
         .error cb
-    queue.push job, callback
+    dbQueue.queue.push job, callback

   _updateOrCreateUrlDetails: (project_id, url, lastModified, callback = (error) ->) ->
     job = (cb)->

@@ -121,7 +113,7 @@ module.exports = UrlCache =
         .error(cb)
       )
       .error cb
-    queue.push(job, callback)
+    dbQueue.queue.push(job, callback)

   _clearUrlDetails: (project_id, url, callback = (error) ->) ->
     job = (cb)->

@@ -129,7 +121,7 @@ module.exports = UrlCache =
       db.UrlCache.destroy(where: {url: url, project_id: project_id})
        .then(() -> cb null)
        .error cb
-    queue.push(job, callback)
+    dbQueue.queue.push(job, callback)

   _findAllUrlsInProject: (project_id, callback = (error, urls) ->) ->

@@ -141,7 +133,7 @@ module.exports = UrlCache =
         cb null, urlEntries.map((entry) -> entry.url)
       )
       .error cb
-    queue.push(job, callback)
+    dbQueue.queue.push(job, callback)