Mirror of https://github.com/overleaf/overleaf.git
Merge pull request #71 from sharelatex/bg-merge-state-and-resource-list-files

merge state and resource list files

Commit 0d613a6480: 6 changed files with 61 additions and 73 deletions
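In short, this change folds the separate ".project-resource-list" file into ".project-sync-state": ResourceListManager is deleted, ResourceStateManager.saveProjectState now writes the resource paths plus a trailing "stateHash:" line into a single file, and checkProjectStateMatches reads that file back to validate incremental compiles and return the previous list of resources. Under this scheme the sync-state file would look roughly like the sketch below (the two paths are invented for illustration; the "stateHash:" prefix comes from the diff and the example hash value from the unit tests):

	chapters/introduction.tex
	main.tex
	stateHash:0123456789abcdef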
OutputFileFinder.coffee

@@ -5,7 +5,7 @@ spawn = require("child_process").spawn
 logger = require "logger-sharelatex"
 
 module.exports = OutputFileFinder =
-	findOutputFiles: (resources, directory, callback = (error, outputFiles) ->) ->
+	findOutputFiles: (resources, directory, callback = (error, outputFiles, allFiles) ->) ->
 		incomingResources = {}
 		for resource in resources
 			incomingResources[resource.path] = true
@@ -16,7 +16,6 @@ module.exports = OutputFileFinder =
 			if error?
 				logger.err err:error, "error finding all output files"
 				return callback(error)
-			jobs = []
 			outputFiles = []
 			for file in allFiles
 				if !incomingResources[file]
@@ -24,7 +23,7 @@ module.exports = OutputFileFinder =
 						path: file
 						type: file.match(/\.([^\.]+)$/)?[1]
 					}
-			callback null, outputFiles
+			callback null, outputFiles, allFiles
 
 	_getAllFiles: (directory, _callback = (error, fileList) ->) ->
 		callback = (error, fileList) ->

ResourceListManager.coffee (deleted)

@@ -1,25 +0,0 @@
-Path = require "path"
-fs = require "fs"
-logger = require "logger-sharelatex"
-settings = require("settings-sharelatex")
-SafeReader = require "./SafeReader"
-
-module.exports = ResourceListManager =
-
-	# This file is a list of the input files for the project, one per
-	# line, used to identify output files (i.e. files not on this list)
-	# when the incoming request is incremental.
-	RESOURCE_LIST_FILE: ".project-resource-list"
-
-	saveResourceList: (resources, basePath, callback = (error) ->) ->
-		resourceListFile = Path.join(basePath, @RESOURCE_LIST_FILE)
-		resourceList = (resource.path for resource in resources)
-		fs.writeFile resourceListFile, resourceList.join("\n"), callback
-
-	loadResourceList: (basePath, callback = (error) ->) ->
-		resourceListFile = Path.join(basePath, @RESOURCE_LIST_FILE)
-		# limit file to 128K, compile directory is user accessible
-		SafeReader.readFile resourceListFile, 128*1024, 'utf8', (err, resourceList) ->
-			return callback(err) if err?
-			resources = ({path: path} for path in resourceList?.toString()?.split("\n") or [])
-			callback(null, resources)

ResourceStateManager.coffee

@@ -11,10 +11,11 @@ module.exports = ResourceStateManager =
 	# incremental update to be allowed.
 	#
 	# The initial value is passed in and stored on a full
-	# compile.
+	# compile, along with the list of resources..
 	#
 	# Subsequent incremental compiles must come with the same value - if
-	# not they will be rejected with a 409 Conflict response.
+	# not they will be rejected with a 409 Conflict response. The
+	# previous list of resources is returned.
 	#
 	# An incremental compile can only update existing files with new
 	# content. The sync state identifier must change if any docs or
@@ -22,7 +23,7 @@ module.exports = ResourceStateManager =
 
 	SYNC_STATE_FILE: ".project-sync-state"
 
-	saveProjectStateHash: (state, basePath, callback) ->
+	saveProjectState: (state, resources, basePath, callback = (error) ->) ->
 		stateFile = Path.join(basePath, @SYNC_STATE_FILE)
 		if not state? # remove the file if no state passed in
 			logger.log state:state, basePath:basePath, "clearing sync state"
@@ -33,14 +34,30 @@ module.exports = ResourceStateManager =
 					return callback()
 		else
 			logger.log state:state, basePath:basePath, "writing sync state"
-			fs.writeFile stateFile, state, {encoding: 'ascii'}, callback
+			resourceList = (resource.path for resource in resources)
+			fs.writeFile stateFile, [resourceList..., "stateHash:#{state}"].join("\n"), callback
 
-	checkProjectStateHashMatches: (state, basePath, callback) ->
+	checkProjectStateMatches: (state, basePath, callback = (error, resources) ->) ->
 		stateFile = Path.join(basePath, @SYNC_STATE_FILE)
-		SafeReader.readFile stateFile, 64, 'ascii', (err, oldState) ->
+		SafeReader.readFile stateFile, 128*1024, 'utf8', (err, result) ->
 			return callback(err) if err?
-			logger.log state:state, oldState: oldState, basePath:basePath, stateMatches: !(state isnt oldState), "checking sync state"
-			if state isnt oldState
+			[resourceList..., oldState] = result?.toString()?.split("\n") or []
+			newState = "stateHash:#{state}"
+			logger.log state:state, oldState: oldState, basePath:basePath, stateMatches: (newState is oldState), "checking sync state"
+			if newState isnt oldState
 				return callback new Errors.FilesOutOfSyncError("invalid state for incremental update")
 			else
-				callback(null)
+				resources = ({path: path} for path in resourceList)
+				callback(null, resources)
+
+	checkResourceFiles: (resources, allFiles, directory, callback = (error) ->) ->
+		# check if any of the input files are not present in list of files
+		seenFile = {}
+		for file in allFiles
+			seenFile[file] = true
+		missingFiles = (resource.path for resource in resources when not seenFile[resource.path])
+		if missingFiles.length > 0
+			logger.err missingFiles:missingFiles, dir:directory, allFiles:allFiles, resources:resources, "missing input files for project"
+			return callback new Errors.FilesOutOfSyncError("resource files missing in incremental update")
+		else
+			callback()

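The rewritten checkProjectStateMatches above relies on CoffeeScript splat destructuring to split the combined file back into the resource list and the trailing state line. A minimal sketch of that parsing step, using invented file contents:

	# hypothetical contents of .project-sync-state
	result = "main.tex\nbibliography.bib\nstateHash:0123456789abcdef"
	[resourceList..., oldState] = result.split("\n")
	# resourceList is ["main.tex", "bibliography.bib"]
	# oldState is "stateHash:0123456789abcdef"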
ResourceWriter.coffee

@@ -5,7 +5,6 @@ async = require "async"
 mkdirp = require "mkdirp"
 OutputFileFinder = require "./OutputFileFinder"
 ResourceStateManager = require "./ResourceStateManager"
-ResourceListManager = require "./ResourceListManager"
 Metrics = require "./Metrics"
 logger = require "logger-sharelatex"
 settings = require("settings-sharelatex")
@@ -17,11 +16,11 @@ module.exports = ResourceWriter =
 	syncResourcesToDisk: (request, basePath, callback = (error, resourceList) ->) ->
 		if request.syncType is "incremental"
 			logger.log project_id: request.project_id, user_id: request.user_id, "incremental sync"
-			ResourceStateManager.checkProjectStateHashMatches request.syncState, basePath, (error) ->
+			ResourceStateManager.checkProjectStateMatches request.syncState, basePath, (error, resourceList) ->
 				return callback(error) if error?
-				ResourceListManager.loadResourceList basePath, (error, resourceList) ->
+				ResourceWriter._removeExtraneousFiles resourceList, basePath, (error, outputFiles, allFiles) ->
 					return callback(error) if error?
-					ResourceWriter._removeExtraneousFiles resourceList, basePath, (error) =>
+					ResourceStateManager.checkResourceFiles resourceList, allFiles, basePath, (error) ->
 						return callback(error) if error?
 						ResourceWriter.saveIncrementalResourcesToDisk request.project_id, request.resources, basePath, (error) ->
 							return callback(error) if error?
@@ -30,11 +29,9 @@ module.exports = ResourceWriter =
 			logger.log project_id: request.project_id, user_id: request.user_id, "full sync"
 			@saveAllResourcesToDisk request.project_id, request.resources, basePath, (error) ->
 				return callback(error) if error?
-				ResourceStateManager.saveProjectStateHash request.syncState, basePath, (error) ->
+				ResourceStateManager.saveProjectState request.syncState, request.resources, basePath, (error) ->
 					return callback(error) if error?
-					ResourceListManager.saveResourceList request.resources, basePath, (error) =>
-						return callback(error) if error?
-						callback(null, request.resources)
+					callback(null, request.resources)
 
 	saveIncrementalResourcesToDisk: (project_id, resources, basePath, callback = (error) ->) ->
 		@_createDirectory basePath, (error) =>
@@ -65,13 +62,13 @@ module.exports = ResourceWriter =
 		else
 			return callback()
 
-	_removeExtraneousFiles: (resources, basePath, _callback = (error) ->) ->
+	_removeExtraneousFiles: (resources, basePath, _callback = (error, outputFiles, allFiles) ->) ->
 		timer = new Metrics.Timer("unlink-output-files")
-		callback = (error) ->
+		callback = (error, result...) ->
 			timer.done()
-			_callback(error)
+			_callback(error, result...)
 
-		OutputFileFinder.findOutputFiles resources, basePath, (error, outputFiles) ->
+		OutputFileFinder.findOutputFiles resources, basePath, (error, outputFiles, allFiles) ->
 			return callback(error) if error?
 
 			jobs = []
@@ -81,7 +78,7 @@ module.exports = ResourceWriter =
 				should_delete = true
 				if path.match(/^output\./) or path.match(/\.aux$/) or path.match(/^cache\//) # knitr cache
 					should_delete = false
-				if path in ['.project-resource-list', '.project-sync-state']
+				if path == '.project-sync-state'
 					should_delete = false
 				if path == "output.pdf" or path == "output.dvi" or path == "output.log" or path == "output.xdv"
 					should_delete = true
@@ -90,7 +87,9 @@ module.exports = ResourceWriter =
 				if should_delete
 					jobs.push (callback) -> ResourceWriter._deleteFileIfNotDirectory Path.join(basePath, path), callback
 
-			async.series jobs, callback
+			async.series jobs, (error) ->
+				return callback(error) if error?
+				callback(null, outputFiles, allFiles)
 
 	_deleteFileIfNotDirectory: (path, callback = (error) ->) ->
 		fs.stat path, (error, stat) ->

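For orientation, the incremental branch of syncResourcesToDisk now chains the new helpers in the order sketched below. This is a simplified restatement of the diff above, with the error handling dropped and with request, basePath and callback assumed to be in scope:

	ResourceStateManager.checkProjectStateMatches request.syncState, basePath, (error, resourceList) ->
		ResourceWriter._removeExtraneousFiles resourceList, basePath, (error, outputFiles, allFiles) ->
			ResourceStateManager.checkResourceFiles resourceList, allFiles, basePath, (error) ->
				ResourceWriter.saveIncrementalResourcesToDisk request.project_id, request.resources, basePath, callback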
SafeReader.coffee

@@ -1,4 +1,5 @@
 fs = require "fs"
+logger = require "logger-sharelatex"
 
 module.exports = SafeReader =
 
@@ -7,7 +8,7 @@ module.exports = SafeReader =
 
 	readFile: (file, size, encoding, callback = (error, result) ->) ->
 		fs.open file, 'r', (err, fd) ->
-			return callback() if err? and err.code is 'ENOENT'
+			return callback() if err? and err.code is 'ENOENT'
 			return callback(err) if err?
 
 			# safely return always closing the file
@@ -21,4 +22,6 @@ module.exports = SafeReader =
 			fs.read fd, buff, 0, buff.length, 0, (err, bytesRead, buffer) ->
 				return callbackWithClose(err) if err?
 				result = buffer.toString(encoding, 0, bytesRead)
+				if bytesRead is size
+					logger.error file:file, size:size, bytesRead:bytesRead, "file truncated"
 				callbackWithClose(null, result)

ResourceWriterTests.coffee

@@ -10,7 +10,6 @@ describe "ResourceWriter", ->
 			"fs": @fs =
 				mkdir: sinon.stub().callsArg(1)
 				unlink: sinon.stub().callsArg(1)
-			"./ResourceListManager": @ResourceListManager = {}
 			"./ResourceStateManager": @ResourceStateManager = {}
 			"wrench": @wrench = {}
 			"./UrlCache" : @UrlCache = {}
@@ -33,10 +32,7 @@ describe "ResourceWriter", ->
 			]
 			@ResourceWriter._writeResourceToDisk = sinon.stub().callsArg(3)
 			@ResourceWriter._removeExtraneousFiles = sinon.stub().callsArg(2)
-			@ResourceStateManager.checkProjectStateHashMatches = sinon.stub().callsArg(2)
-			@ResourceStateManager.saveProjectStateHash = sinon.stub().callsArg(2)
-			@ResourceListManager.saveResourceList = sinon.stub().callsArg(2)
-			@ResourceListManager.loadResourceList = sinon.stub().callsArg(1)
+			@ResourceStateManager.saveProjectState = sinon.stub().callsArg(3)
 			@ResourceWriter.syncResourcesToDisk({
 				project_id: @project_id
 				syncState: @syncState = "0123456789abcdef"
@@ -54,14 +50,9 @@ describe "ResourceWriter", ->
 				.calledWith(@project_id, resource, @basePath)
 				.should.equal true
 
-		it "should store the sync state", ->
-			@ResourceStateManager.saveProjectStateHash
-				.calledWith(@syncState, @basePath)
-				.should.equal true
-
-		it "should save the resource list", ->
-			@ResourceListManager.saveResourceList
-				.calledWith(@resources, @basePath)
+		it "should store the sync state and resource list", ->
+			@ResourceStateManager.saveProjectState
+				.calledWith(@syncState, @resources, @basePath)
 				.should.equal true
 
 		it "should call the callback", ->
@@ -73,11 +64,10 @@ describe "ResourceWriter", ->
 				"resource-1-mock"
 			]
 			@ResourceWriter._writeResourceToDisk = sinon.stub().callsArg(3)
-			@ResourceWriter._removeExtraneousFiles = sinon.stub().callsArg(2)
-			@ResourceStateManager.checkProjectStateHashMatches = sinon.stub().callsArg(2)
-			@ResourceStateManager.saveProjectStateHash = sinon.stub().callsArg(2)
-			@ResourceListManager.saveResourceList = sinon.stub().callsArg(2)
-			@ResourceListManager.loadResourceList = sinon.stub().callsArgWith(1, null, @resources)
+			@ResourceWriter._removeExtraneousFiles = sinon.stub().callsArgWith(2, null, @outputFiles = [], @allFiles = [])
+			@ResourceStateManager.checkProjectStateMatches = sinon.stub().callsArgWith(2, null, @resources)
+			@ResourceStateManager.saveProjectState = sinon.stub().callsArg(3)
+			@ResourceStateManager.checkResourceFiles = sinon.stub().callsArg(3)
 			@ResourceWriter.syncResourcesToDisk({
 				project_id: @project_id,
 				syncType: "incremental",
@@ -86,7 +76,7 @@ describe "ResourceWriter", ->
 			}, @basePath, @callback)
 
 		it "should check the sync state matches", ->
-			@ResourceStateManager.checkProjectStateHashMatches
+			@ResourceStateManager.checkProjectStateMatches
 				.calledWith(@syncState, @basePath)
 				.should.equal true
 
@@ -95,6 +85,11 @@ describe "ResourceWriter", ->
 				.calledWith(@resources, @basePath)
 				.should.equal true
 
+		it "should check each resource exists", ->
+			@ResourceStateManager.checkResourceFiles
+				.calledWith(@resources, @allFiles, @basePath)
+				.should.equal true
+
 		it "should write each resource to disk", ->
 			for resource in @resources
 				@ResourceWriter._writeResourceToDisk
@@ -109,7 +104,7 @@ describe "ResourceWriter", ->
 			@resources = [
 				"resource-1-mock"
 			]
-			@ResourceStateManager.checkProjectStateHashMatches = sinon.stub().callsArgWith(2, @error = new Error())
+			@ResourceStateManager.checkProjectStateMatches = sinon.stub().callsArgWith(2, @error = new Error())
 			@ResourceWriter.syncResourcesToDisk({
 				project_id: @project_id,
 				syncType: "incremental",
@@ -118,7 +113,7 @@ describe "ResourceWriter", ->
 			}, @basePath, @callback)
 
 		it "should check whether the sync state matches", ->
-			@ResourceStateManager.checkProjectStateHashMatches
+			@ResourceStateManager.checkProjectStateMatches
 				.calledWith(@syncState, @basePath)
 				.should.equal true