Merge pull request #6927 from overleaf/jpa-split-clsi-metrics

[misc] split clsi metrics into multiple categories

GitOrigin-RevId: 964ab57b344b92383a2937beff50139bae273a0e
This commit is contained in:
Jakob Ackermann 2022-03-01 15:09:55 +00:00 committed by Copybot
parent 958216d578
commit 264b107e39
13 changed files with 312 additions and 84 deletions

View file

@ -131,18 +131,31 @@ function timing(key, timeSpan, sampleRate = 1, opts = {}) {
} }
} }
/**
 * Record a single observation in a Prometheus histogram metric.
 *
 * The metric name is prefixed with "histogram_" before registration, and
 * the bucket upper bounds are forwarded to the underlying metric wrapper.
 *
 * @param {string} key - base metric name (registered as `histogram_<key>`)
 * @param {number} value - observed value
 * @param {number[]} buckets - histogram bucket upper bounds
 * @param {Object} [opts] - label values attached to the observation
 */
function histogram(key, value, buckets, opts = {}) {
  const prefixedKey = buildPromKey(`histogram_${key}`)
  const metric = promWrapper.metric('histogram', prefixedKey, buckets)
  metric.observe(opts, value)
  if (process.env.DEBUG_METRICS) {
    console.log('doing histogram', prefixedKey, buckets, opts)
  }
}
class Timer { class Timer {
constructor(key, sampleRate = 1, opts = {}) { constructor(key, sampleRate = 1, opts = {}, buckets) {
this.start = new Date() this.start = new Date()
key = buildPromKey(key) key = buildPromKey(key)
this.key = key this.key = key
this.sampleRate = sampleRate this.sampleRate = sampleRate
this.opts = opts this.opts = opts
this.buckets = buckets
} }
done() { done() {
const timeSpan = new Date() - this.start const timeSpan = new Date() - this.start
timing(this.key, timeSpan, this.sampleRate, this.opts) if (this.buckets) {
histogram(this.key, timeSpan, this.buckets, this.opts)
} else {
timing(this.key, timeSpan, this.sampleRate, this.opts)
}
return timeSpan return timeSpan
} }
} }
@ -181,6 +194,7 @@ module.exports.inc = inc
module.exports.count = count module.exports.count = count
module.exports.summary = summary module.exports.summary = summary
module.exports.timing = timing module.exports.timing = timing
module.exports.histogram = histogram
module.exports.Timer = Timer module.exports.Timer = Timer
module.exports.gauge = gauge module.exports.gauge = gauge
module.exports.globalGauge = globalGauge module.exports.globalGauge = globalGauge

View file

@ -29,6 +29,8 @@ const optsKey = function (opts) {
} }
const extendOpts = function (opts, labelNames) { const extendOpts = function (opts, labelNames) {
// Make a clone in order to be able to re-use opts for other kinds of metrics.
opts = Object.assign({}, opts)
for (const label of Array.from(labelNames)) { for (const label of Array.from(labelNames)) {
if (!opts[label]) { if (!opts[label]) {
opts[label] = '' opts[label] = ''
@ -49,15 +51,15 @@ const PromWrapper = {
ttlInMinutes: 0, ttlInMinutes: 0,
registry, registry,
metric(type, name) { metric(type, name, buckets) {
return metrics.get(name) || new MetricWrapper(type, name) return metrics.get(name) || new MetricWrapper(type, name, buckets)
}, },
collectDefaultMetrics: prom.collectDefaultMetrics, collectDefaultMetrics: prom.collectDefaultMetrics,
} }
class MetricWrapper { class MetricWrapper {
constructor(type, name) { constructor(type, name, buckets) {
metrics.set(name, this) metrics.set(name, this)
this.name = name this.name = name
this.instances = new Map() this.instances = new Map()
@ -70,6 +72,19 @@ class MetricWrapper {
help: name, help: name,
labelNames: ['status', 'method', 'path'], labelNames: ['status', 'method', 'path'],
}) })
case 'histogram':
return new prom.Histogram({
name,
help: name,
labelNames: [
'path',
'status_code',
'method',
'collection',
'query',
],
buckets,
})
case 'summary': case 'summary':
return new prom.Summary({ return new prom.Summary({
name, name,

View file

@ -1,3 +1,4 @@
const { promisify } = require('util')
const os = require('os') const os = require('os')
const http = require('http') const http = require('http')
const { expect } = require('chai') const { expect } = require('chai')
@ -5,6 +6,7 @@ const Metrics = require('../..')
const HOSTNAME = os.hostname() const HOSTNAME = os.hostname()
const APP_NAME = 'test-app' const APP_NAME = 'test-app'
const sleep = promisify(setTimeout)
describe('Metrics module', function () { describe('Metrics module', function () {
before(function () { before(function () {
@ -67,6 +69,60 @@ describe('Metrics module', function () {
}) })
}) })
// Verifies that Metrics.histogram() records observations into the
// configured buckets and exposes them under the 'histogram_' name prefix.
describe('histogram()', function () {
it('collects in buckets', async function () {
const buckets = [10, 100, 1000]
Metrics.histogram('distance', 10, buckets)
Metrics.histogram('distance', 20, buckets)
Metrics.histogram('distance', 100, buckets)
Metrics.histogram('distance', 200, buckets)
Metrics.histogram('distance', 1000, buckets)
Metrics.histogram('distance', 2000, buckets)
// 10 + 20 + 100 + 200 + 1000 + 2000 = 3330
const sum = await getSummarySum('histogram_distance')
expect(sum).to.equal(3330)
// Bucket counts are cumulative ('le' = less-or-equal): each bucket
// includes all observations that also fit in the smaller buckets,
// and '+Inf' counts every observation.
await checkHistogramValues('histogram_distance', {
10: 1,
100: 3,
1000: 5,
'+Inf': 6,
})
})
})
describe('Timer', function () {
beforeEach('collect timings', async function () {
const buckets = [10, 100, 1000]
// Record nine timings (~1ms, ~15ms, ~105ms — three of each) twice over:
// once with explicit buckets (emits a histogram) and once without
// (falls back to the timing summary).
for (const duration of [1, 1, 1, 15, 15, 15, 105, 105, 105]) {
const withBuckets = new Metrics.Timer('height', 1, {}, buckets)
const withOutBuckets = new Metrics.Timer('depth', 1, {})
await sleep(duration)
withBuckets.done()
withOutBuckets.done()
}
})
it('with buckets', async function () {
// Cumulative bucket counts: three timings per bucket bound.
await checkHistogramValues('histogram_height', {
10: 3,
100: 6,
1000: 9,
'+Inf': 9,
})
})
it('without buckets', async function () {
// Quantile values are checked with a tolerance since the measured
// sleep durations vary slightly between runs.
await checkSummaryValues('timer_depth', {
0.01: 1,
0.05: 1,
0.5: 15,
0.9: 105,
0.95: 105,
0.99: 105,
0.999: 105,
})
})
})
describe('gauge()', function () { describe('gauge()', function () {
it('records values', async function () { it('records values', async function () {
Metrics.gauge('water_level', 1.5) Metrics.gauge('water_level', 1.5)
@ -225,6 +281,38 @@ async function getSummarySum(key) {
return null return null
} }
/**
 * Assert that the histogram metric `key` reports exactly the given
 * cumulative bucket counts, keyed by the 'le' (less-or-equal) label.
 * Samples without an 'le' label (sum/count series) are ignored.
 */
async function checkHistogramValues(key, values) {
  const item = await getMetric(key).get()
  const found = {}
  for (const sample of item.values) {
    const le = sample.labels.le
    if (le) {
      found[le] = sample.value
    }
  }
  expect(found).to.deep.equal(values)
  return null
}
/**
 * Assert that the summary metric `key` reports each expected quantile
 * value within a +/-5 tolerance. Samples without a 'quantile' label
 * (sum/count series) are ignored.
 */
async function checkSummaryValues(key, values) {
  const item = await getMetric(key).get()
  const found = {}
  for (const sample of item.values) {
    const quantile = sample.labels.quantile
    if (quantile) {
      found[quantile] = sample.value
    }
  }
  for (const [quantile, expected] of Object.entries(values)) {
    expect(found[quantile]).to.be.within(
      expected - 5,
      expected + 5,
      `quantile: ${quantile}`
    )
  }
  return null
}
async function getMetricValue(key) { async function getMetricValue(key) {
const metrics = await Metrics.register.getMetricsAsJSON() const metrics = await Metrics.register.getMetricsAsJSON()
const metric = metrics.find(m => m.name === key) const metric = metrics.find(m => m.name === key)

View file

@ -36,6 +36,7 @@ module.exports = CompileController = {
if (error != null) { if (error != null) {
return next(error) return next(error)
} }
timer.opts = request.metricsOpts
request.project_id = req.params.project_id request.project_id = req.params.project_id
if (req.params.user_id != null) { if (req.params.user_id != null) {
request.user_id = req.params.user_id request.user_id = req.params.user_id

View file

@ -19,6 +19,12 @@ const CommandRunner = require('./CommandRunner')
const { emitPdfStats } = require('./ContentCacheMetrics') const { emitPdfStats } = require('./ContentCacheMetrics')
const SynctexOutputParser = require('./SynctexOutputParser') const SynctexOutputParser = require('./SynctexOutputParser')
// Histogram bucket upper bounds for compile-time metrics, converted from
// seconds to milliseconds.
// NOTE: These buckets are locked in per metric name.
// If you want to change them, you will need to rename metrics.
const COMPILE_TIME_BUCKETS = [
  1, 2, 3, 4, 6, 8, 11, 15, 22, 31, 43, 61, 86, 121, 170, 240,
].map(s => s * 1000)
function getCompileName(projectId, userId) { function getCompileName(projectId, userId) {
if (userId != null) { if (userId != null) {
return `${projectId}-${userId}` return `${projectId}-${userId}`
@ -56,8 +62,13 @@ function doCompile(request, callback) {
const compileDir = getCompileDir(request.project_id, request.user_id) const compileDir = getCompileDir(request.project_id, request.user_id)
const outputDir = getOutputDir(request.project_id, request.user_id) const outputDir = getOutputDir(request.project_id, request.user_id)
const timerE2E = new Metrics.Timer('compile-e2e') const timerE2E = new Metrics.Timer(
const timer = new Metrics.Timer('write-to-disk') 'compile-e2e',
1,
request.metricsOpts,
COMPILE_TIME_BUCKETS
)
const timer = new Metrics.Timer('write-to-disk', 1, request.metricsOpts)
logger.log( logger.log(
{ projectId: request.project_id, userId: request.user_id }, { projectId: request.project_id, userId: request.user_id },
'syncing resources to disk' 'syncing resources to disk'
@ -158,7 +169,7 @@ function doCompile(request, callback) {
if (error) { if (error) {
return callback(error) return callback(error)
} }
const timer = new Metrics.Timer('run-compile') const timer = new Metrics.Timer('run-compile', 1, request.metricsOpts)
// find the image tag to log it as a metric, e.g. 2015.1 (convert . to - for graphite) // find the image tag to log it as a metric, e.g. 2015.1 (convert . to - for graphite)
let tag = 'default' let tag = 'default'
if (request.imageName != null) { if (request.imageName != null) {
@ -170,8 +181,8 @@ function doCompile(request, callback) {
if (!request.project_id.match(/^[0-9a-f]{24}$/)) { if (!request.project_id.match(/^[0-9a-f]{24}$/)) {
tag = 'other' tag = 'other'
} // exclude smoke test } // exclude smoke test
Metrics.inc('compiles') Metrics.inc('compiles', 1, request.metricsOpts)
Metrics.inc(`compiles-with-image.${tag}`) Metrics.inc(`compiles-with-image.${tag}`, 1, request.metricsOpts)
const compileName = getCompileName( const compileName = getCompileName(
request.project_id, request.project_id,
request.user_id request.user_id
@ -204,6 +215,10 @@ function doCompile(request, callback) {
error = new Error('compilation') error = new Error('compilation')
error.validate = 'fail' error.validate = 'fail'
} }
// record timeout errors as a separate counter, success is recorded later
if (error && error.timedout) {
Metrics.inc('compiles-timeout', 1, request.metricsOpts)
}
// compile was killed by user, was a validation, or a compile which failed validation // compile was killed by user, was a validation, or a compile which failed validation
if ( if (
error && error &&
@ -225,16 +240,16 @@ function doCompile(request, callback) {
if (error) { if (error) {
return callback(error) return callback(error)
} }
Metrics.inc('compiles-succeeded') Metrics.inc('compiles-succeeded', 1, request.metricsOpts)
stats = stats || {} stats = stats || {}
for (const metricKey in stats) { for (const metricKey in stats) {
const metricValue = stats[metricKey] const metricValue = stats[metricKey]
Metrics.count(metricKey, metricValue) Metrics.count(metricKey, metricValue, 1, request.metricsOpts)
} }
timings = timings || {} timings = timings || {}
for (const metricKey in timings) { for (const metricKey in timings) {
const metricValue = timings[metricKey] const metricValue = timings[metricKey]
Metrics.timing(metricKey, metricValue) Metrics.timing(metricKey, metricValue, 1, request.metricsOpts)
} }
const loadavg = const loadavg =
typeof os.loadavg === 'function' ? os.loadavg() : undefined typeof os.loadavg === 'function' ? os.loadavg() : undefined
@ -254,18 +269,29 @@ function doCompile(request, callback) {
'done compile' 'done compile'
) )
if (stats['latex-runs'] > 0) { if (stats['latex-runs'] > 0) {
Metrics.timing('run-compile-per-pass', ts / stats['latex-runs']) Metrics.histogram(
'avg-compile-per-pass',
ts / stats['latex-runs'],
COMPILE_TIME_BUCKETS,
request.metricsOpts
)
} }
if (stats['latex-runs'] > 0 && timings['cpu-time'] > 0) { if (stats['latex-runs'] > 0 && timings['cpu-time'] > 0) {
Metrics.timing( Metrics.timing(
'run-compile-cpu-time-per-pass', 'run-compile-cpu-time-per-pass',
timings['cpu-time'] / stats['latex-runs'] timings['cpu-time'] / stats['latex-runs'],
1,
request.metricsOpts
) )
} }
// Emit compile time. // Emit compile time.
timings.compile = ts timings.compile = ts
const outputStageTimer = new Metrics.Timer('process-output-files') const outputStageTimer = new Metrics.Timer(
'process-output-files',
1,
request.metricsOpts
)
OutputFileFinder.findOutputFiles( OutputFileFinder.findOutputFiles(
resourceList, resourceList,
@ -297,7 +323,7 @@ function doCompile(request, callback) {
timings.compileE2E = timerE2E.done() timings.compileE2E = timerE2E.done()
if (stats['pdf-size']) { if (stats['pdf-size']) {
emitPdfStats(stats, timings) emitPdfStats(stats, timings, request)
} }
callback(null, newOutputFiles, stats, timings) callback(null, newOutputFiles, stats, timings)

View file

@ -18,26 +18,31 @@ function getSystemLoad() {
const ONE_MB = 1024 * 1024 const ONE_MB = 1024 * 1024
function emitPdfStats(stats, timings) { function emitPdfStats(stats, timings, request) {
if (stats['pdf-caching-timed-out']) { if (stats['pdf-caching-timed-out']) {
Metrics.inc('pdf-caching-timed-out') Metrics.inc('pdf-caching-timed-out', 1, request.metricsOpts)
} }
if (stats['pdf-caching-queue-limit-reached']) { if (stats['pdf-caching-queue-limit-reached']) {
Metrics.inc('pdf-caching-queue-limit-reached') Metrics.inc('pdf-caching-queue-limit-reached', 1, request.metricsOpts)
} }
if (timings['compute-pdf-caching']) { if (timings['compute-pdf-caching']) {
emitPdfCachingStats(stats, timings) emitPdfCachingStats(stats, timings, request)
} else { } else {
// How much bandwidth will the pdf incur when downloaded in full? // How much bandwidth will the pdf incur when downloaded in full?
Metrics.summary('pdf-bandwidth', stats['pdf-size']) Metrics.summary('pdf-bandwidth', stats['pdf-size'], 1, request.metricsOpts)
} }
} }
function emitPdfCachingStats(stats, timings) { function emitPdfCachingStats(stats, timings, request) {
if (!stats['pdf-size']) return // double check if (!stats['pdf-size']) return // double check
// How much extra time did we spend in PDF.js? // How much extra time did we spend in PDF.js?
Metrics.timing('compute-pdf-caching', timings['compute-pdf-caching']) Metrics.timing(
'compute-pdf-caching',
timings['compute-pdf-caching'],
1,
request.metricsOpts
)
// How large is the overhead of hashing up-front? // How large is the overhead of hashing up-front?
const fraction = const fraction =
@ -55,37 +60,52 @@ function emitPdfCachingStats(stats, timings) {
'slow pdf caching' 'slow pdf caching'
) )
} }
Metrics.summary('overhead-compute-pdf-ranges', fraction * 100 - 100) Metrics.summary(
'overhead-compute-pdf-ranges',
fraction * 100 - 100,
1,
request.metricsOpts
)
// How does the hashing scale to pdf size in MB? // How does the hashing scale to pdf size in MB?
Metrics.timing( Metrics.timing(
'compute-pdf-caching-relative-to-pdf-size', 'compute-pdf-caching-relative-to-pdf-size',
timings['compute-pdf-caching'] / (stats['pdf-size'] / ONE_MB) timings['compute-pdf-caching'] / (stats['pdf-size'] / ONE_MB),
1,
request.metricsOpts
) )
if (stats['pdf-caching-total-ranges-size']) { if (stats['pdf-caching-total-ranges-size']) {
// How does the hashing scale to total ranges size in MB? // How does the hashing scale to total ranges size in MB?
Metrics.timing( Metrics.timing(
'compute-pdf-caching-relative-to-total-ranges-size', 'compute-pdf-caching-relative-to-total-ranges-size',
timings['compute-pdf-caching'] / timings['compute-pdf-caching'] /
(stats['pdf-caching-total-ranges-size'] / ONE_MB) (stats['pdf-caching-total-ranges-size'] / ONE_MB),
1,
request.metricsOpts
) )
// How fast is the hashing per range on average? // How fast is the hashing per range on average?
Metrics.timing( Metrics.timing(
'compute-pdf-caching-relative-to-ranges-count', 'compute-pdf-caching-relative-to-ranges-count',
timings['compute-pdf-caching'] / stats['pdf-caching-n-ranges'] timings['compute-pdf-caching'] / stats['pdf-caching-n-ranges'],
1,
request.metricsOpts
) )
// How many ranges are new? // How many ranges are new?
Metrics.summary( Metrics.summary(
'new-pdf-ranges-relative-to-total-ranges', 'new-pdf-ranges-relative-to-total-ranges',
(stats['pdf-caching-n-new-ranges'] / stats['pdf-caching-n-ranges']) * 100 (stats['pdf-caching-n-new-ranges'] / stats['pdf-caching-n-ranges']) * 100,
1,
request.metricsOpts
) )
} }
// How much content is cacheable? // How much content is cacheable?
Metrics.summary( Metrics.summary(
'cacheable-ranges-to-pdf-size', 'cacheable-ranges-to-pdf-size',
(stats['pdf-caching-total-ranges-size'] / stats['pdf-size']) * 100 (stats['pdf-caching-total-ranges-size'] / stats['pdf-size']) * 100,
1,
request.metricsOpts
) )
const sizeWhenDownloadedInFull = const sizeWhenDownloadedInFull =
@ -99,17 +119,26 @@ function emitPdfCachingStats(stats, timings) {
// How much bandwidth can we save when downloading the pdf in full? // How much bandwidth can we save when downloading the pdf in full?
Metrics.summary( Metrics.summary(
'pdf-bandwidth-savings', 'pdf-bandwidth-savings',
100 - (sizeWhenDownloadedInFull / stats['pdf-size']) * 100 100 - (sizeWhenDownloadedInFull / stats['pdf-size']) * 100,
1,
request.metricsOpts
) )
// How much bandwidth will the pdf incur when downloaded in full? // How much bandwidth will the pdf incur when downloaded in full?
Metrics.summary('pdf-bandwidth', sizeWhenDownloadedInFull) Metrics.summary(
'pdf-bandwidth',
sizeWhenDownloadedInFull,
1,
request.metricsOpts
)
// How much space do the ranges use? // How much space do the ranges use?
// This will accumulate the ranges size over time, skipping already written ranges. // This will accumulate the ranges size over time, skipping already written ranges.
Metrics.summary( Metrics.summary(
'pdf-ranges-disk-size', 'pdf-ranges-disk-size',
stats['pdf-caching-new-ranges-size'] - stats['pdf-caching-reclaimed-space'] stats['pdf-caching-new-ranges-size'] - stats['pdf-caching-reclaimed-space'],
1,
request.metricsOpts
) )
} }

View file

@ -186,7 +186,7 @@ module.exports = OutputCacheManager = {
} }
OutputCacheManager.saveStreamsInContentDir( OutputCacheManager.saveStreamsInContentDir(
{ stats, timings }, { request, stats, timings },
result, result,
compileDir, compileDir,
outputDir, outputDir,
@ -348,7 +348,7 @@ module.exports = OutputCacheManager = {
}, },
saveStreamsInContentDir( saveStreamsInContentDir(
{ stats, timings }, { request, stats, timings },
outputFiles, outputFiles,
compileDir, compileDir,
outputDir, outputDir,
@ -367,7 +367,11 @@ module.exports = OutputCacheManager = {
OutputCacheManager.path(outputFile.build, outputFile.path) OutputCacheManager.path(outputFile.build, outputFile.path)
) )
const pdfSize = outputFile.size const pdfSize = outputFile.size
const timer = new Metrics.Timer('compute-pdf-ranges') const timer = new Metrics.Timer(
'compute-pdf-ranges',
1,
request.metricsOpts
)
ContentCacheManager.update( ContentCacheManager.update(
contentDir, contentDir,
outputFilePath, outputFilePath,

View file

@ -41,6 +41,20 @@ module.exports = RequestParser = {
} }
try { try {
response.metricsOpts = {
path: this._parseAttribute('metricsPath', compile.options.metricsPath, {
default: '',
type: 'string',
}),
method: this._parseAttribute(
'metricsMethod',
compile.options.metricsMethod,
{
default: '',
type: 'string',
}
),
}
response.compiler = this._parseAttribute( response.compiler = this._parseAttribute(
'compiler', 'compiler',
compile.options.compiler, compile.options.compiler,

View file

@ -44,6 +44,7 @@ module.exports = ResourceWriter = {
return callback(error) return callback(error)
} }
return ResourceWriter._removeExtraneousFiles( return ResourceWriter._removeExtraneousFiles(
request,
resourceList, resourceList,
basePath, basePath,
function (error, outputFiles, allFiles) { function (error, outputFiles, allFiles) {
@ -84,27 +85,22 @@ module.exports = ResourceWriter = {
if (error != null) { if (error != null) {
return callback(error) return callback(error)
} }
this.saveAllResourcesToDisk( this.saveAllResourcesToDisk(request, basePath, function (error) {
request.project_id, if (error != null) {
request.resources, return callback(error)
basePath,
function (error) {
if (error != null) {
return callback(error)
}
return ResourceStateManager.saveProjectState(
request.syncState,
request.resources,
basePath,
function (error) {
if (error != null) {
return callback(error)
}
return callback(null, request.resources)
}
)
} }
) return ResourceStateManager.saveProjectState(
request.syncState,
request.resources,
basePath,
function (error) {
if (error != null) {
return callback(error)
}
return callback(null, request.resources)
}
)
})
}) })
}, },
@ -126,7 +122,7 @@ module.exports = ResourceWriter = {
}) })
}, },
saveAllResourcesToDisk(project_id, resources, basePath, callback) { saveAllResourcesToDisk(request, basePath, callback) {
if (callback == null) { if (callback == null) {
callback = function () {} callback = function () {}
} }
@ -134,7 +130,8 @@ module.exports = ResourceWriter = {
if (error != null) { if (error != null) {
return callback(error) return callback(error)
} }
return this._removeExtraneousFiles(resources, basePath, error => { const { project_id, resources } = request
this._removeExtraneousFiles(request, resources, basePath, error => {
if (error != null) { if (error != null) {
return callback(error) return callback(error)
} }
@ -172,11 +169,15 @@ module.exports = ResourceWriter = {
}) })
}, },
_removeExtraneousFiles(resources, basePath, _callback) { _removeExtraneousFiles(request, resources, basePath, _callback) {
if (_callback == null) { if (_callback == null) {
_callback = function () {} _callback = function () {}
} }
const timer = new Metrics.Timer('unlink-output-files') const timer = new Metrics.Timer(
'unlink-output-files',
1,
request.metricsOpts
)
const callback = function (error, ...result) { const callback = function (error, ...result) {
timer.done() timer.done()
return _callback(error, ...Array.from(result)) return _callback(error, ...Array.from(result))

View file

@ -8,6 +8,7 @@
const Client = require('./helpers/Client') const Client = require('./helpers/Client')
const request = require('request') const request = require('request')
const ClsiApp = require('./helpers/ClsiApp') const ClsiApp = require('./helpers/ClsiApp')
const Settings = require('@overleaf/settings')
describe('Simple LaTeX file', function () { describe('Simple LaTeX file', function () {
before(function (done) { before(function (done) {
@ -24,6 +25,10 @@ Hello world
`, `,
}, },
], ],
options: {
metricsPath: 'clsi-perf',
metricsMethod: 'priority',
},
} }
return ClsiApp.ensureRunning(() => { return ClsiApp.ensureRunning(() => {
return Client.compile( return Client.compile(
@ -58,7 +63,7 @@ Hello world
}) })
}) })
return it('should provide the log for download', function (done) { it('should provide the log for download', function (done) {
const log = Client.getOutputFile(this.body, 'pdf') const log = Client.getOutputFile(this.body, 'pdf')
return request.get(log.url, (error, res, body) => { return request.get(log.url, (error, res, body) => {
if (error) return done(error) if (error) return done(error)
@ -66,4 +71,21 @@ Hello world
return done() return done()
}) })
}) })
it('should gather personalized metrics', function (done) {
// Scrape the local /metrics endpoint and verify that at least one
// 'compile*' metric line carries the path/method labels supplied via
// the compile request's metricsPath/metricsMethod options.
request.get(`${Settings.apis.clsi.url}/metrics`, (err, res, body) => {
if (err) return done(err)
body
.split('\n')
.some(line => {
return (
line.startsWith('compile') &&
line.includes('path="clsi-perf"') &&
line.includes('method="priority"')
)
})
.should.equal(true)
done()
})
})
}) })

View file

@ -56,15 +56,16 @@ describe('ResourceWriter', function () {
describe('syncResourcesToDisk on a full request', function () { describe('syncResourcesToDisk on a full request', function () {
beforeEach(function () { beforeEach(function () {
this.resources = ['resource-1-mock', 'resource-2-mock', 'resource-3-mock'] this.resources = ['resource-1-mock', 'resource-2-mock', 'resource-3-mock']
this.request = {
project_id: this.project_id,
syncState: (this.syncState = '0123456789abcdef'),
resources: this.resources,
}
this.ResourceWriter._writeResourceToDisk = sinon.stub().callsArg(3) this.ResourceWriter._writeResourceToDisk = sinon.stub().callsArg(3)
this.ResourceWriter._removeExtraneousFiles = sinon.stub().callsArg(2) this.ResourceWriter._removeExtraneousFiles = sinon.stub().yields(null)
this.ResourceStateManager.saveProjectState = sinon.stub().callsArg(3) this.ResourceStateManager.saveProjectState = sinon.stub().callsArg(3)
return this.ResourceWriter.syncResourcesToDisk( return this.ResourceWriter.syncResourcesToDisk(
{ this.request,
project_id: this.project_id,
syncState: (this.syncState = '0123456789abcdef'),
resources: this.resources,
},
this.basePath, this.basePath,
this.callback this.callback
) )
@ -72,7 +73,7 @@ describe('ResourceWriter', function () {
it('should remove old files', function () { it('should remove old files', function () {
return this.ResourceWriter._removeExtraneousFiles return this.ResourceWriter._removeExtraneousFiles
.calledWith(this.resources, this.basePath) .calledWith(this.request, this.resources, this.basePath)
.should.equal(true) .should.equal(true)
}) })
@ -98,22 +99,24 @@ describe('ResourceWriter', function () {
describe('syncResourcesToDisk on an incremental update', function () { describe('syncResourcesToDisk on an incremental update', function () {
beforeEach(function () { beforeEach(function () {
this.resources = ['resource-1-mock'] this.resources = ['resource-1-mock']
this.request = {
project_id: this.project_id,
syncType: 'incremental',
syncState: (this.syncState = '1234567890abcdef'),
resources: this.resources,
}
this.fullResources = this.resources.concat(['file-1'])
this.ResourceWriter._writeResourceToDisk = sinon.stub().callsArg(3) this.ResourceWriter._writeResourceToDisk = sinon.stub().callsArg(3)
this.ResourceWriter._removeExtraneousFiles = sinon this.ResourceWriter._removeExtraneousFiles = sinon
.stub() .stub()
.callsArgWith(2, null, (this.outputFiles = []), (this.allFiles = [])) .yields(null, (this.outputFiles = []), (this.allFiles = []))
this.ResourceStateManager.checkProjectStateMatches = sinon this.ResourceStateManager.checkProjectStateMatches = sinon
.stub() .stub()
.callsArgWith(2, null, this.resources) .callsArgWith(2, null, this.fullResources)
this.ResourceStateManager.saveProjectState = sinon.stub().callsArg(3) this.ResourceStateManager.saveProjectState = sinon.stub().callsArg(3)
this.ResourceStateManager.checkResourceFiles = sinon.stub().callsArg(3) this.ResourceStateManager.checkResourceFiles = sinon.stub().callsArg(3)
return this.ResourceWriter.syncResourcesToDisk( return this.ResourceWriter.syncResourcesToDisk(
{ this.request,
project_id: this.project_id,
syncType: 'incremental',
syncState: (this.syncState = '1234567890abcdef'),
resources: this.resources,
},
this.basePath, this.basePath,
this.callback this.callback
) )
@ -127,13 +130,13 @@ describe('ResourceWriter', function () {
it('should remove old files', function () { it('should remove old files', function () {
return this.ResourceWriter._removeExtraneousFiles return this.ResourceWriter._removeExtraneousFiles
.calledWith(this.resources, this.basePath) .calledWith(this.request, this.fullResources, this.basePath)
.should.equal(true) .should.equal(true)
}) })
it('should check each resource exists', function () { it('should check each resource exists', function () {
return this.ResourceStateManager.checkResourceFiles return this.ResourceStateManager.checkResourceFiles
.calledWith(this.resources, this.allFiles, this.basePath) .calledWith(this.fullResources, this.allFiles, this.basePath)
.should.equal(true) .should.equal(true)
}) })
@ -153,16 +156,17 @@ describe('ResourceWriter', function () {
describe('syncResourcesToDisk on an incremental update when the state does not match', function () { describe('syncResourcesToDisk on an incremental update when the state does not match', function () {
beforeEach(function () { beforeEach(function () {
this.resources = ['resource-1-mock'] this.resources = ['resource-1-mock']
this.request = {
project_id: this.project_id,
syncType: 'incremental',
syncState: (this.syncState = '1234567890abcdef'),
resources: this.resources,
}
this.ResourceStateManager.checkProjectStateMatches = sinon this.ResourceStateManager.checkProjectStateMatches = sinon
.stub() .stub()
.callsArgWith(2, (this.error = new Error())) .callsArgWith(2, (this.error = new Error()))
return this.ResourceWriter.syncResourcesToDisk( return this.ResourceWriter.syncResourcesToDisk(
{ this.request,
project_id: this.project_id,
syncType: 'incremental',
syncState: (this.syncState = '1234567890abcdef'),
resources: this.resources,
},
this.basePath, this.basePath,
this.callback this.callback
) )
@ -237,11 +241,18 @@ describe('ResourceWriter', function () {
}, },
] ]
this.resources = 'mock-resources' this.resources = 'mock-resources'
this.request = {
project_id: this.project_id,
syncType: 'incremental',
syncState: (this.syncState = '1234567890abcdef'),
resources: this.resources,
}
this.OutputFileFinder.findOutputFiles = sinon this.OutputFileFinder.findOutputFiles = sinon
.stub() .stub()
.callsArgWith(2, null, this.output_files) .callsArgWith(2, null, this.output_files)
this.ResourceWriter._deleteFileIfNotDirectory = sinon.stub().callsArg(1) this.ResourceWriter._deleteFileIfNotDirectory = sinon.stub().callsArg(1)
return this.ResourceWriter._removeExtraneousFiles( return this.ResourceWriter._removeExtraneousFiles(
this.request,
this.resources, this.resources,
this.basePath, this.basePath,
this.callback this.callback

View file

@ -854,6 +854,7 @@ const ClsiManager = {
enablePdfCaching: enablePdfCaching:
(Settings.enablePdfCaching && options.enablePdfCaching) || false, (Settings.enablePdfCaching && options.enablePdfCaching) || false,
flags: flags, flags: flags,
metricsMethod: options.compileGroup,
}, },
rootResourcePath, rootResourcePath,
resources, resources,

View file

@ -621,6 +621,7 @@ describe('ClsiManager', function () {
compileGroup: 'standard', compileGroup: 'standard',
enablePdfCaching: false, enablePdfCaching: false,
flags: undefined, flags: undefined,
metricsMethod: 'standard',
}, // "01234567890abcdef" }, // "01234567890abcdef"
rootResourcePath: 'main.tex', rootResourcePath: 'main.tex',
resources: [ resources: [
@ -715,6 +716,7 @@ describe('ClsiManager', function () {
compileGroup: 'priority', compileGroup: 'priority',
enablePdfCaching: false, enablePdfCaching: false,
flags: undefined, flags: undefined,
metricsMethod: 'priority',
}, },
rootResourcePath: 'main.tex', rootResourcePath: 'main.tex',
resources: [ resources: [