Mirror of https://github.com/overleaf/overleaf.git, synced 2024-11-21 20:47:08 -05:00
Merge pull request #6927 from overleaf/jpa-split-clsi-metrics
[misc] split clsi metrics into multiple categories

GitOrigin-RevId: 964ab57b344b92383a2937beff50139bae273a0e
parent 958216d578
commit 264b107e39

13 changed files with 312 additions and 84 deletions
@@ -131,18 +131,31 @@ function timing(key, timeSpan, sampleRate = 1, opts = {}) {
   }
 }
 
+function histogram(key, value, buckets, opts = {}) {
+  key = buildPromKey('histogram_' + key)
+  promWrapper.metric('histogram', key, buckets).observe(opts, value)
+  if (process.env.DEBUG_METRICS) {
+    console.log('doing histogram', key, buckets, opts)
+  }
+}
+
 class Timer {
-  constructor(key, sampleRate = 1, opts = {}) {
+  constructor(key, sampleRate = 1, opts = {}, buckets) {
     this.start = new Date()
     key = buildPromKey(key)
     this.key = key
     this.sampleRate = sampleRate
     this.opts = opts
+    this.buckets = buckets
   }
 
   done() {
     const timeSpan = new Date() - this.start
-    timing(this.key, timeSpan, this.sampleRate, this.opts)
+    if (this.buckets) {
+      histogram(this.key, timeSpan, this.buckets, this.opts)
+    } else {
+      timing(this.key, timeSpan, this.sampleRate, this.opts)
+    }
     return timeSpan
   }
 }
@@ -181,6 +194,7 @@ module.exports.inc = inc
 module.exports.count = count
 module.exports.summary = summary
 module.exports.timing = timing
+module.exports.histogram = histogram
 module.exports.Timer = Timer
 module.exports.gauge = gauge
 module.exports.globalGauge = globalGauge
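A minimal usage sketch of the API added above (hypothetical metric name and buckets; assumes the library is consumed as @overleaf/metrics, as the CLSI code further down does):

    const Metrics = require('@overleaf/metrics')

    const BUCKETS = [100, 500, 1000] // in ms; fixed per metric name

    // Direct observation; the exported name gains a histogram_ prefix,
    // so this shows up as histogram_render_time.
    Metrics.histogram('render-time', 250, BUCKETS)

    // A Timer constructed with buckets reports into the histogram on done();
    // without buckets it falls back to the summary-based timing() as before.
    const timer = new Metrics.Timer('render-time', 1, {}, BUCKETS)
    setTimeout(() => timer.done(), 250)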
@@ -29,6 +29,8 @@ const optsKey = function (opts) {
 }
 
 const extendOpts = function (opts, labelNames) {
+  // Make a clone in order to be able to re-use opts for other kinds of metrics.
+  opts = Object.assign({}, opts)
   for (const label of Array.from(labelNames)) {
     if (!opts[label]) {
       opts[label] = ''
@@ -49,15 +51,15 @@ const PromWrapper = {
   ttlInMinutes: 0,
   registry,
 
-  metric(type, name) {
-    return metrics.get(name) || new MetricWrapper(type, name)
+  metric(type, name, buckets) {
+    return metrics.get(name) || new MetricWrapper(type, name, buckets)
   },
 
   collectDefaultMetrics: prom.collectDefaultMetrics,
 }
 
 class MetricWrapper {
-  constructor(type, name) {
+  constructor(type, name, buckets) {
     metrics.set(name, this)
     this.name = name
     this.instances = new Map()
@@ -70,6 +72,19 @@ class MetricWrapper {
           help: name,
           labelNames: ['status', 'method', 'path'],
         })
+      case 'histogram':
+        return new prom.Histogram({
+          name,
+          help: name,
+          labelNames: [
+            'path',
+            'status_code',
+            'method',
+            'collection',
+            'query',
+          ],
+          buckets,
+        })
       case 'summary':
         return new prom.Summary({
           name,
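One behavior worth noting in the wrapper above: metrics are cached by name, so the buckets supplied on first use are the ones that get registered, and a later call with a different bucket list silently reuses the original instance. A hedged sketch (hypothetical metric name):

    // First call creates and registers the prom.Histogram with these buckets;
    // subsequent calls with the same name return the cached wrapper, so
    // changing the buckets would require renaming the metric.
    PromWrapper.metric('histogram', 'histogram_render_time', [100, 500, 1000])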
@@ -1,3 +1,4 @@
+const { promisify } = require('util')
 const os = require('os')
 const http = require('http')
 const { expect } = require('chai')
@@ -5,6 +6,7 @@ const Metrics = require('../..')
 
 const HOSTNAME = os.hostname()
 const APP_NAME = 'test-app'
+const sleep = promisify(setTimeout)
 
 describe('Metrics module', function () {
   before(function () {
@@ -67,6 +69,60 @@ describe('Metrics module', function () {
     })
   })
 
+  describe('histogram()', function () {
+    it('collects in buckets', async function () {
+      const buckets = [10, 100, 1000]
+      Metrics.histogram('distance', 10, buckets)
+      Metrics.histogram('distance', 20, buckets)
+      Metrics.histogram('distance', 100, buckets)
+      Metrics.histogram('distance', 200, buckets)
+      Metrics.histogram('distance', 1000, buckets)
+      Metrics.histogram('distance', 2000, buckets)
+      const sum = await getSummarySum('histogram_distance')
+      expect(sum).to.equal(3330)
+      await checkHistogramValues('histogram_distance', {
+        10: 1,
+        100: 3,
+        1000: 5,
+        '+Inf': 6,
+      })
+    })
+  })
+
+  describe('Timer', function () {
+    beforeEach('collect timings', async function () {
+      const buckets = [10, 100, 1000]
+      for (const duration of [1, 1, 1, 15, 15, 15, 105, 105, 105]) {
+        const withBuckets = new Metrics.Timer('height', 1, {}, buckets)
+        const withOutBuckets = new Metrics.Timer('depth', 1, {})
+        await sleep(duration)
+        withBuckets.done()
+        withOutBuckets.done()
+      }
+    })
+
+    it('with buckets', async function () {
+      await checkHistogramValues('histogram_height', {
+        10: 3,
+        100: 6,
+        1000: 9,
+        '+Inf': 9,
+      })
+    })
+
+    it('without buckets', async function () {
+      await checkSummaryValues('timer_depth', {
+        0.01: 1,
+        0.05: 1,
+        0.5: 15,
+        0.9: 105,
+        0.95: 105,
+        0.99: 105,
+        0.999: 105,
+      })
+    })
+  })
+
   describe('gauge()', function () {
     it('records values', async function () {
       Metrics.gauge('water_level', 1.5)
@@ -225,6 +281,38 @@ async function getSummarySum(key) {
   return null
 }
 
+async function checkHistogramValues(key, values) {
+  const metric = getMetric(key)
+  const item = await metric.get()
+  const found = {}
+  for (const value of item.values) {
+    const bucket = value.labels.le
+    if (!bucket) continue
+    found[bucket] = value.value
+  }
+  expect(found).to.deep.equal(values)
+  return null
+}
+
+async function checkSummaryValues(key, values) {
+  const metric = getMetric(key)
+  const item = await metric.get()
+  const found = {}
+  for (const value of item.values) {
+    const quantile = value.labels.quantile
+    if (!quantile) continue
+    found[quantile] = value.value
+  }
+  for (const quantile of Object.keys(values)) {
+    expect(found[quantile]).to.be.within(
+      values[quantile] - 5,
+      values[quantile] + 5,
+      `quantile: ${quantile}`
+    )
+  }
+  return null
+}
+
 async function getMetricValue(key) {
   const metrics = await Metrics.register.getMetricsAsJSON()
   const metric = metrics.find(m => m.name === key)
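For the expected counts in the histogram assertions above, recall that Prometheus bucket counters are cumulative: each le bucket also counts every smaller observation.

    // Observations: 10, 20, 100, 200, 1000, 2000
    // le="10"   -> 1  (10)
    // le="100"  -> 3  (+ 20, 100)
    // le="1000" -> 5  (+ 200, 1000)
    // le="+Inf" -> 6  (everything, including 2000)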
@@ -36,6 +36,7 @@ module.exports = CompileController = {
       if (error != null) {
         return next(error)
       }
+      timer.opts = request.metricsOpts
       request.project_id = req.params.project_id
       if (req.params.user_id != null) {
         request.user_id = req.params.user_id
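The assignment added above attaches the per-request labels to a timer that was started before the request body had been parsed, so the labels must be in place before timer.done() fires. A paraphrased sketch (timer name hypothetical; the surrounding controller code is not shown in this diff):

    const timer = new Metrics.Timer('compile-request') // started pre-parse
    // ...request parsed successfully, yielding request.metricsOpts...
    timer.opts = request.metricsOpts // labels applied before timer.done()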
@@ -19,6 +19,12 @@ const CommandRunner = require('./CommandRunner')
 const { emitPdfStats } = require('./ContentCacheMetrics')
 const SynctexOutputParser = require('./SynctexOutputParser')
 
+const COMPILE_TIME_BUCKETS = [
+  // NOTE: These buckets are locked in per metric name.
+  //       If you want to change them, you will need to rename metrics.
+  1, 2, 3, 4, 6, 8, 11, 15, 22, 31, 43, 61, 86, 121, 170, 240,
+].map(seconds => seconds * 1000)
+
 function getCompileName(projectId, userId) {
   if (userId != null) {
     return `${projectId}-${userId}`
@@ -56,8 +62,13 @@ function doCompile(request, callback) {
   const compileDir = getCompileDir(request.project_id, request.user_id)
   const outputDir = getOutputDir(request.project_id, request.user_id)
 
-  const timerE2E = new Metrics.Timer('compile-e2e')
-  const timer = new Metrics.Timer('write-to-disk')
+  const timerE2E = new Metrics.Timer(
+    'compile-e2e',
+    1,
+    request.metricsOpts,
+    COMPILE_TIME_BUCKETS
+  )
+  const timer = new Metrics.Timer('write-to-disk', 1, request.metricsOpts)
   logger.log(
     { projectId: request.project_id, userId: request.user_id },
     'syncing resources to disk'
@@ -158,7 +169,7 @@ function doCompile(request, callback) {
     if (error) {
       return callback(error)
     }
-    const timer = new Metrics.Timer('run-compile')
+    const timer = new Metrics.Timer('run-compile', 1, request.metricsOpts)
     // find the image tag to log it as a metric, e.g. 2015.1 (convert . to - for graphite)
     let tag = 'default'
     if (request.imageName != null) {
@@ -170,8 +181,8 @@ function doCompile(request, callback) {
     if (!request.project_id.match(/^[0-9a-f]{24}$/)) {
       tag = 'other'
     } // exclude smoke test
-    Metrics.inc('compiles')
-    Metrics.inc(`compiles-with-image.${tag}`)
+    Metrics.inc('compiles', 1, request.metricsOpts)
+    Metrics.inc(`compiles-with-image.${tag}`, 1, request.metricsOpts)
     const compileName = getCompileName(
       request.project_id,
       request.user_id
@@ -204,6 +215,10 @@ function doCompile(request, callback) {
         error = new Error('compilation')
         error.validate = 'fail'
       }
+      // record timeout errors as a separate counter, success is recorded later
+      if (error && error.timedout) {
+        Metrics.inc('compiles-timeout', 1, request.metricsOpts)
+      }
       // compile was killed by user, was a validation, or a compile which failed validation
       if (
         error &&
@@ -225,16 +240,16 @@ function doCompile(request, callback) {
       if (error) {
         return callback(error)
       }
-      Metrics.inc('compiles-succeeded')
+      Metrics.inc('compiles-succeeded', 1, request.metricsOpts)
       stats = stats || {}
       for (const metricKey in stats) {
         const metricValue = stats[metricKey]
-        Metrics.count(metricKey, metricValue)
+        Metrics.count(metricKey, metricValue, 1, request.metricsOpts)
       }
       timings = timings || {}
       for (const metricKey in timings) {
         const metricValue = timings[metricKey]
-        Metrics.timing(metricKey, metricValue)
+        Metrics.timing(metricKey, metricValue, 1, request.metricsOpts)
       }
       const loadavg =
         typeof os.loadavg === 'function' ? os.loadavg() : undefined
@@ -254,18 +269,29 @@ function doCompile(request, callback) {
         'done compile'
       )
       if (stats['latex-runs'] > 0) {
-        Metrics.timing('run-compile-per-pass', ts / stats['latex-runs'])
+        Metrics.histogram(
+          'avg-compile-per-pass',
+          ts / stats['latex-runs'],
+          COMPILE_TIME_BUCKETS,
+          request.metricsOpts
+        )
       }
       if (stats['latex-runs'] > 0 && timings['cpu-time'] > 0) {
         Metrics.timing(
           'run-compile-cpu-time-per-pass',
-          timings['cpu-time'] / stats['latex-runs']
+          timings['cpu-time'] / stats['latex-runs'],
+          1,
+          request.metricsOpts
         )
       }
       // Emit compile time.
       timings.compile = ts
 
-      const outputStageTimer = new Metrics.Timer('process-output-files')
+      const outputStageTimer = new Metrics.Timer(
+        'process-output-files',
+        1,
+        request.metricsOpts
+      )
 
       OutputFileFinder.findOutputFiles(
         resourceList,
@@ -297,7 +323,7 @@ function doCompile(request, callback) {
       timings.compileE2E = timerE2E.done()
 
       if (stats['pdf-size']) {
-        emitPdfStats(stats, timings)
+        emitPdfStats(stats, timings, request)
       }
 
       callback(null, newOutputFiles, stats, timings)
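A small reading aid for COMPILE_TIME_BUCKETS above: the values are authored in seconds and multiplied up, because Timer.done() measures elapsed time in milliseconds.

    // [1, 2, 3, 4, 6, 8, 11, ..., 240].map(seconds => seconds * 1000)
    // => [1000, 2000, 3000, 4000, 6000, 8000, 11000, ..., 240000]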
@@ -18,26 +18,31 @@ function getSystemLoad() {
 
 const ONE_MB = 1024 * 1024
 
-function emitPdfStats(stats, timings) {
+function emitPdfStats(stats, timings, request) {
   if (stats['pdf-caching-timed-out']) {
-    Metrics.inc('pdf-caching-timed-out')
+    Metrics.inc('pdf-caching-timed-out', 1, request.metricsOpts)
   }
   if (stats['pdf-caching-queue-limit-reached']) {
-    Metrics.inc('pdf-caching-queue-limit-reached')
+    Metrics.inc('pdf-caching-queue-limit-reached', 1, request.metricsOpts)
   }
   if (timings['compute-pdf-caching']) {
-    emitPdfCachingStats(stats, timings)
+    emitPdfCachingStats(stats, timings, request)
   } else {
     // How much bandwidth will the pdf incur when downloaded in full?
-    Metrics.summary('pdf-bandwidth', stats['pdf-size'])
+    Metrics.summary('pdf-bandwidth', stats['pdf-size'], 1, request.metricsOpts)
   }
 }
 
-function emitPdfCachingStats(stats, timings) {
+function emitPdfCachingStats(stats, timings, request) {
   if (!stats['pdf-size']) return // double check
 
   // How much extra time did we spend in PDF.js?
-  Metrics.timing('compute-pdf-caching', timings['compute-pdf-caching'])
+  Metrics.timing(
+    'compute-pdf-caching',
+    timings['compute-pdf-caching'],
+    1,
+    request.metricsOpts
+  )
 
   // How large is the overhead of hashing up-front?
   const fraction =
@@ -55,37 +60,52 @@ function emitPdfCachingStats(stats, timings) {
       'slow pdf caching'
     )
   }
-  Metrics.summary('overhead-compute-pdf-ranges', fraction * 100 - 100)
+  Metrics.summary(
+    'overhead-compute-pdf-ranges',
+    fraction * 100 - 100,
+    1,
+    request.metricsOpts
+  )
 
   // How does the hashing scale to pdf size in MB?
   Metrics.timing(
     'compute-pdf-caching-relative-to-pdf-size',
-    timings['compute-pdf-caching'] / (stats['pdf-size'] / ONE_MB)
+    timings['compute-pdf-caching'] / (stats['pdf-size'] / ONE_MB),
+    1,
+    request.metricsOpts
   )
   if (stats['pdf-caching-total-ranges-size']) {
     // How does the hashing scale to total ranges size in MB?
     Metrics.timing(
       'compute-pdf-caching-relative-to-total-ranges-size',
       timings['compute-pdf-caching'] /
-        (stats['pdf-caching-total-ranges-size'] / ONE_MB)
+        (stats['pdf-caching-total-ranges-size'] / ONE_MB),
+      1,
+      request.metricsOpts
     )
     // How fast is the hashing per range on average?
     Metrics.timing(
       'compute-pdf-caching-relative-to-ranges-count',
-      timings['compute-pdf-caching'] / stats['pdf-caching-n-ranges']
+      timings['compute-pdf-caching'] / stats['pdf-caching-n-ranges'],
+      1,
+      request.metricsOpts
    )
 
     // How many ranges are new?
     Metrics.summary(
       'new-pdf-ranges-relative-to-total-ranges',
-      (stats['pdf-caching-n-new-ranges'] / stats['pdf-caching-n-ranges']) * 100
+      (stats['pdf-caching-n-new-ranges'] / stats['pdf-caching-n-ranges']) * 100,
+      1,
+      request.metricsOpts
     )
   }
 
   // How much content is cacheable?
   Metrics.summary(
     'cacheable-ranges-to-pdf-size',
-    (stats['pdf-caching-total-ranges-size'] / stats['pdf-size']) * 100
+    (stats['pdf-caching-total-ranges-size'] / stats['pdf-size']) * 100,
+    1,
+    request.metricsOpts
   )
 
   const sizeWhenDownloadedInFull =
@@ -99,17 +119,26 @@ function emitPdfCachingStats(stats, timings) {
   // How much bandwidth can we save when downloading the pdf in full?
   Metrics.summary(
     'pdf-bandwidth-savings',
-    100 - (sizeWhenDownloadedInFull / stats['pdf-size']) * 100
+    100 - (sizeWhenDownloadedInFull / stats['pdf-size']) * 100,
+    1,
+    request.metricsOpts
   )
 
   // How much bandwidth will the pdf incur when downloaded in full?
-  Metrics.summary('pdf-bandwidth', sizeWhenDownloadedInFull)
+  Metrics.summary(
+    'pdf-bandwidth',
+    sizeWhenDownloadedInFull,
+    1,
+    request.metricsOpts
+  )
 
   // How much space do the ranges use?
   // This will accumulate the ranges size over time, skipping already written ranges.
   Metrics.summary(
     'pdf-ranges-disk-size',
-    stats['pdf-caching-new-ranges-size'] - stats['pdf-caching-reclaimed-space']
+    stats['pdf-caching-new-ranges-size'] - stats['pdf-caching-reclaimed-space'],
+    1,
+    request.metricsOpts
   )
 }
@@ -186,7 +186,7 @@ module.exports = OutputCacheManager = {
       }
 
       OutputCacheManager.saveStreamsInContentDir(
-        { stats, timings },
+        { request, stats, timings },
         result,
         compileDir,
         outputDir,
@@ -348,7 +348,7 @@ module.exports = OutputCacheManager = {
   },
 
   saveStreamsInContentDir(
-    { stats, timings },
+    { request, stats, timings },
     outputFiles,
     compileDir,
     outputDir,
@@ -367,7 +367,11 @@ module.exports = OutputCacheManager = {
         OutputCacheManager.path(outputFile.build, outputFile.path)
       )
       const pdfSize = outputFile.size
-      const timer = new Metrics.Timer('compute-pdf-ranges')
+      const timer = new Metrics.Timer(
+        'compute-pdf-ranges',
+        1,
+        request.metricsOpts
+      )
       ContentCacheManager.update(
         contentDir,
         outputFilePath,
@@ -41,6 +41,20 @@ module.exports = RequestParser = {
   }
 
   try {
+    response.metricsOpts = {
+      path: this._parseAttribute('metricsPath', compile.options.metricsPath, {
+        default: '',
+        type: 'string',
+      }),
+      method: this._parseAttribute(
+        'metricsMethod',
+        compile.options.metricsMethod,
+        {
+          default: '',
+          type: 'string',
+        }
+      ),
+    }
     response.compiler = this._parseAttribute(
       'compiler',
       compile.options.compiler,
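A compile request exercising the new attributes might look like this (a sketch; only the metrics fields follow the parser above, the rest is illustrative):

    const body = {
      compile: {
        options: {
          metricsPath: 'clsi-perf', // exported as the "path" label
          metricsMethod: 'priority', // exported as the "method" label
        },
        resources: [],
      },
    }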
@@ -44,6 +44,7 @@ module.exports = ResourceWriter = {
         return callback(error)
       }
       return ResourceWriter._removeExtraneousFiles(
+        request,
         resourceList,
         basePath,
         function (error, outputFiles, allFiles) {
@@ -84,27 +85,22 @@ module.exports = ResourceWriter = {
       if (error != null) {
         return callback(error)
       }
-      this.saveAllResourcesToDisk(
-        request.project_id,
-        request.resources,
-        basePath,
-        function (error) {
-          if (error != null) {
-            return callback(error)
-          }
-          return ResourceStateManager.saveProjectState(
-            request.syncState,
-            request.resources,
-            basePath,
-            function (error) {
-              if (error != null) {
-                return callback(error)
-              }
-              return callback(null, request.resources)
-            }
-          )
-        }
-      )
+      this.saveAllResourcesToDisk(request, basePath, function (error) {
+        if (error != null) {
+          return callback(error)
+        }
+        return ResourceStateManager.saveProjectState(
+          request.syncState,
+          request.resources,
+          basePath,
+          function (error) {
+            if (error != null) {
+              return callback(error)
+            }
+            return callback(null, request.resources)
+          }
+        )
+      })
     })
   },
 
@@ -126,7 +122,7 @@ module.exports = ResourceWriter = {
     })
   },
 
-  saveAllResourcesToDisk(project_id, resources, basePath, callback) {
+  saveAllResourcesToDisk(request, basePath, callback) {
     if (callback == null) {
       callback = function () {}
     }
@@ -134,7 +130,8 @@ module.exports = ResourceWriter = {
       if (error != null) {
         return callback(error)
       }
-      return this._removeExtraneousFiles(resources, basePath, error => {
+      const { project_id, resources } = request
+      this._removeExtraneousFiles(request, resources, basePath, error => {
        if (error != null) {
          return callback(error)
        }
@@ -172,11 +169,15 @@ module.exports = ResourceWriter = {
     })
   },
 
-  _removeExtraneousFiles(resources, basePath, _callback) {
+  _removeExtraneousFiles(request, resources, basePath, _callback) {
     if (_callback == null) {
       _callback = function () {}
     }
-    const timer = new Metrics.Timer('unlink-output-files')
+    const timer = new Metrics.Timer(
+      'unlink-output-files',
+      1,
+      request.metricsOpts
+    )
     const callback = function (error, ...result) {
       timer.done()
       return _callback(error, ...Array.from(result))
@@ -8,6 +8,7 @@
 const Client = require('./helpers/Client')
 const request = require('request')
 const ClsiApp = require('./helpers/ClsiApp')
+const Settings = require('@overleaf/settings')
 
 describe('Simple LaTeX file', function () {
   before(function (done) {
@@ -24,6 +25,10 @@ Hello world
 `,
         },
       ],
+      options: {
+        metricsPath: 'clsi-perf',
+        metricsMethod: 'priority',
+      },
     }
     return ClsiApp.ensureRunning(() => {
       return Client.compile(
@@ -58,7 +63,7 @@ Hello world
     })
   })
 
-  return it('should provide the log for download', function (done) {
+  it('should provide the log for download', function (done) {
     const log = Client.getOutputFile(this.body, 'pdf')
     return request.get(log.url, (error, res, body) => {
       if (error) return done(error)
@@ -66,4 +71,21 @@ Hello world
       return done()
     })
   })
+
+  it('should gather personalized metrics', function (done) {
+    request.get(`${Settings.apis.clsi.url}/metrics`, (err, res, body) => {
+      if (err) return done(err)
+      body
+        .split('\n')
+        .some(line => {
+          return (
+            line.startsWith('compile') &&
+            line.includes('path="clsi-perf"') &&
+            line.includes('method="priority"')
+          )
+        })
+        .should.equal(true)
+      done()
+    })
+  })
 })
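Assuming standard prom-client text exposition, a line matched by the assertion above would look roughly like this (metric name, label order, and the full label set depend on the metric type):

    // compiles{status="",method="priority",path="clsi-perf"} 1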
@@ -56,15 +56,16 @@ describe('ResourceWriter', function () {
   describe('syncResourcesToDisk on a full request', function () {
     beforeEach(function () {
       this.resources = ['resource-1-mock', 'resource-2-mock', 'resource-3-mock']
+      this.request = {
+        project_id: this.project_id,
+        syncState: (this.syncState = '0123456789abcdef'),
+        resources: this.resources,
+      }
       this.ResourceWriter._writeResourceToDisk = sinon.stub().callsArg(3)
-      this.ResourceWriter._removeExtraneousFiles = sinon.stub().callsArg(2)
+      this.ResourceWriter._removeExtraneousFiles = sinon.stub().yields(null)
       this.ResourceStateManager.saveProjectState = sinon.stub().callsArg(3)
       return this.ResourceWriter.syncResourcesToDisk(
-        {
-          project_id: this.project_id,
-          syncState: (this.syncState = '0123456789abcdef'),
-          resources: this.resources,
-        },
+        this.request,
         this.basePath,
         this.callback
       )
@@ -72,7 +73,7 @@ describe('ResourceWriter', function () {
 
     it('should remove old files', function () {
       return this.ResourceWriter._removeExtraneousFiles
-        .calledWith(this.resources, this.basePath)
+        .calledWith(this.request, this.resources, this.basePath)
         .should.equal(true)
     })
 
@@ -98,22 +99,24 @@ describe('ResourceWriter', function () {
   describe('syncResourcesToDisk on an incremental update', function () {
     beforeEach(function () {
       this.resources = ['resource-1-mock']
+      this.request = {
+        project_id: this.project_id,
+        syncType: 'incremental',
+        syncState: (this.syncState = '1234567890abcdef'),
+        resources: this.resources,
+      }
+      this.fullResources = this.resources.concat(['file-1'])
       this.ResourceWriter._writeResourceToDisk = sinon.stub().callsArg(3)
       this.ResourceWriter._removeExtraneousFiles = sinon
         .stub()
-        .callsArgWith(2, null, (this.outputFiles = []), (this.allFiles = []))
+        .yields(null, (this.outputFiles = []), (this.allFiles = []))
       this.ResourceStateManager.checkProjectStateMatches = sinon
         .stub()
-        .callsArgWith(2, null, this.resources)
+        .callsArgWith(2, null, this.fullResources)
       this.ResourceStateManager.saveProjectState = sinon.stub().callsArg(3)
       this.ResourceStateManager.checkResourceFiles = sinon.stub().callsArg(3)
       return this.ResourceWriter.syncResourcesToDisk(
-        {
-          project_id: this.project_id,
-          syncType: 'incremental',
-          syncState: (this.syncState = '1234567890abcdef'),
-          resources: this.resources,
-        },
+        this.request,
         this.basePath,
         this.callback
       )
@@ -127,13 +130,13 @@ describe('ResourceWriter', function () {
 
     it('should remove old files', function () {
       return this.ResourceWriter._removeExtraneousFiles
-        .calledWith(this.resources, this.basePath)
+        .calledWith(this.request, this.fullResources, this.basePath)
         .should.equal(true)
     })
 
     it('should check each resource exists', function () {
       return this.ResourceStateManager.checkResourceFiles
-        .calledWith(this.resources, this.allFiles, this.basePath)
+        .calledWith(this.fullResources, this.allFiles, this.basePath)
         .should.equal(true)
     })
 
@@ -153,16 +156,17 @@ describe('ResourceWriter', function () {
   describe('syncResourcesToDisk on an incremental update when the state does not match', function () {
     beforeEach(function () {
       this.resources = ['resource-1-mock']
+      this.request = {
+        project_id: this.project_id,
+        syncType: 'incremental',
+        syncState: (this.syncState = '1234567890abcdef'),
+        resources: this.resources,
+      }
       this.ResourceStateManager.checkProjectStateMatches = sinon
         .stub()
         .callsArgWith(2, (this.error = new Error()))
       return this.ResourceWriter.syncResourcesToDisk(
-        {
-          project_id: this.project_id,
-          syncType: 'incremental',
-          syncState: (this.syncState = '1234567890abcdef'),
-          resources: this.resources,
-        },
+        this.request,
         this.basePath,
         this.callback
       )
@@ -237,11 +241,18 @@ describe('ResourceWriter', function () {
       },
     ]
     this.resources = 'mock-resources'
+    this.request = {
+      project_id: this.project_id,
+      syncType: 'incremental',
+      syncState: (this.syncState = '1234567890abcdef'),
+      resources: this.resources,
+    }
     this.OutputFileFinder.findOutputFiles = sinon
       .stub()
      .callsArgWith(2, null, this.output_files)
     this.ResourceWriter._deleteFileIfNotDirectory = sinon.stub().callsArg(1)
     return this.ResourceWriter._removeExtraneousFiles(
+      this.request,
       this.resources,
       this.basePath,
       this.callback
@@ -854,6 +854,7 @@ const ClsiManager = {
           enablePdfCaching:
             (Settings.enablePdfCaching && options.enablePdfCaching) || false,
           flags: flags,
+          metricsMethod: options.compileGroup,
         },
         rootResourcePath,
         resources,
@@ -621,6 +621,7 @@ describe('ClsiManager', function () {
             compileGroup: 'standard',
             enablePdfCaching: false,
             flags: undefined,
+            metricsMethod: 'standard',
           }, // "01234567890abcdef"
           rootResourcePath: 'main.tex',
           resources: [
@@ -715,6 +716,7 @@ describe('ClsiManager', function () {
             compileGroup: 'priority',
             enablePdfCaching: false,
             flags: undefined,
+            metricsMethod: 'priority',
           },
           rootResourcePath: 'main.tex',
           resources: [