Mirror of https://github.com/overleaf/overleaf.git (synced 2024-11-21 20:47:08 -05:00)
Merge pull request #10007 from overleaf/jpa-pdf-caching-latency-compute

[web] collect latencyCompute metric from pdf-caching

GitOrigin-RevId: f20fa2b54078a7106b2413432c4f47375878a5d6
This commit is contained in:
parent 10eb65e2d2
commit abaecac268

3 changed files with 55 additions and 2 deletions
@@ -4,7 +4,7 @@ import { trackPdfDownloadEnabled } from './pdf-caching-flags'
 // VERSION should get incremented when making changes to caching behavior or
 // adjusting metrics collection.
-const VERSION = 6
+const VERSION = 7
 
 // editing session id
 const EDITOR_SESSION_ID = uuid()
@@ -26,6 +26,8 @@ export function generatePdfCachingTransportFactory(PDFJS) {
     cachedBytes: 0,
     fetchedCount: 0,
     fetchedBytes: 0,
+    latencyComputeMax: 0,
+    latencyComputeTotal: 0,
     requestedCount: 0,
     requestedBytes: 0,
     oldUrlHitCount: 0,
@@ -118,6 +120,9 @@ export function generatePdfCachingTransportFactory(PDFJS) {
     // Be trigger-happy here until we reached a stable state of the feature.
     return null
   }
+  // Latency is collected per preview cycle.
+  metrics.latencyComputeMax = 0
+  metrics.latencyComputeTotal = 0
   return new PDFDataRangeTransport({
     url,
     pdfFile,
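Note: because both fields are reset at the start of each preview cycle, the reported numbers describe only the most recent compile. A minimal sketch of how two fetchRange reports fold into the per-cycle metrics (the durations are hypothetical; the folding logic mirrors the reportInto method added in the diff below):

// Hypothetical illustration of per-cycle aggregation.
const metrics = { latencyComputeMax: 0, latencyComputeTotal: 0 }

// Same folding as Timer.reportInto below.
function report(metrics, max, total) {
  if (max > metrics.latencyComputeMax) metrics.latencyComputeMax = max
  metrics.latencyComputeTotal += total
}

report(metrics, 12, 20) // first fetchRange  -> { max: 12, total: 20 }
report(metrics, 30, 35) // second fetchRange -> { max: 30, total: 55 }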
@@ -583,6 +583,37 @@ function addPrefetchingChunks({
   dynamicChunks.sort(sortByStartASC)
 }
 
+class Timer {
+  constructor() {
+    this.max = 0
+    this.total = 0
+    this.lastStart = 0
+  }
+
+  startBlockingCompute() {
+    this.lastStart = performance.now()
+  }
+
+  finishBlockingCompute() {
+    if (this.lastStart === 0) return
+    const last = performance.now() - this.lastStart
+    if (last > this.max) {
+      this.max = last
+    }
+    this.total += last
+    this.lastStart = 0
+  }
+
+  reportInto(metrics) {
+    const max = Math.ceil(this.max)
+    const total = Math.ceil(this.total)
+    if (max > metrics.latencyComputeMax) {
+      metrics.latencyComputeMax = max
+    }
+    metrics.latencyComputeTotal += total
+  }
+}
+
 /**
  *
  * @param {string} url
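A minimal usage sketch of the Timer above (busyWork is a hypothetical stand-in for the synchronous parsing and reassembly the real code performs): only the time between start/finish pairs is counted, so awaited IO in between is excluded.

const metrics = { latencyComputeMax: 0, latencyComputeTotal: 0 }

function busyWork(n) {
  // stand-in for synchronous compute between IO waits
  let x = 0
  for (let i = 0; i < n; i++) x += Math.sqrt(i)
  return x
}

const timer = new Timer()
timer.startBlockingCompute()
busyWork(1e6) // first blocking segment
timer.finishBlockingCompute()
// ...awaited network IO would sit here, with the timer stopped...
timer.startBlockingCompute()
busyWork(2e6) // second blocking segment
timer.finishBlockingCompute()

timer.reportInto(metrics)
// latencyComputeTotal: ceil of the summed segment durations in ms
// latencyComputeMax: ceil of the longest single segment in ms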
@@ -612,6 +643,8 @@ export async function fetchRange({
   cachedUrlLookupEnabled,
   abortSignal,
 }) {
+  const timer = new Timer()
+  timer.startBlockingCompute()
   preprocessFileOnce({ file, usageScore, cachedUrls })
   const startXRefTableRange =
     Math.floor(file.startXRefTable / PDF_JS_CHUNK_SIZE) * PDF_JS_CHUNK_SIZE
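Taken together, the hunks below instrument fetchRange roughly as follows (a simplified sketch, not the actual code; see the hunks for the real call sites):

// Simplified control-flow sketch of the instrumentation in fetchRange.
async function fetchRangeSketch(metrics) {
  const timer = new Timer()
  timer.startBlockingCompute() // preprocessing + chunk planning
  // ...synchronous work: preprocessFileOnce, chunk selection...
  timer.finishBlockingCompute() // pause across the network fan-out
  // await Promise.all(...)  -- each per-chunk handler times its own
  //                            synchronous segments around its awaits
  timer.startBlockingCompute() // reassembly of the response blob
  // ...synchronous work: flatten, sort, reassembledBlob.set(...)...
  timer.finishBlockingCompute()
  timer.reportInto(metrics) // fold max/total into the cycle metrics
}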
@@ -650,6 +683,8 @@ export async function fetchRange({
   ) {
     // fall back to the original range request when no chunks are cached.
     // Exception: The first range should fetch the xref table as well.
+    timer.finishBlockingCompute()
+    timer.reportInto(metrics)
     trackDownloadStats(metrics, {
       size,
       cachedCount: 0,
@@ -728,6 +763,9 @@ export async function fetchRange({
   let fetchedBytes = 0
   const reassembledBlob = new Uint8Array(size)
 
+  // Pause while performing network IO
+  timer.finishBlockingCompute()
+
   const rawResponses = await Promise.all(
     requests.map(async ({ chunk, url, init }) => {
       try {
|
@ -739,6 +777,7 @@ export async function fetchRange({
|
||||||
metrics,
|
metrics,
|
||||||
cachedUrlLookupEnabled,
|
cachedUrlLookupEnabled,
|
||||||
})
|
})
|
||||||
|
timer.startBlockingCompute()
|
||||||
const boundary = getMultipartBoundary(response, chunk)
|
const boundary = getMultipartBoundary(response, chunk)
|
||||||
const blobFetchDate = getServerTime(response)
|
const blobFetchDate = getServerTime(response)
|
||||||
const blobSize = getResponseSize(response)
|
const blobSize = getResponseSize(response)
|
||||||
|
@@ -761,7 +800,10 @@ export async function fetchRange({
            fetchedBytes += blobSize
          }
        }
-        const data = backFillObjectContext(chunk, await response.arrayBuffer())
+        timer.finishBlockingCompute()
+        const buf = await response.arrayBuffer()
+        timer.startBlockingCompute()
+        const data = backFillObjectContext(chunk, buf)
        if (!Array.isArray(chunk)) {
          return [{ chunk, data }]
        }
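The hunk above splits the former one-liner so that the awaited arrayBuffer() read happens while the timer is stopped; only the synchronous backFillObjectContext call is billed as compute. The same pause-around-await pattern in isolation (timedDecode is a hypothetical name):

// Hypothetical distillation of the pause-around-await pattern.
async function timedDecode(timer, response, chunk) {
  timer.finishBlockingCompute() // the await below is IO, not compute
  const buf = await response.arrayBuffer()
  timer.startBlockingCompute() // synchronous decoding resumes
  return backFillObjectContext(chunk, buf)
}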
|
@ -774,10 +816,14 @@ export async function fetchRange({
|
||||||
})
|
})
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
throw OError.tag(err, 'cannot fetch chunk', { chunk, url, init })
|
throw OError.tag(err, 'cannot fetch chunk', { chunk, url, init })
|
||||||
|
} finally {
|
||||||
|
timer.finishBlockingCompute()
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
)
|
)
|
||||||
|
|
||||||
|
timer.startBlockingCompute()
|
||||||
|
|
||||||
rawResponses
|
rawResponses
|
||||||
.flat() // flatten after splitting multipart responses
|
.flat() // flatten after splitting multipart responses
|
||||||
.concat(prefetched.map(chunk => ({ chunk, data: chunk.buffer })))
|
.concat(prefetched.map(chunk => ({ chunk, data: chunk.buffer })))
|
||||||
|
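The finally block relies on finishBlockingCompute being a no-op when the timer is not running (the lastStart === 0 guard), so a handler that throws between segments still gets its partial compute recorded without double counting. A tiny sketch:

const t = new Timer()
t.startBlockingCompute()
t.finishBlockingCompute() // records the segment, resets lastStart to 0
t.finishBlockingCompute() // no-op thanks to the lastStart === 0 guard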
@@ -805,6 +851,8 @@ export async function fetchRange({
      reassembledBlob.set(data, insertPosition)
    })
 
+  timer.finishBlockingCompute()
+  timer.reportInto(metrics)
  trackDownloadStats(metrics, {
    size,
    cachedCount,