Mirror of https://github.com/overleaf/overleaf.git, synced 2024-11-21 20:47:08 -05:00
Merge pull request #4060 from overleaf/jpa-service-worker-metrics
[misc] serviceWorker: collect metrics for cached vs fetched performance

GitOrigin-RevId: db29bc77801d71008ba61ffb7e335a67cd5bf16d
This commit is contained in: parent e0616096e2, commit 9ad248af41.
1 changed file with 73 additions and 4 deletions.
@@ -3,6 +3,25 @@ const MIN_CHUNK_SIZE = 128 * 1024
 
 const PDF_FILES = new Map()
 
+const METRICS = {
+  epoch: Date.now(),
+  cachedBytes: 0,
+  fetchedBytes: 0,
+  requestedBytes: 0,
+}
+
+/**
+ *
+ * @param {number} size
+ * @param {number} cachedBytes
+ * @param {number} fetchedBytes
+ */
+function trackStats({ size, cachedBytes, fetchedBytes }) {
+  METRICS.cachedBytes += cachedBytes
+  METRICS.fetchedBytes += fetchedBytes
+  METRICS.requestedBytes += size
+}
+
 /**
  * @param {FetchEvent} event
  */
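Not part of the diff: a minimal sketch of how the new counters accumulate over the worker's lifetime, using illustrative byte counts.

// Illustration only: two PDF range requests, the first half-served from cache.
trackStats({ size: 196608, cachedBytes: 131072, fetchedBytes: 65536 })
trackStats({ size: 65536, cachedBytes: 0, fetchedBytes: 65536 })
// METRICS now reads { epoch: <worker start time>, cachedBytes: 131072,
//                     fetchedBytes: 131072, requestedBytes: 262144 }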
@@ -18,6 +37,7 @@ function onFetch(event) {
   if (ctx) {
     return processPdfRequest(event, ctx)
   }
 
   // other request, ignore
 }
@@ -27,8 +47,9 @@ function processCompileRequest(event) {
       if (response.status !== 200) return response
 
       return response.json().then(body => {
-        handleCompileResponse(body)
-        // The response body is consumed, serialize it again.
+        handleCompileResponse(response, body)
+        // Send the service workers metrics to the frontend.
+        body.serviceWorkerMetrics = METRICS
         return new Response(JSON.stringify(body), response)
       })
     })
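Not part of the diff: the frontend-side consumer of the attached metrics is not shown here; a hypothetical reader of the compile response JSON might look like the sketch below (names are illustrative).

// Hypothetical consumer: the compile response body now carries a
// serviceWorkerMetrics object alongside outputFiles.
function reportServiceWorkerMetrics(compileResponseBody) {
  const metrics = compileResponseBody.serviceWorkerMetrics
  if (!metrics) return // service worker inactive, or an older worker version
  const { epoch, cachedBytes, fetchedBytes, requestedBytes } = metrics
  console.log('sw pdf bandwidth', {
    uptimeMs: Date.now() - epoch,
    cachedBytes,
    fetchedBytes,
    requestedBytes,
  })
}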
@@ -41,8 +62,12 @@ function processCompileRequest(event) {
  * @param {Object} file
  * @param {string} clsiServerId
  * @param {string} compileGroup
+ * @param {Date} pdfCreatedAt
  */
-function processPdfRequest(event, { file, clsiServerId, compileGroup }) {
+function processPdfRequest(
+  event,
+  { file, clsiServerId, compileGroup, pdfCreatedAt }
+) {
   if (!event.request.headers.has('Range') && file.size > MIN_CHUNK_SIZE) {
     // skip probe request
     const headers = new Headers()
@@ -96,6 +121,8 @@ function processPdfRequest(event, { file, clsiServerId, compileGroup }) {
     })
   )
   const size = end - start
+  let cachedBytes = 0
+  let fetchedBytes = 0
   const reAssembledBlob = new Uint8Array(size)
   event.respondWith(
     Promise.all(
@@ -109,6 +136,22 @@ function processPdfRequest(event, { file, clsiServerId, compileGroup }) {
               }`
             )
           }
+          const blobFetchDate = getServerTime(response)
+          const blobSize = getResponseSize(response)
+          if (blobFetchDate && blobSize) {
+            const chunkSize =
+              Math.min(end, chunk.end) - Math.max(start, chunk.start)
+            // Example: 2MB PDF, 1MB image, 128KB PDF.js chunk.
+            //     | pdf.js chunk |
+            //   | A BIG IMAGE BLOB |
+            // | THE FULL PDF |
+            if (blobFetchDate < pdfCreatedAt) {
+              cachedBytes += chunkSize
+            } else {
+              // Blobs are fetched in bulk.
+              fetchedBytes += blobSize
+            }
+          }
           return response.arrayBuffer()
         })
        .then(arrayBuffer => {
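Not part of the diff: a worked instance of the overlap arithmetic above, following the 2MB PDF / 1MB image / 128KB chunk comment (byte offsets are illustrative).

// Illustration only: a 128KB PDF.js chunk request overlapping a larger blob.
const start = 1048576                          // requested range [1MB, 1MB + 128KB)
const end = start + 131072
const chunk = { start: 524288, end: 1572864 }  // the 1MB image blob
const chunkSize = Math.min(end, chunk.end) - Math.max(start, chunk.start)
// chunkSize === 131072: only the overlapping bytes are credited to cachedBytes,
// whereas a freshly fetched blob is counted at its full size ("fetched in bulk").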
@@ -134,6 +177,8 @@ function processPdfRequest(event, { file, clsiServerId, compileGroup }) {
          const insertPosition = Math.max(chunk.start - start, 0)
          reAssembledBlob.set(new Uint8Array(arrayBuffer), insertPosition)
        })
+
+        trackStats({ size, cachedBytes, fetchedBytes })
        return new Response(reAssembledBlob, {
          status: 206,
          headers: {
@@ -152,16 +197,40 @@ function processPdfRequest(event, { file, clsiServerId, compileGroup }) {
   }
 }
 
 /**
+ *
+ * @param {Response} response
+ */
+function getServerTime(response) {
+  const raw = response.headers.get('Date')
+  if (!raw) return undefined
+  return new Date(raw)
+}
+
+/**
+ *
+ * @param {Response} response
+ */
+function getResponseSize(response) {
+  const raw = response.headers.get('Content-Length')
+  if (!raw) return 0
+  return parseInt(raw, 10)
+}
+
+/**
+ * @param {Response} response
  * @param {Object} body
  */
-function handleCompileResponse(body) {
+function handleCompileResponse(response, body) {
   if (!body || body.status !== 'success') return
 
+  const pdfCreatedAt = getServerTime(response)
+
   for (const file of body.outputFiles) {
     if (file.path !== 'output.pdf') continue // not the pdf used for rendering
     if (file.ranges) {
       const { clsiServerId, compileGroup } = body
       PDF_FILES.set(file.url, {
+        pdfCreatedAt,
         file,
         clsiServerId,
         compileGroup,
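Not part of the diff: the helpers read standard HTTP headers, and the classification appears to hinge on comparing each blob's Date header against the compile's Date (pdfCreatedAt). A minimal sketch with constructed objects; the header values are illustrative.

// Illustration only.
const res = new Response(null, {
  headers: {
    Date: 'Wed, 21 Apr 2021 10:00:00 GMT',
    'Content-Length': '131072',
  },
})
const blobFetchDate = getServerTime(res)  // a Date, or undefined if the header is absent
const blobSize = getResponseSize(res)     // 131072, or 0 if the header is absent
// Heuristic (as read from the diff): a blob whose server Date predates the
// compile's Date was already available before this compile, so its bytes are
// counted as cached; otherwise the whole blob is counted as fetched.
const pdfCreatedAt = new Date('Wed, 21 Apr 2021 10:05:00 GMT')
console.log(blobFetchDate < pdfCreatedAt) // true -> credited to cachedBytes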