2021-05-21 07:32:07 -04:00
|
|
|
import { v4 as uuid } from 'uuid'
|
2022-07-06 07:06:53 -04:00
|
|
|
import { fetchRange } from './features/pdf-preview/util/pdf-caching'
|
2021-06-09 09:45:55 -04:00
|
|
|
const OError = require('@overleaf/o-error')
|
|
|
|
|
2021-06-10 06:02:06 -04:00
|
|
|
// VERSION should get incremented when making changes to caching behavior or
// adjusting metrics collection.
// Keep in sync with PdfJsMetrics.
const VERSION = 3

// Matches "DELETE /project/:projectId/output" -- the clear-cache endpoint.
const CLEAR_CACHE_REQUEST_MATCHER = /^\/project\/[0-9a-f]{24}\/output$/
// Matches the compile endpoint for a project.
const COMPILE_REQUEST_MATCHER = /^\/project\/[0-9a-f]{24}\/compile$/
// Matches pdf output urls, optionally behind a "/zone/:z" prefix; capture
// group 2 holds the path without the zone prefix (see onFetch).
// NOTE(review): the dots in "output.pdf" are unescaped and match any
// character -- presumably intentional leniency; confirm before tightening.
const PDF_REQUEST_MATCHER =
  /^(\/zone\/.)?(\/project\/[0-9a-f]{24}\/.*\/output.pdf)$/
// Chunk size PDF.js uses when deciding between a single download and chunked
// range requests (see handleProbeRequest).
const PDF_JS_CHUNK_SIZE = 128 * 1024

// Each compile request defines a context (essentially the specific pdf file for
// that compile), requests for that pdf file can use the hashes in the compile
// response, which are stored in the context.
// Maps clientId -> { pdfs, metrics, cached, clsiServerId }.
const CLIENT_CONTEXT = new Map()
|
2021-06-08 09:05:25 -04:00
|
|
|
|
2021-06-09 09:46:06 -04:00
|
|
|
/**
 * Fetch the per-client caching context, creating it on first use.
 *
 * @param {string} clientId
 * @return {Object} context with { pdfs, metrics, cached } (clsiServerId is
 *   added later by registerPdfContext)
 */
function getClientContext(clientId) {
  const existing = CLIENT_CONTEXT.get(clientId)
  if (existing) {
    return existing
  }

  // First request from this client: start with a fresh context.
  const clientContext = {
    pdfs: new Map(),
    cached: new Set(),
    metrics: {
      version: VERSION,
      id: uuid(),
      epoch: Date.now(),
      failedCount: 0,
      tooLargeOverheadCount: 0,
      tooManyRequestsCount: 0,
      cachedCount: 0,
      cachedBytes: 0,
      fetchedCount: 0,
      fetchedBytes: 0,
      requestedCount: 0,
      requestedBytes: 0,
      compileCount: 0,
    },
  }
  CLIENT_CONTEXT.set(clientId, clientContext)
  // clean up old client maps
  expirePdfContexts()
  return clientContext
}
|
|
|
|
|
|
|
|
/**
 * Store a pdf context for a given output file, sharing the client-level
 * metrics and cached-range bookkeeping with it.
 *
 * @param {string} clientId
 * @param {string} path
 * @param {Object} pdfContext
 */
function registerPdfContext(clientId, path, pdfContext) {
  const clientContext = getClientContext(clientId)
  const { pdfs, metrics, cached, clsiServerId } = clientContext
  pdfContext.metrics = metrics
  pdfContext.cached = cached
  if (pdfContext.clsiServerId !== clsiServerId) {
    // VM changed, this invalidates all browser caches.
    clientContext.clsiServerId = pdfContext.clsiServerId
    cached.clear()
  }
  // we only need to keep the last 3 contexts; map keys iterate in insertion
  // order, so the oldest entries are dropped first
  while (pdfs.size >= 3) {
    const oldestPath = pdfs.keys().next().value
    pdfs.delete(oldestPath)
  }
  pdfs.set(path, pdfContext)
}
|
|
|
|
|
2021-06-09 09:46:06 -04:00
|
|
|
/**
 * Look up the pdf context registered for the given output file path.
 *
 * @param {string} clientId
 * @param {string} path
 * @return {Object|undefined}
 */
function getPdfContext(clientId, path) {
  const clientContext = getClientContext(clientId)
  return clientContext.pdfs.get(path)
}
|
|
|
|
|
|
|
|
/**
 * Discard client contexts for clients that are no longer connected.
 *
 * Best-effort background cleanup: the result is not awaited by callers, and
 * failures of clients.matchAll() are swallowed (previously the promise chain
 * had no rejection handler, risking an unhandled promise rejection).
 */
function expirePdfContexts() {
  // discard client maps for clients that are no longer connected
  self.clients
    .matchAll()
    .then(clientList => {
      const liveClientIds = new Set(clientList.map(client => client.id))
      for (const clientId of CLIENT_CONTEXT.keys()) {
        if (!liveClientIds.has(clientId)) {
          CLIENT_CONTEXT.delete(clientId)
        }
      }
    })
    .catch(() => {}) // best-effort; mirrors reportError's error handling
}
|
2021-05-17 05:38:18 -04:00
|
|
|
|
|
|
|
/**
 * Dispatch an intercepted request to the matching handler. Returning without
 * calling event.respondWith lets the request fall through to the network.
 *
 * @param {FetchEvent} event
 */
function onFetch(event) {
  const url = new URL(event.request.url)
  const path = url.pathname

  if (COMPILE_REQUEST_MATCHER.test(path)) {
    return processCompileRequest(event)
  }

  const pdfMatch = path.match(PDF_REQUEST_MATCHER)
  if (pdfMatch) {
    // group 2 is the pdf path without any zone prefix
    const ctx = getPdfContext(event.clientId, pdfMatch[2])
    if (ctx) {
      return processPdfRequest(event, ctx)
    }
  }

  const isClearCacheRequest =
    event.request.method === 'DELETE' &&
    CLEAR_CACHE_REQUEST_MATCHER.test(path)
  if (isClearCacheRequest) {
    return processClearCacheRequest(event)
  }

  // other request, ignore
}
|
|
|
|
|
2021-06-14 04:11:52 -04:00
|
|
|
/**
 * Forget all cached state for the given client.
 *
 * The DELETE request itself is not intercepted here; returning without
 * event.respondWith lets it fall through to the network.
 *
 * @param {FetchEvent} event
 */
function processClearCacheRequest(event) {
  CLIENT_CONTEXT.delete(event.clientId)
  // use default request proxy.
}
|
|
|
|
|
2021-06-09 09:46:06 -04:00
|
|
|
/**
 * Proxy a compile request, register its output files for pdf caching and
 * attach the service worker metrics to the response body.
 *
 * @param {FetchEvent} event
 */
function processCompileRequest(event) {
  event.respondWith(
    fetch(event.request).then(async response => {
      // non-200 responses pass through untouched
      if (response.status !== 200) return response

      const body = await response.json()
      handleCompileResponse(event, response, body)

      // Send the service workers metrics to the frontend.
      const { metrics } = getClientContext(event.clientId)
      metrics.compileCount++
      body.serviceWorkerMetrics = metrics

      return new Response(JSON.stringify(body), response)
    })
  )
}
|
|
|
|
|
2021-05-26 05:19:49 -04:00
|
|
|
/**
 * Answer PDF.js's initial probe request for a pdf that will be downloaded in
 * chunks, without serving any of the actual pdf content.
 *
 * @param {Request} request
 * @param {Object} file
 * @return {Response|undefined} a dummy 200 response for the probe request, or
 *   undefined when the request should be processed normally
 */
function handleProbeRequest(request, file) {
  // PDF.js starts the pdf download with a probe request that has no range
  // headers on it.
  // Upon seeing the response headers, it decides whether to upgrade the
  // transport to chunked requests or keep reading the response body.
  // For small PDFs (2*chunkSize = 2*128kB) it just sends one request; we will
  // fetch all the ranges in bulk and emit them.
  // For large PDFs it sends this probe request, aborts it before reading any
  // data and then sends multiple range requests.
  // It would be wasteful to action the probe request with all the ranges that
  // are available in the PDF and serve the full PDF content to PDF.js for it.
  // We emit a dummy response to the probe request instead. That triggers the
  // chunked transfer, and subsequently fewer ranges need to be requested --
  // only those of visible pages in the pdf viewer.
  // https://github.com/mozilla/pdf.js/blob/6fd899dc443425747098935207096328e7b55eb2/src/display/network_utils.js#L43-L47
  const isRangeRequest = request.headers.has('Range')
  const pdfJSWillUseChunkedTransfer = file.size > 2 * PDF_JS_CHUNK_SIZE
  if (isRangeRequest || !pdfJSWillUseChunkedTransfer) {
    // Not the probe request, or a pdf small enough to serve in one go.
    return
  }

  return new Response('', {
    status: 200,
    statusText: 'OK',
    headers: {
      'Accept-Ranges': 'bytes',
      'Content-Length': String(file.size),
      'Content-Type': 'application/pdf',
    },
  })
}
|
|
|
|
|
2021-05-17 05:38:18 -04:00
|
|
|
/**
 * Serve a pdf (range) request from the cached ranges of the last compile,
 * falling back to the network on error.
 *
 * @param {FetchEvent} event
 * @param {Object} file
 * @param {string} clsiServerId
 * @param {string} compileGroup
 * @param {Date} pdfCreatedAt
 * @param {Object} metrics
 * @param {Set} cached
 */
function processPdfRequest(
  event,
  { file, clsiServerId, compileGroup, pdfCreatedAt, metrics, cached }
) {
  const probeResponse = handleProbeRequest(event.request, file)
  if (probeResponse) {
    return event.respondWith(probeResponse)
  }

  // No Range header means the whole file is wanted.
  const rangeHeader =
    event.request.headers.get('Range') || `bytes=0-${file.size - 1}`
  const bounds = rangeHeader
    .slice('bytes='.length)
    .split('-')
    .map(i => parseInt(i, 10))
  const start = bounds[0]
  const last = bounds[1]
  // "last" is inclusive; fetchRange wants an exclusive end.
  const end = last + 1

  return event.respondWith(
    fetchRange({
      url: event.request.url,
      start,
      end,
      file,
      pdfCreatedAt,
      metrics,
      cached,
    })
      .then(blob => {
        // 206 Partial Content with the usual range response headers.
        return new Response(blob, {
          status: 206,
          headers: {
            'Accept-Ranges': 'bytes',
            'Content-Length': end - start,
            'Content-Range': `bytes ${start}-${last}/${file.size}`,
            'Content-Type': 'application/pdf',
          },
        })
      })
      .catch(error => {
        metrics.failedCount++
        reportError(event, OError.tag(error, 'failed to compose pdf response'))
        // Let the browser fetch the pdf from the network instead.
        return fetch(event.request)
      })
  )
}
|
|
|
|
|
2021-05-21 07:31:37 -04:00
|
|
|
/**
 * Pick the rendered pdf out of a successful compile response and register a
 * pdf context for it, so later range requests can be served from cache.
 *
 * @param {FetchEvent} event
 * @param {Response} response
 * @param {Object} body
 */
function handleCompileResponse(event, response, body) {
  if (!body || body.status !== 'success') return

  // only the first output.pdf entry is the pdf used for rendering
  const file = body.outputFiles.find(f => f.path === 'output.pdf')
  if (!file) return

  if (file.ranges?.length) {
    const { clsiServerId, compileGroup } = body
    registerPdfContext(event.clientId, file.url, {
      file,
      clsiServerId,
      compileGroup,
    })
  }
}
|
|
|
|
|
2021-06-09 09:45:55 -04:00
|
|
|
/**
 * Wrapper around onFetch that reports, rather than propagates, synchronous
 * errors thrown by the dispatch logic.
 *
 * @param {FetchEvent} event
 */
function onFetchWithErrorHandling(event) {
  try {
    onFetch(event)
  } catch (err) {
    reportError(event, OError.tag(err, 'low level error in onFetch'))
  }
}
|
2021-06-16 04:14:18 -04:00
|
|
|
// allow fetch event listener to be removed if necessary
const controller = new AbortController()

// listen to all network requests
self.addEventListener('fetch', onFetchWithErrorHandling, {
  signal: controller.signal,
})

// complete setup ASAP
self.addEventListener('install', event => {
  // activate the new worker version immediately, without waiting for old
  // clients to close
  event.waitUntil(self.skipWaiting())
})
self.addEventListener('activate', event => {
  // take control of already-open pages without requiring a reload
  event.waitUntil(self.clients.claim())
})
self.addEventListener('message', event => {
  // the frontend can opt out of the service worker at runtime
  if (event.data && event.data.type === 'disable') {
    controller.abort() // removes the fetch event listener
  }
})
|
2021-06-09 09:45:55 -04:00
|
|
|
|
|
|
|
/**
 * Forward an error to the client page for reporting. Best effort: delivery
 * failures are ignored.
 *
 * @param {FetchEvent} event
 * @param {Error} error
 */
function reportError(event, error) {
  self.clients
    .get(event.clientId)
    .then(client => {
      if (!client) {
        // The client disconnected.
        return
      }
      const payload = {
        extra: { url: event.request.url, info: OError.getFullInfo(error) },
        error: {
          name: error.name,
          message: error.message,
          stack: OError.getFullStack(error),
        },
      }
      client.postMessage(JSON.stringify(payload))
    })
    .catch(() => {})
}
|