Merge pull request #8776 from overleaf/jpa-drop-service-worker

[web] goodbye service worker

GitOrigin-RevId: ce85d4850faba15c5877ce1f3e78026de30c6eae
Jakob Ackermann 2022-07-08 09:15:13 +01:00 committed by Copybot
parent db83832485
commit f11e1a83cd
19 changed files with 47 additions and 536 deletions

View file

@@ -50,8 +50,6 @@ public/minjs
 public/stylesheets
 public/fonts
 public/images
-public/serviceWorker.js
-public/serviceWorker.js.map
 Gemfile.lock

View file

@@ -1055,7 +1055,7 @@ const ProjectController = {
     }
     // Let the user opt-in only.
     const v = req.query['pdf-caching-mode']
-    if (['service-worker', 'no-service-worker'].includes(v)) {
+    if (['enabled'].includes(v)) {
       return v
     }
     return ''
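
Note: with the service worker gone, 'enabled' is the only value the opt-in
query parameter accepts. A minimal sketch of the surviving logic (the helper
name getPdfCachingMode is illustrative, not taken from the file):

    // Resolve the pdf-caching opt-in from the request query string.
    // 'enabled' passes through; anything else, including the old
    // 'service-worker' / 'no-service-worker' modes, falls back to ''.
    function getPdfCachingMode(req) {
      const v = req.query['pdf-caching-mode']
      if (['enabled'].includes(v)) {
        return v
      }
      return ''
    }

    // getPdfCachingMode({ query: { 'pdf-caching-mode': 'enabled' } })        // 'enabled'
    // getPdfCachingMode({ query: { 'pdf-caching-mode': 'service-worker' } }) // ''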

View file

@@ -108,12 +108,6 @@ const AdminController = {
     return res.sendStatus(200)
   },

-  unregisterServiceWorker: (req, res) => {
-    logger.warn('unregistering service worker for all users')
-    EditorRealTimeController.emitToAll('unregisterServiceWorker')
-    return res.sendStatus(200)
-  },
-
   openEditor(req, res) {
     logger.warn('opening editor')
     Settings.editorIsOpen = true

View file

@@ -120,13 +120,6 @@ if (Settings.exposeHostname) {
   })
 }

-webRouter.get(
-  '/serviceWorker.js',
-  express.static(Path.join(__dirname, '/../../../public'), {
-    maxAge: oneDayInMilliseconds,
-    setHeaders: csp.removeCSPHeaders,
-  })
-)
 webRouter.use(
   express.static(Path.join(__dirname, '/../../../public'), {
     maxAge: STATIC_CACHE_AGE,

View file

@@ -1087,11 +1087,6 @@ function initialize(webRouter, privateApiRouter, publicApiRouter) {
     AuthorizationMiddleware.ensureUserIsSiteAdmin,
     AdminController.clearMessages
   )
-  webRouter.post(
-    '/admin/unregisterServiceWorker',
-    AuthorizationMiddleware.ensureUserIsSiteAdmin,
-    AdminController.unregisterServiceWorker
-  )

   privateApiRouter.get('/perfTest', (req, res) => {
     plainTextResponse(res, 'hello')

View file

@@ -98,13 +98,3 @@ block content
               input.form-control(type='text', name='user_id', placeholder='user_id', required)
             .form-group
               button.btn-primary.btn(type='submit') Poll
-      .tab-pane(
-        role="tabpanel"
-        id='advanced'
-      )
-        .row-spaced
-          form(method='post',action='/admin/unregisterServiceWorker')
-            input(name="_csrf", type="hidden", value=csrfToken)
-            button.btn.btn-danger(type="submit") Unregister service worker
-          p.small Will force service worker reload for all users with the editor open.

View file

@@ -536,9 +536,6 @@ module.exports = {
   // By default turn on feature flag, can be overridden per request.
   enablePdfCaching: process.env.ENABLE_PDF_CACHING === 'true',

-  // Whether to disable any existing service worker on the next load of the editor
-  resetServiceWorker: process.env.RESET_SERVICE_WORKER === 'true',
-
   // Maximum size of text documents in the real-time editing system.
   max_doc_length: 2 * 1024 * 1024, // 2mb

View file

@@ -166,8 +166,8 @@ export default class DocumentCompiler {
       params.set('auto_compile', 'true')
     }

-    // use the feature flag to enable PDF caching in a ServiceWorker
-    if (getMeta('ol-pdfCachingMode')) {
+    // use the feature flag to enable PDF caching
+    if (getMeta('ol-pdfCachingMode') === 'enabled') {
       params.set('enable_pdf_caching', 'true')
     }
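
Note: getMeta reads a value that the server renders into the page as a meta
tag. A rough, illustrative equivalent of the lookup used above (the real
helper lives in frontend/js/utils/meta and may differ):

    // Read a server-rendered <meta> value from the document.
    function getMetaSketch(name) {
      const el = document.querySelector(`meta[name="${name}"]`)
      return el ? el.getAttribute('content') : undefined
    }

    // With <meta name="ol-pdfCachingMode" content="enabled"> in the page,
    // the compiler adds enable_pdf_caching=true to the compile request.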

View file

@@ -4,15 +4,10 @@ import getMeta from '../../../utils/meta'
 // VERSION should get incremented when making changes to caching behavior or
 // adjusting metrics collection.
-// Keep in sync with the service worker.
 const VERSION = 3

-const pdfJsMetrics = {
-  version: VERSION,
-  id: uuid(),
-  epoch: Date.now(),
-  totalBandwidth: 0,
-}
+// editing session id
+const EDITOR_SESSION_ID = uuid()

 let pdfCachingMetrics
@@ -20,13 +15,10 @@ export function setCachingMetrics(metrics) {
   pdfCachingMetrics = metrics
 }

-const SAMPLING_RATE = 0.01
-
 export function trackPdfDownload(response, compileTimeClientE2E) {
-  const { serviceWorkerMetrics, stats, timings } = response
+  const { stats, timings } = response

   const t0 = performance.now()
-  let bandwidth = 0
   const deliveryLatencies = {
     compileTimeClientE2E,
     compileTimeServerE2E: timings?.compileE2E,
@@ -46,10 +38,6 @@ export function trackPdfDownload(response, compileTimeClientE2E) {
     }
     done({ latencyFetch, latencyRender })
   }
-  function updateConsumedBandwidth(bytes) {
-    pdfJsMetrics.totalBandwidth += bytes - bandwidth
-    bandwidth = bytes
-  }
   let done
   const onFirstRenderDone = new Promise(resolve => {
     done = resolve
@@ -66,16 +54,11 @@ export function trackPdfDownload(response, compileTimeClientE2E) {
         timings,
       })
     })
-    if (getMeta('ol-pdfCachingMode') === 'service-worker') {
-      // Submit (serviceWorker) bandwidth counter separate from compile context.
-      submitPDFBandwidth({ pdfJsMetrics, serviceWorkerMetrics })
-    }
   }

   return {
     deliveryLatencies,
     firstRenderDone,
-    updateConsumedBandwidth,
   }
 }
@@ -86,39 +69,9 @@ function submitCompileMetrics(metrics) {
     latencyFetch,
     latencyRender,
     compileTimeClientE2E,
-    id: pdfJsMetrics.id,
+    id: EDITOR_SESSION_ID,
     ...(pdfCachingMetrics || {}),
   }
   sl_console.log('/event/compile-metrics', JSON.stringify(metrics))
-  sendMB('compile-metrics-v6', leanMetrics, SAMPLING_RATE)
-}
-
-function submitPDFBandwidth(metrics) {
-  const metricsFlat = {}
-  Object.entries(metrics).forEach(([section, items]) => {
-    if (!items) return
-    Object.entries(items).forEach(([key, value]) => {
-      metricsFlat[section + '_' + key] = value
-    })
-  })
-  const leanMetrics = {}
-  Object.entries(metricsFlat).forEach(([metric, value]) => {
-    if (
-      [
-        'serviceWorkerMetrics_id',
-        'serviceWorkerMetrics_cachedBytes',
-        'serviceWorkerMetrics_fetchedBytes',
-        'serviceWorkerMetrics_requestedBytes',
-        'serviceWorkerMetrics_version',
-        'serviceWorkerMetrics_epoch',
-      ].includes(metric)
-    ) {
-      leanMetrics[metric] = value
-    }
-  })
-  if (Object.entries(leanMetrics).length === 0) {
-    return
-  }
-  sl_console.log('/event/pdf-bandwidth', JSON.stringify(metrics))
-  sendMB('pdf-bandwidth-v6', leanMetrics, SAMPLING_RATE)
+  sendMB('compile-metrics-v6', leanMetrics)
 }
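
Note: dropping SAMPLING_RATE (0.01) means compile metrics are submitted for
every tracked compile rather than for a 1% sample, assuming sendMB treats a
missing rate argument as "always send":

    // before: report roughly 1 in 100 events
    // sendMB('compile-metrics-v6', leanMetrics, 0.01)
    // after: report every event
    sendMB('compile-metrics-v6', leanMetrics)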

View file

@@ -5,7 +5,6 @@ import { v4 as uuid } from 'uuid'
 // Warnings that may disappear after a second LaTeX pass
 const TRANSIENT_WARNING_REGEX = /^(Reference|Citation).+undefined on input line/

-const searchParams = new URLSearchParams(window.location.search)
-
 export function handleOutputFiles(outputFiles, projectId, data) {
   const result = {}
@@ -23,14 +22,9 @@ export function handleOutputFiles(outputFiles, projectId, data) {
     params.set('clsiserverid', data.clsiServerId)
   }

-  if (searchParams.get('verify_chunks') === 'true') {
-    // Instruct the serviceWorker to verify composed ranges.
-    params.set('verify_chunks', 'true')
-  }
-
-  if (getMeta('ol-pdfCachingMode')) {
+  if (getMeta('ol-pdfCachingMode') === 'enabled') {
     // Tag traffic that uses the pdf caching logic.
-    params.set('enable_pdf_caching', getMeta('ol-pdfCachingMode'))
+    params.set('enable_pdf_caching', 'true')
   }

   result.pdfUrl = `${buildURL(outputFile, data.pdfDownloadDomain)}?${params}`
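
Note: enable_pdf_caching is now a plain boolean flag instead of carrying the
caching mode string. A sketch of the resulting URL construction (outputFileUrl
and the clsiserverid value are illustrative):

    const params = new URLSearchParams()
    params.set('clsiserverid', 'clsi-123') // illustrative value
    if (getMeta('ol-pdfCachingMode') === 'enabled') {
      params.set('enable_pdf_caching', 'true') // no longer the mode string
    }
    const pdfUrl = `${outputFileUrl}?${params}`
    // e.g. .../output.pdf?clsiserverid=clsi-123&enable_pdf_caching=true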

View file

@@ -4,7 +4,7 @@ import { captureException } from '../../../infrastructure/error-reporter'
 import { setCachingMetrics } from './metrics'

 export function generatePdfCachingTransportFactory(PDFJS) {
-  if (getMeta('ol-pdfCachingMode') !== 'no-service-worker') {
+  if (getMeta('ol-pdfCachingMode') !== 'enabled') {
     return () => null
   }
   const cached = new Set()
@@ -19,6 +19,8 @@ export function generatePdfCachingTransportFactory(PDFJS) {
     requestedCount: 0,
     requestedBytes: 0,
   }
+  const verifyChunks =
+    new URLSearchParams(window.location.search).get('verify_chunks') === 'true'
   setCachingMetrics(metrics)

   class PDFDataRangeTransport extends PDFJS.PDFDataRangeTransport {
@@ -37,6 +39,7 @@ export function generatePdfCachingTransportFactory(PDFJS) {
         file: this.pdfFile,
         metrics,
         cached,
+        verifyChunks,
       })
         .catch(err => {
           metrics.failedCount++
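
Note: the verify_chunks debug flag is now read once from the editor page URL
and passed along explicitly, instead of being appended to the PDF request URL
for the service worker to sniff. For example, opening the editor with
?verify_chunks=true in the query string gives:

    const verifyChunks =
      new URLSearchParams('?verify_chunks=true').get('verify_chunks') === 'true'
    console.log(verifyChunks) // true -- each fetched range is then re-verified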

View file

@@ -318,8 +318,17 @@ async function verifyRange({ url, start, end, metrics, actual }) {
  * @param {Object} file
  * @param {Object} metrics
  * @param {Set} cached
+ * @param {boolean} verifyChunks
  */
-export async function fetchRange({ url, start, end, file, metrics, cached }) {
+export async function fetchRange({
+  url,
+  start,
+  end,
+  file,
+  metrics,
+  cached,
+  verifyChunks,
+}) {
   file.createdAt = new Date(file.createdAt)
   backfillEdgeBounds(file)
@@ -477,7 +486,7 @@ export async function fetchRange({ url, start, end, file, metrics, cached }) {
     fetchedBytes,
   })

-  if (url.includes('verify_chunks=true')) {
+  if (verifyChunks) {
     return await verifyRange({
       url,
       start,
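
Note: a hypothetical call site for the new signature, with verification as an
explicit argument rather than URL sniffing (all values illustrative):

    const blob = await fetchRange({
      url: pdfUrl,        // URL of output.pdf (illustrative)
      start: 0,
      end: 128 * 1024,    // one 128 kB PDF.js chunk
      file,               // output file descriptor from the compile response
      metrics,
      cached,
      verifyChunks: true, // was: url.includes('verify_chunks=true')
    })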

View file

@@ -1,73 +0,0 @@
-import { captureException } from '../../../infrastructure/error-reporter'
-
-const OError = require('@overleaf/o-error')
-
-let pendingWorkerSetup = Promise.resolve()
-
-function supportsServiceWorker() {
-  return 'serviceWorker' in navigator
-}
-
-export function waitForServiceWorker() {
-  return pendingWorkerSetup
-}
-
-export function loadServiceWorker(options) {
-  if (supportsServiceWorker()) {
-    const workerSetup = navigator.serviceWorker
-      .register('/serviceWorker.js', {
-        scope: '/project/',
-      })
-      .then(() => {
-        navigator.serviceWorker.addEventListener('message', event => {
-          let ctx
-          try {
-            ctx = JSON.parse(event.data)
-          } catch (e) {
-            return
-          }
-          if (!ctx || !ctx.error || !ctx.extra) return
-          const err = OError.tag(ctx.error, 'Error in serviceWorker')
-          const fullError = new Error()
-          fullError.name = err.name
-          fullError.message = err.message
-          fullError.stack = OError.getFullStack(err)
-          captureException(fullError, { extra: ctx.extra })
-        })
-      })
-      .catch(error =>
-        captureException(OError.tag(error, 'Cannot register serviceWorker'))
-      )
-    if (options && options.timeout > 0) {
-      const workerTimeout = new Promise(resolve => {
-        setTimeout(resolve, options.timeout)
-      })
-      pendingWorkerSetup = Promise.race([workerSetup, workerTimeout])
-    } else {
-      pendingWorkerSetup = workerSetup
-    }
-  }
-}
-
-export function unregisterServiceWorker() {
-  if (supportsServiceWorker()) {
-    if (navigator.serviceWorker.controller) {
-      navigator.serviceWorker.controller.postMessage({
-        type: 'disable',
-      })
-    }
-    navigator.serviceWorker
-      .getRegistrations()
-      .catch(error => {
-        // fail silently if permission not given (e.g. SecurityError)
-        console.error('error listing service worker registrations', error)
-        return []
-      })
-      .then(registrations => {
-        registrations.forEach(worker => {
-          worker.unregister()
-        })
-      })
-  }
-}

View file

@@ -31,10 +31,6 @@ import MetadataManager from './ide/metadata/MetadataManager'
 import './ide/review-panel/ReviewPanelManager'
 import OutlineManager from './features/outline/outline-manager'
 import SafariScrollPatcher from './ide/SafariScrollPatcher'
-import {
-  loadServiceWorker,
-  unregisterServiceWorker,
-} from './features/pdf-preview/util/service-worker'
 import './ide/cobranding/CobrandingDataService'
 import './ide/settings/index'
 import './ide/chat/index'
@@ -69,6 +65,7 @@ import './features/pdf-preview/controllers/pdf-preview-controller'
 import './features/share-project-modal/controllers/react-share-project-modal-controller'
 import './features/source-editor/controllers/editor-switch-controller'
 import getMeta from './utils/meta'
+import { cleanupServiceWorker } from './utils/service-worker-cleanup'

 App.controller(
   'IdeController',
@@ -418,9 +415,6 @@ If the project has been renamed please look in your project list for a new proje
       x => x[1]
     )

-    // Allow service worker to be removed via the websocket
-    ide.$scope.$on('service-worker:unregister', unregisterServiceWorker)
-
     // Listen for editor:lint event from CM6 linter
     window.addEventListener('editor:lint', event => {
       $scope.hasLintingError = event.detail.hasLintingError
@@ -435,11 +429,7 @@ If the project has been renamed please look in your project list for a new proje
   }
 )

-if (getMeta('ol-pdfCachingMode') === 'service-worker') {
-  loadServiceWorker()
-} else {
-  unregisterServiceWorker()
-}
+cleanupServiceWorker()

 angular.module('SharelatexApp').config(function ($provide) {
   $provide.decorator('$browser', [

View file

@@ -274,11 +274,6 @@ The editor will refresh automatically in ${delay} seconds.\
       sl_console.log('Reconnect gracefully')
       this.reconnectGracefully()
     })
-
-    this.ide.socket.on('unregisterServiceWorker', () => {
-      sl_console.log('Unregister service worker')
-      this.$scope.$broadcast('service-worker:unregister')
-    })
   }

   updateConnectionManagerState(state) {

View file

@@ -1,330 +0,0 @@
-import { v4 as uuid } from 'uuid'
-import { fetchRange } from './features/pdf-preview/util/pdf-caching'
-
-const OError = require('@overleaf/o-error')
-
-// VERSION should get incremented when making changes to caching behavior or
-// adjusting metrics collection.
-// Keep in sync with PdfJsMetrics.
-const VERSION = 3
-
-const CLEAR_CACHE_REQUEST_MATCHER = /^\/project\/[0-9a-f]{24}\/output$/
-const COMPILE_REQUEST_MATCHER = /^\/project\/[0-9a-f]{24}\/compile$/
-const PDF_REQUEST_MATCHER =
-  /^(\/zone\/.)?(\/project\/[0-9a-f]{24}\/.*\/output.pdf)$/
-const PDF_JS_CHUNK_SIZE = 128 * 1024
-
-// Each compile request defines a context (essentially the specific pdf file for
-// that compile), requests for that pdf file can use the hashes in the compile
-// response, which are stored in the context.
-const CLIENT_CONTEXT = new Map()
-
-/**
- * @param {string} clientId
- */
-function getClientContext(clientId) {
-  let clientContext = CLIENT_CONTEXT.get(clientId)
-  if (!clientContext) {
-    const cached = new Set()
-    const pdfs = new Map()
-    const metrics = {
-      version: VERSION,
-      id: uuid(),
-      epoch: Date.now(),
-      failedCount: 0,
-      tooLargeOverheadCount: 0,
-      tooManyRequestsCount: 0,
-      cachedCount: 0,
-      cachedBytes: 0,
-      fetchedCount: 0,
-      fetchedBytes: 0,
-      requestedCount: 0,
-      requestedBytes: 0,
-      compileCount: 0,
-    }
-    clientContext = { pdfs, metrics, cached }
-    CLIENT_CONTEXT.set(clientId, clientContext)
-    // clean up old client maps
-    expirePdfContexts()
-  }
-  return clientContext
-}
-
-/**
- * @param {string} clientId
- * @param {string} path
- * @param {Object} pdfContext
- */
-function registerPdfContext(clientId, path, pdfContext) {
-  const clientContext = getClientContext(clientId)
-  const { pdfs, metrics, cached, clsiServerId } = clientContext
-  pdfContext.metrics = metrics
-  pdfContext.cached = cached
-  if (pdfContext.clsiServerId !== clsiServerId) {
-    // VM changed, this invalidates all browser caches.
-    clientContext.clsiServerId = pdfContext.clsiServerId
-    cached.clear()
-  }
-  // we only need to keep the last 3 contexts
-  for (const key of pdfs.keys()) {
-    if (pdfs.size < 3) {
-      break
-    }
-    pdfs.delete(key) // the map keys are returned in insertion order, so we are deleting the oldest entry here
-  }
-  pdfs.set(path, pdfContext)
-}
-
-/**
- * @param {string} clientId
- * @param {string} path
- */
-function getPdfContext(clientId, path) {
-  const { pdfs } = getClientContext(clientId)
-  return pdfs.get(path)
-}
-
-function expirePdfContexts() {
-  // discard client maps for clients that are no longer connected
-  const currentClientSet = new Set()
-  self.clients.matchAll().then(function (clientList) {
-    clientList.forEach(client => {
-      currentClientSet.add(client.id)
-    })
-    CLIENT_CONTEXT.forEach((map, clientId) => {
-      if (!currentClientSet.has(clientId)) {
-        CLIENT_CONTEXT.delete(clientId)
-      }
-    })
-  })
-}
-
-/**
- * @param {FetchEvent} event
- */
-function onFetch(event) {
-  const url = new URL(event.request.url)
-  const path = url.pathname
-
-  if (path.match(COMPILE_REQUEST_MATCHER)) {
-    return processCompileRequest(event)
-  }
-
-  const match = path.match(PDF_REQUEST_MATCHER)
-  if (match) {
-    const ctx = getPdfContext(event.clientId, match[2])
-    if (ctx) {
-      return processPdfRequest(event, ctx)
-    }
-  }
-
-  if (
-    event.request.method === 'DELETE' &&
-    path.match(CLEAR_CACHE_REQUEST_MATCHER)
-  ) {
-    return processClearCacheRequest(event)
-  }
-
-  // other request, ignore
-}
-
-/**
- * @param {FetchEvent} event
- */
-function processClearCacheRequest(event) {
-  CLIENT_CONTEXT.delete(event.clientId)
-  // use default request proxy.
-}
-
-/**
- * @param {FetchEvent} event
- */
-function processCompileRequest(event) {
-  event.respondWith(
-    fetch(event.request).then(response => {
-      if (response.status !== 200) return response
-
-      return response.json().then(body => {
-        handleCompileResponse(event, response, body)
-
-        // Send the service workers metrics to the frontend.
-        const { metrics } = getClientContext(event.clientId)
-        metrics.compileCount++
-        body.serviceWorkerMetrics = metrics
-
-        return new Response(JSON.stringify(body), response)
-      })
-    })
-  )
-}
-
-/**
- * @param {Request} request
- * @param {Object} file
- * @return {Response}
- */
-function handleProbeRequest(request, file) {
-  // PDF.js starts the pdf download with a probe request that has no
-  // range headers on it.
-  // Upon seeing the response headers, it decides whether to upgrade the
-  // transport to chunked requests or keep reading the response body.
-  // For small PDFs (2*chunkSize = 2*128kB) it just sends one request.
-  // We will fetch all the ranges in bulk and emit them.
-  // For large PDFs it sends this probe request, aborts that request before
-  // reading any data and then sends multiple range requests.
-  // It would be wasteful to action this probe request with all the ranges
-  // that are available in the PDF and serve the full PDF content to
-  // PDF.js for the probe request.
-  // We are emitting a dummy response to the probe request instead.
-  // It triggers the chunked transfer and subsequent fewer ranges need to be
-  // requested -- only those of visible pages in the pdf viewer.
-  // https://github.com/mozilla/pdf.js/blob/6fd899dc443425747098935207096328e7b55eb2/src/display/network_utils.js#L43-L47
-  const pdfJSWillUseChunkedTransfer = file.size > 2 * PDF_JS_CHUNK_SIZE
-  const isRangeRequest = request.headers.has('Range')
-  if (!isRangeRequest && pdfJSWillUseChunkedTransfer) {
-    const headers = new Headers()
-    headers.set('Accept-Ranges', 'bytes')
-    headers.set('Content-Length', file.size)
-    headers.set('Content-Type', 'application/pdf')
-    return new Response('', {
-      headers,
-      status: 200,
-      statusText: 'OK',
-    })
-  }
-}
-
-/**
- *
- * @param {FetchEvent} event
- * @param {Object} file
- * @param {string} clsiServerId
- * @param {string} compileGroup
- * @param {Date} pdfCreatedAt
- * @param {Object} metrics
- * @param {Set} cached
- */
-function processPdfRequest(
-  event,
-  { file, clsiServerId, compileGroup, pdfCreatedAt, metrics, cached }
-) {
-  const response = handleProbeRequest(event.request, file)
-  if (response) {
-    return event.respondWith(response)
-  }
-
-  const rangeHeader =
-    event.request.headers.get('Range') || `bytes=0-${file.size - 1}`
-  const [start, last] = rangeHeader
-    .slice('bytes='.length)
-    .split('-')
-    .map(i => parseInt(i, 10))
-  const end = last + 1
-
-  return event.respondWith(
-    fetchRange({
-      url: event.request.url,
-      start,
-      end,
-      file,
-      pdfCreatedAt,
-      metrics,
-      cached,
-    })
-      .then(blob => {
-        return new Response(blob, {
-          status: 206,
-          headers: {
-            'Accept-Ranges': 'bytes',
-            'Content-Length': end - start,
-            'Content-Range': `bytes ${start}-${last}/${file.size}`,
-            'Content-Type': 'application/pdf',
-          },
-        })
-      })
-      .catch(error => {
-        metrics.failedCount++
-        reportError(event, OError.tag(error, 'failed to compose pdf response'))
-        return fetch(event.request)
-      })
-  )
-}
-
-/**
- * @param {FetchEvent} event
- * @param {Response} response
- * @param {Object} body
- */
-function handleCompileResponse(event, response, body) {
-  if (!body || body.status !== 'success') return
-
-  for (const file of body.outputFiles) {
-    if (file.path !== 'output.pdf') continue // not the pdf used for rendering
-    if (file.ranges?.length) {
-      const { clsiServerId, compileGroup } = body
-      registerPdfContext(event.clientId, file.url, {
-        file,
-        clsiServerId,
-        compileGroup,
-      })
-    }
-    break
-  }
-}
-
-/**
- * @param {FetchEvent} event
- */
-function onFetchWithErrorHandling(event) {
-  try {
-    onFetch(event)
-  } catch (error) {
-    reportError(event, OError.tag(error, 'low level error in onFetch'))
-  }
-}
-
-// allow fetch event listener to be removed if necessary
-const controller = new AbortController()
-
-// listen to all network requests
-self.addEventListener('fetch', onFetchWithErrorHandling, {
-  signal: controller.signal,
-})
-
-// complete setup ASAP
-self.addEventListener('install', event => {
-  event.waitUntil(self.skipWaiting())
-})
-self.addEventListener('activate', event => {
-  event.waitUntil(self.clients.claim())
-})
-
-self.addEventListener('message', event => {
-  if (event.data && event.data.type === 'disable') {
-    controller.abort() // removes the fetch event listener
-  }
-})
-
-/**
- *
- * @param {FetchEvent} event
- * @param {Error} error
- */
-function reportError(event, error) {
-  self.clients
-    .get(event.clientId)
-    .then(client => {
-      if (!client) {
-        // The client disconnected.
-        return
-      }
-      client.postMessage(
-        JSON.stringify({
-          extra: { url: event.request.url, info: OError.getFullInfo(error) },
-          error: {
-            name: error.name,
-            message: error.message,
-            stack: OError.getFullStack(error),
-          },
-        })
-      )
-    })
-    .catch(() => {})
-}

View file

@@ -0,0 +1,17 @@
+export function cleanupServiceWorker() {
+  try {
+    navigator.serviceWorker
+      .getRegistrations()
+      .catch(() => {
+        // fail silently if permission not given (e.g. SecurityError)
+        return []
+      })
+      .then(registrations => {
+        registrations.forEach(worker => {
+          worker.unregister()
+        })
+      })
+  } catch (e) {
+    // fail silently if service workers are not available (on the navigator)
+  }
+}
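
Note: the new helper is fire-and-forget. The inner .catch() swallows
permission errors from getRegistrations() (e.g. SecurityError), and the outer
try/catch covers browsers where navigator.serviceWorker is undefined, so
callers need no feature detection. Usage, as wired up in ide.js above:

    import { cleanupServiceWorker } from './utils/service-worker-cleanup'

    cleanupServiceWorker() // best-effort; safe where service workers are unsupported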

View file

@@ -1188,18 +1188,11 @@ describe('ProjectController', function () {
           expectBandwidthTrackingEnabled()
         })

-        describe('with pdf-caching-mode=no-service-worker', function () {
+        describe('with pdf-caching-mode=enabled', function () {
           beforeEach(function () {
-            this.req.query['pdf-caching-mode'] = 'no-service-worker'
+            this.req.query['pdf-caching-mode'] = 'enabled'
           })
-          expectPDFCachingEnabled('no-service-worker')
-        })
-
-        describe('with pdf-caching-mode=service-worker', function () {
-          beforeEach(function () {
-            this.req.query['pdf-caching-mode'] = 'service-worker'
-          })
-          expectPDFCachingEnabled('service-worker')
+          expectPDFCachingEnabled('enabled')
         })
       })
     })

View file

@@ -21,13 +21,6 @@ const entryPoints = {
   'light-style': './frontend/stylesheets/light-style.less',
 }

-// ServiceWorker at /serviceWorker.js
-entryPoints.serviceWorker = {
-  import: './frontend/js/serviceWorker.js',
-  publicPath: '/',
-  filename: 'serviceWorker.js',
-}
-
 // Add entrypoints for each "page"
 glob
   .sync(path.join(__dirname, 'modules/*/frontend/js/pages/**/*.js'))