Mirror of https://github.com/overleaf/overleaf.git, synced 2024-11-21 20:47:08 -05:00
Merge pull request #9008 from overleaf/jpa-disable-pdf-caching-edge-legacy
[web] disable pdf caching feature in legacy Edge browser

GitOrigin-RevId: f57ca3385337b5516e27a701646f2ed92325bf8d
parent 95a289b80b
commit 8841c8c874
2 changed files with 10 additions and 3 deletions
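The first hunk below is from the split-test flag helper; the remaining three hunks are from the pdf caching module itself. Together they make pdf caching degrade gracefully in browsers that lack the TextEncoder API, notably legacy (pre-Chromium) Edge: the flag helper now reports the feature as disabled there, and TextEncoder is no longer constructed at module scope, where its evaluation would throw on import in such browsers. A minimal standalone sketch of the guard pattern follows; the inline variant map stands in for getMeta('ol-splitTestVariants') and the flag name is illustrative, not taken from this change.

// Standalone sketch of the guard pattern; the inline variant map and the
// 'pdf-caching-mode' flag name are illustrative stand-ins.
const hasTextEncoder = typeof TextEncoder !== 'undefined'
if (!hasTextEncoder) {
  console.warn('TextEncoder is not available. Disabling pdf-caching.')
}

const splitTestVariants = { 'pdf-caching-mode': 'enabled' }

function isFlagEnabled(flag) {
  // Even when the split test assigns 'enabled', the feature stays off in a
  // browser that cannot encode strings to bytes.
  if (!hasTextEncoder) return false
  return splitTestVariants?.[flag] === 'enabled'
}

console.log(isFlagEnabled('pdf-caching-mode')) // true here, false in a browser without TextEncoder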
@@ -1,6 +1,12 @@
 import getMeta from '../../../utils/meta'
 
+const hasTextEncoder = typeof TextEncoder !== 'undefined'
+if (!hasTextEncoder) {
+  console.warn('TextEncoder is not available. Disabling pdf-caching.')
+}
+
 function isFlagEnabled(flag) {
+  if (!hasTextEncoder) return false
   return getMeta('ol-splitTestVariants')?.[flag] === 'enabled'
 }
 
@@ -5,12 +5,12 @@ const MAX_SUB_REQUEST_COUNT = 4
 const MAX_SUB_REQUEST_BYTES = 4 * PDF_JS_CHUNK_SIZE
 const INCREMENTAL_CACHE_SIZE = 1000
 
-const ENCODER = new TextEncoder()
 function backfillEdgeBounds(file) {
   if (!file.backfilledEdgeBoundsOnce) {
+    const encoder = new TextEncoder()
     for (const range of file.ranges) {
       if (range.objectId) {
-        range.objectId = ENCODER.encode(range.objectId)
+        range.objectId = encoder.encode(range.objectId)
         range.start -= range.objectId.byteLength
       }
     }
@@ -221,6 +221,7 @@ function getMultipartBoundary(response, chunk) {
 function resolveMultiPartResponses({ file, chunks, data, boundary, metrics }) {
   const responses = []
   let offsetStart = 0
+  const encoder = new TextEncoder()
   for (const chunk of chunks) {
     const header = `\r\n--${boundary}\r\nContent-Type: application/pdf\r\nContent-Range: bytes ${
       chunk.start
@@ -228,7 +229,7 @@ function resolveMultiPartResponses({ file, chunks, data, boundary, metrics }) {
     const headerSize = header.length
 
     // Verify header content. A proxy might have tampered with it.
-    const headerRaw = ENCODER.encode(header)
+    const headerRaw = encoder.encode(header)
     if (
       !data
         .subarray(offsetStart, offsetStart + headerSize)
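For the pdf caching module, the reason for dropping the module-level ENCODER constant in favour of function-local encoders can be seen in a small comparison. This is a sketch under the assumption of a browser without TextEncoder (such as EdgeHTML-based Edge); encodeHeader is an illustrative name, not code from this change.

// Module scope (the removed pattern): evaluated as soon as the module is
// imported, so it throws a ReferenceError wherever TextEncoder is undefined.
//   const ENCODER = new TextEncoder()

// Function scope (the added pattern): the encoder is created only when the
// function runs, and those call sites are only expected to run once the
// feature-detection guard has allowed pdf caching to be enabled.
function encodeHeader(header) {
  const encoder = new TextEncoder()
  return encoder.encode(header) // Uint8Array of UTF-8 bytes
}

console.log(encodeHeader('Content-Type: application/pdf').byteLength) // 29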