Merge pull request #13525 from overleaf/jpa-teardown-compile-domain-test

[web] start tear down of split test for new compile domain

GitOrigin-RevId: 405bba924a37f56614b0744f47914306f420a028
This commit is contained in:
Alf Eaton 2023-06-27 14:37:53 +01:00 committed by Copybot
parent b61ee4c4d3
commit 29f67a3985
18 changed files with 48 additions and 710 deletions
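For context, the net effect of this teardown on domain selection: the 'force-new-compile-domain' assignment no longer feeds into the compile output domain, so the choice reduces to the 'pdf-download-domain' variant alone. A minimal sketch, using the names from the CompileController hunk below (the Settings shape is assumed):

// Sketch only: simplified selection after removing 'force-new-compile-domain'.
// domainVariant comes from the 'pdf-download-domain' split test assignment.
function pickPdfDownloadDomain(domainVariant, Settings) {
  return domainVariant === 'user' && Settings.compilesUserContentDomain
    ? Settings.compilesUserContentDomain
    : Settings.pdfDownloadDomain
}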

View file

@ -58,33 +58,16 @@ const getSplitTestOptions = callbackify(async function (req, res) {
res,
'pdf-download-domain'
)
const { variant: forceNewDomainVariant } =
await SplitTestHandler.promises.getAssignment(
editorReq,
res,
'force-new-compile-domain'
)
const pdfDownloadDomain =
(domainVariant === 'user' || forceNewDomainVariant === 'enabled') &&
Settings.compilesUserContentDomain
domainVariant === 'user' && Settings.compilesUserContentDomain
? Settings.compilesUserContentDomain
: Settings.pdfDownloadDomain
const { variant: hybridDomainVariant } =
await SplitTestHandler.promises.getAssignment(
editorReq,
res,
'pdf-download-domain-hybrid'
)
const enableHybridPdfDownload = hybridDomainVariant === 'enabled'
if (!req.query.enable_pdf_caching) {
// The frontend does not want to do pdf caching.
return {
pdfDownloadDomain,
enableHybridPdfDownload,
enablePdfCaching: false,
forceNewDomainVariant,
}
}
@ -101,18 +84,14 @@ const getSplitTestOptions = callbackify(async function (req, res) {
// Skip the lookup of the chunk size when caching is not enabled.
return {
pdfDownloadDomain,
enableHybridPdfDownload,
enablePdfCaching: false,
forceNewDomainVariant,
}
}
const pdfCachingMinChunkSize = await getPdfCachingMinChunkSize(editorReq, res)
return {
pdfDownloadDomain,
enableHybridPdfDownload,
enablePdfCaching,
pdfCachingMinChunkSize,
forceNewDomainVariant,
}
})
@ -154,13 +133,8 @@ module.exports = CompileController = {
getSplitTestOptions(req, res, (err, splitTestOptions) => {
if (err) return next(err)
let {
enablePdfCaching,
pdfCachingMinChunkSize,
pdfDownloadDomain,
enableHybridPdfDownload,
forceNewDomainVariant,
} = splitTestOptions
let { enablePdfCaching, pdfCachingMinChunkSize, pdfDownloadDomain } =
splitTestOptions
options.enablePdfCaching = enablePdfCaching
if (enablePdfCaching) {
options.pdfCachingMinChunkSize = pdfCachingMinChunkSize
@ -227,8 +201,6 @@ module.exports = CompileController = {
timings,
pdfDownloadDomain,
pdfCachingMinChunkSize,
enableHybridPdfDownload,
forceNewDomainVariant,
})
}
)

View file

@ -713,72 +713,6 @@ const ProjectController = {
}
)
},
accessCheckForOldCompileDomainAssigment(cb) {
SplitTestHandler.getAssignment(
req,
res,
'access-check-for-old-compile-domain',
() => {
// We'll pick up the assignment from the res.locals assignment.
cb()
}
)
},
forceNewDomainAssignment(cb) {
SplitTestHandler.getAssignment(
req,
res,
'force-new-compile-domain',
() => {
// We'll pick up the assignment from the res.locals assignment.
cb()
}
)
},
userContentDomainAccessCheckAssigment(cb) {
SplitTestHandler.getAssignment(
req,
res,
'user-content-domain-access-check',
() => {
// We'll pick up the assignment from the res.locals assignment.
cb()
}
)
},
userContentDomainAccessCheckDelayAssigment(cb) {
SplitTestHandler.getAssignment(
req,
res,
'user-content-domain-access-check-delay',
() => {
// We'll pick up the assignment from the res.locals assignment.
cb()
}
)
},
userContentDomainAccessCheckMaxChecksAssigment(cb) {
SplitTestHandler.getAssignment(
req,
res,
'user-content-domain-access-check-max-checks',
() => {
// We'll pick up the assignment from the res.locals assignment.
cb()
}
)
},
reportUserContentDomainAccessCheckErrorAssigment(cb) {
SplitTestHandler.getAssignment(
req,
res,
'report-user-content-domain-access-check-error',
() => {
// We'll pick up the assignment from the res.locals assignment.
cb()
}
)
},
},
(
err,

View file

@ -1,30 +0,0 @@
const Metrics = require('@overleaf/metrics')
function recordCheckResult(req, res) {
const path = req.body.isOldDomain ? 'old' : ''
Metrics.count('user_content_domain_check', req.body.succeeded, 1, {
status: 'success',
path,
})
Metrics.count('user_content_domain_check', req.body.failed, 1, {
status: 'failure',
path,
})
res.sendStatus(204)
}
function recordFallbackUsage(_req, res) {
Metrics.inc('user_content_domain_fallback')
res.sendStatus(204)
}
function recordMaxAccessChecksHit(_req, res) {
Metrics.inc('user_content_domain_max_access_checks_hit')
res.sendStatus(204)
}
module.exports = {
recordCheckResult,
recordFallbackUsage,
recordMaxAccessChecksHit,
}
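For context, the routes removed from the router further down posted small JSON bodies to these handlers. A hedged sketch of the check-result payload, matching the Joi schema deleted below (values illustrative):

// Illustrative body for POST /record-user-content-domain-access-check-result
// (per the removed Joi schema: failed/succeeded are 0..6, isOldDomain defaults to false).
const body = { failed: 1, succeeded: 5, isOldDomain: false }
// recordCheckResult then bumps both counters, labelled by status and path ('old' or ''):
// Metrics.count('user_content_domain_check', body.succeeded, 1, { status: 'success', path: '' })
// Metrics.count('user_content_domain_check', body.failed, 1, { status: 'failure', path: '' })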

View file

@ -67,7 +67,6 @@ const logger = require('@overleaf/logger')
const _ = require('underscore')
const { plainTextResponse } = require('./infrastructure/Response')
const PublicAccessLevels = require('./Features/Authorization/PublicAccessLevels')
const UserContentDomainController = require('./Features/UserContentDomainCheck/UserContentDomainController')
const rateLimiters = {
addEmail: new RateLimiter('add-email', {
@ -201,21 +200,6 @@ const rateLimiters = {
points: 10,
duration: 60,
}),
userContentDomainAccessCheckResult: new RateLimiter(
'user-content-domain-a-c-r',
{
points: 30,
duration: 60,
}
),
userContentDomainFallbackUsage: new RateLimiter('user-content-fb-u', {
points: 15,
duration: 60,
}),
userContentDomainMaxAccessChecksHit: new RateLimiter('user-content-mach', {
points: 15,
duration: 60,
}),
}
function initialize(webRouter, privateApiRouter, publicApiRouter) {
@ -1333,35 +1317,6 @@ function initialize(webRouter, privateApiRouter, publicApiRouter) {
res.sendStatus(204)
})
webRouter.post(
'/record-user-content-domain-access-check-result',
validate({
body: Joi.object({
failed: Joi.number().min(0).max(6),
succeeded: Joi.number().min(0).max(6),
isOldDomain: Joi.boolean().default(false),
}),
}),
RateLimiterMiddleware.rateLimit(
rateLimiters.userContentDomainAccessCheckResult
),
UserContentDomainController.recordCheckResult
)
webRouter.post(
'/record-user-content-domain-fallback-usage',
RateLimiterMiddleware.rateLimit(
rateLimiters.userContentDomainFallbackUsage
),
UserContentDomainController.recordFallbackUsage
)
webRouter.post(
'/record-user-content-domain-max-access-checks-hit',
RateLimiterMiddleware.rateLimit(
rateLimiters.userContentDomainMaxAccessChecksHit
),
UserContentDomainController.recordMaxAccessChecksHit
)
webRouter.get(
`/read/:token(${TokenAccessController.READ_ONLY_TOKEN_PATTERN})`,
RateLimiterMiddleware.rateLimit(rateLimiters.readOnlyToken),

View file

@ -609,7 +609,7 @@
"need_to_leave": "",
"need_to_upgrade_for_more_collabs": "",
"need_to_upgrade_for_more_collabs_variant": "",
"new_compile_domain_trouble_shooting": "",
"new_compile_domain_notice": "",
"new_file": "",
"new_folder": "",
"new_name": "",

View file

@ -11,10 +11,6 @@ import PdfPreviewErrorBoundaryFallback from './pdf-preview-error-boundary-fallba
import { useDetachCompileContext as useCompileContext } from '../../../shared/context/detach-compile-context'
import { captureException } from '../../../infrastructure/error-reporter'
import { getPdfCachingMetrics } from '../util/metrics'
import { userContentDomainAccessCheckFailed } from '../../user-content-domain-access-check'
import { isURLOnUserContentDomain } from '../util/fetchFromCompileDomain'
import { isNetworkError } from '../../../utils/isNetworkError'
import OError from '@overleaf/o-error'
function PdfJsViewer({ url, pdfFile }) {
const { _id: projectId } = useProjectContext()
@ -130,15 +126,7 @@ function PdfJsViewer({ url, pdfFile }) {
if (abortController.signal.aborted) return
// The error is already logged at the call-site with additional context.
if (err instanceof pdfJsWrapper.PDFJS.MissingPDFException) {
if (
// 404 is unrelated to new domain
OError.getFullInfo(err).statusCode !== 404 &&
isURLOnUserContentDomain(OError.getFullInfo(err).url)
) {
setError('rendering-error-new-domain')
} else {
setError('rendering-error-expected')
}
setError('rendering-error-expected')
} else {
setError('rendering-error')
}
@ -148,22 +136,7 @@ function PdfJsViewer({ url, pdfFile }) {
.catch(error => {
if (abortController.signal.aborted) return
console.error(error)
if (
isURLOnUserContentDomain(url) &&
error instanceof pdfJsWrapper.PDFJS.UnexpectedResponseException
) {
setError('rendering-error-new-domain')
} else if (
isURLOnUserContentDomain(url) &&
error.name === 'UnknownErrorException' &&
(isNetworkError(error) || userContentDomainAccessCheckFailed())
) {
// For some reason, pdfJsWrapper.PDFJS.UnknownErrorException is
// not available for an instance check.
setError('rendering-error-new-domain')
} else {
setError('rendering-error')
}
setError('rendering-error')
})
return () => {
abortController.abort()

View file

@ -13,32 +13,6 @@ function PdfPreviewError({ error }) {
const { startCompile } = useCompileContext()
switch (error) {
case 'rendering-error-new-domain':
return (
<PdfLogEntry
headerTitle={t('pdf_rendering_error')}
formattedContent={
<Trans
i18nKey="new_compile_domain_trouble_shooting"
values={{
compilesUserContentDomain: new URL(
getMeta('ol-compilesUserContentDomain')
).hostname,
}}
components={[
<code key="domain" />,
/* eslint-disable-next-line jsx-a11y/anchor-has-content */
<a
href="/learn/how-to/Resolving_access%2C_loading%2C_and_display_problems"
target="_blank"
key="troubleshooting-link"
/>,
]}
/>
}
level="warning"
/>
)
case 'rendering-error-expected':
return (
<PdfLogEntry
@ -87,6 +61,29 @@ function PdfPreviewError({ error }) {
/>,
]}
/>
{getMeta('ol-compilesUserContentDomain') && (
<>
<br />
<br />
<Trans
i18nKey="new_compile_domain_notice"
values={{
compilesUserContentDomain: new URL(
getMeta('ol-compilesUserContentDomain')
).hostname,
}}
components={[
<code key="domain" />,
/* eslint-disable-next-line jsx-a11y/anchor-has-content */
<a
href="/learn/how-to/Resolving_access%2C_loading%2C_and_display_problems"
target="_blank"
key="troubleshooting-link"
/>,
]}
/>
</>
)}
</ErrorLogEntry>
)
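Note on the added notice: only the hostname of the configured user-content domain is interpolated into the translation string, not the full origin. A minimal illustration (domain value taken from the Cypress test below):

// Illustrative: the Trans value passes just the hostname to the translation string.
new URL('https://compiles-user.dev-overleaf.com').hostname
// -> 'compiles-user.dev-overleaf.com'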

View file

@ -1,92 +0,0 @@
import { isNetworkError } from '../../../utils/isNetworkError'
import getMeta from '../../../utils/meta'
import OError from '@overleaf/o-error'
import { postJSON } from '../../../infrastructure/fetch-json'
import { isSplitTestEnabled } from '../../../utils/splitTestUtils'
let useFallbackDomainUntil = performance.now()
const ONE_HOUR_IN_MS = 1000 * 60 * 60
class MaybeBlockedByProxyError extends OError {}
function checkForBlockingByProxy(url: string, res: Response) {
const statusCode = res.status
switch (statusCode) {
case 200: // full response
case 206: // range response
case 404: // file not found
case 416: // range not found
return
default:
throw new MaybeBlockedByProxyError('request might be blocked by proxy', {
res,
url,
statusCode,
})
}
}
export function isURLOnUserContentDomain(url: string) {
const userContentDomain = getMeta('ol-compilesUserContentDomain')
return (
userContentDomain &&
url &&
new URL(url).hostname === new URL(userContentDomain).hostname
)
}
export async function fetchFromCompileDomain(url: string, init: RequestInit) {
let isUserContentDomain = isURLOnUserContentDomain(url)
const fallbackAllowed = !isSplitTestEnabled('force-new-compile-domain')
if (fallbackAllowed && useFallbackDomainUntil > performance.now()) {
isUserContentDomain = false
url = withFallbackCompileDomain(url)
}
try {
const res = await fetch(url, init)
if (isUserContentDomain) {
// Only throw a MaybeBlockedByProxyError when the request will be retried
// on the fallback domain below.
checkForBlockingByProxy(url, res)
}
return res
} catch (err) {
if (
fallbackAllowed &&
isUserContentDomain &&
(isNetworkError(err) || err instanceof MaybeBlockedByProxyError)
) {
try {
const res = await fetch(withFallbackCompileDomain(url), init)
// Only switch to the fallback when fetch does not throw there as well.
if (useFallbackDomainUntil < performance.now()) {
useFallbackDomainUntil = performance.now() + ONE_HOUR_IN_MS
recordFallbackUsage()
}
return res
} catch (err2: any) {
throw OError.tag(err2, 'fallback request failed', {
errUserContentDomain: err,
})
}
}
throw err
}
}
export function swapDomain(url: string, domain: string) {
const u = new URL(url)
u.hostname = new URL(domain).hostname
return u.href
}
function withFallbackCompileDomain(url: string) {
return swapDomain(url, getMeta('ol-fallbackCompileDomain'))
}
function recordFallbackUsage() {
setTimeout(() => {
postJSON('/record-user-content-domain-fallback-usage').catch(() => {})
}, 1_000)
}
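The deleted helper above also exported swapDomain, whose import is dropped from file-list.js below. A minimal sketch of its behaviour with illustrative URLs: it replaces only the hostname and keeps scheme, path and query intact.

// Illustrative: swapDomain swaps the hostname, preserving path and query.
swapDomain(
  'https://compiles.example.com/project/abc/output/output.pdf?compileGroup=standard',
  'https://compiles-user.example.com'
)
// -> 'https://compiles-user.example.com/project/abc/output/output.pdf?compileGroup=standard'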

View file

@ -3,8 +3,6 @@ import HumanReadableLogs from '../../../ide/human-readable-logs/HumanReadableLog
import BibLogParser from '../../../ide/log-parser/bib-log-parser'
import { v4 as uuid } from 'uuid'
import { enablePdfCaching } from './pdf-caching-flags'
import { fetchFromCompileDomain, swapDomain } from './fetchFromCompileDomain'
import { userContentDomainAccessCheckPassed } from '../../user-content-domain-access-check'
// Warnings that may disappear after a second LaTeX pass
const TRANSIENT_WARNING_REGEX = /^(Reference|Citation).+undefined on input line/
@ -29,8 +27,7 @@ export function handleOutputFiles(outputFiles, projectId, data) {
outputFile.pdfUrl = `${buildURL(
outputFile,
data.pdfDownloadDomain,
data.enableHybridPdfDownload
data.pdfDownloadDomain
)}?${params}`
// build the URL for downloading the PDF
@ -72,10 +69,9 @@ export const handleLogFiles = async (outputFiles, data, signal) => {
if (logFile) {
try {
const response = await fetchFromCompileDomain(
buildURL(logFile, data.pdfDownloadDomain, data.enableHybridPdfDownload),
{ signal }
)
const response = await fetch(buildURL(logFile, data.pdfDownloadDomain), {
signal,
})
result.log = await response.text()
@ -108,10 +104,9 @@ export const handleLogFiles = async (outputFiles, data, signal) => {
}
for (const blgFile of blgFiles) {
try {
const response = await fetchFromCompileDomain(
buildURL(blgFile, data.pdfDownloadDomain, data.enableHybridPdfDownload),
{ signal }
)
const response = await fetch(buildURL(blgFile, data.pdfDownloadDomain), {
signal,
})
const log = await response.text()
@ -166,20 +161,7 @@ export function buildLogEntryAnnotations(entries, fileTreeManager) {
return logEntryAnnotations
}
function buildURL(file, pdfDownloadDomain, enableHybridPdfDownload) {
const userContentDomain = getMeta('ol-compilesUserContentDomain')
if (
enableHybridPdfDownload &&
userContentDomainAccessCheckPassed() &&
file.build &&
userContentDomain
) {
// This user is enrolled in the hybrid download of compile output.
// The access check passed, so try to use the new user content domain.
// Downloads from the compiles domains must include a build id.
// The build id is used implicitly for access control.
return swapDomain(`${pdfDownloadDomain}${file.url}`, userContentDomain)
}
function buildURL(file, pdfDownloadDomain) {
if (file.build && pdfDownloadDomain) {
// Downloads from the compiles domain must include a build id.
// The build id is used implicitly for access control.
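The hunk truncates here; a hedged sketch of the simplified buildURL for context (the non-build fallback is an assumption, not shown in this diff):

// Sketch only: buildURL after dropping the hybrid-download branch.
function buildURL(file, pdfDownloadDomain) {
  if (file.build && pdfDownloadDomain) {
    // Downloads from the compiles domain must include a build id,
    // which is used implicitly for access control.
    return `${pdfDownloadDomain}${file.url}`
  }
  // Assumed fallback (outside this hunk): serve from the current origin.
  return file.url
}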

View file

@ -10,8 +10,6 @@ import {
trackPdfDownloadEnabled,
} from './pdf-caching-flags'
import { isNetworkError } from '../../../utils/isNetworkError'
import { isSplitTestEnabled } from '../../../utils/splitTestUtils'
import { isURLOnUserContentDomain } from './fetchFromCompileDomain'
// 30 seconds: The shutdown grace period of a clsi pre-emp instance.
const STALE_OUTPUT_REQUEST_THRESHOLD_MS = 30 * 1000
@ -78,10 +76,6 @@ export function generatePdfCachingTransportFactory(PDFJS) {
end,
metrics,
})
const isExpectedFailureOnNewCompileDomain = err =>
isSplitTestEnabled('force-new-compile-domain') &&
isURLOnUserContentDomain(OError.getFullInfo(err).url) &&
OError.getFullInfo(err).responseSize !== this.pdfFile.size
const isStaleOutputRequest = () =>
performance.now() - this.startTime > STALE_OUTPUT_REQUEST_THRESHOLD_MS
@ -97,9 +91,8 @@ export function generatePdfCachingTransportFactory(PDFJS) {
// - requests for the main output.pdf file
// A fallback request would not be able to retrieve the PDF either.
const isExpectedError = err =>
((is404(err) || isNetworkError(err)) &&
(isStaleOutputRequest() || isFromOutputPDFRequest(err))) ||
isExpectedFailureOnNewCompileDomain(err)
(is404(err) || isNetworkError(err)) &&
(isStaleOutputRequest() || isFromOutputPDFRequest(err))
fetchRange({
url: this.url,
@ -120,8 +113,6 @@ export function generatePdfCachingTransportFactory(PDFJS) {
if (isExpectedError(err)) {
if (is404(err)) {
// A regular pdf-js request would have seen this 404 as well.
} else if (isExpectedFailureOnNewCompileDomain(err)) {
// A regular pdf-js request would have seen this proxy-error as well.
} else {
// Flaky network, switch back to regular pdf-js requests.
metrics.failedCount++

View file

@ -1,5 +1,4 @@
import OError from '@overleaf/o-error'
import { fetchFromCompileDomain } from './fetchFromCompileDomain'
const PDF_JS_CHUNK_SIZE = 128 * 1024
const MAX_SUB_REQUEST_COUNT = 4
@ -485,7 +484,7 @@ export async function fallbackRequest({ url, start, end, abortSignal }) {
headers: { Range: `bytes=${start}-${end - 1}` },
signal: abortSignal,
}
const response = await fetchFromCompileDomain(url, init)
const response = await fetch(url, init)
checkChunkResponse(response, end - start, init)
return await response.arrayBuffer()
} catch (e) {
@ -564,7 +563,7 @@ async function fetchChunk({
// result all the browser cache keys (aka urls) get invalidated.
// We memorize the previous browser cache keys in `cachedUrls`.
try {
const response = await fetchFromCompileDomain(oldUrl, init)
const response = await fetch(oldUrl, init)
if (response.status === 200) {
checkChunkResponse(response, estimatedSize, init)
metrics.oldUrlHitCount += 1
@ -579,7 +578,7 @@ async function fetchChunk({
// Fallback to the latest url.
}
}
const response = await fetchFromCompileDomain(url, init)
const response = await fetch(url, init)
checkChunkResponse(response, estimatedSize, init)
if (chunk.hash) cachedUrls.set(chunk.hash, url)
return response
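With fetchFromCompileDomain gone, chunk retrieval reverts to plain byte-range fetches. A minimal sketch of the request shape used by fallbackRequest and fetchChunk above (function name illustrative):

// Illustrative byte-range request; a successful response is 206 Partial Content
// (or 200 for a full body), which checkChunkResponse then validates.
async function fetchByteRange(url, start, end, abortSignal) {
  const init = {
    headers: { Range: `bytes=${start}-${end - 1}` },
    signal: abortSignal,
  }
  const response = await fetch(url, init)
  return await response.arrayBuffer()
}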

View file

@ -1,314 +0,0 @@
import {
checkChunkResponse,
estimateSizeOfMultipartResponse,
getMultipartBoundary,
resolveMultiPartResponses,
} from '../pdf-preview/util/pdf-caching'
import getMeta from '../../utils/meta'
import OError from '@overleaf/o-error'
import { captureException } from '../../infrastructure/error-reporter'
import { postJSON } from '../../infrastructure/fetch-json'
import {
isSplitTestEnabled,
parseIntFromSplitTest,
} from '../../utils/splitTestUtils'
const MAX_CHECKS_PER_PAGE_LOAD = parseIntFromSplitTest(
'user-content-domain-access-check-max-checks',
3
)
const INITIAL_DELAY_MS = parseIntFromSplitTest(
'user-content-domain-access-check-delay',
30_000
)
const TIMEOUT_MS = 30_000
const FULL_SIZE = 739
const FULL_HASH =
'b7d25591c18da373709d3d88ddf5eeab0b5089359e580f051314fd8935df0b73'
const CHUNKS = [
{
start: 0,
end: 21,
hash: 'd2ad9cbf1bc669646c0dfc43fa3167d30ab75077bb46bc9e3624b9e7e168abc2',
},
{
start: 21,
end: 42,
hash: 'd6d110ec0f3f4e27a4050bc2be9c5552cc9092f86b74fec75072c2c9e8483454',
},
{
start: 42,
end: 64,
hash: '8278914487a3a099c9af5aa22ed836d6587ca0beb7bf9a059fb0409667b3eb3d',
},
]
function pickZone() {
const x = Math.random()
switch (true) {
case x > 0.66:
return 'b'
case x > 0.33:
return 'c'
default:
return 'd'
}
}
function arrayLikeToHex(a: Uint8Array) {
return Array.from(a)
.map(i => i.toString(16).padStart(2, '0'))
.join('')
}
async function hashBody(body: ArrayBuffer) {
const digest = await crypto.subtle.digest('SHA-256', body)
return arrayLikeToHex(new Uint8Array(digest))
}
async function checkHash(
res: Response,
data: ArrayBuffer,
expectedHash: string
) {
const actualHash = await hashBody(data)
if (actualHash !== expectedHash) {
throw new OError('content hash mismatch', {
actualHash,
expectedHash,
headers: Object.fromEntries(res.headers.entries()),
})
}
}
function randomHex(bytes: number) {
const buf = new Uint8Array(bytes)
crypto.getRandomValues(buf)
return arrayLikeToHex(buf)
}
function genBuildId() {
const date = Date.now().toString(16)
const random = randomHex(8)
return `${date}-${random}`
}
async function singleCheck(
url: string,
init: RequestInit,
estimatedSize: number,
expectedHash: string,
chunks?: Array<any>
) {
const ac = new AbortController()
setTimeout(() => ac.abort(), TIMEOUT_MS)
init.signal = ac.signal
init.cache = 'no-store'
const res = await fetch(url, init)
checkChunkResponse(res, estimatedSize, init)
const body = await res.arrayBuffer()
if (chunks) {
const boundary = getMultipartBoundary(res, chunks)
const parts = resolveMultiPartResponses({
file: { size: FULL_SIZE },
chunks,
data: new Uint8Array(body),
boundary,
metrics: {},
})
for (const part of parts) {
await checkHash(res, part.data, part.chunk.hash)
}
} else {
await checkHash(res, body, expectedHash)
}
}
export async function checkUserContentDomainAccess(
compileDomainOrigin: string
) {
// Note: The ids are zero prefixed. No actual user/project uses these ids.
// mongo-id 000000000000000000000000 -> 1970-01-01T00:00:00.000Z
// mongo-id 000000010000000000000000 -> 1970-01-01T00:00:01.000Z
// mongo-id 100000000000000000000000 -> 1978-07-04T21:24:16.000Z
// This allows us to distinguish between check-traffic and regular output
// traffic.
const projectId = `0${randomHex(12).slice(1)}`
const userId = `0${randomHex(12).slice(1)}`
const buildId = genBuildId()
const zone = pickZone()
const urls = []
if (getMeta('ol-user_id')) {
// Logged-in user
urls.push(
`${compileDomainOrigin}/zone/${zone}/project/${projectId}/user/${userId}/build/${buildId}/output/output.pdf`
)
} else {
// Anonymous user
urls.push(
`${compileDomainOrigin}/zone/${zone}/project/${projectId}/build/${buildId}/output/output.pdf`
)
}
const cases = []
for (const url of urls) {
// full download
cases.push({
url,
init: {},
estimatedSize: FULL_SIZE,
hash: FULL_HASH,
})
// range request
const chunk = CHUNKS[0]
cases.push({
url,
init: {
headers: {
Range: `bytes=${chunk.start}-${chunk.end - 1}`,
},
},
estimatedSize: chunk.end - chunk.start,
hash: chunk.hash,
})
// multipart request
cases.push({
url,
init: {
headers: {
Range: `bytes=${CHUNKS.map(c => `${c.start}-${c.end - 1}`).join(
','
)}`,
},
},
estimatedSize: estimateSizeOfMultipartResponse(CHUNKS),
hash: chunk.hash,
chunks: CHUNKS,
})
}
let failed = 0
let ignoreResult = false
const epochBeforeCheck = networkEpoch
await Promise.all(
cases.map(async ({ url, init, estimatedSize, hash, chunks }) => {
try {
await singleCheck(url, init, estimatedSize, hash, chunks)
} catch (err: any) {
if (!navigator.onLine || epochBeforeCheck !== networkEpoch) {
// It is very likely that the request failed because we are offline or
// the network connection changed just now.
ignoreResult = true
}
if (ignoreResult) return
failed++
OError.tag(err, 'user-content-domain-access-check failed', {
url,
init,
})
if (
isSplitTestEnabled('report-user-content-domain-access-check-error')
) {
captureException(err, {
tags: { compileDomain: new URL(compileDomainOrigin).hostname },
})
} else {
console.error(OError.getFullStack(err), OError.getFullInfo(err))
}
}
})
)
if (ignoreResult) return false
try {
await postJSON('/record-user-content-domain-access-check-result', {
body: {
failed,
succeeded: cases.length - failed,
isOldDomain:
compileDomainOrigin === getMeta('ol-fallbackCompileDomain'),
},
})
} catch (e) {}
return failed === 0
}
const ACCESS_CHECK_PASSED = 'passed'
const ACCESS_CHECK_PENDING = 'pending'
const ACCESS_CHECK_FAILED = 'failed'
let accessCheckStatus = ACCESS_CHECK_PENDING
export function userContentDomainAccessCheckPassed() {
return accessCheckStatus === ACCESS_CHECK_PASSED
}
export function userContentDomainAccessCheckFailed() {
return accessCheckStatus === ACCESS_CHECK_FAILED
}
let networkEpoch = performance.now()
window.addEventListener('offline', () => {
// We are offline. Abort any scheduled check.
clearTimeout(lastScheduledCheck)
accessCheckStatus = ACCESS_CHECK_PENDING
networkEpoch = performance.now()
})
window.addEventListener('online', () => {
// We are online again. Schedule another check for this network.
accessCheckStatus = ACCESS_CHECK_PENDING
networkEpoch = performance.now()
scheduleUserContentDomainAccessCheck()
})
try {
// Note: navigator.connection is not available on Firefox and Safari.
// Docs: https://developer.mozilla.org/en-US/docs/Web/API/NetworkInformation
// @ts-ignore
navigator.connection.addEventListener('change', () => {
// The network changed. Schedule another check for it.
accessCheckStatus = ACCESS_CHECK_PENDING
networkEpoch = performance.now()
scheduleUserContentDomainAccessCheck()
})
} catch (e) {}
let lastScheduledCheck: number
let remainingChecks = MAX_CHECKS_PER_PAGE_LOAD
export function scheduleUserContentDomainAccessCheck() {
if (!isSplitTestEnabled('user-content-domain-access-check')) return
clearTimeout(lastScheduledCheck)
const networkEpochBeforeDelay = networkEpoch
lastScheduledCheck = window.setTimeout(() => {
if (!window.navigator.onLine || networkEpochBeforeDelay !== networkEpoch) {
// Must be online for more than INITIAL_DELAY_MS before we check.
// We want to avoid false-positives from flaky network connections.
// Try again in INITIAL_DELAY_MS.
return scheduleUserContentDomainAccessCheck()
}
if (userContentDomainAccessCheckPassed()) return
if (remainingChecks === 0) {
recordMaxAccessChecksHit()
}
if (remainingChecks-- <= 0) return
if (isSplitTestEnabled('access-check-for-old-compile-domain')) {
checkUserContentDomainAccess(getMeta('ol-fallbackCompileDomain')).catch(
() => {}
)
}
checkUserContentDomainAccess(getMeta('ol-compilesUserContentDomain'))
.then(ok => {
accessCheckStatus = ok ? ACCESS_CHECK_PASSED : ACCESS_CHECK_FAILED
})
.catch(err => {
captureException(err)
})
}, INITIAL_DELAY_MS)
}
function recordMaxAccessChecksHit() {
postJSON('/record-user-content-domain-max-access-checks-hit').catch(() => {})
}
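For reference on the deleted checker: each probe hashed the response body and compared it to a known SHA-256, as in hashBody/checkHash above. A self-contained sketch of that verification step:

// Sketch: hex-encode a SHA-256 digest of a response body via the Web Crypto API.
async function sha256Hex(body) {
  const digest = await crypto.subtle.digest('SHA-256', body)
  return Array.from(new Uint8Array(digest))
    .map(b => b.toString(16).padStart(2, '0'))
    .join('')
}

// Usage (illustrative): flag a mismatch against the expected chunk hash.
// if ((await sha256Hex(await res.arrayBuffer())) !== expectedHash) throw new Error('content hash mismatch')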

View file

@ -73,7 +73,6 @@ import './features/history/controllers/history-file-tree-controller'
import { cleanupServiceWorker } from './utils/service-worker-cleanup'
import { reportCM6Perf } from './infrastructure/cm6-performance'
import { reportAcePerf } from './ide/editor/ace-performance'
import { scheduleUserContentDomainAccessCheck } from './features/user-content-domain-access-check'
App.controller(
'IdeController',
@ -487,7 +486,6 @@ If the project has been renamed please look in your project list for a new proje
)
cleanupServiceWorker()
scheduleUserContentDomainAccessCheck()
angular.module('SharelatexApp').config(function ($provide) {
$provide.decorator('$browser', [

View file

@ -1,4 +1,4 @@
import { createContext, useContext, useEffect, useMemo } from 'react'
import { createContext, useContext, useMemo } from 'react'
import PropTypes from 'prop-types'
import {
useLocalCompileContext,
@ -7,7 +7,6 @@ import {
import useDetachStateWatcher from '../hooks/use-detach-state-watcher'
import useDetachAction from '../hooks/use-detach-action'
import useCompileTriggers from '../../features/pdf-preview/hooks/use-compile-triggers'
import getMeta from '../../utils/meta'
export const DetachCompileContext = createContext()
@ -33,7 +32,6 @@ export function DetachCompileProvider({ children }) {
editedSinceCompileStarted: _editedSinceCompileStarted,
error: _error,
fileList: _fileList,
forceNewDomainVariant: _forceNewDomainVariant,
hasChanges: _hasChanges,
highlights: _highlights,
lastCompileOptions: _lastCompileOptions,
@ -125,12 +123,6 @@ export function DetachCompileProvider({ children }) {
'detacher',
'detached'
)
const [forceNewDomainVariant] = useDetachStateWatcher(
'forceNewDomainVariant',
_forceNewDomainVariant,
'detacher',
'detached'
)
const [hasChanges] = useDetachStateWatcher(
'hasChanges',
_hasChanges,
@ -375,11 +367,6 @@ export function DetachCompileProvider({ children }) {
)
useCompileTriggers(startCompile, setChangedAt, setSavedAt)
useEffect(() => {
// Sync the split test variant across the editor and pdf-detach.
const variants = getMeta('ol-splitTestVariants') || {}
variants['force-new-compile-domain'] = forceNewDomainVariant
}, [forceNewDomainVariant])
const value = useMemo(
() => ({
@ -395,7 +382,6 @@ export function DetachCompileProvider({ children }) {
editedSinceCompileStarted,
error,
fileList,
forceNewDomainVariant,
hasChanges,
highlights,
lastCompileOptions,
@ -449,7 +435,6 @@ export function DetachCompileProvider({ children }) {
error,
editedSinceCompileStarted,
fileList,
forceNewDomainVariant,
hasChanges,
highlights,
lastCompileOptions,

View file

@ -29,7 +29,6 @@ import { useEditorContext } from './editor-context'
import { buildFileList } from '../../features/pdf-preview/util/file-list'
import { useLayoutContext } from './layout-context'
import { useUserContext } from './user-context'
import getMeta from '../../utils/meta'
export const LocalCompileContext = createContext()
@ -44,7 +43,6 @@ export const CompileContextPropTypes = {
draft: PropTypes.bool.isRequired,
error: PropTypes.string,
fileList: PropTypes.object,
forceNewDomainVariant: PropTypes.string,
hasChanges: PropTypes.bool.isRequired,
highlights: PropTypes.arrayOf(PropTypes.object),
logEntries: PropTypes.object,
@ -174,11 +172,6 @@ export function LocalCompileProvider({ children }) {
// the list of files that can be downloaded
const [fileList, setFileList] = useState()
// Split test variant for disabling the fallback, refreshed on re-compile.
const [forceNewDomainVariant, setForceNewDomainVariant] = useState(
getMeta('ol-splitTestVariants')?.['force-new-compile-domain']
)
// the raw contents of the log file
const [rawLog, setRawLog] = useState()
@ -323,7 +316,6 @@ export function LocalCompileProvider({ children }) {
setShowFasterCompilesFeedbackUI(
Boolean(data.showFasterCompilesFeedbackUI)
)
setForceNewDomainVariant(data.forceNewDomainVariant || 'default')
if (data.outputFiles) {
const outputFiles = new Map()
@ -561,7 +553,6 @@ export function LocalCompileProvider({ children }) {
editedSinceCompileStarted,
error,
fileList,
forceNewDomainVariant,
hasChanges,
highlights,
lastCompileOptions,
@ -616,7 +607,6 @@ export function LocalCompileProvider({ children }) {
editedSinceCompileStarted,
error,
fileList,
forceNewDomainVariant,
hasChanges,
highlights,
lastCompileOptions,

View file

@ -1004,7 +1004,7 @@
"need_to_leave": "Need to leave?",
"need_to_upgrade_for_more_collabs": "You need to upgrade your account to add more collaborators",
"need_to_upgrade_for_more_collabs_variant": "You have reached the maximum number of collaborators. Upgrade your account to add more.",
"new_compile_domain_trouble_shooting": "We are migrating PDF downloads to a new domain. It looks like something is blocking your browser from accessing that new domain, <0>__compilesUserContentDomain__</0>. This could be caused by network blocking or a strict browser plugin rule. Please follow our <1>troubleshooting guide</1>.",
"new_compile_domain_notice": "Weve recently migrated PDF downloads to a new domain. Something might be blocking your browser from accessing that new domain, <0>__compilesUserContentDomain__</0>. This could be caused by network blocking or a strict browser plugin rule. Please follow our <1>troubleshooting guide</1>.",
"new_file": "New File",
"new_folder": "New Folder",
"new_name": "New Name",

View file

@ -23,6 +23,10 @@ const Layout: FC<{ layout: string; view?: string }> = ({ layout, view }) => {
describe('<PdfPreview/>', function () {
beforeEach(function () {
window.metaAttributesCache.set('ol-preventCompileOnLoad', true)
window.metaAttributesCache.set(
'ol-compilesUserContentDomain',
'https://compiles-user.dev-overleaf.com'
)
cy.interceptEvents()
})

View file

@ -126,8 +126,6 @@ describe('CompileController', function () {
},
],
pdfDownloadDomain: 'https://compiles.overleaf.test',
enableHybridPdfDownload: false,
forceNewDomainVariant: 'default',
})
)
})
@ -170,8 +168,6 @@ describe('CompileController', function () {
},
],
pdfDownloadDomain: 'https://compiles.overleaf.test/zone/b',
enableHybridPdfDownload: false,
forceNewDomainVariant: 'default',
})
)
})
@ -213,8 +209,6 @@ describe('CompileController', function () {
JSON.stringify({
status: this.status,
outputFiles: this.outputFiles,
enableHybridPdfDownload: false,
forceNewDomainVariant: 'default',
})
)
})