Mirror of https://github.com/overleaf/overleaf.git (synced 2024-11-21 20:47:08 -05:00)

Merge pull request #18635 from overleaf/jpa-test-upgrade

[server-pro] add tests for upgrade process

GitOrigin-RevId: eaa1486688cb2fa544adaaee16da04fd757a1b65
Parent: 9e0bf6c626 · Commit: b75d183cfc

11 changed files with 354 additions and 131 deletions
server-ce/test/.gitignore (vendored, new file, 1 addition)

@@ -0,0 +1 @@
+data/
@@ -8,6 +8,7 @@ export PWD = $(shell pwd)
 export TEX_LIVE_DOCKER_IMAGE ?= quay.io/sharelatex/texlive-full:2023.1
 export ALL_TEX_LIVE_DOCKER_IMAGES ?= quay.io/sharelatex/texlive-full:2023.1,quay.io/sharelatex/texlive-full:2022.1
+export IMAGE_TAG_PRO ?= quay.io/sharelatex/sharelatex-pro:latest

 test-e2e:
 	docker compose up --build --no-log-prefix --exit-code-from=e2e e2e

@@ -23,5 +24,8 @@ prefetch:
 	docker compose build
 	echo -n "$$ALL_TEX_LIVE_DOCKER_IMAGES" | xargs -d, -I% \
 		sh -exc 'tag=%; re_tag=quay.io/sharelatex/$${tag#*/}; docker pull $$tag; docker tag $$tag $$re_tag'
+	docker pull $(IMAGE_TAG_PRO:latest=4.2)
+	docker pull $(IMAGE_TAG_PRO:latest=5.0.1-RC1)
+	docker pull $(IMAGE_TAG_PRO:latest=5.0)

 .PHONY: test-e2e test-e2e-open
server-ce/test/cypress/.gitignore (vendored, 2 changes)

@@ -1,3 +1,3 @@
 downloads/
 results/
-compiles/
+data/
@@ -23,19 +23,19 @@ services:
       ENABLE_CONVERSIONS: 'true'
       EMAIL_CONFIRMATION_DISABLED: 'true'
     healthcheck:
-      test: curl --fail http://localhost:3000/status || exit 1
+      test: curl --fail http://localhost:3000/status
       interval: 3s
-      timeout: 10s
-      retries: 10
+      timeout: 3s
+      retries: 30

   mongo:
     image: mongo:5.0.17
     command: '--replSet overleaf'
     healthcheck:
       test: echo 'db.stats().ok' | mongo localhost:27017/test --quiet
-      interval: 10s
-      timeout: 10s
-      retries: 5
+      interval: 3s
+      timeout: 3s
+      retries: 30

   redis:
     image: redis:7.2.1

@@ -106,7 +106,7 @@ services:
       mongo:
         condition: service_healthy
     healthcheck:
-      test: curl --fail http://localhost/status || exit 1
+      test: curl --fail http://localhost/status
       interval: 3s
-      timeout: 10s
-      retries: 10
+      timeout: 3s
+      retries: 30
@@ -15,7 +15,7 @@ export function throttledRecompile() {
   return () =>
     cy.then(() => {
       const msSinceLastCompile = Date.now() - lastCompile
-      cy.wait(Math.max(0, 3_000 - msSinceLastCompile))
+      cy.wait(Math.max(0, 1_000 - msSinceLastCompile))
       cy.findByText('Recompile').click()
       queueReset()
     })
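For reference, the new upgrading.spec.ts (further down) consumes this helper from './helpers/compile' by creating one queue per opened editor and calling it wherever a compile should be triggered. A minimal sketch of that pattern; the edited text and the assertion here are illustrative, not part of the helper:

import { throttledRecompile } from './helpers/compile'

it('recompiles after an edit', () => {
  // One queue per editor session; the helper waits out the 1 s window before clicking Recompile.
  const recompile = throttledRecompile()

  cy.findByText('\\maketitle').parent().click()
  cy.findByText('\\maketitle').parent().type('\n\\section{{}Example Section}')

  recompile()
  cy.get('.pdf-viewer').should('contain.text', 'Example Section')
})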
@@ -7,14 +7,15 @@ export function startWith({
   version = 'latest',
   vars = {},
   varsFn = () => ({}),
+  withDataDir = false,
 }) {
   before(async function () {
     Object.assign(vars, varsFn())
-    const cfg = JSON.stringify({ pro, version, vars })
+    const cfg = JSON.stringify({ pro, version, vars, withDataDir })
     if (lastConfig === cfg) return

     this.timeout(100 * 1000)
-    await reconfigure({ pro, version, vars })
+    await reconfigure({ pro, version, vars, withDataDir })
     lastConfig = cfg
   })
 }
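startWith() now forwards withDataDir to reconfigure() and includes it in the cached config fingerprint, so toggling the bind-mounted data directory forces a restart even when pro/version/vars are unchanged. A minimal sketch of a spec driving it, with values taken from the upgrade tests (the describe wrapper itself is illustrative):

import { startWith } from './helpers/config'

describe('Server Pro with a persistent data dir', () => {
  // Boots (or reuses) an instance whose container data dir is bind-mounted from ./data on the host.
  startWith({
    pro: true,
    version: '5.0',
    withDataDir: true,
  })

  it('serves the project dashboard', () => {
    cy.visit('/project')
  })
})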
@@ -1,21 +1,3 @@
-export async function setVars(vars = {}) {
-  return await fetchJSON('http://host-admin/set/vars', {
-    method: 'POST',
-    body: JSON.stringify({ vars, path: 'docker-compose.yml' }),
-  })
-}
-
-export async function setVersion({ pro = false, version = 'latest' }) {
-  return await fetchJSON('http://host-admin/set/version', {
-    method: 'POST',
-    body: JSON.stringify({
-      pro,
-      version,
-      path: 'docker-compose.yml',
-    }),
-  })
-}
-
 export async function dockerCompose(cmd: string, ...args: string[]) {
   return await fetchJSON(`http://host-admin/docker/compose/${cmd}`, {
     method: 'POST',

@@ -31,10 +13,17 @@ export async function mongoInit() {
   })
 }

+export async function resetData() {
+  return await fetchJSON('http://host-admin/reset/data', {
+    method: 'POST',
+  })
+}
+
 export async function reconfigure({
   pro = false,
   version = 'latest',
   vars = {},
+  withDataDir = false,
 }) {
   return await fetchJSON('http://host-admin/reconfigure', {
     method: 'POST',

@@ -42,6 +31,7 @@ export async function reconfigure({
       pro,
       version,
       vars,
+      withDataDir,
     }),
   })
 }
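The client-side API shrinks from setVars/setVersion to a single reconfigure() call plus the new resetData(), matching the reworked host-admin endpoints further down. Roughly how the upgrade spec uses the pair (a sketch; error handling beyond what the spec does is omitted):

import { reconfigure, resetData } from './helpers/hostAdminClient'

before(async function () {
  this.timeout(100 * 1000)

  // Wipe mongo/redis volumes and the shared host data dir between upgrade scenarios.
  await resetData()

  // One request now swaps the image tag, env vars and data-dir mount in docker-compose.override.yml.
  await reconfigure({
    pro: true,
    version: '4.2',
    vars: { SHARELATEX_MONGO_URL: 'mongodb://mongo/sharelatex' },
    withDataDir: true,
  })
})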
@@ -4,6 +4,10 @@ const DEFAULT_PASSWORD = 'Passw0rd!'

 const createdUsers = new Set<string>()

+export function resetCreatedUsersCache() {
+  createdUsers.clear()
+}
+
 async function createMongoUser({
   email,
   isAdmin = false,

@@ -11,7 +15,7 @@ async function createMongoUser({
   email: string
   isAdmin?: boolean
 }) {
-  const t0 = Date.now()
+  const t0 = Math.floor(Date.now() / 1000)
   const { stdout } = await runScript({
     cwd: 'services/web',
     script: 'modules/server-ce-scripts/scripts/create-user.js',

@@ -19,7 +23,7 @@
   })
   const [url] = stdout.match(/\/user\/activate\?token=\S+/)!
   const userId = new URL(url, location.origin).searchParams.get('user_id')!
-  const signupDate = parseInt(userId.slice(0, 8), 16) * 1000
+  const signupDate = parseInt(userId.slice(0, 8), 16)
   if (signupDate < t0) {
     return { url, exists: true }
   }
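Two changes here: resetCreatedUsersCache() lets tests re-create users after resetData() wipes the database, and the signup-time comparison now works in whole seconds (presumably because the timestamp embedded in a Mongo ObjectId has second precision, so a millisecond-precision t0 could make a freshly created user look pre-existing). A sketch of the intended pairing, mirroring upgrading.spec.ts:

import { ensureUserExists, resetCreatedUsersCache } from './helpers/login'
import { resetData } from './helpers/hostAdminClient'

before(async () => {
  // Clear the in-memory cache together with the database,
  // otherwise ensureUserExists() would skip re-creating the user.
  resetCreatedUsersCache()
  await resetData()
})

ensureUserExists({ email: 'user@example.com' })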
@@ -2,13 +2,13 @@ export function createProject(
   name: string,
   {
     type = 'Blank Project',
+    newProjectButtonMatcher = /new project/i,
   }: {
     type?: 'Blank Project' | 'Example Project'
+    newProjectButtonMatcher?: RegExp
   } = {}
 ): Cypress.Chainable<string> {
-  cy.findAllByRole('button')
-    .contains(/new project/i)
-    .click()
+  cy.findAllByRole('button').contains(newProjectButtonMatcher).click()
   // FIXME: This should only look in the left menu
   cy.findAllByText(type).first().click()
   cy.findByRole('dialog').within(() => {
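createProject() gains a newProjectButtonMatcher option because older releases label the button differently; the upgrade spec matches /create first project/i for the 4.2 start state. Usage, following the spec:

import { createProject } from './helpers/project'

// Current releases: the default /new project/i matcher applies.
createProject('Fresh Project')

// Server Pro 4.2 uses the older label on the project dashboard.
createProject('Old Project', {
  newProjectButtonMatcher: /create first project/i,
})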
@@ -12,7 +12,8 @@ const YAML = require('js-yaml')

 const PATHS = {
   DOCKER_COMPOSE_OVERRIDE: 'docker-compose.override.yml',
-  SANDBOXED_COMPILES_HOST_DIR: Path.join(__dirname, 'cypress/compiles'),
+  DATA_DIR: Path.join(__dirname, 'data'),
+  SANDBOXED_COMPILES_HOST_DIR: Path.join(__dirname, 'data/compiles'),
 }
 const IMAGES = {
   CE: process.env.IMAGE_TAG_CE.replace(/:.+/, ''),

@@ -43,6 +44,10 @@ function writeDockerComposeOverride(cfg) {
   fs.writeFileSync(PATHS.DOCKER_COMPOSE_OVERRIDE, YAML.dump(cfg))
 }

+function purgeDataDir() {
+  fs.rmSync(PATHS.DATA_DIR, { recursive: true, force: true })
+}
+
 const app = express()
 app.get('/status', (req, res) => {
   res.send('host-admin is up')

@@ -55,6 +60,7 @@ app.use((req, res, next) => {
   // Add CORS headers
   res.setHeader('Access-Control-Allow-Origin', 'http://sharelatex')
   res.setHeader('Access-Control-Allow-Headers', 'Content-Type')
+  res.setHeader('Access-Control-Max-Age', '3600')
   next()
 })

@@ -81,7 +87,7 @@ app.post(
       'sharelatex',
       'bash',
       '-c',
-      `source /etc/container_environment.sh && source /etc/overleaf/env.sh && cd ${JSON.stringify(cwd)} && node ${JSON.stringify(script)} ${args.map(a => JSON.stringify(a)).join(' ')}`,
+      `source /etc/container_environment.sh && source /etc/overleaf/env.sh || source /etc/sharelatex/env.sh && cd ${JSON.stringify(cwd)} && node ${JSON.stringify(script)} ${args.map(a => JSON.stringify(a)).join(' ')}`,
     ],
     (error, stdout, stderr) => {
       res.json({

@@ -94,44 +100,6 @@ app.post(
   }
 )

-function setVersionDockerCompose({ pro, version }) {
-  const cfg = readDockerComposeOverride()
-
-  cfg.services.sharelatex.image = `${pro ? IMAGES.PRO : IMAGES.CE}:${version}`
-  cfg.services['git-bridge'].image = `quay.io/sharelatex/git-bridge:${version}`
-
-  writeDockerComposeOverride(cfg)
-}
-
-app.post(
-  '/set/version',
-  validate(
-    {
-      body: {
-        pro: Joi.boolean(),
-        version: Joi.string().required(),
-        path: Joi.allow(
-          'docker-compose.yml'
-          // When extending testing for Toolkit:
-          // 'config/version'
-        ),
-      },
-    },
-    { allowUnknown: false }
-  ),
-  (req, res) => {
-    const { pro, version } = req.body
-    if (req.body.path === 'docker-compose.yml') {
-      try {
-        setVersionDockerCompose({ pro, version })
-      } catch (error) {
-        return res.json({ error })
-      }
-    }
-    res.json({})
-  }
-)
-
 const allowedVars = Joi.object(
   Object.fromEntries(
     [

@@ -149,17 +117,37 @@ const allowedVars = Joi.object(
       'ALL_TEX_LIVE_DOCKER_IMAGE_NAMES',
       'OVERLEAF_TEMPLATES_USER_ID',
       'OVERLEAF_NEW_PROJECT_TEMPLATE_LINKS',
+      // Old branding, used for upgrade tests
+      'SHARELATEX_MONGO_URL',
+      'SHARELATEX_REDIS_HOST',
     ].map(name => [name, Joi.string()])
   )
 )

-function setVarsDockerCompose({ vars }) {
+function setVarsDockerCompose({ pro, vars, version, withDataDir }) {
   const cfg = readDockerComposeOverride()

+  cfg.services.sharelatex.image = `${pro ? IMAGES.PRO : IMAGES.CE}:${version}`
+  cfg.services['git-bridge'].image = `quay.io/sharelatex/git-bridge:${version}`
+
   cfg.services.sharelatex.environment = vars

   if (cfg.services.sharelatex.environment.GIT_BRIDGE_ENABLED === 'true') {
     cfg.services.sharelatex.depends_on = ['git-bridge']
   } else {
     cfg.services.sharelatex.depends_on = []
   }

+  const dataDirInContainer =
+    version === 'latest' || version >= '5.0'
+      ? '/var/lib/overleaf/data'
+      : '/var/lib/sharelatex/data'
+
+  cfg.services.sharelatex.volumes = []
+  if (withDataDir) {
+    cfg.services.sharelatex.volumes.push(
+      `${PATHS.DATA_DIR}:${dataDirInContainer}`
+    )
+  }
+
   if (

@@ -172,45 +160,19 @@ function setVarsDockerCompose({ vars }) {
       process.env.TEX_LIVE_DOCKER_IMAGE
     cfg.services.sharelatex.environment.ALL_TEX_LIVE_DOCKER_IMAGES =
       process.env.ALL_TEX_LIVE_DOCKER_IMAGES
-    cfg.services.sharelatex.volumes = [
-      '/var/run/docker.sock:/var/run/docker.sock',
-      `${PATHS.SANDBOXED_COMPILES_HOST_DIR}:/var/lib/overleaf/data/compiles`,
-    ]
-  } else {
-    cfg.services.sharelatex.volumes = []
+    cfg.services.sharelatex.volumes.push(
+      '/var/run/docker.sock:/var/run/docker.sock'
+    )
+    if (!withDataDir) {
+      cfg.services.sharelatex.volumes.push(
+        `${PATHS.SANDBOXED_COMPILES_HOST_DIR}:${dataDirInContainer}/compiles`
+      )
+    }
   }

   writeDockerComposeOverride(cfg)
 }

-app.post(
-  '/set/vars',
-  validate(
-    {
-      body: {
-        vars: allowedVars,
-        path: Joi.allow(
-          'docker-compose.yml'
-          // When extending the testing for Toolkit:
-          // 'overleaf.rc', 'variables.env'
-        ),
-      },
-    },
-    { allowUnknown: false }
-  ),
-  (req, res) => {
-    if (req.body.path === 'docker-compose.yml') {
-      const { vars } = req.body
-      try {
-        setVarsDockerCompose({ vars })
-      } catch (error) {
-        return res.json({ error })
-      }
-    }
-    res.json({})
-  }
-)
-
 app.post(
   '/docker/compose/:cmd',
   validate(

@@ -249,19 +211,27 @@ app.post(
 function mongoInit(callback) {
   execFile(
     'docker',
-    [
-      'compose',
-      'exec',
-      'mongo',
-      'mongo',
-      '--eval',
-      'rs.initiate({ _id: "overleaf", members: [ { _id: 0, host: "mongo:27017" } ] })',
-    ],
+    ['compose', 'up', '--detach', '--wait', 'mongo'],
     (error, stdout, stderr) => {
-      if (!error) {
-        mongoIsInitialized = true
-      }
-      callback(error, stdout, stderr)
+      if (error) return callback(error, stdout, stderr)
+
+      execFile(
+        'docker',
+        [
+          'compose',
+          'exec',
+          'mongo',
+          'mongo',
+          '--eval',
+          'rs.initiate({ _id: "overleaf", members: [ { _id: 0, host: "mongo:27017" } ] })',
+        ],
+        (error, stdout, stderr) => {
+          if (!error) {
+            mongoIsInitialized = true
+          }
+          callback(error, stdout, stderr)
+        }
+      )
     }
   )
 }

@@ -280,23 +250,23 @@ app.post(
         pro: Joi.boolean().required(),
         version: Joi.string().required(),
         vars: allowedVars,
+        withDataDir: Joi.boolean().optional(),
       },
     },
     { allowUnknown: false }
   ),
   (req, res) => {
+    const { pro, version, vars, withDataDir } = req.body
+    try {
+      setVarsDockerCompose({ pro, version, vars, withDataDir })
+    } catch (error) {
+      return res.json({ error })
+    }
+
     const doMongoInit = mongoIsInitialized ? cb => cb() : mongoInit
     doMongoInit((error, stdout, stderr) => {
       if (error) return res.json({ error, stdout, stderr })

-      const { pro, version, vars } = req.body
-      try {
-        setVersionDockerCompose({ pro, version })
-        setVarsDockerCompose({ vars })
-      } catch (error) {
-        return res.json({ error })
-      }
-
       execFile(
         'docker',
         ['compose', 'up', '--detach', '--wait', 'sharelatex'],

@@ -308,8 +278,35 @@ app.post(
     }
   )

+app.post('/reset/data', (req, res) => {
+  execFile(
+    'docker',
+    ['compose', 'stop', '--timeout=0', 'sharelatex'],
+    (error, stdout, stderr) => {
+      if (error) return res.json({ error, stdout, stderr })
+
+      try {
+        purgeDataDir()
+      } catch (error) {
+        return res.json({ error })
+      }
+
+      mongoIsInitialized = false
+      execFile(
+        'docker',
+        ['compose', 'down', '--timeout=0', '--volumes', 'mongo', 'redis'],
+        (error, stdout, stderr) => {
+          res.json({ error, stdout, stderr })
+        }
+      )
+    }
+  )
+})
+
 app.use(handleValidationErrors())

+purgeDataDir()
+
 // Init on startup
 mongoInit(err => {
   if (err) {
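Summing up the host-admin changes: the /set/vars and /set/version endpoints are folded into /reconfigure, which rewrites docker-compose.override.yml (image tag, env vars, optional data-dir mount) in one step, and a new /reset/data endpoint stops sharelatex, purges the shared data dir and drops the mongo/redis volumes. A sketch of the same HTTP surface the tests hit, using plain fetch instead of the fetchJSON wrapper in the hostAdminClient helper (the helper names below are illustrative):

// POST against the host-admin container; mirrors what the client helper does.
async function post(path: string, body?: object) {
  const res = await fetch(`http://host-admin${path}`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: body ? JSON.stringify(body) : undefined,
  })
  return res.json()
}

async function example() {
  // Rewrites docker-compose.override.yml and waits for `docker compose up --detach --wait sharelatex`.
  await post('/reconfigure', { pro: true, version: '5.0', vars: {}, withDataDir: true })

  // Stops sharelatex, purges ./data on the host, and drops the mongo/redis volumes.
  await post('/reset/data')
}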
server-ce/test/upgrading.spec.ts (new file, 226 additions)

@@ -0,0 +1,226 @@
import {
  ensureUserExists,
  login,
  resetCreatedUsersCache,
} from './helpers/login'
import { startWith } from './helpers/config'
import { dockerCompose, resetData, runScript } from './helpers/hostAdminClient'
import { createProject } from './helpers/project'
import { throttledRecompile } from './helpers/compile'

const USER = 'user@example.com'
const PROJECT_NAME = 'Old Project'

describe('Upgrading', function () {
  function testUpgrade(
    steps: {
      version: string
      vars?: Object
      newProjectButtonMatcher?: RegExp
      hook?: () => void
    }[]
  ) {
    const startOptions = steps.shift()!

    // Reset mongo/redis/on-disk data
    before(async () => {
      resetCreatedUsersCache()
      await resetData()
    })

    // Create old instance
    startWith({
      pro: true,
      version: startOptions.version,
      withDataDir: true,
      vars: startOptions.vars,
    })
    ensureUserExists({ email: USER })

    // Populate old instance
    before(() => {
      login(USER)

      cy.visit('/project')
      createProject(PROJECT_NAME, {
        newProjectButtonMatcher: startOptions.newProjectButtonMatcher,
      })
      const recompile = throttledRecompile()
      // // wait for successful compile
      cy.get('.pdf-viewer').should('contain.text', PROJECT_NAME)

      // Increment the doc version three times
      for (let i = 0; i < 3; i++) {
        // Add content
        cy.findByText('\\maketitle').parent().click()
        cy.findByText('\\maketitle')
          .parent()
          .type(`\n\\section{{}Old Section ${i}}`)

        // Trigger full flush
        recompile()
        cy.get('header').findByText('Menu').click()
        cy.findByText('Source').click()
        // close editor menu
        cy.get('#left-menu-modal').click()
      }

      // Check compile and history
      for (let i = 0; i < 3; i++) {
        cy.get('.pdf-viewer').should('contain.text', `Old Section ${i}`)
      }
      cy.findByText('History').click()
      for (let i = 0; i < 3; i++) {
        cy.findByText(new RegExp(`\\\\section\{Old Section ${i}}`))
      }
    })

    // Upgrades
    for (const step of steps) {
      before(() => {
        // Navigate way from editor to avoid redirect to /login when the next instance comes up (which slows down tests)
        cy.visit('/project', {})
      })
      // Graceful shutdown
      before(async function () {
        this.timeout(20 * 1000)
        // Ideally we could use the container shutdown procedure, but it's too slow and unreliable for tests.
        // TODO(das7pad): adopt the below after speeding up the graceful shutdown procedure on all supported releases
        // await dockerCompose('stop', 'sharelatex')

        // For now, we are stuck with manually flushing things
        await runScript({
          cwd: 'services/document-updater',
          script: 'scripts/flush_all.js',
        })
        await runScript({
          cwd: 'services/project-history',
          script: 'scripts/flush_all.js',
        })
      })
      startWith({
        pro: true,
        version: step.version,
        vars: step.vars,
        withDataDir: true,
      })

      step.hook?.()
    }
    beforeEach(() => {
      login(USER)
    })

    it('should list the old project', () => {
      cy.visit('/project')
      cy.findByText(PROJECT_NAME)
    })

    it('should open the old project', () => {
      cy.visit('/project')
      cy.findByText(PROJECT_NAME).click()

      cy.url().should('match', /\/project\/[a-fA-F0-9]{24}/)
      cy.findByRole('navigation').within(() => {
        cy.findByText(PROJECT_NAME)
      })
      const recompile = throttledRecompile()

      // wait for successful compile
      cy.get('.pdf-viewer').should('contain.text', PROJECT_NAME)
      cy.get('.pdf-viewer').should('contain.text', 'Old Section 2')

      // // Add more content
      cy.findByText('\\maketitle').parent().click()
      cy.findByText('\\maketitle').parent().type('\n\\section{{}New Section}')

      // Check compile and history
      recompile()
      cy.get('.pdf-viewer').should('contain.text', 'New Section')
      cy.findByText('History').click()
      cy.findByText(/\\section\{Old Section 2}/)
      cy.findByText(/\\section\{New Section}/)
    })
  }

  const optionsFourDotTwo = {
    version: '4.2',
    vars: {
      // Add database vars with old branding
      SHARELATEX_MONGO_URL: 'mongodb://mongo/sharelatex',
      SHARELATEX_REDIS_HOST: 'redis',
    },
    newProjectButtonMatcher: /create first project/i,
  }
  describe('from 4.2 to latest', () => {
    testUpgrade([optionsFourDotTwo, { version: 'latest' }])
  })
  describe('from 5.0 to latest', () => {
    testUpgrade([{ version: '5.0' }, { version: 'latest' }])
  })
  describe('doc version recovery', () => {
    testUpgrade([
      optionsFourDotTwo,
      {
        version: '5.0.1-RC1',
        hook() {
          before(function () {
            login(USER)
            cy.visit('/')
            cy.findByText(PROJECT_NAME).click()
            const recompile = throttledRecompile()

            // Make a change
            cy.findByText('\\maketitle').parent().click()
            cy.findByText('\\maketitle')
              .parent()
              .type('\n\\section{{}FiveOOne Section}')

            // Trigger flush
            recompile()
            cy.get('.pdf-viewer').should('contain.text', 'FiveOOne Section')

            // Check for broken history, i.e. not synced with latest edit
            cy.findByText('History').click()
            cy.findByText(/\\section\{Old Section 2}/) // wait for lazy loading
            cy.findByText(/\\section\{FiveOOne Section}/).should('not.exist')
          })
        },
      },
      {
        version: 'latest',
        hook() {
          before(async function () {
            this.timeout(20_000)
            const needle = 'Finished resyncing history for all projects.'
            for (let i = 0; i < 30; i++) {
              const { stdout } = await dockerCompose('logs', 'sharelatex')
              if (stdout.includes(needle)) {
                return
              }
              await new Promise(resolve => setTimeout(resolve, 500))
            }
            const { stdout } = await dockerCompose('logs', 'sharelatex')
            expect(stdout).to.contain(
              needle,
              'Doc version recovery did not finish yet.'
            )
          })

          before(function () {
            login(USER)
            cy.visit('/')
            cy.findByText(PROJECT_NAME).click()

            // The edit that was made while the history was broken should be there now.
            cy.findByText('History').click()
            cy.findByText(/\\section\{FiveOOne Section}/)

            // Check indicator of force resync
            cy.findByText('Overleaf History System')
          })
        },
      },
    ])
  })
})
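The testUpgrade() helper treats the first step as the release to seed and every later step as an upgrade target, with an optional hook() per step for extra before() assertions. Adding another migration path is just another describe block inside describe('Upgrading', ...); a sketch, where the 4.1 tag is hypothetical and not among the images prefetched by the Makefile above, shown only to illustrate the shape:

describe('from 4.1 via 5.0 to latest', () => {
  testUpgrade([
    {
      version: '4.1', // hypothetical starting release, for illustration only
      vars: {
        SHARELATEX_MONGO_URL: 'mongodb://mongo/sharelatex',
        SHARELATEX_REDIS_HOST: 'redis',
      },
      newProjectButtonMatcher: /create first project/i,
    },
    { version: '5.0' },
    { version: 'latest' },
  ])
})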