Merge pull request #15414 from overleaf/jpa-server-pro-4-1-4

[server-pro] prepare hotfix release 4.1.4

GitOrigin-RevId: 2301f366f9b7f170a5801c5b74d10b9b7757973e
This commit is contained in:
Jakob Ackermann 2023-10-24 15:26:12 +02:00 committed by Copybot
parent b8f4ed0e2c
commit b7c4f3333e
3 changed files with 252 additions and 0 deletions

View file

@@ -0,0 +1,9 @@
FROM sharelatex/sharelatex:4.1.3
# Patch: Make history-v1 http request timeout configurable
COPY pr_15409.patch /
RUN cd / && patch -p0 < pr_15409.patch
# Patch: Add verbose logging for I/O in history-v1
COPY pr_15410.patch .
RUN patch -p0 < pr_15410.patch

View file

@@ -0,0 +1,90 @@
--- overleaf/services/history-v1/config/custom-environment-variables.json
+++ overleaf/services/history-v1/config/custom-environment-variables.json
@@ -43,5 +43,6 @@
},
"clusterWorkers": "CLUSTER_WORKERS",
"maxFileUploadSize": "MAX_FILE_UPLOAD_SIZE",
- "httpsOnly": "HTTPS_ONLY"
+ "httpsOnly": "HTTPS_ONLY",
+ "httpRequestTimeout": "SHARELATEX_HISTORY_V1_HTTP_REQUEST_TIMEOUT"
}
--- etc/sharelatex/settings.js
+++ etc/sharelatex/settings.js
@@ -261,6 +261,10 @@ const settings = {
url: process.env.V1_HISTORY_URL || 'http://localhost:3100/api',
user: 'staging',
pass: process.env.STAGING_PASSWORD,
+ requestTimeout: parseInt(
+ process.env.SHARELATEX_HISTORY_V1_HTTP_REQUEST_TIMEOUT || '300000', // default is 5min
+ 10
+ ),
},
},
references: {},
diff --git a/services/history-v1/app.js b/services/history-v1/app.js
index 6b3a2ba8f89..2ad490fb6b6 100644
--- overleaf/services/history-v1/app.js
+++ overleaf/services/history-v1/app.js
@@ -5,6 +5,7 @@
// Metrics must be initialized before importing anything else
require('@overleaf/metrics/initialize')
+const config = require('config')
const Events = require('events')
const BPromise = require('bluebird')
const express = require('express')
@@ -47,9 +48,9 @@ app.use(cors())
security.setupSSL(app)
security.setupBasicHttpAuthForSwaggerDocs(app)
+const HTTP_REQUEST_TIMEOUT = parseInt(config.get('httpRequestTimeout'), 10)
app.use(function (req, res, next) {
- // use a 5 minute timeout on all responses
- res.setTimeout(5 * 60 * 1000)
+ res.setTimeout(HTTP_REQUEST_TIMEOUT)
next()
})
--- overleaf/services/history-v1/config/default.json
+++ overleaf/services/history-v1/config/default.json
@@ -25,5 +25,6 @@
"maxFileUploadSize": "52428800",
"databasePoolMin": "2",
"databasePoolMax": "10",
- "httpsOnly": "false"
+ "httpsOnly": "false",
+ "httpRequestTimeout": "300000"
}
--- overleaf/services/project-history/app/js/HistoryStoreManager.js
+++ overleaf/services/project-history/app/js/HistoryStoreManager.js
@@ -17,7 +17,7 @@ import * as Errors from './Errors.js'
import * as LocalFileWriter from './LocalFileWriter.js'
import * as HashManager from './HashManager.js'
-const HTTP_REQUEST_TIMEOUT = 300 * 1000 // 5 minutes
+const HTTP_REQUEST_TIMEOUT = Settings.apis.history_v1.requestTimeout
/**
* Container for functions that need to be mocked in tests
--- overleaf/services/project-history/config/settings.defaults.cjs
+++ overleaf/services/project-history/config/settings.defaults.cjs
@@ -20,6 +20,9 @@ module.exports = {
filestore: {
url: `http://${process.env.FILESTORE_HOST || 'localhost'}:3009`,
},
+ history_v1: {
+ requestTimeout: parseInt(process.env.V1_REQUEST_TIMEOUT || '300000', 10),
+ },
web: {
url: `http://${
process.env.WEB_API_HOST || process.env.WEB_HOST || 'localhost'
--- overleaf/services/project-history/test/unit/js/HistoryStoreManager/HistoryStoreManagerTests.js
+++ overleaf/services/project-history/test/unit/js/HistoryStoreManager/HistoryStoreManagerTests.js
@@ -23,6 +23,7 @@ describe('HistoryStoreManager', function () {
filestore: {
url: 'http://filestore.sharelatex.production',
},
+ history_v1: { requestTimeout: 123 },
},
}
this.latestChunkRequestArgs = sinon.match({

View file

@@ -0,0 +1,153 @@
--- services/history-v1/api/controllers/projects.js
+++ services/history-v1/api/controllers/projects.js
@@ -194,18 +194,23 @@ async function getProjectBlob(req, res, next) {
const hash = req.swagger.params.hash.value
const blobStore = new BlobStore(projectId)
- let stream
+ logger.debug({ projectId, hash }, 'getProjectBlob started')
try {
- stream = await blobStore.getStream(hash)
- } catch (err) {
- if (err instanceof Blob.NotFoundError) {
- return render.notFound(res)
- } else {
- throw err
+ let stream
+ try {
+ stream = await blobStore.getStream(hash)
+ } catch (err) {
+ if (err instanceof Blob.NotFoundError) {
+ return render.notFound(res)
+ } else {
+ throw err
+ }
}
+ res.set('Content-Type', 'application/octet-stream')
+ await pipeline(stream, res)
+ } finally {
+ logger.debug({ projectId, hash }, 'getProjectBlob finished')
}
- res.set('Content-Type', 'application/octet-stream')
- await pipeline(stream, res)
}
async function getSnapshotAtVersion(projectId, version) {
--- services/history-v1/storage/lib/blob_store/index.js
+++ services/history-v1/storage/lib/blob_store/index.js
@@ -20,6 +20,7 @@ const projectKey = require('../project_key')
const streams = require('../streams')
const postgresBackend = require('./postgres')
const mongoBackend = require('./mongo')
+const logger = require('@overleaf/logger')
const GLOBAL_BLOBS = new Map()
@@ -34,9 +35,14 @@ function makeProjectKey(projectId, hash) {
async function uploadBlob(projectId, blob, stream) {
const bucket = config.get('blobStore.projectBucket')
const key = makeProjectKey(projectId, blob.getHash())
- await persistor.sendStream(bucket, key, stream, {
- contentType: 'application/octet-stream',
- })
+ logger.debug({ projectId, blob }, 'uploadBlob started')
+ try {
+ await persistor.sendStream(bucket, key, stream, {
+ contentType: 'application/octet-stream',
+ })
+ } finally {
+ logger.debug({ projectId, blob }, 'uploadBlob finished')
+ }
}
function getBlobLocation(projectId, hash) {
@@ -109,7 +115,12 @@ async function getStringLengthOfFile(byteLength, pathname) {
async function deleteBlobsInBucket(projectId) {
const bucket = config.get('blobStore.projectBucket')
const prefix = `${projectKey.format(projectId)}/`
- await persistor.deleteDirectory(bucket, prefix)
+ logger.debug({ projectId }, 'deleteBlobsInBucket started')
+ try {
+ await persistor.deleteDirectory(bucket, prefix)
+ } finally {
+ logger.debug({ projectId }, 'deleteBlobsInBucket finished')
+ }
}
async function loadGlobalBlobs() {
@@ -202,9 +213,15 @@ class BlobStore {
async getString(hash) {
assert.blobHash(hash, 'bad hash')
- const stream = await this.getStream(hash)
- const buffer = await streams.readStreamToBuffer(stream)
- return buffer.toString()
+ const projectId = this.projectId
+ logger.debug({ projectId, hash }, 'getString started')
+ try {
+ const stream = await this.getStream(hash)
+ const buffer = await streams.readStreamToBuffer(stream)
+ return buffer.toString()
+ } finally {
+ logger.debug({ projectId, hash }, 'getString finished')
+ }
}
/**
--- services/history-v1/storage/lib/history_store.js
+++ services/history-v1/storage/lib/history_store.js
@@ -8,6 +8,7 @@ const path = require('path')
const OError = require('@overleaf/o-error')
const objectPersistor = require('@overleaf/object-persistor')
+const logger = require('@overleaf/logger')
const assert = require('./assert')
const persistor = require('./persistor')
@@ -70,6 +71,7 @@ HistoryStore.prototype.loadRaw = function historyStoreLoadRaw(
const key = getKey(projectId, chunkId)
+ logger.debug({ projectId, chunkId }, 'loadRaw started')
return BPromise.resolve()
.then(() => persistor.getObjectStream(BUCKET, key))
.then(streams.gunzipStreamToBuffer)
@@ -80,6 +82,7 @@ HistoryStore.prototype.loadRaw = function historyStoreLoadRaw(
}
throw new HistoryStore.LoadError(projectId, chunkId).withCause(err)
})
+ .finally(() => logger.debug({ projectId, chunkId }, 'loadRaw finished'))
}
/**
@@ -102,6 +105,7 @@ HistoryStore.prototype.storeRaw = function historyStoreStoreRaw(
const key = getKey(projectId, chunkId)
const stream = streams.gzipStringToStream(JSON.stringify(rawHistory))
+ logger.debug({ projectId, chunkId }, 'storeRaw started')
return BPromise.resolve()
.then(() =>
persistor.sendStream(BUCKET, key, stream, {
@@ -112,6 +116,7 @@ HistoryStore.prototype.storeRaw = function historyStoreStoreRaw(
.catch(err => {
throw new HistoryStore.StoreError(projectId, chunkId).withCause(err)
})
+ .finally(() => logger.debug({ projectId, chunkId }, 'storeRaw finished'))
}
/**
@@ -121,12 +126,13 @@ HistoryStore.prototype.storeRaw = function historyStoreStoreRaw(
* @return {Promise}
*/
HistoryStore.prototype.deleteChunks = function historyDeleteChunks(chunks) {
+ logger.debug({ chunks }, 'deleteChunks started')
return BPromise.all(
chunks.map(chunk => {
const key = getKey(chunk.projectId, chunk.chunkId)
return persistor.deleteObject(BUCKET, key)
})
- )
+ ).finally(() => logger.debug({ chunks }, 'deleteChunks finished'))
}
module.exports = new HistoryStore()