Merge pull request #15410 from overleaf/jpa-history-v1-verbose-logging

[history-v1] add verbose logging to I/O operations via object-persistor

GitOrigin-RevId: 4fe7a77f289d5d22a63d6fd0665e5cffe5843b65
This commit is contained in:
Jakob Ackermann 2023-10-24 15:28:46 +02:00 committed by Copybot
parent 47820c61a0
commit c03f2807bf
3 changed files with 45 additions and 17 deletions

View file

@@ -194,18 +194,23 @@ async function getProjectBlob(req, res, next) {
const hash = req.swagger.params.hash.value const hash = req.swagger.params.hash.value
const blobStore = new BlobStore(projectId) const blobStore = new BlobStore(projectId)
let stream logger.debug({ projectId, hash }, 'getProjectBlob started')
try { try {
stream = await blobStore.getStream(hash) let stream
} catch (err) { try {
if (err instanceof Blob.NotFoundError) { stream = await blobStore.getStream(hash)
return render.notFound(res) } catch (err) {
} else { if (err instanceof Blob.NotFoundError) {
throw err return render.notFound(res)
} else {
throw err
}
} }
res.set('Content-Type', 'application/octet-stream')
await pipeline(stream, res)
} finally {
logger.debug({ projectId, hash }, 'getProjectBlob finished')
} }
res.set('Content-Type', 'application/octet-stream')
await pipeline(stream, res)
} }
async function getSnapshotAtVersion(projectId, version) { async function getSnapshotAtVersion(projectId, version) {

View file

@@ -20,6 +20,7 @@ const projectKey = require('../project_key')
const streams = require('../streams') const streams = require('../streams')
const postgresBackend = require('./postgres') const postgresBackend = require('./postgres')
const mongoBackend = require('./mongo') const mongoBackend = require('./mongo')
const logger = require('@overleaf/logger')
const GLOBAL_BLOBS = new Map() const GLOBAL_BLOBS = new Map()
@@ -34,9 +35,14 @@ function makeProjectKey(projectId, hash) {
/**
 * Upload a blob's content to the project bucket in object storage.
 *
 * @param {string} projectId - owning project id, used to build the object key
 * @param {Blob} blob - blob metadata; its hash determines the storage key
 * @param {stream.Readable} stream - blob content to send
 * @return {Promise<void>}
 */
async function uploadBlob(projectId, blob, stream) {
  const bucket = config.get('blobStore.projectBucket')
  const key = makeProjectKey(projectId, blob.getHash())
  logger.debug({ projectId, blob }, 'uploadBlob started')
  try {
    await persistor.sendStream(bucket, key, stream, {
      contentType: 'application/octet-stream',
    })
  } finally {
    // logged even on failure so started/finished pairs can be matched in logs
    logger.debug({ projectId, blob }, 'uploadBlob finished')
  }
}
function getBlobLocation(projectId, hash) { function getBlobLocation(projectId, hash) {
@@ -109,7 +115,12 @@ async function getStringLengthOfFile(byteLength, pathname) {
/**
 * Delete every blob stored for a project by removing the project's whole
 * key prefix from the project bucket.
 *
 * @param {string} projectId
 * @return {Promise<void>}
 */
async function deleteBlobsInBucket(projectId) {
  const bucket = config.get('blobStore.projectBucket')
  const prefix = `${projectKey.format(projectId)}/`
  logger.debug({ projectId }, 'deleteBlobsInBucket started')
  try {
    await persistor.deleteDirectory(bucket, prefix)
  } finally {
    // logged even on failure so started/finished pairs can be matched in logs
    logger.debug({ projectId }, 'deleteBlobsInBucket finished')
  }
}
async function loadGlobalBlobs() { async function loadGlobalBlobs() {
@@ -202,9 +213,15 @@ class BlobStore {
async getString(hash) { async getString(hash) {
assert.blobHash(hash, 'bad hash') assert.blobHash(hash, 'bad hash')
const stream = await this.getStream(hash) const projectId = this.projectId
const buffer = await streams.readStreamToBuffer(stream) logger.debug({ projectId, hash }, 'getString started')
return buffer.toString() try {
const stream = await this.getStream(hash)
const buffer = await streams.readStreamToBuffer(stream)
return buffer.toString()
} finally {
logger.debug({ projectId, hash }, 'getString finished')
}
} }
/** /**

View file

@@ -8,6 +8,7 @@ const path = require('path')
const OError = require('@overleaf/o-error') const OError = require('@overleaf/o-error')
const objectPersistor = require('@overleaf/object-persistor') const objectPersistor = require('@overleaf/object-persistor')
const logger = require('@overleaf/logger')
const assert = require('./assert') const assert = require('./assert')
const persistor = require('./persistor') const persistor = require('./persistor')
@@ -70,6 +71,7 @@ HistoryStore.prototype.loadRaw = function historyStoreLoadRaw(
const key = getKey(projectId, chunkId) const key = getKey(projectId, chunkId)
logger.debug({ projectId, chunkId }, 'loadRaw started')
return BPromise.resolve() return BPromise.resolve()
.then(() => persistor.getObjectStream(BUCKET, key)) .then(() => persistor.getObjectStream(BUCKET, key))
.then(streams.gunzipStreamToBuffer) .then(streams.gunzipStreamToBuffer)
@@ -80,6 +82,7 @@ HistoryStore.prototype.loadRaw = function historyStoreLoadRaw(
} }
throw new HistoryStore.LoadError(projectId, chunkId).withCause(err) throw new HistoryStore.LoadError(projectId, chunkId).withCause(err)
}) })
.finally(() => logger.debug({ projectId, chunkId }, 'loadRaw finished'))
} }
/** /**
@@ -102,6 +105,7 @@ HistoryStore.prototype.storeRaw = function historyStoreStoreRaw(
const key = getKey(projectId, chunkId) const key = getKey(projectId, chunkId)
const stream = streams.gzipStringToStream(JSON.stringify(rawHistory)) const stream = streams.gzipStringToStream(JSON.stringify(rawHistory))
logger.debug({ projectId, chunkId }, 'storeRaw started')
return BPromise.resolve() return BPromise.resolve()
.then(() => .then(() =>
persistor.sendStream(BUCKET, key, stream, { persistor.sendStream(BUCKET, key, stream, {
@@ -112,6 +116,7 @@ HistoryStore.prototype.storeRaw = function historyStoreStoreRaw(
.catch(err => { .catch(err => {
throw new HistoryStore.StoreError(projectId, chunkId).withCause(err) throw new HistoryStore.StoreError(projectId, chunkId).withCause(err)
}) })
.finally(() => logger.debug({ projectId, chunkId }, 'storeRaw finished'))
} }
/** /**
@@ -121,12 +126,13 @@ HistoryStore.prototype.storeRaw = function historyStoreStoreRaw(
* @return {Promise} * @return {Promise}
*/ */
/**
 * Delete the given chunks from object storage, issuing all deletes in
 * parallel.
 *
 * @param {Array<{projectId: string, chunkId: string}>} chunks
 * @return {Promise} resolves when every delete has completed
 */
HistoryStore.prototype.deleteChunks = function historyDeleteChunks(chunks) {
  logger.debug({ chunks }, 'deleteChunks started')
  return BPromise.all(
    chunks.map(chunk => {
      const key = getKey(chunk.projectId, chunk.chunkId)
      return persistor.deleteObject(BUCKET, key)
    })
    // .finally runs whether the deletes succeed or fail, so started/finished
    // log pairs always match up
  ).finally(() => logger.debug({ chunks }, 'deleteChunks finished'))
}
module.exports = new HistoryStore() module.exports = new HistoryStore()