Mirror of https://github.com/overleaf/overleaf.git, synced 2024-11-28 21:13:20 -05:00
Merge pull request #15410 from overleaf/jpa-history-v1-verbose-logging

[history-v1] add verbose logging to I/O operations via object-persistor

GitOrigin-RevId: 4fe7a77f289d5d22a63d6fd0665e5cffe5843b65

parent 47820c61a0
commit c03f2807bf

3 changed files with 45 additions and 17 deletions
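The change applies one pattern throughout: each I/O helper logs a "started" line before calling into the object persistor and a "finished" line that is emitted whether the call succeeds or fails. Below is a minimal sketch of the async/await form of that pattern, using the same bunyan-style logger.debug(attrs, message) call that appears in the diff; readBlob and doFetch are illustrative names, not code from the commit.

const logger = require('@overleaf/logger')

// Sketch of the logging wrapper this commit adds around I/O calls.
// `doFetch` stands in for the real @overleaf/object-persistor call.
async function readBlob(projectId, hash, doFetch) {
  logger.debug({ projectId, hash }, 'readBlob started')
  try {
    return await doFetch(projectId, hash)
  } finally {
    // Runs on success and on error, so the started/finished lines always pair up
    // and any exception still propagates to the caller.
    logger.debug({ projectId, hash }, 'readBlob finished')
  }
}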
@@ -194,6 +194,8 @@ async function getProjectBlob(req, res, next) {
   const hash = req.swagger.params.hash.value

   const blobStore = new BlobStore(projectId)
+  logger.debug({ projectId, hash }, 'getProjectBlob started')
+  try {
     let stream
     try {
       stream = await blobStore.getStream(hash)
@@ -206,6 +208,9 @@ async function getProjectBlob(req, res, next) {
     }
     res.set('Content-Type', 'application/octet-stream')
     await pipeline(stream, res)
+  } finally {
+    logger.debug({ projectId, hash }, 'getProjectBlob finished')
+  }
 }

 async function getSnapshotAtVersion(projectId, version) {

@@ -20,6 +20,7 @@ const projectKey = require('../project_key')
 const streams = require('../streams')
 const postgresBackend = require('./postgres')
 const mongoBackend = require('./mongo')
+const logger = require('@overleaf/logger')

 const GLOBAL_BLOBS = new Map()

@@ -34,9 +35,14 @@ function makeProjectKey(projectId, hash) {
 async function uploadBlob(projectId, blob, stream) {
   const bucket = config.get('blobStore.projectBucket')
   const key = makeProjectKey(projectId, blob.getHash())
+  logger.debug({ projectId, blob }, 'uploadBlob started')
+  try {
     await persistor.sendStream(bucket, key, stream, {
       contentType: 'application/octet-stream',
     })
+  } finally {
+    logger.debug({ projectId, blob }, 'uploadBlob finished')
+  }
 }

 function getBlobLocation(projectId, hash) {
@@ -109,7 +115,12 @@ async function getStringLengthOfFile(byteLength, pathname) {
 async function deleteBlobsInBucket(projectId) {
   const bucket = config.get('blobStore.projectBucket')
   const prefix = `${projectKey.format(projectId)}/`
+  logger.debug({ projectId }, 'deleteBlobsInBucket started')
+  try {
     await persistor.deleteDirectory(bucket, prefix)
+  } finally {
+    logger.debug({ projectId }, 'deleteBlobsInBucket finished')
+  }
 }

 async function loadGlobalBlobs() {
@@ -202,9 +213,15 @@ class BlobStore {
   async getString(hash) {
     assert.blobHash(hash, 'bad hash')

+    const projectId = this.projectId
+    logger.debug({ projectId, hash }, 'getString started')
+    try {
       const stream = await this.getStream(hash)
       const buffer = await streams.readStreamToBuffer(stream)
       return buffer.toString()
+    } finally {
+      logger.debug({ projectId, hash }, 'getString finished')
+    }
   }

   /**

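For illustration, a hypothetical caller of the BlobStore class touched above; the require path and named export are assumptions, but the constructor argument and getString(hash) signature match what the diff shows.

// Hypothetical usage sketch; the import path and export shape are assumptions.
const { BlobStore } = require('./blob_store')

async function printBlobLength(projectId, hash) {
  const blobStore = new BlobStore(projectId)
  // With debug logging enabled this emits 'getString started' and
  // 'getString finished' around the persistor read.
  const content = await blobStore.getString(hash)
  console.log(content.length)
}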
@@ -8,6 +8,7 @@ const path = require('path')

 const OError = require('@overleaf/o-error')
 const objectPersistor = require('@overleaf/object-persistor')
+const logger = require('@overleaf/logger')

 const assert = require('./assert')
 const persistor = require('./persistor')
@@ -70,6 +71,7 @@ HistoryStore.prototype.loadRaw = function historyStoreLoadRaw(

   const key = getKey(projectId, chunkId)

+  logger.debug({ projectId, chunkId }, 'loadRaw started')
   return BPromise.resolve()
     .then(() => persistor.getObjectStream(BUCKET, key))
     .then(streams.gunzipStreamToBuffer)
@@ -80,6 +82,7 @@ HistoryStore.prototype.loadRaw = function historyStoreLoadRaw(
       }
       throw new HistoryStore.LoadError(projectId, chunkId).withCause(err)
     })
+    .finally(() => logger.debug({ projectId, chunkId }, 'loadRaw finished'))
 }

 /**
@@ -102,6 +105,7 @@ HistoryStore.prototype.storeRaw = function historyStoreStoreRaw(
   const key = getKey(projectId, chunkId)
   const stream = streams.gzipStringToStream(JSON.stringify(rawHistory))

+  logger.debug({ projectId, chunkId }, 'storeRaw started')
   return BPromise.resolve()
     .then(() =>
       persistor.sendStream(BUCKET, key, stream, {
@@ -112,6 +116,7 @@ HistoryStore.prototype.storeRaw = function historyStoreStoreRaw(
     .catch(err => {
       throw new HistoryStore.StoreError(projectId, chunkId).withCause(err)
     })
+    .finally(() => logger.debug({ projectId, chunkId }, 'storeRaw finished'))
 }

 /**
@@ -121,12 +126,13 @@ HistoryStore.prototype.storeRaw = function historyStoreStoreRaw(
  * @return {Promise}
  */
 HistoryStore.prototype.deleteChunks = function historyDeleteChunks(chunks) {
+  logger.debug({ chunks }, 'deleteChunks started')
   return BPromise.all(
     chunks.map(chunk => {
       const key = getKey(chunk.projectId, chunk.chunkId)
       return persistor.deleteObject(BUCKET, key)
     })
-  )
+  ).finally(() => logger.debug({ chunks }, 'deleteChunks finished'))
 }

 module.exports = new HistoryStore()
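The history store uses promise chains rather than async/await, so the closing log line is attached with .finally() instead of a try/finally block. A sketch of that variant, assuming BPromise is the bluebird library (as the name in the file suggests); loadSomething and getObject are illustrative names, not code from the commit.

const BPromise = require('bluebird')
const logger = require('@overleaf/logger')

// Promise-chain form of the same started/finished logging pattern.
function loadSomething(projectId, chunkId, getObject) {
  logger.debug({ projectId, chunkId }, 'loadSomething started')
  return BPromise.resolve()
    .then(() => getObject(projectId, chunkId))
    // .finally runs whether the chain resolved or rejected,
    // so the finished line is always emitted.
    .finally(() =>
      logger.debug({ projectId, chunkId }, 'loadSomething finished')
    )
}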