mirror of
https://github.com/overleaf/overleaf.git
synced 2024-11-21 20:47:08 -05:00
Merge pull request #4866 from overleaf/bg-gcs-delete-directory-batch
make object-persistor delete gcs files in batches GitOrigin-RevId: 8ebc892c5f6eb30507ec41d5d3a108e650af5cac
This commit is contained in:
parent
b5e1ceef17
commit
d4563c8786
4 changed files with 44 additions and 28 deletions
2
libraries/object-persistor/package-lock.json
generated
2
libraries/object-persistor/package-lock.json
generated
|
@@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@overleaf/object-persistor",
|
||||
"version": "1.0.1",
|
||||
"version": "1.0.2",
|
||||
"lockfileVersion": 1,
|
||||
"requires": true,
|
||||
"dependencies": {
|
||||
|
|
|
@@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@overleaf/object-persistor",
|
||||
"version": "1.0.1",
|
||||
"version": "1.0.2",
|
||||
"description": "Module for storing objects in multiple backends, with fallback on 404 to assist migration between them",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
|
|
|
@@ -216,32 +216,36 @@ module.exports = class GcsPersistor extends AbstractPersistor {
|
|||
}
|
||||
|
||||
async deleteDirectory(bucketName, key) {
|
||||
try {
|
||||
const [files] = await this.storage
|
||||
.bucket(bucketName)
|
||||
.getFiles({ directory: key })
|
||||
|
||||
if (Array.isArray(files) && files.length > 0) {
|
||||
await asyncPool(
|
||||
this.settings.deleteConcurrency,
|
||||
files,
|
||||
async (file) => {
|
||||
await this.deleteObject(bucketName, file.name)
|
||||
}
|
||||
let query = { directory: key, autoPaginate: false }
|
||||
do {
|
||||
try {
|
||||
const [files, nextQuery] = await this.storage
|
||||
.bucket(bucketName)
|
||||
.getFiles(query)
|
||||
// iterate over paginated results using the nextQuery returned by getFiles
|
||||
query = nextQuery
|
||||
if (Array.isArray(files) && files.length > 0) {
|
||||
await asyncPool(
|
||||
this.settings.deleteConcurrency,
|
||||
files,
|
||||
async (file) => {
|
||||
await this.deleteObject(bucketName, file.name)
|
||||
}
|
||||
)
|
||||
}
|
||||
} catch (err) {
|
||||
const error = PersistorHelper.wrapError(
|
||||
err,
|
||||
'failed to delete directory in GCS',
|
||||
{ bucketName, key },
|
||||
WriteError
|
||||
)
|
||||
if (error instanceof NotFoundError) {
|
||||
return
|
||||
}
|
||||
throw error
|
||||
}
|
||||
} catch (err) {
|
||||
const error = PersistorHelper.wrapError(
|
||||
err,
|
||||
'failed to delete directory in GCS',
|
||||
{ bucketName, key },
|
||||
WriteError
|
||||
)
|
||||
if (error instanceof NotFoundError) {
|
||||
return
|
||||
}
|
||||
throw error
|
||||
}
|
||||
} while (query)
|
||||
}
|
||||
|
||||
async directorySize(bucketName, key) {
|
||||
|
|
|
@@ -556,18 +556,30 @@ describe('GcsPersistorTests', function () {
|
|||
const directoryName = `${ObjectId()}/${ObjectId()}`
|
||||
describe('with valid parameters', function () {
|
||||
beforeEach(async function () {
|
||||
GcsBucket.getFiles = sinon.stub()
|
||||
// set up multiple paginated calls to getFiles
|
||||
GcsBucket.getFiles
|
||||
.withArgs({ directory: directoryName, autoPaginate: false })
|
||||
.resolves([['aaa', 'bbb'], 'call-1'])
|
||||
GcsBucket.getFiles
|
||||
.withArgs('call-1')
|
||||
.resolves([['ccc', 'ddd', 'eee'], 'call-2'])
|
||||
GcsBucket.getFiles.withArgs('call-2').resolves([['fff', 'ggg']])
|
||||
return GcsPersistor.deleteDirectory(bucket, directoryName)
|
||||
})
|
||||
|
||||
it('should list the objects in the directory', function () {
|
||||
expect(Storage.prototype.bucket).to.have.been.calledWith(bucket)
|
||||
expect(GcsBucket.getFiles).to.have.been.calledWith({
|
||||
directory: directoryName
|
||||
directory: directoryName,
|
||||
autoPaginate: false
|
||||
})
|
||||
expect(GcsBucket.getFiles).to.have.been.calledWith('call-1')
|
||||
expect(GcsBucket.getFiles).to.have.been.calledWith('call-2')
|
||||
})
|
||||
|
||||
it('should delete the files', function () {
|
||||
expect(GcsFile.delete).to.have.been.calledTwice
|
||||
expect(GcsFile.delete.callCount).to.equal(7)
|
||||
})
|
||||
})
|
||||
|
||||
|
|
Loading…
Reference in a new issue