mirror of
https://github.com/overleaf/overleaf.git
Update acceptance tests for object-persistor and gcs

parent 6e04db552a
commit e302c1d844
4 changed files with 98 additions and 67 deletions

@@ -12,22 +12,37 @@
  * DS207: Consider shorter variations of null checks
  * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
  */
-const sinon = require('sinon')
-const chai = require('chai')
-const should = chai.should()
-const { db, ObjectId, ISODate } = require('../../../app/js/mongojs')
-const async = require('async')
+process.env.BACKEND = 'gcs'
+
 const Settings = require('settings-sharelatex')
-const DocArchiveManager = require('../../../app/js/DocArchiveManager.js')
-const request = require('request')
+const chai = require('chai')
+const { expect } = chai
+const should = chai.should()
+const { db, ObjectId } = require('../../../app/js/mongojs')
+const async = require('async')
 const DocstoreApp = require('./helpers/DocstoreApp')
 const DocstoreClient = require('./helpers/DocstoreClient')
+const { Storage } = require('@google-cloud/storage')
+const Persistor = require('../../../app/js/PersistorManager')
+const Streamifier = require('streamifier')
+
+function uploadContent(path, json, callback) {
+  const stream = Streamifier.createReadStream(JSON.stringify(json))
+  Persistor.sendStream(Settings.docstore.bucket, path, stream)
+    .then(() => callback())
+    .catch(callback)
+}
 
 describe('Archiving', function () {
   before(function (done) {
     return DocstoreApp.ensureRunning(done)
   })
 
+  before(async function () {
+    const storage = new Storage(Settings.docstore.gcs.endpoint)
+    await storage.createBucket(Settings.docstore.bucket)
+    await storage.createBucket(`${Settings.docstore.bucket}-deleted`)
+  })
+
   describe('multiple docs in a project', function () {
     before(function (done) {
       this.project_id = ObjectId()
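
For context, the new uploadContent() helper is what lets these tests seed archived docs through the object-persistor abstraction instead of talking to S3 directly. A rough usage sketch, assuming the helper and the requires above are in scope; the ids and lines are illustrative only:

const projectId = ObjectId()
const docId = ObjectId()

uploadContent(`${projectId}/${docId}`, ['some', 'lines'], (error) => {
  // the helper reports failures through its callback rather than throwing
  if (error) {
    throw error
  }
  // the object now lives at `${projectId}/${docId}` in Settings.docstore.bucket
})
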
@@ -99,10 +114,10 @@
         return DocstoreClient.getS3Doc(
           this.project_id,
           doc._id,
-          (error, res, s3_doc) => {
+          (error, s3_doc) => {
             s3_doc.lines.should.deep.equal(doc.lines)
             s3_doc.ranges.should.deep.equal(doc.ranges)
-            return callback()
+            callback()
           }
         )
       }
@@ -215,7 +230,7 @@ describe('Archiving', function () {
       return DocstoreClient.getS3Doc(
         this.project_id,
         this.doc._id,
-        (error, res, s3_doc) => {
+        (error, s3_doc) => {
           if (error != null) {
             throw error
           }
@@ -316,7 +331,7 @@ describe('Archiving', function () {
       return DocstoreClient.getS3Doc(
         this.project_id,
         this.doc._id,
-        (error, res, s3_doc) => {
+        (error, s3_doc) => {
           if (error != null) {
             throw error
           }
@@ -790,7 +805,7 @@ describe('Archiving', function () {
       return DocstoreClient.getS3Doc(
         this.project_id,
         this.doc._id,
-        (error, res, s3_doc) => {
+        (error, s3_doc) => {
           if (error != null) {
             throw error
           }
@@ -909,7 +924,7 @@ describe('Archiving', function () {
       return DocstoreClient.getS3Doc(
         this.project_id,
         this.doc._id,
-        (error, res, s3_doc) => {
+        (error, s3_doc) => {
           if (error != null) {
             throw error
           }
@@ -1006,7 +1021,7 @@ describe('Archiving', function () {
       return DocstoreClient.getS3Doc(
         this.project_id,
         this.doc._id,
-        (error, res, s3_doc) => {
+        (error, s3_doc) => {
           if (error != null) {
             throw error
           }
@@ -1054,39 +1069,36 @@ describe('Archiving', function () {
         ranges: {},
         version: 2
       }
-      const options = DocArchiveManager.buildS3Options(
-        `${this.project_id}/${this.doc._id}`
-      )
-      options.json = this.doc.lines
-      return request.put(options, (error, res, body) => {
-        if (error != null) {
-          throw error
-        }
-        res.statusCode.should.equal(200)
-        return db.docs.insert(
-          {
-            project_id: this.project_id,
-            _id: this.doc._id,
-            rev: this.doc.version,
-            inS3: true
-          },
-          (error) => {
-            if (error != null) {
-              throw error
-            }
-            return DocstoreClient.getAllDocs(
-              this.project_id,
-              (error, res, fetched_docs) => {
-                this.fetched_docs = fetched_docs
-                if (error != null) {
-                  throw error
-                }
-                return done()
-              }
-            )
-          }
-        )
-      })
+      uploadContent(
+        `${this.project_id}/${this.doc._id}`,
+        this.doc.lines,
+        (error) => {
+          expect(error).not.to.exist
+          db.docs.insert(
+            {
+              project_id: this.project_id,
+              _id: this.doc._id,
+              rev: this.doc.version,
+              inS3: true
+            },
+            (error) => {
+              if (error != null) {
+                throw error
+              }
+              DocstoreClient.getAllDocs(
+                this.project_id,
+                (error, res, fetched_docs) => {
+                  this.fetched_docs = fetched_docs
+                  if (error != null) {
+                    throw error
+                  }
+                  return done()
+                }
+              )
+            }
+          )
+        }
+      )
     })
 
     it('should restore the doc to mongo', function (done) {

@@ -11,12 +11,12 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
 const sinon = require('sinon')
 const chai = require('chai')
 chai.should()
 const { db, ObjectId } = require('../../../app/js/mongojs')
 const { expect } = chai
 const DocstoreApp = require('./helpers/DocstoreApp')
+const Errors = require('../../../app/js/Errors')
 
 const DocstoreClient = require('./helpers/DocstoreClient')
@@ -143,17 +143,10 @@ describe("Destroying a project's documents", function () {
     })
 
     return it('should remove the doc contents from s3', function (done) {
-      return DocstoreClient.getS3Doc(
-        this.project_id,
-        this.doc_id,
-        (error, res, s3_doc) => {
-          if (error != null) {
-            throw error
-          }
-          expect(res.statusCode).to.equal(404)
-          return done()
-        }
-      )
+      return DocstoreClient.getS3Doc(this.project_id, this.doc_id, (error) => {
+        expect(error).to.be.instanceOf(Errors.NotFoundError)
+        done()
+      })
     })
   })
 })
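
The assertion change above reflects the new client contract: a missing archived doc is no longer observed as a raw 404 response but as an error coming back from the persistor. A hedged illustration of the same check written defensively (doc ids hypothetical, helpers as defined elsewhere in this diff):

DocstoreClient.getS3Doc(project_id, doc_id, (error, s3_doc) => {
  if (error instanceof Errors.NotFoundError) {
    // previously the equivalent check was: expect(res.statusCode).to.equal(404)
    return done()
  }
  done(new Error('expected the archived doc to be gone'))
})
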

@@ -15,6 +15,13 @@ const app = require('../../../../app')
 require('logger-sharelatex').logger.level('error')
 const settings = require('settings-sharelatex')
 
+// treat unhandled promise rejections as failures
+process.on('unhandledRejection', (e) => {
+  // eslint-disable-next-line no-console
+  console.log('** Unhandled Promise Rejection **\n', e)
+  throw e
+})
+
 module.exports = {
   running: false,
   initing: false,
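
The process-level handler matters because the persistor calls are promise-based while the tests remain callback-style: a rejection nobody awaits would otherwise only print a warning and the suite would pass. A minimal illustration, not part of the diff, of what it catches:

// With the handler installed, this stray rejection is rethrown and the
// test process fails loudly instead of logging a deprecation warning.
Promise.reject(new Error('persistor call failed'))
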

@@ -13,9 +13,23 @@
 */
 let DocstoreClient
 const request = require('request').defaults({ jar: false })
 const { db, ObjectId } = require('../../../../app/js/mongojs')
 const settings = require('settings-sharelatex')
-const DocArchiveManager = require('../../../../app/js/DocArchiveManager.js')
+const Persistor = require('../../../../app/js/PersistorManager')
+
+async function streamToString(stream) {
+  const chunks = []
+  return new Promise((resolve, reject) => {
+    stream.on('data', (chunk) => chunks.push(chunk))
+    stream.on('error', reject)
+    stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8')))
+  })
+}
+
+async function getStringFromPersistor(persistor, bucket, key) {
+  const stream = await persistor.getObjectStream(bucket, key, {})
+  stream.resume()
+  return streamToString(stream)
+}
 
 module.exports = DocstoreClient = {
   createDoc(project_id, doc_id, lines, version, ranges, callback) {
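
A rough usage sketch for the two helpers above, assuming an object was previously written with Persistor.sendStream under the same bucket and key; the readArchivedDoc name is illustrative and not part of the diff:

async function readArchivedDoc(projectId, docId) {
  const raw = await getStringFromPersistor(
    Persistor,
    settings.docstore.bucket,
    `${projectId}/${docId}`
  )
  return JSON.parse(raw)
}
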
@@ -55,7 +69,9 @@ module.exports = DocstoreClient = {
         url: `http://localhost:${settings.internal.docstore.port}/project/${project_id}/doc`,
         json: true
       },
-      callback
+      (req, res, body) => {
+        callback(req, res, body)
+      }
     )
   },
 
@@ -126,11 +142,14 @@ module.exports = DocstoreClient = {
   },
 
   getS3Doc(project_id, doc_id, callback) {
-    if (callback == null) {
-      callback = function (error, res, body) {}
-    }
-    const options = DocArchiveManager.buildS3Options(project_id + '/' + doc_id)
-    options.json = true
-    return request.get(options, callback)
+    getStringFromPersistor(
+      Persistor,
+      settings.docstore.bucket,
+      `${project_id}/${doc_id}`
+    )
+      .then((data) => {
+        callback(null, JSON.parse(data))
+      })
+      .catch(callback)
   }
 }
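
This rewrite also changes the callback shape, which is why the Archiving hunks earlier drop the res argument: getS3Doc now yields the parsed doc directly rather than (error, response, body). A hypothetical caller, with the expected lines purely illustrative:

DocstoreClient.getS3Doc(project_id, doc_id, (error, s3_doc) => {
  if (error) {
    return done(error)
  }
  s3_doc.lines.should.deep.equal(['original', 'lines'])
  done()
})
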