Merge pull request #9894 from overleaf/em-node-fetch-web

Replace request-promise with node-fetch in web

GitOrigin-RevId: 07dbb6db7fd42326807aaeb18e5ee39f7c3d4668
Eric Mc Sween 2022-10-12 07:33:37 -04:00 committed by Copybot
parent 47b3b72076
commit da30da76b2
13 changed files with 313 additions and 217 deletions
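
Reviewer note: every call site in this diff follows the same migration shape. request-promise-native resolved with the parsed body and rejected on non-2xx statuses; node-fetch resolves for any HTTP status, so status checks, body parsing and timeouts all become explicit. A minimal sketch of the resulting shape, using an illustrative EXAMPLE_URL rather than any real service touched by this commit:

const fetch = require('node-fetch')
const OError = require('@overleaf/o-error')

// Hypothetical endpoint standing in for the calls changed below.
const EXAMPLE_URL = 'http://example-service/thing/123'

async function getThing() {
  const response = await fetch(EXAMPLE_URL, {
    headers: { Accept: 'application/json' },
    // request's `timeout` option becomes an abort signal (Node 16.14+ / 17.3+)
    signal: AbortSignal.timeout(5000),
  })
  // request-promise rejected on non-2xx responses; with fetch the check is explicit
  if (!response.ok) {
    throw new OError('example request failed', { statusCode: response.status })
  }
  // `json: true` is gone; the body is parsed explicitly
  return await response.json()
}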

package-lock.json generated
View file

@@ -35156,6 +35156,7 @@
"mocha": "^8.4.0",
"mock-fs": "^5.1.2",
"nock": "^13.1.1",
"node-fetch": "^2.6.7",
"pirates": "^4.0.1",
"postcss-loader": "^6.2.1",
"requirejs": "^2.3.6",

View file

@@ -5,7 +5,7 @@
happy path or via an error (message or attributes).
*/
const request = require('request-promise-native')
const fetch = require('node-fetch')
const crypto = require('crypto')
const Settings = require('@overleaf/settings')
const Metrics = require('@overleaf/metrics')
@@ -37,15 +37,22 @@ async function getScoresForPrefix(prefix) {
throw INVALID_PREFIX
}
try {
return await request({
uri: `${Settings.apis.haveIBeenPwned.url}/range/${prefix}`,
headers: {
'User-Agent': 'www.overleaf.com',
// Docs: https://haveibeenpwned.com/API/v3#PwnedPasswordsPadding
'Add-Padding': true,
},
timeout: Settings.apis.haveIBeenPwned.timeout,
})
const response = await fetch(
`${Settings.apis.haveIBeenPwned.url}/range/${prefix}`,
{
headers: {
'User-Agent': 'www.overleaf.com',
// Docs: https://haveibeenpwned.com/API/v3#PwnedPasswordsPadding
'Add-Padding': true,
},
signal: AbortSignal.timeout(Settings.apis.haveIBeenPwned.timeout),
}
)
if (!response.ok) {
throw API_ERROR
}
const body = await response.text()
return body
} catch (_errorWithPotentialReferenceToPrefix) {
// NOTE: Do not leak request details by passing the original error up.
throw API_ERROR
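
Note on the hunk above: with node-fetch a timed-out request no longer rejects with request's timeout error; the AbortSignal.timeout signal makes fetch reject with an AbortError, which the catch block still maps to the generic API_ERROR so no details about the prefix leak. If a caller ever needed to tell a timeout apart from other failures, a rough sketch (illustrative only, not part of this commit) could look like:

const fetch = require('node-fetch')

async function fetchWithTimeout(url, timeoutMs) {
  try {
    return await fetch(url, { signal: AbortSignal.timeout(timeoutMs) })
  } catch (err) {
    // node-fetch surfaces an aborted request as an error named 'AbortError'
    if (err.name === 'AbortError') {
      throw new Error(`request to ${url} timed out after ${timeoutMs}ms`)
    }
    throw err
  }
}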

View file

@@ -1,7 +1,8 @@
const request = require('request-promise-native')
const fetch = require('node-fetch')
const logger = require('@overleaf/logger')
const Settings = require('@overleaf/settings')
const Metrics = require('@overleaf/metrics')
const OError = require('@overleaf/o-error')
const DeviceHistory = require('./DeviceHistory')
const AuthenticationController = require('../Authentication/AuthenticationController')
const { expressify } = require('../../util/promises')
@@ -58,30 +59,26 @@ function validateCaptcha(action) {
Metrics.inc('captcha', 1, { path: action, status: 'missing' })
return respondInvalidCaptcha(req, res)
}
const options = {
const response = await fetch(Settings.recaptcha.endpoint, {
method: 'POST',
url: Settings.recaptcha.endpoint,
form: {
secret: Settings.recaptcha.secretKey,
response: reCaptchaResponse,
body: new URLSearchParams([
['secret', Settings.recaptcha.secretKey],
['response', reCaptchaResponse],
]),
headers: {
Accept: 'application/json',
},
json: true,
}
let body
try {
body = await request(options)
} catch (err) {
const response = err.response
if (response) {
logger.warn(
{ statusCode: response.statusCode, body: err.body },
'failed recaptcha siteverify request'
)
}
})
const body = await response.json()
if (!response.ok) {
Metrics.inc('captcha', 1, { path: action, status: 'error' })
return next(err)
throw new OError('failed recaptcha siteverify request', {
statusCode: response.status,
body,
})
}
if (!body?.success) {
if (!body.success) {
logger.warn(
{ statusCode: 200, body },
'failed recaptcha siteverify request'
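
Note on the hunk above: request's `form:` option is replaced by passing a URLSearchParams body, which node-fetch sends form-encoded and for which it sets the Content-Type header itself; the `json: true` option becomes an explicit response.json() call. A reduced sketch of that shape, with the endpoint, secret and token passed in as illustrative parameters:

const fetch = require('node-fetch')

async function verifyCaptcha(endpoint, secret, token) {
  const response = await fetch(endpoint, {
    method: 'POST',
    // URLSearchParams bodies are sent as application/x-www-form-urlencoded
    body: new URLSearchParams([
      ['secret', secret],
      ['response', token],
    ]),
    headers: { Accept: 'application/json' },
  })
  const body = await response.json()
  return response.ok && body.success === true
}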

View file

@@ -1,5 +1,5 @@
const { callbackify } = require('util')
const request = require('request-promise-native')
const fetch = require('node-fetch')
const settings = require('@overleaf/settings')
const OError = require('@overleaf/o-error')
const UserGetter = require('../User/UserGetter')
@@ -32,103 +32,131 @@ async function initializeProject() {
) {
return
}
try {
const body = await request.post({
url: `${settings.apis.project_history.url}/project`,
const response = await fetch(`${settings.apis.project_history.url}/project`, {
method: 'POST',
headers: {
Accept: 'application/json',
},
})
if (!response.ok) {
throw new OError('failed to initialize project history', {
statusCode: response.status,
})
const project = JSON.parse(body)
const overleafId = project && project.project && project.project.id
if (!overleafId) {
throw new Error('project-history did not provide an id', project)
}
return { overleaf_id: overleafId }
} catch (err) {
throw OError.tag(err, 'failed to initialize project history')
}
const body = await response.json()
const overleafId = body && body.project && body.project.id
if (!overleafId) {
throw new OError('project-history did not provide an id', { body })
}
return { overleaf_id: overleafId }
}
async function flushProject(projectId) {
try {
await request.post({
url: `${settings.apis.project_history.url}/project/${projectId}/flush`,
})
} catch (err) {
throw OError.tag(err, 'failed to flush project to project history', {
const response = await fetch(
`${settings.apis.project_history.url}/project/${projectId}/flush`,
{ method: 'POST' }
)
if (!response.ok) {
throw new OError('failed to flush project to project history', {
projectId,
statusCode: response.status,
})
}
}
async function flushMigration(projectId) {
try {
await request.post({
url: `${settings.apis.project_history_importer.url}/project/${projectId}/flush`,
})
} catch (err) {
throw OError.tag(
err,
const response = await fetch(
`${settings.apis.project_history_importer.url}/project/${projectId}/flush`,
{ method: 'POST' }
)
if (!response.ok) {
throw new OError(
'failed to flush project migration to project history importer',
{
projectId,
}
{ projectId, statusCode: response.status }
)
}
}
async function deleteProjectHistory(projectId) {
try {
await request.delete({
url: `${settings.apis.project_history.url}/project/${projectId}`,
})
} catch (err) {
throw OError.tag(err, 'failed to delete project history', {
const response = await fetch(
`${settings.apis.project_history.url}/project/${projectId}`,
{ method: 'DELETE' }
)
if (!response.ok) {
throw new OError('failed to delete project history', {
projectId,
statusCode: response.status,
})
}
}
async function resyncProject(projectId, options = {}) {
try {
const body = {}
if (options.force) {
body.force = options.force
const body = {}
if (options.force) {
body.force = options.force
}
if (options.origin) {
body.origin = options.origin
}
const response = await fetch(
`${settings.apis.project_history.url}/project/${projectId}/resync`,
{
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(body),
signal: AbortSignal.timeout(6 * 60 * 1000),
}
if (options.origin) {
body.origin = options.origin
}
await request.post({
url: `${settings.apis.project_history.url}/project/${projectId}/resync`,
json: body,
timeout: 6 * 60 * 1000,
)
if (!response.ok) {
throw new OError('failed to resync project history', {
projectId,
statusCode: response.status,
})
} catch (err) {
throw OError.tag(err, 'failed to resync project history', { projectId })
}
}
async function deleteProject(projectId, historyId) {
try {
const tasks = [
request.delete(
`${settings.apis.project_history.url}/project/${projectId}`
),
]
if (historyId != null) {
tasks.push(
request.delete({
url: `${settings.apis.v1_history.url}/projects/${historyId}`,
auth: {
user: settings.apis.v1_history.user,
pass: settings.apis.v1_history.pass,
},
})
)
}
await Promise.all(tasks)
} catch (err) {
throw OError.tag(err, 'failed to clear project history', {
const tasks = []
tasks.push(_deleteProjectInProjectHistory(projectId))
if (historyId != null) {
tasks.push(_deleteProjectInFullProjectHistory(historyId))
}
await Promise.all(tasks)
}
async function _deleteProjectInProjectHistory(projectId) {
const response = await fetch(
`${settings.apis.project_history.url}/project/${projectId}`,
{ method: 'DELETE' }
)
if (!response.ok) {
throw new OError('failed to clear project history in project-history', {
projectId,
statusCode: response.status,
})
}
}
async function _deleteProjectInFullProjectHistory(historyId) {
const response = await fetch(
`${settings.apis.v1_history.url}/projects/${historyId}`,
{
method: 'DELETE',
headers: {
Authorization:
'Basic ' +
Buffer.from(
`${settings.apis.v1_history.user}:${settings.apis.v1_history.pass}`
).toString('base64'),
},
}
)
if (!response.ok) {
throw new OError('failed to clear project history', {
historyId,
statusCode: response.status,
})
}
}
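
Note on the hunk above: node-fetch has no equivalent of request's `auth: { user, pass }` option, so the v1-history call (and the templates fetch further down) builds the Basic Authorization header by hand. The same few lines could be factored into a helper; a sketch, where basicAuthHeader is a hypothetical name rather than something added by this commit:

// Equivalent of request's auth: { user, pass, sendImmediately: true } option
function basicAuthHeader(user, pass) {
  return 'Basic ' + Buffer.from(`${user}:${pass}`).toString('base64')
}

// usage: headers: { Authorization: basicAuthHeader(settings.apis.v1_history.user, settings.apis.v1_history.pass) }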

View file

@@ -22,7 +22,7 @@ const fs = require('fs')
const util = require('util')
const logger = require('@overleaf/logger')
const request = require('request')
const requestPromise = require('request-promise-native')
const fetch = require('node-fetch')
const settings = require('@overleaf/settings')
const uuid = require('uuid')
const Errors = require('../Errors/Errors')
@@ -168,26 +168,27 @@ const TemplatesManager = {
promises: {
async fetchFromV1(templateId) {
const { body, statusCode } = await requestPromise({
baseUrl: settings.apis.v1.url,
url: `/api/v2/templates/${templateId}`,
method: 'GET',
auth: {
user: settings.apis.v1.user,
pass: settings.apis.v1.pass,
sendImmediately: true,
const url = new URL(
`/api/v2/templates/${templateId}`,
settings.apis.v1.url
)
const response = await fetch(url, {
headers: {
Authorization:
'Basic ' +
Buffer.from(
`${settings.apis.v1.user}:${settings.apis.v1.pass}`
).toString('base64'),
Accept: 'application/json',
},
resolveWithFullResponse: true,
simple: false,
json: true,
timeout: settings.apis.v1.timeout,
signal: AbortSignal.timeout(settings.apis.v1.timeout),
})
if (statusCode === 404) {
if (response.status === 404) {
throw new Errors.NotFoundError()
}
if (statusCode !== 200) {
if (response.status !== 200) {
logger.warn(
{ templateId },
"[TemplateMetrics] Couldn't fetch template data from v1"
@@ -195,6 +196,7 @@ const TemplatesManager = {
throw new Error("Couldn't fetch template data from v1")
}
const body = await response.json()
return body
},
},
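
Note on the hunk above: request's `baseUrl`/`url` pair maps onto the WHATWG URL constructor, which resolves the path against the configured base before the result is handed to fetch:

// new URL(path, base) replaces request's { baseUrl, url } options
const url = new URL('/api/v2/templates/1234', 'https://v1.example.com')
console.log(url.href) // https://v1.example.com/api/v2/templates/1234

One subtlety: a leading-slash path replaces any path component on the base, whereas request appended it to baseUrl, so the two are only equivalent when the configured base URL has no path.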

View file

@@ -1,11 +1,21 @@
const Settings = require('@overleaf/settings')
const request = require('request-promise-native')
const OError = require('@overleaf/o-error')
const fetch = require('node-fetch')
async function getQueues(userId) {
return request({
uri: `${Settings.apis.tpdsworker.url}/queues/${userId}`,
json: true,
})
const response = await fetch(
`${Settings.apis.tpdsworker.url}/queues/${userId}`,
{
headers: {
Accept: 'application/json',
},
}
)
if (!response.ok) {
throw new OError('failed to query TPDS queues for user', { userId })
}
const body = await response.json()
return body
}
module.exports = {

View file

@@ -4,7 +4,7 @@ const { callbackify } = require('util')
const logger = require('@overleaf/logger')
const metrics = require('@overleaf/metrics')
const Path = require('path')
const request = require('request-promise-native')
const fetch = require('node-fetch')
const settings = require('@overleaf/settings')
const CollaboratorsGetter =
@@ -184,10 +184,17 @@ async function deleteProject(params) {
metrics.inc('tpds.delete-project')
// send the request directly to project archiver, bypassing third-party-datastore
try {
await request({
uri: `${settings.apis.project_archiver.url}/project/${projectId}`,
method: 'delete',
})
const response = await fetch(
`${settings.apis.project_archiver.url}/project/${projectId}`,
{ method: 'DELETE' }
)
if (!response.ok) {
logger.error(
{ statusCode: response.status, project_id: projectId },
'error deleting project in third party datastore (project_archiver)'
)
return false
}
return true
} catch (err) {
logger.error(
@@ -205,13 +212,20 @@ async function enqueue(group, method, job) {
return
}
try {
const response = await request({
uri: `${tpdsWorkerUrl}/enqueue/web_to_tpds_http_requests`,
json: { group, job, method },
method: 'post',
timeout: 5 * 1000,
const url = new URL('/enqueue/web_to_tpds_http_requests', tpdsWorkerUrl)
const response = await fetch(url, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ group, job, method }),
signal: AbortSignal.timeout(5 * 1000),
})
return response
if (!response.ok) {
// log error and continue
logger.error(
{ statusCode: response.status, group, job, method },
'error enqueueing tpdsworker job'
)
}
} catch (err) {
// log error and continue
logger.error({ err, group, job, method }, 'error enqueueing tpdsworker job')
@@ -293,7 +307,7 @@ async function pollDropboxForUser(userId) {
// Queue poll requests in the user queue along with file updates, in order
// to avoid race conditions between polling and updates.
return enqueue(userId, 'standardHttpRequest', job)
await enqueue(userId, 'standardHttpRequest', job)
}
const TpdsUpdateSender = {
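
Note on the hunk above: both failure modes in enqueue (a non-2xx status and a thrown network error) are logged and swallowed, so callers such as pollDropboxForUser never see a TPDS enqueue failure, and the function no longer returns the raw response. A condensed sketch of that log-and-continue policy, assuming the same logger as above and an illustrative payload:

const fetch = require('node-fetch')
const logger = require('@overleaf/logger')

async function enqueueSafe(url, payload) {
  try {
    const response = await fetch(url, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(payload),
      signal: AbortSignal.timeout(5 * 1000),
    })
    if (!response.ok) {
      // log error and continue
      logger.error({ statusCode: response.status }, 'error enqueueing tpdsworker job')
    }
  } catch (err) {
    // log error and continue
    logger.error({ err }, 'error enqueueing tpdsworker job')
  }
}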

View file

@@ -296,6 +296,7 @@
"mocha": "^8.4.0",
"mock-fs": "^5.1.2",
"nock": "^13.1.1",
"node-fetch": "^2.6.7",
"pirates": "^4.0.1",
"postcss-loader": "^6.2.1",
"requirejs": "^2.3.6",

View file

@@ -1,11 +1,12 @@
const Settings = require('@overleaf/settings')
const OError = require('@overleaf/o-error')
const { waitForDb } = require('../app/src/infrastructure/mongodb')
const { promiseMapWithLimit } = require('../app/src/util/promises')
const { getHardDeletedProjectIds } = require('./delete_orphaned_data_helper')
const TpdsUpdateSender = require('../app/src/Features/ThirdPartyDataStore/TpdsUpdateSender')
const { promisify } = require('util')
const { ObjectId } = require('mongodb')
const request = require('request-promise-native')
const fetch = require('node-fetch')
const sleep = promisify(setTimeout)
const START_OFFSET = process.env.START_OFFSET
@@ -30,14 +31,18 @@ async function main() {
let pageToken = ''
let startOffset = START_OFFSET
while (pageToken !== undefined) {
const { nextPageToken, entries } = await request({
url: `${Settings.apis.project_archiver.url}/project/list`,
json: true,
qs: {
pageToken,
startOffset,
},
const url = new URL(`${Settings.apis.project_archiver.url}/project/list`)
url.searchParams.append('pageToken', pageToken)
url.searchParams.append('startOffset', startOffset)
const response = await fetch(url, {
headers: { Accept: 'application/json' },
})
if (!response.ok) {
throw new OError('Failed to get list of projects from project archiver', {
status: response.status,
})
}
const { nextPageToken, entries } = await response.json()
pageToken = nextPageToken
startOffset = undefined
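
Note on the hunk above: request's `qs:` option is replaced by building the query string on a URL object. URLSearchParams stringifies whatever it is given, so an undefined startOffset is sent as the literal text "undefined"; whether that matters depends on how the project archiver parses it. A guarded variant (illustrative only, not what the script does) would skip unset values:

// Append only the query parameters that are actually set
function buildListUrl(archiverUrl, params) {
  const url = new URL(`${archiverUrl}/project/list`)
  for (const [name, value] of Object.entries(params)) {
    if (value !== undefined && value !== '') {
      url.searchParams.append(name, value)
    }
  }
  return url
}

// buildListUrl('http://archiver', { pageToken: '', startOffset: undefined })
//   => http://archiver/project/list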

View file

@@ -46,7 +46,7 @@ const SandboxedModule = require('sandboxed-module')
SandboxedModule.configure({
ignoreMissing: true,
requires: getSandboxedModuleRequires(),
globals: { Buffer, Promise, console, process },
globals: { AbortSignal, Buffer, Promise, console, process, URL },
})
function getSandboxedModuleRequires() {
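
Note on the hunk above: SandboxedModule only exposes the globals it is explicitly given, and the migrated modules now reference AbortSignal (for timeouts) and URL (for building request URLs), so both have to be added to the sandbox or the tests would fail with a ReferenceError when the code under test runs. A minimal illustration:

const SandboxedModule = require('sandboxed-module')

// Without AbortSignal and URL here, a sandboxed module calling
// AbortSignal.timeout(1000) or new URL('/path', base) would throw
// a ReferenceError inside the sandbox.
SandboxedModule.configure({
  globals: { AbortSignal, Buffer, Promise, console, process, URL },
})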

View file

@@ -11,10 +11,11 @@ describe('HistoryManager', function () {
this.AuthenticationController = {
getLoggedInUserId: sinon.stub().returns(this.user_id),
}
this.request = {
post: sinon.stub(),
delete: sinon.stub().resolves(),
this.response = {
ok: true,
json: sinon.stub(),
}
this.fetch = sinon.stub().resolves(this.response)
this.projectHistoryUrl = 'http://project_history.example.com'
this.v1HistoryUrl = 'http://v1_history.example.com'
this.v1HistoryUser = 'system'
@@ -45,7 +46,7 @@ describe('HistoryManager', function () {
this.HistoryManager = SandboxedModule.require(MODULE_PATH, {
requires: {
'request-promise-native': this.request,
'node-fetch': this.fetch,
'@overleaf/settings': this.settings,
'../User/UserGetter': this.UserGetter,
},
@@ -61,18 +62,15 @@ describe('HistoryManager', function () {
describe('project history returns a successful response', function () {
beforeEach(async function () {
this.overleaf_id = 1234
this.request.post.resolves(
JSON.stringify({ project: { id: this.overleaf_id } })
)
this.response.json.resolves({ project: { id: this.overleaf_id } })
this.result = await this.HistoryManager.promises.initializeProject()
})
it('should call the project history api', function () {
this.request.post
.calledWith({
url: `${this.settings.apis.project_history.url}/project`,
})
.should.equal(true)
this.fetch.should.have.been.calledWithMatch(
`${this.settings.apis.project_history.url}/project`,
{ method: 'POST' }
)
})
it('should return the overleaf id', function () {
@@ -82,7 +80,7 @@
describe('project history returns a response without the project id', function () {
it('should throw an error', async function () {
this.request.post.resolves(JSON.stringify({ project: {} }))
this.response.json.resolves({ project: {} })
await expect(this.HistoryManager.promises.initializeProject()).to.be
.rejected
})
@@ -90,7 +88,7 @@
describe('project history errors', function () {
it('should propagate the error', async function () {
this.request.post.rejects(new Error('problem connecting'))
this.fetch.rejects(new Error('problem connecting'))
await expect(this.HistoryManager.promises.initializeProject()).to.be
.rejected
})
@@ -267,19 +265,26 @@ describe('HistoryManager', function () {
})
it('should call the project-history service', async function () {
expect(this.request.delete).to.have.been.calledWith(
`${this.projectHistoryUrl}/project/${projectId}`
expect(this.fetch).to.have.been.calledWith(
`${this.projectHistoryUrl}/project/${projectId}`,
{ method: 'DELETE' }
)
})
it('should call the v1-history service', async function () {
expect(this.request.delete).to.have.been.calledWith({
url: `${this.v1HistoryUrl}/projects/${historyId}`,
auth: {
user: this.v1HistoryUser,
pass: this.v1HistoryPassword,
},
})
expect(this.fetch).to.have.been.calledWith(
`${this.v1HistoryUrl}/projects/${historyId}`,
{
method: 'DELETE',
headers: {
Authorization:
'Basic ' +
Buffer.from(
`${this.v1HistoryUser}:${this.v1HistoryPassword}`
).toString('base64'),
},
}
)
})
})
})
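
Note on the tests above: instead of stubbing request.post and request.delete, the suite now stubs the node-fetch export itself and has it resolve a minimal fake exposing just the fields the code reads (ok, json, and status where errors are built). A condensed sketch of the arrangement, with a hypothetical module path:

const sinon = require('sinon')
const SandboxedModule = require('sandboxed-module')

// Fake only the parts of a node-fetch Response that HistoryManager touches
const response = { ok: true, status: 200, json: sinon.stub().resolves({ project: { id: 1234 } }) }
const fetchStub = sinon.stub().resolves(response)

const HistoryManager = SandboxedModule.require('path/to/HistoryManager', {
  requires: { 'node-fetch': fetchStub /* plus the other stubbed deps */ },
})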

View file

@@ -63,7 +63,7 @@ describe('TemplatesManager', function () {
this.FileWriter = { ensureDumpFolderExists: sinon.stub().callsArg(0) }
this.TemplatesManager = SandboxedModule.require(modulePath, {
requires: {
'request-promise-native': sinon.stub(),
'node-fetch': sinon.stub(),
'../Uploads/ProjectUploadManager': this.ProjectUploadManager,
'../Project/ProjectOptionsHandler': this.ProjectOptionsHandler,
'../Project/ProjectRootDocManager': this.ProjectRootDocManager,

View file

@@ -25,7 +25,6 @@ describe('TpdsUpdateSender', function () {
this.fakeUser = {
_id: '12390i',
}
this.requestQueuer = function (queue, meth, opts, callback) {}
const memberIds = [userId, collaberatorRef, readOnlyRef]
this.CollaboratorsGetter = {
promises: {
@@ -33,7 +32,11 @@
},
}
this.docstoreUrl = 'docstore.sharelatex.env'
this.request = sinon.stub().resolves()
this.response = {
ok: true,
json: sinon.stub(),
}
this.fetch = sinon.stub().resolves(this.response)
this.settings = {
siteUrl,
apis: {
@@ -58,7 +61,7 @@
requires: {
mongodb: { ObjectId },
'@overleaf/settings': this.settings,
'request-promise-native': this.request,
'node-fetch': this.fetch,
'../Collaborators/CollaboratorsGetter': this.CollaboratorsGetter,
'../User/UserGetter.js': this.UserGetter,
'@overleaf/metrics': {
@@ -71,28 +74,30 @@ describe('TpdsUpdateSender', function () {
describe('enqueue', function () {
it('should not call request if there is no tpdsworker url', async function () {
await this.TpdsUpdateSender.promises.enqueue(null, null, null)
this.request.should.not.have.been.called
this.fetch.should.not.have.been.called
})
it('should post the message to the tpdsworker', async function () {
this.settings.apis.tpdsworker = { url: 'www.tpdsworker.env' }
this.settings.apis.tpdsworker = { url: 'http://tpdsworker' }
const group0 = 'myproject'
const method0 = 'somemethod0'
const job0 = 'do something'
await this.TpdsUpdateSender.promises.enqueue(group0, method0, job0)
const args = this.request.firstCall.args[0]
args.json.group.should.equal(group0)
args.json.job.should.equal(job0)
args.json.method.should.equal(method0)
args.uri.should.equal(
'www.tpdsworker.env/enqueue/web_to_tpds_http_requests'
this.fetch.should.have.been.calledWith(
new URL('http://tpdsworker/enqueue/web_to_tpds_http_requests'),
sinon.match({ method: 'POST' })
)
const opts = this.fetch.firstCall.args[1]
const body = JSON.parse(opts.body)
body.group.should.equal(group0)
body.job.should.equal(job0)
body.method.should.equal(method0)
})
})
describe('sending updates', function () {
beforeEach(function () {
this.settings.apis.tpdsworker = { url: 'www.tpdsworker.env' }
this.settings.apis.tpdsworker = { url: 'http://tpdsworker' }
})
it('queues a post the file with user and file id', async function () {
@@ -110,8 +115,8 @@ describe('TpdsUpdateSender', function () {
group: group0,
job: job0,
method: method0,
} = this.request.firstCall.args[0].json
group0.should.equal(userId)
} = JSON.parse(this.fetch.firstCall.args[1].body)
group0.should.equal(userId.toString())
method0.should.equal('pipeStreamFrom')
job0.method.should.equal('post')
job0.streamOrigin.should.equal(
@@ -122,19 +127,23 @@
)}${encodeURIComponent(path)}`
job0.uri.should.equal(expectedUrl)
job0.headers.sl_all_user_ids.should.equal(JSON.stringify([userId]))
job0.headers.sl_project_owner_user_id.should.equal(userId)
job0.headers.sl_project_owner_user_id.should.equal(userId.toString())
const { group: group1, job: job1 } = this.request.secondCall.args[0].json
group1.should.equal(collaberatorRef)
const { group: group1, job: job1 } = JSON.parse(
this.fetch.secondCall.args[1].body
)
group1.should.equal(collaberatorRef.toString())
job1.headers.sl_all_user_ids.should.equal(
JSON.stringify([collaberatorRef])
)
job1.headers.sl_project_owner_user_id.should.equal(userId)
job1.headers.sl_project_owner_user_id.should.equal(userId.toString())
const { group: group2, job: job2 } = this.request.thirdCall.args[0].json
group2.should.equal(readOnlyRef)
const { group: group2, job: job2 } = JSON.parse(
this.fetch.thirdCall.args[1].body
)
group2.should.equal(readOnlyRef.toString())
job2.headers.sl_all_user_ids.should.equal(JSON.stringify([readOnlyRef]))
job2.headers.sl_project_owner_user_id.should.equal(userId)
job2.headers.sl_project_owner_user_id.should.equal(userId.toString())
this.UserGetter.promises.getUsers.should.have.been.calledOnce.and.calledWith(
{
@@ -164,12 +173,12 @@ describe('TpdsUpdateSender', function () {
group: group0,
job: job0,
method: method0,
} = this.request.firstCall.args[0].json
} = JSON.parse(this.fetch.firstCall.args[1].body)
group0.should.equal(userId)
group0.should.equal(userId.toString())
method0.should.equal('pipeStreamFrom')
job0.method.should.equal('post')
const expectedUrl = `${thirdPartyDataStoreApiUrl}/user/${userId}/entity/${encodeURIComponent(
const expectedUrl = `${thirdPartyDataStoreApiUrl}/user/${userId.toString()}/entity/${encodeURIComponent(
projectName
)}${encodeURIComponent(path)}`
job0.uri.should.equal(expectedUrl)
@@ -178,14 +187,18 @@
)
job0.headers.sl_all_user_ids.should.eql(JSON.stringify([userId]))
const { group: group1, job: job1 } = this.request.secondCall.args[0].json
group1.should.equal(collaberatorRef)
const { group: group1, job: job1 } = JSON.parse(
this.fetch.secondCall.args[1].body
)
group1.should.equal(collaberatorRef.toString())
job1.headers.sl_all_user_ids.should.equal(
JSON.stringify([collaberatorRef])
)
const { group: group2, job: job2 } = this.request.thirdCall.args[0].json
group2.should.equal(readOnlyRef)
const { group: group2, job: job2 } = JSON.parse(
this.fetch.thirdCall.args[1].body
)
group2.should.equal(readOnlyRef.toString())
job2.headers.sl_all_user_ids.should.equal(JSON.stringify([readOnlyRef]))
this.UserGetter.promises.getUsers.should.have.been.calledOnce.and.calledWith(
@@ -214,9 +227,9 @@
group: group0,
job: job0,
method: method0,
} = this.request.firstCall.args[0].json
} = JSON.parse(this.fetch.firstCall.args[1].body)
group0.should.equal(userId)
group0.should.equal(userId.toString())
method0.should.equal('standardHttpRequest')
job0.method.should.equal('delete')
const expectedUrl = `${thirdPartyDataStoreApiUrl}/user/${userId}/entity/${encodeURIComponent(
@@ -226,14 +239,18 @@
job0.uri.should.equal(expectedUrl)
expect(job0.json).to.deep.equal({ subtreeEntityIds })
const { group: group1, job: job1 } = this.request.secondCall.args[0].json
group1.should.equal(collaberatorRef)
const { group: group1, job: job1 } = JSON.parse(
this.fetch.secondCall.args[1].body
)
group1.should.equal(collaberatorRef.toString())
job1.headers.sl_all_user_ids.should.equal(
JSON.stringify([collaberatorRef])
)
const { group: group2, job: job2 } = this.request.thirdCall.args[0].json
group2.should.equal(readOnlyRef)
const { group: group2, job: job2 } = JSON.parse(
this.fetch.thirdCall.args[1].body
)
group2.should.equal(readOnlyRef.toString())
job2.headers.sl_all_user_ids.should.equal(JSON.stringify([readOnlyRef]))
this.UserGetter.promises.getUsers.should.have.been.calledOnce.and.calledWith(
@@ -262,9 +279,9 @@
group: group0,
job: job0,
method: method0,
} = this.request.firstCall.args[0].json
} = JSON.parse(this.fetch.firstCall.args[1].body)
group0.should.equal(userId)
group0.should.equal(userId.toString())
method0.should.equal('standardHttpRequest')
job0.method.should.equal('put')
job0.uri.should.equal(
@@ -274,14 +291,18 @@
job0.json.endPath.should.equal(`/${projectName}/${endPath}`)
job0.headers.sl_all_user_ids.should.eql(JSON.stringify([userId]))
const { group: group1, job: job1 } = this.request.secondCall.args[0].json
group1.should.equal(collaberatorRef)
const { group: group1, job: job1 } = JSON.parse(
this.fetch.secondCall.args[1].body
)
group1.should.equal(collaberatorRef.toString())
job1.headers.sl_all_user_ids.should.equal(
JSON.stringify([collaberatorRef])
)
const { group: group2, job: job2 } = this.request.thirdCall.args[0].json
group2.should.equal(readOnlyRef)
const { group: group2, job: job2 } = JSON.parse(
this.fetch.thirdCall.args[1].body
)
group2.should.equal(readOnlyRef.toString())
job2.headers.sl_all_user_ids.should.equal(JSON.stringify([readOnlyRef]))
this.UserGetter.promises.getUsers.should.have.been.calledOnce.and.calledWith(
@@ -309,9 +330,9 @@
group: group0,
job: job0,
method: method0,
} = this.request.firstCall.args[0].json
} = JSON.parse(this.fetch.firstCall.args[1].body)
group0.should.equal(userId)
group0.should.equal(userId.toString())
method0.should.equal('standardHttpRequest')
job0.method.should.equal('put')
job0.uri.should.equal(
@@ -321,14 +342,18 @@
job0.json.endPath.should.equal(newProjectName)
job0.headers.sl_all_user_ids.should.eql(JSON.stringify([userId]))
const { group: group1, job: job1 } = this.request.secondCall.args[0].json
group1.should.equal(collaberatorRef)
const { group: group1, job: job1 } = JSON.parse(
this.fetch.secondCall.args[1].body
)
group1.should.equal(collaberatorRef.toString())
job1.headers.sl_all_user_ids.should.equal(
JSON.stringify([collaberatorRef])
)
const { group: group2, job: job2 } = this.request.thirdCall.args[0].json
group2.should.equal(readOnlyRef)
const { group: group2, job: job2 } = JSON.parse(
this.fetch.thirdCall.args[1].body
)
group2.should.equal(readOnlyRef.toString())
job2.headers.sl_all_user_ids.should.equal(JSON.stringify([readOnlyRef]))
this.UserGetter.promises.getUsers.should.have.been.calledOnce.and.calledWith(
@@ -343,33 +368,34 @@
})
it('pollDropboxForUser', async function () {
await this.TpdsUpdateSender.promises.pollDropboxForUser(userId)
await this.TpdsUpdateSender.promises.pollDropboxForUser(userId.toString())
const {
group: group0,
job: job0,
method: method0,
} = this.request.firstCall.args[0].json
} = JSON.parse(this.fetch.firstCall.args[1].body)
group0.should.equal(userId)
group0.should.equal(userId.toString())
method0.should.equal('standardHttpRequest')
job0.method.should.equal('post')
job0.uri.should.equal(`${thirdPartyDataStoreApiUrl}/user/poll`)
job0.json.user_ids[0].should.equal(userId)
job0.json.user_ids[0].should.equal(userId.toString())
})
})
describe('deleteProject', function () {
it('should not call request if there is no project archiver url', async function () {
await this.TpdsUpdateSender.promises.deleteProject({ projectId })
this.request.should.not.have.been.called
this.fetch.should.not.have.been.called
})
it('should make a delete request to project archiver', async function () {
this.settings.apis.project_archiver = { url: projectArchiverUrl }
await this.TpdsUpdateSender.promises.deleteProject({ projectId })
const { uri, method } = this.request.firstCall.args[0]
method.should.equal('delete')
uri.should.equal(`${projectArchiverUrl}/project/${projectId}`)
this.fetch.should.have.been.calledWith(
`${projectArchiverUrl}/project/${projectId}`,
{ method: 'DELETE' }
)
})
})
})
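
Note on the assertions above: the repeated .toString() calls follow from reading the job back via JSON.parse(this.fetch.firstCall.args[1].body). The sender now serialises the job with JSON.stringify, and mongodb ObjectIds serialise to their hex string form, so what the test recovers are plain strings rather than ObjectId instances:

const { ObjectId } = require('mongodb')

const userId = new ObjectId()
const body = JSON.parse(JSON.stringify({ group: userId }))
console.log(body.group === userId.toString()) // true: the round trip yields the hex string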