Merge pull request #17525 from overleaf/ae-upgrade-prettier

Upgrade Prettier to v3

GitOrigin-RevId: 6f1338f196408f3edb4892d5220ad3665ff1a5bc
Alf Eaton 2024-03-25 10:51:40 +00:00 committed by Copybot
parent 6d55f2e09d
commit 6cc2db3cdd
106 changed files with 1033 additions and 982 deletions
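
Nearly everything below is mechanical reformatting from rerunning the formatter after the upgrade, not hand edits. The recurring Prettier 3 changes visible in this diff: nested ternaries get one extra indent level per nesting; an assignment whose right-hand side is an awaited call now breaks after the = sign rather than inside the call's arguments; a function passed as an argument is no longer "hugged" against the call when its parameter list, or a sibling argument, would have to break; multi-value CSS declarations break one value per line; and the default trailingComma option changed from "es5" to "all". Short before/after sketches follow the first hunk that shows each pattern.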

View file

@@ -114,9 +114,8 @@ describe('FSPersistorTests', function () {
})
it('should not write the target file', async function () {
await expect(
fsPromises.access(scenario.fsPath(files.wombat))
).to.be.rejected
await expect(fsPromises.access(scenario.fsPath(files.wombat))).to
.be.rejected
})
it('should delete the temporary file', async function () {
@@ -135,9 +134,8 @@ describe('FSPersistorTests', function () {
const contents = await fsPromises.readFile(
scenario.fsPath(files.wombat)
)
expect(
contents.equals(localFiles['/uploads/info.txt'])
).to.be.true
expect(contents.equals(localFiles['/uploads/info.txt'])).to.be
.true
})
})
@@ -151,9 +149,8 @@ describe('FSPersistorTests', function () {
})
it('should not write the target file', async function () {
await expect(
fsPromises.access(scenario.fsPath(files.wombat))
).to.be.rejected
await expect(fsPromises.access(scenario.fsPath(files.wombat))).to
.be.rejected
})
it('should delete the temporary file', async function () {
@@ -203,9 +200,8 @@ describe('FSPersistorTests', function () {
const contents = await fsPromises.readFile(
scenario.fsPath(files.wombat)
)
expect(
contents.equals(localFiles['/uploads/info.txt'])
).to.be.true
expect(contents.equals(localFiles['/uploads/info.txt'])).to.be
.true
})
it('should delete the temporary file', async function () {
@@ -224,9 +220,8 @@ describe('FSPersistorTests', function () {
const contents = await fsPromises.readFile(
scenario.fsPath(files.wombat)
)
expect(
contents.equals(localFiles['/uploads/other.txt'])
).to.be.true
expect(contents.equals(localFiles['/uploads/other.txt'])).to.be
.true
})
})
@@ -243,9 +238,8 @@ describe('FSPersistorTests', function () {
const contents = await fsPromises.readFile(
scenario.fsPath(files.wombat)
)
expect(
contents.equals(localFiles['/uploads/info.txt'])
).to.be.true
expect(contents.equals(localFiles['/uploads/info.txt'])).to.be
.true
})
it('should delete the temporary file', async function () {
@@ -280,9 +274,8 @@ describe('FSPersistorTests', function () {
)
const contents = await streamToBuffer(stream)
// end is inclusive in ranges, but exclusive in slice()
expect(
contents.equals(localFiles['/uploads/info.txt'].slice(5, 17))
).to.be.true
expect(contents.equals(localFiles['/uploads/info.txt'].slice(5, 17)))
.to.be.true
})
it('should give a NotFoundError if the file does not exist', async function () {
@@ -332,9 +325,8 @@ describe('FSPersistorTests', function () {
it('should delete the file', async function () {
await persistor.deleteObject(location, files.wombat)
await expect(
fsPromises.access(scenario.fsPath(files.wombat))
).to.be.rejected
await expect(fsPromises.access(scenario.fsPath(files.wombat))).to.be
.rejected
})
it("should ignore files that don't exist", async function () {

View file

@@ -792,8 +792,8 @@ function getSimpleOp(operation) {
return ops[0] instanceof RetainOp
? ops[1]
: ops[1] instanceof RetainOp
? ops[0]
: null
? ops[0]
: null
case 3:
if (ops[0] instanceof RetainOp && ops[2] instanceof RetainOp) {
return ops[1]
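
Prettier 3 indents the arms of a nested ternary one level per nesting, where v2 kept a chained ternary flat. The getSimpleOp hunk above, with indentation made explicit:

  // Prettier 2
  return ops[0] instanceof RetainOp
    ? ops[1]
    : ops[1] instanceof RetainOp
    ? ops[0]
    : null

  // Prettier 3
  return ops[0] instanceof RetainOp
    ? ops[1]
    : ops[1] instanceof RetainOp
      ? ops[0]
      : null

The same pattern appears later in _takeDoc, the redis client factory, injectUserDetails, ProjectController, and ProjectEntityUpdateHandler.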

package-lock.json (generated; 23 changed lines)
View file

@@ -49,7 +49,7 @@
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-prettier": "^4.0.0",
"eslint-plugin-promise": "^6.0.0",
"prettier": "2.5.1",
"prettier": "3.2.5",
"resolve-url-loader": "^5.0.0",
"sass": "^1.69.5",
"sass-loader": "^13.3.2",
@@ -35248,15 +35248,18 @@
}
},
"node_modules/prettier": {
"version": "2.5.1",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-2.5.1.tgz",
"integrity": "sha512-vBZcPRUR5MZJwoyi3ZoyQlc1rXeEck8KgeC9AwwOn+exuxLxq5toTRDTSaVrXHxelDMHy9zlicw8u66yxoSUFg==",
"version": "3.2.5",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.2.5.tgz",
"integrity": "sha512-3/GWa9aOC0YeD7LUfvOG2NiDyhOWRvt1k+rcKhOuYnMY24iiCphgneUfJDyFXd6rZCAnuLBv6UeAULtrhT/F4A==",
"dev": true,
"bin": {
"prettier": "bin-prettier.js"
"prettier": "bin/prettier.cjs"
},
"engines": {
"node": ">=10.13.0"
"node": ">=14"
},
"funding": {
"url": "https://github.com/prettier/prettier?sponsor=1"
}
},
"node_modules/prettier-linter-helpers": {
@@ -44299,7 +44302,6 @@
"postcss-loader": "^7.3.0",
"postcss-preset-env": "^8.3.2",
"postcss-reporter": "^7.0.5",
"prettier": "^2.5.1",
"react-test-renderer": "^16.7.0",
"react-transform-hmr": "^1.0.4",
"redux-mock-store": "1.5.0",
@@ -72046,7 +72048,6 @@
"postcss-loader": "^7.3.0",
"postcss-preset-env": "^8.3.2",
"postcss-reporter": "^7.0.5",
"prettier": "^2.5.1",
"prop-types": "^15.8.1",
"react": "^16.8.6",
"react-bootstrap": "^0.31.5",
@@ -77090,9 +77091,9 @@
"integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g=="
},
"prettier": {
"version": "2.5.1",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-2.5.1.tgz",
"integrity": "sha512-vBZcPRUR5MZJwoyi3ZoyQlc1rXeEck8KgeC9AwwOn+exuxLxq5toTRDTSaVrXHxelDMHy9zlicw8u66yxoSUFg==",
"version": "3.2.5",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.2.5.tgz",
"integrity": "sha512-3/GWa9aOC0YeD7LUfvOG2NiDyhOWRvt1k+rcKhOuYnMY24iiCphgneUfJDyFXd6rZCAnuLBv6UeAULtrhT/F4A==",
"dev": true
},
"prettier-linter-helpers": {

View file

@@ -17,7 +17,7 @@
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-prettier": "^4.0.0",
"eslint-plugin-promise": "^6.0.0",
"prettier": "2.5.1",
"prettier": "3.2.5",
"resolve-url-loader": "^5.0.0",
"sass": "^1.69.5",
"sass-loader": "^13.3.2",

View file

@@ -382,9 +382,7 @@ if (process.env.OVERLEAF_EMAIL_FROM_ADDRESS != null) {
// i18n
if (process.env.OVERLEAF_LANG_DOMAIN_MAPPING != null) {
settings.i18n.subdomainLang = parse(
process.env.OVERLEAF_LANG_DOMAIN_MAPPING
)
settings.i18n.subdomainLang = parse(process.env.OVERLEAF_LANG_DOMAIN_MAPPING)
}
// Password Settings

View file

@@ -1,39 +1,39 @@
module.exports = [
{
name: 'web'
name: 'web',
},
{
name: 'real-time'
name: 'real-time',
},
{
name: 'document-updater'
name: 'document-updater',
},
{
name: 'clsi'
name: 'clsi',
},
{
name: 'filestore'
name: 'filestore',
},
{
name: 'docstore'
name: 'docstore',
},
{
name: 'chat'
name: 'chat',
},
{
name: 'spelling'
name: 'spelling',
},
{
name: 'contacts'
name: 'contacts',
},
{
name: 'notifications'
name: 'notifications',
},
{
name: 'project-history'
name: 'project-history',
},
{
name: 'history-v1'
name: 'history-v1',
},
]
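
Every entry in this services list gains a trailing comma. Prettier 3's new default of trailingComma: "all" (previously "es5") would not by itself add commas inside plain object literals, since "es5" already did that; presumably this file was formatted with "none", or not formatted at all, before this commit (the repository's Prettier config is not visible in this diff). The "all" default does show up elsewhere, e.g. in the TypeScript tuple type in useReviewPanelState at the end of the commit, where a comma appears after the last tuple element.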

View file

@@ -47,9 +47,8 @@ describe('Destroying a project', async function () {
const globalThreadMessage = await getMessage(this.globalThreadMessageId)
expect(globalThreadMessage).to.exist
const { response: responseDestroy } = await ChatClient.destroyProject(
projectId
)
const { response: responseDestroy } =
await ChatClient.destroyProject(projectId)
expect(responseDestroy.statusCode).to.equal(204)
})
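
This is the awaited-assignment pattern that recurs through the rest of the commit: when the whole statement overflows, Prettier 3 breaks after the = sign and keeps the call intact, where v2 kept the assignment head on one line and broke the call's arguments. The hunk above as a before/after sketch:

  // Prettier 2
  const { response: responseDestroy } = await ChatClient.destroyProject(
    projectId
  )

  // Prettier 3
  const { response: responseDestroy } =
    await ChatClient.destroyProject(projectId)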

View file

@@ -43,9 +43,8 @@ describe('Editing a message', async function () {
})
it('should then list the updated message in the threads', async function () {
const { response, body: threads } = await ChatClient.getThreads(
projectId
)
const { response, body: threads } =
await ChatClient.getThreads(projectId)
expect(response.statusCode).to.equal(200)
expect(threads[threadId].messages.length).to.equal(1)
expect(threads[threadId].messages[0].content).to.equal(newContent)
@@ -65,9 +64,8 @@ describe('Editing a message', async function () {
})
it('should then list the updated message in the threads', async function () {
const { response, body: threads } = await ChatClient.getThreads(
projectId
)
const { response, body: threads } =
await ChatClient.getThreads(projectId)
expect(response.statusCode).to.equal(200)
expect(threads[threadId].messages.length).to.equal(1)
expect(threads[threadId].messages[0].content).to.equal(newContent)
@@ -87,9 +85,8 @@ describe('Editing a message', async function () {
})
it('should then list the old message in the threads', async function () {
const { response, body: threads } = await ChatClient.getThreads(
projectId
)
const { response, body: threads } =
await ChatClient.getThreads(projectId)
expect(response.statusCode).to.equal(200)
expect(threads[threadId].messages.length).to.equal(1)
expect(threads[threadId].messages[0].content).to.equal(content)

View file

@@ -46,9 +46,8 @@ describe('Getting messages', async function () {
})
it('should contain the messages and populated users when getting the messages', async function () {
const { response, body: messages } = await ChatClient.getGlobalMessages(
projectId
)
const { response, body: messages } =
await ChatClient.getGlobalMessages(projectId)
expect(response.statusCode).to.equal(200)
expect(messages.length).to.equal(2)
messages.reverse()

View file

@@ -26,9 +26,8 @@ describe('Sending a message', async function () {
})
it('should then list the message in the project messages', async function () {
const { response, body: messages } = await ChatClient.getGlobalMessages(
projectId
)
const { response, body: messages } =
await ChatClient.getGlobalMessages(projectId)
expect(response.statusCode).to.equal(200)
expect(messages.length).to.equal(1)
expect(messages[0].content).to.equal(content)
@@ -61,9 +60,8 @@ describe('Sending a message', async function () {
})
it('should not appear in the global messages', async function () {
const { response, body: messages } = await ChatClient.getGlobalMessages(
projectId
)
const { response, body: messages } =
await ChatClient.getGlobalMessages(projectId)
expect(response.statusCode).to.equal(200)
expect(messages.length).to.equal(0)
})

View file

@@ -88,15 +88,18 @@ module.exports = ProjectPersistenceManager = {
})
},
() => {
setInterval(() => {
ProjectPersistenceManager.refreshExpiryTimeout(() => {
ProjectPersistenceManager.clearExpiredProjects(err => {
if (err) {
logger.error({ err }, 'clearing expired projects failed')
}
setInterval(
() => {
ProjectPersistenceManager.refreshExpiryTimeout(() => {
ProjectPersistenceManager.clearExpiredProjects(err => {
if (err) {
logger.error({ err }, 'clearing expired projects failed')
}
})
})
})
}, 10 * 60 * 1000)
},
10 * 60 * 1000
)
}
)
})
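
Prettier 2 hugged a leading callback even when the other argument forced a break; v3 only keeps the hug when the remaining arguments are simple, and an expression like 10 * 60 * 1000 apparently does not qualify, so every argument moves onto its own indented line. A minimal sketch (doWork is a stand-in for the refreshExpiryTimeout/clearExpiredProjects body above):

  // Prettier 2
  setInterval(() => {
    doWork()
  }, 10 * 60 * 1000)

  // Prettier 3
  setInterval(
    () => {
      doWork()
    },
    10 * 60 * 1000
  )

The StressTestClient.continue hunk below shows the same change for setTimeout.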
@@ -111,39 +114,38 @@ module.exports = ProjectPersistenceManager = {
if (callback == null) {
callback = function () {}
}
return ProjectPersistenceManager._findExpiredProjectIds(function (
error,
projectIds
) {
if (error != null) {
return callback(error)
}
logger.debug({ projectIds }, 'clearing expired projects')
const jobs = Array.from(projectIds || []).map(projectId =>
(
projectId => callback =>
ProjectPersistenceManager.clearProjectFromCache(
projectId,
{ reason: 'expired' },
function (err) {
if (err != null) {
logger.error({ err, projectId }, 'error clearing project')
}
return callback()
}
)
)(projectId)
)
return async.series(jobs, function (error) {
return ProjectPersistenceManager._findExpiredProjectIds(
function (error, projectIds) {
if (error != null) {
return callback(error)
}
return CompileManager.clearExpiredProjects(
ProjectPersistenceManager.EXPIRY_TIMEOUT,
error => callback(error)
logger.debug({ projectIds }, 'clearing expired projects')
const jobs = Array.from(projectIds || []).map(projectId =>
(
projectId => callback =>
ProjectPersistenceManager.clearProjectFromCache(
projectId,
{ reason: 'expired' },
function (err) {
if (err != null) {
logger.error({ err, projectId }, 'error clearing project')
}
return callback()
}
)
)(projectId)
)
})
})
return async.series(jobs, function (error) {
if (error != null) {
return callback(error)
}
return CompileManager.clearExpiredProjects(
ProjectPersistenceManager.EXPIRY_TIMEOUT,
error => callback(error)
)
})
}
)
}, // ignore any errors from deleting directories
clearProject(projectId, userId, callback) {

View file

@@ -74,9 +74,8 @@ const UpdateManager = {
doc_id: docId,
})
const updates = await RealTimeRedisManager.promises.getPendingUpdatesForDoc(
docId
)
const updates =
await RealTimeRedisManager.promises.getPendingUpdatesForDoc(docId)
logger.debug(
{ projectId, docId, count: updates.length },
'processing updates'

View file

@@ -129,8 +129,8 @@ type._takeDoc = takeDoc = function (
? part.slice(position.offset, position.offset + maxlength)
: part.slice(position.offset)
: maxlength === undefined || tombsIndivisible
? part - position.offset
: Math.min(maxlength, part - position.offset)
? part - position.offset
: Math.min(maxlength, part - position.offset)
const resultLen = result.length || result

View file

@@ -162,9 +162,12 @@ class StressTestClient {
continue() {
if (this.updateCount > 0) {
this.updateCount--
return setTimeout(() => {
return this.sendUpdate()
}, this.options.updateDelay * (0.5 + Math.random()))
return setTimeout(
() => {
return this.sendUpdate()
},
this.options.updateDelay * (0.5 + Math.random())
)
} else {
return this.updateCallback()
}

View file

@@ -26,25 +26,24 @@ function getBlobHash(byteLength) {
* @param {stream.Readable} stream
* @return {Promise.<string>} hexadecimal SHA-1 hash
*/
exports.fromStream = BPromise.method(function blobHashFromStream(
byteLength,
stream
) {
assert.integer(byteLength, 'blobHash: bad byteLength')
assert.object(stream, 'blobHash: bad stream')
exports.fromStream = BPromise.method(
function blobHashFromStream(byteLength, stream) {
assert.integer(byteLength, 'blobHash: bad byteLength')
assert.object(stream, 'blobHash: bad stream')
const hash = getBlobHash(byteLength)
return new BPromise(function (resolve, reject) {
pipeline(stream, hash, function (err) {
if (err) {
reject(err)
} else {
hash.end()
resolve(hash.read())
}
const hash = getBlobHash(byteLength)
return new BPromise(function (resolve, reject) {
pipeline(stream, hash, function (err) {
if (err) {
reject(err)
} else {
hash.end()
resolve(hash.read())
}
})
})
})
})
}
)
/**
* Compute the git blob hash for a blob with the given string content.
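
Related to the hug changes above: when the sole argument is a named function expression whose parameter list no longer fits, v2 broke the parameters while keeping the function hugged against the call; v3 instead drops the whole function onto its own indented line so the parameters can stay together. Condensed from the hunk above:

  // Prettier 2
  exports.fromStream = BPromise.method(function blobHashFromStream(
    byteLength,
    stream
  ) {
    // ...
  })

  // Prettier 3
  exports.fromStream = BPromise.method(
    function blobHashFromStream(byteLength, stream) {
      // ...
    }
  )

This accounts for the large but purely indentational hunks in getFailures, flushOldOps, AdminController, and the wrapWithLock callbacks in ProjectEntityUpdateHandler below.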

View file

@@ -194,111 +194,113 @@ export function getFailures(callback) {
if (callback == null) {
callback = function () {}
}
return getFailuresByType(function (
error,
failureCounts,
failureAttempts,
failureRequests,
maxQueueSize
) {
let attempts, failureType, label, requests
if (error != null) {
return callback(OError.tag(error))
return getFailuresByType(
function (
error,
failureCounts,
failureAttempts,
failureRequests,
maxQueueSize
) {
let attempts, failureType, label, requests
if (error != null) {
return callback(OError.tag(error))
}
const shortNames = {
'Error: bad response from filestore: 404': 'filestore-404',
'Error: bad response from filestore: 500': 'filestore-500',
'NotFoundError: got a 404 from web api': 'web-api-404',
'Error: history store a non-success status code: 413':
'history-store-413',
'Error: history store a non-success status code: 422':
'history-store-422',
'Error: history store a non-success status code: 500':
'history-store-500',
'Error: history store a non-success status code: 503':
'history-store-503',
'Error: web returned a non-success status code: 500 (attempts: 2)':
'web-500',
'Error: ESOCKETTIMEDOUT': 'socket-timeout',
'Error: no project found': 'no-project-found',
'OpsOutOfOrderError: project structure version out of order on incoming updates':
'incoming-project-version-out-of-order',
'OpsOutOfOrderError: doc version out of order on incoming updates':
'incoming-doc-version-out-of-order',
'OpsOutOfOrderError: project structure version out of order':
'chunk-project-version-out-of-order',
'OpsOutOfOrderError: doc version out of order':
'chunk-doc-version-out-of-order',
'Error: failed to extend lock': 'lock-overrun',
'Error: tried to release timed out lock': 'lock-overrun',
'Error: Timeout': 'lock-overrun',
'Error: sync ongoing': 'sync-ongoing',
'SyncError: unexpected resyncProjectStructure update': 'sync-error',
'[object Error]': 'unknown-error-object',
'UpdateWithUnknownFormatError: update with unknown format':
'unknown-format',
'Error: update with unknown format': 'unknown-format',
'TextOperationError: The base length of the second operation has to be the target length of the first operation':
'text-op-error',
'Error: ENOSPC: no space left on device, write': 'ENOSPC',
'*': 'other',
}
// set all the known errors to zero if not present (otherwise gauges stay on their last value)
const summaryCounts = {}
const summaryAttempts = {}
const summaryRequests = {}
const summaryMaxQueueSize = {}
for (failureType in shortNames) {
label = shortNames[failureType]
summaryCounts[label] = 0
summaryAttempts[label] = 0
summaryRequests[label] = 0
summaryMaxQueueSize[label] = 0
}
// record a metric for each type of failure
for (failureType in failureCounts) {
const failureCount = failureCounts[failureType]
label = shortNames[failureType] || shortNames['*']
summaryCounts[label] += failureCount
summaryAttempts[label] += failureAttempts[failureType]
summaryRequests[label] += failureRequests[failureType]
summaryMaxQueueSize[label] = Math.max(
maxQueueSize[failureType],
summaryMaxQueueSize[label]
)
}
for (label in summaryCounts) {
const count = summaryCounts[label]
metrics.globalGauge('failed', count, 1, { status: label })
}
for (label in summaryAttempts) {
attempts = summaryAttempts[label]
metrics.globalGauge('attempts', attempts, 1, { status: label })
}
for (label in summaryRequests) {
requests = summaryRequests[label]
metrics.globalGauge('requests', requests, 1, { status: label })
}
for (label in summaryMaxQueueSize) {
const queueSize = summaryMaxQueueSize[label]
metrics.globalGauge('max-queue-size', queueSize, 1, { status: label })
}
return callback(null, {
counts: summaryCounts,
attempts: summaryAttempts,
requests: summaryRequests,
maxQueueSize: summaryMaxQueueSize,
})
}
const shortNames = {
'Error: bad response from filestore: 404': 'filestore-404',
'Error: bad response from filestore: 500': 'filestore-500',
'NotFoundError: got a 404 from web api': 'web-api-404',
'Error: history store a non-success status code: 413':
'history-store-413',
'Error: history store a non-success status code: 422':
'history-store-422',
'Error: history store a non-success status code: 500':
'history-store-500',
'Error: history store a non-success status code: 503':
'history-store-503',
'Error: web returned a non-success status code: 500 (attempts: 2)':
'web-500',
'Error: ESOCKETTIMEDOUT': 'socket-timeout',
'Error: no project found': 'no-project-found',
'OpsOutOfOrderError: project structure version out of order on incoming updates':
'incoming-project-version-out-of-order',
'OpsOutOfOrderError: doc version out of order on incoming updates':
'incoming-doc-version-out-of-order',
'OpsOutOfOrderError: project structure version out of order':
'chunk-project-version-out-of-order',
'OpsOutOfOrderError: doc version out of order':
'chunk-doc-version-out-of-order',
'Error: failed to extend lock': 'lock-overrun',
'Error: tried to release timed out lock': 'lock-overrun',
'Error: Timeout': 'lock-overrun',
'Error: sync ongoing': 'sync-ongoing',
'SyncError: unexpected resyncProjectStructure update': 'sync-error',
'[object Error]': 'unknown-error-object',
'UpdateWithUnknownFormatError: update with unknown format':
'unknown-format',
'Error: update with unknown format': 'unknown-format',
'TextOperationError: The base length of the second operation has to be the target length of the first operation':
'text-op-error',
'Error: ENOSPC: no space left on device, write': 'ENOSPC',
'*': 'other',
}
// set all the known errors to zero if not present (otherwise gauges stay on their last value)
const summaryCounts = {}
const summaryAttempts = {}
const summaryRequests = {}
const summaryMaxQueueSize = {}
for (failureType in shortNames) {
label = shortNames[failureType]
summaryCounts[label] = 0
summaryAttempts[label] = 0
summaryRequests[label] = 0
summaryMaxQueueSize[label] = 0
}
// record a metric for each type of failure
for (failureType in failureCounts) {
const failureCount = failureCounts[failureType]
label = shortNames[failureType] || shortNames['*']
summaryCounts[label] += failureCount
summaryAttempts[label] += failureAttempts[failureType]
summaryRequests[label] += failureRequests[failureType]
summaryMaxQueueSize[label] = Math.max(
maxQueueSize[failureType],
summaryMaxQueueSize[label]
)
}
for (label in summaryCounts) {
const count = summaryCounts[label]
metrics.globalGauge('failed', count, 1, { status: label })
}
for (label in summaryAttempts) {
attempts = summaryAttempts[label]
metrics.globalGauge('attempts', attempts, 1, { status: label })
}
for (label in summaryRequests) {
requests = summaryRequests[label]
metrics.globalGauge('requests', requests, 1, { status: label })
}
for (label in summaryMaxQueueSize) {
const queueSize = summaryMaxQueueSize[label]
metrics.globalGauge('max-queue-size', queueSize, 1, { status: label })
}
return callback(null, {
counts: summaryCounts,
attempts: summaryAttempts,
requests: summaryRequests,
maxQueueSize: summaryMaxQueueSize,
})
})
)
}
export const promises = {

View file

@@ -59,79 +59,81 @@ export function flushOldOps(options, callback) {
if (error != null) {
return callback(OError.tag(error))
}
return ErrorRecorder.getFailedProjects(function (
error,
projectHistoryFailures
) {
if (error != null) {
return callback(OError.tag(error))
}
// exclude failed projects already in projectHistoryFailures
const failedProjects = new Set()
for (const entry of Array.from(projectHistoryFailures)) {
failedProjects.add(entry.project_id)
}
// randomise order so we get different projects if there is a limit
projectIds = _.shuffle(projectIds)
const maxAge = options.maxAge || 6 * 3600 // default to 6 hours
const cutoffTime = new Date(Date.now() - maxAge * 1000)
const startTime = new Date()
let count = 0
const jobs = projectIds.map(
projectId =>
function (cb) {
const timeTaken = new Date() - startTime
count++
if (
(options != null ? options.timeout : undefined) &&
timeTaken > options.timeout
) {
// finish early due to timeout, return an error to bail out of the async iteration
logger.debug('background retries timed out')
return cb(new OError('retries timed out'))
}
if (
(options != null ? options.limit : undefined) &&
count > options.limit
) {
// finish early due to reaching limit, return an error to bail out of the async iteration
logger.debug({ count }, 'background retries hit limit')
return cb(new OError('hit limit'))
}
if (failedProjects.has(projectId)) {
// skip failed projects
return setTimeout(cb, options.queueDelay || 100) // pause between flushes
}
return flushIfOld(projectId, cutoffTime, function (err) {
if (err != null) {
logger.warn(
{ projectId, flushErr: err },
'error flushing old project'
)
return ErrorRecorder.getFailedProjects(
function (error, projectHistoryFailures) {
if (error != null) {
return callback(OError.tag(error))
}
// exclude failed projects already in projectHistoryFailures
const failedProjects = new Set()
for (const entry of Array.from(projectHistoryFailures)) {
failedProjects.add(entry.project_id)
}
// randomise order so we get different projects if there is a limit
projectIds = _.shuffle(projectIds)
const maxAge = options.maxAge || 6 * 3600 // default to 6 hours
const cutoffTime = new Date(Date.now() - maxAge * 1000)
const startTime = new Date()
let count = 0
const jobs = projectIds.map(
projectId =>
function (cb) {
const timeTaken = new Date() - startTime
count++
if (
(options != null ? options.timeout : undefined) &&
timeTaken > options.timeout
) {
// finish early due to timeout, return an error to bail out of the async iteration
logger.debug('background retries timed out')
return cb(new OError('retries timed out'))
}
if (
(options != null ? options.limit : undefined) &&
count > options.limit
) {
// finish early due to reaching limit, return an error to bail out of the async iteration
logger.debug({ count }, 'background retries hit limit')
return cb(new OError('hit limit'))
}
if (failedProjects.has(projectId)) {
// skip failed projects
return setTimeout(cb, options.queueDelay || 100) // pause between flushes
}
return flushIfOld(projectId, cutoffTime, function (err) {
if (err != null) {
logger.warn(
{ projectId, flushErr: err },
'error flushing old project'
)
}
return setTimeout(cb, options.queueDelay || 100)
})
}
) // pause between flushes
return async.series(
async.reflectAll(jobs),
function (error, results) {
const success = []
const failure = []
results.forEach((result, i) => {
if (
result.error != null &&
!['retries timed out', 'hit limit'].includes(
result?.error?.message
)
) {
// ignore expected errors
return failure.push(projectIds[i])
} else {
return success.push(projectIds[i])
}
return setTimeout(cb, options.queueDelay || 100)
})
return callback(error, { success, failure, failedProjects })
}
) // pause between flushes
return async.series(async.reflectAll(jobs), function (error, results) {
const success = []
const failure = []
results.forEach((result, i) => {
if (
result.error != null &&
!['retries timed out', 'hit limit'].includes(
result?.error?.message
)
) {
// ignore expected errors
return failure.push(projectIds[i])
} else {
return success.push(projectIds[i])
}
})
return callback(error, { success, failure, failedProjects })
})
})
)
}
)
}
)
}

View file

@@ -175,9 +175,8 @@ async function checkProjectHasHistoryId(projectId) {
async function waitUntilRedisQueueIsEmpty(projectId) {
for (let attempts = 0; attempts < 30; attempts++) {
const updatesCount = await RedisManager.promises.countUnprocessedUpdates(
projectId
)
const updatesCount =
await RedisManager.promises.countUnprocessedUpdates(projectId)
if (updatesCount === 0) {
return
}

View file

@@ -252,12 +252,12 @@ function _concatTwoUpdates(firstUpdate, secondUpdate) {
firstOp.p === secondOp.p
) {
offset = firstOp.p
const diffUpdates = diffAsShareJsOps(firstOp.d, secondOp.i).map(function (
op
) {
op.p += offset
return mergeUpdatesWithOp(firstUpdate, secondUpdate, op)
})
const diffUpdates = diffAsShareJsOps(firstOp.d, secondOp.i).map(
function (op) {
op.p += offset
return mergeUpdatesWithOp(firstUpdate, secondUpdate, op)
}
)
// Doing a diff like this loses track of the doc lengths for each
// update, so recalculate them

View file

@@ -25,9 +25,8 @@ async function main() {
let clearedTimestamps = 0
let processed = 0
for (const projectId of projectIdsWithFirstOpTimestamps) {
const result = await RedisManager.promises.clearDanglingFirstOpTimestamp(
projectId
)
const result =
await RedisManager.promises.clearDanglingFirstOpTimestamp(projectId)
processed++
clearedTimestamps += result
if (processed % 1000 === 0) {

View file

@@ -8,10 +8,10 @@ module.exports = {
const redisType = x.cluster
? 'cluster'
: x.sentinels
? 'sentinel'
: x.host
? 'single'
: 'unknown'
? 'sentinel'
: x.host
? 'single'
: 'unknown'
logger.debug({ redis: redisType }, 'creating redis client')
return redis.createClient(x)
})

View file

@@ -166,9 +166,8 @@ const AuthenticationManager = {
let isPasswordReused
try {
isPasswordReused = await HaveIBeenPwned.promises.checkPasswordForReuse(
password
)
isPasswordReused =
await HaveIBeenPwned.promises.checkPasswordForReuse(password)
} catch (err) {
logger.err({ err }, 'cannot check password for re-use')
}
@@ -333,9 +332,8 @@ const AuthenticationManager = {
let isPasswordReused
try {
isPasswordReused = await HaveIBeenPwned.promises.checkPasswordForReuse(
password
)
isPasswordReused =
await HaveIBeenPwned.promises.checkPasswordForReuse(password)
} catch (error) {
logger.err({ error }, 'cannot check password for re-use')
}

View file

@@ -34,9 +34,8 @@ const CollaboratorsInviteController = {
async getAllInvites(req, res) {
const projectId = req.params.Project_id
logger.debug({ projectId }, 'getting all active invites for project')
const invites = await CollaboratorsInviteHandler.promises.getAllInvites(
projectId
)
const invites =
await CollaboratorsInviteHandler.promises.getAllInvites(projectId)
res.json({ invites })
},
@@ -115,9 +114,8 @@ const CollaboratorsInviteController = {
return res.status(400).json({ errorReason: 'invalid_email' })
}
const underRateLimit = await CollaboratorsInviteController._checkRateLimit(
sendingUserId
)
const underRateLimit =
await CollaboratorsInviteController._checkRateLimit(sendingUserId)
if (!underRateLimit) {
return res.sendStatus(429)
}

View file

@@ -152,17 +152,15 @@ async function joinProject(req, res, next) {
}
async function _buildJoinProjectView(req, projectId, userId) {
const project = await ProjectGetter.promises.getProjectWithoutDocLines(
projectId
)
const project =
await ProjectGetter.promises.getProjectWithoutDocLines(projectId)
if (project == null) {
throw new Errors.NotFoundError('project not found')
}
let deletedDocsFromDocstore = []
try {
deletedDocsFromDocstore = await DocstoreManager.promises.getAllDeletedDocs(
projectId
)
deletedDocsFromDocstore =
await DocstoreManager.promises.getAllDeletedDocs(projectId)
} catch (err) {
// The query in docstore is not optimized at this time and fails for
// projects with many very large, deleted documents.
@@ -187,9 +185,8 @@ async function _buildJoinProjectView(req, projectId, userId) {
if (privilegeLevel == null || privilegeLevel === PrivilegeLevels.NONE) {
return { project: null, privilegeLevel: null, isRestrictedUser: false }
}
const invites = await CollaboratorsInviteHandler.promises.getAllInvites(
projectId
)
const invites =
await CollaboratorsInviteHandler.promises.getAllInvites(projectId)
const isTokenMember = await CollaboratorsHandler.promises.userIsTokenMember(
userId,
projectId

View file

@@ -375,10 +375,10 @@ module.exports = _.template(`\
? `${settings.email.template.customFooter}<br>`
: ''
}${settings.appName} &bull; <a href="${
settings.siteUrl
}" style="Margin: 0; color: #0F7A06; font-family: Helvetica, Arial, sans-serif; font-weight: normal; line-height: 1.3; margin: 0; padding: 0; text-align: left; text-decoration: none;">${
settings.siteUrl
}</a>
settings.siteUrl
}" style="Margin: 0; color: #0F7A06; font-family: Helvetica, Arial, sans-serif; font-weight: normal; line-height: 1.3; margin: 0; padding: 0; text-align: left; text-decoration: none;">${
settings.siteUrl
}</a>
</small></p>
</td></tr></table>
</td></tr></tbody></table>

View file

@@ -138,8 +138,8 @@ async function injectUserDetails(data) {
const entries = Array.isArray(data.diff)
? data.diff
: Array.isArray(data.updates)
? data.updates
: []
? data.updates
: []
for (const entry of entries) {
for (const user of (entry.meta && entry.meta.users) || []) {
if (typeof user === 'string') {

View file

@@ -105,9 +105,8 @@ const InstitutionsManager = {
async refreshInstitutionUsers(institutionId, notify) {
const refreshFunction = notify ? refreshFeaturesAndNotify : refreshFeatures
const { institution, affiliations } = await fetchInstitutionAndAffiliations(
institutionId
)
const { institution, affiliations } =
await fetchInstitutionAndAffiliations(institutionId)
for (const affiliation of affiliations) {
affiliation.institutionName = institution.name
@@ -316,9 +315,8 @@ async function refreshFeaturesAndNotify(affiliation) {
const getUserInfo = async userId => {
const user = await UserGetter.promises.getUser(userId, { _id: 1 })
const subscription = await SubscriptionLocator.promises.getUsersSubscription(
user
)
const subscription =
await SubscriptionLocator.promises.getUsersSubscription(user)
return { user, subscription }
}

View file

@@ -114,9 +114,8 @@ async function requestReset(req, res, next) {
let status
try {
status = await PasswordResetHandler.promises.generateAndEmailResetToken(
email
)
status =
await PasswordResetHandler.promises.generateAndEmailResetToken(email)
} catch (err) {
OError.tag(err, 'failed to generate and email password reset token', {
email,

View file

@@ -751,8 +751,8 @@ const ProjectController = {
? // TODO: Create React version of detached page
'project/editor_detached'
: idePageReact
? 'project/ide-react'
: 'project/editor'
? 'project/ide-react'
: 'project/editor'
res.render(template, {
title: project.name,

View file

@@ -418,9 +418,8 @@ let deletedFilesProjectIdIndexExist
async function doesDeletedFilesProjectIdIndexExist() {
if (typeof deletedFilesProjectIdIndexExist !== 'boolean') {
// Resolve this about once. No need for locking or retry handling.
deletedFilesProjectIdIndexExist = await db.deletedFiles.indexExists(
'projectId_1'
)
deletedFilesProjectIdIndexExist =
await db.deletedFiles.indexExists('projectId_1')
}
return deletedFilesProjectIdIndexExist
}

View file

@@ -265,9 +265,8 @@ async function mkdirp(projectId, path, options = {}) {
// to make matching case-sensitive
const folders = path.split('/').filter(folder => folder.length !== 0)
const project = await ProjectGetter.promises.getProjectWithOnlyFolders(
projectId
)
const project =
await ProjectGetter.promises.getProjectWithOnlyFolders(projectId)
if (path === '/') {
return { newFolders: [], folder: project.rootFolder[0] }
}

View file

@@ -645,153 +645,159 @@ const ProjectEntityUpdateHandler = {
)
},
upsertDoc: wrapWithLock(function (
projectId,
folderId,
docName,
docLines,
source,
userId,
callback
) {
if (!SafePath.isCleanFilename(docName)) {
return callback(new Errors.InvalidNameError('invalid element name'))
}
ProjectLocator.findElement(
{ project_id: projectId, element_id: folderId, type: 'folder' },
(error, folder, folderPath) => {
if (error != null) {
if (error instanceof Errors.NotFoundError && folder == null) {
return callback(new Error('folder_not_found'))
}
return callback(error)
}
if (folder == null) {
return callback(new Error("Couldn't find folder"))
}
const existingDoc = folder.docs.find(({ name }) => name === docName)
const existingFile = folder.fileRefs.find(
({ name }) => name === docName
)
if (existingFile) {
const doc = new Doc({ name: docName })
const filePath = `${folderPath.fileSystem}/${existingFile.name}`
DocstoreManager.updateDoc(
projectId.toString(),
doc._id.toString(),
docLines,
0,
{},
(err, modified, rev) => {
if (err != null) {
return callback(err)
}
doc.rev = rev
ProjectEntityMongoUpdateHandler.replaceFileWithDoc(
projectId,
existingFile._id,
doc,
(err, project) => {
if (err) {
return callback(err)
}
TpdsUpdateSender.addDoc(
{
projectId,
docId: doc._id,
path: filePath,
projectName: project.name,
rev: existingFile.rev + 1,
folderId,
},
err => {
if (err) {
return callback(err)
}
const projectHistoryId =
project.overleaf &&
project.overleaf.history &&
project.overleaf.history.id
const newDocs = [
{
doc,
path: filePath,
docLines: docLines.join('\n'),
},
]
const oldFiles = [
{
file: existingFile,
path: filePath,
},
]
DocumentUpdaterHandler.updateProjectStructure(
projectId,
projectHistoryId,
userId,
{ oldFiles, newDocs, newProject: project },
source,
error => {
if (error != null) {
return callback(error)
}
EditorRealTimeController.emitToRoom(
projectId,
'removeEntity',
existingFile._id,
'convertFileToDoc'
)
callback(null, doc, true)
}
)
}
)
}
)
}
)
} else if (existingDoc) {
DocumentUpdaterHandler.setDocument(
projectId,
existingDoc._id,
userId,
docLines,
source,
(err, result) => {
if (err != null) {
return callback(err)
}
logger.debug(
{ projectId, docId: existingDoc._id },
'notifying users that the document has been updated'
)
// there is no need to flush the doc to mongo at this point as docupdater
// flushes it as part of setDoc.
//
// combine rev from response with existing doc metadata
callback(null, { ...existingDoc, ...result }, existingDoc == null)
}
)
} else {
ProjectEntityUpdateHandler.addDocWithRanges.withoutLock(
projectId,
folderId,
docName,
docLines,
{},
userId,
source,
(err, doc) => {
if (err != null) {
return callback(err)
}
callback(null, doc, existingDoc == null)
}
)
}
upsertDoc: wrapWithLock(
function (
projectId,
folderId,
docName,
docLines,
source,
userId,
callback
) {
if (!SafePath.isCleanFilename(docName)) {
return callback(new Errors.InvalidNameError('invalid element name'))
}
)
}),
ProjectLocator.findElement(
{ project_id: projectId, element_id: folderId, type: 'folder' },
(error, folder, folderPath) => {
if (error != null) {
if (error instanceof Errors.NotFoundError && folder == null) {
return callback(new Error('folder_not_found'))
}
return callback(error)
}
if (folder == null) {
return callback(new Error("Couldn't find folder"))
}
const existingDoc = folder.docs.find(({ name }) => name === docName)
const existingFile = folder.fileRefs.find(
({ name }) => name === docName
)
if (existingFile) {
const doc = new Doc({ name: docName })
const filePath = `${folderPath.fileSystem}/${existingFile.name}`
DocstoreManager.updateDoc(
projectId.toString(),
doc._id.toString(),
docLines,
0,
{},
(err, modified, rev) => {
if (err != null) {
return callback(err)
}
doc.rev = rev
ProjectEntityMongoUpdateHandler.replaceFileWithDoc(
projectId,
existingFile._id,
doc,
(err, project) => {
if (err) {
return callback(err)
}
TpdsUpdateSender.addDoc(
{
projectId,
docId: doc._id,
path: filePath,
projectName: project.name,
rev: existingFile.rev + 1,
folderId,
},
err => {
if (err) {
return callback(err)
}
const projectHistoryId =
project.overleaf &&
project.overleaf.history &&
project.overleaf.history.id
const newDocs = [
{
doc,
path: filePath,
docLines: docLines.join('\n'),
},
]
const oldFiles = [
{
file: existingFile,
path: filePath,
},
]
DocumentUpdaterHandler.updateProjectStructure(
projectId,
projectHistoryId,
userId,
{ oldFiles, newDocs, newProject: project },
source,
error => {
if (error != null) {
return callback(error)
}
EditorRealTimeController.emitToRoom(
projectId,
'removeEntity',
existingFile._id,
'convertFileToDoc'
)
callback(null, doc, true)
}
)
}
)
}
)
}
)
} else if (existingDoc) {
DocumentUpdaterHandler.setDocument(
projectId,
existingDoc._id,
userId,
docLines,
source,
(err, result) => {
if (err != null) {
return callback(err)
}
logger.debug(
{ projectId, docId: existingDoc._id },
'notifying users that the document has been updated'
)
// there is no need to flush the doc to mongo at this point as docupdater
// flushes it as part of setDoc.
//
// combine rev from response with existing doc metadata
callback(
null,
{ ...existingDoc, ...result },
existingDoc == null
)
}
)
} else {
ProjectEntityUpdateHandler.addDocWithRanges.withoutLock(
projectId,
folderId,
docName,
docLines,
{},
userId,
source,
(err, doc) => {
if (err != null) {
return callback(err)
}
callback(null, doc, existingDoc == null)
}
)
}
}
)
}
),
upsertFile: wrapWithLock({
beforeLock(next) {
@@ -983,43 +989,38 @@
},
}),
upsertDocWithPath: wrapWithLock(function (
projectId,
elementPath,
docLines,
source,
userId,
callback
) {
if (!SafePath.isCleanPath(elementPath)) {
return callback(new Errors.InvalidNameError('invalid element name'))
}
const docName = Path.basename(elementPath)
const folderPath = Path.dirname(elementPath)
ProjectEntityUpdateHandler.mkdirp.withoutLock(
projectId,
folderPath,
(err, newFolders, folder) => {
if (err != null) {
return callback(err)
}
ProjectEntityUpdateHandler.upsertDoc.withoutLock(
projectId,
folder._id,
docName,
docLines,
source,
userId,
(err, doc, isNewDoc) => {
if (err != null) {
return callback(err)
}
callback(null, doc, isNewDoc, newFolders, folder)
}
)
upsertDocWithPath: wrapWithLock(
function (projectId, elementPath, docLines, source, userId, callback) {
if (!SafePath.isCleanPath(elementPath)) {
return callback(new Errors.InvalidNameError('invalid element name'))
}
)
}),
const docName = Path.basename(elementPath)
const folderPath = Path.dirname(elementPath)
ProjectEntityUpdateHandler.mkdirp.withoutLock(
projectId,
folderPath,
(err, newFolders, folder) => {
if (err != null) {
return callback(err)
}
ProjectEntityUpdateHandler.upsertDoc.withoutLock(
projectId,
folder._id,
docName,
docLines,
source,
userId,
(err, doc, isNewDoc) => {
if (err != null) {
return callback(err)
}
callback(null, doc, isNewDoc, newFolders, folder)
}
)
}
)
}
),
upsertFileWithPath: wrapWithLock({
beforeLock(next) {
@@ -1115,64 +1116,62 @@ const ProjectEntityUpdateHandler = {
},
}),
deleteEntity: wrapWithLock(function (
projectId,
entityId,
entityType,
userId,
source,
callback
) {
logger.debug({ entityId, entityType, projectId }, 'deleting project entity')
if (entityType == null) {
logger.warn({ err: 'No entityType set', projectId, entityId })
return callback(new Error('No entityType set'))
}
entityType = entityType.toLowerCase()
ProjectEntityMongoUpdateHandler.deleteEntity(
projectId,
entityId,
entityType,
(error, entity, path, projectBeforeDeletion, newProject) => {
if (error != null) {
return callback(error)
}
ProjectEntityUpdateHandler._cleanUpEntity(
projectBeforeDeletion,
newProject,
entity,
entityType,
path.fileSystem,
userId,
source,
(error, subtreeListing) => {
if (error != null) {
return callback(error)
}
const subtreeEntityIds = subtreeListing.map(entry =>
entry.entity._id.toString()
)
TpdsUpdateSender.deleteEntity(
{
projectId,
path: path.fileSystem,
projectName: projectBeforeDeletion.name,
entityId,
entityType,
subtreeEntityIds,
},
error => {
if (error != null) {
return callback(error)
}
callback(null, entityId)
}
)
}
)
deleteEntity: wrapWithLock(
function (projectId, entityId, entityType, userId, source, callback) {
logger.debug(
{ entityId, entityType, projectId },
'deleting project entity'
)
if (entityType == null) {
logger.warn({ err: 'No entityType set', projectId, entityId })
return callback(new Error('No entityType set'))
}
)
}),
entityType = entityType.toLowerCase()
ProjectEntityMongoUpdateHandler.deleteEntity(
projectId,
entityId,
entityType,
(error, entity, path, projectBeforeDeletion, newProject) => {
if (error != null) {
return callback(error)
}
ProjectEntityUpdateHandler._cleanUpEntity(
projectBeforeDeletion,
newProject,
entity,
entityType,
path.fileSystem,
userId,
source,
(error, subtreeListing) => {
if (error != null) {
return callback(error)
}
const subtreeEntityIds = subtreeListing.map(entry =>
entry.entity._id.toString()
)
TpdsUpdateSender.deleteEntity(
{
projectId,
path: path.fileSystem,
projectName: projectBeforeDeletion.name,
entityId,
entityType,
subtreeEntityIds,
},
error => {
if (error != null) {
return callback(error)
}
callback(null, entityId)
}
)
}
)
}
)
}
),
deleteEntityWithPath: wrapWithLock(
(projectId, path, userId, source, callback) =>
@@ -1225,166 +1224,167 @@ const ProjectEntityUpdateHandler = {
)
}),
addFolder: wrapWithLock(function (
projectId,
parentFolderId,
folderName,
callback
) {
if (!SafePath.isCleanFilename(folderName)) {
return callback(new Errors.InvalidNameError('invalid element name'))
addFolder: wrapWithLock(
function (projectId, parentFolderId, folderName, callback) {
if (!SafePath.isCleanFilename(folderName)) {
return callback(new Errors.InvalidNameError('invalid element name'))
}
ProjectEntityMongoUpdateHandler.addFolder(
projectId,
parentFolderId,
folderName,
callback
)
}
ProjectEntityMongoUpdateHandler.addFolder(
),
moveEntity: wrapWithLock(
function (
projectId,
parentFolderId,
folderName,
entityId,
destFolderId,
entityType,
userId,
source,
callback
)
}),
moveEntity: wrapWithLock(function (
projectId,
entityId,
destFolderId,
entityType,
userId,
source,
callback
) {
logger.debug(
{ entityType, entityId, projectId, destFolderId },
'moving entity'
)
if (entityType == null) {
logger.warn({ err: 'No entityType set', projectId, entityId })
return callback(new Error('No entityType set'))
}
entityType = entityType.toLowerCase()
DocumentUpdaterHandler.flushProjectToMongo(projectId, err => {
if (err) {
return callback(err)
}
ProjectEntityMongoUpdateHandler.moveEntity(
projectId,
entityId,
destFolderId,
entityType,
(err, project, startPath, endPath, rev, changes) => {
if (err != null) {
return callback(err)
}
const projectHistoryId =
project.overleaf &&
project.overleaf.history &&
project.overleaf.history.id
TpdsUpdateSender.moveEntity(
{
projectId,
projectName: project.name,
startPath,
endPath,
rev,
entityId,
entityType,
folderId: destFolderId,
},
err => {
if (err) {
logger.error({ err }, 'error sending tpds update')
}
DocumentUpdaterHandler.updateProjectStructure(
projectId,
projectHistoryId,
userId,
changes,
source,
callback
)
}
)
}
) {
logger.debug(
{ entityType, entityId, projectId, destFolderId },
'moving entity'
)
})
}),
renameEntity: wrapWithLock(function (
projectId,
entityId,
entityType,
newName,
userId,
source,
callback
) {
if (!newName || typeof newName !== 'string') {
const err = new OError('invalid newName value', {
value: newName,
type: typeof newName,
projectId,
entityId,
entityType,
userId,
source,
if (entityType == null) {
logger.warn({ err: 'No entityType set', projectId, entityId })
return callback(new Error('No entityType set'))
}
entityType = entityType.toLowerCase()
DocumentUpdaterHandler.flushProjectToMongo(projectId, err => {
if (err) {
return callback(err)
}
ProjectEntityMongoUpdateHandler.moveEntity(
projectId,
entityId,
destFolderId,
entityType,
(err, project, startPath, endPath, rev, changes) => {
if (err != null) {
return callback(err)
}
const projectHistoryId =
project.overleaf &&
project.overleaf.history &&
project.overleaf.history.id
TpdsUpdateSender.moveEntity(
{
projectId,
projectName: project.name,
startPath,
endPath,
rev,
entityId,
entityType,
folderId: destFolderId,
},
err => {
if (err) {
logger.error({ err }, 'error sending tpds update')
}
DocumentUpdaterHandler.updateProjectStructure(
projectId,
projectHistoryId,
userId,
changes,
source,
callback
)
}
)
}
)
})
logger.error({ err }, 'Invalid newName passed to renameEntity')
return callback(err)
}
if (!SafePath.isCleanFilename(newName)) {
return callback(new Errors.InvalidNameError('invalid element name'))
}
logger.debug({ entityId, projectId }, `renaming ${entityType}`)
if (entityType == null) {
logger.warn({ err: 'No entityType set', projectId, entityId })
return callback(new Error('No entityType set'))
}
entityType = entityType.toLowerCase()
),
DocumentUpdaterHandler.flushProjectToMongo(projectId, err => {
if (err) {
renameEntity: wrapWithLock(
function (
projectId,
entityId,
entityType,
newName,
userId,
source,
callback
) {
if (!newName || typeof newName !== 'string') {
const err = new OError('invalid newName value', {
value: newName,
type: typeof newName,
projectId,
entityId,
entityType,
userId,
source,
})
logger.error({ err }, 'Invalid newName passed to renameEntity')
return callback(err)
}
ProjectEntityMongoUpdateHandler.renameEntity(
projectId,
entityId,
entityType,
newName,
(err, project, startPath, endPath, rev, changes) => {
if (err != null) {
return callback(err)
}
const projectHistoryId =
project.overleaf &&
project.overleaf.history &&
project.overleaf.history.id
TpdsUpdateSender.moveEntity(
{
projectId,
projectName: project.name,
startPath,
endPath,
rev,
entityId,
entityType,
folderId: null, // this means the folder has not changed
},
err => {
if (err) {
logger.error({ err }, 'error sending tpds update')
}
DocumentUpdaterHandler.updateProjectStructure(
projectId,
projectHistoryId,
userId,
changes,
source,
callback
)
}
)
if (!SafePath.isCleanFilename(newName)) {
return callback(new Errors.InvalidNameError('invalid element name'))
}
logger.debug({ entityId, projectId }, `renaming ${entityType}`)
if (entityType == null) {
logger.warn({ err: 'No entityType set', projectId, entityId })
return callback(new Error('No entityType set'))
}
entityType = entityType.toLowerCase()
DocumentUpdaterHandler.flushProjectToMongo(projectId, err => {
if (err) {
return callback(err)
}
)
})
}),
ProjectEntityMongoUpdateHandler.renameEntity(
projectId,
entityId,
entityType,
newName,
(err, project, startPath, endPath, rev, changes) => {
if (err != null) {
return callback(err)
}
const projectHistoryId =
project.overleaf &&
project.overleaf.history &&
project.overleaf.history.id
TpdsUpdateSender.moveEntity(
{
projectId,
projectName: project.name,
startPath,
endPath,
rev,
entityId,
entityType,
folderId: null, // this means the folder has not changed
},
err => {
if (err) {
logger.error({ err }, 'error sending tpds update')
}
DocumentUpdaterHandler.updateProjectStructure(
projectId,
projectHistoryId,
userId,
changes,
source,
callback
)
}
)
}
)
})
}
),
// This doesn't directly update project structure but we need to take the lock
// to prevent anything else being queued before the resync update
@@ -1471,8 +1471,8 @@ const ProjectEntityUpdateHandler = {
const originalName = entity.folder
? entity.folder.name
: entity.doc
? entity.doc.name
: entity.file.name
? entity.doc.name
: entity.file.name
let newPath = entity.path
let newName = originalName
@@ -1540,8 +1540,8 @@ const ProjectEntityUpdateHandler = {
const entityId = entity.folder
? entity.folder._id
: entity.doc
? entity.doc._id
: entity.file._id
? entity.doc._id
: entity.file._id
const entityType = entity.folder ? 'folder' : entity.doc ? 'doc' : 'file'
ProjectEntityMongoUpdateHandler.renameEntity(
projectId,

View file

@@ -93,19 +93,18 @@ const AdminController = {
})()
}
return SystemMessageManager.getMessagesFromDB(function (
error,
systemMessages
) {
if (error != null) {
return next(error)
return SystemMessageManager.getMessagesFromDB(
function (error, systemMessages) {
if (error != null) {
return next(error)
}
return res.render('admin/index', {
title: 'System Admin',
openSockets,
systemMessages,
})
}
return res.render('admin/index', {
title: 'System Admin',
openSockets,
systemMessages,
})
})
)
},
disconnectAllUsers: (req, res) => {

View file

@@ -114,16 +114,14 @@ async function computeFeatures(userId) {
}
async function _getIndividualFeatures(userId) {
const sub = await SubscriptionLocator.promises.getUserIndividualSubscription(
userId
)
const sub =
await SubscriptionLocator.promises.getUserIndividualSubscription(userId)
return _subscriptionToFeatures(sub)
}
async function _getGroupFeatureSets(userId) {
const subs = await SubscriptionLocator.promises.getGroupSubscriptionsMemberOf(
userId
)
const subs =
await SubscriptionLocator.promises.getGroupSubscriptionsMemberOf(userId)
return (subs || []).map(_subscriptionToFeatures)
}

View file

@@ -28,9 +28,8 @@ async function isUserPartOfGroup(userId, subscriptionId) {
}
async function getTotalConfirmedUsersInGroup(subscriptionId) {
const subscription = await SubscriptionLocator.promises.getSubscription(
subscriptionId
)
const subscription =
await SubscriptionLocator.promises.getSubscription(subscriptionId)
return subscription?.member_ids?.length
}

View file

@@ -207,9 +207,8 @@ async function syncSubscription(recurlySubscription, requesterData) {
// This is used because Recurly doesn't always attempt collection of past due
// invoices after Paypal billing info was updated.
async function attemptPaypalInvoiceCollection(recurlyAccountCode) {
const billingInfo = await RecurlyWrapper.promises.getBillingInfo(
recurlyAccountCode
)
const billingInfo =
await RecurlyWrapper.promises.getBillingInfo(recurlyAccountCode)
if (!billingInfo.paypal_billing_agreement_id) {
// this is not a Paypal user

View file

@@ -47,9 +47,8 @@ async function syncSubscription(
adminUserId,
requesterData = {}
) {
let subscription = await SubscriptionLocator.promises.getUsersSubscription(
adminUserId
)
let subscription =
await SubscriptionLocator.promises.getUsersSubscription(adminUserId)
if (subscription == null) {
subscription = await _createNewSubscription(adminUserId)
}

View file

@@ -66,9 +66,8 @@ async function viewInvite(req, res, next) {
const { token } = req.params
const userId = SessionManager.getLoggedInUserId(req.session)
const { invite, subscription } = await TeamInvitesHandler.promises.getInvite(
token
)
const { invite, subscription } =
await TeamInvitesHandler.promises.getInvite(token)
if (!invite) {
return ErrorController.notFound(req, res)
}

View file

@@ -124,9 +124,8 @@ async function revokeInvite(teamManagerId, subscription, email) {
// email is in Subscription.invited_emails when they join. We'll remove this
// after a short while.
async function createTeamInvitesForLegacyInvitedEmail(email) {
const teams = await SubscriptionLocator.promises.getGroupsWithEmailInvite(
email
)
const teams =
await SubscriptionLocator.promises.getGroupsWithEmailInvite(email)
return Promise.all(
teams.map(team => createInvite(team.admin_id, team, email))
)

View file

@@ -43,9 +43,8 @@ async function writeUpdateToDisk(projectId, updateStream) {
}
async function _findExistingFileType(projectId, path) {
const { docs, files } = await ProjectEntityHandler.promises.getAllEntities(
projectId
)
const { docs, files } =
await ProjectEntityHandler.promises.getAllEntities(projectId)
if (_.some(docs, d => d.path === path)) {
return 'doc'
}

View file

@@ -119,12 +119,10 @@ async function _initializeProjectWithZipContents(
project,
contentsPath
) {
const topLevelDir = await ArchiveManager.promises.findTopLevelDirectory(
contentsPath
)
const importEntries = await FileSystemImportManager.promises.importDir(
topLevelDir
)
const topLevelDir =
await ArchiveManager.promises.findTopLevelDirectory(contentsPath)
const importEntries =
await FileSystemImportManager.promises.importDir(topLevelDir)
const { fileEntries, docEntries } = await _createEntriesFromImports(
project._id,
importEntries

View file

@@ -32,9 +32,8 @@ async function _ensureCanAddIdentifier(userId, institutionEmail, providerId) {
throw new Errors.SAMLAlreadyLinkedError()
}
const userWithEmail = await UserGetter.promises.getUserByAnyEmail(
institutionEmail
)
const userWithEmail =
await UserGetter.promises.getUserByAnyEmail(institutionEmail)
if (!userWithEmail) {
// email doesn't exist; all good

View file

@@ -103,9 +103,8 @@ async function expireDeletedUsersAfterDuration() {
}
async function ensureCanDeleteUser(user) {
const subscription = await SubscriptionLocator.promises.getUsersSubscription(
user
)
const subscription =
await SubscriptionLocator.promises.getUsersSubscription(user)
if (subscription) {
throw new Errors.SubscriptionAdminDeletionError({})
}

View file

@@ -68,9 +68,8 @@ async function getUserFullEmails(userId) {
return decorateFullEmails(user.email, user.emails, [], [])
}
const affiliationsData = await InstitutionsAPIPromises.getUserAffiliations(
userId
)
const affiliationsData =
await InstitutionsAPIPromises.getUserAffiliations(userId)
return decorateFullEmails(
user.email,

View file

@@ -24,9 +24,8 @@ async function postRegistrationAnalytics(userId) {
}
async function checkAffiliations(userId) {
const affiliationsData = await InstitutionsAPIPromises.getUserAffiliations(
userId
)
const affiliationsData =
await InstitutionsAPIPromises.getUserAffiliations(userId)
const hasCommonsAccountAffiliation = affiliationsData.some(
affiliationData =>
affiliationData.institution && affiliationData.institution.commonsAccount

View file

@@ -354,9 +354,8 @@ async function maybeCreateRedundantSubscriptionNotification(userId, email) {
return
}
const affiliations = await InstitutionsAPI.promises.getUserAffiliations(
userId
)
const affiliations =
await InstitutionsAPI.promises.getUserAffiliations(userId)
const confirmedAffiliation = affiliations.find(a => a.email === email)
if (!confirmedAffiliation || confirmedAffiliation.licence === 'free') {
return

View file

@@ -30,11 +30,9 @@ function loadModules() {
}
for (const moduleName of Settings.moduleImportSequence || []) {
const loadedModule = require(Path.join(
MODULE_BASE_PATH,
moduleName,
'index.js'
))
const loadedModule = require(
Path.join(MODULE_BASE_PATH, moduleName, 'index.js')
)
loadedModule.name = moduleName
_modules.push(loadedModule)
}

View file

@@ -4,7 +4,8 @@
*/
@font-face {
font-family: 'FontAwesome';
src: url('font-awesome-v470.woff2') format('woff2'),
src:
url('font-awesome-v470.woff2') format('woff2'),
url('font-awesome-v470.woff') format('woff');
font-weight: normal;
font-style: normal;
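
Prettier 3 also reformats CSS: a declaration whose value is a comma-separated list (font src stacks, font-variation-settings) now breaks after the property name, one value per line. From the @font-face rule above:

  /* Prettier 2 */
  src: url('font-awesome-v470.woff2') format('woff2'),
    url('font-awesome-v470.woff') format('woff');

  /* Prettier 3 */
  src:
    url('font-awesome-v470.woff2') format('woff2'),
    url('font-awesome-v470.woff') format('woff');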

View file

@@ -2,7 +2,8 @@
font-family: 'Lato';
font-style: normal;
font-weight: 400;
src: url('lato/lato-v2-latin-ext-regular.woff2') format('woff2'),
src:
url('lato/lato-v2-latin-ext-regular.woff2') format('woff2'),
url('lato/lato-v2-latin-ext-regular.woff') format('woff');
}
@@ -10,7 +11,8 @@
font-family: 'Lato';
font-style: italic;
font-weight: 400;
src: url('lato/lato-v2-latin-ext-italic.woff2') format('woff2'),
src:
url('lato/lato-v2-latin-ext-italic.woff2') format('woff2'),
url('lato/lato-v2-latin-ext-italic.woff') format('woff');
}
@@ -18,7 +20,8 @@
font-family: 'Lato';
font-style: normal;
font-weight: 700;
src: url('lato/lato-v2-latin-ext-700.woff2') format('woff2'),
src:
url('lato/lato-v2-latin-ext-700.woff2') format('woff2'),
url('lato/lato-v2-latin-ext-700.woff') format('woff');
}
@@ -26,6 +29,7 @@
font-family: 'Lato';
font-style: italic;
font-weight: 700;
src: url('lato/lato-v2-latin-ext-700italic.woff2') format('woff2'),
src:
url('lato/lato-v2-latin-ext-700italic.woff2') format('woff2'),
url('lato/lato-v2-latin-ext-700italic.woff') format('woff');
}

View file

@@ -15,7 +15,11 @@
font-weight: normal;
font-style: normal;
font-size: 20px;
font-variation-settings: 'FILL' 1, 'wght' 400, 'GRAD' 0, 'opsz' 20;
font-variation-settings:
'FILL' 1,
'wght' 400,
'GRAD' 0,
'opsz' 20;
line-height: 1;
letter-spacing: normal;
text-transform: none;

View file

@@ -2,7 +2,9 @@
font-family: 'Merriweather';
font-style: normal;
font-weight: 400;
src: local('Merriweather Regular'), local('Merriweather-Regular'),
src:
local('Merriweather Regular'),
local('Merriweather-Regular'),
url('merriweather-v21-latin-regular.woff2') format('woff2'),
url('merriweather-v21-latin-regular.woff') format('woff');
}
@@ -10,7 +12,9 @@
font-family: 'Merriweather';
font-style: italic;
font-weight: 400;
src: local('Merriweather Italic'), local('Merriweather-Italic'),
src:
local('Merriweather Italic'),
local('Merriweather-Italic'),
url('merriweather-v21-latin-italic.woff2') format('woff2'),
url('merriweather-v21-latin-italic.woff') format('woff');
}
@@ -18,7 +22,9 @@
font-family: 'Merriweather';
font-style: normal;
font-weight: 700;
src: local('Merriweather Bold'), local('Merriweather-Bold'),
src:
local('Merriweather Bold'),
local('Merriweather-Bold'),
url('merriweather-v21-latin-700.woff2') format('woff2'),
url('merriweather-v21-latin-700.woff') format('woff');
}
@@ -26,7 +32,9 @@
font-family: 'Merriweather';
font-style: italic;
font-weight: 700;
src: local('Merriweather Bold Italic'), local('Merriweather-BoldItalic'),
src:
local('Merriweather Bold Italic'),
local('Merriweather-BoldItalic'),
url('merriweather-v21-latin-700italic.woff2') format('woff2'),
url('merriweather-v21-latin-700italic.woff') format('woff');
}


@@ -2,7 +2,9 @@
font-family: 'Open Sans';
font-style: normal;
font-weight: 300;
src: local('Open Sans Light'), local('OpenSans-Light'),
src:
local('Open Sans Light'),
local('OpenSans-Light'),
url('open-sans-v17-latin-300.woff2') format('woff2'),
url('open-sans-v17-latin-300.woff') format('woff');
}
@@ -10,7 +12,9 @@
font-family: 'Open Sans';
font-style: normal;
font-weight: 400;
src: local('Open Sans Regular'), local('OpenSans-Regular'),
src:
local('Open Sans Regular'),
local('OpenSans-Regular'),
url('open-sans-v17-latin-regular.woff2') format('woff2'),
url('open-sans-v17-latin-regular.woff') format('woff');
}
@@ -18,7 +22,9 @@
font-family: 'Open Sans';
font-style: normal;
font-weight: 600;
src: local('Open Sans SemiBold'), local('OpenSans-SemiBold'),
src:
local('Open Sans SemiBold'),
local('OpenSans-SemiBold'),
url('open-sans-v17-latin-600.woff2') format('woff2'),
url('open-sans-v17-latin-600.woff') format('woff');
}
@@ -26,7 +32,9 @@
font-family: 'Open Sans';
font-style: normal;
font-weight: 700;
src: local('Open Sans Bold'), local('OpenSans-Bold'),
src:
local('Open Sans Bold'),
local('OpenSans-Bold'),
url('open-sans-v17-latin-700.woff2') format('woff2'),
url('open-sans-v17-latin-700.woff') format('woff');
}


@@ -2,7 +2,9 @@
font-family: 'Source Code Pro';
font-style: normal;
font-weight: 400;
src: local('Source Code Pro Regular'), local('SourceCodePro-Regular'),
src:
local('Source Code Pro Regular'),
local('SourceCodePro-Regular'),
url('source-code-pro-v13-latin-regular.woff2') format('woff2'),
url('source-code-pro-v13-latin-regular.woff') format('woff');
}


@@ -75,16 +75,19 @@ function getFilesWithOps(
return filesWithOps
} else {
const filesWithOps = files.reduce((curFilesWithOps, file) => {
if ('operation' in file) {
curFilesWithOps.push({
pathname: file.pathname,
editable: isFileEditable(file),
operation: file.operation,
})
}
return curFilesWithOps
}, <FileWithOps[]>[])
const filesWithOps = files.reduce(
(curFilesWithOps, file) => {
if ('operation' in file) {
curFilesWithOps.push({
pathname: file.pathname,
editable: isFileEditable(file),
operation: file.operation,
})
}
return curFilesWithOps
},
<FileWithOps[]>[]
)
return filesWithOps
}
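
Prettier 3 also stops hugging a callback once another argument follows it: reduce with an initial value now gets every argument on its own line, as above. The same shape in a runnable sketch:

const doubled = [1, 2, 3].reduce(
  (acc, n) => {
    acc.push(n * 2)
    return acc
  },
  [] as number[]
)
// doubled is [2, 4, 6]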


@@ -226,8 +226,8 @@ function useReviewPanelState(): ReviewPanel.ReviewPanelState {
MergeAndOverride<
ReviewPanelCommentThread,
ReviewPanelCommentThreadsApi[ThreadId]
>
]
>,
],
]
for (const [threadId, thread] of threadsEntries) {
for (const comment of thread.messages) {
@@ -565,8 +565,8 @@ function useReviewPanelState(): ReviewPanel.ReviewPanelState {
[
UserId,
NonNullable<
typeof trackChangesState[keyof typeof trackChangesState]
>
(typeof trackChangesState)[keyof typeof trackChangesState]
>,
]
>
for (const [userId, { value }] of entries) {
@@ -605,7 +605,7 @@ function useReviewPanelState(): ReviewPanel.ReviewPanelState {
}
const state =
newTrackChangesState[userId] ??
({} as NonNullable<typeof newTrackChangesState[UserId]>)
({} as NonNullable<(typeof newTrackChangesState)[UserId]>)
newTrackChangesState[userId] = state
if (state.syncState == null || state.syncState === 'synced') {


@@ -69,8 +69,8 @@ export function useEditingSessionHeartbeat() {
heartbeatsSent <= 2
? 30
: heartbeatsSent <= 6
? (heartbeatsSent - 2) * 60
: 300
? (heartbeatsSent - 2) * 60
: 300
setNextHeartbeatAt(moment().add(backoffSecs, 'seconds').toDate())
}, [getEditorType, heartbeatsSent, nextHeartbeatAt, projectId])
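
This is the most visible formatting change in Prettier 3: nested ternaries are indented as a chain, each ?/: pair one level deeper than its parent, instead of being flattened to a single column. The backoff logic above as a self-contained sketch:

function backoffSecs(heartbeatsSent: number): number {
  return heartbeatsSent <= 2
    ? 30
    : heartbeatsSent <= 6
      ? (heartbeatsSent - 2) * 60
      : 300
}
// backoffSecs(1) -> 30, backoffSecs(4) -> 120, backoffSecs(7) -> 300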


@@ -111,8 +111,8 @@ function FasterCompilesFeedbackContent() {
{feedback === 'faster'
? t('faster_compiles_feedback_seems_faster')
: feedback === 'same'
? t('faster_compiles_feedback_seems_same')
: t('faster_compiles_feedback_seems_slower')}
? t('faster_compiles_feedback_seems_same')
: t('faster_compiles_feedback_seems_slower')}
</Button>
))}
</div>


@@ -1,8 +1,8 @@
import { lazy, memo } from 'react'
import { useDetachCompileContext as useCompileContext } from '../../../shared/context/detach-compile-context'
const PdfJsViewer = lazy(() =>
import(/* webpackChunkName: "pdf-js-viewer" */ './pdf-js-viewer')
const PdfJsViewer = lazy(
() => import(/* webpackChunkName: "pdf-js-viewer" */ './pdf-js-viewer')
)
function PdfViewer() {


@@ -7,7 +7,7 @@ import { useProjectListContext } from '../../context/project-list-context'
import { useTranslation } from 'react-i18next'
const variants = ['did-you-know', 'on-premise', 'people', 'FOMO'] as const
type GroupsAndEnterpriseBannerVariant = typeof variants[number]
type GroupsAndEnterpriseBannerVariant = (typeof variants)[number]
let viewEventSent = false
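
In type positions, Prettier 3 now parenthesizes typeof inside indexed access types, making explicit that the index applies to the result of the type query. Restating the hunk as a compiling sketch:

const variants = ['did-you-know', 'on-premise', 'people', 'FOMO'] as const
type GroupsAndEnterpriseBannerVariant = (typeof variants)[number]
// i.e. 'did-you-know' | 'on-premise' | 'people' | 'FOMO'

const active: GroupsAndEnterpriseBannerVariant = 'people'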


@@ -116,8 +116,8 @@ function TagsDropdown() {
containsAllSelectedProjects(tag)
? 'check-square-o'
: containsSomeSelectedProjects(tag)
? 'minus-square-o'
: 'square-o'
? 'minus-square-o'
: 'square-o'
}
className="tag-checkbox"
/>{' '}


@@ -9,7 +9,8 @@ type CountryInputProps = {
inputRef?: React.ForwardedRef<HTMLInputElement>
} & React.InputHTMLAttributes<HTMLInputElement>
const itemToString = (item: typeof countries[number] | null) => item?.name ?? ''
const itemToString = (item: (typeof countries)[number] | null) =>
item?.name ?? ''
function Downshift({ setValue, inputRef }: CountryInputProps) {
const { t } = useTranslation()


@@ -58,4 +58,6 @@ for (const domain of commonDomains) {
}
}
export default domainBlocklist as ReadonlyArray<typeof domainBlocklist[number]>
export default domainBlocklist as ReadonlyArray<
(typeof domainBlocklist)[number]
>


@@ -268,8 +268,8 @@ const FigureModalContent = () => {
{helpShown
? t('help')
: sourcePickerShown
? t('replace_figure')
: getTitle(source)}{' '}
? t('replace_figure')
: getTitle(source)}{' '}
<FeedbackBadge
id="figure-modal-feedback"
url="https://forms.gle/PfEtwceYBNQ32DF4A"


@@ -209,8 +209,8 @@ export const FigureModalUploadFileSource: FC = () => {
uploading
? FileUploadStatus.UPLOADING
: uploadError
? FileUploadStatus.ERROR
: FileUploadStatus.NOT_ATTEMPTED
? FileUploadStatus.ERROR
: FileUploadStatus.NOT_ATTEMPTED
}
onDelete={() => {
uppy.removeFile(file.id)


@@ -4,7 +4,7 @@ const shallowEqual = (arr1: unknown[], arr2: unknown[]) =>
// Compares props for a component, but comparing the specified props using
// shallow array comparison rather than identity
export default function comparePropsWithShallowArrayCompare<
T extends Record<string, unknown>
T extends Record<string, unknown>,
>(...args: Array<keyof T>) {
return (prevProps: T, nextProps: T) => {
for (const k in prevProps) {
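
Judging by these hunks, Prettier 3 now emits trailing commas in TypeScript type parameter lists and tuple types (its default trailingComma changed to "all"; function parameter lists here stay comma-free, consistent with an "es5"-style project setting). A hand-broken sketch of the new type-level comma:

function pluck<
  T extends Record<string, unknown>,
>(obj: T, ...keys: Array<keyof T>): Array<T[keyof T]> {
  return keys.map(k => obj[k])
}

const values = pluck({ name: 'overleaf', stars: 1 }, 'name') // ['overleaf']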


@@ -747,9 +747,8 @@ export const setColumnWidth = (
const alignmentCharacter = getParagraphAlignmentCharacter(
columnSpecification[i]
)
columnSpecification[
i
].content = `${alignmentCharacter}{${widthValue}${suffix}}`
columnSpecification[i].content =
`${alignmentCharacter}{${widthValue}${suffix}}`
}
}
const newSpecification = generateColumnSpecification(columnSpecification)


@@ -28,10 +28,10 @@ export default {
ie_version: ieUpTo10
? doc.documentMode || 6
: ie11Up
? +ie11Up[1]
: ieEdge
? +ieEdge[1]
: 0,
? +ie11Up[1]
: ieEdge
? +ieEdge[1]
: 0,
gecko,
gecko_version: gecko
? +(/Firefox\/(\d+)/.exec(nav.userAgent) || [0, 0])[1]


@@ -221,7 +221,10 @@ class ChangeDeletedWidget extends WidgetType {
}
class ChangeCalloutWidget extends WidgetType {
constructor(public change: Change, public opType: string) {
constructor(
public change: Change,
public opType: string
) {
super()
}
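
Constructors whose parameters are TypeScript parameter properties are now expanded one parameter per line, as this widget shows. A compiling sketch with the WidgetType base class omitted and Change reduced to a local type:

type Change = { id: string }

class CalloutWidgetSketch {
  constructor(
    public change: Change,
    public opType: string
  ) {}
}

const widget = new CalloutWidgetSketch({ id: 'c1' }, 'insert')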


@@ -542,7 +542,7 @@ const createSelector = <
T extends string,
E extends HTMLElement = T extends keyof HTMLElementTagNameMap
? HTMLElementTagNameMap[T]
: HTMLElement
: HTMLElement,
>({
selector,
...elementSelector


@@ -13,8 +13,10 @@ import { SplitTestProvider } from '../../../../shared/context/split-test-context
export type PastedContent = { latex: string; text: string }
const pastedContentEffect =
StateEffect.define<{ content: PastedContent; formatted: boolean }>()
const pastedContentEffect = StateEffect.define<{
content: PastedContent
formatted: boolean
}>()
export const insertPastedContent = (
view: EditorView,
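
When only an object-literal type argument overflows, Prettier 3 hugs it against the call parentheses and expands its members, instead of breaking after the "=" as v2 did. A sketch with defineEffect standing in for CodeMirror's StateEffect.define (assumed shape, not the real API):

type Pasted = { latex: string; text: string }

function defineEffect<T>() {
  return (value: T) => ({ value })
}

const pastedEffectSketch = defineEffect<{
  content: Pasted
  formatted: boolean
}>()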


@@ -1,7 +1,10 @@
import { EditorView, WidgetType } from '@codemirror/view'
export class EnvironmentLineWidget extends WidgetType {
constructor(public environment: string, public line?: 'begin' | 'end') {
constructor(
public environment: string,
public line?: 'begin' | 'end'
) {
super()
}


@@ -371,9 +371,8 @@ export default App.controller('ReviewPanelController', [
ide.$scope.reviewPanel.overview.docsCollapsedState[doc.id] ==
null
) {
ide.$scope.reviewPanel.overview.docsCollapsedState[
doc.id
] = false
ide.$scope.reviewPanel.overview.docsCollapsedState[doc.id] =
false
}
if (doc.id !== $scope.editor.open_doc_id) {
// this is kept up to date in real-time, don't overwrite


@@ -93,8 +93,8 @@ App.factory('eventTracking', [
heartbeatsSent <= 2
? 30
: heartbeatsSent <= 6
? (heartbeatsSent - 2) * 60
: 300
? (heartbeatsSent - 2) * 60
: 300
nextHeartbeat = moment().add(backoffSecs, 'seconds').toDate()
},


@@ -94,8 +94,8 @@ export const ProjectProvider: FC = ({ children }) => {
forceNewCompileTimeout === 'active'
? 'active'
: forceNewCompileTimeout === 'changing'
? 'changing'
: undefined
? 'changing'
: undefined
const value = useMemo(() => {
return {


@@ -1,6 +1,6 @@
export function callFnsInSequence<
Args,
Fn extends ((...args: Args[]) => void) | void
Fn extends ((...args: Args[]) => void) | void,
>(...fns: Fn[]) {
return (...args: Args[]) => fns.forEach(fn => fn?.(...args))
}


@@ -155,7 +155,9 @@
position: relative;
height: 40px;
margin-top: 0;
transition: margin 0.15s ease-in-out, opacity 0.15s ease-in-out;
transition:
margin 0.15s ease-in-out,
opacity 0.15s ease-in-out;
padding-bottom: @padding-sm;
text-align: center;
background-image: linear-gradient(0, @ol-blue-gray-1, transparent);


@@ -47,7 +47,11 @@
}
.toolbar-pdf-hybrid {
.btn:not(.detach-compile-button):not(.btn-orphan):not(.detach-synctex-control):not(.switch-to-editor-btn):not(.split-menu-dropdown-toggle):not(.split-menu-button) {
.btn:not(.detach-compile-button):not(.btn-orphan):not(
.detach-synctex-control
):not(.switch-to-editor-btn):not(.split-menu-dropdown-toggle):not(
.split-menu-button
) {
display: inline-block;
color: @toolbar-btn-color;
background-color: transparent;
@@ -208,7 +212,9 @@
z-index: 10; // above the PDF viewer
.btn-group {
transition: opacity 0.5s ease, visibility 0 linear 0.5s;
transition:
opacity 0.5s ease,
visibility 0 linear 0.5s;
visibility: hidden;
opacity: 0;
}


@@ -231,10 +231,15 @@
border-radius: 3px;
color: #fff;
cursor: pointer;
transition: top @rp-entry-animation-speed, left 0.1s, right 0.1s;
transition:
top @rp-entry-animation-speed,
left 0.1s,
right 0.1s;
.no-animate & {
transition: left 0.1s, right 0.1s;
transition:
left 0.1s,
right 0.1s;
}
&-focused {
@@ -381,10 +386,15 @@
border-left: solid @rp-entry-ribbon-width transparent;
border-radius: 3px;
background-color: #fff;
transition: top @rp-entry-animation-speed, left 0.1s, right 0.1s;
transition:
top @rp-entry-animation-speed,
left 0.1s,
right 0.1s;
.no-animate & {
transition: left 0.1s, right 0.1s;
transition:
left 0.1s,
right 0.1s;
}
&-insert,
@@ -407,8 +417,11 @@
z-index: 3;
transform: scale(0.1);
transform-origin: 0 0;
transition: top 0.35s ease-out, left 0.35s ease-out,
transform 0.35s ease-out, opacity 0.35s ease-out 0.2s;
transition:
top 0.35s ease-out,
left 0.35s ease-out,
transform 0.35s ease-out,
opacity 0.35s ease-out 0.2s;
}
&-comment-resolved {
@@ -654,7 +667,9 @@
}
.rp-entry-callout {
transition: top @rp-entry-animation-speed, height @rp-entry-animation-speed;
transition:
top @rp-entry-animation-speed,
height @rp-entry-animation-speed;
.rp-state-current-file & {
position: absolute;


@@ -252,7 +252,9 @@
user-select: none;
color: @text-color;
border-radius: @btn-border-radius-base;
transition: color 0.12s ease-out, background-color 0.12s ease-out,
transition:
color 0.12s ease-out,
background-color 0.12s ease-out,
box-shadow 0.12s ease-out;
overflow: hidden;


@@ -590,7 +590,9 @@ span.plans-v2-license-picker-educational-discount-learn-more-container {
background-clip: padding-box; /* needed for firefox when there is bg color */
text-align: center;
&:not(.plans-v2-table-cell-before-green-highlighted-column):not(.plans-v2-table-green-highlighted):not(.plans-v2-table-divider-highlighted) {
&:not(.plans-v2-table-cell-before-green-highlighted-column):not(
.plans-v2-table-green-highlighted
):not(.plans-v2-table-divider-highlighted) {
border-right: 1px solid @ol-blue-gray-0;
@media (max-width: @screen-xs-max) {
@@ -623,7 +625,9 @@ span.plans-v2-license-picker-educational-discount-learn-more-container {
vertical-align: middle;
height: 100%;
&:last-child:not(.plans-v2-table-green-highlighted):not(.plans-v2-table-divider-highlighted) {
&:last-child:not(.plans-v2-table-green-highlighted):not(
.plans-v2-table-divider-highlighted
) {
border-right: 0;
}
}
@@ -649,7 +653,9 @@
}
}
&:not(.plans-v2-table-row-last-row-per-section):not(.plans-v2-table-divider):not(:last-of-type) {
&:not(.plans-v2-table-row-last-row-per-section):not(
.plans-v2-table-divider
):not(:last-of-type) {
th > .plans-v2-table-th > .plans-v2-table-th-content,
td > .plans-v2-table-feature-name,
td > .plans-v2-table-cell > .plans-v2-table-cell-content {


@@ -923,7 +923,10 @@
vertical-align: middle;
&:focus-visible {
box-shadow: 0 0 0 2px @white, 0 0 0 3px @blue-50, 0 0 0 5px @blue-30;
box-shadow:
0 0 0 2px @white,
0 0 0 3px @blue-50,
0 0 0 5px @blue-30;
}
&.more-button {


@@ -214,7 +214,9 @@
display: inline-block;
transform: translate(150px, 0);
opacity: 0;
transition: transform 0.8s ease 0s, opacity 0.8s ease 0s;
transition:
transform 0.8s ease 0s,
opacity 0.8s ease 0s;
&:nth-child(2) {
transition-delay: 0.5s, 0.5s;
}
@@ -244,7 +246,9 @@
margin-bottom: 2em;
transform: translate(0, 100px);
opacity: 0;
transition: transform 0.8s ease 1s, opacity 0.8s ease 1s;
transition:
transform 0.8s ease 1s,
opacity 0.8s ease 1s;
box-shadow: none;
max-width: none;
@@ -271,7 +275,9 @@
}
}
.rfp-video-anim {
transition: transform 0.8s ease, opacity 0.8s ease;
transition:
transform 0.8s ease,
opacity 0.8s ease;
transform: translate(100%, 0);
opacity: 0;
}
@@ -294,7 +300,7 @@
border-left: 0;
max-width: 30em;
font-size: @rfp-lead-size;
quotes: '\201C''\201D';
quotes: '\201C' '\201D';
box-shadow: @rfp-card-shadow;
border-radius: @rfp-border-radius;
background-color: #fff;
@@ -333,7 +339,7 @@
border-left: 0;
margin: 0 auto;
padding: 0;
quotes: '\201C''\201D';
quotes: '\201C' '\201D';
font-size: @rfp-lead-size;
@media (min-width: @screen-md-min) {
display: flex;
@@ -454,7 +460,9 @@
position: absolute;
left: 50%;
text-transform: uppercase;
transition: opacity 0.25s, transform 0.25s;
transition:
opacity 0.25s,
transform 0.25s;
transform: translate(-50%, 100%);
opacity: 0;
font-size: 0.5em;


@@ -140,7 +140,9 @@
.info-card {
border-radius: 8px;
height: 100%;
box-shadow: 0px 2px 4px 0px #1e253014, 0px 4px 12px 0px #1e25301f;
box-shadow:
0px 2px 4px 0px #1e253014,
0px 4px 12px 0px #1e25301f;
border-top: 8px solid var(--sapphire-blue);
padding: 32px 40px 32px 40px;
@@ -345,7 +347,8 @@
}
video {
box-shadow: 0px 4px 6px 0px rgba(30, 37, 48, 0.12),
box-shadow:
0px 4px 6px 0px rgba(30, 37, 48, 0.12),
0px 8px 16px 0px rgba(30, 37, 48, 0.12);
max-height: 100%;
width: auto;
@@ -998,7 +1001,7 @@
font-size: 1.875rem;
line-height: 1.333;
font-weight: 600;
quotes: '\201C''\201D'; // override default quotes
quotes: '\201C' '\201D'; // override default quotes
padding: unset;
margin: unset;
font-family: 'Noto Sans', sans-serif;


@@ -5,12 +5,14 @@
// Tooltips, Callouts, Dropdowns, etc.
@mixin shadow-md {
box-shadow: 0px 4px 24px rgba(30, 37, 48, 0.12),
box-shadow:
0px 4px 24px rgba(30, 37, 48, 0.12),
0px 1px 4px rgba(30, 37, 48, 0.08);
}
// Modals, drawers
@mixin shadow-lg {
box-shadow: 0px 8px 24px rgba(30, 37, 48, 0.16),
box-shadow:
0px 8px 24px rgba(30, 37, 48, 0.16),
0px 4px 8px rgba(30, 37, 48, 0.16);
}


@@ -1,4 +1,6 @@
.expand-collapse-container {
overflow: hidden;
transition: height 0.15s ease-in-out, width 0.15s ease-in-out;
transition:
height 0.15s ease-in-out,
width 0.15s ease-in-out;
}


@@ -331,7 +331,8 @@ input[type='checkbox'],
); // Redeclare so transitions work
&:focus {
border-color: darken(@state-danger-text, 10%);
@shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075),
@shadow:
inset 0 1px 1px rgba(0, 0, 0, 0.075),
0 0 6px lighten(@state-danger-text, 20%);
.box-shadow(@shadow);
}


@@ -19,7 +19,9 @@
padding: 1px;
background-color: @rp-highlight-blue;
border-radius: 0.875em;
transition: background 0.15s ease, border-color 0.15s ease;
transition:
background 0.15s ease,
border-color 0.15s ease;
&::before {
content: '';
@@ -31,7 +33,10 @@
top: 1px;
background-color: #fff;
border-radius: 0.875em;
transition: background-color 0.15s ease, color 0.15s ease, left 0.15s ease;
transition:
background-color 0.15s ease,
color 0.15s ease,
left 0.15s ease;
}
}


@@ -324,7 +324,8 @@
padding: 10px @navbar-padding-horizontal;
border-top: 1px solid transparent;
border-bottom: 1px solid transparent;
@shadow: inset 0 1px 0 rgba(255, 255, 255, 0.1),
@shadow:
inset 0 1px 0 rgba(255, 255, 255, 0.1),
0 1px 0 rgba(255, 255, 255, 0.1);
.box-shadow(@shadow);


@@ -34,7 +34,8 @@
background-color: transparent;
> .btn {
border-color: @input-border-focus;
box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075),
box-shadow:
inset 0 1px 1px rgba(0, 0, 0, 0.075),
0 0 8px fade(@input-border-focus, 60%);
padding-top: @input-suggestion-v-offset;
}


@@ -328,7 +328,7 @@ blockquote {
padding: (@line-height-computed / 2) @line-height-computed;
margin: 0 0 @line-height-computed;
font-size: @blockquote-font-size;
quotes: '\201C''\201D''\2018''\2019';
quotes: '\201C' '\201D' '\2018' '\2019';
border-left: 5px solid @blockquote-border-color;
&:before {
content: open-quote;


@@ -962,7 +962,8 @@
@btn-border-bottom-width: 0;
// Shadows
@box-shadow: 0px 4px 12px rgba(30, 37, 48, 0.12),
@box-shadow:
0px 4px 12px rgba(30, 37, 48, 0.12),
0px 2px 4px rgba(30, 37, 48, 0.08);
// Cards


@@ -787,7 +787,8 @@
@btn-secondary-hover-bg-color: @neutral-20;
// Shadows
@box-shadow: 0px 4px 12px rgba(30, 37, 48, 0.12),
@box-shadow:
0px 4px 12px rgba(30, 37, 48, 0.12),
0px 2px 4px rgba(30, 37, 48, 0.08);
// Cards


@@ -156,7 +156,8 @@ body > .select2-container.open {
text-decoration: none;
outline: 5px auto -webkit-focus-ring-color;
outline-offset: -2px;
box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075),
box-shadow:
inset 0 1px 1px rgba(0, 0, 0, 0.075),
0 0 8px rgba(102, 175, 233, 0.6);
}


@@ -45,9 +45,8 @@ async function checkActiveSubscriptions() {
if (subscriptions.length) {
const groupIds = subscriptions.map(sub => sub._id)
const bigQueryGroupMemberships = await fetchBigQueryMembershipStatuses(
groupIds
)
const bigQueryGroupMemberships =
await fetchBigQueryMembershipStatuses(groupIds)
const membershipsByGroupId = _.groupBy(
bigQueryGroupMemberships,
'group_id'
@@ -85,9 +84,8 @@ async function checkDeletedSubscriptions() {
if (deletedSubscriptions.length) {
const groupIds = deletedSubscriptions.map(sub => sub._id.toString())
const bigQueryGroupMemberships = await fetchBigQueryMembershipStatuses(
groupIds
)
const bigQueryGroupMemberships =
await fetchBigQueryMembershipStatuses(groupIds)
const membershipsByGroupId = _.groupBy(
bigQueryGroupMemberships,


@@ -68,9 +68,8 @@ async function main() {
new Set(docs.map(doc => doc.project_id.toString()))
).map(id => new ObjectId(id))
console.log('Checking projects', JSON.stringify(projectIds))
const { nProjectsWithOrphanedDocs, nDeletedDocs } = await processBatch(
projectIds
)
const { nProjectsWithOrphanedDocs, nDeletedDocs } =
await processBatch(projectIds)
nProjectsProcessedTotal += projectIds.length
nProjectsWithOrphanedDocsTotal += nProjectsWithOrphanedDocs
nDeletedDocsTotal += nDeletedDocs


@@ -8,9 +8,8 @@ const PROJECT_ID = ARGV.shift()
const FILE_NAMES_TO_RESTORE = ARGV
async function main() {
const deletedDocs = await DocstoreManager.promises.getAllDeletedDocs(
PROJECT_ID
)
const deletedDocs =
await DocstoreManager.promises.getAllDeletedDocs(PROJECT_ID)
const docsToRestore = deletedDocs.filter(doc =>
FILE_NAMES_TO_RESTORE.includes(doc.name)
)


@@ -19,9 +19,8 @@ waitForDb()
})
async function main() {
const result = await SAMLUserIdMigrationHandler.promises.checkMigration(
institutionId
)
const result =
await SAMLUserIdMigrationHandler.promises.checkMigration(institutionId)
if (emitUsers) {
console.log(


@@ -19,9 +19,8 @@ waitForDb()
})
async function main() {
const result = await SAMLUserIdMigrationHandler.promises.removeNotMigrated(
institutionId
)
const result =
await SAMLUserIdMigrationHandler.promises.removeNotMigrated(institutionId)
if (emitUsers) {
console.log(


@@ -11,7 +11,7 @@ const testData = [1, 2, 3].map(index => ({
sub: `Subtitle ${index}`,
}))
type RenderProps = Partial<SelectProps<typeof testData[number]>> & {
type RenderProps = Partial<SelectProps<(typeof testData)[number]>> & {
onSubmit?: (formData: object) => void
}


@@ -803,9 +803,8 @@ describe('<ProjectListRoot />', function () {
fireEvent.click(allCheckboxes[1]) // select a project owned by the current user
const actionsToolbar = screen.getAllByRole('toolbar')[0]
const moreDropdown = await within(
actionsToolbar
).findByText<HTMLElement>('More')
const moreDropdown =
await within(actionsToolbar).findByText<HTMLElement>('More')
fireEvent.click(moreDropdown)
const editButton =
@@ -851,9 +850,8 @@ describe('<ProjectListRoot />', function () {
status: 200,
}
)
const moreDropdown = await within(
actionsToolbar
).findByText<HTMLElement>('More')
const moreDropdown =
await within(actionsToolbar).findByText<HTMLElement>('More')
fireEvent.click(moreDropdown)
const renameButton =
@@ -888,9 +886,8 @@ describe('<ProjectListRoot />', function () {
within(table).getByText(newProjectName)
expect(within(table).queryByText(oldName)).to.be.null
const allCheckboxesInTable = await within(
table
).findAllByRole<HTMLInputElement>('checkbox')
const allCheckboxesInTable =
await within(table).findAllByRole<HTMLInputElement>('checkbox')
const allCheckboxesChecked = allCheckboxesInTable.filter(
c => c.checked
)

Some files were not shown because too many files have changed in this diff.