remove excessive logging

GitOrigin-RevId: 62024bbe0415a4fdae66eb1b9c6707e5faec7cd1
Ersun Warncke authored 2019-11-19 10:19:08 -04:00, committed by sharelatex
parent debb82960c
commit 2c335802ca
83 changed files with 26 additions and 955 deletions
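
Nearly all of the deletions below follow one pattern: debug-level logger.log and logger.info calls that dumped request arguments or results on every invocation are removed, logger.warn and logger.err calls on error paths are kept, and modules left with no logging at all also drop their require('logger-sharelatex'). A minimal sketch of that pattern follows; getThing and fetchThing are illustrative names, not code from any file in this commit.

const logger = require('logger-sharelatex')

// Before: every call produced two debug log lines.
function getThing(project_id, callback) {
  logger.log({ project_id }, 'getting thing') // removed as noise
  fetchThing(project_id, function(err, thing) {
    if (err != null) {
      logger.warn({ err, project_id }, 'error getting thing') // kept: error path
      return callback(err)
    }
    logger.log({ project_id, thing }, 'got thing') // removed as noise
    return callback(null, thing)
  })
}

// After: only the warning on the error path remains.
function getThing(project_id, callback) {
  fetchThing(project_id, function(err, thing) {
    if (err != null) {
      logger.warn({ err, project_id }, 'error getting thing')
      return callback(err)
    }
    return callback(null, thing)
  })
}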

View file

@ -113,15 +113,6 @@ module.exports = AnnouncementsHandler = {
return announcement
})
logger.log(
{
announcementsLength:
announcements != null ? announcements.length : undefined,
user_id: user != null ? user._id : undefined
},
'returning announcements'
)
return callback(null, announcements)
}
)

View file

@ -42,7 +42,6 @@ module.exports = BlogController = {
return BlogController._directProxy(blogUrl, res)
}
logger.log({ url }, 'proxying request to blog api')
return request.get(blogUrl, function(err, r, data) {
if (
(r != null ? r.statusCode : undefined) === 404 ||

View file

@ -32,13 +32,6 @@ module.exports = BlogHandler = {
if (res.statusCode !== 200) {
return callback(new Error('blog announcement returned non 200'))
}
logger.log(
{
announcementsLength:
announcements != null ? announcements.length : undefined
},
'announcements returned'
)
announcements = _.map(announcements, function(announcement) {
announcement.date = new Date(announcement.date)
return announcement

View file

@ -62,7 +62,6 @@ module.exports = ChatController = {
getMessages(req, res, next) {
const { project_id } = req.params
const { query } = req
logger.log({ project_id, query }, 'getting messages')
return ChatApiHandler.getGlobalMessages(
project_id,
query.limit,
@ -77,10 +76,6 @@ module.exports = ChatController = {
if (err != null) {
return next(err)
}
logger.log(
{ length: messages != null ? messages.length : undefined },
'sending messages to client'
)
return res.json(messages)
}
)

View file

@ -70,7 +70,6 @@ const ClsiManager = {
)
}
}
logger.log({ projectId }, 'sending compile to CLSI')
ClsiManager._sendBuiltRequest(
projectId,
userId,
@ -96,11 +95,6 @@ const ClsiManager = {
if (options == null) {
options = {}
}
logger.log(
{ submissionId },
'sending external compile to CLSI',
clsiRequest
)
ClsiManager._sendBuiltRequest(
submissionId,
null,
@ -217,18 +211,6 @@ const ClsiManager = {
}).withCause(err)
)
}
if (response != null) {
logger.log(
{
projectId,
outputFilesLength:
response.outputFiles && response.outputFiles.length,
status: response.status,
compile_status: response.compile && response.compile.status
},
'received compile response from CLSI'
)
}
ClsiCookieManager._getServerId(projectId, (err, clsiServerId) => {
if (err != null) {
return callback(
@ -522,15 +504,6 @@ const ClsiManager = {
// note: we don't bail out when there's an error getting
// incremental files from the docupdater, we just fall back
// to a normal compile below
} else {
logger.log(
{
projectId,
projectStateHash,
docs: docUpdaterDocs != null
},
'checked project state'
)
}
// see if we can send an incremental update to the CLSI
if (
@ -791,7 +764,6 @@ const ClsiManager = {
}
if (rootResourcePath == null) {
if (hasMainFile) {
logger.log({ projectId }, 'no root document found, setting to main.tex')
rootResourcePath = 'main.tex'
} else if (numberOfDocsInProject === 1) {
// only one file, must be the main document
@ -799,10 +771,6 @@ const ClsiManager = {
// Remove leading /
rootResourcePath = path.replace(/^\//, '')
}
logger.warn(
{ projectId, rootResourcePath },
'no root document found, single document in project'
)
} else {
return callback(
new OError({

View file

@ -74,7 +74,6 @@ module.exports = CompileController = {
if (req.body != null ? req.body.incrementalCompilesEnabled : undefined) {
options.incrementalCompilesEnabled = true
}
logger.log({ options, project_id, user_id }, 'got compile request')
return CompileManager.compile(project_id, user_id, options, function(
error,
status,
@ -106,7 +105,6 @@ module.exports = CompileController = {
}
const project_id = req.params.Project_id
const user_id = AuthenticationController.getLoggedInUserId(req)
logger.log({ project_id, user_id }, 'stop compile request')
return CompileManager.stopCompile(project_id, user_id, function(error) {
if (error != null) {
return next(error)
@ -145,7 +143,6 @@ module.exports = CompileController = {
options.timeout =
(req.body != null ? req.body.timeout : undefined) ||
Settings.defaultFeatures.compileTimeout
logger.log({ options, submission_id }, 'got compileSubmission request')
return ClsiManager.sendExternalRequest(
submission_id,
req.body,
@ -154,10 +151,6 @@ module.exports = CompileController = {
if (error != null) {
return next(error)
}
logger.log(
{ submission_id, files: outputFiles },
'compileSubmission output files'
)
res.contentType('application/json')
return res.status(200).send(
JSON.stringify({
@ -221,10 +214,8 @@ module.exports = CompileController = {
const filename = `${CompileController._getSafeProjectName(project)}.pdf`
if (req.query.popupDownload) {
logger.log({ project_id }, 'download pdf as popup download')
res.setContentDisposition('attachment', { filename })
} else {
logger.log({ project_id }, 'download pdf to embed in browser')
res.setContentDisposition('', { filename })
}
@ -491,7 +482,6 @@ module.exports = CompileController = {
}
const compilerUrl = Settings.apis.clsi.url
url = `${compilerUrl}${url}`
logger.log({ url }, 'proxying to CLSI')
const oneMinute = 60 * 1000
// the base request
const options = { url, method: req.method, timeout: oneMinute, jar }

View file

@ -39,7 +39,6 @@ module.exports = CompileManager = {
return _callback(...Array.from(args || []))
}
logger.log({ project_id, user_id }, 'compiling project')
return CompileManager._checkIfRecentlyCompiled(
project_id,
user_id,
@ -105,7 +104,6 @@ module.exports = CompileManager = {
if (error != null) {
return callback(error)
}
logger.log({ files: outputFiles }, 'output files')
return callback(
null,
status,

View file

@ -25,7 +25,6 @@ module.exports = ContactManager = {
if (callback == null) {
callback = function(error, contacts) {}
}
logger.log({ user_id }, 'getting user contacts')
const url = `${settings.apis.contacts.url}/user/${user_id}/contacts`
return request.get(
{
@ -61,7 +60,6 @@ module.exports = ContactManager = {
if (callback == null) {
callback = function(error) {}
}
logger.log({ user_id, contact_id }, 'add user contact')
const url = `${settings.apis.contacts.url}/user/${user_id}/contacts`
return request.post(
{

View file

@ -25,7 +25,6 @@ module.exports = DocstoreManager = {
if (callback == null) {
callback = function(error) {}
}
logger.log({ project_id, doc_id }, 'deleting doc in docstore api')
const url = `${
settings.apis.docstore.url
}/project/${project_id}/doc/${doc_id}`
@ -63,7 +62,6 @@ module.exports = DocstoreManager = {
if (callback == null) {
callback = function(error) {}
}
logger.log({ project_id }, 'getting all docs for project in docstore api')
const url = `${settings.apis.docstore.url}/project/${project_id}/doc`
return request.get(
{
@ -95,10 +93,6 @@ module.exports = DocstoreManager = {
if (callback == null) {
callback = function(error) {}
}
logger.log(
{ project_id },
'getting all doc ranges for project in docstore api'
)
const url = `${settings.apis.docstore.url}/project/${project_id}/ranges`
return request.get(
{
@ -137,7 +131,6 @@ module.exports = DocstoreManager = {
callback = options
options = {}
}
logger.log({ project_id, doc_id, options }, 'getting doc in docstore api')
let url = `${
settings.apis.docstore.url
}/project/${project_id}/doc/${doc_id}`
@ -185,7 +178,6 @@ module.exports = DocstoreManager = {
if (callback == null) {
callback = function(error, modified, rev) {}
}
logger.log({ project_id, doc_id }, 'updating doc in docstore api')
const url = `${
settings.apis.docstore.url
}/project/${project_id}/doc/${doc_id}`

View file

@ -30,7 +30,6 @@ module.exports = DocumentUpdaterHandler = {
if (callback == null) {
callback = function(error) {}
}
logger.log({ project_id }, 'flushing project from document updater')
return DocumentUpdaterHandler._makeRequest(
{
path: `/project/${project_id}/flush`,
@ -77,7 +76,6 @@ module.exports = DocumentUpdaterHandler = {
if (callback == null) {
callback = function(error) {}
}
logger.log({ project_id, doc_id }, 'flushing doc from document updater')
return DocumentUpdaterHandler._makeRequest(
{
path: `/project/${project_id}/doc/${doc_id}/flush`,
@ -93,7 +91,6 @@ module.exports = DocumentUpdaterHandler = {
if (callback == null) {
callback = function() {}
}
logger.log({ project_id, doc_id }, 'deleting doc from document updater')
return DocumentUpdaterHandler._makeRequest(
{
path: `/project/${project_id}/doc/${doc_id}`,
@ -109,7 +106,6 @@ module.exports = DocumentUpdaterHandler = {
if (callback == null) {
callback = function(error, doclines, version, ranges, ops) {}
}
logger.log({ project_id, doc_id }, 'getting doc from document updater')
return DocumentUpdaterHandler._makeRequest(
{
path: `/project/${project_id}/doc/${doc_id}?fromVersion=${fromVersion}`,
@ -130,10 +126,6 @@ module.exports = DocumentUpdaterHandler = {
if (callback == null) {
callback = function(error) {}
}
logger.log(
{ project_id, doc_id, source, user_id },
'setting doc in document updater'
)
return DocumentUpdaterHandler._makeRequest(
{
path: `/project/${project_id}/doc/${doc_id}`,
@ -161,7 +153,6 @@ module.exports = DocumentUpdaterHandler = {
const url = `${
settings.apis.documentupdater.url
}/project/${project_id}/get_and_flush_if_old?state=${projectStateHash}`
logger.log({ project_id }, 'getting project docs from document updater')
return request.post(url, function(error, res, body) {
timer.done()
if (error != null) {
@ -181,10 +172,6 @@ module.exports = DocumentUpdaterHandler = {
return callback()
} else if (res.statusCode >= 200 && res.statusCode < 300) {
let docs
logger.log(
{ project_id },
'got project docs from document document updater'
)
try {
docs = JSON.parse(body)
} catch (error1) {
@ -210,8 +197,6 @@ module.exports = DocumentUpdaterHandler = {
if (callback == null) {
callback = function(error) {}
}
logger.log({ project_id }, 'clearing project state from document updater')
return DocumentUpdaterHandler._makeRequest(
{
path: `/project/${project_id}/clearState`,
@ -230,8 +215,6 @@ module.exports = DocumentUpdaterHandler = {
if (callback == null) {
callback = function(error) {}
}
logger.log({ project_id, doc_id }, `accepting ${change_ids.length} changes`)
return DocumentUpdaterHandler._makeRequest(
{
path: `/project/${project_id}/doc/${doc_id}/change/accept`,
@ -251,10 +234,6 @@ module.exports = DocumentUpdaterHandler = {
callback = function(error) {}
}
const timer = new metrics.Timer('delete-thread')
logger.log(
{ project_id, doc_id, thread_id },
'deleting comment range in document updater'
)
return DocumentUpdaterHandler._makeRequest(
{
path: `/project/${project_id}/doc/${doc_id}/comment/${thread_id}`,
@ -267,10 +246,6 @@ module.exports = DocumentUpdaterHandler = {
},
resyncProjectHistory(project_id, projectHistoryId, docs, files, callback) {
logger.info(
{ project_id, docs, files },
'resyncing project history in doc updater'
)
return DocumentUpdaterHandler._makeRequest(
{
path: `/project/${project_id}/history/resync`,
@ -328,7 +303,6 @@ module.exports = DocumentUpdaterHandler = {
return callback(new Error('did not receive project version in changes'))
}
logger.log({ project_id }, 'updating project structure in doc updater')
return DocumentUpdaterHandler._makeRequest(
{
path: `/project/${project_id}`,

View file

@ -28,10 +28,6 @@ module.exports = {
const { doc_id } = req.params
const plain =
__guard__(req != null ? req.query : undefined, x => x.plain) === 'true'
logger.log(
{ doc_id, project_id },
'receiving get document request from api (docupdater)'
)
return ProjectGetter.getProject(
project_id,
{ rootFolder: true, overleaf: true },
@ -100,10 +96,6 @@ module.exports = {
const project_id = req.params.Project_id
const { doc_id } = req.params
const { lines, version, ranges, lastUpdatedAt, lastUpdatedBy } = req.body
logger.log(
{ doc_id, project_id },
'receiving set document request from api (docupdater)'
)
return ProjectEntityUpdateHandler.updateDocLines(
project_id,
doc_id,

View file

@ -22,7 +22,6 @@ module.exports = ProjectDownloadsController = {
downloadProject(req, res, next) {
const project_id = req.params.Project_id
Metrics.inc('zip-downloads')
logger.log({ project_id }, 'downloading project')
return DocumentUpdaterHandler.flushProjectToMongo(project_id, function(
error
) {
@ -56,7 +55,6 @@ module.exports = ProjectDownloadsController = {
downloadMultipleProjects(req, res, next) {
const project_ids = req.query.project_ids.split(',')
Metrics.inc('zip-downloads-multiple')
logger.log({ project_ids }, 'downloading multiple projects')
return DocumentUpdaterHandler.flushMultipleProjectsToMongo(
project_ids,
function(error) {

View file

@ -37,8 +37,6 @@ module.exports = ProjectZipStreamManager = {
)
callback(null, archive)
logger.log({ project_ids }, 'creating zip stream of multiple projects')
const jobs = []
for (let project_id of Array.from(project_ids || [])) {
;(project_id =>

View file

@ -58,10 +58,6 @@ const EditorController = {
callback = function(error, doc) {}
}
docName = docName.trim()
logger.log(
{ project_id, folder_id, docName, source },
'sending new doc to project'
)
Metrics.inc('editor.add-doc')
return ProjectEntityUpdateHandler.addDocWithRanges(
project_id,
@ -104,18 +100,6 @@ const EditorController = {
callback = function(error, file) {}
}
fileName = fileName.trim()
logger.log(
{
project_id,
folder_id,
fileName,
fsPath,
linkedFileData,
source,
user_id
},
'sending new file to project'
)
Metrics.inc('editor.add-file')
return ProjectEntityUpdateHandler.addFile(
project_id,
@ -323,10 +307,6 @@ const EditorController = {
callback = function(error, folder) {}
}
folderName = folderName.trim()
logger.log(
{ project_id, folder_id, folderName, source },
'sending new folder to project'
)
Metrics.inc('editor.add-folder')
return ProjectEntityUpdateHandler.addFolder(
project_id,
@ -387,10 +367,6 @@ const EditorController = {
if (callback == null) {
callback = function(error) {}
}
logger.log(
{ project_id, entity_id, entityType, source },
'start delete process of entity'
)
Metrics.inc('editor.delete-entity')
return ProjectEntityUpdateHandler.deleteEntity(
project_id,
@ -476,7 +452,6 @@ const EditorController = {
deleteProject(project_id, callback) {
Metrics.inc('editor.delete-project')
logger.log({ project_id }, 'recived message to delete project')
return ProjectDeleter.deleteProject(project_id, callback)
},
@ -486,10 +461,6 @@ const EditorController = {
}
newName = sanitize.escape(newName)
Metrics.inc('editor.rename-entity')
logger.log(
{ entity_id, entity_id, entity_id },
'reciving new name for entity for project'
)
return ProjectEntityUpdateHandler.renameEntity(
project_id,
entity_id,
@ -577,7 +548,6 @@ const EditorController = {
if (err != null) {
return callback(err)
}
logger.log({ compiler, project_id }, 'setting compiler')
EditorRealTimeController.emitToRoom(
project_id,
'compilerUpdated',
@ -597,7 +567,6 @@ const EditorController = {
if (err != null) {
return callback(err)
}
logger.log({ imageName, project_id }, 'setting imageName')
EditorRealTimeController.emitToRoom(
project_id,
'imageNameUpdated',
@ -618,10 +587,6 @@ const EditorController = {
if (err != null) {
return callback(err)
}
logger.log(
{ languageCode, project_id },
'setting languageCode for spell check'
)
EditorRealTimeController.emitToRoom(
project_id,
'spellCheckLanguageUpdated',
@ -712,10 +677,6 @@ const EditorController = {
if (callback == null) {
callback = function(error) {}
}
logger.log(
{ project_id, folder, parentFolder_id: folder_id },
'sending newly created folder out to users'
)
EditorRealTimeController.emitToRoom(
project_id,
'reciveNewFolder',

View file

@ -21,7 +21,6 @@ module.exports = EditorHttpController = {
if (userId === 'anonymous-user') {
userId = null
}
logger.log({ userId, projectId }, 'join project request')
Metrics.inc('editor.join-project')
EditorHttpController._buildJoinProjectView(req, projectId, userId, function(
error,
@ -56,7 +55,6 @@ module.exports = EditorHttpController = {
if (callback == null) {
callback = function() {}
}
logger.log({ projectId, userId }, 'building the joinProject view')
ProjectGetter.getProjectWithoutDocLines(projectId, function(
error,
project
@ -99,16 +97,6 @@ module.exports = EditorHttpController = {
if (error) {
return callback(error)
}
logger.log(
{
projectId,
userId,
memberCount: members.length,
inviteCount: invites.length,
privilegeLevel
},
'returning project model view'
)
CollaboratorsHandler.userIsTokenMember(
userId,
projectId,
@ -150,10 +138,7 @@ module.exports = EditorHttpController = {
const { name } = req.body
const parentFolderId = req.body.parent_folder_id
const userId = AuthenticationController.getLoggedInUserId(req)
logger.log(
{ projectId, name, parentFolderId },
'getting request to add doc to project'
)
if (!EditorHttpController._nameIsAcceptableLength(name)) {
return res.sendStatus(400)
}

View file

@ -73,10 +73,6 @@ function getClient() {
async function sendEmail(options) {
try {
logger.log(
{ receiver: options.to, subject: options.subject },
'sending email'
)
const canContinue = await checkCanSendEmail(options)
if (!canContinue) {
logger.log(
@ -104,7 +100,6 @@ async function sendEmail(options) {
sendMailOptions.textEncoding = EMAIL_SETTINGS.textEncoding
}
await client.sendMail(sendMailOptions)
logger.log(`Message sent to ${options.to}`)
} catch (err) {
throw new OError({
message: 'error sending message'

View file

@ -10,7 +10,6 @@ module.exports = {
const fileId = req.params.File_id
const queryString = req.query
const userAgent = req.get('User-Agent')
logger.log({ projectId, fileId, queryString }, 'file download')
ProjectLocator.findElement(
{ project_id: projectId, element_id: fileId, type: 'file' },
function(err, file) {
@ -34,10 +33,6 @@ module.exports = {
}
// mobile safari will try to render html files, prevent this
if (isMobileSafari(userAgent) && isHtml(file)) {
logger.log(
{ filename: file.name, userAgent },
'sending html file to mobile-safari as plain text'
)
res.setHeader('Content-Type', 'text/plain')
}
res.setContentDisposition('attachment', { filename: file.name })

View file

@ -66,10 +66,6 @@ const FileStoreHandler = {
}
const fileRef = new File(Object.assign({}, fileArgs, { hash: hashValue }))
const fileId = fileRef._id
logger.log(
{ projectId, fileId, fsPath, hash: hashValue, fileRef },
'uploading file from disk'
)
const readStream = fs.createReadStream(fsPath)
readStream.on('error', function(err) {
logger.warn(
@ -117,10 +113,6 @@ const FileStoreHandler = {
},
getFileStream(projectId, fileId, query, callback) {
logger.log(
{ projectId, fileId, query },
'getting file stream from file store'
)
let queryString = ''
if (query != null && query['format'] != null) {
queryString = `?format=${query['format']}`

View file

@ -69,7 +69,6 @@ module.exports = HealthCheckController = {
},
checkMongo(req, res, next) {
logger.log('running mongo health check')
return UserGetter.getUserEmail(settings.smokeTest.userId, function(
err,
email
@ -84,7 +83,6 @@ module.exports = HealthCheckController = {
)
return res.sendStatus(500)
} else {
logger.log({ email }, 'mongo health check passed')
return res.sendStatus(200)
}
})

View file

@ -59,7 +59,6 @@ module.exports = HistoryController = {
if (req.useProjectHistory != null) {
return next()
} else {
logger.log({ project_id }, 'project history not enabled')
return res.sendStatus(404)
}
},
@ -72,7 +71,6 @@ module.exports = HistoryController = {
const url =
HistoryController.buildHistoryServiceUrl(req.useProjectHistory) + req.url
logger.log({ url }, 'proxying to history api')
const getReq = request({
url,
method: req.method,
@ -94,7 +92,6 @@ module.exports = HistoryController = {
const user_id = AuthenticationController.getLoggedInUserId(req)
const url =
HistoryController.buildHistoryServiceUrl(req.useProjectHistory) + req.url
logger.log({ url }, 'proxying to history api')
return HistoryController._makeRequest(
{
url,
@ -150,7 +147,6 @@ module.exports = HistoryController = {
const { project_id } = req.params
const { version, pathname } = req.body
const user_id = AuthenticationController.getLoggedInUserId(req)
logger.log({ project_id, version, pathname }, 'restoring file from v2')
return RestoreManager.restoreFileFromV2(
user_id,
project_id,
@ -175,10 +171,6 @@ module.exports = HistoryController = {
if (name == null) {
return res.sendStatus(400) // Malformed request
}
logger.log(
{ project_id, doc_id, user_id },
'restoring doc from v1 deleted doc'
)
return RestoreManager.restoreDocFromDeletedDoc(
user_id,
project_id,
@ -278,7 +270,6 @@ module.exports = HistoryController = {
downloadZipOfVersion(req, res, next) {
const { project_id, version } = req.params
logger.log({ project_id, version }, 'got request for zip file at version')
return ProjectDetailsHandler.getDetails(project_id, function(err, project) {
if (err != null) {
return next(err)
@ -310,10 +301,6 @@ module.exports = HistoryController = {
const url = `${
settings.apis.v1_history.url
}/projects/${v1_project_id}/version/${version}/zip`
logger.log(
{ v1_project_id, version, url },
'getting s3 url from history api'
)
const options = {
auth: {
user: settings.apis.v1_history.user,

View file

@ -11,11 +11,9 @@
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const InactiveProjectManager = require('./InactiveProjectManager')
const logger = require('logger-sharelatex')
module.exports = {
deactivateOldProjects(req, res) {
logger.log('recived request to deactivate old projects')
const numberOfProjectsToArchive = parseInt(
req.body.numberOfProjectsToArchive,
10
@ -36,7 +34,6 @@ module.exports = {
deactivateProject(req, res) {
const { project_id } = req.params
logger.log({ project_id }, 'recived request to deactivating project')
return InactiveProjectManager.deactivateProject(project_id, function(err) {
if (err != null) {
return res.sendStatus(500)

View file

@ -60,10 +60,6 @@ module.exports = InactiveProjectManager = {
daysOld = 360
}
const oldProjectDate = new Date() - MILISECONDS_IN_DAY * daysOld
logger.log(
{ oldProjectDate, limit, daysOld },
'starting process of deactivating old projects'
)
return Project.find()
.where('lastOpened')
.lt(oldProjectDate)

View file

@ -61,10 +61,6 @@ module.exports = LinkedFilesController = {
const { project_id } = req.params
const { name, provider, data, parent_folder_id } = req.body
const user_id = AuthenticationController.getLoggedInUserId(req)
logger.log(
{ project_id, name, provider, data, parent_folder_id, user_id },
'create linked file request'
)
const Agent = LinkedFilesController._getAgent(provider)
if (Agent == null) {
@ -91,7 +87,6 @@ module.exports = LinkedFilesController = {
refreshLinkedFile(req, res, next) {
const { project_id, file_id } = req.params
const user_id = AuthenticationController.getLoggedInUserId(req)
logger.log({ project_id, file_id, user_id }, 'refresh linked file request')
return LinkedFilesHandler.getFileById(project_id, file_id, function(
err,

View file

@ -10,7 +10,6 @@
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const logger = require('logger-sharelatex')
const NotificationsHandler = require('./NotificationsHandler')
const request = require('request')
const settings = require('settings-sharelatex')
@ -85,15 +84,6 @@ module.exports = {
projectId: project._id.toString(),
token: invite.token
}
logger.log(
{
user_id: user._id,
project_id: project._id,
invite_id: invite._id,
key: this.key
},
'creating project invite notification for user'
)
return NotificationsHandler.createNotification(
user._id,
this.key,
@ -143,10 +133,6 @@ module.exports = {
university_name: body.name,
content: body.enrolment_ad_html
}
logger.log(
{ user_id: userId, key },
'creating notification key for user'
)
return NotificationsHandler.createNotification(
userId,
key,

View file

@ -12,7 +12,6 @@
*/
const NotificationsHandler = require('./NotificationsHandler')
const AuthenticationController = require('../Authentication/AuthenticationController')
const logger = require('logger-sharelatex')
const _ = require('underscore')
module.exports = {
@ -37,6 +36,5 @@ module.exports = {
const user_id = AuthenticationController.getLoggedInUserId(req)
const { notification_id } = req.params
NotificationsHandler.markAsRead(user_id, notification_id, () => res.send())
return logger.log({ user_id, notification_id }, 'mark notification as read')
}
}

View file

@ -90,7 +90,6 @@ module.exports = {
method: 'POST',
json: payload
}
logger.log({ opts }, 'creating notification for user')
return makeRequest(opts, callback)
},
@ -107,10 +106,6 @@ module.exports = {
key
}
}
logger.log(
{ user_id, key },
'sending mark notification as read with key to notifications api'
)
return makeRequest(opts, callback)
},
@ -124,10 +119,6 @@ module.exports = {
}/user/${user_id}/notification/${notification_id}`,
timeout: oneSecond
}
logger.log(
{ user_id, notification_id },
'sending mark notification as read to notifications api'
)
return makeRequest(opts, callback)
},
@ -143,10 +134,6 @@ module.exports = {
method: 'DELETE',
timeout: oneSecond
}
logger.log(
{ key },
'sending mark notification as read with key-only to notifications api'
)
return makeRequest(opts, callback)
}
}

View file

@ -9,7 +9,6 @@ const logger = require('logger-sharelatex')
module.exports = {
renderRequestResetForm(req, res) {
logger.log('rendering request reset form')
res.render('user/passwordReset', { title: 'reset_password' })
},

View file

@ -122,7 +122,6 @@ const ProjectController = {
deleteProject(req, res) {
const projectId = req.params.Project_id
const forever = (req.query != null ? req.query.forever : undefined) != null
logger.log({ projectId, forever }, 'received request to archive project')
const user = AuthenticationController.getSessionUser(req)
const cb = err => {
if (err != null) {
@ -146,7 +145,6 @@ const ProjectController = {
archiveProject(req, res, next) {
const projectId = req.params.Project_id
const userId = AuthenticationController.getLoggedInUserId(req)
logger.log({ projectId }, 'received request to archive project')
ProjectDeleter.archiveProject(projectId, userId, function(err) {
if (err != null) {
@ -160,7 +158,6 @@ const ProjectController = {
unarchiveProject(req, res, next) {
const projectId = req.params.Project_id
const userId = AuthenticationController.getLoggedInUserId(req)
logger.log({ projectId }, 'received request to unarchive project')
ProjectDeleter.unarchiveProject(projectId, userId, function(err) {
if (err != null) {
@ -174,7 +171,6 @@ const ProjectController = {
trashProject(req, res, next) {
const projectId = req.params.project_id
const userId = AuthenticationController.getLoggedInUserId(req)
logger.log({ projectId }, 'received request to trash project')
ProjectDeleter.trashProject(projectId, userId, function(err) {
if (err != null) {
@ -188,7 +184,6 @@ const ProjectController = {
untrashProject(req, res, next) {
const projectId = req.params.project_id
const userId = AuthenticationController.getLoggedInUserId(req)
logger.log({ projectId }, 'received request to untrash project')
ProjectDeleter.untrashProject(projectId, userId, function(err) {
if (err != null) {
@ -200,9 +195,6 @@ const ProjectController = {
},
expireDeletedProjectsAfterDuration(req, res) {
logger.log(
'received request to look for old deleted projects and expire them'
)
ProjectDeleter.expireDeletedProjectsAfterDuration(err => {
if (err != null) {
res.sendStatus(500)
@ -214,7 +206,6 @@ const ProjectController = {
expireDeletedProject(req, res, next) {
const { projectId } = req.params
logger.log('received request to expire deleted project', { projectId })
ProjectDeleter.expireDeletedProject(projectId, err => {
if (err != null) {
next(err)
@ -226,7 +217,6 @@ const ProjectController = {
restoreProject(req, res) {
const projectId = req.params.Project_id
logger.log({ projectId }, 'received request to restore project')
ProjectDeleter.restoreProject(projectId, err => {
if (err != null) {
res.sendStatus(500)
@ -272,10 +262,7 @@ const ProjectController = {
const projectName =
req.body.projectName != null ? req.body.projectName.trim() : undefined
const { template } = req.body
logger.log(
{ user: userId, projectType: template, name: projectName },
'creating project'
)
async.waterfall(
[
cb => {
@ -290,10 +277,6 @@ const ProjectController = {
if (err != null) {
return next(err)
}
logger.log(
{ project, userId, name: projectName, templateType: template },
'created project'
)
res.send({ project_id: project._id })
}
)
@ -585,10 +568,7 @@ const ProjectController = {
parseInt(user._id.toString().slice(-2), 16) <
freeUserProportion * 255
const showFrontWidget = paidUser || sampleFreeUser
logger.log(
{ paidUser, sampleFreeUser, showFrontWidget },
'deciding whether to show front widget'
)
if (showFrontWidget) {
viewModel.frontChatWidgetRoomId =
Settings.overleaf != null
@ -619,7 +599,6 @@ const ProjectController = {
}
const projectId = req.params.Project_id
logger.log({ projectId, anonymous, userId }, 'loading editor')
// record failures to load the custom websocket
if ((req.query != null ? req.query.ws : undefined) === 'fallback') {
@ -738,10 +717,6 @@ const ProjectController = {
const daysSinceLastUpdated =
(new Date() - project.lastUpdated) / 86400000
logger.log(
{ projectId, daysSinceLastUpdated },
'got db results for loading editor'
)
const token = TokenAccessHandler.getRequestToken(req, projectId)
const { isTokenMember } = results
@ -774,7 +749,6 @@ const ProjectController = {
allowedFreeTrial = !!subscription.freeTrial.allowed || true
}
logger.log({ projectId }, 'rendering editor page')
res.render('project/editor', {
title: project.name,
priority_title: true,

View file

@ -46,7 +46,6 @@ const ProjectCreationHandler = {
if (error != null) {
return callback(error)
}
logger.log({ owner_id, projectName }, 'creating blank project')
if (attributes.overleaf !== undefined && attributes.overleaf != null) {
return ProjectCreationHandler._createBlankProject(
owner_id,

View file

@ -51,18 +51,13 @@ const ProjectDeleter = {
if (callback == null) {
callback = function(error) {}
}
logger.log(
{ project_id },
'removing flag marking project as deleted by external data source'
)
const conditions = { _id: project_id }
const update = { deletedByExternalDataSource: false }
return Project.update(conditions, update, {}, callback)
},
deleteUsersProjects(user_id, callback) {
logger.log({ user_id }, 'deleting users projects')
return Project.find({ owner_ref: user_id }, function(error, projects) {
if (error != null) {
return callback(error)
@ -117,7 +112,6 @@ const ProjectDeleter = {
}
)
} else {
logger.log({}, 'No deleted projects for duration were found')
callback(err)
}
}
@ -128,7 +122,6 @@ const ProjectDeleter = {
if (callback == null) {
callback = function(error) {}
}
logger.log({ project_id }, 'archived project from user request')
return Project.update(
{ _id: project_id },
{ $set: { archived: true } },
@ -137,10 +130,6 @@ const ProjectDeleter = {
logger.warn({ err }, 'problem archived project')
return callback(err)
}
logger.log(
{ project_id },
'successfully archived project from user request'
)
return callback()
}
)
@ -161,8 +150,6 @@ const ProjectDeleter = {
// Async methods
async function archiveProject(projectId, userId) {
logger.log({ projectId }, 'archiving project from user request')
try {
let project = await Project.findOne({ _id: projectId }).exec()
if (!project) {
@ -185,8 +172,6 @@ async function archiveProject(projectId, userId) {
}
async function unarchiveProject(projectId, userId) {
logger.log({ projectId }, 'unarchiving project from user request')
try {
let project = await Project.findOne({ _id: projectId }).exec()
if (!project) {
@ -207,8 +192,6 @@ async function unarchiveProject(projectId, userId) {
}
async function trashProject(projectId, userId) {
logger.log({ projectId }, 'trashing project from user request')
try {
let project = await Project.findOne({ _id: projectId }).exec()
if (!project) {
@ -229,8 +212,6 @@ async function trashProject(projectId, userId) {
}
async function untrashProject(projectId, userId) {
logger.log({ projectId }, 'untrashing project from user request')
try {
let project = await Project.findOne({ _id: projectId }).exec()
if (!project) {
@ -248,8 +229,6 @@ async function untrashProject(projectId, userId) {
}
async function deleteProject(project_id, options = {}) {
logger.log({ project_id }, 'deleting project')
try {
let project = await Project.findOne({ _id: project_id }).exec()
if (!project) {
@ -381,8 +360,6 @@ async function expireDeletedProject(projectId) {
}
}
).exec()
logger.log({ projectId }, 'Successfully expired deleted project')
} catch (error) {
logger.warn({ projectId, error }, 'error expiring deleted project')
throw error

View file

@ -69,7 +69,6 @@ async function getDetails(projectId) {
if (project.overleaf != null) {
details.overleaf = project.overleaf
}
logger.log({ projectId, details }, 'getting project details')
return details
}
@ -183,7 +182,6 @@ function fixProjectName(name) {
}
async function setPublicAccessLevel(projectId, newAccessLevel) {
logger.log({ projectId, level: newAccessLevel }, 'set public access level')
// DEPRECATED: `READ_ONLY` and `READ_AND_WRITE` are still valid in, but should no longer
// be passed here. Remove after token-based access has been live for a while
if (
@ -215,16 +213,8 @@ async function ensureTokensArePresent(projectId) {
project.tokens.readOnly != null &&
project.tokens.readAndWrite != null
) {
logger.log({ projectId }, 'project already has tokens')
return project.tokens
}
const hasTokens = project.tokens != null
const hasReadOnly = hasTokens && project.tokens.readOnly != null
const hasReadAndWrite = hasTokens && project.tokens.readAndWrite != null
logger.log(
{ projectId, hasTokens, hasReadOnly, hasReadAndWrite },
'generating tokens for project'
)
await _generateTokens(project)
await Project.update(
{ _id: projectId },

View file

@ -8,8 +8,6 @@ const { promisifyAll } = require('../../util/promises')
const ProjectEntityHandler = {
getAllDocs(projectId, callback) {
logger.log({ projectId }, 'getting all docs for project')
// We get the path and name info from the project, and the lines and
// version info from the doc store.
DocstoreManager.getAllDocs(projectId, (error, docContentsArray) => {
@ -45,17 +43,13 @@ const ProjectEntityHandler = {
}
}
}
logger.log(
{ count: _.keys(docs).length, projectId },
'returning docs for project'
)
callback(null, docs)
})
})
},
getAllFiles(projectId, callback) {
logger.log({ projectId }, 'getting all files for project')
ProjectEntityHandler._getAllFolders(projectId, (err, folders) => {
if (folders == null) {
folders = {}
@ -90,7 +84,6 @@ const ProjectEntityHandler = {
},
getAllEntitiesFromProject(project, callback) {
logger.log({ project }, 'getting all entities for project')
ProjectEntityHandler._getAllFoldersFromProject(project, (err, folders) => {
if (folders == null) {
folders = {}
@ -130,7 +123,6 @@ const ProjectEntityHandler = {
},
getAllDocPathsFromProject(project, callback) {
logger.log({ project }, 'getting all docs for project')
ProjectEntityHandler._getAllFoldersFromProject(project, (err, folders) => {
if (folders == null) {
folders = {}
@ -145,10 +137,6 @@ const ProjectEntityHandler = {
docPath[doc._id] = path.join(folderPath, doc.name)
}
}
logger.log(
{ count: _.keys(docPath).length, projectId: project._id },
'returning docPaths for project'
)
callback(null, docPath)
})
},
@ -166,7 +154,6 @@ const ProjectEntityHandler = {
},
getDocPathByProjectIdAndDocId(projectId, docId, callback) {
logger.log({ projectId, docId }, 'getting path for doc and project')
ProjectGetter.getProjectWithoutDocLines(projectId, (err, project) => {
if (err != null) {
return callback(err)
@ -208,7 +195,6 @@ const ProjectEntityHandler = {
},
_getAllFolders(projectId, callback) {
logger.log({ projectId }, 'getting all folders for project')
ProjectGetter.getProjectWithoutDocLines(projectId, (err, project) => {
if (err != null) {
return callback(err)

View file

@ -196,10 +196,6 @@ async function mkdirp(projectId, path, options = {}) {
projectId
)
if (path === '/') {
logger.log(
{ projectId: project._id },
'mkdir is only trying to make path of / so sending back root folder'
)
return { newFolders: [], folder: project.rootFolder[0] }
}
@ -219,10 +215,6 @@ async function mkdirp(projectId, path, options = {}) {
lastFolder = foundFolder
} catch (err) {
// Folder couldn't be found. Create it.
logger.log(
{ path, projectId: project._id, folderName },
'making folder from mkdirp'
)
const parentFolderId = lastFolder && lastFolder._id
const {
folder: newFolder,
@ -593,13 +585,6 @@ async function _checkValidMove(
// in the destination folder
_checkValidElementName(destEntity, entity.name)
if (/folder/.test(entityType)) {
logger.log(
{
destFolderPath: destFolderPath.fileSystem,
folderPath: entityPath.fileSystem
},
'checking folder is not moving into child folder'
)
const isNestedFolder =
destFolderPath.fileSystem.slice(0, entityPath.fileSystem.length) ===
entityPath.fileSystem

View file

@ -253,7 +253,7 @@ const ProjectEntityUpdateHandler = {
if (doc == null) {
// Do not allow an update to a doc which has never exist on this project
logger.warn(
{ docId, projectId, lines },
{ docId, projectId },
'doc not found while updating doc lines'
)
return callback(new Errors.NotFoundError('doc not found'))
@ -272,7 +272,7 @@ const ProjectEntityUpdateHandler = {
(err, modified, rev) => {
if (err != null) {
logger.warn(
{ err, docId, projectId, lines },
{ err, docId, projectId },
'error sending doc to docstore'
)
return callback(err)

View file

@ -114,10 +114,6 @@ const ProjectGetter = {
query = { _id: ObjectId(project_id.toString()) }
} else {
const err = new Error('malformed get request')
logger.log(
{ project_id, err, type: typeof project_id },
'malformed get request'
)
return callback(err)
}

View file

@ -152,10 +152,6 @@ const ProjectHistoryHandler = {
if (err != null) {
return callback(err)
}
logger.log(
{ project_id, history_id: history.overleaf_id },
'started syncing project with new history id'
)
return HistoryManager.flushProject(project_id, callback)
}
)

View file

@ -264,10 +264,6 @@ const ProjectLocator = {
const needleName = foldersList.pop()
const rootFolder = project.rootFolder[0]
logger.log(
{ projectId: project._id, path: needlePath, foldersList },
'looking for element by path'
)
const jobs = []
jobs.push(cb => getParentFolder(rootFolder, foldersList, 0, cb))
jobs.push((folder, cb) => getEntity(folder, needleName, cb))
@ -291,10 +287,6 @@ const ProjectLocator = {
project.name.toLowerCase() === projectName &&
!ProjectHelper.isArchivedOrTrashed(project, userId)
)
logger.log(
{ userId, projectName, totalProjects: projects.length, project },
'looking for project by name'
)
callback(null, project)
}
)

View file

@ -1,5 +1,4 @@
const { Project } = require('../../models/Project')
const logger = require('logger-sharelatex')
const settings = require('settings-sharelatex')
const { promisifyAll } = require('../../util/promises')
@ -7,7 +6,6 @@ const safeCompilers = ['xelatex', 'pdflatex', 'latex', 'lualatex']
const ProjectOptionsHandler = {
setCompiler(projectId, compiler, callback) {
logger.log({ projectId, compiler }, 'setting the compiler')
if (!compiler) {
return callback()
}
@ -21,7 +19,6 @@ const ProjectOptionsHandler = {
},
setImageName(projectId, imageName, callback) {
logger.log({ projectId, imageName }, 'setting the imageName')
if (!imageName || !Array.isArray(settings.allowedImageNames)) {
return callback()
}
@ -38,7 +35,6 @@ const ProjectOptionsHandler = {
},
setSpellCheckLanguage(projectId, languageCode, callback) {
logger.log({ projectId, languageCode }, 'setting the spell check language')
if (!Array.isArray(settings.languages)) {
return callback()
}
@ -54,10 +50,6 @@ const ProjectOptionsHandler = {
},
setBrandVariationId(projectId, brandVariationId, callback) {
logger.log(
{ projectId, brandVariationId },
'setting the brand variation id'
)
if (!brandVariationId) {
return callback()
}
@ -67,7 +59,6 @@ const ProjectOptionsHandler = {
},
unsetBrandVariationId(projectId, callback) {
logger.log({ projectId }, 'unsetting the brand variation id')
const conditions = { _id: projectId }
const update = { $unset: { brandVariationId: 1 } }
Project.update(conditions, update, {}, callback)

View file

@ -14,7 +14,6 @@ const crypto = require('crypto')
const V1Api = require('../V1/V1Api')
const Features = require('../../infrastructure/Features')
const Async = require('async')
const logger = require('logger-sharelatex')
const { promisify } = require('util')
// This module mirrors the token generation in Overleaf (`random_token.rb`),
@ -71,7 +70,6 @@ const ProjectTokenGenerator = {
10,
function(cb) {
const token = ProjectTokenGenerator.readOnlyToken()
logger.log({ token }, 'Generated read-only token')
if (!Features.hasFeature('overleaf-integration')) {
return cb(null, token)
@ -98,10 +96,6 @@ const ProjectTokenGenerator = {
if (body.exists === true) {
return cb(new Error(`token already exists in v1: ${token}`))
} else {
logger.log(
{ token },
'Read-only token does not exist in v1, good to use'
)
return cb(null, token)
}
}

View file

@ -24,22 +24,15 @@ module.exports = ReferalAllocator = {
callback = function() {}
}
if (referal_id == null) {
logger.log({ new_user_id }, 'no referal for user')
return callback(null)
}
logger.log(
{ referal_id, new_user_id, referal_source, referal_medium },
'allocating users referal'
)
const query = { referal_id: referal_id }
return User.findOne(query, function(error, user) {
if (error != null) {
return callback(error)
}
if (user == null || user._id == null) {
logger.log({ new_user_id, referal_id }, 'no user found for referal id')
return callback(null)
}

View file

@ -14,7 +14,6 @@
*/
let ReferalFeatures
const _ = require('underscore')
const logger = require('logger-sharelatex')
const { User } = require('../../models/User')
const Settings = require('settings-sharelatex')
@ -31,10 +30,6 @@ module.exports = ReferalFeatures = {
if (user == null) {
return callback(new Error(`user not found ${user_id} for assignBonus`))
}
logger.log(
{ user_id, refered_user_count: user.refered_user_count },
'assigning bonus'
)
if (user.refered_user_count != null && user.refered_user_count > 0) {
const newFeatures = ReferalFeatures._calculateFeatures(user)
return callback(null, newFeatures)

View file

@ -28,7 +28,6 @@ module.exports = ReferencesController = {
)
return res.sendStatus(400)
}
logger.log({ projectId, docIds }, 'index references for project')
return ReferencesHandler.index(projectId, docIds, function(err, data) {
if (err != null) {
logger.err({ err, projectId }, 'error indexing all references')
@ -47,7 +46,6 @@ module.exports = ReferencesController = {
indexAll(req, res) {
const projectId = req.params.Project_id
const { shouldBroadcast } = req.body
logger.log({ projectId }, 'index all references for project')
return ReferencesHandler.indexAll(projectId, function(err, data) {
if (err != null) {
logger.err({ err, projectId }, 'error indexing all references')
@ -68,10 +66,6 @@ module.exports = ReferencesController = {
return res.json({ projectId, keys: [] })
}
if (shouldBroadcast) {
logger.log(
{ projectId },
'emitting new references keys to connected clients'
)
EditorRealTimeController.emitToRoom(
projectId,
'references:keys:updated',

View file

@ -180,10 +180,6 @@ module.exports = ReferencesHandler = {
ReferencesHandler._buildFileUrl(projectId, fileId)
)
const allUrls = bibDocUrls.concat(bibFileUrls)
logger.log(
{ projectId, isFullIndex, docIds, bibDocUrls },
'sending request to references service'
)
return request.post(
{
url: `${settings.apis.references.url}/project/${projectId}/index`,
@ -209,7 +205,6 @@ module.exports = ReferencesHandler = {
res.statusCode
}`
)
logger.log({ err, projectId }, 'error updating references')
return callback(err)
}
}

View file

@ -36,10 +36,6 @@ module.exports = {
const createdAt = new Date()
const expiresAt = new Date(createdAt.getTime() + expiresIn * 1000)
const token = crypto.randomBytes(32).toString('hex')
logger.log(
{ data, expiresIn, token_start: token.slice(0, 8) },
`generating token for ${use}`
)
return db.tokens.insert(
{
use,
@ -61,10 +57,6 @@ module.exports = {
if (callback == null) {
callback = function(error, data) {}
}
logger.log(
{ token_start: token.slice(0, 8) },
`getting data from ${use} token`
)
const now = new Date()
return db.tokens.findAndModify(
{

View file

@ -114,15 +114,6 @@ module.exports = LimitationsManager = {
).withCause(err)
)
}
logger.log(
{
user_id: user._id,
isMember,
hasSubscription,
hasV1Subscription
},
'checking if user has subscription or is group member'
)
return callback(
err,
isMember || hasSubscription || hasV1Subscription,
@ -143,7 +134,6 @@ module.exports = LimitationsManager = {
if (callback == null) {
callback = function(err, hasSubscription, subscription) {}
}
logger.log({ user_id: user._id }, 'checking if user has subscription')
return SubscriptionLocator.getUsersSubscription(user._id, function(
err,
subscription
@ -156,10 +146,6 @@ module.exports = LimitationsManager = {
(subscription.recurlySubscription_id != null ||
(subscription != null ? subscription.customAccount : undefined) ===
true)
logger.log(
{ user, hasValidSubscription, subscription },
'checking if user has subscription'
)
return callback(err, hasValidSubscription, subscription)
})
},
@ -191,10 +177,6 @@ module.exports = LimitationsManager = {
if (callback == null) {
callback = function(error, isMember, subscriptions) {}
}
logger.log(
{ user_id: user._id },
'checking is user is member of subscription groups'
)
return SubscriptionLocator.getMemberSubscriptions(user._id, function(
err,
subscriptions
@ -217,10 +199,6 @@ module.exports = LimitationsManager = {
err,
v1Subscription
) {
logger.log(
{ user_id: user._id, v1Subscription },
'[userHasV1Subscription]'
)
return callback(
err,
!!(v1Subscription != null ? v1Subscription.has_subscription : undefined)

View file

@ -86,10 +86,8 @@ module.exports = RecurlyWrapper = {
)
}
if (cache.userExists) {
logger.log({ user_id: user._id }, 'user already exists in recurly')
return next(null, cache)
}
logger.log({ user_id: user._id }, 'creating user in recurly')
const data = {
account_code: user._id,
email: user.email,
@ -434,6 +432,7 @@ module.exports = RecurlyWrapper = {
},
'error returned from recurly'
)
// TODO: this should be an Error object not a string
error = `Recurly API returned with status code: ${response.statusCode}`
}
if (response.statusCode === 404 && expect404) {
@ -761,10 +760,7 @@ module.exports = RecurlyWrapper = {
x1 => x1.description
) === "A canceled subscription can't transition to canceled"
) {
logger.log(
{ subscriptionId, error, body },
'subscription already cancelled, not really an error, proceeding'
)
// subscription already cancelled, not really an error, proceeding
return callback(null)
} else {
return callback(error)

View file

@ -39,7 +39,6 @@ module.exports = SubscriptionController = {
if (req.query.v != null) {
viewName = `${viewName}_${req.query.v}`
}
logger.log({ viewName }, 'showing plans page')
let currentUser = null
return GeoIpLookup.getCurrencyCode(
@ -165,21 +164,6 @@ module.exports = SubscriptionController = {
return next(error)
}
const fromPlansPage = req.query.hasSubscription
logger.log(
{
user,
hasSubscription,
fromPlansPage,
personalSubscription,
memberGroupSubscriptions,
managedGroupSubscriptions,
confirmedMemberInstitutions,
managedInstitutions,
managedPublishers,
v1SubscriptionStatus
},
'showing subscription dashboard'
)
const plans = SubscriptionViewModelBuilder.buildViewModel()
const data = {
title: 'your_subscription',
@ -209,10 +193,6 @@ module.exports = SubscriptionController = {
req.body.recurly_three_d_secure_action_result_token_id
}
const { subscriptionDetails } = req.body
logger.log(
{ user_id: user._id, subscriptionDetails },
'creating subscription'
)
return LimitationsManager.userHasV1OrV2Subscription(user, function(
err,
@ -419,10 +399,6 @@ module.exports = SubscriptionController = {
if (!hasSubscription) {
return res.redirect('/user/subscription/plans')
}
logger.log(
{ planName, user_id: user._id },
'rendering upgrade to annual page'
)
return res.render('subscriptions/upgradeToAnnual', {
title: 'Upgrade to annual',
planName

View file

@ -49,10 +49,7 @@ module.exports = {
if (error != null) {
return next(error)
}
logger.log(
{ adminUserId, userToRemove_id },
'removing user from group subscription after self request'
)
return SubscriptionGroupHandler.removeUserFromGroup(
subscription._id,
userToRemove_id,
@ -90,13 +87,13 @@ var getManagedSubscription = (managerId, callback) =>
err,
subscription
) {
if (subscription != null) {
logger.log({ managerId }, 'got managed subscription')
} else {
if (!err) {
err = new Error(`No subscription found managed by user ${managerId}`)
}
if (err) {
return callback(err)
} else if (!subscription) {
return callback(
new Error(`No subscription found managed by user ${managerId}`)
)
}
return callback(err, subscription)
return callback(null, subscription)
})

View file

@ -38,10 +38,6 @@ const SubscriptionGroupHandler = {
},
replaceUserReferencesInGroups(oldId, newId, callback) {
logger.log(
{ old_id: oldId, new_id: newId },
'replacing user reference in groups'
)
return Subscription.update(
{ admin_id: oldId },
{ admin_id: newId },
@ -87,10 +83,6 @@ const SubscriptionGroupHandler = {
} else {
partOfGroup = false
}
logger.log(
{ user_id, subscription_id, partOfGroup },
'checking if user is part of a group'
)
return callback(err, partOfGroup)
}
)
@ -115,10 +107,6 @@ const SubscriptionGroupHandler = {
}
var replaceInArray = function(model, property, oldValue, newValue, callback) {
logger.log(
`Replacing ${oldValue} with ${newValue} in ${property} of ${model}`
)
// Mongo won't let us pull and addToSet in the same query, so do it in
// two. Note we need to add first, since the query is based on the old user.
const query = {}

View file

@ -93,7 +93,6 @@ const SubscriptionHandler = {
},
updateSubscription(user, plan_code, coupon_code, callback) {
logger.log({ user, plan_code, coupon_code }, 'updating subscription')
return LimitationsManager.userHasV2Subscription(user, function(
err,
hasSubscription,
@ -108,10 +107,6 @@ const SubscriptionHandler = {
if (coupon_code == null) {
return cb()
}
logger.log(
{ user_id: user._id, plan_code, coupon_code },
'updating subscription with coupon code applied first'
)
return RecurlyWrapper.getSubscription(
subscription.recurlySubscription_id,
{ includeAccount: true },

View file

@ -21,7 +21,6 @@ const { ObjectId } = require('mongoose').Types
const SubscriptionLocator = {
getUsersSubscription(user_or_id, callback) {
const user_id = SubscriptionLocator._getUserId(user_or_id)
logger.log({ user_id }, 'getting users subscription')
return Subscription.findOne({ admin_id: user_id }, function(
err,
subscription
@ -32,7 +31,6 @@ const SubscriptionLocator = {
},
findManagedSubscription(managerId, callback) {
logger.log({ managerId }, 'finding managed subscription')
return Subscription.findOne({ manager_ids: managerId }, callback)
},
@ -51,7 +49,6 @@ const SubscriptionLocator = {
getMemberSubscriptions(user_or_id, callback) {
const user_id = SubscriptionLocator._getUserId(user_or_id)
logger.log({ user_id }, 'getting users group subscriptions')
return Subscription.find({ member_ids: user_id })
.populate('admin_id')
.exec(callback)
@ -86,7 +83,6 @@ const SubscriptionLocator = {
},
getUserDeletedSubscriptions(userId, callback) {
logger.log({ userId }, 'getting users deleted subscriptions')
DeletedSubscription.find({ 'subscription.admin_id': userId }, callback)
},

View file

@ -39,10 +39,6 @@ const SubscriptionUpdater = {
callback = requesterData
requesterData = {}
}
logger.log(
{ adminUserId, recurlySubscription },
'syncSubscription, creating new if subscription does not exist'
)
SubscriptionLocator.getUsersSubscription(adminUserId, function(
err,
subscription
@ -51,10 +47,6 @@ const SubscriptionUpdater = {
return callback(err)
}
if (subscription != null) {
logger.log(
{ adminUserId, recurlySubscription },
'subscription does exist'
)
SubscriptionUpdater._updateSubscriptionFromRecurly(
recurlySubscription,
subscription,
@ -62,10 +54,6 @@ const SubscriptionUpdater = {
callback
)
} else {
logger.log(
{ adminUserId, recurlySubscription },
'subscription does not exist, creating a new one'
)
SubscriptionUpdater._createNewSubscription(adminUserId, function(
err,
subscription
@ -102,10 +90,6 @@ const SubscriptionUpdater = {
},
addUsersToGroupWithoutFeaturesRefresh(subscriptionId, memberIds, callback) {
logger.log(
{ subscriptionId, memberIds },
'adding members into mongo subscription'
)
const searchOps = { _id: subscriptionId }
const insertOperation = { $addToSet: { member_ids: { $each: memberIds } } }
@ -167,10 +151,6 @@ const SubscriptionUpdater = {
if (callback == null) {
callback = function() {}
}
logger.log(
{ subscriptionId: subscription._id },
'deleting subscription and downgrading users'
)
async.series(
[
cb =>
@ -249,7 +229,6 @@ const SubscriptionUpdater = {
},
_createNewSubscription(adminUserId, callback) {
logger.log({ adminUserId }, 'creating new subscription')
const subscription = new Subscription({
admin_id: adminUserId,
manager_ids: [adminUserId]
@ -263,7 +242,6 @@ const SubscriptionUpdater = {
requesterData,
callback
) {
logger.log({ recurlySubscription, subscription }, 'updaing subscription')
if (recurlySubscription.state === 'expired') {
return SubscriptionUpdater.deleteSubscription(
subscription,

View file

@ -41,7 +41,6 @@ module.exports = TeamInvitesHandler = {
if (!email) {
return callback(new Error('invalid email'))
}
logger.log({ teamManagerId, email }, 'Creating manager team invite')
return UserGetter.getUser(teamManagerId, function(error, teamManager) {
if (error) {
return callback(error)
@ -81,7 +80,6 @@ module.exports = TeamInvitesHandler = {
},
acceptInvite(token, userId, callback) {
logger.log({ userId }, 'Accepting invite')
TeamInvitesHandler.getInvite(token, function(err, invite, subscription) {
if (err) {
return callback(err)
@ -107,7 +105,6 @@ module.exports = TeamInvitesHandler = {
if (!email) {
return callback(new Error('invalid email'))
}
logger.log({ teamManagerId, email }, 'Revoking invite')
removeInviteFromTeam(subscription.id, email, callback)
},
@ -131,10 +128,6 @@ module.exports = TeamInvitesHandler = {
}
var createInvite = function(subscription, email, inviter, callback) {
logger.log(
{ subscriptionId: subscription.id, email, inviterId: inviter._id },
'Creating invite'
)
checkIfInviteIsPossible(subscription, email, function(
error,
possible,
@ -213,10 +206,6 @@ var createInvite = function(subscription, email, inviter, callback) {
var removeInviteFromTeam = function(subscriptionId, email, callback) {
const searchConditions = { _id: new ObjectId(subscriptionId.toString()) }
const removeInvite = { $pull: { teamInvites: { email } } }
logger.log(
{ subscriptionId, email, searchConditions, removeInvite },
'removeInviteFromTeam'
)
async.series(
[

View file

@ -11,7 +11,6 @@
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const logger = require('logger-sharelatex')
const { User } = require('../../models/User')
module.exports = {
@ -21,7 +20,6 @@ module.exports = {
}
const conditions = { _id: user_id }
const update = {}
logger.log({ user_id, features }, 'updating users features')
for (let key in features) {
const value = features[key]
update[`features.${key}`] = value

View file

@ -15,7 +15,6 @@ let V1SubscriptionManager
const UserGetter = require('../User/UserGetter')
const request = require('request')
const settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const { V1ConnectionError, NotFoundError } = require('../Errors/Errors')
module.exports = V1SubscriptionManager = {
@ -29,7 +28,6 @@ module.exports = V1SubscriptionManager = {
if (callback == null) {
callback = function(err, planCode, v1Id) {}
}
logger.log({ userId }, '[V1SubscriptionManager] fetching v1 plan for user')
return V1SubscriptionManager._v1Request(
userId,
{
@ -43,10 +41,6 @@ module.exports = V1SubscriptionManager = {
return callback(error)
}
let planName = body != null ? body.plan_name : undefined
logger.log(
{ userId, planName, body },
'[V1SubscriptionManager] fetched v1 plan for user'
)
if (['pro', 'pro_plus', 'student', 'free'].includes(planName)) {
planName = `v1_${planName}`
} else {
@ -121,12 +115,6 @@ module.exports = V1SubscriptionManager = {
user != null ? user.overleaf : undefined,
x => x.id
)
if (v1Id == null) {
logger.log(
{ userId },
'[V1SubscriptionManager] no v1 id found for user'
)
}
return callback(null, v1Id)
})

View file

@ -22,6 +22,8 @@ const Settings = require('settings-sharelatex')
module.exports = SudoModeController = {
sudoModePrompt(req, res, next) {
if (req.externalAuthenticationSystemUsed() && Settings.overleaf == null) {
// TODO: maybe we should have audit logging on sudo mode, but if so, it
// probably belongs in an internal database and not stackdriver
logger.log({ userId }, '[SudoMode] using external auth, redirecting')
return res.redirect('/project')
}

View file

@@ -11,13 +11,11 @@
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const TagsHandler = require('./TagsHandler')
const logger = require('logger-sharelatex')
const AuthenticationController = require('../Authentication/AuthenticationController')
module.exports = {
getAllTags(req, res, next) {
const user_id = AuthenticationController.getLoggedInUserId(req)
logger.log({ user_id }, 'getting tags')
return TagsHandler.getAllTags(user_id, function(error, allTags) {
if (error != null) {
return next(error)
@@ -29,7 +27,6 @@ module.exports = {
createTag(req, res, next) {
const user_id = AuthenticationController.getLoggedInUserId(req)
const { name } = req.body
logger.log({ user_id, name }, 'creating tag')
return TagsHandler.createTag(user_id, name, function(error, tag) {
if (error != null) {
return next(error)
@@ -41,7 +38,6 @@ module.exports = {
addProjectToTag(req, res, next) {
const user_id = AuthenticationController.getLoggedInUserId(req)
const { tag_id, project_id } = req.params
logger.log({ user_id, tag_id, project_id }, 'adding tag to project')
return TagsHandler.addProjectToTag(user_id, tag_id, project_id, function(
error
) {
@@ -55,7 +51,6 @@ module.exports = {
removeProjectFromTag(req, res, next) {
const user_id = AuthenticationController.getLoggedInUserId(req)
const { tag_id, project_id } = req.params
logger.log({ user_id, tag_id, project_id }, 'removing tag from project')
return TagsHandler.removeProjectFromTag(
user_id,
tag_id,
@@ -72,7 +67,6 @@ module.exports = {
deleteTag(req, res, next) {
const user_id = AuthenticationController.getLoggedInUserId(req)
const { tag_id } = req.params
logger.log({ user_id, tag_id }, 'deleting tag')
return TagsHandler.deleteTag(user_id, tag_id, function(error) {
if (error != null) {
return next(error)
@@ -88,7 +82,6 @@ module.exports = {
if (name == null) {
return res.status(400).end()
} else {
logger.log({ user_id, tag_id, name }, 'renaming tag')
return TagsHandler.renameTag(user_id, tag_id, name, function(error) {
if (error != null) {
return next(error)

View file

@@ -26,10 +26,7 @@ module.exports = {
metrics.inc('tpds.merge-update')
const { filePath, user_id, projectName } = parseParams(req)
const source = req.headers['x-sl-update-source'] || 'unknown'
logger.log(
{ user_id, filePath, fullPath: req.params[0], projectName, source },
'reciving update request from tpds'
)
return tpdsUpdateHandler.newUpdate(
user_id,
projectName,
@@ -37,10 +34,6 @@ module.exports = {
req,
source,
function(err) {
logger.log(
{ user_id, filePath, fullPath: req.params[0] },
'sending response that tpdsUpdate has been completed'
)
if (err != null) {
if (err.name === 'TooManyRequestsError') {
logger.warn(
@@ -56,10 +49,6 @@ module.exports = {
return res.sendStatus(500)
}
} else {
logger.log(
{ user_id, filePath, projectName },
'telling tpds update has been processed'
)
return res.sendStatus(200)
}
}
@@ -70,10 +59,6 @@ module.exports = {
metrics.inc('tpds.delete-update')
const { filePath, user_id, projectName } = parseParams(req)
const source = req.headers['x-sl-update-source'] || 'unknown'
logger.log(
{ user_id, filePath, projectName, fullPath: req.params[0], source },
'reciving delete request from tpds'
)
return tpdsUpdateHandler.deleteUpdate(
user_id,
projectName,
@@ -87,10 +72,6 @@ module.exports = {
)
return res.sendStatus(500)
} else {
logger.log(
{ user_id, filePath, projectName },
'telling tpds delete has been processed'
)
return res.sendStatus(200)
}
}
@@ -108,7 +89,6 @@ module.exports = {
const { project_id } = req.params
const path = `/${req.params[0]}` // UpdateMerger expects leading slash
const source = req.headers['x-sl-update-source'] || 'unknown'
logger.log({ project_id, path, source }, 'received project contents update')
return UpdateMerger.mergeUpdate(
null,
project_id,
@@ -131,10 +111,7 @@ module.exports = {
const { project_id } = req.params
const path = `/${req.params[0]}` // UpdateMerger expects leading slash
const source = req.headers['x-sl-update-source'] || 'unknown'
logger.log(
{ project_id, path, source },
'received project contents delete request'
)
return UpdateMerger.deleteUpdate(null, project_id, path, source, function(
error
) {

View file

@@ -31,10 +31,6 @@ module.exports = {
user_id,
projectName,
(err, project) => {
logger.log(
{ user_id, filePath: path, projectName },
'handling new update from tpds'
)
if (project == null) {
return projectCreationHandler.createBlankProject(
user_id,
@@ -68,10 +64,6 @@ module.exports = {
return callback(err)
}
if (projectIsOnCooldown) {
logger.log(
{ projectId: project._id },
'project is on cooldown, denying request'
)
return callback(
new Errors.TooManyRequestsError('project on cooldown')
)

View file

@@ -68,7 +68,6 @@ const TpdsUpdateSender = {
)
return callback()
} else {
logger.log({ group, job }, 'successfully queued up job for tpdsworker')
return callback()
}
})
@@ -189,16 +188,6 @@ const TpdsUpdateSender = {
user_id,
allUserIds
) {
logger.log(
{
project_id: options.project_id,
user_id,
startPath,
endPath,
uri: options.uri
},
'moving entity in third party data store'
)
const moveOptions = {
method: 'put',
title: 'moveEntity',
@@ -233,15 +222,6 @@ const TpdsUpdateSender = {
user_id,
allUserIds
) {
logger.log(
{
project_id: options.project_id,
user_id,
path: options.path,
uri: options.uri
},
'deleting entity in third party data store'
)
const deleteOptions = {
method: 'DELETE',
headers: {
@@ -270,7 +250,6 @@ const TpdsUpdateSender = {
callback = function(err) {}
}
metrics.inc('tpds.poll-dropbox')
logger.log({ user_id }, 'polling dropbox for user')
const options = {
method: 'POST',
uri: `${tpdsUrl}/user/poll`,

View file

@@ -26,7 +26,6 @@ module.exports = UpdateMerger = {
if (callback == null) {
callback = function(error) {}
}
logger.log({ project_id, path }, 'merging update from tpds')
return FileWriter.writeStreamToDisk(project_id, updateRequest, function(
err,
fsPath
@@ -176,10 +175,6 @@ module.exports = UpdateMerger = {
source,
user_id,
function() {
logger.log(
{ project_id, path },
'finished processing update to delete entity from tpds'
)
return callback()
}
)
@@ -206,10 +201,6 @@ module.exports = UpdateMerger = {
source,
user_id,
function(err) {
logger.log(
{ project_id },
'completed processing file update from tpds'
)
return callback(err)
}
)
@@ -217,7 +208,6 @@ module.exports = UpdateMerger = {
},
processFile(project_id, fsPath, path, source, user_id, callback) {
logger.log({ project_id }, 'processing file update from tpds')
return EditorController.upsertFileWithPath(
project_id,
path,
@@ -226,10 +216,6 @@ module.exports = UpdateMerger = {
source,
user_id,
function(err) {
logger.log(
{ project_id },
'completed processing file update from tpds'
)
return callback(err)
}
)

View file

@@ -84,15 +84,7 @@ module.exports = TokenAccessController = {
next
)
} else if (project == null) {
logger.log(
{ token, userId },
'[TokenAccess] no token-based project found for readAndWrite token'
)
if (userId == null) {
logger.log(
{ token },
'[TokenAccess] No project found with read-write token, anonymous user, deny'
)
return next(new Errors.NotFoundError())
}
return TokenAccessController._tryHigherAccess(
@@ -105,10 +97,6 @@ module.exports = TokenAccessController = {
} else {
if (userId == null) {
if (TokenAccessHandler.ANONYMOUS_READ_AND_WRITE_ENABLED) {
logger.log(
{ token, projectId: project._id },
'[TokenAccess] allow anonymous read-and-write token access'
)
TokenAccessHandler.grantSessionTokenAccess(req, project._id, token)
req._anonymousAccessToken = token
return TokenAccessController._loadEditor(
@@ -163,10 +151,7 @@ module.exports = TokenAccessController = {
readOnlyToken(req, res, next) {
const userId = AuthenticationController.getLoggedInUserId(req)
const token = req.params['read_only_token']
logger.log(
{ userId, token },
'[TokenAccess] requesting read-only token access'
)
return TokenAccessHandler.getV1DocPublishedInfo(token, function(
err,
doc_published_info
@@ -203,15 +188,7 @@ module.exports = TokenAccessController = {
next
)
} else if (project == null) {
logger.log(
{ token, userId },
'[TokenAccess] no project found for readOnly token'
)
if (userId == null) {
logger.log(
{ token },
'[TokenAccess] No project found with readOnly token, anonymous user, deny'
)
return next(new Errors.NotFoundError())
}
return TokenAccessController._tryHigherAccess(
@@ -223,10 +200,6 @@ module.exports = TokenAccessController = {
)
} else {
if (userId == null) {
logger.log(
{ userId, projectId: project._id },
'[TokenAccess] adding anonymous user to project with readOnly token'
)
TokenAccessHandler.grantSessionTokenAccess(req, project._id, token)
req._anonymousAccessToken = token
return TokenAccessController._loadEditor(
@@ -237,10 +210,6 @@ module.exports = TokenAccessController = {
)
} else {
if (project.owner_ref.toString() === userId) {
logger.log(
{ userId, projectId: project._id },
'[TokenAccess] user is already project owner'
)
return TokenAccessController._loadEditor(
project._id,
req,

View file

@@ -148,10 +148,6 @@ const ArchiveManager = {
let entryFileCount = 0
zipfile.on('entry', function(entry) {
logger.log(
{ source, fileName: entry.fileName },
'processing zip file entry'
)
return ArchiveManager._checkFilePath(entry, destination, function(
err,
destFile

View file

@@ -74,10 +74,6 @@ module.exports = ProjectUploadController = {
})
}
} else {
logger.log(
{ project: project._id, file_path: path, file_name: name },
'uploaded project'
)
return res.send({ success: true, project_id: project._id })
}
}
@@ -94,7 +90,6 @@ module.exports = ProjectUploadController = {
logger.err({ project_id, name }, 'bad name when trying to upload file')
return res.send({ success: false })
}
logger.log({ folder_id, project_id }, 'getting upload file request')
const user_id = AuthenticationController.getLoggedInUserId(req)
return FileSystemImportManager.addEntity(
@@ -120,10 +115,6 @@ module.exports = ProjectUploadController = {
)
return res.send({ success: false })
} else {
logger.log(
{ project_id, file_path: path, file_name: name, folder_id },
'uploaded file'
)
return res.send({
success: true,
entity_id: entity != null ? entity._id : undefined,

View file

@@ -85,7 +85,6 @@ async function _addInstitutionEmail(userId, email, providerId) {
}
}
if (user == null) {
logger.log(userId, 'could not find user for institution SAML linking')
throw new Errors.NotFoundError('user not found')
}
const emailAlreadyAssociated = user.emails.find(e => e.email === email)

View file

@@ -21,7 +21,7 @@ const UserController = {
tryDeleteUser(req, res, next) {
const userId = AuthenticationController.getLoggedInUserId(req)
const { password } = req.body
logger.log({ userId }, 'trying to delete user account')
if (password == null || password === '') {
logger.err(
{ userId },
@@ -106,7 +106,6 @@ const UserController = {
updateUserSettings(req, res, next) {
const userId = AuthenticationController.getLoggedInUserId(req)
logger.log({ userId }, 'updating account settings')
User.findById(userId, (err, user) => {
if (err != null || user == null) {
logger.err({ err, userId }, 'problem updaing user settings')
@@ -310,7 +309,6 @@ const UserController = {
clearSessions(req, res, next) {
metrics.inc('user.clear-sessions')
const user = AuthenticationController.getSessionUser(req)
logger.log({ userId: user._id }, 'clearing sessions for user')
UserSessionsManager.revokeAllUserSessions(user, [req.sessionID], err => {
if (err != null) {
return next(err)

View file

@@ -69,7 +69,6 @@ async function expireDeletedUsersAfterDuration() {
}).exec()
if (deletedUsers.length === 0) {
logger.log('No deleted users were found for duration')
return
}

View file

@@ -3,7 +3,6 @@ const EmailHandler = require('../Email/EmailHandler')
const OneTimeTokenHandler = require('../Security/OneTimeTokenHandler')
const settings = require('settings-sharelatex')
const Errors = require('../Errors/Errors')
const logger = require('logger-sharelatex')
const UserUpdater = require('./UserUpdater')
const UserGetter = require('./UserGetter')
@@ -48,10 +47,6 @@ const UserEmailsConfirmationHandler = {
},
confirmEmailFromToken(token, callback) {
logger.log(
{ token_start: token.slice(0, 8) },
'confirming email from token'
)
OneTimeTokenHandler.getValueFromTokenAndExpire(
'email_confirmation',
token,
@@ -64,10 +59,7 @@ const UserEmailsConfirmationHandler = {
}
const userId = data.user_id
const email = data.email
logger.log(
{ data, userId, email, token_start: token.slice(0, 8) },
'found data for email confirmation'
)
if (!userId || email !== EmailHelper.parseEmail(email)) {
return callback(new Errors.NotFoundError('invalid data'))
}

View file

@@ -5,7 +5,6 @@ const UserUpdater = require('./UserUpdater')
const EmailHelper = require('../Helpers/EmailHelper')
const UserEmailsConfirmationHandler = require('./UserEmailsConfirmationHandler')
const { endorseAffiliation } = require('../Institutions/InstitutionsAPI')
const logger = require('logger-sharelatex')
const Errors = require('../Errors/Errors')
const HttpErrors = require('@overleaf/o-error/http')
@@ -49,13 +48,8 @@ function resendConfirmation(req, res, next) {
return next(error)
}
if (!user || user._id.toString() !== userId) {
logger.log(
{ userId, email, foundUserId: user && user._id },
"email doesn't match logged in user"
)
return res.sendStatus(422)
}
logger.log({ userId, email }, 'resending email confirmation token')
UserEmailsConfirmationHandler.sendConfirmationEmail(userId, email, function(
error
) {

View file

@@ -1,16 +1,11 @@
let UserController
const UserGetter = require('./UserGetter')
const logger = require('logger-sharelatex')
const AuthenticationController = require('../Authentication/AuthenticationController')
const { ObjectId } = require('mongojs')
module.exports = UserController = {
getLoggedInUsersPersonalInfo(req, res, next) {
const userId = AuthenticationController.getLoggedInUserId(req)
logger.log(
{ userId },
'reciving request for getting logged in users personal info'
)
if (!userId) {
return next(new Error('User is not logged in'))
}
@@ -49,10 +44,6 @@ module.exports = UserController = {
query,
{ _id: true, first_name: true, last_name: true, email: true },
function(error, user) {
logger.log(
{ userId },
'receiving request for getting users personal info'
)
if (error) {
return next(error)
}

View file

@@ -30,7 +30,6 @@ const UserPagesController = {
activateAccountPage(req, res, next) {
// An 'activation' is actually just a password reset on an account that
// was set with a random password originally.
logger.log({ query: req.query }, 'activiate account page called')
if (req.query.user_id == null || req.query.token == null) {
return ErrorController.notFound(req, res)
}
@@ -46,10 +45,6 @@ const UserPagesController = {
return ErrorController.notFound(req, res)
}
if (user.loginCount > 0) {
logger.log(
{ user },
'user has already logged in so is active, sending them to /login'
)
// Already seen this user, so account must be activate
// This lets users keep clicking the 'activate' link in their email
// as a way to log in which, if I know our users, they will.
@@ -72,10 +67,6 @@ const UserPagesController = {
req.query.redir != null &&
AuthenticationController._getRedirectFromSession(req) == null
) {
logger.log(
{ redir: req.query.redir },
'setting explicit redirect from login page'
)
AuthenticationController.setRedirectInSession(req, req.query.redir)
}
res.render('user/login', {
@@ -137,7 +128,6 @@ const UserPagesController = {
'requestedEmail'
])
delete req.session.saml
logger.log({ user: userId }, 'loading settings page')
let shouldAllowEditingDetails = true
if (Settings.ldap && Settings.ldap.updateUserDetailsOnLogin) {
shouldAllowEditingDetails = false

View file

@@ -89,7 +89,6 @@ const UserRegistrationHandler = {
} // this can be slow, just fire it off
],
err => {
logger.log({ user }, 'registered')
Analytics.recordEvent(user._id, 'user-registered')
callback(err, user)
}
@@ -99,7 +98,6 @@ const UserRegistrationHandler = {
},
registerNewUserAndSendActivationEmail(email, callback) {
logger.log({ email }, 'registering new user')
UserRegistrationHandler.registerNewUser(
{
email,

View file

@@ -14,14 +14,11 @@ module.exports = UserSessionsManager = {
trackSession(user, sessionId, callback) {
if (!user) {
logger.log({ sessionId }, 'no user to track, returning')
return callback(null)
}
if (!sessionId) {
logger.log({ user_id: user._id }, 'no sessionId to track, returning')
return callback(null)
}
logger.log({ user_id: user._id, sessionId }, 'onLogin handler')
const sessionSetKey = UserSessionsRedis.sessionSetKey(user)
const value = UserSessionsManager._sessionKey(sessionId)
rclient
@@ -46,14 +43,11 @@ module.exports = UserSessionsManager = {
callback = function() {}
}
if (!user) {
logger.log({ sessionId }, 'no user to untrack, returning')
return callback(null)
}
if (!sessionId) {
logger.log({ user_id: user._id }, 'no sessionId to untrack, returning')
return callback(null)
}
logger.log({ user_id: user._id, sessionId }, 'onLogout handler')
const sessionSetKey = UserSessionsRedis.sessionSetKey(user)
const value = UserSessionsManager._sessionKey(sessionId)
rclient
@@ -130,10 +124,8 @@ module.exports = UserSessionsManager = {
}
retain = retain.map(i => UserSessionsManager._sessionKey(i))
if (!user) {
logger.log({}, 'no user to revoke sessions for, returning')
return callback(null)
}
logger.log({ user_id: user._id }, 'revoking all existing sessions for user')
const sessionSetKey = UserSessionsRedis.sessionSetKey(user)
rclient.smembers(sessionSetKey, function(err, sessionKeys) {
if (err) {
@@ -185,7 +177,6 @@ module.exports = UserSessionsManager = {
touch(user, callback) {
if (!user) {
logger.log({}, 'no user to touch sessions for, returning')
return callback(null)
}
const sessionSetKey = UserSessionsRedis.sessionSetKey(user)
@@ -207,10 +198,8 @@ module.exports = UserSessionsManager = {
_checkSessions(user, callback) {
if (!user) {
logger.log({}, 'no user, returning')
return callback(null)
}
logger.log({ user_id: user._id }, 'checking sessions for user')
const sessionSetKey = UserSessionsRedis.sessionSetKey(user)
rclient.smembers(sessionSetKey, function(err, sessionKeys) {
if (err) {
@@ -220,10 +209,6 @@ module.exports = UserSessionsManager = {
)
return callback(err)
}
logger.log(
{ user_id: user._id, count: sessionKeys.length },
'checking sessions for user'
)
Async.series(
sessionKeys.map(key => next =>
rclient.get(key, function(err, val) {
@@ -231,10 +216,6 @@ module.exports = UserSessionsManager = {
return next(err)
}
if (!val) {
logger.log(
{ user_id: user._id, key },
'>> removing key from UserSessions set'
)
rclient.srem(sessionSetKey, key, function(err, result) {
return next(err)
})
@@ -244,7 +225,6 @@ module.exports = UserSessionsManager = {
})
),
function(err, results) {
logger.log({ user_id: user._id }, 'done checking sessions for user')
callback(err)
}
)

View file

@@ -43,7 +43,6 @@ const UserUpdater = {
if (newEmail == null) {
return callback(new Error('invalid email'))
}
logger.log({ userId, newEmail }, 'updaing email address of user')
let oldEmail = null
async.series(
@@ -218,7 +217,6 @@ const UserUpdater = {
if (error != null) {
return callback(error)
}
logger.log({ res, userId, email }, 'tried to confirm email')
if (res.n === 0) {
return callback(
new Errors.NotFoundError('user id and email do no match')

View file

@@ -14,7 +14,6 @@ const AuthenticationController = require('../Authentication/AuthenticationContro
const UserMembershipHandler = require('./UserMembershipHandler')
const Errors = require('../Errors/Errors')
const EmailHelper = require('../Helpers/EmailHelper')
const logger = require('logger-sharelatex')
module.exports = {
index(req, res, next) {
@@ -134,7 +133,7 @@ module.exports = {
exportCsv(req, res, next) {
const { entity, entityConfig } = req
logger.log({ subscriptionId: entity._id }, 'exporting csv')
return UserMembershipHandler.getUsers(entity, entityConfig, function(
error,
users

View file

@@ -67,7 +67,7 @@ module.exports = V1Handler = {
if (callback == null) {
callback = function(err, created) {}
}
logger.log({ v1_user_id }, 'sending password reset request to v1 login api')
return V1Api.request(
{
method: 'POST',

View file

@@ -143,15 +143,6 @@ describe('CompileManager', function() {
it('should time the compile', function() {
return this.Metrics.Timer.prototype.done.called.should.equal(true)
})
it('should log out the compile', function() {
return this.logger.log
.calledWith(
{ project_id: this.project_id, user_id: this.user_id },
'compiling project'
)
.should.equal(true)
})
})
describe('when the project has been recently compiled', function() {

View file

@@ -106,26 +106,6 @@ describe('DocstoreManager', function() {
)
.should.equal(true)
})
it('should log the error', function() {
return this.logger.warn
.calledWith(
{
err: sinon.match
.instanceOf(Error)
.and(
sinon.match.has(
'message',
'docstore api responded with non-success code: 500'
)
),
project_id: this.project_id,
doc_id: this.doc_id
},
'error deleting doc in docstore'
)
.should.equal(true)
})
})
describe('with a missing (404) response code', function() {
@@ -154,26 +134,6 @@ describe('DocstoreManager', function() {
)
.should.equal(true)
})
it('should log the error', function() {
return this.logger.warn
.calledWith(
{
err: sinon.match
.instanceOf(Errors.NotFoundError)
.and(
sinon.match.has(
'message',
'tried to delete doc not in docstore'
)
),
project_id: this.project_id,
doc_id: this.doc_id
},
'tried to delete doc not in docstore'
)
.should.equal(true)
})
})
})
@@ -258,26 +218,6 @@ describe('DocstoreManager', function() {
)
.should.equal(true)
})
it('should log the error', function() {
return this.logger.warn
.calledWith(
{
err: sinon.match
.instanceOf(Error)
.and(
sinon.match.has(
'message',
'docstore api responded with non-success code: 500'
)
),
project_id: this.project_id,
doc_id: this.doc_id
},
'error updating doc in docstore'
)
.should.equal(true)
})
})
})
@@ -348,26 +288,6 @@ describe('DocstoreManager', function() {
)
.should.equal(true)
})
it('should log the error', function() {
return this.logger.warn
.calledWith(
{
err: sinon.match
.instanceOf(Error)
.and(
sinon.match.has(
'message',
'docstore api responded with non-success code: 500'
)
),
project_id: this.project_id,
doc_id: this.doc_id
},
'error getting doc from docstore'
)
.should.equal(true)
})
})
describe('with include_deleted=true', function() {
@@ -423,21 +343,6 @@ describe('DocstoreManager', function() {
)
.should.equal(true)
})
it('should log the error', function() {
return this.logger.warn
.calledWith(
{
err: sinon.match
.instanceOf(Errors.NotFoundError)
.and(sinon.match.has('message', 'doc not found in docstore')),
project_id: this.project_id,
doc_id: this.doc_id
},
'doc not found in docstore'
)
.should.equal(true)
})
})
})
@@ -494,25 +399,6 @@ describe('DocstoreManager', function() {
)
.should.equal(true)
})
it('should log the error', function() {
return this.logger.warn
.calledWith(
{
err: sinon.match
.instanceOf(Error)
.and(
sinon.match.has(
'message',
'docstore api responded with non-success code: 500'
)
),
project_id: this.project_id
},
'error getting all docs from docstore'
)
.should.equal(true)
})
})
})
@@ -569,25 +455,6 @@ describe('DocstoreManager', function() {
)
.should.equal(true)
})
it('should log the error', function() {
return this.logger.warn
.calledWith(
{
err: sinon.match
.instanceOf(Error)
.and(
sinon.match.has(
'message',
'docstore api responded with non-success code: 500'
)
),
project_id: this.project_id
},
'error getting all doc ranges from docstore'
)
.should.equal(true)
})
})
})

View file

@@ -106,12 +106,6 @@ describe('ProjectDownloadsController', function() {
it('should record the action via Metrics', function() {
return this.metrics.inc.calledWith('zip-downloads').should.equal(true)
})
it('should log the action', function() {
return this.logger.log
.calledWith(sinon.match.any, 'downloading project')
.should.equal(true)
})
})
describe('downloadMultipleProjects', function() {
@@ -170,11 +164,5 @@ describe('ProjectDownloadsController', function() {
.calledWith('zip-downloads-multiple')
.should.equal(true)
})
it('should log the action', function() {
return this.logger.log
.calledWith(sinon.match.any, 'downloading multiple projects')
.should.equal(true)
})
})
})

View file

@@ -449,19 +449,6 @@ describe('ProjectEntityUpdateHandler', function() {
)
})
it('should log out the error', function() {
return this.logger.warn
.calledWith(
{
projectId: project_id,
docId: doc_id,
lines: this.docLines
},
'doc not found while updating doc lines'
)
.should.equal(true)
})
it('should return a not found error', function() {
return this.callback
.calledWith(sinon.match.instanceOf(Errors.NotFoundError))

View file

@@ -101,12 +101,6 @@ describe('ArchiveManager', function() {
it('should time the unzip', function() {
return this.metrics.Timer.prototype.done.called.should.equal(true)
})
it('should log the unzip', function() {
return this.logger.log
.calledWith(sinon.match.any, 'unzipping file')
.should.equal(true)
})
})
describe('with a zipfile containing an empty directory', function() {
@@ -184,10 +178,6 @@ describe('ArchiveManager', function() {
sinon.match.instanceOf(ArchiveErrors.InvalidZipFileError)
)
})
it('should log out the error', function() {
return this.logger.warn.called.should.equal(true)
})
})
describe('with a zip that is too large', function() {
@@ -237,10 +227,6 @@ describe('ArchiveManager', function() {
.and(sinon.match.has('message', 'Something went wrong'))
)
})
it('should log out the error', function() {
return this.logger.warn.called.should.equal(true)
})
})
describe('with a relative extracted file path', function() {
@@ -261,10 +247,6 @@ describe('ArchiveManager', function() {
it('should not write try to read the file entry', function() {
return this.zipfile.openReadStream.called.should.equal(false)
})
it('should log out a warning', function() {
return this.logger.warn.called.should.equal(true)
})
})
describe('with an unnormalized extracted file path', function() {
@@ -285,10 +267,6 @@ describe('ArchiveManager', function() {
it('should not try to read the file entry', function() {
return this.zipfile.openReadStream.called.should.equal(false)
})
it('should log out a warning', function() {
return this.logger.warn.called.should.equal(true)
})
})
describe('with backslashes in the path', function() {
@@ -386,10 +364,6 @@ describe('ArchiveManager', function() {
)
})
it('should log out the error', function() {
return this.logger.warn.called.should.equal(true)
})
it('should close the zipfile', function() {
return this.zipfile.close.called.should.equal(true)
})
@@ -426,10 +400,6 @@ describe('ArchiveManager', function() {
)
})
it('should log out the error', function() {
return this.logger.warn.called.should.equal(true)
})
it('should close the zipfile', function() {
return this.zipfile.close.called.should.equal(true)
})
@@ -468,10 +438,6 @@ describe('ArchiveManager', function() {
)
})
it('should log out the error', function() {
return this.logger.warn.called.should.equal(true)
})
it('should unpipe from the readstream', function() {
return this.readStream.unpipe.called.should.equal(true)
})

View file

@@ -119,12 +119,6 @@ describe('ProjectUploadController', function() {
return this.metrics.Timer.prototype.done.called.should.equal(true)
})
it('should output a log line', function() {
return this.logger.log
.calledWith(sinon.match.any, 'uploaded project')
.should.equal(true)
})
it('should remove the uploaded file', function() {
return this.fs.unlink.calledWith(this.path).should.equal(true)
})
@@ -143,12 +137,6 @@ describe('ProjectUploadController', function() {
JSON.stringify({ success: false, error: 'upload_failed' })
)
})
it('should output an error log line', function() {
return this.logger.error
.calledWith(sinon.match.any, 'error uploading project')
.should.equal(true)
})
})
describe('when ProjectUploadManager.createProjectFromZipArchive reports the file as invalid', function() {
@@ -173,12 +161,6 @@ describe('ProjectUploadController', function() {
it("should return an 'unprocessable entity' status code", function() {
return expect(this.res.statusCode).to.equal(422)
})
it('should output an error log line', function() {
return this.logger.error
.calledWith(sinon.match.any, 'error uploading project')
.should.equal(true)
})
})
})
@@ -234,12 +216,6 @@ describe('ProjectUploadController', function() {
})
})
it('should output a log line', function() {
return this.logger.log
.calledWith(sinon.match.any, 'uploaded file')
.should.equal(true)
})
it('should time the request', function() {
return this.metrics.Timer.prototype.done.called.should.equal(true)
})
@@ -262,12 +238,6 @@ describe('ProjectUploadController', function() {
success: false
})
})
it('should output an error log line', function() {
return this.logger.error
.calledWith(sinon.match.any, 'error uploading file')
.should.equal(true)
})
})
describe('with a bad request', function() {

View file

@@ -227,12 +227,6 @@ describe('UserDeleter', function() {
.be.rejected
this.UserMock.verify()
})
it('should log a warning', async function() {
await expect(this.UserDeleter.promises.deleteUser(this.userId)).to
.be.rejected
sinon.assert.called(this.logger.warn)
})
})
describe('when called as a callback', function() {