Delete old sources

To reduce confusion, we should not have the old code in this branch

Signed-off-by: David Mehren <git@herrmehren.de>
Author: David Mehren, 2021-01-05 20:26:03 +01:00
Parent: 0ffe7d155c
Commit: a5ad35f94d
GPG key ID: 185982BA4C42B7C3
100 changed files with 0 additions and 8904 deletions

@@ -1,6 +0,0 @@
old_src/lib/ot
old_src/lib/migrations
public/vendor
public/build
node_modules
build

@@ -1,27 +0,0 @@
language: node_js
dist: xenial
cache: yarn
stages:
- Static Tests
- test
node_js:
- 10
- 12
- 14
env:
- TEST_SUITE=test
- TEST_SUITE=test:e2e
script: "yarn run $TEST_SUITE"
jobs:
include:
- stage: Static Tests
name: eslint
script:
- yarn run lint
- name: ShellCheck
script:
- shellcheck bin/heroku bin/setup
language: generic

app.json
@@ -1,135 +0,0 @@
{
"name": "CodiMD",
"description": "Realtime collaborative markdown notes on all platforms",
"keywords": [
"Collaborative",
"Markdown",
"Notes"
],
"website": "https://codimd.org",
"repository": "https://github.com/codimd/server",
"logo": "https://github.com/codimd/server/raw/master/public/codimd-icon-1024.png",
"success_url": "/",
"env": {
"NPM_CONFIG_PRODUCTION": {
"description": "Let npm also install development build tool",
"value": "false"
},
"DB_TYPE": {
"description": "Specify database type. See sequelize available databases. Default using postgres",
"value": "postgres"
},
"CMD_SESSION_SECRET": {
"description": "Secret used to secure session cookies.",
"required": false
},
"CMD_HSTS_ENABLE": {
"description": "whether to also use HSTS if HTTPS is enabled",
"required": false
},
"CMD_HSTS_MAX_AGE": {
"description": "max duration, in seconds, to tell clients to keep HSTS status",
"required": false
},
"CMD_HSTS_INCLUDE_SUBDOMAINS": {
"description": "whether to tell clients to also regard subdomains as HSTS hosts",
"required": false
},
"CMD_HSTS_PRELOAD": {
"description": "whether to allow at all adding of the site to HSTS preloads (e.g. in browsers)",
"required": false
},
"CMD_DOMAIN": {
"description": "domain name",
"required": false
},
"CMD_URL_PATH": {
"description": "sub url path, like `www.example.com/<URL_PATH>`",
"required": false
},
"CMD_ALLOW_ORIGIN": {
"description": "domain name whitelist (use comma to separate)",
"required": false,
"value": "localhost"
},
"CMD_PROTOCOL_USESSL": {
"description": "set to use ssl protocol for resources path (only applied when domain is set)",
"required": false
},
"CMD_URL_ADDPORT": {
"description": "set to add port on callback url (port 80 or 443 won't applied) (only applied when domain is set)",
"required": false
},
"CMD_FACEBOOK_CLIENTID": {
"description": "Facebook API client id",
"required": false
},
"CMD_FACEBOOK_CLIENTSECRET": {
"description": "Facebook API client secret",
"required": false
},
"CMD_TWITTER_CONSUMERKEY": {
"description": "Twitter API consumer key",
"required": false
},
"CMD_TWITTER_CONSUMERSECRET": {
"description": "Twitter API consumer secret",
"required": false
},
"CMD_GITHUB_CLIENTID": {
"description": "GitHub API client id",
"required": false
},
"CMD_GITHUB_CLIENTSECRET": {
"description": "GitHub API client secret",
"required": false
},
"CMD_GITLAB_BASEURL": {
"description": "GitLab authentication endpoint, set to use other endpoint than GitLab.com (optional)",
"required": false
},
"CMD_GITLAB_CLIENTID": {
"description": "GitLab API client id",
"required": false
},
"CMD_GITLAB_CLIENTSECRET": {
"description": "GitLab API client secret",
"required": false
},
"CMD_GITLAB_SCOPE": {
"description": "GitLab API client scope (optional)",
"required": false
},
"CMD_DROPBOX_CLIENTID": {
"description": "Dropbox API client id",
"required": false
},
"CMD_DROPBOX_CLIENTSECRET": {
"description": "Dropbox API client secret",
"required": false
},
"CMD_DROPBOX_APP_KEY": {
"description": "Dropbox app key (for import/export)",
"required": false
},
"CMD_GOOGLE_CLIENTID": {
"description": "Google API client id",
"required": false
},
"CMD_GOOGLE_CLIENTSECRET": {
"description": "Google API client secret",
"required": false
},
"CMD_GOOGLE_HOSTEDDOMAIN": {
"description": "Google API hosted domain (Provided only if the user belongs to a hosted domain)",
"required": false
},
"CMD_IMGUR_CLIENTID": {
"description": "Imgur API client id",
"required": false
}
},
"addons": [
"heroku-postgresql"
]
}

@@ -1,126 +0,0 @@
{
"test": {
"db": {
"dialect": "sqlite",
"storage": ":memory:"
},
"linkifyHeaderStyle": "gfm"
},
"development": {
"loglevel": "debug",
"hsts": {
"enable": false
},
"db": {
"dialect": "sqlite",
"storage": "./db.codimd.sqlite"
},
"linkifyHeaderStyle": "gfm"
},
"production": {
"domain": "localhost",
"loglevel": "info",
"hsts": {
"enable": true,
"maxAgeSeconds": 31536000,
"includeSubdomains": true,
"preload": true
},
"csp": {
"enable": true,
"directives": {
},
"upgradeInsecureRequests": "auto",
"addDefaults": true,
"addDisqus": true,
"addGoogleAnalytics": true
},
"db": {
"username": "",
"password": "",
"database": "codimd",
"host": "localhost",
"port": "5432",
"dialect": "postgres"
},
"facebook": {
"clientID": "change this",
"clientSecret": "change this"
},
"twitter": {
"consumerKey": "change this",
"consumerSecret": "change this"
},
"github": {
"clientID": "change this",
"clientSecret": "change this"
},
"gitlab": {
"baseURL": "change this",
"clientID": "change this",
"clientSecret": "change this",
"scope": "use 'read_user' scope for auth user only or remove this property if you need gitlab snippet import/export support (will result to be default scope 'api')",
"version": "use 'v4' if gitlab version > 11, 'v3' otherwise. Default to 'v4'"
},
"dropbox": {
"clientID": "change this",
"clientSecret": "change this",
"appKey": "change this"
},
"google": {
"clientID": "change this",
"clientSecret": "change this",
"apiKey": "change this"
},
"ldap": {
"url": "ldap://change_this",
"bindDn": null,
"bindCredentials": null,
"searchBase": "change this",
"searchFilter": "change this",
"searchAttributes": ["change this"],
"usernameField": "change this e.g. cn",
"useridField": "change this e.g. uid",
"tlsOptions": {
"changeme": "See https://nodejs.org/api/tls.html#tls_tls_connect_options_callback"
}
},
"saml": {
"idpSsoUrl": "change: authentication endpoint of IdP",
"idpCert": "change: certificate file path of IdP in PEM format",
"issuer": "change or delete: identity of the service provider (default: config.serverURL)",
"identifierFormat": "change or delete: name identifier format (default: 'urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress')",
"disableRequestedAuthnContext": "change or delete: true to allow any authentication method, false restricts to password authentication method (default: false)",
"groupAttribute": "change or delete: attribute name for group list (ex: memberOf)",
"requiredGroups": [ "change or delete: group names that allowed" ],
"externalGroups": [ "change or delete: group names that not allowed" ],
"attribute": {
"id": "change or delete this: attribute map for `id` (default: NameID)",
"username": "change or delete this: attribute map for `username` (default: NameID)",
"email": "change or delete this: attribute map for `email` (default: NameID)"
}
},
"imgur": {
"clientID": "change this"
},
"minio": {
"accessKey": "change this",
"secretKey": "change this",
"endPoint": "change this",
"secure": true,
"port": 9000
},
"s3": {
"accessKeyId": "change this",
"secretAccessKey": "change this",
"region": "change this"
},
"s3bucket": "change this",
"azure":
{
"connectionString": "change this",
"container": "change this"
},
"linkifyHeaderStyle": "gfm"
}
}

@@ -1,312 +0,0 @@
import compression from 'compression'
import flash from 'connect-flash'
// eslint-disable-next-line @typescript-eslint/camelcase
import connect_session_sequelize from 'connect-session-sequelize'
import cookieParser from 'cookie-parser'
import ejs from 'ejs'
import express from 'express'
import session from 'express-session'
import childProcess from 'child_process'
import helmet from 'helmet'
import http from 'http'
import https from 'https'
import i18n from 'i18n'
import fs from 'fs'
import methodOverride from 'method-override'
import morgan from 'morgan'
import passport from 'passport'
import passportSocketIo from 'passport.socketio'
import path from 'path'
import SocketIO from 'socket.io'
import WebSocket from 'ws'
import { config } from './config'
import { addNonceToLocals, computeDirectives } from './csp'
import { errors } from './errors'
import { logger } from './logger'
import { Revision, sequelize, runMigrations } from './models'
import { realtime, State } from './realtime'
import { handleTermSignals } from './utils/functions'
import { AuthRouter, BaseRouter, HistoryRouter, ImageRouter, NoteRouter, StatusRouter, UserRouter } from './web'
import { tooBusy, checkURI, redirectWithoutTrailingSlashes, codiMDVersion } from './web/middleware'
const rootPath = path.join(__dirname, '..')
// session store
const SequelizeStore = connect_session_sequelize(session.Store)
const sessionStore = new SequelizeStore({
db: sequelize
})
// server setup
const app = express()
let server: http.Server
if (config.useSSL) {
const ca: string[] = []
for (const path of config.sslCAPath) {
ca.push(fs.readFileSync(path, 'utf8'))
}
const options = {
key: fs.readFileSync(config.sslKeyPath, 'utf8'),
cert: fs.readFileSync(config.sslCertPath, 'utf8'),
ca: ca,
dhparam: fs.readFileSync(config.dhParamPath, 'utf8'),
requestCert: false,
rejectUnauthorized: false,
heartbeatInterval: config.heartbeatInterval,
heartbeatTimeout: config.heartbeatTimeout
}
server = https.createServer(options, app)
} else {
server = http.createServer(app)
}
// if we manage to provide HTTPS domains, but don't provide TLS ourselves,
// obviously a proxy is involved. In order to make sure Express is aware of
// this, we provide the option to trust proxies here.
if (!config.useSSL && config.protocolUseSSL) {
app.set('trust proxy', 1)
}
// socket io
const io = SocketIO(server, { cookie: false })
io.engine.ws = new WebSocket.Server({
noServer: true,
perMessageDeflate: false
})
// assign socket io to realtime
realtime.io = io
// socket.io secure
io.use(realtime.secure)
// socket.io auth
io.use(passportSocketIo.authorize({
cookieParser: cookieParser,
key: config.sessionName,
secret: config.sessionSecret,
store: sessionStore,
success: realtime.onAuthorizeSuccess,
fail: realtime.onAuthorizeFail
}))
// socket.io connection
io.sockets.on('connection', realtime.connection)
// logger
app.use(morgan('combined', {
stream: {
write: function (message): void {
logger.info(message)
}
}
}))
// use HSTS to tell HTTPS clients to stick to this protocol
if (config.hsts.enable) {
app.use(helmet.hsts({
maxAge: config.hsts.maxAgeSeconds,
includeSubdomains: config.hsts.includeSubdomains,
preload: config.hsts.preload
}))
} else if (config.useSSL) {
logger.info('Consider enabling HSTS for extra security:')
logger.info('https://en.wikipedia.org/wiki/HTTP_Strict_Transport_Security')
}
// Generate a random nonce per request, for CSP with inline scripts
app.use(addNonceToLocals)
// use Content-Security-Policy to limit XSS, dangerous plugins, etc.
// https://helmetjs.github.io/docs/csp/
if (config.csp.enable) {
app.use(helmet.contentSecurityPolicy({
directives: computeDirectives()
}))
} else {
logger.info('Content-Security-Policy is disabled. This may be a security risk.')
}
// Add referrer policy to improve privacy
app.use(
helmet.referrerPolicy({
policy: 'same-origin'
})
)
// methodOverride
app.use(methodOverride('_method'))
// compression
app.use(compression())
app.use(cookieParser())
i18n.configure({
locales: ['en', 'zh-CN', 'zh-TW', 'fr', 'de', 'ja', 'es', 'ca', 'el', 'pt', 'it', 'tr', 'ru', 'nl', 'hr', 'pl', 'uk', 'hi', 'sv', 'eo', 'da', 'ko', 'id', 'sr', 'vi', 'ar', 'cs', 'sk'],
cookie: 'locale',
indent: ' ', // this is the style poeditor.com exports; it creates less churn
directory: path.resolve(rootPath, config.localesPath),
updateFiles: config.updateI18nFiles
})
app.use(i18n.init)
// set generally available variables for all views
app.locals.useCDN = config.useCDN
app.locals.serverURL = config.serverURL
app.locals.sourceURL = config.sourceURL
app.locals.allowAnonymous = config.allowAnonymous
app.locals.allowAnonymousEdits = config.allowAnonymousEdits
app.locals.authProviders = {
facebook: config.isFacebookEnable,
twitter: config.isTwitterEnable,
github: config.isGitHubEnable,
gitlab: config.isGitLabEnable,
dropbox: config.isDropboxEnable,
google: config.isGoogleEnable,
ldap: config.isLDAPEnable,
ldapProviderName: config.ldap.providerName,
saml: config.isSAMLEnable,
oauth2: config.isOAuth2Enable,
oauth2ProviderName: config.oauth2.providerName,
openID: config.isOpenIDEnable,
email: config.isEmailEnable,
allowEmailRegister: config.allowEmailRegister
}
// Export/Import menu items
app.locals.enableDropBoxSave = config.isDropboxEnable
app.locals.enableGitHubGist = config.isGitHubEnable
app.locals.enableGitlabSnippets = config.isGitlabSnippetsEnable
// session
app.use(session({
name: config.sessionName,
secret: config.sessionSecret,
resave: false, // don't save session if unmodified
saveUninitialized: true, // always create session to ensure the origin
rolling: true, // reset maxAge on every response
cookie: {
maxAge: config.sessionLife,
sameSite: 'lax',
secure: config.useSSL || config.protocolUseSSL || false
},
store: sessionStore
}))
// session resumption
const tlsSessionStore = {}
server.on('newSession', function (id, data, cb) {
tlsSessionStore[id.toString('hex')] = data
cb()
})
server.on('resumeSession', function (id, cb) {
cb(null, tlsSessionStore[id.toString('hex')] || null)
})
// middleware which blocks requests when we're too busy
app.use(tooBusy)
app.use(flash())
// passport
app.use(passport.initialize())
app.use(passport.session())
// check uri is valid before going further
app.use(checkURI)
// redirect url without trailing slashes
app.use(redirectWithoutTrailingSlashes)
app.use(codiMDVersion)
// routes without sessions
// static files
app.use('/', express.static(path.resolve(rootPath, config.publicPath), { maxAge: config.staticCacheTime, index: false, redirect: false }))
app.use('/docs', express.static(path.resolve(rootPath, config.docsPath), { maxAge: config.staticCacheTime, redirect: false }))
app.use('/uploads', express.static(path.resolve(rootPath, config.uploadsPath), { maxAge: config.staticCacheTime, redirect: false }))
app.use('/default.md', express.static(path.resolve(rootPath, config.defaultNotePath), { maxAge: config.staticCacheTime }))
// routes need sessions
// template files
app.set('views', config.viewPath)
// set render engine
app.engine('ejs', ejs.renderFile)
// set view engine
app.set('view engine', 'ejs')
app.use(BaseRouter)
app.use(StatusRouter)
app.use(AuthRouter)
app.use(HistoryRouter)
app.use(UserRouter)
app.use(ImageRouter)
app.use(NoteRouter)
// respond with 404 Not Found if no route matches
app.get('*', function (req, res) {
errors.errorNotFound(res)
})
// log uncaught exception
process.on('uncaughtException', function (err) {
logger.error('An uncaught exception has occurred.')
logger.error(err)
logger.error('Process will exit now.')
process.exit(1)
})
// listen
function startListen (): void {
let address
const listenCallback = function (): void {
const schema = config.useSSL ? 'HTTPS' : 'HTTP'
logger.info('%s Server listening at %s', schema, address)
realtime.state = State.Running
}
const unixCallback = function (): void {
const throwErr = function (err): void { if (err) throw err }
if (config.socket.owner !== undefined) {
childProcess.spawn('chown', [config.socket.owner, config.path]).on('error', throwErr)
}
if (config.socket.group !== undefined) {
childProcess.spawn('chgrp', [config.socket.group, config.path]).on('error', throwErr)
}
if (config.socket.mode !== undefined) {
fs.chmod(config.path, config.socket.mode, throwErr)
}
listenCallback()
}
// use unix domain socket if 'path' is specified
if (config.path) {
address = config.path
server.listen(config.path, unixCallback)
} else {
address = config.host + ':' + config.port
server.listen(config.port, config.host, listenCallback)
}
}
// sync db then start listen
sequelize.authenticate().then(async function () {
await runMigrations()
sessionStore.sync()
// check if realtime is ready
if (realtime.isReady()) {
Revision.checkAllNotesRevision(function (err, notes) {
if (err) {
throw new Error(err)
}
if (!notes || notes.length <= 0) {
return startListen()
}
})
} else {
throw new Error('server still not ready after db synced')
}
})
process.on('SIGINT', () => handleTermSignals(io))
process.on('SIGTERM', () => handleTermSignals(io))
process.on('SIGQUIT', () => handleTermSignals(io))

@@ -1,122 +0,0 @@
import os from 'os'
import { Config } from './interfaces'
import { Permission } from './enum'
export const defaultConfig: Config = {
permission: Permission,
domain: '',
urlPath: '',
host: '0.0.0.0',
port: 3000,
socket: {
group: undefined,
owner: undefined,
mode: undefined
},
loglevel: 'info',
urlAddPort: false,
allowOrigin: ['localhost'],
useSSL: false,
hsts: {
enable: true,
maxAgeSeconds: 60 * 60 * 24 * 365,
includeSubdomains: true,
preload: true
},
csp: {
enable: true,
directives: {},
addDefaults: true,
addDisqus: true,
addGoogleAnalytics: true,
upgradeInsecureRequests: 'auto',
reportURI: undefined
},
protocolUseSSL: false,
useCDN: false,
allowAnonymous: true,
allowAnonymousEdits: false,
allowFreeURL: false,
forbiddenNoteIDs: ['robots.txt', 'favicon.ico', 'api', 'build', 'css', 'docs', 'fonts', 'js', 'uploads', 'vendor', 'views'],
defaultPermission: 'editable',
dbURL: '',
db: {},
// ssl path
sslKeyPath: '',
sslCertPath: '',
sslCAPath: [],
dhParamPath: '',
// other path
publicPath: './public',
viewPath: './public/views',
tmpPath: os.tmpdir(),
defaultNotePath: './public/default.md',
docsPath: './public/docs',
uploadsPath: './public/uploads',
localesPath: './locales',
// session
sessionName: 'connect.sid',
sessionSecret: 'secret',
sessionSecretLen: 128,
sessionLife: 14 * 24 * 60 * 60 * 1000, // 14 days
staticCacheTime: 1 * 24 * 60 * 60 * 1000, // 1 day
// socket.io
heartbeatInterval: 5000,
heartbeatTimeout: 10000,
// too busy timeout
tooBusyLag: 70,
// document
documentMaxLength: 100000,
// image upload setting, available options are imgur/s3/filesystem/azure/lutim
imageUploadType: 'filesystem',
lutim: {
url: 'https://framapic.org/'
},
minio: {
accessKey: undefined,
secretKey: undefined,
endPoint: undefined,
secure: true,
port: 9000
},
gitlab: {
baseURL: undefined,
clientID: undefined,
clientSecret: undefined,
scope: undefined,
version: 'v4'
},
saml: {
idpSsoUrl: undefined,
idpCert: undefined,
issuer: undefined,
identifierFormat: 'urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress',
disableRequestedAuthnContext: false,
groupAttribute: undefined,
externalGroups: [],
requiredGroups: [],
attribute: {
id: undefined,
username: undefined,
email: undefined
}
},
email: true,
allowEmailRegister: true,
allowGravatar: true,
openID: false,
// linkifyHeaderStyle - How is a header text converted into a link id.
// Header Example: "3.1. Good Morning my Friend! - Do you have 5$?"
// * 'keep-case' is the legacy CodiMD value.
// Generated id: "31-Good-Morning-my-Friend---Do-you-have-5"
// * 'lower-case' is the same as legacy (see above), but converted to lower-case.
// Generated id: "#31-good-morning-my-friend---do-you-have-5"
// * 'gfm' _GitHub-Flavored Markdown_ style as described here:
// https://gist.github.com/asabaylus/3071099#gistcomment-1593627
// It works like 'lower-case', but making sure the ID is unique.
// This is what GitHub, GitLab and (hopefully) most other tools use.
// Generated id: "31-good-morning-my-friend---do-you-have-5"
// 2nd appearance: "31-good-morning-my-friend---do-you-have-5-1"
// 3rd appearance: "31-good-morning-my-friend---do-you-have-5-2"
linkifyHeaderStyle: 'keep-case'
}

@@ -1,15 +0,0 @@
import fs from 'fs'
function getFile (path): string {
if (fs.existsSync(path)) {
return path
}
return ''
}
export const defaultSSL = {
sslKeyPath: getFile('/run/secrets/key.pem'),
sslCertPath: getFile('/run/secrets/cert.pem'),
sslCAPath: getFile('/run/secrets/ca.pem') !== undefined ? [getFile('/run/secrets/ca.pem')] : [],
dhParamPath: getFile('/run/secrets/dhparam.pem')
}

@@ -1,57 +0,0 @@
import fs from 'fs'
import path from 'path'
const basePath = path.resolve('/run/secrets/')
function getSecret (secret): string | undefined {
const filePath = path.join(basePath, secret)
if (fs.existsSync(filePath)) return fs.readFileSync(filePath, 'utf-8')
return undefined
}
export let dockerSecret: { s3: { accessKeyId: string | undefined; secretAccessKey: string | undefined }; github: { clientID: string | undefined; clientSecret: string | undefined }; facebook: { clientID: string | undefined; clientSecret: string | undefined }; google: { clientID: string | undefined; hostedDomain: string | undefined; clientSecret: string | undefined }; sessionSecret: string | undefined; sslKeyPath: string | undefined; twitter: { consumerSecret: string | undefined; consumerKey: string | undefined }; dropbox: { clientID: string | undefined; clientSecret: string | undefined; appKey: string | undefined }; gitlab: { clientID: string | undefined; clientSecret: string | undefined }; imgur: string | undefined; sslCertPath: string | undefined; sslCAPath: (string | undefined)[]; dhParamPath: string | undefined; dbURL: string | undefined; azure: { connectionString: string | undefined } }
if (fs.existsSync(basePath)) {
dockerSecret = {
dbURL: getSecret('dbURL'),
sessionSecret: getSecret('sessionsecret'),
sslKeyPath: getSecret('sslkeypath'),
sslCertPath: getSecret('sslcertpath'),
sslCAPath: [getSecret('sslcapath')],
dhParamPath: getSecret('dhparampath'),
s3: {
accessKeyId: getSecret('s3_acccessKeyId'),
secretAccessKey: getSecret('s3_secretAccessKey')
},
azure: {
connectionString: getSecret('azure_connectionString')
},
facebook: {
clientID: getSecret('facebook_clientID'),
clientSecret: getSecret('facebook_clientSecret')
},
twitter: {
consumerKey: getSecret('twitter_consumerKey'),
consumerSecret: getSecret('twitter_consumerSecret')
},
github: {
clientID: getSecret('github_clientID'),
clientSecret: getSecret('github_clientSecret')
},
gitlab: {
clientID: getSecret('gitlab_clientID'),
clientSecret: getSecret('gitlab_clientSecret')
},
dropbox: {
clientID: getSecret('dropbox_clientID'),
clientSecret: getSecret('dropbox_clientSecret'),
appKey: getSecret('dropbox_appKey')
},
google: {
clientID: getSecret('google_clientID'),
clientSecret: getSecret('google_clientSecret'),
hostedDomain: getSecret('google_hostedDomain')
},
imgur: getSecret('imgur_clientid')
}
}

@@ -1,29 +0,0 @@
export interface Environment {
development: string;
production: string;
test: string;
}
export const Environment: Environment = {
development: 'development',
production: 'production',
test: 'test'
}
export interface Permission {
freely: string;
editable: string;
limited: string;
locked: string;
protected: string;
private: string;
}
export const Permission: Permission = {
freely: 'freely',
editable: 'editable',
limited: 'limited',
locked: 'locked',
protected: 'protected',
private: 'private'
}

@@ -1,138 +0,0 @@
import { toArrayConfig, toBooleanConfig, toIntegerConfig } from './utils'
export const environment = {
sourceURL: process.env.CMD_SOURCE_URL,
domain: process.env.CMD_DOMAIN,
urlPath: process.env.CMD_URL_PATH,
host: process.env.CMD_HOST,
port: toIntegerConfig(process.env.CMD_PORT),
path: process.env.CMD_PATH,
socket: {
group: process.env.CMD_SOCKET_GROUP,
owner: process.env.CMD_SOCKET_OWNER,
mode: process.env.CMD_SOCKET_MODE
},
loglevel: process.env.CMD_LOGLEVEL,
urlAddPort: toBooleanConfig(process.env.CMD_URL_ADDPORT),
useSSL: toBooleanConfig(process.env.CMD_USESSL),
hsts: {
enable: toBooleanConfig(process.env.CMD_HSTS_ENABLE),
maxAgeSeconds: toIntegerConfig(process.env.CMD_HSTS_MAX_AGE),
includeSubdomains: toBooleanConfig(process.env.CMD_HSTS_INCLUDE_SUBDOMAINS),
preload: toBooleanConfig(process.env.CMD_HSTS_PRELOAD)
},
csp: {
enable: toBooleanConfig(process.env.CMD_CSP_ENABLE),
reportURI: process.env.CMD_CSP_REPORTURI
},
protocolUseSSL: toBooleanConfig(process.env.CMD_PROTOCOL_USESSL),
allowOrigin: toArrayConfig(process.env.CMD_ALLOW_ORIGIN),
useCDN: toBooleanConfig(process.env.CMD_USECDN),
allowAnonymous: toBooleanConfig(process.env.CMD_ALLOW_ANONYMOUS),
allowAnonymousEdits: toBooleanConfig(process.env.CMD_ALLOW_ANONYMOUS_EDITS),
allowFreeURL: toBooleanConfig(process.env.CMD_ALLOW_FREEURL),
forbiddenNoteIDs: toArrayConfig(process.env.CMD_FORBIDDEN_NOTE_IDS),
defaultPermission: process.env.CMD_DEFAULT_PERMISSION,
dbURL: process.env.CMD_DB_URL,
sessionSecret: process.env.CMD_SESSION_SECRET,
sessionLife: toIntegerConfig(process.env.CMD_SESSION_LIFE),
tooBusyLag: toIntegerConfig(process.env.CMD_TOOBUSY_LAG),
imageUploadType: process.env.CMD_IMAGE_UPLOAD_TYPE,
imgur: {
clientID: process.env.CMD_IMGUR_CLIENTID
},
s3: {
accessKeyId: process.env.CMD_S3_ACCESS_KEY_ID,
secretAccessKey: process.env.CMD_S3_SECRET_ACCESS_KEY,
region: process.env.CMD_S3_REGION,
endpoint: process.env.CMD_S3_ENDPOINT
},
minio: {
accessKey: process.env.CMD_MINIO_ACCESS_KEY,
secretKey: process.env.CMD_MINIO_SECRET_KEY,
endPoint: process.env.CMD_MINIO_ENDPOINT,
secure: toBooleanConfig(process.env.CMD_MINIO_SECURE),
port: toIntegerConfig(process.env.CMD_MINIO_PORT)
},
lutim: {
url: process.env.CMD_LUTIM_URL
},
s3bucket: process.env.CMD_S3_BUCKET,
azure: {
connectionString: process.env.CMD_AZURE_CONNECTION_STRING,
container: process.env.CMD_AZURE_CONTAINER
},
facebook: {
clientID: process.env.CMD_FACEBOOK_CLIENTID,
clientSecret: process.env.CMD_FACEBOOK_CLIENTSECRET
},
twitter: {
consumerKey: process.env.CMD_TWITTER_CONSUMERKEY,
consumerSecret: process.env.CMD_TWITTER_CONSUMERSECRET
},
github: {
clientID: process.env.CMD_GITHUB_CLIENTID,
clientSecret: process.env.CMD_GITHUB_CLIENTSECRET
},
gitlab: {
baseURL: process.env.CMD_GITLAB_BASEURL,
clientID: process.env.CMD_GITLAB_CLIENTID,
clientSecret: process.env.CMD_GITLAB_CLIENTSECRET,
scope: process.env.CMD_GITLAB_SCOPE
},
oauth2: {
providerName: process.env.CMD_OAUTH2_PROVIDERNAME,
baseURL: process.env.CMD_OAUTH2_BASEURL,
userProfileURL: process.env.CMD_OAUTH2_USER_PROFILE_URL,
userProfileUsernameAttr: process.env.CMD_OAUTH2_USER_PROFILE_USERNAME_ATTR,
userProfileDisplayNameAttr: process.env.CMD_OAUTH2_USER_PROFILE_DISPLAY_NAME_ATTR,
userProfileEmailAttr: process.env.CMD_OAUTH2_USER_PROFILE_EMAIL_ATTR,
tokenURL: process.env.CMD_OAUTH2_TOKEN_URL,
authorizationURL: process.env.CMD_OAUTH2_AUTHORIZATION_URL,
clientID: process.env.CMD_OAUTH2_CLIENT_ID,
clientSecret: process.env.CMD_OAUTH2_CLIENT_SECRET,
scope: process.env.CMD_OAUTH2_SCOPE
},
dropbox: {
clientID: process.env.CMD_DROPBOX_CLIENTID,
clientSecret: process.env.CMD_DROPBOX_CLIENTSECRET,
appKey: process.env.CMD_DROPBOX_APPKEY
},
google: {
clientID: process.env.CMD_GOOGLE_CLIENTID,
clientSecret: process.env.CMD_GOOGLE_CLIENTSECRET,
hostedDomain: process.env.CMD_GOOGLE_HOSTEDDOMAIN
},
ldap: {
providerName: process.env.CMD_LDAP_PROVIDERNAME,
url: process.env.CMD_LDAP_URL,
bindDn: process.env.CMD_LDAP_BINDDN,
bindCredentials: process.env.CMD_LDAP_BINDCREDENTIALS,
searchBase: process.env.CMD_LDAP_SEARCHBASE,
searchFilter: process.env.CMD_LDAP_SEARCHFILTER,
searchAttributes: toArrayConfig(process.env.CMD_LDAP_SEARCHATTRIBUTES),
usernameField: process.env.CMD_LDAP_USERNAMEFIELD,
useridField: process.env.CMD_LDAP_USERIDFIELD,
tlsca: process.env.CMD_LDAP_TLS_CA
},
saml: {
idpSsoUrl: process.env.CMD_SAML_IDPSSOURL,
idpCert: process.env.CMD_SAML_IDPCERT,
issuer: process.env.CMD_SAML_ISSUER,
identifierFormat: process.env.CMD_SAML_IDENTIFIERFORMAT,
disableRequestedAuthnContext: toBooleanConfig(process.env.CMD_SAML_DISABLEREQUESTEDAUTHNCONTEXT),
groupAttribute: process.env.CMD_SAML_GROUPATTRIBUTE,
externalGroups: toArrayConfig(process.env.CMD_SAML_EXTERNALGROUPS, '|', []),
requiredGroups: toArrayConfig(process.env.CMD_SAML_REQUIREDGROUPS, '|', []),
attribute: {
id: process.env.CMD_SAML_ATTRIBUTE_ID,
username: process.env.CMD_SAML_ATTRIBUTE_USERNAME,
email: process.env.CMD_SAML_ATTRIBUTE_EMAIL
}
},
email: toBooleanConfig(process.env.CMD_EMAIL),
allowEmailRegister: toBooleanConfig(process.env.CMD_ALLOW_EMAIL_REGISTER),
allowGravatar: toBooleanConfig(process.env.CMD_ALLOW_GRAVATAR),
openID: toBooleanConfig(process.env.CMD_OPENID),
linkifyHeaderStyle: process.env.CMD_LINKIFY_HEADER_STYLE
}

@@ -1,118 +0,0 @@
import { toArrayConfig, toBooleanConfig, toIntegerConfig } from './utils'
export const hackmdEnvironment = {
domain: process.env.HMD_DOMAIN,
urlPath: process.env.HMD_URL_PATH,
port: toIntegerConfig(process.env.HMD_PORT),
urlAddPort: toBooleanConfig(process.env.HMD_URL_ADDPORT),
useSSL: toBooleanConfig(process.env.HMD_USESSL),
hsts: {
enable: toBooleanConfig(process.env.HMD_HSTS_ENABLE),
maxAgeSeconds: toIntegerConfig(process.env.HMD_HSTS_MAX_AGE),
includeSubdomains: toBooleanConfig(process.env.HMD_HSTS_INCLUDE_SUBDOMAINS),
preload: toBooleanConfig(process.env.HMD_HSTS_PRELOAD)
},
csp: {
enable: toBooleanConfig(process.env.HMD_CSP_ENABLE),
reportURI: process.env.HMD_CSP_REPORTURI
},
protocolUseSSL: toBooleanConfig(process.env.HMD_PROTOCOL_USESSL),
allowOrigin: toArrayConfig(process.env.HMD_ALLOW_ORIGIN),
useCDN: toBooleanConfig(process.env.HMD_USECDN),
allowAnonymous: toBooleanConfig(process.env.HMD_ALLOW_ANONYMOUS),
allowAnonymousEdits: toBooleanConfig(process.env.HMD_ALLOW_ANONYMOUS_EDITS),
allowFreeURL: toBooleanConfig(process.env.HMD_ALLOW_FREEURL),
defaultPermission: process.env.HMD_DEFAULT_PERMISSION,
dbURL: process.env.HMD_DB_URL,
sessionSecret: process.env.HMD_SESSION_SECRET,
sessionLife: toIntegerConfig(process.env.HMD_SESSION_LIFE),
imageUploadType: process.env.HMD_IMAGE_UPLOAD_TYPE,
imgur: {
clientID: process.env.HMD_IMGUR_CLIENTID
},
s3: {
accessKeyId: process.env.HMD_S3_ACCESS_KEY_ID,
secretAccessKey: process.env.HMD_S3_SECRET_ACCESS_KEY,
region: process.env.HMD_S3_REGION
},
minio: {
accessKey: process.env.HMD_MINIO_ACCESS_KEY,
secretKey: process.env.HMD_MINIO_SECRET_KEY,
endPoint: process.env.HMD_MINIO_ENDPOINT,
secure: toBooleanConfig(process.env.HMD_MINIO_SECURE),
port: toIntegerConfig(process.env.HMD_MINIO_PORT)
},
s3bucket: process.env.HMD_S3_BUCKET,
azure: {
connectionString: process.env.HMD_AZURE_CONNECTION_STRING,
container: process.env.HMD_AZURE_CONTAINER
},
facebook: {
clientID: process.env.HMD_FACEBOOK_CLIENTID,
clientSecret: process.env.HMD_FACEBOOK_CLIENTSECRET
},
twitter: {
consumerKey: process.env.HMD_TWITTER_CONSUMERKEY,
consumerSecret: process.env.HMD_TWITTER_CONSUMERSECRET
},
github: {
clientID: process.env.HMD_GITHUB_CLIENTID,
clientSecret: process.env.HMD_GITHUB_CLIENTSECRET
},
gitlab: {
baseURL: process.env.HMD_GITLAB_BASEURL,
clientID: process.env.HMD_GITLAB_CLIENTID,
clientSecret: process.env.HMD_GITLAB_CLIENTSECRET,
scope: process.env.HMD_GITLAB_SCOPE
},
oauth2: {
baseURL: process.env.HMD_OAUTH2_BASEURL,
userProfileURL: process.env.HMD_OAUTH2_USER_PROFILE_URL,
userProfileUsernameAttr: process.env.HMD_OAUTH2_USER_PROFILE_USERNAME_ATTR,
userProfileDisplayNameAttr: process.env.HMD_OAUTH2_USER_PROFILE_DISPLAY_NAME_ATTR,
userProfileEmailAttr: process.env.HMD_OAUTH2_USER_PROFILE_EMAIL_ATTR,
tokenURL: process.env.HMD_OAUTH2_TOKEN_URL,
authorizationURL: process.env.HMD_OAUTH2_AUTHORIZATION_URL,
clientID: process.env.HMD_OAUTH2_CLIENT_ID,
clientSecret: process.env.HMD_OAUTH2_CLIENT_SECRET,
scope: process.env.HMD_OAUTH2_SCOPE
},
dropbox: {
clientID: process.env.HMD_DROPBOX_CLIENTID,
clientSecret: process.env.HMD_DROPBOX_CLIENTSECRET,
appKey: process.env.HMD_DROPBOX_APPKEY
},
google: {
clientID: process.env.HMD_GOOGLE_CLIENTID,
clientSecret: process.env.HMD_GOOGLE_CLIENTSECRET
},
ldap: {
providerName: process.env.HMD_LDAP_PROVIDERNAME,
url: process.env.HMD_LDAP_URL,
bindDn: process.env.HMD_LDAP_BINDDN,
bindCredentials: process.env.HMD_LDAP_BINDCREDENTIALS,
searchBase: process.env.HMD_LDAP_SEARCHBASE,
searchFilter: process.env.HMD_LDAP_SEARCHFILTER,
searchAttributes: toArrayConfig(process.env.HMD_LDAP_SEARCHATTRIBUTES),
usernameField: process.env.HMD_LDAP_USERNAMEFIELD,
useridField: process.env.HMD_LDAP_USERIDFIELD,
tlsca: process.env.HMD_LDAP_TLS_CA
},
saml: {
idpSsoUrl: process.env.HMD_SAML_IDPSSOURL,
idpCert: process.env.HMD_SAML_IDPCERT,
issuer: process.env.HMD_SAML_ISSUER,
identifierFormat: process.env.HMD_SAML_IDENTIFIERFORMAT,
disableRequestedAuthnContext: toBooleanConfig(process.env.HMD_SAML_DISABLEREQUESTEDAUTHNCONTEXT),
groupAttribute: process.env.HMD_SAML_GROUPATTRIBUTE,
externalGroups: toArrayConfig(process.env.HMD_SAML_EXTERNALGROUPS, '|', []),
requiredGroups: toArrayConfig(process.env.HMD_SAML_REQUIREDGROUPS, '|', []),
attribute: {
id: process.env.HMD_SAML_ATTRIBUTE_ID,
username: process.env.HMD_SAML_ATTRIBUTE_USERNAME,
email: process.env.HMD_SAML_ATTRIBUTE_EMAIL
}
},
email: toBooleanConfig(process.env.HMD_EMAIL),
allowEmailRegister: toBooleanConfig(process.env.HMD_ALLOW_EMAIL_REGISTER)
}

@@ -1,211 +0,0 @@
import crypto from 'crypto'
import fs from 'fs'
import path from 'path'
import { merge } from 'lodash'
import { Environment, Permission } from './enum'
import { logger } from '../logger'
import { getGitCommit, getGitHubURL } from './utils'
import { defaultConfig } from './default'
import { defaultSSL } from './defaultSSL'
import { oldDefault } from './oldDefault'
import { oldEnvironment } from './oldEnvironment'
import { hackmdEnvironment } from './hackmdEnvironment'
import { environment } from './environment'
import { dockerSecret } from './dockerSecret'
import deepFreeze = require('deep-freeze')
const appRootPath = path.resolve(__dirname, '../../../')
const env = process.env.NODE_ENV || Environment.development
const debugConfig = {
debug: (env === Environment.development)
}
// Get version string from package.json
// TODO: There are other ways to get the current version
// eslint-disable-next-line @typescript-eslint/no-var-requires
const { version, repository } = require(path.join(appRootPath, 'package.json'))
const commitID = getGitCommit(appRootPath)
const sourceURL = getGitHubURL(repository.url, commitID || version)
const fullversion = commitID ? `${version}-${commitID}` : version
const packageConfig = {
version: version,
minimumCompatibleVersion: '0.5.0',
fullversion: fullversion,
sourceURL: sourceURL
}
const configFilePath = path.resolve(appRootPath, process.env.CMD_CONFIG_FILE ||
'config.json')
const fileConfig = fs.existsSync(configFilePath) ? require(configFilePath)[env] : undefined
merge(defaultConfig, defaultSSL)
merge(defaultConfig, oldDefault)
merge(defaultConfig, debugConfig)
merge(defaultConfig, packageConfig)
merge(defaultConfig, fileConfig)
merge(defaultConfig, oldEnvironment)
merge(defaultConfig, hackmdEnvironment)
merge(defaultConfig, environment)
merge(defaultConfig, dockerSecret)
if (['debug', 'verbose', 'info', 'warn', 'error'].includes(defaultConfig.loglevel)) {
logger.level = defaultConfig.loglevel
} else {
logger.error('Selected loglevel %s doesn\'t exist, using default level \'debug\'. Available options: debug, verbose, info, warn, error', defaultConfig.loglevel)
}
// load LDAP CA
if (defaultConfig.ldap?.tlsca) {
const ca = defaultConfig.ldap.tlsca.split(',')
const caContent: string[] = []
for (const i of ca) {
if (fs.existsSync(i)) {
caContent.push(fs.readFileSync(i, 'utf8'))
}
}
const tlsOptions = {
ca: caContent
}
defaultConfig.ldap.tlsOptions = defaultConfig.ldap.tlsOptions ? Object.assign(defaultConfig.ldap.tlsOptions, tlsOptions) : tlsOptions
}
// Permission
defaultConfig.permission = Permission
if (!defaultConfig.allowAnonymous && !defaultConfig.allowAnonymousEdits) {
delete defaultConfig.permission.freely
}
if (!(defaultConfig.defaultPermission in defaultConfig.permission)) {
defaultConfig.defaultPermission = defaultConfig.permission.editable
}
// cache result, cannot change config in runtime!!!
defaultConfig.isStandardHTTPsPort = (function isStandardHTTPsPort (): boolean {
return defaultConfig.useSSL && defaultConfig.port === 443
})()
defaultConfig.isStandardHTTPPort = (function isStandardHTTPPort (): boolean {
return !defaultConfig.useSSL && defaultConfig.port === 80
})()
// cache serverURL
defaultConfig.serverURL = (function getserverurl (): string {
let url = ''
if (defaultConfig.domain) {
const protocol = defaultConfig.protocolUseSSL ? 'https://' : 'http://'
url = protocol + defaultConfig.domain
if (defaultConfig.urlAddPort) {
if (!defaultConfig.isStandardHTTPPort || !defaultConfig.isStandardHTTPsPort) {
url += ':' + defaultConfig.port
}
}
}
if (defaultConfig.urlPath) {
url += '/' + defaultConfig.urlPath
}
return url
})()
if (defaultConfig.serverURL === '') {
logger.warn('Neither \'domain\' nor \'CMD_DOMAIN\' is configured. This can cause issues with various components.\nHint: Make sure \'protocolUseSSL\' and \'urlAddPort\' or \'CMD_PROTOCOL_USESSL\' and \'CMD_URL_ADDPORT\' are configured properly.')
}
defaultConfig.Environment = Environment
// auth method
defaultConfig.isFacebookEnable = defaultConfig.facebook?.clientID && defaultConfig.facebook.clientSecret
defaultConfig.isGoogleEnable = defaultConfig.google?.clientID && defaultConfig.google.clientSecret
defaultConfig.isDropboxEnable = defaultConfig.dropbox?.clientID && defaultConfig.dropbox.clientSecret
defaultConfig.isTwitterEnable = defaultConfig.twitter?.consumerKey && defaultConfig.twitter.consumerSecret
defaultConfig.isEmailEnable = defaultConfig.email
defaultConfig.isOpenIDEnable = defaultConfig.openID
defaultConfig.isGitHubEnable = defaultConfig.github?.clientID && defaultConfig.github.clientSecret
defaultConfig.isGitLabEnable = defaultConfig.gitlab?.clientID && defaultConfig.gitlab.clientSecret
defaultConfig.isLDAPEnable = defaultConfig.ldap?.url
defaultConfig.isSAMLEnable = defaultConfig.saml?.idpSsoUrl
defaultConfig.isOAuth2Enable = defaultConfig.oauth2?.clientID && defaultConfig.oauth2.clientSecret
// Check gitlab api version
if (defaultConfig.gitlab && defaultConfig.gitlab.version !== 'v4' && defaultConfig.gitlab.version !== 'v3') {
logger.warn('config.js contains wrong version (' + defaultConfig.gitlab.version + ') for gitlab api; it should be \'v3\' or \'v4\'. Defaulting to v4')
defaultConfig.gitlab.version = 'v4'
}
// If gitlab scope is api, enable snippets Export/import
defaultConfig.isGitlabSnippetsEnable = (!defaultConfig.gitlab?.scope || defaultConfig.gitlab.scope === 'api') && defaultConfig.isGitLabEnable
// Only update i18n files in development setups
defaultConfig.updateI18nFiles = (env === Environment.development)
// merge legacy values
const keys = Object.keys(defaultConfig)
const uppercase = /[A-Z]/
for (let i = keys.length; i--;) {
const lowercaseKey = keys[i].toLowerCase()
// if the config contains uppercase letters
// and a lowercase version of this setting exists
// and the config with uppercase is not set
// we set the new config using the old key.
if (uppercase.test(keys[i]) &&
defaultConfig[lowercaseKey] !== undefined &&
fileConfig[keys[i]] === undefined) {
logger.warn('config.js contains deprecated lowercase setting for ' + keys[i] + '. Please change your config.js file to replace ' + lowercaseKey + ' with ' + keys[i])
defaultConfig[keys[i]] = defaultConfig[lowercaseKey]
}
}
// Notify users about the prefix change and inform them they use legacy prefix for environment variables
if (Object.keys(process.env).toString().includes('HMD_')) {
logger.warn('Using legacy HMD prefix for environment variables. Please change your variables in future. For details see: https://github.com/codimd/server#environment-variables-will-overwrite-other-server-configs')
}
// Generate session secret if it stays on default values
if (defaultConfig.sessionSecret === 'secret') {
logger.warn('Session secret not set. Using random generated one. Please set `sessionSecret` in your config.js file. All users will be logged out.')
defaultConfig.sessionSecret = crypto.randomBytes(Math.ceil(defaultConfig.sessionSecretLen / 2)) // generate cryptographic random number
.toString('hex') // convert to hexadecimal format
.slice(0, defaultConfig.sessionSecretLen) // return required number of characters
}
// Validate upload providers
if (!['filesystem', 's3', 'minio', 'imgur', 'azure', 'lutim'].includes(defaultConfig.imageUploadType)) {
logger.error('"imageuploadtype" is not correctly set. Please use "filesystem", "s3", "minio", "azure", "lutim" or "imgur". Defaulting to "filesystem"')
defaultConfig.imageUploadType = 'filesystem'
}
// figure out mime types for image uploads
switch (defaultConfig.imageUploadType) {
case 'imgur':
defaultConfig.allowedUploadMimeTypes = [
'image/jpeg',
'image/png',
'image/jpg',
'image/gif'
]
break
default:
defaultConfig.allowedUploadMimeTypes = [
'image/jpeg',
'image/png',
'image/jpg',
'image/gif',
'image/svg+xml'
]
}
// generate correct path
defaultConfig.sslCAPath.forEach(function (capath, i, array) {
array[i] = path.resolve(appRootPath, capath)
})
defaultConfig.sslCertPath = path.resolve(appRootPath, defaultConfig.sslCertPath)
defaultConfig.sslKeyPath = path.resolve(appRootPath, defaultConfig.sslKeyPath)
defaultConfig.dhParamPath = path.resolve(appRootPath, defaultConfig.dhParamPath)
defaultConfig.viewPath = path.resolve(appRootPath, defaultConfig.viewPath)
defaultConfig.tmpPath = path.resolve(appRootPath, defaultConfig.tmpPath)
defaultConfig.publicPath = path.resolve(appRootPath, defaultConfig.publicPath)
defaultConfig.defaultNotePath = path.resolve(appRootPath, defaultConfig.defaultNotePath)
defaultConfig.docsPath = path.resolve(appRootPath, defaultConfig.docsPath)
defaultConfig.uploadsPath = path.resolve(appRootPath, defaultConfig.uploadsPath)
defaultConfig.localesPath = path.resolve(appRootPath, defaultConfig.localesPath)
// make config readonly
export const config = deepFreeze(defaultConfig)

@@ -1,159 +0,0 @@
import { Permission } from './enum'
import { IHelmetContentSecurityPolicyDirectives } from 'helmet'
type CSPDirectives = IHelmetContentSecurityPolicyDirectives
export interface Config {
permission: Permission;
domain: string;
urlPath: string;
host: string;
port: number;
loglevel: string;
urlAddPort: boolean;
allowOrigin: string[];
useSSL: boolean;
hsts: {
enable: boolean;
maxAgeSeconds: number;
includeSubdomains: boolean;
preload: boolean;
};
csp: {
enable: boolean;
directives?: CSPDirectives;
addDefaults: boolean;
addDisqus: boolean;
addGoogleAnalytics: boolean;
upgradeInsecureRequests: string | boolean;
reportURI?: string;
};
protocolUseSSL: boolean;
useCDN: boolean;
allowAnonymous: boolean;
allowAnonymousEdits: boolean;
allowFreeURL: boolean;
forbiddenNoteIDs: string[];
defaultPermission: string;
dbURL: string;
db;
sslKeyPath: string;
sslCertPath: string;
sslCAPath: string[];
dhParamPath: string;
publicPath: string;
viewPath: string;
tmpPath: string;
defaultNotePath: string;
docsPath: string;
uploadsPath: string;
sessionName: string;
sessionSecret: string;
sessionSecretLen: number;
sessionLife: number;
staticCacheTime: number;
heartbeatInterval: number;
heartbeatTimeout: number;
tooBusyLag: number;
documentMaxLength: number;
imageUploadType: 'azure' | 'filesystem' | 'imgur' | 'lutim' | 'minio' | 's3';
lutim?: {
url: string;
};
imgur?: {
clientID: string;
};
s3?: {
accessKeyId: string;
secretAccessKey: string;
region: string;
};
minio?: {
accessKey?: string;
secretKey?: string;
endPoint?: string;
secure?: boolean;
port?: number;
};
s3bucket?: string;
azure?: {
connectionString: string;
container: string;
};
oauth2?: {
providerName: string;
authorizationURL: string;
tokenURL: string;
clientID: string;
clientSecret: string;
};
facebook?: {
clientID: string;
clientSecret: string;
};
twitter?: {
consumerKey: string;
consumerSecret: string;
};
github?: {
clientID: string;
clientSecret: string;
};
gitlab?: {
baseURL?: string;
clientID?: string;
clientSecret?: string;
scope?: string;
version?: string;
};
dropbox?: {
clientID: string;
clientSecret: string;
appKey: string;
};
google?: {
clientID: string;
clientSecret: string;
hostedDomain: string;
};
ldap?: {
providerName: string;
url: string;
bindDn: string;
bindCredentials: string;
searchBase: string;
searchFilter: string;
searchAttributes: string;
usernameField: string;
useridField: string;
tlsca: string;
starttls?: boolean;
tlsOptions: {
ca: string[];
};
};
saml?: {
idpSsoUrl?: string;
idpCert?: string;
issuer?: string;
identifierFormat?: string;
disableRequestedAuthnContext?: boolean;
groupAttribute?: string;
externalGroups?: string[];
requiredGroups?: string[];
attribute?: {
id?: string;
username?: string;
email?: string;
};
};
email: boolean;
allowEmailRegister: boolean;
allowGravatar: boolean;
openID: boolean;
linkifyHeaderStyle: string;
// TODO: Remove escape hatch for dynamically added properties
// eslint-disable-next-line @typescript-eslint/no-explicit-any
[propName: string]: any;
}

@@ -1,39 +0,0 @@
export const oldDefault = {
urlpath: undefined,
urladdport: undefined,
alloworigin: undefined,
usessl: undefined,
protocolusessl: undefined,
usecdn: undefined,
allowanonymous: undefined,
allowanonymousedits: undefined,
allowfreeurl: undefined,
defaultpermission: undefined,
dburl: undefined,
// ssl path
sslkeypath: undefined,
sslcertpath: undefined,
sslcapath: undefined,
dhparampath: undefined,
// other path
tmppath: undefined,
defaultnotepath: undefined,
docspath: undefined,
indexpath: undefined,
hackmdpath: undefined,
errorpath: undefined,
prettypath: undefined,
slidepath: undefined,
// session
sessionname: undefined,
sessionsecret: undefined,
sessionlife: undefined,
staticcachetime: undefined,
// socket.io
heartbeatinterval: undefined,
heartbeattimeout: undefined,
// document
documentmaxlength: undefined,
imageuploadtype: undefined,
allowemailregister: undefined
}

@@ -1,8 +0,0 @@
import { toBooleanConfig } from './utils'
export const oldEnvironment = {
debug: toBooleanConfig(process.env.DEBUG),
dburl: process.env.DATABASE_URL,
urlpath: process.env.URL_PATH,
port: process.env.PORT
}

@@ -1,53 +0,0 @@
import fs from 'fs'
import path from 'path'
export function toBooleanConfig (configValue: string | boolean | undefined): boolean | undefined {
if (typeof configValue === 'string') {
return (configValue === 'true')
}
return configValue
}
export function toArrayConfig (configValue: string | undefined, separator = ',', fallback = []): string[] {
if (configValue) {
return (configValue.split(separator).map(arrayItem => arrayItem.trim()))
}
return fallback
}
export function toIntegerConfig (configValue): number {
if (configValue && typeof configValue === 'string') {
return parseInt(configValue)
}
return configValue
}
export function getGitCommit (repodir): string {
if (!fs.existsSync(repodir + '/.git/HEAD')) {
return ''
}
let reference = fs.readFileSync(repodir + '/.git/HEAD', 'utf8')
if (reference.startsWith('ref: ')) {
reference = reference.substr(5).replace('\n', '')
reference = fs.readFileSync(path.resolve(repodir + '/.git', reference), 'utf8')
}
reference = reference.replace('\n', '')
return reference
}
export function getGitHubURL (repo, reference): string {
// if it's not a GitHub reference, we return it unchanged
if (!repo.startsWith('https://github.com') && !repo.startsWith('git@github.com')) {
return repo
}
if (repo.startsWith('git@github.com') || repo.startsWith('ssh://git@github.com')) {
repo = repo.replace(/^(ssh:\/\/)?git@github.com:/, 'https://github.com/')
}
if (repo.endsWith('.git')) {
repo = repo.replace(/\.git$/, '/')
} else if (!repo.endsWith('/')) {
repo = repo + '/'
}
return repo + 'tree/' + reference
}
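
For context (an added illustration, not part of this commit), a minimal usage sketch of the deleted config helpers above, derived only from the code shown. The relative import path matches the one used by the environment mapping earlier in this diff; the values are examples.

import { toArrayConfig, toBooleanConfig, toIntegerConfig, getGitHubURL } from './utils'

// Results follow directly from the implementations shown above.
toBooleanConfig('true')        // true
toBooleanConfig(undefined)     // undefined, so a later config merge keeps the default
toArrayConfig('a, b ,c')       // ['a', 'b', 'c'] (split on ',' and trimmed)
toIntegerConfig('3000')        // 3000
getGitHubURL('git@github.com:codimd/server.git', 'a5ad35f')
// 'https://github.com/codimd/server/tree/a5ad35f'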

@@ -1,106 +0,0 @@
import { config } from './config'
import { IHelmetContentSecurityPolicyDirectives } from 'helmet'
import uuid from 'uuid'
import { NextFunction, Request, Response } from 'express'
type CSPDirectives = IHelmetContentSecurityPolicyDirectives
const defaultDirectives = {
defaultSrc: ['\'self\''],
scriptSrc: ['\'self\'', 'vimeo.com', 'https://gist.github.com', 'www.slideshare.net', 'https://query.yahooapis.com', '\'unsafe-eval\''],
// ^ TODO: Remove unsafe-eval - webpack script-loader issues https://github.com/hackmdio/codimd/issues/594
imgSrc: ['*'],
styleSrc: ['\'self\'', '\'unsafe-inline\'', 'https://github.githubassets.com'], // unsafe-inline is required for some libs, plus used in views
fontSrc: ['\'self\'', 'data:', 'https://public.slidesharecdn.com'],
objectSrc: ['*'], // Chrome PDF viewer treats PDFs as objects :/
mediaSrc: ['*'],
childSrc: ['*'],
connectSrc: ['*']
}
const cdnDirectives = {
scriptSrc: ['https://cdnjs.cloudflare.com', 'https://cdn.mathjax.org'],
styleSrc: ['https://cdnjs.cloudflare.com', 'https://fonts.googleapis.com'],
fontSrc: ['https://cdnjs.cloudflare.com', 'https://fonts.gstatic.com']
}
const disqusDirectives = {
scriptSrc: ['https://disqus.com', 'https://*.disqus.com', 'https://*.disquscdn.com'],
styleSrc: ['https://*.disquscdn.com'],
fontSrc: ['https://*.disquscdn.com']
}
const googleAnalyticsDirectives = {
scriptSrc: ['https://www.google-analytics.com']
}
function mergeDirectives (existingDirectives: CSPDirectives, newDirectives: CSPDirectives): void {
for (const propertyName in newDirectives) {
const newDirective = newDirectives[propertyName]
if (newDirective) {
const existingDirective = existingDirectives[propertyName] || []
existingDirectives[propertyName] = existingDirective.concat(newDirective)
}
}
}
function mergeDirectivesIf (condition: boolean, existingDirectives: CSPDirectives, newDirectives: CSPDirectives): void {
if (condition) {
mergeDirectives(existingDirectives, newDirectives)
}
}
function areAllInlineScriptsAllowed (directives: CSPDirectives): boolean {
if (directives.scriptSrc) {
return directives.scriptSrc.includes('\'unsafe-inline\'')
}
return false
}
function getCspNonce (req: Request, res: Response): string {
return "'nonce-" + res.locals.nonce + "'"
}
function addInlineScriptExceptions (directives: CSPDirectives): void {
if (!directives.scriptSrc) {
directives.scriptSrc = []
}
directives.scriptSrc.push(getCspNonce)
// TODO: This is the SHA-256 hash of the inline script in build/reveal.js/plugins/notes/notes.html
// Any cleaner solution is appreciated.
directives.scriptSrc.push('\'sha256-81acLZNZISnyGYZrSuoYhpzwDTTxi7vC1YM4uNxqWaM=\'')
}
function addUpgradeUnsafeRequestsOptionTo (directives: CSPDirectives): void {
if (config.csp.upgradeInsecureRequests === 'auto' && config.useSSL) {
directives.upgradeInsecureRequests = true
} else if (config.csp.upgradeInsecureRequests === true) {
directives.upgradeInsecureRequests = true
}
}
function addReportURI (directives): void {
if (config.csp.reportURI) {
directives.reportUri = config.csp.reportURI
}
}
export function addNonceToLocals (req: Request, res: Response, next: NextFunction): void {
res.locals.nonce = uuid.v4()
next()
}
export function computeDirectives (): CSPDirectives {
const directives: CSPDirectives = {}
mergeDirectives(directives, config.csp.directives)
mergeDirectivesIf(config.csp.addDefaults, directives, defaultDirectives)
mergeDirectivesIf(config.useCDN, directives, cdnDirectives)
mergeDirectivesIf(config.csp.addDisqus, directives, disqusDirectives)
mergeDirectivesIf(config.csp.addGoogleAnalytics, directives, googleAnalyticsDirectives)
if (!areAllInlineScriptsAllowed(directives)) {
addInlineScriptExceptions(directives)
}
addUpgradeUnsafeRequestsOptionTo(directives)
addReportURI(directives)
return directives
}
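
As an added illustration (not part of the deleted file), this is how the merge logic above combines directive sets; mergeDirectives is module-internal, so the direct call is shown only to clarify the behaviour.

const directives: CSPDirectives = { scriptSrc: ["'self'"] }
mergeDirectives(directives, {
  scriptSrc: ['https://cdnjs.cloudflare.com'],
  styleSrc: ['https://fonts.googleapis.com']
})
// directives is now:
// { scriptSrc: ["'self'", 'https://cdnjs.cloudflare.com'],
//   styleSrc: ['https://fonts.googleapis.com'] }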

@@ -1,54 +0,0 @@
import { config } from './config'
function responseError (res, code: number, detail: string, msg: string): void {
res.status(code).render('error.ejs', {
title: code + ' ' + detail + ' ' + msg,
code: code,
detail: detail,
msg: msg,
opengraph: []
})
}
function errorForbidden (res): void {
const { req } = res
if (req.user) {
responseError(res, 403, 'Forbidden', 'oh no.')
} else {
if (!req.session) req.session = {}
req.session.returnTo = req.originalUrl || config.serverUrl + '/'
req.flash('error', 'You are not allowed to access this page. Maybe try logging in?')
res.redirect(config.serverURL + '/')
}
}
function errorNotFound (res): void {
responseError(res, 404, 'Not Found', 'oops.')
}
function errorBadRequest (res): void {
responseError(res, 400, 'Bad Request', 'something not right.')
}
function errorTooLong (res): void {
responseError(res, 413, 'Payload Too Large', 'Shorten your note!')
}
function errorInternalError (res): void {
responseError(res, 500, 'Internal Error', 'wtf.')
}
function errorServiceUnavailable (res): void {
responseError(res, 503, 'Service Unavailable', 'I\'m busy right now, try again later.')
}
const errors = {
errorForbidden: errorForbidden,
errorNotFound: errorNotFound,
errorBadRequest: errorBadRequest,
errorTooLong: errorTooLong,
errorInternalError: errorInternalError,
errorServiceUnavailable: errorServiceUnavailable
}
export { errors }

@@ -1,208 +0,0 @@
// history
// external modules
import LZString from 'lz-string'
// core
import { logger } from './logger'
import { Note, User } from './models'
import { errors } from './errors'
import { LogEntry } from 'winston'
// public
class HistoryObject {
id: string
text: string
time: number
tags: string[]
pinned?: boolean
}
function parseHistoryMapToArray (historyMap: Map<string, HistoryObject>): HistoryObject[] {
const historyArray: HistoryObject[] = []
for (const [, value] of historyMap) {
historyArray.push(value)
}
return historyArray
}
function parseHistoryArrayToMap (historyArray: HistoryObject[]): Map<string, HistoryObject> {
const historyMap = new Map()
for (let i = 0; i < historyArray.length; i++) {
const item = historyArray[i]
historyMap.set(item.id, item)
}
return historyMap
}
function getHistory (userId, callback: (err: unknown, history: Map<string, HistoryObject> | null) => void): void {
User.findOne({
where: {
id: userId
}
}).then(function (user) {
if (!user) {
return callback(null, null)
}
if (user.history) {
const history: HistoryObject[] = JSON.parse(user.history)
// migrate LZString encoded note id to base64url encoded note id
for (let i = 0, l = history.length; i < l; i++) {
// Calculate the minimal string length for a UUID that is
// base64 encoded, and optimize the comparison by using -1
// this should make a lot of LZ-String parsing errors obsolete
// as we can assume that a nodeId that is 48 chars or longer is a
// noteID.
const base64UuidLength = ((4 * 36) / 3) - 1
if (!(history[i].id.length > base64UuidLength)) {
continue
}
try {
const id = LZString.decompressFromBase64(history[i].id)
if (id && Note.checkNoteIdValid(id)) {
history[i].id = Note.encodeNoteId(id)
}
} catch (err) {
// most errors here come from LZString, ignore
if (err.message === 'Cannot read property \'charAt\' of undefined') {
logger.warning('Looks like we can not decode "' + history[i].id + '" with LZString. Can be ignored.')
} else {
logger.error(err)
}
}
}
logger.debug(`read history success: ${user.id}`)
return callback(null, parseHistoryArrayToMap(history))
}
logger.debug(`read empty history: ${user.id}`)
return callback(null, new Map<string, HistoryObject>())
}).catch(function (err) {
logger.error('read history failed: ' + err)
return callback(err, null)
})
}
function setHistory (userId: string, history: HistoryObject[], callback: (err: LogEntry | null, count: [number, User[]] | null) => void): void {
User.update({
history: JSON.stringify(history)
}, {
where: {
id: userId
}
}).then(function (count) {
return callback(null, count)
}).catch(function (err) {
logger.error('set history failed: ' + err)
return callback(err, null)
})
}
function updateHistory (userId: string, noteId: string, document, time): void {
if (userId && noteId && typeof document !== 'undefined') {
getHistory(userId, function (err, history) {
if (err || !history) return
const noteHistory = history.get(noteId) || new HistoryObject()
const noteInfo = Note.parseNoteInfo(document)
noteHistory.id = noteId
noteHistory.text = noteInfo.title
noteHistory.time = time || Date.now()
noteHistory.tags = noteInfo.tags
history.set(noteId, noteHistory)
setHistory(userId, parseHistoryMapToArray(history), function (err, _) {
if (err) {
logger.log(err)
}
})
})
}
}
function historyGet (req, res): void {
if (req.isAuthenticated()) {
getHistory(req.user.id, function (err, history) {
if (err) return errors.errorInternalError(res)
if (!history) return errors.errorNotFound(res)
res.send({
history: parseHistoryMapToArray(history)
})
})
} else {
return errors.errorForbidden(res)
}
}
function historyPost (req, res): void {
if (req.isAuthenticated()) {
const noteId = req.params.noteId
if (!noteId) {
if (typeof req.body.history === 'undefined') return errors.errorBadRequest(res)
logger.debug(`SERVER received history from [${req.user.id}]: ${req.body.history}`)
let history
try {
history = JSON.parse(req.body.history)
} catch (err) {
return errors.errorBadRequest(res)
}
if (Array.isArray(history)) {
setHistory(req.user.id, history, function (err, _) {
if (err) return errors.errorInternalError(res)
res.end()
})
} else {
return errors.errorBadRequest(res)
}
} else {
if (typeof req.body.pinned === 'undefined') return errors.errorBadRequest(res)
getHistory(req.user.id, function (err, history) {
if (err) return errors.errorInternalError(res)
if (!history) return errors.errorNotFound(res)
const noteHistory = history.get(noteId)
if (!noteHistory) return errors.errorNotFound(res)
if (req.body.pinned === 'true' || req.body.pinned === 'false') {
noteHistory.pinned = (req.body.pinned === 'true')
setHistory(req.user.id, parseHistoryMapToArray(history), function (err, _) {
if (err) return errors.errorInternalError(res)
res.end()
})
} else {
return errors.errorBadRequest(res)
}
})
}
} else {
return errors.errorForbidden(res)
}
}
function historyDelete (req, res): void {
if (req.isAuthenticated()) {
const noteId = req.params.noteId
if (!noteId) {
setHistory(req.user.id, [], function (err, _) {
if (err) return errors.errorInternalError(res)
res.end()
})
} else {
getHistory(req.user.id, function (err, history) {
if (err) return errors.errorInternalError(res)
if (!history) return errors.errorNotFound(res)
history.delete(noteId)
setHistory(req.user.id, parseHistoryMapToArray(history), function (err, _) {
if (err) return errors.errorInternalError(res)
res.end()
})
})
}
} else {
return errors.errorForbidden(res)
}
}
const History = {
historyGet: historyGet,
historyPost: historyPost,
historyDelete: historyDelete,
updateHistory: updateHistory
}
export { History, HistoryObject }
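// In short (a summary of the handlers above):
// - historyGet    responds with { history: [...] } for the authenticated user
// - historyPost   without a noteId replaces the whole history with the JSON
//                 array in req.body.history; with a noteId it only toggles the
//                 pinned flag of that entry ('true'/'false' strings)
// - historyDelete without a noteId clears the history; with a noteId it
//                 removes that single entry
// All handlers reject unauthenticated requests via errors.errorForbidden.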

View file

@@ -1,45 +0,0 @@
import { createHash } from 'crypto'
import randomColor from 'randomcolor'
import { config } from './config'
// core
export function generateAvatar (name: string): string {
const color = randomColor({
seed: name,
luminosity: 'dark'
})
const letter = name.substring(0, 1).toUpperCase()
let svg = '<?xml version="1.0" encoding="UTF-8" standalone="no"?>'
svg += '<svg xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns="http://www.w3.org/2000/svg" height="96" width="96" version="1.1" viewBox="0 0 96 96">'
svg += '<g>'
svg += '<rect width="96" height="96" fill="' + color + '" />'
svg += '<text font-size="64px" font-family="sans-serif" text-anchor="middle" fill="#ffffff">'
svg += '<tspan x="48" y="72" stroke-width=".26458px" fill="#ffffff">' + letter + '</tspan>'
svg += '</text>'
svg += '</g>'
svg += '</svg>'
return svg
}
export function generateAvatarURL (name: string, email = '', big = true): string {
let photo
name = encodeURIComponent(name)
const hash = createHash('md5')
hash.update(email.toLowerCase())
const hexDigest = hash.digest('hex')
if (email !== '' && config.allowGravatar) {
photo = 'https://cdn.libravatar.org/avatar/' + hexDigest
if (big) {
photo += '?s=400'
} else {
photo += '?s=96'
}
} else {
photo = config.serverURL + '/user/' + (name || email.substring(0, email.lastIndexOf('@')) || hexDigest) + '/avatar.svg'
}
return photo
}
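// Illustrative results (the name and address are made up):
// generateAvatarURL('Alice', 'alice@example.com')  -> 'https://cdn.libravatar.org/avatar/<md5 of the lowercased email>?s=400'
//                                                     (only when config.allowGravatar is enabled)
// generateAvatarURL('Alice')                       -> config.serverURL + '/user/Alice/avatar.svg',
//                                                     presumably served from generateAvatar('Alice')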

View file

@@ -1,8 +0,0 @@
import { User } from './models'
declare module 'express' {
export interface Request {
user?: User;
flash (type: string, msg?: string): [] | object | number;
}
}

View file

@@ -1,20 +0,0 @@
import { createLogger, format, transports } from 'winston'
const logger = createLogger({
level: 'debug',
format: format.combine(
format.uncolorize(),
format.timestamp(),
format.align(),
format.splat(),
format.printf(info => `${info.timestamp} ${info.level}: ${info.message}`)
),
transports: [
new transports.Console({
handleExceptions: true
})
],
exitOnError: false
})
export { logger }

View file

@@ -1,24 +0,0 @@
'use strict'
module.exports = {
up: async function (queryInterface, Sequelize) {
return queryInterface.createTable('Users', {
id: {
type: Sequelize.UUID,
primaryKey: true,
defaultValue: Sequelize.UUIDV4
},
profileid: {
type: Sequelize.STRING,
unique: true
},
profile: Sequelize.TEXT,
history: Sequelize.TEXT,
createdAt: Sequelize.DATE,
updatedAt: Sequelize.DATE
})
},
down: async function (queryInterface, Sequelize) {
return queryInterface.dropTable('Users')
}
}

View file

@@ -1,21 +0,0 @@
'use strict'
module.exports = {
up: async function (queryInterface, Sequelize) {
return queryInterface.createTable('Notes', {
id: {
type: Sequelize.UUID,
primaryKey: true,
defaultValue: Sequelize.UUIDV4
},
ownerId: Sequelize.UUID,
content: Sequelize.TEXT,
title: Sequelize.STRING,
createdAt: Sequelize.DATE,
updatedAt: Sequelize.DATE
})
},
down: async function (queryInterface, Sequelize) {
return queryInterface.dropTable('Notes')
}
}

View file

@@ -1,18 +0,0 @@
'use strict'
module.exports = {
up: async function (queryInterface, Sequelize) {
return queryInterface.createTable('Temp', {
id: {
type: Sequelize.STRING,
primaryKey: true
},
date: Sequelize.TEXT,
createdAt: Sequelize.DATE,
updatedAt: Sequelize.DATE
})
},
down: async function (queryInterface, Sequelize) {
return queryInterface.dropTable('Temp')
}
}

View file

@@ -1,45 +0,0 @@
'use strict'
module.exports = {
up: async function (queryInterface, Sequelize) {
return queryInterface.addColumn('Notes', 'shortid', {
type: Sequelize.STRING,
defaultValue: '0000000000',
allowNull: false
}).then(function () {
return queryInterface.addIndex('Notes', ['shortid'], {
indicesType: 'UNIQUE'
})
}).then(function () {
return queryInterface.addColumn('Notes', 'permission', {
type: Sequelize.STRING,
defaultValue: 'private',
allowNull: false
})
}).then(function () {
return queryInterface.addColumn('Notes', 'viewcount', {
type: Sequelize.INTEGER,
defaultValue: 0
})
}).catch(function (error) {
if (error.message === 'SQLITE_ERROR: duplicate column name: shortid' || error.message === "ER_DUP_FIELDNAME: Duplicate column name 'shortid'" || error.message === 'column "shortid" of relation "Notes" already exists') {
// eslint-disable-next-line no-console
console.log('Migration has already run… ignoring.')
} else {
throw error
}
})
},
down: async function (queryInterface, Sequelize) {
return queryInterface.removeColumn('Notes', 'viewcount')
.then(function () {
return queryInterface.removeColumn('Notes', 'permission')
})
.then(function () {
return queryInterface.removeIndex('Notes', ['shortid'])
})
.then(function () {
return queryInterface.removeColumn('Notes', 'shortid')
})
}
}

View file

@@ -1,28 +0,0 @@
'use strict'
function isSQLite (sequelize) {
return sequelize.options.dialect === 'sqlite'
}
module.exports = {
up: async function (queryInterface, Sequelize) {
return queryInterface.changeColumn('Notes', 'title', {
type: Sequelize.TEXT
}).then(function () {
if (isSQLite(queryInterface.sequelize)) {
// the manually added index gets removed in SQLite, so add it again
return queryInterface.addIndex('Notes', ['shortid'])
}
})
},
down: async function (queryInterface, Sequelize) {
return queryInterface.changeColumn('Notes', 'title', {
type: Sequelize.STRING
}).then(function () {
if (isSQLite(queryInterface.sequelize)) {
// the manually added index gets removed in SQLite, so add it again
return queryInterface.addIndex('Notes', ['shortid'])
}
})
}
}

View file

@@ -1,26 +0,0 @@
'use strict'
module.exports = {
up: async function (queryInterface, Sequelize) {
return queryInterface.addColumn('Notes', 'lastchangeuserId', {
type: Sequelize.UUID
}).then(function () {
return queryInterface.addColumn('Notes', 'lastchangeAt', {
type: Sequelize.DATE
})
}).catch(function (error) {
if (error.message === 'SQLITE_ERROR: duplicate column name: lastchangeuserId' || error.message === "ER_DUP_FIELDNAME: Duplicate column name 'lastchangeuserId'" || error.message === 'column "lastchangeuserId" of relation "Notes" already exists') {
// eslint-disable-next-line no-console
console.log('Migration has already run… ignoring.')
} else {
throw error
}
})
},
down: async function (queryInterface, Sequelize) {
return queryInterface.removeColumn('Notes', 'lastchangeAt')
.then(function () {
return queryInterface.removeColumn('Notes', 'lastchangeuserId')
})
}
}

View file

@@ -1,25 +0,0 @@
'use strict'
module.exports = {
up: async function (queryInterface, Sequelize) {
return queryInterface.addColumn('Notes', 'alias', {
type: Sequelize.STRING
}).then(function () {
return queryInterface.addIndex('Notes', ['alias'], {
indicesType: 'UNIQUE'
})
}).catch(function (error) {
if (error.message === 'SQLITE_ERROR: duplicate column name: alias' || error.message === "ER_DUP_FIELDNAME: Duplicate column name 'alias'" || error.message === 'column "alias" of relation "Notes" already exists') {
// eslint-disable-next-line no-console
console.log('Migration has already run… ignoring.')
} else {
throw error
}
})
},
down: async function (queryInterface, Sequelize) {
return queryInterface.removeColumn('Notes', 'alias').then(function () {
return queryInterface.removeIndex('Notes', ['alias'])
})
}
}

View file

@@ -1,21 +0,0 @@
'use strict'
module.exports = {
up: async function (queryInterface, Sequelize) {
return queryInterface.addColumn('Users', 'accessToken', Sequelize.STRING).then(function () {
return queryInterface.addColumn('Users', 'refreshToken', Sequelize.STRING)
}).catch(function (error) {
if (error.message === 'SQLITE_ERROR: duplicate column name: accessToken' || error.message === "ER_DUP_FIELDNAME: Duplicate column name 'accessToken'" || error.message === 'column "accessToken" of relation "Users" already exists') {
// eslint-disable-next-line no-console
console.log('Migration has already run… ignoring.')
} else {
throw error
}
})
},
down: async function (queryInterface, Sequelize) {
return queryInterface.removeColumn('Users', 'accessToken').then(function () {
return queryInterface.removeColumn('Users', 'refreshToken')
})
}
}

View file

@@ -1,33 +0,0 @@
'use strict'
module.exports = {
up: async function (queryInterface, Sequelize) {
return queryInterface.addColumn('Notes', 'savedAt', Sequelize.DATE).then(function () {
return queryInterface.createTable('Revisions', {
id: {
type: Sequelize.UUID,
primaryKey: true
},
noteId: Sequelize.UUID,
patch: Sequelize.TEXT,
lastContent: Sequelize.TEXT,
content: Sequelize.TEXT,
length: Sequelize.INTEGER,
createdAt: Sequelize.DATE,
updatedAt: Sequelize.DATE
})
}).catch(function (error) {
if (error.message === 'SQLITE_ERROR: duplicate column name: savedAt' || error.message === "ER_DUP_FIELDNAME: Duplicate column name 'savedAt'" || error.message === 'column "savedAt" of relation "Notes" already exists') {
// eslint-disable-next-line no-console
console.log('Migration has already run… ignoring.')
} else {
throw error
}
})
},
down: async function (queryInterface, Sequelize) {
return queryInterface.dropTable('Revisions').then(function () {
return queryInterface.removeColumn('Notes', 'savedAt')
})
}
}

View file

@@ -1,36 +0,0 @@
'use strict'
module.exports = {
up: async function (queryInterface, Sequelize) {
return queryInterface.addColumn('Notes', 'authorship', Sequelize.TEXT).then(function () {
return queryInterface.addColumn('Revisions', 'authorship', Sequelize.TEXT)
}).then(function () {
return queryInterface.createTable('Authors', {
id: {
type: Sequelize.INTEGER,
primaryKey: true,
autoIncrement: true
},
color: Sequelize.STRING,
noteId: Sequelize.UUID,
userId: Sequelize.UUID,
createdAt: Sequelize.DATE,
updatedAt: Sequelize.DATE
})
}).catch(function (error) {
if (error.message === 'SQLITE_ERROR: duplicate column name: authorship' || error.message === "ER_DUP_FIELDNAME: Duplicate column name 'authorship'" || error.message === 'column "authorship" of relation "Notes" already exists') {
// eslint-disable-next-line no-console
console.log('Migration has already run… ignoring.')
} else {
throw error
}
})
},
down: async function (queryInterface, Sequelize) {
return queryInterface.dropTable('Authors').then(function () {
return queryInterface.removeColumn('Revisions', 'authorship')
}).then(function () {
return queryInterface.removeColumn('Notes', 'authorship')
})
}
}

View file

@@ -1,17 +0,0 @@
'use strict'
module.exports = {
up: async function (queryInterface, Sequelize) {
return queryInterface.addColumn('Notes', 'deletedAt', Sequelize.DATE).catch(function (error) {
if (error.message === 'SQLITE_ERROR: duplicate column name: deletedAt' || error.message === "ER_DUP_FIELDNAME: Duplicate column name 'deletedAt'" || error.message === 'column "deletedAt" of relation "Notes" already exists') {
// eslint-disable-next-line no-console
console.log('Migration has already run… ignoring.')
} else {
throw error
}
})
},
down: async function (queryInterface, Sequelize) {
return queryInterface.removeColumn('Notes', 'deletedAt')
}
}

View file

@@ -1,28 +0,0 @@
'use strict'
module.exports = {
up: async function (queryInterface, Sequelize) {
return queryInterface.addColumn('Users', 'email', Sequelize.TEXT).then(function () {
return queryInterface.addColumn('Users', 'password', Sequelize.TEXT).catch(function (error) {
if (error.message === "ER_DUP_FIELDNAME: Duplicate column name 'password'" || error.message === 'column "password" of relation "Users" already exists') {
// eslint-disable-next-line no-console
console.log('Migration has already run… ignoring.')
} else {
throw error
}
})
}).catch(function (error) {
if (error.message === 'SQLITE_ERROR: duplicate column name: email' || error.message === "ER_DUP_FIELDNAME: Duplicate column name 'email'" || error.message === 'column "email" of relation "Users" already exists') {
// eslint-disable-next-line no-console
console.log('Migration has already run… ignoring.')
} else {
throw error
}
})
},
down: async function (queryInterface, Sequelize) {
return queryInterface.removeColumn('Users', 'email').then(function () {
return queryInterface.removeColumn('Users', 'password')
})
}
}

View file

@@ -1,16 +0,0 @@
'use strict'
module.exports = {
up: async function (queryInterface, Sequelize) {
await queryInterface.changeColumn('Notes', 'content', { type: Sequelize.TEXT('long') })
await queryInterface.changeColumn('Revisions', 'patch', { type: Sequelize.TEXT('long') })
await queryInterface.changeColumn('Revisions', 'content', { type: Sequelize.TEXT('long') })
await queryInterface.changeColumn('Revisions', 'lastContent', { type: Sequelize.TEXT('long') })
},
down: async function (queryInterface, Sequelize) {
await queryInterface.changeColumn('Notes', 'content', { type: Sequelize.TEXT })
await queryInterface.changeColumn('Revisions', 'patch', { type: Sequelize.TEXT })
await queryInterface.changeColumn('Revisions', 'content', { type: Sequelize.TEXT })
await queryInterface.changeColumn('Revisions', 'lastContent', { type: Sequelize.TEXT })
}
}

View file

@@ -1,13 +0,0 @@
'use strict'
module.exports = {
up: async function (queryInterface, Sequelize) {
await queryInterface.changeColumn('Notes', 'authorship', { type: Sequelize.TEXT('long') })
await queryInterface.changeColumn('Revisions', 'authorship', { type: Sequelize.TEXT('long') })
},
down: async function (queryInterface, Sequelize) {
await queryInterface.changeColumn('Notes', 'authorship', { type: Sequelize.TEXT })
await queryInterface.changeColumn('Revisions', 'authorship', { type: Sequelize.TEXT })
}
}

View file

@@ -1,11 +0,0 @@
'use strict'
module.exports = {
up: async function (queryInterface, Sequelize) {
return queryInterface.changeColumn('Notes', 'permission', { type: Sequelize.ENUM('freely', 'editable', 'limited', 'locked', 'protected', 'private') })
},
down: async function (queryInterface, Sequelize) {
return queryInterface.changeColumn('Notes', 'permission', { type: Sequelize.ENUM('freely', 'editable', 'locked', 'private') })
}
}

View file

@@ -1,23 +0,0 @@
'use strict'
module.exports = {
up: async function (queryInterface, Sequelize) {
return queryInterface.changeColumn('Users', 'accessToken', {
type: Sequelize.TEXT
}).then(function () {
return queryInterface.changeColumn('Users', 'refreshToken', {
type: Sequelize.TEXT
})
})
},
down: async function (queryInterface, Sequelize) {
return queryInterface.changeColumn('Users', 'accessToken', {
type: Sequelize.STRING
}).then(function () {
return queryInterface.changeColumn('Users', 'refreshToken', {
type: Sequelize.STRING
})
})
}
}

View file

@@ -1,13 +0,0 @@
'use strict'
module.exports = {
up: async function (queryInterface, Sequelize) {
return queryInterface.addColumn('Users', 'deleteToken', {
type: Sequelize.UUID,
defaultValue: Sequelize.UUIDV4
})
},
down: async function (queryInterface, Sequelize) {
return queryInterface.removeColumn('Users', 'deleteToken')
}
}

View file

@@ -1,41 +0,0 @@
import {
AutoIncrement,
BelongsTo,
Column,
createIndexDecorator,
DataType,
ForeignKey,
Model,
PrimaryKey,
Table
} from 'sequelize-typescript'
import { Note, User } from './index'
const NoteUserIndex = createIndexDecorator({ unique: true })
@Table
export class Author extends Model<Author> {
@PrimaryKey
@AutoIncrement
@Column(DataType.INTEGER)
id: number
@Column(DataType.STRING)
color: string
@ForeignKey(() => Note)
@NoteUserIndex
@Column(DataType.UUID)
noteId: string
@BelongsTo(() => Note, { foreignKey: 'noteId', onDelete: 'CASCADE', constraints: false, hooks: true })
note: Note
@ForeignKey(() => User)
@NoteUserIndex
@Column(DataType.UUID)
userId: string
@BelongsTo(() => User, { foreignKey: 'userId', onDelete: 'CASCADE', constraints: false, hooks: true })
user: User
}
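// Note: NoteUserIndex (createIndexDecorator({ unique: true })) spans both the
// noteId and userId columns, so there is at most one Author row per user and note.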

View file

@@ -1,60 +0,0 @@
import { Sequelize } from 'sequelize-typescript'
import { cloneDeep } from 'lodash'
import * as path from 'path'
import { Author } from './author'
import { Note } from './note'
import { Revision } from './revision'
import { Temp } from './temp'
import { User } from './user'
import { logger } from '../logger'
import { config } from '../config'
import Umzug from 'umzug'
import SequelizeTypes from 'sequelize'
const dbconfig = cloneDeep(config.db)
dbconfig.logging = config.debug ? (data): void => {
logger.info(data)
} : false
export let sequelize: Sequelize
// Heroku specific
if (config.dbURL) {
sequelize = new Sequelize(config.dbURL, dbconfig)
} else {
sequelize = new Sequelize(dbconfig.database, dbconfig.username, dbconfig.password, dbconfig)
}
const umzug = new Umzug({
migrations: {
path: path.resolve(__dirname, '..', 'migrations'),
params: [
sequelize.getQueryInterface(),
SequelizeTypes
]
},
// Wrapper function required to work around a winston issue
// https://github.com/winstonjs/winston/issues/1577
logging: (message): void => {
logger.info(message)
},
storage: 'sequelize',
storageOptions: {
sequelize: sequelize
}
})
export async function runMigrations (): Promise<void> {
// check for pending migrations and run them if they have not been applied yet
// exit in case of unsuccessful migrations
await umzug.up().catch(error => {
logger.error(error)
logger.error('Database migration failed. Exiting…')
process.exit(1)
})
logger.info('All migrations performed successfully')
}
sequelize.addModels([Author, Note, Revision, Temp, User])
export { Author, Note, Revision, Temp, User }

View file

@@ -1,672 +0,0 @@
import async from 'async'
import base64url from 'base64url'
import cheerio from 'cheerio'
// eslint-disable-next-line @typescript-eslint/camelcase
import { diff_match_patch, patch_obj } from 'diff-match-patch'
import fs from 'fs'
import LZString from 'lz-string'
import markdownIt from 'markdown-it'
import metaMarked from 'meta-marked'
import moment from 'moment'
import path from 'path'
import Sequelize from 'sequelize'
import {
AfterCreate,
AllowNull,
BeforeCreate,
BelongsTo,
Column,
DataType,
Default,
ForeignKey,
HasMany,
Model,
PrimaryKey,
Table,
Unique
} from 'sequelize-typescript'
import { generate as shortIdGenerate, isValid as shortIdIsValid } from 'shortid'
import S from 'string'
import { config } from '../config'
import { logger } from '../logger'
import ot from '../ot/index'
import { processData, stripNullByte } from '../utils/functions'
import { Author, Revision, User } from './index'
const md = markdownIt()
// eslint-disable-next-line new-cap
const dmp = new diff_match_patch()
// permission types
enum PermissionEnum {
freely = 'freely',
editable = 'editable',
limited = 'limited',
locked = 'locked',
protected = 'protected',
private = 'private'
}
export class OpengraphMetadata {
title: string | number
description: string | number
type: string
}
export class NoteMetadata {
title: string
description: string
robots: string
GA: string
disqus: string
slideOptions
opengraph: OpengraphMetadata
}
export type NoteAuthorship = [string, number, number, number, number]
@Table({ paranoid: false })
export class Note extends Model<Note> {
@PrimaryKey
@Default(Sequelize.UUIDV4)
@Column(DataType.UUID)
id: string
@AllowNull(false)
@Default(shortIdGenerate)
@Unique
@Column(DataType.STRING)
shortid: string
@Unique
@Column(DataType.STRING)
alias: string
@Column(DataType.ENUM({ values: Object.keys(PermissionEnum).map(k => PermissionEnum[k]) }))
permission: PermissionEnum
@AllowNull(false)
@Default(0)
@Column(DataType.INTEGER)
viewcount: number
// ToDo: use @UpdatedAt instead? (https://www.npmjs.com/package/sequelize-typescript#createdat--updatedat--deletedat)
@Column(DataType.DATE)
lastchangeAt: Date
// ToDo: use @UpdatedAt instead? (https://www.npmjs.com/package/sequelize-typescript#createdat--updatedat--deletedat)
@Column(DataType.DATE)
savedAt: Date
@ForeignKey(() => User)
@Column
ownerId: string
@BelongsTo(() => User, { foreignKey: 'ownerId', constraints: false, onDelete: 'CASCADE', hooks: true })
owner: User
@ForeignKey(() => User)
@Column
lastchangeuserId: string
@BelongsTo(() => User, { foreignKey: 'lastchangeuserId', constraints: false })
lastchangeuser: User
@HasMany(() => Revision, { foreignKey: 'noteId', constraints: false })
revisions: Revision[]
@HasMany(() => Author, { foreignKey: 'noteId', constraints: false })
authors: Author[]
@Column(DataType.TEXT)
get title (): string {
return this.getDataValue('title') ?? ''
}
set title (value: string) {
this.setDataValue('title', stripNullByte(value))
}
@Column(DataType.TEXT({ length: 'long' }))
get content (): string {
return this.getDataValue('content') ?? ''
}
set content (value: string) {
this.setDataValue('content', stripNullByte(value))
}
@Column(DataType.TEXT({ length: 'long' }))
get authorship (): NoteAuthorship[] {
return processData(this.getDataValue('authorship'), [], JSON.parse)
}
set authorship (value: NoteAuthorship[]) {
// Evil hack for TypeScript to accept saving a string in a NoteAuthorship DB-field
this.setDataValue('authorship', JSON.stringify(value) as unknown as NoteAuthorship[])
}
@BeforeCreate
static async defaultContentAndPermissions (note: Note): Promise<Note> {
return await new Promise(function (resolve) {
// if no content is specified, use the default note
if (!note.content) {
let filePath: string
if (!note.alias) {
filePath = config.defaultNotePath
} else {
filePath = path.join(config.docsPath, note.alias + '.md')
}
if (Note.checkFileExist(filePath)) {
const fsCreatedTime = moment(fs.statSync(filePath).ctime)
const body = fs.readFileSync(filePath, 'utf8')
note.title = Note.parseNoteTitle(body)
note.content = body
if (filePath !== config.defaultNotePath) {
note.createdAt = fsCreatedTime
}
}
}
// if no permission is specified and the note has an owner, use the default permission from the config; otherwise the default permission is freely
if (!note.permission) {
if (note.ownerId) {
// TODO: Might explode if the user-defined permission does not exist
note.permission = PermissionEnum[config.defaultPermission]
} else {
note.permission = PermissionEnum.freely
}
}
return resolve(note)
})
}
@AfterCreate
static saveRevision (note): Promise<Note> {
return new Promise(function (resolve, reject) {
Revision.saveNoteRevision(note, function (err, _) {
if (err) {
return reject(err)
}
return resolve(note)
})
})
}
static checkFileExist (filePath): boolean {
try {
return fs.statSync(filePath).isFile()
} catch (err) {
return false
}
}
static encodeNoteId (id): string {
// remove dashes in UUID and encode in url-safe base64
const str = id.replace(/-/g, '')
const hexStr = Buffer.from(str, 'hex')
return base64url.encode(hexStr)
}
static decodeNoteId (encodedId): string {
// decode from url-safe base64
const id: string = base64url.toBuffer(encodedId).toString('hex')
// add dashes between the UUID string parts
const idParts: string[] = []
idParts.push(id.substr(0, 8))
idParts.push(id.substr(8, 4))
idParts.push(id.substr(12, 4))
idParts.push(id.substr(16, 4))
idParts.push(id.substr(20, 12))
return idParts.join('-')
}
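// Illustrative round trip (the UUID is made up):
// encodeNoteId('7f9a3b2c-0d1e-4f56-8a9b-0c1d2e3f4a5b') strips the dashes,
// reads the 32 hex digits as 16 bytes and returns their url-safe base64
// form (22 characters); decodeNoteId() reverses this and re-inserts the
// dashes after hex offsets 8, 12, 16 and 20.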
static checkNoteIdValid (id): boolean {
const uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i
const result = id.match(uuidRegex)
return !!(result && result.length === 1)
}
static parseNoteId (noteId, callback): void {
async.series({
parseNoteIdByAlias: function (_callback) {
// try to parse note id by alias (e.g. doc)
Note.findOne({
where: {
alias: noteId
}
}).then(function (note) {
if (note) {
const filePath = path.join(config.docsPath, noteId + '.md')
if (Note.checkFileExist(filePath)) {
// if the doc in the filesystem has a newer modified time than the last change time,
// update the doc in the db
const fsModifiedTime = moment(fs.statSync(filePath).mtime)
const dbModifiedTime = moment(note.lastchangeAt || note.createdAt)
const body = fs.readFileSync(filePath, 'utf8')
const contentLength = body.length
const title = Note.parseNoteTitle(body)
if (fsModifiedTime.isAfter(dbModifiedTime) && note.content !== body) {
note.update({
title: title,
content: body,
lastchangeAt: fsModifiedTime
}).then(function (note) {
Revision.saveNoteRevision(note, function (err, revision) {
if (err) return _callback(err, null)
// update authorship after creating the revision of the doc
const patch = dmp.patch_fromText(revision.patch)
const operations = Note.transformPatchToOperations(patch, contentLength)
let authorship = note.authorship
for (let i = 0; i < operations.length; i++) {
authorship = Note.updateAuthorshipByOperation(operations[i], null, authorship)
}
note.update({
authorship: authorship
}).then(function (note) {
return callback(null, note.id)
}).catch(function (err) {
return _callback(err, null)
})
})
}).catch(function (err) {
return _callback(err, null)
})
} else {
return callback(null, note.id)
}
} else {
return callback(null, note.id)
}
} else {
const filePath = path.join(config.docsPath, noteId + '.md')
if (Note.checkFileExist(filePath)) {
Note.create({
alias: noteId,
owner: null,
permission: 'locked'
}).then(function (note) {
return callback(null, note.id)
}).catch(function (err) {
return _callback(err, null)
})
} else {
return _callback(null, null)
}
}
}).catch(function (err) {
return _callback(err, null)
})
},
// parsing note ids with LZString is deprecated and only kept for compatibility
parseNoteIdByLZString: function (_callback) {
// Calculate the minimal string length of a base64-encoded UUID and
// subtract 1 so a strict "greater than" comparison can be used.
// This should make a lot of LZ-String parsing errors obsolete,
// as we can assume that an id of 48 characters or more is an
// LZString-compressed note id.
const base64UuidLength = ((4 * 36) / 3) - 1
if (!(noteId.length > base64UuidLength)) {
return _callback(null, null)
}
// try to parse note id by LZString Base64
try {
const id = LZString.decompressFromBase64(noteId)
if (id && Note.checkNoteIdValid(id)) {
return callback(null, id)
} else {
return _callback(null, null)
}
} catch (err) {
if (err.message === 'Cannot read property \'charAt\' of undefined') {
logger.warning('Looks like we can not decode "' + noteId + '" with LZString. Can be ignored.')
} else {
logger.error(err)
}
return _callback(null, null)
}
},
parseNoteIdByBase64Url: function (_callback) {
// try to parse note id by base64url
try {
const id = Note.decodeNoteId(noteId)
if (id && Note.checkNoteIdValid(id)) {
return callback(null, id)
} else {
return _callback(null, null)
}
} catch (err) {
logger.error(err)
return _callback(null, null)
}
},
parseNoteIdByShortId: function (_callback) {
// try to parse note id by shortId
try {
if (shortIdIsValid(noteId)) {
Note.findOne({
where: {
shortid: noteId
}
}).then(function (note) {
if (!note) return _callback(null, null)
return callback(null, note.id)
}).catch(function (err) {
return _callback(err, null)
})
} else {
return _callback(null, null)
}
} catch (err) {
return _callback(err, null)
}
}
}, function (err, _) {
if (err) {
logger.error(err)
return callback(err, null)
}
return callback(null, null)
})
}
static parseNoteTitle (body): string {
const parsed = Note.extractMeta(body)
const $ = cheerio.load(md.render(parsed.markdown))
return Note.extractNoteTitle(parsed.meta, $)
}
static extractNoteTitle (meta, $): string {
let title = ''
if (meta.title && (typeof meta.title === 'string' || typeof meta.title === 'number')) {
title = meta.title
} else {
const h1s = $('h1')
if (h1s.length > 0 && h1s.first().text().split('\n').length === 1) {
title = S(h1s.first().text()).stripTags().s
}
}
if (!title) title = 'Untitled'
return title
}
static generateDescription (markdown): string {
return markdown.substr(0, 100).replace(/(?:\r\n|\r|\n)/g, ' ')
}
static decodeTitle (title): string {
return title || 'Untitled'
}
static generateWebTitle (title): string {
title = !title || title === 'Untitled' ? 'CodiMD - Collaborative markdown notes' : title + ' - CodiMD'
return title
}
static extractNoteTags (meta, $): string[] {
const tags: string[] = []
const rawtags: string[] = []
if (meta.tags && (typeof meta.tags === 'string' || typeof meta.tags === 'number')) {
const metaTags = ('' + meta.tags).split(',')
for (let i = 0; i < metaTags.length; i++) {
const text: string = metaTags[i].trim()
if (text) rawtags.push(text)
}
} else {
const h6s = $('h6')
h6s.each(function (key, value) {
if (/^tags/gmi.test($(value).text())) {
const codes = $(value).find('code')
for (let i = 0; i < codes.length; i++) {
const text = S($(codes[i]).text().trim()).stripTags().s
if (text) rawtags.push(text)
}
}
})
}
for (let i = 0; i < rawtags.length; i++) {
let found = false
for (let j = 0; j < tags.length; j++) {
if (tags[j] === rawtags[i]) {
found = true
break
}
}
if (!found) {
tags.push(rawtags[i])
}
}
return tags
}
static extractMeta (content): { markdown: string; meta: {} } {
try {
const obj = metaMarked(content)
if (!obj.markdown) obj.markdown = ''
if (!obj.meta) obj.meta = {}
return obj
} catch (err) {
return {
markdown: content,
meta: {}
}
}
}
static parseMeta (meta): NoteMetadata {
const _meta = new NoteMetadata()
if (meta) {
if (meta.title && (typeof meta.title === 'string' || typeof meta.title === 'number')) {
_meta.title = meta.title
}
if (meta.description && (typeof meta.description === 'string' || typeof meta.description === 'number')) {
_meta.description = meta.description
}
if (meta.robots && (typeof meta.robots === 'string' || typeof meta.robots === 'number')) {
_meta.robots = meta.robots
}
if (meta.GA && (typeof meta.GA === 'string' || typeof meta.GA === 'number')) {
_meta.GA = meta.GA
}
if (meta.disqus && (typeof meta.disqus === 'string' || typeof meta.disqus === 'number')) {
_meta.disqus = meta.disqus
}
if (meta.slideOptions && (typeof meta.slideOptions === 'object')) {
_meta.slideOptions = meta.slideOptions
}
if (meta.opengraph && (typeof meta.opengraph === 'object')) {
_meta.opengraph = meta.opengraph
}
}
return _meta
}
static parseOpengraph (meta, title: string): OpengraphMetadata {
let _ogdata = new OpengraphMetadata()
if (meta.opengraph) {
_ogdata = meta.opengraph
}
if (!(_ogdata.title && (typeof _ogdata.title === 'string' || typeof _ogdata.title === 'number'))) {
_ogdata.title = title
}
if (!(_ogdata.description && (typeof _ogdata.description === 'string' || typeof _ogdata.description === 'number'))) {
_ogdata.description = meta.description || ''
}
if (!(_ogdata.type && (typeof _ogdata.type === 'string'))) {
_ogdata.type = 'website'
}
return _ogdata
}
static updateAuthorshipByOperation (operation, userId: string | null, authorships): NoteAuthorship[] {
let index = 0
const timestamp = Date.now()
for (let i = 0; i < operation.length; i++) {
const op = operation[i]
if (ot.TextOperation.isRetain(op)) {
index += op
} else if (ot.TextOperation.isInsert(op)) {
const opStart = index
const opEnd = index + op.length
let inserted = false
// authorship format: [userId, startPos, endPos, createdAt, updatedAt]
if (authorships.length <= 0) authorships.push([userId, opStart, opEnd, timestamp, timestamp])
else {
for (let j = 0; j < authorships.length; j++) {
const authorship = authorships[j]
if (!inserted) {
const nextAuthorship = authorships[j + 1] || -1
if ((nextAuthorship !== -1 && nextAuthorship[1] >= opEnd) || j >= authorships.length - 1) {
if (authorship[1] < opStart && authorship[2] > opStart) {
// divide
const postLength = authorship[2] - opStart
authorship[2] = opStart
authorship[4] = timestamp
authorships.splice(j + 1, 0, [userId, opStart, opEnd, timestamp, timestamp])
authorships.splice(j + 2, 0, [authorship[0], opEnd, opEnd + postLength, authorship[3], timestamp])
j += 2
inserted = true
} else if (authorship[1] >= opStart) {
authorships.splice(j, 0, [userId, opStart, opEnd, timestamp, timestamp])
j += 1
inserted = true
} else if (authorship[2] <= opStart) {
authorships.splice(j + 1, 0, [userId, opStart, opEnd, timestamp, timestamp])
j += 1
inserted = true
}
}
}
if (authorship[1] >= opStart) {
authorship[1] += op.length
authorship[2] += op.length
}
}
}
index += op.length
} else if (ot.TextOperation.isDelete(op)) {
const opStart = index
const opEnd = index - op
if (operation.length === 1) {
authorships = []
} else if (authorships.length > 0) {
for (let j = 0; j < authorships.length; j++) {
const authorship = authorships[j]
if (authorship[1] >= opStart && authorship[1] <= opEnd && authorship[2] >= opStart && authorship[2] <= opEnd) {
authorships.splice(j, 1)
j -= 1
} else if (authorship[1] < opStart && authorship[1] < opEnd && authorship[2] > opStart && authorship[2] > opEnd) {
authorship[2] += op
authorship[4] = timestamp
} else if (authorship[2] >= opStart && authorship[2] <= opEnd) {
authorship[2] = opStart
authorship[4] = timestamp
} else if (authorship[1] >= opStart && authorship[1] <= opEnd) {
authorship[1] = opEnd
authorship[4] = timestamp
}
if (authorship[1] >= opEnd) {
authorship[1] += op
authorship[2] += op
}
}
}
index += op
}
}
// merge
for (let j = 0; j < authorships.length; j++) {
const authorship = authorships[j]
for (let k = j + 1; k < authorships.length; k++) {
const nextAuthorship = authorships[k]
if (nextAuthorship && authorship[0] === nextAuthorship[0] && authorship[2] === nextAuthorship[1]) {
const minTimestamp = Math.min(authorship[3], nextAuthorship[3])
const maxTimestamp = Math.max(authorship[3], nextAuthorship[3])
authorships.splice(j, 1, [authorship[0], authorship[1], nextAuthorship[2], minTimestamp, maxTimestamp])
authorships.splice(k, 1)
j -= 1
break
}
}
}
// clear
for (let j = 0; j < authorships.length; j++) {
const authorship = authorships[j]
if (!authorship[0]) {
authorships.splice(j, 1)
j -= 1
}
}
return authorships
}
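// Worked example (user ids and timestamps are made up): starting from
// authorship [['u1', 0, 10, t0, t0]], applying the operation [4, 'abc', 6]
// (retain 4, insert 'abc', retain 6) for user 'u2' yields
// [['u1', 0, 4, t0, now], ['u2', 4, 7, now, now], ['u1', 7, 13, t0, now]]:
// the original span is divided and shifted by the length of the insertion.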
// eslint-disable-next-line @typescript-eslint/camelcase
static transformPatchToOperations (patch: patch_obj[], contentLength): number[][] {
const operations: number[][] = []
if (patch.length > 0) {
// calculate original content length
for (let j = patch.length - 1; j >= 0; j--) {
const p = patch[j]
for (let i = 0; i < p.diffs.length; i++) {
const diff = p.diffs[i]
switch (diff[0]) {
case 1: // insert
contentLength -= diff[1].length
break
case -1: // delete
contentLength += diff[1].length
break
}
}
}
// generate operations
let bias = 0
let lengthBias = 0
for (let j = 0; j < patch.length; j++) {
const operation: number[] = []
const p = patch[j]
let currIndex = p.start1 || 0
const currLength = contentLength - bias
for (let i = 0; i < p.diffs.length; i++) {
const diff = p.diffs[i]
switch (diff[0]) {
case 0: // retain
if (i === 0) {
// first
operation.push(currIndex + diff[1].length)
} else if (i !== p.diffs.length - 1) {
// mid
operation.push(diff[1].length)
} else {
// last
operation.push(currLength + lengthBias - currIndex)
}
currIndex += diff[1].length
break
case 1: // insert
operation.push(diff[1].length)
lengthBias += diff[1].length
currIndex += diff[1].length
break
case -1: // delete
operation.push(-diff[1].length)
bias += diff[1].length
currIndex += diff[1].length
break
}
}
operations.push(operation)
}
}
return operations
}
static parseNoteInfo (body): { title: string; tags: string[] } {
const parsed = Note.extractMeta(body)
const $ = cheerio.load(md.render(parsed.markdown))
return {
title: Note.extractNoteTitle(parsed.meta, $),
tags: Note.extractNoteTags(parsed.meta, $)
}
}
}

View file

@@ -1,347 +0,0 @@
import { ChildProcess } from 'child_process'
import Sequelize from 'sequelize'
import { BelongsTo, Column, DataType, Default, ForeignKey, Model, PrimaryKey, Table } from 'sequelize-typescript'
// core
import { logger } from '../logger'
import { processData, stripNullByte } from '../utils/functions'
import { Note } from './note'
import async = require('async')
import childProcess = require('child_process')
import moment = require('moment')
import path = require('path')
import shortId = require('shortid')
const Op = Sequelize.Op
const dmpCallbackCache = {}
class Data {
msg
cacheKey
error
result
level
}
function createDmpWorker (): ChildProcess {
const worker = childProcess.fork(path.resolve(__dirname, '../workers/dmpWorker'), ['ignore'])
logger.debug('dmp worker process started')
worker.on('message', function (data: Data) {
if (!data || !data.msg || !data.cacheKey) {
logger.error('dmp worker error: not enough data on message')
return
}
const cacheKey = data.cacheKey
switch (data.msg) {
case 'log':
logger.log(data.level, data.result[0], ...data.result[1])
// The cacheKey is a dummy value and we want to skip the delete line.
return
case 'error':
dmpCallbackCache[cacheKey](data.error, null)
break
case 'check':
dmpCallbackCache[cacheKey](null, data.result)
break
}
delete dmpCallbackCache[cacheKey]
})
worker.on('close', function (code) {
logger.debug(`dmp worker process exited with code ${code}`)
})
return worker
}
let dmpWorker: ChildProcess = createDmpWorker()
function sendDmpWorker (data, callback): void {
if (!dmpWorker) {
dmpWorker = createDmpWorker()
}
const cacheKey = Date.now() + '_' + shortId.generate()
dmpCallbackCache[cacheKey] = callback
data = Object.assign(data, {
cacheKey: cacheKey
})
dmpWorker.send(data)
}
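// Illustrative call (a sketch of the worker protocol used below):
// sendDmpWorker({ msg: 'create patch', lastDoc: oldContent, currDoc: newContent },
//   function (err, patch) { ... })
// registers the callback under a generated cacheKey; the worker answers with a
// 'check' (result) or 'error' message and the matching callback is then
// invoked and removed from the cache.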
@Table
export class Revision extends Model<Revision> {
@Default(Sequelize.UUIDV4)
@PrimaryKey
@Column(DataType.UUID)
id: string
@Column(DataType.INTEGER)
length: number
@ForeignKey(() => Note)
@Column(DataType.UUID)
noteId: string
@BelongsTo(() => Note, { foreignKey: 'noteId', constraints: false, onDelete: 'CASCADE', hooks: true })
note: Note
@Column(DataType.TEXT({ length: 'long' }))
get patch (): string {
return this.getDataValue('patch') ?? ''
}
set patch (value: string) {
this.setDataValue('patch', stripNullByte(value))
}
@Column(DataType.TEXT({ length: 'long' }))
get lastContent (): string {
return this.getDataValue('lastContent') ?? ''
}
set lastContent (value: string) {
this.setDataValue('lastContent', stripNullByte(value))
}
@Column(DataType.TEXT({ length: 'long' }))
get content (): string {
return this.getDataValue('content') ?? ''
}
set content (value: string) {
this.setDataValue('content', stripNullByte(value))
}
@Column(DataType.TEXT({ length: 'long' }))
get authorship (): string {
return processData(this.getDataValue('authorship'), [], JSON.parse)
}
set authorship (value: string) {
this.setDataValue('authorship', value ? JSON.stringify(value) : value)
}
static getNoteRevisions (note: Note, callback): void {
Revision.findAll({
where: {
noteId: note.id
},
order: [['createdAt', 'DESC']]
}).then(function (revisions: Revision[]) {
class RevisionDataActions { // TODO: Fix Type in actions.ts
time
length
}
const data: RevisionDataActions[] = []
revisions.forEach(function (revision: Revision) {
data.push({
time: moment(revision.createdAt).valueOf(),
length: revision.length
})
})
callback(null, data)
}).catch(function (err) {
callback(err, null)
})
}
static getPatchedNoteRevisionByTime (note: Note, time, errorCallback): void {
// find all revisions to prepare for all possible calculations
Revision.findAll({
where: {
noteId: note.id
},
order: [['createdAt', 'DESC']]
}).then(function (revisions: Revision[]) {
if (revisions.length <= 0) {
errorCallback(null, null)
return
}
// measure target revision position
Revision.count({
where: {
noteId: note.id,
createdAt: {
[Op.gte]: time
}
}
}).then(function (count: number) {
if (count <= 0) {
errorCallback(null, null)
return
}
sendDmpWorker({
msg: 'get revision',
revisions: revisions,
count: count
}, errorCallback)
}).catch(function (err) {
errorCallback(err, null)
})
}).catch(function (err) {
errorCallback(err, null)
})
}
static checkAllNotesRevision (callback): void {
Revision.saveAllNotesRevision(function (err, notes: Note[]) {
if (err) {
callback(err, null)
return
}
if (!notes || notes.length <= 0) {
callback(null, notes)
} else {
Revision.checkAllNotesRevision(callback)
}
})
}
static saveAllNotesRevision (callback): void {
Note.findAll({
// query all notes that need a revision to be saved
where: {
[Op.and]: [
{
lastchangeAt: {
[Op.or]: {
[Op.eq]: null,
[Op.and]: {
[Op.ne]: null,
[Op.gt]: Sequelize.col('createdAt')
}
}
}
},
{
savedAt: {
[Op.or]: {
[Op.eq]: null,
[Op.lt]: Sequelize.col('lastchangeAt')
}
}
}
]
}
}).then(function (notes: Note[]) {
if (notes.length <= 0) {
callback(null, notes)
return
}
const savedNotes: Note[] = []
async.each(notes, function (note: Note, _callback) {
// revision saving policy: the note has not been modified for 5 minutes, or it has not been saved for 10 minutes after the last change
if (note.lastchangeAt && note.savedAt) {
const lastchangeAt = moment(note.lastchangeAt)
const savedAt = moment(note.savedAt)
if (moment().isAfter(lastchangeAt.add(5, 'minutes'))) {
savedNotes.push(note)
Revision.saveNoteRevision(note, _callback)
} else if (lastchangeAt.isAfter(savedAt.add(10, 'minutes'))) {
savedNotes.push(note)
Revision.saveNoteRevision(note, _callback)
} else {
_callback(null, null)
}
} else {
savedNotes.push(note)
Revision.saveNoteRevision(note, _callback)
}
}, function (err) {
if (err) {
callback(err, null)
return
}
// return null when no notes needed saving at this moment but delayed tasks remain to be done
const result = ((savedNotes.length === 0) && (notes.length > 0)) ? null : savedNotes
callback(null, result)
})
}).catch(function (err) {
callback(err, null)
})
}
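// Worked example of the policy above (times are made up): a note with
// lastchangeAt 12:00 is snapshotted once the clock passes 12:05 (idle for
// 5 minutes), or on the next check when its lastchangeAt is more than
// 10 minutes after its savedAt, even while edits are still coming in.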
static saveNoteRevision (note: Note, callback): void {
Revision.findAll({
where: {
noteId: note.id
},
order: [['createdAt', 'DESC']]
}).then(function (revisions: Revision[]) {
if (revisions.length <= 0) {
// if no revision available
let noteContent = note.content
if (noteContent.length === 0) {
noteContent = ''
}
Revision.create({
noteId: note.id,
lastContent: noteContent,
length: noteContent.length,
authorship: note.authorship
}).then(function (revision: Revision) {
Revision.finishSaveNoteRevision(note, revision, callback)
}).catch(function (err) {
callback(err, null)
})
} else {
const latestRevision = revisions[0]
const lastContent = latestRevision.content || latestRevision.lastContent
const content = note.content
sendDmpWorker({
msg: 'create patch',
lastDoc: lastContent,
currDoc: content
}, function (err, patch) {
if (err) {
logger.error('save note revision error', err)
return
}
if (!patch) {
// if the patch is empty (i.e. there is no difference), just update the latest revision's updated time
latestRevision.changed('updatedAt', true)
latestRevision.update({
updatedAt: Date.now()
}).then(function (revision: Revision) {
Revision.finishSaveNoteRevision(note, revision, callback)
}).catch(function (err) {
callback(err, null)
})
} else {
Revision.create({
noteId: note.id,
patch: patch,
content: note.content,
length: note.content.length,
authorship: note.authorship
}).then(function (revision: Revision) {
// clear last revision content to reduce db size
latestRevision.update({
content: null
}).then(function () {
Revision.finishSaveNoteRevision(note, revision, callback)
}).catch(function (err) {
callback(err, null)
})
}).catch(function (err) {
callback(err, null)
})
}
})
}
}).catch(function (err) {
callback(err, null)
})
}
static finishSaveNoteRevision (note: Note, revision: Revision, callback): void {
note.update({
savedAt: revision.updatedAt
}).then(function () {
callback(null, revision)
}).catch(function (err) {
callback(err, null)
})
}
}

View file

@@ -1,13 +0,0 @@
import { DataType, Model, Table, PrimaryKey, Column, Default } from 'sequelize-typescript'
import { generate as shortIdGenerate } from 'shortid'
@Table
export class Temp extends Model<Temp> {
@Default(shortIdGenerate)
@PrimaryKey
@Column(DataType.STRING)
id: string;
@Column(DataType.TEXT)
data: string
}

View file

@@ -1,74 +0,0 @@
import scrypt from 'scrypt-kdf'
import { UUIDV4 } from 'sequelize'
import {
BeforeCreate,
BeforeUpdate,
Column,
DataType,
Default,
HasMany,
IsEmail,
Model,
PrimaryKey,
Table,
Unique
} from 'sequelize-typescript'
import { Note } from './note'
@Table
export class User extends Model<User> {
@PrimaryKey
@Default(UUIDV4)
@Column(DataType.UUID)
id: string
@Unique
@Column(DataType.STRING)
profileid: string
@Column(DataType.TEXT)
profile: string
@Column(DataType.TEXT)
history: string
@Column(DataType.TEXT)
accessToken: string
@Column(DataType.TEXT)
refreshToken: string
@Column(DataType.UUID)
deleteToken: string
@IsEmail
@Column(DataType.TEXT)
email: string
@Column(DataType.TEXT)
password: string
@HasMany(() => Note, { foreignKey: 'lastchangeuserId', constraints: false })
@HasMany(() => Note, { foreignKey: 'ownerId', constraints: false })
@BeforeUpdate
@BeforeCreate
static async updatePasswordHashHook (user: User): Promise<void> {
// suggested way to hash passwords to be able to do this asynchronously:
// @see https://github.com/sequelize/sequelize/issues/1821#issuecomment-44265819
if (!user.changed('password')) {
return Promise.resolve()
}
return scrypt
.kdf(user.getDataValue('password'), { logN: 15, r: 8, p: 1 })
.then(keyBuf => {
user.setDataValue('password', keyBuf.toString('hex'))
})
}
verifyPassword (attempt: string): Promise<boolean> {
return scrypt.verify(Buffer.from(this.password, 'hex'), attempt)
}
}
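// Minimal sketch of the password flow (credentials are made up):
// await User.create({ email: 'demo@example.com', password: 'secret' })
//   triggers updatePasswordHashHook, which stores a hex-encoded scrypt hash
//   instead of the plain text; user.verifyPassword('secret') then resolves
//   to true and user.verifyPassword('wrong') to false.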

View file

@@ -1,312 +0,0 @@
// translation of https://github.com/djspiewak/cccp/blob/master/agent/src/main/scala/com/codecommit/cccp/agent/state.scala
if (typeof ot === 'undefined') {
var ot = {};
}
ot.Client = (function (global) {
'use strict';
// Client constructor
function Client (revision) {
this.revision = revision; // the next expected revision number
this.setState(synchronized_); // start state
}
Client.prototype.setState = function (state) {
this.state = state;
};
// Call this method when the user changes the document.
Client.prototype.applyClient = function (operation) {
this.setState(this.state.applyClient(this, operation));
};
// Call this method with a new operation from the server
Client.prototype.applyServer = function (revision, operation) {
this.setState(this.state.applyServer(this, revision, operation));
};
Client.prototype.applyOperations = function (head, operations) {
this.setState(this.state.applyOperations(this, head, operations));
};
Client.prototype.serverAck = function (revision) {
this.setState(this.state.serverAck(this, revision));
};
Client.prototype.serverReconnect = function () {
if (typeof this.state.resend === 'function') { this.state.resend(this); }
};
// Transforms a selection from the latest known server state to the current
// client state. For example, if we get from the server the information that
// another user's cursor is at position 3, but the server hasn't yet received
// our newest operation, an insertion of 5 characters at the beginning of the
// document, the correct position of the other user's cursor in our current
// document is 8.
Client.prototype.transformSelection = function (selection) {
return this.state.transformSelection(selection);
};
// Override this method.
Client.prototype.sendOperation = function (revision, operation) {
throw new Error("sendOperation must be defined in child class");
};
// Override this method.
Client.prototype.applyOperation = function (operation) {
throw new Error("applyOperation must be defined in child class");
};
// In the 'Synchronized' state, there is no pending operation that the client
// has sent to the server.
function Synchronized () {}
Client.Synchronized = Synchronized;
Synchronized.prototype.applyClient = function (client, operation) {
// When the user makes an edit, send the operation to the server and
// switch to the 'AwaitingConfirm' state
client.sendOperation(client.revision, operation);
return new AwaitingConfirm(operation);
};
Synchronized.prototype.applyServer = function (client, revision, operation) {
if (revision - client.revision > 1) {
throw new Error("Invalid revision.");
}
client.revision = revision;
// When we receive a new operation from the server, the operation can be
// simply applied to the current document
client.applyOperation(operation);
return this;
};
Synchronized.prototype.serverAck = function (client, revision) {
throw new Error("There is no pending operation.");
};
// Nothing to do because the latest server state and client state are the same.
Synchronized.prototype.transformSelection = function (x) { return x; };
// Singleton
var synchronized_ = new Synchronized();
// In the 'AwaitingConfirm' state, there's one operation the client has sent
// to the server and is still waiting for an acknowledgement.
function AwaitingConfirm (outstanding) {
// Save the pending operation
this.outstanding = outstanding;
}
Client.AwaitingConfirm = AwaitingConfirm;
AwaitingConfirm.prototype.applyClient = function (client, operation) {
// When the user makes an edit, don't send the operation immediately,
// instead switch to 'AwaitingWithBuffer' state
return new AwaitingWithBuffer(this.outstanding, operation);
};
AwaitingConfirm.prototype.applyServer = function (client, revision, operation) {
if (revision - client.revision > 1) {
throw new Error("Invalid revision.");
}
client.revision = revision;
// This is another client's operation. Visualization:
//
// /\
// this.outstanding / \ operation
// / \
// \ /
// pair[1] \ / pair[0] (new outstanding)
// (can be applied \/
// to the client's
// current document)
var pair = operation.constructor.transform(this.outstanding, operation);
client.applyOperation(pair[1]);
return new AwaitingConfirm(pair[0]);
};
AwaitingConfirm.prototype.serverAck = function (client, revision) {
if (revision - client.revision > 1) {
return new Stale(this.outstanding, client, revision).getOperations();
}
client.revision = revision;
// The client's operation has been acknowledged
// => switch to synchronized state
return synchronized_;
};
AwaitingConfirm.prototype.transformSelection = function (selection) {
return selection.transform(this.outstanding);
};
AwaitingConfirm.prototype.resend = function (client) {
// The confirm didn't come because the client was disconnected.
// Now that it has reconnected, we resend the outstanding operation.
client.sendOperation(client.revision, this.outstanding);
};
// In the 'AwaitingWithBuffer' state, the client is waiting for an operation
// to be acknowledged by the server while buffering the edits the user makes
function AwaitingWithBuffer (outstanding, buffer) {
// Save the pending operation and the user's edits since then
this.outstanding = outstanding;
this.buffer = buffer;
}
Client.AwaitingWithBuffer = AwaitingWithBuffer;
AwaitingWithBuffer.prototype.applyClient = function (client, operation) {
// Compose the user's changes onto the buffer
var newBuffer = this.buffer.compose(operation);
return new AwaitingWithBuffer(this.outstanding, newBuffer);
};
AwaitingWithBuffer.prototype.applyServer = function (client, revision, operation) {
if (revision - client.revision > 1) {
throw new Error("Invalid revision.");
}
client.revision = revision;
// Operation comes from another client
//
// /\
// this.outstanding / \ operation
// / \
// /\ /
// this.buffer / \* / pair1[0] (new outstanding)
// / \/
// \ /
// pair2[1] \ / pair2[0] (new buffer)
// the transformed \/
// operation -- can
// be applied to the
// client's current
// document
//
// * pair1[1]
var transform = operation.constructor.transform;
var pair1 = transform(this.outstanding, operation);
var pair2 = transform(this.buffer, pair1[1]);
client.applyOperation(pair2[1]);
return new AwaitingWithBuffer(pair1[0], pair2[0]);
};
AwaitingWithBuffer.prototype.serverAck = function (client, revision) {
if (revision - client.revision > 1) {
return new StaleWithBuffer(this.outstanding, this.buffer, client, revision).getOperations();
}
client.revision = revision;
// The pending operation has been acknowledged
// => send buffer
client.sendOperation(client.revision, this.buffer);
return new AwaitingConfirm(this.buffer);
};
AwaitingWithBuffer.prototype.transformSelection = function (selection) {
return selection.transform(this.outstanding).transform(this.buffer);
};
AwaitingWithBuffer.prototype.resend = function (client) {
// The confirm didn't come because the client was disconnected.
// Now that it has reconnected, we resend the outstanding operation.
client.sendOperation(client.revision, this.outstanding);
};
function Stale(acknowlaged, client, revision) {
this.acknowlaged = acknowlaged;
this.client = client;
this.revision = revision;
}
Client.Stale = Stale;
Stale.prototype.applyClient = function (client, operation) {
return new StaleWithBuffer(this.acknowlaged, operation, client, this.revision);
};
Stale.prototype.applyServer = function (client, revision, operation) {
throw new Error("Ignored server-side change.");
};
Stale.prototype.applyOperations = function (client, head, operations) {
var transform = this.acknowlaged.constructor.transform;
for (var i = 0; i < operations.length; i++) {
var op = ot.TextOperation.fromJSON(operations[i]);
var pair = transform(this.acknowlaged, op);
client.applyOperation(pair[1]);
this.acknowlaged = pair[0];
}
client.revision = this.revision;
return synchronized_;
};
Stale.prototype.serverAck = function (client, revision) {
throw new Error("There is no pending operation.");
};
Stale.prototype.transformSelection = function (selection) {
return selection;
};
Stale.prototype.getOperations = function () {
this.client.getOperations(this.client.revision, this.revision - 1); // acknowlaged is the one at revision
return this;
};
function StaleWithBuffer(acknowlaged, buffer, client, revision) {
this.acknowlaged = acknowlaged;
this.buffer = buffer;
this.client = client;
this.revision = revision;
}
Client.StaleWithBuffer = StaleWithBuffer;
StaleWithBuffer.prototype.applyClient = function (client, operation) {
var buffer = this.buffer.compose(operation);
return new StaleWithBuffer(this.acknowlaged, buffer, client, this.revision);
};
StaleWithBuffer.prototype.applyServer = function (client, revision, operation) {
throw new Error("Ignored server-side change.");
};
StaleWithBuffer.prototype.applyOperations = function (client, head, operations) {
var transform = this.acknowlaged.constructor.transform;
for (var i = 0; i < operations.length; i++) {
var op = ot.TextOperation.fromJSON(operations[i]);
var pair1 = transform(this.acknowlaged, op);
var pair2 = transform(this.buffer, pair1[1]);
client.applyOperation(pair2[1]);
this.acknowlaged = pair1[0];
this.buffer = pair2[0];
}
client.revision = this.revision;
client.sendOperation(client.revision, this.buffer);
return new AwaitingConfirm(this.buffer);
};
StaleWithBuffer.prototype.serverAck = function (client, revision) {
throw new Error("There is no pending operation.");
};
StaleWithBuffer.prototype.transformSelection = function (selection) {
return selection;
};
StaleWithBuffer.prototype.getOperations = function () {
this.client.getOperations(this.client.revision, this.revision - 1); // acknowlaged is the one at revision
return this;
};
return Client;
}(this));
if (typeof module === 'object') {
module.exports = ot.Client;
}
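// Lifecycle sketch of the state machine above:
// 1. a client starts in Synchronized;
// 2. a local edit (applyClient) sends the operation and moves to AwaitingConfirm;
// 3. further local edits are composed into the buffer of AwaitingWithBuffer;
// 4. serverAck either returns to Synchronized or sends the buffer as the next
//    outstanding operation;
// 5. if the ack reveals skipped revisions, Stale/StaleWithBuffer fetch the
//    missed operations via getOperations() and transform against them.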

View file

@@ -1,154 +0,0 @@
import { EventEmitter } from 'events'
import { logger } from '../logger'
import { SocketWithNoteId } from '../realtime'
import Selection from './selection'
import Server from './server'
import TextOperation from './text-operation'
import WrappedOperation from './wrapped-operation'
export class EditorSocketIOServer extends Server {
private readonly users: {}
private readonly docId: any
private mayWrite: (socket: SocketWithNoteId, originIsOperation: boolean, callback: (mayEdit: boolean) => void) => void
constructor (document, operations, docId, mayWrite, operationCallback) {
super(document, operations)
// Initialize EventEmitter state on this instance (legacy mixin pattern)
EventEmitter.call(this)
this.users = {}
this.docId = docId
this.mayWrite = mayWrite || function (_, originIsOperation, cb) {
cb(true)
}
this.operationCallback = operationCallback
}
addClient (socket) {
const self = this
socket.join(this.docId)
const docOut = {
str: this.document,
revision: this.operations.length,
clients: this.users
}
socket.emit('doc', docOut)
socket.on('operation', function (revision, operation, selection) {
self.mayWrite(socket, true, function (mayWrite) {
if (!mayWrite) {
logger.info("User doesn't have the right to edit.")
return
}
try {
self.onOperation(socket, revision, operation, selection)
if (typeof self.operationCallback === 'function')
self.operationCallback(socket, operation)
} catch (err) {
setTimeout(function () {
const docOut = {
str: self.document,
revision: self.operations.length,
clients: self.users,
force: true
}
socket.emit('doc', docOut)
}, 100)
}
})
})
socket.on('get_operations', function (base, head) {
self.onGetOperations(socket, base, head)
})
socket.on('selection', function (obj) {
self.mayWrite(socket, false, function (mayWrite) {
if (!mayWrite) {
logger.info("User doesn't have the right to edit.")
return
}
self.updateSelection(socket, obj && Selection.fromJSON(obj))
})
})
socket.on('disconnect', function () {
logger.debug("Disconnect")
socket.leave(self.docId)
self.onDisconnect(socket)
/*
if (socket.manager && socket.manager.sockets.clients(self.docId).length === 0) {
self.emit('empty-room');
}
*/
})
};
onOperation (socket, revision, operation, selection) {
let wrapped
try {
wrapped = new WrappedOperation(
TextOperation.fromJSON(operation),
selection && Selection.fromJSON(selection)
)
} catch (exc) {
logger.error("Invalid operation received: ")
logger.error(exc)
throw new Error(exc)
}
try {
const clientId = socket.id
const wrappedPrime = this.receiveOperation(revision, wrapped)
if (!wrappedPrime) return
logger.debug("new operation: " + JSON.stringify(wrapped))
this.getClient(clientId).selection = wrappedPrime.meta
revision = this.operations.length
socket.emit('ack', revision)
socket.broadcast.in(this.docId).emit(
'operation', clientId, revision,
wrappedPrime.wrapped.toJSON(), wrappedPrime.meta
)
// mark the document as dirty
this.isDirty = true
} catch (exc) {
logger.error(exc)
throw new Error(exc)
}
};
onGetOperations (socket, base, head) {
const operations = this.operations.slice(base, head).map(function (op) {
return op.wrapped.toJSON()
})
socket.emit('operations', head, operations)
};
updateSelection (socket, selection) {
const clientId = socket.id
if (selection) {
this.getClient(clientId).selection = selection
} else {
delete this.getClient(clientId).selection
}
socket.broadcast.to(this.docId).emit('selection', clientId, selection)
};
setName (socket, name) {
const clientId = socket.id
this.getClient(clientId).name = name
socket.broadcast.to(this.docId).emit('set_name', clientId, name)
};
setColor (socket, color) {
const clientId = socket.id
this.getClient(clientId).color = color
socket.broadcast.to(this.docId).emit('set_color', clientId, color)
};
getClient (clientId) {
return this.users[clientId] || (this.users[clientId] = {})
};
onDisconnect (socket) {
const clientId = socket.id
delete this.users[clientId]
socket.broadcast.to(this.docId).emit('client_left', clientId)
};
}
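// Illustrative construction sketch (never invoked; the function name and the
// literal values are stand-ins for what the realtime server provides): a
// permissive mayWrite callback and a no-op operation callback.
function exampleEditorServerSetup (noteId: string, socket: SocketWithNoteId): EditorSocketIOServer {
  const server = new EditorSocketIOServer(
    '# hello',          // initial document content
    [],                 // no previously stored operations
    noteId,             // Socket.IO room to join
    (_socket, _originIsOperation, callback) => callback(true), // allow every edit
    () => { /* authorship bookkeeping could hook in here */ }
  )
  server.addClient(socket)
  return server
}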

View file

@ -1,8 +0,0 @@
exports.version = '0.0.15';
exports.TextOperation = require('./text-operation');
exports.SimpleTextOperation = require('./simple-text-operation');
exports.Client = require('./client');
exports.Server = require('./server');
exports.Selection = require('./selection');
exports.EditorSocketIOServer = require('./editor-socketio-server');

View file

@ -1,117 +0,0 @@
if (typeof ot === 'undefined') {
// Export for browsers
var ot = {};
}
ot.Selection = (function (global) {
'use strict';
var TextOperation = global.ot ? global.ot.TextOperation : require('./text-operation');
// Range has `anchor` and `head` properties, which are zero-based indices into
// the document. The `anchor` is the side of the selection that stays fixed,
// `head` is the side of the selection where the cursor is. When both are
// equal, the range represents a cursor.
function Range (anchor, head) {
this.anchor = anchor;
this.head = head;
}
Range.fromJSON = function (obj) {
return new Range(obj.anchor, obj.head);
};
Range.prototype.equals = function (other) {
return this.anchor === other.anchor && this.head === other.head;
};
Range.prototype.isEmpty = function () {
return this.anchor === this.head;
};
Range.prototype.transform = function (other) {
function transformIndex (index) {
var newIndex = index;
var ops = other.ops;
for (var i = 0, l = other.ops.length; i < l; i++) {
if (TextOperation.isRetain(ops[i])) {
index -= ops[i];
} else if (TextOperation.isInsert(ops[i])) {
newIndex += ops[i].length;
} else {
newIndex -= Math.min(index, -ops[i]);
index += ops[i];
}
if (index < 0) { break; }
}
return newIndex;
}
var newAnchor = transformIndex(this.anchor);
if (this.anchor === this.head) {
return new Range(newAnchor, newAnchor);
}
return new Range(newAnchor, transformIndex(this.head));
};
// A selection is basically an array of ranges. Every range represents a real
// selection or a cursor in the document (when the start position equals the
// end position of the range). The array must not be empty.
function Selection (ranges) {
this.ranges = ranges || [];
}
Selection.Range = Range;
// Convenience method for creating selections only containing a single cursor
// and no real selection range.
Selection.createCursor = function (position) {
return new Selection([new Range(position, position)]);
};
Selection.fromJSON = function (obj) {
var objRanges = obj.ranges || obj;
for (var i = 0, ranges = []; i < objRanges.length; i++) {
ranges[i] = Range.fromJSON(objRanges[i]);
}
return new Selection(ranges);
};
Selection.prototype.equals = function (other) {
if (this.position !== other.position) { return false; }
if (this.ranges.length !== other.ranges.length) { return false; }
// FIXME: Sort ranges before comparing them?
for (var i = 0; i < this.ranges.length; i++) {
if (!this.ranges[i].equals(other.ranges[i])) { return false; }
}
return true;
};
Selection.prototype.somethingSelected = function () {
for (var i = 0; i < this.ranges.length; i++) {
if (!this.ranges[i].isEmpty()) { return true; }
}
return false;
};
// Return the more current selection information.
Selection.prototype.compose = function (other) {
return other;
};
// Update the selection with respect to an operation.
Selection.prototype.transform = function (other) {
for (var i = 0, newRanges = []; i < this.ranges.length; i++) {
newRanges[i] = this.ranges[i].transform(other);
}
return new Selection(newRanges);
};
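// Illustrative sketch (never invoked; the function name is ours): a cursor is kept
// in place across a concurrent edit. Inserting "ab" at the start of a 5-character
// document moves a cursor at index 2 to index 4.
function exampleCursorTransform () {
  var insertAtStart = new TextOperation().insert('ab').retain(5);
  var cursor = Selection.createCursor(2);
  return cursor.transform(insertAtStart); // a single empty range { anchor: 4, head: 4 }
}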
return Selection;
}(this));
// Export for CommonJS
if (typeof module === 'object') {
module.exports = ot.Selection;
}

View file

@ -1,52 +0,0 @@
var config = require('../config');
if (typeof ot === 'undefined') {
var ot = {};
}
ot.Server = (function (global) {
'use strict';
// Constructor. Takes the current document as a string and optionally the array
// of all operations.
function Server (document, operations) {
this.document = document;
this.operations = operations || [];
}
// Call this method whenever you receive an operation from a client.
Server.prototype.receiveOperation = function (revision, operation) {
if (revision < 0 || this.operations.length < revision) {
throw new Error("operation revision not in history");
}
// Find all operations that the client didn't know of when it sent the
// operation ...
var concurrentOperations = this.operations.slice(revision);
// ... and transform the operation against all these operations ...
var transform = operation.constructor.transform;
for (var i = 0; i < concurrentOperations.length; i++) {
operation = transform(operation, concurrentOperations[i])[0];
}
// ... and apply that on the document.
var newDocument = operation.apply(this.document);
// ignore operations that would grow the document beyond the maximum length
if(newDocument.length > config.documentMaxLength && newDocument.length > this.document.length)
return;
this.document = newDocument;
// Store operation in history.
this.operations.push(operation);
// It's the caller's responsibility to send the operation to all connected
// clients and an acknowledgement to the creator.
return operation;
};
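// Illustrative sketch (never invoked; the function name is ours, and TextOperation
// is only pulled in for the example, assuming the configured documentMaxLength is
// not exceeded): a server at revision 0 receives two operations that were both
// based on revision 0; the second is transformed against the first before being applied.
function exampleReceiveConcurrentOperations () {
  var TextOperation = global.ot ? global.ot.TextOperation : require('./text-operation');
  var server = new Server('abc');
  server.receiveOperation(0, new TextOperation().insert('x').retain(3));  // document: "xabc"
  server.receiveOperation(0, new TextOperation().retain(2)['delete'](1)); // document: "xab"
  return server.document;
}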
return Server;
}(this));
if (typeof module === 'object') {
module.exports = ot.Server;
}

View file

@ -1,188 +0,0 @@
if (typeof ot === 'undefined') {
// Export for browsers
var ot = {};
}
ot.SimpleTextOperation = (function (global) {
var TextOperation = global.ot ? global.ot.TextOperation : require('./text-operation');
function SimpleTextOperation () {}
// Insert the string `str` at the zero-based `position` in the document.
function Insert (str, position) {
if (!this || this.constructor !== SimpleTextOperation) {
// => function was called without 'new'
return new Insert(str, position);
}
this.str = str;
this.position = position;
}
Insert.prototype = new SimpleTextOperation();
SimpleTextOperation.Insert = Insert;
Insert.prototype.toString = function () {
return 'Insert(' + JSON.stringify(this.str) + ', ' + this.position + ')';
};
Insert.prototype.equals = function (other) {
return other instanceof Insert &&
this.str === other.str &&
this.position === other.position;
};
Insert.prototype.apply = function (doc) {
return doc.slice(0, this.position) + this.str + doc.slice(this.position);
};
// Delete `count` many characters at the zero-based `position` in the document.
function Delete (count, position) {
if (!this || this.constructor !== SimpleTextOperation) {
return new Delete(count, position);
}
this.count = count;
this.position = position;
}
Delete.prototype = new SimpleTextOperation();
SimpleTextOperation.Delete = Delete;
Delete.prototype.toString = function () {
return 'Delete(' + this.count + ', ' + this.position + ')';
};
Delete.prototype.equals = function (other) {
return other instanceof Delete &&
this.count === other.count &&
this.position === other.position;
};
Delete.prototype.apply = function (doc) {
return doc.slice(0, this.position) + doc.slice(this.position + this.count);
};
// An operation that does nothing. This is needed for the result of the
// transformation of two deletions of the same character.
function Noop () {
if (!this || this.constructor !== SimpleTextOperation) { return new Noop(); }
}
Noop.prototype = new SimpleTextOperation();
SimpleTextOperation.Noop = Noop;
Noop.prototype.toString = function () {
return 'Noop()';
};
Noop.prototype.equals = function (other) { return other instanceof Noop; };
Noop.prototype.apply = function (doc) { return doc; };
var noop = new Noop();
SimpleTextOperation.transform = function (a, b) {
if (a instanceof Noop || b instanceof Noop) { return [a, b]; }
if (a instanceof Insert && b instanceof Insert) {
if (a.position < b.position || (a.position === b.position && a.str < b.str)) {
return [a, new Insert(b.str, b.position + a.str.length)];
}
if (a.position > b.position || (a.position === b.position && a.str > b.str)) {
return [new Insert(a.str, a.position + b.str.length), b];
}
return [noop, noop];
}
if (a instanceof Insert && b instanceof Delete) {
if (a.position <= b.position) {
return [a, new Delete(b.count, b.position + a.str.length)];
}
if (a.position >= b.position + b.count) {
return [new Insert(a.str, a.position - b.count), b];
}
// Here, we have to delete the inserted string of operation a.
// That doesn't preserve the intention of operation a, but it's the only
// thing we can do to get a valid transform function.
return [noop, new Delete(b.count + a.str.length, b.position)];
}
if (a instanceof Delete && b instanceof Insert) {
if (a.position >= b.position) {
return [new Delete(a.count, a.position + b.str.length), b];
}
if (a.position + a.count <= b.position) {
return [a, new Insert(b.str, b.position - a.count)];
}
// Same problem as above. We have to delete the string that was inserted
// in operation b.
return [new Delete(a.count + b.str.length, a.position), noop];
}
if (a instanceof Delete && b instanceof Delete) {
if (a.position === b.position) {
if (a.count === b.count) {
return [noop, noop];
} else if (a.count < b.count) {
return [noop, new Delete(b.count - a.count, b.position)];
}
return [new Delete(a.count - b.count, a.position), noop];
}
if (a.position < b.position) {
if (a.position + a.count <= b.position) {
return [a, new Delete(b.count, b.position - a.count)];
}
if (a.position + a.count >= b.position + b.count) {
return [new Delete(a.count - b.count, a.position), noop];
}
return [
new Delete(b.position - a.position, a.position),
new Delete(b.position + b.count - (a.position + a.count), a.position)
];
}
if (a.position > b.position) {
if (a.position >= b.position + b.count) {
return [new Delete(a.count, a.position - b.count), b];
}
if (a.position + a.count <= b.position + b.count) {
return [noop, new Delete(b.count - a.count, b.position)];
}
return [
new Delete(a.position + a.count - (b.position + b.count), b.position),
new Delete(a.position - b.position, b.position)
];
}
}
};
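// Illustrative sketch (never invoked; the function name is ours): a concurrent
// insert and delete on "abc" are transformed so both sides converge on "xab".
function exampleSimpleTransform () {
  var pair = SimpleTextOperation.transform(new Insert('x', 0), new Delete(1, 2));
  // pair[1].apply(new Insert('x', 0).apply('abc')) === 'xab'
  // pair[0].apply(new Delete(1, 2).apply('abc')) === 'xab'
  return pair; // [Insert('x', 0), Delete(1, 3)]
}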
// Convert a normal, composable `TextOperation` into an array of
// `SimpleTextOperation`s.
SimpleTextOperation.fromTextOperation = function (operation) {
var simpleOperations = [];
var index = 0;
for (var i = 0; i < operation.ops.length; i++) {
var op = operation.ops[i];
if (TextOperation.isRetain(op)) {
index += op;
} else if (TextOperation.isInsert(op)) {
simpleOperations.push(new Insert(op, index));
index += op.length;
} else {
simpleOperations.push(new Delete(Math.abs(op), index));
}
}
return simpleOperations;
};
return SimpleTextOperation;
})(this);
// Export for CommonJS
if (typeof module === 'object') {
module.exports = ot.SimpleTextOperation;
}

View file

@ -1,530 +0,0 @@
if (typeof ot === 'undefined') {
// Export for browsers
var ot = {};
}
ot.TextOperation = (function () {
'use strict';
// Constructor for new operations.
function TextOperation () {
if (!this || this.constructor !== TextOperation) {
// => function was called without 'new'
return new TextOperation();
}
// When an operation is applied to an input string, you can think of this as
// if an imaginary cursor runs over the entire string and skips over some
// parts, deletes some parts and inserts characters at some positions. These
// actions (skip/delete/insert) are stored as an array in the "ops" property.
this.ops = [];
// An operation's baseLength is the length of every string the operation
// can be applied to.
this.baseLength = 0;
// The targetLength is the length of every string that results from applying
// the operation on a valid input string.
this.targetLength = 0;
}
TextOperation.prototype.equals = function (other) {
if (this.baseLength !== other.baseLength) { return false; }
if (this.targetLength !== other.targetLength) { return false; }
if (this.ops.length !== other.ops.length) { return false; }
for (var i = 0; i < this.ops.length; i++) {
if (this.ops[i] !== other.ops[i]) { return false; }
}
return true;
};
// Operation are essentially lists of ops. There are three types of ops:
//
// * Retain ops: Advance the cursor position by a given number of characters.
// Represented by positive ints.
// * Insert ops: Insert a given string at the current cursor position.
// Represented by strings.
// * Delete ops: Delete the next n characters. Represented by negative ints.
var isRetain = TextOperation.isRetain = function (op) {
return typeof op === 'number' && op > 0;
};
var isInsert = TextOperation.isInsert = function (op) {
return typeof op === 'string';
};
var isDelete = TextOperation.isDelete = function (op) {
return typeof op === 'number' && op < 0;
};
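// Illustrative sketch (never invoked; the function name is ours): an ops array
// mixes the three kinds of ops. [2, 'X', -1] means "keep 2 characters, insert
// 'X', delete 1 character", so applied to "abc" it yields "abX".
function exampleOpKinds () {
  var op = new TextOperation().retain(2).insert('X')['delete'](1);
  return op.apply('abc'); // "abX"
}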
// After an operation is constructed, the user of the library can specify the
// actions of an operation (skip/insert/delete) with these three builder
// methods. They all return the operation for convenient chaining.
// Skip over a given number of characters.
TextOperation.prototype.retain = function (n) {
if (typeof n !== 'number') {
throw new Error("retain expects an integer");
}
if (n === 0) { return this; }
this.baseLength += n;
this.targetLength += n;
if (isRetain(this.ops[this.ops.length-1])) {
// The last op is a retain op => we can merge them into one op.
this.ops[this.ops.length-1] += n;
} else {
// Create a new op.
this.ops.push(n);
}
return this;
};
// Insert a string at the current position.
TextOperation.prototype.insert = function (str) {
if (typeof str !== 'string') {
throw new Error("insert expects a string");
}
if (str === '') { return this; }
this.targetLength += str.length;
var ops = this.ops;
if (isInsert(ops[ops.length-1])) {
// Merge insert op.
ops[ops.length-1] += str;
} else if (isDelete(ops[ops.length-1])) {
// It doesn't matter when an operation is applied whether the operation
// is delete(3), insert("something") or insert("something"), delete(3).
// Here we enforce that in this case, the insert op always comes first.
// This makes all operations that have the same effect when applied to
// a document of the right length equal in respect to the `equals` method.
if (isInsert(ops[ops.length-2])) {
ops[ops.length-2] += str;
} else {
ops[ops.length] = ops[ops.length-1];
ops[ops.length-2] = str;
}
} else {
ops.push(str);
}
return this;
};
// Delete a string at the current position.
TextOperation.prototype['delete'] = function (n) {
if (typeof n === 'string') { n = n.length; }
if (typeof n !== 'number') {
throw new Error("delete expects an integer or a string");
}
if (n === 0) { return this; }
if (n > 0) { n = -n; }
this.baseLength -= n;
if (isDelete(this.ops[this.ops.length-1])) {
this.ops[this.ops.length-1] += n;
} else {
this.ops.push(n);
}
return this;
};
// Tests whether this operation has no effect.
TextOperation.prototype.isNoop = function () {
return this.ops.length === 0 || (this.ops.length === 1 && isRetain(this.ops[0]));
};
// Pretty printing.
TextOperation.prototype.toString = function () {
// map: build a new array by applying a function to every element in an old
// array.
var map = Array.prototype.map || function (fn) {
var arr = this;
var newArr = [];
for (var i = 0, l = arr.length; i < l; i++) {
newArr[i] = fn(arr[i]);
}
return newArr;
};
return map.call(this.ops, function (op) {
if (isRetain(op)) {
return "retain " + op;
} else if (isInsert(op)) {
return "insert '" + op + "'";
} else {
return "delete " + (-op);
}
}).join(', ');
};
// Converts operation into a JSON value.
TextOperation.prototype.toJSON = function () {
return this.ops;
};
// Converts a plain JS object into an operation and validates it.
TextOperation.fromJSON = function (ops) {
var o = new TextOperation();
for (var i = 0, l = ops.length; i < l; i++) {
var op = ops[i];
if (isRetain(op)) {
o.retain(op);
} else if (isInsert(op)) {
o.insert(op);
} else if (isDelete(op)) {
o['delete'](op);
} else {
throw new Error("unknown operation: " + JSON.stringify(op));
}
}
return o;
};
// Apply an operation to a string, returning a new string. Throws an error if
// there's a mismatch between the input string and the operation.
TextOperation.prototype.apply = function (str) {
var operation = this;
if (str.length !== operation.baseLength) {
throw new Error("The operation's base length must be equal to the string's length.");
}
var newStr = [], j = 0;
var strIndex = 0;
var ops = this.ops;
for (var i = 0, l = ops.length; i < l; i++) {
var op = ops[i];
if (isRetain(op)) {
if (strIndex + op > str.length) {
throw new Error("Operation can't retain more characters than are left in the string.");
}
// Copy skipped part of the old string.
newStr[j++] = str.slice(strIndex, strIndex + op);
strIndex += op;
} else if (isInsert(op)) {
// Insert string.
newStr[j++] = op;
} else { // delete op
strIndex -= op;
}
}
if (strIndex !== str.length) {
throw new Error("The operation didn't operate on the whole string.");
}
return newStr.join('');
};
// Computes the inverse of an operation. The inverse of an operation is the
// operation that reverts the effects of the operation, e.g. when you have an
// operation 'insert("hello "); skip(6);' then the inverse is 'delete("hello ");
// skip(6);'. The inverse should be used for implementing undo.
TextOperation.prototype.invert = function (str) {
var strIndex = 0;
var inverse = new TextOperation();
var ops = this.ops;
for (var i = 0, l = ops.length; i < l; i++) {
var op = ops[i];
if (isRetain(op)) {
inverse.retain(op);
strIndex += op;
} else if (isInsert(op)) {
inverse['delete'](op.length);
} else { // delete op
inverse.insert(str.slice(strIndex, strIndex - op));
strIndex -= op;
}
}
return inverse;
};
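// Illustrative sketch (never invoked; the function name is ours): the inverse,
// computed against the original string, undoes the operation and is what an
// undo stack would push.
function exampleInvert () {
  var doc = 'abc';
  var op = new TextOperation().insert('hi').retain(3); // "abc" -> "hiabc"
  var undo = op.invert(doc);                           // delete 2, retain 3
  return undo.apply(op.apply(doc));                    // back to "abc"
}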
// Compose merges two consecutive operations into one operation, that
// preserves the changes of both. Or, in other words, for each input string S
// and a pair of consecutive operations A and B,
// apply(apply(S, A), B) = apply(S, compose(A, B)) must hold.
TextOperation.prototype.compose = function (operation2) {
var operation1 = this;
if (operation1.targetLength !== operation2.baseLength) {
throw new Error("The base length of the second operation has to be the target length of the first operation");
}
var operation = new TextOperation(); // the combined operation
var ops1 = operation1.ops, ops2 = operation2.ops; // for fast access
var i1 = 0, i2 = 0; // current index into ops1 respectively ops2
var op1 = ops1[i1++], op2 = ops2[i2++]; // current ops
while (true) {
// Dispatch on the type of op1 and op2
if (typeof op1 === 'undefined' && typeof op2 === 'undefined') {
// end condition: both ops1 and ops2 have been processed
break;
}
if (isDelete(op1)) {
operation['delete'](op1);
op1 = ops1[i1++];
continue;
}
if (isInsert(op2)) {
operation.insert(op2);
op2 = ops2[i2++];
continue;
}
if (typeof op1 === 'undefined') {
throw new Error("Cannot compose operations: first operation is too short.");
}
if (typeof op2 === 'undefined') {
throw new Error("Cannot compose operations: first operation is too long.");
}
if (isRetain(op1) && isRetain(op2)) {
if (op1 > op2) {
operation.retain(op2);
op1 = op1 - op2;
op2 = ops2[i2++];
} else if (op1 === op2) {
operation.retain(op1);
op1 = ops1[i1++];
op2 = ops2[i2++];
} else {
operation.retain(op1);
op2 = op2 - op1;
op1 = ops1[i1++];
}
} else if (isInsert(op1) && isDelete(op2)) {
if (op1.length > -op2) {
op1 = op1.slice(-op2);
op2 = ops2[i2++];
} else if (op1.length === -op2) {
op1 = ops1[i1++];
op2 = ops2[i2++];
} else {
op2 = op2 + op1.length;
op1 = ops1[i1++];
}
} else if (isInsert(op1) && isRetain(op2)) {
if (op1.length > op2) {
operation.insert(op1.slice(0, op2));
op1 = op1.slice(op2);
op2 = ops2[i2++];
} else if (op1.length === op2) {
operation.insert(op1);
op1 = ops1[i1++];
op2 = ops2[i2++];
} else {
operation.insert(op1);
op2 = op2 - op1.length;
op1 = ops1[i1++];
}
} else if (isRetain(op1) && isDelete(op2)) {
if (op1 > -op2) {
operation['delete'](op2);
op1 = op1 + op2;
op2 = ops2[i2++];
} else if (op1 === -op2) {
operation['delete'](op2);
op1 = ops1[i1++];
op2 = ops2[i2++];
} else {
operation['delete'](op1);
op2 = op2 + op1;
op1 = ops1[i1++];
}
} else {
throw new Error(
"This shouldn't happen: op1: " +
JSON.stringify(op1) + ", op2: " +
JSON.stringify(op2)
);
}
}
return operation;
};
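// Illustrative sketch (never invoked; the function name is ours): composing two
// consecutive operations yields one operation with the same combined effect,
// so A.compose(B).apply(S) === B.apply(A.apply(S)).
function exampleCompose () {
  var S = 'x';
  var A = new TextOperation().retain(1).insert('y'); // "x"  -> "xy"
  var B = new TextOperation().retain(2).insert('z'); // "xy" -> "xyz"
  return A.compose(B).apply(S); // "xyz"
}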
function getSimpleOp (operation, fn) {
var ops = operation.ops;
var isRetain = TextOperation.isRetain;
switch (ops.length) {
case 1:
return ops[0];
case 2:
return isRetain(ops[0]) ? ops[1] : (isRetain(ops[1]) ? ops[0] : null);
case 3:
if (isRetain(ops[0]) && isRetain(ops[2])) { return ops[1]; }
}
return null;
}
function getStartIndex (operation) {
if (isRetain(operation.ops[0])) { return operation.ops[0]; }
return 0;
}
// When you use ctrl-z to undo your latest changes, you expect the program not
// to undo every single keystroke but to undo your last sentence you wrote at
// a stretch or the deletion you did by holding the backspace key down. This
// can be implemented by composing operations on the undo stack. This
// method can help decide whether two operations should be composed. It
// returns true if the operations are consecutive insert operations or both
// operations delete text at the same position. You may want to include other
// factors like the time since the last change in your decision.
TextOperation.prototype.shouldBeComposedWith = function (other) {
if (this.isNoop() || other.isNoop()) { return true; }
var startA = getStartIndex(this), startB = getStartIndex(other);
var simpleA = getSimpleOp(this), simpleB = getSimpleOp(other);
if (!simpleA || !simpleB) { return false; }
if (isInsert(simpleA) && isInsert(simpleB)) {
return startA + simpleA.length === startB;
}
if (isDelete(simpleA) && isDelete(simpleB)) {
// there are two possibilities to delete: with backspace and with the
// delete key.
return (startB - simpleB === startA) || startA === startB;
}
return false;
};
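// Illustrative sketch (never invoked; the function name is ours): two keystrokes
// that extend the same insertion report true and can be merged into one undo step.
function exampleShouldBeComposedWith () {
  var first = new TextOperation().retain(2).insert('a').retain(4);  // type "a" at index 2
  var second = new TextOperation().retain(3).insert('b').retain(4); // then "b" right after it
  return first.shouldBeComposedWith(second); // true
}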
// Decides whether two operations should be composed with each other
// if they were inverted, that is
// `shouldBeComposedWith(a, b) = shouldBeComposedWithInverted(b^{-1}, a^{-1})`.
TextOperation.prototype.shouldBeComposedWithInverted = function (other) {
if (this.isNoop() || other.isNoop()) { return true; }
var startA = getStartIndex(this), startB = getStartIndex(other);
var simpleA = getSimpleOp(this), simpleB = getSimpleOp(other);
if (!simpleA || !simpleB) { return false; }
if (isInsert(simpleA) && isInsert(simpleB)) {
return startA + simpleA.length === startB || startA === startB;
}
if (isDelete(simpleA) && isDelete(simpleB)) {
return startB - simpleB === startA;
}
return false;
};
// Transform takes two operations A and B that happened concurrently and
// produces two operations A' and B' (in an array) such that
// `apply(apply(S, A), B') = apply(apply(S, B), A')`. This function is the
// heart of OT.
TextOperation.transform = function (operation1, operation2) {
if (operation1.baseLength !== operation2.baseLength) {
throw new Error("Both operations have to have the same base length");
}
var operation1prime = new TextOperation();
var operation2prime = new TextOperation();
var ops1 = operation1.ops, ops2 = operation2.ops;
var i1 = 0, i2 = 0;
var op1 = ops1[i1++], op2 = ops2[i2++];
while (true) {
// At every iteration of the loop, the imaginary cursor that both
// operation1 and operation2 have that operates on the input string must
// have the same position in the input string.
if (typeof op1 === 'undefined' && typeof op2 === 'undefined') {
// end condition: both ops1 and ops2 have been processed
break;
}
// next two cases: one or both ops are insert ops
// => insert the string in the corresponding prime operation, skip it in
// the other one. If both op1 and op2 are insert ops, prefer op1.
if (isInsert(op1)) {
operation1prime.insert(op1);
operation2prime.retain(op1.length);
op1 = ops1[i1++];
continue;
}
if (isInsert(op2)) {
operation1prime.retain(op2.length);
operation2prime.insert(op2);
op2 = ops2[i2++];
continue;
}
if (typeof op1 === 'undefined') {
throw new Error("Cannot compose operations: first operation is too short.");
}
if (typeof op2 === 'undefined') {
throw new Error("Cannot compose operations: first operation is too long.");
}
var minl;
if (isRetain(op1) && isRetain(op2)) {
// Simple case: retain/retain
if (op1 > op2) {
minl = op2;
op1 = op1 - op2;
op2 = ops2[i2++];
} else if (op1 === op2) {
minl = op2;
op1 = ops1[i1++];
op2 = ops2[i2++];
} else {
minl = op1;
op2 = op2 - op1;
op1 = ops1[i1++];
}
operation1prime.retain(minl);
operation2prime.retain(minl);
} else if (isDelete(op1) && isDelete(op2)) {
// Both operations delete the same string at the same position. We don't
// need to produce any operations, we just skip over the delete ops and
// handle the case that one operation deletes more than the other.
if (-op1 > -op2) {
op1 = op1 - op2;
op2 = ops2[i2++];
} else if (op1 === op2) {
op1 = ops1[i1++];
op2 = ops2[i2++];
} else {
op2 = op2 - op1;
op1 = ops1[i1++];
}
// next two cases: delete/retain and retain/delete
} else if (isDelete(op1) && isRetain(op2)) {
if (-op1 > op2) {
minl = op2;
op1 = op1 + op2;
op2 = ops2[i2++];
} else if (-op1 === op2) {
minl = op2;
op1 = ops1[i1++];
op2 = ops2[i2++];
} else {
minl = -op1;
op2 = op2 + op1;
op1 = ops1[i1++];
}
operation1prime['delete'](minl);
} else if (isRetain(op1) && isDelete(op2)) {
if (op1 > -op2) {
minl = -op2;
op1 = op1 + op2;
op2 = ops2[i2++];
} else if (op1 === -op2) {
minl = op1;
op1 = ops1[i1++];
op2 = ops2[i2++];
} else {
minl = op1;
op2 = op2 + op1;
op1 = ops1[i1++];
}
operation2prime['delete'](minl);
} else {
throw new Error("The two operations aren't compatible");
}
}
return [operation1prime, operation2prime];
};
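// Illustrative sketch (never invoked; the function name is ours): transform makes
// two concurrent edits converge, i.e.
// Bprime.apply(A.apply(S)) === Aprime.apply(B.apply(S)).
function exampleTransformConvergence () {
  var S = 'at';
  var A = new TextOperation().insert('h').retain(2); // prepend "h"
  var B = new TextOperation().retain(2).insert('s'); // append "s"
  var primes = TextOperation.transform(A, B);
  var Aprime = primes[0], Bprime = primes[1];
  // Both application orders yield "hats".
  return [Bprime.apply(A.apply(S)), Aprime.apply(B.apply(S))];
}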
return TextOperation;
}());
// Export for CommonJS
if (typeof module === 'object') {
module.exports = ot.TextOperation;
}

View file

@ -1,80 +0,0 @@
if (typeof ot === 'undefined') {
// Export for browsers
var ot = {};
}
ot.WrappedOperation = (function (global) {
'use strict';
// A WrappedOperation contains an operation and corresponding metadata.
function WrappedOperation (operation, meta) {
this.wrapped = operation;
this.meta = meta;
}
WrappedOperation.prototype.apply = function () {
return this.wrapped.apply.apply(this.wrapped, arguments);
};
WrappedOperation.prototype.invert = function () {
var meta = this.meta;
return new WrappedOperation(
this.wrapped.invert.apply(this.wrapped, arguments),
meta && typeof meta === 'object' && typeof meta.invert === 'function' ?
meta.invert.apply(meta, arguments) : meta
);
};
// Copy all properties from source to target.
function copy (source, target) {
for (var key in source) {
if (source.hasOwnProperty(key)) {
target[key] = source[key];
}
}
}
function composeMeta (a, b) {
if (a && typeof a === 'object') {
if (typeof a.compose === 'function') { return a.compose(b); }
var meta = {};
copy(a, meta);
copy(b, meta);
return meta;
}
return b;
}
WrappedOperation.prototype.compose = function (other) {
return new WrappedOperation(
this.wrapped.compose(other.wrapped),
composeMeta(this.meta, other.meta)
);
};
function transformMeta (meta, operation) {
if (meta && typeof meta === 'object') {
if (typeof meta.transform === 'function') {
return meta.transform(operation);
}
}
return meta;
}
WrappedOperation.transform = function (a, b) {
var transform = a.wrapped.constructor.transform;
var pair = transform(a.wrapped, b.wrapped);
return [
new WrappedOperation(pair[0], transformMeta(a.meta, b.wrapped)),
new WrappedOperation(pair[1], transformMeta(b.meta, a.wrapped))
];
};
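// Illustrative sketch (never invoked; the function name is ours, and the two
// requires are only pulled in for the example): the selection carried as metadata
// is transformed alongside the wrapped operation, so a cursor stays in place
// across a concurrent edit.
function exampleWrappedTransform () {
  var TextOperation = global.ot ? global.ot.TextOperation : require('./text-operation');
  var Selection = global.ot ? global.ot.Selection : require('./selection');
  var a = new WrappedOperation(new TextOperation().retain(2).insert('a'), Selection.createCursor(2));
  var b = new WrappedOperation(new TextOperation().insert('xy').retain(2), null);
  var pair = WrappedOperation.transform(a, b);
  return pair[0].meta; // cursor moved from 2 to 4 because b inserted two characters before it
}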
return WrappedOperation;
}(this));
// Export for CommonJS
if (typeof module === 'object') {
module.exports = ot.WrappedOperation;
}

View file

@ -1,991 +0,0 @@
import Chance from 'chance'
import CodeMirror from 'codemirror'
import cookie from 'cookie'
import cookieParser from 'cookie-parser'
import moment from 'moment'
import randomcolor from 'randomcolor'
import SocketIO, { Socket } from 'socket.io'
import { config } from './config'
import { History } from './history'
import { logger } from './logger'
import { Author, Note, Revision, User } from './models'
import { NoteAuthorship } from './models/note'
import { PhotoProfile } from './utils/PhotoProfile'
import { EditorSocketIOServer } from './ot/editor-socketio-server'
import { mapToObject } from './utils/functions'
import { getPermission, Permission } from './web/note/util'
export type SocketWithNoteId = Socket & { noteId: string }
const chance = new Chance()
export enum State {
Starting,
Running,
Stopping
}
/* eslint-disable @typescript-eslint/no-use-before-define */
const realtime: {
onAuthorizeSuccess: (data, accept) => void;
onAuthorizeFail: (data, message, error, accept) => void;
io: SocketIO.Server; isReady: () => boolean;
connection: (socket: SocketWithNoteId) => void;
secure: (socket: SocketIO.Socket, next: (err?: Error) => void) => void;
getStatus: (callback) => void; state: State;
} = {
io: SocketIO(),
onAuthorizeSuccess: onAuthorizeSuccess,
onAuthorizeFail: onAuthorizeFail,
secure: secure,
connection: connection,
getStatus: getStatus,
isReady: isReady,
state: State.Starting
}
/* eslint-enable @typescript-eslint/no-use-before-define */
const disconnectSocketQueue: SocketWithNoteId[] = []
function onAuthorizeSuccess (data, accept): void {
accept()
}
function onAuthorizeFail (data, message, error, accept): void {
accept() // accept regardless of the authorization result, to allow anonymous usage
}
// verify the connection origin using the session cookie
function secure (socket: Socket, next: (err?: Error) => void): void {
try {
const handshakeData = socket.request
if (handshakeData.headers.cookie) {
handshakeData.cookie = cookie.parse(handshakeData.headers.cookie)
handshakeData.sessionID = cookieParser.signedCookie(handshakeData.cookie[config.sessionName], config.sessionSecret)
if (handshakeData.sessionID &&
handshakeData.cookie[config.sessionName] &&
handshakeData.cookie[config.sessionName] !== handshakeData.sessionID) {
logger.debug(`AUTH success cookie: ${handshakeData.sessionID}`)
return next()
} else {
next(new Error('AUTH failed: Cookie is invalid.'))
}
} else {
next(new Error('AUTH failed: No cookie transmitted.'))
}
} catch (ex) {
next(new Error('AUTH failed:' + JSON.stringify(ex)))
}
}
function emitCheck (note: NoteSession): void {
const out = {
title: note.title,
updatetime: note.updatetime,
lastchangeuser: note.lastchangeuser,
lastchangeuserprofile: note.lastchangeuserprofile,
authors: mapToObject(note.authors),
authorship: note.authorship
}
realtime.io.to(note.id).emit('check', out)
}
class UserSession {
id?: string
address?: string
login?: boolean
userid: string | null
'user-agent'?
photo: string
color: string
cursor?: CodeMirror.Position
name: string | null
idle?: boolean
type?: string
}
class NoteSession {
id: string
alias: string
title: string
ownerId: string
ownerprofile: PhotoProfile | null
permission: string
lastchangeuser: string | null
lastchangeuserprofile: PhotoProfile | null
socks: SocketWithNoteId[]
users: Map<string, UserSession>
tempUsers: Map<string, number> // time value
createtime: number
updatetime: number
server: EditorSocketIOServer
authors: Map<string, UserSession>
authorship: NoteAuthorship[]
}
// actions
const users: Map<string, UserSession> = new Map<string, UserSession>()
const notes: Map<string, NoteSession> = new Map<string, NoteSession>()
let saverSleep = false
function finishUpdateNote (note: NoteSession, _note: Note, callback: (err: Error | null, note: Note | null) => void): void {
if (!note || !note.server) return callback(null, null)
const body = note.server.document
const title = note.title = Note.parseNoteTitle(body)
const values = {
title: title,
content: body,
authorship: note.authorship,
lastchangeuserId: note.lastchangeuser,
lastchangeAt: Date.now()
}
_note.update(values).then(function (_note) {
saverSleep = false
return callback(null, _note)
}).catch(function (err) {
logger.error(err)
return callback(err, null)
})
}
function updateHistory (userId, note: NoteSession, time?): void {
const noteId = note.alias ? note.alias : Note.encodeNoteId(note.id)
if (note.server) History.updateHistory(userId, noteId, note.server.document, time)
}
function updateNote (note: NoteSession, callback: (err, note) => void): void {
Note.findOne({
where: {
id: note.id
}
}).then(function (_note) {
if (!_note) return callback(null, null)
// update user note history
const tempUsers = new Map(note.tempUsers)
note.tempUsers = new Map<string, number>()
for (const [key, time] of tempUsers) {
updateHistory(key, note, time)
}
if (note.lastchangeuser) {
if (_note.lastchangeuserId !== note.lastchangeuser) {
User.findOne({
where: {
id: note.lastchangeuser
}
}).then(function (user) {
if (!user) return callback(null, null)
note.lastchangeuserprofile = PhotoProfile.fromUser(user)
return finishUpdateNote(note, _note, callback)
}).catch(function (err) {
logger.error(err)
return callback(err, null)
})
} else {
return finishUpdateNote(note, _note, callback)
}
} else {
note.lastchangeuserprofile = null
return finishUpdateNote(note, _note, callback)
}
}).catch(function (err) {
logger.error(err)
return callback(err, null)
})
}
// update when the note is dirty
setInterval(function () {
for (const [key, note] of notes) {
if (note.server.isDirty) {
logger.debug(`updater found dirty note: ${key}`)
note.server.isDirty = false
updateNote(note, function (err, _note) {
// handle the case where the note has already been cleaned up
if (!note || !note.server) return
if (!_note) {
realtime.io.to(note.id).emit('info', {
code: 404
})
logger.error('note not found: ', note.id)
}
if (err || !_note) {
for (const sock of note.socks) {
if (sock) {
setTimeout(function () {
sock.disconnect()
}, 0)
}
}
return logger.error('updater error', err)
}
note.updatetime = moment(_note.lastchangeAt).valueOf()
emitCheck(note)
})
}
}
}, 1000)
// save note revision in interval
setInterval(function () {
if (saverSleep) return
Revision.saveAllNotesRevision(function (err, notes) {
if (err) return logger.error('revision saver failed: ' + err)
if (notes && notes.length <= 0) {
saverSleep = true
}
})
}, 60000 * 5)
let isConnectionBusy: boolean
let isDisconnectBusy: boolean
const connectionSocketQueue: SocketWithNoteId[] = []
function getStatus (callback): void {
Note.count().then(function (notecount) {
const distinctaddresses: string[] = []
const regaddresses: string[] = []
const distinctregaddresses: string[] = []
for (const user of users.values()) {
if (!user) return
let found = false
for (const distinctaddress of distinctaddresses) {
if (user.address === distinctaddress) {
found = true
break
}
}
if (!found) {
if (user.address != null) {
distinctaddresses.push(user.address)
}
}
if (user.login) {
if (user.address != null) {
regaddresses.push(user.address)
}
let found = false
for (let i = 0; i < distinctregaddresses.length; i++) {
if (user.address === distinctregaddresses[i]) {
found = true
break
}
}
if (!found) {
if (user.address != null) {
distinctregaddresses.push(user.address)
}
}
}
}
User.count().then(function (regcount) {
// eslint-disable-next-line standard/no-callback-literal
return callback ? callback({
onlineNotes: notes.size,
onlineUsers: users.size,
distinctOnlineUsers: distinctaddresses.length,
notesCount: notecount,
registeredUsers: regcount,
onlineRegisteredUsers: regaddresses.length,
distinctOnlineRegisteredUsers: distinctregaddresses.length,
isConnectionBusy: isConnectionBusy,
connectionSocketQueueLength: connectionSocketQueue.length,
isDisconnectBusy: isDisconnectBusy,
disconnectSocketQueueLength: disconnectSocketQueue.length
}) : null
}).catch(function (err) {
return logger.error('count user failed: ' + err)
})
}).catch(function (err) {
return logger.error('count note failed: ' + err)
})
}
function isReady (): boolean {
return realtime.io &&
notes.size === 0 && users.size === 0 &&
connectionSocketQueue.length === 0 && !isConnectionBusy &&
disconnectSocketQueue.length === 0 && !isDisconnectBusy
}
function extractNoteIdFromSocket (socket: Socket): string | boolean {
if (!socket || !socket.handshake) {
return false
}
if (socket.handshake.query && socket.handshake.query.noteId) {
return socket.handshake.query.noteId
} else {
return false
}
}
function parseNoteIdFromSocket (socket: Socket, callback: (err: string | null, noteId: string | null) => void): void {
const noteId = extractNoteIdFromSocket(socket)
if (!noteId) {
return callback(null, null)
}
Note.parseNoteId(noteId, function (err, id) {
if (err || !id) return callback(err, id)
return callback(null, id)
})
}
function buildUserOutData (user): UserSession {
return {
id: user.id,
login: user.login,
userid: user.userid,
photo: user.photo,
color: user.color,
cursor: user.cursor,
name: user.name,
idle: user.idle,
type: user.type
}
}
function emitOnlineUsers (socket: SocketWithNoteId): void {
const noteId = socket.noteId
if (!noteId) return
const note = notes.get(noteId)
if (!note) return
const users: UserSession[] = []
for (const user of note.users.values()) {
if (user) {
users.push(buildUserOutData(user))
}
}
const out = {
users: users
}
realtime.io.to(noteId).emit('online users', out)
}
function emitUserStatus (socket: SocketWithNoteId): void {
const noteId = socket.noteId
if (!noteId) return
const note = notes.get(noteId)
if (!note) return
const user = users.get(socket.id)
if (!user) return
const out = buildUserOutData(user)
socket.broadcast.to(noteId).emit('user status', out)
}
function emitRefresh (socket: SocketWithNoteId): void {
const noteId = socket.noteId
if (!noteId) return
const note = notes.get(noteId)
if (!note) return
const out = {
title: note.title,
docmaxlength: config.documentMaxLength,
owner: note.ownerId,
ownerprofile: note.ownerprofile,
lastchangeuser: note.lastchangeuser,
lastchangeuserprofile: note.lastchangeuserprofile,
authors: mapToObject(note.authors),
authorship: note.authorship,
permission: note.permission,
createtime: note.createtime,
updatetime: note.updatetime
}
socket.emit('refresh', out)
}
function isDuplicatedInSocketQueue (queue: Socket[], socket: Socket): boolean {
for (const sock of queue) {
if (sock && sock.id === socket.id) {
return true
}
}
return false
}
function clearSocketQueue (queue: Socket[], socket: Socket): void {
for (let i = 0; i < queue.length; i++) {
if (!queue[i] || queue[i].id === socket.id) {
queue.splice(i, 1)
i--
}
}
}
function connectNextSocket (): void {
setTimeout(function () {
isConnectionBusy = false
if (connectionSocketQueue.length > 0) {
// Otherwise we get a loop startConnection - failConnection - connectNextSocket
// eslint-disable-next-line @typescript-eslint/no-use-before-define
startConnection(connectionSocketQueue[0])
}
}, 1)
}
function failConnection (errorCode: number, errorMessage: string, socket: Socket): void {
logger.error(errorMessage)
// clear error socket in queue
clearSocketQueue(connectionSocketQueue, socket)
connectNextSocket()
// emit error info
socket.emit('info', {
code: errorCode
})
socket.disconnect(true)
}
function interruptConnection (socket: Socket, noteId: string, socketId): void {
notes.delete(noteId)
users.delete(socketId)
if (socket) {
clearSocketQueue(connectionSocketQueue, socket)
} else {
connectionSocketQueue.shift()
}
connectNextSocket()
}
function finishConnection (socket: SocketWithNoteId, noteId: string, socketId: string): void {
// if no valid info is provided, drop the client
if (!socket || !notes.get(noteId) || !users.get(socketId)) {
return interruptConnection(socket, noteId, socketId)
}
// check view permission
const note = notes.get(noteId)
if (!note) return
if (getPermission(socket.request.user, note) === Permission.None) {
interruptConnection(socket, noteId, socketId)
return failConnection(403, 'connection forbidden', socket)
}
const user = users.get(socketId)
if (!user) {
logger.warn('Could not find user for socketId ' + socketId)
return
}
if (user.userid) {
// update user color to author color
const author = note.authors.get(user.userid)
if (author) {
const socketIdUser = users.get(socket.id)
if (!socketIdUser) return
user.color = author.color
socketIdUser.color = author.color
users.set(socket.id, user)
}
}
note.users.set(socket.id, user)
note.socks.push(socket)
note.server.addClient(socket)
note.server.setName(socket, user.name)
note.server.setColor(socket, user.color)
// update user note history
updateHistory(user.userid, note)
emitOnlineUsers(socket)
emitRefresh(socket)
// clear finished socket in queue
clearSocketQueue(connectionSocketQueue, socket)
// seek for next socket
connectNextSocket()
if (config.debug) {
const noteId = socket.noteId
logger.debug(`SERVER connected a client to [${noteId}]:`)
logger.debug(JSON.stringify(user))
logger.debug(notes)
getStatus(function (data) {
logger.debug(JSON.stringify(data))
})
}
}
function ifMayEdit (socket: SocketWithNoteId, originIsOperation: boolean, callback: (mayEdit: boolean) => void): void {
const noteId = socket.noteId
if (!noteId) return
const note = notes.get(noteId)
if (!note) return
const mayEdit = (getPermission(socket.request.user, note) >= Permission.Write)
// if user may edit and this is a text operation
if (originIsOperation && mayEdit) {
// save for the last change user id
if (socket.request.user && socket.request.user.logged_in) {
note.lastchangeuser = socket.request.user.id
} else {
note.lastchangeuser = null
}
}
return callback(mayEdit)
}
function operationCallback (socket: SocketWithNoteId, operation): void {
const noteId = socket.noteId
if (!noteId) return
const note = notes.get(noteId)
if (!note) return
let userId: string | null = null
// save authors
if (socket.request.user && socket.request.user.logged_in) {
const user = users.get(socket.id)
if (!user) return
userId = socket.request.user.id
if (!userId) return
const author = note.authors.get(userId)
if (!author) {
Author.findOrCreate({
where: {
noteId: noteId,
userId: userId
},
defaults: {
noteId: noteId,
userId: userId,
color: user.color
}
}).then(function ([author, _]) {
if (author) {
note.authors.set(author.userId, {
userid: author.userId,
color: author.color,
photo: user.photo,
name: user.name
})
}
}).catch(function (err) {
logger.error('operation callback failed: ' + err)
})
}
if (userId) note.tempUsers.set(userId, Date.now())
}
// save authorship - defer with setImmediate because the update is an O(n) algorithm
setImmediate(function () {
note.authorship = Note.updateAuthorshipByOperation(operation, userId, note.authorship)
})
}
function startConnection (socket: SocketWithNoteId): void {
if (isConnectionBusy) return
isConnectionBusy = true
const noteId: string = socket.noteId
if (!noteId) {
return failConnection(404, 'note id not found', socket)
}
if (!notes.get(noteId)) {
const include = [{
model: User,
as: 'owner'
}, {
model: User,
as: 'lastchangeuser'
}, {
model: Author,
as: 'authors',
include: [{
model: User,
as: 'user'
}]
}]
Note.findOne({
where: {
id: noteId
},
include: include
}).then(function (note) {
if (!note) {
return failConnection(404, 'note not found', socket)
}
const ownerId = note.ownerId
const ownerprofile = note.owner ? PhotoProfile.fromUser(note.owner) : null
const lastchangeuser = note.lastchangeuserId
const lastchangeuserprofile = note.lastchangeuser ? PhotoProfile.fromUser(note.lastchangeuser) : null
const body = note.content
const createtime = note.createdAt
const updatetime = note.lastchangeAt
const server = new EditorSocketIOServer(body, [], noteId, ifMayEdit, operationCallback)
const authors = new Map<string, UserSession>()
for (const author of note.authors) {
const profile = PhotoProfile.fromUser(author.user)
if (profile) {
authors.set(author.userId, {
userid: author.userId,
color: author.color,
photo: profile.photo,
name: profile.name
})
}
}
notes.set(noteId, {
id: noteId,
alias: note.alias,
title: note.title,
ownerId: ownerId,
ownerprofile: ownerprofile,
permission: note.permission,
lastchangeuser: lastchangeuser,
lastchangeuserprofile: lastchangeuserprofile,
socks: [],
users: new Map<string, UserSession>(),
tempUsers: new Map<string, number>(),
createtime: moment(createtime).valueOf(),
updatetime: moment(updatetime).valueOf(),
server: server,
authors: authors,
authorship: note.authorship
})
return finishConnection(socket, noteId, socket.id)
}).catch(function (err) {
return failConnection(500, err, socket)
})
} else {
return finishConnection(socket, noteId, socket.id)
}
}
isConnectionBusy = false
isDisconnectBusy = false
function disconnect (socket: SocketWithNoteId): void {
if (isDisconnectBusy) return
isDisconnectBusy = true
logger.debug('SERVER disconnected a client')
logger.debug(JSON.stringify(users.get(socket.id)))
if (users.get(socket.id)) {
users.delete(socket.id)
}
const noteId = socket.noteId
const note = notes.get(noteId)
if (note) {
// delete user in users
if (note.users.get(socket.id)) {
note.users.delete(socket.id)
}
// remove sockets in the note socks
let index
do {
index = note.socks.indexOf(socket)
if (index !== -1) {
note.socks.splice(index, 1)
}
} while (index !== -1)
// remove note in notes if no user inside
if (note.users.size <= 0) {
if (note.server.isDirty) {
updateNote(note, function (err, _) {
if (err) return logger.error('disconnect note failed: ' + err)
// clear server before delete to avoid memory leaks
note.server.document = ''
note.server.operations = []
delete note.server
notes.delete(noteId)
if (config.debug) {
logger.debug(notes)
getStatus(function (data) {
logger.debug(JSON.stringify(data))
})
}
})
} else {
delete note.server
notes.delete(noteId)
}
}
}
emitOnlineUsers(socket)
// clear finished socket in queue
clearSocketQueue(disconnectSocketQueue, socket)
// seek for next socket
isDisconnectBusy = false
if (disconnectSocketQueue.length > 0) {
disconnect(disconnectSocketQueue[0])
}
if (config.debug) {
logger.debug(notes)
getStatus(function (data) {
logger.debug(JSON.stringify(data))
})
}
}
// clean up users that are no longer in any room or no longer in the connected list
setInterval(function () {
for (const [key, user] of users) {
let socket = realtime.io.sockets.connected[key] as SocketWithNoteId
if ((!socket && user) ||
(socket && (!socket.rooms || Object.keys(socket.rooms).length <= 0))) {
logger.debug(`cleaner found redundant user: ${key}`)
if (!socket) {
socket = {
id: key
} as SocketWithNoteId
}
disconnectSocketQueue.push(socket)
disconnect(socket)
}
}
}, 60000)
function updateUserData (socket: Socket, user): void {
// retrieve user data from passport
if (socket.request.user && socket.request.user.logged_in) {
const profile = PhotoProfile.fromUser(socket.request.user)
user.photo = profile?.photo
user.name = profile?.name
user.userid = socket.request.user.id
user.login = true
} else {
user.userid = null
user.name = 'Guest ' + chance.last()
user.login = false
}
}
function connection (socket: SocketWithNoteId): void {
if (realtime.state !== State.Running) return
parseNoteIdFromSocket(socket, function (err, noteId) {
if (err) {
return failConnection(500, err, socket)
}
if (!noteId) {
return failConnection(404, 'note id not found', socket)
}
if (isDuplicatedInSocketQueue(connectionSocketQueue, socket)) return
// store noteId in this socket session
socket.noteId = noteId
// initialize user data
// random color
let color = randomcolor()
// make sure the color is not already in use, or give up after the maximum number of retries
const note = notes.get(noteId)
if (note) {
let randomcount = 0
const maxrandomcount = 10
let found = false
do {
for (const user of note.users.values()) {
if (user.color === color) {
found = true
}
}
if (found) {
color = randomcolor()
randomcount++
}
} while (found && randomcount < maxrandomcount)
}
// create user data
users.set(socket.id, {
id: socket.id,
address: socket.handshake.headers['x-forwarded-for'] || socket.handshake.address,
'user-agent': socket.handshake.headers['user-agent'],
color: color,
cursor: undefined,
login: false,
userid: null,
name: null,
idle: false,
type: '',
photo: ''
})
updateUserData(socket, users.get(socket.id))
// start connection
connectionSocketQueue.push(socket)
startConnection(socket)
})
// received client refresh request
socket.on('refresh', function () {
emitRefresh(socket)
})
// received user status
socket.on('user status', function (data) {
const noteId = socket.noteId
const user = users.get(socket.id)
if (!noteId || !notes.get(noteId) || !user) return
logger.debug(`SERVER received [${noteId}] user status from [${socket.id}]: ${JSON.stringify(data)}`)
if (data) {
user.idle = data.idle
user.type = data.type
}
emitUserStatus(socket)
})
// received note permission change request
socket.on('permission', function (permission) {
// need login to do more actions
if (socket.request.user && socket.request.user.logged_in) {
const noteId = socket.noteId
if (!noteId) return
const note = notes.get(noteId)
if (!note) return
// Only owner can change permission
if (getPermission(socket.request.user, note) === Permission.Owner) {
if (permission === 'freely' && !config.allowAnonymous && !config.allowAnonymousEdits) return
note.permission = permission
Note.update({
permission: permission
}, {
where: {
id: noteId
}
}).then(function (count) {
if (!count) {
return
}
const out = {
permission: permission
}
realtime.io.to(note.id).emit('permission', out)
for (let i = 0, l = note.socks.length; i < l; i++) {
const sock = note.socks[i]
if (typeof sock !== 'undefined' && sock) {
// check view permission
if (getPermission(sock.request.user, note) === Permission.None) {
sock.emit('info', {
code: 403
})
setTimeout(function () {
sock.disconnect(true)
}, 0)
}
}
}
}).catch(function (err) {
return logger.error('update note permission failed: ' + err)
})
}
}
})
// delete a note
socket.on('delete', function () {
// need login to do more actions
if (socket.request.user && socket.request.user.logged_in) {
const noteId = socket.noteId
if (!noteId) return
const note = notes.get(noteId)
if (!note) return
// Only owner can delete note
if (getPermission(socket.request.user, note) === Permission.Owner) {
Note.destroy({
where: {
id: noteId
}
}).then(function (count) {
if (!count) return
for (let i = 0, l = note.socks.length; i < l; i++) {
const sock = note.socks[i]
if (typeof sock !== 'undefined' && sock) {
sock.emit('delete')
setTimeout(function () {
sock.disconnect(true)
}, 0)
}
}
}).catch(function (err) {
return logger.error('delete note failed: ' + err)
})
}
}
})
// received when a user logs out or changes
socket.on('user changed', function () {
logger.info('user changed')
const noteId = socket.noteId
if (!noteId) return
const note = notes.get(noteId)
if (!note) return
const user = note.users.get(socket.id)
if (!user) return
updateUserData(socket, user)
emitOnlineUsers(socket)
})
// received sync of online users request
socket.on('online users', function () {
const noteId = socket.noteId
if (!noteId) return
const note = notes.get(noteId)
if (!note) return
const users: UserSession[] = []
for (const user of note.users.values()) {
if (user) {
users.push(buildUserOutData(user))
}
}
const out = {
users: users
}
socket.emit('online users', out)
})
// check version
socket.on('version', function () {
socket.emit('version', {
version: config.fullversion,
minimumCompatibleVersion: config.minimumCompatibleVersion
})
})
// received cursor focus
socket.on('cursor focus', function (data) {
const noteId = socket.noteId
const user = users.get(socket.id)
if (!noteId || !notes.get(noteId) || !user) return
user.cursor = data
const out = buildUserOutData(user)
socket.broadcast.to(noteId).emit('cursor focus', out)
})
// received cursor activity
socket.on('cursor activity', function (data: CodeMirror.Position) {
const noteId = socket.noteId
const user = users.get(socket.id)
if (!noteId || !notes.get(noteId) || !user) return
user.cursor = data
const out = buildUserOutData(user)
socket.broadcast.to(noteId).emit('cursor activity', out)
})
// received cursor blur
socket.on('cursor blur', function () {
const noteId = socket.noteId
const user = users.get(socket.id)
if (!noteId || !notes.get(noteId) || !user) return
user.cursor = undefined
const out = {
id: socket.id
}
socket.broadcast.to(noteId).emit('cursor blur', out)
})
// when a client disconnects
socket.on('disconnect', function () {
if (isDuplicatedInSocketQueue(disconnectSocketQueue, socket)) return
disconnectSocketQueue.push(socket)
disconnect(socket)
})
}
export { realtime }

View file

@ -1,184 +0,0 @@
'use strict'
import { config } from './config'
import { Note, User } from './models'
import fs from 'fs'
import { logger } from './logger'
import * as NoteUtils from './web/note/util'
import { errors } from './errors'
import path from 'path'
import request from 'request'
function showIndex (req, res, _): void {
const authStatus = req.isAuthenticated()
const deleteToken = ''
const data = {
signin: authStatus,
infoMessage: req.flash('info'),
errorMessage: req.flash('error'),
imprint: fs.existsSync(path.join(config.docsPath, 'imprint.md')),
privacyStatement: fs.existsSync(path.join(config.docsPath, 'privacy.md')),
termsOfUse: fs.existsSync(path.join(config.docsPath, 'terms-of-use.md')),
deleteToken: deleteToken
}
if (authStatus) {
User.findOne({
where: {
id: req.user.id
}
}).then(function (user: User | null) {
if (user) {
data.deleteToken = user.deleteToken
res.render('index.ejs', data)
}
})
} else {
res.render('index.ejs', data)
}
}
function githubActionGist (req, res, note: Note): void {
const code = req.query.code
const state = req.query.state
if (!code || !state) {
return errors.errorForbidden(res)
} else {
// The GitHub API expects these key names, so we can't change them to camelCase
const data = {
// eslint-disable-next-line @typescript-eslint/camelcase
client_id: config.github.clientID,
// eslint-disable-next-line @typescript-eslint/camelcase
client_secret: config.github.clientSecret,
code: code,
state: state
}
const authUrl = 'https://github.com/login/oauth/access_token'
request({
url: authUrl,
method: 'POST',
json: data
}, function (error, httpResponse, body) {
if (!error && httpResponse.statusCode === 200) {
const accessToken = body.access_token
if (accessToken) {
const content = note.content
const title = Note.decodeTitle(note.title)
const filename = title.replace('/', ' ') + '.md'
const gist = {
files: {}
}
gist.files[filename] = {
content: content
}
const gistUrl = 'https://api.github.com/gists'
request({
url: gistUrl,
headers: {
'User-Agent': 'CodiMD',
Authorization: 'token ' + accessToken
},
method: 'POST',
json: gist
}, function (error, httpResponse, body) {
if (!error && httpResponse.statusCode === 201) {
res.setHeader('referer', '')
res.redirect(body.html_url)
} else {
errors.errorForbidden(res)
}
})
} else {
errors.errorForbidden(res)
}
} else {
errors.errorForbidden(res)
}
})
}
}
function githubActions (req, res, _): void {
const noteId = req.params.noteId
NoteUtils.findNoteOrCreate(req, res, function (note: Note) {
const action = req.params.action
switch (action) {
case 'gist':
githubActionGist(req, res, note)
break
default:
res.redirect(config.serverURL + '/' + noteId)
break
}
})
}
function gitlabActionProjects (req, res, _): void {
if (req.isAuthenticated()) {
User.findOne({
where: {
id: req.user.id
}
}).then(function (user) {
if (!user) {
errors.errorNotFound(res)
return
}
class GitlabReturn {
baseURL;
version;
accesstoken;
profileid;
projects;
}
const ret: GitlabReturn = new GitlabReturn()
ret.baseURL = config.gitlab.baseURL
ret.version = config.gitlab.version
ret.accesstoken = user.accessToken
ret.profileid = user.profileid
request(
config.gitlab.baseURL + '/api/' + config.gitlab.version + '/projects?membership=yes&per_page=100&access_token=' + user.accessToken,
function (error, httpResponse, body) {
if (!error && httpResponse.statusCode === 200) {
ret.projects = JSON.parse(body)
return res.send(ret)
} else {
return res.send(ret)
}
}
)
}).catch(function (err) {
logger.error('gitlab action projects failed: ' + err)
errors.errorInternalError(res)
})
} else {
errors.errorForbidden(res)
}
}
function gitlabActions (req, res, _): void {
const noteId = req.params.noteId
NoteUtils.findNoteOrCreate(req, res, function (note) {
const action = req.params.action
switch (action) {
case 'projects':
gitlabActionProjects(req, res, note)
break
default:
res.redirect(config.serverURL + '/' + noteId)
break
}
})
}
export const response = {
showIndex: showIndex,
githubActions: githubActions,
gitlabActions: gitlabActions
}

View file

@ -1,102 +0,0 @@
import { generateAvatarURL } from '../letter-avatars'
import { logger } from '../logger'
import { PassportProfile, ProviderEnum } from '../web/auth/utils'
import { User } from '../models'
export class PhotoProfile {
name: string
photo: string
biggerphoto: string
static fromUser (user: User): PhotoProfile | null {
if (!user) return null
if (user.profile) return PhotoProfile.fromJSON(user.profile)
if (user.email) return PhotoProfile.fromEmail(user.email)
return null
}
private static fromJSON (jsonProfile: string): PhotoProfile | null {
try {
const parsedProfile: PassportProfile = JSON.parse(jsonProfile)
return {
name: parsedProfile.displayName || parsedProfile.username,
photo: PhotoProfile.generatePhotoURL(parsedProfile, false),
biggerphoto: PhotoProfile.generatePhotoURL(parsedProfile, true)
}
} catch (err) {
logger.error(err)
return null
}
}
private static fromEmail (email: string): PhotoProfile {
return {
name: email.substring(0, email.lastIndexOf('@')),
photo: generateAvatarURL('', email, false),
biggerphoto: generateAvatarURL('', email, true)
}
}
private static generatePhotoURL (profile: PassportProfile, bigger: boolean): string {
let photo: string
switch (profile.provider) {
case ProviderEnum.facebook:
photo = 'https://graph.facebook.com/' + profile.id + '/picture'
if (bigger) {
photo += '?width=400'
} else {
photo += '?width=96'
}
break
case ProviderEnum.twitter:
photo = 'https://twitter.com/' + profile.username + '/profile_image'
if (bigger) {
photo += '?size=original'
} else {
photo += '?size=bigger'
}
break
case ProviderEnum.github:
photo = 'https://avatars.githubusercontent.com/u/' + profile.id
if (bigger) {
photo += '?s=400'
} else {
photo += '?s=96'
}
break
case ProviderEnum.gitlab:
photo = profile.avatarUrl
if (photo) {
if (bigger) {
photo = photo.replace(/(\?s=)\d*$/i, '$1400')
} else {
photo = photo.replace(/(\?s=)\d*$/i, '$196')
}
} else {
photo = generateAvatarURL(profile.username)
}
break
case ProviderEnum.dropbox:
photo = generateAvatarURL('', profile.emails[0], bigger)
break
case ProviderEnum.google:
photo = profile.photos[0].value
if (bigger) {
photo = photo.replace(/(\?sz=)\d*$/i, '$1400')
} else {
photo = photo.replace(/(\?sz=)\d*$/i, '$196')
}
break
case ProviderEnum.ldap:
photo = generateAvatarURL(profile.username, profile.emails[0], bigger)
break
case ProviderEnum.saml:
photo = generateAvatarURL(profile.username, profile.emails[0], bigger)
break
default:
photo = generateAvatarURL(profile.username)
break
}
return photo
}
}
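A minimal usage sketch for the profile helper above. The lookup, the user id and the import paths are assumptions for illustration; only the PhotoProfile.fromUser API comes from the deleted file.

import { User } from './models'
import { PhotoProfile } from './utils/PhotoProfile'

// Hypothetical lookup: derive display data for a template from a stored user
User.findOne({ where: { id: 'some-user-id' } }).then(function (user) {
  if (!user) return
  const profile = PhotoProfile.fromUser(user)
  if (profile) {
    console.log(profile.name, profile.photo, profile.biggerphoto)
  }
})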

View file

@ -1,96 +0,0 @@
import fs from 'fs'
import { config } from '../config'
import { logger } from '../logger'
import { Revision } from '../models'
import { realtime, State } from '../realtime'
/*
Converts a Map with string keys into a plain JS object for transmitting via a websocket
*/
export function mapToObject<T> (map: Map<string, T>): object {
return Array.from(map).reduce((obj, [key, value]) => {
obj[key] = value
return obj
}, {})
}
export function getImageMimeType (imagePath: string): string | undefined {
const fileExtension = /[^.]+$/.exec(imagePath)
switch (fileExtension?.[0]) {
case 'bmp':
return 'image/bmp'
case 'gif':
return 'image/gif'
case 'jpg':
case 'jpeg':
return 'image/jpeg'
case 'png':
return 'image/png'
case 'tiff':
return 'image/tiff'
case 'svg':
return 'image/svg+xml'
default:
return undefined
}
}
// [Postgres] Handling NULL bytes
// https://github.com/sequelize/sequelize/issues/6485
export function stripNullByte (value: string): string {
value = '' + value
// eslint-disable-next-line no-control-regex
return value ? value.replace(/\u0000/g, '') : value
}
export function processData<T> (data: T, _default: T, process?: (data: T) => T): T | undefined {
if (data === undefined) return undefined
else if (data === null) return _default
else if (process) return process(data)
else return data
}
export function handleTermSignals (io): void {
if (realtime.state === State.Starting) {
process.exit(0)
}
if (realtime.state === State.Stopping) {
// The function is already running. Do nothing
return
}
logger.info('CodiMD has been killed by signal, try to exit gracefully...')
realtime.state = State.Stopping
// disconnect all socket.io clients
Object.keys(io.sockets.sockets).forEach(function (key) {
const socket = io.sockets.sockets[key]
// notify client server going into maintenance status
socket.emit('maintenance')
setTimeout(function () {
socket.disconnect(true)
}, 0)
})
if (config.path) {
// ToDo: add a proper error handler
// eslint-disable-next-line @typescript-eslint/no-empty-function
fs.unlink(config.path, (_) => {
})
}
const checkCleanTimer = setInterval(function () {
if (realtime.isReady()) {
Revision.checkAllNotesRevision(function (err, notes) {
if (err) {
return logger.error('Error while writing changes to database. We will abort after trying for 30 seconds.\n' + err)
}
if (!notes || notes.length <= 0) {
clearInterval(checkCleanTimer)
return process.exit(0)
}
})
}
}, 500)
setTimeout(function () {
logger.error('Failed to write changes to database. Aborting')
clearInterval(checkCleanTimer)
process.exit(1)
}, 30000)
}
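A short usage sketch for the helpers above; the values and the import path are made up, the call signatures are the ones defined in this file.

import { mapToObject, processData, stripNullByte, getImageMimeType } from './utils/functions'

// Map -> plain object, e.g. before emitting an online-user list over socket.io
const online = new Map<string, number>([['alice', 2], ['bob', 1]])
console.log(mapToObject(online)) // { alice: 2, bob: 1 }

// undefined passes through, null falls back to the default,
// otherwise the optional transform runs
console.log(processData(undefined, 'fallback')) // undefined
console.log(processData(null, 'fallback')) // 'fallback'
console.log(processData('text\u0000', 'fallback', stripNullByte)) // 'text'

console.log(getImageMimeType('diagram.png')) // 'image/png'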

View file

@ -1,41 +0,0 @@
import { NextFunction, Request, Response, Router } from 'express'
import passport from 'passport'
import { Strategy as DropboxStrategy } from 'passport-dropbox-oauth2'
import { config } from '../../../config'
import { User } from '../../../models'
import { AuthMiddleware } from '../interface'
import { passportGeneralCallback } from '../utils'
export const dropboxAuth = Router()
export const DropboxMiddleware: AuthMiddleware = {
getMiddleware (): Router {
passport.use(new DropboxStrategy({
apiVersion: '2',
clientID: config.dropbox.clientID,
clientSecret: config.dropbox.clientSecret,
callbackURL: config.serverURL + '/auth/dropbox/callback'
}, (
accessToken: string,
refreshToken: string,
profile,
done: (err?: Error | null, user?: User) => void
): void => {
// the Dropbox plugin wraps the email addresses in an object
// see https://github.com/florianheinemann/passport-dropbox-oauth2/blob/master/lib/passport-dropbox-oauth2/strategy.js#L146
profile.emails = profile.emails.map(element => element.value)
passportGeneralCallback(accessToken, refreshToken, profile, done)
}))
dropboxAuth.get('/auth/dropbox', function (req: Request, res: Response, next: NextFunction) {
passport.authenticate('dropbox-oauth2')(req, res, next)
})
dropboxAuth.get('/auth/dropbox/callback',
passport.authenticate('dropbox-oauth2', {
successReturnToOrRedirect: config.serverURL + '/',
failureRedirect: config.serverURL + '/'
})
)
return dropboxAuth
}
}

View file

@ -1,95 +0,0 @@
import { NextFunction, Request, Response, Router } from 'express'
import passport from 'passport'
import { Strategy as LocalStrategy } from 'passport-local'
import validator from 'validator'
import { config } from '../../../config'
import { errors } from '../../../errors'
import { logger } from '../../../logger'
import { User } from '../../../models'
import { urlencodedParser } from '../../utils'
import { AuthMiddleware } from '../interface'
const emailAuth = Router()
export const EmailMiddleware: AuthMiddleware = {
getMiddleware (): Router {
passport.use(new LocalStrategy({
usernameField: 'email'
}, function (email: string, password: string, done) {
if (!validator.isEmail(email)) return done(null, false)
User.findOne({
where: {
email: email
}
}).then(function (user: User | null) {
if (!user) return done(null, false)
user.verifyPassword(password).then(verified => {
if (verified) {
return done(null, user)
} else {
logger.warn('invalid password given for %s', user.email)
return done(null, false)
}
})
}).catch(function (err: Error) {
logger.error(err)
return done(err)
})
}))
if (config.allowEmailRegister) {
emailAuth.post('/register', urlencodedParser, function (req: Request, res: Response, _: NextFunction) {
if (!req.body.email || !req.body.password) {
errors.errorBadRequest(res)
return
}
if (!validator.isEmail(req.body.email)) {
errors.errorBadRequest(res)
return
}
User.findOrCreate({
where: {
email: req.body.email
},
defaults: {
password: req.body.password
}
}).then(function ([user, created]: [User, boolean]) {
if (user) {
if (created) {
logger.debug('user registered: ' + user.id)
req.flash('info', "You've successfully registered, please sign in.")
return res.redirect(config.serverURL + '/')
} else {
logger.debug('user found: ' + user.id)
req.flash('error', 'This email has been used, please try another one.')
return res.redirect(config.serverURL + '/')
}
}
req.flash('error', 'Failed to register your account, please try again.')
return res.redirect(config.serverURL + '/')
}).catch(function (err) {
logger.error('auth callback failed: ' + err)
errors.errorInternalError(res)
})
})
}
emailAuth.post('/login', urlencodedParser, function (req: Request, res: Response, next: NextFunction) {
if (!req.body.email || !req.body.password) {
errors.errorBadRequest(res)
return
}
if (!validator.isEmail(req.body.email)) {
errors.errorBadRequest(res)
return
}
passport.authenticate('local', {
successReturnToOrRedirect: config.serverURL + '/',
failureRedirect: config.serverURL + '/',
failureFlash: 'Invalid email or password.'
})(req, res, next)
})
return emailAuth
}
}

View file

@ -1,31 +0,0 @@
import passport from 'passport'
import { config } from '../../../config'
import { AuthMiddleware } from '../interface'
import { Router } from 'express'
import { passportGeneralCallback } from '../utils'
import { Strategy as FacebookStrategy } from 'passport-facebook'
export const FacebookMiddleware: AuthMiddleware = {
getMiddleware (): Router {
const facebookAuth = Router()
passport.use(new FacebookStrategy({
clientID: config.facebook.clientID,
clientSecret: config.facebook.clientSecret,
callbackURL: config.serverURL + '/auth/facebook/callback'
}, passportGeneralCallback
))
facebookAuth.get('/auth/facebook', function (req, res, next) {
passport.authenticate('facebook')(req, res, next)
})
// facebook auth callback
facebookAuth.get('/auth/facebook/callback',
passport.authenticate('facebook', {
successReturnToOrRedirect: config.serverURL + '/',
failureRedirect: config.serverURL + '/'
})
)
return facebookAuth
}
}

View file

@ -1,36 +0,0 @@
import { Router } from 'express'
import passport from 'passport'
import { Strategy as GithubStrategy } from 'passport-github'
import { config } from '../../../config'
import { response } from '../../../response'
import { AuthMiddleware } from '../interface'
import { passportGeneralCallback } from '../utils'
export const GithubMiddleware: AuthMiddleware = {
getMiddleware (): Router {
const githubAuth = Router()
passport.use(new GithubStrategy({
clientID: config.github.clientID,
clientSecret: config.github.clientSecret,
callbackURL: config.serverURL + '/auth/github/callback'
}, passportGeneralCallback))
githubAuth.get('/auth/github', function (req, res, next) {
passport.authenticate('github')(req, res, next)
})
// github auth callback
githubAuth.get('/auth/github/callback',
passport.authenticate('github', {
successReturnToOrRedirect: config.serverURL + '/',
failureRedirect: config.serverURL + '/'
})
)
// github callback actions
githubAuth.get('/auth/github/callback/:noteId/:action', response.githubActions)
return githubAuth
}
}

View file

@ -1,41 +0,0 @@
import { Router } from 'express'
import passport from 'passport'
import { Strategy as GitlabStrategy } from 'passport-gitlab2'
import { config } from '../../../config'
import { response } from '../../../response'
import { AuthMiddleware } from '../interface'
import { passportGeneralCallback } from '../utils'
export const GitlabMiddleware: AuthMiddleware = {
getMiddleware (): Router {
const gitlabAuth = Router()
passport.use(new GitlabStrategy({
baseURL: config.gitlab.baseURL,
clientID: config.gitlab.clientID,
clientSecret: config.gitlab.clientSecret,
scope: config.gitlab.scope,
callbackURL: config.serverURL + '/auth/gitlab/callback'
}, passportGeneralCallback))
gitlabAuth.get('/auth/gitlab', function (req, res, next) {
passport.authenticate('gitlab')(req, res, next)
})
// gitlab auth callback
gitlabAuth.get('/auth/gitlab/callback',
passport.authenticate('gitlab', {
successReturnToOrRedirect: config.serverURL + '/',
failureRedirect: config.serverURL + '/'
})
)
if (!config.gitlab.scope || config.gitlab.scope === 'api') {
// gitlab callback actions
gitlabAuth.get('/auth/gitlab/callback/:noteId/:action', response.gitlabActions)
}
return gitlabAuth
}
}

View file

@ -1,44 +0,0 @@
import { Router } from 'express'
import passport from 'passport'
import * as Google from 'passport-google-oauth20'
import { config } from '../../../config'
import { AuthMiddleware } from '../interface'
import { passportGeneralCallback } from '../utils'
const googleAuth = Router()
export const GoogleMiddleware: AuthMiddleware = {
getMiddleware: function (): Router {
passport.use(new Google.Strategy({
clientID: config.google.clientID,
clientSecret: config.google.clientSecret,
callbackURL: config.serverURL + '/auth/google/callback',
userProfileURL: 'https://www.googleapis.com/oauth2/v3/userinfo'
}, (
accessToken: string,
refreshToken: string,
profile,
done) => {
/*
This ugly hack is necessary because the Google Strategy wants a done-callback with an err as Error | null | undefined
but the passportGeneralCallback (and every other PassportStrategy) want a done-callback with err as string | Error | undefined
Note the absence of null. The lambda converts all `null` to `undefined`.
*/
passportGeneralCallback(accessToken, refreshToken, profile, (err?, user?) => {
done(err === null ? undefined : err, user)
})
}))
googleAuth.get('/auth/google', function (req, res, next) {
const authOpts = { scope: ['profile'], hostedDomain: config.google.hostedDomain }
passport.authenticate('google', authOpts)(req, res, next)
})
googleAuth.get('/auth/google/callback',
passport.authenticate('google', {
successReturnToOrRedirect: config.serverURL + '/',
failureRedirect: config.serverURL + '/'
})
)
return googleAuth
}
}

View file

@ -1,69 +0,0 @@
import { Request, Response, Router } from 'express'
import passport from 'passport'
import { config } from '../../config'
import { logger } from '../../logger'
import { User } from '../../models'
import { FacebookMiddleware } from './facebook'
import { TwitterMiddleware } from './twitter'
import { GithubMiddleware } from './github'
import { GitlabMiddleware } from './gitlab'
import { DropboxMiddleware } from './dropbox'
import { GoogleMiddleware } from './google'
import { LdapMiddleware } from './ldap'
import { SamlMiddleware } from './saml'
import { OAuth2Middleware } from './oauth2'
import { EmailMiddleware } from './email'
import { OpenIDMiddleware } from './openid'
const AuthRouter = Router()
// serialize and deserialize
passport.serializeUser(function (user: User, done) {
logger.info('serializeUser: ' + user.id)
return done(null, user.id)
})
passport.deserializeUser(function (id: string, done) {
User.findOne({
where: {
id: id
}
}).then(function (user) {
// Don't die on non-existent user
if (user == null) {
// The extra object with a message doesn't exist in @types/passport
return done(null, false) // , { message: 'Invalid UserID' })
}
logger.info('deserializeUser: ' + user.id)
return done(null, user)
}).catch(function (err) {
logger.error(err)
return done(err, null)
})
})
if (config.isFacebookEnable) AuthRouter.use(FacebookMiddleware.getMiddleware())
if (config.isTwitterEnable) AuthRouter.use(TwitterMiddleware.getMiddleware())
if (config.isGitHubEnable) AuthRouter.use(GithubMiddleware.getMiddleware())
if (config.isGitLabEnable) AuthRouter.use(GitlabMiddleware.getMiddleware())
if (config.isDropboxEnable) AuthRouter.use(DropboxMiddleware.getMiddleware())
if (config.isGoogleEnable) AuthRouter.use(GoogleMiddleware.getMiddleware())
if (config.isLDAPEnable) AuthRouter.use(LdapMiddleware.getMiddleware())
if (config.isSAMLEnable) AuthRouter.use(SamlMiddleware.getMiddleware())
if (config.isOAuth2Enable) AuthRouter.use(OAuth2Middleware.getMiddleware())
if (config.isEmailEnable) AuthRouter.use(EmailMiddleware.getMiddleware())
if (config.isOpenIDEnable) AuthRouter.use(OpenIDMiddleware.getMiddleware())
// logout
AuthRouter.get('/logout', function (req: Request, res: Response) {
if (config.debug && req.isAuthenticated()) {
if (req.user !== undefined) {
logger.debug('user logout: ' + req.user.id)
}
}
req.logout()
res.redirect(config.serverURL + '/')
})
export { AuthRouter }

View file

@ -1,5 +0,0 @@
import { Router } from 'express'
export interface AuthMiddleware {
getMiddleware (): Router;
}
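The interface above is the whole contract between the auth router and the individual providers. A hypothetical provider that follows the same getMiddleware() pattern as the real middlewares in this directory (name and route are made up):

import { Request, Response, Router } from 'express'
import { AuthMiddleware } from './interface'

export const DummyMiddleware: AuthMiddleware = {
  getMiddleware (): Router {
    const dummyAuth = Router()
    dummyAuth.get('/auth/dummy', function (req: Request, res: Response) {
      res.send('not a real login flow')
    })
    return dummyAuth
  }
}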

View file

@ -1,96 +0,0 @@
import { Router } from 'express'
import passport from 'passport'
import LDAPStrategy from 'passport-ldapauth'
import { config } from '../../../config'
import { errors } from '../../../errors'
import { logger } from '../../../logger'
import { User } from '../../../models'
import { urlencodedParser } from '../../utils'
import { AuthMiddleware } from '../interface'
export const LdapMiddleware: AuthMiddleware = {
getMiddleware (): Router {
const LdapAuth = Router()
passport.use(new LDAPStrategy({
server: {
url: config.ldap.url || null,
bindDN: config.ldap.bindDn || null,
bindCredentials: config.ldap.bindCredentials || null,
searchBase: config.ldap.searchBase || null,
searchFilter: config.ldap.searchFilter || null,
searchAttributes: config.ldap.searchAttributes || null,
tlsOptions: config.ldap.tlsOptions || null,
starttls: config.ldap.starttls || null
}
}, function (user, done) {
let uuid = user.uidNumber || user.uid || user.sAMAccountName || undefined
if (config.ldap.useridField && user[config.ldap.useridField]) {
uuid = user[config.ldap.useridField]
}
if (typeof uuid === 'undefined') {
throw new Error('Could not determine UUID for LDAP user. Check that ' +
'either uidNumber, uid or sAMAccountName is set in your LDAP directory ' +
'or use another unique attribute and configure it using the ' +
'"useridField" option in ldap settings.')
}
let username = uuid
if (config.ldap.usernameField && user[config.ldap.usernameField]) {
username = user[config.ldap.usernameField]
}
const profile = {
id: 'LDAP-' + uuid,
username: username,
displayName: user.displayName,
emails: user.mail ? Array.isArray(user.mail) ? user.mail : [user.mail] : [],
avatarUrl: null,
profileUrl: null,
provider: 'ldap'
}
const stringifiedProfile = JSON.stringify(profile)
User.findOrCreate({
where: {
profileid: profile.id.toString()
},
defaults: {
profile: stringifiedProfile
}
}).then(function ([user, _]) {
if (user) {
let needSave = false
if (user.profile !== stringifiedProfile) {
user.profile = stringifiedProfile
needSave = true
}
if (needSave) {
user.save().then(function () {
logger.debug(`user login: ${user.id}`)
return done(null, user)
})
} else {
logger.debug(`user login: ${user.id}`)
return done(null, user)
}
}
}).catch(function (err) {
logger.error('ldap auth failed: ' + err)
return done(err, null)
})
}))
LdapAuth.post('/auth/ldap', urlencodedParser, function (req, res, next) {
if (!req.body.username || !req.body.password) return errors.errorBadRequest(res)
passport.authenticate('ldapauth', {
successReturnToOrRedirect: config.serverURL + '/',
failureRedirect: config.serverURL + '/',
failureFlash: true
})(req, res, next)
})
return LdapAuth
}
}

View file

@ -1,36 +0,0 @@
import { Router } from 'express'
import passport from 'passport'
import { OAuth2CustomStrategy } from './oauth2-custom-strategy'
import { config } from '../../../config'
import { passportGeneralCallback } from '../utils'
import { AuthMiddleware } from '../interface'
export const OAuth2Middleware: AuthMiddleware = {
getMiddleware (): Router {
const OAuth2Auth = Router()
passport.use(new OAuth2CustomStrategy({
authorizationURL: config.oauth2.authorizationURL,
tokenURL: config.oauth2.tokenURL,
clientID: config.oauth2.clientID,
clientSecret: config.oauth2.clientSecret,
callbackURL: config.serverURL + '/auth/oauth2/callback',
userProfileURL: config.oauth2.userProfileURL,
scope: config.oauth2.scope,
state: true
}, passportGeneralCallback))
OAuth2Auth.get('/auth/oauth2', passport.authenticate('oauth2'))
// oauth2 auth callback
OAuth2Auth.get('/auth/oauth2/callback',
passport.authenticate('oauth2', {
successReturnToOrRedirect: config.serverURL + '/',
failureRedirect: config.serverURL + '/'
})
)
return OAuth2Auth
}
}

View file

@ -1,84 +0,0 @@
import { InternalOAuthError, Strategy as OAuth2Strategy } from 'passport-oauth2'
import { config } from '../../../config'
import { PassportProfile, ProviderEnum } from '../utils'
import { logger } from '../../../logger'
function extractProfileAttribute (data, path: string): string {
// can handle stuff like `attrs[0].name`
const pathArray = path.split('.')
for (const segment of pathArray) {
const regex = /([\d\w]+)\[(.*)\]/
const m = regex.exec(segment)
data = m ? data[m[1]][m[2]] : data[segment]
}
return data
}
function parseProfile (data): Partial<PassportProfile> {
const username = extractProfileAttribute(data, config.oauth2.userProfileUsernameAttr)
let displayName: string | undefined
try {
// This may fail if the config.oauth2.userProfileDisplayNameAttr is undefined,
// or it is foo.bar and data["foo"] is undefined.
displayName = extractProfileAttribute(data, config.oauth2.userProfileDisplayNameAttr)
} catch (e) {
displayName = undefined
logger.debug('\'id_token[%s]\' is undefined. Setting \'displayName\' to \'undefined\'.\n%s', config.oauth2.userProfileDisplayNameAttr, e.message)
}
const emails: string[] = []
try {
const email = extractProfileAttribute(data, config.oauth2.userProfileEmailAttr)
if (email !== undefined) {
emails.push(email)
} else {
logger.debug('\'id_token[%s]\' is undefined. Setting \'emails\' to [].', config.oauth2.userProfileEmailAttr)
}
} catch (e) {
logger.debug('\'id_token[%s]\' is undefined. Setting \'emails\' to [].\n%s', config.oauth2.userProfileEmailAttr, e.message)
}
return {
id: username,
username: username,
displayName: displayName,
emails: emails
}
}
class OAuth2CustomStrategy extends OAuth2Strategy {
private readonly _userProfileURL: string;
constructor (options, verify) {
options.customHeaders = options.customHeaders || {}
super(options, verify)
this.name = 'oauth2'
this._userProfileURL = options.userProfileURL
this._oauth2.useAuthorizationHeaderforGET(true)
}
userProfile (accessToken, done): void {
this._oauth2.get(this._userProfileURL, accessToken, function (err, body, _) {
let json
if (err) {
return done(new InternalOAuthError('Failed to fetch user profile', err))
}
try {
if (body !== undefined) {
json = JSON.parse(body.toString())
}
} catch (ex) {
return done(new Error('Failed to parse user profile'))
}
const profile = parseProfile(json)
profile.provider = ProviderEnum.oauth2
done(null, profile)
})
}
}
export { OAuth2CustomStrategy }
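extractProfileAttribute above resolves dotted attribute paths in which a segment may carry an index, e.g. attrs[0].name. A standalone sketch of the same lookup rule; the payload and the configured paths are made up:

// Hypothetical OAuth2 profile payload
const payload = {
  preferred_username: 'alice',
  attrs: [{ name: 'Alice Example' }],
  emails: [{ value: 'alice@example.com' }]
}

// Same traversal rule: split on '.', treat `foo[bar]` segments as two lookups
function lookup (data: any, path: string): any {
  for (const segment of path.split('.')) {
    const m = /([\d\w]+)\[(.*)\]/.exec(segment)
    data = m ? data[m[1]][m[2]] : data[segment]
  }
  return data
}

console.log(lookup(payload, 'preferred_username')) // 'alice'
console.log(lookup(payload, 'attrs[0].name')) // 'Alice Example'
console.log(lookup(payload, 'emails[0].value')) // 'alice@example.com'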

View file

@ -1,59 +0,0 @@
import { Router } from 'express'
import passport from 'passport'
import * as OpenID from '@passport-next/passport-openid'
import { config } from '../../../config'
import { User } from '../../../models'
import { logger } from '../../../logger'
import { urlencodedParser } from '../../utils'
import { AuthMiddleware } from '../interface'
const openIDAuth = Router()
export const OpenIDMiddleware: AuthMiddleware = {
getMiddleware (): Router {
passport.use(new OpenID.Strategy({
returnURL: config.serverURL + '/auth/openid/callback',
realm: config.serverURL,
profile: true
}, function (openid, profile, done) {
const stringifiedProfile = JSON.stringify(profile)
User.findOrCreate({
where: {
profileid: openid
},
defaults: {
profile: stringifiedProfile
}
}).then(function ([user, _]) {
if (user) {
let needSave = false
if (user.profile !== stringifiedProfile) {
user.profile = stringifiedProfile
needSave = true
}
if (needSave) {
user.save().then(function () {
logger.debug(`user login: ${user.id}`)
return done(null, user)
})
} else {
logger.debug(`user login: ${user.id}`)
return done(null, user)
}
}
}).catch(function (err) {
logger.error('auth callback failed: ' + err)
return done(err, null)
})
}))
openIDAuth.post('/auth/openid', urlencodedParser, function (req, res, next) {
passport.authenticate('openid')(req, res, next)
})
openIDAuth.get('/auth/openid/callback',
passport.authenticate('openid', {
successReturnToOrRedirect: config.serverURL + '/',
failureRedirect: config.serverURL + '/'
})
)
return openIDAuth
}
}

View file

@ -1,107 +0,0 @@
import { Router } from 'express'
import passport from 'passport'
import { Strategy as SamlStrategy } from 'passport-saml'
import fs from 'fs'
import { config } from '../../../config'
import { User } from '../../../models'
import { logger } from '../../../logger'
import { urlencodedParser } from '../../utils'
import { AuthMiddleware } from '../interface'
function intersection<T> (array1: T[], array2: T[]): T[] {
return array1.filter((n) => array2.includes(n))
}
export const SamlMiddleware: AuthMiddleware = {
getMiddleware (): Router {
const SamlAuth = Router()
const samlStrategy = new SamlStrategy({
callbackUrl: config.serverURL + '/auth/saml/callback',
entryPoint: config.saml.idpSsoUrl,
issuer: config.saml.issuer || config.serverURL,
cert: fs.readFileSync(config.saml.idpCert, 'utf-8'),
identifierFormat: config.saml.identifierFormat,
disableRequestedAuthnContext: config.saml.disableRequestedAuthnContext
}, function (user, done) {
// check authorization if needed
if (config.saml.externalGroups && config.saml.groupAttribute) {
const externalGroups: string[] = intersection(config.saml.externalGroups, user[config.saml.groupAttribute])
if (externalGroups.length > 0) {
logger.error('saml permission denied: ' + externalGroups.join(', '))
return done('Permission denied', null)
}
}
if (config.saml.requiredGroups && config.saml.groupAttribute) {
if (intersection(config.saml.requiredGroups, user[config.saml.groupAttribute]).length === 0) {
logger.error('saml permission denied')
return done('Permission denied', null)
}
}
// user creation
const uuid = user[config.saml.attribute.id] || user.nameID
const profile = {
provider: 'saml',
id: 'SAML-' + uuid,
username: user[config.saml.attribute.username] || user.nameID,
emails: user[config.saml.attribute.email] ? [user[config.saml.attribute.email]] : []
}
if (profile.emails.length === 0 && config.saml.identifierFormat === 'urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress') {
profile.emails.push(user.nameID)
}
const stringifiedProfile = JSON.stringify(profile)
User.findOrCreate({
where: {
profileid: profile.id.toString()
},
defaults: {
profile: stringifiedProfile
}
}).then(function ([user, _]) {
if (user) {
let needSave = false
if (user.profile !== stringifiedProfile) {
user.profile = stringifiedProfile
needSave = true
}
if (needSave) {
user.save().then(function () {
logger.debug(`user login: ${user.id}`)
return done(null, user)
})
} else {
logger.debug(`user login: ${user.id}`)
return done(null, user)
}
}
}).catch(function (err) {
logger.error('saml auth failed: ' + err)
return done(err, null)
})
})
passport.use(samlStrategy)
SamlAuth.get('/auth/saml',
passport.authenticate('saml', {
successReturnToOrRedirect: config.serverURL + '/',
failureRedirect: config.serverURL + '/'
})
)
SamlAuth.post('/auth/saml/callback', urlencodedParser,
passport.authenticate('saml', {
successReturnToOrRedirect: config.serverURL + '/',
failureRedirect: config.serverURL + '/'
})
)
SamlAuth.get('/auth/saml/metadata', function (req, res) {
res.type('application/xml')
res.send(samlStrategy.generateServiceProviderMetadata(null))
})
return SamlAuth
}
}

View file

@ -1,33 +0,0 @@
import { Router } from 'express'
import passport from 'passport'
import { Strategy as TwitterStrategy } from 'passport-twitter'
import { config } from '../../../config'
import { passportGeneralCallback } from '../utils'
import { AuthMiddleware } from '../interface'
export const TwitterMiddleware: AuthMiddleware = {
getMiddleware (): Router {
const TwitterAuth = Router()
passport.use(new TwitterStrategy({
consumerKey: config.twitter.consumerKey,
consumerSecret: config.twitter.consumerSecret,
callbackURL: config.serverURL + '/auth/twitter/callback'
}, passportGeneralCallback))
TwitterAuth.get('/auth/twitter', function (req, res, next) {
passport.authenticate('twitter')(req, res, next)
})
// twitter auth callback
TwitterAuth.get('/auth/twitter/callback',
passport.authenticate('twitter', {
successReturnToOrRedirect: config.serverURL + '/',
failureRedirect: config.serverURL + '/'
})
)
return TwitterAuth
}
}

View file

@ -1,73 +0,0 @@
import { Profile } from 'passport'
import { logger } from '../../logger'
import { User } from '../../models'
export function passportGeneralCallback (
accessToken: string,
refreshToken: string,
profile: Profile,
done: (err?: Error | null, user?: User) => void
): void {
const stringifiedProfile = JSON.stringify(profile)
User.findOrCreate({
where: {
profileid: profile.id.toString()
},
defaults: {
profile: stringifiedProfile,
accessToken: accessToken,
refreshToken: refreshToken
}
}).then(function ([user, _]) {
if (user) {
let needSave = false
if (user.profile !== stringifiedProfile) {
user.profile = stringifiedProfile
needSave = true
}
if (user.accessToken !== accessToken) {
user.accessToken = accessToken
needSave = true
}
if (user.refreshToken !== refreshToken) {
user.refreshToken = refreshToken
needSave = true
}
if (needSave) {
user.save().then(function () {
logger.debug(`user login: ${user.id}`)
return done(null, user)
})
} else {
logger.debug(`user login: ${user.id}`)
return done(null, user)
}
}
}).catch(function (err) {
logger.error('auth callback failed: ' + err)
return done(err, undefined)
})
}
export enum ProviderEnum {
facebook = 'facebook',
twitter = 'twitter',
github = 'github',
gitlab = 'gitlab',
dropbox = 'dropbox',
google = 'google',
ldap = 'ldap',
oauth2 = 'oauth2',
saml = 'saml',
}
export type PassportProfile = {
id: string;
username: string;
displayName: string;
emails: string[];
avatarUrl: string;
profileUrl: string;
provider: ProviderEnum;
photos: { value: string }[];
}
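A hypothetical profile literal in the shape these utilities work with. A strategy callback would hand something like this, plus the tokens, to passportGeneralCallback, which upserts the user by profileid and only re-saves when the serialized profile or the tokens changed.

import { PassportProfile, ProviderEnum } from './web/auth/utils'

// Made-up values, real shape
const profile: Partial<PassportProfile> = {
  id: '12345',
  username: 'alice',
  displayName: 'Alice Example',
  emails: ['alice@example.com'],
  provider: ProviderEnum.github
}
console.log(JSON.stringify(profile))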

View file

@ -1,22 +0,0 @@
import { response } from '../response'
import { errors } from '../errors'
import { Router } from 'express'
const BaseRouter = Router()
// get index
BaseRouter.get('/', response.showIndex)
// get 403 forbidden
BaseRouter.get('/403', function (req, res) {
errors.errorForbidden(res)
})
// get 404 not found
BaseRouter.get('/404', function (req, res) {
errors.errorNotFound(res)
})
// get 500 internal error
BaseRouter.get('/500', function (req, res) {
errors.errorInternalError(res)
})
export { BaseRouter }

View file

@ -1,18 +0,0 @@
import { urlencodedParser } from './utils'
import { History } from '../history'
import { Router } from 'express'
const HistoryRouter = Router()
// get history
HistoryRouter.get('/history', History.historyGet)
// post history
HistoryRouter.post('/history', urlencodedParser, History.historyPost)
// post history by note id
HistoryRouter.post('/history/:noteId', urlencodedParser, History.historyPost)
// delete history
HistoryRouter.delete('/history', History.historyDelete)
// delete history by note id
HistoryRouter.delete('/history/:noteId', History.historyDelete)
export { HistoryRouter }

View file

@ -1,38 +0,0 @@
import azure from 'azure-storage'
import path from 'path'
import { config } from '../../config'
import { logger } from '../../logger'
import { UploadProvider } from './index'
const AzureUploadProvider: UploadProvider = {
uploadImage: (imagePath, callback) => {
if (!callback || typeof callback !== 'function') {
logger.error('Callback has to be a function')
return
}
if (!imagePath) {
callback(new Error('Image path is missing or wrong'), undefined)
return
}
const azureBlobService = azure.createBlobService(config.azure.connectionString)
azureBlobService.createContainerIfNotExists(config.azure.container, { publicAccessLevel: 'blob' }, function (err, _, __) {
if (err) {
callback(new Error(err.message), undefined)
} else {
azureBlobService.createBlockBlobFromLocalFile(config.azure.container, path.basename(imagePath), imagePath, function (err, result, _) {
if (err) {
callback(new Error(err.message), undefined)
} else {
callback(undefined, azureBlobService.getUrl(config.azure.container, result.name))
}
})
}
})
}
}
export { AzureUploadProvider }

View file

@ -1,24 +0,0 @@
import path from 'path'
import { URL } from 'url'
import { config } from '../../config'
import { logger } from '../../logger'
import { UploadProvider } from './index'
const FilesystemUploadProvider: UploadProvider = {
uploadImage: (imagePath, callback) => {
if (!callback || typeof callback !== 'function') {
logger.error('Callback has to be a function')
return
}
if (!imagePath) {
callback(new Error('Image path is missing or wrong'), undefined)
return
}
callback(undefined, (new URL(path.basename(imagePath), config.serverURL + '/uploads/')).href)
}
}
export { FilesystemUploadProvider }

View file

@ -1,30 +0,0 @@
import imgur from 'imgur'
import { config } from '../../config'
import { logger } from '../../logger'
import { UploadProvider } from './index'
const ImgurUploadProvider: UploadProvider = {
uploadImage: (imagePath, callback) => {
if (!callback || typeof callback !== 'function') {
logger.error('Callback has to be a function')
return
}
if (!imagePath) {
callback(new Error('Image path is missing or wrong'), undefined)
return
}
imgur.setClientId(config.imgur.clientID)
imgur.uploadFile(imagePath)
.then(function (json) {
logger.debug(`SERVER uploadimage success: ${JSON.stringify(json)}`)
callback(undefined, json.data.link.replace(/^http:\/\//i, 'https://'))
}).catch(function (err) {
callback(new Error(err), undefined)
})
}
}
export { ImgurUploadProvider }

View file

@ -1,75 +0,0 @@
import { Router } from 'express'
import formidable from 'formidable'
import { config } from '../../config'
import { logger } from '../../logger'
import { errors } from '../../errors'
import { AzureUploadProvider } from './azure'
import { FilesystemUploadProvider } from './filesystem'
import { ImgurUploadProvider } from './imgur'
import { LutimUploadProvider } from './lutim'
import { MinioUploadProvider } from './minio'
import { S3UploadProvider } from './s3'
interface UploadProvider {
uploadImage: (imagePath: string, callback: (err?: Error, url?: string) => void) => void;
}
const ImageRouter = Router()
// upload image
ImageRouter.post('/uploadimage', function (req, res) {
const form = new formidable.IncomingForm()
form.keepExtensions = true
if (config.imageUploadType === 'filesystem') {
form.uploadDir = config.uploadsPath
}
form.parse(req, function (err, fields, files) {
if (err || !files.image || !files.image.path) {
logger.error(`formidable error: ${err}`)
errors.errorForbidden(res)
} else {
logger.debug(`SERVER received uploadimage: ${JSON.stringify(files.image)}`)
let uploadProvider: UploadProvider
switch (config.imageUploadType) {
case 'azure':
uploadProvider = AzureUploadProvider
break
case 'filesystem':
default:
uploadProvider = FilesystemUploadProvider
break
case 'imgur':
uploadProvider = ImgurUploadProvider
break
case 'lutim':
uploadProvider = LutimUploadProvider
break
case 'minio':
uploadProvider = MinioUploadProvider
break
case 's3':
uploadProvider = S3UploadProvider
break
}
logger.debug(`imageRouter: Uploading ${files.image.path} using ${config.imageUploadType}`)
uploadProvider.uploadImage(files.image.path, function (err, url) {
if (err !== undefined) {
logger.error(err)
return res.status(500).end('upload image error')
}
logger.debug(`SERVER sending ${url} to client`)
res.send({
link: url
})
})
}
})
})
export { ImageRouter, UploadProvider }
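The UploadProvider interface above is the only contract the image router needs. A hypothetical provider that follows the same callback convention (either an Error or a URL), plus a direct call; the name and the URL are made up:

import { UploadProvider } from './web/imageRouter'

const NullUploadProvider: UploadProvider = {
  uploadImage: (imagePath, callback) => {
    if (!imagePath) {
      callback(new Error('Image path is missing or wrong'), undefined)
      return
    }
    // Pretend the file is already reachable somewhere
    callback(undefined, 'https://example.com/uploads/' + imagePath)
  }
}

NullUploadProvider.uploadImage('test.png', function (err, url) {
  if (err) throw err
  console.log(url) // https://example.com/uploads/test.png
})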

View file

@ -1,34 +0,0 @@
import lutim from 'lutim'
import { config } from '../../config'
import { logger } from '../../logger'
import { UploadProvider } from './index'
const LutimUploadProvider: UploadProvider = {
uploadImage: (imagePath, callback) => {
if (!callback || typeof callback !== 'function') {
logger.error('Callback has to be a function')
return
}
if (!imagePath) {
callback(new Error('Image path is missing or wrong'), undefined)
return
}
if (config.lutim && config.lutim.url) {
lutim.setAPIUrl(config.lutim.url)
logger.debug(`Set lutim URL to ${lutim.getAPIUrl()}`)
}
lutim.uploadImage(imagePath)
.then(function (json) {
logger.debug(`SERVER uploadimage success: ${JSON.stringify(json)}`)
callback(undefined, lutim.getAPIUrl() + json.msg.short)
}).catch(function (err) {
callback(new Error(err), undefined)
})
}
}
export { LutimUploadProvider }

View file

@ -1,60 +0,0 @@
import path from 'path'
import fs from 'fs'
import { Client } from 'minio'
import { config } from '../../config'
import { getImageMimeType } from '../../utils/functions'
import { logger } from '../../logger'
import { UploadProvider } from './index'
let MinioUploadProvider: UploadProvider
if (config.minio.endPoint !== undefined) {
const minioClient = new Client({
endPoint: config.minio.endPoint,
port: config.minio.port,
useSSL: config.minio.secure,
accessKey: config.minio.accessKey,
secretKey: config.minio.secretKey
})
MinioUploadProvider = {
uploadImage: (imagePath, callback): void => {
if (!imagePath) {
callback(new Error('Image path is missing or wrong'), undefined)
return
}
if (!callback || typeof callback !== 'function') {
logger.error('Callback has to be a function')
return
}
fs.readFile(imagePath, function (err, buffer) {
if (err) {
callback(new Error(err.message), undefined)
return
}
const key = path.join('uploads', path.basename(imagePath))
const protocol = config.minio.secure ? 'https' : 'http'
const metaData = {
ContentType: getImageMimeType(imagePath)
}
minioClient.putObject(config.s3bucket, key, buffer, buffer.length, metaData, function (err, _) {
if (err) {
callback(new Error(err.message), undefined)
return
}
const hidePort = [80, 443].includes(config.minio.port)
const urlPort = hidePort ? '' : `:${config.minio.port}`
callback(undefined, `${protocol}://${config.minio.endPoint}${urlPort}/${config.s3bucket}/${key}`)
})
})
}
}
}
export { MinioUploadProvider }

View file

@ -1,59 +0,0 @@
import fs from 'fs'
import path from 'path'
import AWS from 'aws-sdk'
import { config } from '../../config'
// import { getImageMimeType } from '../../utils'
import { logger } from '../../logger'
import { UploadProvider } from './index'
const awsConfig = new AWS.Config(config.s3)
const s3 = new AWS.S3(awsConfig)
const S3UploadProvider: UploadProvider = {
uploadImage: (imagePath, callback) => {
if (!imagePath) {
callback(new Error('Image path is missing or wrong'), undefined)
return
}
if (!callback || typeof callback !== 'function') {
logger.error('Callback has to be a function')
return
}
fs.readFile(imagePath, function (err, buffer) {
if (err) {
callback(new Error(err.message), undefined)
return
}
const params = {
Bucket: config.s3bucket,
Key: path.join('uploads', path.basename(imagePath)),
Body: buffer
}
// ToDo: This does not exist (anymore?)
// const mimeType = getImageMimeType(imagePath)
// if (mimeType) { params.ContentType = mimeType }
logger.debug(`S3 object parameters: ${JSON.stringify(params)}`)
s3.putObject(params, function (err, _) {
if (err) {
callback(new Error(err.message), undefined)
return
}
let s3Endpoint = 's3.amazonaws.com'
if (config.s3.endpoint) {
s3Endpoint = config.s3.endpoint
} else if (config.s3.region && config.s3.region !== 'us-east-1') {
s3Endpoint = `s3-${config.s3.region}.amazonaws.com`
}
callback(undefined, `https://${s3Endpoint}/${config.s3bucket}/${params.Key}`)
})
})
}
}
export { S3UploadProvider }

View file

@ -1,9 +0,0 @@
import { AuthRouter } from './auth'
import { BaseRouter } from './baseRouter'
import { HistoryRouter } from './historyRouter'
import { ImageRouter } from './imageRouter'
import { NoteRouter } from './note/router'
import { StatusRouter } from './statusRouter'
import { UserRouter } from './userRouter'
export { AuthRouter, BaseRouter, HistoryRouter, ImageRouter, NoteRouter, StatusRouter, UserRouter }

View file

@ -1,13 +0,0 @@
import { logger } from '../../logger'
import { errors } from '../../errors'
import { NextFunction, Request, Response } from 'express'
export function checkURI (req: Request, res: Response, next: NextFunction): void {
try {
decodeURIComponent(req.path)
next()
} catch (err) {
logger.error(err)
errors.errorBadRequest(res)
}
}

View file

@ -1,9 +0,0 @@
import { config } from '../../config'
import { NextFunction, Request, Response } from 'express'
export function codiMDVersion (req: Request, res: Response, next: NextFunction): void {
res.set({
'CodiMD-Version': config.version
})
return next()
}

View file

@ -1,6 +0,0 @@
import { checkURI } from './checkURIValid'
import { codiMDVersion } from './codiMDVersion'
import { redirectWithoutTrailingSlashes } from './redirectWithoutTrailingSlashes'
import { tooBusy } from './tooBusy'
export { checkURI, codiMDVersion, redirectWithoutTrailingSlashes, tooBusy }
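A sketch of how these four middlewares would plausibly be mounted; the Express app setup is an assumption, the actual wiring lived in the application entry point.

import express from 'express'
import { checkURI, codiMDVersion, redirectWithoutTrailingSlashes, tooBusy } from './web/middleware'

const app = express()

// Reject requests while the event loop lags, validate the URI,
// canonicalize trailing slashes and stamp every response with the version
app.use(tooBusy)
app.use(checkURI)
app.use(redirectWithoutTrailingSlashes)
app.use(codiMDVersion)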

View file

@ -1,16 +0,0 @@
import { NextFunction, Request, Response } from 'express'
import { config } from '../../config'
export function redirectWithoutTrailingSlashes (req: Request, res: Response, next: NextFunction): void {
if (req.method === 'GET' && req.path.substr(-1) === '/' && req.path.length > 1) {
const queryString: string = req.url.slice(req.path.length)
const urlPath: string = req.path.slice(0, -1)
let serverURL: string = config.serverURL
if (config.urlPath) {
serverURL = serverURL.slice(0, -(config.urlPath.length + 1))
}
res.redirect(301, serverURL + urlPath + queryString)
} else {
next()
}
}

View file

@ -1,14 +0,0 @@
import toobusy from 'toobusy-js'
import { errors } from '../../errors'
import { config } from '../../config'
import { NextFunction, Request, Response } from 'express'
toobusy.maxLag(config.tooBusyLag)
export function tooBusy (req: Request, res: Response, next: NextFunction): void {
if (toobusy()) {
errors.errorServiceUnavailable(res)
} else {
next()
}
}

View file

@ -1,95 +0,0 @@
import { Request, Response } from 'express'
import { Note, Revision } from '../../models'
import { logger } from '../../logger'
import { config } from '../../config'
import { errors } from '../../errors'
import shortId from 'shortid'
import moment from 'moment'
import querystring from 'querystring'
export function getInfo (req: Request, res: Response, note: Note): void {
const body = note.content
const extracted = Note.extractMeta(body)
const markdown = extracted.markdown
const meta = Note.parseMeta(extracted.meta)
const title = Note.decodeTitle(note.title)
const data = {
title: meta.title || title,
description: meta.description || (markdown ? Note.generateDescription(markdown) : null),
viewcount: note.viewcount,
createtime: note.createdAt,
updatetime: note.lastchangeAt
}
res.set({
'Access-Control-Allow-Origin': '*', // allow CORS as API
'Access-Control-Allow-Headers': 'Range',
'Access-Control-Expose-Headers': 'Cache-Control, Content-Encoding, Content-Range',
'Cache-Control': 'private', // only cache by client
'X-Robots-Tag': 'noindex, nofollow' // prevent crawling
})
res.send(data)
}
export function createGist (req: Request, res: Response, note: Note): void {
const data = {
// eslint-disable-next-line @typescript-eslint/camelcase
client_id: config.github.clientID,
// eslint-disable-next-line @typescript-eslint/camelcase
redirect_uri: config.serverURL + '/auth/github/callback/' + Note.encodeNoteId(note.id) + '/gist',
scope: 'gist',
state: shortId.generate()
}
const query = querystring.stringify(data)
res.redirect('https://github.com/login/oauth/authorize?' + query)
}
export function getRevision (req: Request, res: Response, note: Note): void {
const actionId = req.params.actionId
if (actionId) {
const time = moment(parseInt(actionId))
if (time.isValid()) {
Revision.getPatchedNoteRevisionByTime(note, time, function (err, content) {
if (err) {
logger.error(err)
errors.errorInternalError(res)
return
}
if (!content) {
errors.errorNotFound(res)
return
}
res.set({
'Access-Control-Allow-Origin': '*', // allow CORS as API
'Access-Control-Allow-Headers': 'Range',
'Access-Control-Expose-Headers': 'Cache-Control, Content-Encoding, Content-Range',
'Cache-Control': 'private', // only cache by client
'X-Robots-Tag': 'noindex, nofollow' // prevent crawling
})
res.send(content)
})
} else {
errors.errorNotFound(res)
}
} else {
Revision.getNoteRevisions(note, function (err, data) {
if (err) {
logger.error(err)
errors.errorInternalError(res)
return
}
const out = {
revision: data
}
res.set({
'Access-Control-Allow-Origin': '*', // allow CORS as API
'Access-Control-Allow-Headers': 'Range',
'Access-Control-Expose-Headers': 'Cache-Control, Content-Encoding, Content-Range',
'Cache-Control': 'private', // only cache by client
'X-Robots-Tag': 'noindex, nofollow' // prevent crawling
})
res.send(out)
})
}
}

View file

@ -1,131 +0,0 @@
import { Request, Response } from 'express'
import { config } from '../../config'
import { errors } from '../../errors'
import { logger } from '../../logger'
import { Note } from '../../models'
import * as ActionController from './actions'
import * as NoteUtils from './util'
export function publishNoteActions (req: Request, res: Response): void {
NoteUtils.findNoteOrCreate(req, res, function (note) {
const action = req.params.action
switch (action) {
case 'download':
downloadMarkdown(req, res, note)
break
case 'edit':
res.redirect(config.serverURL + '/' + (note.alias ? note.alias : Note.encodeNoteId(note.id)) + '?both')
break
default:
res.redirect(config.serverURL + '/s/' + note.shortid)
break
}
})
}
export function showPublishNote (req: Request, res: Response): void {
NoteUtils.findNoteOrCreate(req, res, function (note) {
// force to use short id
const shortid = req.params.shortid
if ((note.alias && shortid !== note.alias) || (!note.alias && shortid !== note.shortid)) {
return res.redirect(config.serverURL + '/s/' + (note.alias || note.shortid))
}
note.increment('viewcount').then(function (note) {
if (!note) {
return errors.errorNotFound(res)
}
NoteUtils.getPublishData(req, res, note, (data) => {
res.set({
'Cache-Control': 'private' // only cache by client
})
return res.render('pretty.ejs', data)
})
}).catch(function (err) {
logger.error(err)
return errors.errorInternalError(res)
})
})
}
export function showNote (req: Request, res: Response): void {
NoteUtils.findNoteOrCreate(req, res, function (note) {
// force to use note id
const noteId = req.params.noteId
const id = Note.encodeNoteId(note.id)
if ((note.alias && noteId !== note.alias) || (!note.alias && noteId !== id)) {
return res.redirect(config.serverURL + '/' + (note.alias || id))
}
const body = note.content
const extracted = Note.extractMeta(body)
const meta = Note.parseMeta(extracted.meta)
let title = Note.decodeTitle(note.title)
title = Note.generateWebTitle(meta.title || title)
const opengraph = Note.parseOpengraph(meta, title)
res.set({
'Cache-Control': 'private', // only cache by client
'X-Robots-Tag': 'noindex, nofollow' // prevent crawling
})
return res.render('codimd.ejs', {
title: title,
opengraph: opengraph
})
})
}
export function createFromPOST (req: Request, res: Response): void {
let body = ''
if (req.body && req.body.length > config.documentMaxLength) {
return errors.errorTooLong(res)
} else if (req.body) {
body = req.body
}
body = body.replace(/[\r]/g, '')
return NoteUtils.newNote(req, res, body)
}
export function doAction (req: Request, res: Response): void {
const noteId = req.params.noteId
NoteUtils.findNoteOrCreate(req, res, (note) => {
const action = req.params.action
// TODO: Don't switch on action, choose action in Router and use separate functions
switch (action) {
case 'publish':
case 'pretty': // pretty deprecated
res.redirect(config.serverURL + '/s/' + (note.alias || note.shortid))
break
case 'slide':
res.redirect(config.serverURL + '/p/' + (note.alias || note.shortid))
break
case 'download':
downloadMarkdown(req, res, note)
break
case 'info':
ActionController.getInfo(req, res, note)
break
case 'gist':
ActionController.createGist(req, res, note)
break
case 'revision':
ActionController.getRevision(req, res, note)
break
default:
return res.redirect(config.serverURL + '/' + noteId)
}
})
}
export function downloadMarkdown (req: Request, res: Response, note): void {
const body = note.content
let filename = Note.decodeTitle(note.title)
filename = encodeURIComponent(filename)
res.set({
'Access-Control-Allow-Origin': '*', // allow CORS as API
'Access-Control-Allow-Headers': 'Range',
'Access-Control-Expose-Headers': 'Cache-Control, Content-Encoding, Content-Range',
'Content-Type': 'text/markdown; charset=UTF-8',
'Cache-Control': 'private',
'Content-disposition': 'attachment; filename=' + filename + '.md',
'X-Robots-Tag': 'noindex, nofollow' // prevent crawling
})
res.send(body)
}

View file

@ -1,29 +0,0 @@
import { Router } from 'express'
import { markdownParser } from '../utils'
import * as NoteController from './controller'
import * as SlideController from './slide'
const NoteRouter = Router()
// get new note
NoteRouter.get('/new', NoteController.createFromPOST)
// post new note with content
NoteRouter.post('/new', markdownParser, NoteController.createFromPOST)
// post new note with content and alias
NoteRouter.post('/new/:noteId', markdownParser, NoteController.createFromPOST)
// get publish note
NoteRouter.get('/s/:shortid', NoteController.showPublishNote)
// publish note actions
NoteRouter.get('/s/:shortid/:action', NoteController.publishNoteActions)
// get publish slide
NoteRouter.get('/p/:shortid', SlideController.showPublishSlide)
// publish slide actions
NoteRouter.get('/p/:shortid/:action', SlideController.publishSlideActions)
// get note by id
NoteRouter.get('/:noteId', NoteController.showNote)
// note actions
NoteRouter.get('/:noteId/:action', NoteController.doAction)
// note actions with action id
NoteRouter.get('/:noteId/:action/:actionId', NoteController.doAction)
export { NoteRouter }
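A hypothetical client-side walk through the routes above; the base URL, the note id and the use of fetch are assumptions, the paths and methods come from the router.

const base = 'https://md.example.com' // made up

async function demo (): Promise<void> {
  // Create a note from markdown content; the server redirects to the new note
  await fetch(base + '/new', {
    method: 'POST',
    headers: { 'Content-Type': 'text/markdown' },
    body: '# Hello\n'
  })

  // Published view of a note and its raw markdown download
  await fetch(base + '/s/some-short-id')
  await fetch(base + '/s/some-short-id/download')
}

demo().catch(console.error)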

View file

@ -1,41 +0,0 @@
import { Request, Response } from 'express'
import { config } from '../../config'
import { errors } from '../../errors'
import { logger } from '../../logger'
import { Note } from '../../models'
import * as NoteUtils from './util'
export function publishSlideActions (req: Request, res: Response): void {
NoteUtils.findNoteOrCreate(req, res, function (note) {
const action = req.params.action
if (action === 'edit') {
res.redirect(config.serverURL + '/' + (note.alias ? note.alias : Note.encodeNoteId(note.id)) + '?both')
} else {
res.redirect(config.serverURL + '/p/' + note.shortid)
}
})
}
export function showPublishSlide (req: Request, res: Response): void {
NoteUtils.findNoteOrCreate(req, res, function (note) {
// force to use short id
const shortid = req.params.shortid
if ((note.alias && shortid !== note.alias) || (!note.alias && shortid !== note.shortid)) {
return res.redirect(config.serverURL + '/p/' + (note.alias || note.shortid))
}
note.increment('viewcount').then(function (note) {
if (!note) {
return errors.errorNotFound(res)
}
NoteUtils.getPublishData(req, res, note, (data) => {
res.set({
'Cache-Control': 'private' // only cache by client
})
return res.render('slide.ejs', data)
})
}).catch(function (err) {
logger.error(err)
return errors.errorInternalError(res)
})
})
}

View file

@ -1,149 +0,0 @@
import { Request, Response } from 'express'
import fs from 'fs'
import path from 'path'
import { config } from '../../config'
import { errors } from '../../errors'
import { logger } from '../../logger'
import { Note } from '../../models'
import { PhotoProfile } from '../../utils/PhotoProfile'
export function newNote (req, res: Response, body: string | null): void {
let owner = null
const noteId = req.params.noteId ? req.params.noteId : null
if (req.isAuthenticated()) {
owner = req.user.id
} else if (!config.allowAnonymous) {
return errors.errorForbidden(res)
}
if (config.allowFreeURL && noteId && !config.forbiddenNoteIDs.includes(noteId)) {
req.alias = noteId
} else if (noteId) {
return req.method === 'POST' ? errors.errorForbidden(res) : errors.errorNotFound(res)
}
Note.create({
ownerId: owner,
alias: req.alias ? req.alias : null,
content: body
}).then(function (note) {
return res.redirect(config.serverURL + '/' + (note.alias ? note.alias : Note.encodeNoteId(note.id)))
}).catch(function (err) {
logger.error(err)
return errors.errorInternalError(res)
})
}
export enum Permission {
None,
Read,
Write,
Owner
}
interface NoteObject {
ownerId?: string;
permission: string;
}
export function getPermission (user, note: NoteObject): Permission {
// There are two possible User objects we get passed. One is from socket.io
// and the other is from passport directly. The former sets the logged_in
// parameter to either true or false, whereas for the latter, the logged_in
// parameter is always undefined, and the existence of user itself means the
// user is logged in.
if (!user || user.logged_in === false) {
// Anonymous
switch (note.permission) {
case 'freely':
return Permission.Write
case 'editable':
case 'locked':
return Permission.Read
default:
return Permission.None
}
} else if (note.ownerId === user.id) {
// Owner
return Permission.Owner
} else {
// Registered user
switch (note.permission) {
case 'editable':
case 'limited':
case 'freely':
return Permission.Write
case 'locked':
case 'protected':
return Permission.Read
default:
return Permission.None
}
}
}
export function findNoteOrCreate (req: Request, res: Response, callback: (note: Note) => void): void {
const id = req.params.noteId || req.params.shortid
Note.parseNoteId(id, function (err, _id) {
if (err) {
logger.error(err)
return errors.errorInternalError(res)
}
Note.findOne({
where: {
id: _id
}
}).then(function (note) {
if (!note) {
return newNote(req, res, '')
}
if (getPermission(req.user, note) === Permission.None) {
return errors.errorForbidden(res)
} else {
return callback(note)
}
}).catch(function (err) {
logger.error(err)
return errors.errorInternalError(res)
})
})
}
function isRevealTheme (theme: string): string | undefined {
if (fs.existsSync(path.join(__dirname, '..', '..', '..', '..', 'public', 'build', 'reveal.js', 'css', 'theme', theme + '.css'))) {
return theme
}
return undefined
}
export function getPublishData (req: Request, res: Response, note, callback: (data) => void): void {
const body = note.content
const extracted = Note.extractMeta(body)
const markdown = extracted.markdown
const meta = Note.parseMeta(extracted.meta)
const createtime = note.createdAt
const updatetime = note.lastchangeAt
let title = Note.decodeTitle(note.title)
title = Note.generateWebTitle(meta.title || title)
const ogdata = Note.parseOpengraph(meta, title)
const data = {
title: title,
description: meta.description || (markdown ? Note.generateDescription(markdown) : null),
viewcount: note.viewcount,
createtime: createtime,
updatetime: updatetime,
body: markdown,
theme: meta.slideOptions && isRevealTheme(meta.slideOptions.theme),
meta: JSON.stringify(extracted.meta),
owner: note.owner ? note.owner.id : null,
ownerprofile: note.owner ? PhotoProfile.fromUser(note.owner) : null,
lastchangeuser: note.lastchangeuser ? note.lastchangeuser.id : null,
lastchangeuserprofile: note.lastchangeuser ? PhotoProfile.fromUser(note.lastchangeuser) : null,
robots: meta.robots || false, // default allow robots
GA: meta.GA,
disqus: meta.disqus,
cspNonce: res.locals.nonce,
dnt: req.headers.dnt,
opengraph: ogdata
}
callback(data)
}
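A few illustrative calls against getPermission above; the note and user literals and the import path are made up, the expected results follow the switch statements in this file.

import { getPermission, Permission } from './web/note/util'

const note = { ownerId: 'owner-1', permission: 'editable' }

console.log(getPermission(undefined, note) === Permission.Read) // anonymous on an 'editable' note
console.log(getPermission({ id: 'owner-1' }, note) === Permission.Owner) // the owner
console.log(getPermission({ id: 'user-2' }, note) === Permission.Write) // any signed-in user
console.log(getPermission({ id: 'user-2', logged_in: false }, note) === Permission.Read) // socket.io anonymous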

View file

@ -1,106 +0,0 @@
import { config } from '../config'
import { Router } from 'express'
import { errors } from '../errors'
import { realtime } from '../realtime'
import { Temp } from '../models'
import { logger } from '../logger'
import { urlencodedParser } from './utils'
const StatusRouter = Router()
// get status
StatusRouter.get('/status', function (req, res, _) {
realtime.getStatus(function (data) {
res.set({
'Cache-Control': 'private', // only cache by client
'X-Robots-Tag': 'noindex, nofollow', // prevent crawling
'Content-Type': 'application/json'
})
res.send(data)
})
})
// get temporary data
StatusRouter.get('/temp', function (req, res) {
const host = req.get('host')
if (config.allowOrigin.indexOf(host) === -1) {
errors.errorForbidden(res)
} else {
const tempid = req.query.tempid
if (!tempid || typeof tempid !== 'string') {
errors.errorForbidden(res)
} else {
Temp.findOne({
where: {
id: tempid
}
}).then(function (temp) {
if (!temp) {
errors.errorNotFound(res)
} else {
res.header('Access-Control-Allow-Origin', '*')
res.send({
temp: temp.data
})
temp.destroy().catch(function (err) {
if (err) {
logger.error('remove temp failed: ' + err)
}
})
}
}).catch(function (err) {
logger.error(err)
return errors.errorInternalError(res)
})
}
}
})
// store temporary data
StatusRouter.post('/temp', urlencodedParser, function (req, res) {
const host = req.get('host')
if (config.allowOrigin.indexOf(host) === -1) {
errors.errorForbidden(res)
} else {
const data = req.body.data
if (!data) {
errors.errorForbidden(res)
} else {
logger.debug(`SERVER received temp from [${host}]: ${req.body.data}`)
Temp.create({
data: data
}).then(function (temp) {
if (temp) {
res.header('Access-Control-Allow-Origin', '*')
res.send({
status: 'ok',
id: temp.id
})
} else {
errors.errorInternalError(res)
}
}).catch(function (err) {
logger.error(err)
return errors.errorInternalError(res)
})
}
}
})
StatusRouter.get('/config', function (req, res) {
const data = {
domain: config.domain,
urlpath: config.urlPath,
debug: config.debug,
version: config.fullversion,
DROPBOX_APP_KEY: config.dropbox.appKey,
allowedUploadMimeTypes: config.allowedUploadMimeTypes,
linkifyHeaderStyle: config.linkifyHeaderStyle
}
res.set({
'Cache-Control': 'private', // only cache by client
'X-Robots-Tag': 'noindex, nofollow', // prevent crawling
'Content-Type': 'application/javascript'
})
res.render('../js/lib/common/constant.ejs', data)
})
export { StatusRouter }

View file

@ -1,144 +0,0 @@
import archiver from 'archiver'
import async from 'async'
import { Request, Response, Router } from 'express'
import { errors } from '../errors'
import { Note, User } from '../models'
import { logger } from '../logger'
import { generateAvatar } from '../letter-avatars'
import { config } from '../config'
import { PhotoProfile } from '../utils/PhotoProfile'
const UserRouter = Router()
// get me info
UserRouter.get('/me', function (req: Request, res: Response) {
if (req.isAuthenticated()) {
if (req.user == null) {
return errors.errorInternalError(res)
}
User.findOne({
where: {
id: req.user.id
}
}).then(function (user) {
if (!user) { return errors.errorNotFound(res) }
const profile = PhotoProfile.fromUser(user)
if (profile == null) {
return errors.errorInternalError(res)
}
res.send({
status: 'ok',
id: user.id,
name: profile.name,
photo: profile.photo
})
}).catch(function (err) {
logger.error('read me failed: ' + err)
return errors.errorInternalError(res)
})
} else {
res.send({
status: 'forbidden'
})
}
})
// delete the currently authenticated user
UserRouter.get('/me/delete/:token?', function (req: Request, res: Response) {
if (req.isAuthenticated()) {
if (req.user == null) {
return errors.errorInternalError(res)
}
User.findOne({
where: {
id: req.user.id
}
}).then(function (user) {
if (!user) {
return errors.errorNotFound(res)
}
if (user.deleteToken === req.params.token) {
user.destroy().then(function () {
res.redirect(config.serverURL + '/')
})
} else {
return errors.errorForbidden(res)
}
}).catch(function (err) {
logger.error('delete user failed: ' + err)
return errors.errorInternalError(res)
})
} else {
return errors.errorForbidden(res)
}
})
// export the data of the authenticated user
UserRouter.get('/me/export', function (req: Request, res: Response) {
if (req.isAuthenticated()) {
if (req.user == null) {
return errors.errorInternalError(res)
}
// let output = fs.createWriteStream(__dirname + '/example.zip');
const archive = archiver('zip', {
zlib: { level: 3 } // Sets the compression level.
})
res.setHeader('Content-Type', 'application/zip')
res.attachment('archive.zip')
archive.pipe(res)
archive.on('error', function (err) {
logger.error('export user data failed: ' + err)
return errors.errorInternalError(res)
})
User.findOne({
where: {
id: req.user.id
}
}).then(function (user) {
if (user == null) {
return errors.errorInternalError(res)
}
Note.findAll({
where: {
ownerId: user.id
}
}).then(function (notes) {
const filenames = {}
async.each(notes, function (note, callback) {
const basename = note.title.replace(/\//g, '-') // Prevent subdirectories
let filename
let numberOfDuplicateFilename = 0
do {
const suffix = numberOfDuplicateFilename !== 0 ? '-' + numberOfDuplicateFilename : ''
filename = basename + suffix + '.md'
numberOfDuplicateFilename++
} while (filenames[filename])
filenames[filename] = true
logger.debug('Write: ' + filename)
archive.append(Buffer.from(note.content), { name: filename, date: note.lastchangeAt })
callback(null, null)
}, function (err) {
if (err) {
return errors.errorInternalError(res)
}
archive.finalize()
})
})
}).catch(function (err) {
logger.error('export user data failed: ' + err)
return errors.errorInternalError(res)
})
} else {
return errors.errorForbidden(res)
}
})
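// Hypothetical example of the filename deduplication above (not part of the
// original file): three notes all titled "Meeting" would be written to the
// archive as "Meeting.md", "Meeting-1.md" and "Meeting-2.md".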
UserRouter.get('/user/:username/avatar.svg', function (req: Request, res: Response, _) {
res.setHeader('Content-Type', 'image/svg+xml')
res.setHeader('Cache-Control', 'public, max-age=86400')
res.send(generateAvatar(req.params.username))
})
export { UserRouter }

View file

@ -1,14 +0,0 @@
import bodyParser from 'body-parser'
// create application/x-www-form-urlencoded parser
export const urlencodedParser = bodyParser.urlencoded({
extended: false,
limit: 1024 * 1024 * 10 // 10 mb
})
// create text/markdown parser
export const markdownParser = bodyParser.text({
inflate: true,
type: ['text/plain', 'text/markdown'],
limit: 1024 * 1024 * 10 // 10 mb
})
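// Hypothetical usage sketch (assumed, not part of this file): both parsers are
// meant to be applied per route rather than app-wide, e.g.
//   router.post('/temp', urlencodedParser, handler)     // as in the status router
//   router.put('/notes/:noteId', markdownParser, handler) // accepts text/markdown bodies up to 10 MB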

View file

@ -1,173 +0,0 @@
// external modules
// eslint-disable-next-line @typescript-eslint/camelcase
import { DIFF_DELETE, DIFF_INSERT, diff_match_patch, patch_obj } from 'diff-match-patch'
import { Revision } from '../models'
// Wrapper around process.send to suppress TS2722 ("Cannot invoke an object which is possibly 'undefined'")
// eslint-disable-next-line @typescript-eslint/unbound-method,@typescript-eslint/no-empty-function
function processSend (options): boolean {
if (process?.send !== undefined) {
return process.send(options)
}
return false
}
// We can't use the logger directly, because this code runs in a separate
// Node.js process and the global logger instance is not the parent's. In
// particular, it does not have the log level set correctly.
function log (level: string, msg, ...splat): boolean {
return processSend({
msg: 'log',
level: level,
result: [msg, splat],
cacheKey: 1 // dummy value
})
}
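// Hypothetical sketch of the parent side (assumed, not shown in this file):
// the parent would re-emit forwarded log messages through its real logger, e.g.
//   worker.on('message', (data) => {
//     if (data.msg === 'log') {
//       const [msg, splat] = data.result
//       logger.log(data.level, msg, ...splat)
//     }
//   })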
// eslint-disable-next-line @typescript-eslint/camelcase,new-cap
const dmp: diff_match_patch = new diff_match_patch()
// eslint-disable-next-line @typescript-eslint/camelcase
function getRevision (revisions: Revision[], count: number): { content: string; patch: patch_obj[]; authorship: string } {
const msStart = (new Date()).getTime()
let startContent = ''
let lastPatch = ''
// eslint-disable-next-line @typescript-eslint/camelcase
let applyPatches: patch_obj[] = []
let authorship = ''
if (count <= Math.round(revisions.length / 2)) {
// start from top to target
for (let i = 0; i < count; i++) {
const revision = revisions[i]
if (i === 0) {
startContent = revision.content || revision.lastContent
}
if (i !== count - 1) {
// eslint-disable-next-line @typescript-eslint/camelcase
const patch: patch_obj[] = dmp.patch_fromText(revision.patch)
applyPatches = applyPatches.concat(patch)
}
lastPatch = revision.patch
authorship = revision.authorship
}
// swap DIFF_INSERT and DIFF_DELETE to achieve unpatching
for (let i = 0, l = applyPatches.length; i < l; i++) {
for (let j = 0, m = applyPatches[i].diffs.length; j < m; j++) {
const diff = applyPatches[i].diffs[j]
if (diff[0] === DIFF_INSERT) {
diff[0] = DIFF_DELETE
} else if (diff[0] === DIFF_DELETE) {
diff[0] = DIFF_INSERT
}
}
}
} else {
// start from bottom to target
const l = revisions.length - 1
for (let i = l; i >= count - 1; i--) {
const revision = revisions[i]
if (i === l) {
startContent = revision.lastContent
authorship = revision.authorship
}
if (revision.patch) {
// eslint-disable-next-line @typescript-eslint/camelcase
const patch: patch_obj[] = dmp.patch_fromText(revision.patch)
applyPatches = applyPatches.concat(patch)
}
lastPatch = revision.patch
authorship = revision.authorship
}
}
let finalContent = ''
try {
finalContent = dmp.patch_apply(applyPatches, startContent)[0]
} catch (err) {
throw new Error(err)
}
const data = {
content: finalContent,
patch: dmp.patch_fromText(lastPatch),
authorship: authorship
}
const msEnd = (new Date()).getTime()
log('debug', (msEnd - msStart) + 'ms')
return data
}
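// Hypothetical worked example (not part of the original file) of the
// DIFF_INSERT/DIFF_DELETE swap used above: a forward patch, with its
// operations flipped, applied to the *newer* text reproduces the older one.
//   const forward = dmp.patch_make('hello world', 'hello brave world')
//   forward.forEach(p => p.diffs.forEach(d => {
//     if (d[0] === DIFF_INSERT) d[0] = DIFF_DELETE
//     else if (d[0] === DIFF_DELETE) d[0] = DIFF_INSERT
//   }))
//   dmp.patch_apply(forward, 'hello brave world')[0]   // -> 'hello world'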
function createPatch (lastDoc: string, currDoc: string): string {
const msStart = (new Date()).getTime()
const diff = dmp.diff_main(lastDoc, currDoc)
// eslint-disable-next-line @typescript-eslint/camelcase
const patch: patch_obj[] = dmp.patch_make(lastDoc, diff)
const strPatch: string = dmp.patch_toText(patch)
const msEnd = (new Date()).getTime()
log('debug', strPatch)
log('debug', (msEnd - msStart) + 'ms')
return strPatch
}
class Data {
msg: string
cacheKey: string
lastDoc?: string
currDoc?: string
revisions?: Revision[]
count?: number
}
process.on('message', function (data: Data) {
if (!data || !data.msg || !data.cacheKey) {
return log('error', 'dmp worker error: not enough data')
}
switch (data.msg) {
case 'create patch':
if (data.lastDoc === undefined || data.currDoc === undefined) {
return log('error', 'dmp worker error: not enough data on create patch')
}
try {
const patch: string = createPatch(data.lastDoc, data.currDoc)
processSend({
msg: 'check',
result: patch,
cacheKey: data.cacheKey
})
} catch (err) {
log('error', 'create patch: dmp worker error', err)
processSend({
msg: 'error',
error: err,
cacheKey: data.cacheKey
})
}
break
case 'get revision':
if (data.revisions === undefined || data.count === undefined) {
return log('error', 'dmp worker error: not enough data on get revision')
}
try {
// eslint-disable-next-line @typescript-eslint/camelcase
const result: { content: string; patch: patch_obj[]; authorship: string } = getRevision(data.revisions, data.count)
processSend({
msg: 'check',
result: result,
cacheKey: data.cacheKey
})
} catch (err) {
log('error', 'get revision: dmp worker error', err)
processSend({
msg: 'error',
error: err,
cacheKey: data.cacheKey
})
}
break
}
})
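// Hypothetical sketch (assumed, not part of this file) of how a parent process
// would drive this worker; the message shapes mirror the Data class above:
//   worker.send({ msg: 'create patch', cacheKey: noteId, lastDoc, currDoc })
//   worker.send({ msg: 'get revision', cacheKey: noteId, revisions, count })
//   // replies arrive as { msg: 'check' | 'error', cacheKey, result | error }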
// log uncaught exception
process.on('uncaughtException', function (err: Error) {
log('error', 'An uncaught exception has occurred.')
log('error', err)
log('error', 'Process will exit now.')
process.exit(1)
})

View file

@ -1,110 +0,0 @@
import assert from 'assert'
import { ImportMock } from 'ts-mock-imports'
import * as configModule from '../lib/config'
import { DropboxMiddleware } from '../lib/web/auth/dropbox'
import { EmailMiddleware } from '../lib/web/auth/email'
import { FacebookMiddleware } from '../lib/web/auth/facebook'
import { GithubMiddleware } from '../lib/web/auth/github'
import { GitlabMiddleware } from '../lib/web/auth/gitlab'
import { GoogleMiddleware } from '../lib/web/auth/google'
import { LdapMiddleware } from '../lib/web/auth/ldap'
import { OAuth2Middleware } from '../lib/web/auth/oauth2'
import { OPenIDMiddleware } from '../lib/web/auth/openid'
import { TwitterMiddleware } from '../lib/web/auth/twitter'
describe('AuthMiddlewares', function () {
// We currently exclude the SAML Auth, because it needs a certificate file
const middlewareList = [{
name: 'Facebook',
middleware: FacebookMiddleware,
config: {
facebook: {
clientID: 'foobar',
clientSecret: 'foobar'
}
}
}, {
name: 'Twitter',
middleware: TwitterMiddleware,
config: {
twitter: {
consumerKey: 'foobar',
consumerSecret: 'foobar'
}
}
}, {
name: 'GitHub',
middleware: GithubMiddleware,
config: {
github: {
clientID: 'foobar',
clientSecret: 'foobar'
}
}
}, {
name: 'Gitlab',
middleware: GitlabMiddleware,
config: {
gitlab: {
clientID: 'foobar',
clientSecret: 'foobar'
}
}
}, {
name: 'Dropbox',
middleware: DropboxMiddleware,
config: {
dropbox: {
clientID: 'foobar',
clientSecret: 'foobar'
}
}
}, {
name: 'Google',
middleware: GoogleMiddleware,
config: {
google: {
clientID: 'foobar',
clientSecret: 'foobar'
}
}
}, {
name: 'LDAP',
middleware: LdapMiddleware,
config: {
ldap: {}
}
}, {
name: 'OAuth2',
middleware: OAuth2Middleware,
config: {
oauth2: {
clientID: 'foobar',
clientSecret: 'foobar',
authorizationURL: 'foobar',
tokenURL: 'foobar',
userProfileURL: 'foobar',
scope: 'foobar'
}
}
}, {
name: 'Email',
middleware: EmailMiddleware,
config: {}
}, {
name: 'OpenID',
middleware: OPenIDMiddleware,
config: {}
}]
middlewareList.forEach((middleware) => {
describe(middleware.name + 'Middleware', () => {
before(() => {
ImportMock.mockOther(configModule, 'config', middleware.config)
})
it('can be instantiated', () => {
assert.ok(middleware.middleware.getMiddleware())
})
})
})
})

View file

@ -1,109 +0,0 @@
/* eslint-env node, mocha */
'use strict'
import assert from 'assert'
import crypto from 'crypto'
import fs from 'fs'
import path from 'path'
import * as configModule from '../lib/config'
import { ImportMock } from 'ts-mock-imports'
describe('Content security policies', function () {
let defaultConfig, csp
before(function () {
csp = require('../lib/csp')
})
beforeEach(function () {
// Reset config to make sure we don't influence other tests
defaultConfig = {
csp: {
enable: true,
directives: {
},
addDefaults: true,
addDisqus: true,
addGoogleAnalytics: true,
upgradeInsecureRequests: 'auto',
reportURI: undefined
},
useCDN: true
}
})
// beginning of tests
it('Disable CDN', function () {
const testconfig = defaultConfig
testconfig.useCDN = false
ImportMock.mockOther(configModule, 'config', testconfig)
assert(!csp.computeDirectives().scriptSrc.includes('https://cdnjs.cloudflare.com'))
assert(!csp.computeDirectives().scriptSrc.includes('https://cdn.mathjax.org'))
assert(!csp.computeDirectives().styleSrc.includes('https://cdnjs.cloudflare.com'))
assert(!csp.computeDirectives().styleSrc.includes('https://fonts.googleapis.com'))
assert(!csp.computeDirectives().fontSrc.includes('https://cdnjs.cloudflare.com'))
assert(!csp.computeDirectives().fontSrc.includes('https://fonts.gstatic.com'))
})
it('Disable Google Analytics', function () {
const testconfig = defaultConfig
testconfig.csp.addGoogleAnalytics = false
ImportMock.mockOther(configModule, 'config', testconfig)
assert(!csp.computeDirectives().scriptSrc.includes('https://www.google-analytics.com'))
})
it('Disable Disqus', function () {
const testconfig = defaultConfig
testconfig.csp.addDisqus = false
ImportMock.mockOther(configModule, 'config', testconfig)
assert(!csp.computeDirectives().scriptSrc.includes('https://disqus.com'))
assert(!csp.computeDirectives().scriptSrc.includes('https://*.disqus.com'))
assert(!csp.computeDirectives().scriptSrc.includes('https://*.disquscdn.com'))
assert(!csp.computeDirectives().styleSrc.includes('https://*.disquscdn.com'))
assert(!csp.computeDirectives().fontSrc.includes('https://*.disquscdn.com'))
})
it('Set ReportURI', function () {
const testconfig = defaultConfig
testconfig.csp.reportURI = 'https://example.com/reportURI'
ImportMock.mockOther(configModule, 'config', testconfig)
assert.strictEqual(csp.computeDirectives().reportUri, 'https://example.com/reportURI')
})
it('Set own directives', function () {
const testconfig = defaultConfig
ImportMock.mockOther(configModule, 'config', testconfig)
const unextendedCSP = csp.computeDirectives()
testconfig.csp.directives = {
defaultSrc: ['https://default.example.com'],
scriptSrc: ['https://script.example.com'],
imgSrc: ['https://img.example.com'],
styleSrc: ['https://style.example.com'],
fontSrc: ['https://font.example.com'],
objectSrc: ['https://object.example.com'],
mediaSrc: ['https://media.example.com'],
childSrc: ['https://child.example.com'],
connectSrc: ['https://connect.example.com']
}
ImportMock.mockOther(configModule, 'config', testconfig)
const variations = ['default', 'script', 'img', 'style', 'font', 'object', 'media', 'child', 'connect']
for (let i = 0; i < variations.length; i++) {
assert.strictEqual(csp.computeDirectives()[variations[i] + 'Src'].toString(), ['https://' + variations[i] + '.example.com'].concat(unextendedCSP[variations[i] + 'Src']).toString())
}
})
/*
* This test reminds us to update the CSP hash for the speaker notes
*/
it('Unchanged hash for reveal.js speaker notes plugin', function () {
const hash = crypto.createHash('sha1')
hash.update(fs.readFileSync(path.join(process.cwd(), '/node_modules/reveal.js/plugin/notes/notes.html'), 'utf8'), 'utf8')
assert.strictEqual(hash.digest('hex'), 'd5d872ae49b5db27f638b152e6e528837204d380')
})
})

View file

@ -1,51 +0,0 @@
/* eslint-env node, mocha */
'use strict'
import { ImportMock } from 'ts-mock-imports'
import * as configModule from '../lib/config'
import assert from 'assert'
import * as avatars from '../lib/letter-avatars'
describe('generateAvatarURL() gravatar enabled', function () {
beforeEach(function () {
// Reset config to make sure we don't influence other tests
const testconfig = {
allowGravatar: true,
serverURL: 'http://localhost:3000',
port: 3000
}
ImportMock.mockOther(configModule, 'config', testconfig)
})
it('should return correct urls', function () {
assert.strictEqual(avatars.generateAvatarURL('Daan Sprenkels', 'hello@dsprenkels.com', true), 'https://cdn.libravatar.org/avatar/d41b5f3508cc3f31865566a47dd0336b?s=400')
assert.strictEqual(avatars.generateAvatarURL('Daan Sprenkels', 'hello@dsprenkels.com', false), 'https://cdn.libravatar.org/avatar/d41b5f3508cc3f31865566a47dd0336b?s=96')
})
it('should return correct urls for names with spaces', function () {
assert.strictEqual(avatars.generateAvatarURL('Daan Sprenkels'), 'http://localhost:3000/user/Daan%20Sprenkels/avatar.svg')
})
})
describe('generateAvatarURL() gravatar disabled', function () {
beforeEach(function () {
// Reset config to make sure we don't influence other tests
const testconfig = {
allowGravatar: false,
serverURL: 'http://localhost:3000',
port: 3000
}
ImportMock.mockOther(configModule, 'config', testconfig)
})
it('should return correct urls', function () {
assert.strictEqual(avatars.generateAvatarURL('Daan Sprenkels', 'hello@dsprenkels.com', true), 'http://localhost:3000/user/Daan%20Sprenkels/avatar.svg')
assert.strictEqual(avatars.generateAvatarURL('Daan Sprenkels', 'hello@dsprenkels.com', false), 'http://localhost:3000/user/Daan%20Sprenkels/avatar.svg')
})
it('should return correct urls for names with spaces', function () {
assert.strictEqual(avatars.generateAvatarURL('Daan Sprenkels'), 'http://localhost:3000/user/Daan%20Sprenkels/avatar.svg')
})
})

View file

@ -1,60 +0,0 @@
/* eslint-env node, mocha */
import { User, sequelize } from '../lib/models'
import assert = require('assert')
describe('User Sequelize model', function () {
beforeEach(() => {
return sequelize.sync({ force: true })
})
it('stores a password hash on creation and verifies that password', function () {
const userData = {
password: 'test123'
}
const intentionallyInvalidPassword = 'stuff'
return User.create(userData).then(u => {
return Promise.all([
u.verifyPassword(userData.password).then(result => assert.strictEqual(result, true)),
u.verifyPassword(intentionallyInvalidPassword).then(result => assert.strictEqual(result, false))
]).catch(e => assert.fail(e))
})
})
it('can cope with password stored in standard scrypt header format', function () {
const testKey = '736372797074000e00000008000000018c7b8c1ac273fd339badde759b3efc418bc61b776debd02dfe95989383cf9980ad21d2403dce33f4b551f5e98ce84edb792aee62600b1303ab8d4e6f0a53b0746e73193dbf557b888efc83a2d6a055a9'
const validPassword = 'test'
const intentionallyInvalidPassword = 'stuff'
const u = User.build()
u.setDataValue('password', testKey) // this circumvents the setter - which we don't need in this case!
return Promise.all([
u.verifyPassword(validPassword).then(result => assert.strictEqual(result, true)),
u.verifyPassword(intentionallyInvalidPassword).then(result => assert.strictEqual(result, false))
]).catch(e => assert.fail(e))
})
it('deals with various characters correctly', function () {
const combinations = [
// ['correct password', 'scrypt style hash']
['test', '736372797074000e00000008000000018c7b8c1ac273fd339badde759b3efc418bc61b776debd02dfe95989383cf9980ad21d2403dce33f4b551f5e98ce84edb792aee62600b1303ab8d4e6f0a53b0746e73193dbf557b888efc83a2d6a055a9'],
['ohai', '736372797074000e00000008000000010efec4e5ce6a5294491f1b1cccc38d3562f84844b9271aef635f8bc338cf4e0e0bac62ebb11379e85894c1f694e038fc39b087b4fdacd1280b50a7382d7ffbfc82f2190bef70d47708d2a94b75126294'],
['my secret pw', '736372797074000f0000000800000001ffb4cd10a1dfe9e64c1e5416fd6d55b390b6822e78b46fd1f963fe9f317a1e05f9c5fee15e1f618286f4e38b55364ae1e7dc295c9dc33ee0f5712e86afe37e5784ff9c7cf84cf0e631dd11f84f3621e7'],
['my secret pw', /* different hash! */ '736372797074000f0000000800000001f6083e9593365acd07550f7c72f19973fb7d52c3ef0a78026ff66c48ab14493843c642167b5e6b7f31927e8eeb912bc2639e41955fae15da5099998948cfeacd022f705624931c3b30104e6bb296b805'],
['i am so extremely long, it\'s not even funny. Wait, you\'re still reading?', '736372797074000f00000008000000012d205f7bb529bb3a8b8bb25f5ab46197c7e9baf1aad64cf5e7b2584c84748cacf5e60631d58d21cb51fa34ea93b517e2fe2eb722931db5a70ff5a1330d821288ee7380c4136369f064b71b191a785a5b']
]
const intentionallyInvalidPassword = 'stuff'
return Promise.all(combinations.map((combination, index) => {
const u = User.build()
u.setDataValue('password', combination[1])
return Promise.all([
u.verifyPassword(combination[0])
.then(result => assert.strictEqual(result, true, `password #${index} "${combination[0]}" should have been verified`)),
u.verifyPassword(intentionallyInvalidPassword)
.then(result => assert.strictEqual(result, false, `password #${index} "${combination[0]}" should NOT have been verified`))
])
})).catch(e => assert.fail(e))
})
})