Merge branch 'master' into DepauMD

This commit is contained in:
Davide Depau 2019-08-04 10:49:43 +02:00
commit 7b31ddecc3
Signed by: depau
GPG Key ID: C7D999B6A55EFE86
34 changed files with 3702 additions and 1764 deletions

View File

@ -23,7 +23,7 @@ We use the Developer Certificate of Origin (DCO) as an additional safeguard
for the CodiMD project. This is a well established and widely used for the CodiMD project. This is a well established and widely used
mechanism to assure contributors have confirmed their right to license mechanism to assure contributors have confirmed their right to license
their contribution under the project's license. their contribution under the project's license.
Please read [contribute/developer-certificate-of-origin][dcofile]. Please read [docs/legal/developer-certificate-of-origin.txt][dcofile].
If you can certify it, then just add a line to every git commit message: If you can certify it, then just add a line to every git commit message:
```` ````

View File

@ -39,8 +39,9 @@ all of these:
* [Docker](docs/setup/docker.md) * [Docker](docs/setup/docker.md)
* [Kubernetes](docs/setup/kubernetes.md) * [Kubernetes](docs/setup/kubernetes.md)
* [Cloudron](docs/setup/cloudron.md) * [Cloudron](docs/setup/cloudron.md)
* [LinuxServer.io (multi-arch docker)](docs/setup/docker-linuxserver.md)
* [Heroku](docs/setup/heroku.md) * [Heroku](docs/setup/heroku.md)
* [manual setup](docs/setup/manual-setup.md) * [Manual setup](docs/setup/manual-setup.md)
If you do not wish to run your own setup, you can find a commercial offering at If you do not wish to run your own setup, you can find a commercial offering at
https://hackmd.io. This is not the same codebase as this one, but it is a very https://hackmd.io. This is not the same codebase as this one, but it is a very

View File

@ -0,0 +1,14 @@
LinuxServer.io CodiMD Image
===
[![LinuxServer.io Discord](https://img.shields.io/discord/354974912613449730.svg?logo=discord&label=LSIO%20Discord&style=flat-square)](https://discord.gg/YWrKVTn)[![container version badge](https://images.microbadger.com/badges/version/linuxserver/codimd.svg)](https://microbadger.com/images/linuxserver/codimd "Get your own version badge on microbadger.com")[![container image size badge](https://images.microbadger.com/badges/image/linuxserver/codimd.svg)](https://microbadger.com/images/linuxserver/codimd "Get your own version badge on microbadger.com")![Docker Pulls](https://img.shields.io/docker/pulls/linuxserver/codimd.svg)![Docker Stars](https://img.shields.io/docker/stars/linuxserver/codimd.svg)[![Build Status](https://ci.linuxserver.io/buildStatus/icon?job=Docker-Pipeline-Builders/docker-codimd/master)](https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-codimd/job/master/)[![LinuxServer.io CI summary](https://lsio-ci.ams3.digitaloceanspaces.com/linuxserver/codimd/latest/badge.svg)](https://lsio-ci.ams3.digitaloceanspaces.com/linuxserver/codimd/latest/index.html)
[LinuxServer.io](https://linuxserver.io) have created an Ubuntu-based multi-arch container image for x86-64, arm64 and armhf which supports PDF export from all architectures using [PhantomJS](https://phantomjs.org/).
- It supports all the environment variables detailed in the [configuration documentation](../configuration-env-vars.md) to modify it according to your needs.
- It gets rebuilt on new releases from CodiMD and also weekly if necessary to update any other package changes in the underlying container, making it easy to keep your CodiMD instance up to date.
- It also details how to easily [utilize Docker networking to reverse proxy](https://github.com/linuxserver/docker-codimd/#application-setup) CodiMD using their [LetsEncrypt docker image](https://github.com/linuxserver/docker-letsencrypt)
In order to contribute check the LinuxServer.io [GitHub repository](https://github.com/linuxserver/docker-codimd/) for CodiMD.
And to find all tags and versions of the image, check the [Docker Hub repository](https://hub.docker.com/r/linuxserver/codimd).

View File

@ -1,4 +1,4 @@
CodiMD by docker container CodiMD Docker Image
=== ===
[![Try in PWD](https://cdn.rawgit.com/play-with-docker/stacks/cff22438/assets/images/button.png)](http://play-with-docker.com?stack=https://github.com/codimd/container/raw/master/docker-compose.yml&stack_name=codimd) [![Try in PWD](https://cdn.rawgit.com/play-with-docker/stacks/cff22438/assets/images/button.png)](http://play-with-docker.com?stack=https://github.com/codimd/container/raw/master/docker-compose.yml&stack_name=codimd)

View File

@ -13,6 +13,7 @@ function getSecret (secret) {
if (fs.existsSync(basePath)) { if (fs.existsSync(basePath)) {
module.exports = { module.exports = {
dbURL: getSecret('dbURL'),
sessionsecret: getSecret('sessionsecret'), sessionsecret: getSecret('sessionsecret'),
sslkeypath: getSecret('sslkeypath'), sslkeypath: getSecret('sslkeypath'),
sslcertpath: getSecret('sslcertpath'), sslcertpath: getSecret('sslcertpath'),

View File

@ -4,7 +4,6 @@
var LZString = require('lz-string') var LZString = require('lz-string')
// core // core
var config = require('./config')
var logger = require('./logger') var logger = require('./logger')
var response = require('./response') var response = require('./response')
var models = require('./models') var models = require('./models')
@ -56,9 +55,7 @@ function getHistory (userid, callback) {
} }
history = parseHistoryToObject(history) history = parseHistoryToObject(history)
} }
if (config.debug) { logger.debug(`read history success: ${user.id}`)
logger.info('read history success: ' + user.id)
}
return callback(null, history) return callback(null, history)
}).catch(function (err) { }).catch(function (err) {
logger.error('read history failed: ' + err) logger.error('read history failed: ' + err)
@ -140,7 +137,7 @@ function historyPost (req, res) {
var noteId = req.params.noteId var noteId = req.params.noteId
if (!noteId) { if (!noteId) {
if (typeof req.body['history'] === 'undefined') return response.errorBadRequest(res) if (typeof req.body['history'] === 'undefined') return response.errorBadRequest(res)
if (config.debug) { logger.info('SERVER received history from [' + req.user.id + ']: ' + req.body.history) } logger.debug(`SERVER received history from [${req.user.id}]: ${req.body.history}`)
try { try {
var history = JSON.parse(req.body.history) var history = JSON.parse(req.body.history)
} catch (err) { } catch (err) {

View File

@ -18,25 +18,25 @@ module.exports = function (sequelize, DataTypes) {
unique: true, unique: true,
fields: ['noteId', 'userId'] fields: ['noteId', 'userId']
} }
], ]
classMethods: {
associate: function (models) {
Author.belongsTo(models.Note, {
foreignKey: 'noteId',
as: 'note',
constraints: false,
onDelete: 'CASCADE',
hooks: true
})
Author.belongsTo(models.User, {
foreignKey: 'userId',
as: 'user',
constraints: false,
onDelete: 'CASCADE',
hooks: true
})
}
}
}) })
// Register Author's relations once every model has been defined.
// Both relations are unconstrained (no FK in the DB) and cascade deletes
// through Sequelize hooks instead.
Author.associate = function (models) {
  const targets = [
    { model: models.Note, foreignKey: 'noteId', as: 'note' },
    { model: models.User, foreignKey: 'userId', as: 'user' }
  ]
  targets.forEach(function (target) {
    Author.belongsTo(target.model, {
      foreignKey: target.foreignKey,
      as: target.as,
      constraints: false,
      onDelete: 'CASCADE',
      hooks: true
    })
  })
}
return Author return Author
} }

View File

@ -10,7 +10,9 @@ var config = require('../config')
var logger = require('../logger') var logger = require('../logger')
var dbconfig = cloneDeep(config.db) var dbconfig = cloneDeep(config.db)
dbconfig.logging = config.debug ? logger.info : false dbconfig.logging = config.debug ? (data) => {
logger.info(data)
} : false
var sequelize = null var sequelize = null

View File

@ -86,486 +86,492 @@ module.exports = function (sequelize, DataTypes) {
} }
}, { }, {
paranoid: false, paranoid: false,
classMethods: {
associate: function (models) {
Note.belongsTo(models.User, {
foreignKey: 'ownerId',
as: 'owner',
constraints: false,
onDelete: 'CASCADE',
hooks: true
})
Note.belongsTo(models.User, {
foreignKey: 'lastchangeuserId',
as: 'lastchangeuser',
constraints: false
})
Note.hasMany(models.Revision, {
foreignKey: 'noteId',
constraints: false
})
Note.hasMany(models.Author, {
foreignKey: 'noteId',
as: 'authors',
constraints: false
})
},
checkFileExist: function (filePath) {
try {
return fs.statSync(filePath).isFile()
} catch (err) {
return false
}
},
encodeNoteId: function (id) {
// remove dashes in UUID and encode in url-safe base64
let str = id.replace(/-/g, '')
let hexStr = Buffer.from(str, 'hex')
return base64url.encode(hexStr)
},
decodeNoteId: function (encodedId) {
// decode from url-safe base64
let id = base64url.toBuffer(encodedId).toString('hex')
// add dashes between the UUID string parts
let idParts = []
idParts.push(id.substr(0, 8))
idParts.push(id.substr(8, 4))
idParts.push(id.substr(12, 4))
idParts.push(id.substr(16, 4))
idParts.push(id.substr(20, 12))
return idParts.join('-')
},
checkNoteIdValid: function (id) {
var uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i
var result = id.match(uuidRegex)
if (result && result.length === 1) { return true } else { return false }
},
parseNoteId: function (noteId, callback) {
async.series({
parseNoteIdByAlias: function (_callback) {
// try to parse note id by alias (e.g. doc)
Note.findOne({
where: {
alias: noteId
}
}).then(function (note) {
if (note) {
let filePath = path.join(config.docsPath, noteId + '.md')
if (Note.checkFileExist(filePath)) {
// if doc in filesystem have newer modified time than last change time
// then will update the doc in db
var fsModifiedTime = moment(fs.statSync(filePath).mtime)
var dbModifiedTime = moment(note.lastchangeAt || note.createdAt)
var body = fs.readFileSync(filePath, 'utf8')
var contentLength = body.length
var title = Note.parseNoteTitle(body)
if (fsModifiedTime.isAfter(dbModifiedTime) && note.content !== body) {
note.update({
title: title,
content: body,
lastchangeAt: fsModifiedTime
}).then(function (note) {
sequelize.models.Revision.saveNoteRevision(note, function (err, revision) {
if (err) return _callback(err, null)
// update authorship on after making revision of docs
var patch = dmp.patch_fromText(revision.patch)
var operations = Note.transformPatchToOperations(patch, contentLength)
var authorship = note.authorship
for (let i = 0; i < operations.length; i++) {
authorship = Note.updateAuthorshipByOperation(operations[i], null, authorship)
}
note.update({
authorship: authorship
}).then(function (note) {
return callback(null, note.id)
}).catch(function (err) {
return _callback(err, null)
})
})
}).catch(function (err) {
return _callback(err, null)
})
} else {
return callback(null, note.id)
}
} else {
return callback(null, note.id)
}
} else {
var filePath = path.join(config.docsPath, noteId + '.md')
if (Note.checkFileExist(filePath)) {
Note.create({
alias: noteId,
owner: null,
permission: 'locked'
}).then(function (note) {
return callback(null, note.id)
}).catch(function (err) {
return _callback(err, null)
})
} else {
return _callback(null, null)
}
}
}).catch(function (err) {
return _callback(err, null)
})
},
// parse note id by LZString is deprecated, here for compability
parseNoteIdByLZString: function (_callback) {
// Calculate minimal string length for an UUID that is encoded
// base64 encoded and optimize comparsion by using -1
// this should make a lot of LZ-String parsing errors obsolete
// as we can assume that a nodeId that is 48 chars or longer is a
// noteID.
const base64UuidLength = ((4 * 36) / 3) - 1
if (!(noteId.length > base64UuidLength)) {
return _callback(null, null)
}
// try to parse note id by LZString Base64
try {
var id = LZString.decompressFromBase64(noteId)
if (id && Note.checkNoteIdValid(id)) { return callback(null, id) } else { return _callback(null, null) }
} catch (err) {
if (err.message === 'Cannot read property \'charAt\' of undefined') {
logger.warning('Looks like we can not decode "' + noteId + '" with LZString. Can be ignored.')
} else {
logger.error(err)
}
return _callback(null, null)
}
},
parseNoteIdByBase64Url: function (_callback) {
// try to parse note id by base64url
try {
var id = Note.decodeNoteId(noteId)
if (id && Note.checkNoteIdValid(id)) { return callback(null, id) } else { return _callback(null, null) }
} catch (err) {
logger.error(err)
return _callback(null, null)
}
},
parseNoteIdByShortId: function (_callback) {
// try to parse note id by shortId
try {
if (shortId.isValid(noteId)) {
Note.findOne({
where: {
shortid: noteId
}
}).then(function (note) {
if (!note) return _callback(null, null)
return callback(null, note.id)
}).catch(function (err) {
return _callback(err, null)
})
} else {
return _callback(null, null)
}
} catch (err) {
return _callback(err, null)
}
}
}, function (err, result) {
if (err) {
logger.error(err)
return callback(err, null)
}
return callback(null, null)
})
},
parseNoteInfo: function (body) {
var parsed = Note.extractMeta(body)
var $ = cheerio.load(md.render(parsed.markdown))
return {
title: Note.extractNoteTitle(parsed.meta, $),
tags: Note.extractNoteTags(parsed.meta, $)
}
},
parseNoteTitle: function (body) {
var parsed = Note.extractMeta(body)
var $ = cheerio.load(md.render(parsed.markdown))
return Note.extractNoteTitle(parsed.meta, $)
},
extractNoteTitle: function (meta, $) {
var title = ''
if (meta.title && (typeof meta.title === 'string' || typeof meta.title === 'number')) {
title = meta.title
} else {
var h1s = $('h1')
if (h1s.length > 0 && h1s.first().text().split('\n').length === 1) { title = S(h1s.first().text()).stripTags().s }
}
if (!title) title = 'Untitled'
return title
},
generateDescription: function (markdown) {
return markdown.substr(0, 100).replace(/(?:\r\n|\r|\n)/g, ' ')
},
decodeTitle: function (title) {
return title || 'Untitled'
},
generateWebTitle: function (title) {
title = !title || title === 'Untitled' ? 'DepauMD - Collaborative markdown notes' : title + ' - DepauMD'
return title
},
extractNoteTags: function (meta, $) {
var tags = []
var rawtags = []
if (meta.tags && (typeof meta.tags === 'string' || typeof meta.tags === 'number')) {
var metaTags = ('' + meta.tags).split(',')
for (let i = 0; i < metaTags.length; i++) {
var text = metaTags[i].trim()
if (text) rawtags.push(text)
}
} else {
var h6s = $('h6')
h6s.each(function (key, value) {
if (/^tags/gmi.test($(value).text())) {
var codes = $(value).find('code')
for (let i = 0; i < codes.length; i++) {
var text = S($(codes[i]).text().trim()).stripTags().s
if (text) rawtags.push(text)
}
}
})
}
for (let i = 0; i < rawtags.length; i++) {
var found = false
for (let j = 0; j < tags.length; j++) {
if (tags[j] === rawtags[i]) {
found = true
break
}
}
if (!found) { tags.push(rawtags[i]) }
}
return tags
},
extractMeta: function (content) {
var obj = null
try {
obj = metaMarked(content)
if (!obj.markdown) obj.markdown = ''
if (!obj.meta) obj.meta = {}
} catch (err) {
obj = {
markdown: content,
meta: {}
}
}
return obj
},
parseMeta: function (meta) {
var _meta = {}
if (meta) {
if (meta.title && (typeof meta.title === 'string' || typeof meta.title === 'number')) { _meta.title = meta.title }
if (meta.description && (typeof meta.description === 'string' || typeof meta.description === 'number')) { _meta.description = meta.description }
if (meta.robots && (typeof meta.robots === 'string' || typeof meta.robots === 'number')) { _meta.robots = meta.robots }
if (meta.GA && (typeof meta.GA === 'string' || typeof meta.GA === 'number')) { _meta.GA = meta.GA }
if (meta.disqus && (typeof meta.disqus === 'string' || typeof meta.disqus === 'number')) { _meta.disqus = meta.disqus }
if (meta.slideOptions && (typeof meta.slideOptions === 'object')) { _meta.slideOptions = meta.slideOptions }
}
return _meta
},
updateAuthorshipByOperation: function (operation, userId, authorships) {
var index = 0
var timestamp = Date.now()
for (let i = 0; i < operation.length; i++) {
var op = operation[i]
if (ot.TextOperation.isRetain(op)) {
index += op
} else if (ot.TextOperation.isInsert(op)) {
let opStart = index
let opEnd = index + op.length
var inserted = false
// authorship format: [userId, startPos, endPos, createdAt, updatedAt]
if (authorships.length <= 0) authorships.push([userId, opStart, opEnd, timestamp, timestamp])
else {
for (let j = 0; j < authorships.length; j++) {
let authorship = authorships[j]
if (!inserted) {
let nextAuthorship = authorships[j + 1] || -1
if ((nextAuthorship !== -1 && nextAuthorship[1] >= opEnd) || j >= authorships.length - 1) {
if (authorship[1] < opStart && authorship[2] > opStart) {
// divide
let postLength = authorship[2] - opStart
authorship[2] = opStart
authorship[4] = timestamp
authorships.splice(j + 1, 0, [userId, opStart, opEnd, timestamp, timestamp])
authorships.splice(j + 2, 0, [authorship[0], opEnd, opEnd + postLength, authorship[3], timestamp])
j += 2
inserted = true
} else if (authorship[1] >= opStart) {
authorships.splice(j, 0, [userId, opStart, opEnd, timestamp, timestamp])
j += 1
inserted = true
} else if (authorship[2] <= opStart) {
authorships.splice(j + 1, 0, [userId, opStart, opEnd, timestamp, timestamp])
j += 1
inserted = true
}
}
}
if (authorship[1] >= opStart) {
authorship[1] += op.length
authorship[2] += op.length
}
}
}
index += op.length
} else if (ot.TextOperation.isDelete(op)) {
let opStart = index
let opEnd = index - op
if (operation.length === 1) {
authorships = []
} else if (authorships.length > 0) {
for (let j = 0; j < authorships.length; j++) {
let authorship = authorships[j]
if (authorship[1] >= opStart && authorship[1] <= opEnd && authorship[2] >= opStart && authorship[2] <= opEnd) {
authorships.splice(j, 1)
j -= 1
} else if (authorship[1] < opStart && authorship[1] < opEnd && authorship[2] > opStart && authorship[2] > opEnd) {
authorship[2] += op
authorship[4] = timestamp
} else if (authorship[2] >= opStart && authorship[2] <= opEnd) {
authorship[2] = opStart
authorship[4] = timestamp
} else if (authorship[1] >= opStart && authorship[1] <= opEnd) {
authorship[1] = opEnd
authorship[4] = timestamp
}
if (authorship[1] >= opEnd) {
authorship[1] += op
authorship[2] += op
}
}
}
index += op
}
}
// merge
for (let j = 0; j < authorships.length; j++) {
let authorship = authorships[j]
for (let k = j + 1; k < authorships.length; k++) {
let nextAuthorship = authorships[k]
if (nextAuthorship && authorship[0] === nextAuthorship[0] && authorship[2] === nextAuthorship[1]) {
let minTimestamp = Math.min(authorship[3], nextAuthorship[3])
let maxTimestamp = Math.max(authorship[3], nextAuthorship[3])
authorships.splice(j, 1, [authorship[0], authorship[1], nextAuthorship[2], minTimestamp, maxTimestamp])
authorships.splice(k, 1)
j -= 1
break
}
}
}
// clear
for (let j = 0; j < authorships.length; j++) {
let authorship = authorships[j]
if (!authorship[0]) {
authorships.splice(j, 1)
j -= 1
}
}
return authorships
},
transformPatchToOperations: function (patch, contentLength) {
var operations = []
if (patch.length > 0) {
// calculate original content length
for (let j = patch.length - 1; j >= 0; j--) {
var p = patch[j]
for (let i = 0; i < p.diffs.length; i++) {
var diff = p.diffs[i]
switch (diff[0]) {
case 1: // insert
contentLength -= diff[1].length
break
case -1: // delete
contentLength += diff[1].length
break
}
}
}
// generate operations
var bias = 0
var lengthBias = 0
for (let j = 0; j < patch.length; j++) {
var operation = []
let p = patch[j]
var currIndex = p.start1
var currLength = contentLength - bias
for (let i = 0; i < p.diffs.length; i++) {
let diff = p.diffs[i]
switch (diff[0]) {
case 0: // retain
if (i === 0) {
// first
operation.push(currIndex + diff[1].length)
} else if (i !== p.diffs.length - 1) {
// mid
operation.push(diff[1].length)
} else {
// last
operation.push(currLength + lengthBias - currIndex)
}
currIndex += diff[1].length
break
case 1: // insert
operation.push(diff[1])
lengthBias += diff[1].length
currIndex += diff[1].length
break
case -1: // delete
operation.push(-diff[1].length)
bias += diff[1].length
currIndex += diff[1].length
break
}
}
operations.push(operation)
}
}
return operations
}
},
hooks: { hooks: {
beforeCreate: function (note, options, callback) { beforeCreate: function (note, options) {
// if no content specified then use default note return new Promise(function (resolve, reject) {
if (!note.content) { // if no content specified then use default note
var body = null if (!note.content) {
let filePath = null var body = null
if (!note.alias) { let filePath = null
filePath = config.defaultNotePath if (!note.alias) {
} else { filePath = config.defaultNotePath
filePath = path.join(config.docsPath, note.alias + '.md') } else {
} filePath = path.join(config.docsPath, note.alias + '.md')
if (Note.checkFileExist(filePath)) { }
var fsCreatedTime = moment(fs.statSync(filePath).ctime) if (Note.checkFileExist(filePath)) {
body = fs.readFileSync(filePath, 'utf8') var fsCreatedTime = moment(fs.statSync(filePath).ctime)
note.title = Note.parseNoteTitle(body) body = fs.readFileSync(filePath, 'utf8')
note.content = body note.title = Note.parseNoteTitle(body)
if (filePath !== config.defaultNotePath) { note.content = body
note.createdAt = fsCreatedTime if (filePath !== config.defaultNotePath) {
note.createdAt = fsCreatedTime
}
} }
} }
} // if no permission specified and have owner then give default permission in config, else default permission is freely
// if no permission specified and have owner then give default permission in config, else default permission is freely if (!note.permission) {
if (!note.permission) { if (note.ownerId) {
if (note.ownerId) { note.permission = config.defaultPermission
note.permission = config.defaultPermission } else {
} else { note.permission = 'freely'
note.permission = 'freely' }
} }
} return resolve(note)
return callback(null, note) })
}, },
afterCreate: function (note, options, callback) { afterCreate: function (note, options, callback) {
sequelize.models.Revision.saveNoteRevision(note, function (err, revision) { return new Promise(function (resolve, reject) {
callback(err, note) sequelize.models.Revision.saveNoteRevision(note, function (err, revision) {
if (err) {
return reject(err)
}
return resolve(note)
})
}) })
} }
} }
}) })
// Register Note's relations once every model has been defined.
// All relations are unconstrained (no FK enforced in the DB); only the
// owner relation cascades deletes, via Sequelize hooks.
Note.associate = function (models) {
  const { User, Revision, Author } = models
  Note.belongsTo(User, {
    foreignKey: 'ownerId',
    as: 'owner',
    constraints: false,
    onDelete: 'CASCADE',
    hooks: true
  })
  Note.belongsTo(User, {
    foreignKey: 'lastchangeuserId',
    as: 'lastchangeuser',
    constraints: false
  })
  Note.hasMany(Revision, {
    foreignKey: 'noteId',
    constraints: false
  })
  Note.hasMany(Author, {
    foreignKey: 'noteId',
    as: 'authors',
    constraints: false
  })
}
// Return true when filePath exists and is a regular file.
// Any fs error (missing path, permission denied, ...) yields false.
Note.checkFileExist = function (filePath) {
  let stats = null
  try {
    stats = fs.statSync(filePath)
  } catch (err) {
    return false
  }
  return stats.isFile()
}
// Compact a dashed UUID into a short url-safe base64 id:
// strip the dashes, treat the remaining hex digits as raw bytes,
// then base64url-encode those bytes.
Note.encodeNoteId = function (id) {
  const compactHex = id.split('-').join('')
  const rawBytes = Buffer.from(compactHex, 'hex')
  return base64url.encode(rawBytes)
}
// Inverse of encodeNoteId: decode a url-safe base64 id back into the
// canonical dashed UUID string.
// Uses slice() instead of the deprecated (Annex B) String.prototype.substr.
Note.decodeNoteId = function (encodedId) {
  // decode from url-safe base64 to the 32 hex digits of the UUID
  let id = base64url.toBuffer(encodedId).toString('hex')
  // re-insert dashes between the five UUID groups (8-4-4-4-12)
  let idParts = [
    id.slice(0, 8),
    id.slice(8, 12),
    id.slice(12, 16),
    id.slice(16, 20),
    id.slice(20, 32)
  ]
  return idParts.join('-')
}
// Return true when id is a well-formed RFC 4122 UUID (versions 1-5),
// case-insensitive. regex.test() replaces the match()/length dance: the
// pattern has no capture groups, so the old `result.length === 1` check
// was equivalent to "matched at all".
Note.checkNoteIdValid = function (id) {
  var uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i
  return uuidRegex.test(id)
}
// Resolve an incoming note reference to a canonical note UUID.
// Tries, in order: filesystem-backed alias, legacy LZString id, base64url
// id, and shortid. Each parser either resolves by calling the outer
// `callback(null, id)` directly — NOTE(review): this short-circuits without
// completing the async.series, which appears intentional but means the
// series' final handler only runs when nothing matched or on error —
// or passes to the next parser via `_callback(null, null)`.
// callback signature: (err, noteId|null); null means "could not resolve".
Note.parseNoteId = function (noteId, callback) {
  async.series({
    parseNoteIdByAlias: function (_callback) {
      // try to parse note id by alias (e.g. doc)
      Note.findOne({
        where: {
          alias: noteId
        }
      }).then(function (note) {
        if (note) {
          let filePath = path.join(config.docsPath, noteId + '.md')
          if (Note.checkFileExist(filePath)) {
            // if doc in filesystem have newer modified time than last change time
            // then will update the doc in db
            var fsModifiedTime = moment(fs.statSync(filePath).mtime)
            var dbModifiedTime = moment(note.lastchangeAt || note.createdAt)
            var body = fs.readFileSync(filePath, 'utf8')
            var contentLength = body.length
            var title = Note.parseNoteTitle(body)
            if (fsModifiedTime.isAfter(dbModifiedTime) && note.content !== body) {
              note.update({
                title: title,
                content: body,
                lastchangeAt: fsModifiedTime
              }).then(function (note) {
                // snapshot the filesystem change as a revision, then rebuild
                // authorship ranges from the revision patch (author = null,
                // since the edit came from disk, not a user)
                sequelize.models.Revision.saveNoteRevision(note, function (err, revision) {
                  if (err) return _callback(err, null)
                  // update authorship on after making revision of docs
                  var patch = dmp.patch_fromText(revision.patch)
                  var operations = Note.transformPatchToOperations(patch, contentLength)
                  var authorship = note.authorship
                  for (let i = 0; i < operations.length; i++) {
                    authorship = Note.updateAuthorshipByOperation(operations[i], null, authorship)
                  }
                  note.update({
                    authorship: authorship
                  }).then(function (note) {
                    return callback(null, note.id)
                  }).catch(function (err) {
                    return _callback(err, null)
                  })
                })
              }).catch(function (err) {
                return _callback(err, null)
              })
            } else {
              // doc on disk is not newer (or unchanged): use the stored note
              return callback(null, note.id)
            }
          } else {
            // alias exists in db but has no backing file
            return callback(null, note.id)
          }
        } else {
          // no note with this alias yet: if a doc file exists, lazily create
          // a locked, ownerless note for it
          var filePath = path.join(config.docsPath, noteId + '.md')
          if (Note.checkFileExist(filePath)) {
            Note.create({
              alias: noteId,
              owner: null,
              permission: 'locked'
            }).then(function (note) {
              return callback(null, note.id)
            }).catch(function (err) {
              return _callback(err, null)
            })
          } else {
            return _callback(null, null)
          }
        }
      }).catch(function (err) {
        return _callback(err, null)
      })
    },
    // parse note id by LZString is deprecated, here for compatibility
    parseNoteIdByLZString: function (_callback) {
      // Calculate minimal string length for an UUID that is encoded
      // base64 encoded and optimize comparison by using -1
      // this should make a lot of LZ-String parsing errors obsolete
      // as we can assume that a nodeId that is 48 chars or longer is a
      // noteID.
      const base64UuidLength = ((4 * 36) / 3) - 1
      if (!(noteId.length > base64UuidLength)) {
        return _callback(null, null)
      }
      // try to parse note id by LZString Base64
      try {
        var id = LZString.decompressFromBase64(noteId)
        if (id && Note.checkNoteIdValid(id)) { return callback(null, id) } else { return _callback(null, null) }
      } catch (err) {
        // LZString throws this exact message on undecodable input; treat it
        // as a benign miss rather than an error
        if (err.message === 'Cannot read property \'charAt\' of undefined') {
          logger.warning('Looks like we can not decode "' + noteId + '" with LZString. Can be ignored.')
        } else {
          logger.error(err)
        }
        return _callback(null, null)
      }
    },
    parseNoteIdByBase64Url: function (_callback) {
      // try to parse note id by base64url
      try {
        var id = Note.decodeNoteId(noteId)
        if (id && Note.checkNoteIdValid(id)) { return callback(null, id) } else { return _callback(null, null) }
      } catch (err) {
        logger.error(err)
        return _callback(null, null)
      }
    },
    parseNoteIdByShortId: function (_callback) {
      // try to parse note id by shortId
      try {
        if (shortId.isValid(noteId)) {
          Note.findOne({
            where: {
              shortid: noteId
            }
          }).then(function (note) {
            if (!note) return _callback(null, null)
            return callback(null, note.id)
          }).catch(function (err) {
            return _callback(err, null)
          })
        } else {
          return _callback(null, null)
        }
      } catch (err) {
        return _callback(err, null)
      }
    }
  }, function (err, result) {
    // reached only when every parser missed, or one reported an error
    if (err) {
      logger.error(err)
      return callback(err, null)
    }
    return callback(null, null)
  })
}
// Render the note body once and extract both display title and tags from
// the front matter plus the rendered HTML.
Note.parseNoteInfo = function (body) {
  const extracted = Note.extractMeta(body)
  const dom = cheerio.load(md.render(extracted.markdown))
  return {
    title: Note.extractNoteTitle(extracted.meta, dom),
    tags: Note.extractNoteTags(extracted.meta, dom)
  }
}
// Extract only the display title from a raw note body
// (front matter first, rendered <h1> as fallback).
Note.parseNoteTitle = function (body) {
  const extracted = Note.extractMeta(body)
  const dom = cheerio.load(md.render(extracted.markdown))
  return Note.extractNoteTitle(extracted.meta, dom)
}
// Title precedence: scalar front-matter `title` → first single-line <h1>
// in the rendered HTML (tags stripped) → 'Untitled'.
Note.extractNoteTitle = function (meta, $) {
  if (meta.title && (typeof meta.title === 'string' || typeof meta.title === 'number')) {
    return meta.title
  }
  let title = ''
  const headings = $('h1')
  if (headings.length > 0) {
    const firstHeadingText = headings.first().text()
    // multi-line h1 text means the heading was synthesized oddly; skip it
    if (firstHeadingText.split('\n').length === 1) {
      title = S(firstHeadingText).stripTags().s
    }
  }
  return title || 'Untitled'
}
// Build a one-line description for meta tags: the first 100 characters of
// the markdown with every newline flavor (\r\n, \r, \n) collapsed to spaces.
// Uses slice() instead of the deprecated String.prototype.substr.
Note.generateDescription = function (markdown) {
  return markdown.slice(0, 100).replace(/(?:\r\n|\r|\n)/g, ' ')
}
// Fall back to the placeholder title when none was stored.
Note.decodeTitle = function (title) {
  if (title) {
    return title
  }
  return 'Untitled'
}
// Compose the HTML <title>: the site slogan for untitled notes,
// otherwise "<note title> - CodiMD".
Note.generateWebTitle = function (title) {
  if (!title || title === 'Untitled') {
    return 'CodiMD - Collaborative markdown notes'
  }
  return title + ' - CodiMD'
}
// Collect the note's tags, preserving first-seen order and dropping
// duplicates. Source precedence: scalar front-matter `tags` field
// (comma-separated) → <code> elements inside rendered <h6> headings whose
// text starts with "tags".
// Replaces the hand-rolled O(n^2) nested-loop dedupe with Array#includes
// and removes the shadowed `var text` declarations.
Note.extractNoteTags = function (meta, $) {
  const rawtags = []
  if (meta.tags && (typeof meta.tags === 'string' || typeof meta.tags === 'number')) {
    const metaTags = String(meta.tags).split(',')
    for (const metaTag of metaTags) {
      const text = metaTag.trim()
      if (text) rawtags.push(text)
    }
  } else {
    const h6s = $('h6')
    h6s.each(function (key, value) {
      // a fresh regex literal per heading, so the /g flag's lastIndex
      // state cannot leak between calls
      if (/^tags/gmi.test($(value).text())) {
        const codes = $(value).find('code')
        for (let i = 0; i < codes.length; i++) {
          const text = S($(codes[i]).text().trim()).stripTags().s
          if (text) rawtags.push(text)
        }
      }
    })
  }
  // de-duplicate while keeping the original order
  const tags = []
  for (const tag of rawtags) {
    if (!tags.includes(tag)) tags.push(tag)
  }
  return tags
}
// Split raw note content into { markdown, meta } via meta-marked,
// normalizing missing parts and tolerating broken front matter: on any
// parse error the whole content is treated as markdown with empty meta.
Note.extractMeta = function (content) {
  let obj
  try {
    obj = metaMarked(content)
    obj.markdown = obj.markdown || ''
    obj.meta = obj.meta || {}
  } catch (err) {
    obj = {
      markdown: content,
      meta: {}
    }
  }
  return obj
}
// Sanitize front-matter meta into the subset the app trusts: the scalar
// keys below are copied through only when they hold a string or number;
// slideOptions is copied only when it is an object. Everything else is
// dropped. Replaces five copy-pasted typeof checks with a whitelist loop.
Note.parseMeta = function (meta) {
  var _meta = {}
  if (meta) {
    const scalarKeys = ['title', 'description', 'robots', 'GA', 'disqus']
    for (const key of scalarKeys) {
      const value = meta[key]
      if (value && (typeof value === 'string' || typeof value === 'number')) {
        _meta[key] = value
      }
    }
    if (meta.slideOptions && (typeof meta.slideOptions === 'object')) {
      _meta.slideOptions = meta.slideOptions
    }
  }
  return _meta
}
// Apply one OT operation (a mix of retain / insert / delete components) to
// the note's authorship ranges, attributing inserted spans to userId.
// authorship format: [userId, startPos, endPos, createdAt, updatedAt].
// Mutates the entries of `authorships` in place (and may reassign the local
// binding for whole-document deletes); callers use the returned array.
// NOTE(review): the splice/index arithmetic below depends on exact
// statement order — ranges at or after an edit point are shifted in the
// same pass that inserts new spans.
Note.updateAuthorshipByOperation = function (operation, userId, authorships) {
  var index = 0
  var timestamp = Date.now()
  for (let i = 0; i < operation.length; i++) {
    var op = operation[i]
    if (ot.TextOperation.isRetain(op)) {
      // retain component: op is a character count; just advance the cursor
      index += op
    } else if (ot.TextOperation.isInsert(op)) {
      // insert component: op is the inserted string
      let opStart = index
      let opEnd = index + op.length
      var inserted = false
      // authorship format: [userId, startPos, endPos, createdAt, updatedAt]
      if (authorships.length <= 0) authorships.push([userId, opStart, opEnd, timestamp, timestamp])
      else {
        for (let j = 0; j < authorships.length; j++) {
          let authorship = authorships[j]
          if (!inserted) {
            // -1 acts as a sentinel for "no next range"
            let nextAuthorship = authorships[j + 1] || -1
            if ((nextAuthorship !== -1 && nextAuthorship[1] >= opEnd) || j >= authorships.length - 1) {
              if (authorship[1] < opStart && authorship[2] > opStart) {
                // divide: the insert lands inside this range, so split it
                // around the new span
                let postLength = authorship[2] - opStart
                authorship[2] = opStart
                authorship[4] = timestamp
                authorships.splice(j + 1, 0, [userId, opStart, opEnd, timestamp, timestamp])
                authorships.splice(j + 2, 0, [authorship[0], opEnd, opEnd + postLength, authorship[3], timestamp])
                j += 2
                inserted = true
              } else if (authorship[1] >= opStart) {
                // insert falls before this range: new span goes in front
                authorships.splice(j, 0, [userId, opStart, opEnd, timestamp, timestamp])
                j += 1
                inserted = true
              } else if (authorship[2] <= opStart) {
                // insert falls after this range: new span goes behind
                authorships.splice(j + 1, 0, [userId, opStart, opEnd, timestamp, timestamp])
                j += 1
                inserted = true
              }
            }
          }
          // shift every range that starts at/after the insert point
          if (authorship[1] >= opStart) {
            authorship[1] += op.length
            authorship[2] += op.length
          }
        }
      }
      index += op.length
    } else if (ot.TextOperation.isDelete(op)) {
      // delete component: op is a negative character count
      let opStart = index
      let opEnd = index - op
      if (operation.length === 1) {
        // the operation deletes the whole document: drop all authorship
        authorships = []
      } else if (authorships.length > 0) {
        for (let j = 0; j < authorships.length; j++) {
          let authorship = authorships[j]
          if (authorship[1] >= opStart && authorship[1] <= opEnd && authorship[2] >= opStart && authorship[2] <= opEnd) {
            // range fully inside the deleted region: remove it
            authorships.splice(j, 1)
            j -= 1
          } else if (authorship[1] < opStart && authorship[1] < opEnd && authorship[2] > opStart && authorship[2] > opEnd) {
            // deleted region fully inside the range: shrink it (op < 0)
            authorship[2] += op
            authorship[4] = timestamp
          } else if (authorship[2] >= opStart && authorship[2] <= opEnd) {
            // range's tail overlaps the deletion: truncate at opStart
            authorship[2] = opStart
            authorship[4] = timestamp
          } else if (authorship[1] >= opStart && authorship[1] <= opEnd) {
            // range's head overlaps the deletion: move start to opEnd
            authorship[1] = opEnd
            authorship[4] = timestamp
          }
          // shift ranges that sit entirely after the deleted region
          if (authorship[1] >= opEnd) {
            authorship[1] += op
            authorship[2] += op
          }
        }
      }
      index += op
    }
  }
  // merge: collapse adjacent ranges with the same author into one
  for (let j = 0; j < authorships.length; j++) {
    let authorship = authorships[j]
    for (let k = j + 1; k < authorships.length; k++) {
      let nextAuthorship = authorships[k]
      if (nextAuthorship && authorship[0] === nextAuthorship[0] && authorship[2] === nextAuthorship[1]) {
        let minTimestamp = Math.min(authorship[3], nextAuthorship[3])
        let maxTimestamp = Math.max(authorship[3], nextAuthorship[3])
        authorships.splice(j, 1, [authorship[0], authorship[1], nextAuthorship[2], minTimestamp, maxTimestamp])
        authorships.splice(k, 1)
        j -= 1
        break
      }
    }
  }
  // clear: drop ranges with no author (e.g. filesystem edits, userId null)
  for (let j = 0; j < authorships.length; j++) {
    let authorship = authorships[j]
    if (!authorship[0]) {
      authorships.splice(j, 1)
      j -= 1
    }
  }
  return authorships
}
Note.transformPatchToOperations = function (patch, contentLength) {
var operations = []
if (patch.length > 0) {
// calculate original content length
for (let j = patch.length - 1; j >= 0; j--) {
var p = patch[j]
for (let i = 0; i < p.diffs.length; i++) {
var diff = p.diffs[i]
switch (diff[0]) {
case 1: // insert
contentLength -= diff[1].length
break
case -1: // delete
contentLength += diff[1].length
break
}
}
}
// generate operations
var bias = 0
var lengthBias = 0
for (let j = 0; j < patch.length; j++) {
var operation = []
let p = patch[j]
var currIndex = p.start1
var currLength = contentLength - bias
for (let i = 0; i < p.diffs.length; i++) {
let diff = p.diffs[i]
switch (diff[0]) {
case 0: // retain
if (i === 0) {
// first
operation.push(currIndex + diff[1].length)
} else if (i !== p.diffs.length - 1) {
// mid
operation.push(diff[1].length)
} else {
// last
operation.push(currLength + lengthBias - currIndex)
}
currIndex += diff[1].length
break
case 1: // insert
operation.push(diff[1])
lengthBias += diff[1].length
currIndex += diff[1].length
break
case -1: // delete
operation.push(-diff[1].length)
bias += diff[1].length
currIndex += diff[1].length
break
}
}
operations.push(operation)
}
}
return operations
}
return Note return Note
} }

View File

@ -7,8 +7,9 @@ var childProcess = require('child_process')
var shortId = require('shortid') var shortId = require('shortid')
var path = require('path') var path = require('path')
var Op = Sequelize.Op
// core // core
var config = require('../config')
var logger = require('../logger') var logger = require('../logger')
var dmpWorker = createDmpWorker() var dmpWorker = createDmpWorker()
@ -18,7 +19,7 @@ function createDmpWorker () {
var worker = childProcess.fork(path.resolve(__dirname, '../workers/dmpWorker.js'), { var worker = childProcess.fork(path.resolve(__dirname, '../workers/dmpWorker.js'), {
stdio: 'ignore' stdio: 'ignore'
}) })
if (config.debug) logger.info('dmp worker process started') logger.debug('dmp worker process started')
worker.on('message', function (data) { worker.on('message', function (data) {
if (!data || !data.msg || !data.cacheKey) { if (!data || !data.msg || !data.cacheKey) {
return logger.error('dmp worker error: not enough data on message') return logger.error('dmp worker error: not enough data on message')
@ -36,7 +37,7 @@ function createDmpWorker () {
}) })
worker.on('close', function (code) { worker.on('close', function (code) {
dmpWorker = null dmpWorker = null
if (config.debug) logger.info('dmp worker process exited with code ' + code) logger.debug(`dmp worker process exited with code ${code}`)
}) })
return worker return worker
} }
@ -97,214 +98,212 @@ module.exports = function (sequelize, DataTypes) {
this.setDataValue('authorship', value ? JSON.stringify(value) : value) this.setDataValue('authorship', value ? JSON.stringify(value) : value)
} }
} }
}, { })
classMethods: {
associate: function (models) { Revision.associate = function (models) {
Revision.belongsTo(models.Note, { Revision.belongsTo(models.Note, {
foreignKey: 'noteId', foreignKey: 'noteId',
as: 'note', as: 'note',
constraints: false, constraints: false,
onDelete: 'CASCADE', onDelete: 'CASCADE',
hooks: true hooks: true
}) })
}
Revision.getNoteRevisions = function (note, callback) {
Revision.findAll({
where: {
noteId: note.id
}, },
getNoteRevisions: function (note, callback) { order: [['createdAt', 'DESC']]
Revision.findAll({ }).then(function (revisions) {
where: { var data = []
noteId: note.id for (var i = 0, l = revisions.length; i < l; i++) {
}, var revision = revisions[i]
order: [['createdAt', 'DESC']] data.push({
}).then(function (revisions) { time: moment(revision.createdAt).valueOf(),
var data = [] length: revision.length
for (var i = 0, l = revisions.length; i < l; i++) { })
var revision = revisions[i] }
data.push({ callback(null, data)
time: moment(revision.createdAt).valueOf(), }).catch(function (err) {
length: revision.length callback(err, null)
}) })
}
Revision.getPatchedNoteRevisionByTime = function (note, time, callback) {
// find all revisions to prepare for all possible calculation
Revision.findAll({
where: {
noteId: note.id
},
order: [['createdAt', 'DESC']]
}).then(function (revisions) {
if (revisions.length <= 0) return callback(null, null)
// measure target revision position
Revision.count({
where: {
noteId: note.id,
createdAt: {
[Op.gte]: time
} }
callback(null, data) },
}).catch(function (err) { order: [['createdAt', 'DESC']]
callback(err, null) }).then(function (count) {
}) if (count <= 0) return callback(null, null)
}, sendDmpWorker({
getPatchedNoteRevisionByTime: function (note, time, callback) { msg: 'get revision',
// find all revisions to prepare for all possible calculation revisions: revisions,
Revision.findAll({ count: count
where: { }, callback)
noteId: note.id }).catch(function (err) {
}, return callback(err, null)
order: [['createdAt', 'DESC']] })
}).then(function (revisions) { }).catch(function (err) {
if (revisions.length <= 0) return callback(null, null) return callback(err, null)
// measure target revision position })
Revision.count({ }
where: { Revision.checkAllNotesRevision = function (callback) {
noteId: note.id, Revision.saveAllNotesRevision(function (err, notes) {
createdAt: { if (err) return callback(err, null)
$gte: time if (!notes || notes.length <= 0) {
return callback(null, notes)
} else {
Revision.checkAllNotesRevision(callback)
}
})
}
Revision.saveAllNotesRevision = function (callback) {
sequelize.models.Note.findAll({
// query all notes that need to save for revision
where: {
[Op.and]: [
{
lastchangeAt: {
[Op.or]: {
[Op.eq]: null,
[Op.and]: {
[Op.ne]: null,
[Op.gt]: sequelize.col('createdAt')
}
} }
}, }
order: [['createdAt', 'DESC']] },
}).then(function (count) { {
if (count <= 0) return callback(null, null) savedAt: {
sendDmpWorker({ [Op.or]: {
msg: 'get revision', [Op.eq]: null,
revisions: revisions, [Op.lt]: sequelize.col('lastchangeAt')
count: count }
}, callback) }
}).catch(function (err) { }
return callback(err, null) ]
}) }
}).catch(function (err) { }).then(function (notes) {
return callback(err, null) if (notes.length <= 0) return callback(null, notes)
}) var savedNotes = []
}, async.each(notes, function (note, _callback) {
checkAllNotesRevision: function (callback) { // revision saving policy: note not been modified for 5 mins or not save for 10 mins
Revision.saveAllNotesRevision(function (err, notes) { if (note.lastchangeAt && note.savedAt) {
if (err) return callback(err, null) var lastchangeAt = moment(note.lastchangeAt)
if (!notes || notes.length <= 0) { var savedAt = moment(note.savedAt)
return callback(null, notes) if (moment().isAfter(lastchangeAt.add(5, 'minutes'))) {
savedNotes.push(note)
Revision.saveNoteRevision(note, _callback)
} else if (lastchangeAt.isAfter(savedAt.add(10, 'minutes'))) {
savedNotes.push(note)
Revision.saveNoteRevision(note, _callback)
} else { } else {
Revision.checkAllNotesRevision(callback) return _callback(null, null)
} }
}) } else {
savedNotes.push(note)
Revision.saveNoteRevision(note, _callback)
}
}, function (err) {
if (err) {
return callback(err, null)
}
// return null when no notes need saving at this moment but have delayed tasks to be done
var result = ((savedNotes.length === 0) && (notes.length > savedNotes.length)) ? null : savedNotes
return callback(null, result)
})
}).catch(function (err) {
return callback(err, null)
})
}
Revision.saveNoteRevision = function (note, callback) {
Revision.findAll({
where: {
noteId: note.id
}, },
saveAllNotesRevision: function (callback) { order: [['createdAt', 'DESC']]
sequelize.models.Note.findAll({ }).then(function (revisions) {
// query all notes that need to save for revision if (revisions.length <= 0) {
where: { // if no revision available
$and: [ Revision.create({
{ noteId: note.id,
lastchangeAt: { lastContent: note.content ? note.content : '',
$or: { length: note.content ? note.content.length : 0,
$eq: null, authorship: note.authorship
$and: { }).then(function (revision) {
$ne: null, Revision.finishSaveNoteRevision(note, revision, callback)
$gt: sequelize.col('createdAt')
}
}
}
},
{
savedAt: {
$or: {
$eq: null,
$lt: sequelize.col('lastchangeAt')
}
}
}
]
}
}).then(function (notes) {
if (notes.length <= 0) return callback(null, notes)
var savedNotes = []
async.each(notes, function (note, _callback) {
// revision saving policy: note not been modified for 5 mins or not save for 10 mins
if (note.lastchangeAt && note.savedAt) {
var lastchangeAt = moment(note.lastchangeAt)
var savedAt = moment(note.savedAt)
if (moment().isAfter(lastchangeAt.add(5, 'minutes'))) {
savedNotes.push(note)
Revision.saveNoteRevision(note, _callback)
} else if (lastchangeAt.isAfter(savedAt.add(10, 'minutes'))) {
savedNotes.push(note)
Revision.saveNoteRevision(note, _callback)
} else {
return _callback(null, null)
}
} else {
savedNotes.push(note)
Revision.saveNoteRevision(note, _callback)
}
}, function (err) {
if (err) {
return callback(err, null)
}
// return null when no notes need saving at this moment but have delayed tasks to be done
var result = ((savedNotes.length === 0) && (notes.length > savedNotes.length)) ? null : savedNotes
return callback(null, result)
})
}).catch(function (err) { }).catch(function (err) {
return callback(err, null) return callback(err, null)
}) })
}, } else {
saveNoteRevision: function (note, callback) { var latestRevision = revisions[0]
Revision.findAll({ var lastContent = latestRevision.content || latestRevision.lastContent
where: { var content = note.content
noteId: note.id sendDmpWorker({
}, msg: 'create patch',
order: [['createdAt', 'DESC']] lastDoc: lastContent,
}).then(function (revisions) { currDoc: content
if (revisions.length <= 0) { }, function (err, patch) {
// if no revision available if (err) logger.error('save note revision error', err)
Revision.create({ if (!patch) {
noteId: note.id, // if patch is empty (means no difference) then just update the latest revision updated time
lastContent: note.content ? note.content : '', latestRevision.changed('updatedAt', true)
length: note.content ? note.content.length : 0, latestRevision.update({
authorship: note.authorship updatedAt: Date.now()
}).then(function (revision) { }).then(function (revision) {
Revision.finishSaveNoteRevision(note, revision, callback) Revision.finishSaveNoteRevision(note, revision, callback)
}).catch(function (err) { }).catch(function (err) {
return callback(err, null) return callback(err, null)
}) })
} else { } else {
var latestRevision = revisions[0] Revision.create({
var lastContent = latestRevision.content || latestRevision.lastContent noteId: note.id,
var content = note.content patch: patch,
sendDmpWorker({ content: note.content,
msg: 'create patch', length: note.content.length,
lastDoc: lastContent, authorship: note.authorship
currDoc: content }).then(function (revision) {
}, function (err, patch) { // clear last revision content to reduce db size
if (err) logger.error('save note revision error', err) latestRevision.update({
if (!patch) { content: null
// if patch is empty (means no difference) then just update the latest revision updated time }).then(function () {
latestRevision.changed('updatedAt', true) Revision.finishSaveNoteRevision(note, revision, callback)
latestRevision.update({ }).catch(function (err) {
updatedAt: Date.now() return callback(err, null)
}).then(function (revision) { })
Revision.finishSaveNoteRevision(note, revision, callback) }).catch(function (err) {
}).catch(function (err) { return callback(err, null)
return callback(err, null)
})
} else {
Revision.create({
noteId: note.id,
patch: patch,
content: note.content,
length: note.content.length,
authorship: note.authorship
}).then(function (revision) {
// clear last revision content to reduce db size
latestRevision.update({
content: null
}).then(function () {
Revision.finishSaveNoteRevision(note, revision, callback)
}).catch(function (err) {
return callback(err, null)
})
}).catch(function (err) {
return callback(err, null)
})
}
}) })
} }
}).catch(function (err) {
return callback(err, null)
})
},
finishSaveNoteRevision: function (note, revision, callback) {
note.update({
savedAt: revision.updatedAt
}).then(function () {
return callback(null, revision)
}).catch(function (err) {
return callback(err, null)
}) })
} }
} }).catch(function (err) {
}) return callback(err, null)
})
}
Revision.finishSaveNoteRevision = function (note, revision, callback) {
note.update({
savedAt: revision.updatedAt
}).then(function () {
return callback(null, revision)
}).catch(function (err) {
return callback(err, null)
})
}
return Revision return Revision
} }

View File

@ -52,119 +52,118 @@ module.exports = function (sequelize, DataTypes) {
password: { password: {
type: Sequelize.TEXT type: Sequelize.TEXT
} }
}, {
instanceMethods: {
verifyPassword: function (attempt) {
return scrypt.verify(Buffer.from(this.password, 'hex'), attempt)
}
},
classMethods: {
associate: function (models) {
User.hasMany(models.Note, {
foreignKey: 'ownerId',
constraints: false
})
User.hasMany(models.Note, {
foreignKey: 'lastchangeuserId',
constraints: false
})
},
getProfile: function (user) {
if (!user) {
return null
}
return user.profile ? User.parseProfile(user.profile) : (user.email ? User.parseProfileByEmail(user.email) : null)
},
parseProfile: function (profile) {
try {
profile = JSON.parse(profile)
} catch (err) {
logger.error(err)
profile = null
}
if (profile) {
profile = {
name: profile.displayName || profile.username,
photo: User.parsePhotoByProfile(profile),
biggerphoto: User.parsePhotoByProfile(profile, true)
}
}
return profile
},
parsePhotoByProfile: function (profile, bigger) {
var photo = null
switch (profile.provider) {
case 'facebook':
photo = 'https://graph.facebook.com/' + profile.id + '/picture'
if (bigger) photo += '?width=400'
else photo += '?width=96'
break
case 'twitter':
photo = 'https://twitter.com/' + profile.username + '/profile_image'
if (bigger) photo += '?size=original'
else photo += '?size=bigger'
break
case 'github':
photo = 'https://avatars.githubusercontent.com/u/' + profile.id
if (bigger) photo += '?s=400'
else photo += '?s=96'
break
case 'gitlab':
photo = profile.avatarUrl
if (photo) {
if (bigger) photo = photo.replace(/(\?s=)\d*$/i, '$1400')
else photo = photo.replace(/(\?s=)\d*$/i, '$196')
} else {
photo = generateAvatarURL(profile.username)
}
break
case 'mattermost':
photo = profile.avatarUrl
if (photo) {
if (bigger) photo = photo.replace(/(\?s=)\d*$/i, '$1400')
else photo = photo.replace(/(\?s=)\d*$/i, '$196')
} else {
photo = generateAvatarURL(profile.username)
}
break
case 'dropbox':
photo = generateAvatarURL('', profile.emails[0].value, bigger)
break
case 'google':
photo = profile.photos[0].value
if (bigger) photo = photo.replace(/(\?sz=)\d*$/i, '$1400')
else photo = photo.replace(/(\?sz=)\d*$/i, '$196')
break
case 'ldap':
photo = generateAvatarURL(profile.username, profile.emails[0], bigger)
break
case 'saml':
photo = generateAvatarURL(profile.username, profile.emails[0], bigger)
break
default:
photo = generateAvatarURL(profile.username)
break
}
return photo
},
parseProfileByEmail: function (email) {
return {
name: email.substring(0, email.lastIndexOf('@')),
photo: generateAvatarURL('', email, false),
biggerphoto: generateAvatarURL('', email, true)
}
}
}
}) })
function updatePasswordHashHook (user, options, done) { User.prototype.verifyPassword = function (attempt) {
return scrypt.verify(Buffer.from(this.password, 'hex'), attempt)
}
User.associate = function (models) {
User.hasMany(models.Note, {
foreignKey: 'ownerId',
constraints: false
})
User.hasMany(models.Note, {
foreignKey: 'lastchangeuserId',
constraints: false
})
}
User.getProfile = function (user) {
if (!user) {
return null
}
return user.profile ? User.parseProfile(user.profile) : (user.email ? User.parseProfileByEmail(user.email) : null)
}
User.parseProfile = function (profile) {
try {
profile = JSON.parse(profile)
} catch (err) {
logger.error(err)
profile = null
}
if (profile) {
profile = {
name: profile.displayName || profile.username,
photo: User.parsePhotoByProfile(profile),
biggerphoto: User.parsePhotoByProfile(profile, true)
}
}
return profile
}
User.parsePhotoByProfile = function (profile, bigger) {
var photo = null
switch (profile.provider) {
case 'facebook':
photo = 'https://graph.facebook.com/' + profile.id + '/picture'
if (bigger) photo += '?width=400'
else photo += '?width=96'
break
case 'twitter':
photo = 'https://twitter.com/' + profile.username + '/profile_image'
if (bigger) photo += '?size=original'
else photo += '?size=bigger'
break
case 'github':
photo = 'https://avatars.githubusercontent.com/u/' + profile.id
if (bigger) photo += '?s=400'
else photo += '?s=96'
break
case 'gitlab':
photo = profile.avatarUrl
if (photo) {
if (bigger) photo = photo.replace(/(\?s=)\d*$/i, '$1400')
else photo = photo.replace(/(\?s=)\d*$/i, '$196')
} else {
photo = generateAvatarURL(profile.username)
}
break
case 'mattermost':
photo = profile.avatarUrl
if (photo) {
if (bigger) photo = photo.replace(/(\?s=)\d*$/i, '$1400')
else photo = photo.replace(/(\?s=)\d*$/i, '$196')
} else {
photo = generateAvatarURL(profile.username)
}
break
case 'dropbox':
photo = generateAvatarURL('', profile.emails[0].value, bigger)
break
case 'google':
photo = profile.photos[0].value
if (bigger) photo = photo.replace(/(\?sz=)\d*$/i, '$1400')
else photo = photo.replace(/(\?sz=)\d*$/i, '$196')
break
case 'ldap':
photo = generateAvatarURL(profile.username, profile.emails[0], bigger)
break
case 'saml':
photo = generateAvatarURL(profile.username, profile.emails[0], bigger)
break
default:
photo = generateAvatarURL(profile.username)
break
}
return photo
}
User.parseProfileByEmail = function (email) {
return {
name: email.substring(0, email.lastIndexOf('@')),
photo: generateAvatarURL('', email, false),
biggerphoto: generateAvatarURL('', email, true)
}
}
function updatePasswordHashHook (user, options) {
// suggested way to hash passwords to be able to do this asynchronously: // suggested way to hash passwords to be able to do this asynchronously:
// @see https://github.com/sequelize/sequelize/issues/1821#issuecomment-44265819 // @see https://github.com/sequelize/sequelize/issues/1821#issuecomment-44265819
if (!user.changed('password')) { return done() }
scrypt.kdf(user.getDataValue('password'), { logN: 15 }).then(keyBuf => { if (!user.changed('password')) {
return Promise.resolve()
}
return scrypt.kdf(user.getDataValue('password'), { logN: 15 }).then(keyBuf => {
user.setDataValue('password', keyBuf.toString('hex')) user.setDataValue('password', keyBuf.toString('hex'))
done()
}) })
} }

View File

@ -49,7 +49,7 @@ function secure (socket, next) {
if (handshakeData.sessionID && if (handshakeData.sessionID &&
handshakeData.cookie[config.sessionName] && handshakeData.cookie[config.sessionName] &&
handshakeData.cookie[config.sessionName] !== handshakeData.sessionID) { handshakeData.cookie[config.sessionName] !== handshakeData.sessionID) {
if (config.debug) { logger.info('AUTH success cookie: ' + handshakeData.sessionID) } logger.debug(`AUTH success cookie: ${handshakeData.sessionID}`)
return next() return next()
} else { } else {
next(new Error('AUTH failed: Cookie is invalid.')) next(new Error('AUTH failed: Cookie is invalid.'))
@ -82,7 +82,7 @@ setInterval(function () {
async.each(Object.keys(notes), function (key, callback) { async.each(Object.keys(notes), function (key, callback) {
var note = notes[key] var note = notes[key]
if (note.server.isDirty) { if (note.server.isDirty) {
if (config.debug) logger.info('updater found dirty note: ' + key) logger.debug(`updater found dirty note: ${key}`)
note.server.isDirty = false note.server.isDirty = false
updateNote(note, function (err, _note) { updateNote(note, function (err, _note) {
// handle when note already been clean up // handle when note already been clean up
@ -182,7 +182,7 @@ setInterval(function () {
var socket = realtime.io.sockets.connected[key] var socket = realtime.io.sockets.connected[key]
if ((!socket && users[key]) || if ((!socket && users[key]) ||
(socket && (!socket.rooms || socket.rooms.length <= 0))) { (socket && (!socket.rooms || socket.rooms.length <= 0))) {
if (config.debug) { logger.info('cleaner found redundant user: ' + key) } logger.debug(`cleaner found redundant user: ${key}`)
if (!socket) { if (!socket) {
socket = { socket = {
id: key id: key
@ -429,11 +429,11 @@ function finishConnection (socket, noteId, socketId) {
if (config.debug) { if (config.debug) {
let noteId = socket.noteId let noteId = socket.noteId
logger.info('SERVER connected a client to [' + noteId + ']:') logger.debug(`SERVER connected a client to [${noteId}]:`)
logger.info(JSON.stringify(user)) logger.debug(JSON.stringify(user))
// logger.info(notes); logger.debug(notes)
getStatus(function (data) { getStatus(function (data) {
logger.info(JSON.stringify(data)) logger.debug(JSON.stringify(data))
}) })
} }
} }
@ -541,10 +541,8 @@ function disconnect (socket) {
if (isDisconnectBusy) return if (isDisconnectBusy) return
isDisconnectBusy = true isDisconnectBusy = true
if (config.debug) { logger.debug('SERVER disconnected a client')
logger.info('SERVER disconnected a client') logger.debug(JSON.stringify(users[socket.id]))
logger.info(JSON.stringify(users[socket.id]))
}
if (users[socket.id]) { if (users[socket.id]) {
delete users[socket.id] delete users[socket.id]
@ -574,9 +572,9 @@ function disconnect (socket) {
delete note.server delete note.server
delete notes[noteId] delete notes[noteId]
if (config.debug) { if (config.debug) {
// logger.info(notes); logger.debug(notes)
getStatus(function (data) { getStatus(function (data) {
logger.info(JSON.stringify(data)) logger.debug(JSON.stringify(data))
}) })
} }
}) })
@ -595,9 +593,9 @@ function disconnect (socket) {
if (disconnectSocketQueue.length > 0) { disconnect(disconnectSocketQueue[0]) } if (disconnectSocketQueue.length > 0) { disconnect(disconnectSocketQueue[0]) }
if (config.debug) { if (config.debug) {
// logger.info(notes); logger.debug(notes)
getStatus(function (data) { getStatus(function (data) {
logger.info(JSON.stringify(data)) logger.debug(JSON.stringify(data))
}) })
} }
} }
@ -774,7 +772,7 @@ function connection (socket) {
var noteId = socket.noteId var noteId = socket.noteId
var user = users[socket.id] var user = users[socket.id]
if (!noteId || !notes[noteId] || !user) return if (!noteId || !notes[noteId] || !user) return
if (config.debug) { logger.info('SERVER received [' + noteId + '] user status from [' + socket.id + ']: ' + JSON.stringify(data)) } logger.debug(`SERVER received [${noteId}] user status from [${socket.id}]: ${JSON.stringify(data)}`)
if (data) { if (data) {
user.idle = data.idle user.idle = data.idle
user.type = data.type user.type = data.type

View File

@ -226,7 +226,8 @@ function showPublishNote (req, res, next) {
robots: meta.robots || false, // default allow robots robots: meta.robots || false, // default allow robots
GA: meta.GA, GA: meta.GA,
disqus: meta.disqus, disqus: meta.disqus,
cspNonce: res.locals.nonce cspNonce: res.locals.nonce,
dnt: req.headers.dnt
} }
return renderPublish(data, res) return renderPublish(data, res)
}).catch(function (err) { }).catch(function (err) {
@ -608,7 +609,8 @@ function showPublishSlide (req, res, next) {
robots: meta.robots || false, // default allow robots robots: meta.robots || false, // default allow robots
GA: meta.GA, GA: meta.GA,
disqus: meta.disqus, disqus: meta.disqus,
cspNonce: res.locals.nonce cspNonce: res.locals.nonce,
dnt: req.headers.dnt
} }
return renderPublishSlide(data, res) return renderPublishSlide(data, res)
}).catch(function (err) { }).catch(function (err) {

View File

@ -21,6 +21,8 @@ exports.getImageMimeType = function getImageMimeType (imagePath) {
return 'image/png' return 'image/png'
case 'tiff': case 'tiff':
return 'image/tiff' return 'image/tiff'
case 'svg':
return 'image/svg+xml'
default: default:
return undefined return undefined
} }

View File

@ -66,11 +66,11 @@ passport.use(new LDAPStrategy({
} }
if (needSave) { if (needSave) {
user.save().then(function () { user.save().then(function () {
if (config.debug) { logger.debug('user login: ' + user.id) } logger.debug(`user login: ${user.id}`)
return done(null, user) return done(null, user)
}) })
} else { } else {
if (config.debug) { logger.debug('user login: ' + user.id) } logger.debug(`user login: ${user.id}`)
return done(null, user) return done(null, user)
} }
} }

View File

@ -33,11 +33,11 @@ passport.use(new OpenIDStrategy({
} }
if (needSave) { if (needSave) {
user.save().then(function () { user.save().then(function () {
if (config.debug) { logger.info('user login: ' + user.id) } logger.debug(`user login: ${user.id}`)
return done(null, user) return done(null, user)
}) })
} else { } else {
if (config.debug) { logger.info('user login: ' + user.id) } logger.debug(`user login: ${user.id}`)
return done(null, user) return done(null, user)
} }
} }

View File

@ -62,11 +62,11 @@ passport.use(new SamlStrategy({
} }
if (needSave) { if (needSave) {
user.save().then(function () { user.save().then(function () {
if (config.debug) { logger.debug('user login: ' + user.id) } logger.debug(`user login: ${user.id}`)
return done(null, user) return done(null, user)
}) })
} else { } else {
if (config.debug) { logger.debug('user login: ' + user.id) } logger.debug(`user login: ${user.id}`)
return done(null, user) return done(null, user)
} }
} }

View File

@ -1,7 +1,6 @@
'use strict' 'use strict'
const models = require('../../models') const models = require('../../models')
const config = require('../../config')
const logger = require('../../logger') const logger = require('../../logger')
exports.setReturnToFromReferer = function setReturnToFromReferer (req) { exports.setReturnToFromReferer = function setReturnToFromReferer (req) {
@ -38,11 +37,11 @@ exports.passportGeneralCallback = function callback (accessToken, refreshToken,
} }
if (needSave) { if (needSave) {
user.save().then(function () { user.save().then(function () {
if (config.debug) { logger.info('user login: ' + user.id) } logger.debug(`user login: ${user.id}`)
return done(null, user) return done(null, user)
}) })
} else { } else {
if (config.debug) { logger.info('user login: ' + user.id) } logger.debug(`user login: ${user.id}`)
return done(null, user) return done(null, user)
} }
} }

View File

@ -7,13 +7,13 @@ const logger = require('../../logger')
const azure = require('azure-storage') const azure = require('azure-storage')
exports.uploadImage = function (imagePath, callback) { exports.uploadImage = function (imagePath, callback) {
if (!imagePath || typeof imagePath !== 'string') { if (!callback || typeof callback !== 'function') {
callback(new Error('Image path is missing or wrong'), null) logger.error('Callback has to be a function')
return return
} }
if (!callback || typeof callback !== 'function') { if (!imagePath || typeof imagePath !== 'string') {
logger.error('Callback has to be a function') callback(new Error('Image path is missing or wrong'), null)
return return
} }

View File

@ -6,15 +6,15 @@ const config = require('../../config')
const logger = require('../../logger') const logger = require('../../logger')
exports.uploadImage = function (imagePath, callback) { exports.uploadImage = function (imagePath, callback) {
if (!imagePath || typeof imagePath !== 'string') {
callback(new Error('Image path is missing or wrong'), null)
return
}
if (!callback || typeof callback !== 'function') { if (!callback || typeof callback !== 'function') {
logger.error('Callback has to be a function') logger.error('Callback has to be a function')
return return
} }
if (!imagePath || typeof imagePath !== 'string') {
callback(new Error('Image path is missing or wrong'), null)
return
}
callback(null, (new URL(path.basename(imagePath), config.serverURL + '/uploads/')).href) callback(null, (new URL(path.basename(imagePath), config.serverURL + '/uploads/')).href)
} }

View File

@ -5,22 +5,20 @@ const logger = require('../../logger')
const imgur = require('imgur') const imgur = require('imgur')
exports.uploadImage = function (imagePath, callback) { exports.uploadImage = function (imagePath, callback) {
if (!imagePath || typeof imagePath !== 'string') { if (!callback || typeof callback !== 'function') {
callback(new Error('Image path is missing or wrong'), null) logger.error('Callback has to be a function')
return return
} }
if (!callback || typeof callback !== 'function') { if (!imagePath || typeof imagePath !== 'string') {
logger.error('Callback has to be a function') callback(new Error('Image path is missing or wrong'), null)
return return
} }
imgur.setClientId(config.imgur.clientID) imgur.setClientId(config.imgur.clientID)
imgur.uploadFile(imagePath) imgur.uploadFile(imagePath)
.then(function (json) { .then(function (json) {
if (config.debug) { logger.debug(`SERVER uploadimage success: ${JSON.stringify(json)}`)
logger.info('SERVER uploadimage success: ' + JSON.stringify(json))
}
callback(null, json.data.link.replace(/^http:\/\//i, 'https://')) callback(null, json.data.link.replace(/^http:\/\//i, 'https://'))
}).catch(function (err) { }).catch(function (err) {
callback(new Error(err), null) callback(new Error(err), null)

View File

@ -21,18 +21,19 @@ imageRouter.post('/uploadimage', function (req, res) {
form.parse(req, function (err, fields, files) { form.parse(req, function (err, fields, files) {
if (err || !files.image || !files.image.path) { if (err || !files.image || !files.image.path) {
logger.error(`formidable error: ${err}`)
response.errorForbidden(res) response.errorForbidden(res)
} else { } else {
if (config.debug) { logger.debug(`SERVER received uploadimage: ${JSON.stringify(files.image)}`)
logger.info('SERVER received uploadimage: ' + JSON.stringify(files.image))
}
const uploadProvider = require('./' + config.imageUploadType) const uploadProvider = require('./' + config.imageUploadType)
logger.debug(`imageRouter: Uploading ${files.image.path} using ${config.imageUploadType}`)
uploadProvider.uploadImage(files.image.path, function (err, url) { uploadProvider.uploadImage(files.image.path, function (err, url) {
if (err !== null) { if (err !== null) {
logger.error(err) logger.error(err)
return res.status(500).end('upload image error') return res.status(500).end('upload image error')
} }
logger.debug(`SERVER sending ${url} to client`)
res.send({ res.send({
link: url link: url
}) })

View File

@ -5,25 +5,24 @@ const logger = require('../../logger')
const lutim = require('lutim') const lutim = require('lutim')
exports.uploadImage = function (imagePath, callback) { exports.uploadImage = function (imagePath, callback) {
if (!imagePath || typeof imagePath !== 'string') {
callback(new Error('Image path is missing or wrong'), null)
return
}
if (!callback || typeof callback !== 'function') { if (!callback || typeof callback !== 'function') {
logger.error('Callback has to be a function') logger.error('Callback has to be a function')
return return
} }
if (!imagePath || typeof imagePath !== 'string') {
callback(new Error('Image path is missing or wrong'), null)
return
}
if (config.lutim && config.lutim.url) { if (config.lutim && config.lutim.url) {
lutim.setAPIUrl(config.lutim.url) lutim.setAPIUrl(config.lutim.url)
logger.debug(`Set lutim URL to ${lutim.getApiUrl()}`)
} }
lutim.uploadImage(imagePath) lutim.uploadImage(imagePath)
.then(function (json) { .then(function (json) {
if (config.debug) { logger.debug(`SERVER uploadimage success: ${JSON.stringify(json)}`)
logger.info('SERVER uploadimage success: ' + JSON.stringify(json))
}
callback(null, lutim.getAPIUrl() + json.msg.short) callback(null, lutim.getAPIUrl() + json.msg.short)
}).catch(function (err) { }).catch(function (err) {
callback(new Error(err), null) callback(new Error(err), null)

View File

@ -35,6 +35,7 @@ exports.uploadImage = function (imagePath, callback) {
const mimeType = getImageMimeType(imagePath) const mimeType = getImageMimeType(imagePath)
if (mimeType) { params.ContentType = mimeType } if (mimeType) { params.ContentType = mimeType }
logger.debug(`S3 object parameters: ${JSON.stringify(params)}`)
s3.putObject(params, function (err, data) { s3.putObject(params, function (err, data) {
if (err) { if (err) {
callback(new Error(err), null) callback(new Error(err), null)

View File

@ -68,9 +68,7 @@ statusRouter.post('/temp', urlencodedParser, function (req, res) {
if (!data) { if (!data) {
response.errorForbidden(res) response.errorForbidden(res)
} else { } else {
if (config.debug) { logger.debug(`SERVER received temp from [${host}]: ${req.body.data}`)
logger.info('SERVER received temp from [' + host + ']: ' + req.body.data)
}
models.Temp.create({ models.Temp.create({
data: data data: data
}).then(function (temp) { }).then(function (temp) {

View File

@ -4,7 +4,6 @@ var DiffMatchPatch = require('diff-match-patch')
var dmp = new DiffMatchPatch() var dmp = new DiffMatchPatch()
// core // core
var config = require('../config')
var logger = require('../logger') var logger = require('../logger')
process.on('message', function (data) { process.on('message', function (data) {
@ -61,10 +60,8 @@ function createPatch (lastDoc, currDoc) {
var patch = dmp.patch_make(lastDoc, diff) var patch = dmp.patch_make(lastDoc, diff)
patch = dmp.patch_toText(patch) patch = dmp.patch_toText(patch)
var msEnd = (new Date()).getTime() var msEnd = (new Date()).getTime()
if (config.debug) { logger.debug(patch)
logger.info(patch) logger.debug((msEnd - msStart) + 'ms')
logger.info((msEnd - msStart) + 'ms')
}
return patch return patch
} }
@ -123,9 +120,7 @@ function getRevision (revisions, count) {
authorship: authorship authorship: authorship
} }
var msEnd = (new Date()).getTime() var msEnd = (new Date()).getTime()
if (config.debug) { logger.debug((msEnd - msStart) + 'ms')
logger.info((msEnd - msStart) + 'ms')
}
return data return data
} }

View File

@ -1,12 +1,12 @@
{ {
"Collaborative markdown notes": "Gemeinschaftliche Markdown Notizen", "Collaborative markdown notes": "Gemeinschaftliche Markdown Notizen",
"Realtime collaborative markdown notes on all platforms.": "Gemeinschaftliche Notizen in Echtzeit auf allen Plattformen.", "Realtime collaborative markdown notes on all platforms.": "Gemeinschaftliche Notizen in Echtzeit auf allen Plattformen.",
"Best way to write and share your knowledge in markdown.": "Der beste Weg, Notizen zu schreiben und teilen.", "Best way to write and share your knowledge in markdown.": "Der beste Weg, Notizen zu schreiben und teilen.",
"Intro": "Intro", "Intro": "Einleitung",
"History": "Verlauf", "History": "Verlauf",
"New guest note": "Neue Gast Notiz", "New guest note": "Neue Gastnotiz",
"Collaborate with URL": "Zusammenarbeiten mit URL", "Collaborate with URL": "Zusammenarbeiten mit URL",
"Support charts and MathJax": "Unterstützt charts und Mathjax", "Support charts and MathJax": "Unterstützt Charts und MathJax",
"Support slide mode": "Unterstützt Präsentationsmodus", "Support slide mode": "Unterstützt Präsentationsmodus",
"Sign In": "Einloggen", "Sign In": "Einloggen",
"Below is the history from browser": "Lokaler Browserverlauf", "Below is the history from browser": "Lokaler Browserverlauf",
@ -28,7 +28,7 @@
"No history": "Kein Verlauf", "No history": "Kein Verlauf",
"Import from browser": "Vom Browser importieren", "Import from browser": "Vom Browser importieren",
"Releases": "Versionen", "Releases": "Versionen",
"Are you sure?": "Sind sie sicher?", "Are you sure?": "Sind Sie sicher?",
"Do you really want to delete this note?": "Möchten Sie diese Notiz wirklich löschen?", "Do you really want to delete this note?": "Möchten Sie diese Notiz wirklich löschen?",
"All users will lose their connection.": "Alle Benutzer werden getrennt.", "All users will lose their connection.": "Alle Benutzer werden getrennt.",
"Cancel": "Abbrechen", "Cancel": "Abbrechen",
@ -52,8 +52,8 @@
"Upload Image": "Foto hochladen", "Upload Image": "Foto hochladen",
"Menu": "Menü", "Menu": "Menü",
"This page need refresh": "Bitte laden Sie die Seite neu", "This page need refresh": "Bitte laden Sie die Seite neu",
"You have an incompatible client version.": "Ihre Client Version ist nicht mit dem Server kompatibel", "You have an incompatible client version.": "Ihre Client-Version ist nicht mit dem Server kompatibel",
"Refresh to update.": "Neu laden zum Updaten.", "Refresh to update.": "Neu laden zum aktualisieren.",
"New version available!": "Neue Version verfügbar.", "New version available!": "Neue Version verfügbar.",
"See releases notes here": "Versionshinweise", "See releases notes here": "Versionshinweise",
"Refresh to enjoy new features.": "Neu laden für neue Funktionen", "Refresh to enjoy new features.": "Neu laden für neue Funktionen",
@ -66,15 +66,15 @@
"Send us email": "Kontakt", "Send us email": "Kontakt",
"Documents": "Dokumente", "Documents": "Dokumente",
"Features": "Funktionen", "Features": "Funktionen",
"YAML Metadata": "YAML Metadaten", "YAML Metadata": "YAML-Metadaten",
"Slide Example": "Beispiel Präsentation", "Slide Example": "Beispiel-Präsentation",
"Cheatsheet": "Cheatsheet", "Cheatsheet": "Cheatsheet",
"Example": "Beispiel", "Example": "Beispiel",
"Syntax": "Syntax", "Syntax": "Syntax",
"Header": "Überschrift", "Header": "Überschrift",
"Unordered List": "Stichpunkte", "Unordered List": "Stichpunkte",
"Ordered List": "Nummeriert", "Ordered List": "Nummeriert",
"Todo List": "To Do Liste", "Todo List": "To-do-Liste",
"Blockquote": "Zitat", "Blockquote": "Zitat",
"Bold font": "Fett", "Bold font": "Fett",
"Italics font": "Kursiv", "Italics font": "Kursiv",
@ -92,12 +92,12 @@
"Clear": "Zurücksetzen", "Clear": "Zurücksetzen",
"This note is locked": "Diese Notiz ist gesperrt", "This note is locked": "Diese Notiz ist gesperrt",
"Sorry, only owner can edit this note.": "Entschuldigung, nur der Besitzer darf die Notiz bearbeiten.", "Sorry, only owner can edit this note.": "Entschuldigung, nur der Besitzer darf die Notiz bearbeiten.",
"OK": "Ok", "OK": "OK",
"Reach the limit": "Limit erreicht", "Reach the limit": "Limit erreicht",
"Sorry, you've reached the max length this note can be.": "Entschuldigung, die maximale Länge der Notiz ist erreicht.", "Sorry, you've reached the max length this note can be.": "Entschuldigung, die maximale Länge der Notiz ist erreicht.",
"Please reduce the content or divide it to more notes, thank you!": "Bitte reduzieren Sie den Inhalt oder nutzen zwei Notizen, danke.", "Please reduce the content or divide it to more notes, thank you!": "Bitte reduzieren Sie den Inhalt oder nutzen zwei Notizen, danke.",
"Import from Gist": "Aus GitHub Gist importieren", "Import from Gist": "Aus GitHub Gist importieren",
"Paste your gist url here...": "gist URL hier einfügen ...", "Paste your gist url here...": "Gist URL hier einfügen ...",
"Import from Snippet": "Aus Snippet importieren", "Import from Snippet": "Aus Snippet importieren",
"Select From Available Projects": "Aus verfügbaren Projekten wählen", "Select From Available Projects": "Aus verfügbaren Projekten wählen",
"Select From Available Snippets": "Aus verfügbaren Snippets wählen", "Select From Available Snippets": "Aus verfügbaren Snippets wählen",
@ -106,16 +106,16 @@
"Select Visibility Level": "Sichtbarkeit bestimmen", "Select Visibility Level": "Sichtbarkeit bestimmen",
"Night Theme": "Nachtmodus", "Night Theme": "Nachtmodus",
"Follow us on %s and %s.": "Folge uns auf %s und %s.", "Follow us on %s and %s.": "Folge uns auf %s und %s.",
"Privacy": "Privatsphäre", "Privacy": "Datenschutz",
"Terms of Use": "Nutzungsbedingungen", "Terms of Use": "Nutzungsbedingungen",
"Do you really want to delete your user account?": "Möchten Sie wirklich Ihr Nutzeraccount löschen?", "Do you really want to delete your user account?": "Möchten Sie wirklich Ihr Nutzerkonto löschen?",
"This will delete your account, all notes that are owned by you and remove all references to your account from other notes.": "Hiermit löschen Sie Ihren Account, alle Ihre Dokumente und alle Verweise auf Ihren Account aus anderen Dokumenten.", "This will delete your account, all notes that are owned by you and remove all references to your account from other notes.": "Hiermit löschen Sie Ihr Konto, alle Ihre Dokumente und alle Verweise auf Ihr Konto aus anderen Dokumenten.",
"Delete user": "Benutzer löschen", "Delete user": "Benutzer löschen",
"Export user data": "Exportiere Nutzerdaten", "Export user data": "Exportiere Nutzerdaten",
"Help us translating on %s": "Hilf uns übersetzen auf %s", "Help us translating on %s": "Hilf uns bei der Übersetzung auf %s",
"Source Code": "Quelltext", "Source Code": "Quelltext",
"Register": "Registrieren", "Register": "Registrieren",
"Powered by %s": "Ermöglicht durch %s", "Powered by %s": "Ermöglicht durch %s",
"Help us translating": "Hilf uns übersetzen", "Help us translating": "Hilf uns beim Übersetzen",
"Join the community": "Tritt der Community bei" "Join the community": "Tritt der Community bei"
} }

View File

@ -62,7 +62,7 @@
"Refresh": "Ladda om", "Refresh": "Ladda om",
"Contacts": "Kontakter", "Contacts": "Kontakter",
"Report an issue": "Rapportera ett fel", "Report an issue": "Rapportera ett fel",
"Meet us on %s": "Träffa oss på% s", "Meet us on %s": "Träffa oss på %s",
"Send us email": "Skicka e-post till oss", "Send us email": "Skicka e-post till oss",
"Documents": "Dokument", "Documents": "Dokument",
"Features": "Funktioner", "Features": "Funktioner",

View File

@ -31,7 +31,7 @@
"codemirror": "git+https://github.com/hackmdio/CodeMirror.git", "codemirror": "git+https://github.com/hackmdio/CodeMirror.git",
"compression": "^1.6.2", "compression": "^1.6.2",
"connect-flash": "^0.1.1", "connect-flash": "^0.1.1",
"connect-session-sequelize": "^4.1.0", "connect-session-sequelize": "^6.0.0",
"cookie": "0.3.1", "cookie": "0.3.1",
"cookie-parser": "1.4.3", "cookie-parser": "1.4.3",
"deep-freeze": "^0.0.1", "deep-freeze": "^0.0.1",
@ -82,7 +82,7 @@
"markdown-pdf": "^9.0.0", "markdown-pdf": "^9.0.0",
"mathjax": "~2.7.0", "mathjax": "~2.7.0",
"mattermost": "^3.4.0", "mattermost": "^3.4.0",
"mermaid": "~7.1.0", "mermaid": "~8.2.3",
"meta-marked": "git+https://github.com/codimd/meta-marked#semver:^0.4.2", "meta-marked": "git+https://github.com/codimd/meta-marked#semver:^0.4.2",
"method-override": "^2.3.7", "method-override": "^2.3.7",
"minimist": "^1.2.0", "minimist": "^1.2.0",
@ -114,8 +114,7 @@
"scrypt-async": "^2.0.1", "scrypt-async": "^2.0.1",
"scrypt-kdf": "^2.0.1", "scrypt-kdf": "^2.0.1",
"select2": "^3.5.2-browserify", "select2": "^3.5.2-browserify",
"sequelize": "^3.28.0", "sequelize": "^5.8.12",
"sequelize-cli": "^2.5.1",
"shortid": "2.2.8", "shortid": "2.2.8",
"socket.io": "~2.1.1", "socket.io": "~2.1.1",
"socket.io-client": "~2.1.1", "socket.io-client": "~2.1.1",
@ -195,6 +194,7 @@
"mocha": "^5.2.0", "mocha": "^5.2.0",
"mock-require": "^3.0.3", "mock-require": "^3.0.3",
"optimize-css-assets-webpack-plugin": "^5.0.0", "optimize-css-assets-webpack-plugin": "^5.0.0",
"sequelize-cli": "^5.4.0",
"script-loader": "^0.7.2", "script-loader": "^0.7.2",
"string-loader": "^0.0.1", "string-loader": "^0.0.1",
"style-loader": "^0.21.0", "style-loader": "^0.21.0",

View File

@ -63,7 +63,7 @@
</div> </div>
</div> </div>
<div id="ui-toc-affix" class="ui-affix-toc ui-toc-dropdown unselectable hidden-print" data-spy="affix" style="display:none;"></div> <div id="ui-toc-affix" class="ui-affix-toc ui-toc-dropdown unselectable hidden-print" data-spy="affix" style="display:none;"></div>
<% if(typeof disqus !== 'undefined' && disqus) { %> <% if(typeof disqus !== 'undefined' && disqus && !dnt) { %>
<div class="container-fluid" style="max-width: 758px; margin-bottom: 40px;"> <div class="container-fluid" style="max-width: 758px; margin-bottom: 40px;">
<%- include shared/disqus %> <%- include shared/disqus %>
</div> </div>

View File

@ -1,4 +1,4 @@
<% if(typeof GA !== 'undefined' && GA) { %> <% if(typeof GA !== 'undefined' && GA && !dnt) { %>
<script nonce="<%= cspNonce %>"> <script nonce="<%= cspNonce %>">
(function (i, s, o, g, r, a, m) { (function (i, s, o, g, r, a, m) {
i['GoogleAnalyticsObject'] = r; i['GoogleAnalyticsObject'] = r;

View File

@ -78,7 +78,7 @@
<% } %> <% } %>
</small> </small>
</div> </div>
<% if(typeof disqus !== 'undefined' && disqus) { %> <% if(typeof disqus !== 'undefined' && disqus && !dnt) { %>
<div class="slides-disqus"> <div class="slides-disqus">
<%- include shared/disqus %> <%- include shared/disqus %>
</div> </div>

3680
yarn.lock

File diff suppressed because it is too large Load Diff