fix: upgrade sequelize to latest version to fix CVE
Signed-off-by: BoHong Li <a60814billy@gmail.com>
parent 02929cd4bf
commit 63c96e7359
6 changed files with 796 additions and 789 deletions
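At its core, the change ports the models from the Sequelize 3 API to Sequelize 5: the classMethods / instanceMethods options of define() (dropped upstream in Sequelize 4) become plain assignments on the model and its prototype, string operator aliases such as $gte become Op symbols, and model hooks return Promises instead of calling a done callback. A minimal sketch of that attachment pattern, using an illustrative model that is not part of this diff:

const Sequelize = require('sequelize')
const sequelize = new Sequelize('sqlite::memory:') // assumes a sqlite driver is installed

// Sequelize 3 style (what the old code below does):
//   sequelize.define('Thing', { name: Sequelize.TEXT }, {
//     classMethods: { findByName: function (name) { /* ... */ } },
//     instanceMethods: { label: function () { return this.name } }
//   })

// Sequelize 5 style (what this commit switches to):
const Thing = sequelize.define('Thing', { name: Sequelize.TEXT })
Thing.findByName = function (name) {
  return Thing.findOne({ where: { name: name } })
}
Thing.prototype.label = function () {
  return this.name
}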
@@ -18,25 +18,25 @@ module.exports = function (sequelize, DataTypes) {
         unique: true,
         fields: ['noteId', 'userId']
       }
-    ],
-    classMethods: {
-      associate: function (models) {
-        Author.belongsTo(models.Note, {
-          foreignKey: 'noteId',
-          as: 'note',
-          constraints: false,
-          onDelete: 'CASCADE',
-          hooks: true
-        })
-        Author.belongsTo(models.User, {
-          foreignKey: 'userId',
-          as: 'user',
-          constraints: false,
-          onDelete: 'CASCADE',
-          hooks: true
-        })
-      }
-    }
+    ]
   })
+
+  Author.associate = function (models) {
+    Author.belongsTo(models.Note, {
+      foreignKey: 'noteId',
+      as: 'note',
+      constraints: false,
+      onDelete: 'CASCADE',
+      hooks: true
+    })
+    Author.belongsTo(models.User, {
+      foreignKey: 'userId',
+      as: 'user',
+      constraints: false,
+      onDelete: 'CASCADE',
+      hooks: true
+    })
+  }
+
   return Author
 }
@@ -10,7 +10,9 @@ var config = require('../config')
 var logger = require('../logger')
 
 var dbconfig = cloneDeep(config.db)
-dbconfig.logging = config.debug ? logger.info : false
+dbconfig.logging = config.debug ? (data) => {
+  logger.info(data)
+} : false
 
 var sequelize = null
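The logging option is wrapped in a function here; Sequelize invokes it with the generated SQL (and, depending on options, extra arguments such as query timing), so forwarding only the first argument keeps the logger call clean. A small stand-alone sketch of the same idea, with stand-ins for config.debug and logger:

const Sequelize = require('sequelize')

const debug = process.env.DEBUG === 'true' // stand-in for config.debug
const log = (data) => console.log('[sql]', data) // stand-in for logger.info

const sequelize = new Sequelize('sqlite::memory:', {
  logging: debug ? (data) => { log(data) } : false
})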
The Note model drops its classMethods option; the method bodies are unchanged and are re-attached as statics on Note after the define() call (see below), while the beforeCreate/afterCreate hooks switch from callbacks to Promises:

@@ -86,486 +86,492 @@ module.exports = function (sequelize, DataTypes) {
     }
   }, {
     paranoid: false,
-    classMethods: {
-      associate: function (models) { ... },
-      checkFileExist: function (filePath) { ... },
-      encodeNoteId: function (id) { ... },
-      decodeNoteId: function (encodedId) { ... },
-      checkNoteIdValid: function (id) { ... },
-      parseNoteId: function (noteId, callback) { ... },
-      parseNoteInfo: function (body) { ... },
-      parseNoteTitle: function (body) { ... },
-      extractNoteTitle: function (meta, $) { ... },
-      generateDescription: function (markdown) { ... },
-      decodeTitle: function (title) { ... },
-      generateWebTitle: function (title) { ... },
-      extractNoteTags: function (meta, $) { ... },
-      extractMeta: function (content) { ... },
-      parseMeta: function (meta) { ... },
-      updateAuthorshipByOperation: function (operation, userId, authorships) { ... },
-      transformPatchToOperations: function (patch, contentLength) { ... }
-    },
     hooks: {
-      beforeCreate: function (note, options, callback) {
-        // if no content specified then use default note
-        if (!note.content) {
-          var body = null
-          let filePath = null
-          if (!note.alias) {
-            filePath = config.defaultNotePath
-          } else {
-            filePath = path.join(config.docsPath, note.alias + '.md')
-          }
-          if (Note.checkFileExist(filePath)) {
-            var fsCreatedTime = moment(fs.statSync(filePath).ctime)
-            body = fs.readFileSync(filePath, 'utf8')
-            note.title = Note.parseNoteTitle(body)
-            note.content = body
-            if (filePath !== config.defaultNotePath) {
-              note.createdAt = fsCreatedTime
-            }
-          }
-        }
-        // if no permission specified and have owner then give default permission in config, else default permission is freely
-        if (!note.permission) {
-          if (note.ownerId) {
-            note.permission = config.defaultPermission
-          } else {
-            note.permission = 'freely'
-          }
-        }
-        return callback(null, note)
+      beforeCreate: function (note, options) {
+        return new Promise(function (resolve, reject) {
+          // if no content specified then use default note
+          if (!note.content) {
+            var body = null
+            let filePath = null
+            if (!note.alias) {
+              filePath = config.defaultNotePath
+            } else {
+              filePath = path.join(config.docsPath, note.alias + '.md')
+            }
+            if (Note.checkFileExist(filePath)) {
+              var fsCreatedTime = moment(fs.statSync(filePath).ctime)
+              body = fs.readFileSync(filePath, 'utf8')
+              note.title = Note.parseNoteTitle(body)
+              note.content = body
+              if (filePath !== config.defaultNotePath) {
+                note.createdAt = fsCreatedTime
+              }
+            }
+          }
+          // if no permission specified and have owner then give default permission in config, else default permission is freely
+          if (!note.permission) {
+            if (note.ownerId) {
+              note.permission = config.defaultPermission
+            } else {
+              note.permission = 'freely'
+            }
+          }
+          return resolve(note)
+        })
       },
       afterCreate: function (note, options, callback) {
-        sequelize.models.Revision.saveNoteRevision(note, function (err, revision) {
-          callback(err, note)
+        return new Promise(function (resolve, reject) {
+          sequelize.models.Revision.saveNoteRevision(note, function (err, revision) {
+            if (err) {
+              return reject(err)
+            }
+            return resolve(note)
+          })
         })
       }
     }
   })
The former classMethods re-appear as statics with their bodies carried over verbatim:

+  Note.associate = function (models) { ... }
+  Note.checkFileExist = function (filePath) { ... }
+  Note.encodeNoteId = function (id) { ... }
+  Note.decodeNoteId = function (encodedId) { ... }
+  Note.checkNoteIdValid = function (id) { ... }
+  Note.parseNoteId = function (noteId, callback) { ... }
+  Note.parseNoteInfo = function (body) { ... }
+  Note.parseNoteTitle = function (body) { ... }
+  Note.extractNoteTitle = function (meta, $) { ... }
+  Note.generateDescription = function (markdown) { ... }
+  Note.decodeTitle = function (title) { ... }
+  Note.generateWebTitle = function (title) { ... }
+  Note.extractNoteTags = function (meta, $) { ... }
+  Note.extractMeta = function (content) { ... }
+  Note.parseMeta = function (meta) { ... }
+  Note.updateAuthorshipByOperation = function (operation, userId, authorships) { ... }
+  Note.transformPatchToOperations = function (patch, contentLength) { ... }
+
   return Note
 }
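The hook rewrite above follows the contract newer Sequelize versions expect: a hook either returns a Promise (which is awaited) or returns synchronously, and a rejection aborts the operation. A hedged, stand-alone sketch of that pattern with an illustrative model, not CodiMD code:

const Sequelize = require('sequelize')
const sequelize = new Sequelize('sqlite::memory:')

const Doc = sequelize.define('Doc', { content: Sequelize.TEXT })

Doc.addHook('beforeCreate', function (doc, options) {
  return new Promise(function (resolve, reject) {
    if (!doc.content) doc.content = '' // fill in a default, like the beforeCreate hook above
    resolve(doc)
  })
})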
@@ -7,6 +7,8 @@ var childProcess = require('child_process')
 var shortId = require('shortid')
 var path = require('path')
 
+var Op = Sequelize.Op
+
 // core
 var logger = require('../logger')
The Revision model gets the same treatment: the classMethods wrapper is removed and the methods are re-attached as statics, with their bodies unchanged apart from the operator syntax.

@@ -96,214 +98,212 @@ module.exports = function (sequelize, DataTypes) {
       this.setDataValue('authorship', value ? JSON.stringify(value) : value)
     }
   }
-  }, {
-    classMethods: {
-      associate: function (models) { ... },
-      getNoteRevisions: function (note, callback) { ... },
-      getPatchedNoteRevisionByTime: function (note, time, callback) { ... },
-      checkAllNotesRevision: function (callback) { ... },
-      saveAllNotesRevision: function (callback) { ... },
-      saveNoteRevision: function (note, callback) { ... },
-      finishSaveNoteRevision: function (note, revision, callback) { ... }
-    }
-  })
+  })
+
+  Revision.associate = function (models) { ... }
+  Revision.getNoteRevisions = function (note, callback) { ... }
+  Revision.getPatchedNoteRevisionByTime = function (note, time, callback) { ... }
+  Revision.checkAllNotesRevision = function (callback) { ... }
+  Revision.saveAllNotesRevision = function (callback) { ... }
+  Revision.saveNoteRevision = function (note, callback) { ... }
+  Revision.finishSaveNoteRevision = function (note, revision, callback) { ... }
 
   return Revision
 }

Inside the moved bodies the only changes are the operator aliases, e.g. in getPatchedNoteRevisionByTime:

-        createdAt: {
-          $gte: time
-        }
+        createdAt: {
+          [Op.gte]: time
+        }

and in saveAllNotesRevision, where $and / $or / $eq / $ne / $gt / $lt become [Op.and] / [Op.or] / [Op.eq] / [Op.ne] / [Op.gt] / [Op.lt].
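The operator change is required because Sequelize 5 no longer accepts the string aliases ($gte, $and, $or, ...) that the Sequelize 3 queries used; the Op symbols take their place. For example:

const Sequelize = require('sequelize')
const Op = Sequelize.Op

const time = new Date(Date.now() - 60 * 1000)

// Sequelize 3:  where: { createdAt: { $gte: time } }
// Sequelize 5:
const where = {
  createdAt: {
    [Op.gte]: time
  }
}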
The User model follows the same pattern: verifyPassword moves from instanceMethods to User.prototype, the classMethods become statics (bodies unchanged), and the password-hashing hook returns a Promise instead of calling done().

@@ -52,119 +52,118 @@ module.exports = function (sequelize, DataTypes) {
     password: {
       type: Sequelize.TEXT
     }
-  }, {
-    instanceMethods: {
-      verifyPassword: function (attempt) {
-        return scrypt.verify(Buffer.from(this.password, 'hex'), attempt)
-      }
-    },
-    classMethods: {
-      associate: function (models) { ... },
-      getProfile: function (user) { ... },
-      parseProfile: function (profile) { ... },
-      parsePhotoByProfile: function (profile, bigger) { ... },
-      parseProfileByEmail: function (email) { ... }
-    }
   })
 
+  User.prototype.verifyPassword = function (attempt) {
+    return scrypt.verify(Buffer.from(this.password, 'hex'), attempt)
+  }
+
+  User.associate = function (models) { ... }
+  User.getProfile = function (user) { ... }
+  User.parseProfile = function (profile) { ... }
+  User.parsePhotoByProfile = function (profile, bigger) { ... }
+  User.parseProfileByEmail = function (email) { ... }
+
-  function updatePasswordHashHook (user, options, done) {
+  function updatePasswordHashHook (user, options) {
     // suggested way to hash passwords to be able to do this asynchronously:
     // @see https://github.com/sequelize/sequelize/issues/1821#issuecomment-44265819
-    if (!user.changed('password')) { return done() }
-
-    scrypt.kdf(user.getDataValue('password'), { logN: 15 }).then(keyBuf => {
+    if (!user.changed('password')) {
+      return Promise.resolve()
+    }
+
+    return scrypt.kdf(user.getDataValue('password'), { logN: 15 }).then(keyBuf => {
       user.setDataValue('password', keyBuf.toString('hex'))
-      done()
     })
   }
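Both password helpers keep using scrypt-kdf, whose kdf() and verify() calls return Promises (as used in the hunk above). A hedged stand-alone sketch of the hash-then-verify flow, with illustrative helper names:

const scrypt = require('scrypt-kdf')

function hashPassword (plain) {
  return scrypt.kdf(plain, { logN: 15 }).then(keyBuf => keyBuf.toString('hex'))
}

function verifyPassword (storedHex, attempt) {
  return scrypt.verify(Buffer.from(storedHex, 'hex'), attempt)
}

// hashPassword('secret').then(hex => verifyPassword(hex, 'secret')) // resolves to true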
@@ -31,7 +31,7 @@
   "codemirror": "git+https://github.com/hackmdio/CodeMirror.git",
   "compression": "^1.6.2",
   "connect-flash": "^0.1.1",
-  "connect-session-sequelize": "^4.1.0",
+  "connect-session-sequelize": "^6.0.0",
   "cookie": "0.3.1",
   "cookie-parser": "1.4.3",
   "deep-freeze": "^0.0.1",
@@ -113,8 +113,7 @@
   "scrypt-async": "^2.0.1",
   "scrypt-kdf": "^2.0.1",
   "select2": "^3.5.2-browserify",
-  "sequelize": "^3.28.0",
-  "sequelize-cli": "^2.5.1",
+  "sequelize": "5.3.2",
   "shortid": "2.2.8",
   "socket.io": "~2.1.1",
   "socket.io-client": "~2.1.1",
@@ -194,6 +193,7 @@
   "mocha": "^5.2.0",
   "mock-require": "^3.0.3",
   "optimize-css-assets-webpack-plugin": "^5.0.0",
+  "sequelize-cli": "^5.4.0",
   "script-loader": "^0.7.2",
   "string-loader": "^0.0.1",
   "style-loader": "^0.21.0",