Compare commits


1 commit

Author: Yukai Huang
SHA1: 401cef8fb3
Message: Experimental webpack 2
         html-webpack-plugin chunks be changed to alphabetic order
Date: 2017-02-19 15:45:27 +08:00
262 changed files with 13958 additions and 33248 deletions
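
The note in the commit body about chunk order concerns how html-webpack-plugin sorts the `<script>` tags it injects into the generated HTML. As an illustration only (assuming html-webpack-plugin 2.x as commonly used with webpack 2; the entry names, paths and template below are placeholders, not this repository's actual build config), the ordering can be pinned explicitly instead of relying on the default sort:

```javascript
// webpack.config.js - illustrative sketch, not the project's real configuration
const path = require('path')
const HtmlWebpackPlugin = require('html-webpack-plugin')

module.exports = {
  entry: {
    vendor: './public/js/vendor.js',
    index: './public/js/index.js'
  },
  output: {
    path: path.join(__dirname, 'public/build'),
    filename: '[name].js'
  },
  plugins: [
    new HtmlWebpackPlugin({
      template: './public/views/index.ejs',
      // chunksSortMode controls the order of the injected <script> tags.
      // 'dependency' sorts chunks by their dependency graph instead of the
      // default sort, which the commit message says came out alphabetic
      // after the webpack 2 upgrade.
      chunksSortMode: 'dependency'
    })
  ]
}
```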

.babelrc

@@ -1,11 +1,6 @@
 {
   "presets": [
-    ["env", {
-      "targets": {
-        "node": "8",
-        "uglify": true
-      }
-    }]
+    "es2015"
   ],
   "plugins": [
     "transform-runtime"

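For context on the hunk above: `babel-preset-es2015` always compiles ES2015 syntax down to ES5, while `["env", {"targets": {"node": "8"}}]` keeps syntax that Node 8 already supports. A small, hypothetical input file (not code from this repository) that both configurations accept, with comments noting the difference:

```javascript
// greeter.js - sample input, not project code
const greet = (name = 'world') => `Hello, ${name}!`

class Greeter {
  constructor (name) {
    this.name = name
  }

  greet () {
    return greet(this.name)
  }
}

// With the "es2015" preset, the arrow function, default parameter, class and
// template literal above are all transpiled to ES5 equivalents. With
// ["env", { "targets": { "node": "8" } }] alone they would be left as-is,
// since Node 8 supports this syntax natively; the "uglify": true target in
// the env variant forces full transpilation anyway so the classic UglifyJS
// minifier can parse the output.
module.exports = new Greeter('HackMD')
```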
.editorconfig

@@ -1,15 +1,11 @@
 root = true
+# Tab indentation
 [*]
 indent_style = space
-indent_size = 2
-trim_trailing_whitespace = true
-insert_final_newline = true
-[{*.html,*.ejs}]
-indent_style = space
 indent_size = 4
 trim_trailing_whitespace = true
 insert_final_newline = true
 [*.md]
 trim_trailing_whitespace = false
@@ -17,9 +13,3 @@ trim_trailing_whitespace = false
 [{.travis.yml,npm-shrinkwrap.json,package.json}]
 indent_style = space
 indent_size = 2
-[locales/*.json]
-# this is the exact style poeditor.com exports, so this should prevent churn.
-insert_final_newline = false
-indent_style = space
-indent_size = 4

.eslintignore

@@ -1,3 +1 @@
-lib/ot
-public/vendor
-public/build
+*.min.js

.eslintrc (new file, 268 lines)

@@ -0,0 +1,268 @@
{
"env": {
"browser": true,
"es6": true,
"node": true
},
"extends": "eslint:recommended",
"parserOptions": {
"sourceType": "module"
},
"rules": {
"accessor-pairs": "error",
"array-bracket-spacing": [
"error",
"never"
],
"array-callback-return": "error",
"arrow-body-style": "error",
"arrow-spacing": "error",
"block-scoped-var": "off",
"block-spacing": "error",
"brace-style": [
"error",
"1tbs"
],
"callback-return": "off",
"camelcase": "error",
"class-methods-use-this": "error",
"comma-dangle": "error",
"comma-spacing": [
"error", {
"after": true,
"before": false
}
],
"comma-style": [
"error",
"last"
],
"complexity": "error",
"computed-property-spacing": [
"error",
"never"
],
"consistent-return": "off",
"consistent-this": "error",
"curly": "off",
"default-case": "error",
"dot-location": [
"error",
"property"
],
"dot-notation": [
"error", {
"allowKeywords": true
}
],
"eol-last": "error",
"eqeqeq": "off",
"func-call-spacing": "error",
"func-names": [
"error",
"never"
],
"func-style": [
"error",
"declaration"
],
"generator-star-spacing": "error",
"global-require": "off",
"guard-for-in": "error",
"handle-callback-err": "error",
"id-blacklist": "error",
"id-length": "off",
"id-match": "error",
"indent": 2,
"init-declarations": "off",
"jsx-quotes": "error",
"key-spacing": "error",
"keyword-spacing": [
"error", {
"after": true,
"before": true
}
],
"line-comment-position": "off",
"linebreak-style": [
"error",
"unix"
],
"lines-around-comment": "error",
"lines-around-directive": "error",
"max-depth": "error",
"max-len": "off",
"max-lines": "off",
"max-nested-callbacks": "error",
"max-params": "error",
"max-statements": "error",
"max-statements-per-line": "error",
"multiline-ternary": [
"error",
"never"
],
"new-cap": "error",
"new-parens": "error",
"newline-after-var": "off",
"newline-before-return": "off",
"newline-per-chained-call": "off",
"no-alert": "error",
"no-array-constructor": "error",
"no-bitwise": "error",
"no-caller": "error",
"no-catch-shadow": "off",
"no-confusing-arrow": "error",
"no-continue": "error",
"no-div-regex": "error",
"no-duplicate-imports": "error",
"no-else-return": "error",
"no-empty-function": "error",
"no-eq-null": "error",
"no-eval": "error",
"no-extend-native": "error",
"no-extra-bind": "error",
"no-extra-label": "error",
"no-extra-parens": "warn",
"no-floating-decimal": "error",
"no-global-assign": "error",
"no-implicit-coercion": "error",
"no-implicit-globals": "error",
"no-implied-eval": "error",
"no-inline-comments": "off",
"no-inner-declarations": [
"error",
"functions"
],
"no-invalid-this": "error",
"no-iterator": "error",
"no-label-var": "error",
"no-labels": "error",
"no-lone-blocks": "error",
"no-lonely-if": "error",
"no-loop-func": "error",
"no-magic-numbers": "off",
"no-mixed-operators": "error",
"no-mixed-requires": "error",
"no-multi-spaces": "error",
"no-multi-str": "error",
"no-multiple-empty-lines": "error",
"no-negated-condition": "off",
"no-nested-ternary": "error",
"no-new": "error",
"no-new-func": "error",
"no-new-object": "error",
"no-new-require": "error",
"no-new-wrappers": "error",
"no-octal-escape": "error",
"no-param-reassign": [
"error", {
"props": false
}
],
"no-path-concat": "off",
"no-plusplus": [
"error", {
"allowForLoopAfterthoughts": true
}
],
"no-process-env": "error",
"no-process-exit": "off",
"no-proto": "error",
"no-prototype-builtins": "error",
"no-restricted-globals": "error",
"no-restricted-imports": "error",
"no-restricted-modules": "error",
"no-restricted-properties": "error",
"no-restricted-syntax": "error",
"no-return-assign": "error",
"no-script-url": "error",
"no-self-compare": "error",
"no-sequences": "error",
"no-shadow": "off",
"no-shadow-restricted-names": "error",
"no-spaced-func": "error",
"no-sync": "off",
"no-tabs": "error",
"no-template-curly-in-string": "error",
"no-ternary": "off",
"no-throw-literal": "error",
"no-trailing-spaces": "error",
"no-undef-init": "error",
"no-undefined": "error",
"no-underscore-dangle": "error",
"no-unmodified-loop-condition": "error",
"no-unneeded-ternary": "error",
"no-unsafe-negation": "error",
"no-unused-expressions": "error",
"no-use-before-define": "warn",
"no-useless-call": "error",
"no-useless-computed-key": "error",
"no-useless-concat": "error",
"no-useless-constructor": "error",
"no-useless-escape": "error",
"no-useless-rename": "error",
"no-var": "off",
"no-void": "error",
"no-warning-comments": "error",
"no-whitespace-before-property": "error",
"no-with": "error",
"object-curly-newline": "off",
"object-curly-spacing": [
"error",
"always"
],
"object-property-newline": "error",
"object-shorthand": "off",
"one-var": "off",
"one-var-declaration-per-line": [
"error",
"initializations"
],
"operator-assignment": "error",
"operator-linebreak": "error",
"padded-blocks": "off",
"prefer-arrow-callback": "off",
"prefer-const": "error",
"prefer-numeric-literals": "error",
"prefer-reflect": "error",
"prefer-rest-params": "error",
"prefer-spread": "error",
"prefer-template": "off",
"quote-props": "off",
"quotes": "off",
"radix": "error",
"require-jsdoc": "off",
"rest-spread-spacing": "error",
"semi": "off",
"semi-spacing": [
"error", {
"after": true,
"before": false
}
],
"sort-imports": "error",
"sort-keys": "off",
"sort-vars": "error",
"space-before-blocks": "error",
"space-before-function-paren": "off",
"space-in-parens": [
"error",
"never"
],
"space-infix-ops": "error",
"space-unary-ops": "error",
"spaced-comment": "off",
"strict": "error",
"symbol-description": "error",
"template-curly-spacing": "error",
"unicode-bom": [
"error",
"never"
],
"valid-jsdoc": "error",
"vars-on-top": "off",
"wrap-regex": "error",
"yield-star-spacing": "error",
"yoda": "off"
}
}
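
One detail in the rule list above is easy to misread: in ESLint configuration a bare number is a severity (0 = off, 1 = warn, 2 = error), so `"indent": 2` enables the rule at error severity with its default options (4-space indentation) rather than enforcing 2-space indents. If an indentation width is the intent, the rule needs the array form, sketched here in `.eslintrc.js` style purely as an illustration (not part of this commit):

```javascript
// Illustration only: severity versus rule options in ESLint config
module.exports = {
  rules: {
    // A bare number is just a severity: "indent" at error level,
    // using the rule's default of 4 spaces.
    // indent: 2,

    // Severity plus options: error level, enforcing 2-space indentation.
    indent: ['error', 2]
  }
}
```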

.eslintrc.js

@@ -1,22 +0,0 @@
module.exports = {
"root": true,
"extends": "standard",
"env": {
"node": true
},
"rules": {
// at some point all of these should return to their default "error" state
// but right now, this is not a good choice, because too many places are
// wrong.
"import/first": ["warn"],
"indent": ["warn"],
"no-console": ["warn"],
"no-multiple-empty-lines": ["warn"],
"no-multi-spaces": ["warn"],
"object-curly-spacing": ["warn"],
"one-var": ["warn"],
"quotes": ["warn"],
"semi": ["warn"],
"space-infix-ops": ["warn"]
}
};

.gitignore (2 changed lines)

@@ -1,5 +1,4 @@
 node_modules
-package-lock.json
 composer.phar
 composer.lock
 .env.*.php
@@ -8,6 +7,7 @@ composer.lock
 .idea/
 Thumbs.db
 npm-debug.log
+hackmd_io
 newrelic_agent.log
 logs/
 tmp/

.mailmap

@@ -1,14 +0,0 @@
Max Wu <jackymaxj@gmail.com> Wu Cheng-Han <jacky_cute0808@hotmail.com>
Max Wu <jackymaxj@gmail.com> Cheng-Han, Wu <jackymaxj@gmail.com>
Max Wu <jackymaxj@gmail.com> jackycute <jackymaxj@gmail.com>
Max Wu <jackymaxj@gmail.com> Wu, Cheng-Han <jackymaxj@gmail.com>
Max Wu <jackymaxj@gmail.com> jackycute <jacky_cute0808@hotmail.com>
Sheogorath <sheogorath@shivering-isles.com> Christoph (Sheogorath) Kern <sheogorath@shivering-isles.com>
Raccoon <raccoon@hackmd.io> Raccoon Li <a60814billy@gmail.com>
Raccoon <raccoon@hackmd.io> Raccoon <a60814billy@gmail.com>
Peter Dave Hello <hsu@peterdavehello.org> Peter Dave Hello <PeterDaveHello@users.noreply.github.com>
Claudius Coenen <github@amenthes.de> Claudius Coenen <opensource@amenthes.de>

.travis.yml

@@ -1,40 +1,13 @@
 language: node_js
-dist: xenial
-cache: yarn
-jobs:
-  include:
-    - stage: Static Tests
-      name: eslint
-      node_js:
-        - 10
-      script:
-        - yarn run eslint
-    - name: ShellCheck
-      script:
-        - shellcheck bin/heroku bin/setup
-      language: generic
-    - name: json-lint
-      addons:
-        apt:
-          packages:
-            - jq
-      script:
-        - yarn run jsonlint
-      language: generic
-    - stage: Dynamic Tests
-      name: Node.js 8
-      node_js:
-        - 8
-      script:
-        - yarn run mocha-suite
-    - name: Node.js 10
-      node_js:
-        - 10
-      script:
-        - yarn run mocha-suite
-    - name: Node.js 12
-      node_js:
-        - 12
-      script:
-        - yarn run mocha-suite
+node_js:
+- 6
+- 7
+- stable
+env:
+- CXX=g++-4.8
+addons:
+  apt:
+    sources:
+    - ubuntu-toolchain-r-test
+    packages:
+    - g++-4.8

AUTHORS (104 changed lines)

@@ -1,69 +1,35 @@
-alecdwm <alec@owls.io>
-bananaappletw <bananaappletw@gmail.com>
-Bartlomiej Szala <fenix440@gmail.com>
-BoHong Li <a60814billy@gmail.com>
-Bryan Davis <bd808@wikimedia.org>
-butlerx <butlerx@notthe.cloud>
-Cheng-Han, Wu <jackymaxj@gmail.com>
-Christian Schuhmann <madebyherzblut@users.noreply.github.com>
-Colin Maudry <colin@maudry.com>
-Dmytro Kytsmen <dmitrokytsmen@gmail.com>
-Fabien Meghazi <agr@amigrave.com>
-Florian Rhiem <florian.rhiem@gmail.com>
-geekyd <singhsince94@gmail.com>
-GhiMax <ghina8@gmail.com>
-greenkeeperio-bot <support@greenkeeper.io>
-Himura Kazuto <Himura2la@users.noreply.github.com>
-Ho33e5 <ho33e5@gmail.com>
-Ian Dees <ian.dees@gmail.com>
-Ikumi Shimizu <193s@users.noreply.github.com>
-ivanorsolic <ivanorsolic@users.noreply.github.com>
-jackycute <jacky_cute0808@hotmail.com>
-jackycute <jackymaxj@gmail.com>
-Jakub Sygnowski <sygnowski@gmail.com>
-James Stephenson <c4p7.fl1n7@gmail.com>
-Jan Kunzmann <jan-github@phobia.de>
-Jannik Lorenz <dev@janniklorenz.de>
-Jason Croft <jcroft@velocity.org>
-Johannes Weißl <jargon@molb.org>
-Jordan Matelsky <j6k4m8@gmail.com>
-Jun SAKATA <jun.bj141400@gmail.com>
-Kaiyu Shi <skyisno.1@gmail.com>
-knjcode <knjcode@gmail.com>
-Kotaro Yamamoto <kota.crk@gmail.com>
-Lars Karlsson <lars@kajes.se>
-Laura Kyle <laura.kyle91@gmail.com>
-LluisArevalo <thorin119@gmail.com>
-Marcelo Alencar <marceloalves@ufpa.br>
-Martijnpold <martijntje7@gmail.com>
-Max Wu <jackymaxj@gmail.com>
-neopostmodern <clemens@neopostmodern.com>
-NV <nvsofts@gmail.com>
-Ömer Erdinç Yağmurlu <omeryagmurlu@gmail.com>
-p0v1n0m <p0v1n0m@gmail.com>
-Pablo Guerrero <pablo.guerrero@gmail.com>
-Pablo Guerrero <pablo.guerrero@sap.com>
-Paras <paraschadha2052@gmail.com>
-Patrick Andersen <patrick@bacha.dk>
-Peter Dave Hello <hsu@peterdavehello.org>
-Peter Dave Hello <PeterDaveHello@users.noreply.github.com>
-Philipp Zumstein <zuphilip@users.noreply.github.com>
-Raccoon Li <a60814billy@gmail.com>
-robert <ahmerov.rt@molodost.bz>
-Sergio Valverde <svg153@users.noreply.github.com>
-Sheogorath <sheogorath@shivering-isles.com>
-Simon Joda Stößer <SimJoSt@users.noreply.github.com>
-S.Noda <noda@fenrir.co.jp>
-Stratos Gerakakis <stratosgear@gmail.com>
-The Gitter Badger <badger@gitter.im>
-tkqubo <tk.qubo@gmail.com>
-tkykm <tkykm@users.noreply.github.com>
-Tom Wyckhuys <tomwyckhuys@gmail.com>
-Wonder Chang <iwonder.tw@gmail.com>
-Wu Cheng-Han <jacky_cute0808@hotmail.com>
-Xavier Marques <xaviermarques4f@gmail.com>
-xnum <s000032001@gmail.com>
-Yukai Huang <yukaihuangtw@gmail.com>
-zachariast <zachariastraianos@gmail.com>
-Zankio <xxoojoeooxx1@gmail.com>
-蒼時弦也 <elct9620@frost.tw>
+List of HackMD contributors.
+bananaapple
+Bartlomiej Szala
+Colin Maudry
+Dmytro Kytsmen
+Fabien Meghazi
+Florian Rhiem
+Ikumi Shimizu
+ivanorsolic
+Jason Croft
+Jannik Lorenz
+James Stephenson
+Jordan Matelsky
+Kenji Doi
+Lars Kajes
+Lapinot
+Laura Kyle
+Marcelo Alencar
+Martijnpold
+Massimo Ghinassi
+Max Wu
+Ömer Erdinç Yağmurlu
+p0v1n0m
+Pablo Guerrero
+paraschadha2052
+Peter Dave Hello
+Qubo
+Sergio Valverde
+Tom Wyckhuys
+Yukai Huang
+Zacharias Traianos
+Zankio
+Xavier
+葉家郡

CHANGELOG.md

@@ -1,7 +0,0 @@
# CHANGELOG
Please refer to the release notes published under
[`public/docs/release-notes.md`](public/docs/release-notes.md).
These are also available on each CodiMD instance under
https://[domain-name]/release-notes

CODE_OF_CONDUCT.md

@@ -1,37 +0,0 @@
Contributor Code of Conduct
===
As contributors and maintainers of this project, and in the interest of fostering an open and
welcoming community, we pledge to respect all people who contribute through reporting issues,
posting feature requests, updating documentation, submitting pull requests or patches, and other
activities.
We are committed to making participation in this project a harassment-free experience for everyone,
regardless of level of experience, gender, gender identity and expression, sexual orientation,
disability, personal appearance, body size, race, ethnicity, age, religion, or nationality.
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery
* Personal attacks
* Trolling or insulting/derogatory comments
* Public or private harassment
* Publishing other's private information, such as physical or electronic addresses, without explicit
permission
* Other unethical or unprofessional conduct.
Project maintainers have the right and responsibility to remove, edit, or reject comments, commits,
code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct. By
adopting this Code of Conduct, project maintainers commit themselves to fairly and consistently
applying these principles to every aspect of managing this project. Project maintainers who do not
follow or enforce the Code of Conduct may be permanently removed from the project team.
This code of conduct applies both within project spaces and in public spaces when an individual is
representing the project or its community.
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by opening an
issue or contacting one or more of the project maintainers.
This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org),
version 1.2.0, available at
[http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/)

CONTRIBUTING.md

@@ -1,37 +1,54 @@
 # Contributing
 When contributing to this repository, please first discuss the change you wish to make via issue,
 email, or any other method with the owners of this repository before making a change.
-Please note we have a [code of conduct](CODE_OF_CONDUCT.md), please follow it in all your
-interactions with the project.
+Please note we have a code of conduct, please follow it in all your interactions with the project.
 ## Pull Request Process
-1. Ensure you signed all your commits with Developer Certificate of Origin (DCO).
-2. Ensure any install or build dependencies are removed before the end of the layer when doing a
+1. Ensure any install or build dependencies are removed before the end of the layer when doing a
 build.
-3. Update the README.md with details of changes to the interface, this includes new environment
+2. Update the README.md with details of changes to the interface, this includes new environment
 variables, exposed ports, useful file locations and container parameters.
-4. Increase the version numbers in any examples files and the README.md to the new version that this
+3. Increase the version numbers in any examples files and the README.md to the new version that this
 Pull Request would represent. The versioning scheme we use is [SemVer](http://semver.org/).
-5. You may merge the Pull Request in once you have the sign-off of two other developers, or if you
+4. You may merge the Pull Request in once you have the sign-off of two other developers, or if you
 do not have permission to do that, you may request the second reviewer to merge it for you.
-## Sign your work
-We use the Developer Certificate of Origin (DCO) as a additional safeguard
-for the CodiMD project. This is a well established and widely used
-mechanism to assure contributors have confirmed their right to license
-their contribution under the project's license.
-Please read [docs/legal/developer-certificate-of-origin.txt][dcofile].
-If you can certify it, then just add a line to every git commit message:
-````
-Signed-off-by: Random J Developer <random@developer.example.org>
-````
-Use your real name (sorry, no pseudonyms or anonymous contributions).
-If you set your `user.name` and `user.email` git configs, you can sign your
-commit automatically with `git commit -s`. You can also use git [aliases](https://git-scm.com/book/tr/v2/Git-Basics-Git-Aliases)
-like `git config --global alias.ci 'commit -s'`. Now you can commit with
-`git ci` and the commit will be signed.
+## Contributor Code of Conduct
+As contributors and maintainers of this project, and in the interest of fostering an open and
+welcoming community, we pledge to respect all people who contribute through reporting issues,
+posting feature requests, updating documentation, submitting pull requests or patches, and other
+activities.
+We are committed to making participation in this project a harassment-free experience for everyone,
+regardless of level of experience, gender, gender identity and expression, sexual orientation,
+disability, personal appearance, body size, race, ethnicity, age, religion, or nationality.
+Examples of unacceptable behavior by participants include:
+* The use of sexualized language or imagery
+* Personal attacks
+* Trolling or insulting/derogatory comments
+* Public or private harassment
+* Publishing other's private information, such as physical or electronic addresses, without explicit
+permission
+* Other unethical or unprofessional conduct.
+Project maintainers have the right and responsibility to remove, edit, or reject comments, commits,
+code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct. By
+adopting this Code of Conduct, project maintainers commit themselves to fairly and consistently
+applying these principles to every aspect of managing this project. Project maintainers who do not
+follow or enforce the Code of Conduct may be permanently removed from the project team.
+This code of conduct applies both within project spaces and in public spaces when an individual is
+representing the project or its community.
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by opening an
+issue or contacting one or more of the project maintainers.
+This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org),
+version 1.2.0, available at
+[http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/)

LICENSE (680 changed lines)

@@ -1,668 +1,22 @@
+The MIT License (MIT)
+
+Copyright (c) 2017 Max Wu <jackymaxj@gmail.com> and others
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
GNU AFFERO GENERAL PUBLIC LICENSE
Version 3, 19 November 2007
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU Affero General Public License is a free, copyleft license for
software and other kinds of works, specifically designed to ensure
cooperation with the community in the case of network server software.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
our General Public Licenses are intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
Developers that use our General Public Licenses protect your rights
with two steps: (1) assert copyright on the software, and (2) offer
you this License which gives you legal permission to copy, distribute
and/or modify the software.
A secondary benefit of defending all users' freedom is that
improvements made in alternate versions of the program, if they
receive widespread use, become available for other developers to
incorporate. Many developers of free software are heartened and
encouraged by the resulting cooperation. However, in the case of
software used on network servers, this result may fail to come about.
The GNU General Public License permits making a modified version and
letting the public access it on a server without ever releasing its
source code to the public.
The GNU Affero General Public License is designed specifically to
ensure that, in such cases, the modified source code becomes available
to the community. It requires the operator of a network server to
provide the source code of the modified version running there to the
users of that server. Therefore, public use of a modified version, on
a publicly accessible server, gives the public access to the source
code of the modified version.
An older license, called the Affero General Public License and
published by Affero, was designed to accomplish similar goals. This is
a different license, not a version of the Affero GPL, but Affero has
released a new version of the Affero GPL which permits relicensing under
this license.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU Affero General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Remote Network Interaction; Use with the GNU General Public License.
Notwithstanding any other provision of this License, if you modify the
Program, your modified version must prominently offer all users
interacting with it remotely through a computer network (if your version
supports such interaction) an opportunity to receive the Corresponding
Source of your version by providing access to the Corresponding Source
from a network server at no charge, through some standard or customary
means of facilitating copying of software. This Corresponding Source
shall include the Corresponding Source for any work covered by version 3
of the GNU General Public License that is incorporated pursuant to the
following paragraph.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the work with which it is combined will remain governed by version
3 of the GNU General Public License.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU Affero General Public License from time to time. Such new versions
will be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU Affero General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU Affero General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU Affero General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
CodiMD - Realtime collaborative markdown notes on all platforms.
Copyright (C) 2019 Christoph (Sheogorath) Kern
Copyright (C) 2019 Claudius Coenen
Copyright (C) 2019 Max Wu
Copyright (C) 2017 Yukai Huang
And more can be found on https://github.com/codimd/server/graphs/contributors
Or in the local AUTHORS file
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If your software can interact with users remotely through a computer
network, you should also make sure that it provides a way for users to
get its source. For example, if your program is a web application, its
interface could display a "Source" link that leads users to an archive
of the code. There are many ways you could offer source, and different
solutions will be better for different programs; see section 13 for the
specific requirements.
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU AGPL, see
<http://www.gnu.org/licenses/>.

README.md (284 changed lines)

@ -1,104 +1,230 @@
CodiMD HackMD
=== ===
[![#CodiMD on matrix.org][matrix.org-image]][matrix.org-url] [![Join the chat at https://gitter.im/hackmdio/hackmd][gitter-image]][gitter-url]
[![build status][travis-image]][travis-url] [![build status][travis-image]][travis-url]
[![version][github-version-badge]][github-release-page]
[![POEditor][poeditor-image]][poeditor-url]
[![Mastodon][social-mastodon-image]][social-mastodon]
CodiMD lets you create real-time collaborative markdown notes. You can test-drive
it by visiting our [CodiMD demo server][codimd-demo].
It is inspired by Hackpad, Etherpad and similar collaborative editors. This
project originated with the team at [HackMD](https://hackmd.io) and now forked
into its own organisation. [A longer writeup can be read in the history doc](docs/history.md).
[![CodiMD 1.3.2 with its feature demonstration page open](docs/images/CodiMD-1.3.2-features.png)][codimd-demo-features]
## Community and Contributions HackMD lets you create realtime collaborative markdown notes on all platforms.
Inspired by Hackpad, with more focus on speed and flexibility.
Still in the early stage, feel free to fork or contribute to HackMD.
We welcome contributions! There's a lot to do: If you would like to report bugs, Thanks for using! :smile:
the [issue tracker][github-issue-tracker] is the right place. If you can help
translating, find us on [POEditor][poeditor-url]. To get started developing,
take a look at the [docs/dev](docs/dev) directory. In any case: come talk to us,
we'll be delighted to help you with the first steps.
To stay up to date with our work or get support it's recommended to join our [docker-hackmd](https://github.com/hackmdio/docker-hackmd)
[Matrix channel][matrix.org-url], stop by our [community forums][codimd-community] ---
or subscribe to the [release feed][github-release-feed]. We also engage in
regular [community calls][codimd-community-calls] ([RSS](https://community.codimd.org/t/codimd-community-call/19.rss)) which you are very welcome to join.
Before you go too far, here is the great docker repo for HackMD.
With docker, you can deploy a server in minutes without any downtime.
## Installation / Upgrading Heroku Deployment
---
You can run CodiMD in a number of ways, and we created setup instructions for You can quickly setup a sample heroku hackmd application by clicking the button below.
all of these:
* [Docker](docs/setup/docker.md) [![Deploy](https://www.herokucdn.com/deploy/button.svg)](https://heroku.com/deploy)
* [Kubernetes](docs/setup/kubernetes.md)
* [Cloudron](docs/setup/cloudron.md)
* [LinuxServer.io (multi-arch docker)](docs/setup/docker-linuxserver.md)
* [Heroku](docs/setup/heroku.md)
* [Manual setup](docs/setup/manual-setup.md)
If you do not wish to run your own setup, you can find a commercial offering at [migration-to-0.5.0](https://github.com/hackmdio/migration-to-0.5.0)
https://hackmd.io. This is not the same codebase as this one, but it is a very ---
similar project.
We don't use LZString to compress socket.io data and DB data after version 0.5.0.
Please run the migration tool if you're upgrading from the old version.
## Configuration [migration-to-0.4.0](https://github.com/hackmdio/migration-to-0.4.0)
---
Theres two main ways to configure your CodiMD instance: We've dropped MongoDB after version 0.4.0.
[Config file](docs/configuration-config-file.md) or So here is the migration tool for you to transfer the old DB data to the new DB.
[environment variables](docs/configuration-env-vars.md). You can choose what This tool is also used for official service.
works best for you.
CodiMD can integrate with Browsers Requirement
---
* facebook, twitter, github, gitlab, mattermost, dropbox, google, ldap, saml and [oauth2](docs/guides/auth/oauth.md) **for login** - Chrome >= 47, Chrome for Android >= 47
* imgur, s3, minio, azure **for image/attachment storage** (files can also be local!) - Safari >= 9, iOS Safari >= 8.4
* dropbox **for export and import** - Firefox >= 44
- IE >= 9, Edge >= 12
More info about that can be found in the configuration docs above. - Opera >= 34, Opera Mini not supported
## Browser support
To use CodiMD, your browser should match or exceed these versions:
- ![Chrome](http://browserbadge.com/chrome/47/18px) Chrome >= 47, Chrome for Android >= 47
- ![Safari](http://browserbadge.com/safari/9/18px) Safari >= 9, iOS Safari >= 8.4
- ![Firefox](http://browserbadge.com/firefox/44/18px) Firefox >= 44
- ![IE](http://browserbadge.com/ie/9/18px) IE >= 9, Edge >= 12
- ![Opera](http://browserbadge.com/opera/34/18px) Opera >= 34, Opera Mini not supported
- Android Browser >= 4.4 - Android Browser >= 4.4
Prerequisite
---
## Related Tools - Node.js 6.x or up (test up to 7.5.0)
- Database (PostgreSQL, MySQL, MariaDB, SQLite, MSSQL) use charset `utf8`
- npm (and its dependencies, especially [uWebSockets](https://github.com/uWebSockets/uWebSockets#nodejs-developers), [node-gyp](https://github.com/nodejs/node-gyp#installation))
Our community has created related tools, we'd like to highlight [codimd-cli](https://github.com/codimd/cli) Get started
which lets you use CodiMD from the comfort of your command line. ---
1. Download a release and unzip or clone into a directory
2. Enter the directory and type `bin/setup`, which will install npm dependencies and create configs. The setup script is written in Bash, so you will need Bash as a prerequisite.
3. Setup the configs, see more below
4. Setup environment variables which will overwrite the configs
5. Build front-end bundle by `npm run build` (use `npm run dev` if you are in development)
6. Run the server as you like (node, forever, pm2)
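For illustration, the steps above roughly translate into the shell session below; the clone URL and `node app.js` are only examples (step 6 explicitly leaves the process manager up to you):
```bash
# 1-2. get the code and run the setup script (installs npm dependencies, creates config files)
git clone https://github.com/hackmdio/hackmd.git && cd hackmd
bin/setup

# 3-4. edit config.json / .sequelizerc, or override settings with HMD_* environment variables

# 5. build the front-end bundle (use `npm run dev` while developing)
npm run build

# 6. run the server with whatever supervisor you prefer (node, forever, pm2)
node app.js
```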
# License Upgrade guide
---
Licensed under AGPLv3. For our list of contributors, see [AUTHORS](AUTHORS). If you are upgrading HackMD from an older version, follow these steps:
[matrix.org-image]: https://img.shields.io/badge/Matrix.org-%23CodiMD@matrix.org-green.svg 1. Fully stop your old server first (important)
[matrix.org-url]: https://riot.im/app/#/room/#codimd:matrix.org 2. `git pull` or do whatever that updates the files
[travis-image]: https://travis-ci.org/codimd/server.svg?branch=master 3. `npm install` to update dependencies
[travis-url]: https://travis-ci.org/codimd/server 4. Build front-end bundle by `npm run build` (use `npm run dev` if you are in development)
[github-version-badge]: https://img.shields.io/github/release/codimd/server.svg 5. Modify the file named `.sequelizerc`, change the value of the variable `url` with your db connection string
[github-release-page]: https://github.com/codimd/server/releases For example: `postgres://username:password@localhost:5432/hackmd`
[github-release-feed]: https://github.com/codimd/server/releases.atom 6. Run `node_modules/.bin/sequelize db:migrate`, this step will migrate your db to the latest schema
[github-issue-tracker]: https://github.com/codimd/server/issues/ 7. Start your whole new server!
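As a rough sketch, an upgrade therefore looks like this (the connection string is a placeholder; fully stop the old server first, as step 1 says):
```bash
# 2-4. update the files, refresh dependencies and rebuild the front-end bundle
git pull
npm install
npm run build        # or `npm run dev` while developing

# 5-6. point the `url` variable in .sequelizerc at your database, e.g.
#      postgres://username:password@localhost:5432/hackmd, then migrate the schema
node_modules/.bin/sequelize db:migrate

# 7. start the new server again
node app.js
```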
[poeditor-image]: https://img.shields.io/badge/POEditor-translate-blue.svg
[poeditor-url]: https://poeditor.com/join/project/1OpGjF2Jir Structure
[codimd-demo]: https://demo.codimd.org ---
[codimd-demo-features]: https://demo.codimd.org/features
[codimd-community]: https://community.codimd.org ```text
[codimd-community-calls]: https://community.codimd.org/t/codimd-community-call/19 hackmd/
[social-mastodon]: https://social.codimd.org/mastodon ├── tmp/ --- temporary files
[social-mastodon-image]: https://img.shields.io/badge/social-mastodon-3c99dc.svg ├── docs/ --- document files
├── lib/ --- server libraries
└── public/ --- client files
├── css/ --- css styles
├── js/ --- js scripts
├── vendor/ --- vendor includes
└── views/ --- view templates
```
Configuration files
---
There are some configs you need to change in the files below
```
./config.json --- application settings
```
Environment variables (will overwrite other server configs)
---
| variables | example values | description |
| --------- | ------ | ----------- |
| NODE_ENV | `production` or `development` | set current environment (will apply corresponding settings in the `config.json`) |
| DEBUG | `true` or `false` | set debug mode, show more logs |
| HMD_DOMAIN | `hackmd.io` | domain name |
| HMD_URL_PATH | `hackmd` | sub url path, like `www.example.com/<URL_PATH>` |
| HMD_PORT | `80` | web app port |
| HMD_ALLOW_ORIGIN | `localhost, hackmd.io` | domain name whitelist (use comma to separate) |
| HMD_PROTOCOL_USESSL | `true` or `false` | set to use ssl protocol for resources path (only applied when domain is set) |
| HMD_URL_ADDPORT | `true` or `false` | set to add port on callback URL (port 80 or 443 won't be applied) (only applied when domain is set) |
| HMD_USECDN | `true` or `false` | set to use CDN resources or not (default is `true`) |
| HMD_ALLOW_ANONYMOUS | `true` or `false` | set to allow anonymous usage (default is `true`) |
| HMD_ALLOW_FREEURL | `true` or `false` | set to allow creating new notes by accessing a non-existent note URL |
| HMD_DEFAULT_PERMISSION | `freely`, `editable`, `limited`, `locked` or `private` | set notes' default permission (only applied to signed-in users) |
| HMD_DB_URL | `mysql://localhost:3306/database` | set the db url |
| HMD_FACEBOOK_CLIENTID | no example | Facebook API client id |
| HMD_FACEBOOK_CLIENTSECRET | no example | Facebook API client secret |
| HMD_TWITTER_CONSUMERKEY | no example | Twitter API consumer key |
| HMD_TWITTER_CONSUMERSECRET | no example | Twitter API consumer secret |
| HMD_GITHUB_CLIENTID | no example | GitHub API client id |
| HMD_GITHUB_CLIENTSECRET | no example | GitHub API client secret |
| HMD_GITLAB_BASEURL | no example | GitLab authentication endpoint, set to use other endpoint than GitLab.com (optional) |
| HMD_GITLAB_CLIENTID | no example | GitLab API client id |
| HMD_GITLAB_CLIENTSECRET | no example | GitLab API client secret |
| HMD_DROPBOX_CLIENTID | no example | Dropbox API client id |
| HMD_DROPBOX_CLIENTSECRET | no example | Dropbox API client secret |
| HMD_GOOGLE_CLIENTID | no example | Google API client id |
| HMD_GOOGLE_CLIENTSECRET | no example | Google API client secret |
| HMD_LDAP_URL | `ldap://example.com` | url of LDAP server |
| HMD_LDAP_BINDDN | no example | bindDn for LDAP access |
| HMD_LDAP_BINDCREDENTIALS | no example | bindCredentials for LDAP access |
| HMD_LDAP_TOKENSECRET | `supersecretkey` | secret used for generating access/refresh tokens |
| HMD_LDAP_SEARCHBASE | `o=users,dc=example,dc=com` | LDAP directory to begin search from |
| HMD_LDAP_SEARCHFILTER | `(uid={{username}})` | LDAP filter to search with |
| HMD_LDAP_SEARCHATTRIBUTES | no example | LDAP attributes to search with |
| HMD_LDAP_TLS_CA | `server-cert.pem, root.pem` | Root CA for LDAP TLS in PEM format (use comma to separate) |
| HMD_LDAP_PROVIDERNAME | `My institution` | Optional name to be displayed at login form indicating the LDAP provider |
| HMD_IMGUR_CLIENTID | no example | Imgur API client id |
| HMD_EMAIL | `true` or `false` | set to allow email signin |
| HMD_ALLOW_EMAIL_REGISTER | `true` or `false` | set to allow email register (only applied when email is set, default is `true`) |
| HMD_IMAGE_UPLOAD_TYPE | `imgur`, `s3` or `filesystem` | Where to upload image. For S3, see our [S3 Image Upload Guide](docs/guides/s3-image-upload.md) |
| HMD_S3_ACCESS_KEY_ID | no example | AWS access key id |
| HMD_S3_SECRET_ACCESS_KEY | no example | AWS secret key |
| HMD_S3_REGION | `ap-northeast-1` | AWS S3 region |
| HMD_S3_BUCKET | no example | AWS S3 bucket name |
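As a quick sketch (all values below are placeholders), a server configured purely through environment variables could be launched like this:
```bash
# example values only -- substitute your own domain, port and database URL
export NODE_ENV=production
export HMD_DOMAIN=hackmd.example.com
export HMD_PORT=80
export HMD_PROTOCOL_USESSL=true
export HMD_DB_URL='postgres://username:password@localhost:5432/hackmd'
node app.js
```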
Application settings `config.json`
---
| variables | example values | description |
| --------- | ------ | ----------- |
| debug | `true` or `false` | set debug mode, show more logs |
| domain | `localhost` | domain name |
| urlpath | `hackmd` | sub url path, like `www.example.com/<urlpath>` |
| port | `80` | web app port |
| alloworigin | `['localhost']` | domain name whitelist |
| usessl | `true` or `false` | set to use ssl server (if true will auto turn on `protocolusessl`) |
| protocolusessl | `true` or `false` | set to use ssl protocol for resources path (only applied when domain is set) |
| urladdport | `true` or `false` | set to add port on callback URL (port 80 or 443 won't be applied) (only applied when domain is set) |
| usecdn | `true` or `false` | set to use CDN resources or not (default is `true`) |
| allowanonymous | `true` or `false` | set to allow anonymous usage (default is `true`) |
| allowfreeurl | `true` or `false` | set to allow creating new notes by accessing a non-existent note URL |
| defaultpermission | `freely`, `editable`, `limited`, `locked` or `private` | set notes' default permission (only applied to signed-in users) |
| dburl | `mysql://localhost:3306/database` | set the db URL; if this variable is set, the db config below won't be applied |
| db | `{ "dialect": "sqlite", "storage": "./db.hackmd.sqlite" }` | set the db configs, [see more here](http://sequelize.readthedocs.org/en/latest/api/sequelize/) |
| sslkeypath | `./cert/client.key` | ssl key path (only need when you set usessl) |
| sslcertpath | `./cert/hackmd_io.crt` | ssl cert path (only need when you set usessl) |
| sslcapath | `['./cert/COMODORSAAddTrustCA.crt']` | ssl ca chain (only need when you set usessl) |
| dhparampath | `./cert/dhparam.pem` | ssl dhparam path (only need when you set usessl) |
| tmppath | `./tmp/` | temp directory path |
| defaultnotepath | `./public/default.md` | default note file path |
| docspath | `./public/docs` | docs directory path |
| indexpath | `./public/views/index.ejs` | index template file path |
| hackmdpath | `./public/views/hackmd.ejs` | hackmd template file path |
| errorpath | `./public/views/error.ejs` | error template file path |
| prettypath | `./public/views/pretty.ejs` | pretty template file path |
| slidepath | `./public/views/slide.hbs` | slide template file path |
| sessionname | `connect.sid` | cookie session name |
| sessionsecret | `secret` | cookie session secret |
| sessionlife | `14 * 24 * 60 * 60 * 1000` | cookie session life |
| staticcachetime | `1 * 24 * 60 * 60 * 1000` | static file cache time |
| heartbeatinterval | `5000` | socket.io heartbeat interval |
| heartbeattimeout | `10000` | socket.io heartbeat timeout |
| documentmaxlength | `100000` | note max length |
| email | `true` or `false` | set to allow email signin |
| allowemailregister | `true` or `false` | set to allow email register (only applied when email is set, default is `true`) |
| imageUploadType | `imgur`(default), `s3` or `filesystem` | Where to upload images |
| s3 | `{ "accessKeyId": "YOUR_S3_ACCESS_KEY_ID", "secretAccessKey": "YOUR_S3_ACCESS_KEY", "region": "YOUR_S3_REGION", "bucket": "YOUR_S3_BUCKET_NAME" }` | When `imageUploadType` is set to `s3`, you also need to set up this key; check our [S3 Image Upload Guide](docs/guides/s3-image-upload.md) |
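For example, a minimal production `config.json` with placeholder values (using the SQLite `db` example from the table above) can be written with a shell heredoc, mirroring the `cat << EOF > config.json` pattern the deployment script further down in this diff uses:
```bash
# placeholder values -- adjust before running a real instance
cat << EOF > config.json
{
  "production": {
    "domain": "localhost",
    "port": 80,
    "sessionsecret": "change this",
    "db": { "dialect": "sqlite", "storage": "./db.hackmd.sqlite" },
    "imageUploadType": "imgur",
    "imgur": { "clientID": "change this" }
  }
}
EOF
```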
Third-party integration api key settings
---
| service | settings location | description |
| ------- | --------- | ----------- |
| facebook, twitter, github, gitlab, dropbox, google, ldap | environment variables or `config.json` | for signin |
| imgur | environment variables or `config.json` | for image upload |
| google drive(`google/apiKey`, `google/clientID`), dropbox(`dropbox/appKey`) | `config.json` | for export and import |
Third-party integration oauth callback urls
---
| service | callback url (after the server url) |
| ------- | --------- |
| facebook | `/auth/facebook/callback` |
| twitter | `/auth/twitter/callback` |
| github | `/auth/github/callback` |
| gitlab | `/auth/gitlab/callback` |
| dropbox | `/auth/dropbox/callback` |
| google | `/auth/google/callback` |
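To illustrate (with a hypothetical server URL), the value to register with each provider is simply your server URL followed by the path from the table:
```bash
SERVER_URL='https://hackmd.example.com'       # placeholder: your deployment's base URL
echo "${SERVER_URL}/auth/github/callback"     # value to enter in the GitHub OAuth application
echo "${SERVER_URL}/auth/gitlab/callback"     # same pattern for every other provider
```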
Operational Transformation
---
Since 0.3.2, we have supported operational transformation.
It makes concurrent editing safe and will not break other users' operations.
Additionally, it can now show other clients' selections.
See more at [http://operational-transformation.github.io/](http://operational-transformation.github.io/)
**License under MIT.**
[gitter-image]: https://badges.gitter.im/Join%20Chat.svg
[gitter-url]: https://gitter.im/hackmdio/hackmd?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge
[travis-image]: https://travis-ci.org/hackmdio/hackmd.svg?branch=master
[travis-url]: https://travis-ci.org/hackmdio/hackmd

851
app.js
View file

@ -1,309 +1,654 @@
'use strict' //app
// app //external modules
// external modules var express = require('express');
var express = require('express') var toobusy = require('toobusy-js');
var ejs = require('ejs');
var ejs = require('ejs') var passport = require('passport');
var passport = require('passport') var methodOverride = require('method-override');
var methodOverride = require('method-override') var cookieParser = require('cookie-parser');
var cookieParser = require('cookie-parser') var bodyParser = require('body-parser');
var compression = require('compression') var compression = require('compression')
var session = require('express-session') var session = require('express-session');
var SequelizeStore = require('connect-session-sequelize')(session.Store) var SequelizeStore = require('connect-session-sequelize')(session.Store);
var fs = require('fs') var fs = require('fs');
var path = require('path') var url = require('url');
var path = require('path');
var imgur = require('imgur');
var formidable = require('formidable');
var morgan = require('morgan');
var passportSocketIo = require("passport.socketio");
var helmet = require('helmet');
var i18n = require('i18n');
var flash = require('connect-flash');
var validator = require('validator');
var morgan = require('morgan') //core
var passportSocketIo = require('passport.socketio') var config = require("./lib/config.js");
var helmet = require('helmet') var logger = require("./lib/logger.js");
var i18n = require('i18n') var auth = require("./lib/auth.js");
var flash = require('connect-flash') var response = require("./lib/response.js");
var models = require("./lib/models");
// core //server setup
var config = require('./lib/config') if (config.usessl) {
var logger = require('./lib/logger') var ca = (function () {
var response = require('./lib/response') var i, len, results;
var models = require('./lib/models') results = [];
var csp = require('./lib/csp') for (i = 0, len = config.sslcapath.length; i < len; i++) {
results.push(fs.readFileSync(config.sslcapath[i], 'utf8'));
// server setup }
var app = express() return results;
var server = null })();
if (config.useSSL) { var options = {
var ca = (function () { key: fs.readFileSync(config.sslkeypath, 'utf8'),
var i, len, results cert: fs.readFileSync(config.sslcertpath, 'utf8'),
results = [] ca: ca,
for (i = 0, len = config.sslCAPath.length; i < len; i++) { dhparam: fs.readFileSync(config.dhparampath, 'utf8'),
results.push(fs.readFileSync(config.sslCAPath[i], 'utf8')) requestCert: false,
} rejectUnauthorized: false
return results };
})() var app = express();
var options = { var server = require('https').createServer(options, app);
key: fs.readFileSync(config.sslKeyPath, 'utf8'),
cert: fs.readFileSync(config.sslCertPath, 'utf8'),
ca: ca,
dhparam: fs.readFileSync(config.dhParamPath, 'utf8'),
requestCert: false,
rejectUnauthorized: false
}
server = require('https').createServer(options, app)
} else { } else {
server = require('http').createServer(app) var app = express();
var server = require('http').createServer(app);
} }
// logger //logger
app.use(morgan('combined', { app.use(morgan('combined', {
'stream': logger.stream "stream": logger.stream
})) }));
// socket io //socket io
var io = require('socket.io')(server) var io = require('socket.io')(server);
io.engine.ws = new (require('ws').Server)({ io.engine.ws = new (require('uws').Server)({
noServer: true, noServer: true,
perMessageDeflate: false perMessageDeflate: false
}) });
// others //others
var realtime = require('./lib/realtime.js') var realtime = require("./lib/realtime.js");
// assign socket io to realtime //assign socket io to realtime
realtime.io = io realtime.io = io;
// methodOverride //methodOverride
app.use(methodOverride('_method')) app.use(methodOverride('_method'));
// session store // create application/json parser
var jsonParser = bodyParser.json({
limit: 1024 * 1024 * 10 // 10 mb
});
// create application/x-www-form-urlencoded parser
var urlencodedParser = bodyParser.urlencoded({
extended: false,
limit: 1024 * 1024 * 10 // 10 mb
});
//session store
var sessionStore = new SequelizeStore({ var sessionStore = new SequelizeStore({
db: models.sequelize db: models.sequelize
}) });
// compression //compression
app.use(compression()) app.use(compression());
// use hsts to tell https users stick to this // use hsts to tell https users stick to this
if (config.hsts.enable) { app.use(helmet.hsts({
app.use(helmet.hsts({ maxAge: 31536000 * 1000, // 365 days
maxAge: config.hsts.maxAgeSeconds, includeSubdomains: true,
includeSubdomains: config.hsts.includeSubdomains, preload: true
preload: config.hsts.preload }));
}))
} else if (config.useSSL) {
logger.info('Consider enabling HSTS for extra security:')
logger.info('https://en.wikipedia.org/wiki/HTTP_Strict_Transport_Security')
}
// Add referrer policy to improve privacy
app.use(
helmet.referrerPolicy({
policy: 'same-origin'
})
)
// Generate a random nonce per request, for CSP with inline scripts
app.use(csp.addNonceToLocals)
// use Content-Security-Policy to limit XSS, dangerous plugins, etc.
// https://helmetjs.github.io/docs/csp/
if (config.csp.enable) {
app.use(helmet.contentSecurityPolicy({
directives: csp.computeDirectives()
}))
} else {
logger.info('Content-Security-Policy is disabled. This may be a security risk.')
}
i18n.configure({ i18n.configure({
locales: ['en', 'zh-CN', 'zh-TW', 'fr', 'de', 'ja', 'es', 'ca', 'el', 'pt', 'it', 'tr', 'ru', 'nl', 'hr', 'pl', 'uk', 'hi', 'sv', 'eo', 'da', 'ko', 'id', 'sr', 'vi'], locales: ['en', 'zh', 'fr', 'de', 'ja', 'es', 'el', 'pt', 'it', 'tr', 'ru', 'nl', 'hr', 'pl', 'uk', 'hi', 'sv', 'eo'],
cookie: 'locale', cookie: 'locale',
indent: ' ', // this is the style poeditor.com exports it, this creates less churn directory: __dirname + '/locales'
directory: path.join(__dirname, '/locales'), });
updateFiles: config.updateI18nFiles
})
app.use(cookieParser()) app.use(cookieParser());
app.use(i18n.init) app.use(i18n.init);
// routes without sessions // routes without sessions
// static files // static files
app.use('/', express.static(path.join(__dirname, '/public'), { maxAge: config.staticCacheTime, index: false })) app.use('/', express.static(__dirname + '/public', { maxAge: config.staticcachetime }));
app.use('/docs', express.static(path.resolve(__dirname, config.docsPath), { maxAge: config.staticCacheTime }))
app.use('/uploads', express.static(path.resolve(__dirname, config.uploadsPath), { maxAge: config.staticCacheTime }))
app.use('/default.md', express.static(path.resolve(__dirname, config.defaultNotePath), { maxAge: config.staticCacheTime }))
// session //session
app.use(session({ app.use(session({
name: config.sessionName, name: config.sessionname,
secret: config.sessionSecret, secret: config.sessionsecret,
resave: false, // don't save session if unmodified resave: false, //don't save session if unmodified
saveUninitialized: true, // always create session to ensure the origin saveUninitialized: true, //always create session to ensure the origin
rolling: true, // reset maxAge on every response rolling: true, // reset maxAge on every response
cookie: { cookie: {
maxAge: config.sessionLife maxAge: config.sessionlife
}, },
store: sessionStore store: sessionStore
})) }));
// session resumption // session resumption
var tlsSessionStore = {} var tlsSessionStore = {};
server.on('newSession', function (id, data, cb) { server.on('newSession', function (id, data, cb) {
tlsSessionStore[id.toString('hex')] = data tlsSessionStore[id.toString('hex')] = data;
cb() cb();
}) });
server.on('resumeSession', function (id, cb) { server.on('resumeSession', function (id, cb) {
cb(null, tlsSessionStore[id.toString('hex')] || null) cb(null, tlsSessionStore[id.toString('hex')] || null);
}) });
// middleware which blocks requests when we're too busy //middleware which blocks requests when we're too busy
app.use(require('./lib/web/middleware/tooBusy')) app.use(function (req, res, next) {
if (toobusy()) {
response.errorServiceUnavailable(res);
} else {
next();
}
});
app.use(flash()) app.use(flash());
// passport //passport
app.use(passport.initialize()) app.use(passport.initialize());
app.use(passport.session()) app.use(passport.session());
//serialize and deserialize
passport.serializeUser(function (user, done) {
logger.info('serializeUser: ' + user.id);
return done(null, user.id);
});
passport.deserializeUser(function (id, done) {
models.User.findOne({
where: {
id: id
}
}).then(function (user) {
logger.info('deserializeUser: ' + user.id);
return done(null, user);
}).catch(function (err) {
logger.error(err);
return done(err, null);
});
});
// check uri is valid before going further // check uri is valid before going further
app.use(require('./lib/web/middleware/checkURIValid')) app.use(function(req, res, next) {
try {
decodeURIComponent(req.path);
} catch (err) {
logger.error(err);
return response.errorBadRequest(res);
}
next();
});
// redirect url without trailing slashes // redirect url without trailing slashes
app.use(require('./lib/web/middleware/redirectWithoutTrailingSlashes')) app.use(function(req, res, next) {
app.use(require('./lib/web/middleware/codiMDVersion')) if ("GET" == req.method && req.path.substr(-1) == '/' && req.path.length > 1) {
var query = req.url.slice(req.path.length);
var urlpath = req.path.slice(0, -1);
var serverurl = config.serverurl;
if (config.urlpath) serverurl = serverurl.slice(0, -(config.urlpath.length + 1));
res.redirect(301, serverurl + urlpath + query);
} else {
next();
}
});
// routes need sessions // routes need sessions
// template files //template files
app.set('views', config.viewPath) app.set('views', __dirname + '/public/views');
// set render engine //set render engine
app.engine('ejs', ejs.renderFile) app.engine('ejs', ejs.renderFile);
// set view engine //set view engine
app.set('view engine', 'ejs') app.set('view engine', 'ejs');
// set generally available variables for all views //get index
app.locals.useCDN = config.useCDN app.get("/", response.showIndex);
app.locals.serverURL = config.serverURL //get 403 forbidden
app.locals.sourceURL = config.sourceURL app.get("/403", function (req, res) {
app.locals.allowAnonymous = config.allowAnonymous response.errorForbidden(res);
app.locals.allowAnonymousEdits = config.allowAnonymousEdits });
app.locals.allowPDFExport = config.allowPDFExport //get 404 not found
app.locals.authProviders = { app.get("/404", function (req, res) {
facebook: config.isFacebookEnable, response.errorNotFound(res);
twitter: config.isTwitterEnable, });
github: config.isGitHubEnable, //get 500 internal error
gitlab: config.isGitLabEnable, app.get("/500", function (req, res) {
mattermost: config.isMattermostEnable, response.errorInternalError(res);
dropbox: config.isDropboxEnable, });
google: config.isGoogleEnable, //get status
ldap: config.isLDAPEnable, app.get("/status", function (req, res, next) {
ldapProviderName: config.ldap.providerName, realtime.getStatus(function (data) {
saml: config.isSAMLEnable, res.set({
oauth2: config.isOAuth2Enable, 'Cache-Control': 'private', // only cache by client
oauth2ProviderName: config.oauth2.providerName, 'X-Robots-Tag': 'noindex, nofollow' // prevent crawling
openID: config.isOpenIDEnable, });
email: config.isEmailEnable, res.send(data);
allowEmailRegister: config.allowEmailRegister });
});
//get status
app.get("/temp", function (req, res) {
var host = req.get('host');
if (config.alloworigin.indexOf(host) == -1)
response.errorForbidden(res);
else {
var tempid = req.query.tempid;
if (!tempid)
response.errorForbidden(res);
else {
models.Temp.findOne({
where: {
id: tempid
}
}).then(function (temp) {
if (!temp)
response.errorNotFound(res);
else {
res.header("Access-Control-Allow-Origin", "*");
res.send({
temp: temp.data
});
temp.destroy().catch(function (err) {
if (err)
logger.error('remove temp failed: ' + err);
});
}
}).catch(function (err) {
logger.error(err);
return response.errorInternalError(res);
});
}
}
});
//post status
app.post("/temp", urlencodedParser, function (req, res) {
var host = req.get('host');
if (config.alloworigin.indexOf(host) == -1)
response.errorForbidden(res);
else {
var data = req.body.data;
if (!data)
response.errorForbidden(res);
else {
if (config.debug)
logger.info('SERVER received temp from [' + host + ']: ' + req.body.data);
models.Temp.create({
data: data
}).then(function (temp) {
if (temp) {
res.header("Access-Control-Allow-Origin", "*");
res.send({
status: 'ok',
id: temp.id
});
} else
response.errorInternalError(res);
}).catch(function (err) {
logger.error(err);
return response.errorInternalError(res);
});
}
}
});
function setReturnToFromReferer(req) {
var referer = req.get('referer');
if (!req.session) req.session = {};
req.session.returnTo = referer;
} }
// Export/Import menu items //facebook auth
app.locals.enableDropBoxSave = config.isDropboxEnable if (config.facebook) {
app.locals.enableGitHubGist = config.isGitHubEnable app.get('/auth/facebook', function (req, res, next) {
app.locals.enableGitlabSnippets = config.isGitlabSnippetsEnable setReturnToFromReferer(req);
passport.authenticate('facebook')(req, res, next);
});
//facebook auth callback
app.get('/auth/facebook/callback',
passport.authenticate('facebook', {
successReturnToOrRedirect: config.serverurl + '/',
failureRedirect: config.serverurl + '/'
}));
}
//twitter auth
if (config.twitter) {
app.get('/auth/twitter', function (req, res, next) {
setReturnToFromReferer(req);
passport.authenticate('twitter')(req, res, next);
});
//twitter auth callback
app.get('/auth/twitter/callback',
passport.authenticate('twitter', {
successReturnToOrRedirect: config.serverurl + '/',
failureRedirect: config.serverurl + '/'
}));
}
//github auth
if (config.github) {
app.get('/auth/github', function (req, res, next) {
setReturnToFromReferer(req);
passport.authenticate('github')(req, res, next);
});
//github auth callback
app.get('/auth/github/callback',
passport.authenticate('github', {
successReturnToOrRedirect: config.serverurl + '/',
failureRedirect: config.serverurl + '/'
}));
//github callback actions
app.get('/auth/github/callback/:noteId/:action', response.githubActions);
}
//gitlab auth
if (config.gitlab) {
app.get('/auth/gitlab', function (req, res, next) {
setReturnToFromReferer(req);
passport.authenticate('gitlab')(req, res, next);
});
//gitlab auth callback
app.get('/auth/gitlab/callback',
passport.authenticate('gitlab', {
successReturnToOrRedirect: config.serverurl + '/',
failureRedirect: config.serverurl + '/'
}));
//gitlab callback actions
app.get('/auth/gitlab/callback/:noteId/:action', response.gitlabActions);
}
//dropbox auth
if (config.dropbox) {
app.get('/auth/dropbox', function (req, res, next) {
setReturnToFromReferer(req);
passport.authenticate('dropbox-oauth2')(req, res, next);
});
//dropbox auth callback
app.get('/auth/dropbox/callback',
passport.authenticate('dropbox-oauth2', {
successReturnToOrRedirect: config.serverurl + '/',
failureRedirect: config.serverurl + '/'
}));
}
//google auth
if (config.google) {
app.get('/auth/google', function (req, res, next) {
setReturnToFromReferer(req);
passport.authenticate('google', { scope: ['profile'] })(req, res, next);
});
//google auth callback
app.get('/auth/google/callback',
passport.authenticate('google', {
successReturnToOrRedirect: config.serverurl + '/',
failureRedirect: config.serverurl + '/'
}));
}
// ldap auth
if (config.ldap) {
app.post('/auth/ldap', urlencodedParser, function (req, res, next) {
if (!req.body.username || !req.body.password) return response.errorBadRequest(res);
setReturnToFromReferer(req);
passport.authenticate('ldapauth', {
successReturnToOrRedirect: config.serverurl + '/',
failureRedirect: config.serverurl + '/',
failureFlash: true
})(req, res, next);
});
}
// email auth
if (config.email) {
if (config.allowemailregister)
app.post('/register', urlencodedParser, function (req, res, next) {
if (!req.body.email || !req.body.password) return response.errorBadRequest(res);
if (!validator.isEmail(req.body.email)) return response.errorBadRequest(res);
models.User.findOrCreate({
where: {
email: req.body.email
},
defaults: {
password: req.body.password
}
}).spread(function (user, created) {
if (user) {
if (created) {
if (config.debug) logger.info('user registered: ' + user.id);
req.flash('info', "You've successfully registered, please signin.");
} else {
if (config.debug) logger.info('user found: ' + user.id);
req.flash('error', "This email has been used, please try another one.");
}
return res.redirect(config.serverurl + '/');
}
req.flash('error', "Failed to register your account, please try again.");
return res.redirect(config.serverurl + '/');
}).catch(function (err) {
logger.error('auth callback failed: ' + err);
return response.errorInternalError(res);
});
});
app.use(require('./lib/web/baseRouter')) app.post('/login', urlencodedParser, function (req, res, next) {
app.use(require('./lib/web/statusRouter')) if (!req.body.email || !req.body.password) return response.errorBadRequest(res);
app.use(require('./lib/web/auth')) if (!validator.isEmail(req.body.email)) return response.errorBadRequest(res);
app.use(require('./lib/web/historyRouter')) setReturnToFromReferer(req);
app.use(require('./lib/web/userRouter')) passport.authenticate('local', {
app.use(require('./lib/web/imageRouter')) successReturnToOrRedirect: config.serverurl + '/',
app.use(require('./lib/web/noteRouter')) failureRedirect: config.serverurl + '/',
failureFlash: 'Invalid email or password.'
})(req, res, next);
});
}
//logout
app.get('/logout', function (req, res) {
if (config.debug && req.isAuthenticated())
logger.info('user logout: ' + req.user.id);
req.logout();
res.redirect(config.serverurl + '/');
});
var history = require("./lib/history.js");
//get history
app.get('/history', history.historyGet);
//post history
app.post('/history', urlencodedParser, history.historyPost);
//post history by note id
app.post('/history/:noteId', urlencodedParser, history.historyPost);
//delete history
app.delete('/history', history.historyDelete);
//delete history by note id
app.delete('/history/:noteId', history.historyDelete);
//get me info
app.get('/me', function (req, res) {
if (req.isAuthenticated()) {
models.User.findOne({
where: {
id: req.user.id
}
}).then(function (user) {
if (!user)
return response.errorNotFound(res);
var profile = models.User.getProfile(user);
res.send({
status: 'ok',
id: req.user.id,
name: profile.name,
photo: profile.photo
});
}).catch(function (err) {
logger.error('read me failed: ' + err);
return response.errorInternalError(res);
});
} else {
res.send({
status: 'forbidden'
});
}
});
// response not found if no any route matches //upload image
app.post('/uploadimage', function (req, res) {
var form = new formidable.IncomingForm();
form.keepExtensions = true;
if (config.imageUploadType === 'filesystem') {
form.uploadDir = "public/uploads";
}
form.parse(req, function (err, fields, files) {
if (err || !files.image || !files.image.path) {
response.errorForbidden(res);
} else {
if (config.debug)
logger.info('SERVER received uploadimage: ' + JSON.stringify(files.image));
try {
switch (config.imageUploadType) {
case 'filesystem':
res.send({
link: url.resolve(config.serverurl + '/', files.image.path.match(/^public\/(.+$)/)[1])
});
break;
case 's3':
var AWS = require('aws-sdk');
var awsConfig = new AWS.Config(config.s3);
var s3 = new AWS.S3(awsConfig);
fs.readFile(files.image.path, function (err, buffer) {
var params = {
Bucket: config.s3bucket,
Key: path.join('uploads', path.basename(files.image.path)),
Body: buffer
};
s3.putObject(params, function (err, data) {
if (err) {
logger.error(err);
res.status(500).end('upload image error');
} else {
res.send({
link: `https://s3-${config.s3.region}.amazonaws.com/${config.s3bucket}/${params.Key}`
});
}
});
});
break;
case 'imgur':
default:
imgur.setClientId(config.imgur.clientID);
imgur.uploadFile(files.image.path)
.then(function (json) {
if (config.debug)
logger.info('SERVER uploadimage success: ' + JSON.stringify(json));
res.send({
link: json.data.link.replace(/^http:\/\//i, 'https://')
});
})
.catch(function (err) {
logger.error(err);
return res.status(500).end('upload image error');
});
break;
}
} catch (err) {
logger.error(err);
return res.status(500).end('upload image error');
}
}
});
});
//get new note
app.get("/new", response.newNote);
//get publish note
app.get("/s/:shortid", response.showPublishNote);
//publish note actions
app.get("/s/:shortid/:action", response.publishNoteActions);
//get publish slide
app.get("/p/:shortid", response.showPublishSlide);
//publish slide actions
app.get("/p/:shortid/:action", response.publishSlideActions);
//get note by id
app.get("/:noteId", response.showNote);
//note actions
app.get("/:noteId/:action", response.noteActions);
//note actions with action id
app.get("/:noteId/:action/:actionId", response.noteActions);
// response not found if no any route matches
app.get('*', function (req, res) { app.get('*', function (req, res) {
response.errorNotFound(res) response.errorNotFound(res);
}) });
// socket.io secure //socket.io secure
io.use(realtime.secure) io.use(realtime.secure);
// socket.io auth //socket.io auth
io.use(passportSocketIo.authorize({ io.use(passportSocketIo.authorize({
cookieParser: cookieParser, cookieParser: cookieParser,
key: config.sessionName, key: config.sessionname,
secret: config.sessionSecret, secret: config.sessionsecret,
store: sessionStore, store: sessionStore,
success: realtime.onAuthorizeSuccess, success: realtime.onAuthorizeSuccess,
fail: realtime.onAuthorizeFail fail: realtime.onAuthorizeFail
})) }));
// socket.io heartbeat //socket.io heartbeat
io.set('heartbeat interval', config.heartbeatInterval) io.set('heartbeat interval', config.heartbeatinterval);
io.set('heartbeat timeout', config.heartbeatTimeout) io.set('heartbeat timeout', config.heartbeattimeout);
// socket.io connection //socket.io connection
io.sockets.on('connection', realtime.connection) io.sockets.on('connection', realtime.connection);
// listen //listen
function startListen () { function startListen() {
var address server.listen(config.port, function () {
var listenCallback = function () { var schema = config.usessl ? 'HTTPS' : 'HTTP';
var schema = config.useSSL ? 'HTTPS' : 'HTTP' logger.info('%s Server listening at port %d', schema, config.port);
logger.info('%s Server listening at %s', schema, address) config.maintenance = false;
realtime.maintenance = false });
}
// use unix domain socket if 'path' is specified
if (config.path) {
address = config.path
server.listen(config.path, listenCallback)
} else {
address = config.host + ':' + config.port
server.listen(config.port, config.host, listenCallback)
}
} }
// sync db then start listen // sync db then start listen
models.sequelize.sync().then(function () { models.sequelize.sync().then(function () {
// check if realtime is ready // check if realtime is ready
if (realtime.isReady()) { if (realtime.isReady()) {
models.Revision.checkAllNotesRevision(function (err, notes) { models.Revision.checkAllNotesRevision(function (err, notes) {
if (err) throw new Error(err) if (err) throw new Error(err);
if (!notes || notes.length <= 0) return startListen() if (!notes || notes.length <= 0) return startListen();
}) });
} else { } else {
throw new Error('server still not ready after db synced') throw new Error('server still not ready after db synced');
} }
}) });
// log uncaught exception // log uncaught exception
process.on('uncaughtException', function (err) { process.on('uncaughtException', function (err) {
logger.error('An uncaught exception has occurred.') logger.error('An uncaught exception has occurred.');
logger.error(err) logger.error(err);
logger.error('Process will exit now.') logger.error('Process will exit now.');
process.exit(1) process.exit(1);
}) });
// install exit handler // install exit handler
function handleTermSignals () { function handleTermSignals() {
logger.info('CodiMD has been killed by signal, try to exit gracefully...') config.maintenance = true;
realtime.maintenance = true // disconnect all socket.io clients
// disconnect all socket.io clients Object.keys(io.sockets.sockets).forEach(function (key) {
Object.keys(io.sockets.sockets).forEach(function (key) { var socket = io.sockets.sockets[key];
var socket = io.sockets.sockets[key] // notify client server going into maintenance status
// notify client server going into maintenance status socket.emit('maintenance');
socket.emit('maintenance') setTimeout(function () {
setTimeout(function () { socket.disconnect(true);
socket.disconnect(true) }, 0);
}, 0) });
}) var checkCleanTimer = setInterval(function () {
if (config.path) { if (realtime.isReady()) {
fs.unlink(config.path) models.Revision.checkAllNotesRevision(function (err, notes) {
} if (err) return logger.error(err);
var checkCleanTimer = setInterval(function () { if (!notes || notes.length <= 0) {
if (realtime.isReady()) { clearInterval(checkCleanTimer);
models.Revision.checkAllNotesRevision(function (err, notes) { return process.exit(0);
if (err) return logger.error(err) }
if (!notes || notes.length <= 0) { });
clearInterval(checkCleanTimer)
return process.exit(0)
} }
}) }, 100);
}
}, 100)
} }
process.on('SIGINT', handleTermSignals) process.on('SIGINT', handleTermSignals);
process.on('SIGTERM', handleTermSignals) process.on('SIGTERM', handleTermSignals);
process.on('SIGQUIT', handleTermSignals)

111
app.json
View file

@ -1,16 +1,23 @@
{ {
"name": "CodiMD", "name": "HackMD",
"description": "Realtime collaborative markdown notes on all platforms", "description": "Realtime collaborative markdown notes on all platforms",
"keywords": [ "keywords": [
"Collaborative", "Collaborative",
"Markdown", "Markdown",
"Notes" "Notes"
], ],
"website": "https://codimd.org", "website": "https://hackmd.io",
"repository": "https://github.com/codimd/server", "repository": "https://github.com/hackmdio/hackmd",
"logo": "https://github.com/codimd/server/raw/master/public/codimd-icon-1024.png", "logo": "https://github.com/hackmdio/hackmd/raw/master/public/hackmd-icon-1024.png",
"success_url": "/", "success_url": "/",
"scripts": {
"postdeploy": "./node_modules/.bin/sequelize db:migrate"
},
"env": { "env": {
"BUILD_ASSETS": {
"description": "Our build script variable",
"value": "true"
},
"NPM_CONFIG_PRODUCTION": { "NPM_CONFIG_PRODUCTION": {
"description": "Let npm also install development build tool", "description": "Let npm also install development build tool",
"value": "false" "value": "false"
@ -19,129 +26,99 @@
"description": "Specify database type. See sequelize available databases. Default using postgres", "description": "Specify database type. See sequelize available databases. Default using postgres",
"value": "postgres" "value": "postgres"
}, },
"CMD_SESSION_SECRET": {
"description": "Secret used to secure session cookies.", "HMD_DOMAIN": {
"required": false
},
"CMD_HSTS_ENABLE": {
"description": "whether to also use HSTS if HTTPS is enabled",
"required": false
},
"CMD_HSTS_MAX_AGE": {
"description": "max duration, in seconds, to tell clients to keep HSTS status",
"required": false
},
"CMD_HSTS_INCLUDE_SUBDOMAINS": {
"description": "whether to tell clients to also regard subdomains as HSTS hosts",
"required": false
},
"CMD_HSTS_PRELOAD": {
"description": "whether to allow at all adding of the site to HSTS preloads (e.g. in browsers)",
"required": false
},
"CMD_DOMAIN": {
"description": "domain name", "description": "domain name",
"required": false "required": false
}, },
"CMD_URL_PATH": { "HMD_URL_PATH": {
"description": "sub url path, like `www.example.com/<URL_PATH>`", "description": "sub url path, like `www.example.com/<URL_PATH>`",
"required": false "required": false
}, },
"CMD_ALLOW_ORIGIN": { "HMD_PORT": {
"description": "web app port",
"required": false,
"value": "80"
},
"HMD_ALLOW_ORIGIN": {
"description": "domain name whitelist (use comma to separate)", "description": "domain name whitelist (use comma to separate)",
"required": false, "required": false,
"value": "localhost" "value": "localhost"
}, },
"CMD_PROTOCOL_USESSL": { "HMD_PROTOCOL_USESSL": {
"description": "set to use ssl protocol for resources path (only applied when domain is set)", "description": "set to use ssl protocol for resources path (only applied when domain is set)",
"required": false "required": false
}, },
"CMD_URL_ADDPORT": { "HMD_URL_ADDPORT": {
"description": "set to add port on callback url (port 80 or 443 won't applied) (only applied when domain is set)", "description": "set to add port on callback url (port 80 or 443 won't applied) (only applied when domain is set)",
"required": false "required": false
}, },
"CMD_FACEBOOK_CLIENTID": { "HMD_FACEBOOK_CLIENTID": {
"description": "Facebook API client id", "description": "Facebook API client id",
"required": false "required": false
}, },
"CMD_FACEBOOK_CLIENTSECRET": { "HMD_FACEBOOK_CLIENTSECRET": {
"description": "Facebook API client secret", "description": "Facebook API client secret",
"required": false "required": false
}, },
"CMD_TWITTER_CONSUMERKEY": { "HMD_TWITTER_CONSUMERKEY": {
"description": "Twitter API consumer key", "description": "Twitter API consumer key",
"required": false "required": false
}, },
"CMD_TWITTER_CONSUMERSECRET": { "HMD_TWITTER_CONSUMERSECRET": {
"description": "Twitter API consumer secret", "description": "Twitter API consumer secret",
"required": false "required": false
}, },
"CMD_GITHUB_CLIENTID": { "HMD_GITHUB_CLIENTID": {
"description": "GitHub API client id", "description": "GitHub API client id",
"required": false "required": false
}, },
"CMD_GITHUB_CLIENTSECRET": { "HMD_GITHUB_CLIENTSECRET": {
"description": "GitHub API client secret", "description": "GitHub API client secret",
"required": false "required": false
}, },
"CMD_GITLAB_BASEURL": { "HMD_GITLAB_BASEURL": {
"description": "GitLab authentication endpoint, set to use other endpoint than GitLab.com (optional)", "description": "GitLab authentication endpoint, set to use other endpoint than GitLab.com (optional)",
"required": false "required": false
}, },
"CMD_GITLAB_CLIENTID": { "HMD_GITLAB_CLIENTID": {
"description": "GitLab API client id", "description": "GitLab API client id",
"required": false "required": false
}, },
"CMD_GITLAB_CLIENTSECRET": { "HMD_GITLAB_CLIENTSECRET": {
"description": "GitLab API client secret", "description": "GitLab API client secret",
"required": false "required": false
}, },
"CMD_GITLAB_SCOPE": { "HMD_DROPBOX_CLIENTID": {
"description": "GitLab API client scope (optional)",
"required": false
},
"CMD_MATTERMOST_BASEURL": {
"description": "Mattermost authentication endpoint",
"required": false
},
"CMD_MATTERMOST_CLIENTID": {
"description": "Mattermost API client id",
"required": false
},
"CMD_MATTERMOST_CLIENTSECRET": {
"description": "Mattermost API client secret",
"required": false
},
"CMD_DROPBOX_CLIENTID": {
"description": "Dropbox API client id", "description": "Dropbox API client id",
"required": false "required": false
}, },
"CMD_DROPBOX_CLIENTSECRET": { "HMD_DROPBOX_CLIENTSECRET": {
"description": "Dropbox API client secret", "description": "Dropbox API client secret",
"required": false "required": false
}, },
"CMD_DROPBOX_APP_KEY": { "HMD_GOOGLE_CLIENTID": {
"description": "Dropbox app key (for import/export)",
"required": false
},
"CMD_GOOGLE_CLIENTID": {
"description": "Google API client id", "description": "Google API client id",
"required": false "required": false
}, },
"CMD_GOOGLE_CLIENTSECRET": { "HMD_GOOGLE_CLIENTSECRET": {
"description": "Google API client secret", "description": "Google API client secret",
"required": false "required": false
}, },
"CMD_IMGUR_CLIENTID": { "HMD_IMGUR_CLIENTID": {
"description": "Imgur API client id", "description": "Imgur API client id",
"required": false "required": false
},
"CMD_ALLOW_PDF_EXPORT": {
"description": "Enable or disable PDF exports",
"required": false
} }
}, },
"addons": [ "addons": [
"heroku-postgresql" "heroku-postgresql"
],
"buildpacks": [
{
"url": "https://github.com/alex88/heroku-buildpack-vips"
},
{
"url": "https://github.com/heroku/heroku-buildpack-nodejs"
}
] ]
} }

View file

@ -2,7 +2,11 @@
set -e set -e
cat << EOF > .sequelizerc if [ "$BUILD_ASSETS" = true ]; then
BUILD_ASSETS=false npm install
# setup config files
cat << EOF > .sequelizerc
var path = require('path'); var path = require('path');
module.exports = { module.exports = {
@ -15,7 +19,7 @@ module.exports = {
EOF EOF
cat << EOF > config.json cat << EOF > config.json
{ {
"production": { "production": {
@ -23,3 +27,7 @@ cat << EOF > config.json
} }
EOF EOF
# build app
npm run build
fi

View file

@ -1,119 +0,0 @@
#!/usr/bin/env node
// First configure the logger so it does not spam the console
const logger = require("../lib/logger");
logger.transports.forEach((transport) => transport.level = "warning")
const models = require("../lib/models/");
const readline = require("readline-sync");
const minimist = require("minimist");
function showUsage(tips) {
console.log(`${tips}
Command-line utility to create users for email-signin.
Usage: bin/manage_users [--pass password] (--add | --del) user-email
Options:
--add Add user with the specified user-email
--del Delete user with specified user-email
--reset Reset user password with specified user-email
--pass Use password from cmdline rather than prompting
`);
process.exit(1);
}
function getPass(argv, action) {
// Find whether we use cmdline or prompt password
if(typeof argv["pass"] !== 'string') {
return readline.question(`Password for ${argv[action]}:`, {hideEchoBack: true});
}
console.log("Using password from commandline...");
return argv["pass"];
}
// Using an async function to be able to use await inside
async function createUser(argv) {
const existing_user = await models.User.findOne({where: {email: argv["add"]}});
// Cannot create already-existing users
if(existing_user != undefined) {
console.log(`User with e-mail ${existing_user.email} already exists! Aborting ...`);
process.exit(1);
}
const pass = getPass(argv, "add");
// Lets try to create, and check success
const ref = await models.User.create({email: argv["add"], password: pass});
if(ref == undefined) {
console.log(`Could not create user with email ${argv["add"]}`);
process.exit(1);
} else
console.log(`Created user with email ${argv["add"]}`);
}
// Using an async function to be able to use await inside
async function deleteUser(argv) {
// Cannot delete non-existing users
const existing_user = await models.User.findOne({where: {email: argv["del"]}});
if(existing_user === undefined) {
console.log(`User with e-mail ${argv["del"]} does not exist, cannot delete`);
process.exit(1);
}
// Sadly .destroy() does not return any success value with all
// backends. See sequelize #4124
await existing_user.destroy();
console.log(`Deleted user ${argv["del"]} ...`);
}
// Using an async function to be able to use await inside
async function resetUser(argv) {
const existing_user = await models.User.findOne({where: {email: argv["reset"]}});
// Cannot reset non-existing users
if(existing_user == undefined) {
console.log(`User with e-mail ${argv["reset"]} does not exist, cannot reset`);
process.exit(1);
}
const pass = getPass(argv, "reset");
// set password and save
existing_user.password = pass;
await existing_user.save();
console.log(`User with email ${argv["reset"]} password has been reset`);
}
const options = {
add: createUser,
del: deleteUser,
reset: resetUser,
};
// Perform commandline-parsing
const argv = minimist(process.argv.slice(2));
const keys = Object.keys(options);
const opts = keys.filter((key) => argv[key] !== undefined);
const action = opts[0];
// Check for options missing
if (opts.length === 0) {
showUsage(`You did not specify either ${keys.map((key) => `--${key}`).join(' or ')}!`);
}
// Check if both are specified
if (opts.length > 1) {
showUsage(`You cannot ${opts.join(' and ')} at the same time!`);
}
// Check if not string
if (typeof argv[action] !== 'string') {
showUsage(`You must follow an email after --${action}`);
}
// Call respective processing functions
options[action](argv).then(function() {
process.exit(0);
});
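For reference, the usage text above corresponds to invocations like these (e-mail address and password are placeholders):
```bash
# add an email-signin user (prompts for the password unless --pass is given)
bin/manage_users --add user@example.com

# reset a password non-interactively
bin/manage_users --pass 'new-password' --reset user@example.com

# delete the user again
bin/manage_users --del user@example.com
```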

View file

@ -5,15 +5,14 @@ set -e
# run command at repo root # run command at repo root
CURRENT_PATH=$PWD CURRENT_PATH=$PWD
if [ -d .git ]; then if [ -d .git ]; then
cd "$(git rev-parse --show-toplevel)" cd $(git rev-parse --show-toplevel)
fi fi
if ! type yarn > /dev/null if ! type npm > /dev/null
then then
cat << EOF cat << EOF
yarn is not installed, please install Node.js, npm and yarn. npm is not installed, please install Node.js and npm.
Read more on Node.js official website: https://nodejs.org Read more on Node.js official website: https://nodejs.org
And for yarn package manager at: https://yarnpkg.com/en/
Setup will not be run Setup will not be run
EOF EOF
exit 0 exit 0
@ -28,20 +27,20 @@ if [ ! -f .sequelizerc ]; then
cp .sequelizerc.example .sequelizerc cp .sequelizerc.example .sequelizerc
fi fi
echo "install packages" echo "install npm packages"
yarn install --pure-lockfile BUILD_ASSETS=false npm install
yarn install --production=false --pure-lockfile
cat << EOF cat << EOF
Edit the following config file to setup CodiMD server and client. Edit the following config file to setup hackmd server and client.
Read more info at https://github.com/codimd/server#configuration-files Read more info at https://github.com/hackmdio/hackmd#configuration-files
* config.json -- CodiMD config * config.json -- server config
* public/js/config.js -- client config
* .sequelizerc -- db config * .sequelizerc -- db config
EOF EOF
# change directory back # change directory back
cd "$CURRENT_PATH" cd $CURRENT_PATH

View file

@ -6,37 +6,17 @@
} }
}, },
"development": { "development": {
"loglevel": "debug",
"hsts": {
"enable": false
},
"db": { "db": {
"dialect": "sqlite", "dialect": "sqlite",
"storage": "./db.codimd.sqlite" "storage": "./db.hackmd.sqlite"
} }
}, },
"production": { "production": {
"domain": "localhost", "domain": "localhost",
"loglevel": "info",
"hsts": {
"enable": true,
"maxAgeSeconds": 31536000,
"includeSubdomains": true,
"preload": true
},
"csp": {
"enable": true,
"directives": {
},
"upgradeInsecureRequests": "auto",
"addDefaults": true,
"addDisqus": true,
"addGoogleAnalytics": true
},
"db": { "db": {
"username": "", "username": "",
"password": "", "password": "",
"database": "codimd", "database": "hackmd",
"host": "localhost", "host": "localhost",
"port": "5432", "port": "5432",
"dialect": "postgres" "dialect": "postgres"
@ -54,13 +34,6 @@
"clientSecret": "change this" "clientSecret": "change this"
}, },
"gitlab": { "gitlab": {
"baseURL": "change this",
"clientID": "change this",
"clientSecret": "change this",
"scope": "use 'read_user' scope for auth user only or remove this property if you need gitlab snippet import/export support (will result to be default scope 'api')",
"version": "use 'v4' if gitlab version > 11, 'v3' otherwise. Default to 'v4'"
},
"mattermost": {
"baseURL": "change this", "baseURL": "change this",
"clientID": "change this", "clientID": "change this",
"clientSecret": "change this" "clientSecret": "change this"
@ -79,50 +52,16 @@
"url": "ldap://change_this", "url": "ldap://change_this",
"bindDn": null, "bindDn": null,
"bindCredentials": null, "bindCredentials": null,
"tokenSecret": "change this",
"searchBase": "change this", "searchBase": "change this",
"searchFilter": "change this", "searchFilter": "change this",
"searchAttributes": ["change this"], "searchAttributes": "change this",
"usernameField": "change this e.g. cn",
"useridField": "change this e.g. uid",
"tlsOptions": { "tlsOptions": {
"changeme": "See https://nodejs.org/api/tls.html#tls_tls_connect_options_callback" "changeme": "See https://nodejs.org/api/tls.html#tls_tls_connect_options_callback"
} }
}, },
"saml": {
"idpSsoUrl": "change: authentication endpoint of IdP",
"idpCert": "change: certificate file path of IdP in PEM format",
"issuer": "change or delete: identity of the service provider (default: serverurl)",
"identifierFormat": "change or delete: name identifier format (default: 'urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress')",
"disableRequestedAuthnContext": "change or delete: true to allow any authentication method, false restricts to password authentication method (default: false)",
"groupAttribute": "change or delete: attribute name for group list (ex: memberOf)",
"requiredGroups": [ "change or delete: group names that allowed" ],
"externalGroups": [ "change or delete: group names that not allowed" ],
"attribute": {
"id": "change or delete this: attribute map for `id` (default: NameID)",
"username": "change or delete this: attribute map for `username` (default: NameID)",
"email": "change or delete this: attribute map for `email` (default: NameID)"
}
},
"imgur": { "imgur": {
"clientID": "change this" "clientID": "change this"
},
"minio": {
"accessKey": "change this",
"secretKey": "change this",
"endPoint": "change this",
"secure": true,
"port": 9000
},
"s3": {
"accessKeyId": "change this",
"secretAccessKey": "change this",
"region": "change this"
},
"s3bucket": "change this",
"azure":
{
"connectionString": "change this",
"container": "change this"
} }
} }
} }

View file

@ -1,154 +0,0 @@
Configuration Using Config file
===
You can choose to configure CodiMD with either a config file or with
[environment variables](configuration-env-vars.md). The config file is processed
in [`lib/config/index.js`](../lib/config/index.js) - so this is the first
place to look if anything is missing or not obvious from this document. The
default values are defined in [`lib/config/default.js`](../lib/config/default.js),
in case you wonder if you even need to override it.
Environment variables take precedence over configurations from the config files.
To get started, it is a good idea to take the `config.json.example` and copy it
to `config.json` before filling in your own details.
## Node.JS
| variables | example values | description |
| --------- | ------ | ----------- |
| `debug` | `true` or `false` | set debug mode, show more logs |
## CodiMD basics
| variables | example values | description |
| --------- | ------ | ----------- |
| `allowPDFExport` | `true` | Whether or not PDF export is offered. |
| `db` | `{ "dialect": "sqlite", "storage": "./db.codimd.sqlite" }` | set the db configs, [see more here](http://sequelize.readthedocs.org/en/latest/api/sequelize/) |
| `dbURL` | `mysql://localhost:3306/database` | set the db URL; if set, then db config (below) won't be applied |
| `forbiddenNoteIDs` | `['robots.txt']` | disallow creation of notes, even if `allowFreeUrl` is `true` |
| `loglevel` | `info` | Defines what kind of logs are provided to stdout. |
| `imageUploadType` | `imgur`, `s3`, `minio`, `azure`, `lutim` or `filesystem`(default) | Where to upload images. For S3, see our Image Upload Guides for [S3](guides/s3-image-upload.md) or [Minio](guides/minio-image-upload.md)|
| `sourceURL` | `https://github.com/codimd/server/tree/<current commit>` | Provides the link to the source code of CodiMD on the entry page (Please, make sure you change this when you run a modified version) |
| `staticCacheTime` | `1 * 24 * 60 * 60 * 1000` | static file cache time (milliseconds) |
| `tooBusyLag` | `70` | CPU time for one eventloop tick until node throttles connections. (milliseconds) |
| `heartbeatInterval` | `5000` | socket.io heartbeat interval |
| `heartbeatTimeout` | `10000` | socket.io heartbeat timeout |
| `documentMaxLength` | `100000` | note max length |
## CodiMD paths stuff
These are rarely used for various reasons.
| variables | example values | description |
| --------- | ------ | ----------- |
| `defaultNotePath` | `./public/default.md` | default note file path<sup>1</sup>, empty notes will be created with this template. |
| `dhParamPath` | `./cert/dhparam.pem` | SSL dhparam path<sup>1</sup> (only needed when you set `useSSL`) |
| `sslCAPath` | `['./cert/COMODORSAAddTrustCA.crt']` | SSL CA chain<sup>1</sup> (only needed when you set `useSSL`) |
| `sslCertPath` | `./cert/codimd_io.crt` | SSL cert path<sup>1</sup> (only needed when you set `useSSL`) |
| `sslKeyPath` | `./cert/client.key` | SSL key path<sup>1</sup> (only needed when you set `useSSL`) |
| `tmpPath` | `./tmp/` | temp directory path<sup>1</sup> |
| `docsPath` | `./public/docs` | docs directory path<sup>1</sup> |
| `viewPath` | `./public/views` | template directory path<sup>1</sup> |
| `uploadsPath` | `./public/uploads` | uploads directory<sup>1</sup> - needs to be persistent when you use imageUploadType `filesystem` |
## CodiMD Location
| variables | example values | description |
| --------- | ------ | ----------- |
| `domain` | `localhost` | domain name |
| `urlPath` | `codimd` | sub URL path, like `www.example.com/<urlpath>` |
| `host` | `localhost` | interface/ip to listen on |
| `port` | `80` | port to listen on |
| `path` | `/var/run/codimd.sock` | path to UNIX domain socket to listen on (if specified, `host` and `port` are ignored) |
| `protocolUseSSL` | `true` or `false` | set to use SSL protocol for resources path (only applied when domain is set) |
| `useSSL` | `true` or `false` | set to use SSL server (if `true`, will auto turn on `protocolUseSSL`) |
| `urlAddPort` | `true` or `false` | set to add port on callback URL (ports `80` or `443` won't be applied) (only applied when domain is set) |
| `allowOrigin` | `['localhost']` | domain name whitelist |
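As a sketch, a typical location block in `config.json` (keys as documented above, values purely illustrative) might be:

```json
{
  "production": {
    "domain": "codimd.example.com",
    "host": "localhost",
    "port": 80,
    "protocolUseSSL": true,
    "urlAddPort": false,
    "allowOrigin": ["localhost", "codimd.example.com"]
  }
}
```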
## CSP and HSTS
| variables | example values | description |
| --------- | ------ | ----------- |
| `hsts` | `{"enable": true, "maxAgeSeconds": 31536000, "includeSubdomains": true, "preload": true}` | [HSTS](https://en.wikipedia.org/wiki/HTTP_Strict_Transport_Security) options to use with HTTPS (default is the example value, max age is a year) |
| `csp` | `{"enable": true, "directives": {"scriptSrc": "trustworthy-scripts.example.com"}, "upgradeInsecureRequests": "auto", "addDefaults": true}` | Configures [Content Security Policy](https://helmetjs.github.io/docs/csp/). Directives are passed to Helmet - see [their documentation](https://helmetjs.github.io/docs/csp/) for more information on the format. Some defaults are added to the configured values so that the application doesn't break. To disable this behaviour, set `addDefaults` to `false`. Further, if `usecdn` is on, some CDN locations are allowed too. By default (`auto`), insecure (HTTP) requests are upgraded to HTTPS via CSP if `useSSL` is on. To change this behaviour, set `upgradeInsecureRequests` to either `true` or `false`. |
## Privacy and External Requests
| variables | example values | description |
| --------- | ------ | ----------- |
| `allowGravatar` | `true` or `false` | set to `false` to disable gravatar as profile picture source on your instance |
| `useCDN` | `true` or `false` | set to use CDN resources or not (default is `true`) |
## Users and Privileges
| variables | example values | description |
| --------- | ------ | ----------- |
| `allowAnonymous` | `true` or `false` | set to allow anonymous usage (default is `true`) |
| `allowAnonymousEdits` | `true` or `false` | if `allowAnonymous` is `true`: allow users to select `freely` permission, allowing guests to edit existing notes (default is `false`) |
| `allowFreeURL` | `true` or `false` | set to allow new note creation by accessing a nonexistent note URL |
| `defaultPermission` | `freely`, `editable`, `limited`, `locked`, `protected` or `private` | set notes default permission (only applied on signed users) |
| `sessionName` | `connect.sid` | cookie session name |
| `sessionLife` | `14 * 24 * 60 * 60 * 1000` | cookie session life |
| `sessionSecret` | `secret` | cookie session secret. If none is set, one will be randomly generated on each startup, meaning all your users will be logged out. |
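For example, a hedged sketch of these options in `config.json` (values illustrative) could be:

```json
{
  "production": {
    "allowAnonymous": true,
    "allowAnonymousEdits": false,
    "allowFreeURL": false,
    "defaultPermission": "editable",
    "sessionLife": 1209600000,
    "sessionSecret": "replace this with a long random string"
  }
}
```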
## Login methods
Most of these have never been documented for the config.json; feel free to expand them.
### Email (local account)
| variables | example values | description |
| --------- | ------ | ----------- |
| `email` | `true` or `false` | set to allow email signin |
| `allowEmailRegister` | `true` or `false` | set to allow email register (only applied when email is set, default is `true`. Note `bin/manage_users` might help you if registration is `false`.) |
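A minimal sketch for enabling email sign-in while leaving registration to `bin/manage_users` (illustrative only):

```json
{
  "production": {
    "email": true,
    "allowEmailRegister": false
  }
}
```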
### Dropbox Login
### Facebook Login
### GitHub Login
### GitLab Login
### Google Login
### LDAP Login
### Mattermost Login
### OAuth2 Login
| variables | example values | description |
| --------- | ------ | ----------- |
| `oauth2` | `{baseURL: ..., userProfileURL: ..., userProfileUsernameAttr: ..., userProfileDisplayNameAttr: ..., userProfileEmailAttr: ..., tokenURL: ..., authorizationURL: ..., clientID: ..., clientSecret: ...}` | An object detailing your OAuth2 provider. Refer to the [Mattermost](guides/auth/mattermost-self-hosted.md) or [Nextcloud](guides/auth/nextcloud.md) examples for more details!|
### SAML Login
### Twitter Login
## Upload Storage
Most of these have never been documented for the config.json; feel free to expand them.
### Amazon S3
| variables | example values | description |
| --------- | ------ | ----------- |
| `s3` | `{ "accessKeyId": "YOUR_S3_ACCESS_KEY_ID", "secretAccessKey": "YOUR_S3_ACCESS_KEY", "region": "YOUR_S3_REGION" }` | When `imageUploadType` is set to `s3`, you also need to set this key; check our [S3 Image Upload Guide](guides/s3-image-upload.md) |
| `s3bucket` | `YOUR_S3_BUCKET_NAME` | bucket name when `imageUploadType` is set to `s3` or `minio` |
### Azure Blob Storage
### imgur
### Minio
| variables | example values | description |
| --------- | ------ | ----------- |
| `minio` | `{ "accessKey": "YOUR_MINIO_ACCESS_KEY", "secretKey": "YOUR_MINIO_SECRET_KEY", "endPoint": "YOUR_MINIO_HOST", "port": 9000, "secure": true }` | When `imageUploadType` is set to `minio`, you need to set this key. Also check out our [Minio Image Upload Guide](guides/minio-image-upload.md) |
### Lutim
| variables | example values | description |
| --------- | ------ | ----------- |
|`lutim`| `{"url": "YOUR_LUTIM_URL"}`| When `imageUploadType` is set to `lutim`, you can set up the Lutim URL|
<sup>1</sup>: relative paths are based on CodiMD's base directory

View file

@ -1,256 +0,0 @@
Configuration Using Environment variables
===
You can choose to configure CodiMD with either a
[config file](configuration-config-file.md) or with environment variables.
Environment variables are processed in
[`lib/config/environment.js`](../lib/config/environment.js) - so this is the first
place to look if anything is missing or not obvious from this document. The
default values are defined in [`lib/config/default.js`](../lib/config/default.js),
in case you wonder if you even need to override it.
Environment variables take precedence over configurations from the config files.
They generally start with `CMD_` for our own options, but we also list
node-specific options you can configure this way.
## Node.JS
| variable | example value | description |
| -------- | ------------- | ----------- |
| `NODE_ENV` | `production` or `development` | set current environment (will apply corresponding settings in the `config.json`) |
| `DEBUG` | `true` or `false` | set debug mode; show more logs |
## CodiMD basics
`defaultNotePath` can't be set via environment variables.
| variable | example value | description |
| -------- | ------------- | ----------- |
| `CMD_ALLOW_PDF_EXPORT` | `true` or `false` | Enable or disable PDF exports |
| `CMD_CONFIG_FILE` | `/path/to/config.json` | optional override for the path to CodiMD's config file |
| `CMD_DB_URL` | `mysql://localhost:3306/database` | set the database URL |
| `CMD_LOGLEVEL` | `info`, `debug` ... | Defines what kind of logs are provided to stdout. |
| `CMD_FORBIDDEN_NOTE_IDS` | `'robots.txt'` | disallow creation of notes, even if `CMD_ALLOW_FREEURL` is `true` |
| `CMD_IMAGE_UPLOAD_TYPE` | `imgur`, `s3`, `minio`, `lutim` or `filesystem` | Where to upload images. For S3, see our Image Upload Guides for [S3](guides/s3-image-upload.md) or [Minio](guides/minio-image-upload.md), also there's a whole section on their respective env vars below. |
| `CMD_SOURCE_URL` | `https://github.com/codimd/server/tree/<current commit>` | Provides the link to the source code of CodiMD on the entry page (Please, make sure you change this when you run a modified version) |
| `CMD_TOOBUSY_LAG` | `70` | CPU time for one eventloop tick until node throttles connections. (milliseconds) |
## CodiMD Location
| variable | example value | description |
| -------- | ------------- | ----------- |
| `CMD_DOMAIN` | `codimd.org` | domain name |
| `CMD_URL_PATH` | `codimd` | If CodiMD is run from a subdirectory like `www.example.com/<urlpath>` |
| `CMD_HOST` | `localhost` | interface/ip to listen on |
| `CMD_PORT` | `80` | port to listen on |
| `CMD_PATH` | `/var/run/codimd.sock` | path to UNIX domain socket to listen on (if specified, `CMD_HOST` and `CMD_PORT` are ignored) |
| `CMD_PROTOCOL_USESSL` | `true` or `false` | set to use SSL protocol for resources path (only applied when domain is set) |
| `CMD_URL_ADDPORT` | `true` or `false` | set to add port on callback URL (ports `80` or `443` won't be applied) (only applied when domain is set) |
| `CMD_ALLOW_ORIGIN` | `localhost, codimd.org` | domain name whitelist (use comma to separate) |
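For instance, a minimal environment setup (a sketch with placeholder values, e.g. for a shell session or a service unit) could look like this:

```sh
export CMD_DOMAIN=codimd.example.com
export CMD_PORT=3000
export CMD_PROTOCOL_USESSL=true
export CMD_URL_ADDPORT=false
export CMD_ALLOW_ORIGIN='localhost, codimd.example.com'
```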
## CSP and HSTS
| variable | example value | description |
| -------- | ------------- | ----------- |
| `CMD_CSP_ENABLE` | `true` | whether to enable Content Security Policy (directives cannot be configured with environment variables) |
| `CMD_CSP_REPORTURI` | `https://<someid>.report-uri.com/r/d/csp/enforce` | Allows you to add a URL for CSP reports in case of violations |
| `CMD_HSTS_ENABLE` | `true` | set to enable [HSTS](https://en.wikipedia.org/wiki/HTTP_Strict_Transport_Security) if HTTPS is also enabled (default is `true`) |
| `CMD_HSTS_INCLUDE_SUBDOMAINS` | `true` | set to include subdomains in HSTS (default is `true`) |
| `CMD_HSTS_MAX_AGE` | `31536000` | max duration in seconds to tell clients to keep HSTS status (default is a year) |
| `CMD_HSTS_PRELOAD` | `true` | whether to allow preloading of the site's HSTS status (e.g. into browsers) |
## Privacy and External Requests
| variable | example value | description |
| -------- | ------------- | ----------- |
| `CMD_ALLOW_GRAVATAR` | `true` or `false` | set to `false` to disable gravatar as profile picture source on your instance |
| `CMD_USECDN` | `true` or `false` | set to use CDN resources or not|
## Users and Privileges
| variable | example value | description |
| -------- | ------------- | ----------- |
| `CMD_ALLOW_ANONYMOUS` | `true` or `false` | set to allow anonymous usage (default is `true`) |
| `CMD_ALLOW_ANONYMOUS_EDITS` | `true` or `false` | if `allowAnonymous` is `true`, allow users to select `freely` permission, allowing guests to edit existing notes (default is `false`) |
| `CMD_ALLOW_FREEURL` | `true` or `false` | set to allow new note creation by accessing a nonexistent note URL |
| `CMD_DEFAULT_PERMISSION` | `freely`, `editable`, `limited`, `locked` or `private` | set notes default permission (only applied on signed users) |
| `CMD_SESSION_LIFE` | `1209600000` | Session life time. (milliseconds) |
| `CMD_SESSION_SECRET` | no example | Secret used to sign the session cookie. If none is set, one will be randomly generated on each startup, meaning all your users will be logged out. |
## Login methods
### Email (local account)
| variable | example value | description |
| -------- | ------------- | ----------- |
| `CMD_EMAIL` | `true` or `false` | set to allow email signin |
| `CMD_ALLOW_EMAIL_REGISTER` | `true` or `false` | set to allow email register (only applied when email is set, default is `true`. Note `bin/manage_users` might help you if registration is `false`.) |
### Dropbox Login
| variable | example value | description |
| -------- | ------------- | ----------- |
| `CMD_DROPBOX_CLIENTID` | no example | Dropbox API client id |
| `CMD_DROPBOX_CLIENTSECRET` | no example | Dropbox API client secret |
### Facebook Login
| variable | example value | description |
| -------- | ------------- | ----------- |
| `CMD_FACEBOOK_CLIENTID` | no example | Facebook API client id |
| `CMD_FACEBOOK_CLIENTSECRET` | no example | Facebook API client secret |
### GitHub Login
| variable | example value | description |
| -------- | ------------- | ----------- |
| `CMD_GITHUB_CLIENTID` | no example | GitHub API client id |
| `CMD_GITHUB_CLIENTSECRET` | no example | GitHub API client secret |
### GitLab Login
| variable | example value | description |
| -------- | ------------- | ----------- |
| `CMD_GITLAB_SCOPE` | `read_user` or `api` | GitLab API requested scope (default is `api`) (GitLab snippet import/export need `api` scope) |
| `CMD_GITLAB_BASEURL` | no example | GitLab authentication endpoint, set to use other endpoint than GitLab.com (optional) |
| `CMD_GITLAB_CLIENTID` | no example | GitLab API client id |
| `CMD_GITLAB_CLIENTSECRET` | no example | GitLab API client secret |
| `CMD_GITLAB_VERSION` | no example | GitLab API version (v3 or v4) |
### Google Login
| variable | example value | description |
| -------- | ------------- | ----------- |
| `CMD_GOOGLE_CLIENTID` | no example | Google API client id |
| `CMD_GOOGLE_CLIENTSECRET` | no example | Google API client secret |
### LDAP Login
| variable | example value | description |
| -------- | ------------- | ----------- |
| `CMD_LDAP_URL` | `ldap://example.com` | URL of LDAP server |
| `CMD_LDAP_BINDDN` | no example | bindDn for LDAP access |
| `CMD_LDAP_BINDCREDENTIALS` | no example | bindCredentials for LDAP access |
| `CMD_LDAP_SEARCHBASE` | `o=users,dc=example,dc=com` | LDAP directory to begin search from |
| `CMD_LDAP_SEARCHFILTER` | `(uid={{username}})` | LDAP filter to search with |
| `CMD_LDAP_SEARCHATTRIBUTES` | `displayName, mail` | LDAP attributes to search with (use comma to separate) |
| `CMD_LDAP_USERIDFIELD` | `uidNumber` or `uid` or `sAMAccountName` | The LDAP field which is used to uniquely identify a user on CodiMD |
| `CMD_LDAP_USERNAMEFIELD` | Fallback to userid | The LDAP field which is used as the username on CodiMD |
| `CMD_LDAP_TLS_CA` | `server-cert.pem, root.pem` | Root CA for LDAP TLS in PEM format (use comma to separate) |
| `CMD_LDAP_PROVIDERNAME` | `My institution` | Optional name to be displayed at login form indicating the LDAP provider |
### Mattermost Login
| variable | example value | description |
| -------- | ------------- | ----------- |
| `CMD_MATTERMOST_BASEURL` | no example | Mattermost authentication endpoint for versions below 5.0. For Mattermost version 5.0 and above, see [guide](guides/auth/mattermost-self-hosted.md). |
| `CMD_MATTERMOST_CLIENTID` | no example | Mattermost API client id |
| `CMD_MATTERMOST_CLIENTSECRET` | no example | Mattermost API client secret |
### OAuth2 Login
| variable | example value | description |
| -------- | ------------- | ----------- |
| `CMD_OAUTH2_USER_PROFILE_URL` | `https://example.com` | where to retrieve information about a user after successful login; needs to output JSON (no default value). Refer to the [Mattermost](guides/auth/mattermost-self-hosted.md) or [Nextcloud](guides/auth/nextcloud.md) examples for more details on all of the `CMD_OAUTH2...` options. |
| `CMD_OAUTH2_USER_PROFILE_USERNAME_ATTR` | `name` | where to find the username in the JSON from the user profile URL. (no default value)|
| `CMD_OAUTH2_USER_PROFILE_DISPLAY_NAME_ATTR` | `display-name` | where to find the display-name in the JSON from the user profile URL. (no default value) |
| `CMD_OAUTH2_USER_PROFILE_EMAIL_ATTR` | `email` | where to find the email address in the JSON from the user profile URL. (no default value) |
| `CMD_OAUTH2_TOKEN_URL` | `https://example.com` | sometimes called token endpoint, please refer to the documentation of your OAuth2 provider (no default value) |
| `CMD_OAUTH2_AUTHORIZATION_URL` | `https://example.com` | authorization URL of your provider, please refer to the documentation of your OAuth2 provider (no default value) |
| `CMD_OAUTH2_CLIENT_ID` | `afae02fckafd...` | you will get this from your OAuth2 provider when you register CodiMD as an OAuth2 client (no default value) |
| `CMD_OAUTH2_CLIENT_SECRET` | `afae02fckafd...` | you will get this from your OAuth2 provider when you register CodiMD as an OAuth2 client (no default value) |
| `CMD_OAUTH2_PROVIDERNAME` | `My institution` | Optional name to be displayed at login form indicating the OAuth2 provider |
### SAML Login
| variable | example value | description |
| -------- | ------------- | ----------- |
| `CMD_SAML_IDPSSOURL` | `https://idp.example.com/sso` | authentication endpoint of IdP. for details, see [guide](guides/auth/saml-onelogin.md). |
| `CMD_SAML_IDPCERT` | `/path/to/cert.pem` | certificate file path of IdP in PEM format |
| `CMD_SAML_ISSUER` | no example | identity of the service provider (optional, default: serverurl) |
| `CMD_SAML_DISABLEREQUESTEDAUTHNCONTEXT` | `true` or `false` | true to allow any authentication method, false restricts to password authentication (PasswordProtectedTransport) method (default: false) |
| `CMD_SAML_IDENTIFIERFORMAT` | no example | name identifier format (optional, default: `urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress`) |
| `CMD_SAML_GROUPATTRIBUTE` | `memberOf` | attribute name for group list (optional) |
| `CMD_SAML_REQUIREDGROUPS` | `codimd-users` | group names that are allowed (use a vertical bar to separate) (optional) |
| `CMD_SAML_EXTERNALGROUPS` | `Temporary-staff` | group names that are not allowed (use a vertical bar to separate) (optional) |
| `CMD_SAML_ATTRIBUTE_ID` | `sAMAccountName` | attribute map for `id` (optional, default: NameID of SAML response) |
| `CMD_SAML_ATTRIBUTE_USERNAME` | `mailNickname` | attribute map for `username` (optional, default: NameID of SAML response) |
| `CMD_SAML_ATTRIBUTE_EMAIL` | `mail` | attribute map for `email` (optional, default: NameID of SAML response if `CMD_SAML_IDENTIFIERFORMAT` is default) |
### Twitter Login
| variable | example value | description |
| -------- | ------------- | ----------- |
| `CMD_TWITTER_CONSUMERKEY` | no example | Twitter API consumer key |
| `CMD_TWITTER_CONSUMERSECRET` | no example | Twitter API consumer secret |
## Upload Storage
These are only relevant when the matching `CMD_IMAGE_UPLOAD_TYPE` is configured.
Also keep in mind that `filesystem` is available, so you don't have to use any
of these.
### Amazon S3
| variable | example value | description |
| -------- | ------------- | ----------- |
| `CMD_S3_ACCESS_KEY_ID` | no example | AWS access key id |
| `CMD_S3_SECRET_ACCESS_KEY` | no example | AWS secret key |
| `CMD_S3_REGION` | `ap-northeast-1` | AWS S3 region |
| `CMD_S3_BUCKET` | no example | AWS S3 bucket name |
### Azure Blob Storage
| variable | example value | description |
| -------- | ------------- | ----------- |
| `CMD_AZURE_CONNECTION_STRING` | no example | Azure Blob Storage connection string |
| `CMD_AZURE_CONTAINER` | no example | Azure Blob Storage container name (automatically created if it doesn't exist) |
### imgur
| variable | example value | description |
| -------- | ------------- | ----------- |
| `CMD_IMGUR_CLIENTID` | no example | Imgur API client id |
### Minio
| variable | example value | description |
| -------- | ------------- | ----------- |
| `CMD_MINIO_ACCESS_KEY` | no example | Minio access key |
| `CMD_MINIO_SECRET_KEY` | no example | Minio secret key |
| `CMD_MINIO_ENDPOINT` | `minio.example.org` | Address of your Minio endpoint/instance |
| `CMD_MINIO_PORT` | `9000` | Port that is used for your Minio instance |
| `CMD_MINIO_SECURE` | `true` | If set to `true` HTTPS is used for Minio |
### Lutim
| variable | example value | description |
| -------- | ------------- | ----------- |
| `CMD_LUTIM_URL` | `https://framapic.org/` | When `CMD_IMAGE_UPLOAD_TYPE` is set to `lutim`, you can setup the lutim url |
**Note:** *As part of the renaming, all `HMD_`-prefixed variables were renamed to `CMD_`-prefixed ones. The old ones continue to work.*
**Note:** *relative paths are based on CodiMD's base directory*

View file

@ -1,57 +0,0 @@
Developer Notes
===
## Preparing for running the code
**Notice:** *There are [specialised instructions for docker](../setup/docker.md) or [heroku](../setup/heroku.md), if you prefer running the code this way!*
1. Clone the repository with `git clone https://github.com/codimd/server.git codimd-server`
(cloning is the preferred way, but you can also download and unzip a release)
2. Enter the directory and run `bin/setup`, which will install npm dependencies
and create configs. The setup script is written in Bash, so you need Bash
as a prerequisite.
3. Setup the [config file](../configuration-config-file.md) or set up
[environment variables](../configuration-env-vars.md).
## Running the Code
Now that everything is in place, we can start CodiMD:
4. `npm run build` will build the frontend bundle. It uses webpack to do that.
5. Run the server with `node app.js`
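In other words, assuming `bin/setup` already ran and a config is in place, the sequence boils down to:

```sh
npm run build   # bundle the frontend with webpack
node app.js     # start the CodiMD server
```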
## Running the Code with Auto-Reload
The commands above are fine for production, but you're a developer and surely
you want to change things. You would need to restart both commands whenever you
change something. Luckily, you can run the following commands, which will automatically
rebuild the frontend or restart the server when necessary.
The commands will stay active in your terminal, so you will need multiple tabs
to run both at the same time.
4. Use `npm run dev` if you want webpack to continuously rebuild the frontend
code.
5. To auto-reload the server, the easiest method is to install [nodemon](https://www.npmjs.com/package/nodemon)
and run `nodemon --watch app.js --watch lib --watch locales app.js`.
## Structure
The repository contains two parts: a server (backend) and a client (frontend).
Most of the server code is in `lib/` and most of the client code is in `public/`.
```text
codimd-server/
├── docs/ --- documentation
├── lib/ --- server code
├── test/ --- test suite
└── public/ --- client code
├── css/ --- css styles
├── docs/ --- default documents
├── js/ --- js scripts
├── vendor/ --- vendor includes
└── views/ --- view templates
```

View file

@ -1,14 +0,0 @@
Operational Transformation
===
From 0.3.2, we started supporting operational transformation.
It makes concurrent editing safe and will not break up other users' operations.
Additionally, it can now show other clients' selections.
See more at [https://operational-transformation.github.io/](https://operational-transformation.github.io/)
And even more in this 2010 article series:
* https://drive.googleblog.com/2010/09/whats-different-about-new-google-docs_21.html
* https://drive.googleblog.com/2010/09/whats-different-about-new-google-docs_22.html
* https://drive.googleblog.com/2010/09/whats-different-about-new-google-docs.html

View file

@ -1,33 +0,0 @@
Webpack
===
Webpack is a JavaScript build system for frontend code. You can find out all
about it on [the webpack website](https://webpack.js.org/).
Here's how we're using it:
## `webpack.common.js`
This file contains all common definitions for chunks and plugins that are needed by the whole app.
**TODO:** Document which entry points are used for what.
## `webpack.htmlexport.js`
Separate config for the "save as html" feature.
Packs all CSS from `public/js/htmlExport.js` to `build/html.min.css`.
This file is then downloaded by client-side JS and used to create the HTML.
See `exportToHTML()` in `public/js/extra.js`.
## `webpack.dev.js`
The development config uses both common configs, enables development mode and enables "cheap" source maps (lines only).
If you need more detailed source maps while developing, you might want to use the `source-map` option instead.
See https://webpack.js.org/configuration/devtool/ for details.
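For illustration only (a sketch, not necessarily the repository's actual `webpack.dev.js`; `webpack-merge` and the exact devtool string are assumptions here), a dev config producing that behaviour could look like:

```javascript
// Sketch of a development config -- illustrative, not the project's real file
const merge = require('webpack-merge'); // v4-style export; webpack-merge v5 exports `{ merge }`
const common = require('./webpack.common.js');

module.exports = merge(common, {
  mode: 'development',
  // "cheap" source maps: faster rebuilds, line-level mappings only
  devtool: 'cheap-module-source-map'
  // use 'source-map' instead if you need full-detail mappings
});
```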
## `webpack.prod.js`
The production config uses both common configs and enables production mode.
This automatically enables various optimizations (e.g. UglifyJS). See https://webpack.js.org/concepts/mode/ for details.
For the global app config, the name of the emitted chunks is changed to include the content hash.
See https://webpack.js.org/guides/caching/ on why this is a good idea.
For the HTML export config, CSS minification is enabled.

View file

@ -1,35 +0,0 @@
Authentication guide - GitHub
===
**Note:** *This guide was written before the renaming. Just replace `HackMD` with `CodiMD` in your mind :smile: thanks!*
1. Sign-in or sign-up for a GitHub account
2. Navigate to developer settings in your GitHub account [here](https://github.com/settings/developers) and select the "OAuth Apps" tab
3. Click on the **New OAuth App** button, to create a new OAuth App:
![create-oauth-app](../../images/auth/create-oauth-app.png)
4. Fill out the new OAuth application registration form, and click **Register Application**
![register-oauth-application-form](../../images/auth/register-oauth-application-form.png)
**Note:** *The callback URL is `<your-codimd-url>/auth/github/callback`*
5. After successfully registering the application, you'll receive the Client ID and Client Secret for the application
![application-page](../../images/auth/application-page.png)
6. Add the Client ID and Client Secret to your config.json file or pass them as environment variables
* `config.json`:
```js
{
"production": {
"github": {
"clientID": "3747d30eaccXXXXXXXXX",
"clientSecret": "2a8e682948eee0c580XXXXXXXXXXXXXXXXXXXXXX"
}
}
}
```
* environment variables:
```sh
CMD_GITHUB_CLIENTID=3747d30eaccXXXXXXXXX
CMD_GITHUB_CLIENTSECRET=2a8e682948eee0c580XXXXXXXXXXXXXXXXXXXXXX
```

View file

@ -1,32 +0,0 @@
GitLab (self-hosted)
===
**Note:** *This guide was written before the renaming. Just replace `HackMD` with `CodiMD` in your mind :smile: thanks!*
1. Sign in to your GitLab
2. Navigate to the application management page at `https://your.gitlab.domain/admin/applications` (admin permissions required)
3. Click **New application** to create a new application and fill out the registration form:
![New GitLab application](../../images/auth/gitlab-new-application.png)
4. Click **Submit**
5. In the list of applications select **HackMD**. Leave that site open to copy the application ID and secret in the next step.
![Application: HackMD](../../images/auth/gitlab-application-details.png)
6. In the `docker-compose.yml` add the following environment variables to `app:` `environment:`
```
- HMD_DOMAIN=your.codimd.domain
- HMD_URL_ADDPORT=443
- HMD_PROTOCOL_USESSL=true
- HMD_GITLAB_BASEURL=https://your.gitlab.domain
- HMD_GITLAB_CLIENTID=23462a34example99XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
- HMD_GITLAB_CLIENTSECRET=5532e9dexamplXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
```
7. Run `docker-compose up -d` to apply your settings.
8. Sign in to your CodiMD using your GitLab ID:
![Sign in via GitLab](../../images/auth/gitlab-sign-in.png)

View file

@ -1,50 +0,0 @@
Keycloak/Red Hat SSO (self-hosted)
===
## Prerequisites
This guide assumes you have run and configured Keycloak. If you'd like to meet this prerequisite quickly, it can be achieved by running a `jboss/keycloak` container and attaching it to your network. Set the environment variables `KEYCLOAK_USER` and `KEYCLOAK_PASSWORD`, and expose port 8080.
If you run Keycloak without TLS like this, use HTTP wherever HTTPS is specified throughout this guide. You may also have to specify the exposed port, 8080.
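A throwaway setup along those lines (a sketch; the network name and credentials are placeholders) might be:

```sh
docker run -d --name keycloak \
  --network codimd-network \
  -e KEYCLOAK_USER=admin \
  -e KEYCLOAK_PASSWORD=changeme \
  -p 8080:8080 \
  jboss/keycloak
```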
## Steps
1. Sign in to the administration portal for your Keycloak instance at https://keycloak.example.com/auth/admin/master/console
You may note that a separate realm is specified throughout this tutorial. It is best practice not to use the master realm, as it normally contains the realm-management client that federates access using the policies and permissions you can create.
2. Navigate to the client management page at `https://keycloak.example.com/auth/admin/master/console/#/realms/your-realm/clients` (admin permissions required)
3. Click **Create** to create a new client and fill out the registration form. You should set the Root URL to the fully qualified public URL of your CodiMD instance.
4. Click **Save**
5. Set the **Access Type** of the client to `confidential`. This will make your client require a client secret upon authentication.
---
### Additional steps to circumvent generic OAuth2 issue:
1. Select Client Scopes from the sidebar, and begin to create a new client scope using the Create button.
2. Ensure that the **Name** field is set to `id`.
3. Create a new mapper under the Mappers tab. This should reference the User Property `id`. `Claim JSON Type` should be String and all switches below should be enabled. Save the mapper.
4. Go to the client you set up in the previous steps using the Clients page, then choose the Client Scopes tab. Apply the scope you've created. This should mitigate errors as seen in [codimd/server#56](https://github.com/codimd/server/issues/56), as the `/userinfo` endpoint should now bring back the user's ID under the `id` key as well as `sub`.
---
6. In the `docker-compose.yml` add the following environment variables to `app:` `environment:`
```
CMD_OAUTH2_USER_PROFILE_URL=https://keycloak.example.com/auth/realms/your-realm/protocol/openid-connect/userinfo
CMD_OAUTH2_USER_PROFILE_USERNAME_ATTR=preferred_username
CMD_OAUTH2_USER_PROFILE_DISPLAY_NAME_ATTR=name
CMD_OAUTH2_USER_PROFILE_EMAIL_ATTR=email
CMD_OAUTH2_TOKEN_URL=https://keycloak.example.com/auth/realms/your-realm/protocol/openid-connect/token
CMD_OAUTH2_AUTHORIZATION_URL=https://keycloak.example.com/auth/realms/your-realm/protocol/openid-connect/auth
CMD_OAUTH2_CLIENT_ID=<your client ID>
CMD_OAUTH2_CLIENT_SECRET=<your client secret, which you can find under the Credentials tab for your client>
CMD_OAUTH2_PROVIDERNAME=Keycloak
CMD_DOMAIN=<codimd.example.com>
CMD_PROTOCOL_USESSL=true
CMD_URL_ADDPORT=false
```
7. Run `docker-compose up -d` to apply your settings.
8. Sign in to your CodiMD using your Keycloak ID

View file

@ -1,41 +0,0 @@
AD LDAP auth
===
To set up your CodiMD instance with Active Directory you need the following configs:
```
CMD_LDAP_URL=ldap://internal.example.com
CMD_LDAP_BINDDN=cn=binduser,cn=Users,dc=internal,dc=example,dc=com
CMD_LDAP_BINDCREDENTIALS=<super secret password>
CMD_LDAP_SEARCHBASE=dc=internal,dc=example,dc=com
CMD_LDAP_SEARCHFILTER=(&(objectcategory=person)(objectclass=user)(|(sAMAccountName={{username}})(mail={{username}})))
CMD_LDAP_USERIDFIELD=sAMAccountName
CMD_LDAP_PROVIDERNAME=Example Inc AD
```
`CMD_LDAP_BINDDN` is either the `distinguishedName` or the `userPrincipalName`. *This can cause "username/password is invalid" errors when either this value or the password from `CMD_LDAP_BINDCREDENTIALS` is incorrect.*
`CMD_LDAP_SEARCHFILTER` matches on all users and uses either the email address or the `sAMAccountName` (usually the login name you also use to login to Windows).
*Only using `sAMAccountName` looks like this:* `(&(objectcategory=person)(objectclass=user)(sAMAccountName={{username}}))`
`CMD_LDAP_USERIDFIELD` says we want to use `sAMAccountName` as the unique identifier for the account itself.
`CMD_LDAP_PROVIDERNAME` is just the name displayed above the username and password fields on the login page.
Same in json:
```json
"ldap": {
"url": "ldap://internal.example.com",
"bindDn": "cn=binduser,cn=Users,dc=internal,dc=example,dc=com",
"bindCredentials": "<super secret password>",
"searchBase": "dc=internal,dc=example,dc=com",
"searchFilter": "(&(objectcategory=person)(objectclass=user)(|(sAMAccountName={{username}})(mail={{username}})))",
"useridField": "sAMAccountName",
},
```
More details and example: https://www.npmjs.com/package/passport-ldapauth

View file

@ -1,54 +0,0 @@
Authentication guide - Mattermost (self-hosted)
===
**Note:** *The Mattermost setup portion of this document is just a quick guide. See the [official documentation](https://docs.mattermost.com/developer/oauth-2-0-applications.html) for more details.*
This guide uses the generic OAuth2 module for compatibility with Mattermost version 5.0 and above.
1. Sign-in with an administrator account to your Mattermost instance
2. Make sure **OAuth 2.0 Service Provider** is enabled in the Main Menu (menu button next to your username in the top left corner) --> System Console --> Custom Integrations menu, which you can find at `https://your.mattermost.domain/admin_console/integrations/custom`
![mattermost-enable-oauth2](../../images/auth/mattermost-enable-oauth2.png)
3. Navigate to the OAuth integration settings through Main Menu --> Integrations --> OAuth 2.0 Applications, at `https://your.mattermost.domain/yourteam/integrations/oauth2-apps`
4. Click on the **Add OAuth 2.0 Application** button to add a new OAuth application
![mattermost-oauth-app-add](../../images/auth/mattermost-oauth-app-add.png)
5. Fill out the form and click **Save**
![mattermost-oauth-app-form](../../images/auth/mattermost-oauth-app-form.png)
*Note: The callback URL is \<your-codimd-url\>/auth/oauth2/callback*
6. After saving the application, you'll receive the Client ID and Client Secret
![mattermost-oauth-app-done](../../images/auth/mattermost-oauth-app-done.png)
7. Add the Client ID and Client Secret to your config.json file or pass them as environment variables
* `config.json`:
```javascript
{
"production": {
"oauth2": {
"baseURL": "https://your.mattermost.domain",
"userProfileURL": "https://your.mattermost.domain/api/v4/users/me",
"userProfileUsernameAttr": "id",
"userProfileDisplayNameAttr": "username",
"userProfileEmailAttr": "email",
"tokenURL": "https://your.mattermost.domain/oauth/access_token",
"authorizationURL": "https://your.mattermost.domain/oauth/authorize",
"clientID": "ii4p1u3jz7dXXXXXXXXXXXXXXX",
"clientSecret": "mqzzx6fydbXXXXXXXXXXXXXXXX"
}
}
}
```
* environment variables:
```sh
CMD_OAUTH2_BASEURL=https://your.mattermost.domain
CMD_OAUTH2_USER_PROFILE_URL=https://your.mattermost.domain/api/v4/users/me
CMD_OAUTH2_USER_PROFILE_USERNAME_ATTR=id
CMD_OAUTH2_USER_PROFILE_DISPLAY_NAME_ATTR=username
CMD_OAUTH2_USER_PROFILE_EMAIL_ATTR=email
CMD_OAUTH2_TOKEN_URL=https://your.mattermost.domain/oauth/access_token
CMD_OAUTH2_AUTHORIZATION_URL=https://your.mattermost.domain/oauth/authorize
CMD_OAUTH2_CLIENT_ID=ii4p1u3jz7dXXXXXXXXXXXXXXX
CMD_OAUTH2_CLIENT_SECRET=mqzzx6fydbXXXXXXXXXXXXXXXX
```

View file

@ -1,52 +0,0 @@
Authentication guide - Nextcloud (self-hosted)
===
*This has been constructed using the [Nextcloud OAuth2 Documentation](https://docs.nextcloud.com/server/14/admin_manual/configuration_server/oauth2.html?highlight=oauth2) combined with [this issue comment on the nextcloud bugtracker](https://github.com/nextcloud/server/issues/5694#issuecomment-314761326).*
This guide uses the generic OAuth2 module for compatibility with Nextcloud 13 and above (this guide has been tested successfully with Nextcloud 14).
1. Sign-in with an administrator account to your Nextcloud server
2. Navigate to the OAuth integration settings: Profile Icon (top right) --> Settings
Then choose Security Settings from the *Administration* part of the list - Don't confuse this with Personal Security Settings, where you would change your personal password!
At the top there's OAuth 2.0-Clients.
![Where to find OAuth2 in Nextcloud](../../images/auth/nextcloud-oauth2-1-settings.png)
3. Add your CodiMD instance by giving it a *name* (perhaps CodiMD, but could be anything) and a *Redirection-URI*. The Redirection-URI will be `<your-codimd-url>/auth/oauth2/callback`. Click <kbd>Add</kbd>.
![Adding a client to Nextcloud](../../images/auth/nextcloud-oauth2-2-client-add.png)
4. You'll now see a line containing a *client identifier* and a *Secret*.
![Successfully added OAuth2-client](../../images/auth/nextcloud-oauth2-3-clientid-secret.png)
5. That's it for Nextcloud, the rest is configured in your CodiMD `config.json` or via the `CMD_` environment variables!
6. Add the Client ID and Client Secret to your `config.json` file or pass them as environment variables. Make sure you also replace `<your-nextcloud-domain>` with the right domain name.
* `config.json`:
```javascript
{
"production": {
"oauth2": {
"clientID": "ii4p1u3jz7dXXXXXXXXXXXXXXX",
"clientSecret": "mqzzx6fydbXXXXXXXXXXXXXXXX",
"authorizationURL": "https://<your-nextcloud-domain>/apps/oauth2/authorize",
"tokenURL": "https://<your-nextcloud-domain>/apps/oauth2/api/v1/token",
"userProfileURL": "https://<your-nextcloud-domain>/ocs/v2.php/cloud/user?format=json",
"userProfileUsernameAttr": "ocs.data.id",
"userProfileDisplayNameAttr": "ocs.data.display-name",
"userProfileEmailAttr": "ocs.data.email"
}
}
}
```
* environment variables:
```sh
CMD_OAUTH2_CLIENT_ID=ii4p1u3jz7dXXXXXXXXXXXXXXX
CMD_OAUTH2_CLIENT_SECRET=mqzzx6fydbXXXXXXXXXXXXXXXX
CMD_OAUTH2_AUTHORIZATION_URL=https://<your-nextcloud-domain>/apps/oauth2/authorize
CMD_OAUTH2_TOKEN_URL=https://<your-nextcloud-domain>/apps/oauth2/api/v1/token
CMD_OAUTH2_USER_PROFILE_URL=https://<your-nextcloud-domain>/ocs/v2.php/cloud/user?format=json
CMD_OAUTH2_USER_PROFILE_USERNAME_ATTR=ocs.data.id
CMD_OAUTH2_USER_PROFILE_DISPLAY_NAME_ATTR=ocs.data.display-name
CMD_OAUTH2_USER_PROFILE_EMAIL_ATTR=ocs.data.email
```

View file

@ -1,12 +0,0 @@
# OAuth general information
| service | callback URL (after the server URL) |
| ------- | --------- |
| facebook | `/auth/facebook/callback` |
| twitter | `/auth/twitter/callback` |
| github | `/auth/github/callback` |
| gitlab | `/auth/gitlab/callback` |
| mattermost | `/auth/mattermost/callback` |
| dropbox | `/auth/dropbox/callback` |
| google | `/auth/google/callback` |
| saml | `/auth/saml/callback` |

View file

@ -1,48 +0,0 @@
Authentication guide - SAML (OneLogin)
===
**Note:** *This guide was written before the renaming. Just replace `HackMD` with `CodiMD` in your mind :smile: thanks!*
1. Sign-in or sign-up for a OneLogin account. (A free trial is available for 2 weeks.)
2. Go to the administration page.
3. Select the **APPS** menu and click on **Add Apps**.
![onelogin-add-app](../../images/auth/onelogin-add-app.png)
4. Find "SAML Test Connector (SP)" for template of settings and select it.
![onelogin-select-template](../../images/auth/onelogin-select-template.png)
5. Edit display name and icons for OneLogin dashboard as you want, and click **SAVE**.
![onelogin-edit-app-name](../../images/auth/onelogin-edit-app-name.png)
6. After that, more tabs will appear. Click **Configuration**, fill out the items below, and click **SAVE**.
* RelayState: The base URL of your CodiMD, which is the issuer. (A trailing slash is not needed.)
* ACS (Consumer) URL Validator: The callback URL of your CodiMD. (serverurl + /auth/saml/callback)
* ACS (Consumer) URL: same as above.
* Login URL: login URL (SAML requester) of your CodiMD. (serverurl + /auth/saml)
![onelogin-edit-sp-metadata](../../images/auth/onelogin-edit-sp-metadata.png)
7. The registration is completed. Next, click **SSO** and copy or download the items below.
* X.509 Certificate: Click **View Details** and **DOWNLOAD** or copy the content of certificate ....(A)
* SAML 2.0 Endpoint (HTTP): Copy the URL ....(B)
![onelogin-copy-idp-metadata](../../images/auth/onelogin-copy-idp-metadata.png)
8. In your CodiMD server, create the IdP certificate file from (A).
9. Add the IdP URL (B) and the IdP certificate file path to your config.json file or pass them as environment variables.
* `config.json`:
```javascript
{
"production": {
"saml": {
"idpSsoUrl": "https://*******.onelogin.com/trust/saml2/http-post/sso/******",
"idpCert": "/path/to/idp_cert.pem"
}
}
}
```
* environment variables
```sh
CMD_SAML_IDPSSOURL=https://*******.onelogin.com/trust/saml2/http-post/sso/******
CMD_SAML_IDPCERT=/path/to/idp_cert.pem
```
10. Try signing in with SAML from your CodiMD sign-in button or the OneLogin dashboard (like the screenshot below).
![onelogin-use-dashboard](../../images/auth/onelogin-use-dashboard.png)

View file

@ -1,85 +0,0 @@
Authentication guide - SAML
===
**Note:** *This guide was written before the renaming. Just replace `HackMD` with `CodiMD` in your mind :smile: thanks!*
The basic procedure is the same as for OneLogin, which is described in the [OneLogin guide](./saml-onelogin.md). If you need to match your IdP, you can use further configuration options as described below.
* If your IdP accepts metadata XML of the service provider to ease configuration, use this URL to download the metadata XML.
* {{your-serverurl}}/auth/saml/metadata
* _Note: If it is not accessible from the IdP, download it locally once and upload it to the IdP._
* Change the values of `issuer` and `identifierFormat` to match your IdP.
* `issuer`: A unique id to identify the application to the IdP; by default this is the base URL of your CodiMD.
* `identifierFormat`: The format of the unique id that identifies the user at the IdP; the default is the email-address-based format. It is recommended to use one of the following:
* urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress (default)
* urn:oasis:names:tc:SAML:1.1:nameid-format:unspecified
* `config.json`:
```javascript
{
"production": {
"saml": {
/* omitted */
"issuer": "mycodimd"
"identifierFormat": "urn:oasis:names:tc:SAML:1.1:nameid-format:unspecified"
}
}
}
```
* environment variables
```
CMD_SAML_ISSUER=mycodimd
CMD_SAML_IDENTIFIERFORMAT=urn:oasis:names:tc:SAML:1.1:nameid-format:unspecified
```
* Change the mapping of attribute names to customize the displayed user name and email address to match your IdP.
* `attribute`: A dictionary to map attribute names
* `attribute.id`: The primary key of the user table for your CodiMD
* `attribute.username`: Attribute name for the user name displayed on CodiMD
* `attribute.email`: Attribute name of the email address, which will also be used for Gravatar
* _Note: The default value of all attributes is the NameID of the SAML response, which is the email address if `identifierFormat` is the default._
* `config.json`:
```javascript
{
"production": {
"saml": {
/* omitted */
"attribute": {
"id": "sAMAccountName",
"username": "displayName",
"email": "mail"
}
}
}
}
```
* environment variables
```sh
CMD_SAML_ATTRIBUTE_ID=sAMAccountName
CMD_SAML_ATTRIBUTE_USERNAME=nickName
CMD_SAML_ATTRIBUTE_EMAIL=mail
```
* If you want to control permissions by group membership, add the group attribute name and required groups (allowed) or external groups (not allowed).
* `groupAttribute`: The attribute name for group membership
* `requiredGroups`: Array of group names that are allowed to access CodiMD. Use a vertical bar to separate them for environment variables.
* `externalGroups`: Array of group names that are not allowed to access CodiMD. Use a vertical bar to separate them for environment variables.
* _Note: Evaluates `externalGroups` first_
* `config.json`:
```javascript
{
"production": {
"saml": {
/* omitted */
"groupAttribute": "memberOf",
"requiredGroups": [ "codimd-users", "board-members" ],
"externalGroups": [ "temporary-staff" ]
}
}
}
```
* environment variables
```sh
CMD_SAML_GROUPATTRIBUTE=memberOf
CMD_SAML_REQUIREDGROUPS=codimd-users|board-members
CMD_SAML_EXTERNALGROUPS=temporary-staff
```

View file

@ -1,40 +0,0 @@
Authentication guide - Twitter
===
**Note:** *This guide was written before the renaming. Just replace `HackMD` with `CodiMD` in your mind :smile: thanks!*
1. Sign-in or sign-up for a Twitter account
2. Go to the Twitter Application management page [here](https://apps.twitter.com/)
3. Click on the **Create New App** button to create a new Twitter app:
![create-twitter-app](../../images/auth/create-twitter-app.png)
4. Fill out the create application form, check the developer agreement box, and click **Create Your Twitter Application**
![register-twitter-application](../../images/auth/register-twitter-application.png)
*Note: you may have to register your phone number with Twitter to create a Twitter application*
To do this, click your profile icon --> Settings and privacy --> Mobile --> Select Country/region --> Enter phone number --> Click Continue
5. After you receive confirmation that the Twitter application was created, click **Keys and Access Tokens**
![twitter-app-confirmation](../../images/auth/twitter-app-confirmation.png)
6. Obtain your Twitter Consumer Key and Consumer Secret
![twitter-app-keys](../../images/auth/twitter-app-keys.png)
7. Add your Consumer Key and Consumer Secret to your `config.json` file or pass them as environment variables:
* `config.json`:
```javascript
{
"production": {
"twitter": {
"consumerKey": "esTCJFXXXXXXXXXXXXXXXXXXX",
"consumerSecret": "zpCs4tU86pRVXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
}
}
}
```
* environment variables:
```sh
CMD_TWITTER_CONSUMERKEY=esTCJFXXXXXXXXXXXXXXXXXXX
CMD_TWITTER_CONSUMERSECRET=zpCs4tU86pRVXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
```

View file

@ -1,131 +0,0 @@
Pad migration guide from etherpad-lite
===
The goal of this migration is to do a "dumb" import from all the pads in Etherpad, to notes in
CodiMD. In particular, the url locations of the pads in Etherpad will be lost. Furthermore, any
metadata in Etherpad, such as revisions, author data and also formatted text will not be migrated
to CodiMD (only the plain text contents).
Note that this guide is not really meant as a support guide. I migrated my own Etherpad to CodiMD,
and it turned out to be quite easy in my opinion. In this guide I share my experience. Stuff may
require some creativity to work properly in your case. When I wrote this guide, I was using
[Etherpad 1.7.0] and [CodiMD 1.2.1]. Good luck!
[Etherpad 1.7.0]: https://github.com/ether/etherpad-lite/tree/1.7.0
[CodiMD 1.2.1]: https://github.com/codimd/server/tree/1.2.1
## 0. Requirements
- `curl`
- running Etherpad server
- running CodiMD server
- [codimd-cli]
[codimd-cli]: https://github.com/codimd/cli/blob/master/bin/codimd
## 1. Retrieve the list of pads
First, compose a list of all the pads that you want to have migrated from your Etherpad. Other than
the admin interface, Etherpad does not have a dedicated function to dump a list of all the pads.
However, the Etherpad wiki explains how to list all the pads by [talking directly to the
database][howtolistallpads].
You will end up with a file containing a pad name on each line:
```
date-ideas
groceries
london
weddingchecklist
(...)
```
[howtolistallpads]: https://github.com/ether/etherpad-lite/wiki/How-to-list-all-pads/49701ecdcbe07aea7ad27ffa23aed0d99c2e17db
## 2. Run the migration
Download [codimd-cli] and put the script in the same directory as the file containing the pad names.
Add to this directory the file listed below; I called it `migrate-etherpad.sh`. Modify at least the
configuration settings `ETHERPAD_SERVER` and `CODIMD_SERVER`.
```shell
#!/bin/sh
# migrate-etherpad.sh
#
# Description: Migrate pads from etherpad to codimd
# Author: Daan Sprenkels <hello@dsprenkels.com>
# This script uses the codimd command line script[1] to import a list of pads from
# Etherpad into CodiMD.
# [1]: https://github.com/codimd/cli/blob/master/bin/codimd
# The base url to where etherpad is hosted
ETHERPAD_SERVER="https://etherpad.example.com"
# The base url where codimd is hosted
CODIMD_SERVER="https://codimd.example.com"
# Write a list of pads and the urls which they were migrated to
REDIRECTS_FILE="redirects.txt"
# Fail if not called correctly
if [ "$#" -ne 1 ]; then
echo "Usage: $0 PAD_NAMES_FILE"
exit 2
fi
# Do the migration
for PAD_NAME in $(cat "$1"); do
# Download the pad
PAD_FILE="$(mktemp)"
curl "$ETHERPAD_SERVER/p/$PAD_NAME/export/txt" >"$PAD_FILE"
# Import the pad into codimd
OUTPUT="$(./codimd import "$PAD_FILE")"
echo "$PAD_NAME -> $OUTPUT" >>"$REDIRECTS_FILE"
done
```
Call this file like this:
```shell
./migrate-etherpad.sh pad_names.txt
```
This will download all the pads in `pad_names.txt` and put them on CodiMD. They will get assigned
random ids, so you won't be able to find them by their old names. The script will save the mappings to a file though
(in my case `redirects.txt`). You can use this file to redirect your users when they visit your
etherpad using a `301 Permanent Redirect` status code (see the next section).
## 3. Setup redirects (optional)
I got a `redirects.txt` file that looked a bit like this:
```
date-ideas -> Found. Redirecting to https://codimd.example.com/mPt0KfiKSBOTQ3mNcdfn
groceries -> Found. Redirecting to https://codimd.example.com/UukqgwLfhYyUUtARlcJ2_y
london -> Found. Redirecting to https://codimd.example.com/_d3wa-BE8t4Swv5w7O2_9R
weddingchecklist -> Found. Redirecting to https://codimd.example.com/XcQGqlBjl0u40wfT0N8TzQ
(...)
```
Using some `sed` magic, I changed it to an nginx config snippet:
```
location = /p/date-ideas {
return 301 https://codimd.example.com/mPt0M1KfiKSBOTQ3mNcdfn;
}
location = /p/groceries {
return 301 https://codimd.example.com/UukqgwLfhYyUUtARlcJ2_y;
}
location = /p/london {
return 301 https://codimd.example.com/_d3wa-BE8t4Swv5w7O2_9R;
}
location = /p/weddingchecklist {
return 301 https://codimd.example.com/XcQGqlBjl0u40wfT0N8TzQ;
}
```
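For reference, one way to generate such a snippet (a sketch that assumes GNU `sed` and the exact `->` format shown above) is:

```sh
sed -E 's|^(.+) -> Found\. Redirecting to (.+)$|location = /p/\1 {\n    return 301 \2;\n}|' \
  redirects.txt > etherpad-redirects.conf
```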
I put this file into my `etherpad.example.com` nginx config, such that all the users would be
redirected accordingly.

View file

@ -1,56 +0,0 @@
Migrations and Notable Changes
===
## Migrating to 1.4.0
We dropped support for node 6 with this version. If you have any trouble running this version, please double check that you are running at least node 8!
## Migrating to 1.3.2
This is not a breaking change, but to stay up to date with the community
repository, you may need to update a few URLs.
See more at [issue #10](https://github.com/codimd/server/issues/10)
**Native setup using git:**
Change the upstream remote using `git remote set-url origin https://github.com/codimd/server.git`.
**Docker:**
When you use our [container repository](https://github.com/codimd/container)
(which was previously `codimd-container`), you can simply run `git pull` and
your `docker-compose.yml` will be updated.
When you setup things yourself, make sure you use the new image:
[`quay.io/codimd/server`](https://quay.io/repository/codimd/server?tab=tags).
**Heroku:**
All you need to do is [disconnect GitHub](https://devcenter.heroku.com/articles/github-integration#disconnecting-from-github)
and [reconnect it](https://devcenter.heroku.com/articles/github-integration#enabling-github-integration)
with this new repository.
Or you can use our Heroku button and redeploy your instance and link the old
database again.
## Migrating to 1.1.0
We deprecated the older lower case config style and moved on to camel case style. Please have a look at the current `config.json.example` and check the warnings on startup.
*Notice: This is not a breaking change right now but will be in the future*
## Migrating to 0.5.0
[migration-to-0.5.0 migration tool](https://github.com/hackmdio/migration-to-0.5.0)
We don't use LZString to compress socket.io data and DB data after version 0.5.0.
Please run the migration tool if you're upgrading from the old version.
## Migrating to 0.4.0
[migration-to-0.4.0 migration tool](https://github.com/hackmdio/migration-to-0.4.0)
We've dropped MongoDB after version 0.4.0.
So here is the migration tool for you to transfer the old DB data to the new DB.
This tool is also used for official service.

View file

@ -1,85 +0,0 @@
Minio Guide for CodiMD
===
**Note:** *This guide was written before the renaming. Just replace `HackMD` with `CodiMD` in your mind :smile: thanks!*
1. First of all you need to set up Minio itself.
Please refer to the [official Minio docs](https://docs.minio.io/) for a
production setup.
For checking it out and development purposes a non-persistent setup is enough:
```sh
docker run --name test-minio --rm -d -p 9000:9000 minio/minio server /data
```
*Please notice this is not for production use, as all your data gets lost
when you stop this container.*
2. The next step is to get the credentials from the container:
```sh
docker logs test-minio
```
![docker logs](../images/minio-image-upload/docker-logs.png)
3. Open http://localhost:9000 and login with the shown credentials.
![minio default view](../images/minio-image-upload/default-view.png)
4. Create a bucket for CodiMD
![minio create bucket](../images/minio-image-upload/create-bucket.png)
5. Add a policy for the prefix `uploads` and make it read-only.
![minio edit policy](../images/minio-image-upload/open-edit-policy.png)
*Open policy editor*
![minio policy adding](../images/minio-image-upload/create-policy.png)
*Add policy for uploads*
6. Set credentials and configs for Minio in CodiMD's `config.json`
```JSON
"minio": {
"accessKey": "888MXJ7EP4XXXXXXXXX",
"secretKey": "yQS2EbM1Y6IJrp/1BUKWq2/XXXXXXXXXXXXXXX",
"endPoint": "localhost",
"port": 9000,
"secure": false
}
```
*You have to use different values for `endPoint` and `port` for a production
setup. Keep in mind the `endPoint` address has to be publicly accessible from
your browser.*
7. Set bucket name
```JSON
"s3bucket": "codimd"
```
8. Set upload type.
```JSON
"imageuploadtype": "minio"
```
9. Review your config.
```json
{
// all your other config…
"minio": {
"accessKey": "888MXJ7EP4XXXXXXXXX",
"secretKey": "yQS2EbM1Y6IJrp/1BUKWq2/XXXXXXXXXXXXXXX",
"endPoint": "localhost",
"port": 9000,
"secure": false
},
"s3bucket": "codimd",
"imageuploadtype": "minio"
}
```

View file

@ -1,17 +0,0 @@
Setup your terms of use
===
To set up your terms of use, you need to provide a document called `terms-of-use.md` which contains them. Of course, written in Markdown.
It has to be provided under `./public/docs/` and will automatically be turned into a CodiMD document. It will also be automatically updated as soon as you change the document on disk.
As soon as the file exists, a link will show up in the bottom part, along with the release notes and a link to them.
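For example (a sketch; the source path is a placeholder):

```sh
cp ~/terms-of-use.md ./public/docs/terms-of-use.md
```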
Setup your privacy policy
===
To add a privacy policy you can use the same technique as for the terms of use. The main difference is that the document is called `privacy.md`.
See our example file `./public/docs/privacy.md.example` containing some useful hints for writing your own privacy policy.
As with the terms of use, a link to the privacy notices will show up in the area where the release notes are provided on the index page.

View file

@ -1,19 +1,16 @@
Guide - Setup CodiMD S3 image upload # Guide - Setup HackMD S3 image upload
===
**Note:** *This guide was written before the renaming. Just replace `HackMD` with `CodiMD` in your mind :smile: thanks!*
1. Go to [AWS S3 console](https://console.aws.amazon.com/s3/home) and create a new bucket. 1. Go to [AWS S3 console](https://console.aws.amazon.com/s3/home) and create a new bucket.
![create-bucket](../images/s3-image-upload/create-bucket.png) ![create-bucket](images/s3-image-upload/create-bucket.png)
2. Click on bucket, select **Properties** on the side panel, and find **Permission** section. Click **Edit bucket policy**. 2. Click on bucket, select **Properties** on the side panel, and find **Permission** section. Click **Edit bucket policy**.
![bucket-property](../images/s3-image-upload/bucket-property.png) ![bucket-property](images/s3-image-upload/bucket-property.png)
3. Enter the following policy, replace `bucket_name` with your bucket name: 3. Enter the following policy, replace `bucket_name` with your bucket name:
![bucket-policy-editor](../images/s3-image-upload/bucket-policy-editor.png) ![bucket-policy-editor](images/s3-image-upload/bucket-policy-editor.png)
```json ```json
{ {
@ -33,15 +30,15 @@ Guide - Setup CodiMD S3 image upload
5. Enter user page, select **Permission** tab, look at **Inline Policies** section, and click **Create User Policy** 5. Enter user page, select **Permission** tab, look at **Inline Policies** section, and click **Create User Policy**
![iam-user](../images/s3-image-upload/iam-user.png) ![iam-user](images/s3-image-upload/iam-user.png)
6. Select **Custom Policy** 6. Select **Custom Policy**
![custom-policy](../images/s3-image-upload/custom-policy.png) ![custom-policy](images/s3-image-upload/custom-policy.png)
7. Enter the following policy, replace `bucket_name` with your bucket name: 7. Enter the following policy, replace `bucket_name` with your bucket name:
![review-policy](../images/s3-image-upload/review-policy.png) ![review-policy](images/s3-image-upload/review-policy.png)
```json ```json
{ {
@ -66,18 +63,18 @@ Guide - Setup CodiMD S3 image upload
{ {
"production": { "production": {
... ...
"imageuploadtype": "s3", "imageUploadType": "s3",
"s3": { "s3": {
"accessKeyId": "YOUR_S3_ACCESS_KEY_ID", "accessKeyId": "YOUR_S3_ACCESS_KEY_ID",
"secretAccessKey": "YOUR_S3_ACCESS_KEY", "secretAccessKey": "YOUR_S3_ACCESS_KEY",
"region": "YOUR_S3_REGION" // example: ap-northeast-1 "region": "YOUR_S3_REGION", // example: ap-northeast-1
}, "bucket": "YOUR_S3_BUCKET_NAME"
"s3bucket": "YOUR_S3_BUCKET_NAME" }
} }
} }
``` ```
9. In additional to edit `config.json` directly, you could also try [environment variables](../configuration-env-vars.md). 9. In additional to edit `config.json` directly, you could also try [environment variable](https://github.com/hackmdio/hackmd#environment-variables-will-overwrite-other-server-configs).
## Related Tools ## Related Tools

View file

@ -1,40 +0,0 @@
History of CodiMD
===
## It started with HackMD
HackMD is the origin of this project, which was mostly developed by Max Wu and
Yukai Huang. Originally, this was open source under MIT license, but was
[relicensed in October 2017 to be AGPLv3](https://github.com/hackmdio/codimd/pull/578).
At the same time, [hackmd.io](https://hackmd.io) was founded to offer a
commercial version of HackMD.
The AGPLv3 version was developed and released by the community; for a while this was referred to as the "HackMD community edition".
*For more on the splitting of the projects, please refer to [A note to our community (2017-10-11)](https://hackmd.io/c/community-news/https%3A%2F%2Fhackmd.io%2Fs%2Fr1_4j9_hZ).*
## HackMD CE became CodiMD
In June 2018, the project was renamed from "HackMD" to CodiMD and continued to be developed under AGPLv3 by the community. We decided to change the name to clear up the confusion between HackMD (the enterprise offering) and CodiMD (the community project), as people mistook the relationship for an open-core development model.
*For the whole renaming story, see the [issue where the renaming was discussed](https://github.com/hackmdio/hackmd/issues/720).*
## CodiMD went independent
In March 2019, a discussion over licensing, governance and the future of CodiMD led to the formation of a distinct GitHub organization. Up to that point, the community project had resided in the hackmdio organization but was for the most part self-organized.
During that debate, we did not reach an agreement that would have allowed us to
move the repository, so we simply forked it. We still welcome the HackMD team
as part of our community, especially since a large portion of this code base
originated with them.
*For the debate that led to this step, please refer to the [governance debate](https://github.com/hackmdio/hackmd/issues/1170) and [the announcement of the new repository](https://github.com/codimd/server/issues/10).*

*(29 binary image files, ranging from 5.5 KiB to 234 KiB, are not shown in this diff.)*

View file

@ -1,35 +0,0 @@
Developer Certificate of Origin
Version 1.1
Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
660 York Street, Suite 102,
San Francisco, CA 94110 USA
Everyone is permitted to copy and distribute verbatim copies of this
license document, but changing it is not allowed.
Developer's Certificate of Origin 1.1
By making a contribution to this project, I certify that:
(a) The contribution was created in whole or in part by me and I
have the right to submit it under the open source license
indicated in the file; or
(b) The contribution is based upon previous work that, to the best
of my knowledge, is covered under an appropriate open source
license and I have the right under that license to submit that
work with modifications, whether created in whole or in part
by me, under the same open source license (unless I am
permitted to submit under a different license), as indicated
in the file; or
(c) The contribution was provided directly to me by some other
person who certified (a), (b) or (c) and I have not modified
it.
(d) I understand and agree that this project and the contribution
are public and that a record of the contribution (including all
personal information I submit with it, including my sign-off) is
maintained indefinitely and may be redistributed consistent with
this project or the open source license(s) involved.

View file

@ -1,6 +0,0 @@
Cloudron
===
Install CodiMD on [Cloudron](https://cloudron.io):
[![Install](https://cloudron.io/img/button.svg)](https://cloudron.io/button.html?app=io.hackmd.cloudronapp)

View file

@ -1,14 +0,0 @@
LinuxServer.io CodiMD Image
===
[![LinuxServer.io Discord](https://img.shields.io/discord/354974912613449730.svg?logo=discord&label=LSIO%20Discord&style=flat-square)](https://discord.gg/YWrKVTn)[![container version badge](https://images.microbadger.com/badges/version/linuxserver/codimd.svg)](https://microbadger.com/images/linuxserver/codimd "Get your own version badge on microbadger.com")[![container image size badge](https://images.microbadger.com/badges/image/linuxserver/codimd.svg)](https://microbadger.com/images/linuxserver/codimd "Get your own version badge on microbadger.com")![Docker Pulls](https://img.shields.io/docker/pulls/linuxserver/codimd.svg)![Docker Stars](https://img.shields.io/docker/stars/linuxserver/codimd.svg)[![Build Status](https://ci.linuxserver.io/buildStatus/icon?job=Docker-Pipeline-Builders/docker-codimd/master)](https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-codimd/job/master/)[![LinuxServer.io CI summary](https://lsio-ci.ams3.digitaloceanspaces.com/linuxserver/codimd/latest/badge.svg)](https://lsio-ci.ams3.digitaloceanspaces.com/linuxserver/codimd/latest/index.html)
[LinuxServer.io](https://linuxserver.io) have created an Ubuntu-based multi-arch container image for x86-64, arm64 and armhf which supports PDF export from all architectures using [PhantomJS](https://phantomjs.org/).
- It supports all the environment variables detailed in the [configuration documentation](../configuration-env-vars.md) to modify it according to your needs.
- It gets rebuilt on new releases from CodiMD and also weekly if necessary to update any other package changes in the underlying container, making it easy to keep your CodiMD instance up to date.
- It also details how to easily [utilize Docker networking to reverse proxy](https://github.com/linuxserver/docker-codimd/#application-setup) CodiMD using their [LetsEncrypt docker image](https://github.com/linuxserver/docker-letsencrypt).
In order to contribute, check the LinuxServer.io [GitHub repository](https://github.com/linuxserver/docker-codimd/) for CodiMD.
To find all tags and versions of the image, check the [Docker Hub repository](https://hub.docker.com/r/linuxserver/codimd).
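For example, pulling the image from Docker Hub (this grabs the default `latest` tag; pick a specific tag from the Docker Hub page if you need a reproducible deployment):
```sh
docker pull linuxserver/codimd
```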

View file

@ -1,23 +0,0 @@
CodiMD Docker Image
===
[![Try in PWD](https://cdn.rawgit.com/play-with-docker/stacks/cff22438/assets/images/button.png)](http://play-with-docker.com?stack=https://github.com/codimd/container/raw/master/docker-compose.yml&stack_name=codimd)
**Debian-based version:**
[![Docker Repository on Quay](https://quay.io/repository/codimd/server/status "Docker Repository on Quay")](https://quay.io/repository/codimd/server)
**Alpine-based version:**
[![Docker Repository on Quay](https://quay.io/repository/codimd/server/status "Docker Repository on Quay")](https://quay.io/repository/codimd/server)
The easiest way to set up CodiMD using Docker is with the following three commands:
```sh
git clone https://github.com/codimd/container.git codimd-container
cd codimd-container
docker-compose up
```
Read more about it in the [container repository](https://github.com/codimd/container).

View file

@ -1,7 +0,0 @@
Heroku Deployment
===
You can quickly set up a sample CodiMD application on Heroku by clicking the button below.
[![Deploy on Heroku](https://www.herokucdn.com/deploy/button.svg)](https://heroku.com/deploy?template=https://github.com/codimd/server/tree/master)

View file

@ -1,6 +0,0 @@
Kubernetes
===
To install, use `helm install stable/hackmd`.
For all further details, please check out the official CodiMD [K8s helm chart](https://github.com/kubernetes/charts/tree/master/stable/hackmd).
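For example, with Helm 2 era tooling (the release name is only an illustration; chart values can be overridden with `--set` or a values file):
```sh
helm repo update
helm install --name my-codimd stable/hackmd
```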

View file

@ -1,39 +0,0 @@
Manual Installation
===
## Requirements on your server
- Node.js 8.5 or up
- A database (PostgreSQL, MySQL, MariaDB, SQLite, MSSQL) using charset `utf8`
- npm (and its dependencies, [node-gyp](https://github.com/nodejs/node-gyp#installation))
- yarn
- Bash (for the setup script)
- For **building** CodiMD we recommend using a machine with at least **2 GB** of RAM
## Instructions
1. Check if you meet the [requirements at the top of this document](#requirements-on-your-server).
2. Clone this repository (preferred) or download a release and unzip it.
3. Enter the directory and type `bin/setup`, which will install npm dependencies and create configs.
4. Set up the configs (see below).
5. Set up environment variables, which will override the configs.
6. Build the front-end bundle with `npm run build` (use `npm run dev` if you are in development).
7. Modify the file named `.sequelizerc` and change the value of the variable `url` to your db connection string.
For example: `postgres://username:password@localhost:5432/codimd`
8. It is recommended to start your server manually once with `npm start --production`; this way it is easier to see warnings or errors that might occur (leave out `--production` for development).
9. Run the server however you like (node, forever, pm2, systemd, init scripts). A condensed command sketch follows after this list.
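A condensed sketch of the steps above on a typical Linux host; the repository URL, database connection string and paths are examples and have to be adapted to your setup:
```sh
git clone https://github.com/codimd/server.git codimd-server
cd codimd-server
bin/setup                # installs dependencies and creates config files
# edit config.json and .sequelizerc, e.g. url: postgres://username:password@localhost:5432/codimd
npm run build            # build the front-end bundle
npm start --production   # first manual start to spot warnings or errors
```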
## How to upgrade your installation
If you are upgrading CodiMD from an older version, follow these steps:
1. Check if you meet the [requirements at the top of this document](#requirements-on-your-server).
2. Verify which version you were running before and take a look at [migrations and breaking changes](../guides/migrations-and-breaking-changes.md) to see if additional steps or configuration changes are necessary.
3. Fully stop your old CodiMD server.
4. `git pull` or unzip a new release in the directory.
5. Run `bin/setup`. This will take care of installing dependencies. It is safe to run on an existing installation.
6. Build the front-end bundle with `npm run build` (use `npm run dev` if you are in development).
7. It is recommended to start your server manually once with `npm start --production`; this way it is easier to see warnings or errors that might occur (leave out `--production` for development).
8. You can now restart the CodiMD server! A condensed upgrade sketch follows after this list.
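Again as a sketch, assuming the instance was installed from a git clone as shown above:
```sh
cd codimd-server
git pull                 # or unzip a new release over the directory
bin/setup                # safe to re-run on an existing installation
npm run build
npm start --production   # watch for warnings or errors, then hand over to your service manager
```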

View file

@ -1,161 +0,0 @@
Slide Separators
===
If you're getting started with reveal.js slides, there are a few things you need to know.
There are two types of slides: those that transition horizontally and those that transition vertically (subslides).
The following separators are used for each in the CodiMD syntax:
```
# First Slide
---
# Next slide
----
## Subslide
```
As you can see, horizontal transitions are separated by `---` and vertical transitions by `----`.
## Basic YAML header
It's possible to customise the slide options using the YAML header in the slide markdown.
For example:
```
---
title: Example Slide
tags: presentation
slideOptions:
theme: solarized
transition: 'fade'
# parallaxBackgroundImage: 'https://s3.amazonaws.com/hakim-static/reveal-js/reveal-parallax-1.jpg'
---
```
Make sure to indent the listed slide options with exactly two spaces.
You can comment out options with a `#`.
### Some other options
```
# Display controls in the bottom right corner
controls: true
# Display a presentation progress bar
progress: true
# Set default timing of 2 minutes per slide
defaultTiming: 120
# Display the page number of the current slide
slideNumber: false
# Push each slide change to the browser history
history: false
# Enable keyboard shortcuts for navigation
keyboard: true
# Enable the slide overview mode
overview: true
# Vertical centering of slides
center: true
# Enables touch navigation on devices with touch input
touch: true
# Loop the presentation
loop: false
# Change the presentation direction to be RTL
rtl: false
# Randomizes the order of slides each time the presentation loads
shuffle: false
# Turns fragments on and off globally
fragments: true
# Flags if the presentation is running in an embedded mode,
# i.e. contained within a limited portion of the screen
embedded: false
# Flags if we should show a help overlay when the questionmark
# key is pressed
help: true
# Flags if speaker notes should be visible to all viewers
showNotes: false
# Global override for autoplaying embedded media (video/audio/iframe)
# - null: Media will only autoplay if data-autoplay is present
# - true: All media will autoplay, regardless of individual setting
# - false: No media will autoplay, regardless of individual setting
autoPlayMedia: null
# Number of milliseconds between automatically proceeding to the
# next slide, disabled when set to 0, this value can be overwritten
# by using a data-autoslide attribute on your slides
autoSlide: 0
# Stop auto-sliding after user input
autoSlideStoppable: true
# Use this method for navigation when auto-sliding
autoSlideMethod: Reveal.navigateNext
# Enable slide navigation via mouse wheel
mouseWheel: false
# Hides the address bar on mobile devices
hideAddressBar: true
# Opens links in an iframe preview overlay
previewLinks: false
# Transition style
transition: 'slide'
# none/fade/slide/convex/concave/zoom
# Transition speed
transitionSpeed: 'default'
# default/fast/slow
# Transition style for full page slide backgrounds
backgroundTransition: 'fade'
# none/fade/slide/convex/concave/zoom
# Number of slides away from the current that are visible
viewDistance: 3
# Parallax background image
parallaxBackgroundImage: ''
# e.g. "'https://s3.amazonaws.com/hakim-static/reveal-js/reveal-parallax-1.jpg'"
# Parallax background size
parallaxBackgroundSize: ''
# CSS syntax, e.g. "2100px 900px"
# Number of pixels to move the parallax background per slide
# - Calculated automatically unless specified
# - Set to 0 to disable movement along an axis
parallaxBackgroundHorizontal: null
parallaxBackgroundVertical: null
# The display mode that will be used to show slides
display: 'block'
```
## Customising individual slides
Custom background image:
```
---
<!-- .slide: data-background="https://s3.amazonaws.com/hakim-static/reveal-js/reveal-parallax-1.jpg" -->
#### testslide
---
```

190
lib/auth.js Normal file
View file

@ -0,0 +1,190 @@
//auth
//external modules
var passport = require('passport');
var FacebookStrategy = require('passport-facebook').Strategy;
var TwitterStrategy = require('passport-twitter').Strategy;
var GithubStrategy = require('passport-github').Strategy;
var GitlabStrategy = require('passport-gitlab2').Strategy;
var DropboxStrategy = require('passport-dropbox-oauth2').Strategy;
var GoogleStrategy = require('passport-google-oauth20').Strategy;
var LdapStrategy = require('passport-ldapauth');
var LocalStrategy = require('passport-local').Strategy;
var validator = require('validator');
//core
var config = require('./config.js');
var logger = require("./logger.js");
var models = require("./models");
function callback(accessToken, refreshToken, profile, done) {
//logger.info(profile.displayName || profile.username);
var stringifiedProfile = JSON.stringify(profile);
models.User.findOrCreate({
where: {
profileid: profile.id.toString()
},
defaults: {
profile: stringifiedProfile,
accessToken: accessToken,
refreshToken: refreshToken
}
}).spread(function (user, created) {
if (user) {
var needSave = false;
if (user.profile != stringifiedProfile) {
user.profile = stringifiedProfile;
needSave = true;
}
if (user.accessToken != accessToken) {
user.accessToken = accessToken;
needSave = true;
}
if (user.refreshToken != refreshToken) {
user.refreshToken = refreshToken;
needSave = true;
}
if (needSave) {
user.save().then(function () {
if (config.debug)
logger.info('user login: ' + user.id);
return done(null, user);
});
} else {
if (config.debug)
logger.info('user login: ' + user.id);
return done(null, user);
}
}
}).catch(function (err) {
logger.error('auth callback failed: ' + err);
return done(err, null);
});
}
//facebook
if (config.facebook) {
module.exports = passport.use(new FacebookStrategy({
clientID: config.facebook.clientID,
clientSecret: config.facebook.clientSecret,
callbackURL: config.serverurl + '/auth/facebook/callback'
}, callback));
}
//twitter
if (config.twitter) {
passport.use(new TwitterStrategy({
consumerKey: config.twitter.consumerKey,
consumerSecret: config.twitter.consumerSecret,
callbackURL: config.serverurl + '/auth/twitter/callback'
}, callback));
}
//github
if (config.github) {
passport.use(new GithubStrategy({
clientID: config.github.clientID,
clientSecret: config.github.clientSecret,
callbackURL: config.serverurl + '/auth/github/callback'
}, callback));
}
//gitlab
if (config.gitlab) {
passport.use(new GitlabStrategy({
baseURL: config.gitlab.baseURL,
clientID: config.gitlab.clientID,
clientSecret: config.gitlab.clientSecret,
callbackURL: config.serverurl + '/auth/gitlab/callback'
}, callback));
}
//dropbox
if (config.dropbox) {
passport.use(new DropboxStrategy({
apiVersion: '2',
clientID: config.dropbox.clientID,
clientSecret: config.dropbox.clientSecret,
callbackURL: config.serverurl + '/auth/dropbox/callback'
}, callback));
}
//google
if (config.google) {
passport.use(new GoogleStrategy({
clientID: config.google.clientID,
clientSecret: config.google.clientSecret,
callbackURL: config.serverurl + '/auth/google/callback'
}, callback));
}
// ldap
if (config.ldap) {
passport.use(new LdapStrategy({
server: {
url: config.ldap.url || null,
bindDn: config.ldap.bindDn || null,
bindCredentials: config.ldap.bindCredentials || null,
searchBase: config.ldap.searchBase || null,
searchFilter: config.ldap.searchFilter || null,
searchAttributes: config.ldap.searchAttributes || null,
tlsOptions: config.ldap.tlsOptions || null
},
},
function(user, done) {
var profile = {
id: 'LDAP-' + user.uidNumber,
username: user.uid,
displayName: user.displayName,
emails: user.mail ? [user.mail] : [],
avatarUrl: null,
profileUrl: null,
provider: 'ldap',
}
var stringifiedProfile = JSON.stringify(profile);
models.User.findOrCreate({
where: {
profileid: profile.id.toString()
},
defaults: {
profile: stringifiedProfile,
}
}).spread(function (user, created) {
if (user) {
var needSave = false;
if (user.profile != stringifiedProfile) {
user.profile = stringifiedProfile;
needSave = true;
}
if (needSave) {
user.save().then(function () {
if (config.debug)
logger.info('user login: ' + user.id);
return done(null, user);
});
} else {
if (config.debug)
logger.info('user login: ' + user.id);
return done(null, user);
}
}
}).catch(function (err) {
logger.error('ldap auth failed: ' + err);
return done(err, null);
});
}));
}
// email
if (config.email) {
passport.use(new LocalStrategy({
usernameField: 'email'
},
function(email, password, done) {
if (!validator.isEmail(email)) return done(null, false);
models.User.findOne({
where: {
email: email
}
}).then(function (user) {
if (!user) return done(null, false);
if (!user.verifyPassword(password)) return done(null, false);
return done(null, user);
}).catch(function (err) {
logger.error(err);
return done(err);
});
}));
}

219
lib/config.js Normal file
View file

@ -0,0 +1,219 @@
// external modules
var fs = require('fs');
var path = require('path');
// configs
var env = process.env.NODE_ENV || 'development';
var config = require(path.join(__dirname, '..', 'config.json'))[env];
var debug = process.env.DEBUG ? (process.env.DEBUG === 'true') : ((typeof config.debug === 'boolean') ? config.debug : (env === 'development'));
// url
var domain = process.env.DOMAIN || process.env.HMD_DOMAIN || config.domain || '';
var urlpath = process.env.URL_PATH || process.env.HMD_URL_PATH || config.urlpath || '';
var port = process.env.PORT || process.env.HMD_PORT || config.port || 3000;
var alloworigin = process.env.HMD_ALLOW_ORIGIN ? process.env.HMD_ALLOW_ORIGIN.split(',') : (config.alloworigin || ['localhost']);
var usessl = !!config.usessl;
var protocolusessl = (usessl === true && typeof process.env.HMD_PROTOCOL_USESSL === 'undefined' && typeof config.protocolusessl === 'undefined')
? true : (process.env.HMD_PROTOCOL_USESSL ? (process.env.HMD_PROTOCOL_USESSL === 'true') : !!config.protocolusessl);
var urladdport = process.env.HMD_URL_ADDPORT ? (process.env.HMD_URL_ADDPORT === 'true') : !!config.urladdport;
var usecdn = process.env.HMD_USECDN ? (process.env.HMD_USECDN === 'true') : ((typeof config.usecdn === 'boolean') ? config.usecdn : true);
var allowanonymous = process.env.HMD_ALLOW_ANONYMOUS ? (process.env.HMD_ALLOW_ANONYMOUS === 'true') : ((typeof config.allowanonymous === 'boolean') ? config.allowanonymous : true);
var allowfreeurl = process.env.HMD_ALLOW_FREEURL ? (process.env.HMD_ALLOW_FREEURL === 'true') : !!config.allowfreeurl;
var permissions = ['editable', 'limited', 'locked', 'protected', 'private'];
if (allowanonymous) {
permissions.unshift('freely');
}
var defaultpermission = process.env.HMD_DEFAULT_PERMISSION || config.defaultpermission;
defaultpermission = permissions.indexOf(defaultpermission) != -1 ? defaultpermission : 'editable';
// db
var dburl = process.env.HMD_DB_URL || process.env.DATABASE_URL || config.dburl;
var db = config.db || {};
// ssl path
var sslkeypath = config.sslkeypath || '';
var sslcertpath = config.sslcertpath || '';
var sslcapath = config.sslcapath || '';
var dhparampath = config.dhparampath || '';
// other path
var tmppath = config.tmppath || './tmp';
var defaultnotepath = config.defaultnotepath || './public/default.md';
var docspath = config.docspath || './public/docs';
var indexpath = config.indexpath || './public/views/index.ejs';
var hackmdpath = config.hackmdpath || './public/views/hackmd.ejs';
var errorpath = config.errorpath || './public/views/error.ejs';
var prettypath = config.prettypath || './public/views/pretty.ejs';
var slidepath = config.slidepath || './public/views/slide.ejs';
// session
var sessionname = config.sessionname || 'connect.sid';
var sessionsecret = config.sessionsecret || 'secret';
var sessionlife = config.sessionlife || 14 * 24 * 60 * 60 * 1000; //14 days
// static files
var staticcachetime = config.staticcachetime || 1 * 24 * 60 * 60 * 1000; // 1 day
// socket.io
var heartbeatinterval = config.heartbeatinterval || 5000;
var heartbeattimeout = config.heartbeattimeout || 10000;
// document
var documentmaxlength = config.documentmaxlength || 100000;
// image upload setting, available options are imgur/s3/filesystem
var imageUploadType = process.env.HMD_IMAGE_UPLOAD_TYPE || config.imageUploadType || 'imgur';
config.s3 = config.s3 || {};
var s3 = {
accessKeyId: process.env.HMD_S3_ACCESS_KEY_ID || config.s3.accessKeyId,
secretAccessKey: process.env.HMD_S3_SECRET_ACCESS_KEY || config.s3.secretAccessKey,
region: process.env.HMD_S3_REGION || config.s3.region
}
var s3bucket = process.env.HMD_S3_BUCKET || config.s3.bucket;
// auth
var facebook = (process.env.HMD_FACEBOOK_CLIENTID && process.env.HMD_FACEBOOK_CLIENTSECRET) ? {
clientID: process.env.HMD_FACEBOOK_CLIENTID,
clientSecret: process.env.HMD_FACEBOOK_CLIENTSECRET
} : config.facebook || false;
var twitter = (process.env.HMD_TWITTER_CONSUMERKEY && process.env.HMD_TWITTER_CONSUMERSECRET) ? {
consumerKey: process.env.HMD_TWITTER_CONSUMERKEY,
consumerSecret: process.env.HMD_TWITTER_CONSUMERSECRET
} : config.twitter || false;
var github = (process.env.HMD_GITHUB_CLIENTID && process.env.HMD_GITHUB_CLIENTSECRET) ? {
clientID: process.env.HMD_GITHUB_CLIENTID,
clientSecret: process.env.HMD_GITHUB_CLIENTSECRET
} : config.github || false;
var gitlab = (process.env.HMD_GITLAB_CLIENTID && process.env.HMD_GITLAB_CLIENTSECRET) ? {
baseURL: process.env.HMD_GITLAB_BASEURL,
clientID: process.env.HMD_GITLAB_CLIENTID,
clientSecret: process.env.HMD_GITLAB_CLIENTSECRET
} : config.gitlab || false;
var dropbox = (process.env.HMD_DROPBOX_CLIENTID && process.env.HMD_DROPBOX_CLIENTSECRET) ? {
clientID: process.env.HMD_DROPBOX_CLIENTID,
clientSecret: process.env.HMD_DROPBOX_CLIENTSECRET
} : (config.dropbox && config.dropbox.clientID && config.dropbox.clientSecret && config.dropbox) || false;
var google = (process.env.HMD_GOOGLE_CLIENTID && process.env.HMD_GOOGLE_CLIENTSECRET) ? {
clientID: process.env.HMD_GOOGLE_CLIENTID,
clientSecret: process.env.HMD_GOOGLE_CLIENTSECRET
} : (config.google && config.google.clientID && config.google.clientSecret && config.google) || false;
var ldap = config.ldap || ((
process.env.HMD_LDAP_URL ||
process.env.HMD_LDAP_BINDDN ||
process.env.HMD_LDAP_BINDCREDENTIALS ||
process.env.HMD_LDAP_TOKENSECRET ||
process.env.HMD_LDAP_SEARCHBASE ||
process.env.HMD_LDAP_SEARCHFILTER ||
process.env.HMD_LDAP_SEARCHATTRIBUTES ||
process.env.HMD_LDAP_TLS_CA ||
process.env.HMD_LDAP_PROVIDERNAME
) ? {} : false);
if (process.env.HMD_LDAP_URL)
ldap.url = process.env.HMD_LDAP_URL;
if (process.env.HMD_LDAP_BINDDN)
ldap.bindDn = process.env.HMD_LDAP_BINDDN;
if (process.env.HMD_LDAP_BINDCREDENTIALS)
ldap.bindCredentials = process.env.HMD_LDAP_BINDCREDENTIALS;
if (process.env.HMD_LDAP_TOKENSECRET)
ldap.tokenSecret = process.env.HMD_LDAP_TOKENSECRET;
if (process.env.HMD_LDAP_SEARCHBASE)
ldap.searchBase = process.env.HMD_LDAP_SEARCHBASE;
if (process.env.HMD_LDAP_SEARCHFILTER)
ldap.searchFilter = process.env.HMD_LDAP_SEARCHFILTER;
if (process.env.HMD_LDAP_SEARCHATTRIBUTES)
ldap.searchAttributes = process.env.HMD_LDAP_SEARCHATTRIBUTES;
if (process.env.HMD_LDAP_TLS_CA) {
var ca = {
ca: process.env.HMD_LDAP_TLS_CA.split(',')
}
ldap.tlsOptions = ldap.tlsOptions ? Object.assign(ldap.tlsOptions, ca) : ca;
if (Array.isArray(ldap.tlsOptions.ca) && ldap.tlsOptions.ca.length > 0) {
var i, len, results;
results = [];
for (i = 0, len = ldap.tlsOptions.ca.length; i < len; i++) {
results.push(fs.readFileSync(ldap.tlsOptions.ca[i], 'utf8'));
}
ldap.tlsOptions.ca = results;
}
}
if (process.env.HMD_LDAP_PROVIDERNAME) {
ldap.providerName = process.env.HMD_LDAP_PROVIDERNAME;
}
var imgur = process.env.HMD_IMGUR_CLIENTID || config.imgur || false;
var email = process.env.HMD_EMAIL ? (process.env.HMD_EMAIL === 'true') : !!config.email;
var allowemailregister = process.env.HMD_ALLOW_EMAIL_REGISTER ? (process.env.HMD_ALLOW_EMAIL_REGISTER === 'true') : ((typeof config.allowemailregister === 'boolean') ? config.allowemailregister : true);
function getserverurl() {
var url = '';
if (domain) {
var protocol = protocolusessl ? 'https://' : 'http://';
url = protocol + domain;
if (urladdport && ((usessl && port != 443) || (!usessl && port != 80)))
url += ':' + port;
}
if (urlpath)
url += '/' + urlpath;
return url;
}
var version = '0.5.0';
var minimumCompatibleVersion = '0.5.0';
var maintenance = true;
var cwd = path.join(__dirname, '..');
module.exports = {
version: version,
minimumCompatibleVersion: minimumCompatibleVersion,
maintenance: maintenance,
debug: debug,
urlpath: urlpath,
port: port,
alloworigin: alloworigin,
usessl: usessl,
serverurl: getserverurl(),
usecdn: usecdn,
allowanonymous: allowanonymous,
allowfreeurl: allowfreeurl,
defaultpermission: defaultpermission,
dburl: dburl,
db: db,
sslkeypath: path.join(cwd, sslkeypath),
sslcertpath: path.join(cwd, sslcertpath),
sslcapath: path.join(cwd, sslcapath),
dhparampath: path.join(cwd, dhparampath),
tmppath: path.join(cwd, tmppath),
defaultnotepath: path.join(cwd, defaultnotepath),
docspath: path.join(cwd, docspath),
indexpath: path.join(cwd, indexpath),
hackmdpath: path.join(cwd, hackmdpath),
errorpath: path.join(cwd, errorpath),
prettypath: path.join(cwd, prettypath),
slidepath: path.join(cwd, slidepath),
sessionname: sessionname,
sessionsecret: sessionsecret,
sessionlife: sessionlife,
staticcachetime: staticcachetime,
heartbeatinterval: heartbeatinterval,
heartbeattimeout: heartbeattimeout,
documentmaxlength: documentmaxlength,
facebook: facebook,
twitter: twitter,
github: github,
gitlab: gitlab,
dropbox: dropbox,
google: google,
ldap: ldap,
imgur: imgur,
email: email,
allowemailregister: allowemailregister,
imageUploadType: imageUploadType,
s3: s3,
s3bucket: s3bucket
};

View file

@ -1,161 +0,0 @@
'use strict'
const os = require('os')
module.exports = {
domain: '',
urlPath: '',
host: '0.0.0.0',
port: 3000,
loglevel: 'info',
urlAddPort: false,
allowOrigin: ['localhost'],
useSSL: false,
hsts: {
enable: true,
maxAgeSeconds: 60 * 60 * 24 * 365,
includeSubdomains: true,
preload: true
},
csp: {
enable: true,
directives: {
},
addDefaults: true,
addDisqus: true,
addGoogleAnalytics: true,
upgradeInsecureRequests: 'auto',
reportURI: undefined
},
protocolUseSSL: false,
useCDN: true,
allowAnonymous: true,
allowAnonymousEdits: false,
allowFreeURL: false,
forbiddenNoteIDs: ['robots.txt', 'favicon.ico', 'api'],
defaultPermission: 'editable',
dbURL: '',
db: {},
// ssl path
sslKeyPath: '',
sslCertPath: '',
sslCAPath: '',
dhParamPath: '',
// other path
viewPath: './public/views',
tmpPath: os.tmpdir(),
defaultNotePath: './public/default.md',
docsPath: './public/docs',
uploadsPath: './public/uploads',
// session
sessionName: 'connect.sid',
sessionSecret: 'secret',
sessionSecretLen: 128,
sessionLife: 14 * 24 * 60 * 60 * 1000, // 14 days
staticCacheTime: 1 * 24 * 60 * 60 * 1000, // 1 day
// socket.io
heartbeatInterval: 5000,
heartbeatTimeout: 10000,
// too busy timeout
tooBusyLag: 70,
// document
documentMaxLength: 100000,
// image upload setting, available options are imgur/s3/filesystem/azure/lutim
imageUploadType: 'filesystem',
lutim: {
url: 'https://framapic.org/'
},
imgur: {
clientID: undefined
},
s3: {
accessKeyId: undefined,
secretAccessKey: undefined,
region: undefined
},
minio: {
accessKey: undefined,
secretKey: undefined,
endPoint: undefined,
secure: true,
port: 9000
},
s3bucket: undefined,
azure: {
connectionString: undefined,
container: undefined
},
// authentication
oauth2: {
providerName: undefined,
authorizationURL: undefined,
tokenURL: undefined,
clientID: undefined,
clientSecret: undefined
},
facebook: {
clientID: undefined,
clientSecret: undefined
},
twitter: {
consumerKey: undefined,
consumerSecret: undefined
},
github: {
clientID: undefined,
clientSecret: undefined
},
gitlab: {
baseURL: undefined,
clientID: undefined,
clientSecret: undefined,
scope: undefined,
version: 'v4'
},
mattermost: {
baseURL: undefined,
clientID: undefined,
clientSecret: undefined
},
dropbox: {
clientID: undefined,
clientSecret: undefined,
appKey: undefined
},
google: {
clientID: undefined,
clientSecret: undefined
},
ldap: {
providerName: undefined,
url: undefined,
bindDn: undefined,
bindCredentials: undefined,
searchBase: undefined,
searchFilter: undefined,
searchAttributes: undefined,
usernameField: undefined,
useridField: undefined,
tlsca: undefined
},
saml: {
idpSsoUrl: undefined,
idpCert: undefined,
issuer: undefined,
identifierFormat: 'urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress',
disableRequestedAuthnContext: false,
groupAttribute: undefined,
externalGroups: [],
requiredGroups: [],
attribute: {
id: undefined,
username: undefined,
email: undefined
}
},
email: true,
allowEmailRegister: true,
allowGravatar: true,
allowPDFExport: true,
openID: false
}

View file

@ -1,17 +0,0 @@
'use strict'
const fs = require('fs')
function getFile (path) {
if (fs.existsSync(path)) {
return path
}
return undefined
}
module.exports = {
sslKeyPath: getFile('/run/secrets/key.pem'),
sslCertPath: getFile('/run/secrets/cert.pem'),
sslCAPath: getFile('/run/secrets/ca.pem') !== undefined ? [getFile('/run/secrets/ca.pem')] : [],
dhParamPath: getFile('/run/secrets/dhparam.pem')
}

View file

@ -1,60 +0,0 @@
'use strict'
const fs = require('fs')
const path = require('path')
const basePath = path.resolve('/var/run/secrets/')
function getSecret (secret) {
const filePath = path.join(basePath, secret)
if (fs.existsSync(filePath)) return fs.readFileSync(filePath)
return undefined
}
if (fs.existsSync(basePath)) {
module.exports = {
dbURL: getSecret('dbURL'),
sessionsecret: getSecret('sessionsecret'),
sslkeypath: getSecret('sslkeypath'),
sslcertpath: getSecret('sslcertpath'),
sslcapath: getSecret('sslcapath'),
dhparampath: getSecret('dhparampath'),
s3: {
accessKeyId: getSecret('s3_acccessKeyId'),
secretAccessKey: getSecret('s3_secretAccessKey')
},
azure: {
connectionString: getSecret('azure_connectionString')
},
facebook: {
clientID: getSecret('facebook_clientID'),
clientSecret: getSecret('facebook_clientSecret')
},
twitter: {
consumerKey: getSecret('twitter_consumerKey'),
consumerSecret: getSecret('twitter_consumerSecret')
},
github: {
clientID: getSecret('github_clientID'),
clientSecret: getSecret('github_clientSecret')
},
gitlab: {
clientID: getSecret('gitlab_clientID'),
clientSecret: getSecret('gitlab_clientSecret')
},
mattermost: {
clientID: getSecret('mattermost_clientID'),
clientSecret: getSecret('mattermost_clientSecret')
},
dropbox: {
clientID: getSecret('dropbox_clientID'),
clientSecret: getSecret('dropbox_clientSecret'),
appKey: getSecret('dropbox_appKey')
},
google: {
clientID: getSecret('google_clientID'),
clientSecret: getSecret('google_clientSecret')
},
imgur: getSecret('imgur_clientid')
}
}

View file

@ -1,16 +0,0 @@
'use strict'
exports.Environment = {
development: 'development',
production: 'production',
test: 'test'
}
exports.Permission = {
freely: 'freely',
editable: 'editable',
limited: 'limited',
locked: 'locked',
protected: 'protected',
private: 'private'
}

View file

@ -1,137 +0,0 @@
'use strict'
const { toBooleanConfig, toArrayConfig, toIntegerConfig } = require('./utils')
module.exports = {
sourceURL: process.env.CMD_SOURCE_URL,
domain: process.env.CMD_DOMAIN,
urlPath: process.env.CMD_URL_PATH,
host: process.env.CMD_HOST,
port: toIntegerConfig(process.env.CMD_PORT),
path: process.env.CMD_PATH,
loglevel: process.env.CMD_LOGLEVEL,
urlAddPort: toBooleanConfig(process.env.CMD_URL_ADDPORT),
useSSL: toBooleanConfig(process.env.CMD_USESSL),
hsts: {
enable: toBooleanConfig(process.env.CMD_HSTS_ENABLE),
maxAgeSeconds: toIntegerConfig(process.env.CMD_HSTS_MAX_AGE),
includeSubdomains: toBooleanConfig(process.env.CMD_HSTS_INCLUDE_SUBDOMAINS),
preload: toBooleanConfig(process.env.CMD_HSTS_PRELOAD)
},
csp: {
enable: toBooleanConfig(process.env.CMD_CSP_ENABLE),
reportURI: process.env.CMD_CSP_REPORTURI
},
protocolUseSSL: toBooleanConfig(process.env.CMD_PROTOCOL_USESSL),
allowOrigin: toArrayConfig(process.env.CMD_ALLOW_ORIGIN),
useCDN: toBooleanConfig(process.env.CMD_USECDN),
allowAnonymous: toBooleanConfig(process.env.CMD_ALLOW_ANONYMOUS),
allowAnonymousEdits: toBooleanConfig(process.env.CMD_ALLOW_ANONYMOUS_EDITS),
allowFreeURL: toBooleanConfig(process.env.CMD_ALLOW_FREEURL),
forbiddenNoteIDs: toArrayConfig(process.env.CMD_FORBIDDEN_NOTE_IDS),
defaultPermission: process.env.CMD_DEFAULT_PERMISSION,
dbURL: process.env.CMD_DB_URL,
sessionSecret: process.env.CMD_SESSION_SECRET,
sessionLife: toIntegerConfig(process.env.CMD_SESSION_LIFE),
tooBusyLag: toIntegerConfig(process.env.CMD_TOOBUSY_LAG),
imageUploadType: process.env.CMD_IMAGE_UPLOAD_TYPE,
imgur: {
clientID: process.env.CMD_IMGUR_CLIENTID
},
s3: {
accessKeyId: process.env.CMD_S3_ACCESS_KEY_ID,
secretAccessKey: process.env.CMD_S3_SECRET_ACCESS_KEY,
region: process.env.CMD_S3_REGION
},
minio: {
accessKey: process.env.CMD_MINIO_ACCESS_KEY,
secretKey: process.env.CMD_MINIO_SECRET_KEY,
endPoint: process.env.CMD_MINIO_ENDPOINT,
secure: toBooleanConfig(process.env.CMD_MINIO_SECURE),
port: toIntegerConfig(process.env.CMD_MINIO_PORT)
},
lutim: {
url: process.env.CMD_LUTIM_URL
},
s3bucket: process.env.CMD_S3_BUCKET,
azure: {
connectionString: process.env.CMD_AZURE_CONNECTION_STRING,
container: process.env.CMD_AZURE_CONTAINER
},
facebook: {
clientID: process.env.CMD_FACEBOOK_CLIENTID,
clientSecret: process.env.CMD_FACEBOOK_CLIENTSECRET
},
twitter: {
consumerKey: process.env.CMD_TWITTER_CONSUMERKEY,
consumerSecret: process.env.CMD_TWITTER_CONSUMERSECRET
},
github: {
clientID: process.env.CMD_GITHUB_CLIENTID,
clientSecret: process.env.CMD_GITHUB_CLIENTSECRET
},
gitlab: {
baseURL: process.env.CMD_GITLAB_BASEURL,
clientID: process.env.CMD_GITLAB_CLIENTID,
clientSecret: process.env.CMD_GITLAB_CLIENTSECRET,
scope: process.env.CMD_GITLAB_SCOPE
},
mattermost: {
baseURL: process.env.CMD_MATTERMOST_BASEURL,
clientID: process.env.CMD_MATTERMOST_CLIENTID,
clientSecret: process.env.CMD_MATTERMOST_CLIENTSECRET
},
oauth2: {
providerName: process.env.CMD_OAUTH2_PROVIDERNAME,
baseURL: process.env.CMD_OAUTH2_BASEURL,
userProfileURL: process.env.CMD_OAUTH2_USER_PROFILE_URL,
userProfileUsernameAttr: process.env.CMD_OAUTH2_USER_PROFILE_USERNAME_ATTR,
userProfileDisplayNameAttr: process.env.CMD_OAUTH2_USER_PROFILE_DISPLAY_NAME_ATTR,
userProfileEmailAttr: process.env.CMD_OAUTH2_USER_PROFILE_EMAIL_ATTR,
tokenURL: process.env.CMD_OAUTH2_TOKEN_URL,
authorizationURL: process.env.CMD_OAUTH2_AUTHORIZATION_URL,
clientID: process.env.CMD_OAUTH2_CLIENT_ID,
clientSecret: process.env.CMD_OAUTH2_CLIENT_SECRET
},
dropbox: {
clientID: process.env.CMD_DROPBOX_CLIENTID,
clientSecret: process.env.CMD_DROPBOX_CLIENTSECRET,
appKey: process.env.CMD_DROPBOX_APPKEY
},
google: {
clientID: process.env.CMD_GOOGLE_CLIENTID,
clientSecret: process.env.CMD_GOOGLE_CLIENTSECRET
},
ldap: {
providerName: process.env.CMD_LDAP_PROVIDERNAME,
url: process.env.CMD_LDAP_URL,
bindDn: process.env.CMD_LDAP_BINDDN,
bindCredentials: process.env.CMD_LDAP_BINDCREDENTIALS,
searchBase: process.env.CMD_LDAP_SEARCHBASE,
searchFilter: process.env.CMD_LDAP_SEARCHFILTER,
searchAttributes: toArrayConfig(process.env.CMD_LDAP_SEARCHATTRIBUTES),
usernameField: process.env.CMD_LDAP_USERNAMEFIELD,
useridField: process.env.CMD_LDAP_USERIDFIELD,
tlsca: process.env.CMD_LDAP_TLS_CA
},
saml: {
idpSsoUrl: process.env.CMD_SAML_IDPSSOURL,
idpCert: process.env.CMD_SAML_IDPCERT,
issuer: process.env.CMD_SAML_ISSUER,
identifierFormat: process.env.CMD_SAML_IDENTIFIERFORMAT,
disableRequestedAuthnContext: toBooleanConfig(process.env.CMD_SAML_DISABLEREQUESTEDAUTHNCONTEXT),
groupAttribute: process.env.CMD_SAML_GROUPATTRIBUTE,
externalGroups: toArrayConfig(process.env.CMD_SAML_EXTERNALGROUPS, '|', []),
requiredGroups: toArrayConfig(process.env.CMD_SAML_REQUIREDGROUPS, '|', []),
attribute: {
id: process.env.CMD_SAML_ATTRIBUTE_ID,
username: process.env.CMD_SAML_ATTRIBUTE_USERNAME,
email: process.env.CMD_SAML_ATTRIBUTE_EMAIL
}
},
email: toBooleanConfig(process.env.CMD_EMAIL),
allowEmailRegister: toBooleanConfig(process.env.CMD_ALLOW_EMAIL_REGISTER),
allowGravatar: toBooleanConfig(process.env.CMD_ALLOW_GRAVATAR),
allowPDFExport: toBooleanConfig(process.env.CMD_ALLOW_PDF_EXPORT),
openID: toBooleanConfig(process.env.CMD_OPENID)
}

View file

@ -1,125 +0,0 @@
'use strict'
const { toBooleanConfig, toArrayConfig, toIntegerConfig } = require('./utils')
module.exports = {
domain: process.env.HMD_DOMAIN,
urlPath: process.env.HMD_URL_PATH,
port: toIntegerConfig(process.env.HMD_PORT),
urlAddPort: toBooleanConfig(process.env.HMD_URL_ADDPORT),
useSSL: toBooleanConfig(process.env.HMD_USESSL),
hsts: {
enable: toBooleanConfig(process.env.HMD_HSTS_ENABLE),
maxAgeSeconds: toIntegerConfig(process.env.HMD_HSTS_MAX_AGE),
includeSubdomains: toBooleanConfig(process.env.HMD_HSTS_INCLUDE_SUBDOMAINS),
preload: toBooleanConfig(process.env.HMD_HSTS_PRELOAD)
},
csp: {
enable: toBooleanConfig(process.env.HMD_CSP_ENABLE),
reportURI: process.env.HMD_CSP_REPORTURI
},
protocolUseSSL: toBooleanConfig(process.env.HMD_PROTOCOL_USESSL),
allowOrigin: toArrayConfig(process.env.HMD_ALLOW_ORIGIN),
useCDN: toBooleanConfig(process.env.HMD_USECDN),
allowAnonymous: toBooleanConfig(process.env.HMD_ALLOW_ANONYMOUS),
allowAnonymousEdits: toBooleanConfig(process.env.HMD_ALLOW_ANONYMOUS_EDITS),
allowFreeURL: toBooleanConfig(process.env.HMD_ALLOW_FREEURL),
defaultPermission: process.env.HMD_DEFAULT_PERMISSION,
dbURL: process.env.HMD_DB_URL,
sessionSecret: process.env.HMD_SESSION_SECRET,
sessionLife: toIntegerConfig(process.env.HMD_SESSION_LIFE),
imageUploadType: process.env.HMD_IMAGE_UPLOAD_TYPE,
imgur: {
clientID: process.env.HMD_IMGUR_CLIENTID
},
s3: {
accessKeyId: process.env.HMD_S3_ACCESS_KEY_ID,
secretAccessKey: process.env.HMD_S3_SECRET_ACCESS_KEY,
region: process.env.HMD_S3_REGION
},
minio: {
accessKey: process.env.HMD_MINIO_ACCESS_KEY,
secretKey: process.env.HMD_MINIO_SECRET_KEY,
endPoint: process.env.HMD_MINIO_ENDPOINT,
secure: toBooleanConfig(process.env.HMD_MINIO_SECURE),
port: toIntegerConfig(process.env.HMD_MINIO_PORT)
},
s3bucket: process.env.HMD_S3_BUCKET,
azure: {
connectionString: process.env.HMD_AZURE_CONNECTION_STRING,
container: process.env.HMD_AZURE_CONTAINER
},
facebook: {
clientID: process.env.HMD_FACEBOOK_CLIENTID,
clientSecret: process.env.HMD_FACEBOOK_CLIENTSECRET
},
twitter: {
consumerKey: process.env.HMD_TWITTER_CONSUMERKEY,
consumerSecret: process.env.HMD_TWITTER_CONSUMERSECRET
},
github: {
clientID: process.env.HMD_GITHUB_CLIENTID,
clientSecret: process.env.HMD_GITHUB_CLIENTSECRET
},
gitlab: {
baseURL: process.env.HMD_GITLAB_BASEURL,
clientID: process.env.HMD_GITLAB_CLIENTID,
clientSecret: process.env.HMD_GITLAB_CLIENTSECRET,
scope: process.env.HMD_GITLAB_SCOPE
},
mattermost: {
baseURL: process.env.HMD_MATTERMOST_BASEURL,
clientID: process.env.HMD_MATTERMOST_CLIENTID,
clientSecret: process.env.HMD_MATTERMOST_CLIENTSECRET
},
oauth2: {
baseURL: process.env.HMD_OAUTH2_BASEURL,
userProfileURL: process.env.HMD_OAUTH2_USER_PROFILE_URL,
userProfileUsernameAttr: process.env.HMD_OAUTH2_USER_PROFILE_USERNAME_ATTR,
userProfileDisplayNameAttr: process.env.HMD_OAUTH2_USER_PROFILE_DISPLAY_NAME_ATTR,
userProfileEmailAttr: process.env.HMD_OAUTH2_USER_PROFILE_EMAIL_ATTR,
tokenURL: process.env.HMD_OAUTH2_TOKEN_URL,
authorizationURL: process.env.HMD_OAUTH2_AUTHORIZATION_URL,
clientID: process.env.HMD_OAUTH2_CLIENT_ID,
clientSecret: process.env.HMD_OAUTH2_CLIENT_SECRET
},
dropbox: {
clientID: process.env.HMD_DROPBOX_CLIENTID,
clientSecret: process.env.HMD_DROPBOX_CLIENTSECRET,
appKey: process.env.HMD_DROPBOX_APPKEY
},
google: {
clientID: process.env.HMD_GOOGLE_CLIENTID,
clientSecret: process.env.HMD_GOOGLE_CLIENTSECRET
},
ldap: {
providerName: process.env.HMD_LDAP_PROVIDERNAME,
url: process.env.HMD_LDAP_URL,
bindDn: process.env.HMD_LDAP_BINDDN,
bindCredentials: process.env.HMD_LDAP_BINDCREDENTIALS,
searchBase: process.env.HMD_LDAP_SEARCHBASE,
searchFilter: process.env.HMD_LDAP_SEARCHFILTER,
searchAttributes: toArrayConfig(process.env.HMD_LDAP_SEARCHATTRIBUTES),
usernameField: process.env.HMD_LDAP_USERNAMEFIELD,
useridField: process.env.HMD_LDAP_USERIDFIELD,
tlsca: process.env.HMD_LDAP_TLS_CA
},
saml: {
idpSsoUrl: process.env.HMD_SAML_IDPSSOURL,
idpCert: process.env.HMD_SAML_IDPCERT,
issuer: process.env.HMD_SAML_ISSUER,
identifierFormat: process.env.HMD_SAML_IDENTIFIERFORMAT,
disableRequestedAuthnContext: toBooleanConfig(process.env.HMD_SAML_DISABLEREQUESTEDAUTHNCONTEXT),
groupAttribute: process.env.HMD_SAML_GROUPATTRIBUTE,
externalGroups: toArrayConfig(process.env.HMD_SAML_EXTERNALGROUPS, '|', []),
requiredGroups: toArrayConfig(process.env.HMD_SAML_REQUIREDGROUPS, '|', []),
attribute: {
id: process.env.HMD_SAML_ATTRIBUTE_ID,
username: process.env.HMD_SAML_ATTRIBUTE_USERNAME,
email: process.env.HMD_SAML_ATTRIBUTE_EMAIL
}
},
email: toBooleanConfig(process.env.HMD_EMAIL),
allowEmailRegister: toBooleanConfig(process.env.HMD_ALLOW_EMAIL_REGISTER),
allowPDFExport: toBooleanConfig(process.env.HMD_ALLOW_PDF_EXPORT)
}

View file

@ -1,209 +0,0 @@
'use strict'
const crypto = require('crypto')
const fs = require('fs')
const path = require('path')
const { merge } = require('lodash')
const deepFreeze = require('deep-freeze')
const { Environment, Permission } = require('./enum')
const logger = require('../logger')
const { getGitCommit, getGitHubURL } = require('./utils')
const appRootPath = path.resolve(__dirname, '../../')
const env = process.env.NODE_ENV || Environment.development
const debugConfig = {
debug: (env === Environment.development)
}
// Get version string from package.json
const { version, repository } = require(path.join(appRootPath, 'package.json'))
const commitID = getGitCommit(appRootPath)
const sourceURL = getGitHubURL(repository.url, commitID || version)
const fullversion = commitID ? `${version}-${commitID}` : version
const packageConfig = {
version: version,
minimumCompatibleVersion: '0.5.0',
fullversion: fullversion,
sourceURL: sourceURL
}
const configFilePath = path.resolve(appRootPath, process.env.CMD_CONFIG_FILE ||
'config.json')
const fileConfig = fs.existsSync(configFilePath) ? require(configFilePath)[env] : undefined
let config = require('./default')
merge(config, require('./defaultSSL'))
merge(config, require('./oldDefault'))
merge(config, debugConfig)
merge(config, packageConfig)
merge(config, fileConfig)
merge(config, require('./oldEnvironment'))
merge(config, require('./hackmdEnvironment'))
merge(config, require('./environment'))
merge(config, require('./dockerSecret'))
if (['debug', 'verbose', 'info', 'warn', 'error'].includes(config.loglevel)) {
logger.level = config.loglevel
} else {
logger.error('Selected loglevel %s doesn\'t exist, using default level \'debug\'. Available options: debug, verbose, info, warn, error', config.loglevel)
}
// load LDAP CA
if (config.ldap.tlsca) {
let ca = config.ldap.tlsca.split(',')
let caContent = []
for (let i of ca) {
if (fs.existsSync(i)) {
caContent.push(fs.readFileSync(i, 'utf8'))
}
}
let tlsOptions = {
ca: caContent
}
config.ldap.tlsOptions = config.ldap.tlsOptions ? Object.assign(config.ldap.tlsOptions, tlsOptions) : tlsOptions
}
// Permission
config.permission = Permission
if (!config.allowAnonymous && !config.allowAnonymousEdits) {
delete config.permission.freely
}
if (!(config.defaultPermission in config.permission)) {
config.defaultPermission = config.permission.editable
}
// cache result, cannot change config in runtime!!!
config.isStandardHTTPsPort = (function isStandardHTTPsPort () {
return config.useSSL && config.port === 443
})()
config.isStandardHTTPPort = (function isStandardHTTPPort () {
return !config.useSSL && config.port === 80
})()
// cache serverURL
config.serverURL = (function getserverurl () {
var url = ''
if (config.domain) {
var protocol = config.protocolUseSSL ? 'https://' : 'http://'
url = protocol + config.domain
if (config.urlAddPort) {
if (!config.isStandardHTTPPort || !config.isStandardHTTPsPort) {
url += ':' + config.port
}
}
}
if (config.urlPath) {
url += '/' + config.urlPath
}
return url
})()
if (config.serverURL === '') {
logger.warn('Neither \'domain\' nor \'CMD_DOMAIN\' is configured. This can cause issues with various components.\nHint: Make sure \'protocolUseSSL\' and \'urlAddPort\' or \'CMD_PROTOCOL_USESSL\' and \'CMD_URL_ADDPORT\' are configured properly.')
}
config.Environment = Environment
// auth method
config.isFacebookEnable = config.facebook.clientID && config.facebook.clientSecret
config.isGoogleEnable = config.google.clientID && config.google.clientSecret
config.isDropboxEnable = config.dropbox.clientID && config.dropbox.clientSecret
config.isTwitterEnable = config.twitter.consumerKey && config.twitter.consumerSecret
config.isEmailEnable = config.email
config.isOpenIDEnable = config.openID
config.isGitHubEnable = config.github.clientID && config.github.clientSecret
config.isGitLabEnable = config.gitlab.clientID && config.gitlab.clientSecret
config.isMattermostEnable = config.mattermost.clientID && config.mattermost.clientSecret
config.isLDAPEnable = config.ldap.url
config.isSAMLEnable = config.saml.idpSsoUrl
config.isOAuth2Enable = config.oauth2.clientID && config.oauth2.clientSecret
config.isPDFExportEnable = config.allowPDFExport
// Check gitlab api version
if (config.gitlab && config.gitlab.version !== 'v4' && config.gitlab.version !== 'v3') {
logger.warn('config.js contains wrong version (' + config.gitlab.version + ') for gitlab api; it should be \'v3\' or \'v4\'. Defaulting to v4')
config.gitlab.version = 'v4'
}
// If gitlab scope is api, enable snippets Export/import
config.isGitlabSnippetsEnable = (!config.gitlab.scope || config.gitlab.scope === 'api') && config.isGitLabEnable
// Only update i18n files in development setups
config.updateI18nFiles = (env === Environment.development)
// merge legacy values
let keys = Object.keys(config)
const uppercase = /[A-Z]/
for (let i = keys.length; i--;) {
let lowercaseKey = keys[i].toLowerCase()
// if the config contains uppercase letters
// and a lowercase version of this setting exists
// and the config with uppercase is not set
// we set the new config using the old key.
if (uppercase.test(keys[i]) &&
config[lowercaseKey] !== undefined &&
fileConfig[keys[i]] === undefined) {
logger.warn('config.js contains deprecated lowercase setting for ' + keys[i] + '. Please change your config.js file to replace ' + lowercaseKey + ' with ' + keys[i])
config[keys[i]] = config[lowercaseKey]
}
}
// Notify users about the prefix change and inform them they use legacy prefix for environment variables
if (Object.keys(process.env).toString().indexOf('HMD_') !== -1) {
logger.warn('Using legacy HMD prefix for environment variables. Please change your variables in future. For details see: https://github.com/codimd/server#environment-variables-will-overwrite-other-server-configs')
}
// Generate session secret if it stays on default values
if (config.sessionSecret === 'secret') {
logger.warn('Session secret not set. Using random generated one. Please set `sessionSecret` in your config.js file. All users will be logged out.')
config.sessionSecret = crypto.randomBytes(Math.ceil(config.sessionSecretLen / 2)) // generate a cryptographically random value
.toString('hex') // convert to hexadecimal format
.slice(0, config.sessionSecretLen) // return required number of characters
}
// Validate upload providers
if (['filesystem', 's3', 'minio', 'imgur', 'azure', 'lutim'].indexOf(config.imageUploadType) === -1) {
logger.error('"imageuploadtype" is not correctly set. Please use "filesystem", "s3", "minio", "azure", "lutim" or "imgur". Defaulting to "filesystem"')
config.imageUploadType = 'filesystem'
}
// figure out mime types for image uploads
switch (config.imageUploadType) {
case 'imgur':
config.allowedUploadMimeTypes = [
'image/jpeg',
'image/png',
'image/jpg',
'image/gif'
]
break
default:
config.allowedUploadMimeTypes = [
'image/jpeg',
'image/png',
'image/jpg',
'image/gif',
'image/svg+xml'
]
}
// generate correct path
config.sslCAPath.forEach(function (capath, i, array) {
array[i] = path.resolve(appRootPath, capath)
})
config.sslCertPath = path.resolve(appRootPath, config.sslCertPath)
config.sslKeyPath = path.resolve(appRootPath, config.sslKeyPath)
config.dhParamPath = path.resolve(appRootPath, config.dhParamPath)
config.viewPath = path.resolve(appRootPath, config.viewPath)
config.tmpPath = path.resolve(appRootPath, config.tmpPath)
config.defaultNotePath = path.resolve(appRootPath, config.defaultNotePath)
config.docsPath = path.resolve(appRootPath, config.docsPath)
config.uploadsPath = path.resolve(appRootPath, config.uploadsPath)
// make config readonly
config = deepFreeze(config)
module.exports = config

View file

@ -1,42 +0,0 @@
'use strict'
module.exports = {
urlpath: undefined,
urladdport: undefined,
alloworigin: undefined,
usessl: undefined,
protocolusessl: undefined,
usecdn: undefined,
allowanonymous: undefined,
allowanonymousedits: undefined,
allowfreeurl: undefined,
defaultpermission: undefined,
dburl: undefined,
// ssl path
sslkeypath: undefined,
sslcertpath: undefined,
sslcapath: undefined,
dhparampath: undefined,
// other path
tmppath: undefined,
defaultnotepath: undefined,
docspath: undefined,
indexpath: undefined,
hackmdpath: undefined,
errorpath: undefined,
prettypath: undefined,
slidepath: undefined,
// session
sessionname: undefined,
sessionsecret: undefined,
sessionlife: undefined,
staticcachetime: undefined,
// socket.io
heartbeatinterval: undefined,
heartbeattimeout: undefined,
// document
documentmaxlength: undefined,
imageuploadtype: undefined,
allowemailregister: undefined,
allowpdfexport: undefined
}

View file

@ -1,10 +0,0 @@
'use strict'
const { toBooleanConfig } = require('./utils')
module.exports = {
debug: toBooleanConfig(process.env.DEBUG),
dburl: process.env.DATABASE_URL,
urlpath: process.env.URL_PATH,
port: process.env.PORT
}

View file

@ -1,55 +0,0 @@
'use strict'
const fs = require('fs')
const path = require('path')
exports.toBooleanConfig = function toBooleanConfig (configValue) {
if (configValue && typeof configValue === 'string') {
return (configValue === 'true')
}
return configValue
}
exports.toArrayConfig = function toArrayConfig (configValue, separator = ',', fallback) {
if (configValue && typeof configValue === 'string') {
return (configValue.split(separator).map(arrayItem => arrayItem.trim()))
}
return fallback
}
exports.toIntegerConfig = function toIntegerConfig (configValue) {
if (configValue && typeof configValue === 'string') {
return parseInt(configValue)
}
return configValue
}
exports.getGitCommit = function getGitCommit (repodir) {
if (!fs.existsSync(repodir + '/.git/HEAD')) {
return undefined
}
let reference = fs.readFileSync(repodir + '/.git/HEAD', 'utf8')
if (reference.startsWith('ref: ')) {
reference = reference.substr(5).replace('\n', '')
reference = fs.readFileSync(path.resolve(repodir + '/.git', reference), 'utf8')
}
reference = reference.replace('\n', '')
return reference
}
exports.getGitHubURL = function getGitHubURL (repo, reference) {
// if it's not a github reference, return the repo URL unchanged
if (!repo.startsWith('https://github.com') && !repo.startsWith('git@github.com')) {
return repo
}
if (repo.startsWith('git@github.com') || repo.startsWith('ssh://git@github.com')) {
repo = repo.replace(/^(ssh:\/\/)?git@github.com:/, 'https://github.com/')
}
if (repo.endsWith('.git')) {
repo = repo.replace(/\.git$/, '/')
} else if (!repo.endsWith('/')) {
repo = repo + '/'
}
return repo + 'tree/' + reference
}

View file

@@ -1,100 +0,0 @@
var config = require('./config')
var uuid = require('uuid')

var CspStrategy = {}

var defaultDirectives = {
  defaultSrc: ['\'self\''],
  scriptSrc: ['\'self\'', 'vimeo.com', 'https://gist.github.com', 'www.slideshare.net', 'https://query.yahooapis.com', '\'unsafe-eval\''],
  // ^ TODO: Remove unsafe-eval - webpack script-loader issues https://github.com/hackmdio/codimd/issues/594
  imgSrc: ['*'],
  styleSrc: ['\'self\'', '\'unsafe-inline\'', 'https://github.githubassets.com'], // unsafe-inline is required for some libs, plus used in views
  fontSrc: ['\'self\'', 'data:', 'https://public.slidesharecdn.com'],
  objectSrc: ['*'], // Chrome PDF viewer treats PDFs as objects :/
  mediaSrc: ['*'],
  childSrc: ['*'],
  connectSrc: ['*']
}

var cdnDirectives = {
  scriptSrc: ['https://cdnjs.cloudflare.com', 'https://cdn.mathjax.org'],
  styleSrc: ['https://cdnjs.cloudflare.com', 'https://fonts.googleapis.com'],
  fontSrc: ['https://cdnjs.cloudflare.com', 'https://fonts.gstatic.com']
}

var disqusDirectives = {
  scriptSrc: ['https://disqus.com', 'https://*.disqus.com', 'https://*.disquscdn.com'],
  styleSrc: ['https://*.disquscdn.com'],
  fontSrc: ['https://*.disquscdn.com']
}

var googleAnalyticsDirectives = {
  scriptSrc: ['https://www.google-analytics.com']
}

CspStrategy.computeDirectives = function () {
  var directives = {}
  mergeDirectives(directives, config.csp.directives)
  mergeDirectivesIf(config.csp.addDefaults, directives, defaultDirectives)
  mergeDirectivesIf(config.useCDN, directives, cdnDirectives)
  mergeDirectivesIf(config.csp.addDisqus, directives, disqusDirectives)
  mergeDirectivesIf(config.csp.addGoogleAnalytics, directives, googleAnalyticsDirectives)
  if (!areAllInlineScriptsAllowed(directives)) {
    addInlineScriptExceptions(directives)
  }
  addUpgradeUnsafeRequestsOptionTo(directives)
  addReportURI(directives)
  return directives
}

function mergeDirectives (existingDirectives, newDirectives) {
  for (var propertyName in newDirectives) {
    var newDirective = newDirectives[propertyName]
    if (newDirective) {
      var existingDirective = existingDirectives[propertyName] || []
      existingDirectives[propertyName] = existingDirective.concat(newDirective)
    }
  }
}

function mergeDirectivesIf (condition, existingDirectives, newDirectives) {
  if (condition) {
    mergeDirectives(existingDirectives, newDirectives)
  }
}

function areAllInlineScriptsAllowed (directives) {
  return directives.scriptSrc.indexOf('\'unsafe-inline\'') !== -1
}

function addInlineScriptExceptions (directives) {
  directives.scriptSrc.push(getCspNonce)
  // TODO: This is the SHA-256 hash of the inline script in build/reveal.js/plugins/notes/notes.html
  // A cleaner solution would be appreciated.
  directives.scriptSrc.push('\'sha256-Lc+VnBdinzYTTAkFrIoUqdoA9EQFeS1AF9ybmF+LLfM=\'')
}

function getCspNonce (req, res) {
  return "'nonce-" + res.locals.nonce + "'"
}

function addUpgradeUnsafeRequestsOptionTo (directives) {
  if (config.csp.upgradeInsecureRequests === 'auto' && config.useSSL) {
    directives.upgradeInsecureRequests = true
  } else if (config.csp.upgradeInsecureRequests === true) {
    directives.upgradeInsecureRequests = true
  }
}

function addReportURI (directives) {
  if (config.csp.reportURI) {
    directives.reportUri = config.csp.reportURI
  }
}

CspStrategy.addNonceToLocals = function (req, res, next) {
  res.locals.nonce = uuid.v4()
  next()
}

module.exports = CspStrategy
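A minimal sketch of how a strategy like this is typically plugged into an Express app via helmet's CSP middleware; the app wiring below is an assumption for illustration and is not part of this diff:

'use strict'
const express = require('express')
const helmet = require('helmet')
const csp = require('./lib/csp')

const app = express()

// The nonce must exist on res.locals before the header is generated,
// because scriptSrc may contain the getCspNonce(req, res) function above.
app.use(csp.addNonceToLocals)
app.use(helmet.contentSecurityPolicy({
  directives: csp.computeDirectives()
}))

app.listen(3000)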

View file

@@ -1,200 +1,172 @@
-'use strict'
-// history
-// external modules
-var LZString = require('lz-string')
-// core
-var logger = require('./logger')
-var response = require('./response')
-var models = require('./models')
-// public
-var History = {
-  historyGet: historyGet,
-  historyPost: historyPost,
-  historyDelete: historyDelete,
-  updateHistory: updateHistory
-}
-function getHistory (userid, callback) {
-  models.User.findOne({
-    where: {
-      id: userid
-    }
-  }).then(function (user) {
-    if (!user) {
-      return callback(null, null)
-    }
-    var history = {}
-    if (user.history) {
-      history = JSON.parse(user.history)
-      // migrate LZString encoded note id to base64url encoded note id
-      for (let i = 0, l = history.length; i < l; i++) {
-        // Calculate minimal string length for an UUID that is
-        // base64 encoded and optimize comparison by using -1;
-        // this should make a lot of LZ-String parsing errors obsolete
-        // as we can assume that a noteId that is 48 chars or longer is a
-        // noteID.
-        const base64UuidLength = ((4 * 36) / 3) - 1
-        if (!(history[i].id.length > base64UuidLength)) {
-          continue
-        }
-        try {
-          let id = LZString.decompressFromBase64(history[i].id)
-          if (id && models.Note.checkNoteIdValid(id)) {
-            history[i].id = models.Note.encodeNoteId(id)
-          }
-        } catch (err) {
-          // most error here comes from LZString, ignore
-          if (err.message === 'Cannot read property \'charAt\' of undefined') {
-            logger.warning('Looks like we can not decode "' + history[i].id + '" with LZString. Can be ignored.')
-          } else {
-            logger.error(err)
-          }
-        }
-      }
-      history = parseHistoryToObject(history)
-    }
-    logger.debug(`read history success: ${user.id}`)
-    return callback(null, history)
-  }).catch(function (err) {
-    logger.error('read history failed: ' + err)
-    return callback(err, null)
-  })
-}
-function setHistory (userid, history, callback) {
-  models.User.update({
-    history: JSON.stringify(parseHistoryToArray(history))
-  }, {
-    where: {
-      id: userid
-    }
-  }).then(function (count) {
-    return callback(null, count)
-  }).catch(function (err) {
-    logger.error('set history failed: ' + err)
-    return callback(err, null)
-  })
-}
-function updateHistory (userid, noteId, document, time) {
-  if (userid && noteId && typeof document !== 'undefined') {
-    getHistory(userid, function (err, history) {
-      if (err || !history) return
-      if (!history[noteId]) {
-        history[noteId] = {}
-      }
-      var noteHistory = history[noteId]
-      var noteInfo = models.Note.parseNoteInfo(document)
-      noteHistory.id = noteId
-      noteHistory.text = noteInfo.title
-      noteHistory.time = time || Date.now()
-      noteHistory.tags = noteInfo.tags
-      setHistory(userid, history, function (err, count) {
-        if (err) {
-          logger.log(err)
-        }
-      })
-    })
-  }
-}
-function parseHistoryToArray (history) {
-  var _history = []
-  Object.keys(history).forEach(function (key) {
-    var item = history[key]
-    _history.push(item)
-  })
-  return _history
-}
-function parseHistoryToObject (history) {
-  var _history = {}
-  for (var i = 0, l = history.length; i < l; i++) {
-    var item = history[i]
-    _history[item.id] = item
-  }
-  return _history
-}
-function historyGet (req, res) {
-  if (req.isAuthenticated()) {
-    getHistory(req.user.id, function (err, history) {
-      if (err) return response.errorInternalError(res)
-      if (!history) return response.errorNotFound(res)
-      res.send({
-        history: parseHistoryToArray(history)
-      })
-    })
-  } else {
-    return response.errorForbidden(res)
-  }
-}
-function historyPost (req, res) {
-  if (req.isAuthenticated()) {
-    var noteId = req.params.noteId
-    if (!noteId) {
-      if (typeof req.body['history'] === 'undefined') return response.errorBadRequest(res)
-      logger.debug(`SERVER received history from [${req.user.id}]: ${req.body.history}`)
-      try {
-        var history = JSON.parse(req.body.history)
-      } catch (err) {
-        return response.errorBadRequest(res)
-      }
-      if (Array.isArray(history)) {
-        setHistory(req.user.id, history, function (err, count) {
-          if (err) return response.errorInternalError(res)
-          res.end()
-        })
-      } else {
-        return response.errorBadRequest(res)
-      }
-    } else {
-      if (typeof req.body['pinned'] === 'undefined') return response.errorBadRequest(res)
-      getHistory(req.user.id, function (err, history) {
-        if (err) return response.errorInternalError(res)
-        if (!history) return response.errorNotFound(res)
-        if (!history[noteId]) return response.errorNotFound(res)
-        if (req.body.pinned === 'true' || req.body.pinned === 'false') {
-          history[noteId].pinned = (req.body.pinned === 'true')
-          setHistory(req.user.id, history, function (err, count) {
-            if (err) return response.errorInternalError(res)
-            res.end()
-          })
-        } else {
-          return response.errorBadRequest(res)
-        }
-      })
-    }
-  } else {
-    return response.errorForbidden(res)
-  }
-}
-function historyDelete (req, res) {
-  if (req.isAuthenticated()) {
-    var noteId = req.params.noteId
-    if (!noteId) {
-      setHistory(req.user.id, [], function (err, count) {
-        if (err) return response.errorInternalError(res)
-        res.end()
-      })
-    } else {
-      getHistory(req.user.id, function (err, history) {
-        if (err) return response.errorInternalError(res)
-        if (!history) return response.errorNotFound(res)
-        delete history[noteId]
-        setHistory(req.user.id, history, function (err, count) {
-          if (err) return response.errorInternalError(res)
-          res.end()
-        })
-      })
-    }
-  } else {
-    return response.errorForbidden(res)
-  }
-}
-module.exports = History
+//history
+//external modules
+var async = require('async');
+var LZString = require('lz-string');
+//core
+var config = require("./config.js");
+var logger = require("./logger.js");
+var response = require("./response.js");
+var models = require("./models");
+//public
+var History = {
+  historyGet: historyGet,
+  historyPost: historyPost,
+  historyDelete: historyDelete,
+  updateHistory: updateHistory
+};
+function getHistory(userid, callback) {
+  models.User.findOne({
+    where: {
+      id: userid
+    }
+  }).then(function (user) {
+    if (!user)
+      return callback(null, null);
+    var history = {};
+    if (user.history)
+      history = parseHistoryToObject(JSON.parse(user.history));
+    if (config.debug)
+      logger.info('read history success: ' + user.id);
+    return callback(null, history);
+  }).catch(function (err) {
+    logger.error('read history failed: ' + err);
+    return callback(err, null);
+  });
+}
+function setHistory(userid, history, callback) {
+  models.User.update({
+    history: JSON.stringify(parseHistoryToArray(history))
+  }, {
+    where: {
+      id: userid
+    }
+  }).then(function (count) {
+    return callback(null, count);
+  }).catch(function (err) {
+    logger.error('set history failed: ' + err);
+    return callback(err, null);
+  });
+}
+function updateHistory(userid, noteId, document, time) {
+  if (userid && noteId && typeof document !== 'undefined') {
+    getHistory(userid, function (err, history) {
+      if (err || !history) return;
+      if (!history[noteId]) {
+        history[noteId] = {};
+      }
+      var noteHistory = history[noteId];
+      var noteInfo = models.Note.parseNoteInfo(document);
+      noteHistory.id = noteId;
+      noteHistory.text = noteInfo.title;
+      noteHistory.time = time || Date.now();
+      noteHistory.tags = noteInfo.tags;
+      setHistory(userid, history, function (err, count) {
+        return;
+      });
+    });
+  }
+}
+function parseHistoryToArray(history) {
+  var _history = [];
+  Object.keys(history).forEach(function (key) {
+    var item = history[key];
+    _history.push(item);
+  });
+  return _history;
+}
+function parseHistoryToObject(history) {
+  var _history = {};
+  for (var i = 0, l = history.length; i < l; i++) {
+    var item = history[i];
+    _history[item.id] = item;
+  }
+  return _history;
+}
+function historyGet(req, res) {
+  if (req.isAuthenticated()) {
+    getHistory(req.user.id, function (err, history) {
+      if (err) return response.errorInternalError(res);
+      if (!history) return response.errorNotFound(res);
+      res.send({
+        history: parseHistoryToArray(history)
+      });
+    });
+  } else {
+    return response.errorForbidden(res);
+  }
+}
+function historyPost(req, res) {
+  if (req.isAuthenticated()) {
+    var noteId = req.params.noteId;
+    if (!noteId) {
+      if (typeof req.body['history'] === 'undefined') return response.errorBadRequest(res);
+      if (config.debug)
+        logger.info('SERVER received history from [' + req.user.id + ']: ' + req.body.history);
+      try {
+        var history = JSON.parse(req.body.history);
+      } catch (err) {
+        return response.errorBadRequest(res);
+      }
+      if (Array.isArray(history)) {
+        setHistory(req.user.id, history, function (err, count) {
+          if (err) return response.errorInternalError(res);
+          res.end();
+        });
+      } else {
+        return response.errorBadRequest(res);
+      }
+    } else {
+      if (typeof req.body['pinned'] === 'undefined') return response.errorBadRequest(res);
+      getHistory(req.user.id, function (err, history) {
+        if (err) return response.errorInternalError(res);
+        if (!history) return response.errorNotFound(res);
+        if (!history[noteId]) return response.errorNotFound(res);
+        if (req.body.pinned === 'true' || req.body.pinned === 'false') {
+          history[noteId].pinned = (req.body.pinned === 'true');
+          setHistory(req.user.id, history, function (err, count) {
+            if (err) return response.errorInternalError(res);
+            res.end();
+          });
+        } else {
+          return response.errorBadRequest(res);
+        }
+      });
+    }
+  } else {
+    return response.errorForbidden(res);
+  }
+}
+function historyDelete(req, res) {
+  if (req.isAuthenticated()) {
+    var noteId = req.params.noteId;
+    if (!noteId) {
+      setHistory(req.user.id, [], function (err, count) {
+        if (err) return response.errorInternalError(res);
+        res.end();
+      });
+    } else {
+      getHistory(req.user.id, function (err, history) {
+        if (err) return response.errorInternalError(res);
+        if (!history) return response.errorNotFound(res);
+        delete history[noteId];
+        setHistory(req.user.id, history, function (err, count) {
+          if (err) return response.errorInternalError(res);
+          res.end();
+        });
+      });
+    }
+  } else {
+    return response.errorForbidden(res);
+  }
+}
+module.exports = History;
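In both versions a user's history is persisted as a JSON array on the User row and keyed by note id in memory. A representative entry, with placeholder values, as assembled by updateHistory() and the pinned branch of historyPost():

// Shape of one history entry (all values are placeholders):
var entry = {
  id: 'noteid',               // note id; the newer code migrates LZString ids to base64url
  text: 'Untitled note',      // title taken from models.Note.parseNoteInfo(document)
  time: 1487490327000,        // last-visit timestamp, defaults to Date.now()
  tags: ['webpack', 'notes'], // tags parsed from the document
  pinned: false               // set by historyPost() when a pinned value is sent
}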

View file

@@ -1,50 +1,25 @@
-'use strict'
-// external modules
-const crypto = require('crypto')
-const randomcolor = require('randomcolor')
-const config = require('./config')
-// core
-exports.generateAvatar = function (name) {
-  const color = randomcolor({
-    seed: name,
-    luminosity: 'dark'
-  })
-  const letter = name.substring(0, 1).toUpperCase()
-  let svg = '<?xml version="1.0" encoding="UTF-8" standalone="no"?>'
-  svg += '<svg xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns="http://www.w3.org/2000/svg" height="96" width="96" version="1.1" viewBox="0 0 96 96">'
-  svg += '<g>'
-  svg += '<rect width="96" height="96" fill="' + color + '" />'
-  svg += '<text font-size="64px" font-family="sans-serif" text-anchor="middle" fill="#ffffff">'
-  svg += '<tspan x="48" y="72" stroke-width=".26458px" fill="#ffffff">' + letter + '</tspan>'
-  svg += '</text>'
-  svg += '</g>'
-  svg += '</svg>'
-  return svg
-}
-exports.generateAvatarURL = function (name, email = '', big = true) {
-  let photo
-  if (typeof email !== 'string') {
-    email = '' + name + '@example.com'
-  }
-  name = encodeURIComponent(name)
-  let hash = crypto.createHash('md5')
-  hash.update(email.toLowerCase())
-  let hexDigest = hash.digest('hex')
-  if (email !== '' && config.allowGravatar) {
-    photo = 'https://cdn.libravatar.org/avatar/' + hexDigest
-    if (big) {
-      photo += '?s=400'
-    } else {
-      photo += '?s=96'
-    }
-  } else {
-    photo = config.serverURL + '/user/' + (name || email.substring(0, email.lastIndexOf('@')) || hexDigest) + '/avatar.svg'
-  }
-  return photo
-}
+"use strict";
+// external modules
+var randomcolor = require('randomcolor');
+// core
+module.exports = function(name) {
+  var color = randomcolor({
+    seed: name,
+    luminosity: 'dark'
+  });
+  var letter = name.substring(0, 1).toUpperCase();
+  var svg = '<?xml version="1.0" encoding="UTF-8" standalone="no"?>';
+  svg += '<svg xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns="http://www.w3.org/2000/svg" height="96" width="96" version="1.1" viewBox="0 0 96 96">';
+  svg += '<g>';
+  svg += '<rect width="96" height="96" fill="' + color + '" />';
+  svg += '<text font-size="64px" font-family="sans-serif" text-anchor="middle" fill="#ffffff">';
+  svg += '<tspan x="48" y="72" stroke-width=".26458px" fill="#ffffff">' + letter + '</tspan>';
+  svg += '</text>';
+  svg += '</g>';
+  svg += '</svg>';
+  return 'data:image/svg+xml;base64,' + new Buffer(svg).toString('base64');
+};
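A short usage sketch of the newer (removed-side) module; the require path and the inputs are assumptions for illustration:

'use strict'
const avatars = require('./lib/letter-avatars')

// Deterministic per name: the name seeds randomcolor, so 'Alice' always gets
// the same background colour behind the letter 'A'.
const svg = avatars.generateAvatar('Alice')

// With config.allowGravatar enabled, a non-empty email yields a libravatar
// URL keyed by the md5 of the lower-cased address; otherwise the app serves
// the generated SVG from its own /user/<name>/avatar.svg route.
const url = avatars.generateAvatarURL('Alice', 'alice@example.com', true)
// e.g. 'https://cdn.libravatar.org/avatar/<md5-hex>?s=400'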

View file

@@ -1,27 +1,22 @@
-'use strict'
-const { createLogger, format, transports } = require('winston')
-const logger = createLogger({
-  level: 'debug',
-  format: format.combine(
-    format.uncolorize(),
-    format.timestamp(),
-    format.align(),
-    format.splat(),
-    format.printf(info => `${info.timestamp} ${info.level}: ${info.message}`)
-  ),
-  transports: [
-    new transports.Console({
-      handleExceptions: true
-    })
-  ],
-  exitOnError: false
-})
-logger.stream = {
-  write: function (message, encoding) {
-    logger.info(message)
-  }
-}
-module.exports = logger
+var winston = require('winston');
+winston.emitErrs = true;
+var logger = new winston.Logger({
+  transports: [
+    new winston.transports.Console({
+      level: 'debug',
+      handleExceptions: true,
+      json: false,
+      colorize: true,
+      timestamp: true
+    })
+  ],
+  exitOnError: false
+});
+module.exports = logger;
+module.exports.stream = {
+  write: function(message, encoding){
+    logger.info(message);
+  }
+};
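Both variants expose a stream with a write() method so an HTTP access logger can be piped through winston; a minimal sketch with morgan (morgan and the Express wiring are assumptions, not part of this diff):

'use strict'
const express = require('express')
const morgan = require('morgan')
const logger = require('./lib/logger')

const app = express()

// morgan writes each formatted access-log line to the given stream, and the
// stream forwards it to winston at info level.
app.use(morgan('combined', { stream: logger.stream }))

logger.info('HTTP logging attached')
app.listen(3000)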

Some files were not shown because too many files have changed in this diff.