diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml
new file mode 100644
index 00000000000..8fd968ed280
--- /dev/null
+++ b/.github/FUNDING.yml
@@ -0,0 +1,3 @@
+# These are supported funding model platforms
+
+github: ether
diff --git a/.github/workflows/backend-tests.yml b/.github/workflows/backend-tests.yml
new file mode 100644
index 00000000000..e50491d232e
--- /dev/null
+++ b/.github/workflows/backend-tests.yml
@@ -0,0 +1,60 @@
+name: "Backend tests"
+
+# any branch is useful for testing before a PR is submitted
+on: [push, pull_request]
+
+jobs:
+ withoutplugins:
+ # run on pushes to any branch
+ # run on PRs from external forks
+ if: |
+ (github.event_name != 'pull_request')
+ || (github.event.pull_request.head.repo.id != github.event.pull_request.base.repo.id)
+ name: without plugins
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v2
+
+ - name: Install libreoffice
+ run: |
+ sudo add-apt-repository -y ppa:libreoffice/ppa
+ sudo apt update
+ sudo apt install -y --no-install-recommends libreoffice libreoffice-pdfimport
+
+ - name: Install all dependencies and symlink for ep_etherpad-lite
+ run: bin/installDeps.sh
+
+ # configures some settings and runs npm run test
+ - name: Run the backend tests
+ run: tests/frontend/travis/runnerBackend.sh
+
+ withplugins:
+ # run on pushes to any branch
+ # run on PRs from external forks
+ if: |
+ (github.event_name != 'pull_request')
+ || (github.event.pull_request.head.repo.id != github.event.pull_request.base.repo.id)
+ name: with Plugins
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v2
+
+ - name: Install libreoffice
+ run: |
+ sudo add-apt-repository -y ppa:libreoffice/ppa
+ sudo apt update
+ sudo apt install -y --no-install-recommends libreoffice libreoffice-pdfimport
+
+ - name: Install all dependencies and symlink for ep_etherpad-lite
+ run: bin/installDeps.sh
+
+ - name: Install etherpad plugins
+ run: npm install ep_align ep_author_hover ep_cursortrace ep_font_size ep_hash_auth ep_headings2 ep_markdown ep_readonly_guest ep_spellcheck ep_subscript_and_superscript ep_table_of_contents
+
+ # configures some settings and runs npm run test
+ - name: Run the backend tests
+ run: tests/frontend/travis/runnerBackend.sh
diff --git a/.github/workflows/dockerfile.yml b/.github/workflows/dockerfile.yml
new file mode 100644
index 00000000000..8f6d5c3b037
--- /dev/null
+++ b/.github/workflows/dockerfile.yml
@@ -0,0 +1,26 @@
+name: "Dockerfile"
+
+# any branch is useful for testing before a PR is submitted
+on: [push, pull_request]
+
+jobs:
+ dockerfile:
+ # run on pushes to any branch
+ # run on PRs from external forks
+ if: |
+ (github.event_name != 'pull_request')
+ || (github.event.pull_request.head.repo.id != github.event.pull_request.base.repo.id)
+ name: build image and run connectivity test
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v2
+
+ - name: docker build
+ run: |
+ docker build -t etherpad:test .
+ docker run -d -p 9001:9001 etherpad:test
+ ./bin/installDeps.sh
+ sleep 3 # delay for startup?
+ cd src && npm run test-container
diff --git a/.github/workflows/frontend-tests.yml b/.github/workflows/frontend-tests.yml
new file mode 100644
index 00000000000..3b178622ea0
--- /dev/null
+++ b/.github/workflows/frontend-tests.yml
@@ -0,0 +1,83 @@
+name: "Frontend tests"
+
+on: [push]
+
+jobs:
+ withoutplugins:
+ name: without plugins
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v2
+
+ - name: Run sauce-connect-action
+ shell: bash
+ env:
+ SAUCE_USERNAME: ${{ secrets.SAUCE_USERNAME }}
+ SAUCE_ACCESS_KEY: ${{ secrets.SAUCE_ACCESS_KEY }}
+ TRAVIS_JOB_NUMBER: ${{ github.run_id }}-${{ github.run_number }}-${{ github.job }}
+ run: tests/frontend/travis/sauce_tunnel.sh
+
+ - name: Install all dependencies and symlink for ep_etherpad-lite
+ run: bin/installDeps.sh
+
+ - name: export GIT_HASH to env
+ id: environment
+ run: echo "::set-output name=sha_short::$(git rev-parse --short ${{ github.sha }})"
+
+ - name: Write custom settings.json with loglevel WARN
+ run: "sed 's/\"loglevel\": \"INFO\",/\"loglevel\": \"WARN\",/' < settings.json.template > settings.json"
+
+ - name: Run the frontend tests
+ shell: bash
+ env:
+ SAUCE_USERNAME: ${{ secrets.SAUCE_USERNAME }}
+ SAUCE_ACCESS_KEY: ${{ secrets.SAUCE_ACCESS_KEY }}
+ TRAVIS_JOB_NUMBER: ${{ github.run_id }}-${{ github.run_number }}-${{ github.job }}
+ GIT_HASH: ${{ steps.environment.outputs.sha_short }}
+ run: |
+ tests/frontend/travis/runner.sh
+
+ withplugins:
+ name: with plugins
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v2
+
+ - name: Run sauce-connect-action
+ shell: bash
+ env:
+ SAUCE_USERNAME: ${{ secrets.SAUCE_USERNAME }}
+ SAUCE_ACCESS_KEY: ${{ secrets.SAUCE_ACCESS_KEY }}
+ TRAVIS_JOB_NUMBER: ${{ github.run_id }}-${{ github.run_number }}-${{ github.job }}
+ run: tests/frontend/travis/sauce_tunnel.sh
+
+ - name: Install all dependencies and symlink for ep_etherpad-lite
+ run: bin/installDeps.sh
+
+ - name: Install etherpad plugins
+ run: npm install ep_align ep_author_hover ep_cursortrace ep_font_size ep_hash_auth ep_headings2 ep_markdown ep_readonly_guest ep_spellcheck ep_subscript_and_superscript ep_table_of_contents ep_set_title_on_pad
+
+ - name: export GIT_HASH to env
+ id: environment
+ run: echo "::set-output name=sha_short::$(git rev-parse --short ${{ github.sha }})"
+
+ - name: Write custom settings.json with loglevel WARN
+ run: "sed 's/\"loglevel\": \"INFO\",/\"loglevel\": \"WARN\",/' < settings.json.template > settings.json"
+
+      # XXX we should probably run all tests, because plugins could affect their results
+ - name: Remove standard frontend test files, so only plugin tests are run
+ run: rm tests/frontend/specs/*
+
+ - name: Run the frontend tests
+ shell: bash
+ env:
+ SAUCE_USERNAME: ${{ secrets.SAUCE_USERNAME }}
+ SAUCE_ACCESS_KEY: ${{ secrets.SAUCE_ACCESS_KEY }}
+ TRAVIS_JOB_NUMBER: ${{ github.run_id }}-${{ github.run_number }}-${{ github.job }}
+ GIT_HASH: ${{ steps.environment.outputs.sha_short }}
+ run: |
+ tests/frontend/travis/runner.sh
diff --git a/.github/workflows/lint-package-lock.yml b/.github/workflows/lint-package-lock.yml
new file mode 100644
index 00000000000..beef64ffe33
--- /dev/null
+++ b/.github/workflows/lint-package-lock.yml
@@ -0,0 +1,24 @@
+name: "Lint"
+
+# any branch is useful for testing before a PR is submitted
+on: [push, pull_request]
+
+jobs:
+ lint-package-lock:
+ # run on pushes to any branch
+ # run on PRs from external forks
+ if: |
+ (github.event_name != 'pull_request')
+ || (github.event.pull_request.head.repo.id != github.event.pull_request.base.repo.id)
+ name: package-lock.json
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v2
+
+ - name: Install lockfile-lint
+ run: npm install lockfile-lint
+
+ - name: Run lockfile-lint on package-lock.json
+ run: npx lockfile-lint --path src/package-lock.json --validate-https --allowed-hosts npm
diff --git a/.github/workflows/load-test.yml b/.github/workflows/load-test.yml
new file mode 100644
index 00000000000..095adc785b0
--- /dev/null
+++ b/.github/workflows/load-test.yml
@@ -0,0 +1,53 @@
+name: "Loadtest"
+
+# any branch is useful for testing before a PR is submitted
+on: [push, pull_request]
+
+jobs:
+ withoutplugins:
+ # run on pushes to any branch
+ # run on PRs from external forks
+ if: |
+ (github.event_name != 'pull_request')
+ || (github.event.pull_request.head.repo.id != github.event.pull_request.base.repo.id)
+ name: without plugins
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v2
+
+ - name: Install all dependencies and symlink for ep_etherpad-lite
+ run: bin/installDeps.sh
+
+ - name: Install etherpad-load-test
+ run: sudo npm install -g etherpad-load-test
+
+ - name: Run load test
+ run: tests/frontend/travis/runnerLoadTest.sh
+
+ withplugins:
+ # run on pushes to any branch
+ # run on PRs from external forks
+ if: |
+ (github.event_name != 'pull_request')
+ || (github.event.pull_request.head.repo.id != github.event.pull_request.base.repo.id)
+ name: with Plugins
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v2
+
+ - name: Install all dependencies and symlink for ep_etherpad-lite
+ run: bin/installDeps.sh
+
+ - name: Install etherpad-load-test
+ run: sudo npm install -g etherpad-load-test
+
+ - name: Install etherpad plugins
+ run: npm install ep_align ep_author_hover ep_cursortrace ep_font_size ep_hash_auth ep_headings2 ep_markdown ep_readonly_guest ep_spellcheck ep_subscript_and_superscript ep_table_of_contents
+
+      # configures some settings and runs the load test
+ - name: Run load test
+ run: tests/frontend/travis/runnerLoadTest.sh
diff --git a/.github/workflows/rate-limit.yml b/.github/workflows/rate-limit.yml
new file mode 100644
index 00000000000..4bdfc219419
--- /dev/null
+++ b/.github/workflows/rate-limit.yml
@@ -0,0 +1,39 @@
+name: "rate limit"
+
+# any branch is useful for testing before a PR is submitted
+on: [push, pull_request]
+
+jobs:
+ ratelimit:
+ # run on pushes to any branch
+ # run on PRs from external forks
+ if: |
+ (github.event_name != 'pull_request')
+ || (github.event.pull_request.head.repo.id != github.event.pull_request.base.repo.id)
+ name: test
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v2
+
+ - name: docker network
+ run: docker network create --subnet=172.23.42.0/16 ep_net
+
+ - name: build docker image
+ run: |
+ docker build -f Dockerfile -t epl-debian-slim .
+ docker build -f tests/ratelimit/Dockerfile.nginx -t nginx-latest .
+ docker build -f tests/ratelimit/Dockerfile.anotherip -t anotherip .
+ - name: run docker images
+ run: |
+ docker run --name etherpad-docker -p 9000:9001 --rm --network ep_net --ip 172.23.42.2 -e 'TRUST_PROXY=true' epl-debian-slim &
+ docker run -p 8081:80 --rm --network ep_net --ip 172.23.42.1 -d nginx-latest
+ docker run --rm --network ep_net --ip 172.23.42.3 --name anotherip -dt anotherip
+
+ - name: install dependencies and create symlink for ep_etherpad-lite
+ run: bin/installDeps.sh
+
+ - name: run rate limit test
+ run: |
+ cd tests/ratelimit
+ ./testlimits.sh
diff --git a/.gitignore b/.gitignore
index 95aed121feb..c75e5a61f73 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,7 +10,6 @@ var/dirty.db
bin/convertSettings.json
*~
*.patch
-src/static/js/jquery.js
npm-debug.log
*.DS_Store
.ep_initialized
diff --git a/.travis.yml b/.travis.yml
index 7b0ed03aef7..3f8ad1cf105 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -8,59 +8,127 @@ services:
cache: false
-before_install:
- - sudo add-apt-repository -y ppa:libreoffice/ppa
- - sudo apt-get update
- - sudo apt-get -y install libreoffice
- - sudo apt-get -y install libreoffice-pdfimport
-
-install:
- - "bin/installDeps.sh"
- - "export GIT_HASH=$(git rev-parse --verify --short HEAD)"
-
-script:
- - "tests/frontend/travis/runner.sh"
-
env:
global:
- secure: "WMGxFkOeTTlhWB+ChMucRtIqVmMbwzYdNHuHQjKCcj8HBEPdZLfCuK/kf4rG\nVLcLQiIsyllqzNhBGVHG1nyqWr0/LTm8JRqSCDDVIhpyzp9KpCJQQJG2Uwjk\n6/HIJJh/wbxsEdLNV2crYU/EiVO3A4Bq0YTHUlbhUqG3mSCr5Ec="
- secure: "gejXUAHYscbR6Bodw35XexpToqWkv2ifeECsbeEmjaLkYzXmUUNWJGknKSu7\nEUsSfQV8w+hxApr1Z+jNqk9aX3K1I4btL3cwk2trnNI8XRAvu1c1Iv60eerI\nkE82Rsd5lwUaMEh+/HoL8ztFCZamVndoNgX7HWp5J/NRZZMmh4g="
+_set_loglevel_warn: &set_loglevel_warn |
+ sed -e 's/"loglevel":[^,]*/"loglevel": "WARN"/' \
+ settings.json.template >settings.json.template.new &&
+ mv settings.json.template.new settings.json.template
+
+_install_libreoffice: &install_libreoffice >-
+ sudo add-apt-repository -y ppa:libreoffice/ppa &&
+ sudo apt-get update &&
+ sudo apt-get -y install libreoffice libreoffice-pdfimport
+
+_install_plugins: &install_plugins >-
+ npm install
+ ep_align
+ ep_author_hover
+ ep_cursortrace
+ ep_font_size
+ ep_hash_auth
+ ep_headings2
+ ep_markdown
+ ep_readonly_guest
+ ep_spellcheck
+ ep_subscript_and_superscript
+ ep_table_of_contents
+ ep_set_title_on_pad
+
jobs:
include:
# we can only frontend tests from the ether/ organization and not from forks.
# To request tests to be run ask a maintainer to fork your repo to ether/
- if: fork = false
- name: "Test the Frontend"
+ name: "Test the Frontend without Plugins"
install:
- #FIXME
- - "sed 's/\"loglevel\": \"INFO\",/\"loglevel\": \"WARN\",/g' settings.json.template > settings.json"
+ - *set_loglevel_warn
- "tests/frontend/travis/sauce_tunnel.sh"
- "bin/installDeps.sh"
- "export GIT_HASH=$(git rev-parse --verify --short HEAD)"
script:
- - "tests/frontend/travis/runner.sh"
- - name: "Run the Backend tests"
+ - "./tests/frontend/travis/runner.sh"
+ - name: "Run the Backend tests without Plugins"
install:
+ - *install_libreoffice
+ - *set_loglevel_warn
- "bin/installDeps.sh"
- "cd src && npm install && cd -"
script:
- "tests/frontend/travis/runnerBackend.sh"
-## Temporarily commented out the Dockerfile tests
-# - name: "Test the Dockerfile"
-# install:
-# - "cd src && npm install && cd -"
-# script:
-# - "docker build -t etherpad:test ."
-# - "docker run -d -p 9001:9001 etherpad:test && sleep 3"
-# - "cd src && npm run test-container"
- - name: "Load test Etherpad"
+ - name: "Test the Dockerfile"
+ install:
+ - "cd src && npm install && cd -"
+ script:
+ - "docker build -t etherpad:test ."
+ - "docker run -d -p 9001:9001 etherpad:test && sleep 3"
+ - "cd src && npm run test-container"
+ - name: "Load test Etherpad without Plugins"
install:
+ - *set_loglevel_warn
- "bin/installDeps.sh"
- "cd src && npm install && cd -"
- "npm install -g etherpad-load-test"
script:
- "tests/frontend/travis/runnerLoadTest.sh"
+  # we can only run frontend tests from the ether/ organization and not from forks.
+ # To request tests to be run ask a maintainer to fork your repo to ether/
+ - if: fork = false
+ name: "Test the Frontend Plugins only"
+ install:
+ - *set_loglevel_warn
+ - "tests/frontend/travis/sauce_tunnel.sh"
+ - "bin/installDeps.sh"
+ - "rm tests/frontend/specs/*"
+ - *install_plugins
+ - "export GIT_HASH=$(git rev-parse --verify --short HEAD)"
+ script:
+ - "./tests/frontend/travis/runner.sh"
+ - name: "Lint test package-lock.json"
+ install:
+ - "npm install lockfile-lint"
+ script:
+ - npx lockfile-lint --path src/package-lock.json --validate-https --allowed-hosts npm
+ - name: "Run the Backend tests with Plugins"
+ install:
+ - *install_libreoffice
+ - *set_loglevel_warn
+ - "bin/installDeps.sh"
+ - *install_plugins
+ - "cd src && npm install && cd -"
+ script:
+ - "tests/frontend/travis/runnerBackend.sh"
+ - name: "Test the Dockerfile"
+ install:
+ - "cd src && npm install && cd -"
+ script:
+ - "docker build -t etherpad:test ."
+ - "docker run -d -p 9001:9001 etherpad:test && sleep 3"
+ - "cd src && npm run test-container"
+ - name: "Load test Etherpad with Plugins"
+ install:
+ - *set_loglevel_warn
+ - "bin/installDeps.sh"
+ - *install_plugins
+ - "cd src && npm install && cd -"
+ - "npm install -g etherpad-load-test"
+ script:
+ - "tests/frontend/travis/runnerLoadTest.sh"
+ - name: "Test rate limit"
+ install:
+ - "docker network create --subnet=172.23.42.0/16 ep_net"
+ - "docker build -f Dockerfile -t epl-debian-slim ."
+ - "docker build -f tests/ratelimit/Dockerfile.nginx -t nginx-latest ."
+ - "docker build -f tests/ratelimit/Dockerfile.anotherip -t anotherip ."
+ - "docker run -p 8081:80 --rm --network ep_net --ip 172.23.42.1 -d nginx-latest"
+ - "docker run --name etherpad-docker -p 9000:9001 --rm --network ep_net --ip 172.23.42.2 -e 'TRUST_PROXY=true' epl-debian-slim &"
+ - "docker run --rm --network ep_net --ip 172.23.42.3 --name anotherip -dt anotherip"
+ - "./bin/installDeps.sh"
+ script:
+ - "cd tests/ratelimit && bash testlimits.sh"
notifications:
irc:
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0931502848f..b63f571b9b2 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,88 @@
-# Develop -- TODO Change to 1.8.x.
-* ...
+# 1.8.7
+### Compatibility-breaking changes
+* **IMPORTANT:** It is no longer possible to protect a group pad with a
+ password. All API calls to `setPassword` or `isPasswordProtected` will fail.
+ Existing group pads that were previously password protected will no longer be
+ password protected. If you need fine-grained access control, you can restrict
+ API session creation in your frontend service, or you can use plugins.
+* All workarounds for Microsoft Internet Explorer have been removed. IE might
+ still work, but it is untested.
+* Plugin hook functions are now subject to new sanity checks. Buggy hook
+ functions will cause an error message to be logged
+* Authorization failures now return 403 by default instead of 401
+* The `authorize` hook is now only called after successful authentication. Use
+ the new `preAuthorize` hook if you need to bypass authentication
+* The `authFailure` hook is deprecated; use the new `authnFailure` and
+ `authzFailure` hooks instead
+* The `indexCustomInlineScripts` hook was removed
+* The `client` context property for the `handleMessage` and
+ `handleMessageSecurity` hooks has been renamed to `socket` (the old name is
+ still usable but deprecated)
+* The `aceAttribClasses` hook functions are now called synchronously
+* The format of `ENTER`, `CREATE`, and `LEAVE` log messages has changed
+* Strings passed to `$.gritter.add()` are now expected to be plain text, not
+ HTML. Use jQuery or DOM objects if you need formatting
+
+### Notable new features
+* Users can now import without creating and editing the pad first
+* Added a new `readOnly` user setting that makes it possible to create users in
+ `settings.json` that can read pads but not create or modify them
+* Added a new `canCreate` user setting that makes it possible to create users in
+ `settings.json` that can modify pads but not create them
+* The `authorize` hook now accepts `readOnly` to grant read-only access to a pad
+* The `authorize` hook now accepts `modify` to grant modify-only (creation
+ prohibited) access to a pad
+* All authentication successes and failures are now logged
+* Added a new `cookie.sameSite` setting that makes it possible to enable
+ authentication when Etherpad is embedded in an iframe from another site
+* New `exportHTMLAdditionalContent` hook to include additional HTML content
+* New `exportEtherpadAdditionalContent` hook to include additional database
+ content in `.etherpad` exports
+* New `expressCloseServer` hook to close Express when required
+* The `padUpdate` hook context now includes `revs` and `changeset`
+* `checkPlugins.js` has various improvements to help plugin developers
+* The HTTP request object (and therefore the express-session state) is now
+ accessible from within most `eejsBlock_*` hooks
+* Users without a `password` or `hash` property in `settings.json` are no longer
+ ignored, so they can now be used by authentication plugins
+* New permission denied modal and block ``permissionDenied``
+* Plugins are now updated to the latest version instead of minor or patches
+
+### Notable fixes
+* Fixed rate limit accounting when Etherpad is behind a reverse proxy
+* Fixed typos that prevented access to pads via an HTTP API session
+* Fixed authorization failures for pad URLs containing a percent-encoded
+ character
+* Fixed exporting of read-only pads
+* Passwords are no longer written to connection state database entries or logged
+ in debug logs
+* When using the keyboard to navigate through the toolbar buttons the button
+ with the focus is now highlighted
+* Fixed support for Node.js 10 by passing the `--experimental-worker` flag
+* Fixed export of HTML attributes within a line
+* Fixed occasional "Cannot read property 'offsetTop' of undefined" error in
+ timeslider when "follow pad contents" is checked
+* socket.io errors are now displayed instead of silently ignored
+* Pasting while the caret is in a link now works (except for middle-click paste
+ on X11 systems)
+* Removal of Microsoft Internet Explorer specific code
+* Import better handles line breaks and white space
+* Fix issue with ``createDiffHTML`` incorrect call of ``getInternalRevisionAText``
+* Allow additional characters in URLs
+* MySQL engine fix and various other UeberDB updates (See UeberDB changelog).
+* Admin UI improvements on search results (to remove duplicate items)
+* Removal of unused cruft from ``clientVars`` (``ip`` and ``userAgent``)
+
+
+### Minor changes
+* Temporary disconnections no longer force a full page refresh
+* Toolbar layout for narrow screens is improved
+* Fixed `SameSite` cookie attribute for the `language`, `token`, and `pref`
+ cookies
+* Fixed superfluous database accesses when deleting a pad
+* Expanded test coverage.
+* `package-lock.json` is now lint checked on commit
+* Various lint fixes/modernization of code
# 1.8.6
* IMPORTANT: This fixes a severe problem with postgresql in 1.8.5
diff --git a/Dockerfile b/Dockerfile
index 45601c8764f..aa6091a5943 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -51,4 +51,4 @@ COPY --chown=etherpad:0 ./settings.json.docker /opt/etherpad-lite/settings.json
RUN chmod -R g=u .
EXPOSE 9001
-CMD ["node", "node_modules/ep_etherpad-lite/node/server.js"]
+CMD ["node", "--experimental-worker", "node_modules/ep_etherpad-lite/node/server.js"]
diff --git a/README.md b/README.md
index cf319d32225..086441b19b1 100644
--- a/README.md
+++ b/README.md
@@ -1,12 +1,12 @@
# A real-time collaborative editor for the web
-[![Travis (.org)](https://api.travis-ci.org/ether/etherpad-lite.svg?branch=develop)](https://travis-ci.org/github/ether/etherpad-lite)
+[![Travis (.com)](https://api.travis-ci.com/ether/etherpad-lite.svg?branch=develop)](https://travis-ci.com/github/ether/etherpad-lite)
![Demo Etherpad Animated Jif](doc/images/etherpad_demo.gif "Etherpad in action")
# About
-Etherpad is a real-time collaborative editor scalable to thousands of simultaneous real time users. It provides full data export capabilities, and runs on _your_ server, under _your_ control.
+Etherpad is a real-time collaborative editor [scalable to thousands of simultaneous real time users](http://scale.etherpad.org/). It provides [full data export](https://github.com/ether/etherpad-lite/wiki/Understanding-Etherpad's-Full-Data-Export-capabilities) capabilities, and runs on _your_ server, under _your_ control.
**[Try it out](https://video.etherpad.com)**
@@ -19,7 +19,7 @@ Etherpad is a real-time collaborative editor scalable to thousands of simultaneo
### Quick install on Debian/Ubuntu
```
-curl -sL https://deb.nodesource.com/setup_10.x | sudo -E bash -
+curl -sL https://deb.nodesource.com/setup_14.x | sudo -E bash -
sudo apt install -y nodejs
git clone --branch master https://github.com/ether/etherpad-lite.git && cd etherpad-lite && bin/run.sh
```
@@ -127,7 +127,8 @@ Read our [**Developer Guidelines**](https://github.com/ether/etherpad-lite/blob/
# Get in touch
The official channel for contacting the development team is via the [Github issues](https://github.com/ether/etherpad-lite/issues).
-For **responsible disclosure of vulnerabilities**, please write a mail to the maintainer (a.mux@inwind.it).
+For **responsible disclosure of vulnerabilities**, please write a mail to the maintainers (a.mux@inwind.it and contact@etherpad.org).
+Join the official [Etherpad Discord Channel](https://discord.com/invite/daEjfhw)
# HTTP API
Etherpad is designed to be easily embeddable and provides a [HTTP API](https://github.com/ether/etherpad-lite/wiki/HTTP-API)
diff --git a/bin/checkAllPads.js b/bin/checkAllPads.js
index 0d4e8bb8d7e..f90e57aef10 100644
--- a/bin/checkAllPads.js
+++ b/bin/checkAllPads.js
@@ -3,88 +3,85 @@
*/
if (process.argv.length != 2) {
- console.error("Use: node bin/checkAllPads.js");
+ console.error('Use: node bin/checkAllPads.js');
process.exit(1);
}
// load and initialize NPM
-let npm = require('../src/node_modules/npm');
-npm.load({}, async function() {
-
+const npm = require('../src/node_modules/npm');
+npm.load({}, async () => {
try {
// initialize the database
- let settings = require('../src/node/utils/Settings');
- let db = require('../src/node/db/DB');
+ const settings = require('../src/node/utils/Settings');
+ const db = require('../src/node/db/DB');
await db.init();
// load modules
- let Changeset = require('../src/static/js/Changeset');
- let padManager = require('../src/node/db/PadManager');
+ const Changeset = require('../src/static/js/Changeset');
+ const padManager = require('../src/node/db/PadManager');
// get all pads
- let res = await padManager.listAllPads();
-
- for (let padId of res.padIDs) {
+ const res = await padManager.listAllPads();
- let pad = await padManager.getPad(padId);
+ for (const padId of res.padIDs) {
+ const pad = await padManager.getPad(padId);
// check if the pad has a pool
if (pad.pool === undefined) {
- console.error("[" + pad.id + "] Missing attribute pool");
+ console.error(`[${pad.id}] Missing attribute pool`);
continue;
}
// create an array with key kevisions
// key revisions always save the full pad atext
- let head = pad.getHeadRevisionNumber();
- let keyRevisions = [];
+ const head = pad.getHeadRevisionNumber();
+ const keyRevisions = [];
for (let rev = 0; rev < head; rev += 100) {
keyRevisions.push(rev);
}
// run through all key revisions
- for (let keyRev of keyRevisions) {
-
+ for (const keyRev of keyRevisions) {
// create an array of revisions we need till the next keyRevision or the End
- var revisionsNeeded = [];
- for (let rev = keyRev ; rev <= keyRev + 100 && rev <= head; rev++) {
+ const revisionsNeeded = [];
+ for (let rev = keyRev; rev <= keyRev + 100 && rev <= head; rev++) {
revisionsNeeded.push(rev);
}
// this array will hold all revision changesets
- var revisions = [];
+ const revisions = [];
// run through all needed revisions and get them from the database
- for (let revNum of revisionsNeeded) {
- let revision = await db.get("pad:" + pad.id + ":revs:" + revNum);
- revisions[revNum] = revision;
+ for (const revNum of revisionsNeeded) {
+ const revision = await db.get(`pad:${pad.id}:revs:${revNum}`);
+ revisions[revNum] = revision;
}
// check if the revision exists
if (revisions[keyRev] == null) {
- console.error("[" + pad.id + "] Missing revision " + keyRev);
+ console.error(`[${pad.id}] Missing revision ${keyRev}`);
continue;
}
// check if there is a atext in the keyRevisions
if (revisions[keyRev].meta === undefined || revisions[keyRev].meta.atext === undefined) {
- console.error("[" + pad.id + "] Missing atext in revision " + keyRev);
+ console.error(`[${pad.id}] Missing atext in revision ${keyRev}`);
continue;
}
- let apool = pad.pool;
+ const apool = pad.pool;
let atext = revisions[keyRev].meta.atext;
for (let rev = keyRev + 1; rev <= keyRev + 100 && rev <= head; rev++) {
try {
- let cs = revisions[rev].changeset;
+ const cs = revisions[rev].changeset;
atext = Changeset.applyToAText(cs, atext, apool);
} catch (e) {
- console.error("[" + pad.id + "] Bad changeset at revision " + i + " - " + e.message);
+ console.error(`[${pad.id}] Bad changeset at revision ${i} - ${e.message}`);
}
}
}
- console.log("finished");
+ console.log('finished');
process.exit(0);
}
} catch (err) {
diff --git a/bin/checkPad.js b/bin/checkPad.js
index c6a3a19711b..323840e7261 100644
--- a/bin/checkPad.js
+++ b/bin/checkPad.js
@@ -3,7 +3,7 @@
*/
if (process.argv.length != 3) {
- console.error("Use: node bin/checkPad.js $PADID");
+ console.error('Use: node bin/checkPad.js $PADID');
process.exit(1);
}
@@ -11,83 +11,80 @@ if (process.argv.length != 3) {
const padId = process.argv[2];
// load and initialize NPM;
-let npm = require('../src/node_modules/npm');
-npm.load({}, async function() {
-
+const npm = require('../src/node_modules/npm');
+npm.load({}, async () => {
try {
// initialize database
- let settings = require('../src/node/utils/Settings');
- let db = require('../src/node/db/DB');
+ const settings = require('../src/node/utils/Settings');
+ const db = require('../src/node/db/DB');
await db.init();
// load modules
- let Changeset = require('ep_etherpad-lite/static/js/Changeset');
- let padManager = require('../src/node/db/PadManager');
+ const Changeset = require('ep_etherpad-lite/static/js/Changeset');
+ const padManager = require('../src/node/db/PadManager');
- let exists = await padManager.doesPadExists(padId);
+ const exists = await padManager.doesPadExists(padId);
if (!exists) {
- console.error("Pad does not exist");
+ console.error('Pad does not exist');
process.exit(1);
}
// get the pad
- let pad = await padManager.getPad(padId);
+ const pad = await padManager.getPad(padId);
// create an array with key revisions
// key revisions always save the full pad atext
- let head = pad.getHeadRevisionNumber();
- let keyRevisions = [];
+ const head = pad.getHeadRevisionNumber();
+ const keyRevisions = [];
for (let rev = 0; rev < head; rev += 100) {
keyRevisions.push(rev);
}
// run through all key revisions
- for (let keyRev of keyRevisions) {
-
+ for (const keyRev of keyRevisions) {
// create an array of revisions we need till the next keyRevision or the End
- let revisionsNeeded = [];
+ const revisionsNeeded = [];
for (let rev = keyRev; rev <= keyRev + 100 && rev <= head; rev++) {
revisionsNeeded.push(rev);
}
// this array will hold all revision changesets
- var revisions = [];
+ const revisions = [];
// run through all needed revisions and get them from the database
- for (let revNum of revisionsNeeded) {
- let revision = await db.get("pad:" + padId + ":revs:" + revNum);
+ for (const revNum of revisionsNeeded) {
+ const revision = await db.get(`pad:${padId}:revs:${revNum}`);
revisions[revNum] = revision;
}
// check if the pad has a pool
- if (pad.pool === undefined ) {
- console.error("Attribute pool is missing");
+ if (pad.pool === undefined) {
+ console.error('Attribute pool is missing');
process.exit(1);
}
// check if there is an atext in the keyRevisions
if (revisions[keyRev] === undefined || revisions[keyRev].meta === undefined || revisions[keyRev].meta.atext === undefined) {
- console.error("No atext in key revision " + keyRev);
+ console.error(`No atext in key revision ${keyRev}`);
continue;
}
- let apool = pad.pool;
+ const apool = pad.pool;
let atext = revisions[keyRev].meta.atext;
for (let rev = keyRev + 1; rev <= keyRev + 100 && rev <= head; rev++) {
try {
// console.log("check revision " + rev);
- let cs = revisions[rev].changeset;
+ const cs = revisions[rev].changeset;
atext = Changeset.applyToAText(cs, atext, apool);
- } catch(e) {
- console.error("Bad changeset at revision " + rev + " - " + e.message);
+ } catch (e) {
+ console.error(`Bad changeset at revision ${rev} - ${e.message}`);
continue;
}
}
- console.log("finished");
+ console.log('finished');
process.exit(0);
}
-
} catch (e) {
console.trace(e);
process.exit(1);
diff --git a/bin/checkPadDeltas.js b/bin/checkPadDeltas.js
index f1bd3ffe515..1e45f7148bd 100644
--- a/bin/checkPadDeltas.js
+++ b/bin/checkPadDeltas.js
@@ -1,120 +1,111 @@
-/*
- * This is a debug tool. It checks all revisions for data corruption
- */
-
-if (process.argv.length != 3) {
- console.error("Use: node bin/checkPadDeltas.js $PADID");
- process.exit(1);
-}
-
-// get the padID
-const padId = process.argv[2];
-
-// load and initialize NPM;
-var expect = require('expect.js')
-var diff = require('diff')
-var async = require('async')
-
-let npm = require('../src/node_modules/npm');
-var async = require("ep_etherpad-lite/node_modules/async");
-var Changeset = require("ep_etherpad-lite/static/js/Changeset");
-
-npm.load({}, async function() {
-
- try {
- // initialize database
- let settings = require('../src/node/utils/Settings');
- let db = require('../src/node/db/DB');
- await db.init();
-
- // load modules
- let Changeset = require('ep_etherpad-lite/static/js/Changeset');
- let padManager = require('../src/node/db/PadManager');
-
- let exists = await padManager.doesPadExists(padId);
- if (!exists) {
- console.error("Pad does not exist");
- process.exit(1);
- }
-
- // get the pad
- let pad = await padManager.getPad(padId);
-
- //create an array with key revisions
- //key revisions always save the full pad atext
- var head = pad.getHeadRevisionNumber();
- var keyRevisions = [];
- for(var i=0;i
{
+ try {
+ // initialize database
+ const settings = require('../src/node/utils/Settings');
+ const db = require('../src/node/db/DB');
+ await db.init();
+
+ // load modules
+ const Changeset = require('ep_etherpad-lite/static/js/Changeset');
+ const padManager = require('../src/node/db/PadManager');
+
+ const exists = await padManager.doesPadExists(padId);
+ if (!exists) {
+ console.error('Pad does not exist');
+ process.exit(1);
+ }
+
+ // get the pad
+ const pad = await padManager.getPad(padId);
+
+ // create an array with key revisions
+ // key revisions always save the full pad atext
+ const head = pad.getHeadRevisionNumber();
+ const keyRevisions = [];
+ for (var i = 0; i < head; i += 100) {
+ keyRevisions.push(i);
+ }
+
+ // create an array with all revisions
+ const revisions = [];
+ for (var i = 0; i <= head; i++) {
+ revisions.push(i);
+ }
+
+ let atext = Changeset.makeAText('\n');
+
+ // run trough all revisions
+ async.forEachSeries(revisions, (revNum, callback) => {
+ // console.log('Fetching', revNum)
+ db.db.get(`pad:${padId}:revs:${revNum}`, (err, revision) => {
+ if (err) return callback(err);
+
+ // check if there is a atext in the keyRevisions
+ if (~keyRevisions.indexOf(revNum) && (revision === undefined || revision.meta === undefined || revision.meta.atext === undefined)) {
+ console.error(`No atext in key revision ${revNum}`);
+ callback();
+ return;
+ }
+
+ try {
+ // console.log("check revision ", revNum);
+ const cs = revision.changeset;
+ atext = Changeset.applyToAText(cs, atext, pad.pool);
+ } catch (e) {
+ console.error(`Bad changeset at revision ${revNum} - ${e.message}`);
+ callback();
+ return;
+ }
+
+ if (~keyRevisions.indexOf(revNum)) {
+ try {
+ expect(revision.meta.atext.text).to.eql(atext.text);
+ expect(revision.meta.atext.attribs).to.eql(atext.attribs);
+ } catch (e) {
+ console.error(`Atext in key revision ${revNum} doesn't match computed one.`);
+ console.log(diff.diffChars(atext.text, revision.meta.atext.text).map((op) => { if (!op.added && !op.removed) op.value = op.value.length; return op; }));
+ // console.error(e)
+ // console.log('KeyRev. :', revision.meta.atext)
+ // console.log('Computed:', atext)
+ callback();
+ return;
+ }
+ }
+
+ setImmediate(callback);
+ });
+ }, (er) => {
+ if (pad.atext.text == atext.text) { console.log('ok'); } else {
+ console.error('Pad AText doesn\'t match computed one! (Computed ', atext.text.length, ', db', pad.atext.text.length, ')');
+ console.log(diff.diffChars(atext.text, pad.atext.text).map((op) => { if (!op.added && !op.removed) op.value = op.value.length; return op; }));
+ }
+ callback(er);
+ });
+
+ process.exit(0);
+ } catch (e) {
+ console.trace(e);
+ process.exit(1);
+ }
+});
diff --git a/bin/cleanRun.sh b/bin/cleanRun.sh
index 379b770a5a2..57de27e5cd6 100755
--- a/bin/cleanRun.sh
+++ b/bin/cleanRun.sh
@@ -1,7 +1,10 @@
#!/bin/sh
-#Move to the folder where ep-lite is installed
-cd $(dirname $0)
+# Move to the folder where ep-lite is installed
+cd "$(dirname "$0")"/..
+
+# Source constants and usefull functions
+. bin/functions.sh
#Was this script started in the bin folder? if yes move out
if [ -d "../bin" ]; then
@@ -38,4 +41,4 @@ bin/installDeps.sh "$@" || exit 1
echo "Started Etherpad..."
SCRIPTPATH=$(pwd -P)
-node "${SCRIPTPATH}/node_modules/ep_etherpad-lite/node/server.js" "$@"
+node $(compute_node_args) "${SCRIPTPATH}/node_modules/ep_etherpad-lite/node/server.js" "$@"
diff --git a/bin/convert.js b/bin/convert.js
index 82e0f757919..47f8b2d275a 100644
--- a/bin/convert.js
+++ b/bin/convert.js
@@ -1,128 +1,116 @@
-var startTime = Date.now();
-var fs = require("fs");
-var ueberDB = require("../src/node_modules/ueberdb2");
-var mysql = require("../src/node_modules/ueberdb2/node_modules/mysql");
-var async = require("../src/node_modules/async");
-var Changeset = require("ep_etherpad-lite/static/js/Changeset");
-var randomString = require('ep_etherpad-lite/static/js/pad_utils').randomString;
-var AttributePool = require("ep_etherpad-lite/static/js/AttributePool");
-
-var settingsFile = process.argv[2];
-var sqlOutputFile = process.argv[3];
-
-//stop if the settings file is not set
-if(!settingsFile || !sqlOutputFile)
-{
- console.error("Use: node convert.js $SETTINGSFILE $SQLOUTPUT");
+const startTime = Date.now();
+const fs = require('fs');
+const ueberDB = require('../src/node_modules/ueberdb2');
+const mysql = require('../src/node_modules/ueberdb2/node_modules/mysql');
+const async = require('../src/node_modules/async');
+const Changeset = require('ep_etherpad-lite/static/js/Changeset');
+const randomString = require('ep_etherpad-lite/static/js/pad_utils').randomString;
+const AttributePool = require('ep_etherpad-lite/static/js/AttributePool');
+
+const settingsFile = process.argv[2];
+const sqlOutputFile = process.argv[3];
+
+// stop if the settings file is not set
+if (!settingsFile || !sqlOutputFile) {
+ console.error('Use: node convert.js $SETTINGSFILE $SQLOUTPUT');
process.exit(1);
}
-log("read settings file...");
-//read the settings file and parse the json
-var settings = JSON.parse(fs.readFileSync(settingsFile, "utf8"));
-log("done");
-
-log("open output file...");
-var sqlOutput = fs.openSync(sqlOutputFile, "w");
-var sql = "SET CHARACTER SET UTF8;\n" +
- "CREATE TABLE IF NOT EXISTS `store` ( \n" +
- "`key` VARCHAR( 100 ) NOT NULL , \n" +
- "`value` LONGTEXT NOT NULL , \n" +
- "PRIMARY KEY ( `key` ) \n" +
- ") ENGINE = INNODB;\n" +
- "START TRANSACTION;\n\n";
+log('read settings file...');
+// read the settings file and parse the json
+const settings = JSON.parse(fs.readFileSync(settingsFile, 'utf8'));
+log('done');
+
+log('open output file...');
+const sqlOutput = fs.openSync(sqlOutputFile, 'w');
+const sql = 'SET CHARACTER SET UTF8;\n' +
+ 'CREATE TABLE IF NOT EXISTS `store` ( \n' +
+ '`key` VARCHAR( 100 ) NOT NULL , \n' +
+ '`value` LONGTEXT NOT NULL , \n' +
+ 'PRIMARY KEY ( `key` ) \n' +
+ ') ENGINE = INNODB;\n' +
+ 'START TRANSACTION;\n\n';
fs.writeSync(sqlOutput, sql);
-log("done");
-
-var etherpadDB = mysql.createConnection({
- host : settings.etherpadDB.host,
- user : settings.etherpadDB.user,
- password : settings.etherpadDB.password,
- database : settings.etherpadDB.database,
- port : settings.etherpadDB.port
+log('done');
+
+const etherpadDB = mysql.createConnection({
+ host: settings.etherpadDB.host,
+ user: settings.etherpadDB.user,
+ password: settings.etherpadDB.password,
+ database: settings.etherpadDB.database,
+ port: settings.etherpadDB.port,
});
-//get the timestamp once
-var timestamp = Date.now();
+// get the timestamp once
+const timestamp = Date.now();
-var padIDs;
+let padIDs;
async.series([
- //get all padids out of the database...
- function(callback)
- {
- log("get all padIds out of the database...");
+ // get all padids out of the database...
+ function (callback) {
+ log('get all padIds out of the database...');
- etherpadDB.query("SELECT ID FROM PAD_META", [], function(err, _padIDs)
- {
+ etherpadDB.query('SELECT ID FROM PAD_META', [], (err, _padIDs) => {
padIDs = _padIDs;
callback(err);
});
},
- function(callback)
- {
- log("done");
-
- //create a queue with a concurrency 100
- var queue = async.queue(function (padId, callback)
- {
- convertPad(padId, function(err)
- {
+ function (callback) {
+ log('done');
+
+ // create a queue with a concurrency 100
+ const queue = async.queue((padId, callback) => {
+ convertPad(padId, (err) => {
incrementPadStats();
callback(err);
});
}, 100);
- //set the step callback as the queue callback
+ // set the step callback as the queue callback
queue.drain = callback;
- //add the padids to the worker queue
- for(var i=0,length=padIDs.length;i {
+ if (err) throw err;
+
+ // write the groups
+ let sql = '';
+ for (const proID in proID2groupID) {
+ const groupID = proID2groupID[proID];
+ const subdomain = proID2subdomain[proID];
+
+ sql += `REPLACE INTO store VALUES (${etherpadDB.escape(`group:${groupID}`)}, ${etherpadDB.escape(JSON.stringify(groups[groupID]))});\n`;
+ sql += `REPLACE INTO store VALUES (${etherpadDB.escape(`mapper2group:subdomain:${subdomain}`)}, ${etherpadDB.escape(groupID)});\n`;
}
- //close transaction
- sql+="COMMIT;";
+ // close transaction
+ sql += 'COMMIT;';
- //end the sql file
- fs.writeSync(sqlOutput, sql, undefined, "utf-8");
+ // end the sql file
+ fs.writeSync(sqlOutput, sql, undefined, 'utf-8');
fs.closeSync(sqlOutput);
- log("finished.");
+ log('finished.');
process.exit(0);
});
-function log(str)
-{
- console.log((Date.now() - startTime)/1000 + "\t" + str);
+function log(str) {
+ console.log(`${(Date.now() - startTime) / 1000}\t${str}`);
}
-var padsDone = 0;
+let padsDone = 0;
-function incrementPadStats()
-{
+function incrementPadStats() {
padsDone++;
- if(padsDone%100 == 0)
- {
- var averageTime = Math.round(padsDone/((Date.now() - startTime)/1000));
- log(padsDone + "/" + padIDs.length + "\t" + averageTime + " pad/s")
+ if (padsDone % 100 == 0) {
+ const averageTime = Math.round(padsDone / ((Date.now() - startTime) / 1000));
+ log(`${padsDone}/${padIDs.length}\t${averageTime} pad/s`);
}
}
@@ -130,293 +118,246 @@ var proID2groupID = {};
var proID2subdomain = {};
var groups = {};
-function convertPad(padId, callback)
-{
- var changesets = [];
- var changesetsMeta = [];
- var chatMessages = [];
- var authors = [];
- var apool;
- var subdomain;
- var padmeta;
+function convertPad(padId, callback) {
+ const changesets = [];
+ const changesetsMeta = [];
+ const chatMessages = [];
+ const authors = [];
+ let apool;
+ let subdomain;
+ let padmeta;
async.series([
- //get all needed db values
- function(callback)
- {
+ // get all needed db values
+ function (callback) {
async.parallel([
- //get the pad revisions
- function(callback)
- {
- var sql = "SELECT * FROM `PAD_CHAT_TEXT` WHERE NUMID = (SELECT `NUMID` FROM `PAD_CHAT_META` WHERE ID=?)";
-
- etherpadDB.query(sql, [padId], function(err, results)
- {
- if(!err)
- {
- try
- {
- //parse the pages
- for(var i=0,length=results.length;i<length;i++)
- {
+ // get the pad revisions
+ function (callback) {
+ const sql = 'SELECT * FROM `PAD_CHAT_TEXT` WHERE NUMID = (SELECT `NUMID` FROM `PAD_CHAT_META` WHERE ID=?)';
+
+ etherpadDB.query(sql, [padId], (err, results) => {
+ if (!err) {
+ try {
+ // parse the pages
+ for (let i = 0, length = results.length; i < length; i++) {
parsePage(chatMessages, results[i].PAGESTART, results[i].OFFSETS, results[i].DATA, true);
}
- }catch(e) {err = e}
+ } catch (e) { err = e; }
}
callback(err);
});
},
- //get the chat entries
- function(callback)
- {
- var sql = "SELECT * FROM `PAD_REVS_TEXT` WHERE NUMID = (SELECT `NUMID` FROM `PAD_REVS_META` WHERE ID=?)";
-
- etherpadDB.query(sql, [padId], function(err, results)
- {
- if(!err)
- {
- try
- {
- //parse the pages
- for(var i=0,length=results.length;i<length;i++)
- {
+ // get the chat entries
+ function (callback) {
+ const sql = 'SELECT * FROM `PAD_REVS_TEXT` WHERE NUMID = (SELECT `NUMID` FROM `PAD_REVS_META` WHERE ID=?)';
+
+ etherpadDB.query(sql, [padId], (err, results) => {
+ if (!err) {
+ try {
+ // parse the pages
+ for (let i = 0, length = results.length; i < length; i++) {
parsePage(changesets, results[i].PAGESTART, results[i].OFFSETS, results[i].DATA, false);
}
- }catch(e) {err = e}
+ } catch (e) { err = e; }
}
callback(err);
});
},
- //get the pad revisions meta data
- function(callback)
- {
- var sql = "SELECT * FROM `PAD_REVMETA_TEXT` WHERE NUMID = (SELECT `NUMID` FROM `PAD_REVMETA_META` WHERE ID=?)";
-
- etherpadDB.query(sql, [padId], function(err, results)
- {
- if(!err)
- {
- try
- {
- //parse the pages
- for(var i=0,length=results.length;i<length;i++)
- {
+ // get the pad revisions meta data
+ function (callback) {
+ const sql = 'SELECT * FROM `PAD_REVMETA_TEXT` WHERE NUMID = (SELECT `NUMID` FROM `PAD_REVMETA_META` WHERE ID=?)';
+
+ etherpadDB.query(sql, [padId], (err, results) => {
+ if (!err) {
+ try {
+ // parse the pages
+ for (let i = 0, length = results.length; i < length; i++) {
parsePage(changesetsMeta, results[i].PAGESTART, results[i].OFFSETS, results[i].DATA, true);
}
- }catch(e) {err = e}
+ } catch (e) { err = e; }
}
callback(err);
});
},
- //get the attribute pool of this pad
- function(callback)
- {
- var sql = "SELECT `JSON` FROM `PAD_APOOL` WHERE `ID` = ?";
-
- etherpadDB.query(sql, [padId], function(err, results)
- {
- if(!err)
- {
- try
- {
- apool=JSON.parse(results[0].JSON).x;
- }catch(e) {err = e}
+ // get the attribute pool of this pad
+ function (callback) {
+ const sql = 'SELECT `JSON` FROM `PAD_APOOL` WHERE `ID` = ?';
+
+ etherpadDB.query(sql, [padId], (err, results) => {
+ if (!err) {
+ try {
+ apool = JSON.parse(results[0].JSON).x;
+ } catch (e) { err = e; }
}
callback(err);
});
},
- //get the authors informations
- function(callback)
- {
- var sql = "SELECT * FROM `PAD_AUTHORS_TEXT` WHERE NUMID = (SELECT `NUMID` FROM `PAD_AUTHORS_META` WHERE ID=?)";
-
- etherpadDB.query(sql, [padId], function(err, results)
- {
- if(!err)
- {
- try
- {
- //parse the pages
- for(var i=0, length=results.length;i<length;i++)
- {
+ // get the authors informations
+ function (callback) {
+ const sql = 'SELECT * FROM `PAD_AUTHORS_TEXT` WHERE NUMID = (SELECT `NUMID` FROM `PAD_AUTHORS_META` WHERE ID=?)';
+
+ etherpadDB.query(sql, [padId], (err, results) => {
+ if (!err) {
+ try {
+ // parse the pages
+ for (let i = 0, length = results.length; i < length; i++) {
parsePage(authors, results[i].PAGESTART, results[i].OFFSETS, results[i].DATA, true);
}
- }catch(e) {err = e}
+ } catch (e) { err = e; }
}
callback(err);
});
},
- //get the pad information
- function(callback)
- {
- var sql = "SELECT JSON FROM `PAD_META` WHERE ID=?";
-
- etherpadDB.query(sql, [padId], function(err, results)
- {
- if(!err)
- {
- try
- {
+ // get the pad information
+ function (callback) {
+ const sql = 'SELECT JSON FROM `PAD_META` WHERE ID=?';
+
+ etherpadDB.query(sql, [padId], (err, results) => {
+ if (!err) {
+ try {
padmeta = JSON.parse(results[0].JSON).x;
- }catch(e) {err = e}
+ } catch (e) { err = e; }
}
callback(err);
});
},
- //get the subdomain
- function(callback)
- {
- //skip if this is no proPad
- if(padId.indexOf("$") == -1)
- {
+ // get the subdomain
+ function (callback) {
+ // skip if this is no proPad
+ if (padId.indexOf('$') == -1) {
callback();
return;
}
- //get the proID out of this padID
- var proID = padId.split("$")[0];
+ // get the proID out of this padID
+ const proID = padId.split('$')[0];
- var sql = "SELECT subDomain FROM pro_domains WHERE ID = ?";
+ const sql = 'SELECT subDomain FROM pro_domains WHERE ID = ?';
- etherpadDB.query(sql, [proID], function(err, results)
- {
- if(!err)
- {
+ etherpadDB.query(sql, [proID], (err, results) => {
+ if (!err) {
subdomain = results[0].subDomain;
}
callback(err);
});
- }
+ },
], callback);
},
- function(callback)
- {
- //saves all values that should be written to the database
- var values = {};
-
- //this is a pro pad, let's convert it to a group pad
- if(padId.indexOf("$") != -1)
- {
- var padIdParts = padId.split("$");
- var proID = padIdParts[0];
- var padName = padIdParts[1];
-
- var groupID
-
- //this proID is not converted so far, do it
- if(proID2groupID[proID] == null)
- {
- groupID = "g." + randomString(16);
-
- //create the mappers for this new group
+ function (callback) {
+ // saves all values that should be written to the database
+ const values = {};
+
+ // this is a pro pad, let's convert it to a group pad
+ if (padId.indexOf('$') != -1) {
+ const padIdParts = padId.split('$');
+ const proID = padIdParts[0];
+ const padName = padIdParts[1];
+
+ let groupID;
+
+ // this proID is not converted so far, do it
+ if (proID2groupID[proID] == null) {
+ groupID = `g.${randomString(16)}`;
+
+ // create the mappers for this new group
proID2groupID[proID] = groupID;
proID2subdomain[proID] = subdomain;
groups[groupID] = {pads: {}};
}
- //use the generated groupID;
+ // use the generated groupID;
groupID = proID2groupID[proID];
- //rename the pad
- padId = groupID + "$" + padName;
+ // rename the pad
+ padId = `${groupID}$${padName}`;
- //set the value for this pad in the group
+ // set the value for this pad in the group
groups[groupID].pads[padId] = 1;
}
- try
- {
- var newAuthorIDs = {};
- var oldName2newName = {};
+ try {
+ const newAuthorIDs = {};
+ const oldName2newName = {};
- //replace the authors with generated authors
+ // replace the authors with generated authors
// we need to do that cause where the original etherpad saves pad local authors, the new (lite) etherpad uses them global
- for(var i in apool.numToAttrib)
- {
+ for (var i in apool.numToAttrib) {
var key = apool.numToAttrib[i][0];
- var value = apool.numToAttrib[i][1];
+ const value = apool.numToAttrib[i][1];
- //skip non authors and anonymous authors
- if(key != "author" || value == "")
- continue;
+ // skip non authors and anonymous authors
+ if (key != 'author' || value == '') continue;
- //generate new author values
- var authorID = "a." + randomString(16);
- var authorColorID = authors[i].colorId || Math.floor(Math.random()*(exports.getColorPalette().length));
- var authorName = authors[i].name || null;
+ // generate new author values
+ const authorID = `a.${randomString(16)}`;
+ const authorColorID = authors[i].colorId || Math.floor(Math.random() * (exports.getColorPalette().length));
+ const authorName = authors[i].name || null;
- //overwrite the authorID of the attribute pool
+ // overwrite the authorID of the attribute pool
apool.numToAttrib[i][1] = authorID;
- //write the author to the database
- values["globalAuthor:" + authorID] = {"colorId" : authorColorID, "name": authorName, "timestamp": timestamp};
+ // write the author to the database
+ values[`globalAuthor:${authorID}`] = {colorId: authorColorID, name: authorName, timestamp};
- //save in mappers
+ // save in mappers
newAuthorIDs[i] = authorID;
oldName2newName[value] = authorID;
}
- //save all revisions
- for(var i=0;i __dirname + '/../' + f;
+const m = (f) => `${__dirname}/../${f}`;
const fs = require('fs');
const path = require('path');
@@ -12,10 +12,10 @@ const settings = require(m('src/node/utils/Settings'));
const supertest = require(m('src/node_modules/supertest'));
(async () => {
- const api = supertest('http://'+settings.ip+':'+settings.port);
+ const api = supertest(`http://${settings.ip}:${settings.port}`);
const filePath = path.join(__dirname, '../APIKEY.txt');
- const apikey = fs.readFileSync(filePath, {encoding: 'utf-8'});
+ const apikey = fs.readFileSync(filePath, {encoding: 'utf-8'});
let res;
@@ -43,5 +43,5 @@ const supertest = require(m('src/node_modules/supertest'));
res = await api.post(uri('createSession', {apikey, groupID, authorID, validUntil}));
if (res.body.code === 1) throw new Error(`Error creating session: ${res.body}`);
console.log('Session made: ====> create a cookie named sessionID and set the value to',
- res.body.data.sessionID);
+ res.body.data.sessionID);
})();
diff --git a/bin/debugRun.sh b/bin/debugRun.sh
index d9b18aaa24d..9b2fff9bd41 100755
--- a/bin/debugRun.sh
+++ b/bin/debugRun.sh
@@ -3,6 +3,9 @@
# Move to the folder where ep-lite is installed
cd "$(dirname "$0")"/..
+# Source constants and usefull functions
+. bin/functions.sh
+
# Prepare the environment
bin/installDeps.sh || exit 1
@@ -12,4 +15,4 @@ echo "Open 'chrome://inspect' on Chrome to start debugging."
# Use 0.0.0.0 to allow external connections to the debugger
# (ex: running Etherpad on a docker container). Use default port # (9229)
-node --inspect=0.0.0.0:9229 node_modules/ep_etherpad-lite/node/server.js "$@"
+node $(compute_node_args) --inspect=0.0.0.0:9229 node_modules/ep_etherpad-lite/node/server.js "$@"
diff --git a/bin/deleteAllGroupSessions.js b/bin/deleteAllGroupSessions.js
index cda4a3a59a9..ee0058ffa2a 100644
--- a/bin/deleteAllGroupSessions.js
+++ b/bin/deleteAllGroupSessions.js
@@ -4,48 +4,48 @@
*/
const request = require('../src/node_modules/request');
-const settings = require(__dirname+'/../tests/container/loadSettings').loadSettings();
-const supertest = require(__dirname+'/../src/node_modules/supertest');
-const api = supertest('http://'+settings.ip+":"+settings.port);
+const settings = require(`${__dirname}/../tests/container/loadSettings`).loadSettings();
+const supertest = require(`${__dirname}/../src/node_modules/supertest`);
+const api = supertest(`http://${settings.ip}:${settings.port}`);
const path = require('path');
const fs = require('fs');
// get the API Key
-var filePath = path.join(__dirname, '../APIKEY.txt');
-var apikey = fs.readFileSync(filePath, {encoding: 'utf-8'});
+const filePath = path.join(__dirname, '../APIKEY.txt');
+const apikey = fs.readFileSync(filePath, {encoding: 'utf-8'});
// Set apiVersion to base value, we change this later.
-var apiVersion = 1;
-var guids;
+let apiVersion = 1;
+let guids;
// Update the apiVersion
api.get('/api/')
-.expect(function(res){
- apiVersion = res.body.currentVersion;
- if (!res.body.currentVersion) throw new Error("No version set in API");
- return;
-})
-.then(function(){
- let guri = '/api/'+apiVersion+'/listAllGroups?apikey='+apikey;
- api.get(guri)
- .then(function(res){
- guids = res.body.data.groupIDs;
- guids.forEach(function(groupID){
- let luri = '/api/'+apiVersion+'/listSessionsOfGroup?apikey='+apikey + "&groupID="+groupID;
- api.get(luri)
- .then(function(res){
- if(res.body.data){
- Object.keys(res.body.data).forEach(function(sessionID){
- if(sessionID){
- console.log("Deleting", sessionID);
- let duri = '/api/'+apiVersion+'/deleteSession?apikey='+apikey + "&sessionID="+sessionID;
- api.post(duri); // deletes
- }
- })
- }else{
- // no session in this group.
- }
- })
+ .expect((res) => {
+ apiVersion = res.body.currentVersion;
+ if (!res.body.currentVersion) throw new Error('No version set in API');
+ return;
})
- })
-})
+ .then(() => {
+ const guri = `/api/${apiVersion}/listAllGroups?apikey=${apikey}`;
+ api.get(guri)
+ .then((res) => {
+ guids = res.body.data.groupIDs;
+ guids.forEach((groupID) => {
+ const luri = `/api/${apiVersion}/listSessionsOfGroup?apikey=${apikey}&groupID=${groupID}`;
+ api.get(luri)
+ .then((res) => {
+ if (res.body.data) {
+ Object.keys(res.body.data).forEach((sessionID) => {
+ if (sessionID) {
+ console.log('Deleting', sessionID);
+ const duri = `/api/${apiVersion}/deleteSession?apikey=${apikey}&sessionID=${sessionID}`;
+ api.post(duri); // deletes
+ }
+ });
+ } else {
+ // no session in this group.
+ }
+ });
+ });
+ });
+ });
diff --git a/bin/deletePad.js b/bin/deletePad.js
index 2ce82f8a428..e145d63a05d 100644
--- a/bin/deletePad.js
+++ b/bin/deletePad.js
@@ -4,47 +4,45 @@
*/
const request = require('../src/node_modules/request');
-const settings = require(__dirname+'/../tests/container/loadSettings').loadSettings();
-const supertest = require(__dirname+'/../src/node_modules/supertest');
-const api = supertest('http://'+settings.ip+":"+settings.port);
+const settings = require(`${__dirname}/../tests/container/loadSettings`).loadSettings();
+const supertest = require(`${__dirname}/../src/node_modules/supertest`);
+const api = supertest(`http://${settings.ip}:${settings.port}`);
const path = require('path');
const fs = require('fs');
if (process.argv.length != 3) {
- console.error("Use: node deletePad.js $PADID");
+ console.error('Use: node deletePad.js $PADID');
process.exit(1);
}
// get the padID
-let padId = process.argv[2];
+const padId = process.argv[2];
// get the API Key
-var filePath = path.join(__dirname, '../APIKEY.txt');
-var apikey = fs.readFileSync(filePath, {encoding: 'utf-8'});
+const filePath = path.join(__dirname, '../APIKEY.txt');
+const apikey = fs.readFileSync(filePath, {encoding: 'utf-8'});
// Set apiVersion to base value, we change this later.
-var apiVersion = 1;
+let apiVersion = 1;
// Update the apiVersion
api.get('/api/')
- .expect(function(res){
- apiVersion = res.body.currentVersion;
- if (!res.body.currentVersion) throw new Error("No version set in API");
- return;
- })
- .end(function(err, res){
-
+ .expect((res) => {
+ apiVersion = res.body.currentVersion;
+ if (!res.body.currentVersion) throw new Error('No version set in API');
+ return;
+ })
+ .end((err, res) => {
// Now we know the latest API version, let's delete pad
- var uri = '/api/'+apiVersion+'/deletePad?apikey='+apikey+'&padID='+padId;
- api.post(uri)
- .expect(function(res){
- if (res.body.code === 1){
- console.error("Error deleting pad", res.body);
- }else{
- console.log("Deleted pad", res.body);
- }
- return;
- })
- .end(function(){})
- });
+ const uri = `/api/${apiVersion}/deletePad?apikey=${apikey}&padID=${padId}`;
+ api.post(uri)
+ .expect((res) => {
+ if (res.body.code === 1) {
+ console.error('Error deleting pad', res.body);
+ } else {
+ console.log('Deleted pad', res.body);
+ }
+ return;
+ })
+ .end(() => {});
+ });
// end
-
diff --git a/bin/doc/generate.js b/bin/doc/generate.js
index b3a2c2aceea..803f5017e12 100644
--- a/bin/doc/generate.js
+++ b/bin/doc/generate.js
@@ -20,19 +20,19 @@
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
-var marked = require('marked');
-var fs = require('fs');
-var path = require('path');
+const marked = require('marked');
+const fs = require('fs');
+const path = require('path');
// parse the args.
// Don't use nopt or whatever for this. It's simple enough.
-var args = process.argv.slice(2);
-var format = 'json';
-var template = null;
-var inputFile = null;
+const args = process.argv.slice(2);
+let format = 'json';
+let template = null;
+let inputFile = null;
-args.forEach(function (arg) {
+args.forEach((arg) => {
if (!arg.match(/^\-\-/)) {
inputFile = arg;
} else if (arg.match(/^\-\-format=/)) {
@@ -40,7 +40,7 @@ args.forEach(function (arg) {
} else if (arg.match(/^\-\-template=/)) {
template = arg.replace(/^\-\-template=/, '');
}
-})
+});
if (!inputFile) {
@@ -49,25 +49,25 @@ if (!inputFile) {
console.error('Input file = %s', inputFile);
-fs.readFile(inputFile, 'utf8', function(er, input) {
+fs.readFile(inputFile, 'utf8', (er, input) => {
if (er) throw er;
// process the input for @include lines
processIncludes(inputFile, input, next);
});
-var includeExpr = /^@include\s+([A-Za-z0-9-_\/]+)(?:\.)?([a-zA-Z]*)$/gmi;
-var includeData = {};
+const includeExpr = /^@include\s+([A-Za-z0-9-_\/]+)(?:\.)?([a-zA-Z]*)$/gmi;
+const includeData = {};
function processIncludes(inputFile, input, cb) {
- var includes = input.match(includeExpr);
+ const includes = input.match(includeExpr);
if (includes === null) return cb(null, input);
- var errState = null;
+ let errState = null;
console.error(includes);
- var incCount = includes.length;
+ let incCount = includes.length;
if (incCount === 0) cb(null, input);
- includes.forEach(function(include) {
- var fname = include.replace(/^@include\s+/, '');
+ includes.forEach((include) => {
+ let fname = include.replace(/^@include\s+/, '');
if (!fname.match(/\.md$/)) fname += '.md';
if (includeData.hasOwnProperty(fname)) {
@@ -78,11 +78,11 @@ function processIncludes(inputFile, input, cb) {
}
}
- var fullFname = path.resolve(path.dirname(inputFile), fname);
- fs.readFile(fullFname, 'utf8', function(er, inc) {
+ const fullFname = path.resolve(path.dirname(inputFile), fname);
+ fs.readFile(fullFname, 'utf8', (er, inc) => {
if (errState) return;
if (er) return cb(errState = er);
- processIncludes(fullFname, inc, function(er, inc) {
+ processIncludes(fullFname, inc, (er, inc) => {
if (errState) return;
if (er) return cb(errState = er);
incCount--;
@@ -101,20 +101,20 @@ function next(er, input) {
if (er) throw er;
switch (format) {
case 'json':
- require('./json.js')(input, inputFile, function(er, obj) {
+ require('./json.js')(input, inputFile, (er, obj) => {
console.log(JSON.stringify(obj, null, 2));
if (er) throw er;
});
break;
case 'html':
- require('./html.js')(input, inputFile, template, function(er, html) {
+ require('./html.js')(input, inputFile, template, (er, html) => {
if (er) throw er;
console.log(html);
});
break;
default:
- throw new Error('Invalid format: ' + format);
+ throw new Error(`Invalid format: ${format}`);
}
}
diff --git a/bin/doc/html.js b/bin/doc/html.js
index 700ab18ccb6..26cf3f18557 100644
--- a/bin/doc/html.js
+++ b/bin/doc/html.js
@@ -19,15 +19,15 @@
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
-var fs = require('fs');
-var marked = require('marked');
-var path = require('path');
+const fs = require('fs');
+const marked = require('marked');
+const path = require('path');
module.exports = toHTML;
function toHTML(input, filename, template, cb) {
- var lexed = marked.lexer(input);
- fs.readFile(template, 'utf8', function(er, template) {
+ const lexed = marked.lexer(input);
+ fs.readFile(template, 'utf8', (er, template) => {
if (er) return cb(er);
render(lexed, filename, template, cb);
});
@@ -35,7 +35,7 @@ function toHTML(input, filename, template, cb) {
function render(lexed, filename, template, cb) {
// get the section
- var section = getSection(lexed);
+ const section = getSection(lexed);
filename = path.basename(filename, '.md');
@@ -43,7 +43,7 @@ function render(lexed, filename, template, cb) {
// generate the table of contents.
// this mutates the lexed contents in-place.
- buildToc(lexed, filename, function(er, toc) {
+ buildToc(lexed, filename, (er, toc) => {
if (er) return cb(er);
template = template.replace(/__FILENAME__/g, filename);
@@ -63,11 +63,11 @@ function render(lexed, filename, template, cb) {
// just update the list item text in-place.
// lists that come right after a heading are what we're after.
function parseLists(input) {
- var state = null;
- var depth = 0;
- var output = [];
+ let state = null;
+ let depth = 0;
+ const output = [];
output.links = input.links;
- input.forEach(function(tok) {
+ input.forEach((tok) => {
if (state === null) {
if (tok.type === 'heading') {
state = 'AFTERHEADING';
@@ -79,7 +79,7 @@ function parseLists(input) {
if (tok.type === 'list_start') {
state = 'LIST';
if (depth === 0) {
- output.push({ type:'html', text: '<div class="signature">' });
+ output.push({type: 'html', text: '<div class="signature">'});
}
depth++;
output.push(tok);
@@ -99,7 +99,7 @@ function parseLists(input) {
depth--;
if (depth === 0) {
state = null;
- output.push({ type:'html', text: '</div>' });
+ output.push({type: 'html', text: '</div>'});
}
output.push(tok);
return;
@@ -117,16 +117,16 @@ function parseLists(input) {
function parseListItem(text) {
text = text.replace(/\{([^\}]+)\}/, '$1');
- //XXX maybe put more stuff here?
+ // XXX maybe put more stuff here?
return text;
}
// section is just the first heading
function getSection(lexed) {
- var section = '';
- for (var i = 0, l = lexed.length; i < l; i++) {
- var tok = lexed[i];
+ const section = '';
+ for (let i = 0, l = lexed.length; i < l; i++) {
+ const tok = lexed[i];
if (tok.type === 'heading') return tok.text;
}
return '';
@@ -134,40 +134,39 @@ function getSection(lexed) {
function buildToc(lexed, filename, cb) {
- var indent = 0;
- var toc = [];
- var depth = 0;
- lexed.forEach(function(tok) {
+ const indent = 0;
+ let toc = [];
+ let depth = 0;
+ lexed.forEach((tok) => {
if (tok.type !== 'heading') return;
if (tok.depth - depth > 1) {
- return cb(new Error('Inappropriate heading level\n' +
- JSON.stringify(tok)));
+ return cb(new Error(`Inappropriate heading level\n${
+ JSON.stringify(tok)}`));
}
depth = tok.depth;
- var id = getId(filename + '_' + tok.text.trim());
- toc.push(new Array((depth - 1) * 2 + 1).join(' ') +
- '* <a href="#' + id + '">' +
- tok.text + '</a>');
- tok.text += '<span><a class="mark" href="#' + id + '" id="' + id + '">#</a></span>';
+ const id = getId(`${filename}_${tok.text.trim()}`);
+ toc.push(`${new Array((depth - 1) * 2 + 1).join(' ')
+ }* <a href="#${id}">${
+ tok.text}</a>`);
+ tok.text += `<span><a class="mark" href="#${id}" id="${id}">#</a></span>`;
});
toc = marked.parse(toc.join('\n'));
cb(null, toc);
}
-var idCounters = {};
+const idCounters = {};
function getId(text) {
text = text.toLowerCase();
text = text.replace(/[^a-z0-9]+/g, '_');
text = text.replace(/^_+|_+$/, '');
text = text.replace(/^([^a-z])/, '_$1');
if (idCounters.hasOwnProperty(text)) {
- text += '_' + (++idCounters[text]);
+ text += `_${++idCounters[text]}`;
} else {
idCounters[text] = 0;
}
return text;
}
-
diff --git a/bin/doc/json.js b/bin/doc/json.js
index a404675b585..3ce62a30136 100644
--- a/bin/doc/json.js
+++ b/bin/doc/json.js
@@ -24,24 +24,24 @@ module.exports = doJSON;
// Take the lexed input, and return a JSON-encoded object
// A module looks like this: https://gist.github.com/1777387
-var marked = require('marked');
+const marked = require('marked');
function doJSON(input, filename, cb) {
- var root = {source: filename};
- var stack = [root];
- var depth = 0;
- var current = root;
- var state = null;
- var lexed = marked.lexer(input);
- lexed.forEach(function (tok) {
- var type = tok.type;
- var text = tok.text;
+ const root = {source: filename};
+ const stack = [root];
+ let depth = 0;
+ let current = root;
+ let state = null;
+ const lexed = marked.lexer(input);
+ lexed.forEach((tok) => {
+ const type = tok.type;
+ let text = tok.text;
//
// This is for cases where the markdown semantic structure is lacking.
if (type === 'paragraph' || type === 'html') {
- var metaExpr = /\n*/g;
- text = text.replace(metaExpr, function(_0, k, v) {
+ const metaExpr = /\n*/g;
+ text = text.replace(metaExpr, (_0, k, v) => {
current[k.trim()] = v.trim();
return '';
});
@@ -52,8 +52,8 @@ function doJSON(input, filename, cb) {
if (type === 'heading' &&
!text.trim().match(/^example/i)) {
if (tok.depth - depth > 1) {
- return cb(new Error('Inappropriate heading level\n'+
- JSON.stringify(tok)));
+ return cb(new Error(`Inappropriate heading level\n${
+ JSON.stringify(tok)}`));
}
// Sometimes we have two headings with a single
@@ -61,7 +61,7 @@ function doJSON(input, filename, cb) {
if (current &&
state === 'AFTERHEADING' &&
depth === tok.depth) {
- var clone = current;
+ const clone = current;
current = newSection(tok);
current.clone = clone;
// don't keep it around on the stack.
@@ -75,7 +75,7 @@ function doJSON(input, filename, cb) {
// root is always considered the level=0 section,
// and the lowest heading is 1, so this should always
// result in having a valid parent node.
- var d = tok.depth;
+ let d = tok.depth;
while (d <= depth) {
finishSection(stack.pop(), stack[stack.length - 1]);
d++;
@@ -98,7 +98,7 @@ function doJSON(input, filename, cb) {
//
// If one of these isn't found, then anything that comes between
// here and the next heading should be parsed as the desc.
- var stability
+ let stability;
if (state === 'AFTERHEADING') {
if (type === 'code' &&
(stability = text.match(/^Stability: ([0-5])(?:\s*-\s*)?(.*)$/))) {
@@ -138,7 +138,6 @@ function doJSON(input, filename, cb) {
current.desc = current.desc || [];
current.desc.push(tok);
-
});
// finish any sections left open
@@ -146,7 +145,7 @@ function doJSON(input, filename, cb) {
finishSection(current, stack[stack.length - 1]);
}
- return cb(null, root)
+ return cb(null, root);
}
@@ -193,14 +192,14 @@ function doJSON(input, filename, cb) {
// default: 'false' } ] } ]
function processList(section) {
- var list = section.list;
- var values = [];
- var current;
- var stack = [];
+ const list = section.list;
+ const values = [];
+ let current;
+ const stack = [];
// for now, *just* build the hierarchical list
- list.forEach(function(tok) {
- var type = tok.type;
+ list.forEach((tok) => {
+ const type = tok.type;
if (type === 'space') return;
if (type === 'list_item_start') {
if (!current) {
@@ -217,26 +216,26 @@ function processList(section) {
return;
} else if (type === 'list_item_end') {
if (!current) {
- throw new Error('invalid list - end without current item\n' +
- JSON.stringify(tok) + '\n' +
- JSON.stringify(list));
+ throw new Error(`invalid list - end without current item\n${
+ JSON.stringify(tok)}\n${
+ JSON.stringify(list)}`);
}
current = stack.pop();
} else if (type === 'text') {
if (!current) {
- throw new Error('invalid list - text without current item\n' +
- JSON.stringify(tok) + '\n' +
- JSON.stringify(list));
+ throw new Error(`invalid list - text without current item\n${
+ JSON.stringify(tok)}\n${
+ JSON.stringify(list)}`);
}
current.textRaw = current.textRaw || '';
- current.textRaw += tok.text + ' ';
+ current.textRaw += `${tok.text} `;
}
});
// shove the name in there for properties, since they are always
// just going to be the value etc.
if (section.type === 'property' && values[0]) {
- values[0].textRaw = '`' + section.name + '` ' + values[0].textRaw;
+ values[0].textRaw = `\`${section.name}\` ${values[0].textRaw}`;
}
// now pull the actual values out of the text bits.
@@ -252,9 +251,9 @@ function processList(section) {
// each item is an argument, unless the name is 'return',
// in which case it's the return value.
section.signatures = section.signatures || [];
- var sig = {}
+ var sig = {};
section.signatures.push(sig);
- sig.params = values.filter(function(v) {
+ sig.params = values.filter((v) => {
if (v.name === 'return') {
sig.return = v;
return false;
@@ -271,7 +270,7 @@ function processList(section) {
delete value.name;
section.typeof = value.type;
delete value.type;
- Object.keys(value).forEach(function(k) {
+ Object.keys(value).forEach((k) => {
section[k] = value[k];
});
break;
@@ -289,36 +288,36 @@ function processList(section) {
// textRaw = "someobject.someMethod(a, [b=100], [c])"
function parseSignature(text, sig) {
- var params = text.match(paramExpr);
+ let params = text.match(paramExpr);
if (!params) return;
params = params[1];
// the ] is irrelevant. [ indicates optionalness.
params = params.replace(/\]/g, '');
- params = params.split(/,/)
- params.forEach(function(p, i, _) {
+ params = params.split(/,/);
+ params.forEach((p, i, _) => {
p = p.trim();
if (!p) return;
- var param = sig.params[i];
- var optional = false;
- var def;
+ let param = sig.params[i];
+ let optional = false;
+ let def;
// [foo] -> optional
if (p.charAt(0) === '[') {
optional = true;
p = p.substr(1);
}
- var eq = p.indexOf('=');
+ const eq = p.indexOf('=');
if (eq !== -1) {
def = p.substr(eq + 1);
p = p.substr(0, eq);
}
if (!param) {
- param = sig.params[i] = { name: p };
+ param = sig.params[i] = {name: p};
}
// at this point, the name should match.
if (p !== param.name) {
console.error('Warning: invalid param "%s"', p);
- console.error(' > ' + JSON.stringify(param));
- console.error(' > ' + text);
+ console.error(` > ${JSON.stringify(param)}`);
+ console.error(` > ${text}`);
}
if (optional) param.optional = true;
if (def !== undefined) param.default = def;
@@ -332,18 +331,18 @@ function parseListItem(item) {
// the goal here is to find the name, type, default, and optional.
// anything left over is 'desc'
- var text = item.textRaw.trim();
+ let text = item.textRaw.trim();
// text = text.replace(/^(Argument|Param)s?\s*:?\s*/i, '');
text = text.replace(/^, /, '').trim();
- var retExpr = /^returns?\s*:?\s*/i;
- var ret = text.match(retExpr);
+ const retExpr = /^returns?\s*:?\s*/i;
+ const ret = text.match(retExpr);
if (ret) {
item.name = 'return';
text = text.replace(retExpr, '');
} else {
- var nameExpr = /^['`"]?([^'`": \{]+)['`"]?\s*:?\s*/;
- var name = text.match(nameExpr);
+ const nameExpr = /^['`"]?([^'`": \{]+)['`"]?\s*:?\s*/;
+ const name = text.match(nameExpr);
if (name) {
item.name = name[1];
text = text.replace(nameExpr, '');
@@ -351,24 +350,24 @@ function parseListItem(item) {
}
text = text.trim();
- var defaultExpr = /\(default\s*[:=]?\s*['"`]?([^, '"`]*)['"`]?\)/i;
- var def = text.match(defaultExpr);
+ const defaultExpr = /\(default\s*[:=]?\s*['"`]?([^, '"`]*)['"`]?\)/i;
+ const def = text.match(defaultExpr);
if (def) {
item.default = def[1];
text = text.replace(defaultExpr, '');
}
text = text.trim();
- var typeExpr = /^\{([^\}]+)\}/;
- var type = text.match(typeExpr);
+ const typeExpr = /^\{([^\}]+)\}/;
+ const type = text.match(typeExpr);
if (type) {
item.type = type[1];
text = text.replace(typeExpr, '');
}
text = text.trim();
- var optExpr = /^Optional\.|(?:, )?Optional$/;
- var optional = text.match(optExpr);
+ const optExpr = /^Optional\.|(?:, )?Optional$/;
+ const optional = text.match(optExpr);
if (optional) {
item.optional = true;
text = text.replace(optExpr, '');
@@ -382,9 +381,9 @@ function parseListItem(item) {
function finishSection(section, parent) {
if (!section || !parent) {
- throw new Error('Invalid finishSection call\n'+
- JSON.stringify(section) + '\n' +
- JSON.stringify(parent));
+ throw new Error(`Invalid finishSection call\n${
+ JSON.stringify(section)}\n${
+ JSON.stringify(parent)}`);
}
if (!section.type) {
@@ -394,7 +393,7 @@ function finishSection(section, parent) {
}
section.displayName = section.name;
section.name = section.name.toLowerCase()
- .trim().replace(/\s+/g, '_');
+ .trim().replace(/\s+/g, '_');
}
if (section.desc && Array.isArray(section.desc)) {
@@ -411,10 +410,10 @@ function finishSection(section, parent) {
// Merge them into the parent.
if (section.type === 'class' && section.ctors) {
section.signatures = section.signatures || [];
- var sigs = section.signatures;
- section.ctors.forEach(function(ctor) {
+ const sigs = section.signatures;
+ section.ctors.forEach((ctor) => {
ctor.signatures = ctor.signatures || [{}];
- ctor.signatures.forEach(function(sig) {
+ ctor.signatures.forEach((sig) => {
sig.desc = ctor.desc;
});
sigs.push.apply(sigs, ctor.signatures);
@@ -425,7 +424,7 @@ function finishSection(section, parent) {
// properties are a bit special.
// their "type" is the type of object, not "property"
if (section.properties) {
- section.properties.forEach(function (p) {
+ section.properties.forEach((p) => {
if (p.typeof) p.type = p.typeof;
else delete p.type;
delete p.typeof;
@@ -434,27 +433,27 @@ function finishSection(section, parent) {
// handle clones
if (section.clone) {
- var clone = section.clone;
+ const clone = section.clone;
delete section.clone;
delete clone.clone;
deepCopy(section, clone);
finishSection(clone, parent);
}
- var plur;
+ let plur;
if (section.type.slice(-1) === 's') {
- plur = section.type + 'es';
+ plur = `${section.type}es`;
} else if (section.type.slice(-1) === 'y') {
plur = section.type.replace(/y$/, 'ies');
} else {
- plur = section.type + 's';
+ plur = `${section.type}s`;
}
// if the parent's type is 'misc', then it's just a random
// collection of stuff, like the "globals" section.
// Make the children top-level items.
if (section.type === 'misc') {
- Object.keys(section).forEach(function(k) {
+ Object.keys(section).forEach((k) => {
switch (k) {
case 'textRaw':
case 'name':
@@ -486,9 +485,7 @@ function finishSection(section, parent) {
// Not a general purpose deep copy.
// But sufficient for these basic things.
function deepCopy(src, dest) {
- Object.keys(src).filter(function(k) {
- return !dest.hasOwnProperty(k);
- }).forEach(function(k) {
+ Object.keys(src).filter((k) => !dest.hasOwnProperty(k)).forEach((k) => {
dest[k] = deepCopy_(src[k]);
});
}
@@ -497,14 +494,14 @@ function deepCopy_(src) {
if (!src) return src;
if (Array.isArray(src)) {
var c = new Array(src.length);
- src.forEach(function(v, i) {
+ src.forEach((v, i) => {
c[i] = deepCopy_(v);
});
return c;
}
if (typeof src === 'object') {
var c = {};
- Object.keys(src).forEach(function(k) {
+ Object.keys(src).forEach((k) => {
c[k] = deepCopy_(src[k]);
});
return c;
@@ -514,21 +511,21 @@ function deepCopy_(src) {
// these parse out the contents of an H# tag
-var eventExpr = /^Event(?::|\s)+['"]?([^"']+).*$/i;
-var classExpr = /^Class:\s*([^ ]+).*?$/i;
-var propExpr = /^(?:property:?\s*)?[^\.]+\.([^ \.\(\)]+)\s*?$/i;
-var braceExpr = /^(?:property:?\s*)?[^\.\[]+(\[[^\]]+\])\s*?$/i;
-var classMethExpr =
+const eventExpr = /^Event(?::|\s)+['"]?([^"']+).*$/i;
+const classExpr = /^Class:\s*([^ ]+).*?$/i;
+const propExpr = /^(?:property:?\s*)?[^\.]+\.([^ \.\(\)]+)\s*?$/i;
+const braceExpr = /^(?:property:?\s*)?[^\.\[]+(\[[^\]]+\])\s*?$/i;
+const classMethExpr =
/^class\s*method\s*:?[^\.]+\.([^ \.\(\)]+)\([^\)]*\)\s*?$/i;
-var methExpr =
+const methExpr =
/^(?:method:?\s*)?(?:[^\.]+\.)?([^ \.\(\)]+)\([^\)]*\)\s*?$/i;
-var newExpr = /^new ([A-Z][a-z]+)\([^\)]*\)\s*?$/;
+const newExpr = /^new ([A-Z][a-z]+)\([^\)]*\)\s*?$/;
var paramExpr = /\((.*)\);?$/;
function newSection(tok) {
- var section = {};
+ const section = {};
// infer the type from the text.
- var text = section.textRaw = tok.text;
+ const text = section.textRaw = tok.text;
if (text.match(eventExpr)) {
section.type = 'event';
section.name = text.replace(eventExpr, '$1');
diff --git a/bin/extractPadData.js b/bin/extractPadData.js
index cce297f7113..a811076efff 100644
--- a/bin/extractPadData.js
+++ b/bin/extractPadData.js
@@ -5,60 +5,60 @@
*/
if (process.argv.length != 3) {
- console.error("Use: node extractPadData.js $PADID");
+ console.error('Use: node extractPadData.js $PADID');
process.exit(1);
}
// get the padID
-let padId = process.argv[2];
+const padId = process.argv[2];
-let npm = require('../src/node_modules/npm');
+const npm = require('../src/node_modules/npm');
-npm.load({}, async function(er) {
+npm.load({}, async (er) => {
if (er) {
- console.error("Could not load NPM: " + er)
+ console.error(`Could not load NPM: ${er}`);
process.exit(1);
}
try {
// initialize database
- let settings = require('../src/node/utils/Settings');
- let db = require('../src/node/db/DB');
+ const settings = require('../src/node/utils/Settings');
+ const db = require('../src/node/db/DB');
await db.init();
// load extra modules
- let dirtyDB = require('../src/node_modules/dirty');
- let padManager = require('../src/node/db/PadManager');
- let util = require('util');
+ const dirtyDB = require('../src/node_modules/dirty');
+ const padManager = require('../src/node/db/PadManager');
+ const util = require('util');
// initialize output database
- let dirty = dirtyDB(padId + '.db');
+ const dirty = dirtyDB(`${padId}.db`);
// Promise wrapped get and set function
- let wrapped = db.db.db.wrappedDB;
- let get = util.promisify(wrapped.get.bind(wrapped));
- let set = util.promisify(dirty.set.bind(dirty));
+ const wrapped = db.db.db.wrappedDB;
+ const get = util.promisify(wrapped.get.bind(wrapped));
+ const set = util.promisify(dirty.set.bind(dirty));
// array in which required key values will be accumulated
- let neededDBValues = ['pad:' + padId];
+ const neededDBValues = [`pad:${padId}`];
// get the actual pad object
- let pad = await padManager.getPad(padId);
+ const pad = await padManager.getPad(padId);
// add all authors
- neededDBValues.push(...pad.getAllAuthors().map(author => 'globalAuthor:' + author));
+ neededDBValues.push(...pad.getAllAuthors().map((author) => `globalAuthor:${author}`));
// add all revisions
for (let rev = 0; rev <= pad.head; ++rev) {
- neededDBValues.push('pad:' + padId + ':revs:' + rev);
+ neededDBValues.push(`pad:${padId}:revs:${rev}`);
}
// add all chat values
for (let chat = 0; chat <= pad.chatHead; ++chat) {
- neededDBValues.push('pad:' + padId + ':chat:' + chat);
+ neededDBValues.push(`pad:${padId}:chat:${chat}`);
}
- for (let dbkey of neededDBValues) {
+ for (const dbkey of neededDBValues) {
let dbvalue = await get(dbkey);
if (dbvalue && typeof dbvalue !== 'object') {
dbvalue = JSON.parse(dbvalue);
diff --git a/bin/fastRun.sh b/bin/fastRun.sh
index e00bb8c72c4..90d83dc8e2d 100755
--- a/bin/fastRun.sh
+++ b/bin/fastRun.sh
@@ -12,6 +12,9 @@ set -eu
# source: https://stackoverflow.com/questions/59895/how-to-get-the-source-directory-of-a-bash-script-from-within-the-script-itself#246128
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
+# Source constants and useful functions
+. ${DIR}/../bin/functions.sh
+
echo "Running directly, without checking/installing dependencies"
# move to the base Etherpad directory. This will be necessary until Etherpad
@@ -19,4 +22,4 @@ echo "Running directly, without checking/installing dependencies"
cd "${DIR}/.."
# run Etherpad main class
-node "${DIR}/../node_modules/ep_etherpad-lite/node/server.js" "${@}"
+node $(compute_node_args) "${DIR}/../node_modules/ep_etherpad-lite/node/server.js" "$@"
diff --git a/bin/functions.sh b/bin/functions.sh
new file mode 100644
index 00000000000..c7f3c85561f
--- /dev/null
+++ b/bin/functions.sh
@@ -0,0 +1,74 @@
+# minimum required node version
+REQUIRED_NODE_MAJOR=10
+REQUIRED_NODE_MINOR=13
+
+# minimum required npm version
+REQUIRED_NPM_MAJOR=5
+REQUIRED_NPM_MINOR=5
+
+pecho() { printf %s\\n "$*"; }
+log() { pecho "$@"; }
+error() { log "ERROR: $@" >&2; }
+fatal() { error "$@"; exit 1; }
+is_cmd() { command -v "$@" >/dev/null 2>&1; }
+
+
+get_program_version() {
+ PROGRAM="$1"
+ KIND="${2:-full}"
+ PROGRAM_VERSION_STRING=$($PROGRAM --version)
+ PROGRAM_VERSION_STRING=${PROGRAM_VERSION_STRING#"v"}
+
+ DETECTED_MAJOR=$(pecho "$PROGRAM_VERSION_STRING" | cut -s -d "." -f 1)
+ [ -n "$DETECTED_MAJOR" ] || fatal "Cannot extract $PROGRAM major version from version string \"$PROGRAM_VERSION_STRING\""
+ case "$DETECTED_MAJOR" in
+ ''|*[!0-9]*)
+ fatal "$PROGRAM_LABEL major version from \"$VERSION_STRING\" is not a number. Detected: \"$DETECTED_MAJOR\""
+ ;;
+ esac
+
+ DETECTED_MINOR=$(pecho "$PROGRAM_VERSION_STRING" | cut -s -d "." -f 2)
+ [ -n "$DETECTED_MINOR" ] || fatal "Cannot extract $PROGRAM minor version from version string \"$PROGRAM_VERSION_STRING\""
+ case "$DETECTED_MINOR" in
+ ''|*[!0-9]*)
+ fatal "$PROGRAM_LABEL minor version from \"$VERSION_STRING\" is not a number. Detected: \"$DETECTED_MINOR\""
+ esac
+
+ case $KIND in
+ major)
+ echo $DETECTED_MAJOR
+ exit;;
+ minor)
+ echo $DETECTED_MINOR
+ exit;;
+ *)
+ echo $DETECTED_MAJOR.$DETECTED_MINOR
+ exit;;
+ esac
+
+ echo $VERSION
+}
+
+
+compute_node_args() {
+ ARGS=""
+
+ NODE_MAJOR=$(get_program_version "node" "major")
+ [ "$NODE_MAJOR" -eq "10" ] && ARGS="$ARGS --experimental-worker"
+
+ echo $ARGS
+}
+
+
+require_minimal_version() {
+ PROGRAM_LABEL="$1"
+ VERSION="$2"
+ REQUIRED_MAJOR="$3"
+ REQUIRED_MINOR="$4"
+
+ VERSION_MAJOR=$(pecho "$VERSION" | cut -s -d "." -f 1)
+ VERSION_MINOR=$(pecho "$VERSION" | cut -s -d "." -f 2)
+
+ [ "$VERSION_MAJOR" -gt "$REQUIRED_MAJOR" ] || ([ "$VERSION_MAJOR" -eq "$REQUIRED_MAJOR" ] && [ "$VERSION_MINOR" -ge "$REQUIRED_MINOR" ]) \
+ || fatal "Your $PROGRAM_LABEL version \"$VERSION_MAJOR.$VERSION_MINOR\" is too old. $PROGRAM_LABEL $REQUIRED_MAJOR.$REQUIRED_MINOR.x or higher is required."
+}
diff --git a/bin/importSqlFile.js b/bin/importSqlFile.js
index 8bc78323a17..a67cb8bf0a5 100644
--- a/bin/importSqlFile.js
+++ b/bin/importSqlFile.js
@@ -1,94 +1,87 @@
-var startTime = Date.now();
+const startTime = Date.now();
-require("ep_etherpad-lite/node_modules/npm").load({}, function(er,npm) {
+require('ep_etherpad-lite/node_modules/npm').load({}, (er, npm) => {
+ const fs = require('fs');
- var fs = require("fs");
+ const ueberDB = require('ep_etherpad-lite/node_modules/ueberdb2');
+ const settings = require('ep_etherpad-lite/node/utils/Settings');
+ const log4js = require('ep_etherpad-lite/node_modules/log4js');
- var ueberDB = require("ep_etherpad-lite/node_modules/ueberdb2");
- var settings = require("ep_etherpad-lite/node/utils/Settings");
- var log4js = require('ep_etherpad-lite/node_modules/log4js');
-
- var dbWrapperSettings = {
+ const dbWrapperSettings = {
cache: 0,
writeInterval: 100,
- json: false // data is already json encoded
+ json: false, // data is already json encoded
};
- var db = new ueberDB.database(settings.dbType, settings.dbSettings, dbWrapperSettings, log4js.getLogger("ueberDB"));
+ const db = new ueberDB.database(settings.dbType, settings.dbSettings, dbWrapperSettings, log4js.getLogger('ueberDB'));
- var sqlFile = process.argv[2];
+ const sqlFile = process.argv[2];
- //stop if the settings file is not set
- if(!sqlFile)
- {
- console.error("Use: node importSqlFile.js $SQLFILE");
+ // stop if the settings file is not set
+ if (!sqlFile) {
+ console.error('Use: node importSqlFile.js $SQLFILE');
process.exit(1);
}
- log("initializing db");
- db.init(function(err)
- {
- //there was an error while initializing the database, output it and stop
- if(err)
- {
- console.error("ERROR: Problem while initializing the database");
+ log('initializing db');
+ db.init((err) => {
+ // there was an error while initializing the database, output it and stop
+ if (err) {
+ console.error('ERROR: Problem while initializing the database');
console.error(err.stack ? err.stack : err);
process.exit(1);
- }
- else
- {
- log("done");
+ } else {
+ log('done');
- log("open output file...");
- var lines = fs.readFileSync(sqlFile, 'utf8').split("\n");
+ log('open output file...');
+ const lines = fs.readFileSync(sqlFile, 'utf8').split('\n');
- var count = lines.length;
- var keyNo = 0;
+ const count = lines.length;
+ let keyNo = 0;
- process.stdout.write("Start importing " + count + " keys...\n");
- lines.forEach(function(l) {
- if (l.substr(0, 27) == "REPLACE INTO store VALUES (") {
- var pos = l.indexOf("', '");
- var key = l.substr(28, pos - 28);
- var value = l.substr(pos + 3);
+ process.stdout.write(`Start importing ${count} keys...\n`);
+ lines.forEach((l) => {
+ if (l.substr(0, 27) == 'REPLACE INTO store VALUES (') {
+ const pos = l.indexOf("', '");
+ const key = l.substr(28, pos - 28);
+ let value = l.substr(pos + 3);
value = value.substr(0, value.length - 2);
- console.log("key: " + key + " val: " + value);
- console.log("unval: " + unescape(value));
+ console.log(`key: ${key} val: ${value}`);
+ console.log(`unval: ${unescape(value)}`);
db.set(key, unescape(value), null);
keyNo++;
if (keyNo % 1000 == 0) {
- process.stdout.write(" " + keyNo + "/" + count + "\n");
+ process.stdout.write(` ${keyNo}/${count}\n`);
}
}
});
- process.stdout.write("\n");
- process.stdout.write("done. waiting for db to finish transaction. depended on dbms this may take some time...\n");
+ process.stdout.write('\n');
+ process.stdout.write('done. waiting for db to finish transaction. depended on dbms this may take some time...\n');
- db.doShutdown(function() {
- log("finished, imported " + keyNo + " keys.");
+ db.doShutdown(() => {
+ log(`finished, imported ${keyNo} keys.`);
process.exit(0);
});
}
});
});
-function log(str)
-{
- console.log((Date.now() - startTime)/1000 + "\t" + str);
+function log(str) {
+ console.log(`${(Date.now() - startTime) / 1000}\t${str}`);
}
-unescape = function(val) {
+unescape = function (val) {
// value is a string
if (val.substr(0, 1) == "'") {
val = val.substr(0, val.length - 1).substr(1);
- return val.replace(/\\[0nrbtZ\\'"]/g, function(s) {
- switch(s) {
- case "\\0": return "\0";
- case "\\n": return "\n";
- case "\\r": return "\r";
- case "\\b": return "\b";
- case "\\t": return "\t";
- case "\\Z": return "\x1a";
+ return val.replace(/\\[0nrbtZ\\'"]/g, (s) => {
+ switch (s) {
+ case '\\0': return '\0';
+ case '\\n': return '\n';
+ case '\\r': return '\r';
+ case '\\b': return '\b';
+ case '\\t': return '\t';
+ case '\\Z': return '\x1a';
default: return s.substr(1);
}
});
diff --git a/bin/installDeps.sh b/bin/installDeps.sh
index 5e0bbb931eb..bdce38fc75a 100755
--- a/bin/installDeps.sh
+++ b/bin/installDeps.sh
@@ -1,52 +1,11 @@
#!/bin/sh
-# minimum required node version
-REQUIRED_NODE_MAJOR=10
-REQUIRED_NODE_MINOR=13
-
-# minimum required npm version
-REQUIRED_NPM_MAJOR=5
-REQUIRED_NPM_MINOR=5
-
-pecho() { printf %s\\n "$*"; }
-log() { pecho "$@"; }
-error() { log "ERROR: $@" >&2; }
-fatal() { error "$@"; exit 1; }
-is_cmd() { command -v "$@" >/dev/null 2>&1; }
-
-require_minimal_version() {
- PROGRAM_LABEL="$1"
- VERSION_STRING="$2"
- REQUIRED_MAJOR="$3"
- REQUIRED_MINOR="$4"
-
- # Flag -s (--only-delimited on GNU cut) ensures no string is returned
- # when there is no match
- DETECTED_MAJOR=$(pecho "$VERSION_STRING" | cut -s -d "." -f 1)
- DETECTED_MINOR=$(pecho "$VERSION_STRING" | cut -s -d "." -f 2)
-
- [ -n "$DETECTED_MAJOR" ] || fatal "Cannot extract $PROGRAM_LABEL major version from version string \"$VERSION_STRING\""
-
- [ -n "$DETECTED_MINOR" ] || fatal "Cannot extract $PROGRAM_LABEL minor version from version string \"$VERSION_STRING\""
-
- case "$DETECTED_MAJOR" in
- ''|*[!0-9]*)
- fatal "$PROGRAM_LABEL major version from \"$VERSION_STRING\" is not a number. Detected: \"$DETECTED_MAJOR\""
- ;;
- esac
-
- case "$DETECTED_MINOR" in
- ''|*[!0-9]*)
- fatal "$PROGRAM_LABEL minor version from \"$VERSION_STRING\" is not a number. Detected: \"$DETECTED_MINOR\""
- esac
-
- [ "$DETECTED_MAJOR" -gt "$REQUIRED_MAJOR" ] || ([ "$DETECTED_MAJOR" -eq "$REQUIRED_MAJOR" ] && [ "$DETECTED_MINOR" -ge "$REQUIRED_MINOR" ]) \
- || fatal "Your $PROGRAM_LABEL version \"$VERSION_STRING\" is too old. $PROGRAM_LABEL $REQUIRED_MAJOR.$REQUIRED_MINOR.x or higher is required."
-}
-
# Move to the folder where ep-lite is installed
cd "$(dirname "$0")"/..
+# Source constants and useful functions
+. bin/functions.sh
+
# Is node installed?
# Not checking io.js, default installation creates a symbolic link to node
is_cmd node || fatal "Please install node.js ( https://nodejs.org )"
@@ -55,15 +14,10 @@ is_cmd node || fatal "Please install node.js ( https://nodejs.org )"
is_cmd npm || fatal "Please install npm ( https://npmjs.org )"
# Check npm version
-NPM_VERSION_STRING=$(npm --version)
-
-require_minimal_version "npm" "$NPM_VERSION_STRING" "$REQUIRED_NPM_MAJOR" "$REQUIRED_NPM_MINOR"
+require_minimal_version "npm" $(get_program_version "npm") "$REQUIRED_NPM_MAJOR" "$REQUIRED_NPM_MINOR"
# Check node version
-NODE_VERSION_STRING=$(node --version)
-NODE_VERSION_STRING=${NODE_VERSION_STRING#"v"}
-
-require_minimal_version "nodejs" "$NODE_VERSION_STRING" "$REQUIRED_NODE_MAJOR" "$REQUIRED_NODE_MINOR"
+require_minimal_version "nodejs" $(get_program_version "node") "$REQUIRED_NODE_MAJOR" "$REQUIRED_NODE_MINOR"
# Get the name of the settings file
settings="settings.json"
diff --git a/bin/migrateDirtyDBtoRealDB.js b/bin/migrateDirtyDBtoRealDB.js
index ba329aa342a..63425cab7be 100644
--- a/bin/migrateDirtyDBtoRealDB.js
+++ b/bin/migrateDirtyDBtoRealDB.js
@@ -1,6 +1,5 @@
-require("ep_etherpad-lite/node_modules/npm").load({}, function(er,npm) {
-
- process.chdir(npm.root+'/..')
+require('ep_etherpad-lite/node_modules/npm').load({}, (er, npm) => {
+ process.chdir(`${npm.root}/..`);
// This script requires that you have modified your settings.json file
// to work with a real database. Please make a backup of your dirty.db
@@ -10,40 +9,40 @@ require("ep_etherpad-lite/node_modules/npm").load({}, function(er,npm) {
// `node --max-old-space-size=4096 bin/migrateDirtyDBtoRealDB.js`
- var settings = require("ep_etherpad-lite/node/utils/Settings");
- var dirty = require("../src/node_modules/dirty");
- var ueberDB = require("../src/node_modules/ueberdb2");
- var log4js = require("../src/node_modules/log4js");
- var dbWrapperSettings = {
- "cache": "0", // The cache slows things down when you're mostly writing.
- "writeInterval": 0 // Write directly to the database, don't buffer
+ const settings = require('ep_etherpad-lite/node/utils/Settings');
+ let dirty = require('../src/node_modules/dirty');
+ const ueberDB = require('../src/node_modules/ueberdb2');
+ const log4js = require('../src/node_modules/log4js');
+ const dbWrapperSettings = {
+ cache: '0', // The cache slows things down when you're mostly writing.
+ writeInterval: 0, // Write directly to the database, don't buffer
};
- var db = new ueberDB.database(settings.dbType, settings.dbSettings, dbWrapperSettings, log4js.getLogger("ueberDB"));
- var i = 0;
- var length = 0;
-
- db.init(function() {
- console.log("Waiting for dirtyDB to parse its file.");
- dirty = dirty('var/dirty.db').on("load", function() {
- dirty.forEach(function(){
+ const db = new ueberDB.database(settings.dbType, settings.dbSettings, dbWrapperSettings, log4js.getLogger('ueberDB'));
+ let i = 0;
+ let length = 0;
+
+ db.init(() => {
+ console.log('Waiting for dirtyDB to parse its file.');
+ dirty = dirty('var/dirty.db').on('load', () => {
+ dirty.forEach(() => {
length++;
});
console.log(`Found ${length} records, processing now.`);
- dirty.forEach(async function(key, value) {
- let error = await db.set(key, value);
+ dirty.forEach(async (key, value) => {
+ const error = await db.set(key, value);
console.log(`Wrote record ${i}`);
i++;
if (i === length) {
- console.log("finished, just clearing up for a bit...");
- setTimeout(function() {
+ console.log('finished, just clearing up for a bit...');
+ setTimeout(() => {
process.exit(0);
}, 5000);
}
});
- console.log("Please wait for all records to flush to database, then kill this process.");
+ console.log('Please wait for all records to flush to database, then kill this process.');
});
- console.log("done?")
+ console.log('done?');
});
});
diff --git a/bin/plugins/README.md b/bin/plugins/README.md
index dc929798c29..81d5a42988f 100755
--- a/bin/plugins/README.md
+++ b/bin/plugins/README.md
@@ -1,46 +1,52 @@
-The files in this folder are for Plugin developers.
-
-# Get suggestions to improve your Plugin
-
-This code will check your plugin for known usual issues and some suggestions for improvements. No changes will be made to your project.
-
-```
-node bin/plugins/checkPlugin.js $PLUGIN_NAME$
-```
-
-# Basic Example:
-```
-node bin/plugins/checkPlugin.js ep_webrtc
-```
-
-## Autofixing - will autofix any issues it can
-```
-node bin/plugins/checkPlugins.js ep_whatever autofix
-```
-
-## Autocommitting, push, npm minor patch and npm publish (highly dangerous)
-```
-node bin/plugins/checkPlugins.js ep_whatever autofix autocommit
-```
-
-# All the plugins
-Replace johnmclear with your github username
-
-```
-# Clones
-cd node_modules
-GHUSER=johnmclear; curl "https://api.github.com/users/$GHUSER/repos?per_page=1000" | grep -o 'git@[^"]*' | grep /ep_ | xargs -L1 git clone
-cd ..
-
-# autofixes and autocommits /pushes & npm publishes
-for dir in `ls node_modules`;
-do
-# echo $0
-if [[ $dir == *"ep_"* ]]; then
-if [[ $dir != "ep_etherpad-lite" ]]; then
-node bin/plugins/checkPlugin.js $dir autofix autocommit
-fi
-fi
-# echo $dir
-done
-```
+The files in this folder are for Plugin developers.
+
+# Get suggestions to improve your Plugin
+
+This code will check your plugin for known usual issues and some suggestions for improvements. No changes will be made to your project.
+
+```
+node bin/plugins/checkPlugin.js $PLUGIN_NAME$
+```
+
+# Basic Example:
+```
+node bin/plugins/checkPlugin.js ep_webrtc
+```
+
+## Autofixing - will autofix any issues it can
+```
+node bin/plugins/checkPlugins.js ep_whatever autofix
+```
+
+## Autocommitting, push, npm minor patch and npm publish (highly dangerous)
+```
+node bin/plugins/checkPlugins.js ep_whatever autofix autocommit
+```
+
+# All the plugins
+Replace johnmclear with your github username
+
+```
+# Clones
+cd node_modules
+GHUSER=johnmclear; curl "https://api.github.com/users/$GHUSER/repos?per_page=1000" | grep -o 'git@[^"]*' | grep /ep_ | xargs -L1 git clone
+cd ..
+
+# autofixes and autocommits /pushes & npm publishes
+for dir in `ls node_modules`;
+do
+# echo $0
+if [[ $dir == *"ep_"* ]]; then
+if [[ $dir != "ep_etherpad-lite" ]]; then
+node bin/plugins/checkPlugin.js $dir autofix autocommit
+fi
+fi
+# echo $dir
+done
+```
+
+# Automating update of ether organization plugins
+```
+getCorePlugins.sh
+updateCorePlugins.sh
+```
diff --git a/bin/plugins/checkPlugin.js b/bin/plugins/checkPlugin.js
index 0fccb4f1203..fd31c148e9e 100755
--- a/bin/plugins/checkPlugin.js
+++ b/bin/plugins/checkPlugin.js
@@ -1,246 +1,469 @@
-// pro usage for all your plugins, replace johnmclear with your github username
-/*
-cd node_modules
-GHUSER=johnmclear; curl "https://api.github.com/users/$GHUSER/repos?per_page=1000" | grep -o 'git@[^"]*' | grep /ep_ | xargs -L1 git clone
-cd ..
-
-for dir in `ls node_modules`;
-do
-# echo $0
-if [[ $dir == *"ep_"* ]]; then
-if [[ $dir != "ep_etherpad-lite" ]]; then
-node bin/plugins/checkPlugin.js $dir autofix autocommit
-fi
-fi
-# echo $dir
-done
-*/
-
-/*
-*
-* Usage
-*
-* Normal usage: node bin/plugins/checkPlugins.js ep_whatever
-* Auto fix the things it can: node bin/plugins/checkPlugins.js ep_whatever autofix
-* Auto commit, push and publish(to npm) * highly dangerous:
-node bin/plugins/checkPlugins.js ep_whatever autofix autocommit
-
-*/
-
-const fs = require("fs");
-const { exec } = require("child_process");
-
-// get plugin name & path from user input
-const pluginName = process.argv[2];
-const pluginPath = "node_modules/"+pluginName;
-
-console.log("Checking the plugin: "+ pluginName)
-
-// Should we autofix?
-if (process.argv[3] && process.argv[3] === "autofix") var autoFix = true;
-
-// Should we update files where possible?
-if (process.argv[5] && process.argv[5] === "autoupdate") var autoUpdate = true;
-
-// Should we automcommit and npm publish?!
-if (process.argv[4] && process.argv[4] === "autocommit") var autoCommit = true;
-
-
-if(autoCommit){
- console.warn("Auto commit is enabled, I hope you know what you are doing...")
-}
-
-fs.readdir(pluginPath, function (err, rootFiles) {
- //handling error
- if (err) {
- return console.log('Unable to scan directory: ' + err);
- }
-
- // rewriting files to lower case
- var files = [];
-
- // some files we need to know the actual file name. Not compulsory but might help in the future.
- var readMeFileName;
- var repository;
- var hasAutofixed = false;
-
- for (var i = 0; i < rootFiles.length; i++) {
- if(rootFiles[i].toLowerCase().indexOf("readme") !== -1) readMeFileName = rootFiles[i];
- files.push(rootFiles[i].toLowerCase());
- }
-
- if(files.indexOf("package.json") === -1){
- console.warn("no package.json, please create");
- }
-
- if(files.indexOf("package.json") !== -1){
- let packageJSON = fs.readFileSync(pluginPath+"/package.json", {encoding:'utf8', flag:'r'});
-
- if(packageJSON.toLowerCase().indexOf("repository") === -1){
- console.warn("No repository in package.json");
- if(autoFix){
- console.warn("Repository not detected in package.json. Please add repository section manually.")
- }
- }else{
- // useful for creating README later.
- repository = JSON.parse(packageJSON).repository.url;
- }
-
- }
- if(files.indexOf("readme") === -1 && files.indexOf("readme.md") === -1){
- console.warn("README.md file not found, please create");
- if(autoFix){
- console.log("Autofixing missing README.md file, please edit the README.md file further to include plugin specific details.");
- let readme = fs.readFileSync("bin/plugins/lib/README.md", {encoding:'utf8', flag:'r'})
- readme = readme.replace(/\[plugin_name\]/g, pluginName);
- if(repository){
- let org = repository.split("/")[3];
- let name = repository.split("/")[4];
- readme = readme.replace(/\[org_name\]/g, org);
- readme = readme.replace(/\[repo_url\]/g, name);
- fs.writeFileSync(pluginPath+"/README.md", readme);
- }else{
- console.warn("Unable to find repository in package.json, aborting.")
- }
- }
- }
-
- if(files.indexOf("readme") !== -1 && files.indexOf("readme.md") !== -1){
- let readme = fs.readFileSync(pluginPath+"/"+readMeFileName, {encoding:'utf8', flag:'r'});
- if(readme.toLowerCase().indexOf("license") === -1){
- console.warn("No license section in README");
- if(autoFix){
- console.warn("Please add License section to README manually.")
- }
- }
- }
-
- if(files.indexOf("license") === -1 && files.indexOf("license.md") === -1){
- console.warn("LICENSE.md file not found, please create");
- if(autoFix){
- hasAutofixed = true;
- console.log("Autofixing missing LICENSE.md file, including Apache 2 license.");
- exec("git config user.name", (error, name, stderr) => {
- if (error) {
- console.log(`error: ${error.message}`);
- return;
- }
- if (stderr) {
- console.log(`stderr: ${stderr}`);
- return;
- }
- let license = fs.readFileSync("bin/plugins/lib/LICENSE.md", {encoding:'utf8', flag:'r'});
- license = license.replace("[yyyy]", new Date().getFullYear());
- license = license.replace("[name of copyright owner]", name)
- fs.writeFileSync(pluginPath+"/LICENSE.md", license);
- });
- }
- }
-
- var travisConfig = fs.readFileSync("bin/plugins/lib/travis.yml", {encoding:'utf8', flag:'r'});
- travisConfig = travisConfig.replace(/\[plugin_name\]/g, pluginName);
-
- if(files.indexOf(".travis.yml") === -1){
- console.warn(".travis.yml file not found, please create. .travis.yml is used for automatically CI testing Etherpad. It is useful to know if your plugin breaks another feature for example.")
- // TODO: Make it check version of the .travis file to see if it needs an update.
- if(autoFix){
- hasAutofixed = true;
- console.log("Autofixing missing .travis.yml file");
- fs.writeFileSync(pluginPath+"/.travis.yml", travisConfig);
- console.log("Travis file created, please sign into travis and enable this repository")
- }
- }
- if(autoFix && autoUpdate){
- // checks the file versioning of .travis and updates it to the latest.
- let existingConfig = fs.readFileSync(pluginPath + "/.travis.yml", {encoding:'utf8', flag:'r'});
- let existingConfigLocation = existingConfig.indexOf("##ETHERPAD_TRAVIS_V=");
- let existingValue = existingConfig.substr(existingConfigLocation+20, existingConfig.length);
-
- let newConfigLocation = travisConfig.indexOf("##ETHERPAD_TRAVIS_V=");
- let newValue = travisConfig.substr(newConfigLocation+20, travisConfig.length);
-
- if(existingConfigLocation === -1){
- console.warn("no previous .travis.yml version found so writing new.")
- // we will write the newTravisConfig to the location.
- fs.writeFileSync(pluginPath + "/.travis.yml", travisConfig);
- }else{
- if(newValue > existingValue){
- console.log("updating .travis.yml");
- fs.writeFileSync(pluginPath + "/.travis.yml", travisConfig);
- hasAutofixed = true;
- }
- }
- }
-
- if(files.indexOf(".gitignore") === -1){
- console.warn(".gitignore file not found, please create. .gitignore files are useful to ensure files aren't incorrectly commited to a repository.")
- if(autoFix){
- hasAutofixed = true;
- console.log("Autofixing missing .gitignore file");
- let gitignore = fs.readFileSync("bin/plugins/lib/gitignore", {encoding:'utf8', flag:'r'});
- fs.writeFileSync(pluginPath+"/.gitignore", gitignore);
- }
- }
-
- if(files.indexOf("locales") === -1){
- console.warn("Translations not found, please create. Translation files help with Etherpad accessibility.");
- }
-
-
- if(files.indexOf(".ep_initialized") !== -1){
- console.warn(".ep_initialized found, please remove. .ep_initialized should never be commited to git and should only exist once the plugin has been executed one time.")
- if(autoFix){
- hasAutofixed = true;
- console.log("Autofixing incorrectly existing .ep_initialized file");
- fs.unlinkSync(pluginPath+"/.ep_initialized");
- }
- }
-
- if(files.indexOf("npm-debug.log") !== -1){
- console.warn("npm-debug.log found, please remove. npm-debug.log should never be commited to your repository.")
- if(autoFix){
- hasAutofixed = true;
- console.log("Autofixing incorrectly existing npm-debug.log file");
- fs.unlinkSync(pluginPath+"/npm-debug.log");
- }
- }
-
- if(files.indexOf("static") !== -1){
- fs.readdir(pluginPath+"/static", function (errRead, staticFiles) {
- if(staticFiles.indexOf("tests") === -1){
- console.warn("Test files not found, please create tests. https://github.com/ether/etherpad-lite/wiki/Creating-a-plugin#writing-and-running-front-end-tests-for-your-plugin")
- }
- })
- }else{
- console.warn("Test files not found, please create tests. https://github.com/ether/etherpad-lite/wiki/Creating-a-plugin#writing-and-running-front-end-tests-for-your-plugin")
- }
-
- if(hasAutofixed){
- console.log("Fixes applied, please check git diff then run the following command:\n\n")
- // bump npm Version
- if(autoCommit){
- // holy shit you brave.
- console.log("Attempting autocommit and auto publish to npm")
- exec("cd node_modules/"+ pluginName + " && git add -A && git commit --allow-empty -m 'autofixes from Etherpad checkPlugins.js' && npm version patch && git add package.json && git commit --allow-empty -m 'bump version' && git push && npm publish && cd ../..", (error, name, stderr) => {
- if (error) {
- console.log(`error: ${error.message}`);
- return;
- }
- if (stderr) {
- console.log(`stderr: ${stderr}`);
- return;
- }
- console.log("I think she's got it! By George she's got it!")
- process.exit(0)
- });
- }else{
- console.log("cd node_modules/"+ pluginName + " && git add -A && git commit --allow-empty -m 'autofixes from Etherpad checkPlugins.js' && npm version patch && git add package.json && git commit --allow-empty -m 'bump version' && git push && npm publish && cd ../..")
- }
- }
-
- //listing all files using forEach
- files.forEach(function (file) {
- // Do whatever you want to do with the file
- // console.log(file.toLowerCase());
- });
-});
+/*
+*
+* Usage -- see README.md
+*
+* Normal usage: node bin/plugins/checkPlugin.js ep_whatever
+* Auto fix the things it can: node bin/plugins/checkPlugin.js ep_whatever autofix
+* Auto commit, push and publish(to npm) * highly dangerous:
+node bin/plugins/checkPlugin.js ep_whatever autofix autocommit
+
+*/
+
+const fs = require('fs');
+const {exec} = require('child_process');
+
+// get plugin name & path from user input
+const pluginName = process.argv[2];
+
+if (!pluginName) {
+ console.error('no plugin name specified');
+ process.exit(1);
+}
+
+const pluginPath = `node_modules/${pluginName}`;
+
+console.log(`Checking the plugin: ${pluginName}`);
+
+// Should we autofix?
+if (process.argv[3] && process.argv[3] === 'autofix') var autoFix = true;
+
+// Should we update files where possible?
+if (process.argv[5] && process.argv[5] === 'autoupdate') var autoUpdate = true;
+
+// Should we autocommit and npm publish?!
+if (process.argv[4] && process.argv[4] === 'autocommit') var autoCommit = true;
+
+
+if (autoCommit) {
+ console.warn('Auto commit is enabled, I hope you know what you are doing...');
+}
+
+fs.readdir(pluginPath, (err, rootFiles) => {
+ // handling error
+ if (err) {
+ return console.log(`Unable to scan directory: ${err}`);
+ }
+
+ // rewriting files to lower case
+ const files = [];
+
+ // some files we need to know the actual file name. Not compulsory but might help in the future.
+ let readMeFileName;
+ let repository;
+ let hasAutoFixed = false;
+
+ for (let i = 0; i < rootFiles.length; i++) {
+ if (rootFiles[i].toLowerCase().indexOf('readme') !== -1) readMeFileName = rootFiles[i];
+ files.push(rootFiles[i].toLowerCase());
+ }
+
+ if (files.indexOf('.git') === -1) {
+ console.error('No .git folder, aborting');
+ process.exit(1);
+ }
+
+ // do a git pull...
+ var child_process = require('child_process');
+ try {
+ child_process.execSync('git pull ', {cwd: `${pluginPath}/`});
+ } catch (e) {
+ console.error('Error git pull', e);
+ }
+
+ try {
+ const path = `${pluginPath}/.github/workflows/npmpublish.yml`;
+ if (!fs.existsSync(path)) {
+ console.log('no .github/workflows/npmpublish.yml, create one and set npm secret to auto publish to npm on commit');
+ if (autoFix) {
+ const npmpublish =
+ fs.readFileSync('bin/plugins/lib/npmpublish.yml', {encoding: 'utf8', flag: 'r'});
+ fs.mkdirSync(`${pluginPath}/.github/workflows`, {recursive: true});
+ fs.writeFileSync(path, npmpublish);
+ hasAutoFixed = true;
+ console.log("If you haven't already, setup autopublish for this plugin https://github.com/ether/etherpad-lite/wiki/Plugins:-Automatically-publishing-to-npm-on-commit-to-Github-Repo");
+ } else {
+ console.log('Setup autopublish for this plugin https://github.com/ether/etherpad-lite/wiki/Plugins:-Automatically-publishing-to-npm-on-commit-to-Github-Repo');
+ }
+ } else {
+ // autopublish exists, we should check the version..
+ // checkVersion takes two file paths and checks for a version string in them.
+ const currVersionFile = fs.readFileSync(path, {encoding: 'utf8', flag: 'r'});
+ const existingConfigLocation = currVersionFile.indexOf('##ETHERPAD_NPM_V=');
+ const existingValue = parseInt(currVersionFile.substr(existingConfigLocation + 17, existingConfigLocation.length));
+
+ const reqVersionFile = fs.readFileSync('bin/plugins/lib/npmpublish.yml', {encoding: 'utf8', flag: 'r'});
+ const reqConfigLocation = reqVersionFile.indexOf('##ETHERPAD_NPM_V=');
+ const reqValue = parseInt(reqVersionFile.substr(reqConfigLocation + 17, reqConfigLocation.length));
+
+ if (!existingValue || (reqValue > existingValue)) {
+ const npmpublish =
+ fs.readFileSync('bin/plugins/lib/npmpublish.yml', {encoding: 'utf8', flag: 'r'});
+ fs.mkdirSync(`${pluginPath}/.github/workflows`, {recursive: true});
+ fs.writeFileSync(path, npmpublish);
+ hasAutoFixed = true;
+ }
+ }
+ } catch (err) {
+ console.error(err);
+ }
+
+
+ try {
+ const path = `${pluginPath}/.github/workflows/backend-tests.yml`;
+ if (!fs.existsSync(path)) {
+ console.log('no .github/workflows/backend-tests.yml, create one and set npm secret to auto publish to npm on commit');
+ if (autoFix) {
+ const backendTests =
+ fs.readFileSync('bin/plugins/lib/backend-tests.yml', {encoding: 'utf8', flag: 'r'});
+ fs.mkdirSync(`${pluginPath}/.github/workflows`, {recursive: true});
+ fs.writeFileSync(path, backendTests);
+ hasAutoFixed = true;
+ }
+ } else {
+    // the backend-tests workflow exists, we should check its version..
+    // checkVersion takes two file paths and checks for a version string in them.
+ const currVersionFile = fs.readFileSync(path, {encoding: 'utf8', flag: 'r'});
+ const existingConfigLocation = currVersionFile.indexOf('##ETHERPAD_NPM_V=');
+ const existingValue = parseInt(currVersionFile.substr(existingConfigLocation + 17, existingConfigLocation.length));
+
+ const reqVersionFile = fs.readFileSync('bin/plugins/lib/backend-tests.yml', {encoding: 'utf8', flag: 'r'});
+ const reqConfigLocation = reqVersionFile.indexOf('##ETHERPAD_NPM_V=');
+ const reqValue = parseInt(reqVersionFile.substr(reqConfigLocation + 17, reqConfigLocation.length));
+
+ if (!existingValue || (reqValue > existingValue)) {
+ const backendTests =
+ fs.readFileSync('bin/plugins/lib/backend-tests.yml', {encoding: 'utf8', flag: 'r'});
+ fs.mkdirSync(`${pluginPath}/.github/workflows`, {recursive: true});
+ fs.writeFileSync(path, backendTests);
+ hasAutoFixed = true;
+ }
+ }
+ } catch (err) {
+ console.error(err);
+ }
+
+ if (files.indexOf('package.json') === -1) {
+ console.warn('no package.json, please create');
+ }
+
+ if (files.indexOf('package.json') !== -1) {
+ const packageJSON = fs.readFileSync(`${pluginPath}/package.json`, {encoding: 'utf8', flag: 'r'});
+ const parsedPackageJSON = JSON.parse(packageJSON);
+ if (autoFix) {
+ let updatedPackageJSON = false;
+ if (!parsedPackageJSON.funding) {
+ updatedPackageJSON = true;
+ parsedPackageJSON.funding = {
+ type: 'individual',
+ url: 'https://etherpad.org/',
+ };
+ }
+ if (updatedPackageJSON) {
+ hasAutoFixed = true;
+ fs.writeFileSync(`${pluginPath}/package.json`, JSON.stringify(parsedPackageJSON, null, 2));
+ }
+ }
+
+ if (packageJSON.toLowerCase().indexOf('repository') === -1) {
+ console.warn('No repository in package.json');
+ if (autoFix) {
+ console.warn('Repository not detected in package.json. Please add repository section manually.');
+ }
+ } else {
+ // useful for creating README later.
+ repository = parsedPackageJSON.repository.url;
+ }
+
+ // include lint config
+ if (packageJSON.toLowerCase().indexOf('devdependencies') === -1 || !parsedPackageJSON.devDependencies.eslint) {
+ console.warn('Missing eslint reference in devDependencies');
+ if (autoFix) {
+ const devDependencies = {
+ 'eslint': '^7.14.0',
+ 'eslint-config-etherpad': '^1.0.13',
+ 'eslint-plugin-mocha': '^8.0.0',
+ 'eslint-plugin-node': '^11.1.0',
+ 'eslint-plugin-prefer-arrow': '^1.2.2',
+ 'eslint-plugin-promise': '^4.2.1',
+ };
+ hasAutoFixed = true;
+ parsedPackageJSON.devDependencies = devDependencies;
+ fs.writeFileSync(`${pluginPath}/package.json`, JSON.stringify(parsedPackageJSON, null, 2));
+
+ const child_process = require('child_process');
+ try {
+ child_process.execSync('npm install', {cwd: `${pluginPath}/`});
+ hasAutoFixed = true;
+ } catch (e) {
+ console.error('Failed to create package-lock.json');
+ }
+ }
+ }
+
+ // include peer deps config
+ if (packageJSON.toLowerCase().indexOf('peerdependencies') === -1 || !parsedPackageJSON.peerDependencies) {
+ console.warn('Missing peer deps reference in package.json');
+ if (autoFix) {
+ const peerDependencies = {
+ 'ep_etherpad-lite': '>=1.8.6',
+ };
+ hasAutoFixed = true;
+ parsedPackageJSON.peerDependencies = peerDependencies;
+ fs.writeFileSync(`${pluginPath}/package.json`, JSON.stringify(parsedPackageJSON, null, 2));
+ const child_process = require('child_process');
+ try {
+ child_process.execSync('npm install --no-save ep_etherpad-lite@file:../../src', {cwd: `${pluginPath}/`});
+ hasAutoFixed = true;
+ } catch (e) {
+ console.error('Failed to create package-lock.json');
+ }
+ }
+ }
+
+ if (packageJSON.toLowerCase().indexOf('eslintconfig') === -1) {
+ console.warn('No esLintConfig in package.json');
+ if (autoFix) {
+ const eslintConfig = {
+ root: true,
+ extends: 'etherpad/plugin',
+ };
+ hasAutoFixed = true;
+ parsedPackageJSON.eslintConfig = eslintConfig;
+ fs.writeFileSync(`${pluginPath}/package.json`, JSON.stringify(parsedPackageJSON, null, 2));
+ }
+ }
+
+ if (packageJSON.toLowerCase().indexOf('scripts') === -1) {
+ console.warn('No scripts in package.json');
+ if (autoFix) {
+ const scripts = {
+ 'lint': 'eslint .',
+ 'lint:fix': 'eslint --fix .',
+ };
+ hasAutoFixed = true;
+ parsedPackageJSON.scripts = scripts;
+ fs.writeFileSync(`${pluginPath}/package.json`, JSON.stringify(parsedPackageJSON, null, 2));
+ }
+ }
+
+ if ((packageJSON.toLowerCase().indexOf('engines') === -1) || !parsedPackageJSON.engines.node) {
+ console.warn('No engines or node engine in package.json');
+ if (autoFix) {
+ const engines = {
+ node: '>=10.13.0',
+ };
+ hasAutoFixed = true;
+ parsedPackageJSON.engines = engines;
+ fs.writeFileSync(`${pluginPath}/package.json`, JSON.stringify(parsedPackageJSON, null, 2));
+ }
+ }
+ }
+
+ if (files.indexOf('package-lock.json') === -1) {
+ console.warn('package-lock.json file not found. Please run npm install in the plugin folder and commit the package-lock.json file.');
+ if (autoFix) {
+ var child_process = require('child_process');
+ try {
+ child_process.execSync('npm install', {cwd: `${pluginPath}/`});
+ console.log('Making package-lock.json');
+ hasAutoFixed = true;
+ } catch (e) {
+ console.error('Failed to create package-lock.json');
+ }
+ }
+ }
+
+ if (files.indexOf('readme') === -1 && files.indexOf('readme.md') === -1) {
+ console.warn('README.md file not found, please create');
+ if (autoFix) {
+ console.log('Autofixing missing README.md file, please edit the README.md file further to include plugin specific details.');
+ let readme = fs.readFileSync('bin/plugins/lib/README.md', {encoding: 'utf8', flag: 'r'});
+ readme = readme.replace(/\[plugin_name\]/g, pluginName);
+ if (repository) {
+ const org = repository.split('/')[3];
+ const name = repository.split('/')[4];
+ readme = readme.replace(/\[org_name\]/g, org);
+ readme = readme.replace(/\[repo_url\]/g, name);
+ fs.writeFileSync(`${pluginPath}/README.md`, readme);
+ } else {
+ console.warn('Unable to find repository in package.json, aborting.');
+ }
+ }
+ }
+
+ if (files.indexOf('contributing') === -1 && files.indexOf('contributing.md') === -1) {
+ console.warn('CONTRIBUTING.md file not found, please create');
+ if (autoFix) {
+ console.log('Autofixing missing CONTRIBUTING.md file, please edit the CONTRIBUTING.md file further to include plugin specific details.');
+ let contributing = fs.readFileSync('bin/plugins/lib/CONTRIBUTING.md', {encoding: 'utf8', flag: 'r'});
+ contributing = contributing.replace(/\[plugin_name\]/g, pluginName);
+ fs.writeFileSync(`${pluginPath}/CONTRIBUTING.md`, contributing);
+ }
+ }
+
+
+ if (files.indexOf('readme') !== -1 && files.indexOf('readme.md') !== -1) {
+ const readme = fs.readFileSync(`${pluginPath}/${readMeFileName}`, {encoding: 'utf8', flag: 'r'});
+ if (readme.toLowerCase().indexOf('license') === -1) {
+ console.warn('No license section in README');
+ if (autoFix) {
+ console.warn('Please add License section to README manually.');
+ }
+ }
+ }
+
+ if (files.indexOf('license') === -1 && files.indexOf('license.md') === -1) {
+ console.warn('LICENSE.md file not found, please create');
+ if (autoFix) {
+ hasAutoFixed = true;
+ console.log('Autofixing missing LICENSE.md file, including Apache 2 license.');
+ exec('git config user.name', (error, name, stderr) => {
+ if (error) {
+ console.log(`error: ${error.message}`);
+ return;
+ }
+ if (stderr) {
+ console.log(`stderr: ${stderr}`);
+ return;
+ }
+ let license = fs.readFileSync('bin/plugins/lib/LICENSE.md', {encoding: 'utf8', flag: 'r'});
+ license = license.replace('[yyyy]', new Date().getFullYear());
+ license = license.replace('[name of copyright owner]', name);
+ fs.writeFileSync(`${pluginPath}/LICENSE.md`, license);
+ });
+ }
+ }
+
+ let travisConfig = fs.readFileSync('bin/plugins/lib/travis.yml', {encoding: 'utf8', flag: 'r'});
+ travisConfig = travisConfig.replace(/\[plugin_name\]/g, pluginName);
+
+ if (files.indexOf('.travis.yml') === -1) {
+ console.warn('.travis.yml file not found, please create. .travis.yml is used for automatically CI testing Etherpad. It is useful to know if your plugin breaks another feature for example.');
+ // TODO: Make it check version of the .travis file to see if it needs an update.
+ if (autoFix) {
+ hasAutoFixed = true;
+ console.log('Autofixing missing .travis.yml file');
+ fs.writeFileSync(`${pluginPath}/.travis.yml`, travisConfig);
+ console.log('Travis file created, please sign into travis and enable this repository');
+ }
+ }
+ if (autoFix && autoUpdate) {
+ // checks the file versioning of .travis and updates it to the latest.
+ const existingConfig = fs.readFileSync(`${pluginPath}/.travis.yml`, {encoding: 'utf8', flag: 'r'});
+ const existingConfigLocation = existingConfig.indexOf('##ETHERPAD_TRAVIS_V=');
+ const existingValue = parseInt(existingConfig.substr(existingConfigLocation + 20, existingConfig.length));
+
+ const newConfigLocation = travisConfig.indexOf('##ETHERPAD_TRAVIS_V=');
+ const newValue = parseInt(travisConfig.substr(newConfigLocation + 20, travisConfig.length));
+ if (existingConfigLocation === -1) {
+ console.warn('no previous .travis.yml version found so writing new.');
+ // we will write the newTravisConfig to the location.
+ fs.writeFileSync(`${pluginPath}/.travis.yml`, travisConfig);
+ } else if (newValue > existingValue) {
+ console.log('updating .travis.yml');
+ fs.writeFileSync(`${pluginPath}/.travis.yml`, travisConfig);
+ hasAutoFixed = true;
+ }//
+ }
+
+ if (files.indexOf('.gitignore') === -1) {
+ console.warn(".gitignore file not found, please create. .gitignore files are useful to ensure files aren't incorrectly commited to a repository.");
+ if (autoFix) {
+ hasAutoFixed = true;
+ console.log('Autofixing missing .gitignore file');
+ const gitignore = fs.readFileSync('bin/plugins/lib/gitignore', {encoding: 'utf8', flag: 'r'});
+ fs.writeFileSync(`${pluginPath}/.gitignore`, gitignore);
+ }
+ } else {
+ let gitignore =
+ fs.readFileSync(`${pluginPath}/.gitignore`, {encoding: 'utf8', flag: 'r'});
+ if (gitignore.indexOf('node_modules/') === -1) {
+ console.warn('node_modules/ missing from .gitignore');
+ if (autoFix) {
+ gitignore += 'node_modules/';
+ fs.writeFileSync(`${pluginPath}/.gitignore`, gitignore);
+ hasAutoFixed = true;
+ }
+ }
+ }
+
+ // if we include templates but don't have translations...
+ if (files.indexOf('templates') !== -1 && files.indexOf('locales') === -1) {
+ console.warn('Translations not found, please create. Translation files help with Etherpad accessibility.');
+ }
+
+
+ if (files.indexOf('.ep_initialized') !== -1) {
+ console.warn('.ep_initialized found, please remove. .ep_initialized should never be commited to git and should only exist once the plugin has been executed one time.');
+ if (autoFix) {
+ hasAutoFixed = true;
+ console.log('Autofixing incorrectly existing .ep_initialized file');
+ fs.unlinkSync(`${pluginPath}/.ep_initialized`);
+ }
+ }
+
+ if (files.indexOf('npm-debug.log') !== -1) {
+ console.warn('npm-debug.log found, please remove. npm-debug.log should never be commited to your repository.');
+ if (autoFix) {
+ hasAutoFixed = true;
+ console.log('Autofixing incorrectly existing npm-debug.log file');
+ fs.unlinkSync(`${pluginPath}/npm-debug.log`);
+ }
+ }
+
+ if (files.indexOf('static') !== -1) {
+ fs.readdir(`${pluginPath}/static`, (errRead, staticFiles) => {
+ if (staticFiles.indexOf('tests') === -1) {
+ console.warn('Test files not found, please create tests. https://github.com/ether/etherpad-lite/wiki/Creating-a-plugin#writing-and-running-front-end-tests-for-your-plugin');
+ }
+ });
+ } else {
+ console.warn('Test files not found, please create tests. https://github.com/ether/etherpad-lite/wiki/Creating-a-plugin#writing-and-running-front-end-tests-for-your-plugin');
+ }
+
+ // linting begins
+ if (autoFix) {
+ var lintCmd = 'npm run lint:fix';
+ } else {
+ var lintCmd = 'npm run lint';
+ }
+
+ try {
+ child_process.execSync(lintCmd, {cwd: `${pluginPath}/`});
+ console.log('Linting...');
+ if (autoFix) {
+ // todo: if npm run lint doesn't do anything no need for...
+ hasAutoFixed = true;
+ }
+ } catch (e) {
+ // it is gonna throw an error anyway
+ console.log('Manual linting probably required, check with: npm run lint');
+ }
+ // linting ends.
+
+ if (hasAutoFixed) {
+ console.log('Fixes applied, please check git diff then run the following command:\n\n');
+ // bump npm Version
+ if (autoCommit) {
+ // holy shit you brave.
+ console.log('Attempting autocommit and auto publish to npm');
+ // github should push to npm for us :)
+ exec(`cd node_modules/${pluginName} && git rm -rf node_modules --ignore-unmatch && git add -A && git commit --allow-empty -m 'autofixes from Etherpad checkPlugins.js' && git push && cd ../..`, (error, name, stderr) => {
+ if (error) {
+ console.log(`error: ${error.message}`);
+ return;
+ }
+ if (stderr) {
+ console.log(`stderr: ${stderr}`);
+ return;
+ }
+ console.log("I think she's got it! By George she's got it!");
+ process.exit(0);
+ });
+ } else {
+ console.log(`cd node_modules/${pluginName} && git add -A && git commit --allow-empty -m 'autofixes from Etherpad checkPlugins.js' && npm version patch && git add package.json && git commit --allow-empty -m 'bump version' && git push && npm publish && cd ../..`);
+ }
+ }
+
+ console.log('Finished');
+});
diff --git a/bin/plugins/getCorePlugins.sh b/bin/plugins/getCorePlugins.sh
new file mode 100755
index 00000000000..e8ce68b21f5
--- /dev/null
+++ b/bin/plugins/getCorePlugins.sh
@@ -0,0 +1,4 @@
+cd node_modules/
+GHUSER=ether; curl "https://api.github.com/users/$GHUSER/repos?per_page=100" | grep -o 'git@[^"]*' | grep /ep_ | xargs -L1 git clone
+GHUSER=ether; curl "https://api.github.com/users/$GHUSER/repos?per_page=100&page=2&" | grep -o 'git@[^"]*' | grep /ep_ | xargs -L1 git clone
+GHUSER=ether; curl "https://api.github.com/users/$GHUSER/repos?per_page=100&page=3&" | grep -o 'git@[^"]*' | grep /ep_ | xargs -L1 git clone
diff --git a/bin/plugins/lib/CONTRIBUTING.md b/bin/plugins/lib/CONTRIBUTING.md
new file mode 100644
index 00000000000..724e02ac021
--- /dev/null
+++ b/bin/plugins/lib/CONTRIBUTING.md
@@ -0,0 +1,133 @@
+# Contributor Guidelines
+(Please talk to people on the mailing list before you change this page, see our section on [how to get in touch](https://github.com/ether/etherpad-lite#get-in-touch))
+
+## Pull requests
+
+* the commit series in the PR should be _linear_ (it **should not contain merge commits**). This is necessary because we want to be able to [bisect](https://en.wikipedia.org/wiki/Bisection_(software_engineering)) bugs easily. Rewrite history/perform a rebase if necessary
+* PRs should be issued against the **develop** branch: we never pull directly into **master**
+* PRs **should not have conflicts** with develop. If there are, please resolve them rebasing and force-pushing
+* when preparing your PR, please make sure that you have included the relevant **changes to the documentation** (preferably with usage examples)
+* contain meaningful and detailed **commit messages** in the form:
+ ```
+ submodule: description
+
+ longer description of the change you have made, possibly mentioning the
+ number of the issue that is being fixed, in the form: Fixes #someIssueNumber
+ ```
+* if the PR is a **bug fix**:
+ * the first commit in the series must be a test that shows the failure
+ * subsequent commits will fix the bug and make the test pass
+ * the final commit message should include the text `Fixes: #xxx` to link it to its bug report
+* think about stability: code has to be backwards compatible as much as possible. Always **assume your code will be run with an older version of the DB/config file**
+* if you want to remove a feature, **deprecate it instead**:
+ * write an issue with your deprecation plan
+ * output a `WARN` in the log informing that the feature is going to be removed
+ * remove the feature in the next version
+* if you want to add a new feature, put it under a **feature flag**:
+ * once the new feature has reached a minimal level of stability, do a PR for it, so it can be integrated early
+ * expose a mechanism for enabling/disabling the feature
+ * the new feature should be **disabled** by default. With the feature disabled, the code path should be exactly the same as before your contribution. This is a __necessary condition__ for early integration
+* think of the PR not as something that __you wrote__, but as something that __someone else is going to read__. The commit series in the PR should tell a novice developer the story of your thoughts when developing it
+
+## How to write a bug report
+
+* Please be polite, we all are humans and problems can occur.
+* Please add as much information as possible, for example
+ * client os(s) and version(s)
+ * browser(s) and version(s), is the problem reproducible on different clients
+ * special environments like firewalls or antivirus
+ * host os and version
+ * npm and nodejs version
+ * Logfiles if available
+ * steps to reproduce
+ * what you expected to happen
+ * what actually happened
+* Please format logfiles and code examples with Markdown; see the GitHub Markdown help below the issue textarea for more information.
+
+If you send logfiles, please set the loglevel to DEBUG in your settings.json file:
+
+```
+/* The log level we are using, can be: DEBUG, INFO, WARN, ERROR */
+ "loglevel": "DEBUG",
+```
+
+The logfile location is defined in the startup script, or the log is shown directly in the command line after you have started Etherpad.
+
+## General goals of Etherpad
+To make sure everybody is going in the same direction:
+* easy to install for admins and easy to use for people
+* easy to integrate into other apps, but also usable as standalone
+* lightweight and scalable
+* extensible, as much functionality should be extendable with plugins so changes don't have to be done in core.
+Also, keep it maintainable. We don't wanna end up as the monster Etherpad was!
+
+## How to work with git?
+* Don't work in your master branch.
+* Make a new branch for every feature you're working on. (This ensures that you can do lots of small, independent pull requests instead of one big one with completely different features)
+* Don't use the online edit function of github (this only creates ugly and non-working commits!)
+* Try to make clean commits that are easily readable (including descriptive commit messages!)
+* Test before you push. Sounds easy, it isn't!
+* Don't check in stuff that gets generated during build or runtime
+* Make small pull requests that are easy to review but make sure they do add value by themselves / individually
+
+## Coding style
+* Do write comments. (You don't have to comment every line, but if you come up with something that's a bit complex/weird, just leave a comment. Bear in mind that you will probably leave the project at some point and that other people will read your code. Undocumented huge amounts of code are worthless!)
+* Never ever use tabs
+* Indentation: JS/CSS: 2 spaces; HTML: 4 spaces
+* Don't overengineer. Don't try to solve any possible problem in one step, but try to solve problems as easy as possible and improve the solution over time!
+* Do generalize sooner or later! (if an old solution, quickly hacked together, poses more problems than it solves today, refactor it!)
+* Keep it compatible. Do not introduce changes to the public API, db schema or configurations too lightly. Don't make incompatible changes without good reasons!
+* If you do make changes, document them! (see below)
+* Use protocol independent urls "//"
+
+## Branching model / git workflow
+see git flow http://nvie.com/posts/a-successful-git-branching-model/
+
+### `master` branch
+* the stable branch
+* This is the branch everyone should use for production stuff
+
+### `develop` branch
+* everything that is READY to go into master at some point in time
+* This stuff is tested and ready to go out
+
+### release branches
+* stuff that should go into master very soon
+* only bugfixes go into these (see http://nvie.com/posts/a-successful-git-branching-model/ for why)
+* we should not be blocking new features to develop, just because we feel that we should be releasing it to master soon. This is the situation that release branches solve/handle.
+
+### hotfix branches
+* fixes for bugs in master
+
+### feature branches (in your own repos)
+* these are the branches where you develop your features in
+* If it's ready to go out, it will be merged into develop
+
+Over time we pull features from feature branches into the develop branch. Every month we pull from develop into master. Bugs in master get fixed in hotfix branches. These branches will get merged into master AND develop. There should never be commits in master that aren't in develop
+
+## Documentation
+The docs are in the `doc/` folder in the git repository, so people can easily find the suitable docs for the current git revision.
+
+Documentation should be kept up-to-date. This means, whenever you add a new API method, add a new hook or change the database model, pack the relevant changes to the docs in the same pull request.
+
+You can build the docs e.g. produce html, using `make docs`. At some point in the future we will provide an online documentation. The current documentation in the github wiki should always reflect the state of `master` (!), since there are no docs in master, yet.
+
+## Testing
+Front-end tests are found in the `tests/frontend/` folder in the repository. Run them by pointing your browser to `/tests/frontend`.
+
+Back-end tests can be run from the `src` directory, via `npm test`.
+
+## Things you can help with
+Etherpad is much more than software. So if you aren't a developer then worry not, there is still a LOT you can do! A big part of what we do is community engagement. You can help in the following ways:
+ * Triage bugs (applying labels) and confirming their existence
+ * Testing fixes (simply applying them and seeing if it fixes your issue or not) - Some git experience required
+ * Notifying large site admins of new releases
+ * Writing Changelogs for releases
+ * Creating Windows packages
+ * Creating releases
+ * Bumping dependencies periodically and checking they don't break anything
+ * Write proposals for grants
+ * Co-Author and Publish CVEs
+ * Work with SFC to maintain legal side of project
+ * Maintain TODO page - https://github.com/ether/etherpad-lite/wiki/TODO#IMPORTANT_TODOS
+
diff --git a/bin/plugins/lib/LICENSE.md b/bin/plugins/lib/LICENSE.md
index 8cb6bc0c609..004c62e1b1e 100755
--- a/bin/plugins/lib/LICENSE.md
+++ b/bin/plugins/lib/LICENSE.md
@@ -1,13 +1,13 @@
-Copyright [yyyy] [name of copyright owner]
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/bin/plugins/lib/README.md b/bin/plugins/lib/README.md
index c3a3b1fbf37..3a1e2619330 100755
--- a/bin/plugins/lib/README.md
+++ b/bin/plugins/lib/README.md
@@ -1,28 +1,29 @@
-[![Travis (.org)](https://api.travis-ci.org/[org_name]/[repo_url].svg?branch=develop)](https://travis-ci.org/github/[org_name]/[repo_url])
-
-# My awesome plugin README example
-Explain what your plugin does and who it's useful for.
-
-## Example animated gif of usage if appropriate
-
-## Installing
-npm install [plugin_name]
-
-or Use the Etherpad ``/admin`` interface.
-
-## Settings
-Document settings if any
-
-## Testing
-Document how to run backend / frontend tests.
-
-### Frontend
-
-Visit http://whatever/tests/frontend/ to run the frontend tests.
-
-### backend
-
-Type ``cd src && npm run test`` to run the backend tests.
-
-## LICENSE
-Apache 2.0
+[![Travis (.com)](https://api.travis-ci.com/[org_name]/[repo_url].svg?branch=develop)](https://travis-ci.com/github/[org_name]/[repo_url])
+
+# My awesome plugin README example
+Explain what your plugin does and who it's useful for.
+
+## Example animated gif of usage if appropriate
+![screenshot](https://user-images.githubusercontent.com/220864/99979953-97841d80-2d9f-11eb-9782-5f65817c58f4.PNG)
+
+## Installing
+npm install [plugin_name]
+
+or Use the Etherpad ``/admin`` interface.
+
+## Settings
+Document settings if any
+
+## Testing
+Document how to run backend / frontend tests.
+
+### Frontend
+
+Visit http://whatever/tests/frontend/ to run the frontend tests.
+
+### backend
+
+Type ``cd src && npm run test`` to run the backend tests.
+
+## LICENSE
+Apache 2.0
diff --git a/bin/plugins/lib/backend-tests.yml b/bin/plugins/lib/backend-tests.yml
new file mode 100644
index 00000000000..324cc4baf0a
--- /dev/null
+++ b/bin/plugins/lib/backend-tests.yml
@@ -0,0 +1,51 @@
+# You need to change lines 38 and 46 in case the plugin's name on npmjs.com is different
+# from the repository name
+
+name: "Backend tests"
+
+# any branch is useful for testing before a PR is submitted
+on: [push, pull_request]
+
+jobs:
+ withplugins:
+ # run on pushes to any branch
+ # run on PRs from external forks
+ if: |
+ (github.event_name != 'pull_request')
+ || (github.event.pull_request.head.repo.id != github.event.pull_request.base.repo.id)
+ name: with Plugins
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Install libreoffice
+ run: |
+ sudo add-apt-repository -y ppa:libreoffice/ppa
+ sudo apt update
+ sudo apt install -y --no-install-recommends libreoffice libreoffice-pdfimport
+
+ # clone etherpad-lite
+ - name: Install etherpad core
+ uses: actions/checkout@v2
+ with:
+ repository: ether/etherpad-lite
+
+ - name: Install all dependencies and symlink for ep_etherpad-lite
+ run: bin/installDeps.sh
+
+ # clone this repository into node_modules/ep_plugin-name
+ - name: Checkout plugin repository
+ uses: actions/checkout@v2
+ with:
+ path: ./node_modules/${{github.event.repository.name}}
+
+ - name: Install plugin dependencies
+ run: |
+ cd node_modules/${{github.event.repository.name}}
+ npm ci
+
+ # configures some settings and runs npm run test
+ - name: Run the backend tests
+ run: tests/frontend/travis/runnerBackend.sh
+
+##ETHERPAD_NPM_V=1
+## NPM configuration automatically created using bin/plugins/updateAllPluginsScript.sh
diff --git a/bin/plugins/lib/gitignore b/bin/plugins/lib/gitignore
index f6d13a09674..0719a85c1bd 100755
--- a/bin/plugins/lib/gitignore
+++ b/bin/plugins/lib/gitignore
@@ -1,5 +1,5 @@
-.ep_initialized
-.DS_Store
-node_modules/
-node_modules
-npm-debug.log
+.ep_initialized
+.DS_Store
+node_modules/
+node_modules
+npm-debug.log
diff --git a/bin/plugins/lib/npmpublish.yml b/bin/plugins/lib/npmpublish.yml
new file mode 100644
index 00000000000..8d94ce88ae0
--- /dev/null
+++ b/bin/plugins/lib/npmpublish.yml
@@ -0,0 +1,73 @@
+# This workflow will run tests using node and then publish a package to the npm registry when a release is created
+# For more information see: https://help.github.com/actions/language-and-framework-guides/publishing-nodejs-packages
+
+name: Node.js Package
+
+on:
+ pull_request:
+ push:
+ branches:
+ - main
+ - master
+
+jobs:
+ test:
+ runs-on: ubuntu-latest
+ steps:
+ # Clone ether/etherpad-lite to ../etherpad-lite so that ep_etherpad-lite
+ # can be "installed" in this plugin's node_modules. The checkout v2 action
+ # doesn't support cloning outside of $GITHUB_WORKSPACE (see
+ # https://github.com/actions/checkout/issues/197), so the repo is first
+ # cloned to etherpad-lite then moved to ../etherpad-lite. To avoid
+ # conflicts with this plugin's clone, etherpad-lite must be cloned and
+ # moved out before this plugin's repo is cloned to $GITHUB_WORKSPACE.
+ - uses: actions/checkout@v2
+ with:
+ repository: ether/etherpad-lite
+ path: etherpad-lite
+ - run: mv etherpad-lite ..
+ # etherpad-lite has been moved outside of $GITHUB_WORKSPACE, so it is now
+ # safe to clone this plugin's repo to $GITHUB_WORKSPACE.
+ - uses: actions/checkout@v2
+ - uses: actions/setup-node@v1
+ with:
+ node-version: 12
+ # All of ep_etherpad-lite's devDependencies are installed because the
+ # plugin might do `require('ep_etherpad-lite/node_modules/${devDep}')`.
+ # Eventually it would be nice to create an ESLint plugin that prohibits
+ # Etherpad plugins from piggybacking off of ep_etherpad-lite's
+ # devDependencies. If we had that, we could change this line to only
+ # install production dependencies.
+ - run: cd ../etherpad-lite/src && npm ci
+ - run: npm ci
+ # This runs some sanity checks and creates a symlink at
+ # node_modules/ep_etherpad-lite that points to ../../etherpad-lite/src.
+ # This step must be done after `npm ci` installs the plugin's dependencies
+ # because npm "helpfully" cleans up such symlinks. :( Installing
+ # ep_etherpad-lite in the plugin's node_modules prevents lint errors and
+ # unit test failures if the plugin does `require('ep_etherpad-lite/foo')`.
+ - run: npm install --no-save ep_etherpad-lite@file:../etherpad-lite/src
+ - run: npm test
+ - run: npm run lint
+
+ publish-npm:
+ if: github.event_name == 'push'
+ needs: test
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - uses: actions/setup-node@v1
+ with:
+ node-version: 12
+ registry-url: https://registry.npmjs.org/
+ - run: git config user.name 'github-actions[bot]'
+ - run: git config user.email '41898282+github-actions[bot]@users.noreply.github.com'
+ - run: npm ci
+ - run: npm version patch
+ - run: npm publish
+ env:
+ NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
+ - run: git push --follow-tags
+
+##ETHERPAD_NPM_V=1
+## NPM configuration automatically created using bin/plugins/updateAllPluginsScript.sh
diff --git a/bin/plugins/lib/travis.yml b/bin/plugins/lib/travis.yml
old mode 100755
new mode 100644
index 81e7d336e1d..099d7e4459b
--- a/bin/plugins/lib/travis.yml
+++ b/bin/plugins/lib/travis.yml
@@ -1,68 +1,70 @@
-language: node_js
-
-node_js:
- - "lts/*"
-
-cache: false
-
-before_install:
- - sudo add-apt-repository -y ppa:libreoffice/ppa
- - sudo apt-get update
- - sudo apt-get -y install libreoffice
- - sudo apt-get -y install libreoffice-pdfimport
-
-services:
- - docker
-
-install:
- - "bin/installDeps.sh"
- - "export GIT_HASH=$(git rev-parse --verify --short HEAD)"
-
-before_script:
- - "tests/frontend/travis/sauce_tunnel.sh"
-
-script:
- - "tests/frontend/travis/runner.sh"
-
-env:
- global:
- - secure: "WMGxFkOeTTlhWB+ChMucRtIqVmMbwzYdNHuHQjKCcj8HBEPdZLfCuK/kf4rG\nVLcLQiIsyllqzNhBGVHG1nyqWr0/LTm8JRqSCDDVIhpyzp9KpCJQQJG2Uwjk\n6/HIJJh/wbxsEdLNV2crYU/EiVO3A4Bq0YTHUlbhUqG3mSCr5Ec="
- - secure: "gejXUAHYscbR6Bodw35XexpToqWkv2ifeECsbeEmjaLkYzXmUUNWJGknKSu7\nEUsSfQV8w+hxApr1Z+jNqk9aX3K1I4btL3cwk2trnNI8XRAvu1c1Iv60eerI\nkE82Rsd5lwUaMEh+/HoL8ztFCZamVndoNgX7HWp5J/NRZZMmh4g="
-
-jobs:
- include:
- - name: "Run the Backend tests"
- install:
- - "npm install"
- - "mkdir [plugin_name]"
- - "mv !([plugin_name]) [plugin_name]"
- - "git clone https://github.com/ether/etherpad-lite.git etherpad"
- - "cd etherpad"
- - "mkdir node_modules"
- - "mv ../[plugin_name] node_modules"
- - "bin/installDeps.sh"
- - "export GIT_HASH=$(git rev-parse --verify --short HEAD)"
- - "cd src && npm install && cd -"
- script:
- - "tests/frontend/travis/runnerBackend.sh"
- - name: "Test the Frontend"
- install:
- - "npm install"
- - "mkdir [plugin_name]"
- - "mv !([plugin_name]) [plugin_name]"
- - "git clone https://github.com/ether/etherpad-lite.git etherpad"
- - "cd etherpad"
- - "mkdir node_modules"
- - "mv ../[plugin_name] node_modules"
- - "bin/installDeps.sh"
- - "export GIT_HASH=$(git rev-parse --verify --short HEAD)"
- script:
- - "tests/frontend/travis/runner.sh"
-
-notifications:
- irc:
- channels:
- - "irc.freenode.org#etherpad-lite-dev"
-
-##ETHERPAD_TRAVIS_V=3
-## Travis configuration automatically created using bin/plugins/updateAllPluginsScript.sh
+language: node_js
+
+node_js:
+ - "lts/*"
+
+cache: false
+
+services:
+ - docker
+
+install:
+ - "export GIT_HASH=$(git rev-parse --verify --short HEAD)"
+
+#script:
+# - "tests/frontend/travis/runner.sh"
+
+env:
+ global:
+ - secure: "WMGxFkOeTTlhWB+ChMucRtIqVmMbwzYdNHuHQjKCcj8HBEPdZLfCuK/kf4rG\nVLcLQiIsyllqzNhBGVHG1nyqWr0/LTm8JRqSCDDVIhpyzp9KpCJQQJG2Uwjk\n6/HIJJh/wbxsEdLNV2crYU/EiVO3A4Bq0YTHUlbhUqG3mSCr5Ec="
+ - secure: "gejXUAHYscbR6Bodw35XexpToqWkv2ifeECsbeEmjaLkYzXmUUNWJGknKSu7\nEUsSfQV8w+hxApr1Z+jNqk9aX3K1I4btL3cwk2trnNI8XRAvu1c1Iv60eerI\nkE82Rsd5lwUaMEh+/HoL8ztFCZamVndoNgX7HWp5J/NRZZMmh4g="
+
+jobs:
+ include:
+ - name: "Lint test package-lock"
+ install:
+ - "npm install lockfile-lint"
+ script:
+ - npx lockfile-lint --path package-lock.json --validate-https --allowed-hosts npm
+ - name: "Run the Backend tests"
+ before_install:
+ - sudo add-apt-repository -y ppa:libreoffice/ppa
+ - sudo apt-get update
+ - sudo apt-get -y install libreoffice
+ - sudo apt-get -y install libreoffice-pdfimport
+ install:
+ - "npm install"
+ - "mkdir [plugin_name]"
+ - "mv !([plugin_name]) [plugin_name]"
+ - "git clone https://github.com/ether/etherpad-lite.git etherpad"
+ - "cd etherpad"
+ - "mkdir -p node_modules"
+ - "mv ../[plugin_name] node_modules"
+ - "bin/installDeps.sh"
+ - "export GIT_HASH=$(git rev-parse --verify --short HEAD)"
+ - "cd src && npm install && cd -"
+ script:
+ - "tests/frontend/travis/runnerBackend.sh"
+ - name: "Test the Frontend"
+ before_script:
+ - "tests/frontend/travis/sauce_tunnel.sh"
+ install:
+ - "npm install"
+ - "mkdir [plugin_name]"
+ - "mv !([plugin_name]) [plugin_name]"
+ - "git clone https://github.com/ether/etherpad-lite.git etherpad"
+ - "cd etherpad"
+ - "mkdir -p node_modules"
+ - "mv ../[plugin_name] node_modules"
+ - "bin/installDeps.sh"
+ - "export GIT_HASH=$(git rev-parse --verify --short HEAD)"
+ script:
+ - "tests/frontend/travis/runner.sh"
+
+notifications:
+ irc:
+ channels:
+ - "irc.freenode.org#etherpad-lite-dev"
+
+##ETHERPAD_TRAVIS_V=9
+## Travis configuration automatically created using bin/plugins/updateAllPluginsScript.sh
diff --git a/bin/plugins/updateCorePlugins.sh b/bin/plugins/updateCorePlugins.sh
new file mode 100755
index 00000000000..bf4e6b6d652
--- /dev/null
+++ b/bin/plugins/updateCorePlugins.sh
@@ -0,0 +1,9 @@
+#!/bin/sh
+
+set -e
+
+for dir in node_modules/ep_*; do
+ dir=${dir#node_modules/}
+ [ "$dir" != ep_etherpad-lite ] || continue
+ node bin/plugins/checkPlugin.js "$dir" autofix autocommit autoupdate
+done
diff --git a/bin/rebuildPad.js b/bin/rebuildPad.js
index 0013718a9c2..12ff218479c 100644
--- a/bin/rebuildPad.js
+++ b/bin/rebuildPad.js
@@ -3,121 +3,124 @@
known "good" revision.
*/
-if(process.argv.length != 4 && process.argv.length != 5) {
- console.error("Use: node bin/repairPad.js $PADID $REV [$NEWPADID]");
+if (process.argv.length != 4 && process.argv.length != 5) {
+ console.error('Use: node bin/repairPad.js $PADID $REV [$NEWPADID]');
process.exit(1);
}
-var npm = require("../src/node_modules/npm");
-var async = require("../src/node_modules/async");
-var ueberDB = require("../src/node_modules/ueberdb2");
+const npm = require('../src/node_modules/npm');
+const async = require('../src/node_modules/async');
+const ueberDB = require('../src/node_modules/ueberdb2');
-var padId = process.argv[2];
-var newRevHead = process.argv[3];
-var newPadId = process.argv[4] || padId + "-rebuilt";
+const padId = process.argv[2];
+const newRevHead = process.argv[3];
+const newPadId = process.argv[4] || `${padId}-rebuilt`;
-var db, oldPad, newPad, settings;
-var AuthorManager, ChangeSet, Pad, PadManager;
+let db, oldPad, newPad, settings;
+let AuthorManager, ChangeSet, Pad, PadManager;
async.series([
- function(callback) {
- npm.load({}, function(err) {
- if(err) {
- console.error("Could not load NPM: " + err)
+ function (callback) {
+ npm.load({}, (err) => {
+ if (err) {
+ console.error(`Could not load NPM: ${err}`);
process.exit(1);
} else {
callback();
}
- })
+ });
},
- function(callback) {
+ function (callback) {
// Get a handle into the database
db = require('../src/node/db/DB');
db.init(callback);
- }, function(callback) {
- PadManager = require('../src/node/db/PadManager');
- Pad = require('../src/node/db/Pad').Pad;
- // Get references to the original pad and to a newly created pad
- // HACK: This is a standalone script, so we want to write everything
- // out to the database immediately. The only problem with this is
- // that a driver (like the mysql driver) can hardcode these values.
- db.db.db.settings = {cache: 0, writeInterval: 0, json: true};
- // Validate the newPadId if specified and that a pad with that ID does
- // not already exist to avoid overwriting it.
- if (!PadManager.isValidPadId(newPadId)) {
- console.error("Cannot create a pad with that id as it is invalid");
- process.exit(1);
- }
- PadManager.doesPadExists(newPadId, function(err, exists) {
- if (exists) {
- console.error("Cannot create a pad with that id as it already exists");
- process.exit(1);
- }
- });
- PadManager.getPad(padId, function(err, pad) {
- oldPad = pad;
- newPad = new Pad(newPadId);
- callback();
- });
- }, function(callback) {
+ },
+ function (callback) {
+ PadManager = require('../src/node/db/PadManager');
+ Pad = require('../src/node/db/Pad').Pad;
+ // Get references to the original pad and to a newly created pad
+ // HACK: This is a standalone script, so we want to write everything
+ // out to the database immediately. The only problem with this is
+ // that a driver (like the mysql driver) can hardcode these values.
+ db.db.db.settings = {cache: 0, writeInterval: 0, json: true};
+ // Validate the newPadId if specified and that a pad with that ID does
+ // not already exist to avoid overwriting it.
+ if (!PadManager.isValidPadId(newPadId)) {
+ console.error('Cannot create a pad with that id as it is invalid');
+ process.exit(1);
+ }
+ PadManager.doesPadExists(newPadId, (err, exists) => {
+ if (exists) {
+ console.error('Cannot create a pad with that id as it already exists');
+ process.exit(1);
+ }
+ });
+ PadManager.getPad(padId, (err, pad) => {
+ oldPad = pad;
+ newPad = new Pad(newPadId);
+ callback();
+ });
+ },
+ function (callback) {
// Clone all Chat revisions
- var chatHead = oldPad.chatHead;
- for(var i = 0, curHeadNum = 0; i <= chatHead; i++) {
- db.db.get("pad:" + padId + ":chat:" + i, function (err, chat) {
- db.db.set("pad:" + newPadId + ":chat:" + curHeadNum++, chat);
- console.log("Created: Chat Revision: pad:" + newPadId + ":chat:" + curHeadNum);
+ const chatHead = oldPad.chatHead;
+ for (var i = 0, curHeadNum = 0; i <= chatHead; i++) {
+ db.db.get(`pad:${padId}:chat:${i}`, (err, chat) => {
+ db.db.set(`pad:${newPadId}:chat:${curHeadNum++}`, chat);
+ console.log(`Created: Chat Revision: pad:${newPadId}:chat:${curHeadNum}`);
});
}
callback();
- }, function(callback) {
+ },
+ function (callback) {
// Rebuild Pad from revisions up to and including the new revision head
- AuthorManager = require("../src/node/db/AuthorManager");
- Changeset = require("ep_etherpad-lite/static/js/Changeset");
+ AuthorManager = require('../src/node/db/AuthorManager');
+ Changeset = require('ep_etherpad-lite/static/js/Changeset');
// Author attributes are derived from changesets, but there can also be
// non-author attributes with specific mappings that changesets depend on
// and, AFAICT, cannot be recreated any other way
newPad.pool.numToAttrib = oldPad.pool.numToAttrib;
- for(var curRevNum = 0; curRevNum <= newRevHead; curRevNum++) {
- db.db.get("pad:" + padId + ":revs:" + curRevNum, function(err, rev) {
+ for (let curRevNum = 0; curRevNum <= newRevHead; curRevNum++) {
+ db.db.get(`pad:${padId}:revs:${curRevNum}`, (err, rev) => {
if (rev.meta) {
- throw "The specified revision number could not be found.";
+ throw 'The specified revision number could not be found.';
}
- var newRevNum = ++newPad.head;
- var newRevId = "pad:" + newPad.id + ":revs:" + newRevNum;
+ const newRevNum = ++newPad.head;
+ const newRevId = `pad:${newPad.id}:revs:${newRevNum}`;
db.db.set(newRevId, rev);
AuthorManager.addPad(rev.meta.author, newPad.id);
newPad.atext = Changeset.applyToAText(rev.changeset, newPad.atext, newPad.pool);
- console.log("Created: Revision: pad:" + newPad.id + ":revs:" + newRevNum);
+ console.log(`Created: Revision: pad:${newPad.id}:revs:${newRevNum}`);
if (newRevNum == newRevHead) {
callback();
}
});
}
- }, function(callback) {
+ },
+ function (callback) {
// Add saved revisions up to the new revision head
console.log(newPad.head);
- var newSavedRevisions = [];
- for(var i in oldPad.savedRevisions) {
- savedRev = oldPad.savedRevisions[i]
+ const newSavedRevisions = [];
+ for (const i in oldPad.savedRevisions) {
+ savedRev = oldPad.savedRevisions[i];
if (savedRev.revNum <= newRevHead) {
newSavedRevisions.push(savedRev);
- console.log("Added: Saved Revision: " + savedRev.revNum);
+ console.log(`Added: Saved Revision: ${savedRev.revNum}`);
}
}
newPad.savedRevisions = newSavedRevisions;
callback();
- }, function(callback) {
+ },
+ function (callback) {
// Save the source pad
- db.db.set("pad:"+newPadId, newPad, function(err) {
- console.log("Created: Source Pad: pad:" + newPadId);
- newPad.saveToDatabase();
- callback();
+ db.db.set(`pad:${newPadId}`, newPad, (err) => {
+ console.log(`Created: Source Pad: pad:${newPadId}`);
+ newPad.saveToDatabase().then(() => callback(), callback);
});
- }
-], function (err) {
- if(err) throw err;
- else {
- console.info("finished");
+ },
+], (err) => {
+ if (err) { throw err; } else {
+ console.info('finished');
process.exit(0);
}
});
diff --git a/bin/release.js b/bin/release.js
new file mode 100644
index 00000000000..b2c7c1a354a
--- /dev/null
+++ b/bin/release.js
@@ -0,0 +1,65 @@
+'use strict';
+const fs = require('fs');
+const child_process = require('child_process');
+const semver = require('../src/node_modules/semver');
+
+/*
+
+Usage
+
+node bin/release.js patch
+
+*/
+const usage = 'node bin/release.js [patch/minor/major] -- example: "node bin/release.js patch"';
+
+const release = process.argv[2];
+
+if(!release) {
+ console.log(usage);
+ throw new Error('No release type included');
+}
+
+const changelog = fs.readFileSync('CHANGELOG.md', {encoding: 'utf8', flag: 'r'});
+let packageJson = fs.readFileSync('./src/package.json', {encoding: 'utf8', flag: 'r'});
+packageJson = JSON.parse(packageJson);
+const currentVersion = packageJson.version;
+
+const newVersion = semver.inc(currentVersion, release);
+if(!newVersion) {
+ console.log(usage);
+ throw new Error('Unable to generate new version from input');
+}
+
+const changelogIncludesVersion = changelog.indexOf(newVersion) !== -1;
+
+if(!changelogIncludesVersion) {
+ throw new Error('No changelog record for ', newVersion, ' - please create changelog record');
+}
+
+console.log('Okay looks good, lets create the package.json and package-lock.json');
+
+packageJson.version = newVersion;
+
+fs.writeFileSync('src/package.json', JSON.stringify(packageJson, null, 2));
+
+// run npm version `release` where release is patch, minor or major
+child_process.execSync('npm install --package-lock-only', {cwd: `src/`});
+// run npm install --package-lock-only <-- required???
+
+child_process.execSync(`git checkout -b release/${newVersion}`);
+child_process.execSync(`git add src/package.json`);
+child_process.execSync(`git add src/package-lock.json`);
+child_process.execSync(`git commit -m 'bump version'`);
+child_process.execSync(`git push origin release/${newVersion}`);
+
+
+child_process.execSync(`make docs`);
+child_process.execSync(`git clone git@github.com:ether/ether.github.com.git`);
+child_process.execSync(`cp -R out/doc/ ether.github.com/doc/${newVersion}`);
+
+console.log('Once merged into master please run the following commands');
+console.log(`git tag -a ${newVersion} && git push origin master`);
+console.log(`cd ether.github.com && git add . && git commit -m ${newVersion} docs`);
+
+console.log('Once the new docs are uploaded then modify the download link on etherpad.org and then pull master onto develop');
+console.log('Finally go public with an announcement via our comms channels :)');
diff --git a/bin/repairPad.js b/bin/repairPad.js
index d495baef51b..8408e4b72fa 100644
--- a/bin/repairPad.js
+++ b/bin/repairPad.js
@@ -2,47 +2,47 @@
* This is a repair tool. It extracts all datas of a pad, removes and inserts them again.
*/
-console.warn("WARNING: This script must not be used while etherpad is running!");
+console.warn('WARNING: This script must not be used while etherpad is running!');
if (process.argv.length != 3) {
- console.error("Use: node bin/repairPad.js $PADID");
+ console.error('Use: node bin/repairPad.js $PADID');
process.exit(1);
}
// get the padID
-var padId = process.argv[2];
+const padId = process.argv[2];
-let npm = require("../src/node_modules/npm");
-npm.load({}, async function(er) {
+const npm = require('../src/node_modules/npm');
+npm.load({}, async (er) => {
if (er) {
- console.error("Could not load NPM: " + er)
+ console.error(`Could not load NPM: ${er}`);
process.exit(1);
}
try {
// intialize database
- let settings = require('../src/node/utils/Settings');
- let db = require('../src/node/db/DB');
+ const settings = require('../src/node/utils/Settings');
+ const db = require('../src/node/db/DB');
await db.init();
// get the pad
- let padManager = require('../src/node/db/PadManager');
- let pad = await padManager.getPad(padId);
+ const padManager = require('../src/node/db/PadManager');
+ const pad = await padManager.getPad(padId);
// accumulate the required keys
- let neededDBValues = ["pad:" + padId];
+ const neededDBValues = [`pad:${padId}`];
// add all authors
- neededDBValues.push(...pad.getAllAuthors().map(author => "globalAuthor:"));
+ neededDBValues.push(...pad.getAllAuthors().map((author) => 'globalAuthor:'));
// add all revisions
for (let rev = 0; rev <= pad.head; ++rev) {
- neededDBValues.push("pad:" + padId + ":revs:" + rev);
+ neededDBValues.push(`pad:${padId}:revs:${rev}`);
}
// add all chat values
for (let chat = 0; chat <= pad.chatHead; ++chat) {
- neededDBValues.push("pad:" + padId + ":chat:" + chat);
+ neededDBValues.push(`pad:${padId}:chat:${chat}`);
}
//
@@ -55,21 +55,20 @@ npm.load({}, async function(er) {
//
// See gitlab issue #3545
//
- console.info("aborting [gitlab #3545]");
+ console.info('aborting [gitlab #3545]');
process.exit(1);
// now fetch and reinsert every key
- neededDBValues.forEach(function(key, value) {
- console.log("Key: " + key+ ", value: " + value);
+ neededDBValues.forEach((key, value) => {
+ console.log(`Key: ${key}, value: ${value}`);
db.remove(key);
db.set(key, value);
});
- console.info("finished");
+ console.info('finished');
process.exit(0);
-
} catch (er) {
- if (er.name === "apierror") {
+ if (er.name === 'apierror') {
console.error(er);
} else {
console.trace(er);
diff --git a/bin/run.sh b/bin/run.sh
index ff6b3de093c..50bce4bdd55 100755
--- a/bin/run.sh
+++ b/bin/run.sh
@@ -1,13 +1,11 @@
#!/bin/sh
-pecho() { printf %s\\n "$*"; }
-log() { pecho "$@"; }
-error() { log "ERROR: $@" >&2; }
-fatal() { error "$@"; exit 1; }
-
# Move to the folder where ep-lite is installed
cd "$(dirname "$0")"/..
+# Source constants and useful functions
+. bin/functions.sh
+
ignoreRoot=0
for ARG in "$@"; do
if [ "$ARG" = "--root" ]; then
@@ -34,4 +32,4 @@ bin/installDeps.sh "$@" || exit 1
log "Starting Etherpad..."
SCRIPTPATH=$(pwd -P)
-exec node "$SCRIPTPATH/node_modules/ep_etherpad-lite/node/server.js" "$@"
+exec node $(compute_node_args) "$SCRIPTPATH/node_modules/ep_etherpad-lite/node/server.js" "$@"
diff --git a/doc/api/hooks_overview.md b/doc/api/hooks_overview.md
index 1de547c9009..35a88dbe1b2 100644
--- a/doc/api/hooks_overview.md
+++ b/doc/api/hooks_overview.md
@@ -4,48 +4,114 @@ A hook function is registered with a hook via the plugin's `ep.json` file. See
the Plugins section for details. A hook may have many registered functions from
different plugins.
-When a hook is invoked, its registered functions are called with three
-arguments:
+Some hooks call their registered functions one at a time until one of them
+returns a value. Others always call all of their registered functions and
+combine the results (if applicable).
-1. hookName - The name of the hook being invoked.
-2. context - An object with some relevant information about the context of the
+## Registered hook functions
+
+Note: The documentation in this section applies to every hook unless the
+hook-specific documentation says otherwise.
+
+### Arguments
+
+Hook functions are called with three arguments:
+
+1. `hookName` - The name of the hook being invoked.
+2. `context` - An object with some relevant information about the context of the
call. See the hook-specific documentation for details.
-3. callback - Function to call when done. This callback takes a single argument,
- the meaning of which depends on the hook. See the "Return values" section for
- general information that applies to most hooks. The value returned by this
- callback must be returned by the hook function unless otherwise specified.
+3. `cb` - For asynchronous operations this callback can be called to signal
+ completion and optionally provide a return value. The callback takes a single
+ argument, the meaning of which depends on the hook (see the "Return values"
+ section for general information that applies to most hooks). This callback
+ always returns `undefined`.
-## Return values
+### Expected behavior
-Note: This section applies to every hook unless the hook-specific documentation
-says otherwise.
+The presence of a callback parameter suggests that every hook function can run
+asynchronously. While that is the eventual goal, there are some legacy hooks
+that expect their hook functions to provide a value synchronously. For such
+hooks, the hook functions must do one of the following:
-Hook functions return zero or more values to Etherpad by passing an array to the
-provided callback. Hook functions typically provide a single value (array of
-length one). If the function does not want to or need to provide a value, it may
-pass an empty array or `undefined` (which is treated the same as an empty
-array). Hook functions may also provide more than one value (array of length two
-or more).
+* Call the callback with a non-Promise value (`undefined` is acceptable) and
+ return `undefined`, in that order.
+* Return a non-Promise value other than `undefined` (`null` is acceptable) and
+ never call the callback. Note that `async` functions *always* return a
+ Promise, so they must never be used for synchronous hooks.
+* Only have two parameters (`hookName` and `context`) and return any non-Promise
+ value (`undefined` is acceptable).
-Some hooks concatenate the arrays provided by its registered functions. For
-example, if a hook's registered functions pass `[1, 2]`, `undefined`, `[3, 4]`,
-`[]`, and `[5]` to the provided callback, then the hook's return value is `[1,
-2, 3, 4, 5]`.
+For hooks that permit asynchronous behavior, the hook functions must do one or
+more of the following:
-Other hooks only use the first non-empty array provided by a registered
-function. In this case, each of the hook's registered functions is called one at
-a time until one provides a non-empty array. The remaining functions are
-skipped. If none of the functions provide a non-empty array, or there are no
-registered functions, the hook's return value is `[]`.
+* Return `undefined` and call the callback, in either order.
+* Return something other than `undefined` (`null` is acceptable) and never call
+ the callback. Note that `async` functions *always* return a Promise, so they
+ must never call the callback.
+* Only have two parameters (`hookName` and `context`).
-Example:
+Note that the acceptable behaviors for asynchronous hook functions are a
+superset of the acceptable behaviors for synchronous hook functions.
-```
-exports.abstractHook = (hookName, context, callback) => {
- if (notApplicableToThisPlugin(context)) {
- return callback();
- }
- const value = doSomeProcessing(context);
- return callback([value]);
+WARNING: The number of parameters is determined by examining
+[Function.length](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/length),
+which does not count [default
+parameters](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Functions/Default_parameters)
+or ["rest"
+parameters](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Functions/rest_parameters).
+To avoid problems, do not use default or rest parameters when defining hook
+functions.
+
+### Return values
+
+A hook function can provide a value to Etherpad in one of the following ways:
+
+* Pass the desired value as the first argument to the callback.
+* Return the desired value directly. The value must not be `undefined` unless
+ the hook function only has two parameters. (Hook functions with three
+ parameters that want to provide `undefined` should instead use the callback.)
+* For hooks that permit asynchronous behavior, return a Promise that resolves to
+ the desired value.
+* For hooks that permit asynchronous behavior, pass a Promise that resolves to
+ the desired value as the first argument to the callback.
+
+Examples:
+
+```javascript
+exports.exampleOne = (hookName, context, callback) => {
+ return 'valueOne';
+};
+
+exports.exampleTwo = (hookName, context, callback) => {
+ callback('valueTwo');
+ return;
+};
+
+// ONLY FOR HOOKS THAT PERMIT ASYNCHRONOUS BEHAVIOR
+exports.exampleThree = (hookName, context, callback) => {
+  return Promise.resolve('valueThree');
+};
+
+// ONLY FOR HOOKS THAT PERMIT ASYNCHRONOUS BEHAVIOR
+exports.exampleFour = (hookName, context, callback) => {
+  callback(Promise.resolve('valueFour'));
+ return;
+};
+
+// ONLY FOR HOOKS THAT PERMIT ASYNCHRONOUS BEHAVIOR
+exports.exampleFive = async (hookName, context) => {
+ // Note that this function is async, so it actually returns a Promise that
+ // is resolved to 'valueFive'.
+ return 'valueFive';
};
```
+
+Etherpad collects the values provided by the hook functions into an array,
+filters out all `undefined` values, then flattens the array one level.
+Flattening one level makes it possible for a hook function to behave as if it
+were multiple separate hook functions.
+
+For example: Suppose a hook has eight registered functions that return the
+following values: `1`, `[2]`, `['3a', '3b']`, `[[4]]`, `undefined`,
+`[undefined]`, `[]`, and `null`. The value returned to the caller of the hook is
+`[1, 2, '3a', '3b', [4], undefined, null]`.
diff --git a/doc/api/hooks_server-side.md b/doc/api/hooks_server-side.md
index b4ef1e5250a..e13adfa97f5 100644
--- a/doc/api/hooks_server-side.md
+++ b/doc/api/hooks_server-side.md
@@ -10,6 +10,28 @@ Things in context:
Use this hook to receive the global settings in your plugin.
+## shutdown
+Called from: src/node/server.js
+
+Things in context: None
+
+This hook runs before shutdown. Use it to stop timers, close sockets and files,
+flush buffers, etc. The database is not available while this hook is running.
+The shutdown function must not block for long because there is a short timeout
+before the process is forcibly terminated.
+
+The shutdown function must return a Promise, which must resolve to `undefined`.
+Returning `callback(value)` will return a Promise that is resolved to `value`.
+
+Example:
+
+```
+// using an async function
+exports.shutdown = async (hookName, context) => {
+ await flushBuffers();
+};
+```
+
## pluginUninstall
Called from: src/static/js/pluginfw/installer.js
@@ -54,6 +76,25 @@ Things in context:
This hook gets called after the application object has been created, but before it starts listening. This is similar to the expressConfigure hook, but it's not guaranteed that the application object will have all relevant configuration variables.
+## expressCloseServer
+
+Called from: src/node/hooks/express.js
+
+Things in context: Nothing
+
+This hook is called when the HTTP server is closing, which happens during
+shutdown (see the shutdown hook) and when the server restarts (e.g., when a
+plugin is installed via the `/admin/plugins` page). The HTTP server may or may
+not already be closed when this hook executes.
+
+Example:
+
+```
+exports.expressCloseServer = async () => {
+ await doSomeCleanup();
+};
+```
+
## eejsBlock_``
Called from: src/node/eejs/index.js
@@ -96,7 +137,6 @@ Available blocks in `pad.html` are:
* `indexCustomStyles` - contains the `index.css` `` tag, allows you to add your own or to customize the one provided by the active skin
* `indexWrapper` - contains the form for creating new pads
* `indexCustomScripts` - contains the `index.js` `");
+ res.send(``);
});
-}
+};
diff --git a/src/node/handler/PadMessageHandler.js b/src/node/handler/PadMessageHandler.js
index 6f3cab8a51d..279b08dfaba 100644
--- a/src/node/handler/PadMessageHandler.js
+++ b/src/node/handler/PadMessageHandler.js
@@ -18,31 +18,32 @@
* limitations under the License.
*/
-
-var padManager = require("../db/PadManager");
-var Changeset = require("ep_etherpad-lite/static/js/Changeset");
-var AttributePool = require("ep_etherpad-lite/static/js/AttributePool");
-var AttributeManager = require("ep_etherpad-lite/static/js/AttributeManager");
-var authorManager = require("../db/AuthorManager");
-var readOnlyManager = require("../db/ReadOnlyManager");
-var settings = require('../utils/Settings');
-var securityManager = require("../db/SecurityManager");
-var plugins = require("ep_etherpad-lite/static/js/pluginfw/plugin_defs.js");
-var log4js = require('log4js');
-var messageLogger = log4js.getLogger("message");
-var accessLogger = log4js.getLogger("access");
-var _ = require('underscore');
-var hooks = require("ep_etherpad-lite/static/js/pluginfw/hooks.js");
-var channels = require("channels");
-var stats = require('../stats');
-var remoteAddress = require("../utils/RemoteAddress").remoteAddress;
+/* global exports, process, require */
+
+const padManager = require('../db/PadManager');
+const Changeset = require('ep_etherpad-lite/static/js/Changeset');
+const AttributePool = require('ep_etherpad-lite/static/js/AttributePool');
+const AttributeManager = require('ep_etherpad-lite/static/js/AttributeManager');
+const authorManager = require('../db/AuthorManager');
+const readOnlyManager = require('../db/ReadOnlyManager');
+const settings = require('../utils/Settings');
+const securityManager = require('../db/SecurityManager');
+const plugins = require('ep_etherpad-lite/static/js/pluginfw/plugin_defs.js');
+const log4js = require('log4js');
+const messageLogger = log4js.getLogger('message');
+const accessLogger = log4js.getLogger('access');
+const _ = require('underscore');
+const hooks = require('ep_etherpad-lite/static/js/pluginfw/hooks.js');
+const channels = require('channels');
+const stats = require('../stats');
const assert = require('assert').strict;
-const nodeify = require("nodeify");
-const { RateLimiterMemory } = require('rate-limiter-flexible');
+const nodeify = require('nodeify');
+const {RateLimiterMemory} = require('rate-limiter-flexible');
+const webaccess = require('../hooks/express/webaccess');
const rateLimiter = new RateLimiterMemory({
points: settings.commitRateLimiting.points,
- duration: settings.commitRateLimiting.duration
+ duration: settings.commitRateLimiting.duration,
});
/**
@@ -53,22 +54,18 @@ const rateLimiter = new RateLimiterMemory({
* readonlyPadId = The readonly pad id of the pad
* readonly = Wether the client has only read access (true) or read/write access (false)
* rev = That last revision that was send to this client
- * author = the author name of this session
+ * author = the author ID used for this session
*/
-var sessioninfos = {};
+const sessioninfos = {};
exports.sessioninfos = sessioninfos;
// Measure total amount of users
-stats.gauge('totalUsers', function() {
- return Object.keys(socketio.sockets.sockets).length;
-});
+stats.gauge('totalUsers', () => Object.keys(socketio.sockets.sockets).length);
/**
* A changeset queue per pad that is processed by handleUserChanges()
*/
-var padChannels = new channels.channels(function(data, callback) {
- return nodeify(handleUserChanges(data), callback);
-});
+const padChannels = new channels.channels(({socket, message}, callback) => nodeify(handleUserChanges(socket, message), callback));
/**
* Saves the Socket class we need to send and receive data from the client
@@ -79,107 +76,96 @@ let socketio;
* This Method is called by server.js to tell the message handler on which socket it should send
* @param socket_io The Socket
*/
-exports.setSocketIO = function(socket_io)
-{
- socketio=socket_io;
-}
+exports.setSocketIO = function (socket_io) {
+ socketio = socket_io;
+};
/**
* Handles the connection of a new user
- * @param client the new client
+ * @param socket the socket.io Socket object for the new connection from the client
*/
-exports.handleConnect = function(client)
-{
+exports.handleConnect = (socket) => {
stats.meter('connects').mark();
// Initalize sessioninfos for this new session
- sessioninfos[client.id]={};
-}
+ sessioninfos[socket.id] = {};
+};
/**
* Kicks all sessions from a pad
- * @param client the new client
*/
-exports.kickSessionsFromPad = function(padID)
-{
- if(typeof socketio.sockets['clients'] !== 'function')
- return;
+exports.kickSessionsFromPad = function (padID) {
+ if (typeof socketio.sockets.clients !== 'function') return;
// skip if there is nobody on this pad
- if(_getRoomClients(padID).length === 0)
- return;
+ if (_getRoomSockets(padID).length === 0) return;
// disconnect everyone from this pad
- socketio.sockets.in(padID).json.send({disconnect:"deleted"});
-}
+ socketio.sockets.in(padID).json.send({disconnect: 'deleted'});
+};
/**
* Handles the disconnection of a user
- * @param client the client that leaves
+ * @param socket the socket.io Socket object for the client
*/
-exports.handleDisconnect = async function(client)
-{
+exports.handleDisconnect = async (socket) => {
stats.meter('disconnects').mark();
// save the padname of this session
- let session = sessioninfos[client.id];
+ const session = sessioninfos[socket.id];
// if this connection was already etablished with a handshake, send a disconnect message to the others
if (session && session.author) {
- // Get the IP address from our persistant object
- let ip = remoteAddress[client.id];
-
- // Anonymize the IP address if IP logging is disabled
- if (settings.disableIPlogging) {
- ip = 'ANONYMOUS';
- }
-
- accessLogger.info('[LEAVE] Pad "' + session.padId + '": Author "' + session.author + '" on client ' + client.id + ' with IP "' + ip + '" left the pad');
+ const {session: {user} = {}} = socket.client.request;
+ accessLogger.info(`${'[LEAVE]' +
+ ` pad:${session.padId}` +
+ ` socket:${socket.id}` +
+ ` IP:${settings.disableIPlogging ? 'ANONYMOUS' : socket.request.ip}` +
+ ` authorID:${session.author}`}${
+ (user && user.username) ? ` username:${user.username}` : ''}`);
// get the author color out of the db
- let color = await authorManager.getAuthorColorId(session.author);
+ const color = await authorManager.getAuthorColorId(session.author);
// prepare the notification for the other users on the pad, that this user left
- let messageToTheOtherUsers = {
- "type": "COLLABROOM",
- "data": {
- type: "USER_LEAVE",
+ const messageToTheOtherUsers = {
+ type: 'COLLABROOM',
+ data: {
+ type: 'USER_LEAVE',
userInfo: {
- "ip": "127.0.0.1",
- "colorId": color,
- "userAgent": "Anonymous",
- "userId": session.author
- }
- }
+ colorId: color,
+ userId: session.author,
+ },
+ },
};
// Go through all user that are still on the pad, and send them the USER_LEAVE message
- client.broadcast.to(session.padId).json.send(messageToTheOtherUsers);
+ socket.broadcast.to(session.padId).json.send(messageToTheOtherUsers);
// Allow plugins to hook into users leaving the pad
- hooks.callAll("userLeave", session);
+ hooks.callAll('userLeave', session);
}
// Delete the sessioninfos entrys of this session
- delete sessioninfos[client.id];
-}
+ delete sessioninfos[socket.id];
+};
/**
* Handles a message from a user
- * @param client the client that send this message
+ * @param socket the socket.io Socket object for the client
* @param message the message from the client
*/
-exports.handleMessage = async function(client, message)
-{
- var env = process.env.NODE_ENV || 'development';
+exports.handleMessage = async (socket, message) => {
+ const env = process.env.NODE_ENV || 'development';
if (env === 'production') {
try {
- await rateLimiter.consume(client.handshake.address); // consume 1 point per event from IP
- }catch(e){
- console.warn("Rate limited: ", client.handshake.address, " to reduce the amount of rate limiting that happens edit the rateLimit values in settings.json");
+ await rateLimiter.consume(socket.request.ip); // consume 1 point per event from IP
+ } catch (e) {
+ console.warn(`Rate limited: ${socket.request.ip} to reduce the amount of rate limiting ` +
+ 'that happens edit the rateLimit values in settings.json');
stats.meter('rateLimited').mark();
- client.json.send({disconnect:"rateLimited"});
+ socket.json.send({disconnect: 'rateLimited'});
return;
}
}
@@ -192,109 +178,115 @@ exports.handleMessage = async function(client, message)
return;
}
- let thisSession = sessioninfos[client.id];
+ const thisSession = sessioninfos[socket.id];
if (!thisSession) {
- messageLogger.warn("Dropped message from an unknown connection.")
+ messageLogger.warn('Dropped message from an unknown connection.');
return;
}
- // Allow plugins to bypass the readonly message blocker
- if ((await hooks.aCallAll('handleMessageSecurity', {client, message})).some((w) => w === true)) {
- thisSession.readonly = false;
- }
-
- // Call handleMessage hook. If a plugin returns null, the message will be dropped. Note that for
- // all messages handleMessage will be called, even if the client is not authorized
- if ((await hooks.aCallAll('handleMessage', {client, message})).some((m) => m === null)) {
- return;
- }
-
- if (message.type === "CLIENT_READY") {
+ if (message.type === 'CLIENT_READY') {
// client tried to auth for the first time (first msg from the client)
- createSessionInfoAuth(client, message);
+ createSessionInfoAuth(thisSession, message);
}
- // the session may have been dropped during earlier processing
- if (!sessioninfos[client.id]) {
- messageLogger.warn("Dropping message from a connection that has gone away.")
+ const auth = thisSession.auth;
+ if (!auth) {
+ console.error('Auth was never applied to a session. If you are using the stress-test tool then restart Etherpad and the Stress test tool.');
return;
}
- // Simulate using the load testing tool
- if (!sessioninfos[client.id].auth) {
- console.error("Auth was never applied to a session. If you are using the stress-test tool then restart Etherpad and the Stress test tool.")
- return;
- }
-
- let auth = sessioninfos[client.id].auth;
-
// check if pad is requested via readOnly
let padId = auth.padID;
- if (padId.indexOf("r.") === 0) {
+ if (padId.indexOf('r.') === 0) {
// Pad is readOnly, first get the real Pad ID
padId = await readOnlyManager.getPadId(padId);
}
- const {session: {user} = {}} = client.client.request;
- const {accessStatus} =
- await securityManager.checkAccess(padId, auth.sessionID, auth.token, auth.password, user);
+ const {session: {user} = {}} = socket.client.request;
+ const {accessStatus, authorID} =
+ await securityManager.checkAccess(padId, auth.sessionID, auth.token, user);
+ if (accessStatus !== 'grant') {
+ // Access denied. Send the reason to the user.
+ socket.json.send({accessStatus});
+ return;
+ }
+ if (thisSession.author != null && thisSession.author !== authorID) {
+ messageLogger.warn(
+ `${'Rejecting message from client because the author ID changed mid-session.' +
+ ' Bad or missing token or sessionID?' +
+ ` socket:${socket.id}` +
+ ` IP:${settings.disableIPlogging ? 'ANONYMOUS' : socket.request.ip}` +
+ ` originalAuthorID:${thisSession.author}` +
+ ` newAuthorID:${authorID}`}${
+ (user && user.username) ? ` username:${user.username}` : ''
+ } message:${message}`);
+ socket.json.send({disconnect: 'rejected'});
+ return;
+ }
+ thisSession.author = authorID;
- if (accessStatus !== "grant") {
- // no access, send the client a message that tells him why
- client.json.send({ accessStatus });
+ // Allow plugins to bypass the readonly message blocker
+ const context = {message, socket, client: socket}; // `client` for backwards compatibility.
+ if ((await hooks.aCallAll('handleMessageSecurity', context)).some((w) => w === true)) {
+ thisSession.readonly = false;
+ }
+
+ // Call handleMessage hook. If a plugin returns null, the message will be dropped.
+ if ((await hooks.aCallAll('handleMessage', context)).some((m) => m === null)) {
return;
}
- // access was granted
+ // Drop the message if the client disconnected during the above processing.
+ if (sessioninfos[socket.id] !== thisSession) {
+ messageLogger.warn('Dropping message from a connection that has gone away.');
+ return;
+ }
// Check what type of message we get and delegate to the other methods
- if (message.type === "CLIENT_READY") {
- handleClientReady(client, message);
- } else if (message.type === "CHANGESET_REQ") {
- handleChangesetRequest(client, message);
- } else if(message.type === "COLLABROOM") {
+ if (message.type === 'CLIENT_READY') {
+ await handleClientReady(socket, message, authorID);
+ } else if (message.type === 'CHANGESET_REQ') {
+ await handleChangesetRequest(socket, message);
+ } else if (message.type === 'COLLABROOM') {
if (thisSession.readonly) {
- messageLogger.warn("Dropped message, COLLABROOM for readonly pad");
- } else if (message.data.type === "USER_CHANGES") {
- stats.counter('pendingEdits').inc()
- padChannels.emit(message.padId, {client: client, message: message}); // add to pad queue
- } else if (message.data.type === "USERINFO_UPDATE") {
- handleUserInfoUpdate(client, message);
- } else if (message.data.type === "CHAT_MESSAGE") {
- handleChatMessage(client, message);
- } else if (message.data.type === "GET_CHAT_MESSAGES") {
- handleGetChatMessages(client, message);
- } else if (message.data.type === "SAVE_REVISION") {
- handleSaveRevisionMessage(client, message);
- } else if (message.data.type === "CLIENT_MESSAGE" &&
+ messageLogger.warn('Dropped message, COLLABROOM for readonly pad');
+ } else if (message.data.type === 'USER_CHANGES') {
+ stats.counter('pendingEdits').inc();
+ padChannels.emit(message.padId, {socket, message}); // add to pad queue
+ } else if (message.data.type === 'USERINFO_UPDATE') {
+ await handleUserInfoUpdate(socket, message);
+ } else if (message.data.type === 'CHAT_MESSAGE') {
+ await handleChatMessage(socket, message);
+ } else if (message.data.type === 'GET_CHAT_MESSAGES') {
+ await handleGetChatMessages(socket, message);
+ } else if (message.data.type === 'SAVE_REVISION') {
+ await handleSaveRevisionMessage(socket, message);
+ } else if (message.data.type === 'CLIENT_MESSAGE' &&
message.data.payload != null &&
- message.data.payload.type === "suggestUserName") {
- handleSuggestUserName(client, message);
+ message.data.payload.type === 'suggestUserName') {
+ handleSuggestUserName(socket, message);
} else {
- messageLogger.warn("Dropped message, unknown COLLABROOM Data Type " + message.data.type);
+ messageLogger.warn(`Dropped message, unknown COLLABROOM Data Type ${message.data.type}`);
}
- } else if(message.type === "SWITCH_TO_PAD") {
- handleSwitchToPad(client, message);
+ } else if (message.type === 'SWITCH_TO_PAD') {
+ await handleSwitchToPad(socket, message, authorID);
} else {
- messageLogger.warn("Dropped message, unknown Message Type " + message.type);
+ messageLogger.warn(`Dropped message, unknown Message Type ${message.type}`);
}
-}
+};
/**
* Handles a save revision message
- * @param client the client that send this message
+ * @param socket the socket.io Socket object for the client
* @param message the message from the client
*/
-async function handleSaveRevisionMessage(client, message)
-{
- var padId = sessioninfos[client.id].padId;
- var userId = sessioninfos[client.id].author;
-
- let pad = await padManager.getPad(padId);
- pad.addSavedRevision(pad.head, userId);
+async function handleSaveRevisionMessage(socket, message) {
+ const {padId, author: authorId} = sessioninfos[socket.id];
+ const pad = await padManager.getPad(padId);
+ await pad.addSavedRevision(pad.head, authorId);
}
/**
@@ -304,9 +296,9 @@ async function handleSaveRevisionMessage(client, message)
* @param msg {Object} the message we're sending
* @param sessionID {string} the socketIO session to which we're sending this message
*/
-exports.handleCustomObjectMessage = function(msg, sessionID) {
- if (msg.data.type === "CUSTOM") {
- if (sessionID){
+exports.handleCustomObjectMessage = function (msg, sessionID) {
+ if (msg.data.type === 'CUSTOM') {
+ if (sessionID) {
// a sessionID is targeted: directly to this sessionID
socketio.sockets.socket(sessionID).json.send(msg);
} else {
@@ -314,7 +306,7 @@ exports.handleCustomObjectMessage = function(msg, sessionID) {
socketio.sockets.in(msg.data.payload.padId).json.send(msg);
}
}
-}
+};
/**
* Handles a custom message (sent via HTTP API request)
@@ -322,31 +314,28 @@ exports.handleCustomObjectMessage = function(msg, sessionID) {
* @param padID {Pad} the pad to which we're sending this message
* @param msgString {String} the message we're sending
*/
-exports.handleCustomMessage = function(padID, msgString) {
- let time = Date.now();
- let msg = {
+exports.handleCustomMessage = function (padID, msgString) {
+ const time = Date.now();
+ const msg = {
type: 'COLLABROOM',
data: {
type: msgString,
- time: time
- }
+ time,
+ },
};
socketio.sockets.in(padID).json.send(msg);
-}
+};
/**
* Handles a Chat Message
- * @param client the client that send this message
+ * @param socket the socket.io Socket object for the client
* @param message the message from the client
*/
-function handleChatMessage(client, message)
-{
- var time = Date.now();
- var userId = sessioninfos[client.id].author;
- var text = message.data.text;
- var padId = sessioninfos[client.id].padId;
-
- exports.sendChatMessageToPadClients(time, userId, text, padId);
+async function handleChatMessage(socket, message) {
+ const time = Date.now();
+ const text = message.data.text;
+ const {padId, author: authorId} = sessioninfos[socket.id];
+ await exports.sendChatMessageToPadClients(time, authorId, text, padId);
}
/**
@@ -356,157 +345,157 @@ function handleChatMessage(client, message)
* @param text the text of the chat message
* @param padId the padId to send the chat message to
*/
-exports.sendChatMessageToPadClients = async function(time, userId, text, padId)
-{
+exports.sendChatMessageToPadClients = async function (time, userId, text, padId) {
// get the pad
- let pad = await padManager.getPad(padId);
+ const pad = await padManager.getPad(padId);
// get the author
- let userName = await authorManager.getAuthorName(userId);
+ const userName = await authorManager.getAuthorName(userId);
// save the chat message
- pad.appendChatMessage(text, userId, time);
+ const promise = pad.appendChatMessage(text, userId, time);
- let msg = {
- type: "COLLABROOM",
- data: { type: "CHAT_MESSAGE", userId, userName, time, text }
+ const msg = {
+ type: 'COLLABROOM',
+ data: {type: 'CHAT_MESSAGE', userId, userName, time, text},
};
// broadcast the chat message to everyone on the pad
socketio.sockets.in(padId).json.send(msg);
-}
+
+ await promise;
+};
/**
* Handles the clients request for more chat-messages
- * @param client the client that send this message
+ * @param socket the socket.io Socket object for the client
* @param message the message from the client
*/
-async function handleGetChatMessages(client, message)
-{
+async function handleGetChatMessages(socket, message) {
if (message.data.start == null) {
- messageLogger.warn("Dropped message, GetChatMessages Message has no start!");
+ messageLogger.warn('Dropped message, GetChatMessages Message has no start!');
return;
}
if (message.data.end == null) {
- messageLogger.warn("Dropped message, GetChatMessages Message has no start!");
+    messageLogger.warn('Dropped message, GetChatMessages Message has no end!');
return;
}
- let start = message.data.start;
- let end = message.data.end;
- let count = end - start;
+ const start = message.data.start;
+ const end = message.data.end;
+ const count = end - start;
if (count < 0 || count > 100) {
- messageLogger.warn("Dropped message, GetChatMessages Message, client requested invalid amount of messages!");
+ messageLogger.warn('Dropped message, GetChatMessages Message, client requested invalid amount of messages!');
return;
}
- let padId = sessioninfos[client.id].padId;
- let pad = await padManager.getPad(padId);
+ const padId = sessioninfos[socket.id].padId;
+ const pad = await padManager.getPad(padId);
- let chatMessages = await pad.getChatMessages(start, end);
- let infoMsg = {
- type: "COLLABROOM",
+ const chatMessages = await pad.getChatMessages(start, end);
+ const infoMsg = {
+ type: 'COLLABROOM',
data: {
- type: "CHAT_MESSAGES",
- messages: chatMessages
- }
+ type: 'CHAT_MESSAGES',
+ messages: chatMessages,
+ },
};
// send the messages back to the client
- client.json.send(infoMsg);
+ socket.json.send(infoMsg);
}
/**
* Handles a handleSuggestUserName, that means a user have suggest a userName for a other user
- * @param client the client that send this message
+ * @param socket the socket.io Socket object for the client
* @param message the message from the client
*/
-function handleSuggestUserName(client, message)
-{
+function handleSuggestUserName(socket, message) {
// check if all ok
if (message.data.payload.newName == null) {
- messageLogger.warn("Dropped message, suggestUserName Message has no newName!");
+ messageLogger.warn('Dropped message, suggestUserName Message has no newName!');
return;
}
if (message.data.payload.unnamedId == null) {
- messageLogger.warn("Dropped message, suggestUserName Message has no unnamedId!");
+ messageLogger.warn('Dropped message, suggestUserName Message has no unnamedId!');
return;
}
- var padId = sessioninfos[client.id].padId;
- var roomClients = _getRoomClients(padId);
+ const padId = sessioninfos[socket.id].padId;
// search the author and send him this message
- roomClients.forEach(function(client) {
- var session = sessioninfos[client.id];
+ _getRoomSockets(padId).forEach((socket) => {
+ const session = sessioninfos[socket.id];
if (session && session.author === message.data.payload.unnamedId) {
- client.json.send(message);
+ socket.json.send(message);
}
});
}
/**
* Handles a USERINFO_UPDATE, that means that a user have changed his color or name. Anyway, we get both informations
- * @param client the client that send this message
+ * @param socket the socket.io Socket object for the client
* @param message the message from the client
*/
-function handleUserInfoUpdate(client, message)
-{
+async function handleUserInfoUpdate(socket, message) {
// check if all ok
if (message.data.userInfo == null) {
- messageLogger.warn("Dropped message, USERINFO_UPDATE Message has no userInfo!");
+ messageLogger.warn('Dropped message, USERINFO_UPDATE Message has no userInfo!');
return;
}
if (message.data.userInfo.colorId == null) {
- messageLogger.warn("Dropped message, USERINFO_UPDATE Message has no colorId!");
+ messageLogger.warn('Dropped message, USERINFO_UPDATE Message has no colorId!');
return;
}
// Check that we have a valid session and author to update.
- var session = sessioninfos[client.id];
+ const session = sessioninfos[socket.id];
if (!session || !session.author || !session.padId) {
- messageLogger.warn("Dropped message, USERINFO_UPDATE Session not ready." + message.data);
+ messageLogger.warn(`Dropped message, USERINFO_UPDATE Session not ready.${message.data}`);
return;
}
// Find out the author name of this session
- var author = session.author;
+ const author = session.author;
// Check colorId is a Hex color
- var isColor = /(^#[0-9A-F]{6}$)|(^#[0-9A-F]{3}$)/i.test(message.data.userInfo.colorId) // for #f00 (Thanks Smamatti)
+ const isColor = /(^#[0-9A-F]{6}$)|(^#[0-9A-F]{3}$)/i.test(message.data.userInfo.colorId); // for #f00 (Thanks Smamatti)
if (!isColor) {
- messageLogger.warn("Dropped message, USERINFO_UPDATE Color is malformed." + message.data);
+ messageLogger.warn(`Dropped message, USERINFO_UPDATE Color is malformed.${message.data}`);
return;
}
// Tell the authorManager about the new attributes
- authorManager.setAuthorColorId(author, message.data.userInfo.colorId);
- authorManager.setAuthorName(author, message.data.userInfo.name);
+ const p = Promise.all([
+ authorManager.setAuthorColorId(author, message.data.userInfo.colorId),
+ authorManager.setAuthorName(author, message.data.userInfo.name),
+ ]);
- var padId = session.padId;
+ const padId = session.padId;
- var infoMsg = {
- type: "COLLABROOM",
+ const infoMsg = {
+ type: 'COLLABROOM',
data: {
// The Client doesn't know about USERINFO_UPDATE, use USER_NEWINFO
- type: "USER_NEWINFO",
+ type: 'USER_NEWINFO',
userInfo: {
userId: author,
// set a null name, when there is no name set. cause the client wants it null
name: message.data.userInfo.name || null,
colorId: message.data.userInfo.colorId,
- userAgent: "Anonymous",
- ip: "127.0.0.1",
- }
- }
+ },
+ },
};
// Send the other clients on the pad the update message
- client.broadcast.to(padId).json.send(infoMsg);
+ socket.broadcast.to(padId).json.send(infoMsg);
+
+ // Block until the authorManager has stored the new attributes.
+ await p;
}
/**
@@ -520,55 +509,51 @@ function handleUserInfoUpdate(client, message)
* This function is based on a similar one in the original Etherpad.
* See https://github.com/ether/pad/blob/master/etherpad/src/etherpad/collab/collab_server.js in the function applyUserChanges()
*
- * @param client the client that send this message
+ * @param socket the socket.io Socket object for the client
* @param message the message from the client
*/
-async function handleUserChanges(data)
-{
- var client = data.client
- , message = data.message
-
+async function handleUserChanges(socket, message) {
// This one's no longer pending, as we're gonna process it now
- stats.counter('pendingEdits').dec()
+ stats.counter('pendingEdits').dec();
// Make sure all required fields are present
if (message.data.baseRev == null) {
- messageLogger.warn("Dropped message, USER_CHANGES Message has no baseRev!");
+ messageLogger.warn('Dropped message, USER_CHANGES Message has no baseRev!');
return;
}
if (message.data.apool == null) {
- messageLogger.warn("Dropped message, USER_CHANGES Message has no apool!");
+ messageLogger.warn('Dropped message, USER_CHANGES Message has no apool!');
return;
}
if (message.data.changeset == null) {
- messageLogger.warn("Dropped message, USER_CHANGES Message has no changeset!");
+ messageLogger.warn('Dropped message, USER_CHANGES Message has no changeset!');
return;
}
+ // The client might disconnect between our callbacks. We should still
+ // finish processing the changeset, so keep a reference to the session.
+ const thisSession = sessioninfos[socket.id];
+
// TODO: this might happen with other messages too => find one place to copy the session
// and always use the copy. atm a message will be ignored if the session is gone even
// if the session was valid when the message arrived in the first place
- if (!sessioninfos[client.id]) {
- messageLogger.warn("Dropped message, disconnect happened in the mean time");
+ if (!thisSession) {
+ messageLogger.warn('Dropped message, disconnect happened in the mean time');
return;
}
// get all Vars we need
- var baseRev = message.data.baseRev;
- var wireApool = (new AttributePool()).fromJsonable(message.data.apool);
- var changeset = message.data.changeset;
-
- // The client might disconnect between our callbacks. We should still
- // finish processing the changeset, so keep a reference to the session.
- var thisSession = sessioninfos[client.id];
+ const baseRev = message.data.baseRev;
+ const wireApool = (new AttributePool()).fromJsonable(message.data.apool);
+ let changeset = message.data.changeset;
// Measure time to process edit
- var stopWatch = stats.timer('edits').start();
+ const stopWatch = stats.timer('edits').start();
// get the pad
- let pad = await padManager.getPad(thisSession.padId);
+ const pad = await padManager.getPad(thisSession.padId);
// create the changeset
try {
@@ -578,24 +563,24 @@ async function handleUserChanges(data)
// Verify that the attribute indexes used in the changeset are all
// defined in the accompanying attribute pool.
- Changeset.eachAttribNumber(changeset, function(n) {
+ Changeset.eachAttribNumber(changeset, (n) => {
if (!wireApool.getAttrib(n)) {
- throw new Error("Attribute pool is missing attribute " + n + " for changeset " + changeset);
+ throw new Error(`Attribute pool is missing attribute ${n} for changeset ${changeset}`);
}
});
// Validate all added 'author' attribs to be the same value as the current user
- var iterator = Changeset.opIterator(Changeset.unpack(changeset).ops)
- , op;
+ const iterator = Changeset.opIterator(Changeset.unpack(changeset).ops);
+ let op;
while (iterator.hasNext()) {
- op = iterator.next()
+ op = iterator.next();
// + can add text with attribs
// = can change or add attribs
// - can have attribs, but they are discarded and don't show up in the attribs - but do show up in the pool
- op.attribs.split('*').forEach(function(attr) {
+ op.attribs.split('*').forEach((attr) => {
if (!attr) return;
attr = wireApool.getAttrib(attr);
@@ -603,7 +588,7 @@ async function handleUserChanges(data)
// the empty author is used in the clearAuthorship functionality so this should be the only exception
if ('author' === attr[0] && (attr[1] !== thisSession.author && attr[1] !== '')) {
- throw new Error("Trying to submit changes as another author in changeset " + changeset);
+ throw new Error(`Author ${thisSession.author} tried to submit changes as author ${attr[1]} in changeset ${changeset}`);
}
});
}
@@ -612,16 +597,15 @@ async function handleUserChanges(data)
// Afaik, it copies the new attributes from the changeset, to the global Attribute Pool
changeset = Changeset.moveOpsToNewPool(changeset, wireApool, pad.pool);
-
- } catch(e) {
+ } catch (e) {
// There is an error in this changeset, so just refuse it
- client.json.send({ disconnect: "badChangeset" });
+ socket.json.send({disconnect: 'badChangeset'});
stats.meter('failedChangesets').mark();
- throw new Error("Can't apply USER_CHANGES, because " + e.message);
+ throw new Error(`Can't apply USER_CHANGES from Socket ${socket.id} because: ${e.message}`);
}
// ex. applyUserChanges
- let apool = pad.pool;
+ const apool = pad.pool;
let r = baseRev;
// The client's changeset might not be based on the latest revision,
@@ -630,7 +614,7 @@ async function handleUserChanges(data)
while (r < pad.getHeadRevisionNumber()) {
r++;
- let c = await pad.getRevisionChangeset(r);
+ const c = await pad.getRevisionChangeset(r);
// At this point, both "c" (from the pad) and "changeset" (from the
// client) are relative to revision r - 1. The follow function
@@ -642,44 +626,44 @@ async function handleUserChanges(data)
// prevent eplite from accepting it TODO: better send the client a NEW_CHANGES
// of that revision
if (baseRev + 1 === r && c === changeset) {
- client.json.send({disconnect:"badChangeset"});
+ socket.json.send({disconnect: 'badChangeset'});
stats.meter('failedChangesets').mark();
throw new Error("Won't apply USER_CHANGES, because it contains an already accepted changeset");
}
changeset = Changeset.follow(c, changeset, false, apool);
- } catch(e) {
- client.json.send({disconnect:"badChangeset"});
+ } catch (e) {
+ socket.json.send({disconnect: 'badChangeset'});
stats.meter('failedChangesets').mark();
- throw new Error("Can't apply USER_CHANGES, because " + e.message);
+ throw new Error(`Can't apply USER_CHANGES, because ${e.message}`);
}
}
- let prevText = pad.text();
+ const prevText = pad.text();
if (Changeset.oldLen(changeset) !== prevText.length) {
- client.json.send({disconnect:"badChangeset"});
+ socket.json.send({disconnect: 'badChangeset'});
stats.meter('failedChangesets').mark();
- throw new Error("Can't apply USER_CHANGES "+changeset+" with oldLen " + Changeset.oldLen(changeset) + " to document of length " + prevText.length);
+ throw new Error(`Can't apply USER_CHANGES ${changeset} with oldLen ${Changeset.oldLen(changeset)} to document of length ${prevText.length}`);
}
try {
- pad.appendRevision(changeset, thisSession.author);
- } catch(e) {
- client.json.send({ disconnect: "badChangeset" });
+ await pad.appendRevision(changeset, thisSession.author);
+ } catch (e) {
+ socket.json.send({disconnect: 'badChangeset'});
stats.meter('failedChangesets').mark();
throw e;
}
- let correctionChangeset = _correctMarkersInPad(pad.atext, pad.pool);
+ const correctionChangeset = _correctMarkersInPad(pad.atext, pad.pool);
if (correctionChangeset) {
- pad.appendRevision(correctionChangeset);
+ await pad.appendRevision(correctionChangeset);
}
// Make sure the pad always ends with an empty line.
- if (pad.text().lastIndexOf("\n") !== pad.text().length-1) {
- var nlChangeset = Changeset.makeSplice(pad.text(), pad.text().length - 1, 0, "\n");
- pad.appendRevision(nlChangeset);
+ if (pad.text().lastIndexOf('\n') !== pad.text().length - 1) {
+ const nlChangeset = Changeset.makeSplice(pad.text(), pad.text().length - 1, 0, '\n');
+ await pad.appendRevision(nlChangeset);
}
await exports.updatePadClients(pad);
@@ -690,14 +674,10 @@ async function handleUserChanges(data)
stopWatch.end();
}
-exports.updatePadClients = async function(pad)
-{
+exports.updatePadClients = async function (pad) {
// skip this if no-one is on this pad
- let roomClients = _getRoomClients(pad.id);
-
- if (roomClients.length === 0) {
- return;
- }
+ const roomSockets = _getRoomSockets(pad.id);
+ if (roomSockets.length === 0) return;
// since all clients usually get the same set of changesets, store them in local cache
// to remove unnecessary roundtrip to the datalayer
@@ -706,24 +686,24 @@ exports.updatePadClients = async function(pad)
// BEFORE first result will be landed to our cache object. The solution is to replace parallel processing
// via async.forEach with sequential for() loop. There is no real benefits of running this in parallel,
// but benefit of reusing cached revision object is HUGE
- let revCache = {};
+ const revCache = {};
// go through all sessions on this pad
- for (let client of roomClients) {
- let sid = client.id;
+ for (const socket of roomSockets) {
+ const sid = socket.id;
// send them all new changesets
while (sessioninfos[sid] && sessioninfos[sid].rev < pad.getHeadRevisionNumber()) {
- let r = sessioninfos[sid].rev + 1;
+ const r = sessioninfos[sid].rev + 1;
let revision = revCache[r];
if (!revision) {
revision = await pad.getRevision(r);
revCache[r] = revision;
}
- let author = revision.meta.author,
- revChangeset = revision.changeset,
- currentTime = revision.meta.timestamp;
+ const author = revision.meta.author;
+ const revChangeset = revision.changeset;
+ const currentTime = revision.meta.timestamp;
// next if session has not been deleted
if (sessioninfos[sid] == null) {
@@ -731,20 +711,19 @@ exports.updatePadClients = async function(pad)
}
if (author === sessioninfos[sid].author) {
- client.json.send({ "type": "COLLABROOM", "data":{ type: "ACCEPT_COMMIT", newRev: r }});
+ socket.json.send({type: 'COLLABROOM', data: {type: 'ACCEPT_COMMIT', newRev: r}});
} else {
- let forWire = Changeset.prepareForWire(revChangeset, pad.pool);
- let wireMsg = {"type": "COLLABROOM",
- "data": { type:"NEW_CHANGES",
- newRev:r,
- changeset: forWire.translated,
- apool: forWire.pool,
- author: author,
- currentTime: currentTime,
- timeDelta: currentTime - sessioninfos[sid].time
- }};
-
- client.json.send(wireMsg);
+ const forWire = Changeset.prepareForWire(revChangeset, pad.pool);
+ const wireMsg = {type: 'COLLABROOM',
+ data: {type: 'NEW_CHANGES',
+ newRev: r,
+ changeset: forWire.translated,
+ apool: forWire.pool,
+ author,
+ currentTime,
+ timeDelta: currentTime - sessioninfos[sid].time}};
+
+ socket.json.send(wireMsg);
}
if (sessioninfos[sid]) {
@@ -753,29 +732,27 @@ exports.updatePadClients = async function(pad)
}
}
}
-}
+};
/**
* Copied from the Etherpad Source Code. Don't know what this method does excatly...
*/
function _correctMarkersInPad(atext, apool) {
- var text = atext.text;
+ const text = atext.text;
// collect char positions of line markers (e.g. bullets) in new atext
// that aren't at the start of a line
- var badMarkers = [];
- var iter = Changeset.opIterator(atext.attribs);
- var offset = 0;
+ const badMarkers = [];
+ const iter = Changeset.opIterator(atext.attribs);
+ let offset = 0;
while (iter.hasNext()) {
var op = iter.next();
- var hasMarker = _.find(AttributeManager.lineAttributes, function(attribute) {
- return Changeset.opAttributeValue(op, attribute, apool);
- }) !== undefined;
+ const hasMarker = _.find(AttributeManager.lineAttributes, (attribute) => Changeset.opAttributeValue(op, attribute, apool)) !== undefined;
if (hasMarker) {
- for (var i = 0; i < op.chars; i++) {
- if (offset > 0 && text.charAt(offset-1) !== '\n') {
+ for (let i = 0; i < op.chars; i++) {
+ if (offset > 0 && text.charAt(offset - 1) !== '\n') {
badMarkers.push(offset);
}
offset++;
@@ -792,194 +769,177 @@ function _correctMarkersInPad(atext, apool) {
// create changeset that removes these bad markers
offset = 0;
- var builder = Changeset.builder(text.length);
+ const builder = Changeset.builder(text.length);
- badMarkers.forEach(function(pos) {
+ badMarkers.forEach((pos) => {
builder.keepText(text.substring(offset, pos));
builder.remove(1);
- offset = pos+1;
+ offset = pos + 1;
});
return builder.toString();
}
-function handleSwitchToPad(client, message)
-{
- // clear the session and leave the room
- let currentSession = sessioninfos[client.id];
- let padId = currentSession.padId;
- let roomClients = _getRoomClients(padId);
+async function handleSwitchToPad(socket, message, _authorID) {
+ const currentSessionInfo = sessioninfos[socket.id];
+ const padId = currentSessionInfo.padId;
+
+ // Check permissions for the new pad.
+ const newPadIds = await readOnlyManager.getIds(message.padId);
+ const {session: {user} = {}} = socket.client.request;
+ const {accessStatus, authorID} = await securityManager.checkAccess(
+ newPadIds.padId, message.sessionID, message.token, user);
+ if (accessStatus !== 'grant') {
+ // Access denied. Send the reason to the user.
+ socket.json.send({accessStatus});
+ return;
+ }
+ // The same token and session ID were passed to checkAccess in handleMessage, so this second call
+ // to checkAccess should return the same author ID.
+ assert(authorID === _authorID);
+ assert(authorID === currentSessionInfo.author);
- roomClients.forEach(client => {
- let sinfo = sessioninfos[client.id];
- if (sinfo && sinfo.author === currentSession.author) {
+ // Check if the connection dropped during the access check.
+ if (sessioninfos[socket.id] !== currentSessionInfo) return;
+
+ // clear the session and leave the room
+ _getRoomSockets(padId).forEach((socket) => {
+ const sinfo = sessioninfos[socket.id];
+ if (sinfo && sinfo.author === currentSessionInfo.author) {
// fix user's counter, works on page refresh or if user closes browser window and then rejoins
- sessioninfos[client.id] = {};
- client.leave(padId);
+ sessioninfos[socket.id] = {};
+ socket.leave(padId);
}
});
// start up the new pad
- createSessionInfoAuth(client, message);
- handleClientReady(client, message);
+ const newSessionInfo = sessioninfos[socket.id];
+ createSessionInfoAuth(newSessionInfo, message);
+ await handleClientReady(socket, message, authorID);
}
-// Creates/replaces the auth object in the client's session info. Session info for the client must
-// already exist.
-function createSessionInfoAuth(client, message)
-{
+// Creates/replaces the auth object in the given session info.
+function createSessionInfoAuth(sessionInfo, message) {
// Remember this information since we won't
// have the cookie in further socket.io messages.
// This information will be used to check if
// the sessionId of this connection is still valid
// since it could have been deleted by the API.
- sessioninfos[client.id].auth =
- {
+ sessionInfo.auth = {
sessionID: message.sessionID,
padID: message.padId,
- token : message.token,
- password: message.password
+ token: message.token,
};
}
/**
* Handles a CLIENT_READY. A CLIENT_READY is the first message from the client to the server. The Client sends his token
* and the pad it wants to enter. The Server answers with the inital values (clientVars) of the pad
- * @param client the client that send this message
+ * @param socket the socket.io Socket object for the client
* @param message the message from the client
*/
-async function handleClientReady(client, message)
-{
+async function handleClientReady(socket, message, authorID) {
// check if all ok
if (!message.token) {
- messageLogger.warn("Dropped message, CLIENT_READY Message has no token!");
+ messageLogger.warn('Dropped message, CLIENT_READY Message has no token!');
return;
}
if (!message.padId) {
- messageLogger.warn("Dropped message, CLIENT_READY Message has no padId!");
+ messageLogger.warn('Dropped message, CLIENT_READY Message has no padId!');
return;
}
if (!message.protocolVersion) {
- messageLogger.warn("Dropped message, CLIENT_READY Message has no protocolVersion!");
+ messageLogger.warn('Dropped message, CLIENT_READY Message has no protocolVersion!');
return;
}
if (message.protocolVersion !== 2) {
- messageLogger.warn("Dropped message, CLIENT_READY Message has a unknown protocolVersion '" + message.protocolVersion + "'!");
+ messageLogger.warn(`Dropped message, CLIENT_READY Message has a unknown protocolVersion '${message.protocolVersion}'!`);
return;
}
- hooks.callAll("clientReady", message);
+ hooks.callAll('clientReady', message);
// Get ro/rw id:s
- let padIds = await readOnlyManager.getIds(message.padId);
-
- // FIXME: Allow to override readwrite access with readonly
- const {session: {user} = {}} = client.client.request;
- const {accessStatus, authorID} = await securityManager.checkAccess(
- padIds.padId, message.sessionID, message.token, message.password, user);
-
- // no access, send the client a message that tells him why
- if (accessStatus !== "grant") {
- client.json.send({ accessStatus });
- return;
- }
+ const padIds = await readOnlyManager.getIds(message.padId);
// get all authordata of this new user
assert(authorID);
- let value = await authorManager.getAuthor(authorID);
- let authorColorId = value.colorId;
- let authorName = value.name;
+ const value = await authorManager.getAuthor(authorID);
+ const authorColorId = value.colorId;
+ const authorName = value.name;
// load the pad-object from the database
- let pad = await padManager.getPad(padIds.padId);
+ const pad = await padManager.getPad(padIds.padId);
// these db requests all need the pad object (timestamp of latest revision, author data)
- let authors = pad.getAllAuthors();
+ const authors = pad.getAllAuthors();
// get timestamp of latest revision needed for timeslider
- let currentTime = await pad.getRevisionDate(pad.getHeadRevisionNumber());
+ const currentTime = await pad.getRevisionDate(pad.getHeadRevisionNumber());
// get all author data out of the database (in parallel)
- let historicalAuthorData = {};
- await Promise.all(authors.map(authorId => {
- return authorManager.getAuthor(authorId).then(author => {
- if (!author) {
- messageLogger.error("There is no author for authorId: ", authorId, ". This is possibly related to https://github.com/ether/etherpad-lite/issues/2802");
- } else {
- historicalAuthorData[authorId] = { name: author.name, colorId: author.colorId }; // Filter author attribs (e.g. don't send author's pads to all clients)
- }
- });
- }));
-
- let thisUserHasEditedThisPad = false;
- if (historicalAuthorData[authorID]) {
- /*
- * This flag is set to true when a user contributes to a specific pad for
- * the first time. It is used for deciding if importing to that pad is
- * allowed or not.
- */
- thisUserHasEditedThisPad = true;
- }
+ const historicalAuthorData = {};
+ await Promise.all(authors.map((authorId) => authorManager.getAuthor(authorId).then((author) => {
+ if (!author) {
+ messageLogger.error('There is no author for authorId: ', authorId, '. This is possibly related to https://github.com/ether/etherpad-lite/issues/2802');
+ } else {
+ historicalAuthorData[authorId] = {name: author.name, colorId: author.colorId}; // Filter author attribs (e.g. don't send author's pads to all clients)
+ }
+ })));
// glue the clientVars together, send them and tell the other clients that a new one is there
// Check that the client is still here. It might have disconnected between callbacks.
- if (sessioninfos[client.id] === undefined) {
- return;
- }
+ const sessionInfo = sessioninfos[socket.id];
+ if (sessionInfo == null) return;
// Check if this author is already on the pad, if yes, kick the other sessions!
- let roomClients = _getRoomClients(pad.id);
+ const roomSockets = _getRoomSockets(pad.id);
- for (let client of roomClients) {
- let sinfo = sessioninfos[client.id];
+ for (const socket of roomSockets) {
+ const sinfo = sessioninfos[socket.id];
if (sinfo && sinfo.author === authorID) {
// fix user's counter, works on page refresh or if user closes browser window and then rejoins
- sessioninfos[client.id] = {};
- client.leave(padIds.padId);
- client.json.send({disconnect:"userdup"});
+ sessioninfos[socket.id] = {};
+ socket.leave(padIds.padId);
+ socket.json.send({disconnect: 'userdup'});
}
}
// Save in sessioninfos that this session belonges to this pad
- sessioninfos[client.id].padId = padIds.padId;
- sessioninfos[client.id].readOnlyPadId = padIds.readOnlyPadId;
- sessioninfos[client.id].readonly = padIds.readonly;
-
- // Log creation/(re-)entering of a pad
- let ip = remoteAddress[client.id];
-
- // Anonymize the IP address if IP logging is disabled
- if (settings.disableIPlogging) {
- ip = 'ANONYMOUS';
- }
-
- if (pad.head > 0) {
- accessLogger.info('[ENTER] Pad "' + padIds.padId + '": Client ' + client.id + ' with IP "' + ip + '" entered the pad');
- } else if (pad.head === 0) {
- accessLogger.info('[CREATE] Pad "' + padIds.padId + '": Client ' + client.id + ' with IP "' + ip + '" created the pad');
- }
+ sessionInfo.padId = padIds.padId;
+ sessionInfo.readOnlyPadId = padIds.readOnlyPadId;
+ sessionInfo.readonly =
+ padIds.readonly || !webaccess.userCanModify(message.padId, socket.client.request);
+
+ const {session: {user} = {}} = socket.client.request;
+ accessLogger.info(`${`[${pad.head > 0 ? 'ENTER' : 'CREATE'}]` +
+ ` pad:${padIds.padId}` +
+ ` socket:${socket.id}` +
+ ` IP:${settings.disableIPlogging ? 'ANONYMOUS' : socket.request.ip}` +
+ ` authorID:${authorID}`}${
+ (user && user.username) ? ` username:${user.username}` : ''}`);
if (message.reconnect) {
// If this is a reconnect, we don't have to send the client the ClientVars again
// Join the pad and start receiving updates
- client.join(padIds.padId);
+ socket.join(padIds.padId);
// Save the revision in sessioninfos, we take the revision from the info the client send to us
- sessioninfos[client.id].rev = message.client_rev;
+ sessionInfo.rev = message.client_rev;
// During the client reconnect, client might miss some revisions from other clients. By using client revision,
// this below code sends all the revisions missed during the client reconnect
- var revisionsNeeded = [];
- var changesets = {};
+ const revisionsNeeded = [];
+ const changesets = {};
- var startNum = message.client_rev + 1;
- var endNum = pad.getHeadRevisionNumber() + 1;
+ let startNum = message.client_rev + 1;
+ let endNum = pad.getHeadRevisionNumber() + 1;
- var headNum = pad.getHeadRevisionNumber();
+ const headNum = pad.getHeadRevisionNumber();
if (endNum > headNum + 1) {
endNum = headNum + 1;
@@ -995,52 +955,48 @@ async function handleClientReady(client, message)
}
// get changesets, author and timestamp needed for pending revisions (in parallel)
- let promises = [];
- for (let revNum of revisionsNeeded) {
- let cs = changesets[revNum];
- promises.push( pad.getRevisionChangeset(revNum).then(result => cs.changeset = result ));
- promises.push( pad.getRevisionAuthor(revNum).then(result => cs.author = result ));
- promises.push( pad.getRevisionDate(revNum).then(result => cs.timestamp = result ));
+ const promises = [];
+ for (const revNum of revisionsNeeded) {
+ const cs = changesets[revNum];
+ promises.push(pad.getRevisionChangeset(revNum).then((result) => cs.changeset = result));
+ promises.push(pad.getRevisionAuthor(revNum).then((result) => cs.author = result));
+ promises.push(pad.getRevisionDate(revNum).then((result) => cs.timestamp = result));
}
await Promise.all(promises);
// return pending changesets
- for (let r of revisionsNeeded) {
-
- let forWire = Changeset.prepareForWire(changesets[r]['changeset'], pad.pool);
- let wireMsg = {"type":"COLLABROOM",
- "data":{type:"CLIENT_RECONNECT",
- headRev:pad.getHeadRevisionNumber(),
- newRev:r,
- changeset:forWire.translated,
- apool: forWire.pool,
- author: changesets[r]['author'],
- currentTime: changesets[r]['timestamp']
- }};
- client.json.send(wireMsg);
+ for (const r of revisionsNeeded) {
+ const forWire = Changeset.prepareForWire(changesets[r].changeset, pad.pool);
+ const wireMsg = {type: 'COLLABROOM',
+ data: {type: 'CLIENT_RECONNECT',
+ headRev: pad.getHeadRevisionNumber(),
+ newRev: r,
+ changeset: forWire.translated,
+ apool: forWire.pool,
+ author: changesets[r].author,
+ currentTime: changesets[r].timestamp}};
+ socket.json.send(wireMsg);
}
if (startNum === endNum) {
- var Msg = {"type":"COLLABROOM",
- "data":{type:"CLIENT_RECONNECT",
- noChanges: true,
- newRev: pad.getHeadRevisionNumber()
- }};
- client.json.send(Msg);
+ const Msg = {type: 'COLLABROOM',
+ data: {type: 'CLIENT_RECONNECT',
+ noChanges: true,
+ newRev: pad.getHeadRevisionNumber()}};
+ socket.json.send(Msg);
}
-
} else {
// This is a normal first connect
// prepare all values for the wire, there's a chance that this throws, if the pad is corrupted
try {
var atext = Changeset.cloneAText(pad.atext);
- var attribsForWire = Changeset.prepareForWire(atext.attribs, pad.pool);
+ const attribsForWire = Changeset.prepareForWire(atext.attribs, pad.pool);
var apool = attribsForWire.pool.toJsonable();
atext.attribs = attribsForWire.translated;
- } catch(e) {
- console.error(e.stack || e)
- client.json.send({ disconnect:"corruptPad" }); // pull the brakes
+ } catch (e) {
+ console.error(e.stack || e);
+ socket.json.send({disconnect: 'corruptPad'}); // pull the brakes
return;
}
@@ -1048,66 +1004,61 @@ async function handleClientReady(client, message)
// Warning: never ever send padIds.padId to the client. If the
// client is read only you would open a security hole 1 swedish
// mile wide...
- var clientVars = {
- "skinName": settings.skinName,
- "skinVariants": settings.skinVariants,
- "randomVersionString": settings.randomVersionString,
- "accountPrivs": {
- "maxRevisions": 100
+ const clientVars = {
+ skinName: settings.skinName,
+ skinVariants: settings.skinVariants,
+ randomVersionString: settings.randomVersionString,
+ accountPrivs: {
+ maxRevisions: 100,
},
- "automaticReconnectionTimeout": settings.automaticReconnectionTimeout,
- "initialRevisionList": [],
- "initialOptions": {
- "guestPolicy": "deny"
+ automaticReconnectionTimeout: settings.automaticReconnectionTimeout,
+ initialRevisionList: [],
+ initialOptions: {},
+ savedRevisions: pad.getSavedRevisions(),
+ collab_client_vars: {
+ initialAttributedText: atext,
+ clientIp: '127.0.0.1',
+ padId: message.padId,
+ historicalAuthorData,
+ apool,
+ rev: pad.getHeadRevisionNumber(),
+ time: currentTime,
},
- "savedRevisions": pad.getSavedRevisions(),
- "collab_client_vars": {
- "initialAttributedText": atext,
- "clientIp": "127.0.0.1",
- "padId": message.padId,
- "historicalAuthorData": historicalAuthorData,
- "apool": apool,
- "rev": pad.getHeadRevisionNumber(),
- "time": currentTime,
- },
- "colorPalette": authorManager.getColorPalette(),
- "clientIp": "127.0.0.1",
- "userIsGuest": true,
- "userColor": authorColorId,
- "padId": message.padId,
- "padOptions": settings.padOptions,
- "padShortcutEnabled": settings.padShortcutEnabled,
- "initialTitle": "Pad: " + message.padId,
- "opts": {},
+ colorPalette: authorManager.getColorPalette(),
+ clientIp: '127.0.0.1',
+ userColor: authorColorId,
+ padId: message.padId,
+ padOptions: settings.padOptions,
+ padShortcutEnabled: settings.padShortcutEnabled,
+ initialTitle: `Pad: ${message.padId}`,
+ opts: {},
// tell the client the number of the latest chat-message, which will be
// used to request the latest 100 chat-messages later (GET_CHAT_MESSAGES)
- "chatHead": pad.chatHead,
- "numConnectedUsers": roomClients.length,
- "readOnlyId": padIds.readOnlyPadId,
- "readonly": padIds.readonly,
- "serverTimestamp": Date.now(),
- "userId": authorID,
- "abiwordAvailable": settings.abiwordAvailable(),
- "sofficeAvailable": settings.sofficeAvailable(),
- "exportAvailable": settings.exportAvailable(),
- "plugins": {
- "plugins": plugins.plugins,
- "parts": plugins.parts,
+ chatHead: pad.chatHead,
+ numConnectedUsers: roomSockets.length,
+ readOnlyId: padIds.readOnlyPadId,
+ readonly: sessionInfo.readonly,
+ serverTimestamp: Date.now(),
+ userId: authorID,
+ abiwordAvailable: settings.abiwordAvailable(),
+ sofficeAvailable: settings.sofficeAvailable(),
+ exportAvailable: settings.exportAvailable(),
+ plugins: {
+ plugins: plugins.plugins,
+ parts: plugins.parts,
},
- "indentationOnNewLine": settings.indentationOnNewLine,
- "scrollWhenFocusLineIsOutOfViewport": {
- "percentage" : {
- "editionAboveViewport": settings.scrollWhenFocusLineIsOutOfViewport.percentage.editionAboveViewport,
- "editionBelowViewport": settings.scrollWhenFocusLineIsOutOfViewport.percentage.editionBelowViewport,
+ indentationOnNewLine: settings.indentationOnNewLine,
+ scrollWhenFocusLineIsOutOfViewport: {
+ percentage: {
+ editionAboveViewport: settings.scrollWhenFocusLineIsOutOfViewport.percentage.editionAboveViewport,
+ editionBelowViewport: settings.scrollWhenFocusLineIsOutOfViewport.percentage.editionBelowViewport,
},
- "duration": settings.scrollWhenFocusLineIsOutOfViewport.duration,
- "scrollWhenCaretIsInTheLastLineOfViewport": settings.scrollWhenFocusLineIsOutOfViewport.scrollWhenCaretIsInTheLastLineOfViewport,
- "percentageToScrollWhenUserPressesArrowUp": settings.scrollWhenFocusLineIsOutOfViewport.percentageToScrollWhenUserPressesArrowUp,
+ duration: settings.scrollWhenFocusLineIsOutOfViewport.duration,
+ scrollWhenCaretIsInTheLastLineOfViewport: settings.scrollWhenFocusLineIsOutOfViewport.scrollWhenCaretIsInTheLastLineOfViewport,
+ percentageToScrollWhenUserPressesArrowUp: settings.scrollWhenFocusLineIsOutOfViewport.percentageToScrollWhenUserPressesArrowUp,
},
- "initialChangesets": [], // FIXME: REMOVE THIS SHIT
- "thisUserHasEditedThisPad": thisUserHasEditedThisPad,
- "allowAnyoneToImport": settings.allowAnyoneToImport
- }
+ initialChangesets: [], // FIXME: REMOVE THIS SHIT
+ };
// Add a username to the clientVars if one avaiable
if (authorName != null) {
@@ -1115,36 +1066,32 @@ async function handleClientReady(client, message)
}
// call the clientVars-hook so plugins can modify them before they get sent to the client
- let messages = await hooks.aCallAll('clientVars', {clientVars, pad, socket: client});
+ const messages = await hooks.aCallAll('clientVars', {clientVars, pad, socket});
// combine our old object with the new attributes from the hook
- for (let msg of messages) {
+ for (const msg of messages) {
Object.assign(clientVars, msg);
}
// Join the pad and start receiving updates
- client.join(padIds.padId);
+ socket.join(padIds.padId);
// Send the clientVars to the Client
- client.json.send({type: "CLIENT_VARS", data: clientVars});
+ socket.json.send({type: 'CLIENT_VARS', data: clientVars});
// Save the current revision in sessioninfos, should be the same as in clientVars
- sessioninfos[client.id].rev = pad.getHeadRevisionNumber();
-
- sessioninfos[client.id].author = authorID;
+ sessionInfo.rev = pad.getHeadRevisionNumber();
// prepare the notification for the other users on the pad, that this user joined
- let messageToTheOtherUsers = {
- "type": "COLLABROOM",
- "data": {
- type: "USER_NEWINFO",
+ const messageToTheOtherUsers = {
+ type: 'COLLABROOM',
+ data: {
+ type: 'USER_NEWINFO',
userInfo: {
- "ip": "127.0.0.1",
- "colorId": authorColorId,
- "userAgent": "Anonymous",
- "userId": authorID,
- }
- }
+ colorId: authorColorId,
+ userId: authorID,
+ },
+ },
};
// Add the authorname of this new User, if avaiable
@@ -1153,68 +1100,51 @@ async function handleClientReady(client, message)
}
// notify all existing users about new user
- client.broadcast.to(padIds.padId).json.send(messageToTheOtherUsers);
+ socket.broadcast.to(padIds.padId).json.send(messageToTheOtherUsers);
// Get sessions for this pad and update them (in parallel)
- roomClients = _getRoomClients(pad.id);
- await Promise.all(_getRoomClients(pad.id).map(async roomClient => {
-
+ await Promise.all(_getRoomSockets(pad.id).map(async (roomSocket) => {
// Jump over, if this session is the connection session
- if (roomClient.id === client.id) {
+ if (roomSocket.id === socket.id) {
return;
}
// Since sessioninfos might change while being enumerated, check if the
// sessionID is still assigned to a valid session
- if (sessioninfos[roomClient.id] === undefined) {
- return;
- }
+ const sessionInfo = sessioninfos[roomSocket.id];
+ if (sessionInfo == null) return;
// get the authorname & colorId
- let author = sessioninfos[roomClient.id].author;
- let cached = historicalAuthorData[author];
+ const authorId = sessionInfo.author;
+ // The authorId of this other user might be unknown if the other user just connected and has
+ // not yet sent a CLIENT_READY message.
+ if (authorId == null) return;
// reuse previously created cache of author's data
- let authorInfo = cached ? cached : (await authorManager.getAuthor(author));
-
- // default fallback color to use if authorInfo.colorId is null
- const defaultColor = "#daf0b2";
-
- if (!authorInfo) {
- console.warn(`handleClientReady(): no authorInfo parameter was received. Default values are going to be used. See issue #3612. This can be caused by a user clicking undo after clearing all authorship colors see #2802`);
- authorInfo = {};
- }
-
- // For some reason sometimes name isn't set
- // Catch this issue here and use a fixed name.
- if (!authorInfo.name) {
- console.warn(`handleClientReady(): client submitted no author name. Using "Anonymous". See: issue #3612`);
- authorInfo.name = "Anonymous";
- }
-
- // For some reason sometimes colorId isn't set
- // Catch this issue here and use a fixed color.
- if (!authorInfo.colorId) {
- console.warn(`handleClientReady(): author "${authorInfo.name}" has no property colorId. Using the default color ${defaultColor}. See issue #3612`);
- authorInfo.colorId = defaultColor;
+ const authorInfo = historicalAuthorData[authorId] || await authorManager.getAuthor(authorId);
+ if (authorInfo == null) {
+ messageLogger.error(
+ `Author ${authorId} connected via socket.io session ${roomSocket.id} is missing from ` +
+ 'the global author database. This should never happen because the author ID is ' +
+ 'generated by the same code that adds the author to the database.');
+ // Don't bother telling the new user about this mystery author.
+ return;
}
// Send the new User a Notification about this other user
- let msg = {
- "type": "COLLABROOM",
- "data": {
- type: "USER_NEWINFO",
+ const msg = {
+ type: 'COLLABROOM',
+ data: {
+ type: 'USER_NEWINFO',
userInfo: {
- "ip": "127.0.0.1",
- "colorId": authorInfo.colorId,
- "name": authorInfo.name,
- "userAgent": "Anonymous",
- "userId": author
- }
- }
+ colorId: authorInfo.colorId,
+ name: authorInfo.name,
+ userId: authorId,
+ },
+ },
};
- client.json.send(msg);
+ socket.json.send(msg);
}));
}
}
@@ -1222,53 +1152,52 @@ async function handleClientReady(client, message)
/**
* Handles a request for a rough changeset, the timeslider client needs it
*/
-async function handleChangesetRequest(client, message)
-{
+async function handleChangesetRequest(socket, message) {
// check if all ok
if (message.data == null) {
- messageLogger.warn("Dropped message, changeset request has no data!");
+ messageLogger.warn('Dropped message, changeset request has no data!');
return;
}
if (message.padId == null) {
- messageLogger.warn("Dropped message, changeset request has no padId!");
+ messageLogger.warn('Dropped message, changeset request has no padId!');
return;
}
if (message.data.granularity == null) {
- messageLogger.warn("Dropped message, changeset request has no granularity!");
+ messageLogger.warn('Dropped message, changeset request has no granularity!');
return;
}
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isInteger#Polyfill
if (Math.floor(message.data.granularity) !== message.data.granularity) {
- messageLogger.warn("Dropped message, changeset request granularity is not an integer!");
+ messageLogger.warn('Dropped message, changeset request granularity is not an integer!');
return;
}
if (message.data.start == null) {
- messageLogger.warn("Dropped message, changeset request has no start!");
+ messageLogger.warn('Dropped message, changeset request has no start!');
return;
}
if (message.data.requestID == null) {
- messageLogger.warn("Dropped message, changeset request has no requestID!");
+ messageLogger.warn('Dropped message, changeset request has no requestID!');
return;
}
- let granularity = message.data.granularity;
- let start = message.data.start;
- let end = start + (100 * granularity);
+ const granularity = message.data.granularity;
+ const start = message.data.start;
+ const end = start + (100 * granularity);
- let padIds = await readOnlyManager.getIds(message.padId);
+ const padIds = await readOnlyManager.getIds(message.padId);
// build the requested rough changesets and send them back
try {
- let data = await getChangesetInfo(padIds.padId, start, end, granularity);
+ const data = await getChangesetInfo(padIds.padId, start, end, granularity);
data.requestID = message.data.requestID;
- client.json.send({ type: "CHANGESET_REQ", data });
+ socket.json.send({type: 'CHANGESET_REQ', data});
} catch (err) {
- console.error('Error while handling a changeset request for ' + padIds.padId, err.toString(), message.data);
+ console.error(`Error while handling a changeset request for ${padIds.padId}`, err.toString(), message.data);
}
}
@@ -1276,10 +1205,9 @@ async function handleChangesetRequest(client, message)
* Tries to rebuild the getChangestInfo function of the original Etherpad
* https://github.com/ether/pad/blob/master/etherpad/src/etherpad/control/pad/pad_changeset_control.js#L144
*/
-async function getChangesetInfo(padId, startNum, endNum, granularity)
-{
- let pad = await padManager.getPad(padId);
- let head_revision = pad.getHeadRevisionNumber();
+async function getChangesetInfo(padId, startNum, endNum, granularity) {
+ const pad = await padManager.getPad(padId);
+ const head_revision = pad.getHeadRevisionNumber();
// calculate the last full endnum
if (endNum > head_revision + 1) {
@@ -1287,15 +1215,15 @@ async function getChangesetInfo(padId, startNum, endNum, granularity)
}
endNum = Math.floor(endNum / granularity) * granularity;
- let compositesChangesetNeeded = [];
- let revTimesNeeded = [];
+ const compositesChangesetNeeded = [];
+ const revTimesNeeded = [];
// figure out which composite Changeset and revTimes we need, to load them in bulk
for (let start = startNum; start < endNum; start += granularity) {
- let end = start + granularity;
+ const end = start + granularity;
// add the composite Changeset we needed
- compositesChangesetNeeded.push({ start, end });
+ compositesChangesetNeeded.push({start, end});
// add the t1 time we need
revTimesNeeded.push(start === 0 ? 0 : start - 1);
@@ -1308,24 +1236,20 @@ async function getChangesetInfo(padId, startNum, endNum, granularity)
// it would make all the lookups run in series
// get all needed composite Changesets
- let composedChangesets = {};
- let p1 = Promise.all(compositesChangesetNeeded.map(item => {
- return composePadChangesets(padId, item.start, item.end).then(changeset => {
- composedChangesets[item.start + "/" + item.end] = changeset;
- });
- }));
+ const composedChangesets = {};
+ const p1 = Promise.all(compositesChangesetNeeded.map((item) => composePadChangesets(padId, item.start, item.end).then((changeset) => {
+ composedChangesets[`${item.start}/${item.end}`] = changeset;
+ })));
// get all needed revision Dates
- let revisionDate = [];
- let p2 = Promise.all(revTimesNeeded.map(revNum => {
- return pad.getRevisionDate(revNum).then(revDate => {
- revisionDate[revNum] = Math.floor(revDate / 1000);
- });
- }));
+ const revisionDate = [];
+ const p2 = Promise.all(revTimesNeeded.map((revNum) => pad.getRevisionDate(revNum).then((revDate) => {
+ revisionDate[revNum] = Math.floor(revDate / 1000);
+ })));
// get the lines
let lines;
- let p3 = getPadLines(padId, startNum - 1).then(_lines => {
+ const p3 = getPadLines(padId, startNum - 1).then((_lines) => {
lines = _lines;
});
@@ -1333,46 +1257,45 @@ async function getChangesetInfo(padId, startNum, endNum, granularity)
await Promise.all([p1, p2, p3]);
// doesn't know what happens here exactly :/
- let timeDeltas = [];
- let forwardsChangesets = [];
- let backwardsChangesets = [];
- let apool = new AttributePool();
+ const timeDeltas = [];
+ const forwardsChangesets = [];
+ const backwardsChangesets = [];
+ const apool = new AttributePool();
for (let compositeStart = startNum; compositeStart < endNum; compositeStart += granularity) {
- let compositeEnd = compositeStart + granularity;
+ const compositeEnd = compositeStart + granularity;
if (compositeEnd > endNum || compositeEnd > head_revision + 1) {
break;
}
- let forwards = composedChangesets[compositeStart + "/" + compositeEnd];
- let backwards = Changeset.inverse(forwards, lines.textlines, lines.alines, pad.apool());
+ const forwards = composedChangesets[`${compositeStart}/${compositeEnd}`];
+ const backwards = Changeset.inverse(forwards, lines.textlines, lines.alines, pad.apool());
Changeset.mutateAttributionLines(forwards, lines.alines, pad.apool());
Changeset.mutateTextLines(forwards, lines.textlines);
- let forwards2 = Changeset.moveOpsToNewPool(forwards, pad.apool(), apool);
- let backwards2 = Changeset.moveOpsToNewPool(backwards, pad.apool(), apool);
+ const forwards2 = Changeset.moveOpsToNewPool(forwards, pad.apool(), apool);
+ const backwards2 = Changeset.moveOpsToNewPool(backwards, pad.apool(), apool);
- let t1 = (compositeStart === 0) ? revisionDate[0] : revisionDate[compositeStart - 1];
- let t2 = revisionDate[compositeEnd - 1];
+ const t1 = (compositeStart === 0) ? revisionDate[0] : revisionDate[compositeStart - 1];
+ const t2 = revisionDate[compositeEnd - 1];
timeDeltas.push(t2 - t1);
forwardsChangesets.push(forwards2);
backwardsChangesets.push(backwards2);
}
- return { forwardsChangesets, backwardsChangesets,
- apool: apool.toJsonable(), actualEndNum: endNum,
- timeDeltas, start: startNum, granularity };
+ return {forwardsChangesets, backwardsChangesets,
+ apool: apool.toJsonable(), actualEndNum: endNum,
+ timeDeltas, start: startNum, granularity};
}
/**
* Tries to rebuild the getPadLines function of the original Etherpad
* https://github.com/ether/pad/blob/master/etherpad/src/etherpad/control/pad/pad_changeset_control.js#L263
*/
-async function getPadLines(padId, revNum)
-{
- let pad = await padManager.getPad(padId);
+async function getPadLines(padId, revNum) {
+ const pad = await padManager.getPad(padId);
// get the atext
let atext;
@@ -1380,12 +1303,12 @@ async function getPadLines(padId, revNum)
if (revNum >= 0) {
atext = await pad.getInternalRevisionAText(revNum);
} else {
- atext = Changeset.makeAText("\n");
+ atext = Changeset.makeAText('\n');
}
return {
textlines: Changeset.splitTextLines(atext.text),
- alines: Changeset.splitAttributionLines(atext.attribs, atext.text)
+ alines: Changeset.splitAttributionLines(atext.attribs, atext.text),
};
}
@@ -1393,85 +1316,79 @@ async function getPadLines(padId, revNum)
* Tries to rebuild the composePadChangeset function of the original Etherpad
* https://github.com/ether/pad/blob/master/etherpad/src/etherpad/control/pad/pad_changeset_control.js#L241
*/
-async function composePadChangesets (padId, startNum, endNum)
-{
- let pad = await padManager.getPad(padId);
+async function composePadChangesets(padId, startNum, endNum) {
+ const pad = await padManager.getPad(padId);
// fetch all changesets we need
- let headNum = pad.getHeadRevisionNumber();
+ const headNum = pad.getHeadRevisionNumber();
endNum = Math.min(endNum, headNum + 1);
startNum = Math.max(startNum, 0);
// create an array for all changesets, we will
// replace the values with the changeset later
- let changesetsNeeded = [];
- for (let r = startNum ; r < endNum; r++) {
+ const changesetsNeeded = [];
+ for (let r = startNum; r < endNum; r++) {
changesetsNeeded.push(r);
}
// get all changesets
- let changesets = {};
- await Promise.all(changesetsNeeded.map(revNum => {
- return pad.getRevisionChangeset(revNum).then(changeset => changesets[revNum] = changeset);
- }));
+ const changesets = {};
+ await Promise.all(changesetsNeeded.map((revNum) => pad.getRevisionChangeset(revNum).then((changeset) => changesets[revNum] = changeset)));
// compose Changesets
let r;
try {
let changeset = changesets[startNum];
- let pool = pad.apool();
+ const pool = pad.apool();
for (r = startNum + 1; r < endNum; r++) {
- let cs = changesets[r];
+ const cs = changesets[r];
changeset = Changeset.compose(changeset, cs, pool);
}
return changeset;
-
} catch (e) {
// r-1 indicates the rev that was build starting with startNum, applying startNum+1, +2, +3
- console.warn("failed to compose cs in pad:", padId, " startrev:", startNum," current rev:", r);
+ console.warn('failed to compose cs in pad:', padId, ' startrev:', startNum, ' current rev:', r);
throw e;
}
}
-function _getRoomClients(padID) {
- var roomClients = [];
- var room = socketio.sockets.adapter.rooms[padID];
+function _getRoomSockets(padID) {
+ const roomSockets = [];
+ const room = socketio.sockets.adapter.rooms[padID];
if (room) {
- for (var id in room.sockets) {
- roomClients.push(socketio.sockets.sockets[id]);
+ for (const id in room.sockets) {
+ roomSockets.push(socketio.sockets.sockets[id]);
}
}
- return roomClients;
+ return roomSockets;
}
/**
* Get the number of users in a pad
*/
-exports.padUsersCount = function(padID) {
+exports.padUsersCount = function (padID) {
return {
- padUsersCount: _getRoomClients(padID).length
- }
-}
+ padUsersCount: _getRoomSockets(padID).length,
+ };
+};
/**
* Get the list of users in a pad
*/
-exports.padUsers = async function(padID) {
-
- let padUsers = [];
- let roomClients = _getRoomClients(padID);
+exports.padUsers = async function (padID) {
+ const padUsers = [];
// iterate over all clients (in parallel)
- await Promise.all(roomClients.map(async roomClient => {
- let s = sessioninfos[roomClient.id];
+ await Promise.all(_getRoomSockets(padID).map(async (roomSocket) => {
+ const s = sessioninfos[roomSocket.id];
if (s) {
- return authorManager.getAuthor(s.author).then(author => {
+ return authorManager.getAuthor(s.author).then((author) => {
// Fixes: https://github.com/ether/etherpad-lite/issues/4120
// On restart author might not be populated?
- if(author){
+ if (author) {
author.id = s.author;
padUsers.push(author);
}
@@ -1479,7 +1396,7 @@ exports.padUsers = async function(padID) {
}
}));
- return { padUsers };
-}
+ return {padUsers};
+};
exports.sessioninfos = sessioninfos;
diff --git a/src/node/handler/SocketIORouter.js b/src/node/handler/SocketIORouter.js
index a5220d2f4bc..56e5c5be426 100644
--- a/src/node/handler/SocketIORouter.js
+++ b/src/node/handler/SocketIORouter.js
@@ -19,136 +19,70 @@
* limitations under the License.
*/
-var log4js = require('log4js');
-var messageLogger = log4js.getLogger("message");
-var securityManager = require("../db/SecurityManager");
-var readOnlyManager = require("../db/ReadOnlyManager");
-var remoteAddress = require("../utils/RemoteAddress").remoteAddress;
-var settings = require('../utils/Settings');
+const log4js = require('log4js');
+const messageLogger = log4js.getLogger('message');
+const securityManager = require('../db/SecurityManager');
+const readOnlyManager = require('../db/ReadOnlyManager');
+const settings = require('../utils/Settings');
/**
* Saves all components
* key is the component name
* value is the component module
*/
-var components = {};
+const components = {};
-var socket;
+let socket;
/**
* adds a component
*/
-exports.addComponent = function(moduleName, module)
-{
+exports.addComponent = function (moduleName, module) {
// save the component
components[moduleName] = module;
// give the module the socket
module.setSocketIO(socket);
-}
+};
/**
* sets the socket.io and adds event functions for routing
*/
-exports.setSocketIO = function(_socket) {
+exports.setSocketIO = function (_socket) {
// save this socket internaly
socket = _socket;
- socket.sockets.on('connection', function(client)
- {
- // Broken: See http://stackoverflow.com/questions/4647348/send-message-to-specific-client-with-socket-io-and-node-js
- // Fixed by having a persistant object, ideally this would actually be in the database layer
- // TODO move to database layer
- if (settings.trustProxy && client.handshake.headers['x-forwarded-for'] !== undefined) {
- remoteAddress[client.id] = client.handshake.headers['x-forwarded-for'];
- } else {
- remoteAddress[client.id] = client.handshake.address;
- }
-
- var clientAuthorized = false;
-
+ socket.sockets.on('connection', (client) => {
// wrap the original send function to log the messages
client._send = client.send;
- client.send = function(message) {
- messageLogger.debug("to " + client.id + ": " + stringifyWithoutPassword(message));
+ client.send = function (message) {
+ messageLogger.debug(`to ${client.id}: ${JSON.stringify(message)}`);
client._send(message);
- }
+ };
// tell all components about this connect
- for (let i in components) {
+ for (const i in components) {
components[i].handleConnect(client);
}
- client.on('message', async function(message) {
+ client.on('message', async (message) => {
if (message.protocolVersion && message.protocolVersion != 2) {
- messageLogger.warn("Protocolversion header is not correct:" + stringifyWithoutPassword(message));
+ messageLogger.warn(`Protocolversion header is not correct: ${JSON.stringify(message)}`);
return;
}
-
- if (clientAuthorized) {
- // client is authorized, everything ok
- handleMessage(client, message);
- } else {
- // try to authorize the client
- if (message.padId !== undefined && message.sessionID !== undefined && message.token !== undefined && message.password !== undefined) {
- // check for read-only pads
- let padId = message.padId;
- if (padId.indexOf("r.") === 0) {
- padId = await readOnlyManager.getPadId(message.padId);
- }
-
- const {session: {user} = {}} = client.client.request;
- const {accessStatus} = await securityManager.checkAccess(
- padId, message.sessionID, message.token, message.password, user);
-
- if (accessStatus === "grant") {
- // access was granted, mark the client as authorized and handle the message
- clientAuthorized = true;
- handleMessage(client, message);
- } else {
- // no access, send the client a message that tells him why
- messageLogger.warn("Authentication try failed:" + stringifyWithoutPassword(message));
- client.json.send({ accessStatus });
- }
- } else {
- // drop message
- messageLogger.warn("Dropped message because of bad permissions:" + stringifyWithoutPassword(message));
- }
+ if (!message.component || !components[message.component]) {
+ messageLogger.error(`Can't route the message: ${JSON.stringify(message)}`);
+ return;
}
+ messageLogger.debug(`from ${client.id}: ${JSON.stringify(message)}`);
+ await components[message.component].handleMessage(client, message);
});
- client.on('disconnect', function() {
+ client.on('disconnect', () => {
// tell all components about this disconnect
- for (let i in components) {
+ for (const i in components) {
components[i].handleDisconnect(client);
}
});
});
-}
-
-// try to handle the message of this client
-function handleMessage(client, message)
-{
- if (message.component && components[message.component]) {
- // check if component is registered in the components array
- if (components[message.component]) {
- messageLogger.debug("from " + client.id + ": " + stringifyWithoutPassword(message));
- components[message.component].handleMessage(client, message);
- }
- } else {
- messageLogger.error("Can't route the message:" + stringifyWithoutPassword(message));
- }
-}
-
-// returns a stringified representation of a message, removes the password
-// this ensures there are no passwords in the log
-function stringifyWithoutPassword(message)
-{
- let newMessage = Object.assign({}, message);
-
- if (newMessage.password != null) {
- newMessage.password = "xxx";
- }
-
- return JSON.stringify(newMessage);
-}
+};
diff --git a/src/node/hooks/express.js b/src/node/hooks/express.js
index 7ff7d4ffc62..b3d4f34e469 100644
--- a/src/node/hooks/express.js
+++ b/src/node/hooks/express.js
@@ -1,24 +1,43 @@
-var hooks = require("ep_etherpad-lite/static/js/pluginfw/hooks");
-var express = require('express');
-var settings = require('../utils/Settings');
-var fs = require('fs');
-var path = require('path');
-var npm = require("npm/lib/npm.js");
-var _ = require("underscore");
-
-var server;
-var serverName;
-
-exports.createServer = function () {
- console.log("Report bugs at https://github.com/ether/etherpad-lite/issues")
+'use strict';
+
+const _ = require('underscore');
+const cookieParser = require('cookie-parser');
+const express = require('express');
+const expressSession = require('express-session');
+const fs = require('fs');
+const hooks = require('../../static/js/pluginfw/hooks');
+const log4js = require('log4js');
+const SessionStore = require('../db/SessionStore');
+const settings = require('../utils/Settings');
+const stats = require('../stats');
+const util = require('util');
+
+const logger = log4js.getLogger('http');
+let serverName;
+
+exports.server = null;
+
+const closeServer = async () => {
+ if (exports.server == null) return;
+ logger.info('Closing HTTP server...');
+ await Promise.all([
+ util.promisify(exports.server.close.bind(exports.server))(),
+ hooks.aCallAll('expressCloseServer'),
+ ]);
+ exports.server = null;
+ logger.info('HTTP server closed');
+};
+
+exports.createServer = async () => {
+ console.log('Report bugs at https://github.com/ether/etherpad-lite/issues');
serverName = `Etherpad ${settings.getGitCommit()} (https://etherpad.org)`;
console.log(`Your Etherpad version is ${settings.getEpVersion()} (${settings.getGitCommit()})`);
- exports.restartServer();
+ await exports.restartServer();
- if (settings.ip === "") {
+ if (settings.ip === '') {
// using Unix socket for connectivity
console.log(`You can access your Etherpad instance using the Unix socket at ${settings.port}`);
} else {
@@ -28,59 +47,59 @@ exports.createServer = function () {
if (!_.isEmpty(settings.users)) {
console.log(`The plugin admin page is at http://${settings.ip}:${settings.port}/admin/plugins`);
} else {
- console.warn("Admin username and password not set in settings.json. To access admin please uncomment and edit 'users' in settings.json");
+ console.warn('Admin username and password not set in settings.json. ' +
+ 'To access admin please uncomment and edit "users" in settings.json');
}
- var env = process.env.NODE_ENV || 'development';
+ const env = process.env.NODE_ENV || 'development';
if (env !== 'production') {
- console.warn("Etherpad is running in Development mode. This mode is slower for users and less secure than production mode. You should set the NODE_ENV environment variable to production by using: export NODE_ENV=production");
+ console.warn('Etherpad is running in Development mode. This mode is slower for users and ' +
+ 'less secure than production mode. You should set the NODE_ENV environment ' +
+ 'variable to production by using: export NODE_ENV=production');
}
-}
+};
-exports.restartServer = function () {
- if (server) {
- console.log("Restarting express server");
- server.close();
- }
+exports.restartServer = async () => {
+ await closeServer();
- var app = express(); // New syntax for express v3
+ const app = express(); // New syntax for express v3
if (settings.ssl) {
- console.log("SSL -- enabled");
+ console.log('SSL -- enabled');
console.log(`SSL -- server key file: ${settings.ssl.key}`);
console.log(`SSL -- Certificate Authority's certificate file: ${settings.ssl.cert}`);
- var options = {
- key: fs.readFileSync( settings.ssl.key ),
- cert: fs.readFileSync( settings.ssl.cert )
+ const options = {
+ key: fs.readFileSync(settings.ssl.key),
+ cert: fs.readFileSync(settings.ssl.cert),
};
if (settings.ssl.ca) {
options.ca = [];
- for (var i = 0; i < settings.ssl.ca.length; i++) {
- var caFileName = settings.ssl.ca[i];
+ for (let i = 0; i < settings.ssl.ca.length; i++) {
+ const caFileName = settings.ssl.ca[i];
options.ca.push(fs.readFileSync(caFileName));
}
}
- var https = require('https');
- server = https.createServer(options, app);
+ const https = require('https');
+ exports.server = https.createServer(options, app);
} else {
- var http = require('http');
- server = http.createServer(app);
+ const http = require('http');
+ exports.server = http.createServer(app);
}
- app.use(function(req, res, next) {
+ app.use((req, res, next) => {
// res.header("X-Frame-Options", "deny"); // breaks embedded pads
if (settings.ssl) {
// we use SSL
- res.header("Strict-Transport-Security", "max-age=31536000; includeSubDomains");
+ res.header('Strict-Transport-Security', 'max-age=31536000; includeSubDomains');
}
// Stop IE going into compatability mode
// https://github.com/ether/etherpad-lite/issues/2547
- res.header("X-UA-Compatible", "IE=Edge,chrome=1");
+ res.header('X-UA-Compatible', 'IE=Edge,chrome=1');
// Enable a strong referrer policy. Same-origin won't drop Referers when
// loading local resources, but it will drop them when loading foreign resources.
@@ -89,11 +108,11 @@ exports.restartServer = function () {
// marked with
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Referrer-Policy
// https://github.com/ether/etherpad-lite/pull/3636
- res.header("Referrer-Policy", "same-origin");
+ res.header('Referrer-Policy', 'same-origin');
// send git version in the Server response header if exposeVersion is true.
if (settings.exposeVersion) {
- res.header("Server", serverName);
+ res.header('Server', serverName);
}
next();
@@ -109,8 +128,66 @@ exports.restartServer = function () {
app.enable('trust proxy');
}
- hooks.callAll("expressConfigure", {"app": app});
- hooks.callAll("expressCreateServer", {"app": app, "server": server});
+ // Measure response time
+ app.use((req, res, next) => {
+ const stopWatch = stats.timer('httpRequests').start();
+ const sendFn = res.send.bind(res);
+ res.send = (...args) => { stopWatch.end(); sendFn(...args); };
+ next();
+ });
+
+ // If the log level specified in the config file is WARN or ERROR the application server never
+ // starts listening to requests as reported in issue #158. Not installing the log4js connect
+ // logger when the log level has a higher severity than INFO since it would not log at that level
+ // anyway.
+  if (!(settings.loglevel === 'WARN' || settings.loglevel === 'ERROR')) {
+ app.use(log4js.connectLogger(logger, {
+ level: log4js.levels.DEBUG,
+ format: ':status, :method :url',
+ }));
+ }
+
+ exports.sessionMiddleware = expressSession({
+ secret: settings.sessionKey,
+ store: new SessionStore(),
+ resave: false,
+ saveUninitialized: true,
+ // Set the cookie name to a javascript identifier compatible string. Makes code handling it
+ // cleaner :)
+ name: 'express_sid',
+ proxy: true,
+ cookie: {
+ sameSite: settings.cookie.sameSite,
+
+ // The automatic express-session mechanism for determining if the application is being served
+ // over ssl is similar to the one used for setting the language cookie, which check if one of
+ // these conditions is true:
+ //
+ // 1. we are directly serving the nodejs application over SSL, using the "ssl" options in
+ // settings.json
+ //
+ // 2. we are serving the nodejs application in plaintext, but we are using a reverse proxy
+ // that terminates SSL for us. In this case, the user has to set trustProxy = true in
+      //    settings.json, and the information whether the application is over SSL or not will be
+ // extracted from the X-Forwarded-Proto HTTP header
+ //
+ // Please note that this will not be compatible with applications being served over http and
+ // https at the same time.
+ //
+ // reference: https://github.com/expressjs/session/blob/v1.17.0/README.md#cookiesecure
+ secure: 'auto',
+ },
+ });
+ app.use(exports.sessionMiddleware);
+
+ app.use(cookieParser(settings.sessionKey, {}));
+
+ hooks.callAll('expressConfigure', {app});
+ hooks.callAll('expressCreateServer', {app, server: exports.server});
+
+ await util.promisify(exports.server.listen).bind(exports.server)(settings.port, settings.ip);
+};
- server.listen(settings.port, settings.ip);
-}
+exports.shutdown = async (hookName, context) => {
+ await closeServer();
+};
diff --git a/src/node/hooks/express/admin.js b/src/node/hooks/express/admin.js
index 0884cde56a4..417939600e2 100644
--- a/src/node/hooks/express/admin.js
+++ b/src/node/hooks/express/admin.js
@@ -1,9 +1,9 @@
-var eejs = require('ep_etherpad-lite/node/eejs');
+const eejs = require('ep_etherpad-lite/node/eejs');
exports.expressCreateServer = function (hook_name, args, cb) {
- args.app.get('/admin', function(req, res) {
- if('/' != req.path[req.path.length-1]) return res.redirect('./admin/');
- res.send( eejs.require("ep_etherpad-lite/templates/admin/index.html", {}) );
+ args.app.get('/admin', (req, res) => {
+ if ('/' != req.path[req.path.length - 1]) return res.redirect('./admin/');
+ res.send(eejs.require('ep_etherpad-lite/templates/admin/index.html', {req}));
});
-}
-
+ return cb();
+};
diff --git a/src/node/hooks/express/adminplugins.js b/src/node/hooks/express/adminplugins.js
index f6f184ed396..0a6d9780897 100644
--- a/src/node/hooks/express/adminplugins.js
+++ b/src/node/hooks/express/adminplugins.js
@@ -1,129 +1,131 @@
-var eejs = require('ep_etherpad-lite/node/eejs');
-var settings = require('ep_etherpad-lite/node/utils/Settings');
-var installer = require('ep_etherpad-lite/static/js/pluginfw/installer');
-var plugins = require('ep_etherpad-lite/static/js/pluginfw/plugin_defs');
-var _ = require('underscore');
-var semver = require('semver');
-const UpdateCheck = require('ep_etherpad-lite/node/utils/UpdateCheck');
-
-exports.expressCreateServer = function(hook_name, args, cb) {
- args.app.get('/admin/plugins', function(req, res) {
- var render_args = {
+'use strict';
+
+const eejs = require('../../eejs');
+const settings = require('../../utils/Settings');
+const installer = require('../../../static/js/pluginfw/installer');
+const plugins = require('../../../static/js/pluginfw/plugin_defs');
+const _ = require('underscore');
+const semver = require('semver');
+const UpdateCheck = require('../../utils/UpdateCheck');
+
+exports.expressCreateServer = (hookName, args, cb) => {
+ args.app.get('/admin/plugins', (req, res) => {
+ res.send(eejs.require('ep_etherpad-lite/templates/admin/plugins.html', {
plugins: plugins.plugins,
- search_results: {},
+ req,
errors: [],
- };
-
- res.send(eejs.require("ep_etherpad-lite/templates/admin/plugins.html", render_args));
+ }));
});
- args.app.get('/admin/plugins/info', function(req, res) {
- var gitCommit = settings.getGitCommit();
- var epVersion = settings.getEpVersion();
+ args.app.get('/admin/plugins/info', (req, res) => {
+ const gitCommit = settings.getGitCommit();
+ const epVersion = settings.getEpVersion();
- res.send(eejs.require("ep_etherpad-lite/templates/admin/plugins-info.html", {
- gitCommit: gitCommit,
- epVersion: epVersion,
- latestVersion: UpdateCheck.getLatestVersion()
+ res.send(eejs.require('ep_etherpad-lite/templates/admin/plugins-info.html', {
+ gitCommit,
+ epVersion,
+ latestVersion: UpdateCheck.getLatestVersion(),
+ req,
}));
});
-}
-exports.socketio = function(hook_name, args, cb) {
- var io = args.io.of("/pluginfw/installer");
- io.on('connection', function(socket) {
- if (!socket.conn.request.session || !socket.conn.request.session.user || !socket.conn.request.session.user.is_admin) return;
+ return cb();
+};
+
+exports.socketio = (hookName, args, cb) => {
+ const io = args.io.of('/pluginfw/installer');
+ io.on('connection', (socket) => {
+ const {session: {user: {is_admin: isAdmin} = {}} = {}} = socket.conn.request;
+ if (!isAdmin) return;
- socket.on("getInstalled", function(query) {
+ socket.on('getInstalled', (query) => {
// send currently installed plugins
- var installed = Object.keys(plugins.plugins).map(function(plugin) {
- return plugins.plugins[plugin].package
- });
+ const installed =
+ Object.keys(plugins.plugins).map((plugin) => plugins.plugins[plugin].package);
- socket.emit("results:installed", {installed: installed});
+ socket.emit('results:installed', {installed});
});
- socket.on("checkUpdates", async function() {
+ socket.on('checkUpdates', async () => {
// Check plugins for updates
try {
- let results = await installer.getAvailablePlugins(/*maxCacheAge:*/ 60 * 10);
+ const results = await installer.getAvailablePlugins(/* maxCacheAge:*/ 60 * 10);
- var updatable = _(plugins.plugins).keys().filter(function(plugin) {
+ const updatable = _(plugins.plugins).keys().filter((plugin) => {
if (!results[plugin]) return false;
- var latestVersion = results[plugin].version;
- var currentVersion = plugins.plugins[plugin].package.version;
+ const latestVersion = results[plugin].version;
+ const currentVersion = plugins.plugins[plugin].package.version;
return semver.gt(latestVersion, currentVersion);
});
- socket.emit("results:updatable", {updatable: updatable});
+ socket.emit('results:updatable', {updatable});
} catch (er) {
console.warn(er);
- socket.emit("results:updatable", {updatable: {}});
+ socket.emit('results:updatable', {updatable: {}});
}
});
- socket.on("getAvailable", async function(query) {
+ socket.on('getAvailable', async (query) => {
try {
- let results = await installer.getAvailablePlugins(/*maxCacheAge:*/ false);
- socket.emit("results:available", results);
+ const results = await installer.getAvailablePlugins(/* maxCacheAge:*/ false);
+ socket.emit('results:available', results);
} catch (er) {
console.error(er);
- socket.emit("results:available", {});
+ socket.emit('results:available', {});
}
});
- socket.on("search", async function(query) {
+ socket.on('search', async (query) => {
try {
- let results = await installer.search(query.searchTerm, /*maxCacheAge:*/ 60 * 10);
- var res = Object.keys(results)
- .map(function(pluginName) {
- return results[pluginName];
- })
- .filter(function(plugin) {
- return !plugins.plugins[plugin.name];
- });
+ const results = await installer.search(query.searchTerm, /* maxCacheAge:*/ 60 * 10);
+ let res = Object.keys(results)
+ .map((pluginName) => results[pluginName])
+ .filter((plugin) => !plugins.plugins[plugin.name]);
res = sortPluginList(res, query.sortBy, query.sortDir)
- .slice(query.offset, query.offset+query.limit);
- socket.emit("results:search", {results: res, query: query});
+ .slice(query.offset, query.offset + query.limit);
+ socket.emit('results:search', {results: res, query});
} catch (er) {
console.error(er);
- socket.emit("results:search", {results: {}, query: query});
+ socket.emit('results:search', {results: {}, query});
}
});
- socket.on("install", function(plugin_name) {
- installer.install(plugin_name, function(er) {
+ socket.on('install', (pluginName) => {
+ installer.install(pluginName, (er) => {
if (er) console.warn(er);
- socket.emit("finished:install", {plugin: plugin_name, code: er? er.code : null, error: er? er.message : null});
+ socket.emit('finished:install', {
+ plugin: pluginName,
+ code: er ? er.code : null,
+ error: er ? er.message : null,
+ });
});
});
- socket.on("uninstall", function(plugin_name) {
- installer.uninstall(plugin_name, function(er) {
+ socket.on('uninstall', (pluginName) => {
+ installer.uninstall(pluginName, (er) => {
if (er) console.warn(er);
- socket.emit("finished:uninstall", {plugin: plugin_name, error: er? er.message : null});
+ socket.emit('finished:uninstall', {plugin: pluginName, error: er ? er.message : null});
});
});
});
-}
+ return cb();
+};
-function sortPluginList(plugins, property, /*ASC?*/dir) {
- return plugins.sort(function(a, b) {
- if (a[property] < b[property]) {
- return dir? -1 : 1;
- }
+const sortPluginList = (plugins, property, /* ASC?*/dir) => plugins.sort((a, b) => {
+ if (a[property] < b[property]) {
+ return dir ? -1 : 1;
+ }
- if (a[property] > b[property]) {
- return dir? 1 : -1;
- }
+ if (a[property] > b[property]) {
+ return dir ? 1 : -1;
+ }
- // a must be equal to b
- return 0;
- });
-}
+ // a must be equal to b
+ return 0;
+});
diff --git a/src/node/hooks/express/adminsettings.js b/src/node/hooks/express/adminsettings.js
index 1e0d6004f78..139cce1b126 100644
--- a/src/node/hooks/express/adminsettings.js
+++ b/src/node/hooks/express/adminsettings.js
@@ -1,57 +1,54 @@
-var eejs = require('ep_etherpad-lite/node/eejs');
-var settings = require('ep_etherpad-lite/node/utils/Settings');
-var hooks = require("ep_etherpad-lite/static/js/pluginfw/hooks");
-var fs = require('fs');
-
-exports.expressCreateServer = function (hook_name, args, cb) {
- args.app.get('/admin/settings', function(req, res) {
-
- var render_args = {
- settings: "",
- search_results: {},
- errors: []
- };
-
- res.send( eejs.require("ep_etherpad-lite/templates/admin/settings.html", render_args) );
-
+'use strict';
+
+const eejs = require('../../eejs');
+const fs = require('fs');
+const hooks = require('../../../static/js/pluginfw/hooks');
+const settings = require('../../utils/Settings');
+
+exports.expressCreateServer = (hookName, args, cb) => {
+ args.app.get('/admin/settings', (req, res) => {
+ res.send(eejs.require('ep_etherpad-lite/templates/admin/settings.html', {
+ req,
+ settings: '',
+ errors: [],
+ }));
});
-}
+ return cb();
+};
-exports.socketio = function (hook_name, args, cb) {
- var io = args.io.of("/settings");
- io.on('connection', function (socket) {
+exports.socketio = (hookName, args, cb) => {
+ const io = args.io.of('/settings');
+ io.on('connection', (socket) => {
+ const {session: {user: {is_admin: isAdmin} = {}} = {}} = socket.conn.request;
+ if (!isAdmin) return;
- if (!socket.conn.request.session || !socket.conn.request.session.user || !socket.conn.request.session.user.is_admin) return;
-
- socket.on("load", function (query) {
- fs.readFile('settings.json', 'utf8', function (err,data) {
+ socket.on('load', (query) => {
+ fs.readFile('settings.json', 'utf8', (err, data) => {
if (err) {
return console.log(err);
}
// if showSettingsInAdminPage is set to false, then return NOT_ALLOWED in the result
- if(settings.showSettingsInAdminPage === false) {
- socket.emit("settings", {results: 'NOT_ALLOWED'});
- }
- else {
- socket.emit("settings", {results: data});
+ if (settings.showSettingsInAdminPage === false) {
+ socket.emit('settings', {results: 'NOT_ALLOWED'});
+ } else {
+ socket.emit('settings', {results: data});
}
});
});
- socket.on("saveSettings", function (settings) {
- fs.writeFile('settings.json', settings, function (err) {
+ socket.on('saveSettings', (settings) => {
+ fs.writeFile('settings.json', settings, (err) => {
if (err) throw err;
- socket.emit("saveprogress", "saved");
+ socket.emit('saveprogress', 'saved');
});
});
- socket.on("restartServer", function () {
- console.log("Admin request to restart server through a socket on /admin/settings");
+ socket.on('restartServer', async () => {
+ console.log('Admin request to restart server through a socket on /admin/settings');
settings.reloadSettings();
- hooks.aCallAll("restartServer", {}, function () {});
-
+ await hooks.aCallAll('restartServer');
});
-
});
-}
+ return cb();
+};
diff --git a/src/node/hooks/express/apicalls.js b/src/node/hooks/express/apicalls.js
index c0967c35fea..c87998e94ea 100644
--- a/src/node/hooks/express/apicalls.js
+++ b/src/node/hooks/express/apicalls.js
@@ -1,32 +1,34 @@
-var log4js = require('log4js');
-var clientLogger = log4js.getLogger("client");
-var formidable = require('formidable');
-var apiHandler = require('../../handler/APIHandler');
+const log4js = require('log4js');
+const clientLogger = log4js.getLogger('client');
+const formidable = require('formidable');
+const apiHandler = require('../../handler/APIHandler');
exports.expressCreateServer = function (hook_name, args, cb) {
- //The Etherpad client side sends information about how a disconnect happened
- args.app.post('/ep/pad/connection-diagnostic-info', function(req, res) {
- new formidable.IncomingForm().parse(req, function(err, fields, files) {
- clientLogger.info("DIAGNOSTIC-INFO: " + fields.diagnosticInfo);
- res.end("OK");
+ // The Etherpad client side sends information about how a disconnect happened
+ args.app.post('/ep/pad/connection-diagnostic-info', (req, res) => {
+ new formidable.IncomingForm().parse(req, (err, fields, files) => {
+ clientLogger.info(`DIAGNOSTIC-INFO: ${fields.diagnosticInfo}`);
+ res.end('OK');
});
});
- //The Etherpad client side sends information about client side javscript errors
- args.app.post('/jserror', function(req, res) {
- new formidable.IncomingForm().parse(req, function(err, fields, files) {
+ // The Etherpad client side sends information about client side javascript errors
+ args.app.post('/jserror', (req, res) => {
+ new formidable.IncomingForm().parse(req, (err, fields, files) => {
try {
- var data = JSON.parse(fields.errorInfo)
- }catch(e){
- return res.end()
+ var data = JSON.parse(fields.errorInfo);
+ } catch (e) {
+ return res.end();
}
- clientLogger.warn(data.msg+' --', data);
- res.end("OK");
+ clientLogger.warn(`${data.msg} --`, data);
+ res.end('OK');
});
});
- //Provide a possibility to query the latest available API version
- args.app.get('/api', function (req, res) {
- res.json({"currentVersion" : apiHandler.latestApiVersion});
+ // Provide a possibility to query the latest available API version
+ args.app.get('/api', (req, res) => {
+ res.json({currentVersion: apiHandler.latestApiVersion});
});
-}
+
+ return cb();
+};
diff --git a/src/node/hooks/express/errorhandling.js b/src/node/hooks/express/errorhandling.js
index 66553621cf4..4a20b70d215 100644
--- a/src/node/hooks/express/errorhandling.js
+++ b/src/node/hooks/express/errorhandling.js
@@ -1,73 +1,17 @@
-var os = require("os");
-var db = require('../../db/DB');
-var stats = require('ep_etherpad-lite/node/stats')
-
-
-exports.onShutdown = false;
-exports.gracefulShutdown = function(err) {
- if(err && err.stack) {
- console.error(err.stack);
- } else if(err) {
- console.error(err);
- }
-
- // ensure there is only one graceful shutdown running
- if (exports.onShutdown) {
- return;
- }
-
- exports.onShutdown = true;
-
- console.log("graceful shutdown...");
-
- // do the db shutdown
- db.doShutdown().then(function() {
- console.log("db sucessfully closed.");
-
- process.exit(0);
- });
-
- setTimeout(function() {
- process.exit(1);
- }, 3000);
-}
-
-process.on('uncaughtException', exports.gracefulShutdown);
+const stats = require('ep_etherpad-lite/node/stats');
exports.expressCreateServer = function (hook_name, args, cb) {
exports.app = args.app;
// Handle errors
- args.app.use(function(err, req, res, next) {
+ args.app.use((err, req, res, next) => {
// if an error occurs Connect will pass it down
// through these "error-handling" middleware
// allowing you to respond however you like
- res.status(500).send({ error: 'Sorry, something bad happened!' });
- console.error(err.stack? err.stack : err.toString());
- stats.meter('http500').mark()
+ res.status(500).send({error: 'Sorry, something bad happened!'});
+ console.error(err.stack ? err.stack : err.toString());
+ stats.meter('http500').mark();
});
- /*
- * Connect graceful shutdown with sigint and uncaught exception
- *
- * Until Etherpad 1.7.5, process.on('SIGTERM') and process.on('SIGINT') were
- * not hooked up under Windows, because old nodejs versions did not support
- * them.
- *
- * According to nodejs 6.x documentation, it is now safe to do so. This
- * allows to gracefully close the DB connection when hitting CTRL+C under
- * Windows, for example.
- *
- * Source: https://nodejs.org/docs/latest-v6.x/api/process.html#process_signal_events
- *
- * - SIGTERM is not supported on Windows, it can be listened on.
- * - SIGINT from the terminal is supported on all platforms, and can usually
- * be generated with +C (though this may be configurable). It is not
- * generated when terminal raw mode is enabled.
- */
- process.on('SIGINT', exports.gracefulShutdown);
-
- // when running as PID1 (e.g. in docker container)
- // allow graceful shutdown on SIGTERM c.f. #3265
- process.on('SIGTERM', exports.gracefulShutdown);
-}
+ return cb();
+};
diff --git a/src/node/hooks/express/importexport.js b/src/node/hooks/express/importexport.js
index 4aa06ecb80e..7a6c38655c8 100644
--- a/src/node/hooks/express/importexport.js
+++ b/src/node/hooks/express/importexport.js
@@ -1,94 +1,75 @@
const assert = require('assert').strict;
-var hasPadAccess = require("../../padaccess");
-var settings = require('../../utils/Settings');
-var exportHandler = require('../../handler/ExportHandler');
-var importHandler = require('../../handler/ImportHandler');
-var padManager = require("../../db/PadManager");
-var authorManager = require("../../db/AuthorManager");
-const rateLimit = require("express-rate-limit");
-const securityManager = require("../../db/SecurityManager");
+const hasPadAccess = require('../../padaccess');
+const settings = require('../../utils/Settings');
+const exportHandler = require('../../handler/ExportHandler');
+const importHandler = require('../../handler/ImportHandler');
+const padManager = require('../../db/PadManager');
+const readOnlyManager = require('../../db/ReadOnlyManager');
+const authorManager = require('../../db/AuthorManager');
+const rateLimit = require('express-rate-limit');
+const securityManager = require('../../db/SecurityManager');
+const webaccess = require('./webaccess');
-settings.importExportRateLimiting.onLimitReached = function(req, res, options) {
+settings.importExportRateLimiting.onLimitReached = function (req, res, options) {
// when the rate limiter triggers, write a warning in the logs
console.warn(`Import/Export rate limiter triggered on "${req.originalUrl}" for IP address ${req.ip}`);
-}
+};
-var limiter = rateLimit(settings.importExportRateLimiting);
+const limiter = rateLimit(settings.importExportRateLimiting);
exports.expressCreateServer = function (hook_name, args, cb) {
-
// handle export requests
args.app.use('/p/:pad/:rev?/export/:type', limiter);
- args.app.get('/p/:pad/:rev?/export/:type', async function(req, res, next) {
- var types = ["pdf", "doc", "txt", "html", "odt", "etherpad"];
- //send a 404 if we don't support this filetype
+ args.app.get('/p/:pad/:rev?/export/:type', async (req, res, next) => {
+ const types = ['pdf', 'doc', 'txt', 'html', 'odt', 'etherpad'];
+ // send a 404 if we don't support this filetype
if (types.indexOf(req.params.type) == -1) {
return next();
}
// if abiword is disabled, and this is a format we only support with abiword, output a message
- if (settings.exportAvailable() == "no" &&
- ["odt", "pdf", "doc"].indexOf(req.params.type) !== -1) {
+ if (settings.exportAvailable() == 'no' &&
+ ['odt', 'pdf', 'doc'].indexOf(req.params.type) !== -1) {
console.error(`Impossible to export pad "${req.params.pad}" in ${req.params.type} format. There is no converter configured`);
// ACHTUNG: do not include req.params.type in res.send() because there is no HTML escaping and it would lead to an XSS
- res.send("This export is not enabled at this Etherpad instance. Set the path to Abiword or soffice (LibreOffice) in settings.json to enable this feature");
+ res.send('This export is not enabled at this Etherpad instance. Set the path to Abiword or soffice (LibreOffice) in settings.json to enable this feature');
return;
}
- res.header("Access-Control-Allow-Origin", "*");
+ res.header('Access-Control-Allow-Origin', '*');
if (await hasPadAccess(req, res)) {
- let exists = await padManager.doesPadExists(req.params.pad);
+ let padId = req.params.pad;
+
+ let readOnlyId = null;
+ if (readOnlyManager.isReadOnlyId(padId)) {
+ readOnlyId = padId;
+ padId = await readOnlyManager.getPadId(readOnlyId);
+ }
+
+ const exists = await padManager.doesPadExists(padId);
if (!exists) {
- console.warn(`Someone tried to export a pad that doesn't exist (${req.params.pad})`);
+ console.warn(`Someone tried to export a pad that doesn't exist (${padId})`);
return next();
}
console.log(`Exporting pad "${req.params.pad}" in ${req.params.type} format`);
- exportHandler.doExport(req, res, req.params.pad, req.params.type);
+ exportHandler.doExport(req, res, padId, readOnlyId, req.params.type);
}
});
// handle import requests
args.app.use('/p/:pad/import', limiter);
- args.app.post('/p/:pad/import', async function(req, res, next) {
- if (!(await padManager.doesPadExists(req.params.pad))) {
- console.warn(`Someone tried to import into a pad that doesn't exist (${req.params.pad})`);
- return next();
- }
-
+ args.app.post('/p/:pad/import', async (req, res, next) => {
const {session: {user} = {}} = req;
- const {accessStatus, authorID} = await securityManager.checkAccess(
- req.params.pad, req.cookies.sessionID, req.cookies.token, req.cookies.password, user);
- if (accessStatus !== 'grant') return res.status(403).send('Forbidden');
- assert(authorID);
-
- /*
- * Starting from Etherpad 1.8.3 onwards, importing into a pad is allowed
- * only if a user has his browser opened and connected to the pad (i.e. a
- * Socket.IO session is estabilished for him) and he has already
- * contributed to that specific pad.
- *
- * Note that this does not have anything to do with the "session", used
- * for logging into "group pads". That kind of session is not needed here.
- *
- * This behaviour does not apply to API requests, only to /p/$PAD$/import
- *
- * See: https://github.com/ether/etherpad-lite/pull/3833#discussion_r407490205
- */
- if (!settings.allowAnyoneToImport) {
- const authorsPads = await authorManager.listPadsOfAuthor(authorID);
- if (!authorsPads) {
- console.warn(`Unable to import file into "${req.params.pad}". Author "${authorID}" exists but he never contributed to any pad`);
- return next();
- }
- if (authorsPads.padIDs.indexOf(req.params.pad) === -1) {
- console.warn(`Unable to import file into "${req.params.pad}". Author "${authorID}" exists but he never contributed to this pad`);
- return next();
- }
+ const {accessStatus} = await securityManager.checkAccess(
+ req.params.pad, req.cookies.sessionID, req.cookies.token, user);
+ if (accessStatus !== 'grant' || !webaccess.userCanModify(req.params.pad, req)) {
+ return res.status(403).send('Forbidden');
}
-
- importHandler.doImport(req, res, req.params.pad);
+ await importHandler.doImport(req, res, req.params.pad);
});
-}
+
+ return cb();
+};
diff --git a/src/node/hooks/express/isValidJSONPName.js b/src/node/hooks/express/isValidJSONPName.js
index 47755ef8624..442c963e9f1 100644
--- a/src/node/hooks/express/isValidJSONPName.js
+++ b/src/node/hooks/express/isValidJSONPName.js
@@ -62,14 +62,14 @@ const RESERVED_WORDS = [
'volatile',
'while',
'with',
- 'yield'
+ 'yield',
];
const regex = /^[a-zA-Z_$][0-9a-zA-Z_$]*(?:\[(?:".+"|\'.+\'|\d+)\])*?$/;
-module.exports.check = function(inputStr) {
- var isValid = true;
- inputStr.split(".").forEach(function(part) {
+module.exports.check = function (inputStr) {
+ let isValid = true;
+ inputStr.split('.').forEach((part) => {
if (!regex.test(part)) {
isValid = false;
}
@@ -80,4 +80,4 @@ module.exports.check = function(inputStr) {
});
return isValid;
-}
+};
diff --git a/src/node/hooks/express/openapi.js b/src/node/hooks/express/openapi.js
index 76ed6693247..8ea9529c760 100644
--- a/src/node/hooks/express/openapi.js
+++ b/src/node/hooks/express/openapi.js
@@ -14,7 +14,7 @@
const OpenAPIBackend = require('openapi-backend').default;
const formidable = require('formidable');
-const { promisify } = require('util');
+const {promisify} = require('util');
const cloneDeep = require('lodash.clonedeep');
const createHTTPError = require('http-errors');
@@ -57,12 +57,12 @@ const resources = {
create: {
operationId: 'createGroup',
summary: 'creates a new group',
- responseSchema: { groupID: { type: 'string' } },
+ responseSchema: {groupID: {type: 'string'}},
},
createIfNotExistsFor: {
operationId: 'createGroupIfNotExistsFor',
summary: 'this functions helps you to map your application group ids to Etherpad group ids',
- responseSchema: { groupID: { type: 'string' } },
+ responseSchema: {groupID: {type: 'string'}},
},
delete: {
operationId: 'deleteGroup',
@@ -71,7 +71,7 @@ const resources = {
listPads: {
operationId: 'listPads',
summary: 'returns all pads of this group',
- responseSchema: { padIDs: { type: 'array', items: { type: 'string' } } },
+ responseSchema: {padIDs: {type: 'array', items: {type: 'string'}}},
},
createPad: {
operationId: 'createGroupPad',
@@ -80,12 +80,12 @@ const resources = {
listSessions: {
operationId: 'listSessionsOfGroup',
summary: '',
- responseSchema: { sessions: { type: 'array', items: { $ref: '#/components/schemas/SessionInfo' } } },
+ responseSchema: {sessions: {type: 'array', items: {$ref: '#/components/schemas/SessionInfo'}}},
},
list: {
operationId: 'listAllGroups',
summary: '',
- responseSchema: { groupIDs: { type: 'array', items: { type: 'string' } } },
+ responseSchema: {groupIDs: {type: 'array', items: {type: 'string'}}},
},
},
@@ -94,28 +94,28 @@ const resources = {
create: {
operationId: 'createAuthor',
summary: 'creates a new author',
- responseSchema: { authorID: { type: 'string' } },
+ responseSchema: {authorID: {type: 'string'}},
},
createIfNotExistsFor: {
operationId: 'createAuthorIfNotExistsFor',
summary: 'this functions helps you to map your application author ids to Etherpad author ids',
- responseSchema: { authorID: { type: 'string' } },
+ responseSchema: {authorID: {type: 'string'}},
},
listPads: {
operationId: 'listPadsOfAuthor',
summary: 'returns an array of all pads this author contributed to',
- responseSchema: { padIDs: { type: 'array', items: { type: 'string' } } },
+ responseSchema: {padIDs: {type: 'array', items: {type: 'string'}}},
},
listSessions: {
operationId: 'listSessionsOfAuthor',
summary: 'returns all sessions of an author',
- responseSchema: { sessions: { type: 'array', items: { $ref: '#/components/schemas/SessionInfo' } } },
+ responseSchema: {sessions: {type: 'array', items: {$ref: '#/components/schemas/SessionInfo'}}},
},
// We need an operation that return a UserInfo so it can be picked up by the codegen :(
getName: {
operationId: 'getAuthorName',
summary: 'Returns the Author Name of the author',
- responseSchema: { info: { $ref: '#/components/schemas/UserInfo' } },
+ responseSchema: {info: {$ref: '#/components/schemas/UserInfo'}},
},
},
@@ -124,7 +124,7 @@ const resources = {
create: {
operationId: 'createSession',
summary: 'creates a new session. validUntil is an unix timestamp in seconds',
- responseSchema: { sessionID: { type: 'string' } },
+ responseSchema: {sessionID: {type: 'string'}},
},
delete: {
operationId: 'deleteSession',
@@ -134,7 +134,7 @@ const resources = {
info: {
operationId: 'getSessionInfo',
summary: 'returns informations about a session',
- responseSchema: { info: { $ref: '#/components/schemas/SessionInfo' } },
+ responseSchema: {info: {$ref: '#/components/schemas/SessionInfo'}},
},
},
@@ -143,7 +143,7 @@ const resources = {
listAll: {
operationId: 'listAllPads',
summary: 'list all the pads',
- responseSchema: { padIDs: { type: 'array', items: { type: 'string' } } },
+ responseSchema: {padIDs: {type: 'array', items: {type: 'string'}}},
},
createDiffHTML: {
operationId: 'createDiffHTML',
@@ -158,7 +158,7 @@ const resources = {
getText: {
operationId: 'getText',
summary: 'returns the text of a pad',
- responseSchema: { text: { type: 'string' } },
+ responseSchema: {text: {type: 'string'}},
},
setText: {
operationId: 'setText',
@@ -167,7 +167,7 @@ const resources = {
getHTML: {
operationId: 'getHTML',
summary: 'returns the text of a pad formatted as HTML',
- responseSchema: { html: { type: 'string' } },
+ responseSchema: {html: {type: 'string'}},
},
setHTML: {
operationId: 'setHTML',
@@ -176,12 +176,12 @@ const resources = {
getRevisionsCount: {
operationId: 'getRevisionsCount',
summary: 'returns the number of revisions of this pad',
- responseSchema: { revisions: { type: 'integer' } },
+ responseSchema: {revisions: {type: 'integer'}},
},
getLastEdited: {
operationId: 'getLastEdited',
summary: 'returns the timestamp of the last revision of the pad',
- responseSchema: { lastEdited: { type: 'integer' } },
+ responseSchema: {lastEdited: {type: 'integer'}},
},
delete: {
operationId: 'deletePad',
@@ -190,7 +190,7 @@ const resources = {
getReadOnlyID: {
operationId: 'getReadOnlyID',
summary: 'returns the read only link of a pad',
- responseSchema: { readOnlyID: { type: 'string' } },
+ responseSchema: {readOnlyID: {type: 'string'}},
},
setPublicStatus: {
operationId: 'setPublicStatus',
@@ -199,31 +199,22 @@ const resources = {
getPublicStatus: {
operationId: 'getPublicStatus',
summary: 'return true of false',
- responseSchema: { publicStatus: { type: 'boolean' } },
- },
- setPassword: {
- operationId: 'setPassword',
- summary: 'returns ok or a error message',
- },
- isPasswordProtected: {
- operationId: 'isPasswordProtected',
- summary: 'returns true or false',
- responseSchema: { passwordProtection: { type: 'boolean' } },
+ responseSchema: {publicStatus: {type: 'boolean'}},
},
authors: {
operationId: 'listAuthorsOfPad',
summary: 'returns an array of authors who contributed to this pad',
- responseSchema: { authorIDs: { type: 'array', items: { type: 'string' } } },
+ responseSchema: {authorIDs: {type: 'array', items: {type: 'string'}}},
},
usersCount: {
operationId: 'padUsersCount',
summary: 'returns the number of user that are currently editing this pad',
- responseSchema: { padUsersCount: { type: 'integer' } },
+ responseSchema: {padUsersCount: {type: 'integer'}},
},
users: {
operationId: 'padUsers',
summary: 'returns the list of users that are currently editing this pad',
- responseSchema: { padUsers: { type: 'array', items: { $ref: '#/components/schemas/UserInfo' } } },
+ responseSchema: {padUsers: {type: 'array', items: {$ref: '#/components/schemas/UserInfo'}}},
},
sendClientsMessage: {
operationId: 'sendClientsMessage',
@@ -236,13 +227,13 @@ const resources = {
getChatHistory: {
operationId: 'getChatHistory',
summary: 'returns the chat history',
- responseSchema: { messages: { type: 'array', items: { $ref: '#/components/schemas/Message' } } },
+ responseSchema: {messages: {type: 'array', items: {$ref: '#/components/schemas/Message'}}},
},
// We need an operation that returns a Message so it can be picked up by the codegen :(
getChatHead: {
operationId: 'getChatHead',
summary: 'returns the chatHead (chat-message) of the pad',
- responseSchema: { chatHead: { $ref: '#/components/schemas/Message' } },
+ responseSchema: {chatHead: {$ref: '#/components/schemas/Message'}},
},
appendChatMessage: {
operationId: 'appendChatMessage',
@@ -393,10 +384,10 @@ const defaultResponseRefs = {
const operations = {};
for (const resource in resources) {
for (const action in resources[resource]) {
- const { operationId, responseSchema, ...operation } = resources[resource][action];
+ const {operationId, responseSchema, ...operation} = resources[resource][action];
// add response objects
- const responses = { ...defaultResponseRefs };
+ const responses = {...defaultResponseRefs};
if (responseSchema) {
responses[200] = cloneDeep(defaultResponses.Success);
responses[200].content['application/json'].schema.properties.data = {
@@ -487,14 +478,14 @@ const generateDefinitionForVersion = (version, style = APIPathStyle.FLAT) => {
},
},
},
- security: [{ ApiKey: [] }],
+ security: [{ApiKey: []}],
};
// build operations
for (const funcName in apiHandler.version[version]) {
let operation = {};
if (operations[funcName]) {
- operation = { ...operations[funcName] };
+ operation = {...operations[funcName]};
} else {
// console.warn(`No operation found for function: ${funcName}`);
operation = {
@@ -506,7 +497,7 @@ const generateDefinitionForVersion = (version, style = APIPathStyle.FLAT) => {
// set parameters
operation.parameters = operation.parameters || [];
for (const paramName of apiHandler.version[version][funcName]) {
- operation.parameters.push({ $ref: `#/components/parameters/${paramName}` });
+ operation.parameters.push({$ref: `#/components/parameters/${paramName}`});
if (!definition.components.parameters[paramName]) {
definition.components.parameters[paramName] = {
name: paramName,
@@ -541,8 +532,8 @@ const generateDefinitionForVersion = (version, style = APIPathStyle.FLAT) => {
return definition;
};
-exports.expressCreateServer = async (_, args) => {
- const { app } = args;
+exports.expressCreateServer = (hookName, args, cb) => {
+ const {app} = args;
// create openapi-backend handlers for each api version under /api/{version}/*
for (const version in apiHandler.version) {
@@ -559,7 +550,7 @@ exports.expressCreateServer = async (_, args) => {
app.get(`${apiRoot}/openapi.json`, (req, res) => {
// For openapi definitions, wide CORS is probably fine
res.header('Access-Control-Allow-Origin', '*');
- res.json({ ...definition, servers: [generateServerForApiVersion(apiRoot, req)] });
+ res.json({...definition, servers: [generateServerForApiVersion(apiRoot, req)]});
});
// serve latest openapi definition file under /api/openapi.json
@@ -567,7 +558,7 @@ exports.expressCreateServer = async (_, args) => {
if (isLatestAPIVersion) {
app.get(`/${style}/openapi.json`, (req, res) => {
res.header('Access-Control-Allow-Origin', '*');
- res.json({ ...definition, servers: [generateServerForApiVersion(apiRoot, req)] });
+ res.json({...definition, servers: [generateServerForApiVersion(apiRoot, req)]});
});
}
@@ -584,10 +575,10 @@ exports.expressCreateServer = async (_, args) => {
// register default handlers
api.register({
notFound: () => {
- throw new createHTTPError.notFound('no such function');
+ throw new createHTTPError.NotFound('no such function');
},
notImplemented: () => {
- throw new createHTTPError.notImplemented('function not implemented');
+ throw new createHTTPError.NotImplemented('function not implemented');
},
});
@@ -595,7 +586,7 @@ exports.expressCreateServer = async (_, args) => {
for (const funcName in apiHandler.version[version]) {
const handler = async (c, req, res) => {
// parse fields from request
- const { header, params, query } = c.request;
+ const {header, params, query} = c.request;
// read form data if method was POST
let formData = {};
@@ -611,9 +602,9 @@ exports.expressCreateServer = async (_, args) => {
apiLogger.info(`REQUEST, v${version}:${funcName}, ${JSON.stringify(fields)}`);
// pass to api handler
- let data = await apiHandler.handle(version, funcName, fields, req, res).catch((err) => {
+ const data = await apiHandler.handle(version, funcName, fields, req, res).catch((err) => {
// convert all errors to http errors
- if (err instanceof createHTTPError.HttpError) {
+ if (createHTTPError.isHttpError(err)) {
// pass http errors thrown by handler forward
throw err;
} else if (err.name == 'apierror') {
@@ -629,7 +620,7 @@ exports.expressCreateServer = async (_, args) => {
});
// return in common format
- let response = { code: 0, message: 'ok', data: data || null };
+ const response = {code: 0, message: 'ok', data: data || null};
// log response
apiLogger.info(`RESPONSE, ${funcName}, ${JSON.stringify(response)}`);
@@ -663,24 +654,24 @@ exports.expressCreateServer = async (_, args) => {
// https://github.com/ether/etherpad-lite/tree/master/doc/api/http_api.md#response-format
switch (res.statusCode) {
case 403: // forbidden
- response = { code: 4, message: err.message, data: null };
+ response = {code: 4, message: err.message, data: null};
break;
case 401: // unauthorized (no or wrong api key)
- response = { code: 4, message: err.message, data: null };
+ response = {code: 4, message: err.message, data: null};
break;
case 404: // not found (no such function)
- response = { code: 3, message: err.message, data: null };
+ response = {code: 3, message: err.message, data: null};
break;
case 500: // server error (internal error)
- response = { code: 2, message: err.message, data: null };
+ response = {code: 2, message: err.message, data: null};
break;
case 400: // bad request (wrong parameters)
// respond with 200 OK to keep old behavior and pass tests
res.statusCode = 200; // @TODO: this is bad api design
- response = { code: 1, message: err.message, data: null };
+ response = {code: 1, message: err.message, data: null};
break;
default:
- response = { code: 1, message: err.message, data: null };
+ response = {code: 1, message: err.message, data: null};
break;
}
}
@@ -696,6 +687,7 @@ exports.expressCreateServer = async (_, args) => {
});
}
}
+ return cb();
};
// helper to get api root
diff --git a/src/node/hooks/express/padreadonly.js b/src/node/hooks/express/padreadonly.js
index 5264c17cdf8..f17f7f0d685 100644
--- a/src/node/hooks/express/padreadonly.js
+++ b/src/node/hooks/express/padreadonly.js
@@ -1,13 +1,12 @@
-var readOnlyManager = require("../../db/ReadOnlyManager");
-var hasPadAccess = require("../../padaccess");
-var exporthtml = require("../../utils/ExportHtml");
+const readOnlyManager = require('../../db/ReadOnlyManager');
+const hasPadAccess = require('../../padaccess');
+const exporthtml = require('../../utils/ExportHtml');
exports.expressCreateServer = function (hook_name, args, cb) {
// serve read only pad
- args.app.get('/ro/:id', async function(req, res) {
-
+ args.app.get('/ro/:id', async (req, res) => {
// translate the read only pad to a padId
- let padId = await readOnlyManager.getPadId(req.params.id);
+ const padId = await readOnlyManager.getPadId(req.params.id);
if (padId == null) {
res.status(404).send('404 - Not Found');
return;
@@ -18,9 +17,9 @@ exports.expressCreateServer = function (hook_name, args, cb) {
if (await hasPadAccess(req, res)) {
// render the html document
- let html = await exporthtml.getPadHTMLDocument(padId, null);
+ const html = await exporthtml.getPadHTMLDocument(padId, null);
res.send(html);
}
});
-
-}
+ return cb();
+};
diff --git a/src/node/hooks/express/padurlsanitize.js b/src/node/hooks/express/padurlsanitize.js
index ad8d3c43129..8a287a9619a 100644
--- a/src/node/hooks/express/padurlsanitize.js
+++ b/src/node/hooks/express/padurlsanitize.js
@@ -1,29 +1,29 @@
-var padManager = require('../../db/PadManager');
-var url = require('url');
+const padManager = require('../../db/PadManager');
+const url = require('url');
exports.expressCreateServer = function (hook_name, args, cb) {
-
// redirects browser to the pad's sanitized url if needed. otherwise, renders the html
- args.app.param('pad', async function (req, res, next, padId) {
+ args.app.param('pad', async (req, res, next, padId) => {
// ensure the padname is valid and the url doesn't end with a /
if (!padManager.isValidPadId(padId) || /\/$/.test(req.url)) {
res.status(404).send('Such a padname is forbidden');
return;
}
- let sanitizedPadId = await padManager.sanitizePadId(padId);
+ const sanitizedPadId = await padManager.sanitizePadId(padId);
if (sanitizedPadId === padId) {
// the pad id was fine, so just render it
next();
} else {
// the pad id was sanitized, so we redirect to the sanitized version
- var real_url = sanitizedPadId;
+ let real_url = sanitizedPadId;
real_url = encodeURIComponent(real_url);
- var query = url.parse(req.url).query;
- if ( query ) real_url += '?' + query;
+ const query = url.parse(req.url).query;
+ if (query) real_url += `?${query}`;
res.header('Location', real_url);
- res.status(302).send('You should be redirected to ' + real_url + '');
+ res.status(302).send(`You should be redirected to ${real_url}`);
}
});
-}
+ return cb();
+};
diff --git a/src/node/hooks/express/socketio.js b/src/node/hooks/express/socketio.js
index b1406afd280..3d9e9debe13 100644
--- a/src/node/hooks/express/socketio.js
+++ b/src/node/hooks/express/socketio.js
@@ -1,21 +1,40 @@
-var settings = require('../../utils/Settings');
-var socketio = require('socket.io');
-var socketIORouter = require("../../handler/SocketIORouter");
-var hooks = require("ep_etherpad-lite/static/js/pluginfw/hooks");
-var webaccess = require("ep_etherpad-lite/node/hooks/express/webaccess");
+'use strict';
-var padMessageHandler = require("../../handler/PadMessageHandler");
+const express = require('../express');
+const proxyaddr = require('proxy-addr');
+const settings = require('../../utils/Settings');
+const socketio = require('socket.io');
+const socketIORouter = require('../../handler/SocketIORouter');
+const hooks = require('../../../static/js/pluginfw/hooks');
+const padMessageHandler = require('../../handler/PadMessageHandler');
+const util = require('util');
-var cookieParser = require('cookie-parser');
-var sessionModule = require('express-session');
+let io;
-exports.expressCreateServer = function (hook_name, args, cb) {
- //init socket.io and redirect all requests to the MessageHandler
+exports.expressCloseServer = async () => {
+ // According to the socket.io documentation every client is always in the default namespace (and
+ // may also be in other namespaces).
+ const ns = io.sockets; // The Namespace object for the default namespace.
+ // Disconnect all socket.io clients. This probably isn't necessary; closing the socket.io Engine
+ // (see below) probably gracefully disconnects all clients. But that is not documented, and this
+ // doesn't seem to hurt, so hedge against surprising and undocumented socket.io behavior.
+ for (const id of await util.promisify(ns.clients.bind(ns))()) {
+ ns.connected[id].disconnect(true);
+ }
+ // Don't call io.close() because that closes the underlying HTTP server, which is already done
+ // elsewhere. (Closing an HTTP server twice throws an exception.) The `engine` property of
+ // socket.io Server objects is undocumented, but I don't see any other way to shut down socket.io
+ // without also closing the HTTP server.
+ io.engine.close();
+};
+
+exports.expressCreateServer = (hookName, args, cb) => {
+ // init socket.io and redirect all requests to the MessageHandler
// there shouldn't be a browser that isn't compatible to all
// transports in this list at once
// e.g. XHR is disabled in IE by default, so in IE it should use jsonp-polling
- var io = socketio({
- transports: settings.socketTransportProtocols
+ io = socketio({
+ transports: settings.socketTransportProtocols,
}).listen(args.server, {
/*
* Do not set the "io" cookie.
@@ -39,41 +58,23 @@ exports.expressCreateServer = function (hook_name, args, cb) {
cookie: false,
});
- // REQUIRE a signed express-session cookie to be present, then load the session. See
- // http://www.danielbaulig.de/socket-ioexpress for more info. After the session is loaded, ensure
- // that the user has authenticated (if authentication is required).
- //
- // !!!WARNING!!! Requests to /socket.io are NOT subject to the checkAccess middleware in
- // webaccess.js. If this handler fails to check for a signed express-session cookie or fails to
- // check whether the user has authenticated, then any random person on the Internet can read,
- // modify, or create any pad (unless the pad is password protected or an HTTP API session is
- // required).
- var cookieParserFn = cookieParser(webaccess.secret, {});
io.use((socket, next) => {
- var data = socket.request;
- if (!data.headers.cookie) {
+ const req = socket.request;
+ // Express sets req.ip but socket.io does not. Replicate Express's behavior here.
+ if (req.ip == null) {
+ if (settings.trustProxy) {
+ req.ip = proxyaddr(req, args.app.get('trust proxy fn'));
+ } else {
+ req.ip = socket.handshake.address;
+ }
+ }
+ if (!req.headers.cookie) {
// socketio.js-client on node.js doesn't support cookies (see https://git.io/JU8u9), so the
// token and express_sid cookies have to be passed via a query parameter for unit tests.
- data.headers.cookie = socket.handshake.query.cookie;
+ req.headers.cookie = socket.handshake.query.cookie;
}
- if (!data.headers.cookie && settings.loadTest) {
- console.warn('bypassing socket.io authentication check due to settings.loadTest');
- return next(null, true);
- }
- const fail = (msg) => { return next(new Error(msg), false); };
- cookieParserFn(data, {}, function(err) {
- if (err) return fail('access denied: unable to parse express_sid cookie');
- const expressSid = data.signedCookies.express_sid;
- if (!expressSid) return fail ('access denied: signed express_sid cookie is required');
- args.app.sessionStore.get(expressSid, (err, session) => {
- if (err || !session) return fail('access denied: bad session or session has expired');
- data.session = new sessionModule.Session(data, session);
- if (settings.requireAuthentication && data.session.user == null) {
- return fail('access denied: authentication required');
- }
- next(null, true);
- });
- });
+ // See: https://socket.io/docs/faq/#Usage-with-express-session
+ express.sessionMiddleware(req, {}, next);
});
// var socketIOLogger = log4js.getLogger("socket.io");
@@ -81,15 +82,17 @@ exports.expressCreateServer = function (hook_name, args, cb) {
// https://github.com/Automattic/socket.io/wiki/Migrating-to-1.0
// This debug logging environment is set in Settings.js
- //minify socket.io javascript
+ // minify socket.io javascript
// Due to a shitty decision by the SocketIO team minification is
// no longer available, details available at:
// http://stackoverflow.com/questions/23981741/minify-socket-io-socket-io-js-with-1-0
// if(settings.minify) io.enable('browser client minification');
- //Initalize the Socket.IO Router
+ // Initialize the Socket.IO Router
socketIORouter.setSocketIO(io);
- socketIORouter.addComponent("pad", padMessageHandler);
+ socketIORouter.addComponent('pad', padMessageHandler);
+
+ hooks.callAll('socketio', {app: args.app, io, server: args.server});
- hooks.callAll("socketio", {"app": args.app, "io": io, "server": args.server});
-}
+ return cb();
+};
diff --git a/src/node/hooks/express/specialpages.js b/src/node/hooks/express/specialpages.js
index b11f77a0075..f53ce1ac71a 100644
--- a/src/node/hooks/express/specialpages.js
+++ b/src/node/hooks/express/specialpages.js
@@ -1,89 +1,81 @@
-var path = require('path');
-var eejs = require('ep_etherpad-lite/node/eejs');
-var toolbar = require("ep_etherpad-lite/node/utils/toolbar");
-var hooks = require('ep_etherpad-lite/static/js/pluginfw/hooks');
-var settings = require('../../utils/Settings');
+const path = require('path');
+const eejs = require('ep_etherpad-lite/node/eejs');
+const toolbar = require('ep_etherpad-lite/node/utils/toolbar');
+const hooks = require('ep_etherpad-lite/static/js/pluginfw/hooks');
+const settings = require('../../utils/Settings');
+const webaccess = require('./webaccess');
exports.expressCreateServer = function (hook_name, args, cb) {
// expose current stats
- args.app.get('/stats', function(req, res) {
- res.json(require('ep_etherpad-lite/node/stats').toJSON())
- })
+ args.app.get('/stats', (req, res) => {
+ res.json(require('ep_etherpad-lite/node/stats').toJSON());
+ });
- //serve index.html under /
- args.app.get('/', function(req, res)
- {
- res.send(eejs.require("ep_etherpad-lite/templates/index.html"));
+ // serve index.html under /
+ args.app.get('/', (req, res) => {
+ res.send(eejs.require('ep_etherpad-lite/templates/index.html', {req}));
});
- //serve javascript.html
- args.app.get('/javascript', function(req, res)
- {
- res.send(eejs.require("ep_etherpad-lite/templates/javascript.html"));
+ // serve javascript.html
+ args.app.get('/javascript', (req, res) => {
+ res.send(eejs.require('ep_etherpad-lite/templates/javascript.html', {req}));
});
- //serve robots.txt
- args.app.get('/robots.txt', function(req, res)
- {
- var filePath = path.join(settings.root, "src", "static", "skins", settings.skinName, "robots.txt");
- res.sendFile(filePath, function(err)
- {
- //there is no custom robots.txt, send the default robots.txt which dissallows all
- if(err)
- {
- filePath = path.join(settings.root, "src", "static", "robots.txt");
+ // serve robots.txt
+ args.app.get('/robots.txt', (req, res) => {
+ let filePath = path.join(settings.root, 'src', 'static', 'skins', settings.skinName, 'robots.txt');
+ res.sendFile(filePath, (err) => {
+ // there is no custom robots.txt, send the default robots.txt which disallows all
+ if (err) {
+ filePath = path.join(settings.root, 'src', 'static', 'robots.txt');
res.sendFile(filePath);
}
});
});
- //serve pad.html under /p
- args.app.get('/p/:pad', function(req, res, next)
- {
+ // serve pad.html under /p
+ args.app.get('/p/:pad', (req, res, next) => {
// The below might break for pads being rewritten
- var isReadOnly = req.url.indexOf("/p/r.") === 0;
+ const isReadOnly =
+ req.url.indexOf('/p/r.') === 0 || !webaccess.userCanModify(req.params.pad, req);
- hooks.callAll("padInitToolbar", {
- toolbar: toolbar,
- isReadOnly: isReadOnly
+ hooks.callAll('padInitToolbar', {
+ toolbar,
+ isReadOnly,
});
- res.send(eejs.require("ep_etherpad-lite/templates/pad.html", {
- req: req,
- toolbar: toolbar,
- isReadOnly: isReadOnly
+ res.send(eejs.require('ep_etherpad-lite/templates/pad.html', {
+ req,
+ toolbar,
+ isReadOnly,
}));
});
- //serve timeslider.html under /p/$padname/timeslider
- args.app.get('/p/:pad/timeslider', function(req, res, next)
- {
- hooks.callAll("padInitToolbar", {
- toolbar: toolbar
+ // serve timeslider.html under /p/$padname/timeslider
+ args.app.get('/p/:pad/timeslider', (req, res, next) => {
+ hooks.callAll('padInitToolbar', {
+ toolbar,
});
- res.send(eejs.require("ep_etherpad-lite/templates/timeslider.html", {
- req: req,
- toolbar: toolbar
+ res.send(eejs.require('ep_etherpad-lite/templates/timeslider.html', {
+ req,
+ toolbar,
}));
});
- //serve favicon.ico from all path levels except as a pad name
- args.app.get( /\/favicon.ico$/, function(req, res)
- {
- var filePath = path.join(settings.root, "src", "static", "skins", settings.skinName, "favicon.ico");
+ // serve favicon.ico from all path levels except as a pad name
+ args.app.get(/\/favicon.ico$/, (req, res) => {
+ let filePath = path.join(settings.root, 'src', 'static', 'skins', settings.skinName, 'favicon.ico');
- res.sendFile(filePath, function(err)
- {
- //there is no custom favicon, send the default favicon
- if(err)
- {
- filePath = path.join(settings.root, "src", "static", "favicon.ico");
+ res.sendFile(filePath, (err) => {
+ // there is no custom favicon, send the default favicon
+ if (err) {
+ filePath = path.join(settings.root, 'src', 'static', 'favicon.ico');
res.sendFile(filePath);
}
});
});
-
-}
+ return cb();
+};
diff --git a/src/node/hooks/express/static.js b/src/node/hooks/express/static.js
index b8c6c9d52b7..2df757e644d 100644
--- a/src/node/hooks/express/static.js
+++ b/src/node/hooks/express/static.js
@@ -1,14 +1,13 @@
-var minify = require('../../utils/Minify');
-var plugins = require("ep_etherpad-lite/static/js/pluginfw/plugin_defs");
-var CachingMiddleware = require('../../utils/caching_middleware');
-var settings = require("../../utils/Settings");
-var Yajsml = require('etherpad-yajsml');
-var _ = require("underscore");
+const minify = require('../../utils/Minify');
+const plugins = require('ep_etherpad-lite/static/js/pluginfw/plugin_defs');
+const CachingMiddleware = require('../../utils/caching_middleware');
+const settings = require('../../utils/Settings');
+const Yajsml = require('etherpad-yajsml');
+const _ = require('underscore');
exports.expressCreateServer = function (hook_name, args, cb) {
-
// Cache both minified and static.
- var assetCache = new CachingMiddleware;
+ const assetCache = new CachingMiddleware();
args.app.all(/\/javascripts\/(.*)/, assetCache.handle);
// Minify will serve static files compressed (minify enabled). It also has
@@ -18,41 +17,42 @@ exports.expressCreateServer = function (hook_name, args, cb) {
// Setup middleware that will package JavaScript files served by minify for
// CommonJS loader on the client-side.
// Hostname "invalid.invalid" is a dummy value to allow parsing as a URI.
- var jsServer = new (Yajsml.Server)({
- rootPath: 'javascripts/src/'
- , rootURI: 'http://invalid.invalid/static/js/'
- , libraryPath: 'javascripts/lib/'
- , libraryURI: 'http://invalid.invalid/static/plugins/'
- , requestURIs: minify.requestURIs // Loop-back is causing problems, this is a workaround.
+ const jsServer = new (Yajsml.Server)({
+ rootPath: 'javascripts/src/',
+ rootURI: 'http://invalid.invalid/static/js/',
+ libraryPath: 'javascripts/lib/',
+ libraryURI: 'http://invalid.invalid/static/plugins/',
+ requestURIs: minify.requestURIs, // Loop-back is causing problems, this is a workaround.
});
- var StaticAssociator = Yajsml.associators.StaticAssociator;
- var associations =
+ const StaticAssociator = Yajsml.associators.StaticAssociator;
+ const associations =
Yajsml.associators.associationsForSimpleMapping(minify.tar);
- var associator = new StaticAssociator(associations);
+ const associator = new StaticAssociator(associations);
jsServer.setAssociator(associator);
args.app.use(jsServer.handle.bind(jsServer));
// serve plugin definitions
// not very static, but served here so that client can do require("pluginfw/static/js/plugin-definitions.js");
- args.app.get('/pluginfw/plugin-definitions.json', function (req, res, next) {
-
- var clientParts = _(plugins.parts)
- .filter(function(part){ return _(part).has('client_hooks') });
+ args.app.get('/pluginfw/plugin-definitions.json', (req, res, next) => {
+ const clientParts = _(plugins.parts)
+ .filter((part) => _(part).has('client_hooks'));
- var clientPlugins = {};
+ const clientPlugins = {};
_(clientParts).chain()
- .map(function(part){ return part.plugin })
- .uniq()
- .each(function(name){
- clientPlugins[name] = _(plugins.plugins[name]).clone();
- delete clientPlugins[name]['package'];
- });
-
- res.header("Content-Type","application/json; charset=utf-8");
- res.write(JSON.stringify({"plugins": clientPlugins, "parts": clientParts}));
+ .map((part) => part.plugin)
+ .uniq()
+ .each((name) => {
+ clientPlugins[name] = _(plugins.plugins[name]).clone();
+ delete clientPlugins[name].package;
+ });
+
+ res.header('Content-Type', 'application/json; charset=utf-8');
+ res.write(JSON.stringify({plugins: clientPlugins, parts: clientParts}));
res.end();
});
-}
+
+ return cb();
+};
diff --git a/src/node/hooks/express/tests.js b/src/node/hooks/express/tests.js
index 216715d43a0..7b32a322d6e 100644
--- a/src/node/hooks/express/tests.js
+++ b/src/node/hooks/express/tests.js
@@ -1,92 +1,92 @@
-var path = require("path")
- , npm = require("npm")
- , fs = require("fs")
- , util = require("util");
+const path = require('path');
+const npm = require('npm');
+const fs = require('fs');
+const util = require('util');
exports.expressCreateServer = function (hook_name, args, cb) {
- args.app.get('/tests/frontend/specs_list.js', async function(req, res) {
- let [coreTests, pluginTests] = await Promise.all([
+ args.app.get('/tests/frontend/specs_list.js', async (req, res) => {
+ const [coreTests, pluginTests] = await Promise.all([
exports.getCoreTests(),
- exports.getPluginTests()
+ exports.getPluginTests(),
]);
// merge the two sets of results
let files = [].concat(coreTests, pluginTests).sort();
- // Remove swap files from tests
- files = files.filter(el => !/\.swp$/.test(el))
+ // Keep only *.js files
+ files = files.filter((f) => f.endsWith('.js'));
- console.debug("Sent browser the following test specs:", files);
+ console.debug('Sent browser the following test specs:', files);
res.setHeader('content-type', 'text/javascript');
- res.end("var specs_list = " + JSON.stringify(files) + ";\n");
+ res.end(`var specs_list = ${JSON.stringify(files)};\n`);
});
// path.join seems to normalize by default, but we'll just be explicit
- var rootTestFolder = path.normalize(path.join(npm.root, "../tests/frontend/"));
+ const rootTestFolder = path.normalize(path.join(npm.root, '../tests/frontend/'));
- var url2FilePath = function(url) {
- var subPath = url.substr("/tests/frontend".length);
- if (subPath == "") {
- subPath = "index.html"
+ const url2FilePath = function (url) {
+ let subPath = url.substr('/tests/frontend'.length);
+ if (subPath == '') {
+ subPath = 'index.html';
}
- subPath = subPath.split("?")[0];
+ subPath = subPath.split('?')[0];
- var filePath = path.normalize(path.join(rootTestFolder, subPath));
+ let filePath = path.normalize(path.join(rootTestFolder, subPath));
// make sure we jail the paths to the test folder, otherwise serve index
if (filePath.indexOf(rootTestFolder) !== 0) {
- filePath = path.join(rootTestFolder, "index.html");
+ filePath = path.join(rootTestFolder, 'index.html');
}
return filePath;
- }
+ };
- args.app.get('/tests/frontend/specs/*', function (req, res) {
- var specFilePath = url2FilePath(req.url);
- var specFileName = path.basename(specFilePath);
+ args.app.get('/tests/frontend/specs/*', (req, res) => {
+ const specFilePath = url2FilePath(req.url);
+ const specFileName = path.basename(specFilePath);
- fs.readFile(specFilePath, function(err, content) {
+ fs.readFile(specFilePath, (err, content) => {
if (err) { return res.send(500); }
- content = "describe(" + JSON.stringify(specFileName) + ", function(){ " + content + " });";
+ content = `describe(${JSON.stringify(specFileName)}, function(){ ${content} });`;
res.send(content);
});
});
- args.app.get('/tests/frontend/*', function (req, res) {
- var filePath = url2FilePath(req.url);
+ args.app.get('/tests/frontend/*', (req, res) => {
+ const filePath = url2FilePath(req.url);
res.sendFile(filePath);
});
- args.app.get('/tests/frontend', function (req, res) {
+ args.app.get('/tests/frontend', (req, res) => {
res.redirect('/tests/frontend/index.html');
});
-}
+
+ return cb();
+};
const readdir = util.promisify(fs.readdir);
-exports.getPluginTests = async function(callback) {
- const moduleDir = "node_modules/";
- const specPath = "/static/tests/frontend/specs/";
- const staticDir = "/static/plugins/";
-
- let pluginSpecs = [];
-
- let plugins = await readdir(moduleDir);
- let promises = plugins
- .map(plugin => [ plugin, moduleDir + plugin + specPath] )
- .filter(([plugin, specDir]) => fs.existsSync(specDir)) // check plugin exists
- .map(([plugin, specDir]) => {
- return readdir(specDir)
- .then(specFiles => specFiles.map(spec => {
- pluginSpecs.push(staticDir + plugin + specPath + spec);
- }));
- });
+exports.getPluginTests = async function (callback) {
+ const moduleDir = 'node_modules/';
+ const specPath = '/static/tests/frontend/specs/';
+ const staticDir = '/static/plugins/';
+
+ const pluginSpecs = [];
+
+ const plugins = await readdir(moduleDir);
+ const promises = plugins
+ .map((plugin) => [plugin, moduleDir + plugin + specPath])
+ .filter(([plugin, specDir]) => fs.existsSync(specDir)) // check plugin exists
+ .map(([plugin, specDir]) => readdir(specDir)
+ .then((specFiles) => specFiles.map((spec) => {
+ pluginSpecs.push(staticDir + plugin + specPath + spec);
+ })));
return Promise.all(promises).then(() => pluginSpecs);
-}
+};
-exports.getCoreTests = function() {
+exports.getCoreTests = function () {
// get the core test specs
return readdir('tests/frontend/specs');
-}
+};
diff --git a/src/node/hooks/express/webaccess.js b/src/node/hooks/express/webaccess.js
index b83fbbd00e3..51d57ae2e9f 100644
--- a/src/node/hooks/express/webaccess.js
+++ b/src/node/hooks/express/webaccess.js
@@ -1,18 +1,30 @@
-const express = require('express');
+'use strict';
+
+const assert = require('assert').strict;
const log4js = require('log4js');
const httpLogger = log4js.getLogger('http');
const settings = require('../../utils/Settings');
-const hooks = require('ep_etherpad-lite/static/js/pluginfw/hooks');
-const ueberStore = require('../../db/SessionStore');
-const stats = require('ep_etherpad-lite/node/stats');
-const sessionModule = require('express-session');
-const cookieParser = require('cookie-parser');
+const hooks = require('../../../static/js/pluginfw/hooks');
+const readOnlyManager = require('../../db/ReadOnlyManager');
+
+hooks.deprecationNotices.authFailure = 'use the authnFailure and authzFailure hooks instead';
+
+const staticPathsRE = new RegExp(`^/(?:${[
+ 'api/.*',
+ 'favicon\\.ico',
+ 'javascripts/.*',
+ 'locales\\.json',
+ 'pluginfw/.*',
+ 'static/.*',
+].join('|')})$`);
exports.normalizeAuthzLevel = (level) => {
if (!level) return false;
switch (level) {
case true:
return 'create';
+ case 'readOnly':
+ case 'modify':
case 'create':
return level;
default:
@@ -21,187 +33,160 @@ exports.normalizeAuthzLevel = (level) => {
return false;
};
-exports.checkAccess = (req, res, next) => {
- const hookResultMangle = (cb) => {
- return (err, data) => {
- return cb(!err && data.length && data[0]);
- };
- };
+exports.userCanModify = (padId, req) => {
+ if (readOnlyManager.isReadOnlyId(padId)) return false;
+ if (!settings.requireAuthentication) return true;
+ const {session: {user} = {}} = req;
+ assert(user); // If authn required and user == null, the request should have already been denied.
+ if (user.readOnly) return false;
+ assert(user.padAuthorizations); // This is populated even if !settings.requireAuthorization.
+ const level = exports.normalizeAuthzLevel(user.padAuthorizations[padId]);
+ assert(level); // If !level, the request should have already been denied.
+ return level !== 'readOnly';
+};
+
+// Exported so that tests can set this to 0 to avoid unnecessary test slowness.
+exports.authnFailureDelayMs = 1000;
+
+const checkAccess = async (req, res, next) => {
+ // Promisified wrapper around hooks.aCallFirst.
+ const aCallFirst = (hookName, context, pred = null) => new Promise((resolve, reject) => {
+ hooks.aCallFirst(hookName, context, (err, r) => err != null ? reject(err) : resolve(r), pred);
+ });
+
+ const aCallFirst0 =
+ async (hookName, context, pred = null) => (await aCallFirst(hookName, context, pred))[0];
- // This may be called twice per access: once before authentication is checked and once after (if
- // settings.requireAuthorization is true).
- const authorize = (fail) => {
- // Do not require auth for static paths and the API...this could be a bit brittle
- if (req.path.match(/^\/(static|javascripts|pluginfw|api)/)) return next();
+ const requireAdmin = req.path.toLowerCase().indexOf('/admin') === 0;
+ // This helper is used in steps 2 and 4 below, so it may be called twice per access: once before
+ // authentication is checked and once after (if settings.requireAuthorization is true).
+ const authorize = async () => {
const grant = (level) => {
level = exports.normalizeAuthzLevel(level);
- if (!level) return fail();
+ if (!level) return false;
const user = req.session.user;
- if (user == null) return next(); // This will happen if authentication is not required.
- const padID = (req.path.match(/^\/p\/(.*)$/) || [])[1];
- if (padID == null) return next();
+ if (user == null) return true; // This will happen if authentication is not required.
+ const encodedPadId = (req.path.match(/^\/p\/([^/]*)/) || [])[1];
+ if (encodedPadId == null) return true;
+ const padId = decodeURIComponent(encodedPadId);
// The user was granted access to a pad. Remember the authorization level in the user's
// settings so that SecurityManager can approve or deny specific actions.
if (user.padAuthorizations == null) user.padAuthorizations = {};
- user.padAuthorizations[padID] = level;
- return next();
+ user.padAuthorizations[padId] = level;
+ return true;
};
-
- if (req.path.toLowerCase().indexOf('/admin') !== 0) {
- if (!settings.requireAuthentication) return grant('create');
- if (!settings.requireAuthorization && req.session && req.session.user) return grant('create');
- }
-
- if (req.session && req.session.user && req.session.user.is_admin) return grant('create');
-
- hooks.aCallFirst('authorize', {req, res, next, resource: req.path}, hookResultMangle(grant));
+ const isAuthenticated = req.session && req.session.user;
+ if (isAuthenticated && req.session.user.is_admin) return grant('create');
+ const requireAuthn = requireAdmin || settings.requireAuthentication;
+ if (!requireAuthn) return grant('create');
+ if (!isAuthenticated) return grant(false);
+ if (requireAdmin && !req.session.user.is_admin) return grant(false);
+ if (!settings.requireAuthorization) return grant('create');
+ return grant(await aCallFirst0('authorize', {req, res, next, resource: req.path}));
};
- /* Authentication OR authorization failed. */
- const failure = () => {
- return hooks.aCallFirst('authFailure', {req, res, next}, hookResultMangle((ok) => {
- if (ok) return;
- // No plugin handled the authn/authz failure. Fall back to basic authentication.
+ // ///////////////////////////////////////////////////////////////////////////////////////////////
+ // Step 1: Check the preAuthorize hook for early permit/deny (permit is only allowed for non-admin
+ // pages). If any plugin explicitly grants or denies access, skip the remaining steps. Plugins can
+ // use the preAuthzFailure hook to override the default 403 error.
+ // ///////////////////////////////////////////////////////////////////////////////////////////////
+
+ let results;
+ try {
+ results = await aCallFirst('preAuthorize', {req, res, next},
+ // This predicate will cause aCallFirst to call the hook functions one at a time until one
+ // of them returns a non-empty list, with an exception: If the request is for an /admin
+ // page, truthy entries are filtered out before checking to see whether the list is empty.
+ // This prevents plugin authors from accidentally granting admin privileges to the general
+ // public.
+ (r) => (r != null && r.filter((x) => (!requireAdmin || !x)).length > 0));
+ } catch (err) {
+ httpLogger.error(`Error in preAuthorize hook: ${err.stack || err.toString()}`);
+ return res.status(500).send('Internal Server Error');
+ }
+ if (staticPathsRE.test(req.path)) results.push(true);
+ if (requireAdmin) {
+ // Filter out all 'true' entries to prevent plugin authors from accidentally granting admin
+ // privileges to the general public.
+ results = results.filter((x) => !x);
+ }
+ if (results.length > 0) {
+ // Access was explicitly granted or denied. If any value is false then access is denied.
+ if (results.every((x) => x)) return next();
+ if (await aCallFirst0('preAuthzFailure', {req, res})) return;
+ // No plugin handled the pre-authentication authorization failure.
+ return res.status(403).send('Forbidden');
+ }
+
+ // ///////////////////////////////////////////////////////////////////////////////////////////////
+ // Step 2: Try to just access the thing. If access fails (perhaps authentication has not yet
+ // completed, or maybe different credentials are required), go to the next step.
+ // ///////////////////////////////////////////////////////////////////////////////////////////////
+
+ if (await authorize()) return next();
+
+ // ///////////////////////////////////////////////////////////////////////////////////////////////
+ // Step 3: Authenticate the user. (Or, if already logged in, reauthenticate with different
+ // credentials if supported by the authn scheme.) If authentication fails, give the user a 401
+ // error to request new credentials. Otherwise, go to the next step. Plugins can use the
+ // authnFailure hook to override the default error handling behavior (e.g., to redirect to a login
+ // page).
+ // ///////////////////////////////////////////////////////////////////////////////////////////////
+
+ if (settings.users == null) settings.users = {};
+ const ctx = {req, res, users: settings.users, next};
+ // If the HTTP basic auth header is present, extract the username and password so it can be given
+ // to authn plugins.
+ const httpBasicAuth =
+ req.headers.authorization && req.headers.authorization.search('Basic ') === 0;
+ if (httpBasicAuth) {
+ const userpass =
+ Buffer.from(req.headers.authorization.split(' ')[1], 'base64').toString().split(':');
+ ctx.username = userpass.shift();
+ ctx.password = userpass.join(':');
+ }
+ if (!(await aCallFirst0('authenticate', ctx))) {
+ // Fall back to HTTP basic auth.
+ const {[ctx.username]: {password} = {}} = settings.users;
+ if (!httpBasicAuth || password == null || password !== ctx.password) {
+ httpLogger.info(`Failed authentication from IP ${req.ip}`);
+ if (await aCallFirst0('authnFailure', {req, res})) return;
+ if (await aCallFirst0('authFailure', {req, res, next})) return;
+ // No plugin handled the authentication failure. Fall back to basic authentication.
res.header('WWW-Authenticate', 'Basic realm="Protected Area"');
// Delay the error response for 1s to slow down brute force attacks.
- setTimeout(() => {
- res.status(401).send('Authentication Required');
- }, 1000);
- }));
- };
-
- // Access checking is done in three steps:
- //
- // 1) Try to just access the thing. If access fails (perhaps authentication has not yet completed,
- // or maybe different credentials are required), go to the next step.
- // 2) Try to authenticate. (Or, if already logged in, reauthenticate with different credentials if
- // supported by the authn scheme.) If authentication fails, give the user a 401 error to
- // request new credentials. Otherwise, go to the next step.
- // 3) Try to access the thing again. If this fails, give the user a 401 error.
- //
- // Plugins can use the 'next' callback (from the hook's context) to break out at any point (e.g.,
- // to process an OAuth callback). Plugins can use the authFailure hook to override the default
- // error handling behavior (e.g., to redirect to a login page).
-
- let step1PreAuthenticate, step2Authenticate, step3Authorize;
-
- step1PreAuthenticate = () => authorize(step2Authenticate);
-
- step2Authenticate = () => {
- if (settings.users == null) settings.users = {};
- const ctx = {req, res, users: settings.users, next};
- // If the HTTP basic auth header is present, extract the username and password so it can be
- // given to authn plugins.
- const httpBasicAuth =
- req.headers.authorization && req.headers.authorization.search('Basic ') === 0;
- if (httpBasicAuth) {
- const userpass =
- Buffer.from(req.headers.authorization.split(' ')[1], 'base64').toString().split(':');
- ctx.username = userpass.shift();
- ctx.password = userpass.join(':');
+ await new Promise((resolve) => setTimeout(resolve, exports.authnFailureDelayMs));
+ res.status(401).send('Authentication Required');
+ return;
}
- hooks.aCallFirst('authenticate', ctx, hookResultMangle((ok) => {
- if (!ok) {
- // Fall back to HTTP basic auth.
- if (!httpBasicAuth) return failure();
- if (!(ctx.username in settings.users)) {
- httpLogger.info(`Failed authentication from IP ${req.ip} - no such user`);
- return failure();
- }
- if (settings.users[ctx.username].password !== ctx.password) {
- httpLogger.info(`Failed authentication from IP ${req.ip} for user ${ctx.username} - incorrect password`);
- return failure();
- }
- httpLogger.info(`Successful authentication from IP ${req.ip} for user ${ctx.username}`);
- settings.users[ctx.username].username = ctx.username;
- req.session.user = settings.users[ctx.username];
- }
- if (req.session.user == null) {
- httpLogger.error('authenticate hook failed to add user settings to session');
- res.status(500).send('Internal Server Error');
- return;
- }
- step3Authorize();
- }));
- };
-
- step3Authorize = () => authorize(failure);
-
- step1PreAuthenticate();
-};
-
-exports.secret = null;
-
-exports.expressConfigure = (hook_name, args, cb) => {
- // Measure response time
- args.app.use((req, res, next) => {
- const stopWatch = stats.timer('httpRequests').start();
- const sendFn = res.send;
- res.send = function() { // function, not arrow, due to use of 'arguments'
- stopWatch.end();
- sendFn.apply(res, arguments);
- };
- next();
- });
-
- // If the log level specified in the config file is WARN or ERROR the application server never starts listening to requests as reported in issue #158.
- // Not installing the log4js connect logger when the log level has a higher severity than INFO since it would not log at that level anyway.
- if (!(settings.loglevel === 'WARN' || settings.loglevel === 'ERROR'))
- args.app.use(log4js.connectLogger(httpLogger, {level: log4js.levels.DEBUG, format: ':status, :method :url'}));
-
- /* Do not let express create the session, so that we can retain a
- * reference to it for socket.io to use. Also, set the key (cookie
- * name) to a javascript identifier compatible string. Makes code
- * handling it cleaner :) */
-
- if (!exports.sessionStore) {
- exports.sessionStore = new ueberStore();
- exports.secret = settings.sessionKey;
+ settings.users[ctx.username].username = ctx.username;
+ // Make a shallow copy so that the password property can be deleted (to prevent it from
+ // appearing in logs or in the database) without breaking future authentication attempts.
+ req.session.user = {...settings.users[ctx.username]};
+ delete req.session.user.password;
}
+ if (req.session.user == null) {
+ httpLogger.error('authenticate hook failed to add user settings to session');
+ return res.status(500).send('Internal Server Error');
+ }
+ const {username = ''} = req.session.user;
+ httpLogger.info(`Successful authentication from IP ${req.ip} for user ${username}`);
+
+ // ///////////////////////////////////////////////////////////////////////////////////////////////
+ // Step 4: Try to access the thing again. If this fails, give the user a 403 error. Plugins can
+ // use the authzFailure hook to override the default error handling behavior (e.g., to redirect to
+ // a login page).
+ // ///////////////////////////////////////////////////////////////////////////////////////////////
+
+ if (await authorize()) return next();
+ if (await aCallFirst0('authzFailure', {req, res})) return;
+ if (await aCallFirst0('authFailure', {req, res, next})) return;
+ // No plugin handled the authorization failure.
+ res.status(403).send('Forbidden');
+};
- const sameSite = settings.ssl ? 'Strict' : 'Lax';
-
- args.app.sessionStore = exports.sessionStore;
- args.app.use(sessionModule({
- secret: exports.secret,
- store: args.app.sessionStore,
- resave: false,
- saveUninitialized: true,
- name: 'express_sid',
- proxy: true,
- cookie: {
- /*
- * Firefox started enforcing sameSite, see https://github.com/ether/etherpad-lite/issues/3989
- * for details. In response we set it based on if SSL certs are set in Etherpad. Note that if
- * You use Nginx or so for reverse proxy this may cause problems. Use Certificate pinning to remedy.
- */
- sameSite: sameSite,
- /*
- * The automatic express-session mechanism for determining if the
- * application is being served over ssl is similar to the one used for
- * setting the language cookie, which check if one of these conditions is
- * true:
- *
- * 1. we are directly serving the nodejs application over SSL, using the
- * "ssl" options in settings.json
- *
- * 2. we are serving the nodejs application in plaintext, but we are using
- * a reverse proxy that terminates SSL for us. In this case, the user
- * has to set trustProxy = true in settings.json, and the information
- * wheter the application is over SSL or not will be extracted from the
- * X-Forwarded-Proto HTTP header
- *
- * Please note that this will not be compatible with applications being
- * served over http and https at the same time.
- *
- * reference: https://github.com/expressjs/session/blob/v1.17.0/README.md#cookiesecure
- */
- secure: 'auto',
- }
- }));
-
- args.app.use(cookieParser(settings.sessionKey, {}));
-
- args.app.use(exports.checkAccess);
+exports.expressConfigure = (hookName, args, cb) => {
+ args.app.use((req, res, next) => { checkAccess(req, res, next).catch(next); });
+ return cb();
};
diff --git a/src/node/hooks/i18n.js b/src/node/hooks/i18n.js
index 0928b293819..610c3f68f19 100644
--- a/src/node/hooks/i18n.js
+++ b/src/node/hooks/i18n.js
@@ -1,58 +1,58 @@
-var languages = require('languages4translatewiki')
- , fs = require('fs')
- , path = require('path')
- , _ = require('underscore')
- , npm = require('npm')
- , plugins = require('ep_etherpad-lite/static/js/pluginfw/plugin_defs.js').plugins
- , semver = require('semver')
- , existsSync = require('../utils/path_exists')
- , settings = require('../utils/Settings')
+const languages = require('languages4translatewiki');
+const fs = require('fs');
+const path = require('path');
+const _ = require('underscore');
+const npm = require('npm');
+const plugins = require('ep_etherpad-lite/static/js/pluginfw/plugin_defs.js').plugins;
+const semver = require('semver');
+const existsSync = require('../utils/path_exists');
+const settings = require('../utils/Settings')
;
// returns all existing messages merged together and grouped by langcode
// {es: {"foo": "string"}, en:...}
function getAllLocales() {
- var locales2paths = {};
+ const locales2paths = {};
// Puts the paths of all locale files contained in a given directory
// into `locales2paths` (files from various dirs are grouped by lang code)
// (only json files with valid language code as name)
function extractLangs(dir) {
- if(!existsSync(dir)) return;
- var stat = fs.lstatSync(dir);
+ if (!existsSync(dir)) return;
+ let stat = fs.lstatSync(dir);
if (!stat.isDirectory() || stat.isSymbolicLink()) return;
- fs.readdirSync(dir).forEach(function(file) {
+ fs.readdirSync(dir).forEach((file) => {
file = path.resolve(dir, file);
stat = fs.lstatSync(file);
if (stat.isDirectory() || stat.isSymbolicLink()) return;
- var ext = path.extname(file)
- , locale = path.basename(file, ext).toLowerCase();
+ const ext = path.extname(file);
+ const locale = path.basename(file, ext).toLowerCase();
if ((ext == '.json') && languages.isValid(locale)) {
- if(!locales2paths[locale]) locales2paths[locale] = [];
+ if (!locales2paths[locale]) locales2paths[locale] = [];
locales2paths[locale].push(file);
}
});
}
- //add core supported languages first
- extractLangs(npm.root+"/ep_etherpad-lite/locales");
+ // add core supported languages first
+ extractLangs(`${npm.root}/ep_etherpad-lite/locales`);
- //add plugins languages (if any)
- for(var pluginName in plugins) extractLangs(path.join(npm.root, pluginName, 'locales'));
+ // add plugins languages (if any)
+ for (const pluginName in plugins) extractLangs(path.join(npm.root, pluginName, 'locales'));
// Build a locale index (merge all locale data other than user-supplied overrides)
- var locales = {}
- _.each (locales2paths, function(files, langcode) {
- locales[langcode]={};
+ const locales = {};
+ _.each(locales2paths, (files, langcode) => {
+ locales[langcode] = {};
- files.forEach(function(file) {
+ files.forEach((file) => {
let fileContents;
try {
- fileContents = JSON.parse(fs.readFileSync(file,'utf8'));
+ fileContents = JSON.parse(fs.readFileSync(file, 'utf8'));
} catch (err) {
console.error(`failed to read JSON file ${file}: ${err}`);
throw err;
@@ -64,17 +64,17 @@ function getAllLocales() {
// Add custom strings from settings.json
// Since this is user-supplied, we'll do some extra sanity checks
const wrongFormatErr = Error(
- "customLocaleStrings in wrong format. See documentation " +
- "for Customization for Administrators, under Localization.")
+ 'customLocaleStrings in wrong format. See documentation ' +
+ 'for Customization for Administrators, under Localization.');
if (settings.customLocaleStrings) {
- if (typeof settings.customLocaleStrings !== "object") throw wrongFormatErr
- _.each(settings.customLocaleStrings, function(overrides, langcode) {
- if (typeof overrides !== "object") throw wrongFormatErr
- _.each(overrides, function(localeString, key) {
- if (typeof localeString !== "string") throw wrongFormatErr
- locales[langcode][key] = localeString
- })
- })
+ if (typeof settings.customLocaleStrings !== 'object') throw wrongFormatErr;
+ _.each(settings.customLocaleStrings, (overrides, langcode) => {
+ if (typeof overrides !== 'object') throw wrongFormatErr;
+ _.each(overrides, (localeString, key) => {
+ if (typeof localeString !== 'string') throw wrongFormatErr;
+ locales[langcode][key] = localeString;
+ });
+ });
}
return locales;
@@ -83,45 +83,44 @@ function getAllLocales() {
// returns a hash of all available languages availables with nativeName and direction
// e.g. { es: {nativeName: "español", direction: "ltr"}, ... }
function getAvailableLangs(locales) {
- var result = {};
- _.each(_.keys(locales), function(langcode) {
+ const result = {};
+ _.each(_.keys(locales), (langcode) => {
result[langcode] = languages.getLanguageInfo(langcode);
});
return result;
}
// returns locale index that will be served in /locales.json
-var generateLocaleIndex = function (locales) {
- var result = _.clone(locales) // keep English strings
- _.each(_.keys(locales), function(langcode) {
- if (langcode != 'en') result[langcode]='locales/'+langcode+'.json';
+const generateLocaleIndex = function (locales) {
+ const result = _.clone(locales); // keep English strings
+ _.each(_.keys(locales), (langcode) => {
+ if (langcode != 'en') result[langcode] = `locales/${langcode}.json`;
});
return JSON.stringify(result);
-}
+};
-exports.expressCreateServer = function(n, args) {
-
- //regenerate locales on server restart
- var locales = getAllLocales();
- var localeIndex = generateLocaleIndex(locales);
+exports.expressCreateServer = function (n, args, cb) {
+ // regenerate locales on server restart
+ const locales = getAllLocales();
+ const localeIndex = generateLocaleIndex(locales);
exports.availableLangs = getAvailableLangs(locales);
- args.app.get ('/locales/:locale', function(req, res) {
- //works with /locale/en and /locale/en.json requests
- var locale = req.params.locale.split('.')[0];
+ args.app.get('/locales/:locale', (req, res) => {
+ // works with /locale/en and /locale/en.json requests
+ const locale = req.params.locale.split('.')[0];
if (exports.availableLangs.hasOwnProperty(locale)) {
res.setHeader('Content-Type', 'application/json; charset=utf-8');
- res.send('{"'+locale+'":'+JSON.stringify(locales[locale])+'}');
+ res.send(`{"${locale}":${JSON.stringify(locales[locale])}}`);
} else {
res.status(404).send('Language not available');
}
- })
+ });
- args.app.get('/locales.json', function(req, res) {
+ args.app.get('/locales.json', (req, res) => {
res.setHeader('Content-Type', 'application/json; charset=utf-8');
res.send(localeIndex);
- })
-
-}
+ });
+ return cb();
+};
diff --git a/src/node/padaccess.js b/src/node/padaccess.js
index 6e294403ef3..617056a9753 100644
--- a/src/node/padaccess.js
+++ b/src/node/padaccess.js
@@ -1,13 +1,13 @@
-var securityManager = require('./db/SecurityManager');
+const securityManager = require('./db/SecurityManager');
// checks for padAccess
module.exports = async function (req, res) {
try {
const {session: {user} = {}} = req;
const accessObj = await securityManager.checkAccess(
- req.params.pad, req.cookies.sessionID, req.cookies.token, req.cookies.password, user);
+ req.params.pad, req.cookies.sessionID, req.cookies.token, user);
- if (accessObj.accessStatus === "grant") {
+ if (accessObj.accessStatus === 'grant') {
// there is access, continue
return true;
} else {
@@ -19,4 +19,4 @@ module.exports = async function (req, res) {
// @TODO - send internal server error here?
throw err;
}
-}
+};
diff --git a/src/node/server.js b/src/node/server.js
index a1f62df4ff6..3219f518564 100755
--- a/src/node/server.js
+++ b/src/node/server.js
@@ -1,4 +1,7 @@
#!/usr/bin/env node
+
+'use strict';
+
/**
* This module is started with bin/run.sh. It sets up a Express HTTP and a Socket.IO Server.
* Static file Requests are answered directly from this module, Socket.IO messages are passed
@@ -21,65 +24,112 @@
* limitations under the License.
*/
-const log4js = require('log4js')
- , NodeVersion = require('./utils/NodeVersion')
- , UpdateCheck = require('./utils/UpdateCheck')
- ;
-
+const log4js = require('log4js');
log4js.replaceConsole();
/*
* early check for version compatibility before calling
* any modules that require newer versions of NodeJS
*/
+const NodeVersion = require('./utils/NodeVersion');
NodeVersion.enforceMinNodeVersion('10.13.0');
-
-/*
- * Etherpad 1.8.3 will require at least nodejs 10.13.0.
- */
NodeVersion.checkDeprecationStatus('10.13.0', '1.8.3');
-// Check if Etherpad version is up-to-date
-UpdateCheck.check();
+const UpdateCheck = require('./utils/UpdateCheck');
+const db = require('./db/DB');
+const express = require('./hooks/express');
+const hooks = require('../static/js/pluginfw/hooks');
+const npm = require('npm/lib/npm.js');
+const plugins = require('../static/js/pluginfw/plugins');
+const settings = require('./utils/Settings');
+const util = require('util');
-/*
- * start up stats counting system
- */
-var stats = require('./stats');
-stats.gauge('memoryUsage', function() {
- return process.memoryUsage().rss;
-});
+let started = false;
+let stopped = false;
-/*
- * no use of let or await here because it would cause startup
- * to fail completely on very early versions of NodeJS
- */
-var npm = require("npm/lib/npm.js");
-
-npm.load({}, function() {
- var settings = require('./utils/Settings');
- var db = require('./db/DB');
- var plugins = require("ep_etherpad-lite/static/js/pluginfw/plugins");
- var hooks = require("ep_etherpad-lite/static/js/pluginfw/hooks");
-
- db.init()
- .then(plugins.update)
- .then(function() {
- console.info("Installed plugins: " + plugins.formatPluginsWithVersion());
- console.debug("Installed parts:\n" + plugins.formatParts());
- console.debug("Installed hooks:\n" + plugins.formatHooks());
-
- // Call loadSettings hook
- hooks.aCallAll("loadSettings", { settings: settings });
-
- // initalize the http server
- hooks.callAll("createServer", {});
- })
- .catch(function(e) {
- console.error("exception thrown: " + e.message);
- if (e.stack) {
- console.log(e.stack);
- }
- process.exit(1);
- });
-});
+exports.start = async () => {
+ if (started) return express.server;
+ started = true;
+ if (stopped) throw new Error('restart not supported');
+
+ // Check if Etherpad version is up-to-date
+ UpdateCheck.check();
+
+ // start up stats counting system
+ const stats = require('./stats');
+ stats.gauge('memoryUsage', () => process.memoryUsage().rss);
+
+ await util.promisify(npm.load)();
+
+ try {
+ await db.init();
+ await plugins.update();
+ console.info(`Installed plugins: ${plugins.formatPluginsWithVersion()}`);
+ console.debug(`Installed parts:\n${plugins.formatParts()}`);
+ console.debug(`Installed hooks:\n${plugins.formatHooks()}`);
+ await hooks.aCallAll('loadSettings', {settings});
+ await hooks.aCallAll('createServer');
+ } catch (e) {
+ console.error(`exception thrown: ${e.message}`);
+ if (e.stack) console.log(e.stack);
+ process.exit(1);
+ }
+
+ process.on('uncaughtException', exports.exit);
+
+ /*
+ * Connect graceful shutdown with sigint and uncaught exception
+ *
+ * Until Etherpad 1.7.5, process.on('SIGTERM') and process.on('SIGINT') were
+ * not hooked up under Windows, because old nodejs versions did not support
+ * them.
+ *
+ * According to nodejs 6.x documentation, it is now safe to do so. This
+ * allows to gracefully close the DB connection when hitting CTRL+C under
+ * Windows, for example.
+ *
+ * Source: https://nodejs.org/docs/latest-v6.x/api/process.html#process_signal_events
+ *
+ * - SIGTERM is not supported on Windows, it can be listened on.
+ * - SIGINT from the terminal is supported on all platforms, and can usually
+ * be generated with Ctrl+C (though this may be configurable). It is not
+ * generated when terminal raw mode is enabled.
+ */
+ process.on('SIGINT', exports.exit);
+
+ // When running as PID1 (e.g. in docker container) allow graceful shutdown on SIGTERM c.f. #3265.
+ // Pass undefined to exports.exit because this is not an abnormal termination.
+ process.on('SIGTERM', () => exports.exit());
+
+ // Return the HTTP server to make it easier to write tests.
+ return express.server;
+};
+
+exports.stop = async () => {
+ if (stopped) return;
+ stopped = true;
+ console.log('Stopping Etherpad...');
+ await new Promise(async (resolve, reject) => {
+ const id = setTimeout(() => reject(new Error('Timed out waiting for shutdown tasks')), 3000);
+ await hooks.aCallAll('shutdown');
+ clearTimeout(id);
+ resolve();
+ });
+};
+
+exports.exit = async (err) => {
+ let exitCode = 0;
+ if (err) {
+ exitCode = 1;
+ console.error(err.stack ? err.stack : err);
+ }
+ try {
+ await exports.stop();
+ } catch (err) {
+ exitCode = 1;
+ console.error(err.stack ? err.stack : err);
+ }
+ process.exit(exitCode);
+};
+
+if (require.main === module) exports.start();
diff --git a/src/node/stats.js b/src/node/stats.js
index ff1752fe9e1..cecaca20d16 100644
--- a/src/node/stats.js
+++ b/src/node/stats.js
@@ -1,3 +1,9 @@
-var measured = require('measured-core')
+'use strict';
+
+const measured = require('measured-core');
module.exports = measured.createCollection();
+
+module.exports.shutdown = async (hookName, context) => {
+ module.exports.end();
+};
diff --git a/src/node/utils/Abiword.js b/src/node/utils/Abiword.js
index eed844e73a1..b75487d7578 100644
--- a/src/node/utils/Abiword.js
+++ b/src/node/utils/Abiword.js
@@ -18,45 +18,39 @@
* limitations under the License.
*/
-var spawn = require('child_process').spawn;
-var async = require("async");
-var settings = require("./Settings");
-var os = require('os');
-
-var doConvertTask;
-
-//on windows we have to spawn a process for each convertion, cause the plugin abicommand doesn't exist on this platform
-if(os.type().indexOf("Windows") > -1)
-{
- var stdoutBuffer = "";
-
- doConvertTask = function(task, callback)
- {
- //span an abiword process to perform the conversion
- var abiword = spawn(settings.abiword, ["--to=" + task.destFile, task.srcFile]);
-
- //delegate the processing of stdout to another function
- abiword.stdout.on('data', function (data)
- {
- //add data to buffer
- stdoutBuffer+=data.toString();
+const spawn = require('child_process').spawn;
+const async = require('async');
+const settings = require('./Settings');
+const os = require('os');
+
+let doConvertTask;
+
+// on windows we have to spawn a process for each conversion, because the plugin abicommand doesn't exist on this platform
+if (os.type().indexOf('Windows') > -1) {
+ let stdoutBuffer = '';
+
+ doConvertTask = function (task, callback) {
+ // spawn an abiword process to perform the conversion
+ const abiword = spawn(settings.abiword, [`--to=${task.destFile}`, task.srcFile]);
+
+ // delegate the processing of stdout to another function
+ abiword.stdout.on('data', (data) => {
+ // add data to buffer
+ stdoutBuffer += data.toString();
});
- //append error messages to the buffer
- abiword.stderr.on('data', function (data)
- {
+ // append error messages to the buffer
+ abiword.stderr.on('data', (data) => {
stdoutBuffer += data.toString();
});
- //throw exceptions if abiword is dieing
- abiword.on('exit', function (code)
- {
- if(code != 0) {
+ // throw exceptions if abiword is dying
+ abiword.on('exit', (code) => {
+ if (code != 0) {
return callback(`Abiword died with exit code ${code}`);
}
- if(stdoutBuffer != "")
- {
+ if (stdoutBuffer != '') {
console.log(stdoutBuffer);
}
@@ -64,55 +58,48 @@ if(os.type().indexOf("Windows") > -1)
});
};
- exports.convertFile = function(srcFile, destFile, type, callback)
- {
- doConvertTask({"srcFile": srcFile, "destFile": destFile, "type": type}, callback);
+ exports.convertFile = function (srcFile, destFile, type, callback) {
+ doConvertTask({srcFile, destFile, type}, callback);
};
}
-//on unix operating systems, we can start abiword with abicommand and communicate with it via stdin/stdout
-//thats much faster, about factor 10
-else
-{
- //spawn the abiword process
- var abiword;
- var stdoutCallback = null;
- var spawnAbiword = function (){
- abiword = spawn(settings.abiword, ["--plugin", "AbiCommand"]);
- var stdoutBuffer = "";
- var firstPrompt = true;
-
- //append error messages to the buffer
- abiword.stderr.on('data', function (data)
- {
+// on unix operating systems, we can start abiword with abicommand and communicate with it via stdin/stdout
+// that's much faster, about a factor of 10
+else {
+ // spawn the abiword process
+ let abiword;
+ let stdoutCallback = null;
+ var spawnAbiword = function () {
+ abiword = spawn(settings.abiword, ['--plugin', 'AbiCommand']);
+ let stdoutBuffer = '';
+ let firstPrompt = true;
+
+ // append error messages to the buffer
+ abiword.stderr.on('data', (data) => {
stdoutBuffer += data.toString();
});
- //abiword died, let's restart abiword and return an error with the callback
- abiword.on('exit', function (code)
- {
+ // abiword died, let's restart abiword and return an error with the callback
+ abiword.on('exit', (code) => {
spawnAbiword();
stdoutCallback(`Abiword died with exit code ${code}`);
});
- //delegate the processing of stdout to a other function
- abiword.stdout.on('data',function (data)
- {
- //add data to buffer
- stdoutBuffer+=data.toString();
-
- //we're searching for the prompt, cause this means everything we need is in the buffer
- if(stdoutBuffer.search("AbiWord:>") != -1)
- {
- //filter the feedback message
- var err = stdoutBuffer.search("OK") != -1 ? null : stdoutBuffer;
-
- //reset the buffer
- stdoutBuffer = "";
-
- //call the callback with the error message
- //skip the first prompt
- if(stdoutCallback != null && !firstPrompt)
- {
+ // delegate the processing of stdout to another function
+ abiword.stdout.on('data', (data) => {
+ // add data to buffer
+ stdoutBuffer += data.toString();
+
+ // we're searching for the prompt, because this means everything we need is in the buffer
+ if (stdoutBuffer.search('AbiWord:>') != -1) {
+ // filter the feedback message
+ const err = stdoutBuffer.search('OK') != -1 ? null : stdoutBuffer;
+
+ // reset the buffer
+ stdoutBuffer = '';
+
+ // call the callback with the error message
+ // skip the first prompt
+ if (stdoutCallback != null && !firstPrompt) {
stdoutCallback(err);
stdoutCallback = null;
}
@@ -123,26 +110,23 @@ else
};
spawnAbiword();
- doConvertTask = function(task, callback)
- {
- abiword.stdin.write("convert " + task.srcFile + " " + task.destFile + " " + task.type + "\n");
- //create a callback that calls the task callback and the caller callback
- stdoutCallback = function (err)
- {
+ doConvertTask = function (task, callback) {
+ abiword.stdin.write(`convert ${task.srcFile} ${task.destFile} ${task.type}\n`);
+ // create a callback that calls the task callback and the caller callback
+ stdoutCallback = function (err) {
callback();
- console.log("queue continue");
- try{
+ console.log('queue continue');
+ try {
task.callback(err);
- }catch(e){
- console.error("Abiword File failed to convert", e);
+ } catch (e) {
+ console.error('Abiword File failed to convert', e);
}
};
};
- //Queue with the converts we have to do
- var queue = async.queue(doConvertTask, 1);
- exports.convertFile = function(srcFile, destFile, type, callback)
- {
- queue.push({"srcFile": srcFile, "destFile": destFile, "type": type, "callback": callback});
+ // Queue with the converts we have to do
+ const queue = async.queue(doConvertTask, 1);
+ exports.convertFile = function (srcFile, destFile, type, callback) {
+ queue.push({srcFile, destFile, type, callback});
};
}
diff --git a/src/node/utils/AbsolutePaths.js b/src/node/utils/AbsolutePaths.js
index 9d864c474e8..22294cfe282 100644
--- a/src/node/utils/AbsolutePaths.js
+++ b/src/node/utils/AbsolutePaths.js
@@ -18,17 +18,17 @@
* limitations under the License.
*/
-var log4js = require('log4js');
-var path = require('path');
-var _ = require('underscore');
+const log4js = require('log4js');
+const path = require('path');
+const _ = require('underscore');
-var absPathLogger = log4js.getLogger('AbsolutePaths');
+const absPathLogger = log4js.getLogger('AbsolutePaths');
/*
* findEtherpadRoot() computes its value only on first invocation.
* Subsequent invocations are served from this variable.
*/
-var etherpadRoot = null;
+let etherpadRoot = null;
/**
* If stringArray's last elements are exactly equal to lastDesiredElements,
@@ -40,9 +40,9 @@ var etherpadRoot = null;
* @return {string[]|boolean} The shortened array, or false if there was no
* overlap.
*/
-var popIfEndsWith = function(stringArray, lastDesiredElements) {
+const popIfEndsWith = function (stringArray, lastDesiredElements) {
if (stringArray.length <= lastDesiredElements.length) {
- absPathLogger.debug(`In order to pop "${lastDesiredElements.join(path.sep)}" from "${stringArray.join(path.sep)}", it should contain at least ${lastDesiredElements.length + 1 } elements`);
+ absPathLogger.debug(`In order to pop "${lastDesiredElements.join(path.sep)}" from "${stringArray.join(path.sep)}", it should contain at least ${lastDesiredElements.length + 1} elements`);
return false;
}
@@ -72,7 +72,7 @@ var popIfEndsWith = function(stringArray, lastDesiredElements) {
* @return {string} The identified absolute base path. If such path cannot be
* identified, prints a log and exits the application.
*/
-exports.findEtherpadRoot = function() {
+exports.findEtherpadRoot = function () {
if (etherpadRoot !== null) {
return etherpadRoot;
}
@@ -87,7 +87,7 @@ exports.findEtherpadRoot = function() {
*
* \src
*/
- var maybeEtherpadRoot = popIfEndsWith(splitFoundRoot, ['src']);
+ let maybeEtherpadRoot = popIfEndsWith(splitFoundRoot, ['src']);
if ((maybeEtherpadRoot === false) && (process.platform === 'win32')) {
/*
@@ -126,7 +126,7 @@ exports.findEtherpadRoot = function() {
* it is returned unchanged. Otherwise it is interpreted
* relative to exports.root.
*/
-exports.makeAbsolute = function(somePath) {
+exports.makeAbsolute = function (somePath) {
if (path.isAbsolute(somePath)) {
return somePath;
}
@@ -145,7 +145,7 @@ exports.makeAbsolute = function(somePath) {
* a subdirectory of the base one
* @return {boolean}
*/
-exports.isSubdir = function(parent, arbitraryDir) {
+exports.isSubdir = function (parent, arbitraryDir) {
// modified from: https://stackoverflow.com/questions/37521893/determine-if-a-path-is-subdirectory-of-another-in-node-js#45242825
const relative = path.relative(parent, arbitraryDir);
const isSubdir = !!relative && !relative.startsWith('..') && !path.isAbsolute(relative);
diff --git a/src/node/utils/Cli.js b/src/node/utils/Cli.js
index 04c532fa007..6297a4f8ce4 100644
--- a/src/node/utils/Cli.js
+++ b/src/node/utils/Cli.js
@@ -22,30 +22,30 @@
// An object containing the parsed command-line options
exports.argv = {};
-var argv = process.argv.slice(2);
-var arg, prevArg;
+const argv = process.argv.slice(2);
+let arg, prevArg;
// Loop through args
-for ( var i = 0; i < argv.length; i++ ) {
+for (let i = 0; i < argv.length; i++) {
arg = argv[i];
// Override location of settings.json file
- if ( prevArg == '--settings' || prevArg == '-s' ) {
+ if (prevArg == '--settings' || prevArg == '-s') {
exports.argv.settings = arg;
}
// Override location of credentials.json file
- if ( prevArg == '--credentials' ) {
+ if (prevArg == '--credentials') {
exports.argv.credentials = arg;
}
// Override location of settings.json file
- if ( prevArg == '--sessionkey' ) {
+ if (prevArg == '--sessionkey') {
exports.argv.sessionkey = arg;
}
// Override location of settings.json file
- if ( prevArg == '--apikey' ) {
+ if (prevArg == '--apikey') {
exports.argv.apikey = arg;
}
diff --git a/src/node/utils/ExportEtherpad.js b/src/node/utils/ExportEtherpad.js
index 0e8ef3bf1c2..ace298ab748 100644
--- a/src/node/utils/ExportEtherpad.js
+++ b/src/node/utils/ExportEtherpad.js
@@ -15,40 +15,39 @@
*/
-let db = require("../db/DB");
+const db = require('../db/DB');
+const hooks = require('ep_etherpad-lite/static/js/pluginfw/hooks');
-exports.getPadRaw = async function(padId) {
+exports.getPadRaw = async function (padId) {
+ const padKey = `pad:${padId}`;
+ const padcontent = await db.get(padKey);
- let padKey = "pad:" + padId;
- let padcontent = await db.get(padKey);
-
- let records = [ padKey ];
+ const records = [padKey];
for (let i = 0; i <= padcontent.head; i++) {
- records.push(padKey + ":revs:" + i);
+ records.push(`${padKey}:revs:${i}`);
}
for (let i = 0; i <= padcontent.chatHead; i++) {
- records.push(padKey + ":chat:" + i);
+ records.push(`${padKey}:chat:${i}`);
}
- let data = {};
- for (let key of records) {
-
+ const data = {};
+ for (const key of records) {
// For each piece of info about a pad.
- let entry = data[key] = await db.get(key);
+ const entry = data[key] = await db.get(key);
// Get the Pad Authors
if (entry.pool && entry.pool.numToAttrib) {
- let authors = entry.pool.numToAttrib;
+ const authors = entry.pool.numToAttrib;
- for (let k of Object.keys(authors)) {
- if (authors[k][0] === "author") {
- let authorId = authors[k][1];
+ for (const k of Object.keys(authors)) {
+ if (authors[k][0] === 'author') {
+ const authorId = authors[k][1];
// Get the author info
- let authorEntry = await db.get("globalAuthor:" + authorId);
+ const authorEntry = await db.get(`globalAuthor:${authorId}`);
if (authorEntry) {
- data["globalAuthor:" + authorId] = authorEntry;
+ data[`globalAuthor:${authorId}`] = authorEntry;
if (authorEntry.padIDs) {
authorEntry.padIDs = padId;
}
@@ -58,5 +57,13 @@ exports.getPadRaw = async function(padId) {
}
}
+ // get content that has a different prefix, i.e. comments:padId:foo
+ // a plugin would return something like ['comments', 'cakes']
+ const prefixes = await hooks.aCallAll('exportEtherpadAdditionalContent');
+ await Promise.all(prefixes.map(async (prefix) => {
+ const key = `${prefix}:${padId}`;
+ data[key] = await db.get(key);
+ }));
+
return data;
-}
+};
diff --git a/src/node/utils/ExportHelper.js b/src/node/utils/ExportHelper.js
index f6ec4486ed8..e498d4c4263 100644
--- a/src/node/utils/ExportHelper.js
+++ b/src/node/utils/ExportHelper.js
@@ -18,24 +18,23 @@
* limitations under the License.
*/
-var Changeset = require("ep_etherpad-lite/static/js/Changeset");
+const Changeset = require('ep_etherpad-lite/static/js/Changeset');
-exports.getPadPlainText = function(pad, revNum){
- var _analyzeLine = exports._analyzeLine;
- var atext = ((revNum !== undefined) ? pad.getInternalRevisionAText(revNum) : pad.atext);
- var textLines = atext.text.slice(0, -1).split('\n');
- var attribLines = Changeset.splitAttributionLines(atext.attribs, atext.text);
- var apool = pad.pool;
+exports.getPadPlainText = function (pad, revNum) {
+ const _analyzeLine = exports._analyzeLine;
+ const atext = ((revNum !== undefined) ? pad.getInternalRevisionAText(revNum) : pad.atext);
+ const textLines = atext.text.slice(0, -1).split('\n');
+ const attribLines = Changeset.splitAttributionLines(atext.attribs, atext.text);
+ const apool = pad.pool;
- var pieces = [];
- for (var i = 0; i < textLines.length; i++){
- var line = _analyzeLine(textLines[i], attribLines[i], apool);
- if (line.listLevel){
- var numSpaces = line.listLevel * 2 - 1;
- var bullet = '*';
+ const pieces = [];
+ for (let i = 0; i < textLines.length; i++) {
+ const line = _analyzeLine(textLines[i], attribLines[i], apool);
+ if (line.listLevel) {
+ const numSpaces = line.listLevel * 2 - 1;
+ const bullet = '*';
pieces.push(new Array(numSpaces + 1).join(' '), bullet, ' ', line.text, '\n');
- }
- else{
+ } else {
pieces.push(line.text, '\n');
}
}
@@ -44,38 +43,37 @@ exports.getPadPlainText = function(pad, revNum){
};
-exports._analyzeLine = function(text, aline, apool){
- var line = {};
+exports._analyzeLine = function (text, aline, apool) {
+ const line = {};
// identify list
- var lineMarker = 0;
+ let lineMarker = 0;
line.listLevel = 0;
- if (aline){
- var opIter = Changeset.opIterator(aline);
- if (opIter.hasNext()){
- var listType = Changeset.opAttributeValue(opIter.next(), 'list', apool);
- if (listType){
+ if (aline) {
+ const opIter = Changeset.opIterator(aline);
+ if (opIter.hasNext()) {
+ let listType = Changeset.opAttributeValue(opIter.next(), 'list', apool);
+ if (listType) {
lineMarker = 1;
listType = /([a-z]+)([0-9]+)/.exec(listType);
- if (listType){
+ if (listType) {
line.listTypeName = listType[1];
line.listLevel = Number(listType[2]);
}
}
}
- var opIter2 = Changeset.opIterator(aline);
- if (opIter2.hasNext()){
- var start = Changeset.opAttributeValue(opIter2.next(), 'start', apool);
- if (start){
- line.start = start;
+ const opIter2 = Changeset.opIterator(aline);
+ if (opIter2.hasNext()) {
+ const start = Changeset.opAttributeValue(opIter2.next(), 'start', apool);
+ if (start) {
+ line.start = start;
}
}
}
- if (lineMarker){
+ if (lineMarker) {
line.text = text.substring(1);
line.aline = Changeset.subattribution(aline, 1);
- }
- else{
+ } else {
line.text = text;
line.aline = aline;
}
@@ -83,8 +81,6 @@ exports._analyzeLine = function(text, aline, apool){
};
-exports._encodeWhitespace = function(s){
- return s.replace(/[^\x21-\x7E\s\t\n\r]/gu, function(c){
- return "" +c.codePointAt(0) + ";";
- });
+exports._encodeWhitespace = function (s) {
+ return s.replace(/[^\x21-\x7E\s\t\n\r]/gu, (c) => `${c.codePointAt(0)};`);
};
diff --git a/src/node/utils/ExportHtml.js b/src/node/utils/ExportHtml.js
index d0ebf20de8a..2f5a77c9ac5 100644
--- a/src/node/utils/ExportHtml.js
+++ b/src/node/utils/ExportHtml.js
@@ -14,17 +14,17 @@
* limitations under the License.
*/
-var Changeset = require("ep_etherpad-lite/static/js/Changeset");
-var padManager = require("../db/PadManager");
-var _ = require('underscore');
-var Security = require('ep_etherpad-lite/static/js/security');
-var hooks = require('ep_etherpad-lite/static/js/pluginfw/hooks');
-var eejs = require('ep_etherpad-lite/node/eejs');
-var _analyzeLine = require('./ExportHelper')._analyzeLine;
-var _encodeWhitespace = require('./ExportHelper')._encodeWhitespace;
-
-async function getPadHTML(pad, revNum)
-{
+const Changeset = require('ep_etherpad-lite/static/js/Changeset');
+const padManager = require('../db/PadManager');
+const _ = require('underscore');
+const Security = require('ep_etherpad-lite/static/js/security');
+const hooks = require('ep_etherpad-lite/static/js/pluginfw/hooks');
+const eejs = require('ep_etherpad-lite/node/eejs');
+const _analyzeLine = require('./ExportHelper')._analyzeLine;
+const _encodeWhitespace = require('./ExportHelper')._encodeWhitespace;
+const padutils = require('../../static/js/pad_utils').padutils;
+
+async function getPadHTML(pad, revNum) {
let atext = pad.atext;
// fetch revision atext
@@ -33,113 +33,110 @@ async function getPadHTML(pad, revNum)
}
// convert atext to html
- return getHTMLFromAtext(pad, atext);
+ return await getHTMLFromAtext(pad, atext);
}
exports.getPadHTML = getPadHTML;
exports.getHTMLFromAtext = getHTMLFromAtext;
-function getHTMLFromAtext(pad, atext, authorColors)
-{
- var apool = pad.apool();
- var textLines = atext.text.slice(0, -1).split('\n');
- var attribLines = Changeset.splitAttributionLines(atext.attribs, atext.text);
-
- var tags = ['h1', 'h2', 'strong', 'em', 'u', 's'];
- var props = ['heading1', 'heading2', 'bold', 'italic', 'underline', 'strikethrough'];
-
- // prepare tags stored as ['tag', true] to be exported
- hooks.aCallAll("exportHtmlAdditionalTags", pad, function(err, newProps){
- newProps.forEach(function (propName, i) {
- tags.push(propName);
- props.push(propName);
- });
- });
+async function getHTMLFromAtext(pad, atext, authorColors) {
+ const apool = pad.apool();
+ const textLines = atext.text.slice(0, -1).split('\n');
+ const attribLines = Changeset.splitAttributionLines(atext.attribs, atext.text);
- // prepare tags stored as ['tag', 'value'] to be exported. This will generate HTML
- // with tags like
- hooks.aCallAll("exportHtmlAdditionalTagsWithData", pad, function(err, newProps){
- newProps.forEach(function (propName, i) {
- tags.push('span data-' + propName[0] + '="' + propName[1] + '"');
- props.push(propName);
- });
- });
+ const tags = ['h1', 'h2', 'strong', 'em', 'u', 's'];
+ const props = ['heading1', 'heading2', 'bold', 'italic', 'underline', 'strikethrough'];
+
+ await Promise.all([
+ // prepare tags stored as ['tag', true] to be exported
+ hooks.aCallAll('exportHtmlAdditionalTags', pad).then((newProps) => {
+ newProps.forEach((prop) => {
+ tags.push(prop);
+ props.push(prop);
+ });
+ }),
+ // prepare tags stored as ['tag', 'value'] to be exported. This will generate HTML with tags
+ // like
+ hooks.aCallAll('exportHtmlAdditionalTagsWithData', pad).then((newProps) => {
+ newProps.forEach((prop) => {
+ tags.push(`span data-${prop[0]}="${prop[1]}"`);
+ props.push(prop);
+ });
+ }),
+ ]);
// holds a map of used styling attributes (*1, *2, etc) in the apool
// and maps them to an index in props
// *3:2 -> the attribute *3 means strong
// *2:5 -> the attribute *2 means s(trikethrough)
- var anumMap = {};
- var css = "";
+ const anumMap = {};
+ let css = '';
- var stripDotFromAuthorID = function(id){
- return id.replace(/\./g,'_');
+ const stripDotFromAuthorID = function (id) {
+ return id.replace(/\./g, '_');
};
- if(authorColors){
- css+="";
+ css += '';
}
// iterates over all props(h1,h2,strong,...), checks if it is used in
// this pad, and if yes puts its attrib id->props value into anumMap
- props.forEach(function (propName, i)
- {
- var attrib = [propName, true];
+ props.forEach((propName, i) => {
+ let attrib = [propName, true];
if (_.isArray(propName)) {
// propName can be in the form of ['color', 'red'],
// see hook exportHtmlAdditionalTagsWithData
attrib = propName;
}
- var propTrueNum = apool.putAttrib(attrib, true);
- if (propTrueNum >= 0)
- {
+ const propTrueNum = apool.putAttrib(attrib, true);
+ if (propTrueNum >= 0) {
anumMap[propTrueNum] = i;
}
});
- function getLineHTML(text, attribs)
- {
+ function getLineHTML(text, attribs) {
// Use order of tags (b/i/u) as order of nesting, for simplicity
// and decent nesting. For example,
// Just boldBold and italicsJust italics
// becomes
// Just bold Bold and italicsJust italics
- var taker = Changeset.stringIterator(text);
- var assem = Changeset.stringAssembler();
- var openTags = [];
+ const taker = Changeset.stringIterator(text);
+ const assem = Changeset.stringAssembler();
+ const openTags = [];
- function getSpanClassFor(i){
- //return if author colors are disabled
+ function getSpanClassFor(i) {
+ // return if author colors are disabled
if (!authorColors) return false;
- var property = props[i];
+ const property = props[i];
// we are not insterested on properties in the form of ['color', 'red'],
// see hook exportHtmlAdditionalTagsWithData
@@ -147,12 +144,12 @@ function getHTMLFromAtext(pad, atext, authorColors)
return false;
}
- if(property.substr(0,6) === "author"){
+ if (property.substr(0, 6) === 'author') {
return stripDotFromAuthorID(property);
}
- if(property === "removed"){
- return "removed";
+ if (property === 'removed') {
+ return 'removed';
}
return false;
@@ -160,17 +157,16 @@ function getHTMLFromAtext(pad, atext, authorColors)
// tags added by exportHtmlAdditionalTagsWithData will be exported as with
// data attributes
- function isSpanWithData(i){
- var property = props[i];
+ function isSpanWithData(i) {
+ const property = props[i];
return _.isArray(property);
}
- function emitOpenTag(i)
- {
+ function emitOpenTag(i) {
openTags.unshift(i);
- var spanClass = getSpanClassFor(i);
+ const spanClass = getSpanClassFor(i);
- if(spanClass){
+ if (spanClass) {
assem.append('');
@@ -182,13 +178,12 @@ function getHTMLFromAtext(pad, atext, authorColors)
}
// this closes an open tag and removes its reference from openTags
- function emitCloseTag(i)
- {
+ function emitCloseTag(i) {
openTags.shift();
- var spanClass = getSpanClassFor(i);
- var spanWithData = isSpanWithData(i);
+ const spanClass = getSpanClassFor(i);
+ const spanWithData = isSpanWithData(i);
- if(spanClass || spanWithData){
+ if (spanClass || spanWithData) {
assem.append('');
} else {
assem.append('');
@@ -197,93 +192,78 @@ function getHTMLFromAtext(pad, atext, authorColors)
}
}
- var urls = _findURLs(text);
+ const urls = padutils.findURLs(text);
- var idx = 0;
+ let idx = 0;
- function processNextChars(numChars)
- {
- if (numChars <= 0)
- {
+ function processNextChars(numChars) {
+ if (numChars <= 0) {
return;
}
- var iter = Changeset.opIterator(Changeset.subattribution(attribs, idx, idx + numChars));
+ const iter = Changeset.opIterator(Changeset.subattribution(attribs, idx, idx + numChars));
idx += numChars;
// this iterates over every op string and decides which tags to open or to close
// based on the attribs used
- while (iter.hasNext())
- {
- var o = iter.next();
+ while (iter.hasNext()) {
+ const o = iter.next();
var usedAttribs = [];
// mark all attribs as used
- Changeset.eachAttribNumber(o.attribs, function (a)
- {
- if (a in anumMap)
- {
+ Changeset.eachAttribNumber(o.attribs, (a) => {
+ if (a in anumMap) {
usedAttribs.push(anumMap[a]); // i = 0 => bold, etc.
}
});
- var outermostTag = -1;
+ let outermostTag = -1;
// find the outer most open tag that is no longer used
- for (var i = openTags.length - 1; i >= 0; i--)
- {
- if (usedAttribs.indexOf(openTags[i]) === -1)
- {
+ for (var i = openTags.length - 1; i >= 0; i--) {
+ if (usedAttribs.indexOf(openTags[i]) === -1) {
outermostTag = i;
break;
}
}
// close all tags upto the outer most
- if (outermostTag !== -1)
- {
- while ( outermostTag >= 0 )
- {
+ if (outermostTag !== -1) {
+ while (outermostTag >= 0) {
emitCloseTag(openTags[0]);
outermostTag--;
}
}
// open all tags that are used but not open
- for (i=0; i < usedAttribs.length; i++)
- {
- if (openTags.indexOf(usedAttribs[i]) === -1)
- {
+ for (i = 0; i < usedAttribs.length; i++) {
+ if (openTags.indexOf(usedAttribs[i]) === -1) {
emitOpenTag(usedAttribs[i]);
}
}
- var chars = o.chars;
- if (o.lines)
- {
+ let chars = o.chars;
+ if (o.lines) {
chars--; // exclude newline at end of line, if present
}
- var s = taker.take(chars);
+ let s = taker.take(chars);
- //removes the characters with the code 12. Don't know where they come
- //from but they break the abiword parser and are completly useless
- s = s.replace(String.fromCharCode(12), "");
+ // removes the characters with the code 12. Don't know where they come
+ // from but they break the abiword parser and are completely useless
+ s = s.replace(String.fromCharCode(12), '');
assem.append(_encodeWhitespace(Security.escapeHTML(s)));
} // end iteration over spans in line
// close all the tags that are open after the last op
- while (openTags.length > 0)
- {
+ while (openTags.length > 0) {
emitCloseTag(openTags[0]);
}
} // end processNextChars
- if (urls)
- {
- urls.forEach(function (urlData)
- {
- var startIndex = urlData[0];
- var url = urlData[1];
- var urlLength = url.length;
+ if (urls) {
+ urls.forEach((urlData) => {
+ const startIndex = urlData[0];
+ const url = urlData[1];
+ const urlLength = url.length;
processNextChars(startIndex - idx);
// Using rel="noreferrer" stops leaking the URL/location of the exported HTML when clicking links in the document.
// Not all browsers understand this attribute, but it's part of the HTML5 standard.
@@ -292,16 +272,16 @@ function getHTMLFromAtext(pad, atext, authorColors)
// https://html.spec.whatwg.org/multipage/links.html#link-type-noopener
// https://mathiasbynens.github.io/rel-noopener/
// https://github.com/ether/etherpad-lite/pull/3636
- assem.append('');
+ assem.append(``);
processNextChars(urlLength);
assem.append('');
});
}
processNextChars(text.length - idx);
-
+
return _processSpaces(assem.toString());
} // end getLineHTML
- var pieces = [css];
+ const pieces = [css];
// Need to deal with constraints imposed on HTML lists; can
// only gain one level of nesting at once, can't change type
@@ -310,57 +290,48 @@ function getHTMLFromAtext(pad, atext, authorColors)
// so we want to do something reasonable there. We also
// want to deal gracefully with blank lines.
// => keeps track of the parents level of indentation
- var openLists = [];
- for (var i = 0; i < textLines.length; i++)
- {
+ let openLists = [];
+ for (let i = 0; i < textLines.length; i++) {
var context;
var line = _analyzeLine(textLines[i], attribLines[i], apool);
- var lineContent = getLineHTML(line.text, line.aline);
- if (line.listLevel)//If we are inside a list
+ const lineContent = getLineHTML(line.text, line.aline);
+ if (line.listLevel)// If we are inside a list
{
context = {
- line: line,
- lineContent: lineContent,
- apool: apool,
+ line,
+ lineContent,
+ apool,
attribLine: attribLines[i],
text: textLines[i],
- padId: pad.id
+ padId: pad.id,
};
- var prevLine = null;
- var nextLine = null;
- if (i > 0)
- {
- prevLine = _analyzeLine(textLines[i -1], attribLines[i -1], apool);
+ let prevLine = null;
+ let nextLine = null;
+ if (i > 0) {
+ prevLine = _analyzeLine(textLines[i - 1], attribLines[i - 1], apool);
}
- if (i < textLines.length)
- {
+ if (i < textLines.length) {
nextLine = _analyzeLine(textLines[i + 1], attribLines[i + 1], apool);
}
- hooks.aCallAll('getLineHTMLForExport', context);
- //To create list parent elements
- if ((!prevLine || prevLine.listLevel !== line.listLevel) || (prevLine && line.listTypeName !== prevLine.listTypeName))
- {
- var exists = _.find(openLists, function (item)
- {
- return (item.level === line.listLevel && item.type === line.listTypeName);
- });
+ await hooks.aCallAll('getLineHTMLForExport', context);
+ // To create list parent elements
+ if ((!prevLine || prevLine.listLevel !== line.listLevel) || (prevLine && line.listTypeName !== prevLine.listTypeName)) {
+ const exists = _.find(openLists, (item) => (item.level === line.listLevel && item.type === line.listTypeName));
if (!exists) {
- var prevLevel = 0;
+ let prevLevel = 0;
if (prevLine && prevLine.listLevel) {
prevLevel = prevLine.listLevel;
}
- if (prevLine && line.listTypeName !== prevLine.listTypeName)
- {
+ if (prevLine && line.listTypeName !== prevLine.listTypeName) {
prevLevel = 0;
}
for (var diff = prevLevel; diff < line.listLevel; diff++) {
openLists.push({level: diff, type: line.listTypeName});
- var prevPiece = pieces[pieces.length - 1];
+ const prevPiece = pieces[pieces.length - 1];
- if (prevPiece.indexOf("
") === 0)
- {
- /*
+ if (prevPiece.indexOf('
') === 0) {
+ /*
uncommenting this breaks nested ols..
if the previous item is NOT a ul, NOT an ol OR closing li then close the list
so we consider this HTML, I inserted ** where it throws a problem in Example Wrong..
@@ -376,19 +347,16 @@ function getHTMLFromAtext(pad, atext, authorColors)
// pieces.push("");
*/
- if( (nextLine.listTypeName === 'number') && (nextLine.text === '') ){
+ if ((nextLine.listTypeName === 'number') && (nextLine.text === '')) {
// is the listTypeName check needed here? null text might be completely fine!
// TODO Check against Uls
// don't do anything because the next item is a nested ol openener so we need to keep the li open
- }else{
- pieces.push("
");
+ } else {
+ pieces.push('
');
}
-
-
}
- if (line.listTypeName === "number")
- {
+ if (line.listTypeName === 'number') {
// We introduce line.start here, this is useful for continuing Ordered list line numbers
// in case you have a bullet in a list IE you Want
// 1. hello
@@ -399,182 +367,140 @@ function getHTMLFromAtext(pad, atext, authorColors)
// TODO: This logic could also be used to continue OL with indented content
// but that's a job for another day....
- if(line.start){
- pieces.push("");
- }else{
- pieces.push("");
+ if (line.start) {
+ pieces.push(``);
+ } else {
+ pieces.push(``);
}
- }
- else
- {
- pieces.push("
");
+ } else {
+ pieces.push(`
`);
}
}
}
}
// if we're going up a level we shouldn't be adding..
- if(context.lineContent){
- pieces.push("
", context.lineContent);
+ if (context.lineContent) {
+ pieces.push('
', context.lineContent);
}
// To close list elements
- if (nextLine && nextLine.listLevel === line.listLevel && line.listTypeName === nextLine.listTypeName)
- {
- if(context.lineContent){
- if( (nextLine.listTypeName === 'number') && (nextLine.text === '') ){
+ if (nextLine && nextLine.listLevel === line.listLevel && line.listTypeName === nextLine.listTypeName) {
+ if (context.lineContent) {
+ if ((nextLine.listTypeName === 'number') && (nextLine.text === '')) {
// is the listTypeName check needed here? null text might be completely fine!
// TODO Check against Uls
// don't do anything because the next item is a nested ol openener so we need to keep the li open
- }else{
- pieces.push("