Compare commits

..

224 Commits

Author SHA1 Message Date
Jamie Curnow
538d28d32d Refactor from Promises to async/await 2025-09-11 14:13:54 +10:00
Jamie Curnow
a7d4fd55d9 Fix proxy hosts routes throwing errors 2025-09-11 08:16:11 +10:00
Jamie Curnow
9682de1830 Biome update 2025-09-10 21:38:02 +10:00
Jamie Curnow
cde7460b5e Fix cypress tests following user wizard changes 2025-09-10 21:32:16 +10:00
Jamie Curnow
ca84e3a146 User Permissions Modal 2025-09-09 15:13:34 +10:00
Jamie Curnow
fa11945235 Introducing the Setup Wizard for creating the first user
- no longer sets up a default user
- still possible with env vars, however
2025-09-09 13:44:35 +10:00
Jamie Curnow
432afe73ad User table polishing, user delete modal 2025-09-04 14:59:01 +10:00
Jamie Curnow
5a01da2916 Notification toasts, nicer loading, add new user support 2025-09-04 12:11:39 +10:00
Jamie Curnow
ebd9148813 React 2025-09-03 14:02:14 +10:00
Jamie Curnow
a12553fec7 Convert backend to ESM
- About 5 years overdue
- Remove eslint, use biomejs instead
2025-09-03 13:59:40 +10:00
jc21
5b6ca1bf00 Merge pull request #4664 from JMDirksen/develop
Fix initial email with uppercase
2025-08-22 12:38:22 +10:00
jc21
5039738aa3 Merge pull request #4696 from NginxProxyManager/dependabot/npm_and_yarn/test/tmp-0.2.4
Bump tmp from 0.2.3 to 0.2.4 in /test
2025-08-22 12:34:03 +10:00
jc21
4451be8f1c Merge pull request #4722 from NginxProxyManager/dependabot/npm_and_yarn/frontend/cipher-base-1.0.6
Bump cipher-base from 1.0.4 to 1.0.6 in /frontend
2025-08-22 12:22:49 +10:00
jc21
bee2fd1978 Merge pull request #4723 from NginxProxyManager/dependabot/npm_and_yarn/frontend/sha.js-2.4.12
Bump sha.js from 2.4.11 to 2.4.12 in /frontend
2025-08-22 12:22:39 +10:00
dependabot[bot]
c8adbdfc15 Bump sha.js from 2.4.11 to 2.4.12 in /frontend
Bumps [sha.js](https://github.com/crypto-browserify/sha.js) from 2.4.11 to 2.4.12.
- [Changelog](https://github.com/browserify/sha.js/blob/master/CHANGELOG.md)
- [Commits](https://github.com/crypto-browserify/sha.js/compare/v2.4.11...v2.4.12)

---
updated-dependencies:
- dependency-name: sha.js
  dependency-version: 2.4.12
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-08-21 15:45:37 +00:00
dependabot[bot]
aff4182ab8 Bump cipher-base from 1.0.4 to 1.0.6 in /frontend
Bumps [cipher-base](https://github.com/crypto-browserify/cipher-base) from 1.0.4 to 1.0.6.
- [Changelog](https://github.com/browserify/cipher-base/blob/master/CHANGELOG.md)
- [Commits](https://github.com/crypto-browserify/cipher-base/compare/v1.0.4...v1.0.6)

---
updated-dependencies:
- dependency-name: cipher-base
  dependency-version: 1.0.6
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-08-21 15:13:31 +00:00
Jamie Curnow
8c9d2745e2 Fix remote execution bug where email address can contain malicious code
also convert almost all cmd execs for certificates to properly escape arguments
2025-08-20 10:57:24 +10:00
dependabot[bot]
076d14b5e4 Bump tmp from 0.2.3 to 0.2.4 in /test
Bumps [tmp](https://github.com/raszi/node-tmp) from 0.2.3 to 0.2.4.
- [Changelog](https://github.com/raszi/node-tmp/blob/master/CHANGELOG.md)
- [Commits](https://github.com/raszi/node-tmp/compare/v0.2.3...v0.2.4)

---
updated-dependencies:
- dependency-name: tmp
  dependency-version: 0.2.4
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-08-06 17:12:10 +00:00
JMDirksen
8a6d815152 Fix initial email with upper case 2025-07-20 08:36:43 +02:00
Jamie Curnow
54d463ac36 Safer and flexible boolean env vars 2025-07-09 21:27:50 +10:00
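In this context, "flexible" boolean env vars usually means accepting several truthy/falsy spellings rather than comparing against a single literal. A minimal sketch of such a parser; the helper name, the accepted spellings and the example variable are assumptions, not necessarily what this commit ships:

```js
// Hypothetical helper - name and accepted spellings are illustrative only.
const parseBoolEnv = (name, defaultValue = false) => {
	const raw = process.env[name];
	if (typeof raw === "undefined" || raw.trim() === "") {
		return defaultValue;
	}
	const normalized = raw.trim().toLowerCase();
	if (["1", "true", "yes", "on"].includes(normalized)) return true;
	if (["0", "false", "no", "off"].includes(normalized)) return false;
	return defaultValue; // unknown values fall back instead of throwing
};

// Example usage with a made-up variable name:
console.log(parseBoolEnv("SOME_FEATURE_FLAG", true));
```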
Jamie Curnow
a23dc24021 Tweak ownership output 2025-07-09 21:01:21 +10:00
Jamie Curnow
4f9df893c8 Ownership script shakeup
- Don't touch a file to determine if we need to run
- Instead, check ownership of each location and skip it if we are happy
- Keeping SKIP_CERTBOT_OWNERSHIP flag
- More verbose logging of outcomes
2025-07-09 20:30:27 +10:00
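The ownership logic itself lives in the container's startup scripts (shell). Purely to illustrate the approach described above, inspect each location and only chown when the owner differs, here is a Node sketch with made-up paths and IDs:

```js
import { execFileSync } from "node:child_process";
import fs from "node:fs";

// Illustrative values only - the real startup script derives these differently.
const WANT_UID = 1000;
const WANT_GID = 1000;
const LOCATIONS = ["/data", "/etc/letsencrypt"];

for (const location of LOCATIONS) {
	if (!fs.existsSync(location)) continue;
	const stat = fs.statSync(location);
	if (stat.uid === WANT_UID && stat.gid === WANT_GID) {
		// Ownership already correct - skip the expensive recursive chown.
		console.log(`skipping ${location}, ownership ok`);
		continue;
	}
	console.log(`fixing ownership of ${location}`);
	execFileSync("chown", ["-R", `${WANT_UID}:${WANT_GID}`, location]);
}
```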
Jamie Curnow
304b38e82b Fix ownership if statement 2025-07-09 18:19:50 +10:00
jc21
1b0929ade6 Merge branch 'master' into develop 2025-07-09 16:36:26 +10:00
Jamie Curnow
ddbafb62a6 bump version 2025-07-09 16:33:50 +10:00
Jamie Curnow
9a0383bc73 Move SKIP_CERTBOT_OWNERSHIP check around the entire certbot code 2025-07-09 16:30:45 +10:00
jc21
307cb94e84 Merge pull request #4651 from NginxProxyManager/develop
v2.12.5
2025-07-09 14:22:26 +10:00
jc21
63ae924fbc Merge branch 'master' into develop 2025-07-09 13:16:38 +10:00
Jamie Curnow
1710a263c0 Bump version 2025-07-09 13:15:15 +10:00
Jamie Curnow
1357774f21 Add SKIP_CERTBOT_OWNERSHIP env var support to skip certbot folder ownership 2025-07-09 13:14:27 +10:00
Jamie Curnow
5f54490d86 Set SETUPTOOLS_USE_DISTUTILS for all plugin installs, seems like they all need it. 2025-07-09 12:35:20 +10:00
Jamie Curnow
c97b8a339d Some auto formatting changes suggested by ide 2025-07-09 11:34:57 +10:00
Jamie Curnow
ed1d90ee7f Fix powerdns dns plugin install, deps are outrageously old ;(
2025-07-09 11:34:19 +10:00
Jamie Curnow
70894e55b8 Remove cloudflare dep for certbot plugin, tested 2025-07-09 09:36:57 +10:00
Jamie Curnow
817021a43d Update s6 overlay
2025-07-08 17:32:23 +10:00
Jamie Curnow
36e3449a56 Update cloudflare dependency 2025-07-08 17:14:20 +10:00
Jamie Curnow
db9f25638f Update PR comments to highlight verification requirements 2025-07-08 17:08:31 +10:00
jc21
ddd3355d95 Merge pull request #4645 from NginxProxyManager/revert-4574-develop
Revert "Update 'global/certbot-dns-plugins.json' to apply SSL certs for CloudFlare."
2025-07-08 11:19:53 +10:00
jc21
aade8b42fc Revert "Update 'global/certbot-dns-plugins.json' to apply SSL certs for CloudFlare." 2025-07-08 10:26:46 +10:00
Jamie Curnow
3735f3c11d Formatting for ownership script 2025-07-08 09:44:10 +10:00
jc21
b84762b5b9 Merge pull request #4605 from NginxProxyManager/develop
v2.12.4
2025-07-01 11:12:08 +10:00
jc21
953faeac15 Merge branch 'master' into develop 2025-07-01 07:33:33 +10:00
Jamie Curnow
c58f3f3ec9 Bump version 2025-07-01 07:32:39 +10:00
jc21
0ee4d04d5f Merge pull request #4491 from addievo/fix-certbot-startup-time
fix: optimize certbot ownership script to reduce container startup time
2025-06-30 15:31:09 +10:00
jc21
94f6756250 Merge pull request #4557 from 1ukastesar/patch-1
fix(modal): make textarea font actually monospace
2025-06-30 15:19:05 +10:00
jc21
27e3f73854 Merge pull request #4353 from mordyovits/patch-1
Update frontend copyright year to 2025
2025-06-30 14:57:07 +10:00
jc21
d98f4b43dc Merge pull request #4398 from cg-zhou/feature/add-ip-ranges-env-var
Added IP_RANGES_FETCH_ENABLED environment variable
2025-06-30 14:54:40 +10:00
jc21
ff3116a626 Merge pull request #4604 from NginxProxyManager/dependabot/npm_and_yarn/backend/brace-expansion-1.1.12
Bump brace-expansion from 1.1.11 to 1.1.12 in /backend
2025-06-30 14:47:58 +10:00
jc21
7047750b04 Merge pull request #4358 from pustekuchen91/update-cpanel-certbot-plugin
use latest certbot-dns-cpanel version
2025-06-30 14:43:48 +10:00
cg-zhou
0792fc0768 Remove unnecessary Promise.resolve() calls 2025-06-30 12:31:23 +08:00
dependabot[bot]
9758c12ca3 Bump brace-expansion from 1.1.11 to 1.1.12 in /backend
Bumps [brace-expansion](https://github.com/juliangruber/brace-expansion) from 1.1.11 to 1.1.12.
- [Release notes](https://github.com/juliangruber/brace-expansion/releases)
- [Commits](https://github.com/juliangruber/brace-expansion/compare/1.1.11...v1.1.12)

---
updated-dependencies:
- dependency-name: brace-expansion
  dependency-version: 1.1.12
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-06-30 03:55:09 +00:00
jc21
ccd69c8867 Update certbot-dns-plugins.json 2025-06-30 13:52:07 +10:00
jc21
23fd1fec6c Merge branch 'develop' into update-cpanel-certbot-plugin 2025-06-30 13:51:19 +10:00
jc21
6f04543744 Merge pull request #4368 from wisewtf/patch-1
Fixed error in sqlite installation compose file
2025-06-30 13:49:40 +10:00
jc21
cbb1fe44ca Merge pull request #4381 from ZeroDeng01/ZeroDeng01-patch-1
Fixed an issue with the 500 error code on the Stream list page
2025-06-30 13:45:05 +10:00
jc21
4c23f22d5b Merge pull request #4601 from NginxProxyManager/dependabot/npm_and_yarn/test/axios-1.10.0
Bump axios from 1.7.7 to 1.10.0 in /test
2025-06-30 13:13:02 +10:00
jc21
af5d3eccd6 Merge pull request #4602 from NginxProxyManager/dependabot/npm_and_yarn/docs/vite-5.4.19
Bump vite from 5.4.14 to 5.4.19 in /docs
2025-06-30 13:12:51 +10:00
jc21
a87283b030 Merge pull request #4603 from NginxProxyManager/dependabot/npm_and_yarn/frontend/elliptic-6.6.1
Bump elliptic from 6.6.0 to 6.6.1 in /frontend
2025-06-30 13:12:42 +10:00
Jamie Curnow
97dbbdd60f Fix incorrect swagger for streams list 2025-06-30 13:00:25 +10:00
Jamie Curnow
ec81f2489a Add cypress test to list streams 2025-06-30 11:10:45 +10:00
dependabot[bot]
d0ec8e89aa Bump elliptic from 6.6.0 to 6.6.1 in /frontend
Bumps [elliptic](https://github.com/indutny/elliptic) from 6.6.0 to 6.6.1.
- [Commits](https://github.com/indutny/elliptic/compare/v6.6.0...v6.6.1)

---
updated-dependencies:
- dependency-name: elliptic
  dependency-version: 6.6.1
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-06-30 00:56:57 +00:00
dependabot[bot]
9a96fbb5f4 Bump vite from 5.4.14 to 5.4.19 in /docs
---
updated-dependencies:
- dependency-name: vite
  dependency-version: 5.4.19
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-06-30 00:56:15 +00:00
dependabot[bot]
a573450bb8 Bump axios from 1.7.7 to 1.10.0 in /test
Bumps [axios](https://github.com/axios/axios) from 1.7.7 to 1.10.0.
- [Release notes](https://github.com/axios/axios/releases)
- [Changelog](https://github.com/axios/axios/blob/v1.x/CHANGELOG.md)
- [Commits](https://github.com/axios/axios/compare/v1.7.7...v1.10.0)

---
updated-dependencies:
- dependency-name: axios
  dependency-version: 1.10.0
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-06-30 00:54:38 +00:00
jc21
60a25ffbd5 Merge pull request #4560 from spions/patch-1
Added Selectel v2  DNS provider
2025-06-30 10:49:40 +10:00
jc21
7d2369b380 Merge pull request #4576 from NginxProxyManager/dependabot/npm_and_yarn/test/brace-expansion-1.1.12
Bump brace-expansion from 1.1.11 to 1.1.12 in /test
2025-06-30 10:49:20 +10:00
jc21
64f00e8dba Merge pull request #4577 from h33n0k/develop
Fix Incorrect Api status codes
2025-06-30 10:49:09 +10:00
jc21
c99143f548 Merge pull request #4596 from NginxProxyManager/dependabot/npm_and_yarn/frontend/pbkdf2-3.1.3
Bump pbkdf2 from 3.1.1 to 3.1.3 in /frontend
2025-06-30 10:48:57 +10:00
jc21
cc4ee6919a Merge pull request #4597 from aitor422/develop
added CDMon DNS provider
2025-06-30 08:49:03 +10:00
jc21
8a69c65b40 Merge pull request #4551 from MinhPho/feature/update-strato-dns-plugin
Update strato dns plugin from 0.2.1 to 0.2.2
2025-06-30 08:09:39 +10:00
jc21
95ee5ca958 Merge pull request #4553 from gustavfroding/develop
Added spaceship DNS provider
2025-06-30 08:09:17 +10:00
jc21
40f22d30c4 Merge pull request #4574 from tom-kst/develop
Update 'global/certbot-dns-plugins.json' to apply SSL certs for CloudFlare.
2025-06-30 08:08:18 +10:00
aitor422
30dfa9e3de added CDMon DNS provider 2025-06-25 13:32:14 +02:00
dependabot[bot]
b873499feb Bump pbkdf2 from 3.1.1 to 3.1.3 in /frontend
Bumps [pbkdf2](https://github.com/crypto-browserify/pbkdf2) from 3.1.1 to 3.1.3.
- [Changelog](https://github.com/browserify/pbkdf2/blob/master/CHANGELOG.md)
- [Commits](https://github.com/crypto-browserify/pbkdf2/compare/v3.1.1...v3.1.3)

---
updated-dependencies:
- dependency-name: pbkdf2
  dependency-version: 3.1.3
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-06-24 09:20:21 +00:00
h33n0k
ef69be2036 Fix Incorrect Api status codes
Update Incorrect status code based off the api schema
2025-06-12 08:58:17 +02:00
dependabot[bot]
7580e65dd4 Bump brace-expansion from 1.1.11 to 1.1.12 in /test
Bumps [brace-expansion](https://github.com/juliangruber/brace-expansion) from 1.1.11 to 1.1.12.
- [Release notes](https://github.com/juliangruber/brace-expansion/releases)
- [Commits](https://github.com/juliangruber/brace-expansion/compare/1.1.11...v1.1.12)

---
updated-dependencies:
- dependency-name: brace-expansion
  dependency-version: 1.1.12
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-06-11 21:02:35 +00:00
Tom.KST
f11dc5d7c1 Update certbot-dns-plugins.json 2025-06-11 17:36:21 +08:00
Tom.KST
77061a7bd6 Update certbot-dns-plugins.json
I tried multiple times and failed, and found that there should be a space before and after the equals sign...
So the correct line is "dns_cloudflare_api_token = 0123456789abcdef0123456789abcdef01234567", instead of "dns_cloudflare_api_token=0123456789abcdef0123456789abcdef01234567"
2025-06-11 17:33:44 +08:00
Oleg
b6afc19135 Added selectel v2 DNS provider 2025-05-28 18:10:21 +03:00
Lukáš Tesař
09ba400d09 fix(modal): make textarea font actually monospace
Modal `textarea` element has this class `text-monospace`, but there is actually no CSS definition that sets the monospace font for it (neither in custom SCSS files, nor in included libs). This commit fixes the issue by setting `monospace` `font-family` for the `textarea`, greatly enhancing UX of configuration editing in UI.
2025-05-26 19:40:08 +02:00
gustavfroding
0291cfc270 Added spaceship DNS provider 2025-05-23 13:18:07 +02:00
jelly_moon
34267e0af9 Update strato dns plugin from 0.2.1 to 0.2.2 2025-05-23 04:43:52 +02:00
jc21
f327c1e825 Merge pull request #4406 from chindocaine/fix_domainoffensive_certbot
Fix DomainOffensive certbot plugin
2025-05-21 20:56:12 +10:00
jc21
6f539979ec Merge pull request #4411 from henmohr/develop
Update cloudflare dns plugin from 2.19.4 to 4.0.*
2025-05-21 20:52:50 +10:00
jc21
3d8079a137 Merge pull request #4426 from foxtrotcz/develop
Updates Active24 plugin to API v2
2025-05-21 20:50:55 +10:00
jc21
6d6d83c0d0 Merge pull request #4435 from amateescu/update-gandi-plugin
Update the Gandi plugin.
2025-05-21 20:50:36 +10:00
jc21
100a4888d0 Merge pull request #4481 from godsgood33/patch-1
Update certbot-dns-plugins.json
2025-05-21 20:50:14 +10:00
jc21
34a46bd733 Merge pull request #4534 from chenghaopeng/develop
add Baidu as DNS provider
2025-05-21 20:48:11 +10:00
jc21
7f8adc7e50 Merge pull request #4538 from astamminger/add_dns_ddnss_plugin
Add DDNSS to the list of supported Providers for DNS-01 Challenges
2025-05-21 20:47:29 +10:00
jc21
98d118cb74 Merge pull request #4540 from hatharry/develop
Add First Domains DNS Provider
2025-05-21 20:47:02 +10:00
jc21
4fb93542c3 Merge pull request #4547 from vzagorovskiy/develop
Added nic.ru DNS provider
2025-05-21 20:46:39 +10:00
vzagorovskiy
4fe305520a Added nic.ru dns provider 2025-05-19 13:18:58 +03:00
A. Stamminger
76be31cf76 Update certbot-dns-plugins.json with dns-ddns plugin
This commit extends the global plugin list with the configuration for
certbot-dns-ddnss (https://pypi.org/project/certbot-dns-ddnss/),
a new plugin providing DNS-01 challenges for ddnss.de
2025-05-12 15:54:10 +02:00
鹏鹏
55dadb2004 Merge pull request #1 from chenghaopeng/dns-baidu
add Baidu as DNS provider
2025-05-11 12:46:27 +08:00
鹏鹏
d9cdb3dc2c add Baidu as DNS provider 2025-05-11 12:45:13 +08:00
Aditya
0cab720f23 fix: optimize certbot ownership script to reduce container startup time
Replace inefficient find/execdir implementation that was causing 3+ minute
startup delays with a more efficient approach that:

1. Uses a flag file to skip redundant operations on container restarts
2. Processes site-packages directories with bulk chown operations instead
   of individual file checks and changes
3. Maintains the same functionality while dramatically improving performance

This change should significantly reduce container startup time while ensuring
all necessary file permissions are still properly set.
2025-04-20 20:38:54 +10:00
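The script in question is a shell init script inside the Docker image; as a rough illustration of the flag-file pattern described in point 1 (paths and names are invented for the sketch, and note that a later commit drops the flag file in favour of per-location ownership checks):

```js
import { execFileSync } from "node:child_process";
import fs from "node:fs";

// Invented paths - only to show the flag-file + bulk chown idea.
const FLAG_FILE = "/data/.certbot-ownership-done";
const CERTBOT_DIRS = ["/opt/certbot/lib/python3.11/site-packages"];

if (fs.existsSync(FLAG_FILE)) {
	// A previous container start already fixed ownership; skip the slow part.
	console.log("certbot ownership already set, skipping");
} else {
	for (const dir of CERTBOT_DIRS) {
		// One recursive chown per directory instead of a find --execdir per file.
		execFileSync("chown", ["-R", "1000:1000", dir]);
	}
	fs.writeFileSync(FLAG_FILE, new Date().toISOString());
}
```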
Ryan P
f5879dff6c Update certbot-dns-plugins.json
Fix for bug #4429: add a cpanel_api_token entry to the credentials check. The documentation will still need to be updated to note that the user must retrieve the API token from their cPanel.
2025-04-10 19:56:06 -04:00
Jamie Curnow
5e66d677f1 Adds test for dashboard endpoints
2025-03-24 14:34:45 +10:00
Andrei Mateescu
18830f81b0 Update the Gandi plugin. 2025-03-13 23:47:31 +02:00
FoxtrotCZ
341ac65587 Updates Active24 plugin to API v2 2025-03-09 19:54:11 +01:00
henmohr
078baa255a Update certbot-dns-plugins.json 2025-03-03 16:40:38 -03:00
Michael Heilig
bf9d9bd43b Fix DomainOffensive certbot plugin
In https://github.com/NginxProxyManager/nginx-proxy-manager/pull/4235 the certbot plugin for do.de (Domain Offensive) was updated to use the more
official version. One necessary line modification was missed, resulting in an error when creating a new certificate.
2025-02-28 21:00:36 +01:00
cg-zhou
a394b25e61 fix eslint error 2025-02-26 19:45:49 +08:00
cg-zhou
1c47fc2ba4 feat: Add IP_RANGES_FETCH_ENABLED environment variable
This change adds a new environment variable to control whether IP ranges
are fetched during application startup. When set to 'false', the initial
fetch will be skipped, which can:

1. Speed up application startup
2. Avoid connectivity issues in environments with restricted internet access
3. Prevent startup failures when CloudFront or CloudFlare services are unreachable
2025-02-26 19:25:50 +08:00
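The backend side of this change (visible further down in this compare view, in the startup file diff) reads the variable once and only skips the fetch when it is literally the string "false"; roughly:

```js
// Simplified from the startup change shown further down in this diff.
const IP_RANGES_FETCH_ENABLED = process.env.IP_RANGES_FETCH_ENABLED !== "false";

async function fetchIpRangesOnStartup(internalIpRanges, logger) {
	if (!IP_RANGES_FETCH_ENABLED) {
		logger.info("IP Ranges fetch is disabled by environment variable");
		return;
	}
	logger.info("IP Ranges fetch is enabled");
	try {
		// A failed fetch is logged but no longer aborts startup.
		await internalIpRanges.fetch();
	} catch (err) {
		logger.error(`IP Ranges fetch failed, continuing anyway: ${err.message}`);
	}
}
```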
ZeroDeng
312e2ab80c [fix]Stream List error code 500
Fix stream list page error code 500.
2025-02-21 14:56:00 +08:00
Wise
d147ccd88d Fixed error in sqlite installation compose file
If people copy and paste the sqlite installation example without commenting out the environment section, docker compose will throw an error because environment will be null.
2025-02-14 14:44:54 +01:00
Marc
03fd292c61 use latest certbot-dns-cpanel version
this allows to use token for authentication
2025-02-09 11:41:30 +01:00
jc21
79d28f03d0 Merge pull request #4346 from Sander0542/feature/security-schemes-component
API Schema Improvements
2025-02-07 12:39:49 +10:00
Mordy Ovits
b09147eca8 Update frontend copyright year to 2025 2025-02-06 19:40:23 -05:00
jc21
c5a319cb20 Merge pull request #4347 from NginxProxyManager/develop
v2.12.3
2025-02-06 20:25:09 +10:00
Jamie Curnow
c4df89df1f Fix dashboard loading loop and freezing the page 2025-02-06 13:38:47 +10:00
jc21
34c703f8b4 Merge branch 'master' into develop 2025-02-06 08:52:55 +10:00
Jamie Curnow
0a05d8f0ad Bump version 2025-02-06 08:39:03 +10:00
jc21
0a9141fad5 Merge pull request #4208 from badkeyy/feature/add-zone-edit-certbot-plugin
Add ZoneEdit certbot plugin
2025-02-06 08:33:11 +10:00
jc21
42836774b7 Merge branch 'develop' into feature/add-zone-edit-certbot-plugin 2025-02-06 08:33:01 +10:00
jc21
2a07544f58 Merge pull request #4235 from FabianK3/update-domainoffensive-certbot-plugin
Update DomainOffensive certbot plugin
2025-02-06 08:30:09 +10:00
jc21
dc9d884743 Merge pull request #4292 from icaksh/patch-1
feat: change htpasswd to openssl
2025-02-06 08:29:15 +10:00
jc21
0d5d2b1b7c Merge pull request #4283 from badkeyy/feature/show-active-host-in-cert-list
SSL Certificates: Show if cert is in use on host
2025-02-06 07:43:12 +10:00
Sander Jochems
df48b835c4 Update order to match others 2025-02-05 22:20:21 +01:00
Sander Jochems
8a1557154a Add certificate fields to boolFields 2025-02-05 22:15:12 +01:00
Sander Jochems
a6af5ec2c7 Remove certificate as required from proxy host 2025-02-05 18:18:50 +01:00
Sander Jochems
14d7c35fd7 Fix whitespaces 2025-02-05 17:31:09 +01:00
Sander Jochems
cfcf78aaee Set bearer auth security component 2025-02-05 17:29:40 +01:00
jc21
3a01b2c84f Merge pull request #4334 from nwagenmakers/mijn-host-patch
Update certbot-dns-plugins.json (mijn-host)
2025-02-05 20:36:06 +10:00
jc21
e1c84a5c10 Merge pull request #4338 from Sander0542/fix/token-expires-type
Fix type for token.expires
2025-02-05 20:35:33 +10:00
jc21
c56c95a59a Merge pull request #4344 from NginxProxyManager/stream-ssl
SSL for Streams - 2025
2025-02-05 18:22:51 +10:00
Jamie Curnow
6a60627833 Cypress test for Streams
and updated cypress + packages
2025-02-05 16:02:17 +10:00
Jamie Curnow
b4793d3c16 Adds testssl.sh and mkcert to cypress stack 2025-02-05 08:10:11 +10:00
Jamie Curnow
68a7803513 Fix api schema after merging latest changes 2025-02-04 17:55:28 +10:00
jbowring
2657af97cf Fix stream update not persisting 2025-02-04 17:14:07 +10:00
jbowring
4452f014b9 Fix whitespace in nginx stream config 2025-02-04 17:14:07 +10:00
jbowring
cd80cc8e4d Add certificate to streams database model 2025-02-04 17:14:04 +10:00
jbowring
ee4250d770 Add SSL column to streams table UI 2025-02-04 17:12:05 +10:00
jbowring
3dbc70faa6 Add SSL tab to stream UI 2025-02-04 17:12:04 +10:00
jbowring
3091c21cae Add SSL certificate to TCP streams if certificate in database 2025-02-04 17:12:04 +10:00
Sander Jochems
57cd2a1919 Fix type for token.expires 2025-02-03 21:47:41 +01:00
nwagenmakers
ad5936c530 Update certbot-dns-plugins.json (mijn-host)
Updated credentials hint/text in mijn-host plugin entry
2025-02-01 13:10:53 +01:00
jc21
498109addb Merge pull request #4310 from NginxProxyManager/dependabot/npm_and_yarn/docs/vite-5.4.14
Bump vite from 5.4.8 to 5.4.14 in /docs
2025-01-28 18:08:46 +10:00
jc21
3f3aacd7ec Merge pull request #4274 from Dim145/develop
[Postgres] fix error in access_list get
2025-01-28 14:03:07 +10:00
dependabot[bot]
bb4ecf812d Bump vite from 5.4.8 to 5.4.14 in /docs
Bumps [vite](https://github.com/vitejs/vite/tree/HEAD/packages/vite) from 5.4.8 to 5.4.14.
- [Release notes](https://github.com/vitejs/vite/releases)
- [Changelog](https://github.com/vitejs/vite/blob/v5.4.14/packages/vite/CHANGELOG.md)
- [Commits](https://github.com/vitejs/vite/commits/v5.4.14/packages/vite)

---
updated-dependencies:
- dependency-name: vite
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-01-22 07:09:04 +00:00
Julian Gassner
c05f9695d0 Merge branch 'develop' into feature/add-zone-edit-certbot-plugin 2025-01-15 15:37:53 +01:00
Julian Gassner
6343b398f0 Add --no-deps 2025-01-15 14:36:38 +00:00
icaksh
59362b7477 feat: change htpasswd to openssl 2025-01-12 19:16:38 +07:00
Julian Gassner
aedaaa18e0 Fix whitespace 2025-01-10 05:20:28 +01:00
Julian Gassner
080bd0b749 Added status of certificates to the certificate list and show on which domain names the certificates are in use 2025-01-10 05:15:22 +01:00
Jamie Curnow
9687e9e450 Use previous version of powerdns image, newer version is broken
2025-01-07 10:30:08 +10:00
Jamie Curnow
5a234bb88c Fix incorrect test folder in ci results 2025-01-07 08:13:04 +10:00
jc21
4de4b65036 Merge pull request #4252 from GergelyGombai/develop
Add Gcore DNS Provider
2025-01-07 07:54:44 +10:00
dim145
f1c97c7c36 fix: add missing group_by clause for access_list get 2025-01-03 00:39:29 +01:00
jc21
b4f49969d6 Merge pull request #4261 from NginxProxyManager/develop
v2.12.2
2024-12-29 14:40:05 +10:00
jc21
ec12d8f9bf Merge pull request #4148 from Medan-rfz/develop
Added certbot plugin for Beget DNS service
2024-12-29 14:00:51 +10:00
jc21
e50e3def9d Merge pull request #4169 from andrew-codechimp/bump-porkbun
Bump certbot-dns-porkbun
2024-12-29 14:00:18 +10:00
jc21
6415f284f9 Merge pull request #4256 from bigcat26/develop
upgrade certbot-dns-aliyun plugin from 0.38.1 to 2.0.0
2024-12-29 13:52:03 +10:00
Chris Xiong
98e5997f0a upgrade certbot-dns-aliyun plugin from 0.38.1 to 2.0.0 2024-12-26 09:51:28 +08:00
Jamie Curnow
fc30a92bd4 Open port for authentik in dev
2024-12-24 18:19:52 +10:00
Jamie Curnow
e2011ee45c Bump version 2024-12-24 17:51:25 +10:00
jc21
1406e75c2c Merge pull request #4254 from NginxProxyManager/postgres
Postgres
2024-12-24 17:24:05 +10:00
Jamie Curnow
ca3ee98c68 Postgres Support
- Combines #4086 and #4087 PRs
- Adds authentik in CI stack
2024-12-24 16:48:48 +10:00
jc21
f90d839ebe Merge pull request #4246 from JanzenJohn/develop
Remove infinite requests loop
2024-12-24 08:16:48 +10:00
jc21
be5278f31e Merge pull request #4247 from miguelangel-nubla/patch-1
Add custom configuration to 404 hosts
2024-12-24 08:15:55 +10:00
ComradeBlin
73110d5e1e Update Gcore apikey format
I managed to mis-write the format in my previous commit
2024-12-22 01:44:52 +01:00
ComradeBlin
356b98bf7e Add Gcore DNS Provider 2024-12-22 01:02:47 +01:00
Miguel Angel Nubla
3eecf7a38b Add custom configuration to 404 hosts 2024-12-20 01:03:21 +01:00
Miguel Angel Nubla
7f9240dda7 Add custom configuration to dead_host.conf 2024-12-20 00:59:26 +01:00
John Janzen
f537619ffe Revert "Change onRender function to always update the dashboard stats"
This reverts commit d26e8c1d0c.

This reopens #4204 (which I sadly can't reproduce).

The reverted commit is responsible for an infinite loop of requests to /hosts, which makes buttons unresponsive on the main page.
Another way to invalidate the cache needs to be found.

This infinite request loop happens on d26e8c1d0c
and on the docker image
`nginxproxymanager/nginx-proxy-manager-dev:pr-4206`

The docker image is attached to PR #4206, which merges the commit.
2024-12-19 16:16:03 +01:00
jc21
805968aac6 Merge pull request #4185 from muescha/patch-1
Update index.md: add link to Proxmox VE Helper-Scripts
2024-12-17 07:59:45 +10:00
jc21
2a4093c1b8 Merge pull request #4215 from TECH7Fox/patch-1
Add hostingnl DNS Challenge provider
2024-12-17 07:57:43 +10:00
jc21
ae2ac8a733 Merge pull request #4230 from NginxProxyManager/dependabot/npm_and_yarn/docs/nanoid-3.3.8
Bump nanoid from 3.3.7 to 3.3.8 in /docs
2024-12-17 07:52:24 +10:00
FabianK3
5d087f1256 Update DomainOffensive certbot plugin 2024-12-15 11:35:58 +01:00
dependabot[bot]
c6eca2578e Bump nanoid from 3.3.7 to 3.3.8 in /docs
Bumps [nanoid](https://github.com/ai/nanoid) from 3.3.7 to 3.3.8.
- [Release notes](https://github.com/ai/nanoid/releases)
- [Changelog](https://github.com/ai/nanoid/blob/main/CHANGELOG.md)
- [Commits](https://github.com/ai/nanoid/compare/3.3.7...3.3.8)

---
updated-dependencies:
- dependency-name: nanoid
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-12-14 10:02:55 +00:00
Jordy Kuhne
56033bee9c Add hostingnl 2024-12-08 15:23:37 +01:00
Medan-rfz
c6630e87bb Update version 'certbot-beget-plugin' & fix credentials content 2024-12-07 15:01:57 +04:00
Medan
d6b98f51b0 Merge branch 'NginxProxyManager:develop' into develop 2024-12-07 14:27:29 +04:00
Julian Gassner
1e322804ce Add ZoneEdit certbot plugin 2024-12-04 16:47:36 +01:00
jc21
b3de76c945 Merge pull request #4192 from badkeyy/bugfix/fix-user-edit-email-format-check
Enforce email format when editing user
2024-12-04 14:50:42 +10:00
jc21
fcf4117f8e Merge pull request #4206 from badkeyy/bugfix/update-dashboard-stats-on-change
Update the dashboard stats every time the dashboard is shown
2024-12-04 13:08:21 +10:00
Julian Gassner
d26e8c1d0c Change onRender function to always update the dashboard stats 2024-12-04 03:45:56 +01:00
Julian Gassner
19ed4c1212 Change click to submit 2024-12-04 03:08:49 +01:00
Julian Gassner
03018d252b Merge branch 'NginxProxyManager:develop' into bugfix/fix-user-edit-email-format-check 2024-12-04 01:58:08 +01:00
jc21
8351dd41f6 Merge pull request #4199 from NginxProxyManager/dependabot/npm_and_yarn/test/cross-spawn-7.0.6
Bump cross-spawn from 7.0.3 to 7.0.6 in /test
2024-12-02 10:45:00 +10:00
jc21
97212f2686 Merge pull request #4123 from NginxProxyManager/dependabot/npm_and_yarn/frontend/elliptic-6.6.0
Bump elliptic from 6.5.7 to 6.6.0 in /frontend
2024-12-02 10:44:20 +10:00
dependabot[bot]
fe068a8b51 Bump cross-spawn from 7.0.3 to 7.0.6 in /test
Bumps [cross-spawn](https://github.com/moxystudio/node-cross-spawn) from 7.0.3 to 7.0.6.
- [Changelog](https://github.com/moxystudio/node-cross-spawn/blob/master/CHANGELOG.md)
- [Commits](https://github.com/moxystudio/node-cross-spawn/compare/v7.0.3...v7.0.6)

---
updated-dependencies:
- dependency-name: cross-spawn
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-12-01 22:49:09 +00:00
jc21
61e2bde98f Merge pull request #4184 from NginxProxyManager/dependabot/npm_and_yarn/backend/cross-spawn-7.0.6
Bump cross-spawn from 7.0.3 to 7.0.6 in /backend
2024-12-02 08:48:08 +10:00
Julian Gassner
81c9038929 Refactor user form structure 2024-11-27 18:27:11 +01:00
jc21
4ea50ca40c Merge pull request #4126 from jonasrdl/remove-deprecated-version-line
docs(setup): Remove deprecated version from docker-compose.yml
2024-11-26 07:37:41 +10:00
jc21
53ed12bcf2 Merge pull request #4163 from Jasparigus/stream_error_correction
Fix Container Bootloop if Stream is used for http/https ports
2024-11-26 07:37:14 +10:00
jc21
cb3e4ed59c Merge pull request #4137 from irexyc/add-woff2-asset
Add woff2 format to assets.conf for Cache Assets
2024-11-26 07:35:57 +10:00
jc21
b20dc5eade Merge pull request #4167 from NginxProxyManager/dependabot/npm_and_yarn/test/eslint/plugin-kit-0.2.3
Bump @eslint/plugin-kit from 0.2.0 to 0.2.3 in /test
2024-11-26 07:35:10 +10:00
jc21
586afc0c91 Merge pull request #4187 from kerstenremco/avatar
Fix entries of a deleted user break the UI
2024-11-26 07:31:03 +10:00
Remco Kersten
93ea17a9bb Fix entries of a deleted user break the UI 2024-11-25 20:37:49 +01:00
Muescha
151160a834 Update index.md: add link to Proxmox VE Helper-Scripts
Update index.md: add link to Proxmox VE Helper-Scripts
2024-11-24 20:10:17 +01:00
dependabot[bot]
2075f98cad Bump cross-spawn from 7.0.3 to 7.0.6 in /backend
Bumps [cross-spawn](https://github.com/moxystudio/node-cross-spawn) from 7.0.3 to 7.0.6.
- [Changelog](https://github.com/moxystudio/node-cross-spawn/blob/master/CHANGELOG.md)
- [Commits](https://github.com/moxystudio/node-cross-spawn/compare/v7.0.3...v7.0.6)

---
updated-dependencies:
- dependency-name: cross-spawn
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-11-24 03:36:44 +00:00
jc21
07a4e5791f Merge pull request #4179 from tametsi/develop
Return generic auth error to prevent user enumeration attacks
2024-11-23 22:39:37 +10:00
tametsi
640a1eeb68 Return generic auth error to prevent user enumeration attacks
On an invalid user or password, the generic error message "Invalid email or password" is returned.
This way, no information about whether the user exists is leaked.
2024-11-22 10:37:09 +01:00
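A minimal sketch of the pattern (the store and hash helpers are assumed, not the project's actual login code): both the unknown-user and the wrong-password branches surface the exact same error, so a response gives no hint about whether the account exists.

```js
// Illustrative only - userStore and verifyPassword are assumed helpers.
const GENERIC_AUTH_ERROR = "Invalid email or password";

async function authenticate(userStore, verifyPassword, email, password) {
	const user = await userStore.findByEmail(email);
	if (!user) {
		// Same message as the bad-password case: no user enumeration.
		throw new Error(GENERIC_AUTH_ERROR);
	}
	const ok = await verifyPassword(password, user.passwordHash);
	if (!ok) {
		throw new Error(GENERIC_AUTH_ERROR);
	}
	return user;
}
```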
Andrew Jackson
126d3d44ca Bump certbot-dns-porkbun 2024-11-17 10:44:29 +00:00
dependabot[bot]
20646e7bb5 Bump @eslint/plugin-kit from 0.2.0 to 0.2.3 in /test
Bumps [@eslint/plugin-kit](https://github.com/eslint/rewrite) from 0.2.0 to 0.2.3.
- [Release notes](https://github.com/eslint/rewrite/releases)
- [Changelog](https://github.com/eslint/rewrite/blob/main/release-please-config.json)
- [Commits](https://github.com/eslint/rewrite/compare/core-v0.2.0...plugin-kit-v0.2.3)

---
updated-dependencies:
- dependency-name: "@eslint/plugin-kit"
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-11-15 21:19:05 +00:00
Jasper Stubbe
87998a03ce Fix bootloop if stream is used for http/https port 2024-11-14 11:39:48 -08:00
hatharry
2cee211fb0 add First Domains plugin 2024-11-13 16:31:59 +13:00
Medan-rfz
a56342c76a Fix credentials 2024-11-10 19:23:28 +04:00
Medan-rfz
4c89379671 Update version 'certbot-beget-plugin' 2024-11-10 18:31:07 +04:00
Medan-rfz
10b9a49274 Update version 'certbot-beget-plugin' 2024-11-10 16:16:45 +04:00
Medan-rfz
595a742c40 Change beget plugin 2024-11-10 15:09:41 +04:00
Medan-rfz
c171752137 Added certbot plugin for Beget DNS service 2024-11-08 02:29:38 +04:00
irexyc
a0b26b9e98 Add woff2 format to assets.conf for Cache Assets 2024-11-04 20:01:39 +08:00
Jonas Riedel
d6791f4e38 docs(setup): Remove deprecated version from docker-compose.yml 2024-10-31 11:25:38 +01:00
dependabot[bot]
62c94f3099 Bump elliptic from 6.5.7 to 6.6.0 in /frontend
Bumps [elliptic](https://github.com/indutny/elliptic) from 6.5.7 to 6.6.0.
- [Commits](https://github.com/indutny/elliptic/compare/v6.5.7...v6.6.0)

---
updated-dependencies:
- dependency-name: elliptic
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-10-31 02:19:58 +00:00
jc21
25a26d6175 Merge pull request #4112 from prospo/develop
feat: Add leaseweb to certbot-dns-plugins
2024-10-30 14:40:20 +10:00
jc21
17246e418f Merge pull request #4118 from mitossoft-rd/patch-1
Remove variable usage from proxy_pass directive to fix resolution issues
2024-10-30 14:39:48 +10:00
mitossoft-rd
f7d3ca0b07 Cleaning unused variable. 2024-10-28 15:18:54 +03:00
mitossoft-rd
a55de386e7 Fix URL format 2024-10-28 15:15:08 +03:00
mitossoft-rd
e9d4f5b827 Remove variable usage from proxy_pass directive to fix resolution issues
By using a static URL, the backend server can be accessed reliably, avoiding the common 404 errors or "no resolver defined" issues seen when variables are used.
2024-10-28 02:59:23 +03:00
Emil
1c1cee3836 feat: Add leaseweb to certbot-dns-plugins 2024-10-25 13:25:09 +00:00
jc21
eaf6335694 Merge pull request #4106 from dreik/develop
http2 directive migration
2024-10-25 08:53:08 +10:00
jc21
ffe05ebd41 Merge pull request #4108 from chrismaffey/patch-2
Update put.json
2024-10-25 08:06:50 +10:00
Chris Maffey
2e9a4f1aed Update put.json
Password can be left blank for updates.  Otherwise you have to reenter the password every time you save the auth list
2024-10-24 17:29:16 +13:00
jc21
d17c85e4c8 Merge pull request #4107 from chrismaffey/patch-1
Update _access.conf
2024-10-24 11:31:12 +10:00
Chris Maffey
dad8d0ca00 Update _access.conf
the pass_auth and satisfy_any properties are now boolean true/false; they do not == 1, so the switching in this template breaks
2024-10-24 14:04:17 +13:00
Sergey 'dreik' Kolesnik
d7e0558a35 http2 directive
to reduce warns in logs
2024-10-24 01:30:14 +03:00
jc21
ee41bb5562 Merge pull request #4078 from Guiorgy/patch-1
normalize indentations in certbot-dns-plugins.json
2024-10-22 10:14:31 +10:00
jc21
0cf6b9caa4 Merge pull request #4084 from ttodua/patch-1
doc(site) - default credentials change
2024-10-22 10:14:11 +10:00
T. Todua
68a9baf206 minor 2024-10-18 15:35:15 +04:00
T. Todua
d92421d098 doc(site) - default credentials change 2024-10-18 15:33:32 +04:00
Guiorgy
96c58b203e normalize indentations in certbot-dns-plugins.json 2024-10-17 15:34:04 +04:00
jc21
5084cb7296 Merge pull request #4077 from NginxProxyManager/develop
v2.12.1
2024-10-17 09:49:07 +10:00
jc21
e677bfa2e8 Merge pull request #4073 from NginxProxyManager/develop
v2.12.0
2024-10-16 15:41:55 +10:00
522 changed files with 17391 additions and 25224 deletions


@@ -1 +1 @@
-2.12.1
+2.12.6

57
Jenkinsfile vendored

@@ -128,7 +128,7 @@ pipeline {
sh 'docker-compose down --remove-orphans --volumes -t 30 || true'
}
unstable {
-dir(path: 'testing/results') {
+dir(path: 'test/results') {
archiveArtifacts(allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml')
}
}
@@ -161,7 +161,45 @@ pipeline {
sh 'docker-compose down --remove-orphans --volumes -t 30 || true'
}
unstable {
-dir(path: 'testing/results') {
+dir(path: 'test/results') {
+archiveArtifacts(allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml')
+}
+}
+}
+}
+stage('Test Postgres') {
+environment {
+COMPOSE_PROJECT_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}_postgres"
+COMPOSE_FILE = 'docker/docker-compose.ci.yml:docker/docker-compose.ci.postgres.yml'
+}
+when {
+not {
+equals expected: 'UNSTABLE', actual: currentBuild.result
+}
+}
+steps {
+sh 'rm -rf ./test/results/junit/*'
+sh './scripts/ci/fulltest-cypress'
+}
+post {
+always {
+// Dumps to analyze later
+sh 'mkdir -p debug/postgres'
+sh 'docker logs $(docker-compose ps --all -q fullstack) > debug/postgres/docker_fullstack.log 2>&1'
+sh 'docker logs $(docker-compose ps --all -q stepca) > debug/postgres/docker_stepca.log 2>&1'
+sh 'docker logs $(docker-compose ps --all -q pdns) > debug/postgres/docker_pdns.log 2>&1'
+sh 'docker logs $(docker-compose ps --all -q pdns-db) > debug/postgres/docker_pdns-db.log 2>&1'
+sh 'docker logs $(docker-compose ps --all -q dnsrouter) > debug/postgres/docker_dnsrouter.log 2>&1'
+sh 'docker logs $(docker-compose ps --all -q db-postgres) > debug/postgres/docker_db-postgres.log 2>&1'
+sh 'docker logs $(docker-compose ps --all -q authentik) > debug/postgres/docker_authentik.log 2>&1'
+sh 'docker logs $(docker-compose ps --all -q authentik-redis) > debug/postgres/docker_authentik-redis.log 2>&1'
+sh 'docker logs $(docker-compose ps --all -q authentik-ldap) > debug/postgres/docker_authentik-ldap.log 2>&1'
+junit 'test/results/junit/*'
+sh 'docker-compose down --remove-orphans --volumes -t 30 || true'
+}
+unstable {
+dir(path: 'test/results') {
archiveArtifacts(allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml')
}
}
@@ -203,12 +241,17 @@ pipeline {
}
steps {
script {
-npmGithubPrComment("""Docker Image for build ${BUILD_NUMBER} is available on
+npmGithubPrComment("""Docker Image for build ${BUILD_NUMBER} is available on [DockerHub](https://cloud.docker.com/repository/docker/nginxproxymanager/${IMAGE}-dev):
-[DockerHub](https://cloud.docker.com/repository/docker/nginxproxymanager/${IMAGE}-dev)
+```
-as `nginxproxymanager/${IMAGE}-dev:${BRANCH_LOWER}`
+nginxproxymanager/${IMAGE}-dev:${BRANCH_LOWER}
+```
-**Note:** ensure you backup your NPM instance before testing this image! Especially if there are database changes
+> [!NOTE]
-**Note:** this is a different docker image namespace than the official image
+> Ensure you backup your NPM instance before testing this image! Especially if there are database changes.
+> This is a different docker image namespace than the official image.
+> [!WARNING]
+> Changes and additions to DNS Providers require verification by at least 2 members of the community!
""", true)
}
}


@@ -1,7 +1,7 @@
<p align="center">
<img src="https://nginxproxymanager.com/github.png">
<br><br>
-<img src="https://img.shields.io/badge/version-2.12.1-green.svg?style=for-the-badge">
+<img src="https://img.shields.io/badge/version-2.12.6-green.svg?style=for-the-badge">
<a href="https://hub.docker.com/repository/docker/jc21/nginx-proxy-manager">
<img src="https://img.shields.io/docker/stars/jc21/nginx-proxy-manager.svg?style=for-the-badge">
</a>


@@ -1,73 +0,0 @@
{
"env": {
"node": true,
"es6": true
},
"extends": [
"eslint:recommended"
],
"globals": {
"Atomics": "readonly",
"SharedArrayBuffer": "readonly"
},
"parserOptions": {
"ecmaVersion": 2018,
"sourceType": "module"
},
"plugins": [
"align-assignments"
],
"rules": {
"arrow-parens": [
"error",
"always"
],
"indent": [
"error",
"tab"
],
"linebreak-style": [
"error",
"unix"
],
"quotes": [
"error",
"single"
],
"semi": [
"error",
"always"
],
"key-spacing": [
"error",
{
"align": "value"
}
],
"comma-spacing": [
"error",
{
"before": false,
"after": true
}
],
"func-call-spacing": [
"error",
"never"
],
"keyword-spacing": [
"error",
{
"before": true
}
],
"no-irregular-whitespace": "error",
"no-unused-expressions": 0,
"align-assignments/align-assignments": [
2,
{
"requiresOnly": false
}
]
}
}


@@ -1,11 +0,0 @@
{
"printWidth": 320,
"tabWidth": 4,
"useTabs": true,
"semi": true,
"singleQuote": true,
"bracketSpacing": true,
"jsxBracketSameLine": true,
"trailingComma": "all",
"proseWrap": "always"
}


@@ -1,9 +1,12 @@
-const express = require('express');
-const bodyParser = require('body-parser');
-const fileUpload = require('express-fileupload');
-const compression = require('compression');
-const config = require('./lib/config');
-const log = require('./logger').express;
+import bodyParser from "body-parser";
+import compression from "compression";
+import express from "express";
+import fileUpload from "express-fileupload";
+import { isDebugMode } from "./lib/config.js";
+import cors from "./lib/express/cors.js";
+import jwt from "./lib/express/jwt.js";
+import { express as logger } from "./logger.js";
+import mainRoutes from "./routes/main.js";
/**
* App
@@ -20,71 +23,70 @@ app.use(compression());
* General Logging, BEFORE routes
*/
-app.disable('x-powered-by');
+app.disable("x-powered-by");
-app.enable('trust proxy', ['loopback', 'linklocal', 'uniquelocal']);
+app.enable("trust proxy", ["loopback", "linklocal", "uniquelocal"]);
-app.enable('strict routing');
+app.enable("strict routing");
// pretty print JSON when not live
-if (config.debug()) {
+if (isDebugMode()) {
-app.set('json spaces', 2);
+app.set("json spaces", 2);
}
// CORS for everything
-app.use(require('./lib/express/cors'));
+app.use(cors);
// General security/cache related headers + server header
-app.use(function (req, res, next) {
+app.use((_, res, next) => {
-let x_frame_options = 'DENY';
+let x_frame_options = "DENY";
-if (typeof process.env.X_FRAME_OPTIONS !== 'undefined' && process.env.X_FRAME_OPTIONS) {
+if (typeof process.env.X_FRAME_OPTIONS !== "undefined" && process.env.X_FRAME_OPTIONS) {
x_frame_options = process.env.X_FRAME_OPTIONS;
}
res.set({
-'X-XSS-Protection': '1; mode=block',
+"X-XSS-Protection": "1; mode=block",
-'X-Content-Type-Options': 'nosniff',
+"X-Content-Type-Options": "nosniff",
-'X-Frame-Options': x_frame_options,
+"X-Frame-Options": x_frame_options,
-'Cache-Control': 'no-cache, no-store, max-age=0, must-revalidate',
+"Cache-Control": "no-cache, no-store, max-age=0, must-revalidate",
-Pragma: 'no-cache',
+Pragma: "no-cache",
-Expires: 0
+Expires: 0,
});
next();
});
-app.use(require('./lib/express/jwt')());
+app.use(jwt());
-app.use('/', require('./routes/main'));
+app.use("/", mainRoutes);
// production error handler
// no stacktraces leaked to user
-// eslint-disable-next-line
+app.use((err, req, res, _) => {
-app.use(function (err, req, res, next) {
+const payload = {
-let payload = {
error: {
code: err.status,
-message: err.public ? err.message : 'Internal Error'
+message: err.public ? err.message : "Internal Error",
-}
+},
};
-if (config.debug() || (req.baseUrl + req.path).includes('nginx/certificates')) {
+if (typeof err.message_i18n !== "undefined") {
+payload.error.message_i18n = err.message_i18n;
+}
+if (isDebugMode() || (req.baseUrl + req.path).includes("nginx/certificates")) {
payload.debug = {
-stack: typeof err.stack !== 'undefined' && err.stack ? err.stack.split('\n') : null,
+stack: typeof err.stack !== "undefined" && err.stack ? err.stack.split("\n") : null,
-previous: err.previous
+previous: err.previous,
};
}
// Not every error is worth logging - but this is good for now until it gets annoying.
-if (typeof err.stack !== 'undefined' && err.stack) {
+if (typeof err.stack !== "undefined" && err.stack) {
-if (config.debug()) {
+logger.debug(err.stack);
-log.debug(err.stack);
+if (typeof err.public === "undefined" || !err.public) {
-} else if (typeof err.public == 'undefined' || !err.public) {
+logger.warn(err.message);
-log.warn(err.message);
}
}
-res
+res.status(err.status || 500).send(payload);
-.status(err.status || 500)
-.send(payload);
});
-module.exports = app;
+export default app;

91
backend/biome.json Normal file

@@ -0,0 +1,91 @@
{
"$schema": "https://biomejs.dev/schemas/2.2.4/schema.json",
"vcs": {
"enabled": true,
"clientKind": "git",
"useIgnoreFile": true
},
"files": {
"ignoreUnknown": false,
"includes": [
"**/*.ts",
"**/*.tsx",
"**/*.js",
"**/*.jsx",
"!**/dist/**/*"
]
},
"formatter": {
"enabled": true,
"indentStyle": "tab",
"indentWidth": 4,
"lineWidth": 120,
"formatWithErrors": true
},
"assist": {
"actions": {
"source": {
"organizeImports": {
"level": "on",
"options": {
"groups": [
":BUN:",
":NODE:",
[
"npm:*",
"npm:*/**"
],
":PACKAGE_WITH_PROTOCOL:",
":URL:",
":PACKAGE:",
[
"/src/*",
"/src/**"
],
[
"/**"
],
[
"#*",
"#*/**"
],
":PATH:"
]
}
}
}
}
},
"linter": {
"enabled": true,
"rules": {
"recommended": true,
"correctness": {
"useUniqueElementIds": "off"
},
"suspicious": {
"noExplicitAny": "off"
},
"performance": {
"noDelete": "off"
},
"nursery": "off",
"a11y": {
"useSemanticElements": "off",
"useValidAnchor": "off"
},
"style": {
"noParameterAssign": "error",
"useAsConstAssertion": "error",
"useDefaultParameterLast": "error",
"useEnumInitializers": "error",
"useSelfClosingElements": "error",
"useSingleVarDeclarator": "error",
"noUnusedTemplateLiteral": "error",
"useNumberNamespace": "error",
"noInferrableTypes": "error",
"noUselessElse": "error"
}
}
}
}


@@ -1,14 +1,19 @@
-const config = require('./lib/config');
+import knex from "knex";
+import {configGet, configHas} from "./lib/config.js";
-if (!config.has('database')) {
+const generateDbConfig = () => {
-throw new Error('Database config does not exist! Please read the instructions: https://nginxproxymanager.com/setup/');
+if (!configHas("database")) {
+throw new Error(
+"Database config does not exist! Please read the instructions: https://nginxproxymanager.com/setup/",
+);
}
-function generateDbConfig() {
+const cfg = configGet("database");
-const cfg = config.get('database');
-if (cfg.engine === 'knex-native') {
+if (cfg.engine === "knex-native") {
return cfg.knex;
}
return {
client: cfg.engine,
connection: {
@@ -16,12 +21,12 @@ function generateDbConfig() {
user: cfg.user,
password: cfg.password,
database: cfg.name,
-port: cfg.port
+port: cfg.port,
},
migrations: {
-tableName: 'migrations'
+tableName: "migrations",
-}
+},
+};
};
-}
-module.exports = require('knex')(generateDbConfig());
+export default knex(generateDbConfig());


@@ -1,37 +1,47 @@
#!/usr/bin/env node
-const schema = require('./schema');
-const logger = require('./logger').global;
+import app from "./app.js";
+import internalCertificate from "./internal/certificate.js";
+import internalIpRanges from "./internal/ip_ranges.js";
+import { global as logger } from "./logger.js";
+import { migrateUp } from "./migrate.js";
+import { getCompiledSchema } from "./schema/index.js";
+import setup from "./setup.js";
+const IP_RANGES_FETCH_ENABLED = process.env.IP_RANGES_FETCH_ENABLED !== "false";
async function appStart() {
-const migrate = require('./migrate');
+return migrateUp()
-const setup = require('./setup');
-const app = require('./app');
-const internalCertificate = require('./internal/certificate');
-const internalIpRanges = require('./internal/ip_ranges');
-return migrate.latest()
.then(setup)
-.then(schema.getCompiledSchema)
+.then(getCompiledSchema)
-.then(internalIpRanges.fetch)
+.then(() => {
+if (!IP_RANGES_FETCH_ENABLED) {
+logger.info("IP Ranges fetch is disabled by environment variable");
+return;
+}
+logger.info("IP Ranges fetch is enabled");
+return internalIpRanges.fetch().catch((err) => {
+logger.error("IP Ranges fetch failed, continuing anyway:", err.message);
+});
+})
.then(() => {
internalCertificate.initTimer();
internalIpRanges.initTimer();
const server = app.listen(3000, () => {
-logger.info('Backend PID ' + process.pid + ' listening on port 3000 ...');
+logger.info(`Backend PID ${process.pid} listening on port 3000 ...`);
-process.on('SIGTERM', () => {
+process.on("SIGTERM", () => {
-logger.info('PID ' + process.pid + ' received SIGTERM');
+logger.info(`PID ${process.pid} received SIGTERM`);
server.close(() => {
-logger.info('Stopping.');
+logger.info("Stopping.");
process.exit(0);
});
});
});
})
.catch((err) => {
-logger.error(err.message, err);
+logger.error(`Startup Error: ${err.message}`, err);
setTimeout(appStart, 1000);
});
}
@@ -39,7 +49,6 @@ async function appStart () {
try {
appStart();
} catch (err) {
-logger.error(err.message, err);
+logger.fatal(err);
process.exit(1);
}


@@ -1,103 +1,94 @@
-const _ = require('lodash');
-const fs = require('fs');
-const batchflow = require('batchflow');
-const logger = require('../logger').access;
-const error = require('../lib/error');
-const utils = require('../lib/utils');
-const accessListModel = require('../models/access_list');
-const accessListAuthModel = require('../models/access_list_auth');
-const accessListClientModel = require('../models/access_list_client');
-const proxyHostModel = require('../models/proxy_host');
-const internalAuditLog = require('./audit-log');
-const internalNginx = require('./nginx');
+import fs from "node:fs";
+import batchflow from "batchflow";
+import _ from "lodash";
+import errs from "../lib/error.js";
+import utils from "../lib/utils.js";
+import { access as logger } from "../logger.js";
+import accessListModel from "../models/access_list.js";
+import accessListAuthModel from "../models/access_list_auth.js";
+import accessListClientModel from "../models/access_list_client.js";
+import proxyHostModel from "../models/proxy_host.js";
+import internalAuditLog from "./audit-log.js";
+import internalNginx from "./nginx.js";
-function omissions () {
+const omissions = () => {
-return ['is_deleted'];
+return ["is_deleted"];
-}
+};
const internalAccessList = {
/**
* @param {Access} access
* @param {Object} data
* @returns {Promise}
*/
-create: (access, data) => {
+create: async (access, data) => {
-return access.can('access_lists:create', data)
+await access.can("access_lists:create", data);
-.then((/*access_data*/) => {
+const row = await accessListModel
-return accessListModel
.query()
.insertAndFetch({
name: data.name,
satisfy_any: data.satisfy_any,
pass_auth: data.pass_auth,
-owner_user_id: access.token.getUserId(1)
+owner_user_id: access.token.getUserId(1),
})
.then(utils.omitRow(omissions()));
-})
-.then((row) => {
data.id = row.id;
-let promises = [];
+const promises = [];
+// Items
-// Now add the items
data.items.map((item) => {
-promises.push(accessListAuthModel
+promises.push(
-.query()
+accessListAuthModel.query().insert({
-.insert({
access_list_id: row.id,
username: item.username,
-password: item.password
+password: item.password,
-})
+}),
);
-return true;
});
-// Now add the clients
+// Clients
-if (typeof data.clients !== 'undefined' && data.clients) {
+data.clients?.map((client) => {
-data.clients.map((client) => {
+promises.push(
-promises.push(accessListClientModel
+accessListClientModel.query().insert({
-.query()
-.insert({
access_list_id: row.id,
address: client.address,
-directive: client.directive
+directive: client.directive,
-})
+}),
);
-return true;
});
-}
-return Promise.all(promises);
+await Promise.all(promises);
-})
-.then(() => {
// re-fetch with expansions
-return internalAccessList.get(access, {
+const freshRow = await internalAccessList.get(
+access,
+{
id: data.id,
-expand: ['owner', 'items', 'clients', 'proxy_hosts.access_list.[clients,items]']
+expand: ["owner", "items", "clients", "proxy_hosts.access_list.[clients,items]"],
-}, true /* <- skip masking */);
+},
-})
+true // skip masking
-.then((row) => {
+);
-// Audit log
-data.meta = _.assign({}, data.meta || {}, row.meta);
-return internalAccessList.build(row)
+// Audit log
-.then(() => {
+data.meta = _.assign({}, data.meta || {}, freshRow.meta);
-if (row.proxy_host_count) {
+await internalAccessList.build(freshRow);
-return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts);
+if (Number.parseInt(freshRow.proxy_host_count, 10)) {
+await internalNginx.bulkGenerateConfigs("proxy_host", freshRow.proxy_hosts);
}
-})
-.then(() => {
// Add to audit log
-return internalAuditLog.add(access, {
+await internalAuditLog.add(access, {
-action: 'created',
+action: "created",
-object_type: 'access-list',
+object_type: "access-list",
-object_id: row.id,
+object_id: freshRow.id,
-meta: internalAccessList.maskItems(data)
+meta: internalAccessList.maskItems(data),
-});
-})
-.then(() => {
-return internalAccessList.maskItems(row);
-});
});
+return internalAccessList.maskItems(freshRow);
},
/**
@@ -108,129 +99,107 @@ const internalAccessList = {
* @param {String} [data.items] * @param {String} [data.items]
* @return {Promise} * @return {Promise}
*/ */
update: (access, data) => { update: async (access, data) => {
return access.can('access_lists:update', data.id) await access.can("access_lists:update", data.id);
.then((/*access_data*/) => { const row = await internalAccessList.get(access, { id: data.id });
return internalAccessList.get(access, {id: data.id});
})
.then((row) => {
if (row.id !== data.id) { if (row.id !== data.id) {
// Sanity check that something crazy hasn't happened // Sanity check that something crazy hasn't happened
throw new error.InternalValidationError('Access List could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id); throw new errs.InternalValidationError(
`Access List could not be updated, IDs do not match: ${row.id} !== ${data.id}`,
);
} }
})
.then(() => {
// patch name if specified // patch name if specified
if (typeof data.name !== 'undefined' && data.name) { if (typeof data.name !== "undefined" && data.name) {
return accessListModel await accessListModel.query().where({ id: data.id }).patch({
.query()
.where({id: data.id})
.patch({
name: data.name, name: data.name,
satisfy_any: data.satisfy_any, satisfy_any: data.satisfy_any,
pass_auth: data.pass_auth, pass_auth: data.pass_auth,
}); });
} }
})
.then(() => {
// Check for items and add/update/remove them
if (typeof data.items !== 'undefined' && data.items) {
let promises = [];
let items_to_keep = [];
data.items.map(function (item) { // Check for items and add/update/remove them
if (typeof data.items !== "undefined" && data.items) {
const promises = [];
const itemsToKeep = [];
data.items.map((item) => {
if (item.password) { if (item.password) {
promises.push(accessListAuthModel promises.push(
.query() accessListAuthModel.query().insert({
.insert({
access_list_id: data.id, access_list_id: data.id,
username: item.username, username: item.username,
password: item.password password: item.password,
}) }),
); );
} else { } else {
// This was supplied with an empty password, which means keep it but don't change the password // This was supplied with an empty password, which means keep it but don't change the password
items_to_keep.push(item.username); itemsToKeep.push(item.username);
} }
return true;
}); });
let query = accessListAuthModel const query = accessListAuthModel.query().delete().where("access_list_id", data.id);
.query()
.delete()
.where('access_list_id', data.id);
if (items_to_keep.length) { if (itemsToKeep.length) {
query.andWhere('username', 'NOT IN', items_to_keep); query.andWhere("username", "NOT IN", itemsToKeep);
} }
return query await query;
.then(() => {
// Add new items // Add new items
if (promises.length) { if (promises.length) {
return Promise.all(promises); await Promise.all(promises);
} }
});
} }
})
.then(() => {
// Check for clients and add/update/remove them
if (typeof data.clients !== 'undefined' && data.clients) {
let promises = [];
data.clients.map(function (client) { // Check for clients and add/update/remove them
if (typeof data.clients !== "undefined" && data.clients) {
const clientPromises = [];
data.clients.map((client) => {
if (client.address) { if (client.address) {
promises.push(accessListClientModel clientPromises.push(
.query() accessListClientModel.query().insert({
.insert({
access_list_id: data.id, access_list_id: data.id,
address: client.address, address: client.address,
directive: client.directive directive: client.directive,
}) }),
); );
} }
return true;
}); });
let query = accessListClientModel const query = accessListClientModel.query().delete().where("access_list_id", data.id);
.query() await query;
.delete() // Add new clitens
.where('access_list_id', data.id); if (clientPromises.length) {
await Promise.all(clientPromises);
}
}
return query
.then(() => {
// Add new items
if (promises.length) {
return Promise.all(promises);
}
});
}
})
.then(() => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { await internalAuditLog.add(access, {
action: 'updated', action: "updated",
object_type: 'access-list', object_type: "access-list",
object_id: data.id, object_id: data.id,
meta: internalAccessList.maskItems(data) meta: internalAccessList.maskItems(data),
}); });
})
.then(() => {
// re-fetch with expansions // re-fetch with expansions
return internalAccessList.get(access, { const freshRow = await internalAccessList.get(
access,
{
id: data.id, id: data.id,
expand: ['owner', 'items', 'clients', 'proxy_hosts.[certificate,access_list.[clients,items]]'] expand: ["owner", "items", "clients", "proxy_hosts.[certificate,access_list.[clients,items]]"],
}, true /* <- skip masking */); },
}) true // skip masking
.then((row) => { );
return internalAccessList.build(row)
.then(() => { await internalAccessList.build(freshRow)
if (row.proxy_host_count) { if (Number.parseInt(row.proxy_host_count, 10)) {
return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts); await internalNginx.bulkGenerateConfigs("proxy_host", row.proxy_hosts);
} }
}).then(internalNginx.reload) await internalNginx.reload();
.then(() => {
return internalAccessList.maskItems(row); return internalAccessList.maskItems(row);
});
});
}, },
	/**
@@ -239,48 +208,50 @@ const internalAccessList = {
	 * @param {Integer} data.id
	 * @param {Array} [data.expand]
	 * @param {Array} [data.omit]
	 * @param {Boolean} [skipMasking]
	 * @return {Promise}
	 */
	get: async (access, data, skipMasking) => {
		const thisData = data || {};
		const accessData = await access.can("access_lists:get", thisData.id);

		const query = accessListModel
			.query()
			.select("access_list.*", accessListModel.raw("COUNT(proxy_host.id) as proxy_host_count"))
			.leftJoin("proxy_host", function () {
				this.on("proxy_host.access_list_id", "=", "access_list.id").andOn(
					"proxy_host.is_deleted",
					"=",
					0,
				);
			})
			.where("access_list.is_deleted", 0)
			.andWhere("access_list.id", thisData.id)
			.groupBy("access_list.id")
			.allowGraph("[owner,items,clients,proxy_hosts.[certificate,access_list.[clients,items]]]")
			.first();

		if (accessData.permission_visibility !== "all") {
			query.andWhere("access_list.owner_user_id", access.token.getUserId(1));
		}

		if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
			query.withGraphFetched(`[${thisData.expand.join(", ")}]`);
		}

		let row = await query.then(utils.omitRow(omissions()));
		if (!row || !row.id) {
			throw new errs.ItemNotFoundError(thisData.id);
		}
		if (!skipMasking && typeof row.items !== "undefined" && row.items) {
			row = internalAccessList.maskItems(row);
		}
		// Custom omissions
		if (typeof thisData.omit !== "undefined" && thisData.omit !== null) {
			row = _.omit(row, thisData.omit);
		}
		return row;
	},
	/**
@@ -290,14 +261,15 @@ const internalAccessList = {
	 * @param {String} [data.reason]
	 * @returns {Promise}
	 */
	delete: async (access, data) => {
		await access.can("access_lists:delete", data.id);
		const row = await internalAccessList.get(access, {
			id: data.id,
			expand: ["proxy_hosts", "items", "clients"],
		});
		if (!row || !row.id) {
			throw new errs.ItemNotFoundError(data.id);
		}

		// 1. update row to be deleted
@@ -306,57 +278,47 @@ const internalAccessList = {
		// 4. audit log

		// 1. update row to be deleted
		await accessListModel
			.query()
			.where("id", row.id)
			.patch({
				is_deleted: 1,
			});

		// 2. update any proxy hosts that were using it (ignoring permissions)
		if (row.proxy_hosts) {
			await proxyHostModel
				.query()
				.where("access_list_id", "=", row.id)
				.patch({ access_list_id: 0 });

			// 3. reconfigure those hosts, then reload nginx
			// set the access_list_id to zero for these items
			row.proxy_hosts.map((_val, idx) => {
				row.proxy_hosts[idx].access_list_id = 0;
				return true;
			});

			await internalNginx.bulkGenerateConfigs("proxy_host", row.proxy_hosts);
		}

		await internalNginx.reload();

		// delete the htpasswd file
		try {
			fs.unlinkSync(internalAccessList.getFilename(row));
		} catch (_err) {
			// do nothing
		}

		// 4. audit log
		await internalAuditLog.add(access, {
			action: "deleted",
			object_type: "access-list",
			object_id: row.id,
			meta: _.omit(internalAccessList.maskItems(row), ["is_deleted", "proxy_hosts"]),
		});

		return true;
	},
	/**
@@ -364,72 +326,73 @@ const internalAccessList = {
	 *
	 * @param {Access} access
	 * @param {Array} [expand]
	 * @param {String} [searchQuery]
	 * @returns {Promise}
	 */
	getAll: async (access, expand, searchQuery) => {
		const accessData = await access.can("access_lists:list");

		const query = accessListModel
			.query()
			.select("access_list.*", accessListModel.raw("COUNT(proxy_host.id) as proxy_host_count"))
			.leftJoin("proxy_host", function () {
				this.on("proxy_host.access_list_id", "=", "access_list.id").andOn(
					"proxy_host.is_deleted",
					"=",
					0,
				);
			})
			.where("access_list.is_deleted", 0)
			.groupBy("access_list.id")
			.allowGraph("[owner,items,clients]")
			.orderBy("access_list.name", "ASC");

		if (accessData.permission_visibility !== "all") {
			query.andWhere("access_list.owner_user_id", access.token.getUserId(1));
		}

		// Query is used for searching
		if (typeof searchQuery === "string") {
			query.where(function () {
				this.where("name", "like", `%${searchQuery}%`);
			});
		}

		if (typeof expand !== "undefined" && expand !== null) {
			query.withGraphFetched(`[${expand.join(", ")}]`);
		}

		const rows = await query.then(utils.omitRows(omissions()));
		if (rows) {
			rows.map((row, idx) => {
				if (typeof row.items !== "undefined" && row.items) {
					rows[idx] = internalAccessList.maskItems(row);
				}
				return true;
			});
		}
		return rows;
	},
	/**
	 * Count is used in reports
	 *
	 * @param {Integer} userId
	 * @param {String} visibility
	 * @returns {Promise}
	 */
	getCount: async (userId, visibility) => {
		const query = accessListModel
			.query()
			.count("id as count")
			.where("is_deleted", 0);

		if (visibility !== "all") {
			query.andWhere("owner_user_id", userId);
		}

		const row = await query.first();
		return Number.parseInt(row.count, 10);
	},
	/**
@@ -437,21 +400,21 @@ const internalAccessList = {
	 * @returns {Object}
	 */
	maskItems: (list) => {
		if (list && typeof list.items !== "undefined") {
			list.items.map((val, idx) => {
				let repeatFor = 8;
				let firstChar = "*";
				if (typeof val.password !== "undefined" && val.password) {
					repeatFor = val.password.length - 1;
					firstChar = val.password.charAt(0);
				}
				list.items[idx].hint = firstChar + "*".repeat(repeatFor);
				list.items[idx].password = "";
				return true;
			});
		}
		return list;
	},
@@ -461,7 +424,7 @@ const internalAccessList = {
	 * @returns {String}
	 */
	getFilename: (list) => {
		return `/data/access/${list.id}`;
	},
	/**
@@ -471,38 +434,36 @@ const internalAccessList = {
	 * @param {Array} list.items
	 * @returns {Promise}
	 */
	build: async (list) => {
		logger.info(`Building Access file #${list.id} for: ${list.name}`);
		const htpasswdFile = internalAccessList.getFilename(list);

		// 1. remove any existing access file
		try {
			fs.unlinkSync(htpasswdFile);
		} catch (_err) {
			// do nothing
		}

		// 2. create empty access file
		fs.writeFileSync(htpasswdFile, '', {encoding: 'utf8'});

		// 3. generate password for each user
		if (list.items.length) {
			await new Promise((resolve, reject) => {
				batchflow(list.items).sequential()
					.each((_i, item, next) => {
						if (item.password?.length) {
							logger.info(`Adding: ${item.username}`);
							utils.execFile('openssl', ['passwd', '-apr1', item.password])
								.then((res) => {
									try {
										fs.appendFileSync(htpasswdFile, `${item.username}:${res}\n`, {encoding: 'utf8'});
									} catch (err) {
										reject(err);
									}
									next();
								})
								.catch((err) => {
@@ -516,13 +477,12 @@ const internalAccessList = {
						reject(err);
					})
					.end((results) => {
						logger.success(`Built Access file #${list.id} for: ${list.name}`);
						resolve(results);
					});
			});
		}
	},
};

export default internalAccessList;
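The access-list changes above follow the same mechanical pattern used across all of these internal modules: a `.then()` chain that threaded a row through several steps becomes a flat async function that awaits each step in order. A minimal sketch of that conversion, using hypothetical `exampleModel` and `exampleLog` objects rather than the real models in this diff:

// Before: each step nests or chains, and the row must be passed along by hand.
const getThing = (access, id) =>
	access.can("things:get", id)
		.then(() => exampleModel.query().findById(id))
		.then((row) => exampleLog.add(access, { object_id: row.id }).then(() => row));

// After: the same steps read top to bottom; errors still propagate as rejections.
const getThingAsync = async (access, id) => {
	await access.can("things:get", id);
	const row = await exampleModel.query().findById(id);
	await exampleLog.add(access, { object_id: row.id });
	return row;
};

Either form returns a Promise to the caller, so route handlers that already `.catch()` the result keep working unchanged.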

View File

@@ -1,5 +1,6 @@
import errs from "../lib/error.js";
import { castJsonIfNeed } from "../lib/helpers.js";
import auditLogModel from "../models/audit-log.js";

const internalAuditLog = {
@@ -8,32 +9,31 @@ const internalAuditLog = {
	 *
	 * @param {Access} access
	 * @param {Array} [expand]
	 * @param {String} [searchQuery]
	 * @returns {Promise}
	 */
	getAll: async (access, expand, searchQuery) => {
		await access.can("auditlog:list");

		const query = auditLogModel
			.query()
			.orderBy("created_on", "DESC")
			.orderBy("id", "DESC")
			.limit(100)
			.allowGraph("[user]");

		// Query is used for searching
		if (typeof searchQuery === "string" && searchQuery.length > 0) {
			query.where(function () {
				this.where(castJsonIfNeed("meta"), "like", `%${searchQuery}%`);
			});
		}

		if (typeof expand !== "undefined" && expand !== null) {
			query.withGraphFetched(`[${expand.join(", ")}]`);
		}

		return await query;
	},

	/**
@@ -50,29 +50,24 @@ const internalAuditLog = {
	 * @param {Object} [data.meta]
	 * @returns {Promise}
	 */
	add: async (access, data) => {
		// Default the user id
		if (typeof data.user_id === "undefined" || !data.user_id) {
			data.user_id = access.token.getUserId(1);
		}
		if (typeof data.action === "undefined" || !data.action) {
			throw new errs.InternalValidationError("Audit log entry must contain an Action");
		}
		// Make sure at least 1 of the IDs are set and action
		return await auditLogModel.query().insert({
			user_id: data.user_id,
			action: data.action,
			object_type: data.object_type || "",
			object_id: data.object_id || 0,
			meta: data.meta || {},
		});
	},
};

export default internalAuditLog;
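With internalAuditLog.add now async, callers simply await it and let thrown validation errors bubble to their own error handling. A hedged sketch of how a caller might record an event; the Express-style handler shape and the `res.locals.access` lookup are illustrative assumptions, not taken from this diff:

import internalAuditLog from "../internal/audit-log.js";

// Hypothetical handler; the await pattern is the only point being shown.
const exampleHandler = async (req, res, next) => {
	try {
		await internalAuditLog.add(res.locals.access, {
			action: "updated",
			object_type: "example-object",
			object_id: 123,
			meta: { note: "illustrative payload" },
		});
		res.status(204).end();
	} catch (err) {
		next(err);
	}
};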

File diff suppressed because it is too large

View File

@@ -1,109 +1,89 @@
import _ from "lodash";
import errs from "../lib/error.js";
import { castJsonIfNeed } from "../lib/helpers.js";
import utils from "../lib/utils.js";
import deadHostModel from "../models/dead_host.js";
import internalAuditLog from "./audit-log.js";
import internalCertificate from "./certificate.js";
import internalHost from "./host.js";
import internalNginx from "./nginx.js";

const omissions = () => {
	return ["is_deleted"];
};

const internalDeadHost = {
	/**
	 * @param {Access} access
	 * @param {Object} data
	 * @returns {Promise}
	 */
	create: async (access, data) => {
		const createCertificate = data.certificate_id === "new";
		if (createCertificate) {
			delete data.certificate_id;
		}

		await access.can("dead_hosts:create", data);

		// Get a list of the domain names and check each of them against existing records
		const domainNameCheckPromises = [];
		data.domain_names.map((domain_name) => {
			domainNameCheckPromises.push(internalHost.isHostnameTaken(domain_name));
			return true;
		});
		await Promise.all(domainNameCheckPromises).then((check_results) => {
			check_results.map((result) => {
				if (result.is_taken) {
					throw new errs.ValidationError(`${result.hostname} is already in use`);
				}
				return true;
			});
		});

		// At this point the domains should have been checked
		data.owner_user_id = access.token.getUserId(1);
		const thisData = internalHost.cleanSslHstsData(data);

		// Fix for db field not having a default value
		// for this optional field.
		if (typeof data.advanced_config === "undefined") {
			thisData.advanced_config = "";
		}

		const row = await deadHostModel.query().insertAndFetch(thisData).then(utils.omitRow(omissions()));

		if (createCertificate) {
			const cert = await internalCertificate.createQuickCertificate(access, data);
			// update host with cert id
			await internalDeadHost.update(access, {
				id: row.id,
				certificate_id: cert.id,
			});
		}

		// re-fetch with cert
		const freshRow = await internalDeadHost.get(access, {
			id: row.id,
			expand: ["certificate", "owner"],
		});

		// Configure nginx
		await internalNginx.configure(deadHostModel, "dead_host", freshRow);

		data.meta = _.assign({}, data.meta || {}, freshRow.meta);

		// Add to audit log
		await internalAuditLog.add(access, {
			action: "created",
			object_type: "dead-host",
			object_id: freshRow.id,
			meta: data,
		});

		return freshRow;
	},
	/**
@@ -112,98 +92,79 @@ const internalDeadHost = {
	 * @param {Number} data.id
	 * @return {Promise}
	 */
	update: async (access, data) => {
		const createCertificate = data.certificate_id === "new";
		if (createCertificate) {
			delete data.certificate_id;
		}

		await access.can("dead_hosts:update", data.id);

		// Get a list of the domain names and check each of them against existing records
		const domainNameCheckPromises = [];
		if (typeof data.domain_names !== "undefined") {
			data.domain_names.map((domainName) => {
				domainNameCheckPromises.push(internalHost.isHostnameTaken(domainName, "dead", data.id));
				return true;
			});

			const checkResults = await Promise.all(domainNameCheckPromises);
			checkResults.map((result) => {
				if (result.is_taken) {
					throw new errs.ValidationError(`${result.hostname} is already in use`);
				}
				return true;
			});
		}

		const row = await internalDeadHost.get(access, { id: data.id });
		if (row.id !== data.id) {
			// Sanity check that something crazy hasn't happened
			throw new errs.InternalValidationError(
				`404 Host could not be updated, IDs do not match: ${row.id} !== ${data.id}`,
			);
		}

		if (createCertificate) {
			const cert = await internalCertificate.createQuickCertificate(access, {
				domain_names: data.domain_names || row.domain_names,
				meta: _.assign({}, row.meta, data.meta),
			});
			// update host with cert id
			data.certificate_id = cert.id;
		}

		// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
		let thisData = _.assign(
			{},
			{
				domain_names: row.domain_names,
			},
			data,
		);

		thisData = internalHost.cleanSslHstsData(thisData, row);

		await deadHostModel.query().where({ id: data.id }).patch(thisData);

		// Add to audit log
		await internalAuditLog.add(access, {
			action: "updated",
			object_type: "dead-host",
			object_id: row.id,
			meta: thisData,
		});

		const thisRow = await internalDeadHost.get(access, {
			id: thisData.id,
			expand: ["owner", "certificate"],
		});

		// Configure nginx
		const newMeta = await internalNginx.configure(deadHostModel, "dead_host", thisRow);
		thisRow.meta = newMeta;

		return _.omit(internalHost.cleanRowCertificateMeta(thisRow), omissions());
	},
	/**
@@ -214,40 +175,32 @@ const internalDeadHost = {
	 * @param {Array} [data.omit]
	 * @return {Promise}
	 */
	get: async (access, data) => {
		const accessData = await access.can("dead_hosts:get", data.id);

		const query = deadHostModel
			.query()
			.where("is_deleted", 0)
			.andWhere("id", data.id)
			.allowGraph("[owner,certificate]")
			.first();

		if (accessData.permission_visibility !== "all") {
			query.andWhere("owner_user_id", access.token.getUserId(1));
		}

		if (typeof data.expand !== "undefined" && data.expand !== null) {
			query.withGraphFetched(`[${data.expand.join(", ")}]`);
		}

		const row = await query.then(utils.omitRow(omissions()));
		if (!row || !row.id) {
			throw new errs.ItemNotFoundError(data.id);
		}
		// Custom omissions
		if (typeof data.omit !== "undefined" && data.omit !== null) {
			return _.omit(row, data.omit);
		}
		return row;
	},
	/**
@@ -257,41 +210,29 @@ const internalDeadHost = {
	 * @param {String} [data.reason]
	 * @returns {Promise}
	 */
	delete: async (access, data) => {
		await access.can("dead_hosts:delete", data.id);
		const row = await internalDeadHost.get(access, { id: data.id });
		if (!row || !row.id) {
			throw new errs.ItemNotFoundError(data.id);
		}

		await deadHostModel
			.query()
			.where("id", row.id)
			.patch({
				is_deleted: 1,
			});

		// Delete Nginx Config
		await internalNginx.deleteConfig("dead_host", row);
		await internalNginx.reload();

		// Add to audit log
		await internalAuditLog.add(access, {
			action: "deleted",
			object_type: "dead-host",
			object_id: row.id,
			meta: _.omit(row, omissions()),
		});

		return true;
	},
@@ -302,46 +243,39 @@ const internalDeadHost = {
	 * @param {String} [data.reason]
	 * @returns {Promise}
	 */
	enable: async (access, data) => {
		await access.can("dead_hosts:update", data.id);
		const row = await internalDeadHost.get(access, {
			id: data.id,
			expand: ["certificate", "owner"],
		});
		if (!row || !row.id) {
			throw new errs.ItemNotFoundError(data.id);
		}
		if (row.enabled) {
			throw new errs.ValidationError("Host is already enabled");
		}

		row.enabled = 1;

		await deadHostModel
			.query()
			.where("id", row.id)
			.patch({
				enabled: 1,
			});

		// Configure nginx
		await internalNginx.configure(deadHostModel, "dead_host", row);

		// Add to audit log
		await internalAuditLog.add(access, {
			action: "enabled",
			object_type: "dead-host",
			object_id: row.id,
			meta: _.omit(row, omissions()),
		});

		return true;
	},
	/**
@@ -351,46 +285,37 @@ const internalDeadHost = {
	 * @param {String} [data.reason]
	 * @returns {Promise}
	 */
	disable: async (access, data) => {
		await access.can("dead_hosts:update", data.id);
		const row = await internalDeadHost.get(access, { id: data.id });
		if (!row || !row.id) {
			throw new errs.ItemNotFoundError(data.id);
		}
		if (!row.enabled) {
			throw new errs.ValidationError("Host is already disabled");
		}

		row.enabled = 0;

		await deadHostModel
			.query()
			.where("id", row.id)
			.patch({
				enabled: 0,
			});

		// Delete Nginx Config
		await internalNginx.deleteConfig("dead_host", row);
		await internalNginx.reload();

		// Add to audit log
		await internalAuditLog.add(access, {
			action: "disabled",
			object_type: "dead-host",
			object_id: row.id,
			meta: _.omit(row, omissions()),
		});

		return true;
	},
	/**
@@ -398,43 +323,38 @@ const internalDeadHost = {
	 *
	 * @param {Access} access
	 * @param {Array} [expand]
	 * @param {String} [searchQuery]
	 * @returns {Promise}
	 */
	getAll: async (access, expand, searchQuery) => {
		const accessData = await access.can("dead_hosts:list");

		const query = deadHostModel
			.query()
			.where("is_deleted", 0)
			.groupBy("id")
			.allowGraph("[owner,certificate]")
			.orderBy(castJsonIfNeed("domain_names"), "ASC");

		if (accessData.permission_visibility !== "all") {
			query.andWhere("owner_user_id", access.token.getUserId(1));
		}

		// Query is used for searching
		if (typeof searchQuery === "string" && searchQuery.length > 0) {
			query.where(function () {
				this.where(castJsonIfNeed("domain_names"), "like", `%${searchQuery}%`);
			});
		}

		if (typeof expand !== "undefined" && expand !== null) {
			query.withGraphFetched(`[${expand.join(", ")}]`);
		}

		const rows = await query.then(utils.omitRows(omissions()));
		if (typeof expand !== "undefined" && expand !== null && expand.indexOf("certificate") !== -1) {
			internalHost.cleanAllRowsCertificateMeta(rows);
		}
		return rows;
	},
	/**
@@ -444,21 +364,16 @@ const internalDeadHost = {
	 * @param {String} visibility
	 * @returns {Promise}
	 */
	getCount: async (user_id, visibility) => {
		const query = deadHostModel.query().count("id as count").where("is_deleted", 0);

		if (visibility !== "all") {
			query.andWhere("owner_user_id", user_id);
		}

		const row = await query.first();
		return Number.parseInt(row.count, 10);
	},
};

export default internalDeadHost;
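Both create and update above treat certificate_id === "new" as a request to mint a certificate first and then attach its id to the host. A condensed sketch of that branch in isolation; the helper name and the shape of its caller are hypothetical, only createQuickCertificate comes from the diff:

// Hypothetical helper illustrating the "new" sentinel handling.
const maybeAttachCertificate = async (access, data, internalCertificate) => {
	const createCertificate = data.certificate_id === "new";
	if (!createCertificate) {
		return data;
	}
	// "new" is a sentinel, not a real id, so drop it before writing the host row.
	delete data.certificate_id;
	const cert = await internalCertificate.createQuickCertificate(access, data);
	return { ...data, certificate_id: cert.id };
};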

View File

@@ -1,10 +1,10 @@
import _ from "lodash";
import { castJsonIfNeed } from "../lib/helpers.js";
import deadHostModel from "../models/dead_host.js";
import proxyHostModel from "../models/proxy_host.js";
import redirectionHostModel from "../models/redirection_host.js";

const internalHost = {
	/**
	 * Makes sure that the ssl_* and hsts_* fields play nicely together.
	 * ie: if there is no cert, then force_ssl is off.
@@ -14,25 +14,23 @@ const internalHost = {
* @param {object} [existing_data] * @param {object} [existing_data]
* @returns {object} * @returns {object}
*/ */
cleanSslHstsData: function (data, existing_data) { cleanSslHstsData: (data, existingData) => {
existing_data = existing_data === undefined ? {} : existing_data; const combinedData = _.assign({}, existingData || {}, data);
let combined_data = _.assign({}, existing_data, data); if (!combinedData.certificate_id) {
combinedData.ssl_forced = false;
if (!combined_data.certificate_id) { combinedData.http2_support = false;
combined_data.ssl_forced = false;
combined_data.http2_support = false;
} }
if (!combined_data.ssl_forced) { if (!combinedData.ssl_forced) {
combined_data.hsts_enabled = false; combinedData.hsts_enabled = false;
} }
if (!combined_data.hsts_enabled) { if (!combinedData.hsts_enabled) {
combined_data.hsts_subdomains = false; combinedData.hsts_subdomains = false;
} }
return combined_data; return combinedData;
}, },
/** /**
@@ -41,11 +39,12 @@ const internalHost = {
* @param {Array} rows * @param {Array} rows
* @returns {Array} * @returns {Array}
*/ */
cleanAllRowsCertificateMeta: function (rows) { cleanAllRowsCertificateMeta: (rows) => {
rows.map(function (row, idx) { rows.map((_, idx) => {
if (typeof rows[idx].certificate !== 'undefined' && rows[idx].certificate) { if (typeof rows[idx].certificate !== "undefined" && rows[idx].certificate) {
rows[idx].certificate.meta = {}; rows[idx].certificate.meta = {};
} }
return true;
}); });
return rows; return rows;
@@ -57,8 +56,8 @@ const internalHost = {
* @param {Object} row * @param {Object} row
* @returns {Object} * @returns {Object}
*/ */
cleanRowCertificateMeta: function (row) { cleanRowCertificateMeta: (row) => {
if (typeof row.certificate !== 'undefined' && row.certificate) { if (typeof row.certificate !== "undefined" && row.certificate) {
row.certificate.meta = {}; row.certificate.meta = {};
} }
@@ -66,54 +65,33 @@ const internalHost = {
}, },
/** /**
* This returns all the host types with any domain listed in the provided domain_names array. * This returns all the host types with any domain listed in the provided domainNames array.
* This is used by the certificates to temporarily disable any host that is using the domain * This is used by the certificates to temporarily disable any host that is using the domain
* *
* @param {Array} domain_names * @param {Array} domainNames
* @returns {Promise} * @returns {Promise}
*/ */
getHostsWithDomains: function (domain_names) { getHostsWithDomains: async (domainNames) => {
let promises = [ const responseObject = {
proxyHostModel
.query()
.where('is_deleted', 0),
redirectionHostModel
.query()
.where('is_deleted', 0),
deadHostModel
.query()
.where('is_deleted', 0)
];
return Promise.all(promises)
.then((promises_results) => {
let response_object = {
total_count: 0, total_count: 0,
dead_hosts: [], dead_hosts: [],
proxy_hosts: [], proxy_hosts: [],
redirection_hosts: [] redirection_hosts: [],
}; };
if (promises_results[0]) { const proxyRes = await proxyHostModel.query().where("is_deleted", 0);
// Proxy Hosts responseObject.proxy_hosts = internalHost._getHostsWithDomains(proxyRes, domainNames);
response_object.proxy_hosts = internalHost._getHostsWithDomains(promises_results[0], domain_names); responseObject.total_count += responseObject.proxy_hosts.length;
response_object.total_count += response_object.proxy_hosts.length;
}
if (promises_results[1]) { const redirRes = await redirectionHostModel.query().where("is_deleted", 0);
// Redirection Hosts responseObject.redirection_hosts = internalHost._getHostsWithDomains(redirRes, domainNames);
response_object.redirection_hosts = internalHost._getHostsWithDomains(promises_results[1], domain_names); responseObject.total_count += responseObject.redirection_hosts.length;
response_object.total_count += response_object.redirection_hosts.length;
}
if (promises_results[2]) { const deadRes = await deadHostModel.query().where("is_deleted", 0);
// Dead Hosts responseObject.dead_hosts = internalHost._getHostsWithDomains(deadRes, domainNames);
response_object.dead_hosts = internalHost._getHostsWithDomains(promises_results[2], domain_names); responseObject.total_count += responseObject.dead_hosts.length;
response_object.total_count += response_object.dead_hosts.length;
}
return response_object; return responseObject;
});
}, },
/** /**
@@ -124,50 +102,67 @@ const internalHost = {
* @param {Integer} [ignore_id] Must be supplied if type was also supplied * @param {Integer} [ignore_id] Must be supplied if type was also supplied
* @returns {Promise} * @returns {Promise}
*/ */
isHostnameTaken: function (hostname, ignore_type, ignore_id) { isHostnameTaken: (hostname, ignore_type, ignore_id) => {
let promises = [ const promises = [
proxyHostModel proxyHostModel
.query() .query()
.where('is_deleted', 0) .where("is_deleted", 0)
.andWhere('domain_names', 'like', '%' + hostname + '%'), .andWhere(castJsonIfNeed("domain_names"), "like", `%${hostname}%`),
redirectionHostModel redirectionHostModel
.query() .query()
.where('is_deleted', 0) .where("is_deleted", 0)
.andWhere('domain_names', 'like', '%' + hostname + '%'), .andWhere(castJsonIfNeed("domain_names"), "like", `%${hostname}%`),
deadHostModel deadHostModel
.query() .query()
.where('is_deleted', 0) .where("is_deleted", 0)
.andWhere('domain_names', 'like', '%' + hostname + '%') .andWhere(castJsonIfNeed("domain_names"), "like", `%${hostname}%`),
]; ];
return Promise.all(promises) return Promise.all(promises).then((promises_results) => {
.then((promises_results) => {
let is_taken = false; let is_taken = false;
if (promises_results[0]) { if (promises_results[0]) {
// Proxy Hosts // Proxy Hosts
if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[0], ignore_type === 'proxy' && ignore_id ? ignore_id : 0)) { if (
internalHost._checkHostnameRecordsTaken(
hostname,
promises_results[0],
ignore_type === "proxy" && ignore_id ? ignore_id : 0,
)
) {
is_taken = true; is_taken = true;
} }
} }
if (promises_results[1]) { if (promises_results[1]) {
// Redirection Hosts // Redirection Hosts
if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[1], ignore_type === 'redirection' && ignore_id ? ignore_id : 0)) { if (
internalHost._checkHostnameRecordsTaken(
hostname,
promises_results[1],
ignore_type === "redirection" && ignore_id ? ignore_id : 0,
)
) {
is_taken = true; is_taken = true;
} }
} }
if (promises_results[2]) { if (promises_results[2]) {
// Dead Hosts // Dead Hosts
if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[2], ignore_type === 'dead' && ignore_id ? ignore_id : 0)) { if (
internalHost._checkHostnameRecordsTaken(
hostname,
promises_results[2],
ignore_type === "dead" && ignore_id ? ignore_id : 0,
)
) {
is_taken = true; is_taken = true;
} }
} }
return { return {
hostname: hostname, hostname: hostname,
is_taken: is_taken is_taken: is_taken,
}; };
}); });
}, },
@@ -176,60 +171,64 @@ const internalHost = {
* Private call only * Private call only
* *
* @param {String} hostname * @param {String} hostname
* @param {Array} existing_rows * @param {Array} existingRows
* @param {Integer} [ignore_id] * @param {Integer} [ignoreId]
* @returns {Boolean} * @returns {Boolean}
*/ */
_checkHostnameRecordsTaken: function (hostname, existing_rows, ignore_id) { _checkHostnameRecordsTaken: (hostname, existingRows, ignoreId) => {
let is_taken = false; let isTaken = false;
if (existing_rows && existing_rows.length) { if (existingRows?.length) {
existing_rows.map(function (existing_row) { existingRows.map((existingRow) => {
existing_row.domain_names.map(function (existing_hostname) { existingRow.domain_names.map((existingHostname) => {
// Does this domain match? // Does this domain match?
if (existing_hostname.toLowerCase() === hostname.toLowerCase()) { if (existingHostname.toLowerCase() === hostname.toLowerCase()) {
if (!ignore_id || ignore_id !== existing_row.id) { if (!ignoreId || ignoreId !== existingRow.id) {
is_taken = true; isTaken = true;
} }
} }
return true;
}); });
return true;
}); });
} }
return is_taken; return isTaken;
}, },
/** /**
* Private call only * Private call only
* *
* @param {Array} hosts * @param {Array} hosts
* @param {Array} domain_names * @param {Array} domainNames
* @returns {Array} * @returns {Array}
*/ */
_getHostsWithDomains: function (hosts, domain_names) { _getHostsWithDomains: (hosts, domainNames) => {
let response = []; const response = [];
if (hosts && hosts.length) { if (hosts?.length) {
hosts.map(function (host) { hosts.map((host) => {
let host_matches = false; let hostMatches = false;
domain_names.map(function (domain_name) { domainNames.map((domainName) => {
host.domain_names.map(function (host_domain_name) { host.domain_names.map((hostDomainName) => {
if (domain_name.toLowerCase() === host_domain_name.toLowerCase()) { if (domainName.toLowerCase() === hostDomainName.toLowerCase()) {
host_matches = true; hostMatches = true;
} }
return true;
}); });
return true;
}); });
if (host_matches) { if (hostMatches) {
response.push(host); response.push(host);
} }
return true;
}); });
} }
return response; return response;
} },
}; };
module.exports = internalHost; export default internalHost;
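cleanSslHstsData enforces a simple dependency chain: no certificate means no forced SSL and no HTTP/2, no forced SSL means no HSTS, and no HSTS means no HSTS subdomains. A small illustrative check of that cascade; the import path and the sample values are chosen for the example, not taken from the diff:

import internalHost from "./internal/host.js";

// With no certificate_id, every downstream flag should collapse to false.
const cleaned = internalHost.cleanSslHstsData({
	certificate_id: 0,
	ssl_forced: true,
	hsts_enabled: true,
	hsts_subdomains: true,
});
console.log(cleaned.ssl_forced, cleaned.hsts_enabled, cleaned.hsts_subdomains); // false false false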

View File

@@ -1,43 +1,49 @@
import fs from "node:fs";
import https from "node:https";
import { dirname } from "node:path";
import { fileURLToPath } from "node:url";
import errs from "../lib/error.js";
import utils from "../lib/utils.js";
import { ipRanges as logger } from "../logger.js";
import internalNginx from "./nginx.js";

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

const CLOUDFRONT_URL = "https://ip-ranges.amazonaws.com/ip-ranges.json";
const CLOUDFARE_V4_URL = "https://www.cloudflare.com/ips-v4";
const CLOUDFARE_V6_URL = "https://www.cloudflare.com/ips-v6";

const regIpV4 = /^(\d+\.?){4}\/\d+/;
const regIpV6 = /^(([\da-fA-F]+)?:)+\/\d+/;

const internalIpRanges = {
	interval_timeout: 1000 * 60 * 60 * 6, // 6 hours
	interval: null,
	interval_processing: false,
	iteration_count: 0,

	initTimer: () => {
		logger.info("IP Ranges Renewal Timer initialized");
		internalIpRanges.interval = setInterval(internalIpRanges.fetch, internalIpRanges.interval_timeout);
	},
fetchUrl: (url) => { fetchUrl: (url) => {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
logger.info('Fetching ' + url); logger.info(`Fetching ${url}`);
return https.get(url, (res) => { return https
res.setEncoding('utf8'); .get(url, (res) => {
let raw_data = ''; res.setEncoding("utf8");
res.on('data', (chunk) => { let raw_data = "";
res.on("data", (chunk) => {
raw_data += chunk; raw_data += chunk;
}); });
res.on('end', () => { res.on("end", () => {
resolve(raw_data); resolve(raw_data);
}); });
}).on('error', (err) => { })
.on("error", (err) => {
reject(err); reject(err);
}); });
}); });
@@ -49,27 +55,30 @@ const internalIpRanges = {
fetch: () => { fetch: () => {
if (!internalIpRanges.interval_processing) { if (!internalIpRanges.interval_processing) {
internalIpRanges.interval_processing = true; internalIpRanges.interval_processing = true;
logger.info('Fetching IP Ranges from online services...'); logger.info("Fetching IP Ranges from online services...");
let ip_ranges = []; let ip_ranges = [];
return internalIpRanges.fetchUrl(CLOUDFRONT_URL) return internalIpRanges
.fetchUrl(CLOUDFRONT_URL)
.then((cloudfront_data) => { .then((cloudfront_data) => {
let data = JSON.parse(cloudfront_data); const data = JSON.parse(cloudfront_data);
if (data && typeof data.prefixes !== 'undefined') { if (data && typeof data.prefixes !== "undefined") {
data.prefixes.map((item) => { data.prefixes.map((item) => {
if (item.service === 'CLOUDFRONT') { if (item.service === "CLOUDFRONT") {
ip_ranges.push(item.ip_prefix); ip_ranges.push(item.ip_prefix);
} }
return true;
}); });
} }
if (data && typeof data.ipv6_prefixes !== 'undefined') { if (data && typeof data.ipv6_prefixes !== "undefined") {
data.ipv6_prefixes.map((item) => { data.ipv6_prefixes.map((item) => {
if (item.service === 'CLOUDFRONT') { if (item.service === "CLOUDFRONT") {
ip_ranges.push(item.ipv6_prefix); ip_ranges.push(item.ipv6_prefix);
} }
return true;
}); });
} }
}) })
@@ -77,26 +86,26 @@ const internalIpRanges = {
return internalIpRanges.fetchUrl(CLOUDFARE_V4_URL); return internalIpRanges.fetchUrl(CLOUDFARE_V4_URL);
}) })
.then((cloudfare_data) => { .then((cloudfare_data) => {
let items = cloudfare_data.split('\n').filter((line) => regIpV4.test(line)); const items = cloudfare_data.split("\n").filter((line) => regIpV4.test(line));
ip_ranges = [...ip_ranges, ...items]; ip_ranges = [...ip_ranges, ...items];
}) })
.then(() => { .then(() => {
return internalIpRanges.fetchUrl(CLOUDFARE_V6_URL); return internalIpRanges.fetchUrl(CLOUDFARE_V6_URL);
}) })
.then((cloudfare_data) => { .then((cloudfare_data) => {
let items = cloudfare_data.split('\n').filter((line) => regIpV6.test(line)); const items = cloudfare_data.split("\n").filter((line) => regIpV6.test(line));
ip_ranges = [...ip_ranges, ...items]; ip_ranges = [...ip_ranges, ...items];
}) })
.then(() => { .then(() => {
let clean_ip_ranges = []; const clean_ip_ranges = [];
ip_ranges.map((range) => { ip_ranges.map((range) => {
if (range) { if (range) {
clean_ip_ranges.push(range); clean_ip_ranges.push(range);
} }
return true;
}); });
return internalIpRanges.generateConfig(clean_ip_ranges) return internalIpRanges.generateConfig(clean_ip_ranges).then(() => {
.then(() => {
if (internalIpRanges.iteration_count) { if (internalIpRanges.iteration_count) {
// Reload nginx // Reload nginx
return internalNginx.reload(); return internalNginx.reload();
@@ -108,7 +117,7 @@ const internalIpRanges = {
internalIpRanges.iteration_count++; internalIpRanges.iteration_count++;
}) })
.catch((err) => { .catch((err) => {
logger.error(err.message); logger.fatal(err.message);
internalIpRanges.interval_processing = false; internalIpRanges.interval_processing = false;
}); });
} }
@@ -122,26 +131,26 @@ const internalIpRanges = {
const renderEngine = utils.getRenderEngine(); const renderEngine = utils.getRenderEngine();
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
let template = null; let template = null;
let filename = '/etc/nginx/conf.d/include/ip_ranges.conf'; const filename = "/etc/nginx/conf.d/include/ip_ranges.conf";
try { try {
template = fs.readFileSync(__dirname + '/../templates/ip_ranges.conf', {encoding: 'utf8'}); template = fs.readFileSync(`${__dirname}/../templates/ip_ranges.conf`, { encoding: "utf8" });
} catch (err) { } catch (err) {
reject(new error.ConfigurationError(err.message)); reject(new errs.ConfigurationError(err.message));
return; return;
} }
renderEngine renderEngine
.parseAndRender(template, { ip_ranges: ip_ranges }) .parseAndRender(template, { ip_ranges: ip_ranges })
.then((config_text) => { .then((config_text) => {
fs.writeFileSync(filename, config_text, {encoding: 'utf8'}); fs.writeFileSync(filename, config_text, { encoding: "utf8" });
resolve(true); resolve(true);
}) })
.catch((err) => { .catch((err) => {
logger.warn('Could not write ' + filename + ':', err.message); logger.warn(`Could not write ${filename}: ${err.message}`);
reject(new error.ConfigurationError(err.message)); reject(new errs.ConfigurationError(err.message));
}); });
}); });
} },
}; };
module.exports = internalIpRanges; export default internalIpRanges;
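The Cloudflare lists fetched above are plain text with one CIDR per line, so the fetch step keeps only lines matching regIpV4 or regIpV6 before rendering the nginx include. A standalone sketch of that filtering with made-up sample input:

const regIpV4 = /^(\d+\.?){4}\/\d+/;
const regIpV6 = /^(([\da-fA-F]+)?:)+\/\d+/;

// Example payload; real responses come from the Cloudflare URLs above.
const sample = "173.245.48.0/20\n# comment line\n2400:cb00::/32\n";
const ranges = sample.split("\n").filter((line) => regIpV4.test(line) || regIpV6.test(line));
console.log(ranges); // [ "173.245.48.0/20", "2400:cb00::/32" ]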

View File

@@ -1,12 +1,15 @@
import fs from "node:fs";
import { dirname } from "node:path";
import { fileURLToPath } from "node:url";
import _ from "lodash";
import errs from "../lib/error.js";
import utils from "../lib/utils.js";
import { nginx as logger } from "../logger.js";

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

const internalNginx = {
/** /**
* This will: * This will:
* - test the nginx config first to make sure it's OK * - test the nginx config first to make sure it's OK
@@ -24,7 +27,8 @@ const internalNginx = {
configure: (model, host_type, host) => { configure: (model, host_type, host) => {
let combined_meta = {}; let combined_meta = {};
return internalNginx.test() return internalNginx
.test()
.then(() => { .then(() => {
// Nginx is OK // Nginx is OK
// We're deleting this config regardless. // We're deleting this config regardless.
@@ -37,19 +41,17 @@ const internalNginx = {
}) })
.then(() => { .then(() => {
// Test nginx again and update meta with result // Test nginx again and update meta with result
return internalNginx.test() return internalNginx
.test()
.then(() => { .then(() => {
// nginx is ok // nginx is ok
combined_meta = _.assign({}, host.meta, { combined_meta = _.assign({}, host.meta, {
nginx_online: true, nginx_online: true,
nginx_err: null nginx_err: null,
}); });
return model return model.query().where("id", host.id).patch({
.query() meta: combined_meta,
.where('id', host.id)
.patch({
meta: combined_meta
}); });
}) })
.catch((err) => { .catch((err) => {
@@ -57,29 +59,28 @@ const internalNginx = {
// It will always look like this: // It will always look like this:
// nginx: [alert] could not open error log file: open() "/var/log/nginx/error.log" failed (6: No such device or address) // nginx: [alert] could not open error log file: open() "/var/log/nginx/error.log" failed (6: No such device or address)
let valid_lines = []; const valid_lines = [];
let err_lines = err.message.split('\n'); const err_lines = err.message.split("\n");
err_lines.map(function (line) { err_lines.map((line) => {
if (line.indexOf('/var/log/nginx/error.log') === -1) { if (line.indexOf("/var/log/nginx/error.log") === -1) {
valid_lines.push(line); valid_lines.push(line);
} }
return true;
}); });
if (config.debug()) { logger.debug("Nginx test failed:", valid_lines.join("\n"));
logger.error('Nginx test failed:', valid_lines.join('\n'));
}
// config is bad, update meta and delete config // config is bad, update meta and delete config
combined_meta = _.assign({}, host.meta, { combined_meta = _.assign({}, host.meta, {
nginx_online: false, nginx_online: false,
nginx_err: valid_lines.join('\n') nginx_err: valid_lines.join("\n"),
}); });
return model return model
.query() .query()
.where('id', host.id) .where("id", host.id)
.patch({ .patch({
meta: combined_meta meta: combined_meta,
}) })
.then(() => { .then(() => {
internalNginx.renameConfigAsError(host_type, host); internalNginx.renameConfigAsError(host_type, host);
@@ -101,21 +102,17 @@ const internalNginx = {
* @returns {Promise} * @returns {Promise}
*/ */
test: () => { test: () => {
if (config.debug()) { logger.debug("Testing Nginx configuration");
logger.info('Testing Nginx configuration'); return utils.execFile("/usr/sbin/nginx", ["-t", "-g", "error_log off;"]);
}
return utils.exec('/usr/sbin/nginx -t -g "error_log off;"');
}, },
/** /**
* @returns {Promise} * @returns {Promise}
*/ */
reload: () => { reload: () => {
return internalNginx.test() return internalNginx.test().then(() => {
.then(() => { logger.info("Reloading Nginx");
logger.info('Reloading Nginx'); return utils.execFile("/usr/sbin/nginx", ["-s", "reload"]);
return utils.exec('/usr/sbin/nginx -s reload');
}); });
}, },
@@ -125,10 +122,10 @@ const internalNginx = {
* @returns {String} * @returns {String}
*/ */
getConfigName: (host_type, host_id) => { getConfigName: (host_type, host_id) => {
if (host_type === 'default') { if (host_type === "default") {
return '/data/nginx/default_host/site.conf'; return "/data/nginx/default_host/site.conf";
} }
return '/data/nginx/' + internalNginx.getFileFriendlyHostType(host_type) + '/' + host_id + '.conf'; return `/data/nginx/${internalNginx.getFileFriendlyHostType(host_type)}/${host_id}.conf`;
}, },
/** /**
@@ -141,38 +138,45 @@ const internalNginx = {
let template; let template;
try { try {
template = fs.readFileSync(__dirname + '/../templates/_location.conf', {encoding: 'utf8'}); template = fs.readFileSync(`${__dirname}/../templates/_location.conf`, { encoding: "utf8" });
} catch (err) { } catch (err) {
reject(new error.ConfigurationError(err.message)); reject(new errs.ConfigurationError(err.message));
return; return;
} }
const renderEngine = utils.getRenderEngine(); const renderEngine = utils.getRenderEngine();
let renderedLocations = ''; let renderedLocations = "";
const locationRendering = async () => { const locationRendering = async () => {
for (let i = 0; i < host.locations.length; i++) { for (let i = 0; i < host.locations.length; i++) {
let locationCopy = Object.assign({}, {access_list_id: host.access_list_id}, {certificate_id: host.certificate_id}, const locationCopy = Object.assign(
{ssl_forced: host.ssl_forced}, {caching_enabled: host.caching_enabled}, {block_exploits: host.block_exploits}, {},
{allow_websocket_upgrade: host.allow_websocket_upgrade}, {http2_support: host.http2_support}, { access_list_id: host.access_list_id },
{hsts_enabled: host.hsts_enabled}, {hsts_subdomains: host.hsts_subdomains}, {access_list: host.access_list}, { certificate_id: host.certificate_id },
{certificate: host.certificate}, host.locations[i]); { ssl_forced: host.ssl_forced },
{ caching_enabled: host.caching_enabled },
{ block_exploits: host.block_exploits },
{ allow_websocket_upgrade: host.allow_websocket_upgrade },
{ http2_support: host.http2_support },
{ hsts_enabled: host.hsts_enabled },
{ hsts_subdomains: host.hsts_subdomains },
{ access_list: host.access_list },
{ certificate: host.certificate },
host.locations[i],
);
if (locationCopy.forward_host.indexOf('/') > -1) { if (locationCopy.forward_host.indexOf("/") > -1) {
const splitted = locationCopy.forward_host.split('/'); const splitted = locationCopy.forward_host.split("/");
locationCopy.forward_host = splitted.shift(); locationCopy.forward_host = splitted.shift();
locationCopy.forward_path = `/${splitted.join('/')}`; locationCopy.forward_path = `/${splitted.join("/")}`;
} }
// eslint-disable-next-line
renderedLocations += await renderEngine.parseAndRender(template, locationCopy); renderedLocations += await renderEngine.parseAndRender(template, locationCopy);
} }
}; };
locationRendering().then(() => resolve(renderedLocations)); locationRendering().then(() => resolve(renderedLocations));
}); });
}, },
@@ -183,23 +187,21 @@ const internalNginx = {
*/ */
generateConfig: (host_type, host_row) => { generateConfig: (host_type, host_row) => {
// Prevent modifying the original object: // Prevent modifying the original object:
let host = JSON.parse(JSON.stringify(host_row)); const host = JSON.parse(JSON.stringify(host_row));
const nice_host_type = internalNginx.getFileFriendlyHostType(host_type); const nice_host_type = internalNginx.getFileFriendlyHostType(host_type);
if (config.debug()) { logger.debug(`Generating ${nice_host_type} Config:`, JSON.stringify(host, null, 2));
logger.info('Generating ' + nice_host_type + ' Config:', JSON.stringify(host, null, 2));
}
const renderEngine = utils.getRenderEngine(); const renderEngine = utils.getRenderEngine();
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
let template = null; let template = null;
let filename = internalNginx.getConfigName(nice_host_type, host.id); const filename = internalNginx.getConfigName(nice_host_type, host.id);
try { try {
template = fs.readFileSync(__dirname + '/../templates/' + nice_host_type + '.conf', {encoding: 'utf8'}); template = fs.readFileSync(`${__dirname}/../templates/${nice_host_type}.conf`, { encoding: "utf8" });
} catch (err) { } catch (err) {
reject(new error.ConfigurationError(err.message)); reject(new errs.ConfigurationError(err.message));
return; return;
} }
@@ -207,9 +209,9 @@ const internalNginx = {
let origLocations; let origLocations;
// Manipulate the data a bit before sending it to the template // Manipulate the data a bit before sending it to the template
if (nice_host_type !== 'default') { if (nice_host_type !== "default") {
host.use_default_location = true; host.use_default_location = true;
if (typeof host.advanced_config !== 'undefined' && host.advanced_config) { if (typeof host.advanced_config !== "undefined" && host.advanced_config) {
host.use_default_location = !internalNginx.advancedConfigHasDefaultLocation(host.advanced_config); host.use_default_location = !internalNginx.advancedConfigHasDefaultLocation(host.advanced_config);
} }
} }
@@ -223,11 +225,10 @@ const internalNginx = {
// Allow someone who is using / custom location path to use it, and skip the default / location // Allow someone who is using / custom location path to use it, and skip the default / location
_.map(host.locations, (location) => { _.map(host.locations, (location) => {
if (location.path === '/') { if (location.path === "/") {
host.use_default_location = false; host.use_default_location = false;
} }
}); });
} else { } else {
locationsPromise = Promise.resolve(); locationsPromise = Promise.resolve();
} }
@@ -239,11 +240,8 @@ const internalNginx = {
renderEngine renderEngine
.parseAndRender(template, host) .parseAndRender(template, host)
.then((config_text) => { .then((config_text) => {
fs.writeFileSync(filename, config_text, {encoding: 'utf8'}); fs.writeFileSync(filename, config_text, { encoding: "utf8" });
logger.debug("Wrote config:", filename, config_text);
if (config.debug()) {
logger.success('Wrote config:', filename, config_text);
}
// Restore locations array // Restore locations array
host.locations = origLocations; host.locations = origLocations;
@@ -251,11 +249,8 @@ const internalNginx = {
resolve(true); resolve(true);
}) })
.catch((err) => { .catch((err) => {
if (config.debug()) { logger.debug(`Could not write ${filename}:`, err.message);
logger.warn('Could not write ' + filename + ':', err.message); reject(new errs.ConfigurationError(err.message));
}
reject(new error.ConfigurationError(err.message));
}); });
}); });
}); });
@@ -270,20 +265,17 @@ const internalNginx = {
* @returns {Promise} * @returns {Promise}
*/ */
generateLetsEncryptRequestConfig: (certificate) => { generateLetsEncryptRequestConfig: (certificate) => {
if (config.debug()) { logger.debug("Generating LetsEncrypt Request Config:", certificate);
logger.info('Generating LetsEncrypt Request Config:', certificate);
}
const renderEngine = utils.getRenderEngine(); const renderEngine = utils.getRenderEngine();
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
let template = null; let template = null;
let filename = '/data/nginx/temp/letsencrypt_' + certificate.id + '.conf'; const filename = `/data/nginx/temp/letsencrypt_${certificate.id}.conf`;
try { try {
template = fs.readFileSync(__dirname + '/../templates/letsencrypt-request.conf', {encoding: 'utf8'}); template = fs.readFileSync(`${__dirname}/../templates/letsencrypt-request.conf`, { encoding: "utf8" });
} catch (err) { } catch (err) {
reject(new error.ConfigurationError(err.message)); reject(new errs.ConfigurationError(err.message));
return; return;
} }
@@ -292,20 +284,13 @@ const internalNginx = {
renderEngine renderEngine
.parseAndRender(template, certificate) .parseAndRender(template, certificate)
.then((config_text) => { .then((config_text) => {
fs.writeFileSync(filename, config_text, {encoding: 'utf8'}); fs.writeFileSync(filename, config_text, { encoding: "utf8" });
logger.debug("Wrote config:", filename, config_text);
if (config.debug()) {
logger.success('Wrote config:', filename, config_text);
}
resolve(true); resolve(true);
}) })
.catch((err) => { .catch((err) => {
if (config.debug()) { logger.debug(`Could not write ${filename}:`, err.message);
logger.warn('Could not write ' + filename + ':', err.message); reject(new errs.ConfigurationError(err.message));
}
reject(new error.ConfigurationError(err.message));
}); });
}); });
}, },
@@ -316,11 +301,11 @@ const internalNginx = {
* @param {String} filename * @param {String} filename
*/ */
deleteFile: (filename) => { deleteFile: (filename) => {
logger.debug('Deleting file: ' + filename); logger.debug(`Deleting file: ${filename}`);
try { try {
fs.unlinkSync(filename); fs.unlinkSync(filename);
} catch (err) { } catch (err) {
logger.debug('Could not delete file:', JSON.stringify(err, null, 2)); logger.debug("Could not delete file:", JSON.stringify(err, null, 2));
} }
}, },
@@ -330,7 +315,7 @@ const internalNginx = {
* @returns String * @returns String
*/ */
getFileFriendlyHostType: (host_type) => { getFileFriendlyHostType: (host_type) => {
return host_type.replace(new RegExp('-', 'g'), '_'); return host_type.replace(/-/g, "_");
}, },
/** /**
@@ -340,7 +325,7 @@ const internalNginx = {
* @returns {Promise} * @returns {Promise}
*/ */
deleteLetsEncryptRequestConfig: (certificate) => { deleteLetsEncryptRequestConfig: (certificate) => {
const config_file = '/data/nginx/temp/letsencrypt_' + certificate.id + '.conf'; const config_file = `/data/nginx/temp/letsencrypt_${certificate.id}.conf`;
return new Promise((resolve /*, reject*/) => { return new Promise((resolve /*, reject*/) => {
internalNginx.deleteFile(config_file); internalNginx.deleteFile(config_file);
resolve(); resolve();
@@ -354,8 +339,11 @@ const internalNginx = {
* @returns {Promise} * @returns {Promise}
*/ */
deleteConfig: (host_type, host, delete_err_file) => { deleteConfig: (host_type, host, delete_err_file) => {
const config_file = internalNginx.getConfigName(internalNginx.getFileFriendlyHostType(host_type), typeof host === 'undefined' ? 0 : host.id); const config_file = internalNginx.getConfigName(
const config_file_err = config_file + '.err'; internalNginx.getFileFriendlyHostType(host_type),
typeof host === "undefined" ? 0 : host.id,
);
const config_file_err = `${config_file}.err`;
return new Promise((resolve /*, reject*/) => { return new Promise((resolve /*, reject*/) => {
internalNginx.deleteFile(config_file); internalNginx.deleteFile(config_file);
@@ -372,8 +360,11 @@ const internalNginx = {
* @returns {Promise} * @returns {Promise}
*/ */
renameConfigAsError: (host_type, host) => { renameConfigAsError: (host_type, host) => {
const config_file = internalNginx.getConfigName(internalNginx.getFileFriendlyHostType(host_type), typeof host === 'undefined' ? 0 : host.id); const config_file = internalNginx.getConfigName(
const config_file_err = config_file + '.err'; internalNginx.getFileFriendlyHostType(host_type),
typeof host === "undefined" ? 0 : host.id,
);
const config_file_err = `${config_file}.err`;
return new Promise((resolve /*, reject*/) => { return new Promise((resolve /*, reject*/) => {
fs.unlink(config_file, () => { fs.unlink(config_file, () => {
@@ -392,9 +383,10 @@ const internalNginx = {
* @returns {Promise} * @returns {Promise}
*/ */
bulkGenerateConfigs: (host_type, hosts) => { bulkGenerateConfigs: (host_type, hosts) => {
let promises = []; const promises = [];
hosts.map(function (host) { hosts.map((host) => {
promises.push(internalNginx.generateConfig(host_type, host)); promises.push(internalNginx.generateConfig(host_type, host));
return true;
}); });
return Promise.all(promises); return Promise.all(promises);
@@ -406,9 +398,10 @@ const internalNginx = {
* @returns {Promise} * @returns {Promise}
*/ */
bulkDeleteConfigs: (host_type, hosts) => { bulkDeleteConfigs: (host_type, hosts) => {
let promises = []; const promises = [];
hosts.map(function (host) { hosts.map((host) => {
promises.push(internalNginx.deleteConfig(host_type, host, true)); promises.push(internalNginx.deleteConfig(host_type, host, true));
return true;
}); });
return Promise.all(promises); return Promise.all(promises);
@@ -418,21 +411,19 @@ const internalNginx = {
* @param {string} config * @param {string} config
* @returns {boolean} * @returns {boolean}
*/ */
advancedConfigHasDefaultLocation: function (cfg) { advancedConfigHasDefaultLocation: (cfg) => !!cfg.match(/^(?:.*;)?\s*?location\s*?\/\s*?{/im),
return !!cfg.match(/^(?:.*;)?\s*?location\s*?\/\s*?{/im);
},
/** /**
* @returns {boolean} * @returns {boolean}
*/ */
ipv6Enabled: function () { ipv6Enabled: () => {
if (typeof process.env.DISABLE_IPV6 !== 'undefined') { if (typeof process.env.DISABLE_IPV6 !== "undefined") {
const disabled = process.env.DISABLE_IPV6.toLowerCase(); const disabled = process.env.DISABLE_IPV6.toLowerCase();
return !(disabled === 'on' || disabled === 'true' || disabled === '1' || disabled === 'yes'); return !(disabled === "on" || disabled === "true" || disabled === "1" || disabled === "yes");
} }
return true; return true;
} },
}; };
module.exports = internalNginx; export default internalNginx;
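The test() and reload() methods above also move from utils.exec with a single shell string to utils.execFile with an argument array, so each nginx argument is passed verbatim rather than re-parsed by a shell. The wrapper itself is not part of this hunk; a minimal sketch of what such a helper could look like with node:child_process, assuming the real utils.execFile behaves similarly:

import { execFile } from "node:child_process";
import { promisify } from "node:util";

const execFileAsync = promisify(execFile);

// Run a binary with discrete arguments; nothing goes through a shell, so a
// value like "error_log off;" stays one argument instead of being split.
const run = async (cmd, args = []) => {
    const { stdout } = await execFileAsync(cmd, args);
    return stdout;
};

// Usage mirroring the calls in the diff above:
// await run("/usr/sbin/nginx", ["-t", "-g", "error_log off;"]);
// await run("/usr/sbin/nginx", ["-s", "reload"]);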

View File

@@ -1,105 +1,105 @@
const _ = require('lodash'); import _ from "lodash";
const error = require('../lib/error'); import errs from "../lib/error.js";
const utils = require('../lib/utils'); import { castJsonIfNeed } from "../lib/helpers.js";
const proxyHostModel = require('../models/proxy_host'); import utils from "../lib/utils.js";
const internalHost = require('./host'); import proxyHostModel from "../models/proxy_host.js";
const internalNginx = require('./nginx'); import internalAuditLog from "./audit-log.js";
const internalAuditLog = require('./audit-log'); import internalCertificate from "./certificate.js";
const internalCertificate = require('./certificate'); import internalHost from "./host.js";
import internalNginx from "./nginx.js";
function omissions () { const omissions = () => {
return ['is_deleted', 'owner.is_deleted']; return ["is_deleted", "owner.is_deleted"];
} };
const internalProxyHost = { const internalProxyHost = {
/** /**
* @param {Access} access * @param {Access} access
* @param {Object} data * @param {Object} data
* @returns {Promise} * @returns {Promise}
*/ */
create: (access, data) => { create: (access, data) => {
let create_certificate = data.certificate_id === 'new'; let thisData = data;
const createCertificate = thisData.certificate_id === "new";
if (create_certificate) { if (createCertificate) {
delete data.certificate_id; delete thisData.certificate_id;
} }
return access.can('proxy_hosts:create', data) return access
.can("proxy_hosts:create", thisData)
.then(() => { .then(() => {
// Get a list of the domain names and check each of them against existing records // Get a list of the domain names and check each of them against existing records
let domain_name_check_promises = []; const domain_name_check_promises = [];
data.domain_names.map(function (domain_name) { thisData.domain_names.map((domain_name) => {
domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name)); domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name));
return true;
}); });
return Promise.all(domain_name_check_promises) return Promise.all(domain_name_check_promises).then((check_results) => {
.then((check_results) => { check_results.map((result) => {
check_results.map(function (result) {
if (result.is_taken) { if (result.is_taken) {
throw new error.ValidationError(result.hostname + ' is already in use'); throw new errs.ValidationError(`${result.hostname} is already in use`);
} }
return true;
}); });
}); });
}) })
.then(() => { .then(() => {
// At this point the domains should have been checked // At this point the domains should have been checked
data.owner_user_id = access.token.getUserId(1); thisData.owner_user_id = access.token.getUserId(1);
data = internalHost.cleanSslHstsData(data); thisData = internalHost.cleanSslHstsData(thisData);
// Fix for db field not having a default value // Fix for db field not having a default value
// for this optional field. // for this optional field.
if (typeof data.advanced_config === 'undefined') { if (typeof thisData.advanced_config === "undefined") {
data.advanced_config = ''; thisData.advanced_config = "";
} }
return proxyHostModel return proxyHostModel.query().insertAndFetch(thisData).then(utils.omitRow(omissions()));
.query()
.insertAndFetch(data)
.then(utils.omitRow(omissions()));
}) })
.then((row) => { .then((row) => {
if (create_certificate) { if (createCertificate) {
return internalCertificate.createQuickCertificate(access, data) return internalCertificate
.createQuickCertificate(access, thisData)
.then((cert) => { .then((cert) => {
// update host with cert id // update host with cert id
return internalProxyHost.update(access, { return internalProxyHost.update(access, {
id: row.id, id: row.id,
certificate_id: cert.id certificate_id: cert.id,
}); });
}) })
.then(() => { .then(() => {
return row; return row;
}); });
} else {
return row;
} }
return row;
}) })
.then((row) => { .then((row) => {
// re-fetch with cert // re-fetch with cert
return internalProxyHost.get(access, { return internalProxyHost.get(access, {
id: row.id, id: row.id,
expand: ['certificate', 'owner', 'access_list.[clients,items]'] expand: ["certificate", "owner", "access_list.[clients,items]"],
}); });
}) })
.then((row) => { .then((row) => {
// Configure nginx // Configure nginx
return internalNginx.configure(proxyHostModel, 'proxy_host', row) return internalNginx.configure(proxyHostModel, "proxy_host", row).then(() => {
.then(() => {
return row; return row;
}); });
}) })
.then((row) => { .then((row) => {
// Audit log // Audit log
data.meta = _.assign({}, data.meta || {}, row.meta); thisData.meta = _.assign({}, thisData.meta || {}, row.meta);
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog
action: 'created', .add(access, {
object_type: 'proxy-host', action: "created",
object_type: "proxy-host",
object_id: row.id, object_id: row.id,
meta: data meta: thisData,
}) })
.then(() => { .then(() => {
return row; return row;
@@ -114,77 +114,88 @@ const internalProxyHost = {
* @return {Promise} * @return {Promise}
*/ */
update: (access, data) => { update: (access, data) => {
let create_certificate = data.certificate_id === 'new'; let thisData = data;
const create_certificate = thisData.certificate_id === "new";
if (create_certificate) { if (create_certificate) {
delete data.certificate_id; delete thisData.certificate_id;
} }
return access.can('proxy_hosts:update', data.id) return access
.can("proxy_hosts:update", thisData.id)
.then((/*access_data*/) => { .then((/*access_data*/) => {
// Get a list of the domain names and check each of them against existing records // Get a list of the domain names and check each of them against existing records
let domain_name_check_promises = []; const domain_name_check_promises = [];
if (typeof data.domain_names !== 'undefined') { if (typeof thisData.domain_names !== "undefined") {
data.domain_names.map(function (domain_name) { thisData.domain_names.map((domain_name) => {
domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, 'proxy', data.id)); return domain_name_check_promises.push(
internalHost.isHostnameTaken(domain_name, "proxy", thisData.id),
);
}); });
return Promise.all(domain_name_check_promises) return Promise.all(domain_name_check_promises).then((check_results) => {
.then((check_results) => { check_results.map((result) => {
check_results.map(function (result) {
if (result.is_taken) { if (result.is_taken) {
throw new error.ValidationError(result.hostname + ' is already in use'); throw new errs.ValidationError(`${result.hostname} is already in use`);
} }
return true;
}); });
}); });
} }
}) })
.then(() => { .then(() => {
return internalProxyHost.get(access, {id: data.id}); return internalProxyHost.get(access, { id: thisData.id });
}) })
.then((row) => { .then((row) => {
if (row.id !== data.id) { if (row.id !== thisData.id) {
// Sanity check that something crazy hasn't happened // Sanity check that something crazy hasn't happened
throw new error.InternalValidationError('Proxy Host could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id); throw new errs.InternalValidationError(
`Proxy Host could not be updated, IDs do not match: ${row.id} !== ${thisData.id}`,
);
} }
if (create_certificate) { if (create_certificate) {
return internalCertificate.createQuickCertificate(access, { return internalCertificate
domain_names: data.domain_names || row.domain_names, .createQuickCertificate(access, {
meta: _.assign({}, row.meta, data.meta) domain_names: thisData.domain_names || row.domain_names,
meta: _.assign({}, row.meta, thisData.meta),
}) })
.then((cert) => { .then((cert) => {
// update host with cert id // update host with cert id
data.certificate_id = cert.id; thisData.certificate_id = cert.id;
}) })
.then(() => { .then(() => {
return row; return row;
}); });
} else {
return row;
} }
return row;
}) })
.then((row) => { .then((row) => {
// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here. // Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
data = _.assign({}, { thisData = _.assign(
domain_names: row.domain_names {},
}, data); {
domain_names: row.domain_names,
},
data,
);
data = internalHost.cleanSslHstsData(data, row); thisData = internalHost.cleanSslHstsData(thisData, row);
return proxyHostModel return proxyHostModel
.query() .query()
.where({id: data.id}) .where({ id: thisData.id })
.patch(data) .patch(thisData)
.then(utils.omitRow(omissions())) .then(utils.omitRow(omissions()))
.then((saved_row) => { .then((saved_row) => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog
action: 'updated', .add(access, {
object_type: 'proxy-host', action: "updated",
object_type: "proxy-host",
object_id: row.id, object_id: row.id,
meta: data meta: thisData,
}) })
.then(() => { .then(() => {
return saved_row; return saved_row;
@@ -192,9 +203,10 @@ const internalProxyHost = {
}); });
}) })
.then(() => { .then(() => {
return internalProxyHost.get(access, { return internalProxyHost
id: data.id, .get(access, {
expand: ['owner', 'certificate', 'access_list.[clients,items]'] id: thisData.id,
expand: ["owner", "certificate", "access_list.[clients,items]"],
}) })
.then((row) => { .then((row) => {
if (!row.enabled) { if (!row.enabled) {
@@ -202,11 +214,9 @@ const internalProxyHost = {
return row; return row;
} }
// Configure nginx // Configure nginx
return internalNginx.configure(proxyHostModel, 'proxy_host', row) return internalNginx.configure(proxyHostModel, "proxy_host", row).then((new_meta) => {
.then((new_meta) => {
row.meta = new_meta; row.meta = new_meta;
row = internalHost.cleanRowCertificateMeta(row); return _.omit(internalHost.cleanRowCertificateMeta(row), omissions());
return _.omit(row, omissions());
}); });
}); });
}); });
@@ -221,39 +231,38 @@ const internalProxyHost = {
* @return {Promise} * @return {Promise}
*/ */
get: (access, data) => { get: (access, data) => {
if (typeof data === 'undefined') { const thisData = data || {};
data = {};
}
return access.can('proxy_hosts:get', data.id) return access
.can("proxy_hosts:get", thisData.id)
.then((access_data) => { .then((access_data) => {
let query = proxyHostModel const query = proxyHostModel
.query() .query()
.where('is_deleted', 0) .where("is_deleted", 0)
.andWhere('id', data.id) .andWhere("id", thisData.id)
.allowGraph('[owner,access_list.[clients,items],certificate]') .allowGraph("[owner,access_list.[clients,items],certificate]")
.first(); .first();
if (access_data.permission_visibility !== 'all') { if (access_data.permission_visibility !== "all") {
query.andWhere('owner_user_id', access.token.getUserId(1)); query.andWhere("owner_user_id", access.token.getUserId(1));
} }
if (typeof data.expand !== 'undefined' && data.expand !== null) { if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
query.withGraphFetched('[' + data.expand.join(', ') + ']'); query.withGraphFetched(`[${thisData.expand.join(", ")}]`);
} }
return query.then(utils.omitRow(omissions())); return query.then(utils.omitRow(omissions()));
}) })
.then((row) => { .then((row) => {
if (!row || !row.id) { if (!row || !row.id) {
throw new error.ItemNotFoundError(data.id); throw new errs.ItemNotFoundError(thisData.id);
} }
row = internalHost.cleanRowCertificateMeta(row); const thisRow = internalHost.cleanRowCertificateMeta(row);
// Custom omissions // Custom omissions
if (typeof data.omit !== 'undefined' && data.omit !== null) { if (typeof thisData.omit !== "undefined" && thisData.omit !== null) {
row = _.omit(row, data.omit); return _.omit(row, thisData.omit);
} }
return row; return thisRow;
}); });
}, },
@@ -265,35 +274,35 @@ const internalProxyHost = {
* @returns {Promise} * @returns {Promise}
*/ */
delete: (access, data) => { delete: (access, data) => {
return access.can('proxy_hosts:delete', data.id) return access
.can("proxy_hosts:delete", data.id)
.then(() => { .then(() => {
return internalProxyHost.get(access, { id: data.id }); return internalProxyHost.get(access, { id: data.id });
}) })
.then((row) => { .then((row) => {
if (!row || !row.id) { if (!row || !row.id) {
throw new error.ItemNotFoundError(data.id); throw new errs.ItemNotFoundError(data.id);
} }
return proxyHostModel return proxyHostModel
.query() .query()
.where('id', row.id) .where("id", row.id)
.patch({ .patch({
is_deleted: 1 is_deleted: 1,
}) })
.then(() => { .then(() => {
// Delete Nginx Config // Delete Nginx Config
return internalNginx.deleteConfig('proxy_host', row) return internalNginx.deleteConfig("proxy_host", row).then(() => {
.then(() => {
return internalNginx.reload(); return internalNginx.reload();
}); });
}) })
.then(() => { .then(() => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog.add(access, {
action: 'deleted', action: "deleted",
object_type: 'proxy-host', object_type: "proxy-host",
object_id: row.id, object_id: row.id,
meta: _.omit(row, omissions()) meta: _.omit(row, omissions()),
}); });
}); });
}) })
@@ -310,39 +319,41 @@ const internalProxyHost = {
* @returns {Promise} * @returns {Promise}
*/ */
enable: (access, data) => { enable: (access, data) => {
return access.can('proxy_hosts:update', data.id) return access
.can("proxy_hosts:update", data.id)
.then(() => { .then(() => {
return internalProxyHost.get(access, { return internalProxyHost.get(access, {
id: data.id, id: data.id,
expand: ['certificate', 'owner', 'access_list'] expand: ["certificate", "owner", "access_list"],
}); });
}) })
.then((row) => { .then((row) => {
if (!row || !row.id) { if (!row || !row.id) {
throw new error.ItemNotFoundError(data.id); throw new errs.ItemNotFoundError(data.id);
} else if (row.enabled) { }
throw new error.ValidationError('Host is already enabled'); if (row.enabled) {
throw new errs.ValidationError("Host is already enabled");
} }
row.enabled = 1; row.enabled = 1;
return proxyHostModel return proxyHostModel
.query() .query()
.where('id', row.id) .where("id", row.id)
.patch({ .patch({
enabled: 1 enabled: 1,
}) })
.then(() => { .then(() => {
// Configure nginx // Configure nginx
return internalNginx.configure(proxyHostModel, 'proxy_host', row); return internalNginx.configure(proxyHostModel, "proxy_host", row);
}) })
.then(() => { .then(() => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog.add(access, {
action: 'enabled', action: "enabled",
object_type: 'proxy-host', object_type: "proxy-host",
object_id: row.id, object_id: row.id,
meta: _.omit(row, omissions()) meta: _.omit(row, omissions()),
}); });
}); });
}) })
@@ -359,39 +370,40 @@ const internalProxyHost = {
* @returns {Promise} * @returns {Promise}
*/ */
disable: (access, data) => { disable: (access, data) => {
return access.can('proxy_hosts:update', data.id) return access
.can("proxy_hosts:update", data.id)
.then(() => { .then(() => {
return internalProxyHost.get(access, { id: data.id }); return internalProxyHost.get(access, { id: data.id });
}) })
.then((row) => { .then((row) => {
if (!row || !row.id) { if (!row || !row.id) {
throw new error.ItemNotFoundError(data.id); throw new errs.ItemNotFoundError(data.id);
} else if (!row.enabled) { }
throw new error.ValidationError('Host is already disabled'); if (!row.enabled) {
throw new errs.ValidationError("Host is already disabled");
} }
row.enabled = 0; row.enabled = 0;
return proxyHostModel return proxyHostModel
.query() .query()
.where('id', row.id) .where("id", row.id)
.patch({ .patch({
enabled: 0 enabled: 0,
}) })
.then(() => { .then(() => {
// Delete Nginx Config // Delete Nginx Config
return internalNginx.deleteConfig('proxy_host', row) return internalNginx.deleteConfig("proxy_host", row).then(() => {
.then(() => {
return internalNginx.reload(); return internalNginx.reload();
}); });
}) })
.then(() => { .then(() => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog.add(access, {
action: 'disabled', action: "disabled",
object_type: 'proxy-host', object_type: "proxy-host",
object_id: row.id, object_id: row.id,
meta: _.omit(row, omissions()) meta: _.omit(row, omissions()),
}); });
}); });
}) })
@@ -408,40 +420,38 @@ const internalProxyHost = {
* @param {String} [search_query] * @param {String} [search_query]
* @returns {Promise} * @returns {Promise}
*/ */
getAll: (access, expand, search_query) => { getAll: async (access, expand, searchQuery) => {
return access.can('proxy_hosts:list') const accessData = await access.can("proxy_hosts:list");
.then((access_data) => {
let query = proxyHostModel
.query()
.where('is_deleted', 0)
.groupBy('id')
.allowGraph('[owner,access_list,certificate]')
.orderBy('domain_names', 'ASC');
if (access_data.permission_visibility !== 'all') { const query = proxyHostModel
query.andWhere('owner_user_id', access.token.getUserId(1)); .query()
.where("is_deleted", 0)
.groupBy("id")
.allowGraph("[owner,access_list,certificate]")
.orderBy(castJsonIfNeed("domain_names"), "ASC");
if (accessData.permission_visibility !== "all") {
query.andWhere("owner_user_id", access.token.getUserId(1));
} }
// Query is used for searching // Query is used for searching
if (typeof search_query === 'string') { if (typeof searchQuery === "string" && searchQuery.length > 0) {
query.where(function () { query.where(function () {
this.where('domain_names', 'like', '%' + search_query + '%'); this.where(castJsonIfNeed("domain_names"), "like", `%${searchQuery}%`);
}); });
} }
if (typeof expand !== 'undefined' && expand !== null) { if (typeof expand !== "undefined" && expand !== null) {
query.withGraphFetched('[' + expand.join(', ') + ']'); query.withGraphFetched(`[${expand.join(", ")}]`);
} }
return query.then(utils.omitRows(omissions())); const rows = await query.then(utils.omitRows(omissions()));
})
.then((rows) => { if (typeof expand !== "undefined" && expand !== null && expand.indexOf("certificate") !== -1) {
if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
return internalHost.cleanAllRowsCertificateMeta(rows); return internalHost.cleanAllRowsCertificateMeta(rows);
} }
return rows; return rows;
});
}, },
/** /**
@@ -452,20 +462,16 @@ const internalProxyHost = {
* @returns {Promise} * @returns {Promise}
*/ */
getCount: (user_id, visibility) => { getCount: (user_id, visibility) => {
let query = proxyHostModel const query = proxyHostModel.query().count("id as count").where("is_deleted", 0);
.query()
.count('id as count')
.where('is_deleted', 0);
if (visibility !== 'all') { if (visibility !== "all") {
query.andWhere('owner_user_id', user_id); query.andWhere("owner_user_id", user_id);
} }
return query.first() return query.first().then((row) => {
.then((row) => { return Number.parseInt(row.count, 10);
return parseInt(row.count, 10);
}); });
} },
}; };
module.exports = internalProxyHost; export default internalProxyHost;
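getAll above is the one handler in this file rewritten fully to async/await: the permission check, the query, and the row post-processing become sequential awaits instead of nested .then() callbacks. A stripped-down, self-contained sketch of that shape, with placeholder collaborators rather than the project's real models:

// Before (roughly): access.can(...).then((a) => fetchRows(a).then((rows) => clean(rows)))
// After: the same control flow, flattened with await.
const getAll = async (access) => {
    const accessData = await access.can("proxy_hosts:list"); // permission check first
    const rows = await fetchRows(accessData);                // then the query
    return clean(rows);                                      // synchronous post-processing
};

// Placeholder collaborators so the sketch runs on its own:
const access = { can: async () => ({ permission_visibility: "all" }) };
const fetchRows = async () => [{ id: 1, domain_names: ["example.com"] }];
const clean = (rows) => rows;

getAll(access).then((rows) => console.log(rows));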

View File

@@ -1,72 +1,73 @@
const _ = require('lodash'); import _ from "lodash";
const error = require('../lib/error'); import errs from "../lib/error.js";
const utils = require('../lib/utils'); import { castJsonIfNeed } from "../lib/helpers.js";
const redirectionHostModel = require('../models/redirection_host'); import utils from "../lib/utils.js";
const internalHost = require('./host'); import redirectionHostModel from "../models/redirection_host.js";
const internalNginx = require('./nginx'); import internalAuditLog from "./audit-log.js";
const internalAuditLog = require('./audit-log'); import internalCertificate from "./certificate.js";
const internalCertificate = require('./certificate'); import internalHost from "./host.js";
import internalNginx from "./nginx.js";
function omissions () { const omissions = () => {
return ['is_deleted']; return ["is_deleted"];
} };
const internalRedirectionHost = { const internalRedirectionHost = {
/** /**
* @param {Access} access * @param {Access} access
* @param {Object} data * @param {Object} data
* @returns {Promise} * @returns {Promise}
*/ */
create: (access, data) => { create: (access, data) => {
let create_certificate = data.certificate_id === 'new'; let thisData = data || {};
const createCertificate = thisData.certificate_id === "new";
if (create_certificate) { if (createCertificate) {
delete data.certificate_id; delete thisData.certificate_id;
} }
return access.can('redirection_hosts:create', data) return access
.can("redirection_hosts:create", thisData)
.then((/*access_data*/) => { .then((/*access_data*/) => {
// Get a list of the domain names and check each of them against existing records // Get a list of the domain names and check each of them against existing records
let domain_name_check_promises = []; const domain_name_check_promises = [];
data.domain_names.map(function (domain_name) { thisData.domain_names.map((domain_name) => {
domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name)); domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name));
return true;
}); });
return Promise.all(domain_name_check_promises) return Promise.all(domain_name_check_promises).then((check_results) => {
.then((check_results) => { check_results.map((result) => {
check_results.map(function (result) {
if (result.is_taken) { if (result.is_taken) {
throw new error.ValidationError(result.hostname + ' is already in use'); throw new errs.ValidationError(`${result.hostname} is already in use`);
} }
return true;
}); });
}); });
}) })
.then(() => { .then(() => {
// At this point the domains should have been checked // At this point the domains should have been checked
data.owner_user_id = access.token.getUserId(1); thisData.owner_user_id = access.token.getUserId(1);
data = internalHost.cleanSslHstsData(data); thisData = internalHost.cleanSslHstsData(thisData);
// Fix for db field not having a default value // Fix for db field not having a default value
// for this optional field. // for this optional field.
if (typeof data.advanced_config === 'undefined') { if (typeof data.advanced_config === "undefined") {
data.advanced_config = ''; data.advanced_config = "";
} }
return redirectionHostModel return redirectionHostModel.query().insertAndFetch(thisData).then(utils.omitRow(omissions()));
.query()
.insertAndFetch(data)
.then(utils.omitRow(omissions()));
}) })
.then((row) => { .then((row) => {
if (create_certificate) { if (createCertificate) {
return internalCertificate.createQuickCertificate(access, data) return internalCertificate
.createQuickCertificate(access, thisData)
.then((cert) => { .then((cert) => {
// update host with cert id // update host with cert id
return internalRedirectionHost.update(access, { return internalRedirectionHost.update(access, {
id: row.id, id: row.id,
certificate_id: cert.id certificate_id: cert.id,
}); });
}) })
.then(() => { .then(() => {
@@ -79,25 +80,25 @@ const internalRedirectionHost = {
// re-fetch with cert // re-fetch with cert
return internalRedirectionHost.get(access, { return internalRedirectionHost.get(access, {
id: row.id, id: row.id,
expand: ['certificate', 'owner'] expand: ["certificate", "owner"],
}); });
}) })
.then((row) => { .then((row) => {
// Configure nginx // Configure nginx
return internalNginx.configure(redirectionHostModel, 'redirection_host', row) return internalNginx.configure(redirectionHostModel, "redirection_host", row).then(() => {
.then(() => {
return row; return row;
}); });
}) })
.then((row) => { .then((row) => {
data.meta = _.assign({}, data.meta || {}, row.meta); thisData.meta = _.assign({}, thisData.meta || {}, row.meta);
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog
action: 'created', .add(access, {
object_type: 'redirection-host', action: "created",
object_type: "redirection-host",
object_id: row.id, object_id: row.id,
meta: data meta: thisData,
}) })
.then(() => { .then(() => {
return row; return row;
@@ -112,76 +113,88 @@ const internalRedirectionHost = {
* @return {Promise} * @return {Promise}
*/ */
update: (access, data) => { update: (access, data) => {
let create_certificate = data.certificate_id === 'new'; let thisData = data || {};
const createCertificate = thisData.certificate_id === "new";
if (create_certificate) { if (createCertificate) {
delete data.certificate_id; delete thisData.certificate_id;
} }
return access.can('redirection_hosts:update', data.id) return access
.can("redirection_hosts:update", thisData.id)
.then((/*access_data*/) => { .then((/*access_data*/) => {
// Get a list of the domain names and check each of them against existing records // Get a list of the domain names and check each of them against existing records
let domain_name_check_promises = []; const domain_name_check_promises = [];
if (typeof data.domain_names !== 'undefined') { if (typeof thisData.domain_names !== "undefined") {
data.domain_names.map(function (domain_name) { thisData.domain_names.map((domain_name) => {
domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, 'redirection', data.id)); domain_name_check_promises.push(
internalHost.isHostnameTaken(domain_name, "redirection", thisData.id),
);
return true;
}); });
return Promise.all(domain_name_check_promises) return Promise.all(domain_name_check_promises).then((check_results) => {
.then((check_results) => { check_results.map((result) => {
check_results.map(function (result) {
if (result.is_taken) { if (result.is_taken) {
throw new error.ValidationError(result.hostname + ' is already in use'); throw new errs.ValidationError(`${result.hostname} is already in use`);
} }
return true;
}); });
}); });
} }
}) })
.then(() => { .then(() => {
return internalRedirectionHost.get(access, {id: data.id}); return internalRedirectionHost.get(access, { id: thisData.id });
}) })
.then((row) => { .then((row) => {
if (row.id !== data.id) { if (row.id !== thisData.id) {
// Sanity check that something crazy hasn't happened // Sanity check that something crazy hasn't happened
throw new error.InternalValidationError('Redirection Host could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id); throw new errs.InternalValidationError(
`Redirection Host could not be updated, IDs do not match: ${row.id} !== ${thisData.id}`,
);
} }
if (create_certificate) { if (createCertificate) {
return internalCertificate.createQuickCertificate(access, { return internalCertificate
domain_names: data.domain_names || row.domain_names, .createQuickCertificate(access, {
meta: _.assign({}, row.meta, data.meta) domain_names: thisData.domain_names || row.domain_names,
meta: _.assign({}, row.meta, thisData.meta),
}) })
.then((cert) => { .then((cert) => {
// update host with cert id // update host with cert id
data.certificate_id = cert.id; thisData.certificate_id = cert.id;
}) })
.then(() => { .then(() => {
return row; return row;
}); });
} else {
return row;
} }
return row;
}) })
.then((row) => { .then((row) => {
// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here. // Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
data = _.assign({}, { thisData = _.assign(
domain_names: row.domain_names {},
}, data); {
domain_names: row.domain_names,
},
thisData,
);
data = internalHost.cleanSslHstsData(data, row); thisData = internalHost.cleanSslHstsData(thisData, row);
return redirectionHostModel return redirectionHostModel
.query() .query()
.where({id: data.id}) .where({ id: thisData.id })
.patch(data) .patch(thisData)
.then((saved_row) => { .then((saved_row) => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog
action: 'updated', .add(access, {
object_type: 'redirection-host', action: "updated",
object_type: "redirection-host",
object_id: row.id, object_id: row.id,
meta: data meta: thisData,
}) })
.then(() => { .then(() => {
return _.omit(saved_row, omissions()); return _.omit(saved_row, omissions());
@@ -189,17 +202,18 @@ const internalRedirectionHost = {
}); });
}) })
.then(() => { .then(() => {
return internalRedirectionHost.get(access, { return internalRedirectionHost
id: data.id, .get(access, {
expand: ['owner', 'certificate'] id: thisData.id,
expand: ["owner", "certificate"],
}) })
.then((row) => { .then((row) => {
// Configure nginx // Configure nginx
return internalNginx.configure(redirectionHostModel, 'redirection_host', row) return internalNginx
.configure(redirectionHostModel, "redirection_host", row)
.then((new_meta) => { .then((new_meta) => {
row.meta = new_meta; row.meta = new_meta;
row = internalHost.cleanRowCertificateMeta(row); return _.omit(internalHost.cleanRowCertificateMeta(row), omissions());
return _.omit(row, omissions());
}); });
}); });
}); });
@@ -214,39 +228,39 @@ const internalRedirectionHost = {
* @return {Promise} * @return {Promise}
*/ */
get: (access, data) => { get: (access, data) => {
if (typeof data === 'undefined') { const thisData = data || {};
data = {};
}
return access.can('redirection_hosts:get', data.id) return access
.can("redirection_hosts:get", thisData.id)
.then((access_data) => { .then((access_data) => {
let query = redirectionHostModel const query = redirectionHostModel
.query() .query()
.where('is_deleted', 0) .where("is_deleted", 0)
.andWhere('id', data.id) .andWhere("id", thisData.id)
.allowGraph('[owner,certificate]') .allowGraph("[owner,certificate]")
.first(); .first();
if (access_data.permission_visibility !== 'all') { if (access_data.permission_visibility !== "all") {
query.andWhere('owner_user_id', access.token.getUserId(1)); query.andWhere("owner_user_id", access.token.getUserId(1));
} }
if (typeof data.expand !== 'undefined' && data.expand !== null) { if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
query.withGraphFetched('[' + data.expand.join(', ') + ']'); query.withGraphFetched(`[${thisData.expand.join(", ")}]`);
} }
return query.then(utils.omitRow(omissions())); return query.then(utils.omitRow(omissions()));
}) })
.then((row) => { .then((row) => {
if (!row || !row.id) { let thisRow = row;
throw new error.ItemNotFoundError(data.id); if (!thisRow || !thisRow.id) {
throw new errs.ItemNotFoundError(thisData.id);
} }
row = internalHost.cleanRowCertificateMeta(row); thisRow = internalHost.cleanRowCertificateMeta(thisRow);
// Custom omissions // Custom omissions
if (typeof data.omit !== 'undefined' && data.omit !== null) { if (typeof thisData.omit !== "undefined" && thisData.omit !== null) {
row = _.omit(row, data.omit); return _.omit(thisRow, thisData.omit);
} }
return row; return thisRow;
}); });
}, },
@@ -258,35 +272,35 @@ const internalRedirectionHost = {
* @returns {Promise} * @returns {Promise}
*/ */
delete: (access, data) => { delete: (access, data) => {
return access.can('redirection_hosts:delete', data.id) return access
.can("redirection_hosts:delete", data.id)
.then(() => { .then(() => {
return internalRedirectionHost.get(access, { id: data.id }); return internalRedirectionHost.get(access, { id: data.id });
}) })
.then((row) => { .then((row) => {
if (!row || !row.id) { if (!row || !row.id) {
throw new error.ItemNotFoundError(data.id); throw new errs.ItemNotFoundError(data.id);
} }
return redirectionHostModel return redirectionHostModel
.query() .query()
.where('id', row.id) .where("id", row.id)
.patch({ .patch({
is_deleted: 1 is_deleted: 1,
}) })
.then(() => { .then(() => {
// Delete Nginx Config // Delete Nginx Config
return internalNginx.deleteConfig('redirection_host', row) return internalNginx.deleteConfig("redirection_host", row).then(() => {
.then(() => {
return internalNginx.reload(); return internalNginx.reload();
}); });
}) })
.then(() => { .then(() => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog.add(access, {
action: 'deleted', action: "deleted",
object_type: 'redirection-host', object_type: "redirection-host",
object_id: row.id, object_id: row.id,
meta: _.omit(row, omissions()) meta: _.omit(row, omissions()),
}); });
}); });
}) })
@@ -303,39 +317,41 @@ const internalRedirectionHost = {
* @returns {Promise} * @returns {Promise}
*/ */
enable: (access, data) => { enable: (access, data) => {
return access.can('redirection_hosts:update', data.id) return access
.can("redirection_hosts:update", data.id)
.then(() => { .then(() => {
return internalRedirectionHost.get(access, { return internalRedirectionHost.get(access, {
id: data.id, id: data.id,
expand: ['certificate', 'owner'] expand: ["certificate", "owner"],
}); });
}) })
.then((row) => { .then((row) => {
if (!row || !row.id) { if (!row || !row.id) {
throw new error.ItemNotFoundError(data.id); throw new errs.ItemNotFoundError(data.id);
} else if (row.enabled) { }
throw new error.ValidationError('Host is already enabled'); if (row.enabled) {
throw new errs.ValidationError("Host is already enabled");
} }
row.enabled = 1; row.enabled = 1;
return redirectionHostModel return redirectionHostModel
.query() .query()
.where('id', row.id) .where("id", row.id)
.patch({ .patch({
enabled: 1 enabled: 1,
}) })
.then(() => { .then(() => {
// Configure nginx // Configure nginx
return internalNginx.configure(redirectionHostModel, 'redirection_host', row); return internalNginx.configure(redirectionHostModel, "redirection_host", row);
}) })
.then(() => { .then(() => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog.add(access, {
action: 'enabled', action: "enabled",
object_type: 'redirection-host', object_type: "redirection-host",
object_id: row.id, object_id: row.id,
meta: _.omit(row, omissions()) meta: _.omit(row, omissions()),
}); });
}); });
}) })
@@ -352,39 +368,40 @@ const internalRedirectionHost = {
* @returns {Promise} * @returns {Promise}
*/ */
disable: (access, data) => { disable: (access, data) => {
return access.can('redirection_hosts:update', data.id) return access
.can("redirection_hosts:update", data.id)
.then(() => { .then(() => {
return internalRedirectionHost.get(access, { id: data.id }); return internalRedirectionHost.get(access, { id: data.id });
}) })
.then((row) => { .then((row) => {
if (!row || !row.id) { if (!row || !row.id) {
throw new error.ItemNotFoundError(data.id); throw new errs.ItemNotFoundError(data.id);
} else if (!row.enabled) { }
throw new error.ValidationError('Host is already disabled'); if (!row.enabled) {
throw new errs.ValidationError("Host is already disabled");
} }
row.enabled = 0; row.enabled = 0;
return redirectionHostModel return redirectionHostModel
.query() .query()
.where('id', row.id) .where("id", row.id)
.patch({ .patch({
enabled: 0 enabled: 0,
}) })
.then(() => { .then(() => {
// Delete Nginx Config // Delete Nginx Config
return internalNginx.deleteConfig('redirection_host', row) return internalNginx.deleteConfig("redirection_host", row).then(() => {
.then(() => {
return internalNginx.reload(); return internalNginx.reload();
}); });
}) })
.then(() => { .then(() => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog.add(access, {
action: 'disabled', action: "disabled",
object_type: 'redirection-host', object_type: "redirection-host",
object_id: row.id, object_id: row.id,
meta: _.omit(row, omissions()) meta: _.omit(row, omissions()),
}); });
}); });
}) })
@@ -402,34 +419,35 @@ const internalRedirectionHost = {
* @returns {Promise} * @returns {Promise}
*/ */
getAll: (access, expand, search_query) => { getAll: (access, expand, search_query) => {
return access.can('redirection_hosts:list') return access
.can("redirection_hosts:list")
.then((access_data) => { .then((access_data) => {
let query = redirectionHostModel const query = redirectionHostModel
.query() .query()
.where('is_deleted', 0) .where("is_deleted", 0)
.groupBy('id') .groupBy("id")
.allowGraph('[owner,certificate]') .allowGraph("[owner,certificate]")
.orderBy('domain_names', 'ASC'); .orderBy(castJsonIfNeed("domain_names"), "ASC");
if (access_data.permission_visibility !== 'all') { if (access_data.permission_visibility !== "all") {
query.andWhere('owner_user_id', access.token.getUserId(1)); query.andWhere("owner_user_id", access.token.getUserId(1));
} }
// Query is used for searching // Query is used for searching
if (typeof search_query === 'string') { if (typeof search_query === "string" && search_query.length > 0) {
query.where(function () { query.where(function () {
this.where('domain_names', 'like', '%' + search_query + '%'); this.where(castJsonIfNeed("domain_names"), "like", `%${search_query}%`);
}); });
} }
if (typeof expand !== 'undefined' && expand !== null) { if (typeof expand !== "undefined" && expand !== null) {
query.withGraphFetched('[' + expand.join(', ') + ']'); query.withGraphFetched(`[${expand.join(", ")}]`);
} }
return query.then(utils.omitRows(omissions())); return query.then(utils.omitRows(omissions()));
}) })
.then((rows) => { .then((rows) => {
if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) { if (typeof expand !== "undefined" && expand !== null && expand.indexOf("certificate") !== -1) {
return internalHost.cleanAllRowsCertificateMeta(rows); return internalHost.cleanAllRowsCertificateMeta(rows);
} }
@@ -445,20 +463,16 @@ const internalRedirectionHost = {
* @returns {Promise} * @returns {Promise}
*/ */
getCount: (user_id, visibility) => { getCount: (user_id, visibility) => {
let query = redirectionHostModel const query = redirectionHostModel.query().count("id as count").where("is_deleted", 0);
.query()
.count('id as count')
.where('is_deleted', 0);
if (visibility !== 'all') { if (visibility !== "all") {
query.andWhere('owner_user_id', user_id); query.andWhere("owner_user_id", user_id);
} }
return query.first() return query.first().then((row) => {
.then((row) => { return Number.parseInt(row.count, 10);
return parseInt(row.count, 10);
}); });
} },
}; };
module.exports = internalRedirectionHost; export default internalRedirectionHost;
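Rows in both host modules keep flowing through .then(utils.omitRow(omissions())) and utils.omitRows(omissions()). Those helpers are defined elsewhere, but from the call sites they evidently take the keys to hide and return a callback that strips them from a result row; a self-contained sketch of that higher-order shape, using lodash omit as an assumption about how the real helpers work:

import _ from "lodash";

// Assumed shape of utils.omitRow / utils.omitRows: given the keys to drop,
// return a callback that can sit directly inside a .then().
const omitRow = (keys) => (row) => _.omit(row, keys);
const omitRows = (keys) => (rows) => rows.map((row) => _.omit(row, keys));

const omissions = () => ["is_deleted", "owner.is_deleted"];

Promise.resolve({ id: 1, is_deleted: 0, domain_names: ["example.com"] })
    .then(omitRow(omissions()))
    .then((row) => console.log(row)); // -> { id: 1, domain_names: [ 'example.com' ] }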

View File

@@ -1,24 +1,24 @@
const internalProxyHost = require('./proxy-host'); import internalDeadHost from "./dead-host.js";
const internalRedirectionHost = require('./redirection-host'); import internalProxyHost from "./proxy-host.js";
const internalDeadHost = require('./dead-host'); import internalRedirectionHost from "./redirection-host.js";
const internalStream = require('./stream'); import internalStream from "./stream.js";
const internalReport = { const internalReport = {
/** /**
* @param {Access} access * @param {Access} access
* @return {Promise} * @return {Promise}
*/ */
getHostsReport: (access) => { getHostsReport: (access) => {
return access.can('reports:hosts', 1) return access
.can("reports:hosts", 1)
.then((access_data) => { .then((access_data) => {
let user_id = access.token.getUserId(1); const userId = access.token.getUserId(1);
let promises = [ const promises = [
internalProxyHost.getCount(user_id, access_data.visibility), internalProxyHost.getCount(userId, access_data.visibility),
internalRedirectionHost.getCount(user_id, access_data.visibility), internalRedirectionHost.getCount(userId, access_data.visibility),
internalStream.getCount(user_id, access_data.visibility), internalStream.getCount(userId, access_data.visibility),
internalDeadHost.getCount(user_id, access_data.visibility) internalDeadHost.getCount(userId, access_data.visibility),
]; ];
return Promise.all(promises); return Promise.all(promises);
@@ -28,11 +28,10 @@ const internalReport = {
proxy: counts.shift(), proxy: counts.shift(),
redirection: counts.shift(), redirection: counts.shift(),
stream: counts.shift(), stream: counts.shift(),
dead: counts.shift() dead: counts.shift(),
}; };
}); });
} },
}; };
module.exports = internalReport; export default internalReport;
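getHostsReport collects the four per-type counts with Promise.all and then shift()s them off in order, so the ordering of the promises array is what maps each number to its key. A small self-contained sketch of the same aggregation, shown with array destructuring as an equivalent way of keeping that ordering explicit (the count functions here are placeholders):

// Placeholder counters standing in for internalProxyHost.getCount() and friends.
const getProxyCount = async () => 4;
const getRedirectionCount = async () => 2;
const getStreamCount = async () => 1;
const getDeadCount = async () => 0;

const getHostsReport = async () => {
    // Destructuring keeps the promise order and the key mapping side by side,
    // which is the same contract the shift()-based version relies on.
    const [proxy, redirection, stream, dead] = await Promise.all([
        getProxyCount(),
        getRedirectionCount(),
        getStreamCount(),
        getDeadCount(),
    ]);
    return { proxy, redirection, stream, dead };
};

getHostsReport().then((report) => console.log(report));
// -> { proxy: 4, redirection: 2, stream: 1, dead: 0 }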

View File

@@ -1,10 +1,9 @@
const fs = require('fs'); import fs from "node:fs";
const error = require('../lib/error'); import errs from "../lib/error.js";
const settingModel = require('../models/setting'); import settingModel from "../models/setting.js";
const internalNginx = require('./nginx'); import internalNginx from "./nginx.js";
const internalSetting = { const internalSetting = {
/** /**
* @param {Access} access * @param {Access} access
* @param {Object} data * @param {Object} data
@@ -12,37 +11,38 @@ const internalSetting = {
* @return {Promise} * @return {Promise}
*/ */
update: (access, data) => { update: (access, data) => {
return access.can('settings:update', data.id) return access
.can("settings:update", data.id)
.then((/*access_data*/) => { .then((/*access_data*/) => {
return internalSetting.get(access, { id: data.id }); return internalSetting.get(access, { id: data.id });
}) })
.then((row) => { .then((row) => {
if (row.id !== data.id) { if (row.id !== data.id) {
// Sanity check that something crazy hasn't happened // Sanity check that something crazy hasn't happened
throw new error.InternalValidationError('Setting could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id); throw new errs.InternalValidationError(
`Setting could not be updated, IDs do not match: ${row.id} !== ${data.id}`,
);
} }
return settingModel return settingModel.query().where({ id: data.id }).patch(data);
.query()
.where({id: data.id})
.patch(data);
}) })
.then(() => { .then(() => {
return internalSetting.get(access, { return internalSetting.get(access, {
id: data.id id: data.id,
}); });
}) })
.then((row) => { .then((row) => {
if (row.id === 'default-site') { if (row.id === "default-site") {
// write the html if we need to // write the html if we need to
if (row.value === 'html') { if (row.value === "html") {
fs.writeFileSync('/data/nginx/default_www/index.html', row.meta.html, {encoding: 'utf8'}); fs.writeFileSync("/data/nginx/default_www/index.html", row.meta.html, { encoding: "utf8" });
} }
// Configure nginx // Configure nginx
return internalNginx.deleteConfig('default') return internalNginx
.deleteConfig("default")
.then(() => { .then(() => {
return internalNginx.generateConfig('default', row); return internalNginx.generateConfig("default", row);
}) })
.then(() => { .then(() => {
return internalNginx.test(); return internalNginx.test();
@@ -54,7 +54,8 @@ const internalSetting = {
return row; return row;
}) })
.catch((/*err*/) => { .catch((/*err*/) => {
internalNginx.deleteConfig('default') internalNginx
.deleteConfig("default")
.then(() => { .then(() => {
return internalNginx.test(); return internalNginx.test();
}) })
@@ -63,12 +64,11 @@ const internalSetting = {
}) })
.then(() => { .then(() => {
// I'm being slack here I know.. // I'm being slack here I know..
throw new error.ValidationError('Could not reconfigure Nginx. Please check logs.'); throw new errs.ValidationError("Could not reconfigure Nginx. Please check logs.");
}); });
}); });
} else {
return row;
} }
return row;
}); });
}, },
@@ -79,19 +79,16 @@ const internalSetting = {
* @return {Promise} * @return {Promise}
*/ */
get: (access, data) => { get: (access, data) => {
return access.can('settings:get', data.id) return access
.can("settings:get", data.id)
.then(() => { .then(() => {
return settingModel return settingModel.query().where("id", data.id).first();
.query()
.where('id', data.id)
.first();
}) })
.then((row) => { .then((row) => {
if (row) { if (row) {
return row; return row;
} else {
throw new error.ItemNotFoundError(data.id);
} }
throw new errs.ItemNotFoundError(data.id);
}); });
}, },
@@ -102,15 +99,13 @@ const internalSetting = {
* @returns {*} * @returns {*}
*/ */
getCount: (access) => { getCount: (access) => {
return access.can('settings:list') return access
.can("settings:list")
.then(() => { .then(() => {
return settingModel return settingModel.query().count("id as count").first();
.query()
.count('id as count')
.first();
}) })
.then((row) => { .then((row) => {
return parseInt(row.count, 10); return Number.parseInt(row.count, 10);
}); });
}, },
@@ -121,13 +116,10 @@ const internalSetting = {
* @returns {Promise} * @returns {Promise}
*/ */
getAll: (access) => { getAll: (access) => {
return access.can('settings:list') return access.can("settings:list").then(() => {
.then(() => { return settingModel.query().orderBy("description", "ASC");
return settingModel
.query()
.orderBy('description', 'ASC');
}); });
} },
}; };
module.exports = internalSetting; export default internalSetting;
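The default-site branch above writes the static HTML, regenerates the nginx config, and on a failed test falls back to removing the config before raising a validation error. A rough sketch of that apply-test-rollback shape, with placeholder functions standing in for the internalNginx helpers:

// All three helpers are placeholders for internalNginx.deleteConfig/generateConfig/test.
const deleteConfig = async (_name) => { /* remove the nginx conf for this host */ };
const generateConfig = async (_name, _row) => { /* render the nginx conf from the row */ };
const testConfig = async () => { /* reject if `nginx -t` fails */ };

const applyDefaultSite = async (row) => {
    try {
        await deleteConfig("default");
        await generateConfig("default", row);
        await testConfig();
        return row;
    } catch (_err) {
        // Roll back to "no default config" so nginx keeps serving everything else
        await deleteConfig("default");
        await testConfig();
        throw new Error("Could not reconfigure Nginx. Please check logs.");
    }
};

applyDefaultSite({ id: "default-site", value: "html" }).then(() => console.log("default site applied"));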

View File

@@ -1,50 +1,84 @@
const _ = require('lodash'); import _ from "lodash";
const error = require('../lib/error'); import errs from "../lib/error.js";
const utils = require('../lib/utils'); import { castJsonIfNeed } from "../lib/helpers.js";
const streamModel = require('../models/stream'); import utils from "../lib/utils.js";
const internalNginx = require('./nginx'); import streamModel from "../models/stream.js";
const internalAuditLog = require('./audit-log'); import internalAuditLog from "./audit-log.js";
import internalCertificate from "./certificate.js";
import internalHost from "./host.js";
import internalNginx from "./nginx.js";
function omissions () { const omissions = () => {
return ['is_deleted']; return ["is_deleted", "owner.is_deleted", "certificate.is_deleted"];
} };
const internalStream = { const internalStream = {
/** /**
* @param {Access} access * @param {Access} access
* @param {Object} data * @param {Object} data
* @returns {Promise} * @returns {Promise}
*/ */
create: (access, data) => { create: (access, data) => {
return access.can('streams:create', data) const create_certificate = data.certificate_id === "new";
if (create_certificate) {
delete data.certificate_id;
}
return access
.can("streams:create", data)
.then((/*access_data*/) => { .then((/*access_data*/) => {
// TODO: At this point the existing ports should have been checked // TODO: At this point the existing ports should have been checked
data.owner_user_id = access.token.getUserId(1); data.owner_user_id = access.token.getUserId(1);
if (typeof data.meta === 'undefined') { if (typeof data.meta === "undefined") {
data.meta = {}; data.meta = {};
} }
return streamModel // streams aren't routed by domain name so don't store domain names in the DB
.query() const data_no_domains = structuredClone(data);
.insertAndFetch(data) delete data_no_domains.domain_names;
.then(utils.omitRow(omissions()));
return streamModel.query().insertAndFetch(data_no_domains).then(utils.omitRow(omissions()));
})
.then((row) => {
if (create_certificate) {
return internalCertificate
.createQuickCertificate(access, data)
.then((cert) => {
// update host with cert id
return internalStream.update(access, {
id: row.id,
certificate_id: cert.id,
});
})
.then(() => {
return row;
});
}
return row;
})
.then((row) => {
// re-fetch with cert
return internalStream.get(access, {
id: row.id,
expand: ["certificate", "owner"],
});
}) })
.then((row) => { .then((row) => {
// Configure nginx // Configure nginx
return internalNginx.configure(streamModel, 'stream', row) return internalNginx.configure(streamModel, "stream", row).then(() => {
.then(() => { return row;
return internalStream.get(access, {id: row.id, expand: ['owner']});
}); });
}) })
.then((row) => { .then((row) => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog
action: 'created', .add(access, {
object_type: 'stream', action: "created",
object_type: "stream",
object_id: row.id, object_id: row.id,
meta: data meta: data,
}) })
.then(() => { .then(() => {
return row; return row;
@@ -59,39 +93,78 @@ const internalStream = {
* @return {Promise} * @return {Promise}
*/ */
update: (access, data) => { update: (access, data) => {
return access.can('streams:update', data.id) let thisData = data;
const create_certificate = thisData.certificate_id === "new";
if (create_certificate) {
delete thisData.certificate_id;
}
return access
.can("streams:update", thisData.id)
.then((/*access_data*/) => { .then((/*access_data*/) => {
// TODO: at this point the existing streams should have been checked // TODO: at this point the existing streams should have been checked
return internalStream.get(access, {id: data.id}); return internalStream.get(access, { id: thisData.id });
}) })
.then((row) => { .then((row) => {
if (row.id !== data.id) { if (row.id !== thisData.id) {
// Sanity check that something crazy hasn't happened // Sanity check that something crazy hasn't happened
throw new error.InternalValidationError('Stream could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id); throw new errs.InternalValidationError(
`Stream could not be updated, IDs do not match: ${row.id} !== ${thisData.id}`,
);
} }
if (create_certificate) {
return internalCertificate
.createQuickCertificate(access, {
domain_names: thisData.domain_names || row.domain_names,
meta: _.assign({}, row.meta, thisData.meta),
})
.then((cert) => {
// update host with cert id
thisData.certificate_id = cert.id;
})
.then(() => {
return row;
});
}
return row;
})
.then((row) => {
// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
thisData = _.assign(
{},
{
domain_names: row.domain_names,
},
thisData,
);
return streamModel return streamModel
.query() .query()
.patchAndFetchById(row.id, data) .patchAndFetchById(row.id, thisData)
.then(utils.omitRow(omissions())) .then(utils.omitRow(omissions()))
.then((saved_row) => {
return internalNginx.configure(streamModel, 'stream', saved_row)
.then(() => {
return internalStream.get(access, {id: row.id, expand: ['owner']});
});
})
.then((saved_row) => { .then((saved_row) => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog
action: 'updated', .add(access, {
object_type: 'stream', action: "updated",
object_type: "stream",
object_id: row.id, object_id: row.id,
meta: data meta: thisData,
}) })
.then(() => { .then(() => {
return saved_row; return saved_row;
}); });
}); });
})
.then(() => {
return internalStream.get(access, { id: thisData.id, expand: ["owner", "certificate"] }).then((row) => {
return internalNginx.configure(streamModel, "stream", row).then((new_meta) => {
row.meta = new_meta;
return _.omit(internalHost.cleanRowCertificateMeta(row), omissions());
});
});
}); });
}, },
@@ -104,38 +177,39 @@ const internalStream = {
* @return {Promise} * @return {Promise}
*/ */
get: (access, data) => { get: (access, data) => {
if (typeof data === 'undefined') { const thisData = data || {};
data = {};
}
return access.can('streams:get', data.id) return access
.can("streams:get", thisData.id)
.then((access_data) => { .then((access_data) => {
let query = streamModel const query = streamModel
.query() .query()
.where('is_deleted', 0) .where("is_deleted", 0)
.andWhere('id', data.id) .andWhere("id", thisData.id)
.allowGraph('[owner]') .allowGraph("[owner,certificate]")
.first(); .first();
if (access_data.permission_visibility !== 'all') { if (access_data.permission_visibility !== "all") {
query.andWhere('owner_user_id', access.token.getUserId(1)); query.andWhere("owner_user_id", access.token.getUserId(1));
} }
if (typeof data.expand !== 'undefined' && data.expand !== null) { if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
query.withGraphFetched('[' + data.expand.join(', ') + ']'); query.withGraphFetched(`[${thisData.expand.join(", ")}]`);
} }
return query.then(utils.omitRow(omissions())); return query.then(utils.omitRow(omissions()));
}) })
.then((row) => { .then((row) => {
if (!row || !row.id) { let thisRow = row;
throw new error.ItemNotFoundError(data.id); if (!thisRow || !thisRow.id) {
throw new errs.ItemNotFoundError(thisData.id);
} }
thisRow = internalHost.cleanRowCertificateMeta(thisRow);
// Custom omissions // Custom omissions
if (typeof data.omit !== 'undefined' && data.omit !== null) { if (typeof thisData.omit !== "undefined" && thisData.omit !== null) {
row = _.omit(row, data.omit); return _.omit(thisRow, thisData.omit);
} }
return row; return thisRow;
}); });
}, },
@@ -147,35 +221,35 @@ const internalStream = {
* @returns {Promise} * @returns {Promise}
*/ */
delete: (access, data) => { delete: (access, data) => {
return access.can('streams:delete', data.id) return access
.can("streams:delete", data.id)
.then(() => { .then(() => {
return internalStream.get(access, { id: data.id }); return internalStream.get(access, { id: data.id });
}) })
.then((row) => { .then((row) => {
if (!row || !row.id) { if (!row || !row.id) {
throw new error.ItemNotFoundError(data.id); throw new errs.ItemNotFoundError(data.id);
} }
return streamModel return streamModel
.query() .query()
.where('id', row.id) .where("id", row.id)
.patch({ .patch({
is_deleted: 1 is_deleted: 1,
}) })
.then(() => { .then(() => {
// Delete Nginx Config // Delete Nginx Config
return internalNginx.deleteConfig('stream', row) return internalNginx.deleteConfig("stream", row).then(() => {
.then(() => {
return internalNginx.reload(); return internalNginx.reload();
}); });
}) })
.then(() => { .then(() => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog.add(access, {
action: 'deleted', action: "deleted",
object_type: 'stream', object_type: "stream",
object_id: row.id, object_id: row.id,
meta: _.omit(row, omissions()) meta: _.omit(row, omissions()),
}); });
}); });
}) })
@@ -192,39 +266,41 @@ const internalStream = {
* @returns {Promise} * @returns {Promise}
*/ */
enable: (access, data) => { enable: (access, data) => {
return access.can('streams:update', data.id) return access
.can("streams:update", data.id)
.then(() => { .then(() => {
return internalStream.get(access, { return internalStream.get(access, {
id: data.id, id: data.id,
expand: ['owner'] expand: ["certificate", "owner"],
}); });
}) })
.then((row) => { .then((row) => {
if (!row || !row.id) { if (!row || !row.id) {
throw new error.ItemNotFoundError(data.id); throw new errs.ItemNotFoundError(data.id);
} else if (row.enabled) { }
throw new error.ValidationError('Host is already enabled'); if (row.enabled) {
throw new errs.ValidationError("Stream is already enabled");
} }
row.enabled = 1; row.enabled = 1;
return streamModel return streamModel
.query() .query()
.where('id', row.id) .where("id", row.id)
.patch({ .patch({
enabled: 1 enabled: 1,
}) })
.then(() => { .then(() => {
// Configure nginx // Configure nginx
return internalNginx.configure(streamModel, 'stream', row); return internalNginx.configure(streamModel, "stream", row);
}) })
.then(() => { .then(() => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog.add(access, {
action: 'enabled', action: "enabled",
object_type: 'stream', object_type: "stream",
object_id: row.id, object_id: row.id,
meta: _.omit(row, omissions()) meta: _.omit(row, omissions()),
}); });
}); });
}) })
@@ -241,39 +317,40 @@ const internalStream = {
* @returns {Promise} * @returns {Promise}
*/ */
disable: (access, data) => { disable: (access, data) => {
return access.can('streams:update', data.id) return access
.can("streams:update", data.id)
.then(() => { .then(() => {
return internalStream.get(access, { id: data.id }); return internalStream.get(access, { id: data.id });
}) })
.then((row) => { .then((row) => {
if (!row || !row.id) { if (!row || !row.id) {
throw new error.ItemNotFoundError(data.id); throw new errs.ItemNotFoundError(data.id);
} else if (!row.enabled) { }
throw new error.ValidationError('Host is already disabled'); if (!row.enabled) {
throw new errs.ValidationError("Stream is already disabled");
} }
row.enabled = 0; row.enabled = 0;
return streamModel return streamModel
.query() .query()
.where('id', row.id) .where("id", row.id)
.patch({ .patch({
enabled: 0 enabled: 0,
}) })
.then(() => { .then(() => {
// Delete Nginx Config // Delete Nginx Config
return internalNginx.deleteConfig('stream', row) return internalNginx.deleteConfig("stream", row).then(() => {
.then(() => {
return internalNginx.reload(); return internalNginx.reload();
}); });
}) })
.then(() => { .then(() => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog.add(access, {
action: 'disabled', action: "disabled",
object_type: 'stream-host', object_type: "stream-host",
object_id: row.id, object_id: row.id,
meta: _.omit(row, omissions()) meta: _.omit(row, omissions()),
}); });
}); });
}) })
@@ -291,31 +368,39 @@ const internalStream = {
* @returns {Promise} * @returns {Promise}
*/ */
getAll: (access, expand, search_query) => { getAll: (access, expand, search_query) => {
return access.can('streams:list') return access
.can("streams:list")
.then((access_data) => { .then((access_data) => {
let query = streamModel const query = streamModel
.query() .query()
.where('is_deleted', 0) .where("is_deleted", 0)
.groupBy('id') .groupBy("id")
.allowGraph('[owner]') .allowGraph("[owner,certificate]")
.orderBy('incoming_port', 'ASC'); .orderBy("incoming_port", "ASC");
if (access_data.permission_visibility !== 'all') { if (access_data.permission_visibility !== "all") {
query.andWhere('owner_user_id', access.token.getUserId(1)); query.andWhere("owner_user_id", access.token.getUserId(1));
} }
// Query is used for searching // Query is used for searching
if (typeof search_query === 'string') { if (typeof search_query === "string" && search_query.length > 0) {
query.where(function () { query.where(function () {
this.where('incoming_port', 'like', '%' + search_query + '%'); this.where(castJsonIfNeed("incoming_port"), "like", `%${search_query}%`);
}); });
} }
if (typeof expand !== 'undefined' && expand !== null) { if (typeof expand !== "undefined" && expand !== null) {
query.withGraphFetched('[' + expand.join(', ') + ']'); query.withGraphFetched(`[${expand.join(", ")}]`);
} }
return query.then(utils.omitRows(omissions())); return query.then(utils.omitRows(omissions()));
})
.then((rows) => {
if (typeof expand !== "undefined" && expand !== null && expand.indexOf("certificate") !== -1) {
return internalHost.cleanAllRowsCertificateMeta(rows);
}
return rows;
}); });
}, },
@@ -327,20 +412,16 @@ const internalStream = {
* @returns {Promise} * @returns {Promise}
*/ */
getCount: (user_id, visibility) => { getCount: (user_id, visibility) => {
let query = streamModel const query = streamModel.query().count("id AS count").where("is_deleted", 0);
.query()
.count('id as count')
.where('is_deleted', 0);
if (visibility !== 'all') { if (visibility !== "all") {
query.andWhere('owner_user_id', user_id); query.andWhere("owner_user_id", user_id);
} }
return query.first() return query.first().then((row) => {
.then((row) => { return Number.parseInt(row.count, 10);
return parseInt(row.count, 10);
}); });
} },
}; };
module.exports = internalStream; export default internalStream;
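In the stream create path above, a certificate_id of "new" is a sentinel: it is stripped before the insert, a quick certificate is requested afterwards, and the stream row is then patched with the resulting certificate id. A simplified sketch of that branch, with hypothetical createQuickCertificate and updateStream helpers rather than the project's internal modules:

// Hypothetical stand-ins for internalCertificate.createQuickCertificate and internalStream.update.
const createQuickCertificate = async (data) => ({ id: 42, domain_names: data.domain_names });
const updateStream = async (id, patch) => ({ id, ...patch });

const createStream = async (data) => {
    const wantsNewCert = data.certificate_id === "new";
    if (wantsNewCert) {
        // The sentinel value must not reach the database insert
        delete data.certificate_id;
    }

    // Streams are routed by port, not domain, so domain names stay out of the row
    const row = { id: 7, ...structuredClone(data) };
    delete row.domain_names;

    if (wantsNewCert) {
        const cert = await createQuickCertificate(data);
        await updateStream(row.id, { certificate_id: cert.id });
        row.certificate_id = cert.id;
    }
    return row;
};

createStream({ incoming_port: 8443, certificate_id: "new", domain_names: ["example.com"] })
    .then((row) => console.log(row));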

View File

@@ -1,12 +1,14 @@
const _ = require('lodash'); import _ from "lodash";
const error = require('../lib/error'); import errs from "../lib/error.js";
const userModel = require('../models/user'); import { parseDatePeriod } from "../lib/helpers.js";
const authModel = require('../models/auth'); import authModel from "../models/auth.js";
const helpers = require('../lib/helpers'); import TokenModel from "../models/token.js";
const TokenModel = require('../models/token'); import userModel from "../models/user.js";
module.exports = { const ERROR_MESSAGE_INVALID_AUTH = "Invalid email or password";
const ERROR_MESSAGE_INVALID_AUTH_I18N = "error.invalid-auth";
export default {
/** /**
* @param {Object} data * @param {Object} data
* @param {String} data.identity * @param {String} data.identity
@@ -16,70 +18,66 @@ module.exports = {
* @param {String} [issuer] * @param {String} [issuer]
* @returns {Promise} * @returns {Promise}
*/ */
getTokenFromEmail: (data, issuer) => { getTokenFromEmail: async (data, issuer) => {
let Token = new TokenModel(); const Token = TokenModel();
data.scope = data.scope || 'user'; data.scope = data.scope || "user";
data.expiry = data.expiry || '1d'; data.expiry = data.expiry || "1d";
return userModel const user = await userModel
.query() .query()
.where('email', data.identity.toLowerCase().trim()) .where("email", data.identity.toLowerCase().trim())
.andWhere('is_deleted', 0) .andWhere("is_deleted", 0)
.andWhere('is_disabled', 0) .andWhere("is_disabled", 0)
.first() .first();
.then((user) => {
if (user) {
// Get auth
return authModel
.query()
.where('user_id', '=', user.id)
.where('type', '=', 'password')
.first()
.then((auth) => {
if (auth) {
return auth.verifyPassword(data.secret)
.then((valid) => {
if (valid) {
if (data.scope !== 'user' && _.indexOf(user.roles, data.scope) === -1) { if (!user) {
throw new errs.AuthError(ERROR_MESSAGE_INVALID_AUTH);
}
const auth = await authModel
.query()
.where("user_id", "=", user.id)
.where("type", "=", "password")
.first();
if (!auth) {
throw new errs.AuthError(ERROR_MESSAGE_INVALID_AUTH);
}
const valid = await auth.verifyPassword(data.secret);
if (!valid) {
throw new errs.AuthError(
ERROR_MESSAGE_INVALID_AUTH,
ERROR_MESSAGE_INVALID_AUTH_I18N,
);
}
if (data.scope !== "user" && _.indexOf(user.roles, data.scope) === -1) {
// The scope requested doesn't exist as a role against the user, // The scope requested doesn't exist as a role against the user,
// you shall not pass. // you shall not pass.
throw new error.AuthError('Invalid scope: ' + data.scope); throw new errs.AuthError(`Invalid scope: ${data.scope}`);
} }
// Create a moment of the expiry expression // Create a moment of the expiry expression
let expiry = helpers.parseDatePeriod(data.expiry); const expiry = parseDatePeriod(data.expiry);
if (expiry === null) { if (expiry === null) {
throw new error.AuthError('Invalid expiry time: ' + data.expiry); throw new errs.AuthError(`Invalid expiry time: ${data.expiry}`);
} }
return Token.create({ const signed = await Token.create({
iss: issuer || 'api', iss: issuer || "api",
attrs: { attrs: {
id: user.id id: user.id,
}, },
scope: [data.scope], scope: [data.scope],
expiresIn: data.expiry expiresIn: data.expiry,
}) });
.then((signed) => {
return { return {
token: signed.token, token: signed.token,
expires: expiry.toISOString() expires: expiry.toISOString(),
}; };
});
} else {
throw new error.AuthError('Invalid password');
}
});
} else {
throw new error.AuthError('No password auth for user');
}
});
} else {
throw new error.AuthError('No relevant user found');
}
});
}, },
/** /**
@@ -89,74 +87,70 @@ module.exports = {
* @param {String} [data.scope] Only considered if existing token scope is admin * @param {String} [data.scope] Only considered if existing token scope is admin
* @returns {Promise} * @returns {Promise}
*/ */
getFreshToken: (access, data) => { getFreshToken: async (access, data) => {
let Token = new TokenModel(); const Token = TokenModel();
const thisData = data || {};
data = data || {}; thisData.expiry = thisData.expiry || "1d";
data.expiry = data.expiry || '1d';
if (access && access.token.getUserId(0)) {
if (access?.token.getUserId(0)) {
// Create a moment of the expiry expression // Create a moment of the expiry expression
let expiry = helpers.parseDatePeriod(data.expiry); const expiry = parseDatePeriod(thisData.expiry);
if (expiry === null) { if (expiry === null) {
throw new error.AuthError('Invalid expiry time: ' + data.expiry); throw new errs.AuthError(`Invalid expiry time: ${thisData.expiry}`);
} }
let token_attrs = { const token_attrs = {
id: access.token.getUserId(0) id: access.token.getUserId(0),
}; };
// Only admins can request otherwise scoped tokens // Only admins can request otherwise scoped tokens
let scope = access.token.get('scope'); let scope = access.token.get("scope");
if (data.scope && access.token.hasScope('admin')) { if (thisData.scope && access.token.hasScope("admin")) {
scope = [data.scope]; scope = [thisData.scope];
if (data.scope === 'job-board' || data.scope === 'worker') { if (thisData.scope === "job-board" || thisData.scope === "worker") {
token_attrs.id = 0; token_attrs.id = 0;
} }
} }
return Token.create({ const signed = await Token.create({
iss: 'api', iss: "api",
scope: scope, scope: scope,
attrs: token_attrs, attrs: token_attrs,
expiresIn: data.expiry expiresIn: thisData.expiry,
}) });
.then((signed) => {
return { return {
token: signed.token, token: signed.token,
expires: expiry.toISOString() expires: expiry.toISOString(),
}; };
});
} else {
throw new error.AssertionFailedError('Existing token contained invalid user data');
} }
throw new error.AssertionFailedError("Existing token contained invalid user data");
}, },
/** /**
* @param {Object} user * @param {Object} user
* @returns {Promise} * @returns {Promise}
*/ */
getTokenFromUser: (user) => { getTokenFromUser: async (user) => {
const expire = '1d'; const expire = "1d";
const Token = new TokenModel(); const Token = TokenModel();
const expiry = helpers.parseDatePeriod(expire); const expiry = parseDatePeriod(expire);
return Token.create({ const signed = await Token.create({
iss: 'api', iss: "api",
attrs: { attrs: {
id: user.id id: user.id,
}, },
scope: ['user'], scope: ["user"],
expiresIn: expire expiresIn: expire,
}) });
.then((signed) => {
return { return {
token: signed.token, token: signed.token,
expires: expiry.toISOString(), expires: expiry.toISOString(),
user: user user: user,
}; };
}); },
}
}; };
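The token module above replaces a five-level promise pyramid with sequential awaits and early throws. A compact sketch of that control flow, using hypothetical findUser/findAuth/signToken helpers instead of the real Objection models and TokenModel:

// Hypothetical helpers approximating the model calls in the hunk above.
const findUser = async (email) => ({ id: 1, email, roles: [] });
const findAuth = async (_userId) => ({ verifyPassword: async (secret) => secret === "s3cret" });
const signToken = async (payload) => ({ token: `jwt.for.${payload.id}` });

const getTokenFromEmail = async ({ identity, secret, scope = "user", expiry = "1d" }) => {
    const user = await findUser(identity.toLowerCase().trim());
    if (!user) throw new Error("Invalid email or password");

    const auth = await findAuth(user.id);
    if (!auth || !(await auth.verifyPassword(secret))) {
        throw new Error("Invalid email or password");
    }
    if (scope !== "user" && !user.roles.includes(scope)) {
        throw new Error(`Invalid scope: ${scope}`);
    }
    const signed = await signToken({ id: user.id, scope: [scope], expiresIn: expiry });
    return { token: signed.token };
};

getTokenFromEmail({ identity: "Admin@Example.com", secret: "s3cret" })
    .then((result) => console.log(result.token));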

View File

@@ -1,93 +1,76 @@
const _ = require('lodash'); import gravatar from "gravatar";
const error = require('../lib/error'); import _ from "lodash";
const utils = require('../lib/utils'); import errs from "../lib/error.js";
const userModel = require('../models/user'); import utils from "../lib/utils.js";
const userPermissionModel = require('../models/user_permission'); import authModel from "../models/auth.js";
const authModel = require('../models/auth'); import userModel from "../models/user.js";
const gravatar = require('gravatar'); import userPermissionModel from "../models/user_permission.js";
const internalToken = require('./token'); import internalAuditLog from "./audit-log.js";
const internalAuditLog = require('./audit-log'); import internalToken from "./token.js";
function omissions () { const omissions = () => {
return ['is_deleted']; return ["is_deleted", "permissions.id", "permissions.user_id", "permissions.created_on", "permissions.modified_on"];
} };
const DEFAULT_AVATAR = gravatar.url("admin@example.com", { default: "mm" });
const internalUser = { const internalUser = {
/** /**
* Creating a user can happen unauthenticated only once, and only when no active users exist.
* Otherwise, a valid auth method is required.
*
* @param {Access} access * @param {Access} access
* @param {Object} data * @param {Object} data
* @returns {Promise} * @returns {Promise}
*/ */
create: (access, data) => { create: async (access, data) => {
let auth = data.auth || null; const auth = data.auth || null;
delete data.auth; delete data.auth;
data.avatar = data.avatar || ''; data.avatar = data.avatar || "";
data.roles = data.roles || []; data.roles = data.roles || [];
if (typeof data.is_disabled !== 'undefined') { if (typeof data.is_disabled !== "undefined") {
data.is_disabled = data.is_disabled ? 1 : 0; data.is_disabled = data.is_disabled ? 1 : 0;
} }
return access.can('users:create', data) await access.can("users:create", data);
.then(() => { data.avatar = gravatar.url(data.email, { default: "mm" });
data.avatar = gravatar.url(data.email, {default: 'mm'});
return userModel let user = await userModel.query().insertAndFetch(data).then(utils.omitRow(omissions()));
.query()
.insertAndFetch(data)
.then(utils.omitRow(omissions()));
})
.then((user) => {
if (auth) { if (auth) {
return authModel user = await authModel.query().insert({
.query()
.insert({
user_id: user.id, user_id: user.id,
type: auth.type, type: auth.type,
secret: auth.secret, secret: auth.secret,
meta: {} meta: {},
})
.then(() => {
return user;
}); });
} else {
return user;
} }
})
.then((user) => {
// Create permissions row as well
let is_admin = data.roles.indexOf('admin') !== -1;
return userPermissionModel // Create permissions row as well
.query() const isAdmin = data.roles.indexOf("admin") !== -1;
.insert({
await userPermissionModel.query().insert({
user_id: user.id, user_id: user.id,
visibility: is_admin ? 'all' : 'user', visibility: isAdmin ? "all" : "user",
proxy_hosts: 'manage', proxy_hosts: "manage",
redirection_hosts: 'manage', redirection_hosts: "manage",
dead_hosts: 'manage', dead_hosts: "manage",
streams: 'manage', streams: "manage",
access_lists: 'manage', access_lists: "manage",
certificates: 'manage' certificates: "manage",
})
.then(() => {
return internalUser.get(access, {id: user.id, expand: ['permissions']});
}); });
})
.then((user) => { user = await internalUser.get(access, { id: user.id, expand: ["permissions"] });
// Add to audit log
return internalAuditLog.add(access, { await internalAuditLog.add(access, {
action: 'created', action: "created",
object_type: 'user', object_type: "user",
object_id: user.id, object_id: user.id,
meta: user meta: user,
}) });
.then(() => {
return user; return user;
});
});
}, },
/** /**
@@ -99,29 +82,25 @@ const internalUser = {
* @return {Promise} * @return {Promise}
*/ */
update: (access, data) => { update: (access, data) => {
if (typeof data.is_disabled !== 'undefined') { if (typeof data.is_disabled !== "undefined") {
data.is_disabled = data.is_disabled ? 1 : 0; data.is_disabled = data.is_disabled ? 1 : 0;
} }
return access.can('users:update', data.id) return access
.can("users:update", data.id)
.then(() => { .then(() => {
// Make sure that the user being updated doesn't change their email to another user that is already using it // Make sure that the user being updated doesn't change their email to another user that is already using it
// 1. get user we want to update // 1. get user we want to update
return internalUser.get(access, {id: data.id}) return internalUser.get(access, { id: data.id }).then((user) => {
.then((user) => {
// 2. if email is to be changed, find other users with that email // 2. if email is to be changed, find other users with that email
if (typeof data.email !== 'undefined') { if (typeof data.email !== "undefined") {
data.email = data.email.toLowerCase().trim(); data.email = data.email.toLowerCase().trim();
if (user.email !== data.email) { if (user.email !== data.email) {
return internalUser.isEmailAvailable(data.email, data.id) return internalUser.isEmailAvailable(data.email, data.id).then((available) => {
.then((available) => {
if (!available) { if (!available) {
throw new error.ValidationError('Email address already in use - ' + data.email); throw new errs.ValidationError(`Email address already in use - ${data.email}`);
} }
return user; return user;
}); });
} }
@@ -134,26 +113,25 @@ const internalUser = {
.then((user) => { .then((user) => {
if (user.id !== data.id) { if (user.id !== data.id) {
// Sanity check that something crazy hasn't happened // Sanity check that something crazy hasn't happened
throw new error.InternalValidationError('User could not be updated, IDs do not match: ' + user.id + ' !== ' + data.id); throw new errs.InternalValidationError(
`User could not be updated, IDs do not match: ${user.id} !== ${data.id}`,
);
} }
data.avatar = gravatar.url(data.email || user.email, {default: 'mm'}); data.avatar = gravatar.url(data.email || user.email, { default: "mm" });
return userModel.query().patchAndFetchById(user.id, data).then(utils.omitRow(omissions()));
return userModel
.query()
.patchAndFetchById(user.id, data)
.then(utils.omitRow(omissions()));
}) })
.then(() => { .then(() => {
return internalUser.get(access, { id: data.id }); return internalUser.get(access, { id: data.id });
}) })
.then((user) => { .then((user) => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog
action: 'updated', .add(access, {
object_type: 'user', action: "updated",
object_type: "user",
object_id: user.id, object_id: user.id,
meta: data meta: data,
}) })
.then(() => { .then(() => {
return user; return user;
@@ -170,37 +148,41 @@ const internalUser = {
* @return {Promise} * @return {Promise}
*/ */
get: (access, data) => { get: (access, data) => {
if (typeof data === 'undefined') { const thisData = data || {};
data = {};
if (typeof thisData.id === "undefined" || !thisData.id) {
thisData.id = access.token.getUserId(0);
} }
if (typeof data.id === 'undefined' || !data.id) { return access
data.id = access.token.getUserId(0); .can("users:get", thisData.id)
}
return access.can('users:get', data.id)
.then(() => { .then(() => {
let query = userModel const query = userModel
.query() .query()
.where('is_deleted', 0) .where("is_deleted", 0)
.andWhere('id', data.id) .andWhere("id", thisData.id)
.allowGraph('[permissions]') .allowGraph("[permissions]")
.first(); .first();
if (typeof data.expand !== 'undefined' && data.expand !== null) { if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
query.withGraphFetched('[' + data.expand.join(', ') + ']'); query.withGraphFetched(`[${thisData.expand.join(", ")}]`);
} }
return query.then(utils.omitRow(omissions())); return query.then(utils.omitRow(omissions()));
}) })
.then((row) => { .then((row) => {
if (!row || !row.id) { if (!row || !row.id) {
throw new error.ItemNotFoundError(data.id); throw new errs.ItemNotFoundError(thisData.id);
} }
// Custom omissions // Custom omissions
if (typeof data.omit !== 'undefined' && data.omit !== null) { if (typeof thisData.omit !== "undefined" && thisData.omit !== null) {
row = _.omit(row, data.omit); return _.omit(row, thisData.omit);
} }
if (row.avatar === "") {
row.avatar = DEFAULT_AVATAR;
}
return row; return row;
}); });
}, },
@@ -213,18 +195,13 @@ const internalUser = {
* @param user_id * @param user_id
*/ */
isEmailAvailable: (email, user_id) => { isEmailAvailable: (email, user_id) => {
let query = userModel const query = userModel.query().where("email", "=", email.toLowerCase().trim()).where("is_deleted", 0).first();
.query()
.where('email', '=', email.toLowerCase().trim())
.where('is_deleted', 0)
.first();
if (typeof user_id !== 'undefined') { if (typeof user_id !== "undefined") {
query.where('id', '!=', user_id); query.where("id", "!=", user_id);
} }
return query return query.then((user) => {
.then((user) => {
return !user; return !user;
}); });
}, },
@@ -237,33 +214,34 @@ const internalUser = {
* @returns {Promise} * @returns {Promise}
*/ */
delete: (access, data) => { delete: (access, data) => {
return access.can('users:delete', data.id) return access
.can("users:delete", data.id)
.then(() => { .then(() => {
return internalUser.get(access, { id: data.id }); return internalUser.get(access, { id: data.id });
}) })
.then((user) => { .then((user) => {
if (!user) { if (!user) {
throw new error.ItemNotFoundError(data.id); throw new errs.ItemNotFoundError(data.id);
} }
// Make sure user can't delete themselves // Make sure user can't delete themselves
if (user.id === access.token.getUserId(0)) { if (user.id === access.token.getUserId(0)) {
throw new error.PermissionError('You cannot delete yourself.'); throw new errs.PermissionError("You cannot delete yourself.");
} }
return userModel return userModel
.query() .query()
.where('id', user.id) .where("id", user.id)
.patch({ .patch({
is_deleted: 1 is_deleted: 1,
}) })
.then(() => { .then(() => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog.add(access, {
action: 'deleted', action: "deleted",
object_type: 'user', object_type: "user",
object_id: user.id, object_id: user.id,
meta: _.omit(user, omissions()) meta: _.omit(user, omissions()),
}); });
}); });
}) })
@@ -272,6 +250,14 @@ const internalUser = {
}); });
}, },
deleteAll: async () => {
await userModel
.query()
.patch({
is_deleted: 1,
});
},
/** /**
* This will only count the users * This will only count the users
* *
@@ -280,26 +266,26 @@ const internalUser = {
* @returns {*} * @returns {*}
*/ */
getCount: (access, search_query) => { getCount: (access, search_query) => {
return access.can('users:list') return access
.can("users:list")
.then(() => { .then(() => {
let query = userModel const query = userModel.query().count("id as count").where("is_deleted", 0).first();
.query()
.count('id as count')
.where('is_deleted', 0)
.first();
// Query is used for searching // Query is used for searching
if (typeof search_query === 'string') { if (typeof search_query === "string") {
query.where(function () { query.where(function () {
this.where('user.name', 'like', '%' + search_query + '%') this.where("user.name", "like", `%${search_query}%`).orWhere(
.orWhere('user.email', 'like', '%' + search_query + '%'); "user.email",
"like",
`%${search_query}%`,
);
}); });
} }
return query; return query;
}) })
.then((row) => { .then((row) => {
return parseInt(row.count, 10); return Number.parseInt(row.count, 10);
}); });
}, },
@@ -311,30 +297,28 @@ const internalUser = {
* @param {String} [search_query] * @param {String} [search_query]
* @returns {Promise} * @returns {Promise}
*/ */
getAll: (access, expand, search_query) => { getAll: async (access, expand, search_query) => {
return access.can('users:list') await access.can("users:list");
.then(() => { const query = userModel
let query = userModel
.query() .query()
.where('is_deleted', 0) .where("is_deleted", 0)
.groupBy('id') .groupBy("id")
.allowGraph('[permissions]') .allowGraph("[permissions]")
.orderBy('name', 'ASC'); .orderBy("name", "ASC");
// Query is used for searching // Query is used for searching
if (typeof search_query === 'string') { if (typeof search_query === "string") {
query.where(function () { query.where(function () {
this.where('name', 'like', '%' + search_query + '%') this.where("name", "like", `%${search_query}%`).orWhere("email", "like", `%${search_query}%`);
.orWhere('email', 'like', '%' + search_query + '%');
}); });
} }
if (typeof expand !== 'undefined' && expand !== null) { if (typeof expand !== "undefined" && expand !== null) {
query.withGraphFetched('[' + expand.join(', ') + ']'); query.withGraphFetched(`[${expand.join(", ")}]`);
} }
return query.then(utils.omitRows(omissions())); const res = await query;
}); return utils.omitRows(omissions())(res);
}, },
/** /**
@@ -342,11 +326,11 @@ const internalUser = {
* @param {Integer} [id_requested] * @param {Integer} [id_requested]
* @returns {[String]} * @returns {[String]}
*/ */
getUserOmisionsByAccess: (access, id_requested) => { getUserOmisionsByAccess: (access, idRequested) => {
let response = []; // Admin response let response = []; // Admin response
if (!access.token.hasScope('admin') && access.token.getUserId(0) !== id_requested) { if (!access.token.hasScope("admin") && access.token.getUserId(0) !== idRequested) {
response = ['roles', 'is_deleted']; // Restricted response response = ["is_deleted"]; // Restricted response
} }
return response; return response;
@@ -361,25 +345,29 @@ const internalUser = {
* @return {Promise} * @return {Promise}
*/ */
setPassword: (access, data) => { setPassword: (access, data) => {
return access.can('users:password', data.id) return access
.can("users:password", data.id)
.then(() => { .then(() => {
return internalUser.get(access, { id: data.id }); return internalUser.get(access, { id: data.id });
}) })
.then((user) => { .then((user) => {
if (user.id !== data.id) { if (user.id !== data.id) {
// Sanity check that something crazy hasn't happened // Sanity check that something crazy hasn't happened
throw new error.InternalValidationError('User could not be updated, IDs do not match: ' + user.id + ' !== ' + data.id); throw new errs.InternalValidationError(
`User could not be updated, IDs do not match: ${user.id} !== ${data.id}`,
);
} }
if (user.id === access.token.getUserId(0)) { if (user.id === access.token.getUserId(0)) {
// they're setting their own password. Make sure their current password is correct // they're setting their own password. Make sure their current password is correct
if (typeof data.current === 'undefined' || !data.current) { if (typeof data.current === "undefined" || !data.current) {
throw new error.ValidationError('Current password was not supplied'); throw new errs.ValidationError("Current password was not supplied");
} }
return internalToken.getTokenFromEmail({ return internalToken
.getTokenFromEmail({
identity: user.email, identity: user.email,
secret: data.current secret: data.current,
}) })
.then(() => { .then(() => {
return user; return user;
@@ -392,43 +380,36 @@ const internalUser = {
// Get auth, patch if it exists // Get auth, patch if it exists
return authModel return authModel
.query() .query()
.where('user_id', user.id) .where("user_id", user.id)
.andWhere('type', data.type) .andWhere("type", data.type)
.first() .first()
.then((existing_auth) => { .then((existing_auth) => {
if (existing_auth) { if (existing_auth) {
// patch // patch
return authModel return authModel.query().where("user_id", user.id).andWhere("type", data.type).patch({
.query()
.where('user_id', user.id)
.andWhere('type', data.type)
.patch({
type: data.type, // This is required for the model to encrypt on save type: data.type, // This is required for the model to encrypt on save
secret: data.secret secret: data.secret,
}); });
} else { }
// insert // insert
return authModel return authModel.query().insert({
.query()
.insert({
user_id: user.id, user_id: user.id,
type: data.type, type: data.type,
secret: data.secret, secret: data.secret,
meta: {} meta: {},
}); });
}
}) })
.then(() => { .then(() => {
// Add to Audit Log // Add to Audit Log
return internalAuditLog.add(access, { return internalAuditLog.add(access, {
action: 'updated', action: "updated",
object_type: 'user', object_type: "user",
object_id: user.id, object_id: user.id,
meta: { meta: {
name: user.name, name: user.name,
password_changed: true, password_changed: true,
auth_type: data.type auth_type: data.type,
} },
}); });
}); });
}) })
@@ -443,14 +424,17 @@ const internalUser = {
* @return {Promise} * @return {Promise}
*/ */
setPermissions: (access, data) => { setPermissions: (access, data) => {
return access.can('users:permissions', data.id) return access
.can("users:permissions", data.id)
.then(() => { .then(() => {
return internalUser.get(access, { id: data.id }); return internalUser.get(access, { id: data.id });
}) })
.then((user) => { .then((user) => {
if (user.id !== data.id) { if (user.id !== data.id) {
// Sanity check that something crazy hasn't happened // Sanity check that something crazy hasn't happened
throw new error.InternalValidationError('User could not be updated, IDs do not match: ' + user.id + ' !== ' + data.id); throw new errs.InternalValidationError(
`User could not be updated, IDs do not match: ${user.id} !== ${data.id}`,
);
} }
return user; return user;
@@ -459,34 +443,30 @@ const internalUser = {
// Get perms row, patch if it exists // Get perms row, patch if it exists
return userPermissionModel return userPermissionModel
.query() .query()
.where('user_id', user.id) .where("user_id", user.id)
.first() .first()
.then((existing_auth) => { .then((existing_auth) => {
if (existing_auth) { if (existing_auth) {
// patch // patch
return userPermissionModel return userPermissionModel
.query() .query()
.where('user_id', user.id) .where("user_id", user.id)
.patchAndFetchById(existing_auth.id, _.assign({ user_id: user.id }, data)); .patchAndFetchById(existing_auth.id, _.assign({ user_id: user.id }, data));
} else {
// insert
return userPermissionModel
.query()
.insertAndFetch(_.assign({user_id: user.id}, data));
} }
// insert
return userPermissionModel.query().insertAndFetch(_.assign({ user_id: user.id }, data));
}) })
.then((permissions) => { .then((permissions) => {
// Add to Audit Log // Add to Audit Log
return internalAuditLog.add(access, { return internalAuditLog.add(access, {
action: 'updated', action: "updated",
object_type: 'user', object_type: "user",
object_id: user.id, object_id: user.id,
meta: { meta: {
name: user.name, name: user.name,
permissions: permissions permissions: permissions,
} },
}); });
}); });
}) })
.then(() => { .then(() => {
@@ -500,14 +480,15 @@ const internalUser = {
* @param {Integer} data.id * @param {Integer} data.id
*/ */
loginAs: (access, data) => { loginAs: (access, data) => {
return access.can('users:loginas', data.id) return access
.can("users:loginas", data.id)
.then(() => { .then(() => {
return internalUser.get(access, data); return internalUser.get(access, data);
}) })
.then((user) => { .then((user) => {
return internalToken.getTokenFromUser(user); return internalToken.getTokenFromUser(user);
}); });
} },
}; };
module.exports = internalUser; export default internalUser;
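User creation above seeds a user_permission row whose visibility is "all" for admins and "user" otherwise, with every section defaulting to "manage". A small sketch of that defaulting logic, assuming only the role list from the request (the column names follow the hunk above; the rest is illustrative):

const defaultPermissions = (userId, roles) => {
    const isAdmin = roles.includes("admin");
    return {
        user_id: userId,
        visibility: isAdmin ? "all" : "user",
        proxy_hosts: "manage",
        redirection_hosts: "manage",
        dead_hosts: "manage",
        streams: "manage",
        access_lists: "manage",
        certificates: "manage",
    };
};

console.log(defaultPermissions(12, ["admin"]).visibility); // "all"
console.log(defaultPermissions(13, []).visibility); // "user"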

View File

@@ -4,27 +4,31 @@
* "scope" in this file means "where did this token come from and what is using it", so 99% of the time * "scope" in this file means "where did this token come from and what is using it", so 99% of the time
* the "scope" is going to be "user" because it would be a user token. This is not to be confused with * the "scope" is going to be "user" because it would be a user token. This is not to be confused with
* the "role" which could be "user" or "admin". The scope in fact, could be "worker" or anything else. * the "role" which could be "user" or "admin". The scope in fact, could be "worker" or anything else.
*
*
*/ */
const _ = require('lodash'); import fs from "node:fs";
const logger = require('../logger').access; import { dirname } from "node:path";
const Ajv = require('ajv/dist/2020'); import { fileURLToPath } from "node:url";
const error = require('./error'); import Ajv from "ajv/dist/2020.js";
const userModel = require('../models/user'); import _ from "lodash";
const proxyHostModel = require('../models/proxy_host'); import { access as logger } from "../logger.js";
const TokenModel = require('../models/token'); import proxyHostModel from "../models/proxy_host.js";
const roleSchema = require('./access/roles.json'); import TokenModel from "../models/token.js";
const permsSchema = require('./access/permissions.json'); import userModel from "../models/user.js";
import permsSchema from "./access/permissions.json" with { type: "json" };
import roleSchema from "./access/roles.json" with { type: "json" };
import errs from "./error.js";
module.exports = function (token_string) { const __filename = fileURLToPath(import.meta.url);
let Token = new TokenModel(); const __dirname = dirname(__filename);
let token_data = null;
export default function (tokenString) {
const Token = TokenModel();
let tokenData = null;
let initialised = false; let initialised = false;
let object_cache = {}; const objectCache = {};
let allow_internal_access = false; let allowInternalAccess = false;
let user_roles = []; let userRoles = [];
let permissions = {}; let permissions = {};
/** /**
@@ -32,63 +36,58 @@ module.exports = function (token_string) {
* *
* @returns {Promise} * @returns {Promise}
*/ */
this.init = () => { this.init = async () => {
return new Promise((resolve, reject) => {
if (initialised) { if (initialised) {
resolve(); return;
} else if (!token_string) { }
reject(new error.PermissionError('Permission Denied'));
} else { if (!tokenString) {
resolve(Token.load(token_string) throw new errs.PermissionError("Permission Denied");
.then((data) => { }
token_data = data;
tokenData = await Token.load(tokenString);
// At this point we need to load the user from the DB and make sure they: // At this point we need to load the user from the DB and make sure they:
// - exist (and not soft deleted) // - exist (and not soft deleted)
// - still have the appropriate scopes for this token // - still have the appropriate scopes for this token
// This is only required when the User ID is supplied or if the token scope has `user` // This is only required when the User ID is supplied or if the token scope has `user`
if (
if (token_data.attrs.id || (typeof token_data.scope !== 'undefined' && _.indexOf(token_data.scope, 'user') !== -1)) { tokenData.attrs.id ||
(typeof tokenData.scope !== "undefined" && _.indexOf(tokenData.scope, "user") !== -1)
) {
// Has token user id or token user scope // Has token user id or token user scope
return userModel const user = await userModel
.query() .query()
.where('id', token_data.attrs.id) .where("id", tokenData.attrs.id)
.andWhere('is_deleted', 0) .andWhere("is_deleted", 0)
.andWhere('is_disabled', 0) .andWhere("is_disabled", 0)
.allowGraph('[permissions]') .allowGraph("[permissions]")
.withGraphFetched('[permissions]') .withGraphFetched("[permissions]")
.first() .first();
.then((user) => {
if (user) { if (user) {
// make sure user has all scopes of the token // make sure user has all scopes of the token
// The `user` role is not added against the user row, so we have to just add it here to get past this check. // The `user` role is not added against the user row, so we have to just add it here to get past this check.
user.roles.push('user'); user.roles.push("user");
let is_ok = true; let ok = true;
_.forEach(token_data.scope, (scope_item) => { _.forEach(tokenData.scope, (scope_item) => {
if (_.indexOf(user.roles, scope_item) === -1) { if (_.indexOf(user.roles, scope_item) === -1) {
is_ok = false; ok = false;
} }
}); });
if (!is_ok) { if (!ok) {
throw new error.AuthError('Invalid token scope for User'); throw new errs.AuthError("Invalid token scope for User");
} else { }
initialised = true; initialised = true;
user_roles = user.roles; userRoles = user.roles;
permissions = user.permissions; permissions = user.permissions;
}
} else { } else {
throw new error.AuthError('User cannot be loaded for Token'); throw new errs.AuthError("User cannot be loaded for Token");
}
} }
});
} else {
initialised = true; initialised = true;
}
}));
}
});
}; };
/** /**
@@ -96,140 +95,121 @@ module.exports = function (token_string) {
* This only applies to USER token scopes, as all other tokens are not really bound * This only applies to USER token scopes, as all other tokens are not really bound
* by object scopes * by object scopes
* *
* @param {String} object_type * @param {String} objectType
* @returns {Promise} * @returns {Promise}
*/ */
this.loadObjects = (object_type) => { this.loadObjects = async (objectType) => {
return new Promise((resolve, reject) => { let objects = null;
if (Token.hasScope('user')) {
if (typeof token_data.attrs.id === 'undefined' || !token_data.attrs.id) { if (Token.hasScope("user")) {
reject(new error.AuthError('User Token supplied without a User ID')); if (typeof tokenData.attrs.id === "undefined" || !tokenData.attrs.id) {
throw new errs.AuthError("User Token supplied without a User ID");
}
const tokenUserId = tokenData.attrs.id ? tokenData.attrs.id : 0;
if (typeof objectCache[objectType] !== "undefined") {
objects = objectCache[objectType];
} else { } else {
let token_user_id = token_data.attrs.id ? token_data.attrs.id : 0; switch (objectType) {
let query;
if (typeof object_cache[object_type] === 'undefined') {
switch (object_type) {
// USERS - should only return yourself // USERS - should only return yourself
case 'users': case "users":
resolve(token_user_id ? [token_user_id] : []); objects = tokenUserId ? [tokenUserId] : [];
break; break;
// Proxy Hosts // Proxy Hosts
case 'proxy_hosts': case "proxy_hosts": {
query = proxyHostModel const query = proxyHostModel
.query() .query()
.select('id') .select("id")
.andWhere('is_deleted', 0); .andWhere("is_deleted", 0);
if (permissions.visibility === 'user') { if (permissions.visibility === "user") {
query.andWhere('owner_user_id', token_user_id); query.andWhere("owner_user_id", tokenUserId);
} }
resolve(query const rows = await query;
.then((rows) => { objects = [];
let result = []; _.forEach(rows, (ruleRow) => {
_.forEach(rows, (rule_row) => { objects.push(ruleRow.id);
result.push(rule_row.id);
}); });
// enum should not have less than 1 item // enum should not have less than 1 item
if (!result.length) { if (!objects.length) {
result.push(0); objects.push(0);
} }
return result;
})
);
break;
// DEFAULT: null
default:
resolve(null);
break; break;
} }
} else { }
resolve(object_cache[object_type]); objectCache[objectType] = objects;
} }
} }
} else {
resolve(null);
}
})
.then((objects) => {
object_cache[object_type] = objects;
return objects; return objects;
});
}; };
/** /**
* Creates a schema object on the fly with the IDs and other values required to be checked against the permissionSchema * Creates a schema object on the fly with the IDs and other values required to be checked against the permissionSchema
* *
* @param {String} permission_label * @param {String} permissionLabel
* @returns {Object} * @returns {Object}
*/ */
this.getObjectSchema = (permission_label) => { this.getObjectSchema = async (permissionLabel) => {
let base_object_type = permission_label.split(':').shift(); const baseObjectType = permissionLabel.split(":").shift();
let schema = { const schema = {
$id: 'objects', $id: "objects",
description: 'Actor Properties', description: "Actor Properties",
type: 'object', type: "object",
additionalProperties: false, additionalProperties: false,
properties: { properties: {
user_id: { user_id: {
anyOf: [ anyOf: [
{ {
type: 'number', type: "number",
enum: [Token.get('attrs').id] enum: [Token.get("attrs").id],
} },
] ],
}, },
scope: { scope: {
type: 'string', type: "string",
pattern: '^' + Token.get('scope') + '$' pattern: `^${Token.get("scope")}$`,
} },
} },
}; };
return this.loadObjects(base_object_type) const result = await this.loadObjects(baseObjectType);
.then((object_result) => { if (typeof result === "object" && result !== null) {
if (typeof object_result === 'object' && object_result !== null) { schema.properties[baseObjectType] = {
schema.properties[base_object_type] = { type: "number",
type: 'number', enum: result,
enum: object_result, minimum: 1,
minimum: 1
}; };
} else { } else {
schema.properties[base_object_type] = { schema.properties[baseObjectType] = {
type: 'number', type: "number",
minimum: 1 minimum: 1,
}; };
} }
return schema; return schema;
});
}; };
return { return {
token: Token, token: Token,
/** /**
* *
* @param {Boolean} [allow_internal] * @param {Boolean} [allowInternal]
* @returns {Promise} * @returns {Promise}
*/ */
load: (allow_internal) => { load: async (allowInternal) => {
return new Promise(function (resolve/*, reject*/) { if (tokenString) {
if (token_string) { return await Token.load(tokenString);
resolve(Token.load(token_string));
} else {
allow_internal_access = allow_internal;
resolve(allow_internal_access || null);
} }
}); allowInternalAccess = allowInternal;
return allowInternal || null;
}, },
reloadObjects: this.loadObjects, reloadObjects: this.loadObjects,
@@ -240,68 +220,59 @@ module.exports = function (token_string) {
* @param {*} [data] * @param {*} [data]
* @returns {Promise} * @returns {Promise}
*/ */
can: (permission, data) => { can: async (permission, data) => {
if (allow_internal_access === true) { if (allowInternalAccess === true) {
return Promise.resolve(true); return true;
//return true; }
} else {
return this.init() try {
.then(() => { await this.init();
// Initialised, token decoded ok const objectSchema = await this.getObjectSchema(permission);
return this.getObjectSchema(permission)
.then((objectSchema) => { const dataSchema = {
const data_schema = {
[permission]: { [permission]: {
data: data, data: data,
scope: Token.get('scope'), scope: Token.get("scope"),
roles: user_roles, roles: userRoles,
permission_visibility: permissions.visibility, permission_visibility: permissions.visibility,
permission_proxy_hosts: permissions.proxy_hosts, permission_proxy_hosts: permissions.proxy_hosts,
permission_redirection_hosts: permissions.redirection_hosts, permission_redirection_hosts: permissions.redirection_hosts,
permission_dead_hosts: permissions.dead_hosts, permission_dead_hosts: permissions.dead_hosts,
permission_streams: permissions.streams, permission_streams: permissions.streams,
permission_access_lists: permissions.access_lists, permission_access_lists: permissions.access_lists,
permission_certificates: permissions.certificates permission_certificates: permissions.certificates,
} },
}; };
let permissionSchema = { const permissionSchema = {
$async: true, $async: true,
$id: 'permissions', $id: "permissions",
type: 'object', type: "object",
additionalProperties: false, additionalProperties: false,
properties: {} properties: {},
}; };
permissionSchema.properties[permission] = require('./access/' + permission.replace(/:/gim, '-') + '.json'); const rawData = fs.readFileSync(`${__dirname}/access/${permission.replace(/:/gim, "-")}.json`, {
encoding: "utf8",
});
permissionSchema.properties[permission] = JSON.parse(rawData);
const ajv = new Ajv({ const ajv = new Ajv({
verbose: true, verbose: true,
allErrors: true, allErrors: true,
breakOnError: true, breakOnError: true,
coerceTypes: true, coerceTypes: true,
schemas: [ schemas: [roleSchema, permsSchema, objectSchema, permissionSchema],
roleSchema,
permsSchema,
objectSchema,
permissionSchema
]
}); });
return ajv.validate('permissions', data_schema) const valid = await ajv.validate("permissions", dataSchema);
.then(() => { return valid && dataSchema[permission];
return data_schema[permission]; } catch (err) {
});
});
})
.catch((err) => {
err.permission = permission; err.permission = permission;
err.permission_data = data; err.permission_data = data;
logger.error(permission, data, err.message); logger.error(permission, data, err.message);
throw new errs.PermissionError("Permission Denied", err);
throw new error.PermissionError('Permission Denied', err);
});
} }
},
};
} }
};
};
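The can() implementation above builds a per-request data object and lets AJV decide whether the token's scope, roles and visibility satisfy the permission's JSON schema. A much-reduced sketch of that idea, validating request data against an inline schema (this stands in for the roles/permissions JSON files loaded from disk; it is not the project's actual schema):

import Ajv from "ajv/dist/2020.js";

// Inline stand-in for the per-permission schema read from ./access/<permission>.json above.
const permissionSchema = {
    type: "object",
    additionalProperties: false,
    required: ["roles", "permission_visibility"],
    properties: {
        roles: { type: "array", items: { type: "string" }, contains: { const: "admin" } },
        permission_visibility: { type: "string", enum: ["all", "user"] },
    },
};

const ajv = new Ajv({ allErrors: true, coerceTypes: true });
const validate = ajv.compile(permissionSchema);

const dataSchema = { roles: ["admin", "user"], permission_visibility: "all" };
if (!validate(dataSchema)) {
    throw new Error("Permission Denied");
}
console.log("allowed");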

View File

@@ -1,17 +1,15 @@
const dnsPlugins = require('../global/certbot-dns-plugins.json'); import batchflow from "batchflow";
const utils = require('./utils'); import dnsPlugins from "../global/certbot-dns-plugins.json" with { type: "json" };
const error = require('./error'); import { certbot as logger } from "../logger.js";
const logger = require('../logger').certbot; import errs from "./error.js";
const batchflow = require('batchflow'); import utils from "./utils.js";
const CERTBOT_VERSION_REPLACEMENT = '$(certbot --version | grep -Eo \'[0-9](\\.[0-9]+)+\')'; const CERTBOT_VERSION_REPLACEMENT = "$(certbot --version | grep -Eo '[0-9](\\.[0-9]+)+')";
const certbot = {
/** /**
* @param {array} pluginKeys * @param {array} pluginKeys
*/ */
installPlugins: async function (pluginKeys) { const installPlugins = async (pluginKeys) => {
let hasErrors = false; let hasErrors = false;
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
@@ -20,9 +18,11 @@ const certbot = {
return; return;
} }
batchflow(pluginKeys).sequential() batchflow(pluginKeys)
.each((i, pluginKey, next) => { .sequential()
certbot.installPlugin(pluginKey) .each((_i, pluginKey, next) => {
certbot
.installPlugin(pluginKey)
.then(() => { .then(() => {
next(); next();
}) })
@@ -36,13 +36,15 @@ const certbot = {
}) })
.end(() => { .end(() => {
if (hasErrors) { if (hasErrors) {
reject(new error.CommandError('Some plugins failed to install. Please check the logs above', 1)); reject(
new errs.CommandError("Some plugins failed to install. Please check the logs above", 1),
);
} else { } else {
resolve(); resolve();
} }
}); });
}); });
}, };
/** /**
* Installs a certbot plugin given the key for the object from * Installs a certbot plugin given the key for the object from
@@ -51,10 +53,10 @@ const certbot = {
* @param {string} pluginKey * @param {string} pluginKey
* @returns {Object} * @returns {Object}
*/ */
installPlugin: async function (pluginKey) { const installPlugin = async (pluginKey) => {
if (typeof dnsPlugins[pluginKey] === 'undefined') { if (typeof dnsPlugins[pluginKey] === "undefined") {
// throw Error(`Certbot plugin ${pluginKey} not found`); // throw Error(`Certbot plugin ${pluginKey} not found`);
throw new error.ItemNotFoundError(pluginKey); throw new errs.ItemNotFoundError(pluginKey);
} }
const plugin = dnsPlugins[pluginKey]; const plugin = dnsPlugins[pluginKey];
@@ -63,8 +65,16 @@ const certbot = {
plugin.version = plugin.version.replace(/{{certbot-version}}/g, CERTBOT_VERSION_REPLACEMENT); plugin.version = plugin.version.replace(/{{certbot-version}}/g, CERTBOT_VERSION_REPLACEMENT);
plugin.dependencies = plugin.dependencies.replace(/{{certbot-version}}/g, CERTBOT_VERSION_REPLACEMENT); plugin.dependencies = plugin.dependencies.replace(/{{certbot-version}}/g, CERTBOT_VERSION_REPLACEMENT);
const cmd = '. /opt/certbot/bin/activate && pip install --no-cache-dir ' + plugin.dependencies + ' ' + plugin.package_name + plugin.version + ' ' + ' && deactivate'; // SETUPTOOLS_USE_DISTUTILS is required for certbot plugins to install correctly
return utils.exec(cmd) // in new versions of Python
let env = Object.assign({}, process.env, { SETUPTOOLS_USE_DISTUTILS: "stdlib" });
if (typeof plugin.env === "object") {
env = Object.assign(env, plugin.env);
}
const cmd = `. /opt/certbot/bin/activate && pip install --no-cache-dir ${plugin.dependencies} ${plugin.package_name}${plugin.version} && deactivate`;
return utils
.exec(cmd, { env })
.then((result) => { .then((result) => {
logger.complete(`Installed ${pluginKey}`); logger.complete(`Installed ${pluginKey}`);
return result; return result;
@@ -72,7 +82,6 @@ const certbot = {
.catch((err) => { .catch((err) => {
throw err; throw err;
}); });
},
}; };
module.exports = certbot; export { installPlugins, installPlugin };
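
For reference, a minimal usage sketch of the two exports above; the import path and plugin keys are assumptions for illustration, not taken from the diff:

    // hypothetical caller, e.g. a settings save handler in the backend
    import { installPlugins } from "./lib/certbot.js";

    const run = async () => {
    	try {
    		await installPlugins(["cloudflare", "route53"]); // example DNS plugin keys
    	} catch (err) {
    		// CommandError("Some plugins failed to install...") when any single install fails
    		console.error(err.message);
    	}
    };
    run();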

View File

@@ -1,24 +1,29 @@
const fs = require('fs'); import fs from "node:fs";
const NodeRSA = require('node-rsa'); import NodeRSA from "node-rsa";
const logger = require('../logger').global; import { global as logger } from "../logger.js";
const keysFile = '/data/keys.json'; const keysFile = '/data/keys.json';
const mysqlEngine = 'mysql2';
const postgresEngine = 'pg';
const sqliteClientName = 'sqlite3';
let instance = null; let instance = null;
// 1. Load from config file first (not recommended anymore) // 1. Load from config file first (not recommended anymore)
// 2. Use config env variables next // 2. Use config env variables next
const configure = () => { const configure = () => {
const filename = (process.env.NODE_CONFIG_DIR || './config') + '/' + (process.env.NODE_ENV || 'default') + '.json'; const filename = `${process.env.NODE_CONFIG_DIR || "./config"}/${process.env.NODE_ENV || "default"}.json`;
if (fs.existsSync(filename)) { if (fs.existsSync(filename)) {
let configData; let configData;
try { try {
configData = require(filename); // Load this json synchronously
} catch (err) { const rawData = fs.readFileSync(filename);
configData = JSON.parse(rawData);
} catch (_) {
// do nothing // do nothing
} }
if (configData && configData.database) { if (configData?.database) {
logger.info(`Using configuration from file: ${filename}`); logger.info(`Using configuration from file: ${filename}`);
instance = configData; instance = configData;
instance.keys = getKeys(); instance.keys = getKeys();
@@ -31,10 +36,10 @@ const configure = () => {
const envMysqlName = process.env.DB_MYSQL_NAME || null; const envMysqlName = process.env.DB_MYSQL_NAME || null;
if (envMysqlHost && envMysqlUser && envMysqlName) { if (envMysqlHost && envMysqlUser && envMysqlName) {
// we have enough mysql creds to go with mysql // we have enough mysql creds to go with mysql
logger.info('Using MySQL configuration'); logger.info("Using MySQL configuration");
instance = { instance = {
database: { database: {
engine: 'mysql2', engine: mysqlEngine,
host: envMysqlHost, host: envMysqlHost,
port: process.env.DB_MYSQL_PORT || 3306, port: process.env.DB_MYSQL_PORT || 3306,
user: envMysqlUser, user: envMysqlUser,
@@ -46,18 +51,38 @@ const configure = () => {
return; return;
} }
const envSqliteFile = process.env.DB_SQLITE_FILE || '/data/database.sqlite'; const envPostgresHost = process.env.DB_POSTGRES_HOST || null;
const envPostgresUser = process.env.DB_POSTGRES_USER || null;
const envPostgresName = process.env.DB_POSTGRES_NAME || null;
if (envPostgresHost && envPostgresUser && envPostgresName) {
// we have enough postgres creds to go with postgres
logger.info("Using Postgres configuration");
instance = {
database: {
engine: postgresEngine,
host: envPostgresHost,
port: process.env.DB_POSTGRES_PORT || 5432,
user: envPostgresUser,
password: process.env.DB_POSTGRES_PASSWORD,
name: envPostgresName,
},
keys: getKeys(),
};
return;
}
const envSqliteFile = process.env.DB_SQLITE_FILE || "/data/database.sqlite";
logger.info(`Using Sqlite: ${envSqliteFile}`); logger.info(`Using Sqlite: ${envSqliteFile}`);
instance = { instance = {
database: { database: {
engine: 'knex-native', engine: "knex-native",
knex: { knex: {
client: 'sqlite3', client: sqliteClientName,
connection: { connection: {
filename: envSqliteFile filename: envSqliteFile,
},
useNullAsDefault: true,
}, },
useNullAsDefault: true
}
}, },
keys: getKeys(), keys: getKeys(),
}; };
@@ -65,54 +90,55 @@ const configure = () => {
const getKeys = () => { const getKeys = () => {
// Get keys from file // Get keys from file
logger.debug("Cheecking for keys file:", keysFile);
if (!fs.existsSync(keysFile)) { if (!fs.existsSync(keysFile)) {
generateKeys(); generateKeys();
} else if (process.env.DEBUG) { } else if (process.env.DEBUG) {
logger.info('Keys file exists OK'); logger.info("Keys file exists OK");
} }
try { try {
return require(keysFile); // Load this json keysFile synchronously and return the json object
const rawData = fs.readFileSync(keysFile);
return JSON.parse(rawData);
} catch (err) { } catch (err) {
logger.error('Could not read JWT key pair from config file: ' + keysFile, err); logger.error(`Could not read JWT key pair from config file: ${keysFile}`, err);
process.exit(1); process.exit(1);
} }
}; };
const generateKeys = () => { const generateKeys = () => {
logger.info('Creating a new JWT key pair...'); logger.info("Creating a new JWT key pair...");
// Now create the keys and save them in the config. // Now create the keys and save them in the config.
const key = new NodeRSA({ b: 2048 }); const key = new NodeRSA({ b: 2048 });
key.generateKeyPair(); key.generateKeyPair();
const keys = { const keys = {
key: key.exportKey('private').toString(), key: key.exportKey("private").toString(),
pub: key.exportKey('public').toString(), pub: key.exportKey("public").toString(),
}; };
// Write keys config // Write keys config
try { try {
fs.writeFileSync(keysFile, JSON.stringify(keys, null, 2)); fs.writeFileSync(keysFile, JSON.stringify(keys, null, 2));
} catch (err) { } catch (err) {
logger.error('Could not write JWT key pair to config file: ' + keysFile + ': ' + err.message); logger.error(`Could not write JWT key pair to config file: ${keysFile}: ${err.message}`);
process.exit(1); process.exit(1);
} }
logger.info('Wrote JWT key pair to config file: ' + keysFile); logger.info(`Wrote JWT key pair to config file: ${keysFile}`);
}; };
module.exports = {
/** /**
* *
* @param {string} key ie: 'database' or 'database.engine' * @param {string} key ie: 'database' or 'database.engine'
* @returns {boolean} * @returns {boolean}
*/ */
has: function(key) { const configHas = (key) => {
instance === null && configure(); instance === null && configure();
const keys = key.split('.'); const keys = key.split(".");
let level = instance; let level = instance;
let has = true; let has = true;
keys.forEach((keyItem) => { keys.forEach((keyItem) => {
if (typeof level[keyItem] === 'undefined') { if (typeof level[keyItem] === "undefined") {
has = false; has = false;
} else { } else {
level = level[keyItem]; level = level[keyItem];
@@ -120,7 +146,7 @@ module.exports = {
}); });
return has; return has;
}, };
/** /**
* Gets a specific key from the top level * Gets a specific key from the top level
@@ -128,67 +154,91 @@ module.exports = {
* @param {string} key * @param {string} key
* @returns {*} * @returns {*}
*/ */
get: function (key) { const configGet = (key) => {
instance === null && configure(); instance === null && configure();
if (key && typeof instance[key] !== 'undefined') { if (key && typeof instance[key] !== "undefined") {
return instance[key]; return instance[key];
} }
return instance; return instance;
}, };
/** /**
* Is this a sqlite configuration? * Is this a sqlite configuration?
* *
* @returns {boolean} * @returns {boolean}
*/ */
isSqlite: function () { const isSqlite = () => {
instance === null && configure(); instance === null && configure();
return instance.database.knex && instance.database.knex.client === 'sqlite3'; return instance.database.knex && instance.database.knex.client === sqliteClientName;
}, };
/**
* Is this a mysql configuration?
*
* @returns {boolean}
*/
const isMysql = () => {
instance === null && configure();
return instance.database.engine === mysqlEngine;
};
/**
* Is this a postgres configuration?
*
* @returns {boolean}
*/
const isPostgres = () => {
instance === null && configure();
return instance.database.engine === postgresEngine;
};
/** /**
* Are we running in debug mdoe? * Are we running in debug mdoe?
* *
* @returns {boolean} * @returns {boolean}
*/ */
debug: function () { const isDebugMode = () => !!process.env.DEBUG;
return !!process.env.DEBUG;
}, /**
* Are we running in CI?
*
* @returns {boolean}
*/
const isCI = () => process.env.CI === 'true' && process.env.DEBUG === 'true';
/** /**
* Returns a public key * Returns a public key
* *
* @returns {string} * @returns {string}
*/ */
getPublicKey: function () { const getPublicKey = () => {
instance === null && configure(); instance === null && configure();
return instance.keys.pub; return instance.keys.pub;
}, };
/** /**
* Returns a private key * Returns a private key
* *
* @returns {string} * @returns {string}
*/ */
getPrivateKey: function () { const getPrivateKey = () => {
instance === null && configure(); instance === null && configure();
return instance.keys.key; return instance.keys.key;
}, };
/** /**
* @returns {boolean} * @returns {boolean}
*/ */
useLetsencryptStaging: function () { const useLetsencryptStaging = () => !!process.env.LE_STAGING;
return !!process.env.LE_STAGING;
},
/** /**
* @returns {string|null} * @returns {string|null}
*/ */
useLetsencryptServer: function () { const useLetsencryptServer = () => {
if (process.env.LE_SERVER) { if (process.env.LE_SERVER) {
return process.env.LE_SERVER; return process.env.LE_SERVER;
} }
return null; return null;
}
}; };
export { isCI, configHas, configGet, isSqlite, isMysql, isPostgres, isDebugMode, getPrivateKey, getPublicKey, useLetsencryptStaging, useLetsencryptServer };
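
As orientation for the new Postgres branch in the configuration diff above, a minimal sketch of selecting it purely through environment variables; the values and import path are illustrative assumptions:

    // example values only; in practice these come from the container environment,
    // and this assumes no ./config/*.json file is present (the file branch wins otherwise)
    process.env.DB_POSTGRES_HOST = "db";
    process.env.DB_POSTGRES_USER = "npm";
    process.env.DB_POSTGRES_NAME = "npm";
    process.env.DB_POSTGRES_PASSWORD = "changeme";

    const { configGet, isPostgres } = await import("./lib/config.js"); // path assumed
    console.log(isPostgres());                 // true, because host, user and name are all set
    console.log(configGet("database").engine); // "pg"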

View File

@@ -1,13 +1,11 @@
-const _ = require('lodash');
-const util = require('util');
+import _ from "lodash";
-module.exports = {
-	PermissionError: function (message, previous) {
+const errs = {
+	PermissionError: function (_, previous) {
 		Error.captureStackTrace(this, this.constructor);
 		this.name = this.constructor.name;
 		this.previous = previous;
-		this.message = 'Permission Denied';
+		this.message = "Permission Denied";
 		this.public = true;
 		this.status = 403;
 	},
@@ -16,18 +14,22 @@ module.exports = {
 		Error.captureStackTrace(this, this.constructor);
 		this.name = this.constructor.name;
 		this.previous = previous;
-		this.message = 'Item Not Found - ' + id;
+		this.message = "Not Found";
+		if (id) {
+			this.message = `Not Found - ${id}`;
+		}
 		this.public = true;
 		this.status = 404;
 	},
-	AuthError: function (message, previous) {
+	AuthError: function (message, messageI18n, previous) {
 		Error.captureStackTrace(this, this.constructor);
 		this.name = this.constructor.name;
 		this.previous = previous;
 		this.message = message;
+		this.message_i18n = messageI18n;
 		this.public = true;
-		this.status = 401;
+		this.status = 400;
 	},
 	InternalError: function (message, previous) {
@@ -94,6 +96,8 @@ module.exports = {
 	},
 };
-_.forEach(module.exports, function (error) {
-	util.inherits(error, Error);
+_.forEach(errs, (err) => {
+	err.prototype = Object.create(Error.prototype);
 });
+export default errs;
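
A short sketch of how the reworked constructors behave; the calling code and the i18n key are hypothetical:

    import errs from "./lib/error.js"; // path assumed

    const notFound = new errs.ItemNotFoundError(42);
    console.log(notFound.message); // "Not Found - 42"
    console.log(notFound.status);  // 404

    const auth = new errs.AuthError("Bad credentials", "error.bad-credentials");
    console.log(auth.message_i18n); // the new i18n key; the status is now 400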

View File

@@ -1,12 +1,13 @@
-module.exports = function (req, res, next) {
+export default (req, res, next) => {
 	if (req.headers.origin) {
 		res.set({
-			'Access-Control-Allow-Origin': req.headers.origin,
-			'Access-Control-Allow-Credentials': true,
-			'Access-Control-Allow-Methods': 'OPTIONS, GET, POST',
-			'Access-Control-Allow-Headers': 'Content-Type, Cache-Control, Pragma, Expires, Authorization, X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit',
-			'Access-Control-Max-Age': 5 * 60,
-			'Access-Control-Expose-Headers': 'X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit'
+			"Access-Control-Allow-Origin": req.headers.origin,
+			"Access-Control-Allow-Credentials": true,
+			"Access-Control-Allow-Methods": "OPTIONS, GET, POST",
+			"Access-Control-Allow-Headers":
+				"Content-Type, Cache-Control, Pragma, Expires, Authorization, X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit",
+			"Access-Control-Max-Age": 5 * 60,
+			"Access-Control-Expose-Headers": "X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit",
 		});
 		next();
 	} else {

View File

@@ -1,15 +1,15 @@
-const Access = require('../access');
+import Access from "../access.js";
-module.exports = () => {
-	return function (req, res, next) {
-		res.locals.access = null;
-		let access = new Access(res.locals.token || null);
-		access.load()
-			.then(() => {
-				res.locals.access = access;
-				next();
-			})
-			.catch(next);
+export default () => {
+	return async (_, res, next) => {
+		try {
+			res.locals.access = null;
+			const access = new Access(res.locals.token || null);
+			await access.load();
+			res.locals.access = access;
+			next();
+		} catch (err) {
+			next(err);
+		}
 	};
 };

View File

@@ -1,13 +1,13 @@
-module.exports = function () {
-	return function (req, res, next) {
+export default function () {
+	return (req, res, next) => {
 		if (req.headers.authorization) {
-			let parts = req.headers.authorization.split(' ');
-			if (parts && parts[0] === 'Bearer' && parts[1]) {
+			const parts = req.headers.authorization.split(" ");
+			if (parts && parts[0] === "Bearer" && parts[1]) {
 				res.locals.token = parts[1];
 			}
 		}
 		next();
 	};
-};
+}

View File

@@ -1,7 +1,6 @@
-let _ = require('lodash');
+import _ from "lodash";
-module.exports = function (default_sort, default_offset, default_limit, max_limit) {
+export default (default_sort, default_offset, default_limit, max_limit) => {
 	/**
 	 * This will setup the req query params with filtered data and defaults
 	 *
@@ -11,34 +10,35 @@ module.exports = function (default_sort, default_offset, default_limit, max_limi
 	 *
 	 */
-	return function (req, res, next) {
-		req.query.offset = typeof req.query.limit === 'undefined' ? default_offset || 0 : parseInt(req.query.offset, 10);
-		req.query.limit = typeof req.query.limit === 'undefined' ? default_limit || 50 : parseInt(req.query.limit, 10);
+	return (req, _res, next) => {
+		req.query.offset =
+			typeof req.query.limit === "undefined" ? default_offset || 0 : Number.parseInt(req.query.offset, 10);
+		req.query.limit =
+			typeof req.query.limit === "undefined" ? default_limit || 50 : Number.parseInt(req.query.limit, 10);
 		if (max_limit && req.query.limit > max_limit) {
 			req.query.limit = max_limit;
 		}
 		// Sorting
-		let sort = typeof req.query.sort === 'undefined' ? default_sort : req.query.sort;
-		let myRegexp = /.*\.(asc|desc)$/ig;
-		let sort_array = [];
-		sort = sort.split(',');
-		_.map(sort, function (val) {
-			let matches = myRegexp.exec(val);
+		let sort = typeof req.query.sort === "undefined" ? default_sort : req.query.sort;
+		const myRegexp = /.*\.(asc|desc)$/gi;
+		const sort_array = [];
+		sort = sort.split(",");
+		_.map(sort, (val) => {
+			const matches = myRegexp.exec(val);
 			if (matches !== null) {
-				let dir = matches[1];
+				const dir = matches[1];
 				sort_array.push({
 					field: val.substr(0, val.length - (dir.length + 1)),
-					dir: dir.toLowerCase()
+					dir: dir.toLowerCase(),
 				});
 			} else {
 				sort_array.push({
 					field: val,
-					dir: 'asc'
+					dir: "asc",
 				});
 			}
 		});

View File

@@ -1,9 +1,8 @@
-module.exports = (req, res, next) => {
+export default (req, res, next) => {
 	if (req.params.user_id === 'me' && res.locals.access) {
 		req.params.user_id = res.locals.access.token.get('attrs').id;
 	} else {
-		req.params.user_id = parseInt(req.params.user_id, 10);
+		req.params.user_id = Number.parseInt(req.params.user_id, 10);
 	}
 	next();
 };

View File

@@ -1,6 +1,6 @@
-const moment = require('moment');
+import moment from "moment";
+import { ref } from "objection";
+import { isPostgres } from "./config.js";
-module.exports = {
 /**
  * Takes an expression such as 30d and returns a moment object of that date in future
@@ -20,31 +20,39 @@ module.exports = {
  * @param {String} expression
  * @returns {Object}
  */
-	parseDatePeriod: function (expression) {
-		let matches = expression.match(/^([0-9]+)(y|Q|M|w|d|h|m|s|ms)$/m);
+const parseDatePeriod = (expression) => {
+	const matches = expression.match(/^([0-9]+)(y|Q|M|w|d|h|m|s|ms)$/m);
 	if (matches) {
 		return moment().add(matches[1], matches[2]);
 	}
 	return null;
-	},
+};
-	convertIntFieldsToBool: function (obj, fields) {
-		fields.forEach(function (field) {
-			if (typeof obj[field] !== 'undefined') {
+const convertIntFieldsToBool = (obj, fields) => {
+	fields.forEach((field) => {
+		if (typeof obj[field] !== "undefined") {
 			obj[field] = obj[field] === 1;
 		}
 	});
 	return obj;
-	},
+};
-	convertBoolFieldsToInt: function (obj, fields) {
-		fields.forEach(function (field) {
-			if (typeof obj[field] !== 'undefined') {
+const convertBoolFieldsToInt = (obj, fields) => {
+	fields.forEach((field) => {
+		if (typeof obj[field] !== "undefined") {
 			obj[field] = obj[field] ? 1 : 0;
 		}
 	});
 	return obj;
-	}
-};
+};
+/**
+ * Casts a column to json if using postgres
+ *
+ * @param {string} colName
+ * @returns {string|Objection.ReferenceBuilder}
+ */
+const castJsonIfNeed = (colName) => (isPostgres() ? ref(colName).castText() : colName);
+export { parseDatePeriod, convertIntFieldsToBool, convertBoolFieldsToInt, castJsonIfNeed };
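
A brief usage sketch of the new helper exports; the column name and import path are assumptions:

    import { parseDatePeriod, convertBoolFieldsToInt, castJsonIfNeed } from "./lib/helpers.js";

    parseDatePeriod("30d");                                 // moment 30 days from now, or null if the expression doesn't match
    convertBoolFieldsToInt({ enabled: true }, ["enabled"]); // { enabled: 1 }
    // On Postgres this returns an objection ref() for the column cast to text;
    // on the other engines it returns the plain column name string.
    castJsonIfNeed("proxy_host.domain_names");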

View File

@@ -1,5 +1,6 @@
const migrate_name = 'identifier_for_migrate'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "identifier_for_migrate";
/** /**
* Migrate * Migrate
@@ -7,16 +8,15 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex, Promise) { const up = (_knex) => {
logger.info(`[${migrateName}] Migrating Up...`);
logger.info('[' + migrate_name + '] Migrating Up...');
// Create Table example: // Create Table example:
/*return knex.schema.createTable('notification', (table) => { /*
return knex.schema.createTable('notification', (table) => {
table.increments().primary(); table.increments().primary();
table.string('name').notNull(); table.string('name').notNull();
table.string('type').notNull(); table.string('type').notNull();
@@ -24,10 +24,11 @@ exports.up = function (knex, Promise) {
table.integer('modified_on').notNull(); table.integer('modified_on').notNull();
}) })
.then(function () { .then(function () {
logger.info('[' + migrate_name + '] Notification Table created'); logger.info('[' + migrateName + '] Notification Table created');
});*/ });
*/
logger.info('[' + migrate_name + '] Migrating Up Complete'); logger.info(`[${migrateName}] Migrating Up Complete`);
return Promise.resolve(true); return Promise.resolve(true);
}; };
@@ -36,20 +37,23 @@ exports.up = function (knex, Promise) {
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex, Promise) { const down = (_knex) => {
logger.info('[' + migrate_name + '] Migrating Down...'); logger.info(`[${migrateName}] Migrating Down...`);
// Drop table example: // Drop table example:
/*return knex.schema.dropTable('notification') /*
return knex.schema.dropTable('notification')
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] Notification Table dropped'); logger.info(`[${migrateName}] Notification Table dropped`);
});*/ });
*/
logger.info('[' + migrate_name + '] Migrating Down Complete'); logger.info(`[${migrateName}] Migrating Down Complete`);
return Promise.resolve(true); return Promise.resolve(true);
}; };
export { up, down };

View File

@@ -1,49 +1,52 @@
const _ = require('lodash'); import { exec as nodeExec, execFile as nodeExecFile } from "node:child_process";
const exec = require('child_process').exec; import { dirname } from "node:path";
const execFile = require('child_process').execFile; import { fileURLToPath } from "node:url";
const { Liquid } = require('liquidjs'); import { Liquid } from "liquidjs";
const logger = require('../logger').global; import _ from "lodash";
const error = require('./error'); import { global as logger } from "../logger.js";
import errs from "./error.js";
module.exports = { const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
exec: async function(cmd, options = {}) {
logger.debug('CMD:', cmd);
const exec = async (cmd, options = {}) => {
logger.debug("CMD:", cmd);
const { stdout, stderr } = await new Promise((resolve, reject) => { const { stdout, stderr } = await new Promise((resolve, reject) => {
const child = exec(cmd, options, (isError, stdout, stderr) => { const child = nodeExec(cmd, options, (isError, stdout, stderr) => {
if (isError) { if (isError) {
reject(new error.CommandError(stderr, isError)); reject(new errs.CommandError(stderr, isError));
} else { } else {
resolve({ stdout, stderr }); resolve({ stdout, stderr });
} }
}); });
child.on('error', (e) => { child.on("error", (e) => {
reject(new error.CommandError(stderr, 1, e)); reject(new errs.CommandError(stderr, 1, e));
}); });
}); });
return stdout; return stdout;
}, };
/** /**
* @param {String} cmd * @param {String} cmd
* @param {Array} args * @param {Array} args
* @param {Object|undefined} options
* @returns {Promise} * @returns {Promise}
*/ */
execFile: function (cmd, args) { const execFile = (cmd, args, options) => {
// logger.debug('CMD: ' + cmd + ' ' + (args ? args.join(' ') : '')); logger.debug(`CMD: ${cmd} ${args ? args.join(" ") : ""}`);
const opts = options || {};
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
execFile(cmd, args, function (err, stdout, /*stderr*/) { nodeExecFile(cmd, args, opts, (err, stdout, stderr) => {
if (err && typeof err === 'object') { if (err && typeof err === "object") {
reject(err); reject(new errs.CommandError(stderr, 1, err));
} else { } else {
resolve(stdout.trim()); resolve(stdout.trim());
} }
}); });
}); });
}, };
/** /**
* Used in objection query builder * Used in objection query builder
@@ -51,7 +54,7 @@ module.exports = {
* @param {Array} omissions * @param {Array} omissions
* @returns {Function} * @returns {Function}
*/ */
omitRow: function (omissions) { const omitRow = (omissions) => {
/** /**
* @param {Object} row * @param {Object} row
* @returns {Object} * @returns {Object}
@@ -59,7 +62,7 @@ module.exports = {
return (row) => { return (row) => {
return _.omit(row, omissions); return _.omit(row, omissions);
}; };
}, };
/** /**
* Used in objection query builder * Used in objection query builder
@@ -67,7 +70,7 @@ module.exports = {
* @param {Array} omissions * @param {Array} omissions
* @returns {Function} * @returns {Function}
*/ */
omitRows: function (omissions) { const omitRows = (omissions) => {
/** /**
* @param {Array} rows * @param {Array} rows
* @returns {Object} * @returns {Object}
@@ -78,14 +81,14 @@ module.exports = {
}); });
return rows; return rows;
}; };
}, };
/** /**
* @returns {Object} Liquid render engine * @returns {Object} Liquid render engine
*/ */
getRenderEngine: function () { const getRenderEngine = () => {
const renderEngine = new Liquid({ const renderEngine = new Liquid({
root: __dirname + '/../templates/' root: `${__dirname}/../templates/`,
}); });
/** /**
@@ -94,13 +97,14 @@ module.exports = {
* directive string * directive string
* address string * address string
*/ */
renderEngine.registerFilter('nginxAccessRule', (v) => { renderEngine.registerFilter("nginxAccessRule", (v) => {
if (typeof v.directive !== 'undefined' && typeof v.address !== 'undefined' && v.directive && v.address) { if (typeof v.directive !== "undefined" && typeof v.address !== "undefined" && v.directive && v.address) {
return `${v.directive} ${v.address};`; return `${v.directive} ${v.address};`;
} }
return ''; return "";
}); });
return renderEngine; return renderEngine;
}
}; };
export default { exec, execFile, omitRow, omitRows, getRenderEngine };

View File

@@ -1,5 +1,5 @@
-const Ajv = require('ajv/dist/2020');
-const error = require('../error');
+import Ajv from "ajv/dist/2020.js";
+import errs from "../error.js";
 const ajv = new Ajv({
 	verbose: true,
@@ -14,30 +14,27 @@ const ajv = new Ajv({
  * @param {Object} payload
  * @returns {Promise}
  */
-function apiValidator (schema, payload/*, description*/) {
-	return new Promise(function Promise_apiValidator (resolve, reject) {
-		if (schema === null) {
-			reject(new error.ValidationError('Schema is undefined'));
-			return;
+const apiValidator = async (schema, payload /*, description*/) => {
+	if (!schema) {
+		throw new errs.ValidationError("Schema is undefined");
 	}
-		if (typeof payload === 'undefined') {
-			reject(new error.ValidationError('Payload is undefined'));
-			return;
+	// Can't use falsy check here as valid payload could be `0` or `false`
+	if (typeof payload === "undefined") {
+		throw new errs.ValidationError("Payload is undefined");
 	}
 	const validate = ajv.compile(schema);
 	const valid = validate(payload);
 	if (valid && !validate.errors) {
-			resolve(payload);
-		} else {
-			let message = ajv.errorsText(validate.errors);
-			let err = new error.ValidationError(message);
-			err.debug = [validate.errors, payload];
-			reject(err);
-		}
-	});
+		return payload;
 	}
-module.exports = apiValidator;
+	const message = ajv.errorsText(validate.errors);
+	const err = new errs.ValidationError(message);
+	err.debug = [validate.errors, payload];
+	throw err;
+};
+export default apiValidator;
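
A small sketch of calling the now-async validator; the schema, payload and import path are made up for illustration:

    import apiValidator from "./lib/validator/api.js"; // path assumed from the "../error.js" import above

    const schema = { type: "object", required: ["name"], properties: { name: { type: "string" } } };

    try {
    	const data = await apiValidator(schema, { name: "proxy-1" });
    	// data is the validated payload
    } catch (err) {
    	// errs.ValidationError, with err.debug = [validate.errors, payload]
    }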

View File

@@ -1,7 +1,7 @@
-const _ = require('lodash');
-const Ajv = require('ajv/dist/2020');
-const error = require('../error');
-const commonDefinitions = require('../../schema/common.json');
+import Ajv from 'ajv/dist/2020.js';
+import _ from "lodash";
+import commonDefinitions from "../../schema/common.json" with { type: "json" };
+import errs from "../error.js";
 RegExp.prototype.toJSON = RegExp.prototype.toString;
@@ -11,7 +11,7 @@ const ajv = new Ajv({
 	allowUnionTypes: true,
 	coerceTypes: true,
 	strict: false,
-	schemas: [commonDefinitions]
+	schemas: [commonDefinitions],
 });
 /**
@@ -20,26 +20,26 @@ const ajv = new Ajv({
  * @param {Object} payload
  * @returns {Promise}
  */
-function validator (schema, payload) {
-	return new Promise(function (resolve, reject) {
+const validator = (schema, payload) => {
+	return new Promise((resolve, reject) => {
 		if (!payload) {
-			reject(new error.InternalValidationError('Payload is falsy'));
+			reject(new errs.InternalValidationError("Payload is falsy"));
 		} else {
 			try {
-				let validate = ajv.compile(schema);
-				let valid = validate(payload);
+				const validate = ajv.compile(schema);
+				const valid = validate(payload);
 				if (valid && !validate.errors) {
 					resolve(_.cloneDeep(payload));
 				} else {
-					let message = ajv.errorsText(validate.errors);
-					reject(new error.InternalValidationError(message));
+					const message = ajv.errorsText(validate.errors);
+					reject(new errs.InternalValidationError(message));
 				}
 			} catch (err) {
 				reject(err);
 			}
 		}
 	});
-}
-module.exports = validator;
+};
+export default validator;

View File

@@ -1,14 +1,18 @@
-const {Signale} = require('signale');
+import signale from "signale";
-module.exports = {
-	global: new Signale({scope: 'Global '}),
-	migrate: new Signale({scope: 'Migrate '}),
-	express: new Signale({scope: 'Express '}),
-	access: new Signale({scope: 'Access '}),
-	nginx: new Signale({scope: 'Nginx '}),
-	ssl: new Signale({scope: 'SSL '}),
-	certbot: new Signale({scope: 'Certbot '}),
-	import: new Signale({scope: 'Importer '}),
-	setup: new Signale({scope: 'Setup '}),
-	ip_ranges: new Signale({scope: 'IP Ranges'})
-};
+const opts = {
+	logLevel: "info",
+};
+const global = new signale.Signale({ scope: "Global ", ...opts });
+const migrate = new signale.Signale({ scope: "Migrate ", ...opts });
+const express = new signale.Signale({ scope: "Express ", ...opts });
+const access = new signale.Signale({ scope: "Access ", ...opts });
+const nginx = new signale.Signale({ scope: "Nginx ", ...opts });
+const ssl = new signale.Signale({ scope: "SSL ", ...opts });
+const certbot = new signale.Signale({ scope: "Certbot ", ...opts });
+const importer = new signale.Signale({ scope: "Importer ", ...opts });
+const setup = new signale.Signale({ scope: "Setup ", ...opts });
+const ipRanges = new signale.Signale({ scope: "IP Ranges", ...opts });
+export { global, migrate, express, access, nginx, ssl, certbot, importer, setup, ipRanges };
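
A quick sketch of consuming the named logger exports introduced above; the message text is illustrative:

    import { certbot as logger } from "./logger.js"; // path assumed (backend/logger.js)

    logger.info("Installing certbot plugins...");
    logger.complete("Installed example-plugin"); // the signale "complete" level used by the certbot diff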

View File

@@ -1,15 +1,13 @@
-const db = require('./db');
-const logger = require('./logger').migrate;
+import db from "./db.js";
+import { migrate as logger } from "./logger.js";
-module.exports = {
-	latest: function () {
-		return db.migrate.currentVersion()
-			.then((version) => {
-				logger.info('Current database version:', version);
-				return db.migrate.latest({
-					tableName: 'migrations',
-					directory: 'migrations'
-				});
-			});
-	}
-};
+const migrateUp = async () => {
+	const version = await db.migrate.currentVersion();
+	logger.info("Current database version:", version);
+	return await db.migrate.latest({
+		tableName: "migrations",
+		directory: "migrations",
+	});
+};
+export { migrateUp };

View File

@@ -1,5 +1,6 @@
const migrate_name = 'initial-schema'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "initial-schema";
/** /**
* Migrate * Migrate
@@ -7,199 +8,199 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex/*, Promise*/) { const up = (knex) => {
logger.info('[' + migrate_name + '] Migrating Up...'); logger.info(`[${migrateName}] Migrating Up...`);
return knex.schema.createTable('auth', (table) => { return knex.schema
.createTable("auth", (table) => {
table.increments().primary(); table.increments().primary();
table.dateTime('created_on').notNull(); table.dateTime("created_on").notNull();
table.dateTime('modified_on').notNull(); table.dateTime("modified_on").notNull();
table.integer('user_id').notNull().unsigned(); table.integer("user_id").notNull().unsigned();
table.string('type', 30).notNull(); table.string("type", 30).notNull();
table.string('secret').notNull(); table.string("secret").notNull();
table.json('meta').notNull(); table.json("meta").notNull();
table.integer('is_deleted').notNull().unsigned().defaultTo(0); table.integer("is_deleted").notNull().unsigned().defaultTo(0);
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] auth Table created'); logger.info(`[${migrateName}] auth Table created`);
return knex.schema.createTable('user', (table) => { return knex.schema.createTable("user", (table) => {
table.increments().primary(); table.increments().primary();
table.dateTime('created_on').notNull(); table.dateTime("created_on").notNull();
table.dateTime('modified_on').notNull(); table.dateTime("modified_on").notNull();
table.integer('is_deleted').notNull().unsigned().defaultTo(0); table.integer("is_deleted").notNull().unsigned().defaultTo(0);
table.integer('is_disabled').notNull().unsigned().defaultTo(0); table.integer("is_disabled").notNull().unsigned().defaultTo(0);
table.string('email').notNull(); table.string("email").notNull();
table.string('name').notNull(); table.string("name").notNull();
table.string('nickname').notNull(); table.string("nickname").notNull();
table.string('avatar').notNull(); table.string("avatar").notNull();
table.json('roles').notNull(); table.json("roles").notNull();
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] user Table created'); logger.info(`[${migrateName}] user Table created`);
return knex.schema.createTable('user_permission', (table) => { return knex.schema.createTable("user_permission", (table) => {
table.increments().primary(); table.increments().primary();
table.dateTime('created_on').notNull(); table.dateTime("created_on").notNull();
table.dateTime('modified_on').notNull(); table.dateTime("modified_on").notNull();
table.integer('user_id').notNull().unsigned(); table.integer("user_id").notNull().unsigned();
table.string('visibility').notNull(); table.string("visibility").notNull();
table.string('proxy_hosts').notNull(); table.string("proxy_hosts").notNull();
table.string('redirection_hosts').notNull(); table.string("redirection_hosts").notNull();
table.string('dead_hosts').notNull(); table.string("dead_hosts").notNull();
table.string('streams').notNull(); table.string("streams").notNull();
table.string('access_lists').notNull(); table.string("access_lists").notNull();
table.string('certificates').notNull(); table.string("certificates").notNull();
table.unique('user_id'); table.unique("user_id");
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] user_permission Table created'); logger.info(`[${migrateName}] user_permission Table created`);
return knex.schema.createTable('proxy_host', (table) => { return knex.schema.createTable("proxy_host", (table) => {
table.increments().primary(); table.increments().primary();
table.dateTime('created_on').notNull(); table.dateTime("created_on").notNull();
table.dateTime('modified_on').notNull(); table.dateTime("modified_on").notNull();
table.integer('owner_user_id').notNull().unsigned(); table.integer("owner_user_id").notNull().unsigned();
table.integer('is_deleted').notNull().unsigned().defaultTo(0); table.integer("is_deleted").notNull().unsigned().defaultTo(0);
table.json('domain_names').notNull(); table.json("domain_names").notNull();
table.string('forward_ip').notNull(); table.string("forward_ip").notNull();
table.integer('forward_port').notNull().unsigned(); table.integer("forward_port").notNull().unsigned();
table.integer('access_list_id').notNull().unsigned().defaultTo(0); table.integer("access_list_id").notNull().unsigned().defaultTo(0);
table.integer('certificate_id').notNull().unsigned().defaultTo(0); table.integer("certificate_id").notNull().unsigned().defaultTo(0);
table.integer('ssl_forced').notNull().unsigned().defaultTo(0); table.integer("ssl_forced").notNull().unsigned().defaultTo(0);
table.integer('caching_enabled').notNull().unsigned().defaultTo(0); table.integer("caching_enabled").notNull().unsigned().defaultTo(0);
table.integer('block_exploits').notNull().unsigned().defaultTo(0); table.integer("block_exploits").notNull().unsigned().defaultTo(0);
table.text('advanced_config').notNull().defaultTo(''); table.text("advanced_config").notNull().defaultTo("");
table.json('meta').notNull(); table.json("meta").notNull();
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] proxy_host Table created'); logger.info(`[${migrateName}] proxy_host Table created`);
return knex.schema.createTable('redirection_host', (table) => { return knex.schema.createTable("redirection_host", (table) => {
table.increments().primary(); table.increments().primary();
table.dateTime('created_on').notNull(); table.dateTime("created_on").notNull();
table.dateTime('modified_on').notNull(); table.dateTime("modified_on").notNull();
table.integer('owner_user_id').notNull().unsigned(); table.integer("owner_user_id").notNull().unsigned();
table.integer('is_deleted').notNull().unsigned().defaultTo(0); table.integer("is_deleted").notNull().unsigned().defaultTo(0);
table.json('domain_names').notNull(); table.json("domain_names").notNull();
table.string('forward_domain_name').notNull(); table.string("forward_domain_name").notNull();
table.integer('preserve_path').notNull().unsigned().defaultTo(0); table.integer("preserve_path").notNull().unsigned().defaultTo(0);
table.integer('certificate_id').notNull().unsigned().defaultTo(0); table.integer("certificate_id").notNull().unsigned().defaultTo(0);
table.integer('ssl_forced').notNull().unsigned().defaultTo(0); table.integer("ssl_forced").notNull().unsigned().defaultTo(0);
table.integer('block_exploits').notNull().unsigned().defaultTo(0); table.integer("block_exploits").notNull().unsigned().defaultTo(0);
table.text('advanced_config').notNull().defaultTo(''); table.text("advanced_config").notNull().defaultTo("");
table.json('meta').notNull(); table.json("meta").notNull();
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] redirection_host Table created'); logger.info(`[${migrateName}] redirection_host Table created`);
return knex.schema.createTable('dead_host', (table) => { return knex.schema.createTable("dead_host", (table) => {
table.increments().primary(); table.increments().primary();
table.dateTime('created_on').notNull(); table.dateTime("created_on").notNull();
table.dateTime('modified_on').notNull(); table.dateTime("modified_on").notNull();
table.integer('owner_user_id').notNull().unsigned(); table.integer("owner_user_id").notNull().unsigned();
table.integer('is_deleted').notNull().unsigned().defaultTo(0); table.integer("is_deleted").notNull().unsigned().defaultTo(0);
table.json('domain_names').notNull(); table.json("domain_names").notNull();
table.integer('certificate_id').notNull().unsigned().defaultTo(0); table.integer("certificate_id").notNull().unsigned().defaultTo(0);
table.integer('ssl_forced').notNull().unsigned().defaultTo(0); table.integer("ssl_forced").notNull().unsigned().defaultTo(0);
table.text('advanced_config').notNull().defaultTo(''); table.text("advanced_config").notNull().defaultTo("");
table.json('meta').notNull(); table.json("meta").notNull();
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] dead_host Table created'); logger.info(`[${migrateName}] dead_host Table created`);
return knex.schema.createTable('stream', (table) => { return knex.schema.createTable("stream", (table) => {
table.increments().primary(); table.increments().primary();
table.dateTime('created_on').notNull(); table.dateTime("created_on").notNull();
table.dateTime('modified_on').notNull(); table.dateTime("modified_on").notNull();
table.integer('owner_user_id').notNull().unsigned(); table.integer("owner_user_id").notNull().unsigned();
table.integer('is_deleted').notNull().unsigned().defaultTo(0); table.integer("is_deleted").notNull().unsigned().defaultTo(0);
table.integer('incoming_port').notNull().unsigned(); table.integer("incoming_port").notNull().unsigned();
table.string('forward_ip').notNull(); table.string("forward_ip").notNull();
table.integer('forwarding_port').notNull().unsigned(); table.integer("forwarding_port").notNull().unsigned();
table.integer('tcp_forwarding').notNull().unsigned().defaultTo(0); table.integer("tcp_forwarding").notNull().unsigned().defaultTo(0);
table.integer('udp_forwarding').notNull().unsigned().defaultTo(0); table.integer("udp_forwarding").notNull().unsigned().defaultTo(0);
table.json('meta').notNull(); table.json("meta").notNull();
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] stream Table created'); logger.info(`[${migrateName}] stream Table created`);
return knex.schema.createTable('access_list', (table) => { return knex.schema.createTable("access_list", (table) => {
table.increments().primary(); table.increments().primary();
table.dateTime('created_on').notNull(); table.dateTime("created_on").notNull();
table.dateTime('modified_on').notNull(); table.dateTime("modified_on").notNull();
table.integer('owner_user_id').notNull().unsigned(); table.integer("owner_user_id").notNull().unsigned();
table.integer('is_deleted').notNull().unsigned().defaultTo(0); table.integer("is_deleted").notNull().unsigned().defaultTo(0);
table.string('name').notNull(); table.string("name").notNull();
table.json('meta').notNull(); table.json("meta").notNull();
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] access_list Table created'); logger.info(`[${migrateName}] access_list Table created`);
return knex.schema.createTable('certificate', (table) => { return knex.schema.createTable("certificate", (table) => {
table.increments().primary(); table.increments().primary();
table.dateTime('created_on').notNull(); table.dateTime("created_on").notNull();
table.dateTime('modified_on').notNull(); table.dateTime("modified_on").notNull();
table.integer('owner_user_id').notNull().unsigned(); table.integer("owner_user_id").notNull().unsigned();
table.integer('is_deleted').notNull().unsigned().defaultTo(0); table.integer("is_deleted").notNull().unsigned().defaultTo(0);
table.string('provider').notNull(); table.string("provider").notNull();
table.string('nice_name').notNull().defaultTo(''); table.string("nice_name").notNull().defaultTo("");
table.json('domain_names').notNull(); table.json("domain_names").notNull();
table.dateTime('expires_on').notNull(); table.dateTime("expires_on").notNull();
table.json('meta').notNull(); table.json("meta").notNull();
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] certificate Table created'); logger.info(`[${migrateName}] certificate Table created`);
return knex.schema.createTable('access_list_auth', (table) => { return knex.schema.createTable("access_list_auth", (table) => {
table.increments().primary(); table.increments().primary();
table.dateTime('created_on').notNull(); table.dateTime("created_on").notNull();
table.dateTime('modified_on').notNull(); table.dateTime("modified_on").notNull();
table.integer('access_list_id').notNull().unsigned(); table.integer("access_list_id").notNull().unsigned();
table.string('username').notNull(); table.string("username").notNull();
table.string('password').notNull(); table.string("password").notNull();
table.json('meta').notNull(); table.json("meta").notNull();
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] access_list_auth Table created'); logger.info(`[${migrateName}] access_list_auth Table created`);
return knex.schema.createTable('audit_log', (table) => { return knex.schema.createTable("audit_log", (table) => {
table.increments().primary(); table.increments().primary();
table.dateTime('created_on').notNull(); table.dateTime("created_on").notNull();
table.dateTime('modified_on').notNull(); table.dateTime("modified_on").notNull();
table.integer('user_id').notNull().unsigned(); table.integer("user_id").notNull().unsigned();
table.string('object_type').notNull().defaultTo(''); table.string("object_type").notNull().defaultTo("");
table.integer('object_id').notNull().unsigned().defaultTo(0); table.integer("object_id").notNull().unsigned().defaultTo(0);
table.string('action').notNull(); table.string("action").notNull();
table.json('meta').notNull(); table.json("meta").notNull();
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] audit_log Table created'); logger.info(`[${migrateName}] audit_log Table created`);
}); });
}; };
/** /**
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex, Promise) { const down = (_knex) => {
logger.warn('[' + migrate_name + '] You can\'t migrate down the initial data.'); logger.warn(`[${migrateName}] You can't migrate down the initial data.`);
return Promise.resolve(true); return Promise.resolve(true);
}; };
export { up, down };

View File

@@ -1,5 +1,6 @@
const migrate_name = 'websockets'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "websockets";
/** /**
* Migrate * Migrate
@@ -7,29 +8,29 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex/*, Promise*/) { const up = (knex) => {
logger.info('[' + migrate_name + '] Migrating Up...'); logger.info(`[${migrateName}] Migrating Up...`);
return knex.schema.table('proxy_host', function (proxy_host) { return knex.schema
proxy_host.integer('allow_websocket_upgrade').notNull().unsigned().defaultTo(0); .table("proxy_host", (proxy_host) => {
proxy_host.integer("allow_websocket_upgrade").notNull().unsigned().defaultTo(0);
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] proxy_host Table altered'); logger.info(`[${migrateName}] proxy_host Table altered`);
}); });
}; };
/** /**
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex, Promise) { const down = (_knex) => {
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.'); logger.warn(`[${migrateName}] You can't migrate down this one.`);
return Promise.resolve(true); return Promise.resolve(true);
}; };
export { up, down };

View File

@@ -1,5 +1,6 @@
const migrate_name = 'forward_host'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "forward_host";
/** /**
* Migrate * Migrate
@@ -7,17 +8,17 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex/*, Promise*/) { const up = (knex) => {
logger.info('[' + migrate_name + '] Migrating Up...'); logger.info(`[${migrateName}] Migrating Up...`);
return knex.schema.table('proxy_host', function (proxy_host) { return knex.schema
proxy_host.renameColumn('forward_ip', 'forward_host'); .table("proxy_host", (proxy_host) => {
proxy_host.renameColumn("forward_ip", "forward_host");
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] proxy_host Table altered'); logger.info(`[${migrateName}] proxy_host Table altered`);
}); });
}; };
@@ -25,10 +26,11 @@ exports.up = function (knex/*, Promise*/) {
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex, Promise) { const down = (_knex) => {
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.'); logger.warn(`[${migrateName}] You can't migrate down this one.`);
return Promise.resolve(true); return Promise.resolve(true);
}; };
export { up, down };

View File

@@ -1,5 +1,6 @@
const migrate_name = 'http2_support'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "http2_support";
/** /**
* Migrate * Migrate
@@ -7,31 +8,31 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex/*, Promise*/) { const up = (knex) => {
logger.info('[' + migrate_name + '] Migrating Up...'); logger.info(`[${migrateName}] Migrating Up...`);
return knex.schema.table('proxy_host', function (proxy_host) { return knex.schema
proxy_host.integer('http2_support').notNull().unsigned().defaultTo(0); .table("proxy_host", (proxy_host) => {
proxy_host.integer("http2_support").notNull().unsigned().defaultTo(0);
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] proxy_host Table altered'); logger.info(`[${migrateName}] proxy_host Table altered`);
return knex.schema.table('redirection_host', function (redirection_host) { return knex.schema.table("redirection_host", (redirection_host) => {
redirection_host.integer('http2_support').notNull().unsigned().defaultTo(0); redirection_host.integer("http2_support").notNull().unsigned().defaultTo(0);
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] redirection_host Table altered'); logger.info(`[${migrateName}] redirection_host Table altered`);
return knex.schema.table('dead_host', function (dead_host) { return knex.schema.table("dead_host", (dead_host) => {
dead_host.integer('http2_support').notNull().unsigned().defaultTo(0); dead_host.integer("http2_support").notNull().unsigned().defaultTo(0);
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] dead_host Table altered'); logger.info(`[${migrateName}] dead_host Table altered`);
}); });
}; };
@@ -39,11 +40,11 @@ exports.up = function (knex/*, Promise*/) {
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex, Promise) { const down = (_knex) => {
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.'); logger.warn(`[${migrateName}] You can't migrate down this one.`);
return Promise.resolve(true); return Promise.resolve(true);
}; };
export { up, down };

View File

@@ -1,5 +1,6 @@
const migrate_name = 'forward_scheme'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "forward_scheme";
/** /**
* Migrate * Migrate
@@ -7,17 +8,17 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex/*, Promise*/) { const up = (knex) => {
logger.info('[' + migrate_name + '] Migrating Up...'); logger.info(`[${migrateName}] Migrating Up...`);
return knex.schema.table('proxy_host', function (proxy_host) { return knex.schema
proxy_host.string('forward_scheme').notNull().defaultTo('http'); .table("proxy_host", (proxy_host) => {
proxy_host.string("forward_scheme").notNull().defaultTo("http");
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] proxy_host Table altered'); logger.info(`[${migrateName}] proxy_host Table altered`);
}); });
}; };
@@ -25,10 +26,11 @@ exports.up = function (knex/*, Promise*/) {
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex, Promise) { const down = (_knex) => {
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.'); logger.warn(`[${migrateName}] You can't migrate down this one.`);
return Promise.resolve(true); return Promise.resolve(true);
}; };
export { up, down };

View File

@@ -1,5 +1,6 @@
const migrate_name = 'disabled'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "disabled";
/** /**
* Migrate * Migrate
@@ -7,38 +8,38 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex/*, Promise*/) { const up = (knex) => {
logger.info('[' + migrate_name + '] Migrating Up...'); logger.info(`[${migrateName}] Migrating Up...`);
return knex.schema.table('proxy_host', function (proxy_host) { return knex.schema
proxy_host.integer('enabled').notNull().unsigned().defaultTo(1); .table("proxy_host", (proxy_host) => {
proxy_host.integer("enabled").notNull().unsigned().defaultTo(1);
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] proxy_host Table altered'); logger.info(`[${migrateName}] proxy_host Table altered`);
return knex.schema.table('redirection_host', function (redirection_host) { return knex.schema.table("redirection_host", (redirection_host) => {
redirection_host.integer('enabled').notNull().unsigned().defaultTo(1); redirection_host.integer("enabled").notNull().unsigned().defaultTo(1);
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] redirection_host Table altered'); logger.info(`[${migrateName}] redirection_host Table altered`);
return knex.schema.table('dead_host', function (dead_host) { return knex.schema.table("dead_host", (dead_host) => {
dead_host.integer('enabled').notNull().unsigned().defaultTo(1); dead_host.integer("enabled").notNull().unsigned().defaultTo(1);
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] dead_host Table altered'); logger.info(`[${migrateName}] dead_host Table altered`);
return knex.schema.table('stream', function (stream) { return knex.schema.table("stream", (stream) => {
stream.integer('enabled').notNull().unsigned().defaultTo(1); stream.integer("enabled").notNull().unsigned().defaultTo(1);
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] stream Table altered'); logger.info(`[${migrateName}] stream Table altered`);
}); });
}; };
@@ -46,10 +47,11 @@ exports.up = function (knex/*, Promise*/) {
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex, Promise) { const down = (_knex) => {
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.'); logger.warn(`[${migrateName}] You can't migrate down this one.`);
return Promise.resolve(true); return Promise.resolve(true);
}; };
export { up, down };

View File

@@ -1,5 +1,6 @@
const migrate_name = 'custom_locations'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "custom_locations";
/** /**
* Migrate * Migrate
@@ -8,17 +9,17 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex/*, Promise*/) { const up = (knex) => {
logger.info('[' + migrate_name + '] Migrating Up...'); logger.info(`[${migrateName}] Migrating Up...`);
return knex.schema.table('proxy_host', function (proxy_host) { return knex.schema
proxy_host.json('locations'); .table("proxy_host", (proxy_host) => {
proxy_host.json("locations");
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] proxy_host Table altered'); logger.info(`[${migrateName}] proxy_host Table altered`);
}); });
}; };
@@ -26,10 +27,11 @@ exports.up = function (knex/*, Promise*/) {
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex, Promise) { const down = (_knex) => {
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.'); logger.warn(`[${migrateName}] You can't migrate down this one.`);
return Promise.resolve(true); return Promise.resolve(true);
}; };
export { up, down };

View File

@@ -1,5 +1,6 @@
const migrate_name = 'hsts'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "hsts";
/** /**
* Migrate * Migrate
@@ -7,34 +8,34 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex/*, Promise*/) { const up = (knex) => {
logger.info('[' + migrate_name + '] Migrating Up...'); logger.info(`[${migrateName}] Migrating Up...`);
return knex.schema.table('proxy_host', function (proxy_host) { return knex.schema
proxy_host.integer('hsts_enabled').notNull().unsigned().defaultTo(0); .table("proxy_host", (proxy_host) => {
proxy_host.integer('hsts_subdomains').notNull().unsigned().defaultTo(0); proxy_host.integer("hsts_enabled").notNull().unsigned().defaultTo(0);
proxy_host.integer("hsts_subdomains").notNull().unsigned().defaultTo(0);
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] proxy_host Table altered'); logger.info(`[${migrateName}] proxy_host Table altered`);
return knex.schema.table('redirection_host', function (redirection_host) { return knex.schema.table("redirection_host", (redirection_host) => {
redirection_host.integer('hsts_enabled').notNull().unsigned().defaultTo(0); redirection_host.integer("hsts_enabled").notNull().unsigned().defaultTo(0);
redirection_host.integer('hsts_subdomains').notNull().unsigned().defaultTo(0); redirection_host.integer("hsts_subdomains").notNull().unsigned().defaultTo(0);
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] redirection_host Table altered'); logger.info(`[${migrateName}] redirection_host Table altered`);
return knex.schema.table('dead_host', function (dead_host) { return knex.schema.table("dead_host", (dead_host) => {
dead_host.integer('hsts_enabled').notNull().unsigned().defaultTo(0); dead_host.integer("hsts_enabled").notNull().unsigned().defaultTo(0);
dead_host.integer('hsts_subdomains').notNull().unsigned().defaultTo(0); dead_host.integer("hsts_subdomains").notNull().unsigned().defaultTo(0);
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] dead_host Table altered'); logger.info(`[${migrateName}] dead_host Table altered`);
}); });
}; };
@@ -42,10 +43,11 @@ exports.up = function (knex/*, Promise*/) {
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex, Promise) { const down = (_knex) => {
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.'); logger.warn(`[${migrateName}] You can't migrate down this one.`);
return Promise.resolve(true); return Promise.resolve(true);
}; };
export { up, down };

View File

@@ -1,5 +1,6 @@
const migrate_name = 'settings'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "settings";
/** /**
* Migrate * Migrate
@@ -7,11 +8,10 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex/*, Promise*/) { const up = (knex) => {
logger.info('[' + migrate_name + '] Migrating Up...'); logger.info(`[${migrateName}] Migrating Up...`);
return knex.schema.createTable('setting', (table) => { return knex.schema.createTable('setting', (table) => {
table.string('id').notNull().primary(); table.string('id').notNull().primary();
@@ -21,7 +21,7 @@ exports.up = function (knex/*, Promise*/) {
table.json('meta').notNull(); table.json('meta').notNull();
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] setting Table created'); logger.info(`[${migrateName}] setting Table created`);
}); });
}; };
@@ -29,10 +29,11 @@ exports.up = function (knex/*, Promise*/) {
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex, Promise) { const down = (_knex) => {
logger.warn('[' + migrate_name + '] You can\'t migrate down the initial data.'); logger.warn(`[${migrateName}] You can't migrate down the initial data.`);
return Promise.resolve(true); return Promise.resolve(true);
}; };
export { up, down };

View File

@@ -1,5 +1,6 @@
const migrate_name = 'access_list_client'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "access_list_client";
/** /**
* Migrate * Migrate
@@ -7,32 +8,30 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex/*, Promise*/) { const up = (knex) => {
logger.info(`[${migrateName}] Migrating Up...`);
logger.info('[' + migrate_name + '] Migrating Up...'); return knex.schema
.createTable("access_list_client", (table) => {
return knex.schema.createTable('access_list_client', (table) => {
table.increments().primary(); table.increments().primary();
table.dateTime('created_on').notNull(); table.dateTime("created_on").notNull();
table.dateTime('modified_on').notNull(); table.dateTime("modified_on").notNull();
table.integer('access_list_id').notNull().unsigned(); table.integer("access_list_id").notNull().unsigned();
table.string('address').notNull(); table.string("address").notNull();
table.string('directive').notNull(); table.string("directive").notNull();
table.json('meta').notNull(); table.json("meta").notNull();
}) })
.then(function () { .then(() => {
logger.info('[' + migrate_name + '] access_list_client Table created'); logger.info(`[${migrateName}] access_list_client Table created`);
return knex.schema.table('access_list', function (access_list) { return knex.schema.table("access_list", (access_list) => {
access_list.integer('satify_any').notNull().defaultTo(0); access_list.integer("satify_any").notNull().defaultTo(0);
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] access_list Table altered'); logger.info(`[${migrateName}] access_list Table altered`);
}); });
}; };
@@ -40,14 +39,14 @@ exports.up = function (knex/*, Promise*/) {
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex/*, Promise*/) { const down = (knex) => {
logger.info('[' + migrate_name + '] Migrating Down...'); logger.info(`[${migrateName}] Migrating Down...`);
return knex.schema.dropTable('access_list_client') return knex.schema.dropTable("access_list_client").then(() => {
.then(() => { logger.info(`[${migrateName}] access_list_client Table dropped`);
logger.info('[' + migrate_name + '] access_list_client Table dropped');
}); });
}; };
export { up, down };

View File

@@ -1,5 +1,6 @@
const migrate_name = 'access_list_client_fix'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "access_list_client_fix";
/** /**
* Migrate * Migrate
@@ -7,17 +8,17 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex/*, Promise*/) { const up = (knex) => {
logger.info('[' + migrate_name + '] Migrating Up...'); logger.info(`[${migrateName}] Migrating Up...`);
return knex.schema.table('access_list', function (access_list) { return knex.schema
access_list.renameColumn('satify_any', 'satisfy_any'); .table("access_list", (access_list) => {
access_list.renameColumn("satify_any", "satisfy_any");
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] access_list Table altered'); logger.info(`[${migrateName}] access_list Table altered`);
}); });
}; };
@@ -25,10 +26,11 @@ exports.up = function (knex/*, Promise*/) {
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex, Promise) { const down = (_knex) => {
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.'); logger.warn(`[${migrateName}] You can't migrate down this one.`);
return Promise.resolve(true); return Promise.resolve(true);
}; };
export { up, down };

View File

@@ -1,5 +1,6 @@
const migrate_name = 'pass_auth'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "pass_auth";
/** /**
* Migrate * Migrate
@@ -7,18 +8,17 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex/*, Promise*/) { const up = (knex) => {
logger.info(`[${migrateName}] Migrating Up...`);
logger.info('[' + migrate_name + '] Migrating Up...'); return knex.schema
.table("access_list", (access_list) => {
return knex.schema.table('access_list', function (access_list) { access_list.integer("pass_auth").notNull().defaultTo(1);
access_list.integer('pass_auth').notNull().defaultTo(1);
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] access_list Table altered'); logger.info(`[${migrateName}] access_list Table altered`);
}); });
}; };
@@ -26,16 +26,18 @@ exports.up = function (knex/*, Promise*/) {
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex/*, Promise*/) { const down = (knex) => {
logger.info('[' + migrate_name + '] Migrating Down...'); logger.info(`[${migrateName}] Migrating Down...`);
return knex.schema.table('access_list', function (access_list) { return knex.schema
access_list.dropColumn('pass_auth'); .table("access_list", (access_list) => {
access_list.dropColumn("pass_auth");
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] access_list pass_auth Column dropped'); logger.info(`[${migrateName}] access_list pass_auth Column dropped`);
}); });
}; };
export { up, down };

View File

@@ -1,5 +1,6 @@
const migrate_name = 'redirection_scheme'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "redirection_scheme";
/** /**
* Migrate * Migrate
@@ -7,18 +8,17 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex/*, Promise*/) { const up = (knex) => {
logger.info(`[${migrateName}] Migrating Up...`);
logger.info('[' + migrate_name + '] Migrating Up...'); return knex.schema
.table("redirection_host", (table) => {
return knex.schema.table('redirection_host', (table) => { table.string("forward_scheme").notNull().defaultTo("$scheme");
table.string('forward_scheme').notNull().defaultTo('$scheme');
}) })
.then(function () { .then(() => {
logger.info('[' + migrate_name + '] redirection_host Table altered'); logger.info(`[${migrateName}] redirection_host Table altered`);
}); });
}; };
@@ -26,16 +26,18 @@ exports.up = function (knex/*, Promise*/) {
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex/*, Promise*/) { const down = (knex) => {
logger.info('[' + migrate_name + '] Migrating Down...'); logger.info(`[${migrateName}] Migrating Down...`);
return knex.schema.table('redirection_host', (table) => { return knex.schema
table.dropColumn('forward_scheme'); .table("redirection_host", (table) => {
table.dropColumn("forward_scheme");
}) })
.then(function () { .then(() => {
logger.info('[' + migrate_name + '] redirection_host Table altered'); logger.info(`[${migrateName}] redirection_host Table altered`);
}); });
}; };
export { up, down };

View File

@@ -1,5 +1,6 @@
const migrate_name = 'redirection_status_code'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "redirection_status_code";
/** /**
* Migrate * Migrate
@@ -7,18 +8,17 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex/*, Promise*/) { const up = (knex) => {
logger.info(`[${migrateName}] Migrating Up...`);
logger.info('[' + migrate_name + '] Migrating Up...'); return knex.schema
.table("redirection_host", (table) => {
return knex.schema.table('redirection_host', (table) => { table.integer("forward_http_code").notNull().unsigned().defaultTo(302);
table.integer('forward_http_code').notNull().unsigned().defaultTo(302);
}) })
.then(function () { .then(() => {
logger.info('[' + migrate_name + '] redirection_host Table altered'); logger.info(`[${migrateName}] redirection_host Table altered`);
}); });
}; };
@@ -26,16 +26,18 @@ exports.up = function (knex/*, Promise*/) {
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex/*, Promise*/) { const down = (knex) => {
logger.info('[' + migrate_name + '] Migrating Down...'); logger.info(`[${migrateName}] Migrating Down...`);
return knex.schema.table('redirection_host', (table) => { return knex.schema
table.dropColumn('forward_http_code'); .table("redirection_host", (table) => {
table.dropColumn("forward_http_code");
}) })
.then(function () { .then(() => {
logger.info('[' + migrate_name + '] redirection_host Table altered'); logger.info(`[${migrateName}] redirection_host Table altered`);
}); });
}; };
export { up, down };

View File

@@ -1,5 +1,6 @@
const migrate_name = 'stream_domain'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "stream_domain";
/** /**
* Migrate * Migrate
@@ -7,17 +8,17 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex/*, Promise*/) { const up = (knex) => {
logger.info('[' + migrate_name + '] Migrating Up...'); logger.info(`[${migrateName}] Migrating Up...`);
return knex.schema.table('stream', (table) => { return knex.schema
table.renameColumn('forward_ip', 'forwarding_host'); .table("stream", (table) => {
table.renameColumn("forward_ip", "forwarding_host");
}) })
.then(function () { .then(() => {
logger.info('[' + migrate_name + '] stream Table altered'); logger.info(`[${migrateName}] stream Table altered`);
}); });
}; };
@@ -25,16 +26,18 @@ exports.up = function (knex/*, Promise*/) {
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex/*, Promise*/) { const down = (knex) => {
logger.info('[' + migrate_name + '] Migrating Down...'); logger.info(`[${migrateName}] Migrating Down...`);
return knex.schema.table('stream', (table) => { return knex.schema
table.renameColumn('forwarding_host', 'forward_ip'); .table("stream", (table) => {
table.renameColumn("forwarding_host", "forward_ip");
}) })
.then(function () { .then(() => {
logger.info('[' + migrate_name + '] stream Table altered'); logger.info(`[${migrateName}] stream Table altered`);
}); });
}; };
export { up, down };

View File

@@ -1,17 +1,19 @@
const migrate_name = 'stream_domain'; import internalNginx from "../internal/nginx.js";
const logger = require('../logger').migrate; import { migrate as logger } from "../logger.js";
const internalNginx = require('../internal/nginx');
const migrateName = "stream_domain";
async function regenerateDefaultHost(knex) { async function regenerateDefaultHost(knex) {
const row = await knex('setting').select('*').where('id', 'default-site').first(); const row = await knex("setting").select("*").where("id", "default-site").first();
if (!row) { if (!row) {
return Promise.resolve(); return Promise.resolve();
} }
return internalNginx.deleteConfig('default') return internalNginx
.deleteConfig("default")
.then(() => { .then(() => {
return internalNginx.generateConfig('default', row); return internalNginx.generateConfig("default", row);
}) })
.then(() => { .then(() => {
return internalNginx.test(); return internalNginx.test();
@@ -27,11 +29,10 @@ async function regenerateDefaultHost(knex) {
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex) { const up = (knex) => {
logger.info('[' + migrate_name + '] Migrating Up...'); logger.info(`[${migrateName}] Migrating Up...`);
return regenerateDefaultHost(knex); return regenerateDefaultHost(knex);
}; };
@@ -40,11 +41,12 @@ exports.up = function (knex) {
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex) { const down = (knex) => {
logger.info('[' + migrate_name + '] Migrating Down...'); logger.info(`[${migrateName}] Migrating Down...`);
return regenerateDefaultHost(knex); return regenerateDefaultHost(knex);
}; };
export { up, down };

View File

@@ -0,0 +1,43 @@
import { migrate as logger } from "../logger.js";
const migrateName = "stream_ssl";
/**
* Migrate
*
* @see http://knexjs.org/#Schema
*
* @param {Object} knex
* @returns {Promise}
*/
const up = (knex) => {
logger.info(`[${migrateName}] Migrating Up...`);
return knex.schema
.table("stream", (table) => {
table.integer("certificate_id").notNull().unsigned().defaultTo(0);
})
.then(() => {
logger.info(`[${migrateName}] stream Table altered`);
});
};
/**
* Undo Migrate
*
* @param {Object} knex
* @returns {Promise}
*/
const down = (knex) => {
logger.info(`[${migrateName}] Migrating Down...`);
return knex.schema
.table("stream", (table) => {
table.dropColumn("certificate_id");
})
.then(() => {
logger.info(`[${migrateName}] stream Table altered`);
});
};
export { up, down };

View File

@@ -1,21 +1,18 @@
// Objection Docs: // Objection Docs:
// http://vincit.github.io/objection.js/ // http://vincit.github.io/objection.js/
const db = require('../db'); import { Model } from "objection";
const helpers = require('../lib/helpers'); import db from "../db.js";
const Model = require('objection').Model; import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
const User = require('./user'); import AccessListAuth from "./access_list_auth.js";
const AccessListAuth = require('./access_list_auth'); import AccessListClient from "./access_list_client.js";
const AccessListClient = require('./access_list_client'); import now from "./now_helper.js";
const now = require('./now_helper'); import ProxyHostModel from "./proxy_host.js";
import User from "./user.js";
Model.knex(db); Model.knex(db);
const boolFields = [ const boolFields = ["is_deleted", "satisfy_any", "pass_auth"];
'is_deleted',
'satisfy_any',
'pass_auth',
];
class AccessList extends Model { class AccessList extends Model {
$beforeInsert() { $beforeInsert() {
@@ -23,7 +20,7 @@ class AccessList extends Model {
this.modified_on = now(); this.modified_on = now();
// Default for meta // Default for meta
if (typeof this.meta === 'undefined') { if (typeof this.meta === "undefined") {
this.meta = {}; this.meta = {};
} }
} }
@@ -33,71 +30,69 @@ class AccessList extends Model {
} }
$parseDatabaseJson(json) { $parseDatabaseJson(json) {
json = super.$parseDatabaseJson(json); const thisJson = super.$parseDatabaseJson(json);
return helpers.convertIntFieldsToBool(json, boolFields); return convertIntFieldsToBool(thisJson, boolFields);
} }
$formatDatabaseJson(json) { $formatDatabaseJson(json) {
json = helpers.convertBoolFieldsToInt(json, boolFields); const thisJson = convertBoolFieldsToInt(json, boolFields);
return super.$formatDatabaseJson(json); return super.$formatDatabaseJson(thisJson);
} }
static get name() { static get name() {
return 'AccessList'; return "AccessList";
} }
static get tableName() { static get tableName() {
return 'access_list'; return "access_list";
} }
static get jsonAttributes() { static get jsonAttributes() {
return ['meta']; return ["meta"];
} }
static get relationMappings() { static get relationMappings() {
const ProxyHost = require('./proxy_host');
return { return {
owner: { owner: {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: User, modelClass: User,
join: { join: {
from: 'access_list.owner_user_id', from: "access_list.owner_user_id",
to: 'user.id' to: "user.id",
},
modify: (qb) => {
qb.where("user.is_deleted", 0);
}, },
modify: function (qb) {
qb.where('user.is_deleted', 0);
}
}, },
items: { items: {
relation: Model.HasManyRelation, relation: Model.HasManyRelation,
modelClass: AccessListAuth, modelClass: AccessListAuth,
join: { join: {
from: 'access_list.id', from: "access_list.id",
to: 'access_list_auth.access_list_id' to: "access_list_auth.access_list_id",
} },
}, },
clients: { clients: {
relation: Model.HasManyRelation, relation: Model.HasManyRelation,
modelClass: AccessListClient, modelClass: AccessListClient,
join: { join: {
from: 'access_list.id', from: "access_list.id",
to: 'access_list_client.access_list_id' to: "access_list_client.access_list_id",
} },
}, },
proxy_hosts: { proxy_hosts: {
relation: Model.HasManyRelation, relation: Model.HasManyRelation,
modelClass: ProxyHost, modelClass: ProxyHostModel,
join: { join: {
from: 'access_list.id', from: "access_list.id",
to: 'proxy_host.access_list_id' to: "proxy_host.access_list_id",
},
modify: (qb) => {
qb.where("proxy_host.is_deleted", 0);
},
}, },
modify: function (qb) {
qb.where('proxy_host.is_deleted', 0);
}
}
}; };
} }
} }
module.exports = AccessList; export default AccessList;

View File

@@ -1,9 +1,10 @@
// Objection Docs: // Objection Docs:
// http://vincit.github.io/objection.js/ // http://vincit.github.io/objection.js/
const db = require('../db'); import { Model } from "objection";
const Model = require('objection').Model; import db from "../db.js";
const now = require('./now_helper'); import accessListModel from "./access_list.js";
import now from "./now_helper.js";
Model.knex(db); Model.knex(db);
@@ -13,7 +14,7 @@ class AccessListAuth extends Model {
this.modified_on = now(); this.modified_on = now();
// Default for meta // Default for meta
if (typeof this.meta === 'undefined') { if (typeof this.meta === "undefined") {
this.meta = {}; this.meta = {};
} }
} }
@@ -23,32 +24,32 @@ class AccessListAuth extends Model {
} }
static get name() { static get name() {
return 'AccessListAuth'; return "AccessListAuth";
} }
static get tableName() { static get tableName() {
return 'access_list_auth'; return "access_list_auth";
} }
static get jsonAttributes() { static get jsonAttributes() {
return ['meta']; return ["meta"];
} }
static get relationMappings() { static get relationMappings() {
return { return {
access_list: { access_list: {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: require('./access_list'), modelClass: accessListModel,
join: { join: {
from: 'access_list_auth.access_list_id', from: "access_list_auth.access_list_id",
to: 'access_list.id' to: "access_list.id",
},
modify: (qb) => {
qb.where("access_list.is_deleted", 0);
},
}, },
modify: function (qb) {
qb.where('access_list.is_deleted', 0);
}
}
}; };
} }
} }
module.exports = AccessListAuth; export default AccessListAuth;

View File

@@ -1,9 +1,10 @@
// Objection Docs: // Objection Docs:
// http://vincit.github.io/objection.js/ // http://vincit.github.io/objection.js/
const db = require('../db'); import { Model } from "objection";
const Model = require('objection').Model; import db from "../db.js";
const now = require('./now_helper'); import accessListModel from "./access_list.js";
import now from "./now_helper.js";
Model.knex(db); Model.knex(db);
@@ -13,7 +14,7 @@ class AccessListClient extends Model {
this.modified_on = now(); this.modified_on = now();
// Default for meta // Default for meta
if (typeof this.meta === 'undefined') { if (typeof this.meta === "undefined") {
this.meta = {}; this.meta = {};
} }
} }
@@ -23,32 +24,32 @@ class AccessListClient extends Model {
} }
static get name() { static get name() {
return 'AccessListClient'; return "AccessListClient";
} }
static get tableName() { static get tableName() {
return 'access_list_client'; return "access_list_client";
} }
static get jsonAttributes() { static get jsonAttributes() {
return ['meta']; return ["meta"];
} }
static get relationMappings() { static get relationMappings() {
return { return {
access_list: { access_list: {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: require('./access_list'), modelClass: accessListModel,
join: { join: {
from: 'access_list_client.access_list_id', from: "access_list_client.access_list_id",
to: 'access_list.id' to: "access_list.id",
},
modify: (qb) => {
qb.where("access_list.is_deleted", 0);
},
}, },
modify: function (qb) {
qb.where('access_list.is_deleted', 0);
}
}
}; };
} }
} }
module.exports = AccessListClient; export default AccessListClient;

View File

@@ -1,10 +1,10 @@
// Objection Docs: // Objection Docs:
// http://vincit.github.io/objection.js/ // http://vincit.github.io/objection.js/
const db = require('../db'); import { Model } from "objection";
const Model = require('objection').Model; import db from "../db.js";
const User = require('./user'); import now from "./now_helper.js";
const now = require('./now_helper'); import User from "./user.js";
Model.knex(db); Model.knex(db);
@@ -14,7 +14,7 @@ class AuditLog extends Model {
this.modified_on = now(); this.modified_on = now();
// Default for meta // Default for meta
if (typeof this.meta === 'undefined') { if (typeof this.meta === "undefined") {
this.meta = {}; this.meta = {};
} }
} }
@@ -24,15 +24,15 @@ class AuditLog extends Model {
} }
static get name() { static get name() {
return 'AuditLog'; return "AuditLog";
} }
static get tableName() { static get tableName() {
return 'audit_log'; return "audit_log";
} }
static get jsonAttributes() { static get jsonAttributes() {
return ['meta']; return ["meta"];
} }
static get relationMappings() { static get relationMappings() {
@@ -41,12 +41,12 @@ class AuditLog extends Model {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: User, modelClass: User,
join: { join: {
from: 'audit_log.user_id', from: "audit_log.user_id",
to: 'user.id' to: "user.id",
} },
} },
}; };
} }
} }
module.exports = AuditLog; export default AuditLog;

View File

@@ -1,27 +1,21 @@
// Objection Docs: // Objection Docs:
// http://vincit.github.io/objection.js/ // http://vincit.github.io/objection.js/
const bcrypt = require('bcrypt'); import bcrypt from "bcrypt";
const db = require('../db'); import { Model } from "objection";
const helpers = require('../lib/helpers'); import db from "../db.js";
const Model = require('objection').Model; import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
const User = require('./user'); import now from "./now_helper.js";
const now = require('./now_helper'); import User from "./user.js";
Model.knex(db); Model.knex(db);
const boolFields = [ const boolFields = ["is_deleted"];
'is_deleted',
];
function encryptPassword() { function encryptPassword() {
/* jshint -W040 */ if (this.type === "password" && this.secret) {
let _this = this; return bcrypt.hash(this.secret, 13).then((hash) => {
this.secret = hash;
if (_this.type === 'password' && _this.secret) {
return bcrypt.hash(_this.secret, 13)
.then(function (hash) {
_this.secret = hash;
}); });
} }
@@ -34,7 +28,7 @@ class Auth extends Model {
this.modified_on = now(); this.modified_on = now();
// Default for meta // Default for meta
if (typeof this.meta === 'undefined') { if (typeof this.meta === "undefined") {
this.meta = {}; this.meta = {};
} }
@@ -47,13 +41,13 @@ class Auth extends Model {
} }
$parseDatabaseJson(json) { $parseDatabaseJson(json) {
json = super.$parseDatabaseJson(json); const thisJson = super.$parseDatabaseJson(json);
return helpers.convertIntFieldsToBool(json, boolFields); return convertIntFieldsToBool(thisJson, boolFields);
} }
$formatDatabaseJson(json) { $formatDatabaseJson(json) {
json = helpers.convertBoolFieldsToInt(json, boolFields); const thisJson = convertBoolFieldsToInt(json, boolFields);
return super.$formatDatabaseJson(json); return super.$formatDatabaseJson(thisJson);
} }
/** /**
@@ -67,15 +61,15 @@ class Auth extends Model {
} }
static get name() { static get name() {
return 'Auth'; return "Auth";
} }
static get tableName() { static get tableName() {
return 'auth'; return "auth";
} }
static get jsonAttributes() { static get jsonAttributes() {
return ['meta']; return ["meta"];
} }
static get relationMappings() { static get relationMappings() {
@@ -84,15 +78,15 @@ class Auth extends Model {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: User, modelClass: User,
join: { join: {
from: 'auth.user_id', from: "auth.user_id",
to: 'user.id' to: "user.id",
}, },
filter: { filter: {
is_deleted: 0 is_deleted: 0,
} },
} },
}; };
} }
} }
module.exports = Auth; export default Auth;

View File

@@ -1,17 +1,18 @@
// Objection Docs: // Objection Docs:
// http://vincit.github.io/objection.js/ // http://vincit.github.io/objection.js/
const db = require('../db'); import { Model } from "objection";
const helpers = require('../lib/helpers'); import db from "../db.js";
const Model = require('objection').Model; import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
const User = require('./user'); import deadHostModel from "./dead_host.js";
const now = require('./now_helper'); import now from "./now_helper.js";
import proxyHostModel from "./proxy_host.js";
import redirectionHostModel from "./redirection_host.js";
import userModel from "./user.js";
Model.knex(db); Model.knex(db);
const boolFields = [ const boolFields = ["is_deleted"];
'is_deleted',
];
class Certificate extends Model { class Certificate extends Model {
$beforeInsert() { $beforeInsert() {
@@ -19,17 +20,17 @@ class Certificate extends Model {
this.modified_on = now(); this.modified_on = now();
// Default for expires_on // Default for expires_on
if (typeof this.expires_on === 'undefined') { if (typeof this.expires_on === "undefined") {
this.expires_on = now(); this.expires_on = now();
} }
// Default for domain_names // Default for domain_names
if (typeof this.domain_names === 'undefined') { if (typeof this.domain_names === "undefined") {
this.domain_names = []; this.domain_names = [];
} }
// Default for meta // Default for meta
if (typeof this.meta === 'undefined') { if (typeof this.meta === "undefined") {
this.meta = {}; this.meta = {};
} }
@@ -40,48 +41,81 @@ class Certificate extends Model {
this.modified_on = now(); this.modified_on = now();
// Sort domain_names // Sort domain_names
if (typeof this.domain_names !== 'undefined') { if (typeof this.domain_names !== "undefined") {
this.domain_names.sort(); this.domain_names.sort();
} }
} }
$parseDatabaseJson(json) { $parseDatabaseJson(json) {
json = super.$parseDatabaseJson(json); const thisJson = super.$parseDatabaseJson(json);
return helpers.convertIntFieldsToBool(json, boolFields); return convertIntFieldsToBool(thisJson, boolFields);
} }
$formatDatabaseJson(json) { $formatDatabaseJson(json) {
json = helpers.convertBoolFieldsToInt(json, boolFields); const thisJson = convertBoolFieldsToInt(json, boolFields);
return super.$formatDatabaseJson(json); return super.$formatDatabaseJson(thisJson);
} }
static get name() { static get name() {
return 'Certificate'; return "Certificate";
} }
static get tableName() { static get tableName() {
return 'certificate'; return "certificate";
} }
static get jsonAttributes() { static get jsonAttributes() {
return ['domain_names', 'meta']; return ["domain_names", "meta"];
} }
static get relationMappings() { static get relationMappings() {
return { return {
owner: { owner: {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: User, modelClass: userModel,
join: { join: {
from: 'certificate.owner_user_id', from: "certificate.owner_user_id",
to: 'user.id' to: "user.id",
},
modify: (qb) => {
qb.where("user.is_deleted", 0);
},
},
proxy_hosts: {
relation: Model.HasManyRelation,
modelClass: proxyHostModel,
join: {
from: "certificate.id",
to: "proxy_host.certificate_id",
},
modify: (qb) => {
qb.where("proxy_host.is_deleted", 0);
},
},
dead_hosts: {
relation: Model.HasManyRelation,
modelClass: deadHostModel,
join: {
from: "certificate.id",
to: "dead_host.certificate_id",
},
modify: (qb) => {
qb.where("dead_host.is_deleted", 0);
},
},
redirection_hosts: {
relation: Model.HasManyRelation,
modelClass: redirectionHostModel,
join: {
from: "certificate.id",
to: "redirection_host.certificate_id",
},
modify: (qb) => {
qb.where("redirection_host.is_deleted", 0);
},
}, },
modify: function (qb) {
qb.where('user.is_deleted', 0);
}
}
}; };
} }
} }
module.exports = Certificate; export default Certificate;

View File

@@ -1,19 +1,16 @@
// Objection Docs: // Objection Docs:
// http://vincit.github.io/objection.js/ // http://vincit.github.io/objection.js/
const db = require('../db'); import { Model } from "objection";
const helpers = require('../lib/helpers'); import db from "../db.js";
const Model = require('objection').Model; import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
const User = require('./user'); import Certificate from "./certificate.js";
const Certificate = require('./certificate'); import now from "./now_helper.js";
const now = require('./now_helper'); import User from "./user.js";
Model.knex(db); Model.knex(db);
const boolFields = [ const boolFields = ["is_deleted", "ssl_forced", "http2_support", "enabled", "hsts_enabled", "hsts_subdomains"];
'is_deleted',
'enabled',
];
class DeadHost extends Model { class DeadHost extends Model {
$beforeInsert() { $beforeInsert() {
@@ -21,12 +18,12 @@ class DeadHost extends Model {
this.modified_on = now(); this.modified_on = now();
// Default for domain_names // Default for domain_names
if (typeof this.domain_names === 'undefined') { if (typeof this.domain_names === "undefined") {
this.domain_names = []; this.domain_names = [];
} }
// Default for meta // Default for meta
if (typeof this.meta === 'undefined') { if (typeof this.meta === "undefined") {
this.meta = {}; this.meta = {};
} }
@@ -37,31 +34,31 @@ class DeadHost extends Model {
this.modified_on = now(); this.modified_on = now();
// Sort domain_names // Sort domain_names
if (typeof this.domain_names !== 'undefined') { if (typeof this.domain_names !== "undefined") {
this.domain_names.sort(); this.domain_names.sort();
} }
} }
$parseDatabaseJson(json) { $parseDatabaseJson(json) {
json = super.$parseDatabaseJson(json); const thisJson = super.$parseDatabaseJson(json);
return helpers.convertIntFieldsToBool(json, boolFields); return convertIntFieldsToBool(thisJson, boolFields);
} }
$formatDatabaseJson(json) { $formatDatabaseJson(json) {
json = helpers.convertBoolFieldsToInt(json, boolFields); const thisJson = convertBoolFieldsToInt(json, boolFields);
return super.$formatDatabaseJson(json); return super.$formatDatabaseJson(thisJson);
} }
static get name() { static get name() {
return 'DeadHost'; return "DeadHost";
} }
static get tableName() { static get tableName() {
return 'dead_host'; return "dead_host";
} }
static get jsonAttributes() { static get jsonAttributes() {
return ['domain_names', 'meta']; return ["domain_names", "meta"];
} }
static get relationMappings() { static get relationMappings() {
@@ -70,26 +67,26 @@ class DeadHost extends Model {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: User, modelClass: User,
join: { join: {
from: 'dead_host.owner_user_id', from: "dead_host.owner_user_id",
to: 'user.id' to: "user.id",
},
modify: (qb) => {
qb.where("user.is_deleted", 0);
}, },
modify: function (qb) {
qb.where('user.is_deleted', 0);
}
}, },
certificate: { certificate: {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: Certificate, modelClass: Certificate,
join: { join: {
from: 'dead_host.certificate_id', from: "dead_host.certificate_id",
to: 'certificate.id' to: "certificate.id",
},
modify: (qb) => {
qb.where("certificate.is_deleted", 0);
},
}, },
modify: function (qb) {
qb.where('certificate.is_deleted', 0);
}
}
}; };
} }
} }
module.exports = DeadHost; export default DeadHost;

View File

@@ -1,13 +1,12 @@
const db = require('../db'); import { Model } from "objection";
const config = require('../lib/config'); import db from "../db.js";
const Model = require('objection').Model; import { isSqlite } from "../lib/config.js";
Model.knex(db); Model.knex(db);
module.exports = function () { export default () => {
if (config.isSqlite()) { if (isSqlite()) {
// eslint-disable-next-line
return Model.raw("datetime('now','localtime')"); return Model.raw("datetime('now','localtime')");
} }
return Model.raw('NOW()'); return Model.raw("NOW()");
}; };

View File

@@ -1,26 +1,26 @@
// Objection Docs: // Objection Docs:
// http://vincit.github.io/objection.js/ // http://vincit.github.io/objection.js/
const db = require('../db'); import { Model } from "objection";
const helpers = require('../lib/helpers'); import db from "../db.js";
const Model = require('objection').Model; import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
const User = require('./user'); import AccessList from "./access_list.js";
const AccessList = require('./access_list'); import Certificate from "./certificate.js";
const Certificate = require('./certificate'); import now from "./now_helper.js";
const now = require('./now_helper'); import User from "./user.js";
Model.knex(db); Model.knex(db);
const boolFields = [ const boolFields = [
'is_deleted', "is_deleted",
'ssl_forced', "ssl_forced",
'caching_enabled', "caching_enabled",
'block_exploits', "block_exploits",
'allow_websocket_upgrade', "allow_websocket_upgrade",
'http2_support', "http2_support",
'enabled', "enabled",
'hsts_enabled', "hsts_enabled",
'hsts_subdomains', "hsts_subdomains",
]; ];
class ProxyHost extends Model { class ProxyHost extends Model {
@@ -29,12 +29,12 @@ class ProxyHost extends Model {
this.modified_on = now(); this.modified_on = now();
// Default for domain_names // Default for domain_names
if (typeof this.domain_names === 'undefined') { if (typeof this.domain_names === "undefined") {
this.domain_names = []; this.domain_names = [];
} }
// Default for meta // Default for meta
if (typeof this.meta === 'undefined') { if (typeof this.meta === "undefined") {
this.meta = {}; this.meta = {};
} }
@@ -45,31 +45,31 @@ class ProxyHost extends Model {
this.modified_on = now(); this.modified_on = now();
// Sort domain_names // Sort domain_names
if (typeof this.domain_names !== 'undefined') { if (typeof this.domain_names !== "undefined") {
this.domain_names.sort(); this.domain_names.sort();
} }
} }
$parseDatabaseJson(json) { $parseDatabaseJson(json) {
json = super.$parseDatabaseJson(json); const thisJson = super.$parseDatabaseJson(json);
return helpers.convertIntFieldsToBool(json, boolFields); return convertIntFieldsToBool(thisJson, boolFields);
} }
$formatDatabaseJson(json) { $formatDatabaseJson(json) {
json = helpers.convertBoolFieldsToInt(json, boolFields); const thisJson = convertBoolFieldsToInt(json, boolFields);
return super.$formatDatabaseJson(json); return super.$formatDatabaseJson(thisJson);
} }
static get name() { static get name() {
return 'ProxyHost'; return "ProxyHost";
} }
static get tableName() { static get tableName() {
return 'proxy_host'; return "proxy_host";
} }
static get jsonAttributes() { static get jsonAttributes() {
return ['domain_names', 'meta', 'locations']; return ["domain_names", "meta", "locations"];
} }
static get relationMappings() { static get relationMappings() {
@@ -78,37 +78,37 @@ class ProxyHost extends Model {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: User, modelClass: User,
join: { join: {
from: 'proxy_host.owner_user_id', from: "proxy_host.owner_user_id",
to: 'user.id' to: "user.id",
},
modify: (qb) => {
qb.where("user.is_deleted", 0);
}, },
modify: function (qb) {
qb.where('user.is_deleted', 0);
}
}, },
access_list: { access_list: {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: AccessList, modelClass: AccessList,
join: { join: {
from: 'proxy_host.access_list_id', from: "proxy_host.access_list_id",
to: 'access_list.id' to: "access_list.id",
},
modify: (qb) => {
qb.where("access_list.is_deleted", 0);
}, },
modify: function (qb) {
qb.where('access_list.is_deleted', 0);
}
}, },
certificate: { certificate: {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: Certificate, modelClass: Certificate,
join: { join: {
from: 'proxy_host.certificate_id', from: "proxy_host.certificate_id",
to: 'certificate.id' to: "certificate.id",
},
modify: (qb) => {
qb.where("certificate.is_deleted", 0);
},
}, },
modify: function (qb) {
qb.where('certificate.is_deleted', 0);
}
}
}; };
} }
} }
module.exports = ProxyHost; export default ProxyHost;

View File

@@ -1,22 +1,24 @@
// Objection Docs: // Objection Docs:
// http://vincit.github.io/objection.js/ // http://vincit.github.io/objection.js/
const db = require('../db'); import { Model } from "objection";
const helpers = require('../lib/helpers'); import db from "../db.js";
const Model = require('objection').Model; import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
const User = require('./user'); import Certificate from "./certificate.js";
const Certificate = require('./certificate'); import now from "./now_helper.js";
const now = require('./now_helper'); import User from "./user.js";
Model.knex(db); Model.knex(db);
const boolFields = [ const boolFields = [
'is_deleted', "is_deleted",
'enabled', "enabled",
'preserve_path', "preserve_path",
'ssl_forced', "ssl_forced",
'block_exploits', "block_exploits",
"hsts_enabled",
"hsts_subdomains",
"http2_support",
]; ];
class RedirectionHost extends Model { class RedirectionHost extends Model {
@@ -25,12 +27,12 @@ class RedirectionHost extends Model {
this.modified_on = now(); this.modified_on = now();
// Default for domain_names // Default for domain_names
if (typeof this.domain_names === 'undefined') { if (typeof this.domain_names === "undefined") {
this.domain_names = []; this.domain_names = [];
} }
// Default for meta // Default for meta
if (typeof this.meta === 'undefined') { if (typeof this.meta === "undefined") {
this.meta = {}; this.meta = {};
} }
@@ -41,31 +43,31 @@ class RedirectionHost extends Model {
this.modified_on = now(); this.modified_on = now();
// Sort domain_names // Sort domain_names
if (typeof this.domain_names !== 'undefined') { if (typeof this.domain_names !== "undefined") {
this.domain_names.sort(); this.domain_names.sort();
} }
} }
$parseDatabaseJson(json) { $parseDatabaseJson(json) {
json = super.$parseDatabaseJson(json); const thisJson = super.$parseDatabaseJson(json);
return helpers.convertIntFieldsToBool(json, boolFields); return convertIntFieldsToBool(thisJson, boolFields);
} }
$formatDatabaseJson(json) { $formatDatabaseJson(json) {
json = helpers.convertBoolFieldsToInt(json, boolFields); const thisJson = convertBoolFieldsToInt(json, boolFields);
return super.$formatDatabaseJson(json); return super.$formatDatabaseJson(thisJson);
} }
static get name() { static get name() {
return 'RedirectionHost'; return "RedirectionHost";
} }
static get tableName() { static get tableName() {
return 'redirection_host'; return "redirection_host";
} }
static get jsonAttributes() { static get jsonAttributes() {
return ['domain_names', 'meta']; return ["domain_names", "meta"];
} }
static get relationMappings() { static get relationMappings() {
@@ -74,26 +76,26 @@ class RedirectionHost extends Model {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: User, modelClass: User,
join: { join: {
from: 'redirection_host.owner_user_id', from: "redirection_host.owner_user_id",
to: 'user.id' to: "user.id",
},
modify: (qb) => {
qb.where("user.is_deleted", 0);
}, },
modify: function (qb) {
qb.where('user.is_deleted', 0);
}
}, },
certificate: { certificate: {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: Certificate, modelClass: Certificate,
join: { join: {
from: 'redirection_host.certificate_id', from: "redirection_host.certificate_id",
to: 'certificate.id' to: "certificate.id",
},
modify: (qb) => {
qb.where("certificate.is_deleted", 0);
},
}, },
modify: function (qb) {
qb.where('certificate.is_deleted', 0);
}
}
}; };
} }
} }
module.exports = RedirectionHost; export default RedirectionHost;

View File

@@ -1,8 +1,8 @@
// Objection Docs: // Objection Docs:
// http://vincit.github.io/objection.js/ // http://vincit.github.io/objection.js/
const db = require('../db'); import { Model } from "objection";
const Model = require('objection').Model; import db from "../db.js";
Model.knex(db); Model.knex(db);
@@ -27,4 +27,4 @@ class Setting extends Model {
} }
} }
module.exports = Setting; export default Setting;

View File

@@ -1,19 +1,13 @@
// Objection Docs: import { Model } from "objection";
// http://vincit.github.io/objection.js/ import db from "../db.js";
import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
const db = require('../db'); import Certificate from "./certificate.js";
const helpers = require('../lib/helpers'); import now from "./now_helper.js";
const Model = require('objection').Model; import User from "./user.js";
const User = require('./user');
const now = require('./now_helper');
Model.knex(db); Model.knex(db);
const boolFields = [ const boolFields = ["is_deleted", "enabled", "tcp_forwarding", "udp_forwarding"];
'is_deleted',
'tcp_forwarding',
'udp_forwarding',
];
class Stream extends Model { class Stream extends Model {
$beforeInsert() { $beforeInsert() {
@@ -21,7 +15,7 @@ class Stream extends Model {
this.modified_on = now(); this.modified_on = now();
// Default for meta // Default for meta
if (typeof this.meta === 'undefined') { if (typeof this.meta === "undefined") {
this.meta = {}; this.meta = {};
} }
} }
@@ -31,25 +25,25 @@ class Stream extends Model {
} }
$parseDatabaseJson(json) { $parseDatabaseJson(json) {
json = super.$parseDatabaseJson(json); const thisJson = super.$parseDatabaseJson(json);
return helpers.convertIntFieldsToBool(json, boolFields); return convertIntFieldsToBool(thisJson, boolFields);
} }
$formatDatabaseJson(json) { $formatDatabaseJson(json) {
json = helpers.convertBoolFieldsToInt(json, boolFields); const thisJson = convertBoolFieldsToInt(json, boolFields);
return super.$formatDatabaseJson(json); return super.$formatDatabaseJson(thisJson);
} }
static get name() { static get name() {
return 'Stream'; return "Stream";
} }
static get tableName() { static get tableName() {
return 'stream'; return "stream";
} }
static get jsonAttributes() { static get jsonAttributes() {
return ['meta']; return ["meta"];
} }
static get relationMappings() { static get relationMappings() {
@@ -58,15 +52,26 @@ class Stream extends Model {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: User, modelClass: User,
join: { join: {
from: 'stream.owner_user_id', from: "stream.owner_user_id",
to: 'user.id' to: "user.id",
},
modify: (qb) => {
qb.where("user.is_deleted", 0);
},
},
certificate: {
relation: Model.HasOneRelation,
modelClass: Certificate,
join: {
from: "stream.certificate_id",
to: "certificate.id",
},
modify: (qb) => {
qb.where("certificate.is_deleted", 0);
},
}, },
modify: function (qb) {
qb.where('user.is_deleted', 0);
}
}
}; };
} }
} }
module.exports = Stream; export default Stream;

View File

@@ -3,17 +3,17 @@
and then has abilities after that. and then has abilities after that.
*/ */
const _ = require('lodash'); import crypto from "node:crypto";
const jwt = require('jsonwebtoken'); import jwt from "jsonwebtoken";
const crypto = require('crypto'); import _ from "lodash";
const config = require('../lib/config'); import { getPrivateKey, getPublicKey } from "../lib/config.js";
const error = require('../lib/error'); import errs from "../lib/error.js";
const logger = require('../logger').global; import { global as logger } from "../logger.js";
const ALGO = 'RS256';
module.exports = function () { const ALGO = "RS256";
let token_data = {}; export default () => {
let tokenData = {};
const self = { const self = {
/** /**
@@ -21,28 +21,26 @@ module.exports = function () {
* @returns {Promise} * @returns {Promise}
*/ */
create: (payload) => { create: (payload) => {
if (!config.getPrivateKey()) { if (!getPrivateKey()) {
logger.error('Private key is empty!'); logger.error("Private key is empty!");
} }
// sign with RSA SHA256 // sign with RSA SHA256
const options = { const options = {
algorithm: ALGO, algorithm: ALGO,
expiresIn: payload.expiresIn || '1d' expiresIn: payload.expiresIn || "1d",
}; };
payload.jti = crypto.randomBytes(12) payload.jti = crypto.randomBytes(12).toString("base64").substring(-8);
.toString('base64')
.substring(-8);
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
jwt.sign(payload, config.getPrivateKey(), options, (err, token) => { jwt.sign(payload, getPrivateKey(), options, (err, token) => {
if (err) { if (err) {
reject(err); reject(err);
} else { } else {
token_data = payload; tokenData = payload;
resolve({ resolve({
token: token, token: token,
payload: payload payload: payload,
}); });
} }
}); });
@@ -53,42 +51,47 @@ module.exports = function () {
* @param {String} token * @param {String} token
* @returns {Promise} * @returns {Promise}
*/ */
load: function (token) { load: (token) => {
if (!config.getPublicKey()) { if (!getPublicKey()) {
logger.error('Public key is empty!'); logger.error("Public key is empty!");
} }
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
try { try {
if (!token || token === null || token === 'null') { if (!token || token === null || token === "null") {
reject(new error.AuthError('Empty token')); reject(new errs.AuthError("Empty token"));
} else { } else {
jwt.verify(token, config.getPublicKey(), {ignoreExpiration: false, algorithms: [ALGO]}, (err, result) => { jwt.verify(
token,
getPublicKey(),
{ ignoreExpiration: false, algorithms: [ALGO] },
(err, result) => {
if (err) { if (err) {
if (err.name === "TokenExpiredError") {
if (err.name === 'TokenExpiredError') { reject(new errs.AuthError("Token has expired", err));
reject(new error.AuthError('Token has expired', err));
} else { } else {
reject(err); reject(err);
} }
} else { } else {
token_data = result; tokenData = result;
// Hack: some tokens out in the wild have a scope of 'all' instead of 'user'. // Hack: some tokens out in the wild have a scope of 'all' instead of 'user'.
// For 30 days at least, we need to replace 'all' with user. // For 30 days at least, we need to replace 'all' with user.
if ((typeof token_data.scope !== 'undefined' && _.indexOf(token_data.scope, 'all') !== -1)) { if (
token_data.scope = ['user']; typeof tokenData.scope !== "undefined" &&
_.indexOf(tokenData.scope, "all") !== -1
) {
tokenData.scope = ["user"];
} }
resolve(token_data); resolve(tokenData);
} }
}); },
);
} }
} catch (err) { } catch (err) {
reject(err); reject(err);
} }
}); });
}, },
/** /**
@@ -97,17 +100,15 @@ module.exports = function () {
* @param {String} scope * @param {String} scope
* @returns {Boolean} * @returns {Boolean}
*/ */
hasScope: function (scope) { hasScope: (scope) => typeof tokenData.scope !== "undefined" && _.indexOf(tokenData.scope, scope) !== -1,
return typeof token_data.scope !== 'undefined' && _.indexOf(token_data.scope, scope) !== -1;
},
/** /**
* @param {String} key * @param {String} key
* @return {*} * @return {*}
*/ */
get: function (key) { get: (key) => {
if (typeof token_data[key] !== 'undefined') { if (typeof tokenData[key] !== "undefined") {
return token_data[key]; return tokenData[key];
} }
return null; return null;
@@ -117,22 +118,22 @@ module.exports = function () {
* @param {String} key * @param {String} key
* @param {*} value * @param {*} value
*/ */
set: function (key, value) { set: (key, value) => {
token_data[key] = value; tokenData[key] = value;
}, },
/** /**
* @param [default_value] * @param [defaultValue]
* @returns {Integer} * @returns {Integer}
*/ */
getUserId: (default_value) => { getUserId: (defaultValue) => {
const attrs = self.get('attrs'); const attrs = self.get("attrs");
if (attrs && typeof attrs.id !== 'undefined' && attrs.id) { if (attrs?.id) {
return attrs.id; return attrs.id;
} }
return default_value || 0; return defaultValue || 0;
} },
}; };
return self; return self;

View File

@@ -1,18 +1,15 @@
// Objection Docs: // Objection Docs:
// http://vincit.github.io/objection.js/ // http://vincit.github.io/objection.js/
const db = require('../db'); import { Model } from "objection";
const helpers = require('../lib/helpers'); import db from "../db.js";
const Model = require('objection').Model; import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
const UserPermission = require('./user_permission'); import now from "./now_helper.js";
const now = require('./now_helper'); import UserPermission from "./user_permission.js";
Model.knex(db); Model.knex(db);
const boolFields = [ const boolFields = ["is_deleted", "is_disabled"];
'is_deleted',
'is_disabled',
];
class User extends Model { class User extends Model {
$beforeInsert() { $beforeInsert() {
@@ -20,7 +17,7 @@ class User extends Model {
this.modified_on = now(); this.modified_on = now();
// Default for roles // Default for roles
if (typeof this.roles === 'undefined') { if (typeof this.roles === "undefined") {
this.roles = []; this.roles = [];
} }
} }
@@ -30,25 +27,25 @@ class User extends Model {
} }
$parseDatabaseJson(json) { $parseDatabaseJson(json) {
json = super.$parseDatabaseJson(json); const thisJson = super.$parseDatabaseJson(json);
return helpers.convertIntFieldsToBool(json, boolFields); return convertIntFieldsToBool(thisJson, boolFields);
} }
$formatDatabaseJson(json) { $formatDatabaseJson(json) {
json = helpers.convertBoolFieldsToInt(json, boolFields); const thisJson = convertBoolFieldsToInt(json, boolFields);
return super.$formatDatabaseJson(json); return super.$formatDatabaseJson(thisJson);
} }
static get name() { static get name() {
return 'User'; return "User";
} }
static get tableName() { static get tableName() {
return 'user'; return "user";
} }
static get jsonAttributes() { static get jsonAttributes() {
return ['roles']; return ["roles"];
} }
static get relationMappings() { static get relationMappings() {
@@ -57,13 +54,12 @@ class User extends Model {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: UserPermission, modelClass: UserPermission,
join: { join: {
from: 'user.id', from: "user.id",
to: 'user_permission.user_id' to: "user_permission.user_id",
} },
} },
}; };
} }
} }
module.exports = User; export default User;

View File

@@ -1,9 +1,9 @@
// Objection Docs: // Objection Docs:
// http://vincit.github.io/objection.js/ // http://vincit.github.io/objection.js/
const db = require('../db'); import { Model } from "objection";
const Model = require('objection').Model; import db from "../db.js";
const now = require('./now_helper'); import now from "./now_helper.js";
Model.knex(db); Model.knex(db);
@@ -26,4 +26,4 @@ class UserPermission extends Model {
} }
} }
module.exports = UserPermission; export default UserPermission;

View File

@@ -3,5 +3,5 @@
"ignore": [ "ignore": [
"data" "data"
], ],
"ext": "js json ejs" "ext": "js json ejs cjs"
} }

View File

@@ -1,8 +1,16 @@
{ {
"name": "nginx-proxy-manager", "name": "nginx-proxy-manager",
"version": "0.0.0", "version": "2.0.0",
"description": "A beautiful interface for creating Nginx endpoints", "description": "A beautiful interface for creating Nginx endpoints",
"author": "Jamie Curnow <jc@jc21.com>",
"license": "MIT",
"main": "index.js", "main": "index.js",
"type": "module",
"scripts": {
"lint": "biome lint",
"prettier": "biome format --write .",
"validate-schema": "node validate-schema.js"
},
"dependencies": { "dependencies": {
"@apidevtools/json-schema-ref-parser": "^11.7.0", "@apidevtools/json-schema-ref-parser": "^11.7.0",
"ajv": "^8.17.1", "ajv": "^8.17.1",
@@ -23,25 +31,19 @@
"node-rsa": "^1.0.8", "node-rsa": "^1.0.8",
"objection": "3.0.1", "objection": "3.0.1",
"path": "^0.12.7", "path": "^0.12.7",
"pg": "^8.13.1",
"signale": "1.4.0", "signale": "1.4.0",
"sqlite3": "5.1.6", "sqlite3": "5.1.6",
"temp-write": "^4.0.0" "temp-write": "^4.0.0"
}, },
"devDependencies": {
"@apidevtools/swagger-parser": "^10.1.0",
"@biomejs/biome": "^2.2.4",
"chalk": "4.1.2",
"nodemon": "^2.0.2"
},
"signale": { "signale": {
"displayDate": true, "displayDate": true,
"displayTimestamp": true "displayTimestamp": true
},
"author": "Jamie Curnow <jc@jc21.com>",
"license": "MIT",
"devDependencies": {
"@apidevtools/swagger-parser": "^10.1.0",
"chalk": "4.1.2",
"eslint": "^8.36.0",
"eslint-plugin-align-assignments": "^1.1.2",
"nodemon": "^2.0.2",
"prettier": "^2.0.4"
},
"scripts": {
"validate-schema": "node validate-schema.js"
} }
} }
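
The "type": "module" switch is what allows the route files below to use import/export and the JSON import-attribute syntax (import pjson from "../package.json" with { type: "json" }). A self-contained sketch of that syntax, assuming a reasonably recent Node release with import-attribute support:

// esm-json-example.js — illustrative only; run with a Node version that
// supports `with { type: "json" }` import attributes.
import pkg from "./package.json" with { type: "json" };

console.log(`${pkg.name} v${pkg.version}`);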

View File

@@ -1,19 +1,20 @@
const express = require('express'); import express from "express";
const validator = require('../lib/validator'); import internalAuditLog from "../internal/audit-log.js";
const jwtdecode = require('../lib/express/jwt-decode'); import jwtdecode from "../lib/express/jwt-decode.js";
const internalAuditLog = require('../internal/audit-log'); import validator from "../lib/validator/index.js";
import { express as logger } from "../logger.js";
let router = express.Router({ const router = express.Router({
caseSensitive: true, caseSensitive: true,
strict: true, strict: true,
mergeParams: true mergeParams: true,
}); });
/** /**
* /api/audit-log * /api/audit-log
*/ */
router router
.route('/') .route("/")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -24,29 +25,31 @@ router
* *
* Retrieve all logs * Retrieve all logs
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
validator({ try {
const data = await validator(
{
additionalProperties: false, additionalProperties: false,
properties: { properties: {
expand: { expand: {
$ref: 'common#/properties/expand' $ref: "common#/properties/expand",
}, },
query: { query: {
$ref: 'common#/properties/query' $ref: "common#/properties/query",
},
},
},
{
expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
query: typeof req.query.query === "string" ? req.query.query : null,
},
);
const rows = await internalAuditLog.getAll(res.locals.access, data.expand, data.query);
res.status(200).send(rows);
} catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
} }
}
}, {
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
query: (typeof req.query.query === 'string' ? req.query.query : null)
})
.then((data) => {
return internalAuditLog.getAll(res.locals.access, data.expand, data.query);
})
.then((rows) => {
res.status(200)
.send(rows);
})
.catch(next);
}); });
module.exports = router; export default router;
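
This audit-log route is the first of many in the diff converted from a .then()/.catch(next) chain to an async handler wrapped in try/catch. The same shape repeats in every route file below; here is a stripped-down sketch of the pattern with a stubbed internal layer (names are illustrative, not the project's actual modules):

import express from "express";

// Stand-in for one of the internal/* modules: an async function that either
// resolves with rows or throws, just like the real getAll calls.
const internalExample = {
	getAll: async () => [{ id: 1 }],
};

const router = express.Router({ caseSensitive: true, strict: true, mergeParams: true });

router.get("/example", async (req, res, next) => {
	try {
		const rows = await internalExample.getAll(res.locals.access);
		res.status(200).send(rows);
	} catch (err) {
		// Hand the error to Express' error middleware, same as .catch(next) did.
		next(err);
	}
});

export default router;

The only behavioural addition over the old promise chain is the logger.debug call, which records the failing method and path before delegating to the error handler.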

View File

@@ -1,51 +1,66 @@
const express = require('express'); import express from "express";
const pjson = require('../package.json'); import errs from "../lib/error.js";
const error = require('../lib/error'); import pjson from "../package.json" with { type: "json" };
import { isSetup } from "../setup.js";
import auditLogRoutes from "./audit-log.js";
import accessListsRoutes from "./nginx/access_lists.js";
import certificatesHostsRoutes from "./nginx/certificates.js";
import deadHostsRoutes from "./nginx/dead_hosts.js";
import proxyHostsRoutes from "./nginx/proxy_hosts.js";
import redirectionHostsRoutes from "./nginx/redirection_hosts.js";
import streamsRoutes from "./nginx/streams.js";
import reportsRoutes from "./reports.js";
import schemaRoutes from "./schema.js";
import settingsRoutes from "./settings.js";
import tokensRoutes from "./tokens.js";
import usersRoutes from "./users.js";
let router = express.Router({ const router = express.Router({
caseSensitive: true, caseSensitive: true,
strict: true, strict: true,
mergeParams: true mergeParams: true,
}); });
/** /**
* Health Check * Health Check
* GET /api * GET /api
*/ */
router.get('/', (req, res/*, next*/) => { router.get("/", async (_, res /*, next*/) => {
let version = pjson.version.split('-').shift().split('.'); const version = pjson.version.split("-").shift().split(".");
const setup = await isSetup();
res.status(200).send({ res.status(200).send({
status: 'OK', status: "OK",
setup,
version: { version: {
major: parseInt(version.shift(), 10), major: Number.parseInt(version.shift(), 10),
minor: parseInt(version.shift(), 10), minor: Number.parseInt(version.shift(), 10),
revision: parseInt(version.shift(), 10) revision: Number.parseInt(version.shift(), 10),
} },
}); });
}); });
router.use('/schema', require('./schema')); router.use("/schema", schemaRoutes);
router.use('/tokens', require('./tokens')); router.use("/tokens", tokensRoutes);
router.use('/users', require('./users')); router.use("/users", usersRoutes);
router.use('/audit-log', require('./audit-log')); router.use("/audit-log", auditLogRoutes);
router.use('/reports', require('./reports')); router.use("/reports", reportsRoutes);
router.use('/settings', require('./settings')); router.use("/settings", settingsRoutes);
router.use('/nginx/proxy-hosts', require('./nginx/proxy_hosts')); router.use("/nginx/proxy-hosts", proxyHostsRoutes);
router.use('/nginx/redirection-hosts', require('./nginx/redirection_hosts')); router.use("/nginx/redirection-hosts", redirectionHostsRoutes);
router.use('/nginx/dead-hosts', require('./nginx/dead_hosts')); router.use("/nginx/dead-hosts", deadHostsRoutes);
router.use('/nginx/streams', require('./nginx/streams')); router.use("/nginx/streams", streamsRoutes);
router.use('/nginx/access-lists', require('./nginx/access_lists')); router.use("/nginx/access-lists", accessListsRoutes);
router.use('/nginx/certificates', require('./nginx/certificates')); router.use("/nginx/certificates", certificatesHostsRoutes);
/** /**
* API 404 for all other routes * API 404 for all other routes
* *
* ALL /api/* * ALL /api/*
*/ */
router.all(/(.+)/, function (req, _, next) { router.all(/(.+)/, (req, _, next) => {
req.params.page = req.params['0']; req.params.page = req.params["0"];
next(new error.ItemNotFoundError(req.params.page)); next(new errs.ItemNotFoundError(req.params.page));
}); });
module.exports = router; export default router;
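
The health check now also reports whether first-run setup has completed (the setup flag from isSetup()). A hedged sketch of how a client could read it; the origin and port are examples, not something this diff defines:

// check-setup.js — example client; adjust the origin to wherever the API runs.
const res = await fetch("http://localhost:81/api");
const { status, setup, version } = await res.json();

if (!setup) {
	console.log("No admin user yet — the UI will show the setup wizard.");
}
console.log(`API ${status}, v${version.major}.${version.minor}.${version.revision}`);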

View File

@@ -1,22 +1,23 @@
const express = require('express'); import express from "express";
const validator = require('../../lib/validator'); import internalAccessList from "../../internal/access-list.js";
const jwtdecode = require('../../lib/express/jwt-decode'); import jwtdecode from "../../lib/express/jwt-decode.js";
const apiValidator = require('../../lib/validator/api'); import apiValidator from "../../lib/validator/api.js";
const internalAccessList = require('../../internal/access-list'); import validator from "../../lib/validator/index.js";
const schema = require('../../schema'); import { express as logger } from "../../logger.js";
import { getValidationSchema } from "../../schema/index.js";
let router = express.Router({ const router = express.Router({
caseSensitive: true, caseSensitive: true,
strict: true, strict: true,
mergeParams: true mergeParams: true,
}); });
/** /**
* /api/nginx/access-lists * /api/nginx/access-lists
*/ */
router router
.route('/') .route("/")
.options((req, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
.all(jwtdecode()) .all(jwtdecode())
@@ -26,29 +27,31 @@ router
* *
* Retrieve all access-lists * Retrieve all access-lists
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
validator({ try {
const data = await validator(
{
additionalProperties: false, additionalProperties: false,
properties: { properties: {
expand: { expand: {
$ref: 'common#/properties/expand' $ref: "common#/properties/expand",
}, },
query: { query: {
$ref: 'common#/properties/query' $ref: "common#/properties/query",
},
},
},
{
expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
query: typeof req.query.query === "string" ? req.query.query : null,
},
);
const rows = await internalAccessList.getAll(res.locals.access, data.expand, data.query);
res.status(200).send(rows);
} catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
} }
}
}, {
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
query: (typeof req.query.query === 'string' ? req.query.query : null)
})
.then((data) => {
return internalAccessList.getAll(res.locals.access, data.expand, data.query);
})
.then((rows) => {
res.status(200)
.send(rows);
})
.catch(next);
}) })
/** /**
@@ -56,16 +59,15 @@ router
* *
* Create a new access-list * Create a new access-list
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
apiValidator(schema.getValidationSchema('/nginx/access-lists', 'post'), req.body) try {
.then((payload) => { const payload = await apiValidator(getValidationSchema("/nginx/access-lists", "post"), req.body);
return internalAccessList.create(res.locals.access, payload); const result = await internalAccessList.create(res.locals.access, payload);
}) res.status(201).send(result);
.then((result) => { } catch (err) {
res.status(201) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(result); next(err);
}) }
.catch(next);
}); });
/** /**
@@ -74,7 +76,7 @@ router
* /api/nginx/access-lists/123 * /api/nginx/access-lists/123
*/ */
router router
.route('/:list_id') .route("/:list_id")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -85,33 +87,35 @@ router
* *
* Retrieve a specific access-list * Retrieve a specific access-list
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
validator({ try {
required: ['list_id'], const data = await validator(
{
required: ["list_id"],
additionalProperties: false, additionalProperties: false,
properties: { properties: {
list_id: { list_id: {
$ref: 'common#/properties/id' $ref: "common#/properties/id",
}, },
expand: { expand: {
$ref: 'common#/properties/expand' $ref: "common#/properties/expand",
} },
} },
}, { },
{
list_id: req.params.list_id, list_id: req.params.list_id,
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null) expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
}) },
.then((data) => { );
return internalAccessList.get(res.locals.access, { const row = await internalAccessList.get(res.locals.access, {
id: parseInt(data.list_id, 10), id: Number.parseInt(data.list_id, 10),
expand: data.expand expand: data.expand,
}); });
}) res.status(200).send(row);
.then((row) => { } catch (err) {
res.status(200) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(row); next(err);
}) }
.catch(next);
}) })
/** /**
@@ -119,17 +123,16 @@ router
* *
* Update an existing access-list * Update an existing access-list
*/ */
.put((req, res, next) => { .put(async (req, res, next) => {
apiValidator(schema.getValidationSchema('/nginx/access-lists/{listID}', 'put'), req.body) try {
.then((payload) => { const payload = await apiValidator(getValidationSchema("/nginx/access-lists/{listID}", "put"), req.body);
payload.id = parseInt(req.params.list_id, 10); payload.id = Number.parseInt(req.params.list_id, 10);
return internalAccessList.update(res.locals.access, payload); const result = await internalAccessList.update(res.locals.access, payload);
}) res.status(200).send(result);
.then((result) => { } catch (err) {
res.status(200) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(result); next(err);
}) }
.catch(next);
}) })
/** /**
@@ -137,13 +140,16 @@ router
* *
* Delete an existing access-list * Delete an existing access-list
*/ */
.delete((req, res, next) => { .delete(async (req, res, next) => {
internalAccessList.delete(res.locals.access, {id: parseInt(req.params.list_id, 10)}) try {
.then((result) => { const result = await internalAccessList.delete(res.locals.access, {
res.status(200) id: Number.parseInt(req.params.list_id, 10),
.send(result); });
}) res.status(200).send(result);
.catch(next); } catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
module.exports = router; export default router;
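
The try/catch block in each handler above is written out explicitly. An alternative (not used in this changeset, shown only as a sketch) is a small wrapper that forwards a rejected handler promise to next, at the cost of losing the per-route logger.debug call unless that logging moves into a central error middleware:

// async-handler-sketch.js — hypothetical helper, not part of this diff.
const asyncHandler = (fn) => (req, res, next) =>
	Promise.resolve(fn(req, res, next)).catch(next);

// Usage (illustrative):
// router.get("/", asyncHandler(async (req, res) => {
//     const rows = await internalAccessList.getAll(res.locals.access);
//     res.status(200).send(rows);
// }));

export default asyncHandler;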

View File

@@ -1,22 +1,23 @@
const express = require('express'); import express from "express";
const error = require('../../lib/error'); import internalCertificate from "../../internal/certificate.js";
const validator = require('../../lib/validator'); import errs from "../../lib/error.js";
const jwtdecode = require('../../lib/express/jwt-decode'); import jwtdecode from "../../lib/express/jwt-decode.js";
const apiValidator = require('../../lib/validator/api'); import apiValidator from "../../lib/validator/api.js";
const internalCertificate = require('../../internal/certificate'); import validator from "../../lib/validator/index.js";
const schema = require('../../schema'); import { express as logger } from "../../logger.js";
import { getValidationSchema } from "../../schema/index.js";
let router = express.Router({ const router = express.Router({
caseSensitive: true, caseSensitive: true,
strict: true, strict: true,
mergeParams: true mergeParams: true,
}); });
/** /**
* /api/nginx/certificates * /api/nginx/certificates
*/ */
router router
.route('/') .route("/")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -27,29 +28,31 @@ router
* *
* Retrieve all certificates * Retrieve all certificates
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
validator({ try {
const data = await validator(
{
additionalProperties: false, additionalProperties: false,
properties: { properties: {
expand: { expand: {
$ref: 'common#/properties/expand' $ref: "common#/properties/expand",
}, },
query: { query: {
$ref: 'common#/properties/query' $ref: "common#/properties/query",
},
},
},
{
expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
query: typeof req.query.query === "string" ? req.query.query : null,
},
);
const rows = await internalCertificate.getAll(res.locals.access, data.expand, data.query);
res.status(200).send(rows);
} catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
} }
}
}, {
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
query: (typeof req.query.query === 'string' ? req.query.query : null)
})
.then((data) => {
return internalCertificate.getAll(res.locals.access, data.expand, data.query);
})
.then((rows) => {
res.status(200)
.send(rows);
})
.catch(next);
}) })
/** /**
@@ -57,17 +60,16 @@ router
* *
* Create a new certificate * Create a new certificate
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
apiValidator(schema.getValidationSchema('/nginx/certificates', 'post'), req.body) try {
.then((payload) => { const payload = await apiValidator(getValidationSchema("/nginx/certificates", "post"), req.body);
req.setTimeout(900000); // 15 minutes timeout req.setTimeout(900000); // 15 minutes timeout
return internalCertificate.create(res.locals.access, payload); const result = await internalCertificate.create(res.locals.access, payload);
}) res.status(201).send(result);
.then((result) => { } catch (err) {
res.status(201) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(result); next(err);
}) }
.catch(next);
}); });
/** /**
@@ -76,7 +78,7 @@ router
* /api/nginx/certificates/test-http * /api/nginx/certificates/test-http
*/ */
router router
.route('/test-http') .route("/test-http")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -87,18 +89,22 @@ router
* *
* Test HTTP challenge for domains * Test HTTP challenge for domains
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
if (req.query.domains === undefined) { if (req.query.domains === undefined) {
next(new error.ValidationError('Domains are required as query parameters')); next(new errs.ValidationError("Domains are required as query parameters"));
return; return;
} }
internalCertificate.testHttpsChallenge(res.locals.access, JSON.parse(req.query.domains)) try {
.then((result) => { const result = await internalCertificate.testHttpsChallenge(
res.status(200) res.locals.access,
.send(result); JSON.parse(req.query.domains),
}) );
.catch(next); res.status(200).send(result);
} catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
/** /**
@@ -107,7 +113,7 @@ router
* /api/nginx/certificates/123 * /api/nginx/certificates/123
*/ */
router router
.route('/:certificate_id') .route("/:certificate_id")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -118,33 +124,35 @@ router
* *
* Retrieve a specific certificate * Retrieve a specific certificate
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
validator({ try {
required: ['certificate_id'], const data = await validator(
{
required: ["certificate_id"],
additionalProperties: false, additionalProperties: false,
properties: { properties: {
certificate_id: { certificate_id: {
$ref: 'common#/properties/id' $ref: "common#/properties/id",
}, },
expand: { expand: {
$ref: 'common#/properties/expand' $ref: "common#/properties/expand",
} },
} },
}, { },
{
certificate_id: req.params.certificate_id, certificate_id: req.params.certificate_id,
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null) expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
}) },
.then((data) => { );
return internalCertificate.get(res.locals.access, { const row = await internalCertificate.get(res.locals.access, {
id: parseInt(data.certificate_id, 10), id: Number.parseInt(data.certificate_id, 10),
expand: data.expand expand: data.expand,
}); });
}) res.status(200).send(row);
.then((row) => { } catch (err) {
res.status(200) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(row); next(err);
}) }
.catch(next);
}) })
/** /**
@@ -152,13 +160,16 @@ router
* *
* Delete an existing certificate * Delete an existing certificate
*/ */
.delete((req, res, next) => { .delete(async (req, res, next) => {
internalCertificate.delete(res.locals.access, {id: parseInt(req.params.certificate_id, 10)}) try {
.then((result) => { const result = await internalCertificate.delete(res.locals.access, {
res.status(200) id: Number.parseInt(req.params.certificate_id, 10),
.send(result); });
}) res.status(200).send(result);
.catch(next); } catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
/** /**
@@ -167,7 +178,7 @@ router
* /api/nginx/certificates/123/upload * /api/nginx/certificates/123/upload
*/ */
router router
.route('/:certificate_id/upload') .route("/:certificate_id/upload")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -178,20 +189,21 @@ router
* *
* Upload certificates * Upload certificates
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
if (!req.files) { if (!req.files) {
res.status(400) res.status(400).send({ error: "No files were uploaded" });
.send({error: 'No files were uploaded'}); return;
} else { }
internalCertificate.upload(res.locals.access, {
id: parseInt(req.params.certificate_id, 10), try {
files: req.files const result = await internalCertificate.upload(res.locals.access, {
}) id: Number.parseInt(req.params.certificate_id, 10),
.then((result) => { files: req.files,
res.status(200) });
.send(result); res.status(200).send(result);
}) } catch (err) {
.catch(next); logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
} }
}); });
@@ -201,7 +213,7 @@ router
* /api/nginx/certificates/123/renew * /api/nginx/certificates/123/renew
*/ */
router router
.route('/:certificate_id/renew') .route("/:certificate_id/renew")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -212,16 +224,17 @@ router
* *
* Renew certificate * Renew certificate
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
req.setTimeout(900000); // 15 minutes timeout req.setTimeout(900000); // 15 minutes timeout
internalCertificate.renew(res.locals.access, { try {
id: parseInt(req.params.certificate_id, 10) const result = await internalCertificate.renew(res.locals.access, {
}) id: Number.parseInt(req.params.certificate_id, 10),
.then((result) => { });
res.status(200) res.status(200).send(result);
.send(result); } catch (err) {
}) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.catch(next); next(err);
}
}); });
/** /**
@@ -230,8 +243,8 @@ router
* /api/nginx/certificates/123/download * /api/nginx/certificates/123/download
*/ */
router router
.route('/:certificate_id/download') .route("/:certificate_id/download")
.options((req, res) => { .options((_req, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
.all(jwtdecode()) .all(jwtdecode())
@@ -241,15 +254,16 @@ router
* *
* Download certificate * Download certificate
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
internalCertificate.download(res.locals.access, { try {
id: parseInt(req.params.certificate_id, 10) const result = await internalCertificate.download(res.locals.access, {
}) id: Number.parseInt(req.params.certificate_id, 10),
.then((result) => { });
res.status(200) res.status(200).download(result.fileName);
.download(result.fileName); } catch (err) {
}) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.catch(next); next(err);
}
}); });
/** /**
@@ -258,7 +272,7 @@ router
* /api/nginx/certificates/validate * /api/nginx/certificates/validate
*/ */
router router
.route('/validate') .route("/validate")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -269,20 +283,21 @@ router
* *
* Validate certificates * Validate certificates
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
if (!req.files) { if (!req.files) {
res.status(400) res.status(400).send({ error: "No files were uploaded" });
.send({error: 'No files were uploaded'}); return;
} else { }
internalCertificate.validate({
files: req.files try {
}) const result = await internalCertificate.validate({
.then((result) => { files: req.files,
res.status(200) });
.send(result); res.status(200).send(result);
}) } catch (err) {
.catch(next); logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
} }
}); });
module.exports = router; export default router;
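
Both certificate creation and renewal call req.setTimeout(900000), giving the server up to 15 minutes while the ACME client runs. A client hitting those endpoints should allow a similarly long budget; an illustrative call, where the URL, certificate id and token are placeholders:

// renew-cert-example.js — placeholder values throughout.
const id = 123;
const res = await fetch(`http://localhost:81/api/nginx/certificates/${id}/renew`, {
	method: "POST",
	headers: { Authorization: `Bearer ${process.env.NPM_TOKEN}` },
	// Match the server's 15-minute window rather than aborting early.
	signal: AbortSignal.timeout(900000),
});
console.log(res.status, await res.json());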

View File

@@ -1,21 +1,22 @@
const express = require('express'); import express from "express";
const validator = require('../../lib/validator'); import internalDeadHost from "../../internal/dead-host.js";
const jwtdecode = require('../../lib/express/jwt-decode'); import jwtdecode from "../../lib/express/jwt-decode.js";
const apiValidator = require('../../lib/validator/api'); import apiValidator from "../../lib/validator/api.js";
const internalDeadHost = require('../../internal/dead-host'); import validator from "../../lib/validator/index.js";
const schema = require('../../schema'); import { express as logger } from "../../logger.js";
import { getValidationSchema } from "../../schema/index.js";
let router = express.Router({ const router = express.Router({
caseSensitive: true, caseSensitive: true,
strict: true, strict: true,
mergeParams: true mergeParams: true,
}); });
/** /**
* /api/nginx/dead-hosts * /api/nginx/dead-hosts
*/ */
router router
.route('/') .route("/")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -26,29 +27,31 @@ router
* *
* Retrieve all dead-hosts * Retrieve all dead-hosts
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
validator({ try {
const data = await validator(
{
additionalProperties: false, additionalProperties: false,
properties: { properties: {
expand: { expand: {
$ref: 'common#/properties/expand' $ref: "common#/properties/expand",
}, },
query: { query: {
$ref: 'common#/properties/query' $ref: "common#/properties/query",
},
},
},
{
expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
query: typeof req.query.query === "string" ? req.query.query : null,
},
);
const rows = await internalDeadHost.getAll(res.locals.access, data.expand, data.query);
res.status(200).send(rows);
} catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
} }
}
}, {
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
query: (typeof req.query.query === 'string' ? req.query.query : null)
})
.then((data) => {
return internalDeadHost.getAll(res.locals.access, data.expand, data.query);
})
.then((rows) => {
res.status(200)
.send(rows);
})
.catch(next);
}) })
/** /**
@@ -56,16 +59,15 @@ router
* *
* Create a new dead-host * Create a new dead-host
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
apiValidator(schema.getValidationSchema('/nginx/dead-hosts', 'post'), req.body) try {
.then((payload) => { const payload = await apiValidator(getValidationSchema("/nginx/dead-hosts", "post"), req.body);
return internalDeadHost.create(res.locals.access, payload); const result = await internalDeadHost.create(res.locals.access, payload);
}) res.status(201).send(result);
.then((result) => { } catch (err) {
res.status(201) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(result); next(err);
}) }
.catch(next);
}); });
/** /**
@@ -74,8 +76,8 @@ router
* /api/nginx/dead-hosts/123 * /api/nginx/dead-hosts/123
*/ */
router router
.route('/:host_id') .route("/:host_id")
.options((req, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
.all(jwtdecode()) .all(jwtdecode())
@@ -85,33 +87,35 @@ router
* *
* Retrieve a specific dead-host * Retrieve a specific dead-host
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
validator({ try {
required: ['host_id'], const data = await validator(
{
required: ["host_id"],
additionalProperties: false, additionalProperties: false,
properties: { properties: {
host_id: { host_id: {
$ref: 'common#/properties/id' $ref: "common#/properties/id",
}, },
expand: { expand: {
$ref: 'common#/properties/expand' $ref: "common#/properties/expand",
} },
} },
}, { },
{
host_id: req.params.host_id, host_id: req.params.host_id,
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null) expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
}) },
.then((data) => { );
return internalDeadHost.get(res.locals.access, { const row = await internalDeadHost.get(res.locals.access, {
id: parseInt(data.host_id, 10), id: Number.parseInt(data.host_id, 10),
expand: data.expand expand: data.expand,
}); });
}) res.status(200).send(row);
.then((row) => { } catch (err) {
res.status(200) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(row); next(err);
}) }
.catch(next);
}) })
/** /**
@@ -119,17 +123,16 @@ router
* *
* Update an existing dead-host * Update an existing dead-host
*/ */
.put((req, res, next) => { .put(async (req, res, next) => {
apiValidator(schema.getValidationSchema('/nginx/dead-hosts/{hostID}', 'put'), req.body) try {
.then((payload) => { const payload = await apiValidator(getValidationSchema("/nginx/dead-hosts/{hostID}", "put"), req.body);
payload.id = parseInt(req.params.host_id, 10); payload.id = Number.parseInt(req.params.host_id, 10);
return internalDeadHost.update(res.locals.access, payload); const result = await internalDeadHost.update(res.locals.access, payload);
}) res.status(200).send(result);
.then((result) => { } catch (err) {
res.status(200) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(result); next(err);
}) }
.catch(next);
}) })
/** /**
@@ -137,13 +140,16 @@ router
* *
* Delete an existing dead-host * Delete an existing dead-host
*/ */
.delete((req, res, next) => { .delete(async (req, res, next) => {
internalDeadHost.delete(res.locals.access, {id: parseInt(req.params.host_id, 10)}) try {
.then((result) => { const result = await internalDeadHost.delete(res.locals.access, {
res.status(200) id: Number.parseInt(req.params.host_id, 10),
.send(result); });
}) res.status(200).send(result);
.catch(next); } catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
/** /**
@@ -152,7 +158,7 @@ router
* /api/nginx/dead-hosts/123/enable * /api/nginx/dead-hosts/123/enable
*/ */
router router
.route('/:host_id/enable') .route("/:host_id/enable")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -161,13 +167,16 @@ router
/** /**
* POST /api/nginx/dead-hosts/123/enable * POST /api/nginx/dead-hosts/123/enable
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
internalDeadHost.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)}) try {
.then((result) => { const result = await internalDeadHost.enable(res.locals.access, {
res.status(200) id: Number.parseInt(req.params.host_id, 10),
.send(result); });
}) res.status(200).send(result);
.catch(next); } catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
/** /**
@@ -176,7 +185,7 @@ router
* /api/nginx/dead-hosts/123/disable * /api/nginx/dead-hosts/123/disable
*/ */
router router
.route('/:host_id/disable') .route("/:host_id/disable")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -186,12 +195,13 @@ router
* POST /api/nginx/dead-hosts/123/disable * POST /api/nginx/dead-hosts/123/disable
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
internalDeadHost.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)}) try {
.then((result) => { const result = await internalDeadHost.disable(res.locals.access, { id: Number.parseInt(req.params.host_id, 10) });
res.status(200) res.status(200).send(result);
.send(result); } catch (err) {
}) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.catch(next); next(err);
}
}); });
module.exports = router; export default router;

View File

@@ -1,22 +1,23 @@
const express = require('express'); import express from "express";
const validator = require('../../lib/validator'); import internalProxyHost from "../../internal/proxy-host.js";
const jwtdecode = require('../../lib/express/jwt-decode'); import jwtdecode from "../../lib/express/jwt-decode.js";
const apiValidator = require('../../lib/validator/api'); import apiValidator from "../../lib/validator/api.js";
const internalProxyHost = require('../../internal/proxy-host'); import validator from "../../lib/validator/index.js";
const schema = require('../../schema'); import { express as logger } from "../../logger.js";
import { getValidationSchema } from "../../schema/index.js";
let router = express.Router({ const router = express.Router({
caseSensitive: true, caseSensitive: true,
strict: true, strict: true,
mergeParams: true mergeParams: true,
}); });
/** /**
* /api/nginx/proxy-hosts * /api/nginx/proxy-hosts
*/ */
router router
.route('/') .route("/")
.options((req, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
.all(jwtdecode()) .all(jwtdecode())
@@ -26,29 +27,31 @@ router
* *
* Retrieve all proxy-hosts * Retrieve all proxy-hosts
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
validator({ try {
const data = await validator(
{
additionalProperties: false, additionalProperties: false,
properties: { properties: {
expand: { expand: {
$ref: 'common#/properties/expand' $ref: "common#/properties/expand",
}, },
query: { query: {
$ref: 'common#/properties/query' $ref: "common#/properties/query",
},
},
},
{
expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
query: typeof req.query.query === "string" ? req.query.query : null,
},
);
const rows = await internalProxyHost.getAll(res.locals.access, data.expand, data.query);
res.status(200).send(rows);
} catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
} }
}
}, {
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
query: (typeof req.query.query === 'string' ? req.query.query : null)
})
.then((data) => {
return internalProxyHost.getAll(res.locals.access, data.expand, data.query);
})
.then((rows) => {
res.status(200)
.send(rows);
})
.catch(next);
}) })
/** /**
@@ -56,16 +59,15 @@ router
* *
* Create a new proxy-host * Create a new proxy-host
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
apiValidator(schema.getValidationSchema('/nginx/proxy-hosts', 'post'), req.body) try {
.then((payload) => { const payload = await apiValidator(getValidationSchema("/nginx/proxy-hosts", "post"), req.body);
return internalProxyHost.create(res.locals.access, payload); const result = await internalProxyHost.create(res.locals.access, payload);
}) res.status(201).send(result);
.then((result) => { } catch (err) {
res.status(201) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(result); next(err);
}) }
.catch(next);
}); });
/** /**
@@ -74,8 +76,8 @@ router
* /api/nginx/proxy-hosts/123 * /api/nginx/proxy-hosts/123
*/ */
router router
.route('/:host_id') .route("/:host_id")
.options((req, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
.all(jwtdecode()) .all(jwtdecode())
@@ -85,33 +87,35 @@ router
* *
* Retrieve a specific proxy-host * Retrieve a specific proxy-host
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
validator({ try {
required: ['host_id'], const data = await validator(
{
required: ["host_id"],
additionalProperties: false, additionalProperties: false,
properties: { properties: {
host_id: { host_id: {
$ref: 'common#/properties/id' $ref: "common#/properties/id",
}, },
expand: { expand: {
$ref: 'common#/properties/expand' $ref: "common#/properties/expand",
} },
} },
}, { },
{
host_id: req.params.host_id, host_id: req.params.host_id,
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null) expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
}) },
.then((data) => { );
return internalProxyHost.get(res.locals.access, { const row = await internalProxyHost.get(res.locals.access, {
id: parseInt(data.host_id, 10), id: Number.parseInt(data.host_id, 10),
expand: data.expand expand: data.expand,
}); });
}) res.status(200).send(row);
.then((row) => { } catch (err) {
res.status(200) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(row); next(err);
}) }
.catch(next);
}) })
/** /**
@@ -119,17 +123,16 @@ router
* *
* Update an existing proxy-host * Update an existing proxy-host
*/ */
.put((req, res, next) => { .put(async (req, res, next) => {
apiValidator(schema.getValidationSchema('/nginx/proxy-hosts/{hostID}', 'put'), req.body) try {
.then((payload) => { const payload = await apiValidator(getValidationSchema("/nginx/proxy-hosts/{hostID}", "put"), req.body);
payload.id = parseInt(req.params.host_id, 10); payload.id = Number.parseInt(req.params.host_id, 10);
return internalProxyHost.update(res.locals.access, payload); const result = await internalProxyHost.update(res.locals.access, payload);
}) res.status(200).send(result);
.then((result) => { } catch (err) {
res.status(200) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(result); next(err);
}) }
.catch(next);
}) })
/** /**
@@ -137,13 +140,16 @@ router
* *
* Delete an existing proxy-host * Delete an existing proxy-host
*/ */
.delete((req, res, next) => { .delete(async (req, res, next) => {
internalProxyHost.delete(res.locals.access, {id: parseInt(req.params.host_id, 10)}) try {
.then((result) => { const result = await internalProxyHost.delete(res.locals.access, {
res.status(200) id: Number.parseInt(req.params.host_id, 10),
.send(result); });
}) res.status(200).send(result);
.catch(next); } catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
/** /**
@@ -152,7 +158,7 @@ router
* /api/nginx/proxy-hosts/123/enable * /api/nginx/proxy-hosts/123/enable
*/ */
router router
.route('/:host_id/enable') .route("/:host_id/enable")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -161,13 +167,16 @@ router
/** /**
* POST /api/nginx/proxy-hosts/123/enable * POST /api/nginx/proxy-hosts/123/enable
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
internalProxyHost.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)}) try {
.then((result) => { const result = await internalProxyHost.enable(res.locals.access, {
res.status(200) id: Number.parseInt(req.params.host_id, 10),
.send(result); });
}) res.status(200).send(result);
.catch(next); } catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
/** /**
@@ -176,7 +185,7 @@ router
* /api/nginx/proxy-hosts/123/disable * /api/nginx/proxy-hosts/123/disable
*/ */
router router
.route('/:host_id/disable') .route("/:host_id/disable")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -185,13 +194,16 @@ router
/** /**
* POST /api/nginx/proxy-hosts/123/disable * POST /api/nginx/proxy-hosts/123/disable
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
internalProxyHost.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)}) try {
.then((result) => { const result = await internalProxyHost.disable(res.locals.access, {
res.status(200) id: Number.parseInt(req.params.host_id, 10),
.send(result); });
}) res.status(200).send(result);
.catch(next); } catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
module.exports = router; export default router;
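
Two different validators appear in these routes: validator(schema, payload) checks the inline JSON schema for query and path parameters, while apiValidator(getValidationSchema(path, method), body) checks the request body against the OpenAPI definition. The diff does not show lib/validator itself; below is only a rough guess at the parameter validator's shape, assuming it wraps the ajv dependency already listed in package.json and resolves with the payload on success (the real module would also need the shared "common" schema registered so the $ref pointers above resolve):

// validator-sketch.js — assumption about lib/validator's shape, not its source.
import Ajv from "ajv";

const ajv = new Ajv({ coerceTypes: true });

const validator = (schema, payload) =>
	new Promise((resolve, reject) => {
		const validate = ajv.compile(schema);
		if (validate(payload)) {
			resolve(payload);
		} else {
			reject(new Error(ajv.errorsText(validate.errors)));
		}
	});

export default validator;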

View File

@@ -1,22 +1,23 @@
const express = require('express'); import express from "express";
const validator = require('../../lib/validator'); import internalRedirectionHost from "../../internal/redirection-host.js";
const jwtdecode = require('../../lib/express/jwt-decode'); import jwtdecode from "../../lib/express/jwt-decode.js";
const apiValidator = require('../../lib/validator/api'); import apiValidator from "../../lib/validator/api.js";
const internalRedirectionHost = require('../../internal/redirection-host'); import validator from "../../lib/validator/index.js";
const schema = require('../../schema'); import { express as logger } from "../../logger.js";
import { getValidationSchema } from "../../schema/index.js";
let router = express.Router({ const router = express.Router({
caseSensitive: true, caseSensitive: true,
strict: true, strict: true,
mergeParams: true mergeParams: true,
}); });
/** /**
* /api/nginx/redirection-hosts * /api/nginx/redirection-hosts
*/ */
router router
.route('/') .route("/")
.options((req, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
.all(jwtdecode()) .all(jwtdecode())
@@ -26,29 +27,31 @@ router
* *
* Retrieve all redirection-hosts * Retrieve all redirection-hosts
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
validator({ try {
const data = await validator(
{
additionalProperties: false, additionalProperties: false,
properties: { properties: {
expand: { expand: {
$ref: 'common#/properties/expand' $ref: "common#/properties/expand",
}, },
query: { query: {
$ref: 'common#/properties/query' $ref: "common#/properties/query",
},
},
},
{
expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
query: typeof req.query.query === "string" ? req.query.query : null,
},
);
const rows = await internalRedirectionHost.getAll(res.locals.access, data.expand, data.query);
res.status(200).send(rows);
} catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
} }
}
}, {
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
query: (typeof req.query.query === 'string' ? req.query.query : null)
})
.then((data) => {
return internalRedirectionHost.getAll(res.locals.access, data.expand, data.query);
})
.then((rows) => {
res.status(200)
.send(rows);
})
.catch(next);
}) })
/** /**
@@ -56,16 +59,15 @@ router
* *
* Create a new redirection-host * Create a new redirection-host
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
apiValidator(schema.getValidationSchema('/nginx/redirection-hosts', 'post'), req.body) try {
.then((payload) => { const payload = await apiValidator(getValidationSchema("/nginx/redirection-hosts", "post"), req.body);
return internalRedirectionHost.create(res.locals.access, payload); const result = await internalRedirectionHost.create(res.locals.access, payload);
}) res.status(201).send(result);
.then((result) => { } catch (err) {
res.status(201) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(result); next(err);
}) }
.catch(next);
}); });
/** /**
@@ -74,8 +76,8 @@ router
* /api/nginx/redirection-hosts/123 * /api/nginx/redirection-hosts/123
*/ */
router router
.route('/:host_id') .route("/:host_id")
.options((req, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
.all(jwtdecode()) .all(jwtdecode())
@@ -85,33 +87,35 @@ router
* *
* Retrieve a specific redirection-host * Retrieve a specific redirection-host
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
validator({ try {
required: ['host_id'], const data = await validator(
{
required: ["host_id"],
additionalProperties: false, additionalProperties: false,
properties: { properties: {
host_id: { host_id: {
$ref: 'common#/properties/id' $ref: "common#/properties/id",
}, },
expand: { expand: {
$ref: 'common#/properties/expand' $ref: "common#/properties/expand",
} },
} },
}, { },
{
host_id: req.params.host_id, host_id: req.params.host_id,
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null) expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
}) },
.then((data) => { );
return internalRedirectionHost.get(res.locals.access, { const row = await internalRedirectionHost.get(res.locals.access, {
id: parseInt(data.host_id, 10), id: Number.parseInt(data.host_id, 10),
expand: data.expand expand: data.expand,
}); });
}) res.status(200).send(row);
.then((row) => { } catch (err) {
res.status(200) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(row); next(err);
}) }
.catch(next);
}) })
/** /**
@@ -119,17 +123,19 @@ router
* *
* Update an existing redirection-host * Update an existing redirection-host
*/ */
.put((req, res, next) => { .put(async (req, res, next) => {
apiValidator(schema.getValidationSchema('/nginx/redirection-hosts/{hostID}', 'put'), req.body) try {
.then((payload) => { const payload = await apiValidator(
payload.id = parseInt(req.params.host_id, 10); getValidationSchema("/nginx/redirection-hosts/{hostID}", "put"),
return internalRedirectionHost.update(res.locals.access, payload); req.body,
}) );
.then((result) => { payload.id = Number.parseInt(req.params.host_id, 10);
res.status(200) const result = await internalRedirectionHost.update(res.locals.access, payload);
.send(result); res.status(200).send(result);
}) } catch (err) {
.catch(next); logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}) })
/** /**
@@ -137,13 +143,16 @@ router
* *
* Delete an existing redirection-host * Delete an existing redirection-host
*/ */
.delete((req, res, next) => { .delete(async (req, res, next) => {
internalRedirectionHost.delete(res.locals.access, {id: parseInt(req.params.host_id, 10)}) try {
.then((result) => { const result = await internalRedirectionHost.delete(res.locals.access, {
res.status(200) id: Number.parseInt(req.params.host_id, 10),
.send(result); });
}) res.status(200).send(result);
.catch(next); } catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
/** /**
@@ -152,8 +161,8 @@ router
* /api/nginx/redirection-hosts/123/enable * /api/nginx/redirection-hosts/123/enable
*/ */
router router
.route('/:host_id/enable') .route("/:host_id/enable")
.options((req, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
.all(jwtdecode()) .all(jwtdecode())
@@ -161,13 +170,16 @@ router
/** /**
* POST /api/nginx/redirection-hosts/123/enable * POST /api/nginx/redirection-hosts/123/enable
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
internalRedirectionHost.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)}) try {
.then((result) => { const result = await internalRedirectionHost.enable(res.locals.access, {
res.status(200) id: Number.parseInt(req.params.host_id, 10),
.send(result); });
}) res.status(200).send(result);
.catch(next); } catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
/** /**
@@ -176,8 +188,8 @@ router
* /api/nginx/redirection-hosts/123/disable * /api/nginx/redirection-hosts/123/disable
*/ */
router router
.route('/:host_id/disable') .route("/:host_id/disable")
.options((req, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
.all(jwtdecode()) .all(jwtdecode())
@@ -185,13 +197,16 @@ router
/** /**
* POST /api/nginx/redirection-hosts/123/disable * POST /api/nginx/redirection-hosts/123/disable
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
internalRedirectionHost.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)}) try {
.then((result) => { const result = await internalRedirectionHost.disable(res.locals.access, {
res.status(200) id: Number.parseInt(req.params.host_id, 10),
.send(result); });
}) res.status(200).send(result);
.catch(next); } catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
module.exports = router; export default router;

View File

@@ -1,22 +1,23 @@
const express = require('express'); import express from "express";
const validator = require('../../lib/validator'); import internalStream from "../../internal/stream.js";
const jwtdecode = require('../../lib/express/jwt-decode'); import jwtdecode from "../../lib/express/jwt-decode.js";
const apiValidator = require('../../lib/validator/api'); import apiValidator from "../../lib/validator/api.js";
const internalStream = require('../../internal/stream'); import validator from "../../lib/validator/index.js";
const schema = require('../../schema'); import { express as logger } from "../../logger.js";
import { getValidationSchema } from "../../schema/index.js";
let router = express.Router({ const router = express.Router({
caseSensitive: true, caseSensitive: true,
strict: true, strict: true,
mergeParams: true mergeParams: true,
}); });
/** /**
* /api/nginx/streams * /api/nginx/streams
*/ */
router router
.route('/') .route("/")
.options((req, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
.all(jwtdecode()) // preferred so it doesn't apply to nonexistent routes .all(jwtdecode()) // preferred so it doesn't apply to nonexistent routes
@@ -26,29 +27,31 @@ router
* *
* Retrieve all streams * Retrieve all streams
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
validator({ try {
const data = await validator(
{
additionalProperties: false, additionalProperties: false,
properties: { properties: {
expand: { expand: {
$ref: 'common#/properties/expand' $ref: "common#/properties/expand",
}, },
query: { query: {
$ref: 'common#/properties/query' $ref: "common#/properties/query",
},
},
},
{
expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
query: typeof req.query.query === "string" ? req.query.query : null,
},
);
const rows = await internalStream.getAll(res.locals.access, data.expand, data.query);
res.status(200).send(rows);
} catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
} }
}
}, {
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
query: (typeof req.query.query === 'string' ? req.query.query : null)
})
.then((data) => {
return internalStream.getAll(res.locals.access, data.expand, data.query);
})
.then((rows) => {
res.status(200)
.send(rows);
})
.catch(next);
}) })
/** /**
@@ -56,16 +59,15 @@ router
* *
* Create a new stream * Create a new stream
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
apiValidator(schema.getValidationSchema('/nginx/streams', 'post'), req.body) try {
.then((payload) => { const payload = await apiValidator(getValidationSchema("/nginx/streams", "post"), req.body);
return internalStream.create(res.locals.access, payload); const result = await internalStream.create(res.locals.access, payload);
}) res.status(201).send(result);
.then((result) => { } catch (err) {
res.status(201) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(result); next(err);
}) }
.catch(next);
}); });
/** /**
@@ -74,8 +76,8 @@ router
* /api/nginx/streams/123 * /api/nginx/streams/123
*/ */
router router
.route('/:stream_id') .route("/:stream_id")
.options((req, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
.all(jwtdecode()) // preferred so it doesn't apply to nonexistent routes .all(jwtdecode()) // preferred so it doesn't apply to nonexistent routes
@@ -85,33 +87,35 @@ router
* *
* Retrieve a specific stream * Retrieve a specific stream
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
validator({ try {
required: ['stream_id'], const data = await validator(
{
required: ["stream_id"],
additionalProperties: false, additionalProperties: false,
properties: { properties: {
stream_id: { stream_id: {
$ref: 'common#/properties/id' $ref: "common#/properties/id",
}, },
expand: { expand: {
$ref: 'common#/properties/expand' $ref: "common#/properties/expand",
} },
} },
}, { },
{
stream_id: req.params.stream_id, stream_id: req.params.stream_id,
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null) expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
}) },
.then((data) => { );
return internalStream.get(res.locals.access, { const row = await internalStream.get(res.locals.access, {
id: parseInt(data.stream_id, 10), id: Number.parseInt(data.stream_id, 10),
expand: data.expand expand: data.expand,
}); });
}) res.status(200).send(row);
.then((row) => { } catch (err) {
res.status(200) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(row); next(err);
}) }
.catch(next);
}) })
/** /**
@@ -119,17 +123,16 @@ router
* *
* Update an existing stream * Update an existing stream
*/ */
.put((req, res, next) => { .put(async (req, res, next) => {
apiValidator(schema.getValidationSchema('/nginx/streams/{streamID}', 'put'), req.body) try {
.then((payload) => { const payload = await apiValidator(getValidationSchema("/nginx/streams/{streamID}", "put"), req.body);
payload.id = parseInt(req.params.stream_id, 10); payload.id = Number.parseInt(req.params.stream_id, 10);
return internalStream.update(res.locals.access, payload); const result = await internalStream.update(res.locals.access, payload);
}) res.status(200).send(result);
.then((result) => { } catch (err) {
res.status(200) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(result); next(err);
}) }
.catch(next);
}) })
/** /**
@@ -137,13 +140,16 @@ router
* *
* Delete an existing stream * Delete an existing stream
*/ */
.delete((req, res, next) => { .delete(async (req, res, next) => {
internalStream.delete(res.locals.access, {id: parseInt(req.params.stream_id, 10)}) try {
.then((result) => { const result = await internalStream.delete(res.locals.access, {
res.status(200) id: Number.parseInt(req.params.stream_id, 10),
.send(result); });
}) res.status(200).send(result);
.catch(next); } catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
/** /**
@@ -152,7 +158,7 @@ router
* /api/nginx/streams/123/enable * /api/nginx/streams/123/enable
*/ */
router router
.route('/:host_id/enable') .route("/:host_id/enable")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -161,13 +167,16 @@ router
/** /**
* POST /api/nginx/streams/123/enable * POST /api/nginx/streams/123/enable
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
internalStream.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)}) try {
.then((result) => { const result = await internalStream.enable(res.locals.access, {
res.status(200) id: Number.parseInt(req.params.host_id, 10),
.send(result); });
}) res.status(200).send(result);
.catch(next); } catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
/** /**
@@ -176,7 +185,7 @@ router
* /api/nginx/streams/123/disable * /api/nginx/streams/123/disable
*/ */
router router
.route('/:host_id/disable') .route("/:host_id/disable")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -185,13 +194,16 @@ router
/** /**
* POST /api/nginx/streams/123/disable * POST /api/nginx/streams/123/disable
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
internalStream.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)}) try {
.then((result) => { const result = await internalStream.disable(res.locals.access, {
res.status(200) id: Number.parseInt(req.params.host_id, 10),
.send(result); });
}) res.status(200).send(result);
.catch(next); } catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
module.exports = router; export default router;

View File

@@ -1,15 +1,16 @@
const express = require('express'); import express from "express";
const jwtdecode = require('../lib/express/jwt-decode'); import internalReport from "../internal/report.js";
const internalReport = require('../internal/report'); import jwtdecode from "../lib/express/jwt-decode.js";
import { express as logger } from "../logger.js";
let router = express.Router({ const router = express.Router({
caseSensitive: true, caseSensitive: true,
strict: true, strict: true,
mergeParams: true mergeParams: true,
}); });
router router
.route('/hosts') .route("/hosts")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -17,13 +18,14 @@ router
/** /**
* GET /reports/hosts * GET /reports/hosts
*/ */
.get(jwtdecode(), (_, res, next) => { .get(jwtdecode(), async (req, res, next) => {
internalReport.getHostsReport(res.locals.access) try {
.then((data) => { const data = await internalReport.getHostsReport(res.locals.access);
res.status(200) res.status(200).send(data);
.send(data); } catch (err) {
}) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.catch(next); next(err);
}
}); });
module.exports = router; export default router;

View File

@@ -1,15 +1,16 @@
const express = require('express'); import express from "express";
const schema = require('../schema'); import { express as logger } from "../logger.js";
const PACKAGE = require('../package.json'); import PACKAGE from "../package.json" with { type: "json" };
import { getCompiledSchema } from "../schema/index.js";
const router = express.Router({ const router = express.Router({
caseSensitive: true, caseSensitive: true,
strict: true, strict: true,
mergeParams: true mergeParams: true,
}); });
router router
.route('/') .route("/")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -18,21 +19,26 @@ router
* GET /schema * GET /schema
*/ */
.get(async (req, res) => { .get(async (req, res, next) => {
let swaggerJSON = await schema.getCompiledSchema(); try {
const swaggerJSON = await getCompiledSchema();
let proto = req.protocol; let proto = req.protocol;
if (typeof req.headers['x-forwarded-proto'] !== 'undefined' && req.headers['x-forwarded-proto']) { if (typeof req.headers["x-forwarded-proto"] !== "undefined" && req.headers["x-forwarded-proto"]) {
proto = req.headers['x-forwarded-proto']; proto = req.headers["x-forwarded-proto"];
} }
let origin = proto + '://' + req.hostname; let origin = `${proto}://${req.hostname}`;
if (typeof req.headers.origin !== 'undefined' && req.headers.origin) { if (typeof req.headers.origin !== "undefined" && req.headers.origin) {
origin = req.headers.origin; origin = req.headers.origin;
} }
swaggerJSON.info.version = PACKAGE.version; swaggerJSON.info.version = PACKAGE.version;
swaggerJSON.servers[0].url = origin + '/api'; swaggerJSON.servers[0].url = `${origin}/api`;
res.status(200).send(swaggerJSON); res.status(200).send(swaggerJSON);
} catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
module.exports = router; export default router;
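The forwarded-proto/origin juggling above reduces to a small pure function. This is an illustrative sketch only (resolveApiUrl is not part of the repo) and assumes a standard Express request object:

// Prefer the reverse-proxy headers when present, otherwise fall back to what Express reports,
// and point the swagger "servers" entry at "<origin>/api".
const resolveApiUrl = (req) => {
    const proto = req.headers["x-forwarded-proto"] || req.protocol;
    const origin = req.headers.origin || `${proto}://${req.hostname}`;
    return `${origin}/api`;
};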


@@ -1,21 +1,22 @@
-const express = require('express');
-const validator = require('../lib/validator');
-const jwtdecode = require('../lib/express/jwt-decode');
-const apiValidator = require('../lib/validator/api');
-const internalSetting = require('../internal/setting');
-const schema = require('../schema');
+import express from "express";
+import internalSetting from "../internal/setting.js";
+import jwtdecode from "../lib/express/jwt-decode.js";
+import apiValidator from "../lib/validator/api.js";
+import validator from "../lib/validator/index.js";
+import { express as logger } from "../logger.js";
+import { getValidationSchema } from "../schema/index.js";

-let router = express.Router({
+const router = express.Router({
     caseSensitive: true,
     strict: true,
-    mergeParams: true
+    mergeParams: true,
 });

 /**
  * /api/settings
  */
 router
-    .route('/')
+    .route("/")
     .options((_, res) => {
         res.sendStatus(204);
     })
@@ -26,13 +27,14 @@ router
      *
      * Retrieve all settings
      */
-    .get((_, res, next) => {
-        internalSetting.getAll(res.locals.access)
-            .then((rows) => {
-                res.status(200)
-                    .send(rows);
-            })
-            .catch(next);
+    .get(async (req, res, next) => {
+        try {
+            const rows = await internalSetting.getAll(res.locals.access);
+            res.status(200).send(rows);
+        } catch (err) {
+            logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+            next(err);
+        }
     });

 /**
@@ -41,7 +43,7 @@ router
  * /api/settings/something
  */
 router
-    .route('/:setting_id')
+    .route("/:setting_id")
     .options((_, res) => {
         res.sendStatus(204);
     })
@@ -52,29 +54,31 @@ router
      *
      * Retrieve a specific setting
      */
-    .get((req, res, next) => {
-        validator({
-            required: ['setting_id'],
-            additionalProperties: false,
-            properties: {
-                setting_id: {
-                    type: 'string',
-                    minLength: 1
-                }
-            }
-        }, {
-            setting_id: req.params.setting_id
-        })
-            .then((data) => {
-                return internalSetting.get(res.locals.access, {
-                    id: data.setting_id
-                });
-            })
-            .then((row) => {
-                res.status(200)
-                    .send(row);
-            })
-            .catch(next);
+    .get(async (req, res, next) => {
+        try {
+            const data = await validator(
+                {
+                    required: ["setting_id"],
+                    additionalProperties: false,
+                    properties: {
+                        setting_id: {
+                            type: "string",
+                            minLength: 1,
+                        },
+                    },
+                },
+                {
+                    setting_id: req.params.setting_id,
+                },
+            );
+            const row = await internalSetting.get(res.locals.access, {
+                id: data.setting_id,
+            });
+            res.status(200).send(row);
+        } catch (err) {
+            logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+            next(err);
+        }
     })

 /**
@@ -82,17 +86,16 @@ router
      *
      * Update and existing setting
      */
-    .put((req, res, next) => {
-        apiValidator(schema.getValidationSchema('/settings/{settingID}', 'put'), req.body)
-            .then((payload) => {
-                payload.id = req.params.setting_id;
-                return internalSetting.update(res.locals.access, payload);
-            })
-            .then((result) => {
-                res.status(200)
-                    .send(result);
-            })
-            .catch(next);
+    .put(async (req, res, next) => {
+        try {
+            const payload = await apiValidator(getValidationSchema("/settings/{settingID}", "put"), req.body);
+            payload.id = req.params.setting_id;
+            const result = await internalSetting.update(res.locals.access, payload);
+            res.status(200).send(result);
+        } catch (err) {
+            logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+            next(err);
+        }
     });

-module.exports = router;
+export default router;


@@ -1,17 +1,18 @@
-const express = require('express');
-const jwtdecode = require('../lib/express/jwt-decode');
-const apiValidator = require('../lib/validator/api');
-const internalToken = require('../internal/token');
-const schema = require('../schema');
+import express from "express";
+import internalToken from "../internal/token.js";
+import jwtdecode from "../lib/express/jwt-decode.js";
+import apiValidator from "../lib/validator/api.js";
+import { express as logger } from "../logger.js";
+import { getValidationSchema } from "../schema/index.js";

-let router = express.Router({
+const router = express.Router({
     caseSensitive: true,
     strict: true,
-    mergeParams: true
+    mergeParams: true,
 });

 router
-    .route('/')
+    .route("/")
     .options((_, res) => {
         res.sendStatus(204);
     })
@@ -23,16 +24,17 @@ router
      * We also piggy back on to this method, allowing admins to get tokens
      * for services like Job board and Worker.
      */
-    .get(jwtdecode(), (req, res, next) => {
-        internalToken.getFreshToken(res.locals.access, {
-            expiry: (typeof req.query.expiry !== 'undefined' ? req.query.expiry : null),
-            scope: (typeof req.query.scope !== 'undefined' ? req.query.scope : null)
-        })
-            .then((data) => {
-                res.status(200)
-                    .send(data);
-            })
-            .catch(next);
+    .get(jwtdecode(), async (req, res, next) => {
+        try {
+            const data = await internalToken.getFreshToken(res.locals.access, {
+                expiry: typeof req.query.expiry !== "undefined" ? req.query.expiry : null,
+                scope: typeof req.query.scope !== "undefined" ? req.query.scope : null,
+            });
+            res.status(200).send(data);
+        } catch (err) {
+            logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+            next(err);
+        }
     })

 /**
@@ -41,13 +43,14 @@ router
      * Create a new Token
      */
     .post(async (req, res, next) => {
-        apiValidator(schema.getValidationSchema('/tokens', 'post'), req.body)
-            .then(internalToken.getTokenFromEmail)
-            .then((data) => {
-                res.status(200)
-                    .send(data);
-            })
-            .catch(next);
+        try {
+            const data = await apiValidator(getValidationSchema("/tokens", "post"), req.body);
+            const result = await internalToken.getTokenFromEmail(data);
+            res.status(200).send(result);
+        } catch (err) {
+            logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+            next(err);
+        }
     });

-module.exports = router;
+export default router;
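For context, the GET handler above accepts optional expiry and scope query parameters. A hedged client-side sketch (the bearer-token header and the expiry value format are assumptions; they are not shown in this diff):

// Illustrative only: ask the API for a fresh token with an explicit expiry.
// "currentToken" is whatever JWT the client already holds.
const refreshToken = async (currentToken) => {
    const response = await fetch("/api/tokens?expiry=30d", {
        headers: { Authorization: `Bearer ${currentToken}` },
    });
    return response.json(); // expected to match the token object schema further below
};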


@@ -1,22 +1,27 @@
-const express = require('express');
-const validator = require('../lib/validator');
-const jwtdecode = require('../lib/express/jwt-decode');
-const userIdFromMe = require('../lib/express/user-id-from-me');
-const internalUser = require('../internal/user');
-const apiValidator = require('../lib/validator/api');
-const schema = require('../schema');
+import express from "express";
+import internalUser from "../internal/user.js";
+import Access from "../lib/access.js";
+import { isCI } from "../lib/config.js";
+import errs from "../lib/error.js";
+import jwtdecode from "../lib/express/jwt-decode.js";
+import userIdFromMe from "../lib/express/user-id-from-me.js";
+import apiValidator from "../lib/validator/api.js";
+import validator from "../lib/validator/index.js";
+import { express as logger } from "../logger.js";
+import { getValidationSchema } from "../schema/index.js";
+import { isSetup } from "../setup.js";

-let router = express.Router({
+const router = express.Router({
     caseSensitive: true,
     strict: true,
-    mergeParams: true
+    mergeParams: true,
 });

 /**
  * /api/users
  */
 router
-    .route('/')
+    .route("/")
     .options((_, res) => {
         res.sendStatus(204);
     })
@@ -27,33 +32,38 @@ router
      *
      * Retrieve all users
      */
-    .get((req, res, next) => {
-        validator({
-            additionalProperties: false,
-            properties: {
-                expand: {
-                    $ref: 'common#/properties/expand'
-                },
-                query: {
-                    $ref: 'common#/properties/query'
-                }
-            }
-        }, {
-            expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
-            query: (typeof req.query.query === 'string' ? req.query.query : null)
-        })
-            .then((data) => {
-                return internalUser.getAll(res.locals.access, data.expand, data.query);
-            })
-            .then((users) => {
-                res.status(200)
-                    .send(users);
-            })
-            .catch((err) => {
-                console.log(err);
-                next(err);
-            });
-        //.catch(next);
+    .get(async (req, res, next) => {
+        try {
+            const data = await validator(
+                {
+                    additionalProperties: false,
+                    properties: {
+                        expand: {
+                            $ref: "common#/properties/expand",
+                        },
+                        query: {
+                            $ref: "common#/properties/query",
+                        },
+                    },
+                },
+                {
+                    expand:
+                        typeof req.query.expand === "string"
+                            ? req.query.expand.split(",")
+                            : null,
+                    query: typeof req.query.query === "string" ? req.query.query : null,
+                },
+            );
+            const users = await internalUser.getAll(
+                res.locals.access,
+                data.expand,
+                data.query,
+            );
+            res.status(200).send(users);
+        } catch (err) {
+            logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+            next(err);
+        }
     })

 /**
@@ -61,16 +71,66 @@ router
      *
      * Create a new User
      */
-    .post((req, res, next) => {
-        apiValidator(schema.getValidationSchema('/users', 'post'), req.body)
-            .then((payload) => {
-                return internalUser.create(res.locals.access, payload);
-            })
-            .then((result) => {
-                res.status(201)
-                    .send(result);
-            })
-            .catch(next);
+    .post(async (req, res, next) => {
+        const body = req.body;
+
+        try {
+            // If we are in setup mode, we don't check access for the current user
+            const setup = await isSetup();
+            if (!setup) {
+                logger.info("Creating a new user in setup mode");
+                const access = new Access(null);
+                await access.load(true);
+                res.locals.access = access;
+
+                // We are in setup mode, set some defaults for this first new user,
+                // such as making them an admin.
+                body.is_disabled = false;
+                if (typeof body.roles !== "object" || body.roles === null) {
+                    body.roles = [];
+                }
+                if (body.roles.indexOf("admin") === -1) {
+                    body.roles.push("admin");
+                }
+            }
+
+            const payload = await apiValidator(
+                getValidationSchema("/users", "post"),
+                body,
+            );
+            const user = await internalUser.create(res.locals.access, payload);
+            res.status(201).send(user);
+        } catch (err) {
+            logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+            next(err);
+        }
+    })
+
+    /**
+     * DELETE /api/users
+     *
+     * Deletes ALL users. This is NOT GENERALLY AVAILABLE!
+     * (!) It is NOT an authenticated endpoint.
+     * (!) Only CI should be able to call this endpoint. As a result,
+     * it will only work when the env vars DEBUG=true and CI=true are set.
+     *
+     * Do NOT set those env vars in a production environment!
+     */
+    .delete(async (req, res, next) => {
+        if (isCI()) {
+            try {
+                logger.warn("Deleting all users - CI environment detected, allowing this operation");
+                await internalUser.deleteAll();
+                res.status(200).send(true);
+            } catch (err) {
+                logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+                next(err);
+            }
+            return;
+        }
+        next(new errs.ItemNotFoundError());
     });

 /**
@@ -79,7 +139,7 @@ router
  * /api/users/123
  */
 router
-    .route('/:user_id')
+    .route("/:user_id")
     .options((_, res) => {
         res.sendStatus(204);
     })
@@ -91,37 +151,43 @@ router
      *
      * Retrieve a specific user
      */
-    .get((req, res, next) => {
-        validator({
-            required: ['user_id'],
-            additionalProperties: false,
-            properties: {
-                user_id: {
-                    $ref: 'common#/properties/id'
-                },
-                expand: {
-                    $ref: 'common#/properties/expand'
-                }
-            }
-        }, {
-            user_id: req.params.user_id,
-            expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
-        })
-            .then((data) => {
-                return internalUser.get(res.locals.access, {
-                    id: data.user_id,
-                    expand: data.expand,
-                    omit: internalUser.getUserOmisionsByAccess(res.locals.access, data.user_id)
-                });
-            })
-            .then((user) => {
-                res.status(200)
-                    .send(user);
-            })
-            .catch((err) => {
-                console.log(err);
-                next(err);
-            });
+    .get(async (req, res, next) => {
+        try {
+            const data = await validator(
+                {
+                    required: ["user_id"],
+                    additionalProperties: false,
+                    properties: {
+                        user_id: {
+                            $ref: "common#/properties/id",
+                        },
+                        expand: {
+                            $ref: "common#/properties/expand",
+                        },
+                    },
+                },
+                {
+                    user_id: req.params.user_id,
+                    expand:
+                        typeof req.query.expand === "string"
+                            ? req.query.expand.split(",")
+                            : null,
+                },
+            );
+            const user = await internalUser.get(res.locals.access, {
+                id: data.user_id,
+                expand: data.expand,
+                omit: internalUser.getUserOmisionsByAccess(
+                    res.locals.access,
+                    data.user_id,
+                ),
+            });
+            res.status(200).send(user);
+        } catch (err) {
+            logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+            next(err);
+        }
     })

 /**
@@ -129,17 +195,19 @@ router
      *
      * Update and existing user
      */
-    .put((req, res, next) => {
-        apiValidator(schema.getValidationSchema('/users/{userID}', 'put'), req.body)
-            .then((payload) => {
-                payload.id = req.params.user_id;
-                return internalUser.update(res.locals.access, payload);
-            })
-            .then((result) => {
-                res.status(200)
-                    .send(result);
-            })
-            .catch(next);
+    .put(async (req, res, next) => {
+        try {
+            const payload = await apiValidator(
+                getValidationSchema("/users/{userID}", "put"),
+                req.body,
+            );
+            payload.id = req.params.user_id;
+            const result = await internalUser.update(res.locals.access, payload);
+            res.status(200).send(result);
+        } catch (err) {
+            logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+            next(err);
+        }
     })

 /**
@@ -147,13 +215,16 @@ router
      *
      * Update and existing user
      */
-    .delete((req, res, next) => {
-        internalUser.delete(res.locals.access, {id: req.params.user_id})
-            .then((result) => {
-                res.status(200)
-                    .send(result);
-            })
-            .catch(next);
+    .delete(async (req, res, next) => {
+        try {
+            const result = await internalUser.delete(res.locals.access, {
+                id: req.params.user_id,
+            });
+            res.status(200).send(result);
+        } catch (err) {
+            logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+            next(err);
+        }
     });

 /**
@@ -162,8 +233,8 @@ router
  * /api/users/123/auth
  */
 router
-    .route('/:user_id/auth')
-    .options((req, res) => {
+    .route("/:user_id/auth")
+    .options((_, res) => {
         res.sendStatus(204);
     })
     .all(jwtdecode())
@@ -174,17 +245,19 @@ router
      *
      * Update password for a user
      */
-    .put((req, res, next) => {
-        apiValidator(schema.getValidationSchema('/users/{userID}/auth', 'put'), req.body)
-            .then((payload) => {
-                payload.id = req.params.user_id;
-                return internalUser.setPassword(res.locals.access, payload);
-            })
-            .then((result) => {
-                res.status(201)
-                    .send(result);
-            })
-            .catch(next);
+    .put(async (req, res, next) => {
+        try {
+            const payload = await apiValidator(
+                getValidationSchema("/users/{userID}/auth", "put"),
+                req.body,
+            );
+            payload.id = req.params.user_id;
+            const result = await internalUser.setPassword(res.locals.access, payload);
+            res.status(200).send(result);
+        } catch (err) {
+            logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+            next(err);
+        }
     });

 /**
@@ -193,8 +266,8 @@ router
  * /api/users/123/permissions
  */
 router
-    .route('/:user_id/permissions')
-    .options((req, res) => {
+    .route("/:user_id/permissions")
+    .options((_, res) => {
         res.sendStatus(204);
     })
     .all(jwtdecode())
@@ -205,17 +278,22 @@ router
      *
      * Set some or all permissions for a user
      */
-    .put((req, res, next) => {
-        apiValidator(schema.getValidationSchema('/users/{userID}/permissions', 'put'), req.body)
-            .then((payload) => {
-                payload.id = req.params.user_id;
-                return internalUser.setPermissions(res.locals.access, payload);
-            })
-            .then((result) => {
-                res.status(201)
-                    .send(result);
-            })
-            .catch(next);
+    .put(async (req, res, next) => {
+        try {
+            const payload = await apiValidator(
+                getValidationSchema("/users/{userID}/permissions", "put"),
+                req.body,
+            );
+            payload.id = req.params.user_id;
+            const result = await internalUser.setPermissions(
+                res.locals.access,
+                payload,
+            );
+            res.status(200).send(result);
+        } catch (err) {
+            logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+            next(err);
+        }
     });

 /**
@@ -224,7 +302,7 @@ router
  * /api/users/123/login
  */
 router
-    .route('/:user_id/login')
+    .route("/:user_id/login")
     .options((_, res) => {
         res.sendStatus(204);
     })
@@ -235,13 +313,16 @@ router
      *
      * Log in as a user
      */
-    .post((req, res, next) => {
-        internalUser.loginAs(res.locals.access, {id: parseInt(req.params.user_id, 10)})
-            .then((result) => {
-                res.status(201)
-                    .send(result);
-            })
-            .catch(next);
+    .post(async (req, res, next) => {
+        try {
+            const result = await internalUser.loginAs(res.locals.access, {
+                id: Number.parseInt(req.params.user_id, 10),
+            });
+            res.status(200).send(result);
+        } catch (err) {
+            logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+            next(err);
+        }
     });

-module.exports = router;
+export default router;
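The setup-mode branch in the POST handler above amounts to forcing safe defaults onto the very first account. Purely as an illustration (this helper does not exist in the codebase), the same defaults can be expressed as:

// Mirror of the setup-mode defaults: the first user is always enabled and always gets the admin role.
const applyFirstUserDefaults = (body) => {
    body.is_disabled = false;
    if (typeof body.roles !== "object" || body.roles === null) {
        body.roles = [];
    }
    if (body.roles.indexOf("admin") === -1) {
        body.roles.push("admin");
    }
    return body;
};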


@@ -110,6 +110,11 @@
     "caching_enabled": {
       "description": "Should we cache assets",
       "type": "boolean"
+    },
+    "email": {
+      "description": "Email address",
+      "type": "string",
+      "pattern": "^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\\.[A-Za-z]{2,}$"
     }
   }
 }
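The new shared email pattern can be checked directly, since JSON Schema patterns use ECMAScript regex syntax. A quick illustrative test:

// Same pattern as the "email" property above, written as a JS RegExp.
const emailPattern = /^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}$/;

console.log(emailPattern.test("admin@example.com")); // true
console.log(emailPattern.test("not-an-email"));      // false
console.log(emailPattern.test("user@host"));         // false - requires a dot and a 2+ letter TLD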


@@ -69,7 +69,7 @@
       "type": "object"
     },
     "letsencrypt_email": {
-      "type": "string"
+      "$ref": "../common.json#/properties/email"
     },
     "propagation_seconds": {
       "type": "integer",


@@ -9,6 +9,11 @@
       "description": "Healthy",
       "example": "OK"
     },
+    "setup": {
+      "type": "boolean",
+      "description": "Whether the initial setup has been completed",
+      "example": true
+    },
     "version": {
       "type": "object",
       "description": "The version object",


@@ -22,8 +22,7 @@
     "enabled",
     "locations",
     "hsts_enabled",
-    "hsts_subdomains",
-    "certificate"
+    "hsts_subdomains"
   ],
   "additionalProperties": false,
   "properties": {


@@ -1,7 +1,7 @@
 {
   "type": "array",
-  "description": "Proxy Hosts list",
+  "description": "Streams list",
   "items": {
-    "$ref": "./proxy-host-object.json"
+    "$ref": "./stream-object.json"
   }
 }


@@ -53,8 +53,24 @@
     "enabled": {
       "$ref": "../common.json#/properties/enabled"
     },
+    "certificate_id": {
+      "$ref": "../common.json#/properties/certificate_id"
+    },
     "meta": {
       "type": "object"
+    },
+    "owner": {
+      "$ref": "./user-object.json"
+    },
+    "certificate": {
+      "oneOf": [
+        {
+          "type": "null"
+        },
+        {
+          "$ref": "./certificate-object.json"
+        }
+      ]
     }
   }
 }


@@ -5,10 +5,9 @@
   "additionalProperties": false,
   "properties": {
     "expires": {
-      "description": "Token Expiry Unix Time",
-      "example": 1566540249,
-      "minimum": 1,
-      "type": "number"
+      "description": "Token Expiry ISO Time String",
+      "example": "2025-02-04T20:40:46.340Z",
+      "type": "string"
     },
     "token": {
       "description": "JWT Token",


@@ -54,6 +54,63 @@
       "items": {
         "type": "string"
       }
+    },
+    "permissions": {
+      "type": "object",
+      "description": "Permissions if expanded in request",
+      "required": [
+        "visibility",
+        "proxy_hosts",
+        "redirection_hosts",
+        "dead_hosts",
+        "streams",
+        "access_lists",
+        "certificates"
+      ],
+      "properties": {
+        "visibility": {
+          "type": "string",
+          "description": "Visibility level",
+          "example": "all",
+          "pattern": "^(all|user)$"
+        },
+        "proxy_hosts": {
+          "type": "string",
+          "description": "Proxy Hosts access level",
+          "example": "all",
+          "pattern": "^(manage|view|hidden)$"
+        },
+        "redirection_hosts": {
+          "type": "string",
+          "description": "Redirection Hosts access level",
+          "example": "all",
+          "pattern": "^(manage|view|hidden)$"
+        },
+        "dead_hosts": {
+          "type": "string",
+          "description": "Dead Hosts access level",
+          "example": "all",
+          "pattern": "^(manage|view|hidden)$"
+        },
+        "streams": {
+          "type": "string",
+          "description": "Streams access level",
+          "example": "all",
+          "pattern": "^(manage|view|hidden)$"
+        },
+        "access_lists": {
+          "type": "string",
+          "description": "Access Lists access level",
+          "example": "all",
+          "pattern": "^(manage|view|hidden)$"
+        },
+        "certificates": {
+          "type": "string",
+          "description": "Certificates access level",
+          "example": "all",
+          "pattern": "^(manage|view|hidden)$"
+        }
+      }
     }
   }
 }
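An illustrative payload that satisfies the new permissions sub-schema (values picked from the allowed enums above):

// Example object matching the expanded "permissions" schema.
const examplePermissions = {
    visibility: "user",        // "all" or "user"
    proxy_hosts: "manage",     // "manage", "view" or "hidden"
    redirection_hosts: "view",
    dead_hosts: "hidden",
    streams: "view",
    access_lists: "manage",
    certificates: "view",
};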


@@ -1,21 +1,24 @@
-const refParser = require('@apidevtools/json-schema-ref-parser');
+import { dirname } from "node:path";
+import { fileURLToPath } from "node:url";
+import $RefParser from "@apidevtools/json-schema-ref-parser";
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = dirname(__filename);

 let compiledSchema = null;

-module.exports = {
 /**
  * Compiles the schema, by dereferencing it, only once
  * and returns the memory cached value
  */
-    getCompiledSchema: async () => {
+const getCompiledSchema = async () => {
     if (compiledSchema === null) {
-        compiledSchema = await refParser.dereference(__dirname + '/swagger.json', {
+        compiledSchema = await $RefParser.dereference(`${__dirname}/swagger.json`, {
             mutateInputSchema: false,
         });
     }
     return compiledSchema;
-    },
+};

 /**
  * Scans the schema for the validation schema for the given path and method
@@ -25,17 +28,19 @@ module.exports = {
  * @param {string} method
  * @returns string|null
  */
-    getValidationSchema: (path, method) => {
-        if (compiledSchema !== null &&
-            typeof compiledSchema.paths[path] !== 'undefined' &&
-            typeof compiledSchema.paths[path][method] !== 'undefined' &&
-            typeof compiledSchema.paths[path][method].requestBody !== 'undefined' &&
-            typeof compiledSchema.paths[path][method].requestBody.content !== 'undefined' &&
-            typeof compiledSchema.paths[path][method].requestBody.content['application/json'] !== 'undefined' &&
-            typeof compiledSchema.paths[path][method].requestBody.content['application/json'].schema !== 'undefined'
+const getValidationSchema = (path, method) => {
+    if (
+        compiledSchema !== null &&
+        typeof compiledSchema.paths[path] !== "undefined" &&
+        typeof compiledSchema.paths[path][method] !== "undefined" &&
+        typeof compiledSchema.paths[path][method].requestBody !== "undefined" &&
+        typeof compiledSchema.paths[path][method].requestBody.content !== "undefined" &&
+        typeof compiledSchema.paths[path][method].requestBody.content["application/json"] !== "undefined" &&
+        typeof compiledSchema.paths[path][method].requestBody.content["application/json"].schema !== "undefined"
     ) {
-        return compiledSchema.paths[path][method].requestBody.content['application/json'].schema;
+        return compiledSchema.paths[path][method].requestBody.content["application/json"].schema;
     }
     return null;
-    }
 };
+
+export { getCompiledSchema, getValidationSchema };
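A short usage sketch of the new named exports (the import path is an assumption relative to the backend root). Note that getValidationSchema only returns a schema once getCompiledSchema has populated the memoized copy; before that it returns null:

import { getCompiledSchema, getValidationSchema } from "./schema/index.js";

// Dereference and cache the swagger document once...
await getCompiledSchema();

// ...then look up the request-body schema for a path/method pair.
const putSettingSchema = getValidationSchema("/settings/{settingID}", "put");
if (putSettingSchema === null) {
    console.log("no request body schema declared for this route");
}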


@@ -11,6 +11,7 @@
   "default": {
     "value": {
       "status": "OK",
+      "setup": true,
       "version": {
         "major": 2,
         "minor": 1,


@@ -49,8 +49,7 @@
       "minLength": 1
     },
     "password": {
-      "type": "string",
-      "minLength": 1
+      "type": "string"
     }
   }
 }


@@ -14,7 +14,7 @@
       "description": "Expansions",
       "schema": {
         "type": "string",
-        "enum": ["access_list", "owner", "certificate"]
+        "enum": ["owner", "certificate"]
       }
     }
   ],
@@ -40,7 +40,8 @@
           "nginx_online": true,
           "nginx_err": null
         },
-        "enabled": true
+        "enabled": true,
+        "certificate_id": 0
       }
     ]
   }


@@ -32,6 +32,9 @@
     "udp_forwarding": {
       "$ref": "../../../components/stream-object.json#/properties/udp_forwarding"
     },
+    "certificate_id": {
+      "$ref": "../../../components/stream-object.json#/properties/certificate_id"
+    },
     "meta": {
       "$ref": "../../../components/stream-object.json#/properties/meta"
     }
@@ -73,7 +76,8 @@
             "nickname": "Admin",
             "avatar": "",
             "roles": ["admin"]
-          }
+          },
+          "certificate_id": 0
         }
       }
     },


@@ -40,7 +40,8 @@
           "nginx_online": true,
           "nginx_err": null
         },
-        "enabled": true
+        "enabled": true,
+        "certificate_id": 0
       }
     }
   },

Some files were not shown because too many files have changed in this diff.