Mirror of https://github.com/NginxProxyManager/nginx-proxy-manager.git (synced 2025-08-02 07:23:38 +00:00)

update nginx/dep updates/fix eslint/change line endings
Signed-off-by: Zoey <zoey@z0ey.de>

1 .gitattributes (vendored, new file)
@@ -0,0 +1 @@
* text=auto

6 .github/workflows/docker.yml (vendored)
@@ -10,6 +10,7 @@ on:
- backend/**
- global/**
- rootfs/**
- src/**
pull_request:
paths:
- .github/workflows/docker.yml
@@ -18,6 +19,7 @@ on:
- backend/**
- global/**
- rootfs/**
- src/**
workflow_dispatch:
jobs:
build:
@@ -68,8 +70,6 @@ jobs:
tags: |
${{ steps.un.outputs.un }}/${{ steps.rn.outputs.rn }}:${{ github.ref_name }}
ghcr.io/${{ steps.un.outputs.un }}/${{ steps.rn.outputs.rn }}:${{ github.ref_name }}
build-args: |
"BUILD=${{ steps.rn.outputs.rn }}"
- name: show version
if: ${{ github.event_name != 'pull_request' }}
run: |
@@ -88,8 +88,6 @@ jobs:
platforms: linux/amd64,linux/arm64 #,linux/amd64/v2,linux/amd64/v3,linux/amd64/v4 #,linux/ppc64le,linux/s390x,linux/386,linux/arm/v7,linux/arm/v6
push: ${{ github.event_name == 'pull_request' }}
tags: ghcr.io/${{ steps.un.outputs.un }}/${{ steps.rn.outputs.rn }}:${{ steps.pr.outputs.pr }}
build-args: |
"BUILD=${{ steps.rn.outputs.rn }}"
- name: show version (PR)
if: ${{ github.event_name == 'pull_request' }}
run: docker run --rm --entrypoint nginx ghcr.io/${{ steps.un.outputs.un }}/${{ steps.rn.outputs.rn }}:${{ steps.pr.outputs.pr }} -V

3 .github/workflows/dockerlint.yml (vendored)
@@ -18,9 +18,10 @@ jobs:
run: |
DOCKERFILES="$(find . -name "*Dockerfile*")"
for file in $(echo "$DOCKERFILES" | tr " " "\n"); do
# DL3003 warning: Use WORKDIR to switch to a directory
# DL3018 warning: Pin versions in apk add. Instead of `apk add <package>` use `apk add <package>=<version>`
# DL3013 warning: Pin versions in pip. Instead of `pip install <package>` use `pip install <package>==<version>` or `pip install --requirement <requirements file>`
hadolint "$file" --ignore DL3013 --ignore DL3018 | tee -a hadolint.log
hadolint "$file" --ignore DL3003 --ignore DL3013 --ignore DL3018 | tee -a hadolint.log
done
if grep -q "DL[0-9]\+\|SC[0-9]\+" hadolint.log; then
exit 1
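
The newly ignored DL3003 rule means Dockerfiles in this repository may switch directories with cd instead of WORKDIR without failing the lint. A one-off local check equivalent to what the workflow now runs (same flags as above, assuming hadolint is installed locally) would be:

    hadolint Dockerfile --ignore DL3003 --ignore DL3013 --ignore DL3018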

2 .github/workflows/json.yml (vendored)
@@ -11,4 +11,4 @@ jobs:
- name: json-syntax-check
uses: limitusus/json-syntax-check@v2
with:
pattern: "\\.json$*"
pattern: "\\.json"

2 .github/workflows/spellcheck.yml (vendored)
@@ -15,4 +15,4 @@ jobs:
with:
check_filenames: true
check_hidden: true
skip: .gitignore,block-exploits.conf,showdown.min.js,jquery.min.js,xregexp-all.js
skip: .git,.gitignore,showdown.min.js,jquery.min.js,xregexp-all.js

10 .github/workflows/update-and-lint.yml (vendored)
@@ -1,6 +1,8 @@
name: update-and-lint
on:
push:
branches:
- develop
schedule:
- cron: "0 */6 * * *"
workflow_dispatch:
@@ -13,7 +15,7 @@ jobs:
uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: 19
node-version: 21
- name: eslint
run: |
cd backend
@@ -23,9 +25,9 @@ jobs:
run: |
curl -L https://unpkg.com/xregexp/xregexp-all.js -o rootfs/nftd/xregexp-all.js
curl -L https://unpkg.com/showdown/dist/showdown.min.js -o rootfs/nftd/showdown.min.js
curl -L https://code.jquery.com/jquery-"$(git ls-remote --tags https://github.com/jquery/jquery | cut -d/ -f3 | sort -V | tail -1 | sed -E "s/\^\{\}//")".min.js -o rootfs/nftd/jquery.min.js
curl -L https://cdn.jsdelivr.net/npm/bootstrap@"$(git ls-remote --tags https://github.com/twbs/bootstrap v3.3.* | cut -d/ -f3 | sort -V | tail -1 | sed -E "s/\^\{\}//")"/dist/css/bootstrap.min.css -o rootfs/html/404/bootstrap.min.css
curl -L https://cdn.jsdelivr.net/npm/bootstrap@"$(git ls-remote --tags https://github.com/twbs/bootstrap v3.3.* | cut -d/ -f3 | sort -V | tail -1 | sed -E "s/\^\{\}//")"/dist/css/bootstrap.min.css -o rootfs/html/default/bootstrap.min.css
curl -L https://code.jquery.com/jquery-"$(git ls-remote --tags https://github.com/jquery/jquery | cut -d/ -f3 | sort -V | tail -1)".min.js -o rootfs/nftd/jquery.min.js
curl -L https://cdn.jsdelivr.net/npm/bootstrap@"$(git ls-remote --tags https://github.com/twbs/bootstrap v3.3.* | cut -d/ -f3 | sort -V | tail -1)"/dist/css/bootstrap.min.css -o rootfs/html/404/bootstrap.min.css
curl -L https://cdn.jsdelivr.net/npm/bootstrap@"$(git ls-remote --tags https://github.com/twbs/bootstrap v3.3.* | cut -d/ -f3 | sort -V | tail -1)"/dist/css/bootstrap.min.css -o rootfs/html/default/bootstrap.min.css
- name: nginxbeautifier
run: |
yarn global add nginxbeautifier

20 .github/workflows/yq.yml (vendored, deleted)
@@ -1,20 +0,0 @@
name: yq
on:
workflow_dispatch:
jobs:
yq:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
token: ${{ secrets.YQ }}
- name: update workflows
run: for workflow in .github/workflows/*.yml; do yq "$workflow" | tee "$workflow".tmp && mv "$workflow".tmp "$workflow"; done
- name: push changes
run: |
git config user.name "GitHub"
git config user.email "noreply@github.com"
git add -A
git diff-index --quiet HEAD || git commit -sm "yq"
git push

10 Dockerfile
@@ -56,14 +56,14 @@ RUN apk upgrade --no-cache -a && \
echo "APPSEC_FAILURE_ACTION=deny" | tee -a /src/crowdsec-nginx-bouncer/lua-mod/config_example.conf && \
sed -i "s|BOUNCING_ON_TYPE=all|BOUNCING_ON_TYPE=ban|g" /src/crowdsec-nginx-bouncer/lua-mod/config_example.conf

FROM zoeyvid/nginx-quic:262
FROM zoeyvid/nginx-quic:271
SHELL ["/bin/ash", "-eo", "pipefail", "-c"]

ARG CRS_VER=v4.1.0

COPY rootfs /
COPY --from=zoeyvid/certbot-docker:27 /usr/local /usr/local
COPY --from=zoeyvid/curl-quic:376 /usr/local/bin/curl /usr/local/bin/curl
COPY --from=zoeyvid/certbot-docker:34 /usr/local /usr/local
COPY --from=zoeyvid/curl-quic:380 /usr/local/bin/curl /usr/local/bin/curl

RUN apk upgrade --no-cache -a && \
apk add --no-cache ca-certificates tzdata tini \
@@ -145,3 +145,7 @@ ENV PUID=0 \
WORKDIR /app
ENTRYPOINT ["tini", "--", "entrypoint.sh"]
HEALTHCHECK CMD healthcheck.sh
EXPOSE 80/tcp
EXPOSE 81/tcp
EXPOSE 443/tcp
EXPOSE 443/udp

@@ -1,73 +0,0 @@
{
	"env": {
		"node": true,
		"es6": true
	},
	"extends": [
		"eslint:recommended"
	],
	"globals": {
		"Atomics": "readonly",
		"SharedArrayBuffer": "readonly"
	},
	"parserOptions": {
		"ecmaVersion": 2018,
		"sourceType": "module"
	},
	"plugins": [
		"align-assignments"
	],
	"rules": {
		"arrow-parens": [
			"error",
			"always"
		],
		"indent": [
			"error",
			"tab"
		],
		"linebreak-style": [
			"error",
			"unix"
		],
		"quotes": [
			"error",
			"single"
		],
		"semi": [
			"error",
			"always"
		],
		"key-spacing": [
			"error",
			{
				"align": "value"
			}
		],
		"comma-spacing": [
			"error",
			{
				"before": false,
				"after": true
			}
		],
		"func-call-spacing": [
			"error",
			"never"
		],
		"keyword-spacing": [
			"error",
			{
				"before": true
			}
		],
		"no-irregular-whitespace": "error",
		"no-unused-expressions": 0,
		"align-assignments/align-assignments": [
			2,
			{
				"requiresOnly": false
			}
		]
	}
}

7 backend/.prettierrc (new file)
@@ -0,0 +1,7 @@
{
	"semi": true,
	"useTabs": true,
	"printWidth": 1000,
	"singleQuote": true,
	"bracketSameLine": true
}
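
The formatting churn in the backend diffs below (tabs, single quotes, call chains collapsed onto very long single lines) follows from this Prettier config, in particular printWidth 1000. A typical way to apply it (not part of this commit; assumes prettier is available as a dev dependency in backend/package.json) would be:

    cd backend
    npx prettier --write .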

@@ -1,9 +1,9 @@
const express = require('express');
const bodyParser = require('body-parser');
const fileUpload = require('express-fileupload');
const express = require('express');
const bodyParser = require('body-parser');
const fileUpload = require('express-fileupload');
const compression = require('compression');
const config = require('./lib/config');
const log = require('./logger').express;
const config = require('./lib/config');
const log = require('./logger').express;

/**
* App
@@ -11,7 +11,7 @@ const log = require('./logger').express;
const app = express();
app.use(fileUpload());
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({extended: true}));
app.use(bodyParser.urlencoded({ extended: true }));

// Gzip
app.use(compression());
@@ -41,12 +41,12 @@ app.use(function (req, res, next) {
}

res.set({
'X-XSS-Protection': '1; mode=block',
'X-XSS-Protection': '1; mode=block',
'X-Content-Type-Options': 'nosniff',
'X-Frame-Options': x_frame_options,
'Cache-Control': 'no-cache, no-store, max-age=0, must-revalidate',
Pragma: 'no-cache',
Expires: 0
'X-Frame-Options': x_frame_options,
'Cache-Control': 'no-cache, no-store, max-age=0, must-revalidate',
Pragma: 'no-cache',
Expires: 0,
});
next();
});
@@ -58,18 +58,17 @@ app.use('/', require('./routes/api/main'));
// no stacktraces leaked to user
// eslint-disable-next-line
app.use(function (err, req, res, next) {

let payload = {
const payload = {
error: {
code: err.status,
message: err.public ? err.message : 'Internal Error'
}
code: err.status,
message: err.public ? err.message : 'Internal Error',
},
};

if (config.debug() || (req.baseUrl + req.path).includes('nginx/certificates')) {
payload.debug = {
stack: typeof err.stack !== 'undefined' && err.stack ? err.stack.split('\n') : null,
previous: err.previous
stack: typeof err.stack !== 'undefined' && err.stack ? err.stack.split('\n') : null,
previous: err.previous,
};
}

@@ -77,14 +76,12 @@ app.use(function (err, req, res, next) {
if (typeof err.stack !== 'undefined' && err.stack) {
if (config.debug()) {
log.debug(err.stack);
} else if (typeof err.public == 'undefined' || !err.public) {
} else if (typeof err.public === 'undefined' || !err.public) {
log.warn(err.message);
}
}

res
.status(err.status || 500)
.send(payload);
res.status(err.status || 500).send(payload);
});

module.exports = app;

@@ -10,18 +10,18 @@ function generateDbConfig() {
return cfg.knex;
}
return {
client: cfg.engine,
client: cfg.engine,
connection: {
host: cfg.host,
user: cfg.user,
host: cfg.host,
user: cfg.user,
password: cfg.password,
database: cfg.name,
port: cfg.port,
ssl: cfg.tls,
port: cfg.port,
ssl: cfg.tls,
},
migrations: {
tableName: 'migrations'
}
tableName: 'migrations',
},
};
}
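
For orientation, generateDbConfig() above maps a cfg object of roughly this shape onto the knex settings (field names are taken from the cfg.* reads in the hunk; the values below are placeholders, not from this repository):

	const cfg = {
		engine: 'mysql2',  // used as the knex client name
		host: 'db',
		user: 'npm',
		password: 'secret',
		name: 'npm',       // becomes connection.database
		port: 3306,
		tls: false,        // becomes connection.ssl
		// knex: { ... }   // if present, returned as-is instead
	};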

5 backend/eslint.config.mjs (new file)
@@ -0,0 +1,5 @@
import globals from 'globals';
import pluginJs from '@eslint/js';
import eslintPluginPrettierRecommended from 'eslint-plugin-prettier/recommended';

export default [{ files: ['**/*.js'], languageOptions: { sourceType: 'commonjs' } }, { languageOptions: { globals: globals.node } }, pluginJs.configs.recommended, eslintPluginPrettierRecommended];
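
A typical invocation for a flat config like this (not shown in this diff; it assumes ESLint 9+ together with the imported globals, @eslint/js and eslint-plugin-prettier packages are declared in backend/package.json) would be:

    cd backend
    npx eslint .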

@@ -2,16 +2,17 @@

const logger = require('./logger').global;

async function appStart () {
const migrate = require('./migrate');
const setup = require('./setup');
const app = require('./app');
const apiValidator = require('./lib/validator/api');
const internalNginx = require('./internal/nginx');
async function appStart() {
const migrate = require('./migrate');
const setup = require('./setup');
const app = require('./app');
const apiValidator = require('./lib/validator/api');
const internalNginx = require('./internal/nginx');
const internalCertificate = require('./internal/certificate');
const internalIpRanges = require('./internal/ip_ranges');
const internalIpRanges = require('./internal/ip_ranges');

return migrate.latest()
return migrate
.latest()
.then(setup)
.then(() => {
return apiValidator.loadSchemas;

@@ -1,67 +1,65 @@
|
||||
const _ = require('lodash');
|
||||
const fs = require('fs');
|
||||
const batchflow = require('batchflow');
|
||||
const logger = require('../logger').access;
|
||||
const error = require('../lib/error');
|
||||
const utils = require('../lib/utils');
|
||||
const accessListModel = require('../models/access_list');
|
||||
const accessListAuthModel = require('../models/access_list_auth');
|
||||
const _ = require('lodash');
|
||||
const fs = require('fs');
|
||||
const batchflow = require('batchflow');
|
||||
const logger = require('../logger').access;
|
||||
const error = require('../lib/error');
|
||||
const utils = require('../lib/utils');
|
||||
const accessListModel = require('../models/access_list');
|
||||
const accessListAuthModel = require('../models/access_list_auth');
|
||||
const accessListClientModel = require('../models/access_list_client');
|
||||
const proxyHostModel = require('../models/proxy_host');
|
||||
const internalAuditLog = require('./audit-log');
|
||||
const internalNginx = require('./nginx');
|
||||
const proxyHostModel = require('../models/proxy_host');
|
||||
const internalAuditLog = require('./audit-log');
|
||||
const internalNginx = require('./nginx');
|
||||
|
||||
function omissions () {
|
||||
function omissions() {
|
||||
return ['is_deleted'];
|
||||
}
|
||||
|
||||
const internalAccessList = {
|
||||
|
||||
/**
|
||||
* @param {Access} access
|
||||
* @param {Object} data
|
||||
* @returns {Promise}
|
||||
*/
|
||||
create: (access, data) => {
|
||||
return access.can('access_lists:create', data)
|
||||
.then((/*access_data*/) => {
|
||||
return access
|
||||
.can('access_lists:create', data)
|
||||
.then((/* access_data */) => {
|
||||
return accessListModel
|
||||
.query()
|
||||
.insertAndFetch({
|
||||
name: data.name,
|
||||
satisfy_any: data.satisfy_any,
|
||||
pass_auth: data.pass_auth,
|
||||
owner_user_id: access.token.getUserId(1)
|
||||
name: data.name,
|
||||
satisfy_any: data.satisfy_any,
|
||||
pass_auth: data.pass_auth,
|
||||
owner_user_id: access.token.getUserId(1),
|
||||
})
|
||||
.then(utils.omitRow(omissions()));
|
||||
})
|
||||
.then((row) => {
|
||||
data.id = row.id;
|
||||
|
||||
let promises = [];
|
||||
const promises = [];
|
||||
|
||||
// Now add the items
|
||||
data.items.map((item) => {
|
||||
promises.push(accessListAuthModel
|
||||
.query()
|
||||
.insert({
|
||||
promises.push(
|
||||
accessListAuthModel.query().insert({
|
||||
access_list_id: row.id,
|
||||
username: item.username,
|
||||
password: item.password
|
||||
})
|
||||
username: item.username,
|
||||
password: item.password,
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
// Now add the clients
|
||||
if (typeof data.clients !== 'undefined' && data.clients) {
|
||||
data.clients.map((client) => {
|
||||
promises.push(accessListClientModel
|
||||
.query()
|
||||
.insert({
|
||||
promises.push(
|
||||
accessListClientModel.query().insert({
|
||||
access_list_id: row.id,
|
||||
address: client.address,
|
||||
directive: client.directive
|
||||
})
|
||||
address: client.address,
|
||||
directive: client.directive,
|
||||
}),
|
||||
);
|
||||
});
|
||||
}
|
||||
@@ -70,16 +68,21 @@ const internalAccessList = {
|
||||
})
|
||||
.then(() => {
|
||||
// re-fetch with expansions
|
||||
return internalAccessList.get(access, {
|
||||
id: data.id,
|
||||
expand: ['owner', 'items', 'clients', 'proxy_hosts.access_list.[clients,items]']
|
||||
}, true /* <- skip masking */);
|
||||
return internalAccessList.get(
|
||||
access,
|
||||
{
|
||||
id: data.id,
|
||||
expand: ['owner', 'items', 'clients', 'proxy_hosts.access_list.[clients,items]'],
|
||||
},
|
||||
true /* <- skip masking */,
|
||||
);
|
||||
})
|
||||
.then((row) => {
|
||||
// Audit log
|
||||
data.meta = _.assign({}, data.meta || {}, row.meta);
|
||||
|
||||
return internalAccessList.build(row)
|
||||
return internalAccessList
|
||||
.build(row)
|
||||
.then(() => {
|
||||
if (row.proxy_host_count) {
|
||||
return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts);
|
||||
@@ -88,10 +91,10 @@ const internalAccessList = {
|
||||
.then(() => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'created',
|
||||
action: 'created',
|
||||
object_type: 'access-list',
|
||||
object_id: row.id,
|
||||
meta: internalAccessList.maskItems(data)
|
||||
object_id: row.id,
|
||||
meta: internalAccessList.maskItems(data),
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
@@ -109,9 +112,10 @@ const internalAccessList = {
|
||||
* @return {Promise}
|
||||
*/
|
||||
update: (access, data) => {
|
||||
return access.can('access_lists:update', data.id)
|
||||
.then((/*access_data*/) => {
|
||||
return internalAccessList.get(access, {id: data.id});
|
||||
return access
|
||||
.can('access_lists:update', data.id)
|
||||
.then((/* access_data */) => {
|
||||
return internalAccessList.get(access, { id: data.id });
|
||||
})
|
||||
.then((row) => {
|
||||
if (row.id !== data.id) {
|
||||
@@ -122,31 +126,27 @@ const internalAccessList = {
|
||||
.then(() => {
|
||||
// patch name if specified
|
||||
if (typeof data.name !== 'undefined' && data.name) {
|
||||
return accessListModel
|
||||
.query()
|
||||
.where({id: data.id})
|
||||
.patch({
|
||||
name: data.name,
|
||||
satisfy_any: data.satisfy_any,
|
||||
pass_auth: data.pass_auth,
|
||||
});
|
||||
return accessListModel.query().where({ id: data.id }).patch({
|
||||
name: data.name,
|
||||
satisfy_any: data.satisfy_any,
|
||||
pass_auth: data.pass_auth,
|
||||
});
|
||||
}
|
||||
})
|
||||
.then(() => {
|
||||
// Check for items and add/update/remove them
|
||||
if (typeof data.items !== 'undefined' && data.items) {
|
||||
let promises = [];
|
||||
let items_to_keep = [];
|
||||
const promises = [];
|
||||
const items_to_keep = [];
|
||||
|
||||
data.items.map(function (item) {
|
||||
if (item.password) {
|
||||
promises.push(accessListAuthModel
|
||||
.query()
|
||||
.insert({
|
||||
promises.push(
|
||||
accessListAuthModel.query().insert({
|
||||
access_list_id: data.id,
|
||||
username: item.username,
|
||||
password: item.password
|
||||
})
|
||||
username: item.username,
|
||||
password: item.password,
|
||||
}),
|
||||
);
|
||||
} else {
|
||||
// This was supplied with an empty password, which means keep it but don't change the password
|
||||
@@ -154,79 +154,76 @@ const internalAccessList = {
|
||||
}
|
||||
});
|
||||
|
||||
let query = accessListAuthModel
|
||||
.query()
|
||||
.delete()
|
||||
.where('access_list_id', data.id);
|
||||
const query = accessListAuthModel.query().delete().where('access_list_id', data.id);
|
||||
|
||||
if (items_to_keep.length) {
|
||||
query.andWhere('username', 'NOT IN', items_to_keep);
|
||||
}
|
||||
|
||||
return query
|
||||
.then(() => {
|
||||
// Add new items
|
||||
if (promises.length) {
|
||||
return Promise.all(promises);
|
||||
}
|
||||
});
|
||||
return query.then(() => {
|
||||
// Add new items
|
||||
if (promises.length) {
|
||||
return Promise.all(promises);
|
||||
}
|
||||
});
|
||||
}
|
||||
})
|
||||
.then(() => {
|
||||
// Check for clients and add/update/remove them
|
||||
if (typeof data.clients !== 'undefined' && data.clients) {
|
||||
let promises = [];
|
||||
const promises = [];
|
||||
|
||||
data.clients.map(function (client) {
|
||||
if (client.address) {
|
||||
promises.push(accessListClientModel
|
||||
.query()
|
||||
.insert({
|
||||
promises.push(
|
||||
accessListClientModel.query().insert({
|
||||
access_list_id: data.id,
|
||||
address: client.address,
|
||||
directive: client.directive
|
||||
})
|
||||
address: client.address,
|
||||
directive: client.directive,
|
||||
}),
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
let query = accessListClientModel
|
||||
.query()
|
||||
.delete()
|
||||
.where('access_list_id', data.id);
|
||||
const query = accessListClientModel.query().delete().where('access_list_id', data.id);
|
||||
|
||||
return query
|
||||
.then(() => {
|
||||
// Add new items
|
||||
if (promises.length) {
|
||||
return Promise.all(promises);
|
||||
}
|
||||
});
|
||||
return query.then(() => {
|
||||
// Add new items
|
||||
if (promises.length) {
|
||||
return Promise.all(promises);
|
||||
}
|
||||
});
|
||||
}
|
||||
})
|
||||
.then(() => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'updated',
|
||||
action: 'updated',
|
||||
object_type: 'access-list',
|
||||
object_id: data.id,
|
||||
meta: internalAccessList.maskItems(data)
|
||||
object_id: data.id,
|
||||
meta: internalAccessList.maskItems(data),
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
// re-fetch with expansions
|
||||
return internalAccessList.get(access, {
|
||||
id: data.id,
|
||||
expand: ['owner', 'items', 'clients', 'proxy_hosts.[certificate,access_list.[clients,items]]']
|
||||
}, true /* <- skip masking */);
|
||||
return internalAccessList.get(
|
||||
access,
|
||||
{
|
||||
id: data.id,
|
||||
expand: ['owner', 'items', 'clients', 'proxy_hosts.[certificate,access_list.[clients,items]]'],
|
||||
},
|
||||
true /* <- skip masking */,
|
||||
);
|
||||
})
|
||||
.then((row) => {
|
||||
return internalAccessList.build(row)
|
||||
return internalAccessList
|
||||
.build(row)
|
||||
.then(() => {
|
||||
if (row.proxy_host_count) {
|
||||
return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts);
|
||||
}
|
||||
}).then(internalNginx.reload)
|
||||
})
|
||||
.then(internalNginx.reload)
|
||||
.then(() => {
|
||||
return internalAccessList.maskItems(row);
|
||||
});
|
||||
@@ -247,16 +244,10 @@ const internalAccessList = {
|
||||
data = {};
|
||||
}
|
||||
|
||||
return access.can('access_lists:get', data.id)
|
||||
return access
|
||||
.can('access_lists:get', data.id)
|
||||
.then((access_data) => {
|
||||
let query = accessListModel
|
||||
.query()
|
||||
.select('access_list.*', accessListModel.raw('COUNT(proxy_host.id) as proxy_host_count'))
|
||||
.joinRaw('LEFT JOIN `proxy_host` ON `proxy_host`.`access_list_id` = `access_list`.`id` AND `proxy_host`.`is_deleted` = 0')
|
||||
.where('access_list.is_deleted', 0)
|
||||
.andWhere('access_list.id', data.id)
|
||||
.allowGraph('[owner,items,clients,proxy_hosts.[certificate,access_list.[clients,items]]]')
|
||||
.first();
|
||||
const query = accessListModel.query().select('access_list.*', accessListModel.raw('COUNT(proxy_host.id) as proxy_host_count')).joinRaw('LEFT JOIN `proxy_host` ON `proxy_host`.`access_list_id` = `access_list`.`id` AND `proxy_host`.`is_deleted` = 0').where('access_list.is_deleted', 0).andWhere('access_list.id', data.id).allowGraph('[owner,items,clients,proxy_hosts.[certificate,access_list.[clients,items]]]').first();
|
||||
|
||||
if (access_data.permission_visibility !== 'all') {
|
||||
query.andWhere('access_list.owner_user_id', access.token.getUserId(1));
|
||||
@@ -291,9 +282,10 @@ const internalAccessList = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
delete: (access, data) => {
|
||||
return access.can('access_lists:delete', data.id)
|
||||
return access
|
||||
.can('access_lists:delete', data.id)
|
||||
.then(() => {
|
||||
return internalAccessList.get(access, {id: data.id, expand: ['proxy_hosts', 'items', 'clients']});
|
||||
return internalAccessList.get(access, { id: data.id, expand: ['proxy_hosts', 'items', 'clients'] });
|
||||
})
|
||||
.then((row) => {
|
||||
if (!row) {
|
||||
@@ -310,7 +302,7 @@ const internalAccessList = {
|
||||
.query()
|
||||
.where('id', row.id)
|
||||
.patch({
|
||||
is_deleted: 1
|
||||
is_deleted: 1,
|
||||
})
|
||||
.then(() => {
|
||||
// 2. update any proxy hosts that were using it (ignoring permissions)
|
||||
@@ -318,7 +310,7 @@ const internalAccessList = {
|
||||
return proxyHostModel
|
||||
.query()
|
||||
.where('access_list_id', '=', row.id)
|
||||
.patch({access_list_id: 0})
|
||||
.patch({ access_list_id: 0 })
|
||||
.then(() => {
|
||||
// 3. reconfigure those hosts, then reload nginx
|
||||
|
||||
@@ -336,21 +328,21 @@ const internalAccessList = {
|
||||
})
|
||||
.then(() => {
|
||||
// delete the htpasswd file
|
||||
let htpasswd_file = internalAccessList.getFilename(row);
|
||||
const htpasswd_file = internalAccessList.getFilename(row);
|
||||
|
||||
try {
|
||||
fs.unlinkSync(htpasswd_file);
|
||||
} catch (err) {
|
||||
} catch {
|
||||
// do nothing
|
||||
}
|
||||
})
|
||||
.then(() => {
|
||||
// 4. audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'deleted',
|
||||
action: 'deleted',
|
||||
object_type: 'access-list',
|
||||
object_id: row.id,
|
||||
meta: _.omit(internalAccessList.maskItems(row), ['is_deleted', 'proxy_hosts'])
|
||||
object_id: row.id,
|
||||
meta: _.omit(internalAccessList.maskItems(row), ['is_deleted', 'proxy_hosts']),
|
||||
});
|
||||
});
|
||||
})
|
||||
@@ -368,16 +360,10 @@ const internalAccessList = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
getAll: (access, expand, search_query) => {
|
||||
return access.can('access_lists:list')
|
||||
return access
|
||||
.can('access_lists:list')
|
||||
.then((access_data) => {
|
||||
let query = accessListModel
|
||||
.query()
|
||||
.select('access_list.*', accessListModel.raw('COUNT(proxy_host.id) as proxy_host_count'))
|
||||
.joinRaw('LEFT JOIN `proxy_host` ON `proxy_host`.`access_list_id` = `access_list`.`id` AND `proxy_host`.`is_deleted` = 0')
|
||||
.where('access_list.is_deleted', 0)
|
||||
.groupBy('access_list.id')
|
||||
.allowGraph('[owner,items,clients]')
|
||||
.orderBy('access_list.name', 'ASC');
|
||||
const query = accessListModel.query().select('access_list.*', accessListModel.raw('COUNT(proxy_host.id) as proxy_host_count')).joinRaw('LEFT JOIN `proxy_host` ON `proxy_host`.`access_list_id` = `access_list`.`id` AND `proxy_host`.`is_deleted` = 0').where('access_list.is_deleted', 0).groupBy('access_list.id').allowGraph('[owner,items,clients]').orderBy('access_list.name', 'ASC');
|
||||
|
||||
if (access_data.permission_visibility !== 'all') {
|
||||
query.andWhere('access_list.owner_user_id', access.token.getUserId(1));
|
||||
@@ -417,19 +403,15 @@ const internalAccessList = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
getCount: (user_id, visibility) => {
|
||||
let query = accessListModel
|
||||
.query()
|
||||
.count('id as count')
|
||||
.where('is_deleted', 0);
|
||||
const query = accessListModel.query().count('id as count').where('is_deleted', 0);
|
||||
|
||||
if (visibility !== 'all') {
|
||||
query.andWhere('owner_user_id', user_id);
|
||||
}
|
||||
|
||||
return query.first()
|
||||
.then((row) => {
|
||||
return parseInt(row.count, 10);
|
||||
});
|
||||
return query.first().then((row) => {
|
||||
return parseInt(row.count, 10);
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
@@ -447,7 +429,7 @@ const internalAccessList = {
|
||||
first_char = val.password.charAt(0);
|
||||
}
|
||||
|
||||
list.items[idx].hint = first_char + ('*').repeat(repeat_for);
|
||||
list.items[idx].hint = first_char + '*'.repeat(repeat_for);
|
||||
list.items[idx].password = '';
|
||||
});
|
||||
}
|
||||
@@ -475,54 +457,55 @@ const internalAccessList = {
|
||||
logger.info('Building Access file #' + list.id + ' for: ' + list.name);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
let htpasswd_file = internalAccessList.getFilename(list);
|
||||
const htpasswd_file = internalAccessList.getFilename(list);
|
||||
|
||||
// 1. remove any existing access file
|
||||
try {
|
||||
fs.unlinkSync(htpasswd_file);
|
||||
} catch (err) {
|
||||
} catch {
|
||||
// do nothing
|
||||
}
|
||||
|
||||
// 2. create empty access file
|
||||
try {
|
||||
fs.writeFileSync(htpasswd_file, '', {encoding: 'utf8'});
|
||||
fs.writeFileSync(htpasswd_file, '', { encoding: 'utf8' });
|
||||
resolve(htpasswd_file);
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
})
|
||||
.then((htpasswd_file) => {
|
||||
// 3. generate password for each user
|
||||
if (list.items.length) {
|
||||
return new Promise((resolve, reject) => {
|
||||
batchflow(list.items).sequential()
|
||||
.each((i, item, next) => {
|
||||
if (typeof item.password !== 'undefined' && item.password.length) {
|
||||
logger.info('Adding: ' + item.username);
|
||||
}).then((htpasswd_file) => {
|
||||
// 3. generate password for each user
|
||||
if (list.items.length) {
|
||||
return new Promise((resolve, reject) => {
|
||||
batchflow(list.items)
|
||||
.sequential()
|
||||
.each((i, item, next) => {
|
||||
if (typeof item.password !== 'undefined' && item.password.length) {
|
||||
logger.info('Adding: ' + item.username);
|
||||
|
||||
utils.execFile('htpasswd', ['-b', htpasswd_file, item.username, item.password])
|
||||
.then((/*result*/) => {
|
||||
next();
|
||||
})
|
||||
.catch((err) => {
|
||||
logger.error(err);
|
||||
next(err);
|
||||
});
|
||||
}
|
||||
})
|
||||
.error((err) => {
|
||||
logger.error(err);
|
||||
reject(err);
|
||||
})
|
||||
.end((results) => {
|
||||
logger.success('Built Access file #' + list.id + ' for: ' + list.name);
|
||||
resolve(results);
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
utils
|
||||
.execFile('htpasswd', ['-b', htpasswd_file, item.username, item.password])
|
||||
.then((/* result */) => {
|
||||
next();
|
||||
})
|
||||
.catch((err) => {
|
||||
logger.error(err);
|
||||
next(err);
|
||||
});
|
||||
}
|
||||
})
|
||||
.error((err) => {
|
||||
logger.error(err);
|
||||
reject(err);
|
||||
})
|
||||
.end((results) => {
|
||||
logger.success('Built Access file #' + list.id + ' for: ' + list.name);
|
||||
resolve(results);
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
},
|
||||
};
|
||||
|
||||
module.exports = internalAccessList;
|
||||
|
@@ -1,8 +1,7 @@
|
||||
const error = require('../lib/error');
|
||||
const error = require('../lib/error');
|
||||
const auditLogModel = require('../models/audit-log');
|
||||
|
||||
const internalAuditLog = {
|
||||
|
||||
/**
|
||||
* All logs
|
||||
*
|
||||
@@ -12,28 +11,22 @@ const internalAuditLog = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
getAll: (access, expand, search_query) => {
|
||||
return access.can('auditlog:list')
|
||||
.then(() => {
|
||||
let query = auditLogModel
|
||||
.query()
|
||||
.orderBy('created_on', 'DESC')
|
||||
.orderBy('id', 'DESC')
|
||||
.limit(100)
|
||||
.allowGraph('[user]');
|
||||
return access.can('auditlog:list').then(() => {
|
||||
const query = auditLogModel.query().orderBy('created_on', 'DESC').orderBy('id', 'DESC').limit(100).allowGraph('[user]');
|
||||
|
||||
// Query is used for searching
|
||||
if (typeof search_query === 'string') {
|
||||
query.where(function () {
|
||||
this.where('meta', 'like', '%' + search_query + '%');
|
||||
});
|
||||
}
|
||||
// Query is used for searching
|
||||
if (typeof search_query === 'string') {
|
||||
query.where(function () {
|
||||
this.where('meta', 'like', '%' + search_query + '%');
|
||||
});
|
||||
}
|
||||
|
||||
if (typeof expand !== 'undefined' && expand !== null) {
|
||||
query.withGraphFetched('[' + expand.join(', ') + ']');
|
||||
}
|
||||
if (typeof expand !== 'undefined' && expand !== null) {
|
||||
query.withGraphFetched('[' + expand.join(', ') + ']');
|
||||
}
|
||||
|
||||
return query;
|
||||
});
|
||||
return query;
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
@@ -61,18 +54,18 @@ const internalAuditLog = {
|
||||
reject(new error.InternalValidationError('Audit log entry must contain an Action'));
|
||||
} else {
|
||||
// Make sure at least 1 of the IDs are set and action
|
||||
resolve(auditLogModel
|
||||
.query()
|
||||
.insert({
|
||||
user_id: data.user_id,
|
||||
action: data.action,
|
||||
resolve(
|
||||
auditLogModel.query().insert({
|
||||
user_id: data.user_id,
|
||||
action: data.action,
|
||||
object_type: data.object_type || '',
|
||||
object_id: data.object_id || 0,
|
||||
meta: data.meta || {}
|
||||
}));
|
||||
object_id: data.object_id || 0,
|
||||
meta: data.meta || {},
|
||||
}),
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
module.exports = internalAuditLog;
|
||||
|
@@ -1,21 +1,21 @@
|
||||
const _ = require('lodash');
|
||||
const fs = require('fs');
|
||||
const https = require('https');
|
||||
const moment = require('moment');
|
||||
const logger = require('../logger').ssl;
|
||||
const error = require('../lib/error');
|
||||
const utils = require('../lib/utils');
|
||||
const _ = require('lodash');
|
||||
const fs = require('fs');
|
||||
const https = require('https');
|
||||
const moment = require('moment');
|
||||
const logger = require('../logger').ssl;
|
||||
const error = require('../lib/error');
|
||||
const utils = require('../lib/utils');
|
||||
const certificateModel = require('../models/certificate');
|
||||
const dnsPlugins = require('../certbot-dns-plugins.json');
|
||||
const dnsPlugins = require('../certbot-dns-plugins.json');
|
||||
const internalAuditLog = require('./audit-log');
|
||||
const internalNginx = require('./nginx');
|
||||
const certbot = require('../lib/certbot');
|
||||
const archiver = require('archiver');
|
||||
const crypto = require('crypto');
|
||||
const path = require('path');
|
||||
const { isArray } = require('lodash');
|
||||
const internalNginx = require('./nginx');
|
||||
const certbot = require('../lib/certbot');
|
||||
const archiver = require('archiver');
|
||||
const crypto = require('crypto');
|
||||
const path = require('path');
|
||||
const { isArray } = require('lodash');
|
||||
|
||||
const certbotConfig = '/data/tls/certbot/config.ini';
|
||||
const certbotConfig = '/data/tls/certbot/config.ini';
|
||||
const certbotCommand = 'certbot --logs-dir /tmp/certbot-log --work-dir /tmp/certbot-work --config-dir /data/tls/certbot';
|
||||
|
||||
function omissions() {
|
||||
@@ -23,10 +23,9 @@ function omissions() {
|
||||
}
|
||||
|
||||
const internalCertificate = {
|
||||
|
||||
allowedSslFiles: ['certificate', 'certificate_key', 'intermediate_certificate'],
|
||||
intervalTimeout: 1000 * 60 * 60 * Number(process.env.CRT),
|
||||
interval: null,
|
||||
allowedSslFiles: ['certificate', 'certificate_key', 'intermediate_certificate'],
|
||||
intervalTimeout: 1000 * 60 * 60 * Number(process.env.CRT),
|
||||
interval: null,
|
||||
intervalProcessing: false,
|
||||
|
||||
initTimer: () => {
|
||||
@@ -42,22 +41,19 @@ const internalCertificate = {
|
||||
internalCertificate.intervalProcessing = true;
|
||||
logger.info('Renewing TLS certs close to expiry...');
|
||||
|
||||
const cmd = certbotCommand + ' renew --quiet ' +
|
||||
'--config "' + certbotConfig + '" ' +
|
||||
'--preferred-challenges "dns,http" ' +
|
||||
'--no-random-sleep-on-renew';
|
||||
const cmd = certbotCommand + ' renew --quiet ' + '--config "' + certbotConfig + '" ' + '--preferred-challenges "dns,http" ' + '--no-random-sleep-on-renew';
|
||||
|
||||
return utils.exec(cmd)
|
||||
return utils
|
||||
.exec(cmd)
|
||||
.then((result) => {
|
||||
if (result) {
|
||||
logger.info('Renew Result: ' + result);
|
||||
}
|
||||
|
||||
return internalNginx.reload()
|
||||
.then(() => {
|
||||
logger.info('Renew Complete');
|
||||
return result;
|
||||
});
|
||||
return internalNginx.reload().then(() => {
|
||||
logger.info('Renew Complete');
|
||||
return result;
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
// Now go and fetch all the certbot certs from the db and query the files and update expiry times
|
||||
@@ -67,24 +63,25 @@ const internalCertificate = {
|
||||
.andWhere('provider', 'letsencrypt')
|
||||
.then((certificates) => {
|
||||
if (certificates && certificates.length) {
|
||||
let promises = [];
|
||||
const promises = [];
|
||||
|
||||
certificates.map(function (certificate) {
|
||||
promises.push(
|
||||
internalCertificate.getCertificateInfoFromFile('/data/tls/certbot/live/npm-' + certificate.id + '/fullchain.pem')
|
||||
internalCertificate
|
||||
.getCertificateInfoFromFile('/data/tls/certbot/live/npm-' + certificate.id + '/fullchain.pem')
|
||||
.then((cert_info) => {
|
||||
return certificateModel
|
||||
.query()
|
||||
.where('id', certificate.id)
|
||||
.andWhere('provider', 'letsencrypt')
|
||||
.patch({
|
||||
expires_on: moment(cert_info.dates.to, 'X').format('YYYY-MM-DD HH:mm:ss')
|
||||
expires_on: moment(cert_info.dates.to, 'X').format('YYYY-MM-DD HH:mm:ss'),
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
// Don't want to stop the train here, just log the error
|
||||
logger.error(err.message);
|
||||
})
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
@@ -108,7 +105,8 @@ const internalCertificate = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
create: (access, data) => {
|
||||
return access.can('certificates:create', data)
|
||||
return access
|
||||
.can('certificates:create', data)
|
||||
.then(() => {
|
||||
data.owner_user_id = access.token.getUserId(1);
|
||||
|
||||
@@ -116,30 +114,29 @@ const internalCertificate = {
|
||||
data.nice_name = data.domain_names.join(', ');
|
||||
}
|
||||
|
||||
return certificateModel
|
||||
.query()
|
||||
.insertAndFetch(data)
|
||||
.then(utils.omitRow(omissions()));
|
||||
return certificateModel.query().insertAndFetch(data).then(utils.omitRow(omissions()));
|
||||
})
|
||||
.then((certificate) => {
|
||||
if (certificate.provider === 'letsencrypt') {
|
||||
// Request a new Cert using Certbot. Let the fun begin.
|
||||
// Request a new Cert using Certbot. Let the fun begin.
|
||||
if (certificate.meta.dns_challenge) {
|
||||
return internalCertificate.requestLetsEncryptSslWithDnsChallenge(certificate)
|
||||
return internalCertificate
|
||||
.requestLetsEncryptSslWithDnsChallenge(certificate)
|
||||
.then(() => {
|
||||
return certificate;
|
||||
})
|
||||
.catch((err) => {
|
||||
// In the event of failure, throw err back
|
||||
// In the event of failure, throw err back
|
||||
throw err;
|
||||
});
|
||||
} else {
|
||||
return internalCertificate.requestLetsEncryptSsl(certificate)
|
||||
return internalCertificate
|
||||
.requestLetsEncryptSsl(certificate)
|
||||
.then(() => {
|
||||
return certificate;
|
||||
})
|
||||
.catch((err) => {
|
||||
// In the event of failure, throw err back
|
||||
// In the event of failure, throw err back
|
||||
throw err;
|
||||
});
|
||||
}
|
||||
@@ -152,16 +149,12 @@ const internalCertificate = {
|
||||
// At this point, the certbot cert should exist on disk.
|
||||
// Lets get the expiry date from the file and update the row silently
|
||||
return internalCertificate
|
||||
.getCertificateInfoFromFile(
|
||||
'/data/tls/certbot/live/npm-' + certificate.id + '/fullchain.pem'
|
||||
)
|
||||
.getCertificateInfoFromFile('/data/tls/certbot/live/npm-' + certificate.id + '/fullchain.pem')
|
||||
.then((cert_info) => {
|
||||
return certificateModel
|
||||
.query()
|
||||
.patchAndFetchById(certificate.id, {
|
||||
expires_on: moment(cert_info.dates.to, 'X').format(
|
||||
'YYYY-MM-DD HH:mm:ss'
|
||||
),
|
||||
expires_on: moment(cert_info.dates.to, 'X').format('YYYY-MM-DD HH:mm:ss'),
|
||||
})
|
||||
.then((saved_row) => {
|
||||
// Add cert data for audit log
|
||||
@@ -171,7 +164,8 @@ const internalCertificate = {
|
||||
|
||||
return saved_row;
|
||||
});
|
||||
}).catch(async (error) => {
|
||||
})
|
||||
.catch(async (error) => {
|
||||
// Delete the certificate from the database if it was not created successfully
|
||||
await certificateModel.query().deleteById(certificate.id);
|
||||
|
||||
@@ -182,23 +176,22 @@ const internalCertificate = {
|
||||
}
|
||||
})
|
||||
.then((certificate) => {
|
||||
|
||||
data.meta = _.assign({}, data.meta || {}, certificate.meta);
|
||||
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'created',
|
||||
object_type: 'certificate',
|
||||
object_id: certificate.id,
|
||||
meta: data
|
||||
})
|
||||
return internalAuditLog
|
||||
.add(access, {
|
||||
action: 'created',
|
||||
object_type: 'certificate',
|
||||
object_id: certificate.id,
|
||||
meta: data,
|
||||
})
|
||||
.then(() => {
|
||||
return certificate;
|
||||
});
|
||||
});
|
||||
},
|
||||
|
||||
|
||||
/**
|
||||
* @param {Access} access
|
||||
* @param {Object} data
|
||||
@@ -208,9 +201,10 @@ const internalCertificate = {
|
||||
* @return {Promise}
|
||||
*/
|
||||
update: (access, data) => {
|
||||
return access.can('certificates:update', data.id)
|
||||
.then((/*access_data*/) => {
|
||||
return internalCertificate.get(access, {id: data.id});
|
||||
return access
|
||||
.can('certificates:update', data.id)
|
||||
.then((/* access_data */) => {
|
||||
return internalCertificate.get(access, { id: data.id });
|
||||
})
|
||||
.then((row) => {
|
||||
if (row.id !== data.id) {
|
||||
@@ -224,7 +218,7 @@ const internalCertificate = {
|
||||
.then(utils.omitRow(omissions()))
|
||||
.then((saved_row) => {
|
||||
saved_row.meta = internalCertificate.cleanMeta(saved_row.meta);
|
||||
data.meta = internalCertificate.cleanMeta(data.meta);
|
||||
data.meta = internalCertificate.cleanMeta(data.meta);
|
||||
|
||||
// Add row.nice_name for custom certs
|
||||
if (saved_row.provider === 'other') {
|
||||
@@ -232,12 +226,13 @@ const internalCertificate = {
|
||||
}
|
||||
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'updated',
|
||||
object_type: 'certificate',
|
||||
object_id: row.id,
|
||||
meta: _.omit(data, ['expires_on']) // this prevents json circular reference because expires_on might be raw
|
||||
})
|
||||
return internalAuditLog
|
||||
.add(access, {
|
||||
action: 'updated',
|
||||
object_type: 'certificate',
|
||||
object_id: row.id,
|
||||
meta: _.omit(data, ['expires_on']), // this prevents json circular reference because expires_on might be raw
|
||||
})
|
||||
.then(() => {
|
||||
return saved_row;
|
||||
});
|
||||
@@ -258,14 +253,10 @@ const internalCertificate = {
|
||||
data = {};
|
||||
}
|
||||
|
||||
return access.can('certificates:get', data.id)
|
||||
return access
|
||||
.can('certificates:get', data.id)
|
||||
.then((access_data) => {
|
||||
let query = certificateModel
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.andWhere('id', data.id)
|
||||
.allowGraph('[owner]')
|
||||
.first();
|
||||
const query = certificateModel.query().where('is_deleted', 0).andWhere('id', data.id).allowGraph('[owner]').first();
|
||||
|
||||
if (access_data.permission_visibility !== 'all') {
|
||||
query.andWhere('owner_user_id', access.token.getUserId(1));
|
||||
@@ -297,7 +288,8 @@ const internalCertificate = {
|
||||
*/
|
||||
download: (access, data) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
access.can('certificates:get', data)
|
||||
access
|
||||
.can('certificates:get', data)
|
||||
.then(() => {
|
||||
return internalCertificate.get(access, data);
|
||||
})
|
||||
@@ -309,45 +301,46 @@ const internalCertificate = {
|
||||
throw new error.ItemNotFoundError('Certificate ' + certificate.nice_name + ' does not exists');
|
||||
}
|
||||
|
||||
let certFiles = fs.readdirSync(zipDirectory)
|
||||
const certFiles = fs
|
||||
.readdirSync(zipDirectory)
|
||||
.filter((fn) => fn.endsWith('.pem'))
|
||||
.map((fn) => fs.realpathSync(path.join(zipDirectory, fn)));
|
||||
const downloadName = 'npm-' + data.id + '-' + `${Date.now()}.zip`;
|
||||
const opName = '/tmp/' + downloadName;
|
||||
internalCertificate.zipFiles(certFiles, opName)
|
||||
const opName = '/tmp/' + downloadName;
|
||||
internalCertificate
|
||||
.zipFiles(certFiles, opName)
|
||||
.then(() => {
|
||||
logger.debug('zip completed : ', opName);
|
||||
const resp = {
|
||||
fileName: opName
|
||||
fileName: opName,
|
||||
};
|
||||
resolve(resp);
|
||||
}).catch((err) => reject(err));
|
||||
})
|
||||
.catch((err) => reject(err));
|
||||
} else {
|
||||
throw new error.ValidationError('Only Certbot certificates can be downloaded');
|
||||
}
|
||||
}).catch((err) => reject(err));
|
||||
})
|
||||
.catch((err) => reject(err));
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
* @param {String} source
|
||||
* @param {String} out
|
||||
* @returns {Promise}
|
||||
*/
|
||||
* @param {String} source
|
||||
* @param {String} out
|
||||
* @returns {Promise}
|
||||
*/
|
||||
zipFiles(source, out) {
|
||||
const archive = archiver('zip', { zlib: { level: 9 } });
|
||||
const stream = fs.createWriteStream(out);
|
||||
const stream = fs.createWriteStream(out);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
source
|
||||
.map((fl) => {
|
||||
let fileName = path.basename(fl);
|
||||
logger.debug(fl, 'added to certificate zip');
|
||||
archive.file(fl, { name: fileName });
|
||||
});
|
||||
archive
|
||||
.on('error', (err) => reject(err))
|
||||
.pipe(stream);
|
||||
source.map((fl) => {
|
||||
const fileName = path.basename(fl);
|
||||
logger.debug(fl, 'added to certificate zip');
|
||||
archive.file(fl, { name: fileName });
|
||||
});
|
||||
archive.on('error', (err) => reject(err)).pipe(stream);
|
||||
|
||||
stream.on('close', () => resolve());
|
||||
archive.finalize();
|
||||
@@ -362,9 +355,10 @@ const internalCertificate = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
delete: (access, data) => {
|
||||
return access.can('certificates:delete', data.id)
|
||||
return access
|
||||
.can('certificates:delete', data.id)
|
||||
.then(() => {
|
||||
return internalCertificate.get(access, {id: data.id});
|
||||
return internalCertificate.get(access, { id: data.id });
|
||||
})
|
||||
.then((row) => {
|
||||
if (!row) {
|
||||
@@ -375,17 +369,17 @@ const internalCertificate = {
|
||||
.query()
|
||||
.where('id', row.id)
|
||||
.patch({
|
||||
is_deleted: 1
|
||||
is_deleted: 1,
|
||||
})
|
||||
.then(() => {
|
||||
// Add to audit log
|
||||
row.meta = internalCertificate.cleanMeta(row.meta);
|
||||
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'deleted',
|
||||
action: 'deleted',
|
||||
object_type: 'certificate',
|
||||
object_id: row.id,
|
||||
meta: _.omit(row, omissions())
|
||||
object_id: row.id,
|
||||
meta: _.omit(row, omissions()),
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
@@ -409,32 +403,26 @@ const internalCertificate = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
getAll: (access, expand, search_query) => {
|
||||
return access.can('certificates:list')
|
||||
.then((access_data) => {
|
||||
let query = certificateModel
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.groupBy('id')
|
||||
.allowGraph('[owner]')
|
||||
.orderBy('nice_name', 'ASC');
|
||||
return access.can('certificates:list').then((access_data) => {
|
||||
const query = certificateModel.query().where('is_deleted', 0).groupBy('id').allowGraph('[owner]').orderBy('nice_name', 'ASC');
|
||||
|
||||
if (access_data.permission_visibility !== 'all') {
|
||||
query.andWhere('owner_user_id', access.token.getUserId(1));
|
||||
}
|
||||
if (access_data.permission_visibility !== 'all') {
|
||||
query.andWhere('owner_user_id', access.token.getUserId(1));
|
||||
}
|
||||
|
||||
// Query is used for searching
|
||||
if (typeof search_query === 'string') {
|
||||
query.where(function () {
|
||||
this.where('nice_name', 'like', '%' + search_query + '%');
|
||||
});
|
||||
}
|
||||
// Query is used for searching
|
||||
if (typeof search_query === 'string') {
|
||||
query.where(function () {
|
||||
this.where('nice_name', 'like', '%' + search_query + '%');
|
||||
});
|
||||
}
|
||||
|
||||
if (typeof expand !== 'undefined' && expand !== null) {
|
||||
query.withGraphFetched('[' + expand.join(', ') + ']');
|
||||
}
|
||||
if (typeof expand !== 'undefined' && expand !== null) {
|
||||
query.withGraphFetched('[' + expand.join(', ') + ']');
|
||||
}
|
||||
|
||||
return query.then(utils.omitRows(omissions()));
|
||||
});
|
||||
return query.then(utils.omitRows(omissions()));
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
@@ -445,19 +433,15 @@ const internalCertificate = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
getCount: (user_id, visibility) => {
|
||||
let query = certificateModel
|
||||
.query()
|
||||
.count('id as count')
|
||||
.where('is_deleted', 0);
|
||||
const query = certificateModel.query().count('id as count').where('is_deleted', 0);
|
||||
|
||||
if (visibility !== 'all') {
|
||||
query.andWhere('owner_user_id', user_id);
|
||||
}
|
||||
|
||||
return query.first()
|
||||
.then((row) => {
|
||||
return parseInt(row.count, 10);
|
||||
});
|
||||
return query.first().then((row) => {
|
||||
return parseInt(row.count, 10);
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
@@ -504,18 +488,17 @@ const internalCertificate = {
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
return new Promise((resolve, reject) => {
|
||||
fs.writeFile(dir + '/privkey.pem', certificate.meta.certificate_key, function (err) {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
}).then(() => {
|
||||
return new Promise((resolve, reject) => {
|
||||
fs.writeFile(dir + '/privkey.pem', certificate.meta.certificate_key, function (err) {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
@@ -528,9 +511,9 @@ const internalCertificate = {
|
||||
*/
|
||||
createQuickCertificate: (access, data) => {
|
||||
return internalCertificate.create(access, {
|
||||
provider: 'letsencrypt',
|
||||
provider: 'letsencrypt',
|
||||
domain_names: data.domain_names,
|
||||
meta: data.meta
|
||||
meta: data.meta,
|
||||
});
|
||||
},
|
||||
|
||||
@@ -545,7 +528,7 @@ const internalCertificate = {
|
||||
validate: (data) => {
|
||||
return new Promise((resolve) => {
|
||||
// Put file contents into an object
|
||||
let files = {};
|
||||
const files = {};
|
||||
_.map(data.files, (file, name) => {
|
||||
if (internalCertificate.allowedSslFiles.indexOf(name) !== -1) {
|
||||
files[name] = file.data.toString();
|
||||
@@ -553,13 +536,13 @@ const internalCertificate = {
|
||||
});
|
||||
|
||||
resolve(files);
|
||||
})
|
||||
.then((files) => {
|
||||
// For each file, create a temp file and write the contents to it
|
||||
// Then test it depending on the file type
|
||||
let promises = [];
|
||||
_.map(files, (content, type) => {
|
||||
promises.push(new Promise((resolve) => {
|
||||
}).then((files) => {
|
||||
// For each file, create a temp file and write the contents to it
|
||||
// Then test it depending on the file type
|
||||
const promises = [];
|
||||
_.map(files, (content, type) => {
|
||||
promises.push(
|
||||
new Promise((resolve) => {
|
||||
if (type === 'certificate_key') {
|
||||
resolve(internalCertificate.checkPrivateKey(content));
|
||||
} else {
|
||||
@@ -567,21 +550,21 @@ const internalCertificate = {
|
||||
resolve(internalCertificate.getCertificateInfo(content, true));
|
||||
}
|
||||
}).then((res) => {
|
||||
return {[type]: res};
|
||||
}));
|
||||
return { [type]: res };
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
return Promise.all(promises).then((files) => {
|
||||
let data = {};
|
||||
|
||||
_.each(files, (file) => {
|
||||
data = _.assign({}, data, file);
|
||||
});
|
||||
|
||||
return Promise.all(promises)
|
||||
.then((files) => {
|
||||
let data = {};
|
||||
|
||||
_.each(files, (file) => {
|
||||
data = _.assign({}, data, file);
|
||||
});
|
||||
|
||||
return data;
|
||||
});
|
||||
return data;
|
||||
});
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
@@ -592,40 +575,41 @@ const internalCertificate = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
upload: (access, data) => {
|
||||
return internalCertificate.get(access, {id: data.id})
|
||||
.then((row) => {
|
||||
if (row.provider !== 'other') {
|
||||
throw new error.ValidationError('Cannot upload certificates for this type of provider');
|
||||
}
|
||||
return internalCertificate.get(access, { id: data.id }).then((row) => {
|
||||
if (row.provider !== 'other') {
|
||||
throw new error.ValidationError('Cannot upload certificates for this type of provider');
|
||||
}
|
||||
|
||||
return internalCertificate.validate(data)
|
||||
.then((validations) => {
|
||||
if (typeof validations.certificate === 'undefined') {
|
||||
throw new error.ValidationError('Certificate file was not provided');
|
||||
return internalCertificate
|
||||
.validate(data)
|
||||
.then((validations) => {
|
||||
if (typeof validations.certificate === 'undefined') {
|
||||
throw new error.ValidationError('Certificate file was not provided');
|
||||
}
|
||||
|
||||
_.map(data.files, (file, name) => {
|
||||
if (internalCertificate.allowedSslFiles.indexOf(name) !== -1) {
|
||||
row.meta[name] = file.data.toString();
|
||||
}
|
||||
|
||||
_.map(data.files, (file, name) => {
|
||||
if (internalCertificate.allowedSslFiles.indexOf(name) !== -1) {
|
||||
row.meta[name] = file.data.toString();
|
||||
}
|
||||
});
|
||||
|
||||
// TODO: This uses a mysql only raw function that won't translate to postgres
|
||||
return internalCertificate.update(access, {
|
||||
id: data.id,
|
||||
expires_on: moment(validations.certificate.dates.to, 'X').format('YYYY-MM-DD HH:mm:ss'),
|
||||
domain_names: [validations.certificate.cn],
|
||||
meta: _.clone(row.meta) // Prevent the update method from changing this value that we'll use later
|
||||
})
|
||||
.then((certificate) => {
|
||||
certificate.meta = row.meta;
|
||||
return internalCertificate.writeCustomCert(certificate);
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
return _.pick(row.meta, internalCertificate.allowedSslFiles);
|
||||
});
|
||||
});
|
||||
|
||||
// TODO: This uses a mysql only raw function that won't translate to postgres
|
||||
return internalCertificate
|
||||
.update(access, {
|
||||
id: data.id,
|
||||
expires_on: moment(validations.certificate.dates.to, 'X').format('YYYY-MM-DD HH:mm:ss'),
|
||||
domain_names: [validations.certificate.cn],
|
||||
meta: _.clone(row.meta), // Prevent the update method from changing this value that we'll use later
|
||||
})
|
||||
.then((certificate) => {
|
||||
certificate.meta = row.meta;
|
||||
return internalCertificate.writeCustomCert(certificate);
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
return _.pick(row.meta, internalCertificate.allowedSslFiles);
|
||||
});
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
@@ -636,7 +620,7 @@ const internalCertificate = {
|
||||
*/
|
||||
checkPrivateKey: (private_key) => {
|
||||
const randomName = crypto.randomBytes(8).toString('hex');
|
||||
const filepath = path.join('/tmp', 'certificate_' + randomName);
|
||||
const filepath = path.join('/tmp', 'certificate_' + randomName);
|
||||
fs.writeFileSync(filepath, private_key);
|
||||
return new Promise((resolve, reject) => {
|
||||
const failTimeout = setTimeout(() => {
|
||||
@@ -669,13 +653,15 @@ const internalCertificate = {
*/
getCertificateInfo: (certificate, throw_expired) => {
const randomName = crypto.randomBytes(8).toString('hex');
const filepath = path.join('/tmp', 'certificate_' + randomName);
const filepath = path.join('/tmp', 'certificate_' + randomName);
fs.writeFileSync(filepath, certificate);
return internalCertificate.getCertificateInfoFromFile(filepath, throw_expired)
return internalCertificate
.getCertificateInfoFromFile(filepath, throw_expired)
.then((certData) => {
fs.unlinkSync(filepath);
return certData;
}).catch((err) => {
})
.catch((err) => {
fs.unlinkSync(filepath);
throw err;
});
@@ -689,9 +675,10 @@ const internalCertificate = {
* @param {Boolean} [throw_expired] Throw when the certificate is out of date
*/
getCertificateInfoFromFile: (certificate_file, throw_expired) => {
let certData = {};
const certData = {};

return utils.exec('openssl x509 -in ' + certificate_file + ' -subject -noout')
return utils
.exec('openssl x509 -in ' + certificate_file + ' -subject -noout')
.then((result) => {
// subject=CN = something.example.com
const regex = /(?:subject=)?[^=]+=\s+(\S+)/gim;
@@ -701,7 +688,7 @@ const internalCertificate = {
throw new error.ValidationError('Could not determine subject from certificate: ' + result);
}

certData['cn'] = match[1];
certData.cn = match[1];
})
.then(() => {
return utils.exec('openssl x509 -in ' + certificate_file + ' -issuer -noout');
@@ -714,7 +701,7 @@ const internalCertificate = {
throw new error.ValidationError('Could not determine issuer from certificate: ' + result);
}

certData['issuer'] = match[1];
certData.issuer = match[1];
})
.then(() => {
return utils.exec('openssl x509 -in ' + certificate_file + ' -dates -noout');
@@ -723,7 +710,7 @@ const internalCertificate = {
// notBefore=Jul 14 04:04:29 2018 GMT
// notAfter=Oct 12 04:04:29 2018 GMT
let validFrom = null;
let validTo = null;
let validTo = null;

const lines = result.split('\n');
lines.map(function (str) {
@@ -749,13 +736,14 @@ const internalCertificate = {
throw new error.ValidationError('Certificate has expired');
}

certData['dates'] = {
certData.dates = {
from: validFrom,
to: validTo
to: validTo,
};

return certData;
}).catch((err) => {
})
.catch((err) => {
throw new error.ValidationError('Certificate is not valid (' + err.message + ')', err);
});
},
@@ -789,12 +777,7 @@ const internalCertificate = {
|
||||
requestLetsEncryptSsl: (certificate) => {
|
||||
logger.info('Requesting Certbot certificates for Cert #' + certificate.id + ': ' + certificate.domain_names.join(', '));
|
||||
|
||||
let cmd = certbotCommand + ' certonly ' +
|
||||
'--config "' + certbotConfig + '" ' +
|
||||
'--cert-name "npm-' + certificate.id + '" ' +
|
||||
'--authenticator webroot ' +
|
||||
'--preferred-challenges "dns,http" ' +
|
||||
'--domains "' + certificate.domain_names.join(',') + '"';
|
||||
let cmd = certbotCommand + ' certonly ' + '--config "' + certbotConfig + '" ' + '--cert-name "npm-' + certificate.id + '" ' + '--authenticator webroot ' + '--preferred-challenges "dns,http" ' + '--domains "' + certificate.domain_names.join(',') + '"';
|
||||
|
||||
if (certificate.meta.letsencrypt_email === '') {
|
||||
cmd = cmd + ' --register-unsafely-without-email ';
|
||||
@@ -804,11 +787,10 @@ const internalCertificate = {
|
||||
|
||||
logger.info('Command:', cmd);
|
||||
|
||||
return utils.exec(cmd)
|
||||
.then((result) => {
|
||||
logger.success(result);
|
||||
return result;
|
||||
});
|
||||
return utils.exec(cmd).then((result) => {
|
||||
logger.success(result);
|
||||
return result;
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
@@ -825,20 +807,10 @@ const internalCertificate = {
|
||||
|
||||
const credentialsLocation = '/data/tls/certbot/credentials/credentials-' + certificate.id;
|
||||
// Escape single quotes and backslashes
|
||||
const escapedCredentials = certificate.meta.dns_provider_credentials.replaceAll('\'', '\\\'').replaceAll('\\', '\\\\');
|
||||
const credentialsCmd = `echo '${escapedCredentials}' | tee '${credentialsLocation}'`;
|
||||
const escapedCredentials = certificate.meta.dns_provider_credentials.replaceAll("'", "\\'").replaceAll('\\', '\\\\');
|
||||
const credentialsCmd = `echo '${escapedCredentials}' | tee '${credentialsLocation}'`;
|
||||
|
||||
let mainCmd = certbotCommand + ' certonly ' +
|
||||
'--config "' + certbotConfig + '" ' +
|
||||
'--cert-name "npm-' + certificate.id + '" ' +
|
||||
'--domains "' + certificate.domain_names.join(',') + '" ' +
|
||||
'--authenticator ' + dnsPlugin.full_plugin_name + ' ' +
|
||||
'--' + dnsPlugin.full_plugin_name + '-credentials "' + credentialsLocation + '"' +
|
||||
(
|
||||
certificate.meta.propagation_seconds !== undefined
|
||||
? ' --' + dnsPlugin.full_plugin_name + '-propagation-seconds ' + certificate.meta.propagation_seconds
|
||||
: ''
|
||||
);
|
||||
let mainCmd = certbotCommand + ' certonly ' + '--config "' + certbotConfig + '" ' + '--cert-name "npm-' + certificate.id + '" ' + '--domains "' + certificate.domain_names.join(',') + '" ' + '--authenticator ' + dnsPlugin.full_plugin_name + ' ' + '--' + dnsPlugin.full_plugin_name + '-credentials "' + credentialsLocation + '"' + (certificate.meta.propagation_seconds !== undefined ? ' --' + dnsPlugin.full_plugin_name + '-propagation-seconds ' + certificate.meta.propagation_seconds : '');
|
||||
|
||||
if (certificate.meta.letsencrypt_email === '') {
|
||||
mainCmd = mainCmd + ' --register-unsafely-without-email ';
|
||||
@@ -861,7 +833,6 @@ const internalCertificate = {
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
/**
|
||||
* @param {Access} access
|
||||
* @param {Object} data
|
||||
@@ -869,7 +840,8 @@ const internalCertificate = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
renew: (access, data) => {
|
||||
return access.can('certificates:update', data)
|
||||
return access
|
||||
.can('certificates:update', data)
|
||||
.then(() => {
|
||||
return internalCertificate.get(access, data);
|
||||
})
|
||||
@@ -882,20 +854,19 @@ const internalCertificate = {
|
||||
return internalCertificate.getCertificateInfoFromFile('/data/tls/certbot/live/npm-' + certificate.id + '/fullchain.pem');
|
||||
})
|
||||
.then((cert_info) => {
|
||||
return certificateModel
|
||||
.query()
|
||||
.patchAndFetchById(certificate.id, {
|
||||
expires_on: moment(cert_info.dates.to, 'X').format('YYYY-MM-DD HH:mm:ss')
|
||||
});
|
||||
return certificateModel.query().patchAndFetchById(certificate.id, {
|
||||
expires_on: moment(cert_info.dates.to, 'X').format('YYYY-MM-DD HH:mm:ss'),
|
||||
});
|
||||
})
|
||||
.then((updated_certificate) => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'renewed',
|
||||
object_type: 'certificate',
|
||||
object_id: updated_certificate.id,
|
||||
meta: updated_certificate
|
||||
})
|
||||
return internalAuditLog
|
||||
.add(access, {
|
||||
action: 'renewed',
|
||||
object_type: 'certificate',
|
||||
object_id: updated_certificate.id,
|
||||
meta: updated_certificate,
|
||||
})
|
||||
.then(() => {
|
||||
return updated_certificate;
|
||||
});
|
||||
@@ -913,19 +884,14 @@ const internalCertificate = {
|
||||
renewLetsEncryptSsl: (certificate) => {
|
||||
logger.info('Renewing Certbot certificates for Cert #' + certificate.id + ': ' + certificate.domain_names.join(', '));
|
||||
|
||||
const cmd = certbotCommand + ' renew --force-renewal ' +
|
||||
'--config "' + certbotConfig + '" ' +
|
||||
'--cert-name "npm-' + certificate.id + '" ' +
|
||||
'--preferred-challenges "dns,http" ' +
|
||||
'--no-random-sleep-on-renew';
|
||||
const cmd = certbotCommand + ' renew --force-renewal ' + '--config "' + certbotConfig + '" ' + '--cert-name "npm-' + certificate.id + '" ' + '--preferred-challenges "dns,http" ' + '--no-random-sleep-on-renew';
|
||||
|
||||
logger.info('Command:', cmd);
|
||||
|
||||
return utils.exec(cmd)
|
||||
.then((result) => {
|
||||
logger.info(result);
|
||||
return result;
|
||||
});
|
||||
return utils.exec(cmd).then((result) => {
|
||||
logger.info(result);
|
||||
return result;
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
@@ -941,19 +907,14 @@ const internalCertificate = {
|
||||
|
||||
logger.info(`Renewing Certbot certificates via ${dnsPlugin.name} for Cert #${certificate.id}: ${certificate.domain_names.join(', ')}`);
|
||||
|
||||
let mainCmd = certbotCommand + ' renew --force-renewal ' +
|
||||
'--config "' + certbotConfig + '" ' +
|
||||
'--cert-name "npm-' + certificate.id + '" ' +
|
||||
'--preferred-challenges "dns,http" ' +
|
||||
'--no-random-sleep-on-renew';
|
||||
const mainCmd = certbotCommand + ' renew --force-renewal ' + '--config "' + certbotConfig + '" ' + '--cert-name "npm-' + certificate.id + '" ' + '--preferred-challenges "dns,http" ' + '--no-random-sleep-on-renew';
|
||||
|
||||
logger.info('Command:', mainCmd);
|
||||
|
||||
return utils.exec(mainCmd)
|
||||
.then(async (result) => {
|
||||
logger.info(result);
|
||||
return result;
|
||||
});
|
||||
return utils.exec(mainCmd).then(async (result) => {
|
||||
logger.info(result);
|
||||
return result;
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
@@ -964,18 +925,15 @@ const internalCertificate = {
|
||||
revokeLetsEncryptSsl: (certificate, throw_errors) => {
|
||||
logger.info('Revoking Certbot certificates for Cert #' + certificate.id + ': ' + certificate.domain_names.join(', '));
|
||||
|
||||
const mainCmd = certbotCommand + ' revoke ' +
|
||||
'--config "' + certbotConfig + '" ' +
|
||||
'--cert-path "/data/tls/certbot/live/npm-' + certificate.id + '/privkey.pem" ' +
|
||||
'--cert-path "/data/tls/certbot/live/npm-' + certificate.id + '/fullchain.pem" ' +
|
||||
'--delete-after-revoke';
|
||||
const mainCmd = certbotCommand + ' revoke ' + '--config "' + certbotConfig + '" ' + '--cert-path "/data/tls/certbot/live/npm-' + certificate.id + '/privkey.pem" ' + '--cert-path "/data/tls/certbot/live/npm-' + certificate.id + '/fullchain.pem" ' + '--delete-after-revoke';
|
||||
|
||||
// Don't fail command if file does not exist
|
||||
const delete_credentialsCmd = `rm -f '/data/tls/certbot/credentials/credentials-${certificate.id}' || true`;
|
||||
|
||||
logger.info('Command:', mainCmd + '; ' + delete_credentialsCmd);
|
||||
|
||||
return utils.exec(mainCmd)
|
||||
return utils
|
||||
.exec(mainCmd)
|
||||
.then(async (result) => {
|
||||
await utils.exec(delete_credentialsCmd);
|
||||
logger.info(result);
|
||||
@@ -1009,7 +967,7 @@ const internalCertificate = {
|
||||
*/
|
||||
disableInUseHosts: (in_use_result) => {
|
||||
if (in_use_result.total_count) {
|
||||
let promises = [];
|
||||
const promises = [];
|
||||
|
||||
if (in_use_result.proxy_hosts.length) {
|
||||
promises.push(internalNginx.bulkDeleteConfigs('proxy_host', in_use_result.proxy_hosts));
|
||||
@@ -1024,7 +982,6 @@ const internalCertificate = {
|
||||
}
|
||||
|
||||
return Promise.all(promises);
|
||||
|
||||
} else {
|
||||
return Promise.resolve();
|
||||
}
|
||||
@@ -1039,7 +996,7 @@ const internalCertificate = {
|
||||
*/
|
||||
enableInUseHosts: (in_use_result) => {
|
||||
if (in_use_result.total_count) {
|
||||
let promises = [];
|
||||
const promises = [];
|
||||
|
||||
if (in_use_result.proxy_hosts.length) {
|
||||
promises.push(internalNginx.bulkGenerateConfigs('proxy_host', in_use_result.proxy_hosts));
|
||||
@@ -1054,7 +1011,6 @@ const internalCertificate = {
|
||||
}
|
||||
|
||||
return Promise.all(promises);
|
||||
|
||||
} else {
|
||||
return Promise.resolve();
|
||||
}
|
||||
@@ -1071,32 +1027,31 @@ const internalCertificate = {
|
||||
}
|
||||
|
||||
// Create a test challenge file
|
||||
const testChallengeDir = '/tmp/acme-challenge/.well-known/acme-challenge';
|
||||
const testChallengeDir = '/tmp/acme-challenge/.well-known/acme-challenge';
|
||||
const testChallengeFile = testChallengeDir + '/test-challenge';
|
||||
fs.mkdirSync(testChallengeDir, {recursive: true});
|
||||
fs.writeFileSync(testChallengeFile, 'Success', {encoding: 'utf8'});
|
||||
fs.mkdirSync(testChallengeDir, { recursive: true });
|
||||
fs.writeFileSync(testChallengeFile, 'Success', { encoding: 'utf8' });
|
||||
|
||||
async function performTestForDomain (domain) {
|
||||
async function performTestForDomain(domain) {
|
||||
logger.info('Testing http challenge for ' + domain);
|
||||
const url = `http://${domain}/.well-known/acme-challenge/test-challenge`;
|
||||
const url = `http://${domain}/.well-known/acme-challenge/test-challenge`;
|
||||
const formBody = `method=G&url=${encodeURI(url)}&bodytype=T&locationid=10`;
|
||||
const options = {
|
||||
method: 'POST',
|
||||
const options = {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
'Content-Length': Buffer.byteLength(formBody),
|
||||
'Connection': 'keep-alive',
|
||||
'User-Agent': 'NPMplus',
|
||||
'Accept': '*/*'
|
||||
}
|
||||
Connection: 'keep-alive',
|
||||
'User-Agent': 'NPMplus',
|
||||
Accept: '*/*',
|
||||
},
|
||||
};
|
||||
|
||||
const result = await new Promise((resolve) => {
|
||||
|
||||
const req = https.request('https://www.site24x7.com/tools/restapi-tester', options, function (res) {
|
||||
let responseBody = '';
|
||||
|
||||
res.on('data', (chunk) => responseBody = responseBody + chunk);
|
||||
res.on('data', (chunk) => (responseBody = responseBody + chunk));
|
||||
res.on('end', function () {
|
||||
try {
|
||||
const parsedBody = JSON.parse(responseBody + '');
|
||||
@@ -1120,8 +1075,10 @@ const internalCertificate = {
|
||||
// Make sure to write the request body.
|
||||
req.write(formBody);
|
||||
req.end();
|
||||
req.on('error', function (e) { logger.warn(`Failed to test HTTP challenge for domain ${domain}`, e);
|
||||
resolve(undefined); });
|
||||
req.on('error', function (e) {
|
||||
logger.warn(`Failed to test HTTP challenge for domain ${domain}`, e);
|
||||
resolve(undefined);
|
||||
});
|
||||
});
|
||||
|
||||
if (!result) {
|
||||
@@ -1154,7 +1111,7 @@ const internalCertificate = {
|
||||
|
||||
const results = {};
|
||||
|
||||
for (const domain of domains){
|
||||
for (const domain of domains) {
|
||||
results[domain] = await performTestForDomain(domain);
|
||||
}
|
||||
|
||||
@@ -1162,7 +1119,7 @@ const internalCertificate = {
|
||||
fs.unlinkSync(testChallengeFile);
|
||||
|
||||
return results;
|
||||
}
|
||||
},
|
||||
};

module.exports = internalCertificate;

@@ -1,66 +1,63 @@
const _ = require('lodash');
const error = require('../lib/error');
const utils = require('../lib/utils');
const deadHostModel = require('../models/dead_host');
const internalHost = require('./host');
const internalNginx = require('./nginx');
const internalAuditLog = require('./audit-log');
const _ = require('lodash');
const error = require('../lib/error');
const utils = require('../lib/utils');
const deadHostModel = require('../models/dead_host');
const internalHost = require('./host');
const internalNginx = require('./nginx');
const internalAuditLog = require('./audit-log');
const internalCertificate = require('./certificate');

function omissions () {
function omissions() {
return ['is_deleted'];
}

const internalDeadHost = {

/**
|
||||
* @param {Access} access
|
||||
* @param {Object} data
|
||||
* @returns {Promise}
|
||||
*/
|
||||
create: (access, data) => {
|
||||
let create_certificate = data.certificate_id === 'new';
|
||||
const create_certificate = data.certificate_id === 'new';
|
||||
|
||||
if (create_certificate) {
|
||||
delete data.certificate_id;
|
||||
}
|
||||
|
||||
return access.can('dead_hosts:create', data)
|
||||
.then((/*access_data*/) => {
|
||||
return access
|
||||
.can('dead_hosts:create', data)
|
||||
.then((/* access_data */) => {
|
||||
// Get a list of the domain names and check each of them against existing records
|
||||
let domain_name_check_promises = [];
|
||||
const domain_name_check_promises = [];
|
||||
|
||||
data.domain_names.map(function (domain_name) {
|
||||
domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name));
|
||||
});
|
||||
|
||||
return Promise.all(domain_name_check_promises)
|
||||
.then((check_results) => {
|
||||
check_results.map(function (result) {
|
||||
if (result.is_taken) {
|
||||
throw new error.ValidationError(result.hostname + ' is already in use');
|
||||
}
|
||||
});
|
||||
return Promise.all(domain_name_check_promises).then((check_results) => {
|
||||
check_results.map(function (result) {
|
||||
if (result.is_taken) {
|
||||
throw new error.ValidationError(result.hostname + ' is already in use');
|
||||
}
|
||||
});
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
// At this point the domains should have been checked
|
||||
data.owner_user_id = access.token.getUserId(1);
|
||||
data = internalHost.cleanSslHstsData(data);
|
||||
data = internalHost.cleanSslHstsData(data);
|
||||
|
||||
return deadHostModel
|
||||
.query()
|
||||
.insertAndFetch(data)
|
||||
.then(utils.omitRow(omissions()));
|
||||
return deadHostModel.query().insertAndFetch(data).then(utils.omitRow(omissions()));
|
||||
})
|
||||
.then((row) => {
|
||||
if (create_certificate) {
|
||||
return internalCertificate.createQuickCertificate(access, data)
|
||||
return internalCertificate
|
||||
.createQuickCertificate(access, data)
|
||||
.then((cert) => {
|
||||
// update host with cert id
|
||||
return internalDeadHost.update(access, {
|
||||
id: row.id,
|
||||
certificate_id: cert.id
|
||||
id: row.id,
|
||||
certificate_id: cert.id,
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
@@ -73,27 +70,27 @@ const internalDeadHost = {
|
||||
.then((row) => {
|
||||
// re-fetch with cert
|
||||
return internalDeadHost.get(access, {
|
||||
id: row.id,
|
||||
expand: ['certificate', 'owner']
|
||||
id: row.id,
|
||||
expand: ['certificate', 'owner'],
|
||||
});
|
||||
})
|
||||
.then((row) => {
|
||||
// Configure nginx
|
||||
return internalNginx.configure(deadHostModel, 'dead_host', row)
|
||||
.then(() => {
|
||||
return row;
|
||||
});
|
||||
return internalNginx.configure(deadHostModel, 'dead_host', row).then(() => {
|
||||
return row;
|
||||
});
|
||||
})
|
||||
.then((row) => {
|
||||
data.meta = _.assign({}, data.meta || {}, row.meta);
|
||||
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'created',
|
||||
object_type: 'dead-host',
|
||||
object_id: row.id,
|
||||
meta: data
|
||||
})
|
||||
return internalAuditLog
|
||||
.add(access, {
|
||||
action: 'created',
|
||||
object_type: 'dead-host',
|
||||
object_id: row.id,
|
||||
meta: data,
|
||||
})
|
||||
.then(() => {
|
||||
return row;
|
||||
});
|
||||
@@ -107,34 +104,34 @@ const internalDeadHost = {
|
||||
* @return {Promise}
|
||||
*/
|
||||
update: (access, data) => {
|
||||
let create_certificate = data.certificate_id === 'new';
|
||||
const create_certificate = data.certificate_id === 'new';
|
||||
|
||||
if (create_certificate) {
|
||||
delete data.certificate_id;
|
||||
}
|
||||
|
||||
return access.can('dead_hosts:update', data.id)
|
||||
.then((/*access_data*/) => {
|
||||
return access
|
||||
.can('dead_hosts:update', data.id)
|
||||
.then((/* access_data */) => {
|
||||
// Get a list of the domain names and check each of them against existing records
|
||||
let domain_name_check_promises = [];
|
||||
const domain_name_check_promises = [];
|
||||
|
||||
if (typeof data.domain_names !== 'undefined') {
|
||||
data.domain_names.map(function (domain_name) {
|
||||
domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, 'dead', data.id));
|
||||
});
|
||||
|
||||
return Promise.all(domain_name_check_promises)
|
||||
.then((check_results) => {
|
||||
check_results.map(function (result) {
|
||||
if (result.is_taken) {
|
||||
throw new error.ValidationError(result.hostname + ' is already in use');
|
||||
}
|
||||
});
|
||||
return Promise.all(domain_name_check_promises).then((check_results) => {
|
||||
check_results.map(function (result) {
|
||||
if (result.is_taken) {
|
||||
throw new error.ValidationError(result.hostname + ' is already in use');
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
})
|
||||
.then(() => {
|
||||
return internalDeadHost.get(access, {id: data.id});
|
||||
return internalDeadHost.get(access, { id: data.id });
|
||||
})
|
||||
.then((row) => {
|
||||
if (row.id !== data.id) {
|
||||
@@ -143,10 +140,11 @@ const internalDeadHost = {
|
||||
}
|
||||
|
||||
if (create_certificate) {
|
||||
return internalCertificate.createQuickCertificate(access, {
|
||||
domain_names: data.domain_names || row.domain_names,
|
||||
meta: _.assign({}, row.meta, data.meta)
|
||||
})
|
||||
return internalCertificate
|
||||
.createQuickCertificate(access, {
|
||||
domain_names: data.domain_names || row.domain_names,
|
||||
meta: _.assign({}, row.meta, data.meta),
|
||||
})
|
||||
.then((cert) => {
|
||||
// update host with cert id
|
||||
data.certificate_id = cert.id;
|
||||
@@ -160,42 +158,47 @@ const internalDeadHost = {
|
||||
})
|
||||
.then((row) => {
|
||||
// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
|
||||
data = _.assign({}, {
|
||||
domain_names: row.domain_names
|
||||
}, data);
|
||||
data = _.assign(
|
||||
{},
|
||||
{
|
||||
domain_names: row.domain_names,
|
||||
},
|
||||
data,
|
||||
);
|
||||
|
||||
data = internalHost.cleanSslHstsData(data, row);
|
||||
|
||||
return deadHostModel
|
||||
.query()
|
||||
.where({id: data.id})
|
||||
.where({ id: data.id })
|
||||
.patch(data)
|
||||
.then((saved_row) => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'updated',
|
||||
object_type: 'dead-host',
|
||||
object_id: row.id,
|
||||
meta: data
|
||||
})
|
||||
return internalAuditLog
|
||||
.add(access, {
|
||||
action: 'updated',
|
||||
object_type: 'dead-host',
|
||||
object_id: row.id,
|
||||
meta: data,
|
||||
})
|
||||
.then(() => {
|
||||
return _.omit(saved_row, omissions());
|
||||
});
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
return internalDeadHost.get(access, {
|
||||
id: data.id,
|
||||
expand: ['owner', 'certificate']
|
||||
})
|
||||
return internalDeadHost
|
||||
.get(access, {
|
||||
id: data.id,
|
||||
expand: ['owner', 'certificate'],
|
||||
})
|
||||
.then((row) => {
|
||||
// Configure nginx
|
||||
return internalNginx.configure(deadHostModel, 'dead_host', row)
|
||||
.then((new_meta) => {
|
||||
row.meta = new_meta;
|
||||
row = internalHost.cleanRowCertificateMeta(row);
|
||||
return _.omit(row, omissions());
|
||||
});
|
||||
return internalNginx.configure(deadHostModel, 'dead_host', row).then((new_meta) => {
|
||||
row.meta = new_meta;
|
||||
row = internalHost.cleanRowCertificateMeta(row);
|
||||
return _.omit(row, omissions());
|
||||
});
|
||||
});
|
||||
});
|
||||
},
|
||||
@@ -213,14 +216,10 @@ const internalDeadHost = {
|
||||
data = {};
|
||||
}
|
||||
|
||||
return access.can('dead_hosts:get', data.id)
|
||||
return access
|
||||
.can('dead_hosts:get', data.id)
|
||||
.then((access_data) => {
|
||||
let query = deadHostModel
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.andWhere('id', data.id)
|
||||
.allowGraph('[owner,certificate]')
|
||||
.first();
|
||||
const query = deadHostModel.query().where('is_deleted', 0).andWhere('id', data.id).allowGraph('[owner,certificate]').first();
|
||||
|
||||
if (access_data.permission_visibility !== 'all') {
|
||||
query.andWhere('owner_user_id', access.token.getUserId(1));
|
||||
@@ -252,9 +251,10 @@ const internalDeadHost = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
delete: (access, data) => {
|
||||
return access.can('dead_hosts:delete', data.id)
|
||||
return access
|
||||
.can('dead_hosts:delete', data.id)
|
||||
.then(() => {
|
||||
return internalDeadHost.get(access, {id: data.id});
|
||||
return internalDeadHost.get(access, { id: data.id });
|
||||
})
|
||||
.then((row) => {
|
||||
if (!row) {
|
||||
@@ -265,22 +265,21 @@ const internalDeadHost = {
|
||||
.query()
|
||||
.where('id', row.id)
|
||||
.patch({
|
||||
is_deleted: 1
|
||||
is_deleted: 1,
|
||||
})
|
||||
.then(() => {
|
||||
// Delete Nginx Config
|
||||
return internalNginx.deleteConfig('dead_host', row)
|
||||
.then(() => {
|
||||
return internalNginx.reload();
|
||||
});
|
||||
return internalNginx.deleteConfig('dead_host', row).then(() => {
|
||||
return internalNginx.reload();
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'deleted',
|
||||
action: 'deleted',
|
||||
object_type: 'dead-host',
|
||||
object_id: row.id,
|
||||
meta: _.omit(row, omissions())
|
||||
object_id: row.id,
|
||||
meta: _.omit(row, omissions()),
|
||||
});
|
||||
});
|
||||
})
|
||||
@@ -297,11 +296,12 @@ const internalDeadHost = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
enable: (access, data) => {
|
||||
return access.can('dead_hosts:update', data.id)
|
||||
return access
|
||||
.can('dead_hosts:update', data.id)
|
||||
.then(() => {
|
||||
return internalDeadHost.get(access, {
|
||||
id: data.id,
|
||||
expand: ['certificate', 'owner']
|
||||
id: data.id,
|
||||
expand: ['certificate', 'owner'],
|
||||
});
|
||||
})
|
||||
.then((row) => {
|
||||
@@ -317,7 +317,7 @@ const internalDeadHost = {
|
||||
.query()
|
||||
.where('id', row.id)
|
||||
.patch({
|
||||
enabled: 1
|
||||
enabled: 1,
|
||||
})
|
||||
.then(() => {
|
||||
// Configure nginx
|
||||
@@ -326,10 +326,10 @@ const internalDeadHost = {
|
||||
.then(() => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'enabled',
|
||||
action: 'enabled',
|
||||
object_type: 'dead-host',
|
||||
object_id: row.id,
|
||||
meta: _.omit(row, omissions())
|
||||
object_id: row.id,
|
||||
meta: _.omit(row, omissions()),
|
||||
});
|
||||
});
|
||||
})
|
||||
@@ -346,9 +346,10 @@ const internalDeadHost = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
disable: (access, data) => {
|
||||
return access.can('dead_hosts:update', data.id)
|
||||
return access
|
||||
.can('dead_hosts:update', data.id)
|
||||
.then(() => {
|
||||
return internalDeadHost.get(access, {id: data.id});
|
||||
return internalDeadHost.get(access, { id: data.id });
|
||||
})
|
||||
.then((row) => {
|
||||
if (!row) {
|
||||
@@ -363,22 +364,21 @@ const internalDeadHost = {
|
||||
.query()
|
||||
.where('id', row.id)
|
||||
.patch({
|
||||
enabled: 0
|
||||
enabled: 0,
|
||||
})
|
||||
.then(() => {
|
||||
// Delete Nginx Config
|
||||
return internalNginx.deleteConfig('dead_host', row)
|
||||
.then(() => {
|
||||
return internalNginx.reload();
|
||||
});
|
||||
return internalNginx.deleteConfig('dead_host', row).then(() => {
|
||||
return internalNginx.reload();
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'disabled',
|
||||
action: 'disabled',
|
||||
object_type: 'dead-host',
|
||||
object_id: row.id,
|
||||
meta: _.omit(row, omissions())
|
||||
object_id: row.id,
|
||||
meta: _.omit(row, omissions()),
|
||||
});
|
||||
});
|
||||
})
|
||||
@@ -396,14 +396,10 @@ const internalDeadHost = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
getAll: (access, expand, search_query) => {
|
||||
return access.can('dead_hosts:list')
|
||||
return access
|
||||
.can('dead_hosts:list')
|
||||
.then((access_data) => {
|
||||
let query = deadHostModel
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.groupBy('id')
|
||||
.allowGraph('[owner,certificate]')
|
||||
.orderBy('domain_names', 'ASC');
|
||||
const query = deadHostModel.query().where('is_deleted', 0).groupBy('id').allowGraph('[owner,certificate]').orderBy('domain_names', 'ASC');
|
||||
|
||||
if (access_data.permission_visibility !== 'all') {
|
||||
query.andWhere('owner_user_id', access.token.getUserId(1));
|
||||
@@ -439,20 +435,16 @@ const internalDeadHost = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
getCount: (user_id, visibility) => {
|
||||
let query = deadHostModel
|
||||
.query()
|
||||
.count('id as count')
|
||||
.where('is_deleted', 0);
|
||||
const query = deadHostModel.query().count('id as count').where('is_deleted', 0);
|
||||
|
||||
if (visibility !== 'all') {
|
||||
query.andWhere('owner_user_id', user_id);
|
||||
}
|
||||
|
||||
return query.first()
|
||||
.then((row) => {
|
||||
return parseInt(row.count, 10);
|
||||
});
|
||||
}
|
||||
return query.first().then((row) => {
|
||||
return parseInt(row.count, 10);
|
||||
});
|
||||
},
|
||||
};

module.exports = internalDeadHost;

@@ -1,10 +1,9 @@
const _ = require('lodash');
const proxyHostModel = require('../models/proxy_host');
const _ = require('lodash');
const proxyHostModel = require('../models/proxy_host');
const redirectionHostModel = require('../models/redirection_host');
const deadHostModel = require('../models/dead_host');
const deadHostModel = require('../models/dead_host');

const internalHost = {

/**
|
||||
* Makes sure that the ssl_* and hsts_* fields play nicely together.
|
||||
* ie: if there is no cert, then force_ssl is off.
|
||||
@@ -17,10 +16,10 @@ const internalHost = {
|
||||
cleanSslHstsData: function (data, existing_data) {
|
||||
existing_data = existing_data === undefined ? {} : existing_data;
|
||||
|
||||
let combined_data = _.assign({}, existing_data, data);
|
||||
const combined_data = _.assign({}, existing_data, data);
|
||||
|
||||
if (!combined_data.certificate_id) {
|
||||
combined_data.ssl_forced = false;
|
||||
combined_data.ssl_forced = false;
|
||||
combined_data.hsts_subdomains = false;
|
||||
}
|
||||
|
||||
@@ -69,47 +68,36 @@ const internalHost = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
getHostsWithDomains: function (domain_names) {
|
||||
let promises = [
|
||||
proxyHostModel
|
||||
.query()
|
||||
.where('is_deleted', 0),
|
||||
redirectionHostModel
|
||||
.query()
|
||||
.where('is_deleted', 0),
|
||||
deadHostModel
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
];
|
||||
const promises = [proxyHostModel.query().where('is_deleted', 0), redirectionHostModel.query().where('is_deleted', 0), deadHostModel.query().where('is_deleted', 0)];
|
||||
|
||||
return Promise.all(promises)
|
||||
.then((promises_results) => {
|
||||
let response_object = {
|
||||
total_count: 0,
|
||||
dead_hosts: [],
|
||||
proxy_hosts: [],
|
||||
redirection_hosts: []
|
||||
};
|
||||
return Promise.all(promises).then((promises_results) => {
|
||||
const response_object = {
|
||||
total_count: 0,
|
||||
dead_hosts: [],
|
||||
proxy_hosts: [],
|
||||
redirection_hosts: [],
|
||||
};
|
||||
|
||||
if (promises_results[0]) {
|
||||
// Proxy Hosts
|
||||
response_object.proxy_hosts = internalHost._getHostsWithDomains(promises_results[0], domain_names);
|
||||
response_object.total_count += response_object.proxy_hosts.length;
|
||||
}
|
||||
if (promises_results[0]) {
|
||||
// Proxy Hosts
|
||||
response_object.proxy_hosts = internalHost._getHostsWithDomains(promises_results[0], domain_names);
|
||||
response_object.total_count += response_object.proxy_hosts.length;
|
||||
}
|
||||
|
||||
if (promises_results[1]) {
|
||||
// Redirection Hosts
|
||||
response_object.redirection_hosts = internalHost._getHostsWithDomains(promises_results[1], domain_names);
|
||||
response_object.total_count += response_object.redirection_hosts.length;
|
||||
}
|
||||
if (promises_results[1]) {
|
||||
// Redirection Hosts
|
||||
response_object.redirection_hosts = internalHost._getHostsWithDomains(promises_results[1], domain_names);
|
||||
response_object.total_count += response_object.redirection_hosts.length;
|
||||
}
|
||||
|
||||
if (promises_results[2]) {
|
||||
// Dead Hosts
|
||||
response_object.dead_hosts = internalHost._getHostsWithDomains(promises_results[2], domain_names);
|
||||
response_object.total_count += response_object.dead_hosts.length;
|
||||
}
|
||||
if (promises_results[2]) {
|
||||
// Dead Hosts
|
||||
response_object.dead_hosts = internalHost._getHostsWithDomains(promises_results[2], domain_names);
|
||||
response_object.total_count += response_object.dead_hosts.length;
|
||||
}
|
||||
|
||||
return response_object;
|
||||
});
|
||||
return response_object;
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
@@ -121,7 +109,7 @@ const internalHost = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
isHostnameTaken: function (hostname, ignore_type, ignore_id) {
|
||||
let promises = [
|
||||
const promises = [
|
||||
proxyHostModel
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
@@ -133,39 +121,38 @@ const internalHost = {
|
||||
deadHostModel
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.andWhere('domain_names', 'like', '%' + hostname + '%')
|
||||
.andWhere('domain_names', 'like', '%' + hostname + '%'),
|
||||
];
|
||||
|
||||
return Promise.all(promises)
|
||||
.then((promises_results) => {
|
||||
let is_taken = false;
|
||||
return Promise.all(promises).then((promises_results) => {
|
||||
let is_taken = false;
|
||||
|
||||
if (promises_results[0]) {
|
||||
// Proxy Hosts
|
||||
if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[0], ignore_type === 'proxy' && ignore_id ? ignore_id : 0)) {
|
||||
is_taken = true;
|
||||
}
|
||||
if (promises_results[0]) {
|
||||
// Proxy Hosts
|
||||
if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[0], ignore_type === 'proxy' && ignore_id ? ignore_id : 0)) {
|
||||
is_taken = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (promises_results[1]) {
|
||||
// Redirection Hosts
|
||||
if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[1], ignore_type === 'redirection' && ignore_id ? ignore_id : 0)) {
|
||||
is_taken = true;
|
||||
}
|
||||
if (promises_results[1]) {
|
||||
// Redirection Hosts
|
||||
if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[1], ignore_type === 'redirection' && ignore_id ? ignore_id : 0)) {
|
||||
is_taken = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (promises_results[2]) {
|
||||
// Dead Hosts
|
||||
if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[2], ignore_type === 'dead' && ignore_id ? ignore_id : 0)) {
|
||||
is_taken = true;
|
||||
}
|
||||
if (promises_results[2]) {
|
||||
// Dead Hosts
|
||||
if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[2], ignore_type === 'dead' && ignore_id ? ignore_id : 0)) {
|
||||
is_taken = true;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
hostname: hostname,
|
||||
is_taken: is_taken
|
||||
};
|
||||
});
|
||||
return {
|
||||
hostname,
|
||||
is_taken,
|
||||
};
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
@@ -203,7 +190,7 @@ const internalHost = {
|
||||
* @returns {Array}
|
||||
*/
|
||||
_getHostsWithDomains: function (hosts, domain_names) {
|
||||
let response = [];
|
||||
const response = [];
|
||||
|
||||
if (hosts && hosts.length) {
|
||||
hosts.map(function (host) {
|
||||
@@ -224,8 +211,7 @@ const internalHost = {
|
||||
}
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
},
|
||||
};

module.exports = internalHost;

@@ -1,11 +1,11 @@
const https = require('https');
const fs = require('fs');
const logger = require('../logger').ip_ranges;
const error = require('../lib/error');
const utils = require('../lib/utils');
const https = require('https');
const fs = require('fs');
const logger = require('../logger').ip_ranges;
const error = require('../lib/error');
const utils = require('../lib/utils');
const internalNginx = require('./nginx');

const CLOUDFRONT_URL = 'https://ip-ranges.amazonaws.com/ip-ranges.json';
const CLOUDFRONT_URL = 'https://ip-ranges.amazonaws.com/ip-ranges.json';
const CLOUDFARE_V4_URL = 'https://www.cloudflare.com/ips-v4';
const CLOUDFARE_V6_URL = 'https://www.cloudflare.com/ips-v6';

@@ -13,11 +13,10 @@ const regIpV4 = /^(\d+\.?){4}\/\d+/;
|
||||
const regIpV6 = /^(([\da-fA-F]+)?:)+\/\d+/;
|
||||
|
||||
const internalIpRanges = {
|
||||
|
||||
interval_timeout: 1000 * 60 * 60 * Number(process.env.IPRT),
|
||||
interval: null,
|
||||
interval_timeout: 1000 * 60 * 60 * Number(process.env.IPRT),
|
||||
interval: null,
|
||||
interval_processing: false,
|
||||
iteration_count: 0,
|
||||
iteration_count: 0,
|
||||
|
||||
initTimer: () => {
|
||||
if (process.env.SKIP_IP_RANGES === 'false') {
|
||||
@@ -29,19 +28,21 @@ const internalIpRanges = {
|
||||
fetchUrl: (url) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
logger.info('Fetching ' + url);
|
||||
return https.get(url, (res) => {
|
||||
res.setEncoding('utf8');
|
||||
let raw_data = '';
|
||||
res.on('data', (chunk) => {
|
||||
raw_data += chunk;
|
||||
});
|
||||
return https
|
||||
.get(url, (res) => {
|
||||
res.setEncoding('utf8');
|
||||
let raw_data = '';
|
||||
res.on('data', (chunk) => {
|
||||
raw_data += chunk;
|
||||
});
|
||||
|
||||
res.on('end', () => {
|
||||
resolve(raw_data);
|
||||
res.on('end', () => {
|
||||
resolve(raw_data);
|
||||
});
|
||||
})
|
||||
.on('error', (err) => {
|
||||
reject(err);
|
||||
});
|
||||
}).on('error', (err) => {
|
||||
reject(err);
|
||||
});
|
||||
});
|
||||
},
|
||||
|
||||
@@ -55,9 +56,10 @@ const internalIpRanges = {
|
||||
|
||||
let ip_ranges = [];
|
||||
|
||||
return internalIpRanges.fetchUrl(CLOUDFRONT_URL)
|
||||
return internalIpRanges
|
||||
.fetchUrl(CLOUDFRONT_URL)
|
||||
.then((cloudfront_data) => {
|
||||
let data = JSON.parse(cloudfront_data);
|
||||
const data = JSON.parse(cloudfront_data);
|
||||
|
||||
if (data && typeof data.prefixes !== 'undefined') {
|
||||
data.prefixes.map((item) => {
|
||||
@@ -79,31 +81,30 @@ const internalIpRanges = {
|
||||
return internalIpRanges.fetchUrl(CLOUDFARE_V4_URL);
|
||||
})
|
||||
.then((cloudfare_data) => {
|
||||
let items = cloudfare_data.split('\n').filter((line) => regIpV4.test(line));
|
||||
ip_ranges = [... ip_ranges, ... items];
|
||||
const items = cloudfare_data.split('\n').filter((line) => regIpV4.test(line));
|
||||
ip_ranges = [...ip_ranges, ...items];
|
||||
})
|
||||
.then(() => {
|
||||
return internalIpRanges.fetchUrl(CLOUDFARE_V6_URL);
|
||||
})
|
||||
.then((cloudfare_data) => {
|
||||
let items = cloudfare_data.split('\n').filter((line) => regIpV6.test(line));
|
||||
ip_ranges = [... ip_ranges, ... items];
|
||||
const items = cloudfare_data.split('\n').filter((line) => regIpV6.test(line));
|
||||
ip_ranges = [...ip_ranges, ...items];
|
||||
})
|
||||
.then(() => {
|
||||
let clean_ip_ranges = [];
|
||||
const clean_ip_ranges = [];
|
||||
ip_ranges.map((range) => {
|
||||
if (range) {
|
||||
clean_ip_ranges.push(range);
|
||||
}
|
||||
});
|
||||
|
||||
return internalIpRanges.generateConfig(clean_ip_ranges)
|
||||
.then(() => {
|
||||
if (internalIpRanges.iteration_count) {
|
||||
// Reload nginx
|
||||
return internalNginx.reload();
|
||||
}
|
||||
});
|
||||
return internalIpRanges.generateConfig(clean_ip_ranges).then(() => {
|
||||
if (internalIpRanges.iteration_count) {
|
||||
// Reload nginx
|
||||
return internalNginx.reload();
|
||||
}
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
internalIpRanges.interval_processing = false;
|
||||
@@ -124,18 +125,18 @@ const internalIpRanges = {
|
||||
const renderEngine = utils.getRenderEngine();
|
||||
return new Promise((resolve, reject) => {
|
||||
let template = null;
|
||||
let filename = '/data/nginx/ip_ranges.conf';
|
||||
const filename = '/data/nginx/ip_ranges.conf';
|
||||
try {
|
||||
template = fs.readFileSync(__dirname + '/../templates/ip_ranges.conf', {encoding: 'utf8'});
|
||||
template = fs.readFileSync(__dirname + '/../templates/ip_ranges.conf', { encoding: 'utf8' });
|
||||
} catch (err) {
|
||||
reject(new error.ConfigurationError(err.message));
|
||||
return;
|
||||
}
|
||||
|
||||
renderEngine
|
||||
.parseAndRender(template, {ip_ranges: ip_ranges})
|
||||
.parseAndRender(template, { ip_ranges })
|
||||
.then((config_text) => {
|
||||
fs.writeFileSync(filename, config_text, {encoding: 'utf8'});
|
||||
fs.writeFileSync(filename, config_text, { encoding: 'utf8' });
|
||||
resolve(true);
|
||||
})
|
||||
.catch((err) => {
|
||||
@@ -143,7 +144,7 @@ const internalIpRanges = {
|
||||
reject(new error.ConfigurationError(err.message));
|
||||
});
|
||||
});
|
||||
}
|
||||
},
|
||||
};

module.exports = internalIpRanges;

@@ -1,14 +1,13 @@
const _ = require('lodash');
const fs = require('fs');
const _ = require('lodash');
const fs = require('fs');
const logger = require('../logger').nginx;
const config = require('../lib/config');
const utils = require('../lib/utils');
const error = require('../lib/error');
const utils = require('../lib/utils');
const error = require('../lib/error');

const NgxPidFilePath = '/usr/local/nginx/logs/nginx.pid';

const internalNginx = {

/**
|
||||
* This will:
|
||||
* - test the nginx config first to make sure it's OK
|
||||
@@ -26,7 +25,8 @@ const internalNginx = {
|
||||
configure: (model, host_type, host) => {
|
||||
let combined_meta = {};
|
||||
|
||||
return internalNginx.test()
|
||||
return internalNginx
|
||||
.test()
|
||||
.then(() => {
|
||||
// Nginx is OK
|
||||
// We're deleting this config regardless.
|
||||
@@ -39,28 +39,26 @@ const internalNginx = {
|
||||
})
|
||||
.then(() => {
|
||||
// Test nginx again and update meta with result
|
||||
return internalNginx.test()
|
||||
return internalNginx
|
||||
.test()
|
||||
.then(() => {
|
||||
// nginx is ok
|
||||
combined_meta = _.assign({}, host.meta, {
|
||||
nginx_online: true,
|
||||
nginx_err: null
|
||||
nginx_err: null,
|
||||
});
|
||||
|
||||
return model
|
||||
.query()
|
||||
.where('id', host.id)
|
||||
.patch({
|
||||
meta: combined_meta
|
||||
});
|
||||
return model.query().where('id', host.id).patch({
|
||||
meta: combined_meta,
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
// Remove the error_log line because it's a docker-ism false positive that doesn't need to be reported.
|
||||
// It will always look like this:
|
||||
// nginx: [alert] could not open error log file: open() "/dev/null" failed (6: No such device or address)
|
||||
|
||||
let valid_lines = [];
|
||||
let err_lines = err.message.split('\n');
|
||||
const valid_lines = [];
|
||||
const err_lines = err.message.split('\n');
|
||||
err_lines.map(function (line) {
|
||||
if (line.indexOf('/dev/null') === -1) {
|
||||
valid_lines.push(line);
|
||||
@@ -74,14 +72,14 @@ const internalNginx = {
|
||||
// config is bad, update meta and delete config
|
||||
combined_meta = _.assign({}, host.meta, {
|
||||
nginx_online: false,
|
||||
nginx_err: valid_lines.join('\n')
|
||||
nginx_err: valid_lines.join('\n'),
|
||||
});
|
||||
|
||||
return model
|
||||
.query()
|
||||
.where('id', host.id)
|
||||
.patch({
|
||||
meta: combined_meta
|
||||
meta: combined_meta,
|
||||
})
|
||||
.then(() => {
|
||||
internalNginx.renameConfigAsError(host_type, host);
|
||||
@@ -115,22 +113,21 @@ const internalNginx = {
|
||||
*/
|
||||
|
||||
reload: () => {
|
||||
return internalNginx.test()
|
||||
.then(() => {
|
||||
if (fs.existsSync(NgxPidFilePath)) {
|
||||
const ngxPID = fs.readFileSync(NgxPidFilePath, 'utf8').trim();
|
||||
if (ngxPID.length > 0) {
|
||||
logger.info('Reloading Nginx');
|
||||
utils.exec('nginx -s reload');
|
||||
} else {
|
||||
logger.info('Starting Nginx');
|
||||
utils.execfg('nginx -e stderr');
|
||||
}
|
||||
return internalNginx.test().then(() => {
|
||||
if (fs.existsSync(NgxPidFilePath)) {
|
||||
const ngxPID = fs.readFileSync(NgxPidFilePath, 'utf8').trim();
|
||||
if (ngxPID.length > 0) {
|
||||
logger.info('Reloading Nginx');
|
||||
utils.exec('nginx -s reload');
|
||||
} else {
|
||||
logger.info('Starting Nginx');
|
||||
utils.execfg('nginx -e stderr');
|
||||
}
|
||||
});
|
||||
} else {
|
||||
logger.info('Starting Nginx');
|
||||
utils.execfg('nginx -e stderr');
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
@@ -155,22 +152,18 @@ const internalNginx = {
|
||||
let template;
|
||||
|
||||
try {
|
||||
template = fs.readFileSync(__dirname + '/../templates/_location.conf', {encoding: 'utf8'});
|
||||
template = fs.readFileSync(__dirname + '/../templates/_location.conf', { encoding: 'utf8' });
|
||||
} catch (err) {
|
||||
reject(new error.ConfigurationError(err.message));
|
||||
return;
|
||||
}
|
||||
|
||||
const renderEngine = utils.getRenderEngine();
|
||||
const renderEngine = utils.getRenderEngine();
|
||||
let renderedLocations = '';
|
||||
|
||||
const locationRendering = async () => {
|
||||
for (let i = 0; i < host.locations.length; i++) {
|
||||
let locationCopy = Object.assign({}, {access_list_id: host.access_list_id}, {certificate_id: host.certificate_id},
|
||||
{ssl_forced: host.ssl_forced}, {caching_enabled: host.caching_enabled}, {block_exploits: host.block_exploits},
|
||||
{allow_websocket_upgrade: host.allow_websocket_upgrade}, {http2_support: host.http2_support},
|
||||
{hsts_enabled: host.hsts_enabled}, {hsts_subdomains: host.hsts_subdomains}, {access_list: host.access_list},
|
||||
{certificate: host.certificate}, host.locations[i]);
|
||||
const locationCopy = Object.assign({}, { access_list_id: host.access_list_id }, { certificate_id: host.certificate_id }, { ssl_forced: host.ssl_forced }, { caching_enabled: host.caching_enabled }, { block_exploits: host.block_exploits }, { allow_websocket_upgrade: host.allow_websocket_upgrade }, { http2_support: host.http2_support }, { hsts_enabled: host.hsts_enabled }, { hsts_subdomains: host.hsts_subdomains }, { access_list: host.access_list }, { certificate: host.certificate }, host.locations[i]);
|
||||
|
||||
if (locationCopy.forward_host.indexOf('/') > -1) {
|
||||
const split = locationCopy.forward_host.split('/');
|
||||
@@ -179,14 +172,11 @@ const internalNginx = {
|
||||
locationCopy.forward_path = `/${split.join('/')}`;
|
||||
}
|
||||
|
||||
// eslint-disable-next-line
|
||||
renderedLocations += await renderEngine.parseAndRender(template, locationCopy);
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
locationRendering().then(() => resolve(renderedLocations));
|
||||
|
||||
});
|
||||
},
|
||||
|
||||
@@ -206,10 +196,10 @@ const internalNginx = {
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
let template = null;
|
||||
let filename = internalNginx.getConfigName(nice_host_type, host.id);
|
||||
const filename = internalNginx.getConfigName(nice_host_type, host.id);
|
||||
|
||||
try {
|
||||
template = fs.readFileSync(__dirname + '/../templates/' + nice_host_type + '.conf', {encoding: 'utf8'});
|
||||
template = fs.readFileSync(__dirname + '/../templates/' + nice_host_type + '.conf', { encoding: 'utf8' });
|
||||
} catch (err) {
|
||||
reject(new error.ConfigurationError(err.message));
|
||||
return;
|
||||
@@ -227,8 +217,8 @@ const internalNginx = {
|
||||
}
|
||||
|
||||
if (host.locations) {
|
||||
//logger.info ('host.locations = ' + JSON.stringify(host.locations, null, 2));
|
||||
origLocations = [].concat(host.locations);
|
||||
// logger.info ('host.locations = ' + JSON.stringify(host.locations, null, 2));
|
||||
origLocations = [].concat(host.locations);
|
||||
locationsPromise = internalNginx.renderLocations(host).then((renderedLocations) => {
|
||||
host.locations = renderedLocations;
|
||||
});
|
||||
@@ -239,7 +229,6 @@ const internalNginx = {
|
||||
host.use_default_location = false;
|
||||
}
|
||||
});
|
||||
|
||||
} else {
|
||||
locationsPromise = Promise.resolve();
|
||||
}
|
||||
@@ -251,7 +240,7 @@ const internalNginx = {
|
||||
renderEngine
|
||||
.parseAndRender(template, host)
|
||||
.then((config_text) => {
|
||||
fs.writeFileSync(filename, config_text, {encoding: 'utf8'});
|
||||
fs.writeFileSync(filename, config_text, { encoding: 'utf8' });
|
||||
|
||||
if (config.debug()) {
|
||||
logger.success('Wrote config:', filename, config_text);
|
||||
@@ -296,7 +285,6 @@ const internalNginx = {
|
||||
return host_type.replace(new RegExp('-', 'g'), '_');
|
||||
},
|
||||
|
||||
|
||||
/**
|
||||
* @param {String} host_type
|
||||
* @param {Object} [host]
|
||||
@@ -304,10 +292,10 @@ const internalNginx = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
deleteConfig: (host_type, host, delete_err_file) => {
|
||||
const config_file = internalNginx.getConfigName(internalNginx.getFileFriendlyHostType(host_type), typeof host === 'undefined' ? 0 : host.id);
|
||||
const config_file = internalNginx.getConfigName(internalNginx.getFileFriendlyHostType(host_type), typeof host === 'undefined' ? 0 : host.id);
|
||||
const config_file_err = config_file + '.err';
|
||||
|
||||
return new Promise((resolve/*, reject*/) => {
|
||||
return new Promise((resolve /*, reject */) => {
|
||||
internalNginx.deleteFile(config_file);
|
||||
if (delete_err_file) {
|
||||
internalNginx.deleteFile(config_file_err);
|
||||
@@ -322,10 +310,10 @@ const internalNginx = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
renameConfigAsError: (host_type, host) => {
|
||||
const config_file = internalNginx.getConfigName(internalNginx.getFileFriendlyHostType(host_type), typeof host === 'undefined' ? 0 : host.id);
|
||||
const config_file = internalNginx.getConfigName(internalNginx.getFileFriendlyHostType(host_type), typeof host === 'undefined' ? 0 : host.id);
|
||||
const config_file_err = config_file + '.err';
|
||||
|
||||
return new Promise((resolve/*, reject*/) => {
|
||||
return new Promise((resolve /*, reject */) => {
|
||||
fs.unlink(config_file, () => {
|
||||
// ignore result, continue
|
||||
fs.rename(config_file, config_file_err, () => {
|
||||
@@ -342,7 +330,7 @@ const internalNginx = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
bulkGenerateConfigs: (host_type, hosts) => {
|
||||
let promises = [];
|
||||
const promises = [];
|
||||
hosts.map(function (host) {
|
||||
promises.push(internalNginx.generateConfig(host_type, host));
|
||||
});
|
||||
@@ -356,7 +344,7 @@ const internalNginx = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
bulkDeleteConfigs: (host_type, hosts) => {
|
||||
let promises = [];
|
||||
const promises = [];
|
||||
hosts.map(function (host) {
|
||||
promises.push(internalNginx.deleteConfig(host_type, host, true));
|
||||
});
|
||||
@@ -382,7 +370,7 @@ const internalNginx = {
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
},
|
||||
};

module.exports = internalNginx;

@@ -1,66 +1,63 @@
const _ = require('lodash');
const error = require('../lib/error');
const utils = require('../lib/utils');
const proxyHostModel = require('../models/proxy_host');
const internalHost = require('./host');
const internalNginx = require('./nginx');
const internalAuditLog = require('./audit-log');
const _ = require('lodash');
const error = require('../lib/error');
const utils = require('../lib/utils');
const proxyHostModel = require('../models/proxy_host');
const internalHost = require('./host');
const internalNginx = require('./nginx');
const internalAuditLog = require('./audit-log');
const internalCertificate = require('./certificate');

function omissions () {
function omissions() {
return ['is_deleted'];
}

const internalProxyHost = {

/**
|
||||
* @param {Access} access
|
||||
* @param {Object} data
|
||||
* @returns {Promise}
|
||||
*/
|
||||
create: (access, data) => {
|
||||
let create_certificate = data.certificate_id === 'new';
|
||||
const create_certificate = data.certificate_id === 'new';
|
||||
|
||||
if (create_certificate) {
|
||||
delete data.certificate_id;
|
||||
}
|
||||
|
||||
return access.can('proxy_hosts:create', data)
|
||||
return access
|
||||
.can('proxy_hosts:create', data)
|
||||
.then(() => {
|
||||
// Get a list of the domain names and check each of them against existing records
|
||||
let domain_name_check_promises = [];
|
||||
const domain_name_check_promises = [];
|
||||
|
||||
data.domain_names.map(function (domain_name) {
|
||||
domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name));
|
||||
});
|
||||
|
||||
return Promise.all(domain_name_check_promises)
|
||||
.then((check_results) => {
|
||||
check_results.map(function (result) {
|
||||
if (result.is_taken) {
|
||||
throw new error.ValidationError(result.hostname + ' is already in use');
|
||||
}
|
||||
});
|
||||
return Promise.all(domain_name_check_promises).then((check_results) => {
|
||||
check_results.map(function (result) {
|
||||
if (result.is_taken) {
|
||||
throw new error.ValidationError(result.hostname + ' is already in use');
|
||||
}
|
||||
});
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
// At this point the domains should have been checked
|
||||
data.owner_user_id = access.token.getUserId(1);
|
||||
data = internalHost.cleanSslHstsData(data);
|
||||
data = internalHost.cleanSslHstsData(data);
|
||||
|
||||
return proxyHostModel
|
||||
.query()
|
||||
.insertAndFetch(data)
|
||||
.then(utils.omitRow(omissions()));
|
||||
return proxyHostModel.query().insertAndFetch(data).then(utils.omitRow(omissions()));
|
||||
})
|
||||
.then((row) => {
|
||||
if (create_certificate) {
|
||||
return internalCertificate.createQuickCertificate(access, data)
|
||||
return internalCertificate
.createQuickCertificate(access, data)
.then((cert) => {
// update host with cert id
return internalProxyHost.update(access, {
id: row.id,
certificate_id: cert.id
id: row.id,
certificate_id: cert.id,
});
})
.then(() => {
@@ -73,28 +70,28 @@ const internalProxyHost = {
.then((row) => {
// re-fetch with cert
return internalProxyHost.get(access, {
id: row.id,
expand: ['certificate', 'owner', 'access_list.[clients,items]']
id: row.id,
expand: ['certificate', 'owner', 'access_list.[clients,items]'],
});
})
.then((row) => {
// Configure nginx
return internalNginx.configure(proxyHostModel, 'proxy_host', row)
.then(() => {
return row;
});
return internalNginx.configure(proxyHostModel, 'proxy_host', row).then(() => {
return row;
});
})
.then((row) => {
// Audit log
data.meta = _.assign({}, data.meta || {}, row.meta);

// Add to audit log
return internalAuditLog.add(access, {
action: 'created',
object_type: 'proxy-host',
object_id: row.id,
meta: data
})
return internalAuditLog
.add(access, {
action: 'created',
object_type: 'proxy-host',
object_id: row.id,
meta: data,
})
.then(() => {
return row;
});
@@ -108,34 +105,34 @@ const internalProxyHost = {
* @return {Promise}
*/
update: (access, data) => {
let create_certificate = data.certificate_id === 'new';
const create_certificate = data.certificate_id === 'new';

if (create_certificate) {
delete data.certificate_id;
}

return access.can('proxy_hosts:update', data.id)
.then((/*access_data*/) => {
return access
.can('proxy_hosts:update', data.id)
.then((/* access_data */) => {
// Get a list of the domain names and check each of them against existing records
let domain_name_check_promises = [];
const domain_name_check_promises = [];

if (typeof data.domain_names !== 'undefined') {
data.domain_names.map(function (domain_name) {
domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, 'proxy', data.id));
});

return Promise.all(domain_name_check_promises)
.then((check_results) => {
check_results.map(function (result) {
if (result.is_taken) {
throw new error.ValidationError(result.hostname + ' is already in use');
}
});
return Promise.all(domain_name_check_promises).then((check_results) => {
check_results.map(function (result) {
if (result.is_taken) {
throw new error.ValidationError(result.hostname + ' is already in use');
}
});
});
}
})
.then(() => {
return internalProxyHost.get(access, {id: data.id});
return internalProxyHost.get(access, { id: data.id });
})
.then((row) => {
if (row.id !== data.id) {
@@ -144,10 +141,11 @@ const internalProxyHost = {
}

if (create_certificate) {
return internalCertificate.createQuickCertificate(access, {
domain_names: data.domain_names || row.domain_names,
meta: _.assign({}, row.meta, data.meta)
})
return internalCertificate
.createQuickCertificate(access, {
domain_names: data.domain_names || row.domain_names,
meta: _.assign({}, row.meta, data.meta),
})
.then((cert) => {
// update host with cert id
data.certificate_id = cert.id;
@@ -161,47 +159,52 @@ const internalProxyHost = {
})
.then((row) => {
// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
data = _.assign({}, {
domain_names: row.domain_names
}, data);
data = _.assign(
{},
{
domain_names: row.domain_names,
},
data,
);

data = internalHost.cleanSslHstsData(data, row);

return proxyHostModel
.query()
.where({id: data.id})
.where({ id: data.id })
.patch(data)
.then(utils.omitRow(omissions()))
.then((saved_row) => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'updated',
object_type: 'proxy-host',
object_id: row.id,
meta: data
})
return internalAuditLog
.add(access, {
action: 'updated',
object_type: 'proxy-host',
object_id: row.id,
meta: data,
})
.then(() => {
return saved_row;
});
});
})
.then(() => {
return internalProxyHost.get(access, {
id: data.id,
expand: ['owner', 'certificate', 'access_list.[clients,items]']
})
return internalProxyHost
.get(access, {
id: data.id,
expand: ['owner', 'certificate', 'access_list.[clients,items]'],
})
.then((row) => {
if (!row.enabled) {
// No need to add nginx config if host is disabled
return row;
}
// Configure nginx
return internalNginx.configure(proxyHostModel, 'proxy_host', row)
.then((new_meta) => {
row.meta = new_meta;
row = internalHost.cleanRowCertificateMeta(row);
return _.omit(row, omissions());
});
return internalNginx.configure(proxyHostModel, 'proxy_host', row).then((new_meta) => {
row.meta = new_meta;
row = internalHost.cleanRowCertificateMeta(row);
return _.omit(row, omissions());
});
});
});
},
@@ -219,14 +222,10 @@ const internalProxyHost = {
data = {};
}

return access.can('proxy_hosts:get', data.id)
return access
.can('proxy_hosts:get', data.id)
.then((access_data) => {
let query = proxyHostModel
.query()
.where('is_deleted', 0)
.andWhere('id', data.id)
.allowGraph('[owner,access_list.[clients,items],certificate]')
.first();
const query = proxyHostModel.query().where('is_deleted', 0).andWhere('id', data.id).allowGraph('[owner,access_list.[clients,items],certificate]').first();

if (access_data.permission_visibility !== 'all') {
query.andWhere('owner_user_id', access.token.getUserId(1));
@@ -259,9 +258,10 @@ const internalProxyHost = {
* @returns {Promise}
*/
delete: (access, data) => {
return access.can('proxy_hosts:delete', data.id)
return access
.can('proxy_hosts:delete', data.id)
.then(() => {
return internalProxyHost.get(access, {id: data.id});
return internalProxyHost.get(access, { id: data.id });
})
.then((row) => {
if (!row) {
@@ -272,22 +272,21 @@ const internalProxyHost = {
.query()
.where('id', row.id)
.patch({
is_deleted: 1
is_deleted: 1,
})
.then(() => {
// Delete Nginx Config
return internalNginx.deleteConfig('proxy_host', row)
.then(() => {
return internalNginx.reload();
});
return internalNginx.deleteConfig('proxy_host', row).then(() => {
return internalNginx.reload();
});
})
.then(() => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'deleted',
action: 'deleted',
object_type: 'proxy-host',
object_id: row.id,
meta: _.omit(row, omissions())
object_id: row.id,
meta: _.omit(row, omissions()),
});
});
})
@@ -304,11 +303,12 @@ const internalProxyHost = {
* @returns {Promise}
*/
enable: (access, data) => {
return access.can('proxy_hosts:update', data.id)
return access
.can('proxy_hosts:update', data.id)
.then(() => {
return internalProxyHost.get(access, {
id: data.id,
expand: ['certificate', 'owner', 'access_list']
id: data.id,
expand: ['certificate', 'owner', 'access_list'],
});
})
.then((row) => {
@@ -324,7 +324,7 @@ const internalProxyHost = {
.query()
.where('id', row.id)
.patch({
enabled: 1
enabled: 1,
})
.then(() => {
// Configure nginx
@@ -333,10 +333,10 @@ const internalProxyHost = {
.then(() => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'enabled',
action: 'enabled',
object_type: 'proxy-host',
object_id: row.id,
meta: _.omit(row, omissions())
object_id: row.id,
meta: _.omit(row, omissions()),
});
});
})
@@ -353,9 +353,10 @@ const internalProxyHost = {
* @returns {Promise}
*/
disable: (access, data) => {
return access.can('proxy_hosts:update', data.id)
return access
.can('proxy_hosts:update', data.id)
.then(() => {
return internalProxyHost.get(access, {id: data.id});
return internalProxyHost.get(access, { id: data.id });
})
.then((row) => {
if (!row) {
@@ -370,22 +371,21 @@ const internalProxyHost = {
.query()
.where('id', row.id)
.patch({
enabled: 0
enabled: 0,
})
.then(() => {
// Delete Nginx Config
return internalNginx.deleteConfig('proxy_host', row)
.then(() => {
return internalNginx.reload();
});
return internalNginx.deleteConfig('proxy_host', row).then(() => {
return internalNginx.reload();
});
})
.then(() => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'disabled',
action: 'disabled',
object_type: 'proxy-host',
object_id: row.id,
meta: _.omit(row, omissions())
object_id: row.id,
meta: _.omit(row, omissions()),
});
});
})
@@ -403,14 +403,10 @@ const internalProxyHost = {
* @returns {Promise}
*/
getAll: (access, expand, search_query) => {
return access.can('proxy_hosts:list')
return access
.can('proxy_hosts:list')
.then((access_data) => {
let query = proxyHostModel
.query()
.where('is_deleted', 0)
.groupBy('id')
.allowGraph('[owner,access_list,certificate]')
.orderBy('domain_names', 'ASC');
const query = proxyHostModel.query().where('is_deleted', 0).groupBy('id').allowGraph('[owner,access_list,certificate]').orderBy('domain_names', 'ASC');

if (access_data.permission_visibility !== 'all') {
query.andWhere('owner_user_id', access.token.getUserId(1));
@@ -446,20 +442,16 @@ const internalProxyHost = {
* @returns {Promise}
*/
getCount: (user_id, visibility) => {
let query = proxyHostModel
.query()
.count('id as count')
.where('is_deleted', 0);
const query = proxyHostModel.query().count('id as count').where('is_deleted', 0);

if (visibility !== 'all') {
query.andWhere('owner_user_id', user_id);
}

return query.first()
.then((row) => {
return parseInt(row.count, 10);
});
}
return query.first().then((row) => {
return parseInt(row.count, 10);
});
},
};

module.exports = internalProxyHost;

@@ -1,66 +1,63 @@
const _ = require('lodash');
const error = require('../lib/error');
const utils = require('../lib/utils');
const _ = require('lodash');
const error = require('../lib/error');
const utils = require('../lib/utils');
const redirectionHostModel = require('../models/redirection_host');
const internalHost = require('./host');
const internalNginx = require('./nginx');
const internalAuditLog = require('./audit-log');
const internalCertificate = require('./certificate');
const internalHost = require('./host');
const internalNginx = require('./nginx');
const internalAuditLog = require('./audit-log');
const internalCertificate = require('./certificate');

function omissions () {
function omissions() {
return ['is_deleted'];
}

const internalRedirectionHost = {

/**
* @param {Access} access
* @param {Object} data
* @returns {Promise}
*/
create: (access, data) => {
let create_certificate = data.certificate_id === 'new';
const create_certificate = data.certificate_id === 'new';

if (create_certificate) {
delete data.certificate_id;
}

return access.can('redirection_hosts:create', data)
.then((/*access_data*/) => {
return access
.can('redirection_hosts:create', data)
.then((/* access_data */) => {
// Get a list of the domain names and check each of them against existing records
let domain_name_check_promises = [];
const domain_name_check_promises = [];

data.domain_names.map(function (domain_name) {
domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name));
});

return Promise.all(domain_name_check_promises)
.then((check_results) => {
check_results.map(function (result) {
if (result.is_taken) {
throw new error.ValidationError(result.hostname + ' is already in use');
}
});
return Promise.all(domain_name_check_promises).then((check_results) => {
check_results.map(function (result) {
if (result.is_taken) {
throw new error.ValidationError(result.hostname + ' is already in use');
}
});
});
})
.then(() => {
// At this point the domains should have been checked
data.owner_user_id = access.token.getUserId(1);
data = internalHost.cleanSslHstsData(data);
data = internalHost.cleanSslHstsData(data);

return redirectionHostModel
.query()
.insertAndFetch(data)
.then(utils.omitRow(omissions()));
return redirectionHostModel.query().insertAndFetch(data).then(utils.omitRow(omissions()));
})
.then((row) => {
if (create_certificate) {
return internalCertificate.createQuickCertificate(access, data)
return internalCertificate
.createQuickCertificate(access, data)
.then((cert) => {
// update host with cert id
return internalRedirectionHost.update(access, {
id: row.id,
certificate_id: cert.id
id: row.id,
certificate_id: cert.id,
});
})
.then(() => {
@@ -72,27 +69,27 @@ const internalRedirectionHost = {
.then((row) => {
// re-fetch with cert
return internalRedirectionHost.get(access, {
id: row.id,
expand: ['certificate', 'owner']
id: row.id,
expand: ['certificate', 'owner'],
});
})
.then((row) => {
// Configure nginx
return internalNginx.configure(redirectionHostModel, 'redirection_host', row)
.then(() => {
return row;
});
return internalNginx.configure(redirectionHostModel, 'redirection_host', row).then(() => {
return row;
});
})
.then((row) => {
data.meta = _.assign({}, data.meta || {}, row.meta);

// Add to audit log
return internalAuditLog.add(access, {
action: 'created',
object_type: 'redirection-host',
object_id: row.id,
meta: data
})
return internalAuditLog
.add(access, {
action: 'created',
object_type: 'redirection-host',
object_id: row.id,
meta: data,
})
.then(() => {
return row;
});
@@ -106,34 +103,34 @@ const internalRedirectionHost = {
* @return {Promise}
*/
update: (access, data) => {
let create_certificate = data.certificate_id === 'new';
const create_certificate = data.certificate_id === 'new';

if (create_certificate) {
delete data.certificate_id;
}

return access.can('redirection_hosts:update', data.id)
.then((/*access_data*/) => {
return access
.can('redirection_hosts:update', data.id)
.then((/* access_data */) => {
// Get a list of the domain names and check each of them against existing records
let domain_name_check_promises = [];
const domain_name_check_promises = [];

if (typeof data.domain_names !== 'undefined') {
data.domain_names.map(function (domain_name) {
domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, 'redirection', data.id));
});

return Promise.all(domain_name_check_promises)
.then((check_results) => {
check_results.map(function (result) {
if (result.is_taken) {
throw new error.ValidationError(result.hostname + ' is already in use');
}
});
return Promise.all(domain_name_check_promises).then((check_results) => {
check_results.map(function (result) {
if (result.is_taken) {
throw new error.ValidationError(result.hostname + ' is already in use');
}
});
});
}
})
.then(() => {
return internalRedirectionHost.get(access, {id: data.id});
return internalRedirectionHost.get(access, { id: data.id });
})
.then((row) => {
if (row.id !== data.id) {
@@ -142,10 +139,11 @@ const internalRedirectionHost = {
}

if (create_certificate) {
return internalCertificate.createQuickCertificate(access, {
domain_names: data.domain_names || row.domain_names,
meta: _.assign({}, row.meta, data.meta)
})
return internalCertificate
.createQuickCertificate(access, {
domain_names: data.domain_names || row.domain_names,
meta: _.assign({}, row.meta, data.meta),
})
.then((cert) => {
// update host with cert id
data.certificate_id = cert.id;
@@ -159,42 +157,47 @@ const internalRedirectionHost = {
})
.then((row) => {
// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
data = _.assign({}, {
domain_names: row.domain_names
}, data);
data = _.assign(
{},
{
domain_names: row.domain_names,
},
data,
);

data = internalHost.cleanSslHstsData(data, row);

return redirectionHostModel
.query()
.where({id: data.id})
.where({ id: data.id })
.patch(data)
.then((saved_row) => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'updated',
object_type: 'redirection-host',
object_id: row.id,
meta: data
})
return internalAuditLog
.add(access, {
action: 'updated',
object_type: 'redirection-host',
object_id: row.id,
meta: data,
})
.then(() => {
return _.omit(saved_row, omissions());
});
});
})
.then(() => {
return internalRedirectionHost.get(access, {
id: data.id,
expand: ['owner', 'certificate']
})
return internalRedirectionHost
.get(access, {
id: data.id,
expand: ['owner', 'certificate'],
})
.then((row) => {
// Configure nginx
return internalNginx.configure(redirectionHostModel, 'redirection_host', row)
.then((new_meta) => {
row.meta = new_meta;
row = internalHost.cleanRowCertificateMeta(row);
return _.omit(row, omissions());
});
return internalNginx.configure(redirectionHostModel, 'redirection_host', row).then((new_meta) => {
row.meta = new_meta;
row = internalHost.cleanRowCertificateMeta(row);
return _.omit(row, omissions());
});
});
});
},
@@ -212,14 +215,10 @@ const internalRedirectionHost = {
data = {};
}

return access.can('redirection_hosts:get', data.id)
return access
.can('redirection_hosts:get', data.id)
.then((access_data) => {
let query = redirectionHostModel
.query()
.where('is_deleted', 0)
.andWhere('id', data.id)
.allowGraph('[owner,certificate]')
.first();
const query = redirectionHostModel.query().where('is_deleted', 0).andWhere('id', data.id).allowGraph('[owner,certificate]').first();

if (access_data.permission_visibility !== 'all') {
query.andWhere('owner_user_id', access.token.getUserId(1));
@@ -252,9 +251,10 @@ const internalRedirectionHost = {
* @returns {Promise}
*/
delete: (access, data) => {
return access.can('redirection_hosts:delete', data.id)
return access
.can('redirection_hosts:delete', data.id)
.then(() => {
return internalRedirectionHost.get(access, {id: data.id});
return internalRedirectionHost.get(access, { id: data.id });
})
.then((row) => {
if (!row) {
@@ -265,22 +265,21 @@ const internalRedirectionHost = {
.query()
.where('id', row.id)
.patch({
is_deleted: 1
is_deleted: 1,
})
.then(() => {
// Delete Nginx Config
return internalNginx.deleteConfig('redirection_host', row)
.then(() => {
return internalNginx.reload();
});
return internalNginx.deleteConfig('redirection_host', row).then(() => {
return internalNginx.reload();
});
})
.then(() => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'deleted',
action: 'deleted',
object_type: 'redirection-host',
object_id: row.id,
meta: _.omit(row, omissions())
object_id: row.id,
meta: _.omit(row, omissions()),
});
});
})
@@ -297,11 +296,12 @@ const internalRedirectionHost = {
* @returns {Promise}
*/
enable: (access, data) => {
return access.can('redirection_hosts:update', data.id)
return access
.can('redirection_hosts:update', data.id)
.then(() => {
return internalRedirectionHost.get(access, {
id: data.id,
expand: ['certificate', 'owner']
id: data.id,
expand: ['certificate', 'owner'],
});
})
.then((row) => {
@@ -317,7 +317,7 @@ const internalRedirectionHost = {
.query()
.where('id', row.id)
.patch({
enabled: 1
enabled: 1,
})
.then(() => {
// Configure nginx
@@ -326,10 +326,10 @@ const internalRedirectionHost = {
.then(() => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'enabled',
action: 'enabled',
object_type: 'redirection-host',
object_id: row.id,
meta: _.omit(row, omissions())
object_id: row.id,
meta: _.omit(row, omissions()),
});
});
})
@@ -346,9 +346,10 @@ const internalRedirectionHost = {
* @returns {Promise}
*/
disable: (access, data) => {
return access.can('redirection_hosts:update', data.id)
return access
.can('redirection_hosts:update', data.id)
.then(() => {
return internalRedirectionHost.get(access, {id: data.id});
return internalRedirectionHost.get(access, { id: data.id });
})
.then((row) => {
if (!row) {
@@ -363,22 +364,21 @@ const internalRedirectionHost = {
.query()
.where('id', row.id)
.patch({
enabled: 0
enabled: 0,
})
.then(() => {
// Delete Nginx Config
return internalNginx.deleteConfig('redirection_host', row)
.then(() => {
return internalNginx.reload();
});
return internalNginx.deleteConfig('redirection_host', row).then(() => {
return internalNginx.reload();
});
})
.then(() => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'disabled',
action: 'disabled',
object_type: 'redirection-host',
object_id: row.id,
meta: _.omit(row, omissions())
object_id: row.id,
meta: _.omit(row, omissions()),
});
});
})
@@ -396,14 +396,10 @@ const internalRedirectionHost = {
* @returns {Promise}
*/
getAll: (access, expand, search_query) => {
return access.can('redirection_hosts:list')
return access
.can('redirection_hosts:list')
.then((access_data) => {
let query = redirectionHostModel
.query()
.where('is_deleted', 0)
.groupBy('id')
.allowGraph('[owner,certificate]')
.orderBy('domain_names', 'ASC');
const query = redirectionHostModel.query().where('is_deleted', 0).groupBy('id').allowGraph('[owner,certificate]').orderBy('domain_names', 'ASC');

if (access_data.permission_visibility !== 'all') {
query.andWhere('owner_user_id', access.token.getUserId(1));
@@ -439,20 +435,16 @@ const internalRedirectionHost = {
* @returns {Promise}
*/
getCount: (user_id, visibility) => {
let query = redirectionHostModel
.query()
.count('id as count')
.where('is_deleted', 0);
const query = redirectionHostModel.query().count('id as count').where('is_deleted', 0);

if (visibility !== 'all') {
query.andWhere('owner_user_id', user_id);
}

return query.first()
.then((row) => {
return parseInt(row.count, 10);
});
}
return query.first().then((row) => {
return parseInt(row.count, 10);
});
},
};

module.exports = internalRedirectionHost;

@@ -1,38 +1,32 @@
const internalProxyHost = require('./proxy-host');
const internalProxyHost = require('./proxy-host');
const internalRedirectionHost = require('./redirection-host');
const internalDeadHost = require('./dead-host');
const internalStream = require('./stream');
const internalDeadHost = require('./dead-host');
const internalStream = require('./stream');

const internalReport = {

/**
* @param {Access} access
* @return {Promise}
*/
getHostsReport: (access) => {
return access.can('reports:hosts', 1)
return access
.can('reports:hosts', 1)
.then((access_data) => {
let user_id = access.token.getUserId(1);
const user_id = access.token.getUserId(1);

let promises = [
internalProxyHost.getCount(user_id, access_data.visibility),
internalRedirectionHost.getCount(user_id, access_data.visibility),
internalStream.getCount(user_id, access_data.visibility),
internalDeadHost.getCount(user_id, access_data.visibility)
];
const promises = [internalProxyHost.getCount(user_id, access_data.visibility), internalRedirectionHost.getCount(user_id, access_data.visibility), internalStream.getCount(user_id, access_data.visibility), internalDeadHost.getCount(user_id, access_data.visibility)];

return Promise.all(promises);
})
.then((counts) => {
return {
proxy: counts.shift(),
proxy: counts.shift(),
redirection: counts.shift(),
stream: counts.shift(),
dead: counts.shift()
stream: counts.shift(),
dead: counts.shift(),
};
});

}
},
};

module.exports = internalReport;

@@ -1,10 +1,9 @@
const fs = require('fs');
const error = require('../lib/error');
const settingModel = require('../models/setting');
const fs = require('fs');
const error = require('../lib/error');
const settingModel = require('../models/setting');
const internalNginx = require('./nginx');

const internalSetting = {

/**
* @param {Access} access
* @param {Object} data
@@ -12,9 +11,10 @@ const internalSetting = {
* @return {Promise}
*/
update: (access, data) => {
return access.can('settings:update', data.id)
.then((/*access_data*/) => {
return internalSetting.get(access, {id: data.id});
return access
.can('settings:update', data.id)
.then((/* access_data */) => {
return internalSetting.get(access, { id: data.id });
})
.then((row) => {
if (row.id !== data.id) {
@@ -22,25 +22,23 @@ const internalSetting = {
throw new error.InternalValidationError('Setting could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
}

return settingModel
.query()
.where({id: data.id})
.patch(data);
return settingModel.query().where({ id: data.id }).patch(data);
})
.then(() => {
return internalSetting.get(access, {
id: data.id
id: data.id,
});
})
.then((row) => {
if (row.id === 'default-site') {
// write the html if we need to
if (row.value === 'html') {
fs.writeFileSync('/data/nginx/etc/index.html', row.meta.html, {encoding: 'utf8'});
fs.writeFileSync('/data/nginx/etc/index.html', row.meta.html, { encoding: 'utf8' });
}

// Configure nginx
return internalNginx.deleteConfig('default')
return internalNginx
.deleteConfig('default')
.then(() => {
return internalNginx.generateConfig('default', row);
})
@@ -53,8 +51,9 @@ const internalSetting = {
.then(() => {
return row;
})
.catch((/*err*/) => {
internalNginx.deleteConfig('default')
.catch((/* err */) => {
internalNginx
.deleteConfig('default')
.then(() => {
return internalNginx.test();
})
@@ -79,12 +78,10 @@ const internalSetting = {
* @return {Promise}
*/
get: (access, data) => {
return access.can('settings:get', data.id)
return access
.can('settings:get', data.id)
.then(() => {
return settingModel
.query()
.where('id', data.id)
.first();
return settingModel.query().where('id', data.id).first();
})
.then((row) => {
if (row) {
@@ -102,12 +99,10 @@ const internalSetting = {
* @returns {*}
*/
getCount: (access) => {
return access.can('settings:list')
return access
.can('settings:list')
.then(() => {
return settingModel
.query()
.count('id as count')
.first();
return settingModel.query().count('id as count').first();
})
.then((row) => {
return parseInt(row.count, 10);
@@ -121,13 +116,10 @@ const internalSetting = {
* @returns {Promise}
*/
getAll: (access) => {
return access.can('settings:list')
.then(() => {
return settingModel
.query()
.orderBy('description', 'ASC');
});
}
return access.can('settings:list').then(() => {
return settingModel.query().orderBy('description', 'ASC');
});
},
};

module.exports = internalSetting;

@@ -1,24 +1,24 @@
const _ = require('lodash');
const error = require('../lib/error');
const utils = require('../lib/utils');
const streamModel = require('../models/stream');
const internalNginx = require('./nginx');
const _ = require('lodash');
const error = require('../lib/error');
const utils = require('../lib/utils');
const streamModel = require('../models/stream');
const internalNginx = require('./nginx');
const internalAuditLog = require('./audit-log');

function omissions () {
function omissions() {
return ['is_deleted'];
}

const internalStream = {

/**
* @param {Access} access
* @param {Object} data
* @returns {Promise}
*/
create: (access, data) => {
return access.can('streams:create', data)
.then((/*access_data*/) => {
return access
.can('streams:create', data)
.then((/* access_data */) => {
// TODO: At this point the existing ports should have been checked
data.owner_user_id = access.token.getUserId(1);

@@ -26,26 +26,23 @@ const internalStream = {
data.meta = {};
}

return streamModel
.query()
.insertAndFetch(data)
.then(utils.omitRow(omissions()));
return streamModel.query().insertAndFetch(data).then(utils.omitRow(omissions()));
})
.then((row) => {
// Configure nginx
return internalNginx.configure(streamModel, 'stream', row)
.then(() => {
return internalStream.get(access, {id: row.id, expand: ['owner']});
});
return internalNginx.configure(streamModel, 'stream', row).then(() => {
return internalStream.get(access, { id: row.id, expand: ['owner'] });
});
})
.then((row) => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'created',
object_type: 'stream',
object_id: row.id,
meta: data
})
return internalAuditLog
.add(access, {
action: 'created',
object_type: 'stream',
object_id: row.id,
meta: data,
})
.then(() => {
return row;
});
@@ -59,10 +56,11 @@ const internalStream = {
* @return {Promise}
*/
update: (access, data) => {
return access.can('streams:update', data.id)
.then((/*access_data*/) => {
return access
.can('streams:update', data.id)
.then((/* access_data */) => {
// TODO: at this point the existing streams should have been checked
return internalStream.get(access, {id: data.id});
return internalStream.get(access, { id: data.id });
})
.then((row) => {
if (row.id !== data.id) {
@@ -75,19 +73,19 @@ const internalStream = {
.patchAndFetchById(row.id, data)
.then(utils.omitRow(omissions()))
.then((saved_row) => {
return internalNginx.configure(streamModel, 'stream', saved_row)
.then(() => {
return internalStream.get(access, {id: row.id, expand: ['owner']});
});
return internalNginx.configure(streamModel, 'stream', saved_row).then(() => {
return internalStream.get(access, { id: row.id, expand: ['owner'] });
});
})
.then((saved_row) => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'updated',
object_type: 'stream',
object_id: row.id,
meta: data
})
return internalAuditLog
.add(access, {
action: 'updated',
object_type: 'stream',
object_id: row.id,
meta: data,
})
.then(() => {
return saved_row;
});
@@ -108,14 +106,10 @@ const internalStream = {
data = {};
}

return access.can('streams:get', data.id)
return access
.can('streams:get', data.id)
.then((access_data) => {
let query = streamModel
.query()
.where('is_deleted', 0)
.andWhere('id', data.id)
.allowGraph('[owner]')
.first();
const query = streamModel.query().where('is_deleted', 0).andWhere('id', data.id).allowGraph('[owner]').first();

if (access_data.permission_visibility !== 'all') {
query.andWhere('owner_user_id', access.token.getUserId(1));
@@ -147,9 +141,10 @@ const internalStream = {
* @returns {Promise}
*/
delete: (access, data) => {
return access.can('streams:delete', data.id)
return access
.can('streams:delete', data.id)
.then(() => {
return internalStream.get(access, {id: data.id});
return internalStream.get(access, { id: data.id });
})
.then((row) => {
if (!row) {
@@ -160,22 +155,21 @@ const internalStream = {
.query()
.where('id', row.id)
.patch({
is_deleted: 1
is_deleted: 1,
})
.then(() => {
// Delete Nginx Config
return internalNginx.deleteConfig('stream', row)
.then(() => {
return internalNginx.reload();
});
return internalNginx.deleteConfig('stream', row).then(() => {
return internalNginx.reload();
});
})
.then(() => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'deleted',
action: 'deleted',
object_type: 'stream',
object_id: row.id,
meta: _.omit(row, omissions())
object_id: row.id,
meta: _.omit(row, omissions()),
});
});
})
@@ -192,11 +186,12 @@ const internalStream = {
* @returns {Promise}
*/
enable: (access, data) => {
return access.can('streams:update', data.id)
return access
.can('streams:update', data.id)
.then(() => {
return internalStream.get(access, {
id: data.id,
expand: ['owner']
id: data.id,
expand: ['owner'],
});
})
.then((row) => {
@@ -212,7 +207,7 @@ const internalStream = {
.query()
.where('id', row.id)
.patch({
enabled: 1
enabled: 1,
})
.then(() => {
// Configure nginx
@@ -221,10 +216,10 @@ const internalStream = {
.then(() => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'enabled',
action: 'enabled',
object_type: 'stream',
object_id: row.id,
meta: _.omit(row, omissions())
object_id: row.id,
meta: _.omit(row, omissions()),
});
});
})
@@ -241,9 +236,10 @@ const internalStream = {
* @returns {Promise}
*/
disable: (access, data) => {
return access.can('streams:update', data.id)
return access
.can('streams:update', data.id)
.then(() => {
return internalStream.get(access, {id: data.id});
return internalStream.get(access, { id: data.id });
})
.then((row) => {
if (!row) {
@@ -258,22 +254,21 @@ const internalStream = {
.query()
.where('id', row.id)
.patch({
enabled: 0
enabled: 0,
})
.then(() => {
// Delete Nginx Config
return internalNginx.deleteConfig('stream', row)
.then(() => {
return internalNginx.reload();
});
return internalNginx.deleteConfig('stream', row).then(() => {
return internalNginx.reload();
});
})
.then(() => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'disabled',
action: 'disabled',
object_type: 'stream-host',
object_id: row.id,
meta: _.omit(row, omissions())
object_id: row.id,
meta: _.omit(row, omissions()),
});
});
})
@@ -291,32 +286,26 @@ const internalStream = {
* @returns {Promise}
*/
getAll: (access, expand, search_query) => {
return access.can('streams:list')
.then((access_data) => {
let query = streamModel
.query()
.where('is_deleted', 0)
.groupBy('id')
.allowGraph('[owner]')
.orderBy('incoming_port', 'ASC');
return access.can('streams:list').then((access_data) => {
const query = streamModel.query().where('is_deleted', 0).groupBy('id').allowGraph('[owner]').orderBy('incoming_port', 'ASC');

if (access_data.permission_visibility !== 'all') {
query.andWhere('owner_user_id', access.token.getUserId(1));
}
if (access_data.permission_visibility !== 'all') {
query.andWhere('owner_user_id', access.token.getUserId(1));
}

// Query is used for searching
if (typeof search_query === 'string') {
query.where(function () {
this.where('incoming_port', 'like', '%' + search_query + '%');
});
}
// Query is used for searching
if (typeof search_query === 'string') {
query.where(function () {
this.where('incoming_port', 'like', '%' + search_query + '%');
});
}

if (typeof expand !== 'undefined' && expand !== null) {
query.withGraphFetched('[' + expand.join(', ') + ']');
}
if (typeof expand !== 'undefined' && expand !== null) {
query.withGraphFetched('[' + expand.join(', ') + ']');
}

return query.then(utils.omitRows(omissions()));
});
return query.then(utils.omitRows(omissions()));
});
},

/**
@@ -327,20 +316,16 @@ const internalStream = {
* @returns {Promise}
*/
getCount: (user_id, visibility) => {
let query = streamModel
.query()
.count('id as count')
.where('is_deleted', 0);
const query = streamModel.query().count('id as count').where('is_deleted', 0);

if (visibility !== 'all') {
query.andWhere('owner_user_id', user_id);
}

return query.first()
.then((row) => {
return parseInt(row.count, 10);
});
}
return query.first().then((row) => {
return parseInt(row.count, 10);
});
},
};

module.exports = internalStream;

@@ -1,12 +1,11 @@
const _ = require('lodash');
const error = require('../lib/error');
const userModel = require('../models/user');
const authModel = require('../models/auth');
const helpers = require('../lib/helpers');
const _ = require('lodash');
const error = require('../lib/error');
const userModel = require('../models/user');
const authModel = require('../models/auth');
const helpers = require('../lib/helpers');
const TokenModel = require('../models/token');

module.exports = {

/**
* @param {Object} data
* @param {String} data.identity
@@ -17,9 +16,9 @@ module.exports = {
* @returns {Promise}
*/
getTokenFromEmail: (data, issuer) => {
let Token = new TokenModel();
const Token = new TokenModel();

data.scope = data.scope || 'user';
data.scope = data.scope || 'user';
data.expiry = data.expiry || '1d';

return userModel
@@ -38,40 +37,37 @@ module.exports = {
.first()
.then((auth) => {
if (auth) {
return auth.verifyPassword(data.secret)
.then((valid) => {
if (valid) {

if (data.scope !== 'user' && _.indexOf(user.roles, data.scope) === -1) {
// The scope requested doesn't exist as a role against the user,
// you shall not pass.
throw new error.AuthError('Invalid scope: ' + data.scope);
}

// Create a moment of the expiry expression
let expiry = helpers.parseDatePeriod(data.expiry);
if (expiry === null) {
throw new error.AuthError('Invalid expiry time: ' + data.expiry);
}

return Token.create({
iss: issuer || 'api',
attrs: {
id: user.id
},
scope: [data.scope],
expiresIn: data.expiry
})
.then((signed) => {
return {
token: signed.token,
expires: expiry.toISOString()
};
});
} else {
throw new error.AuthError('Invalid password');
return auth.verifyPassword(data.secret).then((valid) => {
if (valid) {
if (data.scope !== 'user' && _.indexOf(user.roles, data.scope) === -1) {
// The scope requested doesn't exist as a role against the user,
// you shall not pass.
throw new error.AuthError('Invalid scope: ' + data.scope);
}
});

// Create a moment of the expiry expression
const expiry = helpers.parseDatePeriod(data.expiry);
if (expiry === null) {
throw new error.AuthError('Invalid expiry time: ' + data.expiry);
}

return Token.create({
iss: issuer || 'api',
attrs: {
id: user.id,
},
scope: [data.scope],
expiresIn: data.expiry,
}).then((signed) => {
return {
token: signed.token,
expires: expiry.toISOString(),
};
});
} else {
throw new error.AuthError('Invalid password');
}
});
} else {
throw new error.AuthError('No password auth for user');
}
@@ -90,21 +86,20 @@ module.exports = {
* @returns {Promise}
*/
getFreshToken: (access, data) => {
let Token = new TokenModel();
const Token = new TokenModel();

data = data || {};
data = data || {};
data.expiry = data.expiry || '1d';

if (access && access.token.getUserId(0)) {

// Create a moment of the expiry expression
let expiry = helpers.parseDatePeriod(data.expiry);
const expiry = helpers.parseDatePeriod(data.expiry);
if (expiry === null) {
throw new error.AuthError('Invalid expiry time: ' + data.expiry);
}

let token_attrs = {
id: access.token.getUserId(0)
const token_attrs = {
id: access.token.getUserId(0),
};

// Only admins can request otherwise scoped tokens
@@ -118,17 +113,16 @@ module.exports = {
}

return Token.create({
iss: 'api',
scope: scope,
attrs: token_attrs,
expiresIn: data.expiry
})
.then((signed) => {
return {
token: signed.token,
expires: expiry.toISOString()
};
});
iss: 'api',
scope,
attrs: token_attrs,
expiresIn: data.expiry,
}).then((signed) => {
return {
token: signed.token,
expires: expiry.toISOString(),
};
});
} else {
throw new error.AssertionFailedError('Existing token contained invalid user data');
}
@@ -140,23 +134,22 @@ module.exports = {
*/
getTokenFromUser: (user) => {
const expire = '1d';
const Token = new TokenModel();
const Token = new TokenModel();
const expiry = helpers.parseDatePeriod(expire);

return Token.create({
iss: 'api',
iss: 'api',
attrs: {
id: user.id
id: user.id,
},
scope: ['user'],
expiresIn: expire
})
.then((signed) => {
return {
token: signed.token,
expires: expiry.toISOString(),
user: user
};
});
}
scope: ['user'],
expiresIn: expire,
}).then((signed) => {
return {
token: signed.token,
expires: expiry.toISOString(),
user,
};
});
},
};

@@ -1,43 +1,40 @@
|
||||
const _ = require('lodash');
|
||||
const error = require('../lib/error');
|
||||
const utils = require('../lib/utils');
|
||||
const userModel = require('../models/user');
|
||||
const _ = require('lodash');
|
||||
const error = require('../lib/error');
|
||||
const utils = require('../lib/utils');
|
||||
const userModel = require('../models/user');
|
||||
const userPermissionModel = require('../models/user_permission');
|
||||
const authModel = require('../models/auth');
|
||||
const gravatar = require('gravatar');
|
||||
const internalToken = require('./token');
|
||||
const internalAuditLog = require('./audit-log');
|
||||
const authModel = require('../models/auth');
|
||||
const gravatar = require('gravatar');
|
||||
const internalToken = require('./token');
|
||||
const internalAuditLog = require('./audit-log');
|
||||
|
||||
function omissions () {
|
||||
function omissions() {
|
||||
return ['is_deleted'];
|
||||
}
|
||||
|
||||
const internalUser = {
|
||||
|
||||
/**
|
||||
* @param {Access} access
|
||||
* @param {Object} data
|
||||
* @returns {Promise}
|
||||
*/
|
||||
create: (access, data) => {
|
||||
let auth = data.auth || null;
|
||||
const auth = data.auth || null;
|
||||
delete data.auth;
|
||||
|
||||
data.avatar = data.avatar || '';
|
||||
data.roles = data.roles || [];
|
||||
data.roles = data.roles || [];
|
||||
|
||||
if (typeof data.is_disabled !== 'undefined') {
|
||||
data.is_disabled = data.is_disabled ? 1 : 0;
|
||||
}
|
||||
|
||||
return access.can('users:create', data)
|
||||
return access
|
||||
.can('users:create', data)
|
||||
.then(() => {
|
||||
data.avatar = gravatar.url(data.email, {default: 'mm'});
|
||||
data.avatar = gravatar.url(data.email, { default: 'mm' });
|
||||
|
||||
return userModel
|
||||
.query()
|
||||
.insertAndFetch(data)
|
||||
.then(utils.omitRow(omissions()));
|
||||
return userModel.query().insertAndFetch(data).then(utils.omitRow(omissions()));
|
||||
})
|
||||
.then((user) => {
|
||||
if (auth) {
|
||||
@@ -45,9 +42,9 @@ const internalUser = {
|
||||
.query()
|
||||
.insert({
|
||||
user_id: user.id,
|
||||
type: auth.type,
|
||||
secret: auth.secret,
|
||||
meta: {}
|
||||
type: auth.type,
|
||||
secret: auth.secret,
|
||||
meta: {},
|
||||
})
|
||||
.then(() => {
|
||||
return user;
|
||||
@@ -58,32 +55,33 @@ const internalUser = {
|
||||
})
|
||||
.then((user) => {
|
||||
// Create permissions row as well
|
||||
let is_admin = data.roles.indexOf('admin') !== -1;
|
||||
const is_admin = data.roles.indexOf('admin') !== -1;
|
||||
|
||||
return userPermissionModel
|
||||
.query()
|
||||
.insert({
|
||||
user_id: user.id,
|
||||
visibility: is_admin ? 'all' : 'user',
|
||||
proxy_hosts: 'manage',
|
||||
user_id: user.id,
|
||||
visibility: is_admin ? 'all' : 'user',
|
||||
proxy_hosts: 'manage',
|
||||
redirection_hosts: 'manage',
|
||||
dead_hosts: 'manage',
|
||||
streams: 'manage',
|
||||
access_lists: 'manage',
|
||||
certificates: 'manage'
|
||||
dead_hosts: 'manage',
|
||||
streams: 'manage',
|
||||
access_lists: 'manage',
|
||||
certificates: 'manage',
|
||||
})
|
||||
.then(() => {
|
||||
return internalUser.get(access, {id: user.id, expand: ['permissions']});
|
||||
return internalUser.get(access, { id: user.id, expand: ['permissions'] });
|
||||
});
|
||||
})
|
||||
.then((user) => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'created',
|
||||
object_type: 'user',
|
||||
object_id: user.id,
|
||||
meta: user
|
||||
})
|
||||
return internalAuditLog
|
||||
.add(access, {
|
||||
action: 'created',
|
||||
object_type: 'user',
|
||||
object_id: user.id,
|
||||
meta: user,
|
||||
})
|
||||
.then(() => {
|
||||
return user;
|
||||
});
|
||||
@@ -103,33 +101,30 @@ const internalUser = {
|
||||
data.is_disabled = data.is_disabled ? 1 : 0;
|
||||
}
|
||||
|
||||
return access.can('users:update', data.id)
|
||||
return access
|
||||
.can('users:update', data.id)
|
||||
.then(() => {
|
||||
|
||||
// Make sure that the user being updated doesn't change their email to another user that is already using it
|
||||
// 1. get user we want to update
|
||||
return internalUser.get(access, {id: data.id})
|
||||
.then((user) => {
|
||||
return internalUser.get(access, { id: data.id }).then((user) => {
|
||||
// 2. if email is to be changed, find other users with that email
|
||||
if (typeof data.email !== 'undefined') {
|
||||
data.email = data.email.toLowerCase().trim();
|
||||
|
||||
// 2. if email is to be changed, find other users with that email
|
||||
if (typeof data.email !== 'undefined') {
|
||||
data.email = data.email.toLowerCase().trim();
|
||||
if (user.email !== data.email) {
|
||||
return internalUser.isEmailAvailable(data.email, data.id).then((available) => {
|
||||
if (!available) {
|
||||
throw new error.ValidationError('Email address already in use - ' + data.email);
|
||||
}
|
||||
|
||||
if (user.email !== data.email) {
|
||||
return internalUser.isEmailAvailable(data.email, data.id)
|
||||
.then((available) => {
|
||||
if (!available) {
|
||||
throw new error.ValidationError('Email address already in use - ' + data.email);
|
||||
}
|
||||
|
||||
return user;
|
||||
});
|
||||
}
|
||||
return user;
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// No change to email:
|
||||
return user;
|
||||
});
|
||||
// No change to email:
|
||||
return user;
|
||||
});
|
||||
})
|
||||
.then((user) => {
|
||||
if (user.id !== data.id) {
|
||||
@@ -137,24 +132,22 @@ const internalUser = {
|
||||
throw new error.InternalValidationError('User could not be updated, IDs do not match: ' + user.id + ' !== ' + data.id);
|
||||
}
|
||||
|
||||
data.avatar = gravatar.url(data.email || user.email, {default: 'mm'});
|
||||
data.avatar = gravatar.url(data.email || user.email, { default: 'mm' });
|
||||
|
||||
return userModel
|
||||
.query()
|
||||
.patchAndFetchById(user.id, data)
|
||||
.then(utils.omitRow(omissions()));
|
||||
return userModel.query().patchAndFetchById(user.id, data).then(utils.omitRow(omissions()));
|
||||
})
|
||||
.then(() => {
|
||||
return internalUser.get(access, {id: data.id});
|
||||
return internalUser.get(access, { id: data.id });
|
||||
})
|
||||
.then((user) => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'updated',
|
||||
object_type: 'user',
|
||||
object_id: user.id,
|
||||
meta: data
|
||||
})
|
||||
return internalAuditLog
|
||||
.add(access, {
|
||||
action: 'updated',
|
||||
object_type: 'user',
|
||||
object_id: user.id,
|
||||
meta: data,
|
||||
})
|
||||
.then(() => {
|
||||
return user;
|
||||
});
|
||||
@@ -178,14 +171,10 @@ const internalUser = {
|
||||
data.id = access.token.getUserId(0);
|
||||
}
|
||||
|
||||
return access.can('users:get', data.id)
|
||||
return access
|
||||
.can('users:get', data.id)
|
||||
.then(() => {
|
||||
let query = userModel
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.andWhere('id', data.id)
|
||||
.allowGraph('[permissions]')
|
||||
.first();
|
||||
const query = userModel.query().where('is_deleted', 0).andWhere('id', data.id).allowGraph('[permissions]').first();
|
||||
|
||||
if (typeof data.expand !== 'undefined' && data.expand !== null) {
|
||||
query.withGraphFetched('[' + data.expand.join(', ') + ']');
|
||||
@@ -213,20 +202,15 @@ const internalUser = {
|
||||
* @param user_id
|
||||
*/
|
||||
isEmailAvailable: (email, user_id) => {
|
||||
let query = userModel
|
||||
.query()
|
||||
.where('email', '=', email.toLowerCase().trim())
|
||||
.where('is_deleted', 0)
|
||||
.first();
|
||||
const query = userModel.query().where('email', '=', email.toLowerCase().trim()).where('is_deleted', 0).first();
|
||||
|
||||
if (typeof user_id !== 'undefined') {
|
||||
query.where('id', '!=', user_id);
|
||||
}
|
||||
|
||||
return query
|
||||
.then((user) => {
|
||||
return !user;
|
||||
});
|
||||
return query.then((user) => {
|
||||
return !user;
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
@@ -237,9 +221,10 @@ const internalUser = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
delete: (access, data) => {
|
||||
return access.can('users:delete', data.id)
|
||||
return access
|
||||
.can('users:delete', data.id)
|
||||
.then(() => {
|
||||
return internalUser.get(access, {id: data.id});
|
||||
return internalUser.get(access, { id: data.id });
|
||||
})
|
||||
.then((user) => {
|
||||
if (!user) {
|
||||
@@ -255,15 +240,15 @@ const internalUser = {
|
||||
.query()
|
||||
.where('id', user.id)
|
||||
.patch({
|
||||
is_deleted: 1
|
||||
is_deleted: 1,
|
||||
})
|
||||
.then(() => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'deleted',
|
||||
action: 'deleted',
|
||||
object_type: 'user',
|
||||
object_id: user.id,
|
||||
meta: _.omit(user, omissions())
|
||||
object_id: user.id,
|
||||
meta: _.omit(user, omissions()),
|
||||
});
|
||||
});
|
||||
})
|
||||
@@ -280,19 +265,15 @@ const internalUser = {
|
||||
* @returns {*}
|
||||
*/
|
||||
getCount: (access, search_query) => {
|
||||
return access.can('users:list')
|
||||
return access
|
||||
.can('users:list')
|
||||
.then(() => {
|
||||
let query = userModel
|
||||
.query()
|
||||
.count('id as count')
|
||||
.where('is_deleted', 0)
|
||||
.first();
|
||||
const query = userModel.query().count('id as count').where('is_deleted', 0).first();
|
||||
|
||||
// Query is used for searching
|
||||
if (typeof search_query === 'string') {
|
||||
query.where(function () {
|
||||
this.where('user.name', 'like', '%' + search_query + '%')
|
||||
.orWhere('user.email', 'like', '%' + search_query + '%');
|
||||
this.where('user.name', 'like', '%' + search_query + '%').orWhere('user.email', 'like', '%' + search_query + '%');
|
||||
});
|
||||
}
|
||||
|
||||
@@ -312,29 +293,22 @@ const internalUser = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
getAll: (access, expand, search_query) => {
|
||||
return access.can('users:list')
|
||||
.then(() => {
|
||||
let query = userModel
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.groupBy('id')
|
||||
.allowGraph('[permissions]')
|
||||
.orderBy('name', 'ASC');
|
||||
return access.can('users:list').then(() => {
|
||||
const query = userModel.query().where('is_deleted', 0).groupBy('id').allowGraph('[permissions]').orderBy('name', 'ASC');
|
||||
|
||||
// Query is used for searching
|
||||
if (typeof search_query === 'string') {
|
||||
query.where(function () {
|
||||
this.where('name', 'like', '%' + search_query + '%')
|
||||
.orWhere('email', 'like', '%' + search_query + '%');
|
||||
});
|
||||
}
|
||||
// Query is used for searching
|
||||
if (typeof search_query === 'string') {
|
||||
query.where(function () {
|
||||
this.where('name', 'like', '%' + search_query + '%').orWhere('email', 'like', '%' + search_query + '%');
|
||||
});
|
||||
}
|
||||
|
||||
if (typeof expand !== 'undefined' && expand !== null) {
|
||||
query.withGraphFetched('[' + expand.join(', ') + ']');
|
||||
}
|
||||
if (typeof expand !== 'undefined' && expand !== null) {
|
||||
query.withGraphFetched('[' + expand.join(', ') + ']');
|
||||
}
|
||||
|
||||
return query.then(utils.omitRows(omissions()));
|
||||
});
|
||||
return query.then(utils.omitRows(omissions()));
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
@@ -361,9 +335,10 @@ const internalUser = {
|
||||
* @return {Promise}
|
||||
*/
|
||||
setPassword: (access, data) => {
|
||||
return access.can('users:password', data.id)
|
||||
return access
|
||||
.can('users:password', data.id)
|
||||
.then(() => {
|
||||
return internalUser.get(access, {id: data.id});
|
||||
return internalUser.get(access, { id: data.id });
|
||||
})
|
||||
.then((user) => {
|
||||
if (user.id !== data.id) {
|
||||
@@ -377,10 +352,11 @@ const internalUser = {
|
||||
throw new error.ValidationError('Current password was not supplied');
|
||||
}
|
||||
|
||||
return internalToken.getTokenFromEmail({
|
||||
identity: user.email,
|
||||
secret: data.current
|
||||
})
|
||||
return internalToken
|
||||
.getTokenFromEmail({
|
||||
identity: user.email,
|
||||
secret: data.current,
|
||||
})
|
||||
.then(() => {
|
||||
return user;
|
||||
});
|
||||
@@ -398,37 +374,31 @@ const internalUser = {
|
||||
.then((existing_auth) => {
|
||||
if (existing_auth) {
|
||||
// patch
|
||||
return authModel
|
||||
.query()
|
||||
.where('user_id', user.id)
|
||||
.andWhere('type', data.type)
|
||||
.patch({
|
||||
type: data.type, // This is required for the model to encrypt on save
|
||||
secret: data.secret
|
||||
});
|
||||
return authModel.query().where('user_id', user.id).andWhere('type', data.type).patch({
|
||||
type: data.type, // This is required for the model to encrypt on save
|
||||
secret: data.secret,
|
||||
});
|
||||
} else {
|
||||
// insert
|
||||
return authModel
|
||||
.query()
|
||||
.insert({
|
||||
user_id: user.id,
|
||||
type: data.type,
|
||||
secret: data.secret,
|
||||
meta: {}
|
||||
});
|
||||
return authModel.query().insert({
|
||||
user_id: user.id,
|
||||
type: data.type,
|
||||
secret: data.secret,
|
||||
meta: {},
|
||||
});
|
||||
}
|
||||
})
|
||||
.then(() => {
|
||||
// Add to Audit Log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'updated',
|
||||
action: 'updated',
|
||||
object_type: 'user',
|
||||
object_id: user.id,
|
||||
meta: {
|
||||
name: user.name,
|
||||
object_id: user.id,
|
||||
meta: {
|
||||
name: user.name,
|
||||
password_changed: true,
|
||||
auth_type: data.type
|
||||
}
|
||||
auth_type: data.type,
|
||||
},
|
||||
});
|
||||
});
|
||||
})
|
||||
@@ -443,9 +413,10 @@ const internalUser = {
|
||||
* @return {Promise}
|
||||
*/
|
||||
setPermissions: (access, data) => {
|
||||
return access.can('users:permissions', data.id)
|
||||
return access
|
||||
.can('users:permissions', data.id)
|
||||
.then(() => {
|
||||
return internalUser.get(access, {id: data.id});
|
||||
return internalUser.get(access, { id: data.id });
|
||||
})
|
||||
.then((user) => {
|
||||
if (user.id !== data.id) {
|
||||
@@ -467,26 +438,23 @@ const internalUser = {
|
||||
return userPermissionModel
|
||||
.query()
|
||||
.where('user_id', user.id)
|
||||
.patchAndFetchById(existing_auth.id, _.assign({user_id: user.id}, data));
|
||||
.patchAndFetchById(existing_auth.id, _.assign({ user_id: user.id }, data));
|
||||
} else {
|
||||
// insert
|
||||
return userPermissionModel
|
||||
.query()
|
||||
.insertAndFetch(_.assign({user_id: user.id}, data));
|
||||
return userPermissionModel.query().insertAndFetch(_.assign({ user_id: user.id }, data));
|
||||
}
|
||||
})
|
||||
.then((permissions) => {
|
||||
// Add to Audit Log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'updated',
|
||||
action: 'updated',
|
||||
object_type: 'user',
|
||||
object_id: user.id,
|
||||
meta: {
|
||||
name: user.name,
|
||||
permissions: permissions
|
||||
}
|
||||
object_id: user.id,
|
||||
meta: {
|
||||
name: user.name,
|
||||
permissions,
|
||||
},
|
||||
});
|
||||
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
@@ -500,14 +468,15 @@ const internalUser = {
|
||||
* @param {Integer} data.id
|
||||
*/
|
||||
loginAs: (access, data) => {
|
||||
return access.can('users:loginas', data.id)
|
||||
return access
|
||||
.can('users:loginas', data.id)
|
||||
.then(() => {
|
||||
return internalUser.get(access, data);
|
||||
})
|
||||
.then((user) => {
|
||||
return internalToken.getTokenFromUser(user);
|
||||
});
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
module.exports = internalUser;
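Note on the getCount/getAll hunks above: the reflowed chains keep the grouped where(function () { ... }) callback, which is what scopes the two LIKE clauses inside the is_deleted filter. A minimal sketch of that grouping with plain knex against an in-memory SQLite database (knex and sqlite3 are both existing backend dependencies; the table and columns below are illustrative, not this project's schema):

const knex = require('knex')({
    client: 'sqlite3',
    connection: { filename: ':memory:' },
    useNullAsDefault: true,
});

function countUsers(search_query) {
    const query = knex('user').count('id as count').where('is_deleted', 0).first();

    // Grouping the OR conditions keeps them scoped inside the is_deleted filter:
    // WHERE is_deleted = 0 AND (name LIKE ? OR email LIKE ?)
    if (typeof search_query === 'string') {
        query.where(function () {
            this.where('name', 'like', '%' + search_query + '%').orWhere('email', 'like', '%' + search_query + '%');
        });
    }

    return query;
}

knex.schema
    .createTable('user', (table) => {
        table.increments('id').primary();
        table.string('name');
        table.string('email');
        table.integer('is_deleted').notNullable().defaultTo(0);
    })
    .then(() => knex('user').insert([{ name: 'Zoey', email: 'zoey@example.com' }, { name: 'Jamie', email: 'jamie@example.com' }]))
    .then(() => countUsers('zo'))
    .then((row) => console.log(row)) // { count: 1 }
    .finally(() => knex.destroy());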
@@ -1,19 +1,19 @@
module.exports = {
development: {
client: 'mysql',
client: 'mysql',
migrations: {
tableName: 'migrations',
stub: 'lib/migrate_template.js',
directory: 'migrations'
}
stub: 'lib/migrate_template.js',
directory: 'migrations',
},
},

production: {
client: 'mysql',
client: 'mysql',
migrations: {
tableName: 'migrations',
stub: 'lib/migrate_template.js',
directory: 'migrations'
}
}
stub: 'lib/migrate_template.js',
directory: 'migrations',
},
},
};
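The knexfile above only carries migration settings; the live connection is built from lib/config at runtime. For reference, a file of this shape is what the knex CLI (for example, knex migrate:latest --env production) or a programmatic require('knex')(knexfile[env]) call consumes. The snippet below is a generic, self-contained illustration; sqlite3 stands in for the mysql client so it runs without a database server:

const knexfile = {
    development: {
        client: 'sqlite3',
        connection: { filename: ':memory:' },
        useNullAsDefault: true,
        migrations: {
            tableName: 'migrations',
            directory: 'migrations',
        },
    },
};

const env = process.env.NODE_ENV || 'development';
const db = require('knex')(knexfile[env]);

db.raw('select 1 + 1 as result')
    .then((rows) => console.log('connected, 1 + 1 =', rows[0].result))
    .finally(() => db.destroy());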
@@ -8,24 +8,24 @@
|
||||
*
|
||||
*/
|
||||
|
||||
const _ = require('lodash');
|
||||
const logger = require('../logger').access;
|
||||
const validator = require('ajv');
|
||||
const error = require('./error');
|
||||
const userModel = require('../models/user');
|
||||
const _ = require('lodash');
|
||||
const logger = require('../logger').access;
|
||||
const validator = require('ajv');
|
||||
const error = require('./error');
|
||||
const userModel = require('../models/user');
|
||||
const proxyHostModel = require('../models/proxy_host');
|
||||
const TokenModel = require('../models/token');
|
||||
const roleSchema = require('./access/roles.json');
|
||||
const permsSchema = require('./access/permissions.json');
|
||||
const TokenModel = require('../models/token');
|
||||
const roleSchema = require('./access/roles.json');
|
||||
const permsSchema = require('./access/permissions.json');
|
||||
|
||||
module.exports = function (token_string) {
|
||||
let Token = new TokenModel();
|
||||
let token_data = null;
|
||||
let initialized = false;
|
||||
let object_cache = {};
|
||||
const Token = new TokenModel();
|
||||
let token_data = null;
|
||||
let initialized = false;
|
||||
const object_cache = {};
|
||||
let allow_internal_access = false;
|
||||
let user_roles = [];
|
||||
let permissions = {};
|
||||
let user_roles = [];
|
||||
let permissions = {};
|
||||
|
||||
/**
|
||||
* Loads the Token object from the token string
|
||||
@@ -39,8 +39,8 @@ module.exports = function (token_string) {
|
||||
} else if (!token_string) {
|
||||
reject(new error.PermissionError('Permission Denied'));
|
||||
} else {
|
||||
resolve(Token.load(token_string)
|
||||
.then((data) => {
|
||||
resolve(
|
||||
Token.load(token_string).then((data) => {
|
||||
token_data = data;
|
||||
|
||||
// At this point we need to load the user from the DB and make sure they:
|
||||
@@ -75,10 +75,9 @@ module.exports = function (token_string) {
|
||||
throw new error.AuthError('Invalid token scope for User');
|
||||
} else {
|
||||
initialized = true;
|
||||
user_roles = user.roles;
|
||||
user_roles = user.roles;
|
||||
permissions = user.permissions;
|
||||
}
|
||||
|
||||
} else {
|
||||
throw new error.AuthError('User cannot be loaded for Token');
|
||||
}
|
||||
@@ -86,7 +85,8 @@ module.exports = function (token_string) {
|
||||
} else {
|
||||
initialized = true;
|
||||
}
|
||||
}));
|
||||
}),
|
||||
);
|
||||
}
|
||||
});
|
||||
};
|
||||
@@ -105,49 +105,45 @@ module.exports = function (token_string) {
|
||||
if (typeof token_data.attrs.id === 'undefined' || !token_data.attrs.id) {
|
||||
reject(new error.AuthError('User Token supplied without a User ID'));
|
||||
} else {
|
||||
let token_user_id = token_data.attrs.id ? token_data.attrs.id : 0;
|
||||
const token_user_id = token_data.attrs.id ? token_data.attrs.id : 0;
|
||||
let query;
|
||||
|
||||
if (typeof object_cache[object_type] === 'undefined') {
|
||||
switch (object_type) {
|
||||
|
||||
// USERS - should only return yourself
|
||||
case 'users':
|
||||
resolve(token_user_id ? [token_user_id] : []);
|
||||
break;
|
||||
// USERS - should only return yourself
|
||||
case 'users':
|
||||
resolve(token_user_id ? [token_user_id] : []);
|
||||
break;
|
||||
|
||||
// Proxy Hosts
|
||||
case 'proxy_hosts':
|
||||
query = proxyHostModel
|
||||
.query()
|
||||
.select('id')
|
||||
.andWhere('is_deleted', 0);
|
||||
case 'proxy_hosts':
|
||||
query = proxyHostModel.query().select('id').andWhere('is_deleted', 0);
|
||||
|
||||
if (permissions.visibility === 'user') {
|
||||
query.andWhere('owner_user_id', token_user_id);
|
||||
}
|
||||
if (permissions.visibility === 'user') {
|
||||
query.andWhere('owner_user_id', token_user_id);
|
||||
}
|
||||
|
||||
resolve(query
|
||||
.then((rows) => {
|
||||
let result = [];
|
||||
_.forEach(rows, (rule_row) => {
|
||||
result.push(rule_row.id);
|
||||
});
|
||||
resolve(
|
||||
query.then((rows) => {
|
||||
const result = [];
|
||||
_.forEach(rows, (rule_row) => {
|
||||
result.push(rule_row.id);
|
||||
});
|
||||
|
||||
// enum should not have less than 1 item
|
||||
if (!result.length) {
|
||||
result.push(0);
|
||||
}
|
||||
// enum should not have less than 1 item
|
||||
if (!result.length) {
|
||||
result.push(0);
|
||||
}
|
||||
|
||||
return result;
|
||||
})
|
||||
);
|
||||
break;
|
||||
return result;
|
||||
}),
|
||||
);
|
||||
break;
|
||||
|
||||
// DEFAULT: null
|
||||
default:
|
||||
resolve(null);
|
||||
break;
|
||||
default:
|
||||
resolve(null);
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
resolve(object_cache[object_type]);
|
||||
@@ -156,11 +152,10 @@ module.exports = function (token_string) {
|
||||
} else {
|
||||
resolve(null);
|
||||
}
|
||||
})
|
||||
.then((objects) => {
|
||||
object_cache[object_type] = objects;
|
||||
return objects;
|
||||
});
|
||||
}).then((objects) => {
|
||||
object_cache[object_type] = objects;
|
||||
return objects;
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -170,51 +165,49 @@ module.exports = function (token_string) {
|
||||
* @returns {Object}
|
||||
*/
|
||||
this.getObjectSchema = (permission_label) => {
|
||||
let base_object_type = permission_label.split(':').shift();
|
||||
const base_object_type = permission_label.split(':').shift();
|
||||
|
||||
let schema = {
|
||||
$id: 'objects',
|
||||
$schema: 'http://json-schema.org/draft-07/schema#',
|
||||
description: 'Actor Properties',
|
||||
type: 'object',
|
||||
const schema = {
|
||||
$id: 'objects',
|
||||
$schema: 'http://json-schema.org/draft-07/schema#',
|
||||
description: 'Actor Properties',
|
||||
type: 'object',
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
properties: {
|
||||
user_id: {
|
||||
anyOf: [
|
||||
{
|
||||
type: 'number',
|
||||
enum: [Token.get('attrs').id]
|
||||
}
|
||||
]
|
||||
enum: [Token.get('attrs').id],
|
||||
},
|
||||
],
|
||||
},
|
||||
scope: {
|
||||
type: 'string',
|
||||
pattern: '^' + Token.get('scope') + '$'
|
||||
}
|
||||
}
|
||||
type: 'string',
|
||||
pattern: '^' + Token.get('scope') + '$',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
return this.loadObjects(base_object_type)
|
||||
.then((object_result) => {
|
||||
if (typeof object_result === 'object' && object_result !== null) {
|
||||
schema.properties[base_object_type] = {
|
||||
type: 'number',
|
||||
enum: object_result,
|
||||
minimum: 1
|
||||
};
|
||||
} else {
|
||||
schema.properties[base_object_type] = {
|
||||
type: 'number',
|
||||
minimum: 1
|
||||
};
|
||||
}
|
||||
return this.loadObjects(base_object_type).then((object_result) => {
|
||||
if (typeof object_result === 'object' && object_result !== null) {
|
||||
schema.properties[base_object_type] = {
|
||||
type: 'number',
|
||||
enum: object_result,
|
||||
minimum: 1,
|
||||
};
|
||||
} else {
|
||||
schema.properties[base_object_type] = {
|
||||
type: 'number',
|
||||
minimum: 1,
|
||||
};
|
||||
}
|
||||
|
||||
return schema;
|
||||
});
|
||||
return schema;
|
||||
});
|
||||
};
|
||||
|
||||
return {
|
||||
|
||||
token: Token,
|
||||
|
||||
/**
|
||||
@@ -223,7 +216,7 @@ module.exports = function (token_string) {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
load: (allow_internal) => {
|
||||
return new Promise(function (resolve/*, reject*/) {
|
||||
return new Promise(function (resolve /*, reject */) {
|
||||
if (token_string) {
|
||||
resolve(Token.load(token_string));
|
||||
} else {
|
||||
@@ -244,71 +237,64 @@ module.exports = function (token_string) {
|
||||
can: (permission, data) => {
|
||||
if (allow_internal_access === true) {
|
||||
return Promise.resolve(true);
|
||||
//return true;
|
||||
// return true;
|
||||
} else {
|
||||
return this.init()
|
||||
.then(() => {
|
||||
// initialized, token decoded ok
|
||||
return this.getObjectSchema(permission)
|
||||
.then((objectSchema) => {
|
||||
let data_schema = {
|
||||
[permission]: {
|
||||
data: data,
|
||||
scope: Token.get('scope'),
|
||||
roles: user_roles,
|
||||
permission_visibility: permissions.visibility,
|
||||
permission_proxy_hosts: permissions.proxy_hosts,
|
||||
permission_redirection_hosts: permissions.redirection_hosts,
|
||||
permission_dead_hosts: permissions.dead_hosts,
|
||||
permission_streams: permissions.streams,
|
||||
permission_access_lists: permissions.access_lists,
|
||||
permission_certificates: permissions.certificates
|
||||
}
|
||||
};
|
||||
return this.getObjectSchema(permission).then((objectSchema) => {
|
||||
const data_schema = {
|
||||
[permission]: {
|
||||
data,
|
||||
scope: Token.get('scope'),
|
||||
roles: user_roles,
|
||||
permission_visibility: permissions.visibility,
|
||||
permission_proxy_hosts: permissions.proxy_hosts,
|
||||
permission_redirection_hosts: permissions.redirection_hosts,
|
||||
permission_dead_hosts: permissions.dead_hosts,
|
||||
permission_streams: permissions.streams,
|
||||
permission_access_lists: permissions.access_lists,
|
||||
permission_certificates: permissions.certificates,
|
||||
},
|
||||
};
|
||||
|
||||
let permissionSchema = {
|
||||
$schema: 'http://json-schema.org/draft-07/schema#',
|
||||
$async: true,
|
||||
$id: 'permissions',
|
||||
additionalProperties: false,
|
||||
properties: {}
|
||||
};
|
||||
const permissionSchema = {
|
||||
$schema: 'http://json-schema.org/draft-07/schema#',
|
||||
$async: true,
|
||||
$id: 'permissions',
|
||||
additionalProperties: false,
|
||||
properties: {},
|
||||
};
|
||||
|
||||
permissionSchema.properties[permission] = require('./access/' + permission.replace(/:/gim, '-') + '.json');
|
||||
permissionSchema.properties[permission] = require('./access/' + permission.replace(/:/gim, '-') + '.json');
|
||||
|
||||
// logger.info('objectSchema', JSON.stringify(objectSchema, null, 2));
|
||||
// logger.info('permissionSchema', JSON.stringify(permissionSchema, null, 2));
|
||||
// logger.info('data_schema', JSON.stringify(data_schema, null, 2));
|
||||
// logger.info('objectSchema', JSON.stringify(objectSchema, null, 2));
|
||||
// logger.info('permissionSchema', JSON.stringify(permissionSchema, null, 2));
|
||||
// logger.info('data_schema', JSON.stringify(data_schema, null, 2));
|
||||
|
||||
let ajv = validator({
|
||||
verbose: true,
|
||||
allErrors: true,
|
||||
format: 'full',
|
||||
missingRefs: 'fail',
|
||||
breakOnError: true,
|
||||
coerceTypes: true,
|
||||
schemas: [
|
||||
roleSchema,
|
||||
permsSchema,
|
||||
objectSchema,
|
||||
permissionSchema
|
||||
]
|
||||
});
|
||||
|
||||
return ajv.validate('permissions', data_schema)
|
||||
.then(() => {
|
||||
return data_schema[permission];
|
||||
});
|
||||
const ajv = validator({
|
||||
verbose: true,
|
||||
allErrors: true,
|
||||
format: 'full',
|
||||
missingRefs: 'fail',
|
||||
breakOnError: true,
|
||||
coerceTypes: true,
|
||||
schemas: [roleSchema, permsSchema, objectSchema, permissionSchema],
|
||||
});
|
||||
|
||||
return ajv.validate('permissions', data_schema).then(() => {
|
||||
return data_schema[permission];
|
||||
});
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
err.permission = permission;
|
||||
err.permission = permission;
|
||||
err.permission_data = data;
|
||||
logger.error(permission, data, err.message);
|
||||
|
||||
throw new error.PermissionError('Permission Denied', err);
|
||||
});
|
||||
}
|
||||
}
|
||||
},
|
||||
};
|
||||
};
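The can() path above hinges on AJV's asynchronous validation: the per-permission JSON file is loaded into a $async schema, and ajv.validate(...) returns a promise that resolves with the matched data or rejects, which can() converts into a PermissionError. A stripped-down, self-contained sketch of that pattern; the schema below is invented for illustration and is far simpler than the role/permission schemas under lib/access/, and the AJV options are kept minimal because exact option names differ between the version pinned here and current majors:

const Ajv = require('ajv');

const ajv = new Ajv({ allErrors: true, coerceTypes: true });

// Illustrative stand-in for permissionSchema plus the per-permission JSON files.
ajv.addSchema({
    $id: 'permissions',
    $async: true,
    type: 'object',
    additionalProperties: false,
    properties: {
        'users:list': {
            type: 'object',
            required: ['scope', 'roles'],
            properties: {
                scope: { type: 'string', pattern: '^user$' },
                roles: { type: 'array', items: { type: 'string' } },
            },
        },
    },
});

const data_schema = {
    'users:list': { scope: 'user', roles: ['admin'] },
};

// $async schemas make validate() return a promise, which is what lets can()
// chain .then()/.catch() around the permission check.
ajv.validate('permissions', data_schema)
    .then((data) => console.log('allowed:', data['users:list']))
    .catch((err) => console.log('denied:', err.errors));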
@@ -1,11 +1,10 @@
|
||||
const dnsPlugins = require('../certbot-dns-plugins.json');
|
||||
const utils = require('./utils');
|
||||
const error = require('./error');
|
||||
const logger = require('../logger').certbot;
|
||||
const batchflow = require('batchflow');
|
||||
const utils = require('./utils');
|
||||
const error = require('./error');
|
||||
const logger = require('../logger').certbot;
|
||||
const batchflow = require('batchflow');
|
||||
|
||||
const certbot = {
|
||||
|
||||
/**
|
||||
* @param {array} pluginKeys
|
||||
*/
|
||||
@@ -18,9 +17,11 @@ const certbot = {
|
||||
return;
|
||||
}
|
||||
|
||||
batchflow(pluginKeys).sequential()
|
||||
batchflow(pluginKeys)
|
||||
.sequential()
|
||||
.each((i, pluginKey, next) => {
|
||||
certbot.installPlugin(pluginKey)
|
||||
certbot
|
||||
.installPlugin(pluginKey)
|
||||
.then(() => {
|
||||
next();
|
||||
})
|
||||
@@ -59,7 +60,8 @@ const certbot = {
|
||||
logger.start(`Installing ${pluginKey}...`);
|
||||
|
||||
const cmd = 'pip install --no-cache-dir ' + plugin.package_name;
|
||||
return utils.exec(cmd)
|
||||
return utils
|
||||
.exec(cmd)
|
||||
.then((result) => {
|
||||
logger.complete(`Installed ${pluginKey}`);
|
||||
return result;
|
||||
|
@@ -1,6 +1,6 @@
|
||||
const fs = require('fs');
|
||||
const fs = require('fs');
|
||||
const NodeRSA = require('node-rsa');
|
||||
const logger = require('../logger').global;
|
||||
const logger = require('../logger').global;
|
||||
|
||||
const keysFile = '/data/etc/npm/keys.json';
|
||||
|
||||
@@ -14,13 +14,13 @@ const configure = () => {
|
||||
let configData;
|
||||
try {
|
||||
configData = require(filename);
|
||||
} catch (err) {
|
||||
} catch {
|
||||
// do nothing
|
||||
}
|
||||
|
||||
if (configData && configData.database) {
|
||||
logger.info(`Using configuration from file: ${filename}`);
|
||||
instance = configData;
|
||||
instance = configData;
|
||||
instance.keys = getKeys();
|
||||
return;
|
||||
}
|
||||
@@ -29,20 +29,20 @@ const configure = () => {
|
||||
const envMysqlHost = process.env.DB_MYSQL_HOST || null;
|
||||
const envMysqlUser = process.env.DB_MYSQL_USER || null;
|
||||
const envMysqlName = process.env.DB_MYSQL_NAME || null;
|
||||
const envMysqlTls = process.env.DB_MYSQL_TLS || null;
|
||||
const envMysqlCa = process.env.DB_MYSQL_CA || '/etc/ssl/certs/ca-certificates.crt';
|
||||
const envMysqlTls = process.env.DB_MYSQL_TLS || null;
|
||||
const envMysqlCa = process.env.DB_MYSQL_CA || '/etc/ssl/certs/ca-certificates.crt';
|
||||
if (envMysqlHost && envMysqlUser && envMysqlName) {
|
||||
// we have enough mysql creds to go with mysql
|
||||
logger.info('Using MySQL configuration');
|
||||
instance = {
|
||||
database: {
|
||||
engine: 'mysql',
|
||||
host: envMysqlHost,
|
||||
port: process.env.DB_MYSQL_PORT || 3306,
|
||||
user: envMysqlUser,
|
||||
engine: 'mysql',
|
||||
host: envMysqlHost,
|
||||
port: process.env.DB_MYSQL_PORT || 3306,
|
||||
user: envMysqlUser,
|
||||
password: process.env.DB_MYSQL_PASSWORD,
|
||||
name: envMysqlName,
|
||||
ssl: envMysqlTls ? { ca: fs.readFileSync(envMysqlCa) } : false,
|
||||
name: envMysqlName,
|
||||
ssl: envMysqlTls ? { ca: fs.readFileSync(envMysqlCa) } : false,
|
||||
},
|
||||
keys: getKeys(),
|
||||
};
|
||||
@@ -54,13 +54,13 @@ const configure = () => {
|
||||
instance = {
|
||||
database: {
|
||||
engine: 'knex-native',
|
||||
knex: {
|
||||
client: 'sqlite3',
|
||||
knex: {
|
||||
client: 'sqlite3',
|
||||
connection: {
|
||||
filename: envSqliteFile
|
||||
filename: envSqliteFile,
|
||||
},
|
||||
useNullAsDefault: true
|
||||
}
|
||||
useNullAsDefault: true,
|
||||
},
|
||||
},
|
||||
keys: getKeys(),
|
||||
};
|
||||
@@ -103,18 +103,17 @@ const generateKeys = () => {
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {string} key ie: 'database' or 'database.engine'
|
||||
* @returns {boolean}
|
||||
*/
|
||||
has: function(key) {
|
||||
has: function (key) {
|
||||
instance === null && configure();
|
||||
const keys = key.split('.');
|
||||
let level = instance;
|
||||
let has = true;
|
||||
keys.forEach((keyItem) =>{
|
||||
let level = instance;
|
||||
let has = true;
|
||||
keys.forEach((keyItem) => {
|
||||
if (typeof level[keyItem] === 'undefined') {
|
||||
has = false;
|
||||
} else {
|
||||
@@ -183,5 +182,5 @@ module.exports = {
|
||||
*/
|
||||
useLetsencryptStaging: function () {
|
||||
return !!process.env.LE_STAGING;
|
||||
}
|
||||
},
|
||||
};
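For reference, the selection logic in this file boils down to: use MySQL when DB_MYSQL_HOST, DB_MYSQL_USER and DB_MYSQL_NAME are all set (with the new DB_MYSQL_TLS/DB_MYSQL_CA pair enabling TLS), otherwise fall back to SQLite. A standalone restatement of that branch; the env variable names are the ones in the hunks above, while the SQLite variable name and default path are placeholders:

const fs = require('fs');

function buildDatabaseConfig(env) {
    const host = env.DB_MYSQL_HOST || null;
    const user = env.DB_MYSQL_USER || null;
    const name = env.DB_MYSQL_NAME || null;
    const tls = env.DB_MYSQL_TLS || null;
    const ca = env.DB_MYSQL_CA || '/etc/ssl/certs/ca-certificates.crt';

    if (host && user && name) {
        return {
            engine: 'mysql',
            host,
            port: env.DB_MYSQL_PORT || 3306,
            user,
            password: env.DB_MYSQL_PASSWORD,
            name,
            ssl: tls ? { ca: fs.readFileSync(ca) } : false,
        };
    }

    // Placeholder sqlite fallback; the real module reads its own env variable
    // and default path for the database file.
    return {
        engine: 'knex-native',
        knex: {
            client: 'sqlite3',
            connection: { filename: env.SQLITE_FILE_PLACEHOLDER || 'npm.sqlite' },
            useNullAsDefault: true,
        },
    };
}

console.log(buildDatabaseConfig(process.env).engine);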
@@ -1,96 +1,95 @@
|
||||
const _ = require('lodash');
|
||||
const _ = require('lodash');
|
||||
const util = require('util');
|
||||
|
||||
module.exports = {
|
||||
|
||||
PermissionError: function (message, previous) {
|
||||
Error.captureStackTrace(this, this.constructor);
|
||||
this.name = this.constructor.name;
|
||||
this.name = this.constructor.name;
|
||||
this.previous = previous;
|
||||
this.message = 'Permission Denied';
|
||||
this.public = true;
|
||||
this.status = 403;
|
||||
this.message = 'Permission Denied';
|
||||
this.public = true;
|
||||
this.status = 403;
|
||||
},
|
||||
|
||||
ItemNotFoundError: function (id, previous) {
|
||||
Error.captureStackTrace(this, this.constructor);
|
||||
this.name = this.constructor.name;
|
||||
this.name = this.constructor.name;
|
||||
this.previous = previous;
|
||||
this.message = 'Item Not Found - ' + id;
|
||||
this.public = true;
|
||||
this.status = 404;
|
||||
this.message = 'Item Not Found - ' + id;
|
||||
this.public = true;
|
||||
this.status = 404;
|
||||
},
|
||||
|
||||
AuthError: function (message, previous) {
|
||||
Error.captureStackTrace(this, this.constructor);
|
||||
this.name = this.constructor.name;
|
||||
this.name = this.constructor.name;
|
||||
this.previous = previous;
|
||||
this.message = message;
|
||||
this.public = true;
|
||||
this.status = 401;
|
||||
this.message = message;
|
||||
this.public = true;
|
||||
this.status = 401;
|
||||
},
|
||||
|
||||
InternalError: function (message, previous) {
|
||||
Error.captureStackTrace(this, this.constructor);
|
||||
this.name = this.constructor.name;
|
||||
this.name = this.constructor.name;
|
||||
this.previous = previous;
|
||||
this.message = message;
|
||||
this.status = 500;
|
||||
this.public = false;
|
||||
this.message = message;
|
||||
this.status = 500;
|
||||
this.public = false;
|
||||
},
|
||||
|
||||
InternalValidationError: function (message, previous) {
|
||||
Error.captureStackTrace(this, this.constructor);
|
||||
this.name = this.constructor.name;
|
||||
this.name = this.constructor.name;
|
||||
this.previous = previous;
|
||||
this.message = message;
|
||||
this.status = 400;
|
||||
this.public = false;
|
||||
this.message = message;
|
||||
this.status = 400;
|
||||
this.public = false;
|
||||
},
|
||||
|
||||
ConfigurationError: function (message, previous) {
|
||||
Error.captureStackTrace(this, this.constructor);
|
||||
this.name = this.constructor.name;
|
||||
this.name = this.constructor.name;
|
||||
this.previous = previous;
|
||||
this.message = message;
|
||||
this.status = 400;
|
||||
this.public = true;
|
||||
this.message = message;
|
||||
this.status = 400;
|
||||
this.public = true;
|
||||
},
|
||||
|
||||
CacheError: function (message, previous) {
|
||||
Error.captureStackTrace(this, this.constructor);
|
||||
this.name = this.constructor.name;
|
||||
this.message = message;
|
||||
this.name = this.constructor.name;
|
||||
this.message = message;
|
||||
this.previous = previous;
|
||||
this.status = 500;
|
||||
this.public = false;
|
||||
this.status = 500;
|
||||
this.public = false;
|
||||
},
|
||||
|
||||
ValidationError: function (message, previous) {
|
||||
Error.captureStackTrace(this, this.constructor);
|
||||
this.name = this.constructor.name;
|
||||
this.name = this.constructor.name;
|
||||
this.previous = previous;
|
||||
this.message = message;
|
||||
this.public = true;
|
||||
this.status = 400;
|
||||
this.message = message;
|
||||
this.public = true;
|
||||
this.status = 400;
|
||||
},
|
||||
|
||||
AssertionFailedError: function (message, previous) {
|
||||
Error.captureStackTrace(this, this.constructor);
|
||||
this.name = this.constructor.name;
|
||||
this.name = this.constructor.name;
|
||||
this.previous = previous;
|
||||
this.message = message;
|
||||
this.public = false;
|
||||
this.status = 400;
|
||||
this.message = message;
|
||||
this.public = false;
|
||||
this.status = 400;
|
||||
},
|
||||
|
||||
CommandError: function (stdErr, code, previous) {
|
||||
Error.captureStackTrace(this, this.constructor);
|
||||
this.name = this.constructor.name;
|
||||
this.name = this.constructor.name;
|
||||
this.previous = previous;
|
||||
this.message = stdErr;
|
||||
this.code = code;
|
||||
this.public = false;
|
||||
this.message = stdErr;
|
||||
this.code = code;
|
||||
this.public = false;
|
||||
},
|
||||
};
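These constructors only populate name/message/status/public/previous; the lodash and util requires at the top suggest the prototypes are wired up to Error elsewhere in the file, outside these hunks. The public flag is what decides whether a message may be surfaced to API clients. A small self-contained sketch of that pattern (the sendError helper is invented for illustration and is not part of this codebase):

const util = require('util');

function ValidationError(message, previous) {
    Error.captureStackTrace(this, this.constructor);
    this.name = this.constructor.name;
    this.previous = previous;
    this.message = message;
    this.public = true;
    this.status = 400;
}
util.inherits(ValidationError, Error);

// Hypothetical response mapper: public errors expose their message, anything
// else is masked as a generic 500.
function sendError(res, err) {
    const status = err.status || 500;
    const message = err.public ? err.message : 'Internal Error';
    res.status(status).send({ error: { code: status, message: message } });
}

const fakeRes = {
    status(code) { this.code = code; return this; },
    send(body) { console.log(this.code, JSON.stringify(body)); },
};

sendError(fakeRes, new ValidationError('Current password was not supplied'));
// 400 {"error":{"code":400,"message":"Current password was not supplied"}}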
@@ -1,40 +1,36 @@
|
||||
const validator = require('../validator');
|
||||
|
||||
module.exports = function (req, res, next) {
|
||||
|
||||
if (req.headers.origin) {
|
||||
|
||||
const originSchema = {
|
||||
oneOf: [
|
||||
{
|
||||
type: 'string',
|
||||
pattern: '^[a-z\\-]+:\\/\\/(?:[\\w\\-\\.]+(:[0-9]+)?/?)?$'
|
||||
type: 'string',
|
||||
pattern: '^[a-z\\-]+:\\/\\/(?:[\\w\\-\\.]+(:[0-9]+)?/?)?$',
|
||||
},
|
||||
{
|
||||
type: 'string',
|
||||
pattern: '^[a-z\\-]+:\\/\\/(?:\\[([a-z0-9]{0,4}\\:?)+\\])?/?(:[0-9]+)?$'
|
||||
}
|
||||
]
|
||||
type: 'string',
|
||||
pattern: '^[a-z\\-]+:\\/\\/(?:\\[([a-z0-9]{0,4}\\:?)+\\])?/?(:[0-9]+)?$',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
// very relaxed validation....
|
||||
validator(originSchema, req.headers.origin)
|
||||
.then(function () {
|
||||
res.set({
|
||||
'Access-Control-Allow-Origin': req.headers.origin,
|
||||
'Access-Control-Allow-Origin': req.headers.origin,
|
||||
'Access-Control-Allow-Credentials': true,
|
||||
'Access-Control-Allow-Methods': 'OPTIONS, GET, POST',
|
||||
'Access-Control-Allow-Headers': 'Content-Type, Cache-Control, Pragma, Expires, Authorization, X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit',
|
||||
'Access-Control-Max-Age': 5 * 60,
|
||||
'Access-Control-Expose-Headers': 'X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit'
|
||||
'Access-Control-Allow-Methods': 'OPTIONS, GET, POST',
|
||||
'Access-Control-Allow-Headers': 'Content-Type, Cache-Control, Pragma, Expires, Authorization, X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit',
|
||||
'Access-Control-Max-Age': 5 * 60,
|
||||
'Access-Control-Expose-Headers': 'X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit',
|
||||
});
|
||||
next();
|
||||
})
|
||||
.catch(next);
|
||||
|
||||
} else {
|
||||
// No origin
|
||||
next();
|
||||
}
|
||||
|
||||
};
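The two pattern strings above are the whole of the origin check: the first covers scheme://host[:port][/] origins, the second bracketed IPv6 origins. Exercising them directly with RegExp shows what the relaxed validation accepts (AJV's pattern keyword compiles the strings the same way):

const hostPattern = new RegExp('^[a-z\\-]+:\\/\\/(?:[\\w\\-\\.]+(:[0-9]+)?/?)?$');
const ipv6Pattern = new RegExp('^[a-z\\-]+:\\/\\/(?:\\[([a-z0-9]{0,4}\\:?)+\\])?/?(:[0-9]+)?$');

const origins = ['https://npm.example.com', 'http://localhost:8181', 'http://[::1]:8181', 'not a url'];

for (const origin of origins) {
    console.log(origin, '->', hostPattern.test(origin) || ipv6Pattern.test(origin));
}
// https://npm.example.com -> true
// http://localhost:8181 -> true
// http://[::1]:8181 -> true
// not a url -> false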
@@ -3,8 +3,9 @@ const Access = require('../access');
module.exports = () => {
return function (req, res, next) {
res.locals.access = null;
let access = new Access(res.locals.token || null);
access.load()
const access = new Access(res.locals.token || null);
access
.load()
.then(() => {
res.locals.access = access;
next();
@@ -12,4 +13,3 @@ module.exports = () => {
.catch(next);
};
};

@@ -1,7 +1,7 @@
module.exports = function () {
return function (req, res, next) {
if (req.headers.authorization) {
let parts = req.headers.authorization.split(' ');
const parts = req.headers.authorization.split(' ');
if (parts && parts[0] === 'Bearer' && parts[1]) {
res.locals.token = parts[1];
@@ -1,7 +1,6 @@
|
||||
let _ = require('lodash');
|
||||
const _ = require('lodash');
|
||||
|
||||
module.exports = function (default_sort, default_offset, default_limit, max_limit) {
|
||||
|
||||
/**
|
||||
* This will setup the req query params with filtered data and defaults
|
||||
*
|
||||
@@ -12,33 +11,32 @@ module.exports = function (default_sort, default_offset, default_limit, max_limi
|
||||
*/
|
||||
|
||||
return function (req, res, next) {
|
||||
|
||||
req.query.offset = typeof req.query.limit === 'undefined' ? default_offset || 0 : parseInt(req.query.offset, 10);
|
||||
req.query.limit = typeof req.query.limit === 'undefined' ? default_limit || 50 : parseInt(req.query.limit, 10);
|
||||
req.query.limit = typeof req.query.limit === 'undefined' ? default_limit || 50 : parseInt(req.query.limit, 10);
|
||||
|
||||
if (max_limit && req.query.limit > max_limit) {
|
||||
req.query.limit = max_limit;
|
||||
}
|
||||
|
||||
// Sorting
|
||||
let sort = typeof req.query.sort === 'undefined' ? default_sort : req.query.sort;
|
||||
let myRegexp = /.*\.(asc|desc)$/ig;
|
||||
let sort_array = [];
|
||||
let sort = typeof req.query.sort === 'undefined' ? default_sort : req.query.sort;
|
||||
const myRegexp = /.*\.(asc|desc)$/gi;
|
||||
const sort_array = [];
|
||||
|
||||
sort = sort.split(',');
|
||||
_.map(sort, function (val) {
|
||||
let matches = myRegexp.exec(val);
|
||||
const matches = myRegexp.exec(val);
|
||||
|
||||
if (matches !== null) {
|
||||
let dir = matches[1];
|
||||
const dir = matches[1];
|
||||
sort_array.push({
|
||||
field: val.substr(0, val.length - (dir.length + 1)),
|
||||
dir: dir.toLowerCase()
|
||||
dir: dir.toLowerCase(),
|
||||
});
|
||||
} else {
|
||||
sort_array.push({
|
||||
field: val,
|
||||
dir: 'asc'
|
||||
dir: 'asc',
|
||||
});
|
||||
}
|
||||
});
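One behavioural note on the sort parser above: the regex keeps its g flag, and a global regex reused with exec() carries lastIndex between calls, so a second comma-separated field can fail to match even when it ends in .asc/.desc; the $ anchor alone is enough here, so the flag could be dropped. A standalone version of the parser without the flag, with the shape of its output (field names are only examples):

function parseSort(sortParam, defaultSort) {
    const sort = typeof sortParam === 'undefined' ? defaultSort : sortParam;
    const dirPattern = /.*\.(asc|desc)$/i; // no `g`: exec() keeps no state between calls

    return sort.split(',').map((val) => {
        const matches = dirPattern.exec(val);
        if (matches !== null) {
            const dir = matches[1];
            return { field: val.substr(0, val.length - (dir.length + 1)), dir: dir.toLowerCase() };
        }
        return { field: val, dir: 'asc' };
    });
}

console.log(parseSort('domain_names.desc,created_on', 'id.asc'));
// [ { field: 'domain_names', dir: 'desc' }, { field: 'created_on', dir: 'asc' } ]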
@@ -1,7 +1,6 @@
|
||||
const moment = require('moment');
|
||||
|
||||
module.exports = {
|
||||
|
||||
/**
|
||||
* Takes an expression such as 30d and returns a moment object of that date in future
|
||||
*
|
||||
@@ -21,12 +20,11 @@ module.exports = {
|
||||
* @returns {Object}
|
||||
*/
|
||||
parseDatePeriod: function (expression) {
|
||||
let matches = expression.match(/^([0-9]+)(y|Q|M|w|d|h|m|s|ms)$/m);
|
||||
const matches = expression.match(/^([0-9]+)(y|Q|M|w|d|h|m|s|ms)$/m);
|
||||
if (matches) {
|
||||
return moment().add(matches[1], matches[2]);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
},
|
||||
};
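Usage of parseDatePeriod above, for reference: the unit letters are moment's shorthand (y, Q, M, w, d, h, m, s, ms), and anything that does not match the pattern yields null. moment is already a dependency of this backend:

const moment = require('moment');

function parseDatePeriod(expression) {
    const matches = expression.match(/^([0-9]+)(y|Q|M|w|d|h|m|s|ms)$/m);
    if (matches) {
        return moment().add(matches[1], matches[2]);
    }
    return null;
}

console.log(parseDatePeriod('30d').format('YYYY-MM-DD')); // 30 days from now
console.log(parseDatePeriod('2h').toISOString());         // 2 hours from now
console.log(parseDatePeriod('soon'));                      // null - a numeric value and unit are required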
@@ -1,5 +1,5 @@
|
||||
const migrate_name = 'identifier_for_migrate';
|
||||
const logger = require('../logger').migrate;
|
||||
const logger = require('../logger').migrate;
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
@@ -11,12 +11,11 @@ const logger = require('../logger').migrate;
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex, Promise) {
|
||||
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
|
||||
// Create Table example:
|
||||
|
||||
/*return knex.schema.createTable('notification', (table) => {
|
||||
/* return knex.schema.createTable('notification', (table) => {
|
||||
table.increments().primary();
|
||||
table.string('name').notNull();
|
||||
table.string('type').notNull();
|
||||
@@ -25,7 +24,7 @@ exports.up = function (knex, Promise) {
|
||||
})
|
||||
.then(function () {
|
||||
logger.info('[' + migrate_name + '] Notification Table created');
|
||||
});*/
|
||||
}); */
|
||||
|
||||
logger.info('[' + migrate_name + '] Migrating Up Complete');
|
||||
|
||||
@@ -44,10 +43,10 @@ exports.down = function (knex, Promise) {
|
||||
|
||||
// Drop table example:
|
||||
|
||||
/*return knex.schema.dropTable('notification')
|
||||
/* return knex.schema.dropTable('notification')
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] Notification Table dropped');
|
||||
});*/
|
||||
}); */
|
||||
|
||||
logger.info('[' + migrate_name + '] Migrating Down Complete');
|
||||
|
||||
|
@@ -1,19 +1,17 @@
|
||||
const _ = require('lodash');
|
||||
const exec = require('child_process').exec;
|
||||
const spawn = require('child_process').spawn;
|
||||
const execFile = require('child_process').execFile;
|
||||
const _ = require('lodash');
|
||||
const exec = require('child_process').exec;
|
||||
const spawn = require('child_process').spawn;
|
||||
const execFile = require('child_process').execFile;
|
||||
const { Liquid } = require('liquidjs');
|
||||
const error = require('./error');
|
||||
//const logger = require('../logger').global;
|
||||
const error = require('./error');
|
||||
// const logger = require('../logger').global;
|
||||
|
||||
module.exports = {
|
||||
|
||||
|
||||
/**
|
||||
* @param {String} cmd
|
||||
*/
|
||||
exec: async function(cmd, options = {}) {
|
||||
//logger.debug('CMD:', cmd);
|
||||
exec: async function (cmd, options = {}) {
|
||||
// logger.debug('CMD:', cmd);
|
||||
|
||||
const { stdout, stderr } = await new Promise((resolve, reject) => {
|
||||
const child = exec(cmd, options, (isError, stdout, stderr) => {
|
||||
@@ -36,7 +34,7 @@ module.exports = {
|
||||
* @param {Array} args
|
||||
*/
|
||||
execFile: async function (cmd, args, options = {}) {
|
||||
//logger.debug('CMD: ' + cmd + ' ' + (args ? args.join(' ') : ''));
|
||||
// logger.debug('CMD: ' + cmd + ' ' + (args ? args.join(' ') : ''));
|
||||
|
||||
const { stdout, stderr } = await new Promise((resolve, reject) => {
|
||||
const child = execFile(cmd, args, options, (isError, stdout, stderr) => {
|
||||
@@ -60,9 +58,9 @@ module.exports = {
|
||||
execfg: function (cmd) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const childProcess = spawn(cmd, {
|
||||
shell: true,
|
||||
shell: true,
|
||||
detached: true,
|
||||
stdio: 'inherit'
|
||||
stdio: 'inherit',
|
||||
});
|
||||
|
||||
childProcess.on('error', (err) => {
|
||||
@@ -119,7 +117,7 @@ module.exports = {
|
||||
*/
|
||||
getRenderEngine: function () {
|
||||
const renderEngine = new Liquid({
|
||||
root: __dirname + '/../templates/'
|
||||
root: __dirname + '/../templates/',
|
||||
});
|
||||
|
||||
/**
|
||||
@@ -136,5 +134,5 @@ module.exports = {
|
||||
});
|
||||
|
||||
return renderEngine;
|
||||
}
|
||||
},
|
||||
};
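The exec/execFile helpers above wrap child_process in a promise and route non-zero exits through error.CommandError, while execfg spawns with inherited stdio for foreground commands. A minimal standalone version of the promise wrapper; a plain Error stands in for CommandError so the sketch needs no local imports:

const { exec } = require('child_process');

function execPromise(cmd, options = {}) {
    return new Promise((resolve, reject) => {
        exec(cmd, options, (err, stdout, stderr) => {
            if (err) {
                // The real helper rejects with error.CommandError(stderr, code) instead.
                const wrapped = new Error(stderr || err.message);
                wrapped.code = err.code;
                reject(wrapped);
                return;
            }
            resolve(stdout.trim());
        });
    });
}

execPromise('echo hello')
    .then((out) => console.log(out)) // hello
    .catch((err) => console.error('command failed:', err.code, err.message));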
@@ -1,13 +1,13 @@
|
||||
const error = require('../error');
|
||||
const path = require('path');
|
||||
const error = require('../error');
|
||||
const path = require('path');
|
||||
const parser = require('@apidevtools/json-schema-ref-parser');
|
||||
|
||||
const ajv = require('ajv')({
|
||||
verbose: true,
|
||||
verbose: true,
|
||||
validateSchema: true,
|
||||
allErrors: false,
|
||||
format: 'full',
|
||||
coerceTypes: true
|
||||
allErrors: false,
|
||||
format: 'full',
|
||||
coerceTypes: true,
|
||||
});
|
||||
|
||||
/**
|
||||
@@ -15,31 +15,29 @@ const ajv = require('ajv')({
|
||||
* @param {Object} payload
|
||||
* @returns {Promise}
|
||||
*/
|
||||
function apiValidator (schema, payload/*, description*/) {
|
||||
return new Promise(function Promise_apiValidator (resolve, reject) {
|
||||
function apiValidator(schema, payload /*, description */) {
|
||||
return new Promise(function Promise_apiValidator(resolve, reject) {
|
||||
if (typeof payload === 'undefined') {
|
||||
reject(new error.ValidationError('Payload is undefined'));
|
||||
}
|
||||
|
||||
let validate = ajv.compile(schema);
|
||||
let valid = validate(payload);
|
||||
const validate = ajv.compile(schema);
|
||||
const valid = validate(payload);
|
||||
|
||||
if (valid && !validate.errors) {
|
||||
resolve(payload);
|
||||
} else {
|
||||
let message = ajv.errorsText(validate.errors);
|
||||
let err = new error.ValidationError(message);
|
||||
err.debug = [validate.errors, payload];
|
||||
const message = ajv.errorsText(validate.errors);
|
||||
const err = new error.ValidationError(message);
|
||||
err.debug = [validate.errors, payload];
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
apiValidator.loadSchemas = parser
|
||||
.dereference(path.resolve('schema/index.json'))
|
||||
.then((schema) => {
|
||||
ajv.addSchema(schema);
|
||||
return schema;
|
||||
});
|
||||
apiValidator.loadSchemas = parser.dereference(path.resolve('schema/index.json')).then((schema) => {
|
||||
ajv.addSchema(schema);
|
||||
return schema;
|
||||
});
|
||||
|
||||
module.exports = apiValidator;
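apiValidator compiles the given schema on every call and rejects with a ValidationError carrying AJV's errorsText plus a debug payload; loadSchemas dereferences schema/index.json once so $ref'd definitions resolve. A self-contained sketch of that compile/validate/reject shape; the schema is invented for illustration, a plain Error stands in for error.ValidationError, and AJV options are kept minimal so the sketch is not tied to the version pinned here:

const Ajv = require('ajv');

const ajv = new Ajv({ allErrors: true, coerceTypes: true });

function apiValidatorSketch(schema, payload) {
    return new Promise((resolve, reject) => {
        if (typeof payload === 'undefined') {
            reject(new Error('Payload is undefined'));
            return;
        }
        const validate = ajv.compile(schema);
        if (validate(payload) && !validate.errors) {
            resolve(payload);
        } else {
            const err = new Error(ajv.errorsText(validate.errors));
            err.debug = [validate.errors, payload];
            reject(err);
        }
    });
}

const schema = {
    type: 'object',
    required: ['email'],
    additionalProperties: false,
    properties: { email: { type: 'string' }, name: { type: 'string' } },
};

apiValidatorSketch(schema, { email: 'admin@example.com' }).then((data) => console.log('valid:', data));
apiValidatorSketch(schema, { name: 'no email' }).catch((err) => console.error('invalid:', err.message));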
@@ -1,17 +1,15 @@
|
||||
const _ = require('lodash');
|
||||
const error = require('../error');
|
||||
const _ = require('lodash');
|
||||
const error = require('../error');
|
||||
const definitions = require('../../schema/definitions.json');
|
||||
|
||||
RegExp.prototype.toJSON = RegExp.prototype.toString;
|
||||
|
||||
const ajv = require('ajv')({
|
||||
verbose: true,
|
||||
allErrors: true,
|
||||
format: 'full', // strict regexes for format checks
|
||||
verbose: true,
|
||||
allErrors: true,
|
||||
format: 'full', // strict regexes for format checks
|
||||
coerceTypes: true,
|
||||
schemas: [
|
||||
definitions
|
||||
]
|
||||
schemas: [definitions],
|
||||
});
|
||||
|
||||
/**
|
||||
@@ -20,30 +18,26 @@ const ajv = require('ajv')({
|
||||
* @param {Object} payload
|
||||
* @returns {Promise}
|
||||
*/
|
||||
function validator (schema, payload) {
|
||||
function validator(schema, payload) {
|
||||
return new Promise(function (resolve, reject) {
|
||||
if (!payload) {
|
||||
reject(new error.InternalValidationError('Payload is falsy'));
|
||||
} else {
|
||||
try {
|
||||
let validate = ajv.compile(schema);
|
||||
const validate = ajv.compile(schema);
|
||||
|
||||
let valid = validate(payload);
|
||||
const valid = validate(payload);
|
||||
if (valid && !validate.errors) {
|
||||
resolve(_.cloneDeep(payload));
|
||||
} else {
|
||||
let message = ajv.errorsText(validate.errors);
|
||||
const message = ajv.errorsText(validate.errors);
|
||||
reject(new error.InternalValidationError(message));
|
||||
}
|
||||
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
module.exports = validator;
|
||||
|
@@ -1,14 +1,14 @@
const {Signale} = require('signale');
const { Signale } = require('signale');

module.exports = {
global: new Signale({scope: 'Global '}),
migrate: new Signale({scope: 'Migrate '}),
express: new Signale({scope: 'Express '}),
access: new Signale({scope: 'Access '}),
nginx: new Signale({scope: 'Nginx '}),
ssl: new Signale({scope: 'SSL '}),
certbot: new Signale({scope: 'Certbot '}),
import: new Signale({scope: 'Importer '}),
setup: new Signale({scope: 'Setup '}),
ip_ranges: new Signale({scope: 'IP Ranges'})
global: new Signale({ scope: 'Global ' }),
migrate: new Signale({ scope: 'Migrate ' }),
express: new Signale({ scope: 'Express ' }),
access: new Signale({ scope: 'Access ' }),
nginx: new Signale({ scope: 'Nginx ' }),
ssl: new Signale({ scope: 'SSL ' }),
certbot: new Signale({ scope: 'Certbot ' }),
import: new Signale({ scope: 'Importer ' }),
setup: new Signale({ scope: 'Setup ' }),
ip_ranges: new Signale({ scope: 'IP Ranges' }),
};
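Each export above is an independent Signale instance, so call sites pick a scope with, for example, require('./logger').certbot; the methods used throughout this diff (info, warn, error, start, complete) are stock Signale log types. A short usage sketch:

const { Signale } = require('signale');

const logger = {
    certbot: new Signale({ scope: 'Certbot ' }),
    migrate: new Signale({ scope: 'Migrate ' }),
};

logger.certbot.start('Installing certbot-dns-cloudflare...');
logger.certbot.complete('Installed certbot-dns-cloudflare');
logger.migrate.info('Current database version:', 'none');
logger.migrate.warn("You can't migrate down the initial data.");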
@@ -1,15 +1,14 @@
|
||||
const db = require('./db');
|
||||
const db = require('./db');
|
||||
const logger = require('./logger').migrate;
|
||||
|
||||
module.exports = {
|
||||
latest: function () {
|
||||
return db.migrate.currentVersion()
|
||||
.then((version) => {
|
||||
logger.info('Current database version:', version);
|
||||
return db.migrate.latest({
|
||||
tableName: 'migrations',
|
||||
directory: 'migrations'
|
||||
});
|
||||
return db.migrate.currentVersion().then((version) => {
|
||||
logger.info('Current database version:', version);
|
||||
return db.migrate.latest({
|
||||
tableName: 'migrations',
|
||||
directory: 'migrations',
|
||||
});
|
||||
}
|
||||
});
|
||||
},
|
||||
};
|
||||
|
@@ -1,5 +1,5 @@
|
||||
const migrate_name = 'initial-schema';
|
||||
const logger = require('../logger').migrate;
|
||||
const logger = require('../logger').migrate;
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
@@ -10,19 +10,20 @@ const logger = require('../logger').migrate;
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex/*, Promise*/) {
|
||||
exports.up = function (knex /*, Promise */) {
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
|
||||
return knex.schema.createTable('auth', (table) => {
|
||||
table.increments().primary();
|
||||
table.dateTime('created_on').notNull();
|
||||
table.dateTime('modified_on').notNull();
|
||||
table.integer('user_id').notNull().unsigned();
|
||||
table.string('type', 30).notNull();
|
||||
table.string('secret').notNull();
|
||||
table.json('meta').notNull();
|
||||
table.integer('is_deleted').notNull().unsigned().defaultTo(0);
|
||||
})
|
||||
return knex.schema
|
||||
.createTable('auth', (table) => {
|
||||
table.increments().primary();
|
||||
table.dateTime('created_on').notNull();
|
||||
table.dateTime('modified_on').notNull();
|
||||
table.integer('user_id').notNull().unsigned();
|
||||
table.string('type', 30).notNull();
|
||||
table.string('secret').notNull();
|
||||
table.json('meta').notNull();
|
||||
table.integer('is_deleted').notNull().unsigned().defaultTo(0);
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] auth Table created');
|
||||
|
||||
@@ -189,7 +190,6 @@ exports.up = function (knex/*, Promise*/) {
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] audit_log Table created');
|
||||
});
|
||||
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -200,6 +200,6 @@ exports.up = function (knex/*, Promise*/) {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex, Promise) {
|
||||
logger.warn('[' + migrate_name + '] You can\'t migrate down the initial data.');
|
||||
logger.warn('[' + migrate_name + "] You can't migrate down the initial data.");
|
||||
return Promise.resolve(true);
|
||||
};
|
||||
|
@@ -1,5 +1,5 @@
|
||||
const migrate_name = 'websockets';
|
||||
const logger = require('../logger').migrate;
|
||||
const logger = require('../logger').migrate;
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
@@ -10,16 +10,16 @@ const logger = require('../logger').migrate;
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex/*, Promise*/) {
|
||||
exports.up = function (knex /*, Promise */) {
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
|
||||
return knex.schema.table('proxy_host', function (proxy_host) {
|
||||
proxy_host.integer('allow_websocket_upgrade').notNull().unsigned().defaultTo(0);
|
||||
})
|
||||
return knex.schema
|
||||
.table('proxy_host', function (proxy_host) {
|
||||
proxy_host.integer('allow_websocket_upgrade').notNull().unsigned().defaultTo(0);
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] proxy_host Table altered');
|
||||
});
|
||||
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -30,6 +30,6 @@ exports.up = function (knex/*, Promise*/) {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex, Promise) {
|
||||
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
|
||||
logger.warn('[' + migrate_name + "] You can't migrate down this one.");
|
||||
return Promise.resolve(true);
|
||||
};
|
||||
};
|
||||
|
@@ -1,5 +1,5 @@
|
||||
const migrate_name = 'forward_host';
|
||||
const logger = require('../logger').migrate;
|
||||
const logger = require('../logger').migrate;
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
@@ -10,12 +10,13 @@ const logger = require('../logger').migrate;
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex/*, Promise*/) {
|
||||
exports.up = function (knex /*, Promise */) {
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
|
||||
return knex.schema.table('proxy_host', function (proxy_host) {
|
||||
proxy_host.renameColumn('forward_ip', 'forward_host');
|
||||
})
|
||||
return knex.schema
|
||||
.table('proxy_host', function (proxy_host) {
|
||||
proxy_host.renameColumn('forward_ip', 'forward_host');
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] proxy_host Table altered');
|
||||
});
|
||||
@@ -29,6 +30,6 @@ exports.up = function (knex/*, Promise*/) {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex, Promise) {
|
||||
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
|
||||
logger.warn('[' + migrate_name + "] You can't migrate down this one.");
|
||||
return Promise.resolve(true);
|
||||
};
|
||||
};
|
||||
|
@@ -1,5 +1,5 @@
|
||||
const migrate_name = 'http2_support';
|
||||
const logger = require('../logger').migrate;
|
||||
const logger = require('../logger').migrate;
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
@@ -10,12 +10,13 @@ const logger = require('../logger').migrate;
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex/*, Promise*/) {
|
||||
exports.up = function (knex /*, Promise */) {
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
|
||||
return knex.schema.table('proxy_host', function (proxy_host) {
|
||||
proxy_host.integer('http2_support').notNull().unsigned().defaultTo(0);
|
||||
})
|
||||
return knex.schema
|
||||
.table('proxy_host', function (proxy_host) {
|
||||
proxy_host.integer('http2_support').notNull().unsigned().defaultTo(0);
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] proxy_host Table altered');
|
||||
|
||||
@@ -43,7 +44,6 @@ exports.up = function (knex/*, Promise*/) {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex, Promise) {
|
||||
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
|
||||
logger.warn('[' + migrate_name + "] You can't migrate down this one.");
|
||||
return Promise.resolve(true);
|
||||
};
|
||||
|
||||
|
@@ -1,5 +1,5 @@
|
||||
const migrate_name = 'forward_scheme';
|
||||
const logger = require('../logger').migrate;
|
||||
const logger = require('../logger').migrate;
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
@@ -10,12 +10,13 @@ const logger = require('../logger').migrate;
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex/*, Promise*/) {
|
||||
exports.up = function (knex /*, Promise */) {
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
|
||||
return knex.schema.table('proxy_host', function (proxy_host) {
|
||||
proxy_host.string('forward_scheme').notNull().defaultTo('http');
|
||||
})
|
||||
return knex.schema
|
||||
.table('proxy_host', function (proxy_host) {
|
||||
proxy_host.string('forward_scheme').notNull().defaultTo('http');
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] proxy_host Table altered');
|
||||
});
|
||||
@@ -29,6 +30,6 @@ exports.up = function (knex/*, Promise*/) {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex, Promise) {
|
||||
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
|
||||
logger.warn('[' + migrate_name + "] You can't migrate down this one.");
|
||||
return Promise.resolve(true);
|
||||
};
|
||||
|
@@ -1,5 +1,5 @@
|
||||
const migrate_name = 'disabled';
|
||||
const logger = require('../logger').migrate;
|
||||
const logger = require('../logger').migrate;
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
@@ -10,12 +10,13 @@ const logger = require('../logger').migrate;
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex/*, Promise*/) {
|
||||
exports.up = function (knex /*, Promise */) {
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
|
||||
return knex.schema.table('proxy_host', function (proxy_host) {
|
||||
proxy_host.integer('enabled').notNull().unsigned().defaultTo(1);
|
||||
})
|
||||
return knex.schema
|
||||
.table('proxy_host', function (proxy_host) {
|
||||
proxy_host.integer('enabled').notNull().unsigned().defaultTo(1);
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] proxy_host Table altered');
|
||||
|
||||
@@ -50,6 +51,6 @@ exports.up = function (knex/*, Promise*/) {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex, Promise) {
|
||||
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
|
||||
logger.warn('[' + migrate_name + "] You can't migrate down this one.");
|
||||
return Promise.resolve(true);
|
||||
};
|
||||
|
@@ -1,5 +1,5 @@
|
||||
const migrate_name = 'custom_locations';
|
||||
const logger = require('../logger').migrate;
|
||||
const logger = require('../logger').migrate;
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
@@ -11,12 +11,13 @@ const logger = require('../logger').migrate;
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex/*, Promise*/) {
|
||||
exports.up = function (knex /*, Promise */) {
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
|
||||
return knex.schema.table('proxy_host', function (proxy_host) {
|
||||
proxy_host.json('locations');
|
||||
})
|
||||
return knex.schema
|
||||
.table('proxy_host', function (proxy_host) {
|
||||
proxy_host.json('locations');
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] proxy_host Table altered');
|
||||
});
|
||||
@@ -30,6 +31,6 @@ exports.up = function (knex/*, Promise*/) {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex, Promise) {
|
||||
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
|
||||
logger.warn('[' + migrate_name + "] You can't migrate down this one.");
|
||||
return Promise.resolve(true);
|
||||
};
|
||||
|
@@ -1,5 +1,5 @@
|
||||
const migrate_name = 'hsts';
|
||||
const logger = require('../logger').migrate;
|
||||
const logger = require('../logger').migrate;
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
@@ -10,13 +10,14 @@ const logger = require('../logger').migrate;
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex/*, Promise*/) {
|
||||
exports.up = function (knex /*, Promise */) {
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
|
||||
return knex.schema.table('proxy_host', function (proxy_host) {
|
||||
proxy_host.integer('hsts_enabled').notNull().unsigned().defaultTo(0);
|
||||
proxy_host.integer('hsts_subdomains').notNull().unsigned().defaultTo(0);
|
||||
})
|
||||
return knex.schema
|
||||
.table('proxy_host', function (proxy_host) {
|
||||
proxy_host.integer('hsts_enabled').notNull().unsigned().defaultTo(0);
|
||||
proxy_host.integer('hsts_subdomains').notNull().unsigned().defaultTo(0);
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] proxy_host Table altered');
|
||||
|
||||
@@ -46,6 +47,6 @@ exports.up = function (knex/*, Promise*/) {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex, Promise) {
|
||||
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
|
||||
logger.warn('[' + migrate_name + "] You can't migrate down this one.");
|
||||
return Promise.resolve(true);
|
||||
};
|
||||
|
@@ -1,5 +1,5 @@
|
||||
const migrate_name = 'settings';
|
||||
const logger = require('../logger').migrate;
|
||||
const logger = require('../logger').migrate;
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
@@ -10,16 +10,17 @@ const logger = require('../logger').migrate;
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex/*, Promise*/) {
|
||||
exports.up = function (knex /*, Promise */) {
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
|
||||
return knex.schema.createTable('setting', (table) => {
|
||||
table.string('id').notNull().primary();
|
||||
table.string('name', 100).notNull();
|
||||
table.string('description', 255).notNull();
|
||||
table.string('value', 255).notNull();
|
||||
table.json('meta').notNull();
|
||||
})
|
||||
return knex.schema
|
||||
.createTable('setting', (table) => {
|
||||
table.string('id').notNull().primary();
|
||||
table.string('name', 100).notNull();
|
||||
table.string('description', 255).notNull();
|
||||
table.string('value', 255).notNull();
|
||||
table.json('meta').notNull();
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] setting Table created');
|
||||
});
|
||||
@@ -33,6 +34,6 @@ exports.up = function (knex/*, Promise*/) {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex, Promise) {
|
||||
logger.warn('[' + migrate_name + '] You can\'t migrate down the initial data.');
|
||||
logger.warn('[' + migrate_name + "] You can't migrate down the initial data.");
|
||||
return Promise.resolve(true);
|
||||
};
|
||||
|
@@ -1,5 +1,5 @@
|
||||
const migrate_name = 'access_list_client';
|
||||
const logger = require('../logger').migrate;
|
||||
const logger = require('../logger').migrate;
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
@@ -10,20 +10,19 @@ const logger = require('../logger').migrate;
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex/*, Promise*/) {
|
||||
|
||||
exports.up = function (knex /*, Promise */) {
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
|
||||
return knex.schema.createTable('access_list_client', (table) => {
|
||||
table.increments().primary();
|
||||
table.dateTime('created_on').notNull();
|
||||
table.dateTime('modified_on').notNull();
|
||||
table.integer('access_list_id').notNull().unsigned();
|
||||
table.string('address').notNull();
|
||||
table.string('directive').notNull();
|
||||
table.json('meta').notNull();
|
||||
|
||||
})
|
||||
return knex.schema
|
||||
.createTable('access_list_client', (table) => {
|
||||
table.increments().primary();
|
||||
table.dateTime('created_on').notNull();
|
||||
table.dateTime('modified_on').notNull();
|
||||
table.integer('access_list_id').notNull().unsigned();
|
||||
table.string('address').notNull();
|
||||
table.string('directive').notNull();
|
||||
table.json('meta').notNull();
|
||||
})
|
||||
.then(function () {
|
||||
logger.info('[' + migrate_name + '] access_list_client Table created');
|
||||
|
||||
@@ -43,11 +42,10 @@ exports.up = function (knex/*, Promise*/) {
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex/*, Promise*/) {
|
||||
exports.down = function (knex /*, Promise */) {
|
||||
logger.info('[' + migrate_name + '] Migrating Down...');
|
||||
|
||||
return knex.schema.dropTable('access_list_client')
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] access_list_client Table dropped');
|
||||
});
|
||||
return knex.schema.dropTable('access_list_client').then(() => {
|
||||
logger.info('[' + migrate_name + '] access_list_client Table dropped');
|
||||
});
|
||||
};
|
||||
|
@@ -1,5 +1,5 @@
|
||||
const migrate_name = 'access_list_client_fix';
|
||||
const logger = require('../logger').migrate;
|
||||
const logger = require('../logger').migrate;
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
@@ -10,12 +10,13 @@ const logger = require('../logger').migrate;
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex/*, Promise*/) {
|
||||
exports.up = function (knex /*, Promise */) {
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
|
||||
return knex.schema.table('access_list', function (access_list) {
|
||||
access_list.renameColumn('satify_any', 'satisfy_any');
|
||||
})
|
||||
return knex.schema
|
||||
.table('access_list', function (access_list) {
|
||||
access_list.renameColumn('satify_any', 'satisfy_any');
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] access_list Table altered');
|
||||
});
|
||||
@@ -29,6 +30,6 @@ exports.up = function (knex/*, Promise*/) {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex, Promise) {
|
||||
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
|
||||
logger.warn('[' + migrate_name + "] You can't migrate down this one.");
|
||||
return Promise.resolve(true);
|
||||
};
|
||||
|
@@ -1,5 +1,5 @@
|
||||
const migrate_name = 'pass_auth';
|
||||
const logger = require('../logger').migrate;
|
||||
const logger = require('../logger').migrate;
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
@@ -10,13 +10,13 @@ const logger = require('../logger').migrate;
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex/*, Promise*/) {
|
||||
|
||||
exports.up = function (knex /*, Promise */) {
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
|
||||
return knex.schema.table('access_list', function (access_list) {
|
||||
access_list.integer('pass_auth').notNull().defaultTo(1);
|
||||
})
|
||||
return knex.schema
|
||||
.table('access_list', function (access_list) {
|
||||
access_list.integer('pass_auth').notNull().defaultTo(1);
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] access_list Table altered');
|
||||
});
|
||||
@@ -29,12 +29,13 @@ exports.up = function (knex/*, Promise*/) {
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex/*, Promise*/) {
|
||||
exports.down = function (knex /*, Promise */) {
|
||||
logger.info('[' + migrate_name + '] Migrating Down...');
|
||||
|
||||
return knex.schema.table('access_list', function (access_list) {
|
||||
access_list.dropColumn('pass_auth');
|
||||
})
|
||||
return knex.schema
|
||||
.table('access_list', function (access_list) {
|
||||
access_list.dropColumn('pass_auth');
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] access_list pass_auth Column dropped');
|
||||
});
|
||||
|
@@ -1,5 +1,5 @@
|
||||
const migrate_name = 'redirection_scheme';
|
||||
const logger = require('../logger').migrate;
|
||||
const logger = require('../logger').migrate;
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
@@ -10,13 +10,13 @@ const logger = require('../logger').migrate;
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex/*, Promise*/) {
|
||||
|
||||
exports.up = function (knex /*, Promise */) {
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
|
||||
return knex.schema.table('redirection_host', (table) => {
|
||||
table.string('forward_scheme').notNull().defaultTo('$scheme');
|
||||
})
|
||||
return knex.schema
|
||||
.table('redirection_host', (table) => {
|
||||
table.string('forward_scheme').notNull().defaultTo('$scheme');
|
||||
})
|
||||
.then(function () {
|
||||
logger.info('[' + migrate_name + '] redirection_host Table altered');
|
||||
});
|
||||
@@ -29,12 +29,13 @@ exports.up = function (knex/*, Promise*/) {
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex/*, Promise*/) {
|
||||
exports.down = function (knex /*, Promise */) {
|
||||
logger.info('[' + migrate_name + '] Migrating Down...');
|
||||
|
||||
return knex.schema.table('redirection_host', (table) => {
|
||||
table.dropColumn('forward_scheme');
|
||||
})
|
||||
return knex.schema
|
||||
.table('redirection_host', (table) => {
|
||||
table.dropColumn('forward_scheme');
|
||||
})
|
||||
.then(function () {
|
||||
logger.info('[' + migrate_name + '] redirection_host Table altered');
|
||||
});
|
||||
|
@@ -1,5 +1,5 @@
|
||||
const migrate_name = 'redirection_status_code';
|
||||
const logger = require('../logger').migrate;
|
||||
const logger = require('../logger').migrate;
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
@@ -10,13 +10,13 @@ const logger = require('../logger').migrate;
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex/*, Promise*/) {
|
||||
|
||||
exports.up = function (knex /*, Promise */) {
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
|
||||
return knex.schema.table('redirection_host', (table) => {
|
||||
table.integer('forward_http_code').notNull().unsigned().defaultTo(302);
|
||||
})
|
||||
return knex.schema
|
||||
.table('redirection_host', (table) => {
|
||||
table.integer('forward_http_code').notNull().unsigned().defaultTo(302);
|
||||
})
|
||||
.then(function () {
|
||||
logger.info('[' + migrate_name + '] redirection_host Table altered');
|
||||
});
|
||||
@@ -29,12 +29,13 @@ exports.up = function (knex/*, Promise*/) {
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex/*, Promise*/) {
|
||||
exports.down = function (knex /*, Promise */) {
|
||||
logger.info('[' + migrate_name + '] Migrating Down...');
|
||||
|
||||
return knex.schema.table('redirection_host', (table) => {
|
||||
table.dropColumn('forward_http_code');
|
||||
})
|
||||
return knex.schema
|
||||
.table('redirection_host', (table) => {
|
||||
table.dropColumn('forward_http_code');
|
||||
})
|
||||
.then(function () {
|
||||
logger.info('[' + migrate_name + '] redirection_host Table altered');
|
||||
});
|
||||
|
@@ -1,39 +1,41 @@
|
||||
const migrate_name = 'stream_domain';
|
||||
const logger = require('../logger').migrate;
|
||||
const logger = require('../logger').migrate;
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
*
|
||||
* @see http://knexjs.org/#Schema
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex/*, Promise*/) {
|
||||
* Migrate
|
||||
*
|
||||
* @see http://knexjs.org/#Schema
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex /*, Promise */) {
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
|
||||
return knex.schema.table('stream', (table) => {
|
||||
table.renameColumn('forward_ip', 'forwarding_host');
|
||||
})
|
||||
return knex.schema
|
||||
.table('stream', (table) => {
|
||||
table.renameColumn('forward_ip', 'forwarding_host');
|
||||
})
|
||||
.then(function () {
|
||||
logger.info('[' + migrate_name + '] stream Table altered');
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Undo Migrate
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex/*, Promise*/) {
|
||||
* Undo Migrate
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex /*, Promise */) {
|
||||
logger.info('[' + migrate_name + '] Migrating Down...');
|
||||
|
||||
return knex.schema.table('stream', (table) => {
|
||||
table.renameColumn('forwarding_host', 'forward_ip');
|
||||
})
|
||||
return knex.schema
|
||||
.table('stream', (table) => {
|
||||
table.renameColumn('forwarding_host', 'forward_ip');
|
||||
})
|
||||
.then(function () {
|
||||
logger.info('[' + migrate_name + '] stream Table altered');
|
||||
});
|
||||
|
@@ -1,5 +1,5 @@
|
||||
const migrate_name = 'stream_domain';
|
||||
const logger = require('../logger').migrate;
|
||||
const migrate_name = 'stream_domain';
|
||||
const logger = require('../logger').migrate;
|
||||
const internalNginx = require('../internal/nginx');
|
||||
|
||||
async function regenerateDefaultHost(knex) {
|
||||
@@ -9,7 +9,8 @@ async function regenerateDefaultHost(knex) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
return internalNginx.deleteConfig('default')
|
||||
return internalNginx
|
||||
.deleteConfig('default')
|
||||
.then(() => {
|
||||
return internalNginx.generateConfig('default', row);
|
||||
})
|
||||
@@ -22,14 +23,14 @@ async function regenerateDefaultHost(knex) {
|
||||
}
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
*
|
||||
* @see http://knexjs.org/#Schema
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
* Migrate
|
||||
*
|
||||
* @see http://knexjs.org/#Schema
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex) {
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
|
||||
@@ -37,14 +38,14 @@ exports.up = function (knex) {
|
||||
};
|
||||
|
||||
/**
|
||||
* Undo Migrate
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
* Undo Migrate
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex) {
|
||||
logger.info('[' + migrate_name + '] Migrating Down...');
|
||||
|
||||
return regenerateDefaultHost(knex);
|
||||
};
|
||||
};
|
||||
|
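Beyond the formatting change, this hunk shows the migration delegating to the nginx internals to rebuild the generated default-site config. A trimmed sketch of that regeneration step, assuming a settings row has already been looked up; the reload and catch steps are assumptions about the internal API, not something shown in the hunk:

const internalNginx = require('../internal/nginx');

// row: the default-site settings record fetched earlier in the migration
function regenerateDefault(row) {
	return internalNginx
		.deleteConfig('default')
		.then(() => {
			return internalNginx.generateConfig('default', row);
		})
		.then(() => {
			// assumed helper: ask nginx to pick up the regenerated file
			return internalNginx.reload();
		})
		.catch(() => {
			// a failed regeneration should not abort the schema migration
		});
}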
@@ -1,18 +1,18 @@
|
||||
// Objection Docs:
|
||||
// http://vincit.github.io/objection.js/
|
||||
|
||||
const db = require('../db');
|
||||
const Model = require('objection').Model;
|
||||
const User = require('./user');
|
||||
const AccessListAuth = require('./access_list_auth');
|
||||
const db = require('../db');
|
||||
const Model = require('objection').Model;
|
||||
const User = require('./user');
|
||||
const AccessListAuth = require('./access_list_auth');
|
||||
const AccessListClient = require('./access_list_client');
|
||||
const now = require('./now_helper');
|
||||
const now = require('./now_helper');
|
||||
|
||||
Model.knex(db);
|
||||
|
||||
class AccessList extends Model {
|
||||
$beforeInsert () {
|
||||
this.created_on = now();
|
||||
$beforeInsert() {
|
||||
this.created_on = now();
|
||||
this.modified_on = now();
|
||||
|
||||
// Default for meta
|
||||
@@ -21,64 +21,64 @@ class AccessList extends Model {
|
||||
}
|
||||
}
|
||||
|
||||
$beforeUpdate () {
|
||||
$beforeUpdate() {
|
||||
this.modified_on = now();
|
||||
}
|
||||
|
||||
static get name () {
|
||||
static get name() {
|
||||
return 'AccessList';
|
||||
}
|
||||
|
||||
static get tableName () {
|
||||
static get tableName() {
|
||||
return 'access_list';
|
||||
}
|
||||
|
||||
static get jsonAttributes () {
|
||||
static get jsonAttributes() {
|
||||
return ['meta'];
|
||||
}
|
||||
|
||||
static get relationMappings () {
|
||||
static get relationMappings() {
|
||||
const ProxyHost = require('./proxy_host');
|
||||
|
||||
return {
|
||||
owner: {
|
||||
relation: Model.HasOneRelation,
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: User,
|
||||
join: {
|
||||
join: {
|
||||
from: 'access_list.owner_user_id',
|
||||
to: 'user.id'
|
||||
to: 'user.id',
|
||||
},
|
||||
modify: function (qb) {
|
||||
qb.where('user.is_deleted', 0);
|
||||
}
|
||||
},
|
||||
},
|
||||
items: {
|
||||
relation: Model.HasManyRelation,
|
||||
relation: Model.HasManyRelation,
|
||||
modelClass: AccessListAuth,
|
||||
join: {
|
||||
join: {
|
||||
from: 'access_list.id',
|
||||
to: 'access_list_auth.access_list_id'
|
||||
}
|
||||
to: 'access_list_auth.access_list_id',
|
||||
},
|
||||
},
|
||||
clients: {
|
||||
relation: Model.HasManyRelation,
|
||||
relation: Model.HasManyRelation,
|
||||
modelClass: AccessListClient,
|
||||
join: {
|
||||
join: {
|
||||
from: 'access_list.id',
|
||||
to: 'access_list_client.access_list_id'
|
||||
}
|
||||
to: 'access_list_client.access_list_id',
|
||||
},
|
||||
},
|
||||
proxy_hosts: {
|
||||
relation: Model.HasManyRelation,
|
||||
relation: Model.HasManyRelation,
|
||||
modelClass: ProxyHost,
|
||||
join: {
|
||||
join: {
|
||||
from: 'access_list.id',
|
||||
to: 'proxy_host.access_list_id'
|
||||
to: 'proxy_host.access_list_id',
|
||||
},
|
||||
modify: function (qb) {
|
||||
qb.where('proxy_host.is_deleted', 0);
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
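The model reformatting above leaves the Objection.js relation setup intact: each relation names a relation type, a model class, and a join, with an optional modify callback to filter out soft-deleted rows. A condensed, hypothetical model showing the same shape (db binding and timestamp hooks omitted):

const Model = require('objection').Model;
const User = require('./user');

class ExampleHost extends Model {
	static get tableName() {
		return 'example_host'; // hypothetical table
	}

	static get relationMappings() {
		return {
			owner: {
				relation: Model.HasOneRelation,
				modelClass: User,
				join: {
					from: 'example_host.owner_user_id',
					to: 'user.id',
				},
				// only join rows whose owner has not been soft-deleted
				modify: (qb) => qb.where('user.is_deleted', 0),
			},
		};
	}
}

module.exports = ExampleHost;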
@@ -1,15 +1,15 @@
|
||||
// Objection Docs:
|
||||
// http://vincit.github.io/objection.js/
|
||||
|
||||
const db = require('../db');
|
||||
const db = require('../db');
|
||||
const Model = require('objection').Model;
|
||||
const now = require('./now_helper');
|
||||
const now = require('./now_helper');
|
||||
|
||||
Model.knex(db);
|
||||
|
||||
class AccessListAuth extends Model {
|
||||
$beforeInsert () {
|
||||
this.created_on = now();
|
||||
$beforeInsert() {
|
||||
this.created_on = now();
|
||||
this.modified_on = now();
|
||||
|
||||
// Default for meta
|
||||
@@ -18,35 +18,35 @@ class AccessListAuth extends Model {
|
||||
}
|
||||
}
|
||||
|
||||
$beforeUpdate () {
|
||||
$beforeUpdate() {
|
||||
this.modified_on = now();
|
||||
}
|
||||
|
||||
static get name () {
|
||||
static get name() {
|
||||
return 'AccessListAuth';
|
||||
}
|
||||
|
||||
static get tableName () {
|
||||
static get tableName() {
|
||||
return 'access_list_auth';
|
||||
}
|
||||
|
||||
static get jsonAttributes () {
|
||||
static get jsonAttributes() {
|
||||
return ['meta'];
|
||||
}
|
||||
|
||||
static get relationMappings () {
|
||||
static get relationMappings() {
|
||||
return {
|
||||
access_list: {
|
||||
relation: Model.HasOneRelation,
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: require('./access_list'),
|
||||
join: {
|
||||
join: {
|
||||
from: 'access_list_auth.access_list_id',
|
||||
to: 'access_list.id'
|
||||
to: 'access_list.id',
|
||||
},
|
||||
modify: function (qb) {
|
||||
qb.where('access_list.is_deleted', 0);
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
@@ -1,15 +1,15 @@
|
||||
// Objection Docs:
|
||||
// http://vincit.github.io/objection.js/
|
||||
|
||||
const db = require('../db');
|
||||
const db = require('../db');
|
||||
const Model = require('objection').Model;
|
||||
const now = require('./now_helper');
|
||||
const now = require('./now_helper');
|
||||
|
||||
Model.knex(db);
|
||||
|
||||
class AccessListClient extends Model {
|
||||
$beforeInsert () {
|
||||
this.created_on = now();
|
||||
$beforeInsert() {
|
||||
this.created_on = now();
|
||||
this.modified_on = now();
|
||||
|
||||
// Default for meta
|
||||
@@ -18,35 +18,35 @@ class AccessListClient extends Model {
|
||||
}
|
||||
}
|
||||
|
||||
$beforeUpdate () {
|
||||
$beforeUpdate() {
|
||||
this.modified_on = now();
|
||||
}
|
||||
|
||||
static get name () {
|
||||
static get name() {
|
||||
return 'AccessListClient';
|
||||
}
|
||||
|
||||
static get tableName () {
|
||||
static get tableName() {
|
||||
return 'access_list_client';
|
||||
}
|
||||
|
||||
static get jsonAttributes () {
|
||||
static get jsonAttributes() {
|
||||
return ['meta'];
|
||||
}
|
||||
|
||||
static get relationMappings () {
|
||||
static get relationMappings() {
|
||||
return {
|
||||
access_list: {
|
||||
relation: Model.HasOneRelation,
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: require('./access_list'),
|
||||
join: {
|
||||
join: {
|
||||
from: 'access_list_client.access_list_id',
|
||||
to: 'access_list.id'
|
||||
to: 'access_list.id',
|
||||
},
|
||||
modify: function (qb) {
|
||||
qb.where('access_list.is_deleted', 0);
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
@@ -1,16 +1,16 @@
|
||||
// Objection Docs:
|
||||
// http://vincit.github.io/objection.js/
|
||||
|
||||
const db = require('../db');
|
||||
const db = require('../db');
|
||||
const Model = require('objection').Model;
|
||||
const User = require('./user');
|
||||
const now = require('./now_helper');
|
||||
const User = require('./user');
|
||||
const now = require('./now_helper');
|
||||
|
||||
Model.knex(db);
|
||||
|
||||
class AuditLog extends Model {
|
||||
$beforeInsert () {
|
||||
this.created_on = now();
|
||||
$beforeInsert() {
|
||||
this.created_on = now();
|
||||
this.modified_on = now();
|
||||
|
||||
// Default for meta
|
||||
@@ -19,32 +19,32 @@ class AuditLog extends Model {
|
||||
}
|
||||
}
|
||||
|
||||
$beforeUpdate () {
|
||||
$beforeUpdate() {
|
||||
this.modified_on = now();
|
||||
}
|
||||
|
||||
static get name () {
|
||||
static get name() {
|
||||
return 'AuditLog';
|
||||
}
|
||||
|
||||
static get tableName () {
|
||||
static get tableName() {
|
||||
return 'audit_log';
|
||||
}
|
||||
|
||||
static get jsonAttributes () {
|
||||
static get jsonAttributes() {
|
||||
return ['meta'];
|
||||
}
|
||||
|
||||
static get relationMappings () {
|
||||
static get relationMappings() {
|
||||
return {
|
||||
user: {
|
||||
relation: Model.HasOneRelation,
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: User,
|
||||
join: {
|
||||
join: {
|
||||
from: 'audit_log.user_id',
|
||||
to: 'user.id'
|
||||
}
|
||||
}
|
||||
to: 'user.id',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
@@ -2,30 +2,29 @@
|
||||
// http://vincit.github.io/objection.js/
|
||||
|
||||
const bcrypt = require('bcrypt');
|
||||
const db = require('../db');
|
||||
const Model = require('objection').Model;
|
||||
const User = require('./user');
|
||||
const now = require('./now_helper');
|
||||
const db = require('../db');
|
||||
const Model = require('objection').Model;
|
||||
const User = require('./user');
|
||||
const now = require('./now_helper');
|
||||
|
||||
Model.knex(db);
|
||||
|
||||
function encryptPassword () {
|
||||
function encryptPassword() {
|
||||
/* jshint -W040 */
|
||||
let _this = this;
|
||||
const _this = this;
|
||||
|
||||
if (_this.type === 'password' && _this.secret) {
|
||||
return bcrypt.hash(_this.secret, 13)
|
||||
.then(function (hash) {
|
||||
_this.secret = hash;
|
||||
});
|
||||
return bcrypt.hash(_this.secret, 13).then(function (hash) {
|
||||
_this.secret = hash;
|
||||
});
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
class Auth extends Model {
|
||||
$beforeInsert (queryContext) {
|
||||
this.created_on = now();
|
||||
$beforeInsert(queryContext) {
|
||||
this.created_on = now();
|
||||
this.modified_on = now();
|
||||
|
||||
// Default for meta
|
||||
@@ -36,7 +35,7 @@ class Auth extends Model {
|
||||
return encryptPassword.apply(this, queryContext);
|
||||
}
|
||||
|
||||
$beforeUpdate (queryContext) {
|
||||
$beforeUpdate(queryContext) {
|
||||
this.modified_on = now();
|
||||
return encryptPassword.apply(this, queryContext);
|
||||
}
|
||||
@@ -47,35 +46,35 @@ class Auth extends Model {
|
||||
* @param {String} password
|
||||
* @returns {Promise}
|
||||
*/
|
||||
verifyPassword (password) {
|
||||
verifyPassword(password) {
|
||||
return bcrypt.compare(password, this.secret);
|
||||
}
|
||||
|
||||
static get name () {
|
||||
static get name() {
|
||||
return 'Auth';
|
||||
}
|
||||
|
||||
static get tableName () {
|
||||
static get tableName() {
|
||||
return 'auth';
|
||||
}
|
||||
|
||||
static get jsonAttributes () {
|
||||
static get jsonAttributes() {
|
||||
return ['meta'];
|
||||
}
|
||||
|
||||
static get relationMappings () {
|
||||
static get relationMappings() {
|
||||
return {
|
||||
user: {
|
||||
relation: Model.HasOneRelation,
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: User,
|
||||
join: {
|
||||
join: {
|
||||
from: 'auth.user_id',
|
||||
to: 'user.id'
|
||||
to: 'user.id',
|
||||
},
|
||||
filter: {
|
||||
is_deleted: 0
|
||||
}
|
||||
}
|
||||
is_deleted: 0,
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
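The auth model above hashes the secret in its insert/update hooks and checks it with bcrypt.compare. Outside the model, the same round-trip boils down to the two helpers below (cost factor 13 as in the hook above; the sample password is illustrative):

const bcrypt = require('bcrypt');

function hashSecret(plain) {
	return bcrypt.hash(plain, 13); // resolves to a salted hash string
}

function checkSecret(plain, storedHash) {
	return bcrypt.compare(plain, storedHash); // resolves to true/false
}

// hashSecret('changeme').then((hash) => checkSecret('changeme', hash)); // -> true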
@@ -1,16 +1,16 @@
|
||||
// Objection Docs:
|
||||
// http://vincit.github.io/objection.js/
|
||||
|
||||
const db = require('../db');
|
||||
const db = require('../db');
|
||||
const Model = require('objection').Model;
|
||||
const User = require('./user');
|
||||
const now = require('./now_helper');
|
||||
const User = require('./user');
|
||||
const now = require('./now_helper');
|
||||
|
||||
Model.knex(db);
|
||||
|
||||
class Certificate extends Model {
|
||||
$beforeInsert () {
|
||||
this.created_on = now();
|
||||
$beforeInsert() {
|
||||
this.created_on = now();
|
||||
this.modified_on = now();
|
||||
|
||||
// Default for expires_on
|
||||
@@ -29,35 +29,35 @@ class Certificate extends Model {
|
||||
}
|
||||
}
|
||||
|
||||
$beforeUpdate () {
|
||||
$beforeUpdate() {
|
||||
this.modified_on = now();
|
||||
}
|
||||
|
||||
static get name () {
|
||||
static get name() {
|
||||
return 'Certificate';
|
||||
}
|
||||
|
||||
static get tableName () {
|
||||
static get tableName() {
|
||||
return 'certificate';
|
||||
}
|
||||
|
||||
static get jsonAttributes () {
|
||||
static get jsonAttributes() {
|
||||
return ['domain_names', 'meta'];
|
||||
}
|
||||
|
||||
static get relationMappings () {
|
||||
static get relationMappings() {
|
||||
return {
|
||||
owner: {
|
||||
relation: Model.HasOneRelation,
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: User,
|
||||
join: {
|
||||
join: {
|
||||
from: 'certificate.owner_user_id',
|
||||
to: 'user.id'
|
||||
to: 'user.id',
|
||||
},
|
||||
modify: function (qb) {
|
||||
qb.where('user.is_deleted', 0);
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
@@ -1,17 +1,17 @@
|
||||
// Objection Docs:
|
||||
// http://vincit.github.io/objection.js/
|
||||
|
||||
const db = require('../db');
|
||||
const Model = require('objection').Model;
|
||||
const User = require('./user');
|
||||
const db = require('../db');
|
||||
const Model = require('objection').Model;
|
||||
const User = require('./user');
|
||||
const Certificate = require('./certificate');
|
||||
const now = require('./now_helper');
|
||||
const now = require('./now_helper');
|
||||
|
||||
Model.knex(db);
|
||||
|
||||
class DeadHost extends Model {
|
||||
$beforeInsert () {
|
||||
this.created_on = now();
|
||||
$beforeInsert() {
|
||||
this.created_on = now();
|
||||
this.modified_on = now();
|
||||
|
||||
// Default for domain_names
|
||||
@@ -25,46 +25,46 @@ class DeadHost extends Model {
|
||||
}
|
||||
}
|
||||
|
||||
$beforeUpdate () {
|
||||
$beforeUpdate() {
|
||||
this.modified_on = now();
|
||||
}
|
||||
|
||||
static get name () {
|
||||
static get name() {
|
||||
return 'DeadHost';
|
||||
}
|
||||
|
||||
static get tableName () {
|
||||
static get tableName() {
|
||||
return 'dead_host';
|
||||
}
|
||||
|
||||
static get jsonAttributes () {
|
||||
static get jsonAttributes() {
|
||||
return ['domain_names', 'meta'];
|
||||
}
|
||||
|
||||
static get relationMappings () {
|
||||
static get relationMappings() {
|
||||
return {
|
||||
owner: {
|
||||
relation: Model.HasOneRelation,
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: User,
|
||||
join: {
|
||||
join: {
|
||||
from: 'dead_host.owner_user_id',
|
||||
to: 'user.id'
|
||||
to: 'user.id',
|
||||
},
|
||||
modify: function (qb) {
|
||||
qb.where('user.is_deleted', 0);
|
||||
}
|
||||
},
|
||||
},
|
||||
certificate: {
|
||||
relation: Model.HasOneRelation,
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: Certificate,
|
||||
join: {
|
||||
join: {
|
||||
from: 'dead_host.certificate_id',
|
||||
to: 'certificate.id'
|
||||
to: 'certificate.id',
|
||||
},
|
||||
modify: function (qb) {
|
||||
qb.where('certificate.is_deleted', 0);
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
@@ -1,12 +1,11 @@
|
||||
const db = require('../db');
|
||||
const db = require('../db');
|
||||
const config = require('../lib/config');
|
||||
const Model = require('objection').Model;
|
||||
const Model = require('objection').Model;
|
||||
|
||||
Model.knex(db);
|
||||
|
||||
module.exports = function () {
|
||||
if (config.isSqlite()) {
|
||||
// eslint-disable-next-line
|
||||
return Model.raw("datetime('now','localtime')");
|
||||
}
|
||||
return Model.raw('NOW()');
|
||||
|
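The helper above returns a knex raw expression rather than a JavaScript Date, so timestamps are evaluated by the database itself: datetime('now','localtime') on SQLite, NOW() elsewhere. A model consumes it exactly as the surrounding models do; a minimal, hypothetical example:

const db = require('../db');
const Model = require('objection').Model;
const now = require('./now_helper');

Model.knex(db);

class ExampleRow extends Model {
	static get tableName() {
		return 'example_row'; // hypothetical table
	}

	$beforeInsert() {
		this.created_on = now(); // raw SQL expression, resolved by the DB
		this.modified_on = now();
	}

	$beforeUpdate() {
		this.modified_on = now();
	}
}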
@@ -1,18 +1,18 @@
|
||||
// Objection Docs:
|
||||
// http://vincit.github.io/objection.js/
|
||||
|
||||
const db = require('../db');
|
||||
const Model = require('objection').Model;
|
||||
const User = require('./user');
|
||||
const AccessList = require('./access_list');
|
||||
const db = require('../db');
|
||||
const Model = require('objection').Model;
|
||||
const User = require('./user');
|
||||
const AccessList = require('./access_list');
|
||||
const Certificate = require('./certificate');
|
||||
const now = require('./now_helper');
|
||||
const now = require('./now_helper');
|
||||
|
||||
Model.knex(db);
|
||||
|
||||
class ProxyHost extends Model {
|
||||
$beforeInsert () {
|
||||
this.created_on = now();
|
||||
$beforeInsert() {
|
||||
this.created_on = now();
|
||||
this.modified_on = now();
|
||||
|
||||
// Default for domain_names
|
||||
@@ -26,57 +26,57 @@ class ProxyHost extends Model {
|
||||
}
|
||||
}
|
||||
|
||||
$beforeUpdate () {
|
||||
$beforeUpdate() {
|
||||
this.modified_on = now();
|
||||
}
|
||||
|
||||
static get name () {
|
||||
static get name() {
|
||||
return 'ProxyHost';
|
||||
}
|
||||
|
||||
static get tableName () {
|
||||
static get tableName() {
|
||||
return 'proxy_host';
|
||||
}
|
||||
|
||||
static get jsonAttributes () {
|
||||
static get jsonAttributes() {
|
||||
return ['domain_names', 'meta', 'locations'];
|
||||
}
|
||||
|
||||
static get relationMappings () {
|
||||
static get relationMappings() {
|
||||
return {
|
||||
owner: {
|
||||
relation: Model.HasOneRelation,
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: User,
|
||||
join: {
|
||||
join: {
|
||||
from: 'proxy_host.owner_user_id',
|
||||
to: 'user.id'
|
||||
to: 'user.id',
|
||||
},
|
||||
modify: function (qb) {
|
||||
qb.where('user.is_deleted', 0);
|
||||
}
|
||||
},
|
||||
},
|
||||
access_list: {
|
||||
relation: Model.HasOneRelation,
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: AccessList,
|
||||
join: {
|
||||
join: {
|
||||
from: 'proxy_host.access_list_id',
|
||||
to: 'access_list.id'
|
||||
to: 'access_list.id',
|
||||
},
|
||||
modify: function (qb) {
|
||||
qb.where('access_list.is_deleted', 0);
|
||||
}
|
||||
},
|
||||
},
|
||||
certificate: {
|
||||
relation: Model.HasOneRelation,
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: Certificate,
|
||||
join: {
|
||||
join: {
|
||||
from: 'proxy_host.certificate_id',
|
||||
to: 'certificate.id'
|
||||
to: 'certificate.id',
|
||||
},
|
||||
modify: function (qb) {
|
||||
qb.where('certificate.is_deleted', 0);
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
@@ -1,18 +1,17 @@
|
||||
|
||||
// Objection Docs:
|
||||
// http://vincit.github.io/objection.js/
|
||||
|
||||
const db = require('../db');
|
||||
const Model = require('objection').Model;
|
||||
const User = require('./user');
|
||||
const db = require('../db');
|
||||
const Model = require('objection').Model;
|
||||
const User = require('./user');
|
||||
const Certificate = require('./certificate');
|
||||
const now = require('./now_helper');
|
||||
const now = require('./now_helper');
|
||||
|
||||
Model.knex(db);
|
||||
|
||||
class RedirectionHost extends Model {
|
||||
$beforeInsert () {
|
||||
this.created_on = now();
|
||||
$beforeInsert() {
|
||||
this.created_on = now();
|
||||
this.modified_on = now();
|
||||
|
||||
// Default for domain_names
|
||||
@@ -28,46 +27,46 @@ class RedirectionHost extends Model {
|
||||
this.domain_names.sort();
|
||||
}
|
||||
|
||||
$beforeUpdate () {
|
||||
$beforeUpdate() {
|
||||
this.modified_on = now();
|
||||
}
|
||||
|
||||
static get name () {
|
||||
static get name() {
|
||||
return 'RedirectionHost';
|
||||
}
|
||||
|
||||
static get tableName () {
|
||||
static get tableName() {
|
||||
return 'redirection_host';
|
||||
}
|
||||
|
||||
static get jsonAttributes () {
|
||||
static get jsonAttributes() {
|
||||
return ['domain_names', 'meta'];
|
||||
}
|
||||
|
||||
static get relationMappings () {
|
||||
static get relationMappings() {
|
||||
return {
|
||||
owner: {
|
||||
relation: Model.HasOneRelation,
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: User,
|
||||
join: {
|
||||
join: {
|
||||
from: 'redirection_host.owner_user_id',
|
||||
to: 'user.id'
|
||||
to: 'user.id',
|
||||
},
|
||||
modify: function (qb) {
|
||||
qb.where('user.is_deleted', 0);
|
||||
}
|
||||
},
|
||||
},
|
||||
certificate: {
|
||||
relation: Model.HasOneRelation,
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: Certificate,
|
||||
join: {
|
||||
join: {
|
||||
from: 'redirection_host.certificate_id',
|
||||
to: 'certificate.id'
|
||||
to: 'certificate.id',
|
||||
},
|
||||
modify: function (qb) {
|
||||
qb.where('certificate.is_deleted', 0);
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
@@ -1,28 +1,28 @@
|
||||
// Objection Docs:
|
||||
// http://vincit.github.io/objection.js/
|
||||
|
||||
const db = require('../db');
|
||||
const db = require('../db');
|
||||
const Model = require('objection').Model;
|
||||
|
||||
Model.knex(db);
|
||||
|
||||
class Setting extends Model {
|
||||
$beforeInsert () {
|
||||
$beforeInsert() {
|
||||
// Default for meta
|
||||
if (typeof this.meta === 'undefined') {
|
||||
this.meta = {};
|
||||
}
|
||||
}
|
||||
|
||||
static get name () {
|
||||
static get name() {
|
||||
return 'Setting';
|
||||
}
|
||||
|
||||
static get tableName () {
|
||||
static get tableName() {
|
||||
return 'setting';
|
||||
}
|
||||
|
||||
static get jsonAttributes () {
|
||||
static get jsonAttributes() {
|
||||
return ['meta'];
|
||||
}
|
||||
}
|
||||
|
@@ -1,16 +1,16 @@
|
||||
// Objection Docs:
|
||||
// http://vincit.github.io/objection.js/
|
||||
|
||||
const db = require('../db');
|
||||
const db = require('../db');
|
||||
const Model = require('objection').Model;
|
||||
const User = require('./user');
|
||||
const now = require('./now_helper');
|
||||
const User = require('./user');
|
||||
const now = require('./now_helper');
|
||||
|
||||
Model.knex(db);
|
||||
|
||||
class Stream extends Model {
|
||||
$beforeInsert () {
|
||||
this.created_on = now();
|
||||
$beforeInsert() {
|
||||
this.created_on = now();
|
||||
this.modified_on = now();
|
||||
|
||||
// Default for meta
|
||||
@@ -19,35 +19,35 @@ class Stream extends Model {
|
||||
}
|
||||
}
|
||||
|
||||
$beforeUpdate () {
|
||||
$beforeUpdate() {
|
||||
this.modified_on = now();
|
||||
}
|
||||
|
||||
static get name () {
|
||||
static get name() {
|
||||
return 'Stream';
|
||||
}
|
||||
|
||||
static get tableName () {
|
||||
static get tableName() {
|
||||
return 'stream';
|
||||
}
|
||||
|
||||
static get jsonAttributes () {
|
||||
static get jsonAttributes() {
|
||||
return ['meta'];
|
||||
}
|
||||
|
||||
static get relationMappings () {
|
||||
static get relationMappings() {
|
||||
return {
|
||||
owner: {
|
||||
relation: Model.HasOneRelation,
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: User,
|
||||
join: {
|
||||
join: {
|
||||
from: 'stream.owner_user_id',
|
||||
to: 'user.id'
|
||||
to: 'user.id',
|
||||
},
|
||||
modify: function (qb) {
|
||||
qb.where('user.is_deleted', 0);
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
@@ -3,16 +3,15 @@
|
||||
and then has abilities after that.
|
||||
*/
|
||||
|
||||
const _ = require('lodash');
|
||||
const jwt = require('jsonwebtoken');
|
||||
const _ = require('lodash');
|
||||
const jwt = require('jsonwebtoken');
|
||||
const crypto = require('crypto');
|
||||
const config = require('../lib/config');
|
||||
const error = require('../lib/error');
|
||||
const error = require('../lib/error');
|
||||
const logger = require('../logger').global;
|
||||
const ALGO = 'RS256';
|
||||
const ALGO = 'RS256';
|
||||
|
||||
module.exports = function () {
|
||||
|
||||
let token_data = {};
|
||||
|
||||
const self = {
|
||||
@@ -27,12 +26,10 @@ module.exports = function () {
|
||||
// sign with RSA SHA256
|
||||
const options = {
|
||||
algorithm: ALGO,
|
||||
expiresIn: payload.expiresIn || '1d'
|
||||
expiresIn: payload.expiresIn || '1d',
|
||||
};
|
||||
|
||||
payload.jti = crypto.randomBytes(12)
|
||||
.toString('base64')
|
||||
.substring(-8);
|
||||
payload.jti = crypto.randomBytes(12).toString('base64').substring(-8);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
jwt.sign(payload, config.getPrivateKey(), options, (err, token) => {
|
||||
@@ -41,8 +38,8 @@ module.exports = function () {
|
||||
} else {
|
||||
token_data = payload;
|
||||
resolve({
|
||||
token: token,
|
||||
payload: payload
|
||||
token,
|
||||
payload,
|
||||
});
|
||||
}
|
||||
});
|
||||
@@ -62,15 +59,13 @@ module.exports = function () {
|
||||
if (!token || token === null || token === 'null') {
|
||||
reject(new error.AuthError('Empty token'));
|
||||
} else {
|
||||
jwt.verify(token, config.getPublicKey(), {ignoreExpiration: false, algorithms: [ALGO]}, (err, result) => {
|
||||
jwt.verify(token, config.getPublicKey(), { ignoreExpiration: false, algorithms: [ALGO] }, (err, result) => {
|
||||
if (err) {
|
||||
|
||||
if (err.name === 'TokenExpiredError') {
|
||||
reject(new error.AuthError('Token has expired', err));
|
||||
} else {
|
||||
reject(err);
|
||||
}
|
||||
|
||||
} else {
|
||||
token_data = result;
|
||||
resolve(token_data);
|
||||
@@ -81,7 +76,6 @@ module.exports = function () {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
|
||||
},
|
||||
|
||||
/**
|
||||
@@ -125,7 +119,7 @@ module.exports = function () {
|
||||
}
|
||||
|
||||
return default_value || 0;
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
return self;
|
||||
|
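The token module above wraps jsonwebtoken's callback API in promises, signs with RS256, salts each token with a random jti, and verifies with the algorithm pinned. Reduced to two standalone helpers, with the key pair passed in explicitly (in the module the keys come from config.getPrivateKey()/getPublicKey()):

const jwt = require('jsonwebtoken');
const crypto = require('crypto');

const ALGO = 'RS256';

function signToken(payload, privateKeyPem) {
	// same jti construction as the module above
	payload.jti = crypto.randomBytes(12).toString('base64').substring(-8);

	return new Promise((resolve, reject) => {
		const options = { algorithm: ALGO, expiresIn: payload.expiresIn || '1d' };
		jwt.sign(payload, privateKeyPem, options, (err, token) => {
			return err ? reject(err) : resolve({ token, payload });
		});
	});
}

function verifyToken(token, publicKeyPem) {
	return new Promise((resolve, reject) => {
		jwt.verify(token, publicKeyPem, { ignoreExpiration: false, algorithms: [ALGO] }, (err, result) => {
			return err ? reject(err) : resolve(result);
		});
	});
}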
@@ -1,16 +1,16 @@
|
||||
// Objection Docs:
|
||||
// http://vincit.github.io/objection.js/
|
||||
|
||||
const db = require('../db');
|
||||
const Model = require('objection').Model;
|
||||
const db = require('../db');
|
||||
const Model = require('objection').Model;
|
||||
const UserPermission = require('./user_permission');
|
||||
const now = require('./now_helper');
|
||||
const now = require('./now_helper');
|
||||
|
||||
Model.knex(db);
|
||||
|
||||
class User extends Model {
|
||||
$beforeInsert () {
|
||||
this.created_on = now();
|
||||
$beforeInsert() {
|
||||
this.created_on = now();
|
||||
this.modified_on = now();
|
||||
|
||||
// Default for roles
|
||||
@@ -19,35 +19,34 @@ class User extends Model {
|
||||
}
|
||||
}
|
||||
|
||||
$beforeUpdate () {
|
||||
$beforeUpdate() {
|
||||
this.modified_on = now();
|
||||
}
|
||||
|
||||
static get name () {
|
||||
static get name() {
|
||||
return 'User';
|
||||
}
|
||||
|
||||
static get tableName () {
|
||||
static get tableName() {
|
||||
return 'user';
|
||||
}
|
||||
|
||||
static get jsonAttributes () {
|
||||
static get jsonAttributes() {
|
||||
return ['roles'];
|
||||
}
|
||||
|
||||
static get relationMappings () {
|
||||
static get relationMappings() {
|
||||
return {
|
||||
permissions: {
|
||||
relation: Model.HasOneRelation,
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: UserPermission,
|
||||
join: {
|
||||
join: {
|
||||
from: 'user.id',
|
||||
to: 'user_permission.user_id'
|
||||
}
|
||||
}
|
||||
to: 'user_permission.user_id',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
module.exports = User;
|
||||
|
@@ -1,27 +1,27 @@
|
||||
// Objection Docs:
|
||||
// http://vincit.github.io/objection.js/
|
||||
|
||||
const db = require('../db');
|
||||
const db = require('../db');
|
||||
const Model = require('objection').Model;
|
||||
const now = require('./now_helper');
|
||||
const now = require('./now_helper');
|
||||
|
||||
Model.knex(db);
|
||||
|
||||
class UserPermission extends Model {
|
||||
$beforeInsert () {
|
||||
this.created_on = now();
|
||||
$beforeInsert() {
|
||||
this.created_on = now();
|
||||
this.modified_on = now();
|
||||
}
|
||||
|
||||
$beforeUpdate () {
|
||||
$beforeUpdate() {
|
||||
this.modified_on = now();
|
||||
}
|
||||
|
||||
static get name () {
|
||||
static get name() {
|
||||
return 'UserPermission';
|
||||
}
|
||||
|
||||
static get tableName () {
|
||||
static get tableName() {
|
||||
return 'user_permission';
|
||||
}
|
||||
}
|
||||
|
@@ -4,19 +4,19 @@
|
||||
"description": "A beautiful interface for creating Nginx endpoints",
|
||||
"main": "index.js",
|
||||
"dependencies": {
|
||||
"@apidevtools/json-schema-ref-parser": "11.5.4",
|
||||
"@apidevtools/json-schema-ref-parser": "11.5.5",
|
||||
"ajv": "6.12.6",
|
||||
"archiver": "7.0.1",
|
||||
"batchflow": "0.4.0",
|
||||
"bcrypt": "5.1.1",
|
||||
"body-parser": "1.20.2",
|
||||
"compression": "1.7.4",
|
||||
"express": "4.19.1",
|
||||
"express": "4.19.2",
|
||||
"express-fileupload": "1.5.0",
|
||||
"gravatar": "1.8.2",
|
||||
"jsonwebtoken": "9.0.2",
|
||||
"knex": "3.1.0",
|
||||
"liquidjs": "10.10.2",
|
||||
"liquidjs": "10.11.0",
|
||||
"lodash": "4.17.21",
|
||||
"moment": "2.30.1",
|
||||
"mysql": "2.18.1",
|
||||
@@ -29,7 +29,11 @@
|
||||
"author": "Jamie Curnow <jc@jc21.com> and ZoeyVid <zoeyvid@zvcdn.de>",
|
||||
"license": "MIT",
|
||||
"devDependencies": {
|
||||
"eslint": "8.57.0",
|
||||
"eslint-plugin-align-assignments": "1.1.2"
|
||||
"@eslint/js": "9.0.0",
|
||||
"eslint": "9.0.0",
|
||||
"eslint-config-prettier": "9.1.0",
|
||||
"eslint-plugin-prettier": "5.1.3",
|
||||
"globals": "15.0.0",
|
||||
"prettier": "3.2.5"
|
||||
}
|
||||
}
|
||||
|
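The devDependency switch above moves the backend from ESLint 8 with the align-assignments plugin to ESLint 9 plus Prettier. ESLint 9 expects a flat config file; a plausible eslint.config.mjs wiring these exact packages together could look like the sketch below (an assumption about the setup, not a copy of the repository's actual config):

import js from '@eslint/js';
import globals from 'globals';
import prettierRecommended from 'eslint-plugin-prettier/recommended';

export default [
	js.configs.recommended,
	prettierRecommended, // enables the prettier/prettier rule and turns off conflicting ESLint rules
	{
		languageOptions: {
			sourceType: 'commonjs',
			globals: { ...globals.node },
		},
	},
];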
@@ -2,8 +2,8 @@
|
||||
|
||||
// based on: https://github.com/jlesage/docker-nginx-proxy-manager/blob/796734a3f9a87e0b1561b47fd418f82216359634/rootfs/opt/nginx-proxy-manager/bin/reset-password
|
||||
|
||||
const fs = require('fs');
|
||||
const bcrypt = require('bcrypt');
|
||||
const fs = require('fs');
|
||||
const bcrypt = require('bcrypt');
|
||||
const sqlite3 = require('sqlite3');
|
||||
|
||||
function usage() {
|
||||
@@ -53,7 +53,7 @@ if (fs.existsSync(process.env.DB_SQLITE_FILE)) {
|
||||
|
||||
console.log(`Password for user ${USER_EMAIL} has been reset.`);
|
||||
process.exit(0);
|
||||
}
|
||||
},
|
||||
);
|
||||
});
|
||||
}
|
||||
|
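The reset script above drives sqlite3 and bcrypt directly rather than going through the ORM. Stripped of argument parsing, the core update amounts to something like the following sketch; the SQL is an assumption based on the auth/user models shown earlier (including an email column on the user table) and is not part of this hunk:

const bcrypt = require('bcrypt');
const sqlite3 = require('sqlite3');

function resetPassword(dbFile, email, newPassword) {
	const db = new sqlite3.Database(dbFile);
	const hash = bcrypt.hashSync(newPassword, 13);

	db.run(
		"UPDATE auth SET secret = ? WHERE type = 'password' AND user_id = (SELECT id FROM user WHERE email = ?)",
		[hash, email],
		(err) => {
			if (err) {
				console.error(err.message);
				process.exit(1);
			}
			console.log(`Password for user ${email} has been reset.`);
			process.exit(0);
		},
	);
}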
@@ -1,12 +1,12 @@
|
||||
const express = require('express');
|
||||
const validator = require('../../lib/validator');
|
||||
const jwtdecode = require('../../lib/express/jwt-decode');
|
||||
const express = require('express');
|
||||
const validator = require('../../lib/validator');
|
||||
const jwtdecode = require('../../lib/express/jwt-decode');
|
||||
const internalAuditLog = require('../../internal/audit-log');
|
||||
|
||||
let router = express.Router({
|
||||
const router = express.Router({
|
||||
caseSensitive: true,
|
||||
strict: true,
|
||||
mergeParams: true
|
||||
strict: true,
|
||||
mergeParams: true,
|
||||
});
|
||||
|
||||
/**
|
||||
@@ -25,26 +25,28 @@ router
|
||||
* Retrieve all logs
|
||||
*/
|
||||
.get((req, res, next) => {
|
||||
validator({
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
expand: {
|
||||
$ref: 'definitions#/definitions/expand'
|
||||
validator(
|
||||
{
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
expand: {
|
||||
$ref: 'definitions#/definitions/expand',
|
||||
},
|
||||
query: {
|
||||
$ref: 'definitions#/definitions/query',
|
||||
},
|
||||
},
|
||||
query: {
|
||||
$ref: 'definitions#/definitions/query'
|
||||
}
|
||||
}
|
||||
}, {
|
||||
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
|
||||
query: (typeof req.query.query === 'string' ? req.query.query : null)
|
||||
})
|
||||
},
|
||||
{
|
||||
expand: typeof req.query.expand === 'string' ? req.query.expand.split(',') : null,
|
||||
query: typeof req.query.query === 'string' ? req.query.query : null,
|
||||
},
|
||||
)
|
||||
.then((data) => {
|
||||
return internalAuditLog.getAll(res.locals.access, data.expand, data.query);
|
||||
})
|
||||
.then((rows) => {
|
||||
res.status(200)
|
||||
.send(rows);
|
||||
res.status(200).send(rows);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
|
@@ -1,27 +1,27 @@
|
||||
const express = require('express');
|
||||
const pjson = require('../../package.json');
|
||||
const error = require('../../lib/error');
|
||||
const pjson = require('../../package.json');
|
||||
const error = require('../../lib/error');
|
||||
|
||||
let router = express.Router({
|
||||
const router = express.Router({
|
||||
caseSensitive: true,
|
||||
strict: true,
|
||||
mergeParams: true
|
||||
strict: true,
|
||||
mergeParams: true,
|
||||
});
|
||||
|
||||
/**
|
||||
* Health Check
|
||||
* GET /api
|
||||
*/
|
||||
router.get('/', (req, res/*, next*/) => {
|
||||
let version = pjson.version.split('-').shift().split('.');
|
||||
router.get('/', (req, res /*, next */) => {
|
||||
const version = pjson.version.split('-').shift().split('.');
|
||||
|
||||
res.status(200).send({
|
||||
status: 'OK',
|
||||
status: 'OK',
|
||||
version: {
|
||||
major: parseInt(version.shift(), 10),
|
||||
minor: parseInt(version.shift(), 10),
|
||||
revision: parseInt(version.shift(), 10)
|
||||
}
|
||||
major: parseInt(version.shift(), 10),
|
||||
minor: parseInt(version.shift(), 10),
|
||||
revision: parseInt(version.shift(), 10),
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
|
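For clarity, here is how the version parsing in the health endpoint above behaves for a hypothetical version string:

// '2.11.1-beta' -> strip the pre-release tag, then split into numeric parts
const version = '2.11.1-beta'.split('-').shift().split('.'); // ['2', '11', '1']
const parsed = {
	major: parseInt(version.shift(), 10), // 2
	minor: parseInt(version.shift(), 10), // 11
	revision: parseInt(version.shift(), 10), // 1
};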
@@ -1,13 +1,13 @@
|
||||
const express = require('express');
|
||||
const validator = require('../../../lib/validator');
|
||||
const jwtdecode = require('../../../lib/express/jwt-decode');
|
||||
const express = require('express');
|
||||
const validator = require('../../../lib/validator');
|
||||
const jwtdecode = require('../../../lib/express/jwt-decode');
|
||||
const internalAccessList = require('../../../internal/access-list');
|
||||
const apiValidator = require('../../../lib/validator/api');
|
||||
const apiValidator = require('../../../lib/validator/api');
|
||||
|
||||
let router = express.Router({
|
||||
const router = express.Router({
|
||||
caseSensitive: true,
|
||||
strict: true,
|
||||
mergeParams: true
|
||||
strict: true,
|
||||
mergeParams: true,
|
||||
});
|
||||
|
||||
/**
|
||||
@@ -26,26 +26,28 @@ router
|
||||
* Retrieve all access-lists
|
||||
*/
|
||||
.get((req, res, next) => {
|
||||
validator({
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
expand: {
|
||||
$ref: 'definitions#/definitions/expand'
|
||||
validator(
|
||||
{
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
expand: {
|
||||
$ref: 'definitions#/definitions/expand',
|
||||
},
|
||||
query: {
|
||||
$ref: 'definitions#/definitions/query',
|
||||
},
|
||||
},
|
||||
query: {
|
||||
$ref: 'definitions#/definitions/query'
|
||||
}
|
||||
}
|
||||
}, {
|
||||
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
|
||||
query: (typeof req.query.query === 'string' ? req.query.query : null)
|
||||
})
|
||||
},
|
||||
{
|
||||
expand: typeof req.query.expand === 'string' ? req.query.expand.split(',') : null,
|
||||
query: typeof req.query.query === 'string' ? req.query.query : null,
|
||||
},
|
||||
)
|
||||
.then((data) => {
|
||||
return internalAccessList.getAll(res.locals.access, data.expand, data.query);
|
||||
})
|
||||
.then((rows) => {
|
||||
res.status(200)
|
||||
.send(rows);
|
||||
res.status(200).send(rows);
|
||||
})
|
||||
.catch(next);
|
||||
})
|
||||
@@ -56,13 +58,12 @@ router
|
||||
* Create a new access-list
|
||||
*/
|
||||
.post((req, res, next) => {
|
||||
apiValidator({$ref: 'endpoints/access-lists#/links/1/schema'}, req.body)
|
||||
apiValidator({ $ref: 'endpoints/access-lists#/links/1/schema' }, req.body)
|
||||
.then((payload) => {
|
||||
return internalAccessList.create(res.locals.access, payload);
|
||||
})
|
||||
.then((result) => {
|
||||
res.status(201)
|
||||
.send(result);
|
||||
res.status(201).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
@@ -85,30 +86,32 @@ router
|
||||
* Retrieve a specific access-list
|
||||
*/
|
||||
.get((req, res, next) => {
|
||||
validator({
|
||||
required: ['list_id'],
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
list_id: {
|
||||
$ref: 'definitions#/definitions/id'
|
||||
validator(
|
||||
{
|
||||
required: ['list_id'],
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
list_id: {
|
||||
$ref: 'definitions#/definitions/id',
|
||||
},
|
||||
expand: {
|
||||
$ref: 'definitions#/definitions/expand',
|
||||
},
|
||||
},
|
||||
expand: {
|
||||
$ref: 'definitions#/definitions/expand'
|
||||
}
|
||||
}
|
||||
}, {
|
||||
list_id: req.params.list_id,
|
||||
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
|
||||
})
|
||||
},
|
||||
{
|
||||
list_id: req.params.list_id,
|
||||
expand: typeof req.query.expand === 'string' ? req.query.expand.split(',') : null,
|
||||
},
|
||||
)
|
||||
.then((data) => {
|
||||
return internalAccessList.get(res.locals.access, {
|
||||
id: parseInt(data.list_id, 10),
|
||||
expand: data.expand
|
||||
id: parseInt(data.list_id, 10),
|
||||
expand: data.expand,
|
||||
});
|
||||
})
|
||||
.then((row) => {
|
||||
res.status(200)
|
||||
.send(row);
|
||||
res.status(200).send(row);
|
||||
})
|
||||
.catch(next);
|
||||
})
|
||||
@@ -119,14 +122,13 @@ router
|
||||
* Update and existing access-list
|
||||
*/
|
||||
.put((req, res, next) => {
|
||||
apiValidator({$ref: 'endpoints/access-lists#/links/2/schema'}, req.body)
|
||||
apiValidator({ $ref: 'endpoints/access-lists#/links/2/schema' }, req.body)
|
||||
.then((payload) => {
|
||||
payload.id = parseInt(req.params.list_id, 10);
|
||||
return internalAccessList.update(res.locals.access, payload);
|
||||
})
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
})
|
||||
@@ -137,10 +139,10 @@ router
|
||||
* Delete and existing access-list
|
||||
*/
|
||||
.delete((req, res, next) => {
|
||||
internalAccessList.delete(res.locals.access, {id: parseInt(req.params.list_id, 10)})
|
||||
internalAccessList
|
||||
.delete(res.locals.access, { id: parseInt(req.params.list_id, 10) })
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
|
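Every resource router touched in this change follows the same shape: a strict, case-sensitive express.Router, jwtdecode() on the route chain, then validator/apiValidator before handing off to an internal service. A stripped-down skeleton of that shape, where the create handler is a stand-in rather than a real internal module:

const express = require('express');
const jwtdecode = require('../../../lib/express/jwt-decode');
const apiValidator = require('../../../lib/validator/api');

// stand-in for an internal service such as internalAccessList.create
const createResource = (access, payload) => Promise.resolve(payload);

const router = express.Router({
	caseSensitive: true,
	strict: true,
	mergeParams: true,
});

router
	.route('/')
	.options((req, res) => {
		res.sendStatus(204);
	})
	.all(jwtdecode())
	.post((req, res, next) => {
		apiValidator({ $ref: 'endpoints/access-lists#/links/1/schema' }, req.body)
			.then((payload) => createResource(res.locals.access, payload))
			.then((result) => res.status(201).send(result))
			.catch(next);
	});

module.exports = router;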
@@ -1,13 +1,13 @@
|
||||
const express = require('express');
|
||||
const validator = require('../../../lib/validator');
|
||||
const jwtdecode = require('../../../lib/express/jwt-decode');
|
||||
const express = require('express');
|
||||
const validator = require('../../../lib/validator');
|
||||
const jwtdecode = require('../../../lib/express/jwt-decode');
|
||||
const internalCertificate = require('../../../internal/certificate');
|
||||
const apiValidator = require('../../../lib/validator/api');
|
||||
const apiValidator = require('../../../lib/validator/api');
|
||||
|
||||
let router = express.Router({
|
||||
const router = express.Router({
|
||||
caseSensitive: true,
|
||||
strict: true,
|
||||
mergeParams: true
|
||||
strict: true,
|
||||
mergeParams: true,
|
||||
});
|
||||
|
||||
/**
|
||||
@@ -26,26 +26,28 @@ router
|
||||
* Retrieve all certificates
|
||||
*/
|
||||
.get((req, res, next) => {
|
||||
validator({
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
expand: {
|
||||
$ref: 'definitions#/definitions/expand'
|
||||
validator(
|
||||
{
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
expand: {
|
||||
$ref: 'definitions#/definitions/expand',
|
||||
},
|
||||
query: {
|
||||
$ref: 'definitions#/definitions/query',
|
||||
},
|
||||
},
|
||||
query: {
|
||||
$ref: 'definitions#/definitions/query'
|
||||
}
|
||||
}
|
||||
}, {
|
||||
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
|
||||
query: (typeof req.query.query === 'string' ? req.query.query : null)
|
||||
})
|
||||
},
|
||||
{
|
||||
expand: typeof req.query.expand === 'string' ? req.query.expand.split(',') : null,
|
||||
query: typeof req.query.query === 'string' ? req.query.query : null,
|
||||
},
|
||||
)
|
||||
.then((data) => {
|
||||
return internalCertificate.getAll(res.locals.access, data.expand, data.query);
|
||||
})
|
||||
.then((rows) => {
|
||||
res.status(200)
|
||||
.send(rows);
|
||||
res.status(200).send(rows);
|
||||
})
|
||||
.catch(next);
|
||||
})
|
||||
@@ -56,14 +58,13 @@ router
|
||||
* Create a new certificate
|
||||
*/
|
||||
.post((req, res, next) => {
|
||||
apiValidator({$ref: 'endpoints/certificates#/links/1/schema'}, req.body)
|
||||
apiValidator({ $ref: 'endpoints/certificates#/links/1/schema' }, req.body)
|
||||
.then((payload) => {
|
||||
req.setTimeout(900000); // 15 minutes timeout
|
||||
return internalCertificate.create(res.locals.access, payload);
|
||||
})
|
||||
.then((result) => {
|
||||
res.status(201)
|
||||
.send(result);
|
||||
res.status(201).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
@@ -80,16 +81,16 @@ router
|
||||
})
|
||||
.all(jwtdecode())
|
||||
|
||||
/**
|
||||
* GET /api/nginx/certificates/test-http
|
||||
*
|
||||
* Test HTTP challenge for domains
|
||||
*/
|
||||
/**
|
||||
* GET /api/nginx/certificates/test-http
|
||||
*
|
||||
* Test HTTP challenge for domains
|
||||
*/
|
||||
.get((req, res, next) => {
|
||||
internalCertificate.testHttpsChallenge(res.locals.access, JSON.parse(req.query.domains))
|
||||
internalCertificate
|
||||
.testHttpsChallenge(res.locals.access, JSON.parse(req.query.domains))
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
@@ -112,30 +113,32 @@ router
|
||||
* Retrieve a specific certificate
|
||||
*/
|
||||
.get((req, res, next) => {
|
||||
validator({
|
||||
required: ['certificate_id'],
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
certificate_id: {
|
||||
$ref: 'definitions#/definitions/id'
|
||||
validator(
|
||||
{
|
||||
required: ['certificate_id'],
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
certificate_id: {
|
||||
$ref: 'definitions#/definitions/id',
|
||||
},
|
||||
expand: {
|
||||
$ref: 'definitions#/definitions/expand',
|
||||
},
|
||||
},
|
||||
expand: {
|
||||
$ref: 'definitions#/definitions/expand'
|
||||
}
|
||||
}
|
||||
}, {
|
||||
certificate_id: req.params.certificate_id,
|
||||
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
|
||||
})
|
||||
},
|
||||
{
|
||||
certificate_id: req.params.certificate_id,
|
||||
expand: typeof req.query.expand === 'string' ? req.query.expand.split(',') : null,
|
||||
},
|
||||
)
|
||||
.then((data) => {
|
||||
return internalCertificate.get(res.locals.access, {
|
||||
id: parseInt(data.certificate_id, 10),
|
||||
expand: data.expand
|
||||
id: parseInt(data.certificate_id, 10),
|
||||
expand: data.expand,
|
||||
});
|
||||
})
|
||||
.then((row) => {
|
||||
res.status(200)
|
||||
.send(row);
|
||||
res.status(200).send(row);
|
||||
})
|
||||
.catch(next);
|
||||
})
|
||||
@@ -146,14 +149,13 @@ router
|
||||
* Update and existing certificate
|
||||
*/
|
||||
.put((req, res, next) => {
|
||||
apiValidator({$ref: 'endpoints/certificates#/links/2/schema'}, req.body)
|
||||
apiValidator({ $ref: 'endpoints/certificates#/links/2/schema' }, req.body)
|
||||
.then((payload) => {
|
||||
payload.id = parseInt(req.params.certificate_id, 10);
|
||||
return internalCertificate.update(res.locals.access, payload);
|
||||
})
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
})
|
||||
@@ -164,10 +166,10 @@ router
|
||||
* Update and existing certificate
|
||||
*/
|
||||
.delete((req, res, next) => {
|
||||
internalCertificate.delete(res.locals.access, {id: parseInt(req.params.certificate_id, 10)})
|
||||
internalCertificate
|
||||
.delete(res.locals.access, { id: parseInt(req.params.certificate_id, 10) })
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
@@ -191,16 +193,15 @@ router
|
||||
*/
|
||||
.post((req, res, next) => {
|
||||
if (!req.files) {
|
||||
res.status(400)
|
||||
.send({error: 'No files were uploaded'});
|
||||
res.status(400).send({ error: 'No files were uploaded' });
|
||||
} else {
|
||||
internalCertificate.upload(res.locals.access, {
|
||||
id: parseInt(req.params.certificate_id, 10),
|
||||
files: req.files
|
||||
})
|
||||
internalCertificate
|
||||
.upload(res.locals.access, {
|
||||
id: parseInt(req.params.certificate_id, 10),
|
||||
files: req.files,
|
||||
})
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
}
|
||||
@@ -225,12 +226,12 @@ router
|
||||
*/
|
||||
.post((req, res, next) => {
|
||||
req.setTimeout(900000); // 15 minutes timeout
|
||||
internalCertificate.renew(res.locals.access, {
|
||||
id: parseInt(req.params.certificate_id, 10)
|
||||
})
|
||||
internalCertificate
|
||||
.renew(res.locals.access, {
|
||||
id: parseInt(req.params.certificate_id, 10),
|
||||
})
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
@@ -253,12 +254,12 @@ router
|
||||
* Renew certificate
|
||||
*/
|
||||
.get((req, res, next) => {
|
||||
internalCertificate.download(res.locals.access, {
|
||||
id: parseInt(req.params.certificate_id, 10)
|
||||
})
|
||||
internalCertificate
|
||||
.download(res.locals.access, {
|
||||
id: parseInt(req.params.certificate_id, 10),
|
||||
})
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.download(result.fileName);
|
||||
res.status(200).download(result.fileName);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
@@ -282,15 +283,14 @@ router
|
||||
*/
|
||||
.post((req, res, next) => {
|
||||
if (!req.files) {
|
||||
res.status(400)
|
||||
.send({error: 'No files were uploaded'});
|
||||
res.status(400).send({ error: 'No files were uploaded' });
|
||||
} else {
|
||||
internalCertificate.validate({
|
||||
files: req.files
|
||||
})
|
||||
internalCertificate
|
||||
.validate({
|
||||
files: req.files,
|
||||
})
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
}
|
||||
|
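Two details of the certificate routes above are easy to miss in the reformatting: long-running ACME calls bump the request timeout to 15 minutes, and upload endpoints reject requests that carry no files. In isolation, with express-fileupload (already a backend dependency) providing req.files, that pattern looks like:

const express = require('express');
const fileUpload = require('express-fileupload');

const app = express();
app.use(fileUpload());

app.post('/upload-example', (req, res) => {
	req.setTimeout(900000); // 15 minutes, as in the renew/create handlers above

	if (!req.files) {
		res.status(400).send({ error: 'No files were uploaded' });
		return;
	}

	// the real handlers pass req.files on to internalCertificate; here we just echo the field names
	res.status(200).send({ received: Object.keys(req.files) });
});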
@@ -1,13 +1,13 @@
|
||||
const express = require('express');
|
||||
const validator = require('../../../lib/validator');
|
||||
const jwtdecode = require('../../../lib/express/jwt-decode');
|
||||
const express = require('express');
|
||||
const validator = require('../../../lib/validator');
|
||||
const jwtdecode = require('../../../lib/express/jwt-decode');
|
||||
const internalDeadHost = require('../../../internal/dead-host');
|
||||
const apiValidator = require('../../../lib/validator/api');
|
||||
const apiValidator = require('../../../lib/validator/api');
|
||||
|
||||
let router = express.Router({
|
||||
const router = express.Router({
|
||||
caseSensitive: true,
|
||||
strict: true,
|
||||
mergeParams: true
|
||||
strict: true,
|
||||
mergeParams: true,
|
||||
});
|
||||
|
||||
/**
|
||||
@@ -26,26 +26,28 @@ router
|
||||
* Retrieve all dead-hosts
|
||||
*/
|
||||
.get((req, res, next) => {
|
||||
validator({
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
expand: {
|
||||
$ref: 'definitions#/definitions/expand'
|
||||
validator(
|
||||
{
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
expand: {
|
||||
$ref: 'definitions#/definitions/expand',
|
||||
},
|
||||
query: {
|
||||
$ref: 'definitions#/definitions/query',
|
||||
},
|
||||
},
|
||||
query: {
|
||||
$ref: 'definitions#/definitions/query'
|
||||
}
|
||||
}
|
||||
}, {
|
||||
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
|
||||
query: (typeof req.query.query === 'string' ? req.query.query : null)
|
||||
})
|
||||
},
|
||||
{
|
||||
expand: typeof req.query.expand === 'string' ? req.query.expand.split(',') : null,
|
||||
query: typeof req.query.query === 'string' ? req.query.query : null,
|
||||
},
|
||||
)
|
||||
.then((data) => {
|
||||
return internalDeadHost.getAll(res.locals.access, data.expand, data.query);
|
||||
})
|
||||
.then((rows) => {
|
||||
res.status(200)
|
||||
.send(rows);
|
||||
res.status(200).send(rows);
|
||||
})
|
||||
.catch(next);
|
||||
})
|
||||
@@ -56,13 +58,12 @@ router
|
||||
* Create a new dead-host
*/
.post((req, res, next) => {
apiValidator({$ref: 'endpoints/dead-hosts#/links/1/schema'}, req.body)
apiValidator({ $ref: 'endpoints/dead-hosts#/links/1/schema' }, req.body)
.then((payload) => {
return internalDeadHost.create(res.locals.access, payload);
})
.then((result) => {
res.status(201)
.send(result);
res.status(201).send(result);
})
.catch(next);
});

@@ -85,30 +86,32 @@ router
* Retrieve a specific dead-host
*/
.get((req, res, next) => {
validator({
required: ['host_id'],
additionalProperties: false,
properties: {
host_id: {
$ref: 'definitions#/definitions/id'
validator(
{
required: ['host_id'],
additionalProperties: false,
properties: {
host_id: {
$ref: 'definitions#/definitions/id',
},
expand: {
$ref: 'definitions#/definitions/expand',
},
},
expand: {
$ref: 'definitions#/definitions/expand'
}
}
}, {
host_id: req.params.host_id,
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
})
},
{
host_id: req.params.host_id,
expand: typeof req.query.expand === 'string' ? req.query.expand.split(',') : null,
},
)
.then((data) => {
return internalDeadHost.get(res.locals.access, {
id: parseInt(data.host_id, 10),
expand: data.expand
id: parseInt(data.host_id, 10),
expand: data.expand,
});
})
.then((row) => {
res.status(200)
.send(row);
res.status(200).send(row);
})
.catch(next);
})

@@ -119,14 +122,13 @@ router
* Update and existing dead-host
*/
.put((req, res, next) => {
apiValidator({$ref: 'endpoints/dead-hosts#/links/2/schema'}, req.body)
apiValidator({ $ref: 'endpoints/dead-hosts#/links/2/schema' }, req.body)
.then((payload) => {
payload.id = parseInt(req.params.host_id, 10);
return internalDeadHost.update(res.locals.access, payload);
})
.then((result) => {
res.status(200)
.send(result);
res.status(200).send(result);
})
.catch(next);
})

@@ -137,10 +139,10 @@ router
* Update and existing dead-host
*/
.delete((req, res, next) => {
internalDeadHost.delete(res.locals.access, {id: parseInt(req.params.host_id, 10)})
internalDeadHost
.delete(res.locals.access, { id: parseInt(req.params.host_id, 10) })
.then((result) => {
res.status(200)
.send(result);
res.status(200).send(result);
})
.catch(next);
});

@@ -161,10 +163,10 @@ router
* POST /api/nginx/dead-hosts/123/enable
*/
.post((req, res, next) => {
internalDeadHost.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
internalDeadHost
.enable(res.locals.access, { id: parseInt(req.params.host_id, 10) })
.then((result) => {
res.status(200)
.send(result);
res.status(200).send(result);
})
.catch(next);
});

@@ -185,10 +187,10 @@ router
* POST /api/nginx/dead-hosts/123/disable
*/
.post((req, res, next) => {
internalDeadHost.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
internalDeadHost
.disable(res.locals.access, { id: parseInt(req.params.host_id, 10) })
.then((result) => {
res.status(200)
.send(result);
res.status(200).send(result);
})
.catch(next);
});
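The enable and disable handlers above only need the host id from the URL plus a valid JWT, so they are easy to exercise from a script. A minimal sketch using Node's built-in fetch (Node 18+); the base URL, the bearer token and the host id 123 are placeholders, and the /api/nginx/dead-hosts path is taken from the route comments above.

// Minimal sketch: toggle a dead-host via the endpoints shown above.
// BASE_URL and TOKEN are assumptions for illustration; 123 matches the example id in the comments.
const BASE_URL = 'http://localhost:81';
const TOKEN = process.env.NPM_TOKEN;

async function setDeadHostEnabled(id, enabled) {
	const action = enabled ? 'enable' : 'disable';
	const res = await fetch(`${BASE_URL}/api/nginx/dead-hosts/${id}/${action}`, {
		method: 'POST',
		headers: { Authorization: `Bearer ${TOKEN}` },
	});
	if (!res.ok) {
		throw new Error(`Request failed: ${res.status}`);
	}
	return res.json();
}

setDeadHostEnabled(123, true).then(console.log).catch(console.error);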
@@ -1,13 +1,13 @@
const express = require('express');
const validator = require('../../../lib/validator');
const jwtdecode = require('../../../lib/express/jwt-decode');
const express = require('express');
const validator = require('../../../lib/validator');
const jwtdecode = require('../../../lib/express/jwt-decode');
const internalProxyHost = require('../../../internal/proxy-host');
const apiValidator = require('../../../lib/validator/api');
const apiValidator = require('../../../lib/validator/api');

let router = express.Router({
const router = express.Router({
caseSensitive: true,
strict: true,
mergeParams: true
strict: true,
mergeParams: true,
});

/**

@@ -26,26 +26,28 @@ router
* Retrieve all proxy-hosts
*/
.get((req, res, next) => {
validator({
additionalProperties: false,
properties: {
expand: {
$ref: 'definitions#/definitions/expand'
validator(
{
additionalProperties: false,
properties: {
expand: {
$ref: 'definitions#/definitions/expand',
},
query: {
$ref: 'definitions#/definitions/query',
},
},
query: {
$ref: 'definitions#/definitions/query'
}
}
}, {
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
query: (typeof req.query.query === 'string' ? req.query.query : null)
})
},
{
expand: typeof req.query.expand === 'string' ? req.query.expand.split(',') : null,
query: typeof req.query.query === 'string' ? req.query.query : null,
},
)
.then((data) => {
return internalProxyHost.getAll(res.locals.access, data.expand, data.query);
})
.then((rows) => {
res.status(200)
.send(rows);
res.status(200).send(rows);
})
.catch(next);
})

@@ -56,13 +58,12 @@ router
* Create a new proxy-host
*/
.post((req, res, next) => {
apiValidator({$ref: 'endpoints/proxy-hosts#/links/1/schema'}, req.body)
apiValidator({ $ref: 'endpoints/proxy-hosts#/links/1/schema' }, req.body)
.then((payload) => {
return internalProxyHost.create(res.locals.access, payload);
})
.then((result) => {
res.status(201)
.send(result);
res.status(201).send(result);
})
.catch(next);
});

@@ -85,30 +86,32 @@ router
* Retrieve a specific proxy-host
*/
.get((req, res, next) => {
validator({
required: ['host_id'],
additionalProperties: false,
properties: {
host_id: {
$ref: 'definitions#/definitions/id'
validator(
{
required: ['host_id'],
additionalProperties: false,
properties: {
host_id: {
$ref: 'definitions#/definitions/id',
},
expand: {
$ref: 'definitions#/definitions/expand',
},
},
expand: {
$ref: 'definitions#/definitions/expand'
}
}
}, {
host_id: req.params.host_id,
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
})
},
{
host_id: req.params.host_id,
expand: typeof req.query.expand === 'string' ? req.query.expand.split(',') : null,
},
)
.then((data) => {
return internalProxyHost.get(res.locals.access, {
id: parseInt(data.host_id, 10),
expand: data.expand
id: parseInt(data.host_id, 10),
expand: data.expand,
});
})
.then((row) => {
res.status(200)
.send(row);
res.status(200).send(row);
})
.catch(next);
})

@@ -119,14 +122,13 @@ router
* Update and existing proxy-host
*/
.put((req, res, next) => {
apiValidator({$ref: 'endpoints/proxy-hosts#/links/2/schema'}, req.body)
apiValidator({ $ref: 'endpoints/proxy-hosts#/links/2/schema' }, req.body)
.then((payload) => {
payload.id = parseInt(req.params.host_id, 10);
return internalProxyHost.update(res.locals.access, payload);
})
.then((result) => {
res.status(200)
.send(result);
res.status(200).send(result);
})
.catch(next);
})

@@ -137,10 +139,10 @@ router
* Update and existing proxy-host
*/
.delete((req, res, next) => {
internalProxyHost.delete(res.locals.access, {id: parseInt(req.params.host_id, 10)})
internalProxyHost
.delete(res.locals.access, { id: parseInt(req.params.host_id, 10) })
.then((result) => {
res.status(200)
.send(result);
res.status(200).send(result);
})
.catch(next);
});

@@ -161,10 +163,10 @@ router
* POST /api/nginx/proxy-hosts/123/enable
*/
.post((req, res, next) => {
internalProxyHost.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
internalProxyHost
.enable(res.locals.access, { id: parseInt(req.params.host_id, 10) })
.then((result) => {
res.status(200)
.send(result);
res.status(200).send(result);
})
.catch(next);
});

@@ -185,10 +187,10 @@ router
* POST /api/nginx/proxy-hosts/123/disable
*/
.post((req, res, next) => {
internalProxyHost.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
internalProxyHost
.disable(res.locals.access, { id: parseInt(req.params.host_id, 10) })
.then((result) => {
res.status(200)
.send(result);
res.status(200).send(result);
})
.catch(next);
});
@@ -1,13 +1,13 @@
const express = require('express');
const validator = require('../../../lib/validator');
const jwtdecode = require('../../../lib/express/jwt-decode');
const express = require('express');
const validator = require('../../../lib/validator');
const jwtdecode = require('../../../lib/express/jwt-decode');
const internalRedirectionHost = require('../../../internal/redirection-host');
const apiValidator = require('../../../lib/validator/api');
const apiValidator = require('../../../lib/validator/api');

let router = express.Router({
const router = express.Router({
caseSensitive: true,
strict: true,
mergeParams: true
strict: true,
mergeParams: true,
});

/**

@@ -26,26 +26,28 @@ router
* Retrieve all redirection-hosts
*/
.get((req, res, next) => {
validator({
additionalProperties: false,
properties: {
expand: {
$ref: 'definitions#/definitions/expand'
validator(
{
additionalProperties: false,
properties: {
expand: {
$ref: 'definitions#/definitions/expand',
},
query: {
$ref: 'definitions#/definitions/query',
},
},
query: {
$ref: 'definitions#/definitions/query'
}
}
}, {
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
query: (typeof req.query.query === 'string' ? req.query.query : null)
})
},
{
expand: typeof req.query.expand === 'string' ? req.query.expand.split(',') : null,
query: typeof req.query.query === 'string' ? req.query.query : null,
},
)
.then((data) => {
return internalRedirectionHost.getAll(res.locals.access, data.expand, data.query);
})
.then((rows) => {
res.status(200)
.send(rows);
res.status(200).send(rows);
})
.catch(next);
})

@@ -56,13 +58,12 @@ router
* Create a new redirection-host
*/
.post((req, res, next) => {
apiValidator({$ref: 'endpoints/redirection-hosts#/links/1/schema'}, req.body)
apiValidator({ $ref: 'endpoints/redirection-hosts#/links/1/schema' }, req.body)
.then((payload) => {
return internalRedirectionHost.create(res.locals.access, payload);
})
.then((result) => {
res.status(201)
.send(result);
res.status(201).send(result);
})
.catch(next);
});

@@ -85,30 +86,32 @@ router
* Retrieve a specific redirection-host
*/
.get((req, res, next) => {
validator({
required: ['host_id'],
additionalProperties: false,
properties: {
host_id: {
$ref: 'definitions#/definitions/id'
validator(
{
required: ['host_id'],
additionalProperties: false,
properties: {
host_id: {
$ref: 'definitions#/definitions/id',
},
expand: {
$ref: 'definitions#/definitions/expand',
},
},
expand: {
$ref: 'definitions#/definitions/expand'
}
}
}, {
host_id: req.params.host_id,
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
})
},
{
host_id: req.params.host_id,
expand: typeof req.query.expand === 'string' ? req.query.expand.split(',') : null,
},
)
.then((data) => {
return internalRedirectionHost.get(res.locals.access, {
id: parseInt(data.host_id, 10),
expand: data.expand
id: parseInt(data.host_id, 10),
expand: data.expand,
});
})
.then((row) => {
res.status(200)
.send(row);
res.status(200).send(row);
})
.catch(next);
})

@@ -119,14 +122,13 @@ router
* Update and existing redirection-host
*/
.put((req, res, next) => {
apiValidator({$ref: 'endpoints/redirection-hosts#/links/2/schema'}, req.body)
apiValidator({ $ref: 'endpoints/redirection-hosts#/links/2/schema' }, req.body)
.then((payload) => {
payload.id = parseInt(req.params.host_id, 10);
return internalRedirectionHost.update(res.locals.access, payload);
})
.then((result) => {
res.status(200)
.send(result);
res.status(200).send(result);
})
.catch(next);
})

@@ -137,10 +139,10 @@ router
* Update and existing redirection-host
*/
.delete((req, res, next) => {
internalRedirectionHost.delete(res.locals.access, {id: parseInt(req.params.host_id, 10)})
internalRedirectionHost
.delete(res.locals.access, { id: parseInt(req.params.host_id, 10) })
.then((result) => {
res.status(200)
.send(result);
res.status(200).send(result);
})
.catch(next);
});

@@ -161,10 +163,10 @@ router
* POST /api/nginx/redirection-hosts/123/enable
*/
.post((req, res, next) => {
internalRedirectionHost.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
internalRedirectionHost
.enable(res.locals.access, { id: parseInt(req.params.host_id, 10) })
.then((result) => {
res.status(200)
.send(result);
res.status(200).send(result);
})
.catch(next);
});

@@ -185,10 +187,10 @@ router
* POST /api/nginx/redirection-hosts/123/disable
*/
.post((req, res, next) => {
internalRedirectionHost.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
internalRedirectionHost
.disable(res.locals.access, { id: parseInt(req.params.host_id, 10) })
.then((result) => {
res.status(200)
.send(result);
res.status(200).send(result);
})
.catch(next);
});
@@ -1,13 +1,13 @@
const express = require('express');
const validator = require('../../../lib/validator');
const jwtdecode = require('../../../lib/express/jwt-decode');
const express = require('express');
const validator = require('../../../lib/validator');
const jwtdecode = require('../../../lib/express/jwt-decode');
const internalStream = require('../../../internal/stream');
const apiValidator = require('../../../lib/validator/api');
const apiValidator = require('../../../lib/validator/api');

let router = express.Router({
const router = express.Router({
caseSensitive: true,
strict: true,
mergeParams: true
strict: true,
mergeParams: true,
});

/**

@@ -26,26 +26,28 @@ router
* Retrieve all streams
*/
.get((req, res, next) => {
validator({
additionalProperties: false,
properties: {
expand: {
$ref: 'definitions#/definitions/expand'
validator(
{
additionalProperties: false,
properties: {
expand: {
$ref: 'definitions#/definitions/expand',
},
query: {
$ref: 'definitions#/definitions/query',
},
},
query: {
$ref: 'definitions#/definitions/query'
}
}
}, {
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
query: (typeof req.query.query === 'string' ? req.query.query : null)
})
},
{
expand: typeof req.query.expand === 'string' ? req.query.expand.split(',') : null,
query: typeof req.query.query === 'string' ? req.query.query : null,
},
)
.then((data) => {
return internalStream.getAll(res.locals.access, data.expand, data.query);
})
.then((rows) => {
res.status(200)
.send(rows);
res.status(200).send(rows);
})
.catch(next);
})

@@ -56,13 +58,12 @@ router
* Create a new stream
*/
.post((req, res, next) => {
apiValidator({$ref: 'endpoints/streams#/links/1/schema'}, req.body)
apiValidator({ $ref: 'endpoints/streams#/links/1/schema' }, req.body)
.then((payload) => {
return internalStream.create(res.locals.access, payload);
})
.then((result) => {
res.status(201)
.send(result);
res.status(201).send(result);
})
.catch(next);
});

@@ -85,30 +86,32 @@ router
* Retrieve a specific stream
*/
.get((req, res, next) => {
validator({
required: ['stream_id'],
additionalProperties: false,
properties: {
stream_id: {
$ref: 'definitions#/definitions/id'
validator(
{
required: ['stream_id'],
additionalProperties: false,
properties: {
stream_id: {
$ref: 'definitions#/definitions/id',
},
expand: {
$ref: 'definitions#/definitions/expand',
},
},
expand: {
$ref: 'definitions#/definitions/expand'
}
}
}, {
stream_id: req.params.stream_id,
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
})
},
{
stream_id: req.params.stream_id,
expand: typeof req.query.expand === 'string' ? req.query.expand.split(',') : null,
},
)
.then((data) => {
return internalStream.get(res.locals.access, {
id: parseInt(data.stream_id, 10),
expand: data.expand
id: parseInt(data.stream_id, 10),
expand: data.expand,
});
})
.then((row) => {
res.status(200)
.send(row);
res.status(200).send(row);
})
.catch(next);
})

@@ -119,14 +122,13 @@ router
* Update and existing stream
*/
.put((req, res, next) => {
apiValidator({$ref: 'endpoints/streams#/links/2/schema'}, req.body)
apiValidator({ $ref: 'endpoints/streams#/links/2/schema' }, req.body)
.then((payload) => {
payload.id = parseInt(req.params.stream_id, 10);
return internalStream.update(res.locals.access, payload);
})
.then((result) => {
res.status(200)
.send(result);
res.status(200).send(result);
})
.catch(next);
})

@@ -137,10 +139,10 @@ router
* Update and existing stream
*/
.delete((req, res, next) => {
internalStream.delete(res.locals.access, {id: parseInt(req.params.stream_id, 10)})
internalStream
.delete(res.locals.access, { id: parseInt(req.params.stream_id, 10) })
.then((result) => {
res.status(200)
.send(result);
res.status(200).send(result);
})
.catch(next);
});

@@ -161,10 +163,10 @@ router
* POST /api/nginx/streams/123/enable
*/
.post((req, res, next) => {
internalStream.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
internalStream
.enable(res.locals.access, { id: parseInt(req.params.host_id, 10) })
.then((result) => {
res.status(200)
.send(result);
res.status(200).send(result);
})
.catch(next);
});

@@ -185,10 +187,10 @@ router
* POST /api/nginx/streams/123/disable
*/
.post((req, res, next) => {
internalStream.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
internalStream
.disable(res.locals.access, { id: parseInt(req.params.host_id, 10) })
.then((result) => {
res.status(200)
.send(result);
res.status(200).send(result);
})
.catch(next);
});
@@ -1,11 +1,11 @@
const express = require('express');
const jwtdecode = require('../../lib/express/jwt-decode');
const express = require('express');
const jwtdecode = require('../../lib/express/jwt-decode');
const internalReport = require('../../internal/report');

let router = express.Router({
const router = express.Router({
caseSensitive: true,
strict: true,
mergeParams: true
strict: true,
mergeParams: true,
});

router

@@ -18,10 +18,10 @@ router
* GET /reports/hosts
*/
.get(jwtdecode(), (req, res, next) => {
internalReport.getHostsReport(res.locals.access)
internalReport
.getHostsReport(res.locals.access)
.then((data) => {
res.status(200)
.send(data);
res.status(200).send(data);
})
.catch(next);
});
@@ -1,11 +1,11 @@
const express = require('express');
const express = require('express');
const swaggerJSON = require('../../doc/api.swagger.json');
const PACKAGE = require('../../package.json');
const PACKAGE = require('../../package.json');

let router = express.Router({
const router = express.Router({
caseSensitive: true,
strict: true,
mergeParams: true
strict: true,
mergeParams: true,
});

router

@@ -17,7 +17,7 @@ router
/**
* GET /schema
*/
.get((req, res/*, next*/) => {
.get((req, res /*, next */) => {
let proto = req.protocol;
if (typeof req.headers['x-forwarded-proto'] !== 'undefined' && req.headers['x-forwarded-proto']) {
proto = req.headers['x-forwarded-proto'];

@@ -28,7 +28,7 @@ router
origin = req.headers.origin;
}

swaggerJSON.info.version = PACKAGE.version;
swaggerJSON.info.version = PACKAGE.version;
swaggerJSON.servers[0].url = origin + '/api';
res.status(200).send(swaggerJSON);
});
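For reference, the origin handling in the /schema handler above amounts to preferring the X-Forwarded-Proto header over req.protocol before building servers[0].url. A small standalone sketch of that derivation; the fallback origin built from proto and the Host header is an assumption, since the lines between the two hunks are not part of this diff.

// Sketch of the proto/origin derivation used by the /schema endpoint above.
// Only the X-Forwarded-Proto override and the final servers[0].url assignment
// appear in the diff; the proto + Host fallback here is an assumption.
function deriveApiBase(req) {
	let proto = req.protocol;
	if (typeof req.headers['x-forwarded-proto'] !== 'undefined' && req.headers['x-forwarded-proto']) {
		proto = req.headers['x-forwarded-proto'];
	}
	let origin = proto + '://' + req.headers.host;
	if (typeof req.headers.origin !== 'undefined' && req.headers.origin) {
		origin = req.headers.origin;
	}
	return origin + '/api';
}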
@@ -1,13 +1,13 @@
const express = require('express');
const validator = require('../../lib/validator');
const jwtdecode = require('../../lib/express/jwt-decode');
const express = require('express');
const validator = require('../../lib/validator');
const jwtdecode = require('../../lib/express/jwt-decode');
const internalSetting = require('../../internal/setting');
const apiValidator = require('../../lib/validator/api');
const apiValidator = require('../../lib/validator/api');

let router = express.Router({
const router = express.Router({
caseSensitive: true,
strict: true,
mergeParams: true
strict: true,
mergeParams: true,
});

/**

@@ -26,10 +26,10 @@ router
* Retrieve all settings
*/
.get((req, res, next) => {
internalSetting.getAll(res.locals.access)
internalSetting
.getAll(res.locals.access)
.then((rows) => {
res.status(200)
.send(rows);
res.status(200).send(rows);
})
.catch(next);
});

@@ -52,25 +52,27 @@ router
* Retrieve a specific setting
*/
.get((req, res, next) => {
validator({
required: ['setting_id'],
additionalProperties: false,
properties: {
setting_id: {
$ref: 'definitions#/definitions/setting_id'
}
}
}, {
setting_id: req.params.setting_id
})
validator(
{
required: ['setting_id'],
additionalProperties: false,
properties: {
setting_id: {
$ref: 'definitions#/definitions/setting_id',
},
},
},
{
setting_id: req.params.setting_id,
},
)
.then((data) => {
return internalSetting.get(res.locals.access, {
id: data.setting_id
id: data.setting_id,
});
})
.then((row) => {
res.status(200)
.send(row);
res.status(200).send(row);
})
.catch(next);
})

@@ -81,14 +83,13 @@ router
* Update and existing setting
*/
.put((req, res, next) => {
apiValidator({$ref: 'endpoints/settings#/links/1/schema'}, req.body)
apiValidator({ $ref: 'endpoints/settings#/links/1/schema' }, req.body)
.then((payload) => {
payload.id = req.params.setting_id;
return internalSetting.update(res.locals.access, payload);
})
.then((result) => {
res.status(200)
.send(result);
res.status(200).send(result);
})
.catch(next);
});
@@ -1,12 +1,12 @@
const express = require('express');
const jwtdecode = require('../../lib/express/jwt-decode');
const express = require('express');
const jwtdecode = require('../../lib/express/jwt-decode');
const internalToken = require('../../internal/token');
const apiValidator = require('../../lib/validator/api');
const apiValidator = require('../../lib/validator/api');

let router = express.Router({
const router = express.Router({
caseSensitive: true,
strict: true,
mergeParams: true
strict: true,
mergeParams: true,
});

router

@@ -23,13 +23,13 @@ router
* for services like Job board and Worker.
*/
.get(jwtdecode(), (req, res, next) => {
internalToken.getFreshToken(res.locals.access, {
expiry: (typeof req.query.expiry !== 'undefined' ? req.query.expiry : null),
scope: (typeof req.query.scope !== 'undefined' ? req.query.scope : null)
})
internalToken
.getFreshToken(res.locals.access, {
expiry: typeof req.query.expiry !== 'undefined' ? req.query.expiry : null,
scope: typeof req.query.scope !== 'undefined' ? req.query.scope : null,
})
.then((data) => {
res.status(200)
.send(data);
res.status(200).send(data);
})
.catch(next);
})

@@ -40,13 +40,12 @@ router
* Create a new Token
*/
.post((req, res, next) => {
apiValidator({$ref: 'endpoints/tokens#/links/0/schema'}, req.body)
apiValidator({ $ref: 'endpoints/tokens#/links/0/schema' }, req.body)
.then((payload) => {
return internalToken.getTokenFromEmail(payload);
})
.then((data) => {
res.status(200)
.send(data);
res.status(200).send(data);
})
.catch(next);
});
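The token routes above take a credential payload on the POST and an optional expiry and scope on the refresh GET. A rough usage sketch with Node's fetch; the /api/tokens mount point and the identity/secret field names are assumptions, since neither the main router nor the tokens schema is part of this diff.

// Rough sketch only: mount path and body field names are assumptions.
const BASE_URL = 'http://localhost:81';

// Obtain a token from credentials (handled by internalToken.getTokenFromEmail above).
async function login(identity, secret) {
	const res = await fetch(`${BASE_URL}/api/tokens`, {
		method: 'POST',
		headers: { 'Content-Type': 'application/json' },
		body: JSON.stringify({ identity, secret }),
	});
	return res.json();
}

// Refresh it with an explicit expiry/scope, matching the query params read by the GET handler above.
async function refresh(token, expiry, scope) {
	const url = `${BASE_URL}/api/tokens?expiry=${encodeURIComponent(expiry)}&scope=${encodeURIComponent(scope)}`;
	const res = await fetch(url, { headers: { Authorization: `Bearer ${token}` } });
	return res.json();
}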
@@ -1,14 +1,14 @@
const express = require('express');
const validator = require('../../lib/validator');
const jwtdecode = require('../../lib/express/jwt-decode');
const express = require('express');
const validator = require('../../lib/validator');
const jwtdecode = require('../../lib/express/jwt-decode');
const userIdFromMe = require('../../lib/express/user-id-from-me');
const internalUser = require('../../internal/user');
const apiValidator = require('../../lib/validator/api');

let router = express.Router({
const router = express.Router({
caseSensitive: true,
strict: true,
mergeParams: true
strict: true,
mergeParams: true,
});

/**

@@ -27,26 +27,28 @@ router
* Retrieve all users
*/
.get((req, res, next) => {
validator({
additionalProperties: false,
properties: {
expand: {
$ref: 'definitions#/definitions/expand'
validator(
{
additionalProperties: false,
properties: {
expand: {
$ref: 'definitions#/definitions/expand',
},
query: {
$ref: 'definitions#/definitions/query',
},
},
query: {
$ref: 'definitions#/definitions/query'
}
}
}, {
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
query: (typeof req.query.query === 'string' ? req.query.query : null)
})
},
{
expand: typeof req.query.expand === 'string' ? req.query.expand.split(',') : null,
query: typeof req.query.query === 'string' ? req.query.query : null,
},
)
.then((data) => {
return internalUser.getAll(res.locals.access, data.expand, data.query);
})
.then((users) => {
res.status(200)
.send(users);
res.status(200).send(users);
})
.catch(next);
})

@@ -57,13 +59,12 @@ router
* Create a new User
*/
.post((req, res, next) => {
apiValidator({$ref: 'endpoints/users#/links/1/schema'}, req.body)
apiValidator({ $ref: 'endpoints/users#/links/1/schema' }, req.body)
.then((payload) => {
return internalUser.create(res.locals.access, payload);
})
.then((result) => {
res.status(201)
.send(result);
res.status(201).send(result);
})
.catch(next);
});

@@ -87,31 +88,33 @@ router
* Retrieve a specific user
*/
.get((req, res, next) => {
validator({
required: ['user_id'],
additionalProperties: false,
properties: {
user_id: {
$ref: 'definitions#/definitions/id'
validator(
{
required: ['user_id'],
additionalProperties: false,
properties: {
user_id: {
$ref: 'definitions#/definitions/id',
},
expand: {
$ref: 'definitions#/definitions/expand',
},
},
expand: {
$ref: 'definitions#/definitions/expand'
}
}
}, {
user_id: req.params.user_id,
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
})
},
{
user_id: req.params.user_id,
expand: typeof req.query.expand === 'string' ? req.query.expand.split(',') : null,
},
)
.then((data) => {
return internalUser.get(res.locals.access, {
id: data.user_id,
id: data.user_id,
expand: data.expand,
omit: internalUser.getUserOmisionsByAccess(res.locals.access, data.user_id)
omit: internalUser.getUserOmisionsByAccess(res.locals.access, data.user_id),
});
})
.then((user) => {
res.status(200)
.send(user);
res.status(200).send(user);
})
.catch(next);
})

@@ -122,14 +125,13 @@ router
* Update and existing user
*/
.put((req, res, next) => {
apiValidator({$ref: 'endpoints/users#/links/2/schema'}, req.body)
apiValidator({ $ref: 'endpoints/users#/links/2/schema' }, req.body)
.then((payload) => {
payload.id = req.params.user_id;
return internalUser.update(res.locals.access, payload);
})
.then((result) => {
res.status(200)
.send(result);
res.status(200).send(result);
})
.catch(next);
})

@@ -140,10 +142,10 @@ router
* Update and existing user
*/
.delete((req, res, next) => {
internalUser.delete(res.locals.access, {id: req.params.user_id})
internalUser
.delete(res.locals.access, { id: req.params.user_id })
.then((result) => {
res.status(200)
.send(result);
res.status(200).send(result);
})
.catch(next);
});

@@ -167,14 +169,13 @@ router
* Update password for a user
*/
.put((req, res, next) => {
apiValidator({$ref: 'endpoints/users#/links/4/schema'}, req.body)
apiValidator({ $ref: 'endpoints/users#/links/4/schema' }, req.body)
.then((payload) => {
payload.id = req.params.user_id;
return internalUser.setPassword(res.locals.access, payload);
})
.then((result) => {
res.status(201)
.send(result);
res.status(201).send(result);
})
.catch(next);
});

@@ -198,14 +199,13 @@ router
* Set some or all permissions for a user
*/
.put((req, res, next) => {
apiValidator({$ref: 'endpoints/users#/links/5/schema'}, req.body)
apiValidator({ $ref: 'endpoints/users#/links/5/schema' }, req.body)
.then((payload) => {
payload.id = req.params.user_id;
return internalUser.setPermissions(res.locals.access, payload);
})
.then((result) => {
res.status(201)
.send(result);
res.status(201).send(result);
})
.catch(next);
});

@@ -228,10 +228,10 @@ router
* Log in as a user
*/
.post((req, res, next) => {
internalUser.loginAs(res.locals.access, {id: parseInt(req.params.user_id, 10)})
internalUser
.loginAs(res.locals.access, { id: parseInt(req.params.user_id, 10) })
.then((result) => {
res.status(201)
.send(result);
res.status(201).send(result);
})
.catch(next);
});
@@ -1,12 +1,12 @@
const config = require('./lib/config');
const logger = require('./logger').setup;
const certificateModel = require('./models/certificate');
const userModel = require('./models/user');
const config = require('./lib/config');
const logger = require('./logger').setup;
const certificateModel = require('./models/certificate');
const userModel = require('./models/user');
const userPermissionModel = require('./models/user_permission');
const utils = require('./lib/utils');
const authModel = require('./models/auth');
const settingModel = require('./models/setting');
const certbot = require('./lib/certbot');
const utils = require('./lib/utils');
const authModel = require('./models/auth');
const settingModel = require('./models/setting');
const certbot = require('./lib/certbot');

/**
* Creates a default admin users if one doesn't already exist in the database

@@ -24,13 +24,13 @@ const setupDefaultUser = () => {
// Create a new user and set password
logger.info('Creating a new user: admin@example.com with password: iArhP1j7p1P6TA92FA2FMbbUGYqwcYzxC4AVEe12Wbi94FY9gNN62aKyF1shrvG4NycjjX9KfmDQiwkLZH1ZDR9xMjiG2QmoHXi');

let data = {
const data = {
is_deleted: 0,
email: 'admin@example.com',
name: 'Administrator',
nickname: 'Admin',
avatar: '',
roles: ['admin'],
email: 'admin@example.com',
name: 'Administrator',
nickname: 'Admin',
avatar: '',
roles: ['admin'],
};

return userModel

@@ -41,20 +41,20 @@ const setupDefaultUser = () => {
.query()
.insert({
user_id: user.id,
type: 'password',
secret: 'iArhP1j7p1P6TA92FA2FMbbUGYqwcYzxC4AVEe12Wbi94FY9gNN62aKyF1shrvG4NycjjX9KfmDQiwkLZH1ZDR9xMjiG2QmoHXi',
meta: {},
type: 'password',
secret: 'iArhP1j7p1P6TA92FA2FMbbUGYqwcYzxC4AVEe12Wbi94FY9gNN62aKyF1shrvG4NycjjX9KfmDQiwkLZH1ZDR9xMjiG2QmoHXi',
meta: {},
})
.then(() => {
return userPermissionModel.query().insert({
user_id: user.id,
visibility: 'all',
proxy_hosts: 'manage',
user_id: user.id,
visibility: 'all',
proxy_hosts: 'manage',
redirection_hosts: 'manage',
dead_hosts: 'manage',
streams: 'manage',
access_lists: 'manage',
certificates: 'manage',
dead_hosts: 'manage',
streams: 'manage',
access_lists: 'manage',
certificates: 'manage',
});
});
})

@@ -76,18 +76,18 @@ const setupDefaultSettings = () => {
return settingModel
.query()
.select(settingModel.raw('COUNT(`id`) as `count`'))
.where({id: 'default-site'})
.where({ id: 'default-site' })
.first()
.then((row) => {
if (!row.count) {
settingModel
.query()
.insert({
id: 'default-site',
name: 'Default Site',
id: 'default-site',
name: 'Default Site',
description: 'What to show when Nginx is hit with an unknown Host',
value: 'congratulations',
meta: {},
value: 'congratulations',
meta: {},
})
.then(() => {
logger.info('Default settings added');

@@ -111,8 +111,8 @@ const setupCertbotPlugins = () => {
.andWhere('provider', 'letsencrypt')
.then((certificates) => {
if (certificates && certificates.length) {
let plugins = [];
let promises = [];
const plugins = [];
const promises = [];

certificates.map(function (certificate) {
if (certificate.meta && certificate.meta.dns_challenge === true) {

@@ -123,27 +123,23 @@ const setupCertbotPlugins = () => {
// Make sure credentials file exists
const credentials_loc = '/data/tls/certbot/credentials/credentials-' + certificate.id;
// Escape single quotes and backslashes
const escapedCredentials = certificate.meta.dns_provider_credentials.replaceAll('\'', '\\\'').replaceAll('\\', '\\\\');
const credentials_cmd = '[ -f \'' + credentials_loc + '\' ] || { mkdir -p /data/tls/certbot/credentials 2> /dev/null; echo \'' + escapedCredentials + '\' > \'' + credentials_loc + '\' && chmod 600 \'' + credentials_loc + '\'; }';
const escapedCredentials = certificate.meta.dns_provider_credentials.replaceAll("'", "\\'").replaceAll('\\', '\\\\');
const credentials_cmd = "[ -f '" + credentials_loc + "' ] || { mkdir -p /data/tls/certbot/credentials 2> /dev/null; echo '" + escapedCredentials + "' > '" + credentials_loc + "' && chmod 600 '" + credentials_loc + "'; }";
promises.push(utils.exec(credentials_cmd));
}
});

return certbot.installPlugins(plugins)
.then(() => {
if (promises.length) {
return Promise.all(promises)
.then(() => {
logger.info('Added Certbot plugins ' + plugins.join(', '));
});
}
});
return certbot.installPlugins(plugins).then(() => {
if (promises.length) {
return Promise.all(promises).then(() => {
logger.info('Added Certbot plugins ' + plugins.join(', '));
});
}
});
}
});
};

module.exports = function () {
return setupDefaultUser()
.then(setupDefaultSettings)
.then(setupCertbotPlugins);
return setupDefaultUser().then(setupDefaultSettings).then(setupCertbotPlugins);
};
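The backend changes in this commit are almost entirely mechanical reformatting: let becomes const, spaces appear inside object braces, trailing commas are added (including after the last call argument), and short .then() chains are collapsed onto one line. A Prettier configuration along these lines would produce that style; the exact values are assumptions inferred from the diff, not copied from the repository.

// Hypothetical .prettierrc.js — values inferred from the formatting in this diff, not taken from the repo.
module.exports = {
	singleQuote: true,    // strings keep single quotes, switching to double quotes when they contain one
	trailingComma: 'all', // trailing commas after object members and final call arguments
	useTabs: true,        // assumption; the rendered diff does not preserve indentation
	printWidth: 200,      // long logger.info() and shell-command lines are left unwrapped
	semi: true,
};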
@@ -1,6 +1,6 @@
#!/usr/bin/env node

const fs = require('fs');
const fs = require('fs');
const sqlite3 = require('sqlite3');

if (fs.existsSync(process.env.DB_SQLITE_FILE)) {
@@ -4,7 +4,7 @@
"description": "A beautiful interface for creating Nginx endpoints",
"main": "js/index.js",
"dependencies": {
"@babel/core": "7.24.3",
"@babel/core": "7.24.4",
"babel-core": "6.26.3",
"babel-loader": "8.3.0",
"babel-preset-env": "1.7.0",

@@ -31,7 +31,7 @@
"nodemon": "3.1.0",
"numeral": "2.0.6",
"sass-loader": "10.5.2",
"style-loader": "3.3.4",
"style-loader": "4.0.0",
"tabler-ui": "git+https://github.com/tabler/tabler.git#00f78ad823311bc3ad974ac3e5b0126198f0a813",
"underscore": "1.13.6",
"webpack": "4.47.0",
@@ -1,13 +1,13 @@
{
"extends": [
"config:base"
],
"baseBranches": [],
"includeForks": true,
"automerge": false,
"branchPrefix": "renovate-deps-update-",
"rangeStrategy": "pin",
"digest": {
"enabled": false
}
}
{
"extends": [
"config:base"
],
"baseBranches": [],
"includeForks": true,
"automerge": false,
"branchPrefix": "renovate-deps-update-",
"rangeStrategy": "pin",
"digest": {
"enabled": false
}
}
2
rootfs/nftd/showdown.min.js
vendored
2
rootfs/nftd/showdown.min.js
vendored
File diff suppressed because one or more lines are too long
@@ -40,6 +40,7 @@ http {
gzip_types *;
gzip_proxied any;
gzip_comp_level 1;
gzip_http_version 1.0;
gunzip on;
gzip_static on;