Compare commits

10 Commits

Author SHA1 Message Date
Jamie Curnow
538d28d32d Refactor from Promises to async/await 2025-09-11 14:13:54 +10:00
Jamie Curnow
a7d4fd55d9 Fix proxy hosts routes throwing errors 2025-09-11 08:16:11 +10:00
Jamie Curnow
9682de1830 Biome update 2025-09-10 21:38:02 +10:00
Jamie Curnow
cde7460b5e Fix cypress tests following user wizard changes 2025-09-10 21:32:16 +10:00
Jamie Curnow
ca84e3a146 User Permissions Modal 2025-09-09 15:13:34 +10:00
Jamie Curnow
fa11945235 Introducing the Setup Wizard for creating the first user
- no longer sets up a default user
- still able to do that with env vars however
2025-09-09 13:44:35 +10:00
Jamie Curnow
432afe73ad User table polishing, user delete modal 2025-09-04 14:59:01 +10:00
Jamie Curnow
5a01da2916 Notification toasts, nicer loading, add new user support 2025-09-04 12:11:39 +10:00
Jamie Curnow
ebd9148813 React 2025-09-03 14:02:14 +10:00
Jamie Curnow
a12553fec7 Convert backend to ESM
- About 5 years overdue
- Remove eslint, use Biome instead
2025-09-03 13:59:40 +10:00
474 changed files with 15451 additions and 24323 deletions
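
The two headline commits above ("Convert backend to ESM" and "Refactor from Promises to async/await") set the pattern for almost every hunk below: the left-hand side of each flattened diff line is the old CommonJS/Promise-chain code, the right-hand side is the new ESM/async-await code. The sketch below is illustrative only and is not taken from this changeset; every module and function name in it is made up.

// Before: CommonJS module returning a Promise chain
// const db = require('./db');
// function getUser(id) {
//     return db.query(id).then((row) => {
//         if (!row) {
//             throw new Error('not found');
//         }
//         return row;
//     });
// }
// module.exports = { getUser };

// After: ES module using async/await (same behaviour, flatter control flow)
import db from "./db.js"; // hypothetical module, for illustration only

const getUser = async (id) => {
	const row = await db.query(id);
	if (!row) {
		throw new Error("not found");
	}
	return row;
};

export default { getUser };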

@@ -1,73 +0,0 @@
{
"env": {
"node": true,
"es6": true
},
"extends": [
"eslint:recommended"
],
"globals": {
"Atomics": "readonly",
"SharedArrayBuffer": "readonly"
},
"parserOptions": {
"ecmaVersion": 2018,
"sourceType": "module"
},
"plugins": [
"align-assignments"
],
"rules": {
"arrow-parens": [
"error",
"always"
],
"indent": [
"error",
"tab"
],
"linebreak-style": [
"error",
"unix"
],
"quotes": [
"error",
"single"
],
"semi": [
"error",
"always"
],
"key-spacing": [
"error",
{
"align": "value"
}
],
"comma-spacing": [
"error",
{
"before": false,
"after": true
}
],
"func-call-spacing": [
"error",
"never"
],
"keyword-spacing": [
"error",
{
"before": true
}
],
"no-irregular-whitespace": "error",
"no-unused-expressions": 0,
"align-assignments/align-assignments": [
2,
{
"requiresOnly": false
}
]
}
}

@@ -1,11 +0,0 @@
{
"printWidth": 320,
"tabWidth": 4,
"useTabs": true,
"semi": true,
"singleQuote": true,
"bracketSpacing": true,
"jsxBracketSameLine": true,
"trailingComma": "all",
"proseWrap": "always"
}

@@ -1,9 +1,12 @@
const express = require('express'); import bodyParser from "body-parser";
const bodyParser = require('body-parser'); import compression from "compression";
const fileUpload = require('express-fileupload'); import express from "express";
const compression = require('compression'); import fileUpload from "express-fileupload";
const config = require('./lib/config'); import { isDebugMode } from "./lib/config.js";
const log = require('./logger').express; import cors from "./lib/express/cors.js";
import jwt from "./lib/express/jwt.js";
import { express as logger } from "./logger.js";
import mainRoutes from "./routes/main.js";
/** /**
* App * App
@@ -20,71 +23,70 @@ app.use(compression());
* General Logging, BEFORE routes * General Logging, BEFORE routes
*/ */
app.disable('x-powered-by'); app.disable("x-powered-by");
app.enable('trust proxy', ['loopback', 'linklocal', 'uniquelocal']); app.enable("trust proxy", ["loopback", "linklocal", "uniquelocal"]);
app.enable('strict routing'); app.enable("strict routing");
// pretty print JSON when not live // pretty print JSON when not live
if (config.debug()) { if (isDebugMode()) {
app.set('json spaces', 2); app.set("json spaces", 2);
} }
// CORS for everything // CORS for everything
app.use(require('./lib/express/cors')); app.use(cors);
// General security/cache related headers + server header // General security/cache related headers + server header
app.use(function (req, res, next) { app.use((_, res, next) => {
let x_frame_options = 'DENY'; let x_frame_options = "DENY";
if (typeof process.env.X_FRAME_OPTIONS !== 'undefined' && process.env.X_FRAME_OPTIONS) { if (typeof process.env.X_FRAME_OPTIONS !== "undefined" && process.env.X_FRAME_OPTIONS) {
x_frame_options = process.env.X_FRAME_OPTIONS; x_frame_options = process.env.X_FRAME_OPTIONS;
} }
res.set({ res.set({
'X-XSS-Protection': '1; mode=block', "X-XSS-Protection": "1; mode=block",
'X-Content-Type-Options': 'nosniff', "X-Content-Type-Options": "nosniff",
'X-Frame-Options': x_frame_options, "X-Frame-Options": x_frame_options,
'Cache-Control': 'no-cache, no-store, max-age=0, must-revalidate', "Cache-Control": "no-cache, no-store, max-age=0, must-revalidate",
Pragma: 'no-cache', Pragma: "no-cache",
Expires: 0 Expires: 0,
}); });
next(); next();
}); });
app.use(require('./lib/express/jwt')()); app.use(jwt());
app.use('/', require('./routes/main')); app.use("/", mainRoutes);
// production error handler // production error handler
// no stacktraces leaked to user // no stacktraces leaked to user
// eslint-disable-next-line app.use((err, req, res, _) => {
app.use(function (err, req, res, next) { const payload = {
let payload = {
error: { error: {
code: err.status, code: err.status,
message: err.public ? err.message : 'Internal Error' message: err.public ? err.message : "Internal Error",
} },
}; };
if (config.debug() || (req.baseUrl + req.path).includes('nginx/certificates')) { if (typeof err.message_i18n !== "undefined") {
payload.error.message_i18n = err.message_i18n;
}
if (isDebugMode() || (req.baseUrl + req.path).includes("nginx/certificates")) {
payload.debug = { payload.debug = {
stack: typeof err.stack !== 'undefined' && err.stack ? err.stack.split('\n') : null, stack: typeof err.stack !== "undefined" && err.stack ? err.stack.split("\n") : null,
previous: err.previous previous: err.previous,
}; };
} }
// Not every error is worth logging - but this is good for now until it gets annoying. // Not every error is worth logging - but this is good for now until it gets annoying.
if (typeof err.stack !== 'undefined' && err.stack) { if (typeof err.stack !== "undefined" && err.stack) {
if (config.debug()) { logger.debug(err.stack);
log.debug(err.stack); if (typeof err.public === "undefined" || !err.public) {
} else if (typeof err.public == 'undefined' || !err.public) { logger.warn(err.message);
log.warn(err.message);
} }
} }
res res.status(err.status || 500).send(payload);
.status(err.status || 500)
.send(payload);
}); });
module.exports = app; export default app;
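
For reference, the rewritten error handler above keeps the same response shape and adds a pass-through for `err.message_i18n`. Illustratively (values are examples, not from this diff), an error thrown with `status = 400`, `public = true` and an i18n key would be serialised roughly as:

// Example payload produced by the error handler (illustrative values only)
const examplePayload = {
	error: {
		code: 400,
		message: "Domain is already in use",
		message_i18n: "domain-in-use", // only set when err.message_i18n exists
	},
	// a `debug` object (stack, previous) is added only in debug mode or for
	// requests whose path includes "nginx/certificates"
};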

backend/biome.json (new file, 91 lines)

@@ -0,0 +1,91 @@
{
"$schema": "https://biomejs.dev/schemas/2.2.4/schema.json",
"vcs": {
"enabled": true,
"clientKind": "git",
"useIgnoreFile": true
},
"files": {
"ignoreUnknown": false,
"includes": [
"**/*.ts",
"**/*.tsx",
"**/*.js",
"**/*.jsx",
"!**/dist/**/*"
]
},
"formatter": {
"enabled": true,
"indentStyle": "tab",
"indentWidth": 4,
"lineWidth": 120,
"formatWithErrors": true
},
"assist": {
"actions": {
"source": {
"organizeImports": {
"level": "on",
"options": {
"groups": [
":BUN:",
":NODE:",
[
"npm:*",
"npm:*/**"
],
":PACKAGE_WITH_PROTOCOL:",
":URL:",
":PACKAGE:",
[
"/src/*",
"/src/**"
],
[
"/**"
],
[
"#*",
"#*/**"
],
":PATH:"
]
}
}
}
}
},
"linter": {
"enabled": true,
"rules": {
"recommended": true,
"correctness": {
"useUniqueElementIds": "off"
},
"suspicious": {
"noExplicitAny": "off"
},
"performance": {
"noDelete": "off"
},
"nursery": "off",
"a11y": {
"useSemanticElements": "off",
"useValidAnchor": "off"
},
"style": {
"noParameterAssign": "error",
"useAsConstAssertion": "error",
"useDefaultParameterLast": "error",
"useEnumInitializers": "error",
"useSelfClosingElements": "error",
"useSingleVarDeclarator": "error",
"noUnusedTemplateLiteral": "error",
"useNumberNamespace": "error",
"noInferrableTypes": "error",
"noUselessElse": "error"
}
}
}
}
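
Worth noting for the converted files that follow: the `organizeImports` groups above roughly sort each import block as Node built-ins first, then npm packages, then local relative paths. The snippet below only illustrates that ordering and is not part of the diff:

import fs from "node:fs"; // ":NODE:" group
import _ from "lodash"; // ":PACKAGE:" group (plain npm packages)
import utils from "../lib/utils.js"; // ":PATH:" group (relative paths)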

@@ -1,14 +1,19 @@
const config = require('./lib/config'); import knex from "knex";
import {configGet, configHas} from "./lib/config.js";
if (!config.has('database')) { const generateDbConfig = () => {
throw new Error('Database config does not exist! Please read the instructions: https://nginxproxymanager.com/setup/'); if (!configHas("database")) {
throw new Error(
"Database config does not exist! Please read the instructions: https://nginxproxymanager.com/setup/",
);
} }
function generateDbConfig() { const cfg = configGet("database");
const cfg = config.get('database');
if (cfg.engine === 'knex-native') { if (cfg.engine === "knex-native") {
return cfg.knex; return cfg.knex;
} }
return { return {
client: cfg.engine, client: cfg.engine,
connection: { connection: {
@@ -16,12 +21,12 @@ function generateDbConfig() {
user: cfg.user, user: cfg.user,
password: cfg.password, password: cfg.password,
database: cfg.name, database: cfg.name,
port: cfg.port port: cfg.port,
}, },
migrations: { migrations: {
tableName: 'migrations' tableName: "migrations",
} },
};
}; };
}
module.exports = require('knex')(generateDbConfig()); export default knex(generateDbConfig());
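
For context, the module now default-exports a ready-to-use knex instance instead of assigning it to `module.exports`. A hypothetical consumer (the `user` table name is just an example, not taken from this diff) would use it roughly like this:

// Illustrative consumer of the new default export
import db from "./db.js";

const countActiveUsers = async () => {
	const row = await db("user").count("id as count").where("is_deleted", 0).first();
	return Number.parseInt(row.count, 10);
};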

@@ -1,48 +1,47 @@
#!/usr/bin/env node #!/usr/bin/env node
const schema = require('./schema'); import app from "./app.js";
const logger = require('./logger').global; import internalCertificate from "./internal/certificate.js";
import internalIpRanges from "./internal/ip_ranges.js";
import { global as logger } from "./logger.js";
import { migrateUp } from "./migrate.js";
import { getCompiledSchema } from "./schema/index.js";
import setup from "./setup.js";
const IP_RANGES_FETCH_ENABLED = process.env.IP_RANGES_FETCH_ENABLED !== 'false'; const IP_RANGES_FETCH_ENABLED = process.env.IP_RANGES_FETCH_ENABLED !== "false";
async function appStart() { async function appStart() {
const migrate = require('./migrate'); return migrateUp()
const setup = require('./setup');
const app = require('./app');
const internalCertificate = require('./internal/certificate');
const internalIpRanges = require('./internal/ip_ranges');
return migrate.latest()
.then(setup) .then(setup)
.then(schema.getCompiledSchema) .then(getCompiledSchema)
.then(() => { .then(() => {
if (IP_RANGES_FETCH_ENABLED) { if (!IP_RANGES_FETCH_ENABLED) {
logger.info('IP Ranges fetch is enabled'); logger.info("IP Ranges fetch is disabled by environment variable");
return internalIpRanges.fetch().catch((err) => { return;
logger.error('IP Ranges fetch failed, continuing anyway:', err.message);
});
} else {
logger.info('IP Ranges fetch is disabled by environment variable');
} }
logger.info("IP Ranges fetch is enabled");
return internalIpRanges.fetch().catch((err) => {
logger.error("IP Ranges fetch failed, continuing anyway:", err.message);
});
}) })
.then(() => { .then(() => {
internalCertificate.initTimer(); internalCertificate.initTimer();
internalIpRanges.initTimer(); internalIpRanges.initTimer();
const server = app.listen(3000, () => { const server = app.listen(3000, () => {
logger.info('Backend PID ' + process.pid + ' listening on port 3000 ...'); logger.info(`Backend PID ${process.pid} listening on port 3000 ...`);
process.on('SIGTERM', () => { process.on("SIGTERM", () => {
logger.info('PID ' + process.pid + ' received SIGTERM'); logger.info(`PID ${process.pid} received SIGTERM`);
server.close(() => { server.close(() => {
logger.info('Stopping.'); logger.info("Stopping.");
process.exit(0); process.exit(0);
}); });
}); });
}); });
}) })
.catch((err) => { .catch((err) => {
logger.error(err.message, err); logger.error(`Startup Error: ${err.message}`, err);
setTimeout(appStart, 1000); setTimeout(appStart, 1000);
}); });
} }
@@ -50,7 +49,6 @@ async function appStart () {
try { try {
appStart(); appStart();
} catch (err) { } catch (err) {
logger.error(err.message, err); logger.fatal(err);
process.exit(1); process.exit(1);
} }

@@ -1,103 +1,94 @@
const _ = require('lodash'); import fs from "node:fs";
const fs = require('node:fs'); import batchflow from "batchflow";
const batchflow = require('batchflow'); import _ from "lodash";
const logger = require('../logger').access; import errs from "../lib/error.js";
const error = require('../lib/error'); import utils from "../lib/utils.js";
const utils = require('../lib/utils'); import { access as logger } from "../logger.js";
const accessListModel = require('../models/access_list'); import accessListModel from "../models/access_list.js";
const accessListAuthModel = require('../models/access_list_auth'); import accessListAuthModel from "../models/access_list_auth.js";
const accessListClientModel = require('../models/access_list_client'); import accessListClientModel from "../models/access_list_client.js";
const proxyHostModel = require('../models/proxy_host'); import proxyHostModel from "../models/proxy_host.js";
const internalAuditLog = require('./audit-log'); import internalAuditLog from "./audit-log.js";
const internalNginx = require('./nginx'); import internalNginx from "./nginx.js";
function omissions () { const omissions = () => {
return ['is_deleted']; return ["is_deleted"];
} };
const internalAccessList = { const internalAccessList = {
/** /**
* @param {Access} access * @param {Access} access
* @param {Object} data * @param {Object} data
* @returns {Promise} * @returns {Promise}
*/ */
create: (access, data) => { create: async (access, data) => {
return access.can('access_lists:create', data) await access.can("access_lists:create", data);
.then((/*access_data*/) => { const row = await accessListModel
return accessListModel
.query() .query()
.insertAndFetch({ .insertAndFetch({
name: data.name, name: data.name,
satisfy_any: data.satisfy_any, satisfy_any: data.satisfy_any,
pass_auth: data.pass_auth, pass_auth: data.pass_auth,
owner_user_id: access.token.getUserId(1) owner_user_id: access.token.getUserId(1),
}) })
.then(utils.omitRow(omissions())); .then(utils.omitRow(omissions()));
})
.then((row) => {
data.id = row.id; data.id = row.id;
const promises = []; const promises = [];
// Items
// Now add the items
data.items.map((item) => { data.items.map((item) => {
promises.push(accessListAuthModel promises.push(
.query() accessListAuthModel.query().insert({
.insert({
access_list_id: row.id, access_list_id: row.id,
username: item.username, username: item.username,
password: item.password password: item.password,
}) }),
); );
return true;
}); });
// Now add the clients // Clients
if (typeof data.clients !== 'undefined' && data.clients) { data.clients?.map((client) => {
data.clients.map((client) => { promises.push(
promises.push(accessListClientModel accessListClientModel.query().insert({
.query()
.insert({
access_list_id: row.id, access_list_id: row.id,
address: client.address, address: client.address,
directive: client.directive directive: client.directive,
}) }),
); );
return true;
}); });
}
return Promise.all(promises); await Promise.all(promises);
})
.then(() => {
// re-fetch with expansions // re-fetch with expansions
return internalAccessList.get(access, { const freshRow = await internalAccessList.get(
access,
{
id: data.id, id: data.id,
expand: ['owner', 'items', 'clients', 'proxy_hosts.access_list.[clients,items]'] expand: ["owner", "items", "clients", "proxy_hosts.access_list.[clients,items]"],
}, true /* <- skip masking */); },
}) true // skip masking
.then((row) => { );
// Audit log
data.meta = _.assign({}, data.meta || {}, row.meta);
return internalAccessList.build(row) // Audit log
.then(() => { data.meta = _.assign({}, data.meta || {}, freshRow.meta);
if (parseInt(row.proxy_host_count, 10)) { await internalAccessList.build(freshRow);
return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts);
if (Number.parseInt(freshRow.proxy_host_count, 10)) {
await internalNginx.bulkGenerateConfigs("proxy_host", freshRow.proxy_hosts);
} }
})
.then(() => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { await internalAuditLog.add(access, {
action: 'created', action: "created",
object_type: 'access-list', object_type: "access-list",
object_id: row.id, object_id: freshRow.id,
meta: internalAccessList.maskItems(data) meta: internalAccessList.maskItems(data),
});
})
.then(() => {
return internalAccessList.maskItems(row);
});
}); });
return internalAccessList.maskItems(freshRow);
}, },
/** /**
@@ -108,129 +99,107 @@ const internalAccessList = {
* @param {String} [data.items] * @param {String} [data.items]
* @return {Promise} * @return {Promise}
*/ */
update: (access, data) => { update: async (access, data) => {
return access.can('access_lists:update', data.id) await access.can("access_lists:update", data.id);
.then((/*access_data*/) => { const row = await internalAccessList.get(access, { id: data.id });
return internalAccessList.get(access, {id: data.id});
})
.then((row) => {
if (row.id !== data.id) { if (row.id !== data.id) {
// Sanity check that something crazy hasn't happened // Sanity check that something crazy hasn't happened
throw new error.InternalValidationError(`Access List could not be updated, IDs do not match: ${row.id} !== ${data.id}`); throw new errs.InternalValidationError(
`Access List could not be updated, IDs do not match: ${row.id} !== ${data.id}`,
);
} }
})
.then(() => {
// patch name if specified // patch name if specified
if (typeof data.name !== 'undefined' && data.name) { if (typeof data.name !== "undefined" && data.name) {
return accessListModel await accessListModel.query().where({ id: data.id }).patch({
.query()
.where({id: data.id})
.patch({
name: data.name, name: data.name,
satisfy_any: data.satisfy_any, satisfy_any: data.satisfy_any,
pass_auth: data.pass_auth, pass_auth: data.pass_auth,
}); });
} }
})
.then(() => {
// Check for items and add/update/remove them // Check for items and add/update/remove them
if (typeof data.items !== 'undefined' && data.items) { if (typeof data.items !== "undefined" && data.items) {
const promises = []; const promises = [];
const items_to_keep = []; const itemsToKeep = [];
data.items.map((item) => { data.items.map((item) => {
if (item.password) { if (item.password) {
promises.push(accessListAuthModel promises.push(
.query() accessListAuthModel.query().insert({
.insert({
access_list_id: data.id, access_list_id: data.id,
username: item.username, username: item.username,
password: item.password password: item.password,
}) }),
); );
} else { } else {
// This was supplied with an empty password, which means keep it but don't change the password // This was supplied with an empty password, which means keep it but don't change the password
items_to_keep.push(item.username); itemsToKeep.push(item.username);
} }
return true;
}); });
const query = accessListAuthModel const query = accessListAuthModel.query().delete().where("access_list_id", data.id);
.query()
.delete()
.where('access_list_id', data.id);
if (items_to_keep.length) { if (itemsToKeep.length) {
query.andWhere('username', 'NOT IN', items_to_keep); query.andWhere("username", "NOT IN", itemsToKeep);
} }
return query await query;
.then(() => {
// Add new items // Add new items
if (promises.length) { if (promises.length) {
return Promise.all(promises); await Promise.all(promises);
} }
});
} }
})
.then(() => {
// Check for clients and add/update/remove them
if (typeof data.clients !== 'undefined' && data.clients) {
const promises = [];
// Check for clients and add/update/remove them
if (typeof data.clients !== "undefined" && data.clients) {
const clientPromises = [];
data.clients.map((client) => { data.clients.map((client) => {
if (client.address) { if (client.address) {
promises.push(accessListClientModel clientPromises.push(
.query() accessListClientModel.query().insert({
.insert({
access_list_id: data.id, access_list_id: data.id,
address: client.address, address: client.address,
directive: client.directive directive: client.directive,
}) }),
); );
} }
return true;
}); });
const query = accessListClientModel const query = accessListClientModel.query().delete().where("access_list_id", data.id);
.query() await query;
.delete() // Add new clients
.where('access_list_id', data.id); if (clientPromises.length) {
await Promise.all(clientPromises);
}
}
return query
.then(() => {
// Add new items
if (promises.length) {
return Promise.all(promises);
}
});
}
})
.then(() => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { await internalAuditLog.add(access, {
action: 'updated', action: "updated",
object_type: 'access-list', object_type: "access-list",
object_id: data.id, object_id: data.id,
meta: internalAccessList.maskItems(data) meta: internalAccessList.maskItems(data),
}); });
})
.then(() => {
// re-fetch with expansions // re-fetch with expansions
return internalAccessList.get(access, { const freshRow = await internalAccessList.get(
access,
{
id: data.id, id: data.id,
expand: ['owner', 'items', 'clients', 'proxy_hosts.[certificate,access_list.[clients,items]]'] expand: ["owner", "items", "clients", "proxy_hosts.[certificate,access_list.[clients,items]]"],
}, true /* <- skip masking */); },
}) true // skip masking
.then((row) => { );
return internalAccessList.build(row)
.then(() => { await internalAccessList.build(freshRow)
if (parseInt(row.proxy_host_count, 10)) { if (Number.parseInt(row.proxy_host_count, 10)) {
return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts); await internalNginx.bulkGenerateConfigs("proxy_host", row.proxy_hosts);
} }
}).then(internalNginx.reload) await internalNginx.reload();
.then(() => {
return internalAccessList.maskItems(row); return internalAccessList.maskItems(row);
});
});
}, },
/** /**
@@ -239,52 +208,50 @@ const internalAccessList = {
* @param {Integer} data.id * @param {Integer} data.id
* @param {Array} [data.expand] * @param {Array} [data.expand]
* @param {Array} [data.omit] * @param {Array} [data.omit]
* @param {Boolean} [skip_masking] * @param {Boolean} [skipMasking]
* @return {Promise} * @return {Promise}
*/ */
get: (access, data, skip_masking) => { get: async (access, data, skipMasking) => {
if (typeof data === 'undefined') { const thisData = data || {};
data = {}; const accessData = await access.can("access_lists:get", thisData.id)
}
return access.can('access_lists:get', data.id)
.then((access_data) => {
const query = accessListModel const query = accessListModel
.query() .query()
.select('access_list.*', accessListModel.raw('COUNT(proxy_host.id) as proxy_host_count')) .select("access_list.*", accessListModel.raw("COUNT(proxy_host.id) as proxy_host_count"))
.leftJoin('proxy_host', function() { .leftJoin("proxy_host", function () {
this.on('proxy_host.access_list_id', '=', 'access_list.id') this.on("proxy_host.access_list_id", "=", "access_list.id").andOn(
.andOn('proxy_host.is_deleted', '=', 0); "proxy_host.is_deleted",
"=",
0,
);
}) })
.where('access_list.is_deleted', 0) .where("access_list.is_deleted", 0)
.andWhere('access_list.id', data.id) .andWhere("access_list.id", thisData.id)
.groupBy('access_list.id') .groupBy("access_list.id")
.allowGraph('[owner,items,clients,proxy_hosts.[certificate,access_list.[clients,items]]]') .allowGraph("[owner,items,clients,proxy_hosts.[certificate,access_list.[clients,items]]]")
.first(); .first();
if (access_data.permission_visibility !== 'all') { if (accessData.permission_visibility !== "all") {
query.andWhere('access_list.owner_user_id', access.token.getUserId(1)); query.andWhere("access_list.owner_user_id", access.token.getUserId(1));
} }
if (typeof data.expand !== 'undefined' && data.expand !== null) { if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
query.withGraphFetched(`[${data.expand.join(', ')}]`); query.withGraphFetched(`[${thisData.expand.join(", ")}]`);
} }
return query.then(utils.omitRow(omissions())); let row = await query.then(utils.omitRow(omissions()));
})
.then((row) => {
if (!row || !row.id) { if (!row || !row.id) {
throw new error.ItemNotFoundError(data.id); throw new errs.ItemNotFoundError(thisData.id);
} }
if (!skip_masking && typeof row.items !== 'undefined' && row.items) { if (!skipMasking && typeof row.items !== "undefined" && row.items) {
row = internalAccessList.maskItems(row); row = internalAccessList.maskItems(row);
} }
// Custom omissions // Custom omissions
if (typeof data.omit !== 'undefined' && data.omit !== null) { if (typeof data.omit !== "undefined" && data.omit !== null) {
row = _.omit(row, data.omit); row = _.omit(row, data.omit);
} }
return row; return row;
});
}, },
/** /**
@@ -294,14 +261,15 @@ const internalAccessList = {
* @param {String} [data.reason] * @param {String} [data.reason]
* @returns {Promise} * @returns {Promise}
*/ */
delete: (access, data) => { delete: async (access, data) => {
return access.can('access_lists:delete', data.id) await access.can("access_lists:delete", data.id);
.then(() => { const row = await internalAccessList.get(access, {
return internalAccessList.get(access, {id: data.id, expand: ['proxy_hosts', 'items', 'clients']}); id: data.id,
}) expand: ["proxy_hosts", "items", "clients"],
.then((row) => { });
if (!row || !row.id) { if (!row || !row.id) {
throw new error.ItemNotFoundError(data.id); throw new errs.ItemNotFoundError(data.id);
} }
// 1. update row to be deleted // 1. update row to be deleted
@@ -310,57 +278,47 @@ const internalAccessList = {
// 4. audit log // 4. audit log
// 1. update row to be deleted // 1. update row to be deleted
return accessListModel await accessListModel
.query() .query()
.where('id', row.id) .where("id", row.id)
.patch({ .patch({
is_deleted: 1 is_deleted: 1,
}) });
.then(() => {
// 2. update any proxy hosts that were using it (ignoring permissions) // 2. update any proxy hosts that were using it (ignoring permissions)
if (row.proxy_hosts) { if (row.proxy_hosts) {
return proxyHostModel await proxyHostModel
.query() .query()
.where('access_list_id', '=', row.id) .where("access_list_id", "=", row.id)
.patch({access_list_id: 0}) .patch({ access_list_id: 0 });
.then(() => {
// 3. reconfigure those hosts, then reload nginx
// 3. reconfigure those hosts, then reload nginx
// set the access_list_id to zero for these items // set the access_list_id to zero for these items
row.proxy_hosts.map((_val, idx) => { row.proxy_hosts.map((_val, idx) => {
row.proxy_hosts[idx].access_list_id = 0; row.proxy_hosts[idx].access_list_id = 0;
return true;
}); });
return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts); await internalNginx.bulkGenerateConfigs("proxy_host", row.proxy_hosts);
})
.then(() => {
return internalNginx.reload();
});
} }
})
.then(() => {
// delete the htpasswd file
const htpasswd_file = internalAccessList.getFilename(row);
await internalNginx.reload();
// delete the htpasswd file
try { try {
fs.unlinkSync(htpasswd_file); fs.unlinkSync(internalAccessList.getFilename(row));
} catch (_err) { } catch (_err) {
// do nothing // do nothing
} }
})
.then(() => {
// 4. audit log // 4. audit log
return internalAuditLog.add(access, { await internalAuditLog.add(access, {
action: 'deleted', action: "deleted",
object_type: 'access-list', object_type: "access-list",
object_id: row.id, object_id: row.id,
meta: _.omit(internalAccessList.maskItems(row), ['is_deleted', 'proxy_hosts']) meta: _.omit(internalAccessList.maskItems(row), ["is_deleted", "proxy_hosts"]),
}); });
});
})
.then(() => {
return true; return true;
});
}, },
/** /**
@@ -368,75 +326,73 @@ const internalAccessList = {
* *
* @param {Access} access * @param {Access} access
* @param {Array} [expand] * @param {Array} [expand]
* @param {String} [search_query] * @param {String} [searchQuery]
* @returns {Promise} * @returns {Promise}
*/ */
getAll: (access, expand, search_query) => { getAll: async (access, expand, searchQuery) => {
return access.can('access_lists:list') const accessData = await access.can("access_lists:list");
.then((access_data) => {
const query = accessListModel const query = accessListModel
.query() .query()
.select('access_list.*', accessListModel.raw('COUNT(proxy_host.id) as proxy_host_count')) .select("access_list.*", accessListModel.raw("COUNT(proxy_host.id) as proxy_host_count"))
.leftJoin('proxy_host', function() { .leftJoin("proxy_host", function () {
this.on('proxy_host.access_list_id', '=', 'access_list.id') this.on("proxy_host.access_list_id", "=", "access_list.id").andOn(
.andOn('proxy_host.is_deleted', '=', 0); "proxy_host.is_deleted",
"=",
0,
);
}) })
.where('access_list.is_deleted', 0) .where("access_list.is_deleted", 0)
.groupBy('access_list.id') .groupBy("access_list.id")
.allowGraph('[owner,items,clients]') .allowGraph("[owner,items,clients]")
.orderBy('access_list.name', 'ASC'); .orderBy("access_list.name", "ASC");
if (access_data.permission_visibility !== 'all') { if (accessData.permission_visibility !== "all") {
query.andWhere('access_list.owner_user_id', access.token.getUserId(1)); query.andWhere("access_list.owner_user_id", access.token.getUserId(1));
} }
// Query is used for searching // Query is used for searching
if (typeof search_query === 'string') { if (typeof searchQuery === "string") {
query.where(function () { query.where(function () {
this.where('name', 'like', `%${search_query}%`); this.where("name", "like", `%${searchQuery}%`);
}); });
} }
if (typeof expand !== 'undefined' && expand !== null) { if (typeof expand !== "undefined" && expand !== null) {
query.withGraphFetched(`[${expand.join(', ')}]`); query.withGraphFetched(`[${expand.join(", ")}]`);
} }
return query.then(utils.omitRows(omissions())); const rows = await query.then(utils.omitRows(omissions()));
})
.then((rows) => {
if (rows) { if (rows) {
rows.map((row, idx) => { rows.map((row, idx) => {
if (typeof row.items !== 'undefined' && row.items) { if (typeof row.items !== "undefined" && row.items) {
rows[idx] = internalAccessList.maskItems(row); rows[idx] = internalAccessList.maskItems(row);
} }
return true;
}); });
} }
return rows; return rows;
});
}, },
/** /**
* Report use * Count is used in reports
* *
* @param {Integer} user_id * @param {Integer} userId
* @param {String} visibility * @param {String} visibility
* @returns {Promise} * @returns {Promise}
*/ */
getCount: (user_id, visibility) => { getCount: async (userId, visibility) => {
const query = accessListModel const query = accessListModel
.query() .query()
.count('id as count') .count("id as count")
.where('is_deleted', 0); .where("is_deleted", 0);
if (visibility !== 'all') { if (visibility !== "all") {
query.andWhere('owner_user_id', user_id); query.andWhere("owner_user_id", userId);
} }
return query.first() const row = await query.first();
.then((row) => { return Number.parseInt(row.count, 10);
return parseInt(row.count, 10);
});
}, },
/** /**
@@ -444,21 +400,21 @@ const internalAccessList = {
* @returns {Object} * @returns {Object}
*/ */
maskItems: (list) => { maskItems: (list) => {
if (list && typeof list.items !== 'undefined') { if (list && typeof list.items !== "undefined") {
list.items.map((val, idx) => { list.items.map((val, idx) => {
let repeat_for = 8; let repeatFor = 8;
let first_char = '*'; let firstChar = "*";
if (typeof val.password !== 'undefined' && val.password) { if (typeof val.password !== "undefined" && val.password) {
repeat_for = val.password.length - 1; repeatFor = val.password.length - 1;
first_char = val.password.charAt(0); firstChar = val.password.charAt(0);
} }
list.items[idx].hint = first_char + ('*').repeat(repeat_for); list.items[idx].hint = firstChar + "*".repeat(repeatFor);
list.items[idx].password = ''; list.items[idx].password = "";
return true;
}); });
} }
return list; return list;
}, },
@@ -478,40 +434,33 @@ const internalAccessList = {
* @param {Array} list.items * @param {Array} list.items
* @returns {Promise} * @returns {Promise}
*/ */
build: (list) => { build: async (list) => {
logger.info(`Building Access file #${list.id} for: ${list.name}`); logger.info(`Building Access file #${list.id} for: ${list.name}`);
return new Promise((resolve, reject) => { const htpasswdFile = internalAccessList.getFilename(list);
const htpasswd_file = internalAccessList.getFilename(list);
// 1. remove any existing access file // 1. remove any existing access file
try { try {
fs.unlinkSync(htpasswd_file); fs.unlinkSync(htpasswdFile);
} catch (_err) { } catch (_err) {
// do nothing // do nothing
} }
// 2. create empty access file // 2. create empty access file
try { fs.writeFileSync(htpasswdFile, '', {encoding: 'utf8'});
fs.writeFileSync(htpasswd_file, '', {encoding: 'utf8'});
resolve(htpasswd_file);
} catch (err) {
reject(err);
}
})
.then((htpasswd_file) => {
// 3. generate password for each user // 3. generate password for each user
if (list.items.length) { if (list.items.length) {
return new Promise((resolve, reject) => { await new Promise((resolve, reject) => {
batchflow(list.items).sequential() batchflow(list.items).sequential()
.each((_i, item, next) => { .each((_i, item, next) => {
if (typeof item.password !== 'undefined' && item.password.length) { if (item.password?.length) {
logger.info(`Adding: ${item.username}`); logger.info(`Adding: ${item.username}`);
utils.execFile('openssl', ['passwd', '-apr1', item.password]) utils.execFile('openssl', ['passwd', '-apr1', item.password])
.then((res) => { .then((res) => {
try { try {
fs.appendFileSync(htpasswd_file, `${item.username}:${res}\n`, {encoding: 'utf8'}); fs.appendFileSync(htpasswdFile, `${item.username}:${res}\n`, {encoding: 'utf8'});
} catch (err) { } catch (err) {
reject(err); reject(err);
} }
@@ -533,8 +482,7 @@ const internalAccessList = {
}); });
}); });
} }
});
} }
}; }
module.exports = internalAccessList; export default internalAccessList;

@@ -1,6 +1,6 @@
const error = require('../lib/error'); import errs from "../lib/error.js";
const auditLogModel = require('../models/audit-log'); import { castJsonIfNeed } from "../lib/helpers.js";
const {castJsonIfNeed} = require('../lib/helpers'); import auditLogModel from "../models/audit-log.js";
const internalAuditLog = { const internalAuditLog = {
@@ -9,32 +9,31 @@ const internalAuditLog = {
* *
* @param {Access} access * @param {Access} access
* @param {Array} [expand] * @param {Array} [expand]
* @param {String} [search_query] * @param {String} [searchQuery]
* @returns {Promise} * @returns {Promise}
*/ */
getAll: (access, expand, search_query) => { getAll: async (access, expand, searchQuery) => {
return access.can('auditlog:list') await access.can("auditlog:list");
.then(() => {
let query = auditLogModel const query = auditLogModel
.query() .query()
.orderBy('created_on', 'DESC') .orderBy("created_on", "DESC")
.orderBy('id', 'DESC') .orderBy("id", "DESC")
.limit(100) .limit(100)
.allowGraph('[user]'); .allowGraph("[user]");
// Query is used for searching // Query is used for searching
if (typeof search_query === 'string' && search_query.length > 0) { if (typeof searchQuery === "string" && searchQuery.length > 0) {
query.where(function () { query.where(function () {
this.where(castJsonIfNeed('meta'), 'like', '%' + search_query + '%'); this.where(castJsonIfNeed("meta"), "like", `%${searchQuery}%`);
}); });
} }
if (typeof expand !== 'undefined' && expand !== null) { if (typeof expand !== "undefined" && expand !== null) {
query.withGraphFetched('[' + expand.join(', ') + ']'); query.withGraphFetched(`[${expand.join(", ")}]`);
} }
return query; return await query;
});
}, },
/** /**
@@ -51,29 +50,24 @@ const internalAuditLog = {
* @param {Object} [data.meta] * @param {Object} [data.meta]
* @returns {Promise} * @returns {Promise}
*/ */
add: (access, data) => { add: async (access, data) => {
return new Promise((resolve, reject) => { if (typeof data.user_id === "undefined" || !data.user_id) {
// Default the user id
if (typeof data.user_id === 'undefined' || !data.user_id) {
data.user_id = access.token.getUserId(1); data.user_id = access.token.getUserId(1);
} }
if (typeof data.action === 'undefined' || !data.action) { if (typeof data.action === "undefined" || !data.action) {
reject(new error.InternalValidationError('Audit log entry must contain an Action')); throw new errs.InternalValidationError("Audit log entry must contain an Action");
} else { }
// Make sure at least 1 of the IDs are set and action // Make sure at least 1 of the IDs are set and action
resolve(auditLogModel return await auditLogModel.query().insert({
.query()
.insert({
user_id: data.user_id, user_id: data.user_id,
action: data.action, action: data.action,
object_type: data.object_type || '', object_type: data.object_type || "",
object_id: data.object_id || 0, object_id: data.object_id || 0,
meta: data.meta || {} meta: data.meta || {},
}));
}
}); });
} },
}; };
module.exports = internalAuditLog; export default internalAuditLog;

File diff suppressed because it is too large.

@@ -1,110 +1,89 @@
const _ = require('lodash'); import _ from "lodash";
const error = require('../lib/error'); import errs from "../lib/error.js";
const utils = require('../lib/utils'); import { castJsonIfNeed } from "../lib/helpers.js";
const deadHostModel = require('../models/dead_host'); import utils from "../lib/utils.js";
const internalHost = require('./host'); import deadHostModel from "../models/dead_host.js";
const internalNginx = require('./nginx'); import internalAuditLog from "./audit-log.js";
const internalAuditLog = require('./audit-log'); import internalCertificate from "./certificate.js";
const internalCertificate = require('./certificate'); import internalHost from "./host.js";
const {castJsonIfNeed} = require('../lib/helpers'); import internalNginx from "./nginx.js";
function omissions () { const omissions = () => {
return ['is_deleted']; return ["is_deleted"];
} };
const internalDeadHost = { const internalDeadHost = {
/** /**
* @param {Access} access * @param {Access} access
* @param {Object} data * @param {Object} data
* @returns {Promise} * @returns {Promise}
*/ */
create: (access, data) => { create: async (access, data) => {
let create_certificate = data.certificate_id === 'new'; const createCertificate = data.certificate_id === "new";
if (create_certificate) { if (createCertificate) {
delete data.certificate_id; delete data.certificate_id;
} }
return access.can('dead_hosts:create', data) await access.can("dead_hosts:create", data);
.then((/*access_data*/) => {
// Get a list of the domain names and check each of them against existing records // Get a list of the domain names and check each of them against existing records
let domain_name_check_promises = []; const domainNameCheckPromises = [];
data.domain_names.map(function (domain_name) { data.domain_names.map((domain_name) => {
domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name)); domainNameCheckPromises.push(internalHost.isHostnameTaken(domain_name));
return true;
}); });
return Promise.all(domain_name_check_promises) await Promise.all(domainNameCheckPromises).then((check_results) => {
.then((check_results) => { check_results.map((result) => {
check_results.map(function (result) {
if (result.is_taken) { if (result.is_taken) {
throw new error.ValidationError(result.hostname + ' is already in use'); throw new errs.ValidationError(`${result.hostname} is already in use`);
} }
return true;
}); });
}); });
})
.then(() => {
// At this point the domains should have been checked // At this point the domains should have been checked
data.owner_user_id = access.token.getUserId(1); data.owner_user_id = access.token.getUserId(1);
data = internalHost.cleanSslHstsData(data); const thisData = internalHost.cleanSslHstsData(data);
// Fix for db field not having a default value // Fix for db field not having a default value
// for this optional field. // for this optional field.
if (typeof data.advanced_config === 'undefined') { if (typeof data.advanced_config === "undefined") {
data.advanced_config = ''; thisData.advanced_config = "";
} }
return deadHostModel const row = await deadHostModel.query().insertAndFetch(thisData).then(utils.omitRow(omissions()));
.query()
.insertAndFetch(data) if (createCertificate) {
.then(utils.omitRow(omissions())); const cert = await internalCertificate.createQuickCertificate(access, data);
})
.then((row) => {
if (create_certificate) {
return internalCertificate.createQuickCertificate(access, data)
.then((cert) => {
// update host with cert id // update host with cert id
return internalDeadHost.update(access, { await internalDeadHost.update(access, {
id: row.id, id: row.id,
certificate_id: cert.id certificate_id: cert.id,
}); });
})
.then(() => {
return row;
});
} else {
return row;
} }
})
.then((row) => {
// re-fetch with cert // re-fetch with cert
return internalDeadHost.get(access, { const freshRow = await internalDeadHost.get(access, {
id: row.id, id: row.id,
expand: ['certificate', 'owner'] expand: ["certificate", "owner"],
}); });
})
.then((row) => {
// Configure nginx // Configure nginx
return internalNginx.configure(deadHostModel, 'dead_host', row) await internalNginx.configure(deadHostModel, "dead_host", freshRow);
.then(() => { data.meta = _.assign({}, data.meta || {}, freshRow.meta);
return row;
});
})
.then((row) => {
data.meta = _.assign({}, data.meta || {}, row.meta);
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { await internalAuditLog.add(access, {
action: 'created', action: "created",
object_type: 'dead-host', object_type: "dead-host",
object_id: row.id, object_id: freshRow.id,
meta: data meta: data,
})
.then(() => {
return row;
});
}); });
return freshRow;
}, },
/** /**
@@ -113,98 +92,79 @@ const internalDeadHost = {
* @param {Number} data.id * @param {Number} data.id
* @return {Promise} * @return {Promise}
*/ */
update: (access, data) => { update: async (access, data) => {
let create_certificate = data.certificate_id === 'new'; const createCertificate = data.certificate_id === "new";
if (create_certificate) { if (createCertificate) {
delete data.certificate_id; delete data.certificate_id;
} }
return access.can('dead_hosts:update', data.id) await access.can("dead_hosts:update", data.id);
.then((/*access_data*/) => {
// Get a list of the domain names and check each of them against existing records // Get a list of the domain names and check each of them against existing records
let domain_name_check_promises = []; const domainNameCheckPromises = [];
if (typeof data.domain_names !== "undefined") {
if (typeof data.domain_names !== 'undefined') { data.domain_names.map((domainName) => {
data.domain_names.map(function (domain_name) { domainNameCheckPromises.push(internalHost.isHostnameTaken(domainName, "dead", data.id));
domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, 'dead', data.id)); return true;
}); });
return Promise.all(domain_name_check_promises) const checkResults = await Promise.all(domainNameCheckPromises);
.then((check_results) => { checkResults.map((result) => {
check_results.map(function (result) {
if (result.is_taken) { if (result.is_taken) {
throw new error.ValidationError(result.hostname + ' is already in use'); throw new errs.ValidationError(`${result.hostname} is already in use`);
} }
}); return true;
}); });
} }
}) const row = await internalDeadHost.get(access, { id: data.id });
.then(() => {
return internalDeadHost.get(access, {id: data.id});
})
.then((row) => {
if (row.id !== data.id) { if (row.id !== data.id) {
// Sanity check that something crazy hasn't happened // Sanity check that something crazy hasn't happened
throw new error.InternalValidationError('404 Host could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id); throw new errs.InternalValidationError(
`404 Host could not be updated, IDs do not match: ${row.id} !== ${data.id}`,
);
} }
if (create_certificate) { if (createCertificate) {
return internalCertificate.createQuickCertificate(access, { const cert = await internalCertificate.createQuickCertificate(access, {
domain_names: data.domain_names || row.domain_names, domain_names: data.domain_names || row.domain_names,
meta: _.assign({}, row.meta, data.meta) meta: _.assign({}, row.meta, data.meta),
}) });
.then((cert) => {
// update host with cert id // update host with cert id
data.certificate_id = cert.id; data.certificate_id = cert.id;
})
.then(() => {
return row;
});
} else {
return row;
} }
})
.then((row) => {
// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here. // Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
data = _.assign({}, { let thisData = _.assign(
domain_names: row.domain_names {},
}, data); {
domain_names: row.domain_names,
},
data,
);
data = internalHost.cleanSslHstsData(data, row); thisData = internalHost.cleanSslHstsData(thisData, row);
return deadHostModel
.query()
.where({id: data.id})
.patch(data)
.then((saved_row) => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { await internalAuditLog.add(access, {
action: 'updated', action: "updated",
object_type: 'dead-host', object_type: "dead-host",
object_id: row.id, object_id: row.id,
meta: data meta: thisData,
})
.then(() => {
return _.omit(saved_row, omissions());
}); });
const thisRow = await internalDeadHost
.get(access, {
id: thisData.id,
expand: ["owner", "certificate"],
}); });
})
.then(() => {
return internalDeadHost.get(access, {
id: data.id,
expand: ['owner', 'certificate']
})
.then((row) => {
// Configure nginx // Configure nginx
return internalNginx.configure(deadHostModel, 'dead_host', row) const newMeta = await internalNginx.configure(deadHostModel, "dead_host", row);
.then((new_meta) => { row.meta = newMeta;
row.meta = new_meta; return _.omit(internalHost.cleanRowCertificateMeta(thisRow), omissions());
row = internalHost.cleanRowCertificateMeta(row);
return _.omit(row, omissions());
});
});
});
}, },
/** /**
@@ -215,40 +175,32 @@ const internalDeadHost = {
* @param {Array} [data.omit] * @param {Array} [data.omit]
* @return {Promise} * @return {Promise}
*/ */
get: (access, data) => { get: async (access, data) => {
if (typeof data === 'undefined') { const accessData = await access.can("dead_hosts:get", data.id);
data = {}; const query = deadHostModel
}
return access.can('dead_hosts:get', data.id)
.then((access_data) => {
let query = deadHostModel
.query() .query()
.where('is_deleted', 0) .where("is_deleted", 0)
.andWhere('id', data.id) .andWhere("id", data.id)
.allowGraph('[owner,certificate]') .allowGraph("[owner,certificate]")
.first(); .first();
if (access_data.permission_visibility !== 'all') { if (accessData.permission_visibility !== "all") {
query.andWhere('owner_user_id', access.token.getUserId(1)); query.andWhere("owner_user_id", access.token.getUserId(1));
} }
if (typeof data.expand !== 'undefined' && data.expand !== null) { if (typeof data.expand !== "undefined" && data.expand !== null) {
query.withGraphFetched('[' + data.expand.join(', ') + ']'); query.withGraphFetched(`[${data.expand.join(", ")}]`);
} }
return query.then(utils.omitRow(omissions())); const row = await query.then(utils.omitRow(omissions()));
})
.then((row) => {
if (!row || !row.id) { if (!row || !row.id) {
throw new error.ItemNotFoundError(data.id); throw new errs.ItemNotFoundError(data.id);
} }
// Custom omissions // Custom omissions
if (typeof data.omit !== 'undefined' && data.omit !== null) { if (typeof data.omit !== "undefined" && data.omit !== null) {
row = _.omit(row, data.omit); return _.omit(row, data.omit);
} }
return row; return row;
});
}, },
/** /**
@@ -258,41 +210,29 @@ const internalDeadHost = {
* @param {String} [data.reason] * @param {String} [data.reason]
* @returns {Promise} * @returns {Promise}
*/ */
delete: (access, data) => { delete: async (access, data) => {
return access.can('dead_hosts:delete', data.id) await access.can("dead_hosts:delete", data.id)
.then(() => { const row = await internalDeadHost.get(access, { id: data.id });
return internalDeadHost.get(access, {id: data.id});
})
.then((row) => {
if (!row || !row.id) { if (!row || !row.id) {
throw new error.ItemNotFoundError(data.id); throw new errs.ItemNotFoundError(data.id);
} }
return deadHostModel await deadHostModel
.query() .query()
.where('id', row.id) .where("id", row.id)
.patch({ .patch({
is_deleted: 1 is_deleted: 1,
}) });
.then(() => {
// Delete Nginx Config // Delete Nginx Config
return internalNginx.deleteConfig('dead_host', row) await internalNginx.deleteConfig("dead_host", row);
.then(() => { await internalNginx.reload();
return internalNginx.reload();
});
})
.then(() => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { await internalAuditLog.add(access, {
action: 'deleted', action: "deleted",
object_type: 'dead-host', object_type: "dead-host",
object_id: row.id, object_id: row.id,
meta: _.omit(row, omissions()) meta: _.omit(row, omissions()),
});
});
})
.then(() => {
return true;
}); });
}, },
@@ -303,46 +243,39 @@ const internalDeadHost = {
* @param {String} [data.reason] * @param {String} [data.reason]
* @returns {Promise} * @returns {Promise}
*/ */
enable: (access, data) => { enable: async (access, data) => {
return access.can('dead_hosts:update', data.id) await access.can("dead_hosts:update", data.id)
.then(() => { const row = await internalDeadHost.get(access, {
return internalDeadHost.get(access, {
id: data.id, id: data.id,
expand: ['certificate', 'owner'] expand: ["certificate", "owner"],
}); });
})
.then((row) => {
if (!row || !row.id) { if (!row || !row.id) {
throw new error.ItemNotFoundError(data.id); throw new errs.ItemNotFoundError(data.id);
} else if (row.enabled) { }
throw new error.ValidationError('Host is already enabled'); if (row.enabled) {
throw new errs.ValidationError("Host is already enabled");
} }
row.enabled = 1; row.enabled = 1;
return deadHostModel await deadHostModel
.query() .query()
.where('id', row.id) .where("id", row.id)
.patch({ .patch({
enabled: 1 enabled: 1,
}) });
.then(() => {
// Configure nginx // Configure nginx
return internalNginx.configure(deadHostModel, 'dead_host', row); await internalNginx.configure(deadHostModel, "dead_host", row);
})
.then(() => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { await internalAuditLog.add(access, {
action: 'enabled', action: "enabled",
object_type: 'dead-host', object_type: "dead-host",
object_id: row.id, object_id: row.id,
meta: _.omit(row, omissions()) meta: _.omit(row, omissions()),
}); });
});
})
.then(() => {
return true; return true;
});
}, },
/** /**
@@ -352,46 +285,37 @@ const internalDeadHost = {
* @param {String} [data.reason] * @param {String} [data.reason]
* @returns {Promise} * @returns {Promise}
*/ */
disable: (access, data) => { disable: async (access, data) => {
return access.can('dead_hosts:update', data.id) await access.can("dead_hosts:update", data.id)
.then(() => { const row = await internalDeadHost.get(access, { id: data.id });
return internalDeadHost.get(access, {id: data.id});
})
.then((row) => {
if (!row || !row.id) { if (!row || !row.id) {
throw new error.ItemNotFoundError(data.id); throw new errs.ItemNotFoundError(data.id);
} else if (!row.enabled) { }
throw new error.ValidationError('Host is already disabled'); if (!row.enabled) {
throw new errs.ValidationError("Host is already disabled");
} }
row.enabled = 0; row.enabled = 0;
return deadHostModel await deadHostModel
.query() .query()
.where('id', row.id) .where("id", row.id)
.patch({ .patch({
enabled: 0 enabled: 0,
}) });
.then(() => {
// Delete Nginx Config // Delete Nginx Config
return internalNginx.deleteConfig('dead_host', row) await internalNginx.deleteConfig("dead_host", row);
.then(() => { await internalNginx.reload();
return internalNginx.reload();
});
})
.then(() => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { await internalAuditLog.add(access, {
action: 'disabled', action: "disabled",
object_type: 'dead-host', object_type: "dead-host",
object_id: row.id, object_id: row.id,
meta: _.omit(row, omissions()) meta: _.omit(row, omissions()),
}); });
});
})
.then(() => {
return true; return true;
});
}, },
/** /**
@@ -399,43 +323,38 @@ const internalDeadHost = {
* *
* @param {Access} access * @param {Access} access
* @param {Array} [expand] * @param {Array} [expand]
* @param {String} [search_query] * @param {String} [searchQuery]
* @returns {Promise} * @returns {Promise}
*/ */
getAll: (access, expand, search_query) => { getAll: async (access, expand, searchQuery) => {
return access.can('dead_hosts:list') const accessData = await access.can("dead_hosts:list")
.then((access_data) => { const query = deadHostModel
let query = deadHostModel
.query() .query()
.where('is_deleted', 0) .where("is_deleted", 0)
.groupBy('id') .groupBy("id")
.allowGraph('[owner,certificate]') .allowGraph("[owner,certificate]")
.orderBy(castJsonIfNeed('domain_names'), 'ASC'); .orderBy(castJsonIfNeed("domain_names"), "ASC");
if (access_data.permission_visibility !== 'all') { if (accessData.permission_visibility !== "all") {
query.andWhere('owner_user_id', access.token.getUserId(1)); query.andWhere("owner_user_id", access.token.getUserId(1));
} }
// Query is used for searching // Query is used for searching
if (typeof search_query === 'string' && search_query.length > 0) { if (typeof searchQuery === "string" && searchQuery.length > 0) {
query.where(function () { query.where(function () {
this.where(castJsonIfNeed('domain_names'), 'like', '%' + search_query + '%'); this.where(castJsonIfNeed("domain_names"), "like", `%${searchQuery}%`);
}); });
} }
if (typeof expand !== 'undefined' && expand !== null) { if (typeof expand !== "undefined" && expand !== null) {
query.withGraphFetched('[' + expand.join(', ') + ']'); query.withGraphFetched(`[${expand.join(", ")}]`);
} }
return query.then(utils.omitRows(omissions())); const rows = await query.then(utils.omitRows(omissions()));
}) if (typeof expand !== "undefined" && expand !== null && expand.indexOf("certificate") !== -1) {
.then((rows) => { internalHost.cleanAllRowsCertificateMeta(rows);
if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
return internalHost.cleanAllRowsCertificateMeta(rows);
} }
return rows; return rows;
});
}, },
/** /**
@@ -445,21 +364,16 @@ const internalDeadHost = {
* @param {String} visibility * @param {String} visibility
* @returns {Promise} * @returns {Promise}
*/ */
getCount: (user_id, visibility) => { getCount: async (user_id, visibility) => {
let query = deadHostModel const query = deadHostModel.query().count("id as count").where("is_deleted", 0);
.query()
.count('id as count')
.where('is_deleted', 0);
if (visibility !== 'all') { if (visibility !== "all") {
query.andWhere('owner_user_id', user_id); query.andWhere("owner_user_id", user_id);
} }
return query.first() const row = await query.first();
.then((row) => { return Number.parseInt(row.count, 10);
return parseInt(row.count, 10); },
});
}
}; };
module.exports = internalDeadHost; export default internalDeadHost;

@@ -1,11 +1,10 @@
const _ = require('lodash'); import _ from "lodash";
const proxyHostModel = require('../models/proxy_host'); import { castJsonIfNeed } from "../lib/helpers.js";
const redirectionHostModel = require('../models/redirection_host'); import deadHostModel from "../models/dead_host.js";
const deadHostModel = require('../models/dead_host'); import proxyHostModel from "../models/proxy_host.js";
const {castJsonIfNeed} = require('../lib/helpers'); import redirectionHostModel from "../models/redirection_host.js";
const internalHost = { const internalHost = {
/** /**
* Makes sure that the ssl_* and hsts_* fields play nicely together. * Makes sure that the ssl_* and hsts_* fields play nicely together.
* ie: if there is no cert, then force_ssl is off. * ie: if there is no cert, then force_ssl is off.
@@ -15,25 +14,23 @@ const internalHost = {
* @param {object} [existing_data] * @param {object} [existing_data]
* @returns {object} * @returns {object}
*/ */
cleanSslHstsData: function (data, existing_data) { cleanSslHstsData: (data, existingData) => {
existing_data = existing_data === undefined ? {} : existing_data; const combinedData = _.assign({}, existingData || {}, data);
const combined_data = _.assign({}, existing_data, data); if (!combinedData.certificate_id) {
combinedData.ssl_forced = false;
if (!combined_data.certificate_id) { combinedData.http2_support = false;
combined_data.ssl_forced = false;
combined_data.http2_support = false;
} }
if (!combined_data.ssl_forced) { if (!combinedData.ssl_forced) {
combined_data.hsts_enabled = false; combinedData.hsts_enabled = false;
} }
if (!combined_data.hsts_enabled) { if (!combinedData.hsts_enabled) {
combined_data.hsts_subdomains = false; combinedData.hsts_subdomains = false;
} }
return combined_data; return combinedData;
}, },
/** /**
@@ -42,11 +39,12 @@ const internalHost = {
* @param {Array} rows * @param {Array} rows
* @returns {Array} * @returns {Array}
*/ */
cleanAllRowsCertificateMeta: function (rows) { cleanAllRowsCertificateMeta: (rows) => {
rows.map(function (row, idx) { rows.map((_, idx) => {
if (typeof rows[idx].certificate !== 'undefined' && rows[idx].certificate) { if (typeof rows[idx].certificate !== "undefined" && rows[idx].certificate) {
rows[idx].certificate.meta = {}; rows[idx].certificate.meta = {};
} }
return true;
}); });
return rows; return rows;
@@ -58,8 +56,8 @@ const internalHost = {
* @param {Object} row * @param {Object} row
* @returns {Object} * @returns {Object}
*/ */
cleanRowCertificateMeta: function (row) { cleanRowCertificateMeta: (row) => {
if (typeof row.certificate !== 'undefined' && row.certificate) { if (typeof row.certificate !== "undefined" && row.certificate) {
row.certificate.meta = {}; row.certificate.meta = {};
} }
@@ -67,54 +65,33 @@ const internalHost = {
}, },
/** /**
* This returns all the host types with any domain listed in the provided domain_names array. * This returns all the host types with any domain listed in the provided domainNames array.
* This is used by the certificates to temporarily disable any host that is using the domain * This is used by the certificates to temporarily disable any host that is using the domain
* *
* @param {Array} domain_names * @param {Array} domainNames
* @returns {Promise} * @returns {Promise}
*/ */
getHostsWithDomains: function (domain_names) { getHostsWithDomains: async (domainNames) => {
const promises = [ const responseObject = {
proxyHostModel
.query()
.where('is_deleted', 0),
redirectionHostModel
.query()
.where('is_deleted', 0),
deadHostModel
.query()
.where('is_deleted', 0)
];
return Promise.all(promises)
.then((promises_results) => {
let response_object = {
total_count: 0, total_count: 0,
dead_hosts: [], dead_hosts: [],
proxy_hosts: [], proxy_hosts: [],
redirection_hosts: [] redirection_hosts: [],
}; };
if (promises_results[0]) { const proxyRes = await proxyHostModel.query().where("is_deleted", 0);
// Proxy Hosts responseObject.proxy_hosts = internalHost._getHostsWithDomains(proxyRes, domainNames);
response_object.proxy_hosts = internalHost._getHostsWithDomains(promises_results[0], domain_names); responseObject.total_count += responseObject.proxy_hosts.length;
response_object.total_count += response_object.proxy_hosts.length;
}
if (promises_results[1]) { const redirRes = await redirectionHostModel.query().where("is_deleted", 0);
// Redirection Hosts responseObject.redirection_hosts = internalHost._getHostsWithDomains(redirRes, domainNames);
response_object.redirection_hosts = internalHost._getHostsWithDomains(promises_results[1], domain_names); responseObject.total_count += responseObject.redirection_hosts.length;
response_object.total_count += response_object.redirection_hosts.length;
}
if (promises_results[2]) { const deadRes = await deadHostModel.query().where("is_deleted", 0);
// Dead Hosts responseObject.dead_hosts = internalHost._getHostsWithDomains(deadRes, domainNames);
response_object.dead_hosts = internalHost._getHostsWithDomains(promises_results[2], domain_names); responseObject.total_count += responseObject.dead_hosts.length;
response_object.total_count += response_object.dead_hosts.length;
}
return response_object; return responseObject;
});
}, },
/** /**
@@ -125,50 +102,67 @@ const internalHost = {
* @param {Integer} [ignore_id] Must be supplied if type was also supplied * @param {Integer} [ignore_id] Must be supplied if type was also supplied
* @returns {Promise} * @returns {Promise}
*/ */
isHostnameTaken: function (hostname, ignore_type, ignore_id) { isHostnameTaken: (hostname, ignore_type, ignore_id) => {
const promises = [ const promises = [
proxyHostModel proxyHostModel
.query() .query()
.where('is_deleted', 0) .where("is_deleted", 0)
.andWhere(castJsonIfNeed('domain_names'), 'like', '%' + hostname + '%'), .andWhere(castJsonIfNeed("domain_names"), "like", `%${hostname}%`),
redirectionHostModel redirectionHostModel
.query() .query()
.where('is_deleted', 0) .where("is_deleted", 0)
.andWhere(castJsonIfNeed('domain_names'), 'like', '%' + hostname + '%'), .andWhere(castJsonIfNeed("domain_names"), "like", `%${hostname}%`),
deadHostModel deadHostModel
.query() .query()
.where('is_deleted', 0) .where("is_deleted", 0)
.andWhere(castJsonIfNeed('domain_names'), 'like', '%' + hostname + '%') .andWhere(castJsonIfNeed("domain_names"), "like", `%${hostname}%`),
]; ];
return Promise.all(promises) return Promise.all(promises).then((promises_results) => {
.then((promises_results) => {
let is_taken = false; let is_taken = false;
if (promises_results[0]) { if (promises_results[0]) {
// Proxy Hosts // Proxy Hosts
if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[0], ignore_type === 'proxy' && ignore_id ? ignore_id : 0)) { if (
internalHost._checkHostnameRecordsTaken(
hostname,
promises_results[0],
ignore_type === "proxy" && ignore_id ? ignore_id : 0,
)
) {
is_taken = true; is_taken = true;
} }
} }
if (promises_results[1]) { if (promises_results[1]) {
// Redirection Hosts // Redirection Hosts
if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[1], ignore_type === 'redirection' && ignore_id ? ignore_id : 0)) { if (
internalHost._checkHostnameRecordsTaken(
hostname,
promises_results[1],
ignore_type === "redirection" && ignore_id ? ignore_id : 0,
)
) {
is_taken = true; is_taken = true;
} }
} }
if (promises_results[2]) { if (promises_results[2]) {
// Dead Hosts // Dead Hosts
if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[2], ignore_type === 'dead' && ignore_id ? ignore_id : 0)) { if (
internalHost._checkHostnameRecordsTaken(
hostname,
promises_results[2],
ignore_type === "dead" && ignore_id ? ignore_id : 0,
)
) {
is_taken = true; is_taken = true;
} }
} }
return { return {
hostname: hostname, hostname: hostname,
is_taken: is_taken is_taken: is_taken,
}; };
}); });
}, },
@@ -177,60 +171,64 @@ const internalHost = {
* Private call only * Private call only
* *
* @param {String} hostname * @param {String} hostname
* @param {Array} existing_rows * @param {Array} existingRows
* @param {Integer} [ignore_id] * @param {Integer} [ignoreId]
* @returns {Boolean} * @returns {Boolean}
*/ */
_checkHostnameRecordsTaken: function (hostname, existing_rows, ignore_id) { _checkHostnameRecordsTaken: (hostname, existingRows, ignoreId) => {
let is_taken = false; let isTaken = false;
if (existing_rows && existing_rows.length) { if (existingRows?.length) {
existing_rows.map(function (existing_row) { existingRows.map((existingRow) => {
existing_row.domain_names.map(function (existing_hostname) { existingRow.domain_names.map((existingHostname) => {
// Does this domain match? // Does this domain match?
if (existing_hostname.toLowerCase() === hostname.toLowerCase()) { if (existingHostname.toLowerCase() === hostname.toLowerCase()) {
if (!ignore_id || ignore_id !== existing_row.id) { if (!ignoreId || ignoreId !== existingRow.id) {
is_taken = true; isTaken = true;
} }
} }
return true;
}); });
return true;
}); });
} }
return is_taken; return isTaken;
}, },
/** /**
* Private call only * Private call only
* *
* @param {Array} hosts * @param {Array} hosts
* @param {Array} domain_names * @param {Array} domainNames
* @returns {Array} * @returns {Array}
*/ */
_getHostsWithDomains: function (hosts, domain_names) { _getHostsWithDomains: (hosts, domainNames) => {
let response = []; const response = [];
if (hosts && hosts.length) { if (hosts?.length) {
hosts.map(function (host) { hosts.map((host) => {
let host_matches = false; let hostMatches = false;
domain_names.map(function (domain_name) { domainNames.map((domainName) => {
host.domain_names.map(function (host_domain_name) { host.domain_names.map((hostDomainName) => {
if (domain_name.toLowerCase() === host_domain_name.toLowerCase()) { if (domainName.toLowerCase() === hostDomainName.toLowerCase()) {
host_matches = true; hostMatches = true;
} }
return true;
}); });
return true;
}); });
if (host_matches) { if (hostMatches) {
response.push(host); response.push(host);
} }
return true;
}); });
} }
return response; return response;
} },
}; };
module.exports = internalHost; export default internalHost;
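Reviewer note: in the host helpers module above (host.js, imported elsewhere in this diff as ./host.js), getHostsWithDomains drops the Promise.all over the three host-type queries in favour of three sequential awaits. That is easier to read but serialises the three database round trips. If concurrency mattered, await composes with Promise.all just as well; the sketch below shows that alternative for contrast only and is not what the commit does (the model and helper names are taken from the hunk above).

const getHostsWithDomainsParallel = async (domainNames) => {
	// Run the three host-type queries concurrently and await them together.
	const [proxyRes, redirRes, deadRes] = await Promise.all([
		proxyHostModel.query().where("is_deleted", 0),
		redirectionHostModel.query().where("is_deleted", 0),
		deadHostModel.query().where("is_deleted", 0),
	]);
	const responseObject = {
		proxy_hosts: internalHost._getHostsWithDomains(proxyRes, domainNames),
		redirection_hosts: internalHost._getHostsWithDomains(redirRes, domainNames),
		dead_hosts: internalHost._getHostsWithDomains(deadRes, domainNames),
	};
	responseObject.total_count =
		responseObject.proxy_hosts.length + responseObject.redirection_hosts.length + responseObject.dead_hosts.length;
	return responseObject;
};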


@@ -1,43 +1,49 @@
const https = require('https'); import fs from "node:fs";
const fs = require('fs'); import https from "node:https";
const logger = require('../logger').ip_ranges; import { dirname } from "node:path";
const error = require('../lib/error'); import { fileURLToPath } from "node:url";
const utils = require('../lib/utils'); import errs from "../lib/error.js";
const internalNginx = require('./nginx'); import utils from "../lib/utils.js";
import { ipRanges as logger } from "../logger.js";
import internalNginx from "./nginx.js";
const CLOUDFRONT_URL = 'https://ip-ranges.amazonaws.com/ip-ranges.json'; const __filename = fileURLToPath(import.meta.url);
const CLOUDFARE_V4_URL = 'https://www.cloudflare.com/ips-v4'; const __dirname = dirname(__filename);
const CLOUDFARE_V6_URL = 'https://www.cloudflare.com/ips-v6';
const CLOUDFRONT_URL = "https://ip-ranges.amazonaws.com/ip-ranges.json";
const CLOUDFARE_V4_URL = "https://www.cloudflare.com/ips-v4";
const CLOUDFARE_V6_URL = "https://www.cloudflare.com/ips-v6";
const regIpV4 = /^(\d+\.?){4}\/\d+/; const regIpV4 = /^(\d+\.?){4}\/\d+/;
const regIpV6 = /^(([\da-fA-F]+)?:)+\/\d+/; const regIpV6 = /^(([\da-fA-F]+)?:)+\/\d+/;
const internalIpRanges = { const internalIpRanges = {
interval_timeout: 1000 * 60 * 60 * 6, // 6 hours interval_timeout: 1000 * 60 * 60 * 6, // 6 hours
interval: null, interval: null,
interval_processing: false, interval_processing: false,
iteration_count: 0, iteration_count: 0,
initTimer: () => { initTimer: () => {
logger.info('IP Ranges Renewal Timer initialized'); logger.info("IP Ranges Renewal Timer initialized");
internalIpRanges.interval = setInterval(internalIpRanges.fetch, internalIpRanges.interval_timeout); internalIpRanges.interval = setInterval(internalIpRanges.fetch, internalIpRanges.interval_timeout);
}, },
fetchUrl: (url) => { fetchUrl: (url) => {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
logger.info('Fetching ' + url); logger.info(`Fetching ${url}`);
return https.get(url, (res) => { return https
res.setEncoding('utf8'); .get(url, (res) => {
let raw_data = ''; res.setEncoding("utf8");
res.on('data', (chunk) => { let raw_data = "";
res.on("data", (chunk) => {
raw_data += chunk; raw_data += chunk;
}); });
res.on('end', () => { res.on("end", () => {
resolve(raw_data); resolve(raw_data);
}); });
}).on('error', (err) => { })
.on("error", (err) => {
reject(err); reject(err);
}); });
}); });
@@ -49,27 +55,30 @@ const internalIpRanges = {
fetch: () => { fetch: () => {
if (!internalIpRanges.interval_processing) { if (!internalIpRanges.interval_processing) {
internalIpRanges.interval_processing = true; internalIpRanges.interval_processing = true;
logger.info('Fetching IP Ranges from online services...'); logger.info("Fetching IP Ranges from online services...");
let ip_ranges = []; let ip_ranges = [];
return internalIpRanges.fetchUrl(CLOUDFRONT_URL) return internalIpRanges
.fetchUrl(CLOUDFRONT_URL)
.then((cloudfront_data) => { .then((cloudfront_data) => {
let data = JSON.parse(cloudfront_data); const data = JSON.parse(cloudfront_data);
if (data && typeof data.prefixes !== 'undefined') { if (data && typeof data.prefixes !== "undefined") {
data.prefixes.map((item) => { data.prefixes.map((item) => {
if (item.service === 'CLOUDFRONT') { if (item.service === "CLOUDFRONT") {
ip_ranges.push(item.ip_prefix); ip_ranges.push(item.ip_prefix);
} }
return true;
}); });
} }
if (data && typeof data.ipv6_prefixes !== 'undefined') { if (data && typeof data.ipv6_prefixes !== "undefined") {
data.ipv6_prefixes.map((item) => { data.ipv6_prefixes.map((item) => {
if (item.service === 'CLOUDFRONT') { if (item.service === "CLOUDFRONT") {
ip_ranges.push(item.ipv6_prefix); ip_ranges.push(item.ipv6_prefix);
} }
return true;
}); });
} }
}) })
@@ -77,26 +86,26 @@ const internalIpRanges = {
return internalIpRanges.fetchUrl(CLOUDFARE_V4_URL); return internalIpRanges.fetchUrl(CLOUDFARE_V4_URL);
}) })
.then((cloudfare_data) => { .then((cloudfare_data) => {
let items = cloudfare_data.split('\n').filter((line) => regIpV4.test(line)); const items = cloudfare_data.split("\n").filter((line) => regIpV4.test(line));
ip_ranges = [...ip_ranges, ...items]; ip_ranges = [...ip_ranges, ...items];
}) })
.then(() => { .then(() => {
return internalIpRanges.fetchUrl(CLOUDFARE_V6_URL); return internalIpRanges.fetchUrl(CLOUDFARE_V6_URL);
}) })
.then((cloudfare_data) => { .then((cloudfare_data) => {
let items = cloudfare_data.split('\n').filter((line) => regIpV6.test(line)); const items = cloudfare_data.split("\n").filter((line) => regIpV6.test(line));
ip_ranges = [...ip_ranges, ...items]; ip_ranges = [...ip_ranges, ...items];
}) })
.then(() => { .then(() => {
let clean_ip_ranges = []; const clean_ip_ranges = [];
ip_ranges.map((range) => { ip_ranges.map((range) => {
if (range) { if (range) {
clean_ip_ranges.push(range); clean_ip_ranges.push(range);
} }
return true;
}); });
return internalIpRanges.generateConfig(clean_ip_ranges) return internalIpRanges.generateConfig(clean_ip_ranges).then(() => {
.then(() => {
if (internalIpRanges.iteration_count) { if (internalIpRanges.iteration_count) {
// Reload nginx // Reload nginx
return internalNginx.reload(); return internalNginx.reload();
@@ -108,7 +117,7 @@ const internalIpRanges = {
internalIpRanges.iteration_count++; internalIpRanges.iteration_count++;
}) })
.catch((err) => { .catch((err) => {
logger.error(err.message); logger.fatal(err.message);
internalIpRanges.interval_processing = false; internalIpRanges.interval_processing = false;
}); });
} }
@@ -122,26 +131,26 @@ const internalIpRanges = {
const renderEngine = utils.getRenderEngine(); const renderEngine = utils.getRenderEngine();
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
let template = null; let template = null;
let filename = '/etc/nginx/conf.d/include/ip_ranges.conf'; const filename = "/etc/nginx/conf.d/include/ip_ranges.conf";
try { try {
template = fs.readFileSync(__dirname + '/../templates/ip_ranges.conf', {encoding: 'utf8'}); template = fs.readFileSync(`${__dirname}/../templates/ip_ranges.conf`, { encoding: "utf8" });
} catch (err) { } catch (err) {
reject(new error.ConfigurationError(err.message)); reject(new errs.ConfigurationError(err.message));
return; return;
} }
renderEngine renderEngine
.parseAndRender(template, { ip_ranges: ip_ranges }) .parseAndRender(template, { ip_ranges: ip_ranges })
.then((config_text) => { .then((config_text) => {
fs.writeFileSync(filename, config_text, {encoding: 'utf8'}); fs.writeFileSync(filename, config_text, { encoding: "utf8" });
resolve(true); resolve(true);
}) })
.catch((err) => { .catch((err) => {
logger.warn('Could not write ' + filename + ':', err.message); logger.warn(`Could not write ${filename}: ${err.message}`);
reject(new error.ConfigurationError(err.message)); reject(new errs.ConfigurationError(err.message));
}); });
}); });
} },
}; };
module.exports = internalIpRanges; export default internalIpRanges;
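Reviewer note: the IP ranges module above (exported as internalIpRanges) now derives __dirname from import.meta.url, since ESM files do not get __dirname for free. The pattern is worth isolating because it recurs in the nginx module below; this is the same shim used in the hunk, usable in any ESM module that reads files relative to itself.

import { dirname } from "node:path";
import { fileURLToPath } from "node:url";

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

// __dirname now behaves like its CommonJS counterpart, e.g. for loading templates:
// fs.readFileSync(`${__dirname}/../templates/ip_ranges.conf`, { encoding: "utf8" });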


@@ -1,12 +1,15 @@
const _ = require('lodash'); import fs from "node:fs";
const fs = require('node:fs'); import { dirname } from "node:path";
const logger = require('../logger').nginx; import { fileURLToPath } from "node:url";
const config = require('../lib/config'); import _ from "lodash";
const utils = require('../lib/utils'); import errs from "../lib/error.js";
const error = require('../lib/error'); import utils from "../lib/utils.js";
import { nginx as logger } from "../logger.js";
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const internalNginx = { const internalNginx = {
/** /**
* This will: * This will:
* - test the nginx config first to make sure it's OK * - test the nginx config first to make sure it's OK
@@ -24,7 +27,8 @@ const internalNginx = {
configure: (model, host_type, host) => { configure: (model, host_type, host) => {
let combined_meta = {}; let combined_meta = {};
return internalNginx.test() return internalNginx
.test()
.then(() => { .then(() => {
// Nginx is OK // Nginx is OK
// We're deleting this config regardless. // We're deleting this config regardless.
@@ -37,19 +41,17 @@ const internalNginx = {
}) })
.then(() => { .then(() => {
// Test nginx again and update meta with result // Test nginx again and update meta with result
return internalNginx.test() return internalNginx
.test()
.then(() => { .then(() => {
// nginx is ok // nginx is ok
combined_meta = _.assign({}, host.meta, { combined_meta = _.assign({}, host.meta, {
nginx_online: true, nginx_online: true,
nginx_err: null nginx_err: null,
}); });
return model return model.query().where("id", host.id).patch({
.query() meta: combined_meta,
.where('id', host.id)
.patch({
meta: combined_meta
}); });
}) })
.catch((err) => { .catch((err) => {
@@ -58,28 +60,27 @@ const internalNginx = {
// nginx: [alert] could not open error log file: open() "/var/log/nginx/error.log" failed (6: No such device or address) // nginx: [alert] could not open error log file: open() "/var/log/nginx/error.log" failed (6: No such device or address)
const valid_lines = []; const valid_lines = [];
const err_lines = err.message.split('\n'); const err_lines = err.message.split("\n");
err_lines.map((line) => { err_lines.map((line) => {
if (line.indexOf('/var/log/nginx/error.log') === -1) { if (line.indexOf("/var/log/nginx/error.log") === -1) {
valid_lines.push(line); valid_lines.push(line);
} }
return true;
}); });
if (config.debug()) { logger.debug("Nginx test failed:", valid_lines.join("\n"));
logger.error('Nginx test failed:', valid_lines.join('\n'));
}
// config is bad, update meta and delete config // config is bad, update meta and delete config
combined_meta = _.assign({}, host.meta, { combined_meta = _.assign({}, host.meta, {
nginx_online: false, nginx_online: false,
nginx_err: valid_lines.join('\n') nginx_err: valid_lines.join("\n"),
}); });
return model return model
.query() .query()
.where('id', host.id) .where("id", host.id)
.patch({ .patch({
meta: combined_meta meta: combined_meta,
}) })
.then(() => { .then(() => {
internalNginx.renameConfigAsError(host_type, host); internalNginx.renameConfigAsError(host_type, host);
@@ -101,21 +102,17 @@ const internalNginx = {
* @returns {Promise} * @returns {Promise}
*/ */
test: () => { test: () => {
if (config.debug()) { logger.debug("Testing Nginx configuration");
logger.info('Testing Nginx configuration'); return utils.execFile("/usr/sbin/nginx", ["-t", "-g", "error_log off;"]);
}
return utils.execFile('/usr/sbin/nginx', ['-t', '-g', 'error_log off;']);
}, },
/** /**
* @returns {Promise} * @returns {Promise}
*/ */
reload: () => { reload: () => {
return internalNginx.test() return internalNginx.test().then(() => {
.then(() => { logger.info("Reloading Nginx");
logger.info('Reloading Nginx'); return utils.execFile("/usr/sbin/nginx", ["-s", "reload"]);
return utils.execFile('/usr/sbin/nginx', ['-s', 'reload']);
}); });
}, },
@@ -125,8 +122,8 @@ const internalNginx = {
* @returns {String} * @returns {String}
*/ */
getConfigName: (host_type, host_id) => { getConfigName: (host_type, host_id) => {
if (host_type === 'default') { if (host_type === "default") {
return '/data/nginx/default_host/site.conf'; return "/data/nginx/default_host/site.conf";
} }
return `/data/nginx/${internalNginx.getFileFriendlyHostType(host_type)}/${host_id}.conf`; return `/data/nginx/${internalNginx.getFileFriendlyHostType(host_type)}/${host_id}.conf`;
}, },
@@ -141,38 +138,45 @@ const internalNginx = {
let template; let template;
try { try {
template = fs.readFileSync(`${__dirname}/../templates/_location.conf`, {encoding: 'utf8'}); template = fs.readFileSync(`${__dirname}/../templates/_location.conf`, { encoding: "utf8" });
} catch (err) { } catch (err) {
reject(new error.ConfigurationError(err.message)); reject(new errs.ConfigurationError(err.message));
return; return;
} }
const renderEngine = utils.getRenderEngine(); const renderEngine = utils.getRenderEngine();
let renderedLocations = ''; let renderedLocations = "";
const locationRendering = async () => { const locationRendering = async () => {
for (let i = 0; i < host.locations.length; i++) { for (let i = 0; i < host.locations.length; i++) {
const locationCopy = Object.assign({}, {access_list_id: host.access_list_id}, {certificate_id: host.certificate_id}, const locationCopy = Object.assign(
{ssl_forced: host.ssl_forced}, {caching_enabled: host.caching_enabled}, {block_exploits: host.block_exploits}, {},
{allow_websocket_upgrade: host.allow_websocket_upgrade}, {http2_support: host.http2_support}, { access_list_id: host.access_list_id },
{hsts_enabled: host.hsts_enabled}, {hsts_subdomains: host.hsts_subdomains}, {access_list: host.access_list}, { certificate_id: host.certificate_id },
{certificate: host.certificate}, host.locations[i]); { ssl_forced: host.ssl_forced },
{ caching_enabled: host.caching_enabled },
{ block_exploits: host.block_exploits },
{ allow_websocket_upgrade: host.allow_websocket_upgrade },
{ http2_support: host.http2_support },
{ hsts_enabled: host.hsts_enabled },
{ hsts_subdomains: host.hsts_subdomains },
{ access_list: host.access_list },
{ certificate: host.certificate },
host.locations[i],
);
if (locationCopy.forward_host.indexOf('/') > -1) { if (locationCopy.forward_host.indexOf("/") > -1) {
const splitted = locationCopy.forward_host.split('/'); const splitted = locationCopy.forward_host.split("/");
locationCopy.forward_host = splitted.shift(); locationCopy.forward_host = splitted.shift();
locationCopy.forward_path = `/${splitted.join('/')}`; locationCopy.forward_path = `/${splitted.join("/")}`;
} }
// eslint-disable-next-line
renderedLocations += await renderEngine.parseAndRender(template, locationCopy); renderedLocations += await renderEngine.parseAndRender(template, locationCopy);
} }
}; };
locationRendering().then(() => resolve(renderedLocations)); locationRendering().then(() => resolve(renderedLocations));
}); });
}, },
@@ -186,9 +190,7 @@ const internalNginx = {
const host = JSON.parse(JSON.stringify(host_row)); const host = JSON.parse(JSON.stringify(host_row));
const nice_host_type = internalNginx.getFileFriendlyHostType(host_type); const nice_host_type = internalNginx.getFileFriendlyHostType(host_type);
if (config.debug()) { logger.debug(`Generating ${nice_host_type} Config:`, JSON.stringify(host, null, 2));
logger.info(`Generating ${nice_host_type} Config:`, JSON.stringify(host, null, 2));
}
const renderEngine = utils.getRenderEngine(); const renderEngine = utils.getRenderEngine();
@@ -197,9 +199,9 @@ const internalNginx = {
const filename = internalNginx.getConfigName(nice_host_type, host.id); const filename = internalNginx.getConfigName(nice_host_type, host.id);
try { try {
template = fs.readFileSync(`${__dirname}/../templates/${nice_host_type}.conf`, {encoding: 'utf8'}); template = fs.readFileSync(`${__dirname}/../templates/${nice_host_type}.conf`, { encoding: "utf8" });
} catch (err) { } catch (err) {
reject(new error.ConfigurationError(err.message)); reject(new errs.ConfigurationError(err.message));
return; return;
} }
@@ -207,9 +209,9 @@ const internalNginx = {
let origLocations; let origLocations;
// Manipulate the data a bit before sending it to the template // Manipulate the data a bit before sending it to the template
if (nice_host_type !== 'default') { if (nice_host_type !== "default") {
host.use_default_location = true; host.use_default_location = true;
if (typeof host.advanced_config !== 'undefined' && host.advanced_config) { if (typeof host.advanced_config !== "undefined" && host.advanced_config) {
host.use_default_location = !internalNginx.advancedConfigHasDefaultLocation(host.advanced_config); host.use_default_location = !internalNginx.advancedConfigHasDefaultLocation(host.advanced_config);
} }
} }
@@ -223,11 +225,10 @@ const internalNginx = {
// Allow someone who is using / custom location path to use it, and skip the default / location // Allow someone who is using / custom location path to use it, and skip the default / location
_.map(host.locations, (location) => { _.map(host.locations, (location) => {
if (location.path === '/') { if (location.path === "/") {
host.use_default_location = false; host.use_default_location = false;
} }
}); });
} else { } else {
locationsPromise = Promise.resolve(); locationsPromise = Promise.resolve();
} }
@@ -239,11 +240,8 @@ const internalNginx = {
renderEngine renderEngine
.parseAndRender(template, host) .parseAndRender(template, host)
.then((config_text) => { .then((config_text) => {
fs.writeFileSync(filename, config_text, {encoding: 'utf8'}); fs.writeFileSync(filename, config_text, { encoding: "utf8" });
logger.debug("Wrote config:", filename, config_text);
if (config.debug()) {
logger.success('Wrote config:', filename, config_text);
}
// Restore locations array // Restore locations array
host.locations = origLocations; host.locations = origLocations;
@@ -251,11 +249,8 @@ const internalNginx = {
resolve(true); resolve(true);
}) })
.catch((err) => { .catch((err) => {
if (config.debug()) { logger.debug(`Could not write ${filename}:`, err.message);
logger.warn(`Could not write ${filename}:`, err.message); reject(new errs.ConfigurationError(err.message));
}
reject(new error.ConfigurationError(err.message));
}); });
}); });
}); });
@@ -270,10 +265,7 @@ const internalNginx = {
* @returns {Promise} * @returns {Promise}
*/ */
generateLetsEncryptRequestConfig: (certificate) => { generateLetsEncryptRequestConfig: (certificate) => {
if (config.debug()) { logger.debug("Generating LetsEncrypt Request Config:", certificate);
logger.info('Generating LetsEncrypt Request Config:', certificate);
}
const renderEngine = utils.getRenderEngine(); const renderEngine = utils.getRenderEngine();
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
@@ -281,9 +273,9 @@ const internalNginx = {
const filename = `/data/nginx/temp/letsencrypt_${certificate.id}.conf`; const filename = `/data/nginx/temp/letsencrypt_${certificate.id}.conf`;
try { try {
template = fs.readFileSync(`${__dirname}/../templates/letsencrypt-request.conf`, {encoding: 'utf8'}); template = fs.readFileSync(`${__dirname}/../templates/letsencrypt-request.conf`, { encoding: "utf8" });
} catch (err) { } catch (err) {
reject(new error.ConfigurationError(err.message)); reject(new errs.ConfigurationError(err.message));
return; return;
} }
@@ -292,20 +284,13 @@ const internalNginx = {
renderEngine renderEngine
.parseAndRender(template, certificate) .parseAndRender(template, certificate)
.then((config_text) => { .then((config_text) => {
fs.writeFileSync(filename, config_text, {encoding: 'utf8'}); fs.writeFileSync(filename, config_text, { encoding: "utf8" });
logger.debug("Wrote config:", filename, config_text);
if (config.debug()) {
logger.success('Wrote config:', filename, config_text);
}
resolve(true); resolve(true);
}) })
.catch((err) => { .catch((err) => {
if (config.debug()) { logger.debug(`Could not write ${filename}:`, err.message);
logger.warn(`Could not write ${filename}:`, err.message); reject(new errs.ConfigurationError(err.message));
}
reject(new error.ConfigurationError(err.message));
}); });
}); });
}, },
@@ -320,7 +305,7 @@ const internalNginx = {
try { try {
fs.unlinkSync(filename); fs.unlinkSync(filename);
} catch (err) { } catch (err) {
logger.debug('Could not delete file:', JSON.stringify(err, null, 2)); logger.debug("Could not delete file:", JSON.stringify(err, null, 2));
} }
}, },
@@ -330,7 +315,7 @@ const internalNginx = {
* @returns String * @returns String
*/ */
getFileFriendlyHostType: (host_type) => { getFileFriendlyHostType: (host_type) => {
return host_type.replace(/-/g, '_'); return host_type.replace(/-/g, "_");
}, },
/** /**
@@ -354,7 +339,10 @@ const internalNginx = {
* @returns {Promise} * @returns {Promise}
*/ */
deleteConfig: (host_type, host, delete_err_file) => { deleteConfig: (host_type, host, delete_err_file) => {
const config_file = internalNginx.getConfigName(internalNginx.getFileFriendlyHostType(host_type), typeof host === 'undefined' ? 0 : host.id); const config_file = internalNginx.getConfigName(
internalNginx.getFileFriendlyHostType(host_type),
typeof host === "undefined" ? 0 : host.id,
);
const config_file_err = `${config_file}.err`; const config_file_err = `${config_file}.err`;
return new Promise((resolve /*, reject*/) => { return new Promise((resolve /*, reject*/) => {
@@ -372,7 +360,10 @@ const internalNginx = {
* @returns {Promise} * @returns {Promise}
*/ */
renameConfigAsError: (host_type, host) => { renameConfigAsError: (host_type, host) => {
const config_file = internalNginx.getConfigName(internalNginx.getFileFriendlyHostType(host_type), typeof host === 'undefined' ? 0 : host.id); const config_file = internalNginx.getConfigName(
internalNginx.getFileFriendlyHostType(host_type),
typeof host === "undefined" ? 0 : host.id,
);
const config_file_err = `${config_file}.err`; const config_file_err = `${config_file}.err`;
return new Promise((resolve /*, reject*/) => { return new Promise((resolve /*, reject*/) => {
@@ -395,6 +386,7 @@ const internalNginx = {
const promises = []; const promises = [];
hosts.map((host) => { hosts.map((host) => {
promises.push(internalNginx.generateConfig(host_type, host)); promises.push(internalNginx.generateConfig(host_type, host));
return true;
}); });
return Promise.all(promises); return Promise.all(promises);
@@ -409,6 +401,7 @@ const internalNginx = {
const promises = []; const promises = [];
hosts.map((host) => { hosts.map((host) => {
promises.push(internalNginx.deleteConfig(host_type, host, true)); promises.push(internalNginx.deleteConfig(host_type, host, true));
return true;
}); });
return Promise.all(promises); return Promise.all(promises);
@@ -424,13 +417,13 @@ const internalNginx = {
* @returns {boolean} * @returns {boolean}
*/ */
ipv6Enabled: () => { ipv6Enabled: () => {
if (typeof process.env.DISABLE_IPV6 !== 'undefined') { if (typeof process.env.DISABLE_IPV6 !== "undefined") {
const disabled = process.env.DISABLE_IPV6.toLowerCase(); const disabled = process.env.DISABLE_IPV6.toLowerCase();
return !(disabled === 'on' || disabled === 'true' || disabled === '1' || disabled === 'yes'); return !(disabled === "on" || disabled === "true" || disabled === "1" || disabled === "yes");
} }
return true; return true;
} },
}; };
module.exports = internalNginx; export default internalNginx;
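Reviewer note: throughout the nginx module above, the explicit `if (config.debug()) { logger.info(...) }` guards are replaced with plain logger.debug(...) calls, so the verbosity decision moves into the logger itself. The real logger lives in logger.js and is not part of this diff; the following is only an assumed minimal sketch of what a level-gated logger can look like, not the project's implementation.

// Hypothetical level-gated logger: debug output only when enabled via the environment.
const debugEnabled = process.env.DEBUG === "true";

const logger = {
	info: (...args) => console.log("[info]", ...args),
	warn: (...args) => console.warn("[warn]", ...args),
	error: (...args) => console.error("[error]", ...args),
	fatal: (...args) => console.error("[fatal]", ...args),
	debug: (...args) => {
		if (debugEnabled) {
			console.log("[debug]", ...args);
		}
	},
};

export default logger;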


@@ -1,106 +1,105 @@
const _ = require('lodash'); import _ from "lodash";
const error = require('../lib/error'); import errs from "../lib/error.js";
const utils = require('../lib/utils'); import { castJsonIfNeed } from "../lib/helpers.js";
const proxyHostModel = require('../models/proxy_host'); import utils from "../lib/utils.js";
const internalHost = require('./host'); import proxyHostModel from "../models/proxy_host.js";
const internalNginx = require('./nginx'); import internalAuditLog from "./audit-log.js";
const internalAuditLog = require('./audit-log'); import internalCertificate from "./certificate.js";
const internalCertificate = require('./certificate'); import internalHost from "./host.js";
const {castJsonIfNeed} = require('../lib/helpers'); import internalNginx from "./nginx.js";
function omissions () { const omissions = () => {
return ['is_deleted', 'owner.is_deleted']; return ["is_deleted", "owner.is_deleted"];
} };
const internalProxyHost = { const internalProxyHost = {
/** /**
* @param {Access} access * @param {Access} access
* @param {Object} data * @param {Object} data
* @returns {Promise} * @returns {Promise}
*/ */
create: (access, data) => { create: (access, data) => {
let create_certificate = data.certificate_id === 'new'; let thisData = data;
const createCertificate = thisData.certificate_id === "new";
if (create_certificate) { if (createCertificate) {
delete data.certificate_id; delete thisData.certificate_id;
} }
return access.can('proxy_hosts:create', data) return access
.can("proxy_hosts:create", thisData)
.then(() => { .then(() => {
// Get a list of the domain names and check each of them against existing records // Get a list of the domain names and check each of them against existing records
let domain_name_check_promises = []; const domain_name_check_promises = [];
data.domain_names.map(function (domain_name) { thisData.domain_names.map((domain_name) => {
domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name)); domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name));
return true;
}); });
return Promise.all(domain_name_check_promises) return Promise.all(domain_name_check_promises).then((check_results) => {
.then((check_results) => { check_results.map((result) => {
check_results.map(function (result) {
if (result.is_taken) { if (result.is_taken) {
throw new error.ValidationError(result.hostname + ' is already in use'); throw new errs.ValidationError(`${result.hostname} is already in use`);
} }
return true;
}); });
}); });
}) })
.then(() => { .then(() => {
// At this point the domains should have been checked // At this point the domains should have been checked
data.owner_user_id = access.token.getUserId(1); thisData.owner_user_id = access.token.getUserId(1);
data = internalHost.cleanSslHstsData(data); thisData = internalHost.cleanSslHstsData(thisData);
// Fix for db field not having a default value // Fix for db field not having a default value
// for this optional field. // for this optional field.
if (typeof data.advanced_config === 'undefined') { if (typeof thisData.advanced_config === "undefined") {
data.advanced_config = ''; thisData.advanced_config = "";
} }
return proxyHostModel return proxyHostModel.query().insertAndFetch(thisData).then(utils.omitRow(omissions()));
.query()
.insertAndFetch(data)
.then(utils.omitRow(omissions()));
}) })
.then((row) => { .then((row) => {
if (create_certificate) { if (createCertificate) {
return internalCertificate.createQuickCertificate(access, data) return internalCertificate
.createQuickCertificate(access, thisData)
.then((cert) => { .then((cert) => {
// update host with cert id // update host with cert id
return internalProxyHost.update(access, { return internalProxyHost.update(access, {
id: row.id, id: row.id,
certificate_id: cert.id certificate_id: cert.id,
}); });
}) })
.then(() => { .then(() => {
return row; return row;
}); });
} else {
return row;
} }
return row;
}) })
.then((row) => { .then((row) => {
// re-fetch with cert // re-fetch with cert
return internalProxyHost.get(access, { return internalProxyHost.get(access, {
id: row.id, id: row.id,
expand: ['certificate', 'owner', 'access_list.[clients,items]'] expand: ["certificate", "owner", "access_list.[clients,items]"],
}); });
}) })
.then((row) => { .then((row) => {
// Configure nginx // Configure nginx
return internalNginx.configure(proxyHostModel, 'proxy_host', row) return internalNginx.configure(proxyHostModel, "proxy_host", row).then(() => {
.then(() => {
return row; return row;
}); });
}) })
.then((row) => { .then((row) => {
// Audit log // Audit log
data.meta = _.assign({}, data.meta || {}, row.meta); thisData.meta = _.assign({}, thisData.meta || {}, row.meta);
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog
action: 'created', .add(access, {
object_type: 'proxy-host', action: "created",
object_type: "proxy-host",
object_id: row.id, object_id: row.id,
meta: data meta: thisData,
}) })
.then(() => { .then(() => {
return row; return row;
@@ -115,77 +114,88 @@ const internalProxyHost = {
* @return {Promise} * @return {Promise}
*/ */
update: (access, data) => { update: (access, data) => {
let create_certificate = data.certificate_id === 'new'; let thisData = data;
const create_certificate = thisData.certificate_id === "new";
if (create_certificate) { if (create_certificate) {
delete data.certificate_id; delete thisData.certificate_id;
} }
return access.can('proxy_hosts:update', data.id) return access
.can("proxy_hosts:update", thisData.id)
.then((/*access_data*/) => { .then((/*access_data*/) => {
// Get a list of the domain names and check each of them against existing records // Get a list of the domain names and check each of them against existing records
let domain_name_check_promises = []; const domain_name_check_promises = [];
if (typeof data.domain_names !== 'undefined') { if (typeof thisData.domain_names !== "undefined") {
data.domain_names.map(function (domain_name) { thisData.domain_names.map((domain_name) => {
domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, 'proxy', data.id)); return domain_name_check_promises.push(
internalHost.isHostnameTaken(domain_name, "proxy", thisData.id),
);
}); });
return Promise.all(domain_name_check_promises) return Promise.all(domain_name_check_promises).then((check_results) => {
.then((check_results) => { check_results.map((result) => {
check_results.map(function (result) {
if (result.is_taken) { if (result.is_taken) {
throw new error.ValidationError(result.hostname + ' is already in use'); throw new errs.ValidationError(`${result.hostname} is already in use`);
} }
return true;
}); });
}); });
} }
}) })
.then(() => { .then(() => {
return internalProxyHost.get(access, {id: data.id}); return internalProxyHost.get(access, { id: thisData.id });
}) })
.then((row) => { .then((row) => {
if (row.id !== data.id) { if (row.id !== thisData.id) {
// Sanity check that something crazy hasn't happened // Sanity check that something crazy hasn't happened
throw new error.InternalValidationError('Proxy Host could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id); throw new errs.InternalValidationError(
`Proxy Host could not be updated, IDs do not match: ${row.id} !== ${thisData.id}`,
);
} }
if (create_certificate) { if (create_certificate) {
return internalCertificate.createQuickCertificate(access, { return internalCertificate
domain_names: data.domain_names || row.domain_names, .createQuickCertificate(access, {
meta: _.assign({}, row.meta, data.meta) domain_names: thisData.domain_names || row.domain_names,
meta: _.assign({}, row.meta, thisData.meta),
}) })
.then((cert) => { .then((cert) => {
// update host with cert id // update host with cert id
data.certificate_id = cert.id; thisData.certificate_id = cert.id;
}) })
.then(() => { .then(() => {
return row; return row;
}); });
} else {
return row;
} }
return row;
}) })
.then((row) => { .then((row) => {
// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here. // Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
data = _.assign({}, { thisData = _.assign(
domain_names: row.domain_names {},
}, data); {
domain_names: row.domain_names,
},
data,
);
data = internalHost.cleanSslHstsData(data, row); thisData = internalHost.cleanSslHstsData(thisData, row);
return proxyHostModel return proxyHostModel
.query() .query()
.where({id: data.id}) .where({ id: thisData.id })
.patch(data) .patch(thisData)
.then(utils.omitRow(omissions())) .then(utils.omitRow(omissions()))
.then((saved_row) => { .then((saved_row) => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog
action: 'updated', .add(access, {
object_type: 'proxy-host', action: "updated",
object_type: "proxy-host",
object_id: row.id, object_id: row.id,
meta: data meta: thisData,
}) })
.then(() => { .then(() => {
return saved_row; return saved_row;
@@ -193,9 +203,10 @@ const internalProxyHost = {
}); });
}) })
.then(() => { .then(() => {
return internalProxyHost.get(access, { return internalProxyHost
id: data.id, .get(access, {
expand: ['owner', 'certificate', 'access_list.[clients,items]'] id: thisData.id,
expand: ["owner", "certificate", "access_list.[clients,items]"],
}) })
.then((row) => { .then((row) => {
if (!row.enabled) { if (!row.enabled) {
@@ -203,11 +214,9 @@ const internalProxyHost = {
return row; return row;
} }
// Configure nginx // Configure nginx
return internalNginx.configure(proxyHostModel, 'proxy_host', row) return internalNginx.configure(proxyHostModel, "proxy_host", row).then((new_meta) => {
.then((new_meta) => {
row.meta = new_meta; row.meta = new_meta;
row = internalHost.cleanRowCertificateMeta(row); return _.omit(internalHost.cleanRowCertificateMeta(row), omissions());
return _.omit(row, omissions());
}); });
}); });
}); });
@@ -222,39 +231,38 @@ const internalProxyHost = {
* @return {Promise} * @return {Promise}
*/ */
get: (access, data) => { get: (access, data) => {
if (typeof data === 'undefined') { const thisData = data || {};
data = {};
}
return access.can('proxy_hosts:get', data.id) return access
.can("proxy_hosts:get", thisData.id)
.then((access_data) => { .then((access_data) => {
let query = proxyHostModel const query = proxyHostModel
.query() .query()
.where('is_deleted', 0) .where("is_deleted", 0)
.andWhere('id', data.id) .andWhere("id", thisData.id)
.allowGraph('[owner,access_list.[clients,items],certificate]') .allowGraph("[owner,access_list.[clients,items],certificate]")
.first(); .first();
if (access_data.permission_visibility !== 'all') { if (access_data.permission_visibility !== "all") {
query.andWhere('owner_user_id', access.token.getUserId(1)); query.andWhere("owner_user_id", access.token.getUserId(1));
} }
if (typeof data.expand !== 'undefined' && data.expand !== null) { if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
query.withGraphFetched('[' + data.expand.join(', ') + ']'); query.withGraphFetched(`[${thisData.expand.join(", ")}]`);
} }
return query.then(utils.omitRow(omissions())); return query.then(utils.omitRow(omissions()));
}) })
.then((row) => { .then((row) => {
if (!row || !row.id) { if (!row || !row.id) {
throw new error.ItemNotFoundError(data.id); throw new errs.ItemNotFoundError(thisData.id);
} }
row = internalHost.cleanRowCertificateMeta(row); const thisRow = internalHost.cleanRowCertificateMeta(row);
// Custom omissions // Custom omissions
if (typeof data.omit !== 'undefined' && data.omit !== null) { if (typeof thisData.omit !== "undefined" && thisData.omit !== null) {
row = _.omit(row, data.omit); return _.omit(row, thisData.omit);
} }
return row; return thisRow;
}); });
}, },
@@ -266,35 +274,35 @@ const internalProxyHost = {
* @returns {Promise} * @returns {Promise}
*/ */
delete: (access, data) => { delete: (access, data) => {
return access.can('proxy_hosts:delete', data.id) return access
.can("proxy_hosts:delete", data.id)
.then(() => { .then(() => {
return internalProxyHost.get(access, { id: data.id }); return internalProxyHost.get(access, { id: data.id });
}) })
.then((row) => { .then((row) => {
if (!row || !row.id) { if (!row || !row.id) {
throw new error.ItemNotFoundError(data.id); throw new errs.ItemNotFoundError(data.id);
} }
return proxyHostModel return proxyHostModel
.query() .query()
.where('id', row.id) .where("id", row.id)
.patch({ .patch({
is_deleted: 1 is_deleted: 1,
}) })
.then(() => { .then(() => {
// Delete Nginx Config // Delete Nginx Config
return internalNginx.deleteConfig('proxy_host', row) return internalNginx.deleteConfig("proxy_host", row).then(() => {
.then(() => {
return internalNginx.reload(); return internalNginx.reload();
}); });
}) })
.then(() => { .then(() => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog.add(access, {
action: 'deleted', action: "deleted",
object_type: 'proxy-host', object_type: "proxy-host",
object_id: row.id, object_id: row.id,
meta: _.omit(row, omissions()) meta: _.omit(row, omissions()),
}); });
}); });
}) })
@@ -311,39 +319,41 @@ const internalProxyHost = {
* @returns {Promise} * @returns {Promise}
*/ */
enable: (access, data) => { enable: (access, data) => {
return access.can('proxy_hosts:update', data.id) return access
.can("proxy_hosts:update", data.id)
.then(() => { .then(() => {
return internalProxyHost.get(access, { return internalProxyHost.get(access, {
id: data.id, id: data.id,
expand: ['certificate', 'owner', 'access_list'] expand: ["certificate", "owner", "access_list"],
}); });
}) })
.then((row) => { .then((row) => {
if (!row || !row.id) { if (!row || !row.id) {
throw new error.ItemNotFoundError(data.id); throw new errs.ItemNotFoundError(data.id);
} else if (row.enabled) { }
throw new error.ValidationError('Host is already enabled'); if (row.enabled) {
throw new errs.ValidationError("Host is already enabled");
} }
row.enabled = 1; row.enabled = 1;
return proxyHostModel return proxyHostModel
.query() .query()
.where('id', row.id) .where("id", row.id)
.patch({ .patch({
enabled: 1 enabled: 1,
}) })
.then(() => { .then(() => {
// Configure nginx // Configure nginx
return internalNginx.configure(proxyHostModel, 'proxy_host', row); return internalNginx.configure(proxyHostModel, "proxy_host", row);
}) })
.then(() => { .then(() => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog.add(access, {
action: 'enabled', action: "enabled",
object_type: 'proxy-host', object_type: "proxy-host",
object_id: row.id, object_id: row.id,
meta: _.omit(row, omissions()) meta: _.omit(row, omissions()),
}); });
}); });
}) })
@@ -360,39 +370,40 @@ const internalProxyHost = {
* @returns {Promise} * @returns {Promise}
*/ */
disable: (access, data) => { disable: (access, data) => {
return access.can('proxy_hosts:update', data.id) return access
.can("proxy_hosts:update", data.id)
.then(() => { .then(() => {
return internalProxyHost.get(access, { id: data.id }); return internalProxyHost.get(access, { id: data.id });
}) })
.then((row) => { .then((row) => {
if (!row || !row.id) { if (!row || !row.id) {
throw new error.ItemNotFoundError(data.id); throw new errs.ItemNotFoundError(data.id);
} else if (!row.enabled) { }
throw new error.ValidationError('Host is already disabled'); if (!row.enabled) {
throw new errs.ValidationError("Host is already disabled");
} }
row.enabled = 0; row.enabled = 0;
return proxyHostModel return proxyHostModel
.query() .query()
.where('id', row.id) .where("id", row.id)
.patch({ .patch({
enabled: 0 enabled: 0,
}) })
.then(() => { .then(() => {
// Delete Nginx Config // Delete Nginx Config
return internalNginx.deleteConfig('proxy_host', row) return internalNginx.deleteConfig("proxy_host", row).then(() => {
.then(() => {
return internalNginx.reload(); return internalNginx.reload();
}); });
}) })
.then(() => { .then(() => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog.add(access, {
action: 'disabled', action: "disabled",
object_type: 'proxy-host', object_type: "proxy-host",
object_id: row.id, object_id: row.id,
meta: _.omit(row, omissions()) meta: _.omit(row, omissions()),
}); });
}); });
}) })
@@ -409,40 +420,38 @@ const internalProxyHost = {
* @param {String} [search_query] * @param {String} [search_query]
* @returns {Promise} * @returns {Promise}
*/ */
getAll: (access, expand, search_query) => { getAll: async (access, expand, searchQuery) => {
return access.can('proxy_hosts:list') const accessData = await access.can("proxy_hosts:list");
.then((access_data) => {
let query = proxyHostModel
.query()
.where('is_deleted', 0)
.groupBy('id')
.allowGraph('[owner,access_list,certificate]')
.orderBy(castJsonIfNeed('domain_names'), 'ASC');
if (access_data.permission_visibility !== 'all') { const query = proxyHostModel
query.andWhere('owner_user_id', access.token.getUserId(1)); .query()
.where("is_deleted", 0)
.groupBy("id")
.allowGraph("[owner,access_list,certificate]")
.orderBy(castJsonIfNeed("domain_names"), "ASC");
if (accessData.permission_visibility !== "all") {
query.andWhere("owner_user_id", access.token.getUserId(1));
} }
// Query is used for searching // Query is used for searching
if (typeof search_query === 'string' && search_query.length > 0) { if (typeof searchQuery === "string" && searchQuery.length > 0) {
query.where(function () { query.where(function () {
this.where(castJsonIfNeed('domain_names'), 'like', `%${search_query}%`); this.where(castJsonIfNeed("domain_names"), "like", `%${searchQuery}%`);
}); });
} }
if (typeof expand !== 'undefined' && expand !== null) { if (typeof expand !== "undefined" && expand !== null) {
query.withGraphFetched('[' + expand.join(', ') + ']'); query.withGraphFetched(`[${expand.join(", ")}]`);
} }
return query.then(utils.omitRows(omissions())); const rows = await query.then(utils.omitRows(omissions()));
})
.then((rows) => { if (typeof expand !== "undefined" && expand !== null && expand.indexOf("certificate") !== -1) {
if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
return internalHost.cleanAllRowsCertificateMeta(rows); return internalHost.cleanAllRowsCertificateMeta(rows);
} }
return rows; return rows;
});
}, },
/** /**
@@ -453,20 +462,16 @@ const internalProxyHost = {
* @returns {Promise} * @returns {Promise}
*/ */
getCount: (user_id, visibility) => { getCount: (user_id, visibility) => {
let query = proxyHostModel const query = proxyHostModel.query().count("id as count").where("is_deleted", 0);
.query()
.count('id as count')
.where('is_deleted', 0);
if (visibility !== 'all') { if (visibility !== "all") {
query.andWhere('owner_user_id', user_id); query.andWhere("owner_user_id", user_id);
} }
return query.first() return query.first().then((row) => {
.then((row) => { return Number.parseInt(row.count, 10);
return parseInt(row.count, 10);
}); });
} },
}; };
module.exports = internalProxyHost; export default internalProxyHost;
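Reviewer note: the proxy host module above (exported as internalProxyHost) and the redirection host module below both copy the incoming data argument into a local thisData before deleting, patching or reassigning anything, which keeps function parameters untouched (parameter reassignment is commonly flagged by linters, and this change set moves to Biome). A small illustrative sketch of the idea; the function name and field are made up for the example.

// Illustrative only: reassign a local copy, never the parameter itself.
const withDefaults = (data) => {
	let thisData = data || {};
	if (typeof thisData.advanced_config === "undefined") {
		thisData = { ...thisData, advanced_config: "" };
	}
	return thisData;
};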


@@ -1,73 +1,73 @@
const _ = require('lodash'); import _ from "lodash";
const error = require('../lib/error'); import errs from "../lib/error.js";
const utils = require('../lib/utils'); import { castJsonIfNeed } from "../lib/helpers.js";
const redirectionHostModel = require('../models/redirection_host'); import utils from "../lib/utils.js";
const internalHost = require('./host'); import redirectionHostModel from "../models/redirection_host.js";
const internalNginx = require('./nginx'); import internalAuditLog from "./audit-log.js";
const internalAuditLog = require('./audit-log'); import internalCertificate from "./certificate.js";
const internalCertificate = require('./certificate'); import internalHost from "./host.js";
const {castJsonIfNeed} = require('../lib/helpers'); import internalNginx from "./nginx.js";
function omissions () { const omissions = () => {
return ['is_deleted']; return ["is_deleted"];
} };
const internalRedirectionHost = { const internalRedirectionHost = {
/** /**
* @param {Access} access * @param {Access} access
* @param {Object} data * @param {Object} data
* @returns {Promise} * @returns {Promise}
*/ */
create: (access, data) => { create: (access, data) => {
let create_certificate = data.certificate_id === 'new'; let thisData = data || {};
const createCertificate = thisData.certificate_id === "new";
if (create_certificate) { if (createCertificate) {
delete data.certificate_id; delete thisData.certificate_id;
} }
return access.can('redirection_hosts:create', data) return access
.can("redirection_hosts:create", thisData)
.then((/*access_data*/) => { .then((/*access_data*/) => {
// Get a list of the domain names and check each of them against existing records // Get a list of the domain names and check each of them against existing records
let domain_name_check_promises = []; const domain_name_check_promises = [];
data.domain_names.map(function (domain_name) { thisData.domain_names.map((domain_name) => {
domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name)); domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name));
return true;
}); });
return Promise.all(domain_name_check_promises) return Promise.all(domain_name_check_promises).then((check_results) => {
.then((check_results) => { check_results.map((result) => {
check_results.map(function (result) {
if (result.is_taken) { if (result.is_taken) {
throw new error.ValidationError(result.hostname + ' is already in use'); throw new errs.ValidationError(`${result.hostname} is already in use`);
} }
return true;
}); });
}); });
}) })
.then(() => { .then(() => {
// At this point the domains should have been checked // At this point the domains should have been checked
data.owner_user_id = access.token.getUserId(1); thisData.owner_user_id = access.token.getUserId(1);
data = internalHost.cleanSslHstsData(data); thisData = internalHost.cleanSslHstsData(thisData);
// Fix for db field not having a default value // Fix for db field not having a default value
// for this optional field. // for this optional field.
if (typeof data.advanced_config === 'undefined') { if (typeof data.advanced_config === "undefined") {
data.advanced_config = ''; data.advanced_config = "";
} }
return redirectionHostModel return redirectionHostModel.query().insertAndFetch(thisData).then(utils.omitRow(omissions()));
.query()
.insertAndFetch(data)
.then(utils.omitRow(omissions()));
}) })
.then((row) => { .then((row) => {
if (create_certificate) { if (createCertificate) {
return internalCertificate.createQuickCertificate(access, data) return internalCertificate
.createQuickCertificate(access, thisData)
.then((cert) => { .then((cert) => {
// update host with cert id // update host with cert id
return internalRedirectionHost.update(access, { return internalRedirectionHost.update(access, {
id: row.id, id: row.id,
certificate_id: cert.id certificate_id: cert.id,
}); });
}) })
.then(() => { .then(() => {
@@ -80,25 +80,25 @@ const internalRedirectionHost = {
// re-fetch with cert // re-fetch with cert
return internalRedirectionHost.get(access, { return internalRedirectionHost.get(access, {
id: row.id, id: row.id,
expand: ['certificate', 'owner'] expand: ["certificate", "owner"],
}); });
}) })
.then((row) => { .then((row) => {
// Configure nginx // Configure nginx
return internalNginx.configure(redirectionHostModel, 'redirection_host', row) return internalNginx.configure(redirectionHostModel, "redirection_host", row).then(() => {
.then(() => {
return row; return row;
}); });
}) })
.then((row) => { .then((row) => {
data.meta = _.assign({}, data.meta || {}, row.meta); thisData.meta = _.assign({}, thisData.meta || {}, row.meta);
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog
action: 'created', .add(access, {
object_type: 'redirection-host', action: "created",
object_type: "redirection-host",
object_id: row.id, object_id: row.id,
meta: data meta: thisData,
}) })
.then(() => { .then(() => {
return row; return row;
@@ -113,76 +113,88 @@ const internalRedirectionHost = {
* @return {Promise} * @return {Promise}
*/ */
update: (access, data) => { update: (access, data) => {
let create_certificate = data.certificate_id === 'new'; let thisData = data || {};
const createCertificate = thisData.certificate_id === "new";
if (create_certificate) { if (createCertificate) {
delete data.certificate_id; delete thisData.certificate_id;
} }
return access.can('redirection_hosts:update', data.id) return access
.can("redirection_hosts:update", thisData.id)
.then((/*access_data*/) => { .then((/*access_data*/) => {
// Get a list of the domain names and check each of them against existing records // Get a list of the domain names and check each of them against existing records
let domain_name_check_promises = []; const domain_name_check_promises = [];
if (typeof data.domain_names !== 'undefined') { if (typeof thisData.domain_names !== "undefined") {
data.domain_names.map(function (domain_name) { thisData.domain_names.map((domain_name) => {
domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, 'redirection', data.id)); domain_name_check_promises.push(
internalHost.isHostnameTaken(domain_name, "redirection", thisData.id),
);
return true;
}); });
return Promise.all(domain_name_check_promises) return Promise.all(domain_name_check_promises).then((check_results) => {
.then((check_results) => { check_results.map((result) => {
check_results.map(function (result) {
if (result.is_taken) { if (result.is_taken) {
throw new error.ValidationError(result.hostname + ' is already in use'); throw new errs.ValidationError(`${result.hostname} is already in use`);
} }
return true;
}); });
}); });
} }
}) })
.then(() => { .then(() => {
return internalRedirectionHost.get(access, {id: data.id}); return internalRedirectionHost.get(access, { id: thisData.id });
}) })
.then((row) => { .then((row) => {
if (row.id !== data.id) { if (row.id !== thisData.id) {
// Sanity check that something crazy hasn't happened // Sanity check that something crazy hasn't happened
throw new error.InternalValidationError('Redirection Host could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id); throw new errs.InternalValidationError(
`Redirection Host could not be updated, IDs do not match: ${row.id} !== ${thisData.id}`,
);
} }
if (create_certificate) { if (createCertificate) {
return internalCertificate.createQuickCertificate(access, { return internalCertificate
domain_names: data.domain_names || row.domain_names, .createQuickCertificate(access, {
meta: _.assign({}, row.meta, data.meta) domain_names: thisData.domain_names || row.domain_names,
meta: _.assign({}, row.meta, thisData.meta),
}) })
.then((cert) => { .then((cert) => {
// update host with cert id // update host with cert id
data.certificate_id = cert.id; thisData.certificate_id = cert.id;
}) })
.then(() => { .then(() => {
return row; return row;
}); });
} else {
return row;
} }
return row;
}) })
.then((row) => { .then((row) => {
// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here. // Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
data = _.assign({}, { thisData = _.assign(
domain_names: row.domain_names {},
}, data); {
domain_names: row.domain_names,
},
thisData,
);
data = internalHost.cleanSslHstsData(data, row); thisData = internalHost.cleanSslHstsData(thisData, row);
return redirectionHostModel return redirectionHostModel
.query() .query()
.where({id: data.id}) .where({ id: thisData.id })
.patch(data) .patch(thisData)
.then((saved_row) => { .then((saved_row) => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog
action: 'updated', .add(access, {
object_type: 'redirection-host', action: "updated",
object_type: "redirection-host",
object_id: row.id, object_id: row.id,
meta: data meta: thisData,
}) })
.then(() => { .then(() => {
return _.omit(saved_row, omissions()); return _.omit(saved_row, omissions());
@@ -190,17 +202,18 @@ const internalRedirectionHost = {
}); });
}) })
.then(() => { .then(() => {
return internalRedirectionHost.get(access, { return internalRedirectionHost
id: data.id, .get(access, {
expand: ['owner', 'certificate'] id: thisData.id,
expand: ["owner", "certificate"],
}) })
.then((row) => { .then((row) => {
// Configure nginx // Configure nginx
return internalNginx.configure(redirectionHostModel, 'redirection_host', row) return internalNginx
.configure(redirectionHostModel, "redirection_host", row)
.then((new_meta) => { .then((new_meta) => {
row.meta = new_meta; row.meta = new_meta;
row = internalHost.cleanRowCertificateMeta(row); return _.omit(internalHost.cleanRowCertificateMeta(row), omissions());
return _.omit(row, omissions());
}); });
}); });
}); });
@@ -215,39 +228,39 @@ const internalRedirectionHost = {
* @return {Promise} * @return {Promise}
*/ */
get: (access, data) => { get: (access, data) => {
if (typeof data === 'undefined') { const thisData = data || {};
data = {};
}
return access.can('redirection_hosts:get', data.id) return access
.can("redirection_hosts:get", thisData.id)
.then((access_data) => { .then((access_data) => {
let query = redirectionHostModel const query = redirectionHostModel
.query() .query()
.where('is_deleted', 0) .where("is_deleted", 0)
.andWhere('id', data.id) .andWhere("id", thisData.id)
.allowGraph('[owner,certificate]') .allowGraph("[owner,certificate]")
.first(); .first();
if (access_data.permission_visibility !== 'all') { if (access_data.permission_visibility !== "all") {
query.andWhere('owner_user_id', access.token.getUserId(1)); query.andWhere("owner_user_id", access.token.getUserId(1));
} }
if (typeof data.expand !== 'undefined' && data.expand !== null) { if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
query.withGraphFetched('[' + data.expand.join(', ') + ']'); query.withGraphFetched(`[${thisData.expand.join(", ")}]`);
} }
return query.then(utils.omitRow(omissions())); return query.then(utils.omitRow(omissions()));
}) })
.then((row) => { .then((row) => {
if (!row || !row.id) { let thisRow = row;
throw new error.ItemNotFoundError(data.id); if (!thisRow || !thisRow.id) {
throw new errs.ItemNotFoundError(thisData.id);
} }
row = internalHost.cleanRowCertificateMeta(row); thisRow = internalHost.cleanRowCertificateMeta(thisRow);
// Custom omissions // Custom omissions
if (typeof data.omit !== 'undefined' && data.omit !== null) { if (typeof thisData.omit !== "undefined" && thisData.omit !== null) {
row = _.omit(row, data.omit); return _.omit(thisRow, thisData.omit);
} }
return row; return thisRow;
}); });
}, },
@@ -259,35 +272,35 @@ const internalRedirectionHost = {
* @returns {Promise} * @returns {Promise}
*/ */
delete: (access, data) => { delete: (access, data) => {
return access.can('redirection_hosts:delete', data.id) return access
.can("redirection_hosts:delete", data.id)
.then(() => { .then(() => {
return internalRedirectionHost.get(access, { id: data.id }); return internalRedirectionHost.get(access, { id: data.id });
}) })
.then((row) => { .then((row) => {
if (!row || !row.id) { if (!row || !row.id) {
throw new error.ItemNotFoundError(data.id); throw new errs.ItemNotFoundError(data.id);
} }
return redirectionHostModel return redirectionHostModel
.query() .query()
.where('id', row.id) .where("id", row.id)
.patch({ .patch({
is_deleted: 1 is_deleted: 1,
}) })
.then(() => { .then(() => {
// Delete Nginx Config // Delete Nginx Config
return internalNginx.deleteConfig('redirection_host', row) return internalNginx.deleteConfig("redirection_host", row).then(() => {
.then(() => {
return internalNginx.reload(); return internalNginx.reload();
}); });
}) })
.then(() => { .then(() => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog.add(access, {
action: 'deleted', action: "deleted",
object_type: 'redirection-host', object_type: "redirection-host",
object_id: row.id, object_id: row.id,
meta: _.omit(row, omissions()) meta: _.omit(row, omissions()),
}); });
}); });
}) })
@@ -304,39 +317,41 @@ const internalRedirectionHost = {
* @returns {Promise} * @returns {Promise}
*/ */
enable: (access, data) => { enable: (access, data) => {
return access.can('redirection_hosts:update', data.id) return access
.can("redirection_hosts:update", data.id)
.then(() => { .then(() => {
return internalRedirectionHost.get(access, { return internalRedirectionHost.get(access, {
id: data.id, id: data.id,
expand: ['certificate', 'owner'] expand: ["certificate", "owner"],
}); });
}) })
.then((row) => { .then((row) => {
if (!row || !row.id) { if (!row || !row.id) {
throw new error.ItemNotFoundError(data.id); throw new errs.ItemNotFoundError(data.id);
} else if (row.enabled) { }
throw new error.ValidationError('Host is already enabled'); if (row.enabled) {
throw new errs.ValidationError("Host is already enabled");
} }
row.enabled = 1; row.enabled = 1;
return redirectionHostModel return redirectionHostModel
.query() .query()
.where('id', row.id) .where("id", row.id)
.patch({ .patch({
enabled: 1 enabled: 1,
}) })
.then(() => { .then(() => {
// Configure nginx // Configure nginx
return internalNginx.configure(redirectionHostModel, 'redirection_host', row); return internalNginx.configure(redirectionHostModel, "redirection_host", row);
}) })
.then(() => { .then(() => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog.add(access, {
action: 'enabled', action: "enabled",
object_type: 'redirection-host', object_type: "redirection-host",
object_id: row.id, object_id: row.id,
meta: _.omit(row, omissions()) meta: _.omit(row, omissions()),
}); });
}); });
}) })
@@ -353,39 +368,40 @@ const internalRedirectionHost = {
* @returns {Promise} * @returns {Promise}
*/ */
disable: (access, data) => { disable: (access, data) => {
return access.can('redirection_hosts:update', data.id) return access
.can("redirection_hosts:update", data.id)
.then(() => { .then(() => {
return internalRedirectionHost.get(access, { id: data.id }); return internalRedirectionHost.get(access, { id: data.id });
}) })
.then((row) => { .then((row) => {
if (!row || !row.id) { if (!row || !row.id) {
throw new error.ItemNotFoundError(data.id); throw new errs.ItemNotFoundError(data.id);
} else if (!row.enabled) { }
throw new error.ValidationError('Host is already disabled'); if (!row.enabled) {
throw new errs.ValidationError("Host is already disabled");
} }
row.enabled = 0; row.enabled = 0;
return redirectionHostModel return redirectionHostModel
.query() .query()
.where('id', row.id) .where("id", row.id)
.patch({ .patch({
enabled: 0 enabled: 0,
}) })
.then(() => { .then(() => {
// Delete Nginx Config // Delete Nginx Config
return internalNginx.deleteConfig('redirection_host', row) return internalNginx.deleteConfig("redirection_host", row).then(() => {
.then(() => {
return internalNginx.reload(); return internalNginx.reload();
}); });
}) })
.then(() => { .then(() => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog.add(access, {
action: 'disabled', action: "disabled",
object_type: 'redirection-host', object_type: "redirection-host",
object_id: row.id, object_id: row.id,
meta: _.omit(row, omissions()) meta: _.omit(row, omissions()),
}); });
}); });
}) })
@@ -403,34 +419,35 @@ const internalRedirectionHost = {
* @returns {Promise} * @returns {Promise}
*/ */
getAll: (access, expand, search_query) => { getAll: (access, expand, search_query) => {
return access.can('redirection_hosts:list') return access
.can("redirection_hosts:list")
.then((access_data) => { .then((access_data) => {
let query = redirectionHostModel const query = redirectionHostModel
.query() .query()
.where('is_deleted', 0) .where("is_deleted", 0)
.groupBy('id') .groupBy("id")
.allowGraph('[owner,certificate]') .allowGraph("[owner,certificate]")
.orderBy(castJsonIfNeed('domain_names'), 'ASC'); .orderBy(castJsonIfNeed("domain_names"), "ASC");
if (access_data.permission_visibility !== 'all') { if (access_data.permission_visibility !== "all") {
query.andWhere('owner_user_id', access.token.getUserId(1)); query.andWhere("owner_user_id", access.token.getUserId(1));
} }
// Query is used for searching // Query is used for searching
if (typeof search_query === 'string' && search_query.length > 0) { if (typeof search_query === "string" && search_query.length > 0) {
query.where(function () { query.where(function () {
this.where(castJsonIfNeed('domain_names'), 'like', `%${search_query}%`); this.where(castJsonIfNeed("domain_names"), "like", `%${search_query}%`);
}); });
} }
if (typeof expand !== 'undefined' && expand !== null) { if (typeof expand !== "undefined" && expand !== null) {
query.withGraphFetched('[' + expand.join(', ') + ']'); query.withGraphFetched(`[${expand.join(", ")}]`);
} }
return query.then(utils.omitRows(omissions())); return query.then(utils.omitRows(omissions()));
}) })
.then((rows) => { .then((rows) => {
if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) { if (typeof expand !== "undefined" && expand !== null && expand.indexOf("certificate") !== -1) {
return internalHost.cleanAllRowsCertificateMeta(rows); return internalHost.cleanAllRowsCertificateMeta(rows);
} }
@@ -446,20 +463,16 @@ const internalRedirectionHost = {
* @returns {Promise} * @returns {Promise}
*/ */
getCount: (user_id, visibility) => { getCount: (user_id, visibility) => {
let query = redirectionHostModel const query = redirectionHostModel.query().count("id as count").where("is_deleted", 0);
.query()
.count('id as count')
.where('is_deleted', 0);
if (visibility !== 'all') { if (visibility !== "all") {
query.andWhere('owner_user_id', user_id); query.andWhere("owner_user_id", user_id);
} }
return query.first() return query.first().then((row) => {
.then((row) => { return Number.parseInt(row.count, 10);
return parseInt(row.count, 10);
}); });
} },
}; };
module.exports = internalRedirectionHost; export default internalRedirectionHost;
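Both create and update in this module treat certificate_id: "new" as a sentinel: the field is stripped, a certificate is requested through internalCertificate.createQuickCertificate, and the host data is then patched with the issued certificate id. A minimal sketch of that flow, assuming the same createQuickCertificate(access, { domain_names, meta }) signature used above:

import internalCertificate from "./certificate.js";

// Sketch only: resolve the "new" certificate sentinel before persisting host data.
const resolveCertificateId = async (access, data, existingRow) => {
	if (data.certificate_id !== "new") {
		return data;
	}
	const thisData = { ...data };
	delete thisData.certificate_id;
	const cert = await internalCertificate.createQuickCertificate(access, {
		domain_names: thisData.domain_names || existingRow.domain_names,
		meta: { ...existingRow.meta, ...thisData.meta },
	});
	// The host row is then patched with the issued certificate's id.
	thisData.certificate_id = cert.id;
	return thisData;
};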


@@ -1,24 +1,24 @@
const internalProxyHost = require('./proxy-host'); import internalDeadHost from "./dead-host.js";
const internalRedirectionHost = require('./redirection-host'); import internalProxyHost from "./proxy-host.js";
const internalDeadHost = require('./dead-host'); import internalRedirectionHost from "./redirection-host.js";
const internalStream = require('./stream'); import internalStream from "./stream.js";
const internalReport = { const internalReport = {
/** /**
* @param {Access} access * @param {Access} access
* @return {Promise} * @return {Promise}
*/ */
getHostsReport: (access) => { getHostsReport: (access) => {
return access.can('reports:hosts', 1) return access
.can("reports:hosts", 1)
.then((access_data) => { .then((access_data) => {
let user_id = access.token.getUserId(1); const userId = access.token.getUserId(1);
let promises = [ const promises = [
internalProxyHost.getCount(user_id, access_data.visibility), internalProxyHost.getCount(userId, access_data.visibility),
internalRedirectionHost.getCount(user_id, access_data.visibility), internalRedirectionHost.getCount(userId, access_data.visibility),
internalStream.getCount(user_id, access_data.visibility), internalStream.getCount(userId, access_data.visibility),
internalDeadHost.getCount(user_id, access_data.visibility) internalDeadHost.getCount(userId, access_data.visibility),
]; ];
return Promise.all(promises); return Promise.all(promises);
@@ -28,11 +28,10 @@ const internalReport = {
proxy: counts.shift(), proxy: counts.shift(),
redirection: counts.shift(), redirection: counts.shift(),
stream: counts.shift(), stream: counts.shift(),
dead: counts.shift() dead: counts.shift(),
}; };
}); });
},
}
}; };
module.exports = internalReport; export default internalReport;
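The report above resolves four counts in parallel and then shift()s them into a keyed object; destructuring the Promise.all result expresses the same idea more directly. A minimal sketch, assuming each host module exposes getCount(userId, visibility) resolving to a number:

import internalDeadHost from "./dead-host.js";
import internalProxyHost from "./proxy-host.js";
import internalRedirectionHost from "./redirection-host.js";
import internalStream from "./stream.js";

// Sketch only: gather all host counts concurrently and return them keyed by type.
const getHostCounts = async (userId, visibility) => {
	const [proxy, redirection, stream, dead] = await Promise.all([
		internalProxyHost.getCount(userId, visibility),
		internalRedirectionHost.getCount(userId, visibility),
		internalStream.getCount(userId, visibility),
		internalDeadHost.getCount(userId, visibility),
	]);
	return { proxy, redirection, stream, dead };
};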


@@ -1,10 +1,9 @@
const fs = require('fs'); import fs from "node:fs";
const error = require('../lib/error'); import errs from "../lib/error.js";
const settingModel = require('../models/setting'); import settingModel from "../models/setting.js";
const internalNginx = require('./nginx'); import internalNginx from "./nginx.js";
const internalSetting = { const internalSetting = {
/** /**
* @param {Access} access * @param {Access} access
* @param {Object} data * @param {Object} data
@@ -12,37 +11,38 @@ const internalSetting = {
* @return {Promise} * @return {Promise}
*/ */
update: (access, data) => { update: (access, data) => {
return access.can('settings:update', data.id) return access
.can("settings:update", data.id)
.then((/*access_data*/) => { .then((/*access_data*/) => {
return internalSetting.get(access, { id: data.id }); return internalSetting.get(access, { id: data.id });
}) })
.then((row) => { .then((row) => {
if (row.id !== data.id) { if (row.id !== data.id) {
// Sanity check that something crazy hasn't happened // Sanity check that something crazy hasn't happened
throw new error.InternalValidationError('Setting could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id); throw new errs.InternalValidationError(
`Setting could not be updated, IDs do not match: ${row.id} !== ${data.id}`,
);
} }
return settingModel return settingModel.query().where({ id: data.id }).patch(data);
.query()
.where({id: data.id})
.patch(data);
}) })
.then(() => { .then(() => {
return internalSetting.get(access, { return internalSetting.get(access, {
id: data.id id: data.id,
}); });
}) })
.then((row) => { .then((row) => {
if (row.id === 'default-site') { if (row.id === "default-site") {
// write the html if we need to // write the html if we need to
if (row.value === 'html') { if (row.value === "html") {
fs.writeFileSync('/data/nginx/default_www/index.html', row.meta.html, {encoding: 'utf8'}); fs.writeFileSync("/data/nginx/default_www/index.html", row.meta.html, { encoding: "utf8" });
} }
// Configure nginx // Configure nginx
return internalNginx.deleteConfig('default') return internalNginx
.deleteConfig("default")
.then(() => { .then(() => {
return internalNginx.generateConfig('default', row); return internalNginx.generateConfig("default", row);
}) })
.then(() => { .then(() => {
return internalNginx.test(); return internalNginx.test();
@@ -54,7 +54,8 @@ const internalSetting = {
return row; return row;
}) })
.catch((/*err*/) => { .catch((/*err*/) => {
internalNginx.deleteConfig('default') internalNginx
.deleteConfig("default")
.then(() => { .then(() => {
return internalNginx.test(); return internalNginx.test();
}) })
@@ -63,12 +64,11 @@ const internalSetting = {
}) })
.then(() => { .then(() => {
// I'm being slack here I know.. // I'm being slack here I know..
throw new error.ValidationError('Could not reconfigure Nginx. Please check logs.'); throw new errs.ValidationError("Could not reconfigure Nginx. Please check logs.");
}); });
}); });
} else {
return row;
} }
return row;
}); });
}, },
@@ -79,19 +79,16 @@ const internalSetting = {
* @return {Promise} * @return {Promise}
*/ */
get: (access, data) => { get: (access, data) => {
return access.can('settings:get', data.id) return access
.can("settings:get", data.id)
.then(() => { .then(() => {
return settingModel return settingModel.query().where("id", data.id).first();
.query()
.where('id', data.id)
.first();
}) })
.then((row) => { .then((row) => {
if (row) { if (row) {
return row; return row;
} else {
throw new error.ItemNotFoundError(data.id);
} }
throw new errs.ItemNotFoundError(data.id);
}); });
}, },
@@ -102,15 +99,13 @@ const internalSetting = {
* @returns {*} * @returns {*}
*/ */
getCount: (access) => { getCount: (access) => {
return access.can('settings:list') return access
.can("settings:list")
.then(() => { .then(() => {
return settingModel return settingModel.query().count("id as count").first();
.query()
.count('id as count')
.first();
}) })
.then((row) => { .then((row) => {
return parseInt(row.count, 10); return Number.parseInt(row.count, 10);
}); });
}, },
@@ -121,13 +116,10 @@ const internalSetting = {
* @returns {Promise} * @returns {Promise}
*/ */
getAll: (access) => { getAll: (access) => {
return access.can('settings:list') return access.can("settings:list").then(() => {
.then(() => { return settingModel.query().orderBy("description", "ASC");
return settingModel
.query()
.orderBy('description', 'ASC');
}); });
} },
}; };
module.exports = internalSetting; export default internalSetting;
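For the default-site setting, the update path writes the custom HTML when value is "html", regenerates the default nginx config, tests it, and tears the config back down if the test fails. A condensed sketch of that sequence, assuming the internalNginx helpers (deleteConfig, generateConfig, test, reload) behave as they are used above:

import fs from "node:fs";
import errs from "../lib/error.js";
import internalNginx from "./nginx.js";

// Sketch only: apply a "default-site" setting row and roll back on nginx failure.
const applyDefaultSite = async (row) => {
	if (row.value === "html") {
		fs.writeFileSync("/data/nginx/default_www/index.html", row.meta.html, { encoding: "utf8" });
	}
	try {
		await internalNginx.deleteConfig("default");
		await internalNginx.generateConfig("default", row);
		await internalNginx.test();
		await internalNginx.reload();
	} catch (_err) {
		// Remove the broken config so nginx keeps serving everything else.
		await internalNginx.deleteConfig("default");
		await internalNginx.test();
		await internalNginx.reload();
		throw new errs.ValidationError("Could not reconfigure Nginx. Please check logs.");
	}
	return row;
};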


@@ -1,87 +1,84 @@
const _ = require('lodash'); import _ from "lodash";
const error = require('../lib/error'); import errs from "../lib/error.js";
const utils = require('../lib/utils'); import { castJsonIfNeed } from "../lib/helpers.js";
const streamModel = require('../models/stream'); import utils from "../lib/utils.js";
const internalNginx = require('./nginx'); import streamModel from "../models/stream.js";
const internalAuditLog = require('./audit-log'); import internalAuditLog from "./audit-log.js";
const internalCertificate = require('./certificate'); import internalCertificate from "./certificate.js";
const internalHost = require('./host'); import internalHost from "./host.js";
const {castJsonIfNeed} = require('../lib/helpers'); import internalNginx from "./nginx.js";
function omissions () { const omissions = () => {
return ['is_deleted', 'owner.is_deleted', 'certificate.is_deleted']; return ["is_deleted", "owner.is_deleted", "certificate.is_deleted"];
} };
const internalStream = { const internalStream = {
/** /**
* @param {Access} access * @param {Access} access
* @param {Object} data * @param {Object} data
* @returns {Promise} * @returns {Promise}
*/ */
create: (access, data) => { create: (access, data) => {
const create_certificate = data.certificate_id === 'new'; const create_certificate = data.certificate_id === "new";
if (create_certificate) { if (create_certificate) {
delete data.certificate_id; delete data.certificate_id;
} }
return access.can('streams:create', data) return access
.can("streams:create", data)
.then((/*access_data*/) => { .then((/*access_data*/) => {
// TODO: At this point the existing ports should have been checked // TODO: At this point the existing ports should have been checked
data.owner_user_id = access.token.getUserId(1); data.owner_user_id = access.token.getUserId(1);
if (typeof data.meta === 'undefined') { if (typeof data.meta === "undefined") {
data.meta = {}; data.meta = {};
} }
// streams aren't routed by domain name so don't store domain names in the DB // streams aren't routed by domain name so don't store domain names in the DB
let data_no_domains = structuredClone(data); const data_no_domains = structuredClone(data);
delete data_no_domains.domain_names; delete data_no_domains.domain_names;
return streamModel return streamModel.query().insertAndFetch(data_no_domains).then(utils.omitRow(omissions()));
.query()
.insertAndFetch(data_no_domains)
.then(utils.omitRow(omissions()));
}) })
.then((row) => { .then((row) => {
if (create_certificate) { if (create_certificate) {
return internalCertificate.createQuickCertificate(access, data) return internalCertificate
.createQuickCertificate(access, data)
.then((cert) => { .then((cert) => {
// update host with cert id // update host with cert id
return internalStream.update(access, { return internalStream.update(access, {
id: row.id, id: row.id,
certificate_id: cert.id certificate_id: cert.id,
}); });
}) })
.then(() => { .then(() => {
return row; return row;
}); });
} else {
return row;
} }
return row;
}) })
.then((row) => { .then((row) => {
// re-fetch with cert // re-fetch with cert
return internalStream.get(access, { return internalStream.get(access, {
id: row.id, id: row.id,
expand: ['certificate', 'owner'] expand: ["certificate", "owner"],
}); });
}) })
.then((row) => { .then((row) => {
// Configure nginx // Configure nginx
return internalNginx.configure(streamModel, 'stream', row) return internalNginx.configure(streamModel, "stream", row).then(() => {
.then(() => {
return row; return row;
}); });
}) })
.then((row) => { .then((row) => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog
action: 'created', .add(access, {
object_type: 'stream', action: "created",
object_type: "stream",
object_id: row.id, object_id: row.id,
meta: data meta: data,
}) })
.then(() => { .then(() => {
return row; return row;
@@ -96,56 +93,65 @@ const internalStream = {
* @return {Promise} * @return {Promise}
*/ */
update: (access, data) => { update: (access, data) => {
const create_certificate = data.certificate_id === 'new'; let thisData = data;
const create_certificate = thisData.certificate_id === "new";
if (create_certificate) { if (create_certificate) {
delete data.certificate_id; delete thisData.certificate_id;
} }
return access.can('streams:update', data.id) return access
.can("streams:update", thisData.id)
.then((/*access_data*/) => { .then((/*access_data*/) => {
// TODO: at this point the existing streams should have been checked // TODO: at this point the existing streams should have been checked
return internalStream.get(access, {id: data.id}); return internalStream.get(access, { id: thisData.id });
}) })
.then((row) => { .then((row) => {
if (row.id !== data.id) { if (row.id !== thisData.id) {
// Sanity check that something crazy hasn't happened // Sanity check that something crazy hasn't happened
throw new error.InternalValidationError('Stream could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id); throw new errs.InternalValidationError(
`Stream could not be updated, IDs do not match: ${row.id} !== ${thisData.id}`,
);
} }
if (create_certificate) { if (create_certificate) {
return internalCertificate.createQuickCertificate(access, { return internalCertificate
domain_names: data.domain_names || row.domain_names, .createQuickCertificate(access, {
meta: _.assign({}, row.meta, data.meta) domain_names: thisData.domain_names || row.domain_names,
meta: _.assign({}, row.meta, thisData.meta),
}) })
.then((cert) => { .then((cert) => {
// update host with cert id // update host with cert id
data.certificate_id = cert.id; thisData.certificate_id = cert.id;
}) })
.then(() => { .then(() => {
return row; return row;
}); });
} else {
return row;
} }
return row;
}) })
.then((row) => { .then((row) => {
// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here. // Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
data = _.assign({}, { thisData = _.assign(
domain_names: row.domain_names {},
}, data); {
domain_names: row.domain_names,
},
thisData,
);
return streamModel return streamModel
.query() .query()
.patchAndFetchById(row.id, data) .patchAndFetchById(row.id, thisData)
.then(utils.omitRow(omissions())) .then(utils.omitRow(omissions()))
.then((saved_row) => { .then((saved_row) => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog
action: 'updated', .add(access, {
object_type: 'stream', action: "updated",
object_type: "stream",
object_id: row.id, object_id: row.id,
meta: data meta: thisData,
}) })
.then(() => { .then(() => {
return saved_row; return saved_row;
@@ -153,13 +159,10 @@ const internalStream = {
}); });
}) })
.then(() => { .then(() => {
return internalStream.get(access, {id: data.id, expand: ['owner', 'certificate']}) return internalStream.get(access, { id: thisData.id, expand: ["owner", "certificate"] }).then((row) => {
.then((row) => { return internalNginx.configure(streamModel, "stream", row).then((new_meta) => {
return internalNginx.configure(streamModel, 'stream', row)
.then((new_meta) => {
row.meta = new_meta; row.meta = new_meta;
row = internalHost.cleanRowCertificateMeta(row); return _.omit(internalHost.cleanRowCertificateMeta(row), omissions());
return _.omit(row, omissions());
}); });
}); });
}); });
@@ -174,39 +177,39 @@ const internalStream = {
* @return {Promise} * @return {Promise}
*/ */
get: (access, data) => { get: (access, data) => {
if (typeof data === 'undefined') { const thisData = data || {};
data = {};
}
return access.can('streams:get', data.id) return access
.can("streams:get", thisData.id)
.then((access_data) => { .then((access_data) => {
let query = streamModel const query = streamModel
.query() .query()
.where('is_deleted', 0) .where("is_deleted", 0)
.andWhere('id', data.id) .andWhere("id", thisData.id)
.allowGraph('[owner,certificate]') .allowGraph("[owner,certificate]")
.first(); .first();
if (access_data.permission_visibility !== 'all') { if (access_data.permission_visibility !== "all") {
query.andWhere('owner_user_id', access.token.getUserId(1)); query.andWhere("owner_user_id", access.token.getUserId(1));
} }
if (typeof data.expand !== 'undefined' && data.expand !== null) { if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
query.withGraphFetched('[' + data.expand.join(', ') + ']'); query.withGraphFetched(`[${thisData.expand.join(", ")}]`);
} }
return query.then(utils.omitRow(omissions())); return query.then(utils.omitRow(omissions()));
}) })
.then((row) => { .then((row) => {
if (!row || !row.id) { let thisRow = row;
throw new error.ItemNotFoundError(data.id); if (!thisRow || !thisRow.id) {
throw new errs.ItemNotFoundError(thisData.id);
} }
row = internalHost.cleanRowCertificateMeta(row); thisRow = internalHost.cleanRowCertificateMeta(thisRow);
// Custom omissions // Custom omissions
if (typeof data.omit !== 'undefined' && data.omit !== null) { if (typeof thisData.omit !== "undefined" && thisData.omit !== null) {
row = _.omit(row, data.omit); return _.omit(thisRow, thisData.omit);
} }
return row; return thisRow;
}); });
}, },
@@ -218,35 +221,35 @@ const internalStream = {
* @returns {Promise} * @returns {Promise}
*/ */
delete: (access, data) => { delete: (access, data) => {
return access.can('streams:delete', data.id) return access
.can("streams:delete", data.id)
.then(() => { .then(() => {
return internalStream.get(access, { id: data.id }); return internalStream.get(access, { id: data.id });
}) })
.then((row) => { .then((row) => {
if (!row || !row.id) { if (!row || !row.id) {
throw new error.ItemNotFoundError(data.id); throw new errs.ItemNotFoundError(data.id);
} }
return streamModel return streamModel
.query() .query()
.where('id', row.id) .where("id", row.id)
.patch({ .patch({
is_deleted: 1 is_deleted: 1,
}) })
.then(() => { .then(() => {
// Delete Nginx Config // Delete Nginx Config
return internalNginx.deleteConfig('stream', row) return internalNginx.deleteConfig("stream", row).then(() => {
.then(() => {
return internalNginx.reload(); return internalNginx.reload();
}); });
}) })
.then(() => { .then(() => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog.add(access, {
action: 'deleted', action: "deleted",
object_type: 'stream', object_type: "stream",
object_id: row.id, object_id: row.id,
meta: _.omit(row, omissions()) meta: _.omit(row, omissions()),
}); });
}); });
}) })
@@ -263,39 +266,41 @@ const internalStream = {
* @returns {Promise} * @returns {Promise}
*/ */
enable: (access, data) => { enable: (access, data) => {
return access.can('streams:update', data.id) return access
.can("streams:update", data.id)
.then(() => { .then(() => {
return internalStream.get(access, { return internalStream.get(access, {
id: data.id, id: data.id,
expand: ['certificate', 'owner'] expand: ["certificate", "owner"],
}); });
}) })
.then((row) => { .then((row) => {
if (!row || !row.id) { if (!row || !row.id) {
throw new error.ItemNotFoundError(data.id); throw new errs.ItemNotFoundError(data.id);
} else if (row.enabled) { }
throw new error.ValidationError('Stream is already enabled'); if (row.enabled) {
throw new errs.ValidationError("Stream is already enabled");
} }
row.enabled = 1; row.enabled = 1;
return streamModel return streamModel
.query() .query()
.where('id', row.id) .where("id", row.id)
.patch({ .patch({
enabled: 1 enabled: 1,
}) })
.then(() => { .then(() => {
// Configure nginx // Configure nginx
return internalNginx.configure(streamModel, 'stream', row); return internalNginx.configure(streamModel, "stream", row);
}) })
.then(() => { .then(() => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog.add(access, {
action: 'enabled', action: "enabled",
object_type: 'stream', object_type: "stream",
object_id: row.id, object_id: row.id,
meta: _.omit(row, omissions()) meta: _.omit(row, omissions()),
}); });
}); });
}) })
@@ -312,39 +317,40 @@ const internalStream = {
* @returns {Promise} * @returns {Promise}
*/ */
disable: (access, data) => { disable: (access, data) => {
return access.can('streams:update', data.id) return access
.can("streams:update", data.id)
.then(() => { .then(() => {
return internalStream.get(access, { id: data.id }); return internalStream.get(access, { id: data.id });
}) })
.then((row) => { .then((row) => {
if (!row || !row.id) { if (!row || !row.id) {
throw new error.ItemNotFoundError(data.id); throw new errs.ItemNotFoundError(data.id);
} else if (!row.enabled) { }
throw new error.ValidationError('Stream is already disabled'); if (!row.enabled) {
throw new errs.ValidationError("Stream is already disabled");
} }
row.enabled = 0; row.enabled = 0;
return streamModel return streamModel
.query() .query()
.where('id', row.id) .where("id", row.id)
.patch({ .patch({
enabled: 0 enabled: 0,
}) })
.then(() => { .then(() => {
// Delete Nginx Config // Delete Nginx Config
return internalNginx.deleteConfig('stream', row) return internalNginx.deleteConfig("stream", row).then(() => {
.then(() => {
return internalNginx.reload(); return internalNginx.reload();
}); });
}) })
.then(() => { .then(() => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog.add(access, {
action: 'disabled', action: "disabled",
object_type: 'stream-host', object_type: "stream-host",
object_id: row.id, object_id: row.id,
meta: _.omit(row, omissions()) meta: _.omit(row, omissions()),
}); });
}); });
}) })
@@ -362,34 +368,35 @@ const internalStream = {
* @returns {Promise} * @returns {Promise}
*/ */
getAll: (access, expand, search_query) => { getAll: (access, expand, search_query) => {
return access.can('streams:list') return access
.can("streams:list")
.then((access_data) => { .then((access_data) => {
const query = streamModel const query = streamModel
.query() .query()
.where('is_deleted', 0) .where("is_deleted", 0)
.groupBy('id') .groupBy("id")
.allowGraph('[owner,certificate]') .allowGraph("[owner,certificate]")
.orderBy('incoming_port', 'ASC'); .orderBy("incoming_port", "ASC");
if (access_data.permission_visibility !== 'all') { if (access_data.permission_visibility !== "all") {
query.andWhere('owner_user_id', access.token.getUserId(1)); query.andWhere("owner_user_id", access.token.getUserId(1));
} }
// Query is used for searching // Query is used for searching
if (typeof search_query === 'string' && search_query.length > 0) { if (typeof search_query === "string" && search_query.length > 0) {
query.where(function () { query.where(function () {
this.where(castJsonIfNeed('incoming_port'), 'like', `%${search_query}%`); this.where(castJsonIfNeed("incoming_port"), "like", `%${search_query}%`);
}); });
} }
if (typeof expand !== 'undefined' && expand !== null) { if (typeof expand !== "undefined" && expand !== null) {
query.withGraphFetched('[' + expand.join(', ') + ']'); query.withGraphFetched(`[${expand.join(", ")}]`);
} }
return query.then(utils.omitRows(omissions())); return query.then(utils.omitRows(omissions()));
}) })
.then((rows) => { .then((rows) => {
if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) { if (typeof expand !== "undefined" && expand !== null && expand.indexOf("certificate") !== -1) {
return internalHost.cleanAllRowsCertificateMeta(rows); return internalHost.cleanAllRowsCertificateMeta(rows);
} }
@@ -405,20 +412,16 @@ const internalStream = {
* @returns {Promise} * @returns {Promise}
*/ */
getCount: (user_id, visibility) => { getCount: (user_id, visibility) => {
const query = streamModel const query = streamModel.query().count("id AS count").where("is_deleted", 0);
.query()
.count('id AS count')
.where('is_deleted', 0);
if (visibility !== 'all') { if (visibility !== "all") {
query.andWhere('owner_user_id', user_id); query.andWhere("owner_user_id", user_id);
} }
return query.first() return query.first().then((row) => {
.then((row) => { return Number.parseInt(row.count, 10);
return parseInt(row.count, 10);
}); });
} },
}; };
module.exports = internalStream; export default internalStream;


@@ -1,14 +1,14 @@
const _ = require('lodash'); import _ from "lodash";
const error = require('../lib/error'); import errs from "../lib/error.js";
const userModel = require('../models/user'); import { parseDatePeriod } from "../lib/helpers.js";
const authModel = require('../models/auth'); import authModel from "../models/auth.js";
const helpers = require('../lib/helpers'); import TokenModel from "../models/token.js";
const TokenModel = require('../models/token'); import userModel from "../models/user.js";
const ERROR_MESSAGE_INVALID_AUTH = 'Invalid email or password'; const ERROR_MESSAGE_INVALID_AUTH = "Invalid email or password";
const ERROR_MESSAGE_INVALID_AUTH_I18N = "error.invalid-auth";
module.exports = {
export default {
/** /**
* @param {Object} data * @param {Object} data
* @param {String} data.identity * @param {String} data.identity
@@ -18,70 +18,66 @@ module.exports = {
* @param {String} [issuer] * @param {String} [issuer]
* @returns {Promise} * @returns {Promise}
*/ */
getTokenFromEmail: (data, issuer) => { getTokenFromEmail: async (data, issuer) => {
let Token = new TokenModel(); const Token = TokenModel();
data.scope = data.scope || 'user'; data.scope = data.scope || "user";
data.expiry = data.expiry || '1d'; data.expiry = data.expiry || "1d";
return userModel const user = await userModel
.query() .query()
.where('email', data.identity.toLowerCase().trim()) .where("email", data.identity.toLowerCase().trim())
.andWhere('is_deleted', 0) .andWhere("is_deleted", 0)
.andWhere('is_disabled', 0) .andWhere("is_disabled", 0)
.first() .first();
.then((user) => {
if (user) {
// Get auth
return authModel
.query()
.where('user_id', '=', user.id)
.where('type', '=', 'password')
.first()
.then((auth) => {
if (auth) {
return auth.verifyPassword(data.secret)
.then((valid) => {
if (valid) {
if (data.scope !== 'user' && _.indexOf(user.roles, data.scope) === -1) { if (!user) {
throw new errs.AuthError(ERROR_MESSAGE_INVALID_AUTH);
}
const auth = await authModel
.query()
.where("user_id", "=", user.id)
.where("type", "=", "password")
.first();
if (!auth) {
throw new errs.AuthError(ERROR_MESSAGE_INVALID_AUTH);
}
const valid = await auth.verifyPassword(data.secret);
if (!valid) {
throw new errs.AuthError(
ERROR_MESSAGE_INVALID_AUTH,
ERROR_MESSAGE_INVALID_AUTH_I18N,
);
}
if (data.scope !== "user" && _.indexOf(user.roles, data.scope) === -1) {
// The scope requested doesn't exist as a role against the user, // The scope requested doesn't exist as a role against the user,
// you shall not pass. // you shall not pass.
throw new error.AuthError('Invalid scope: ' + data.scope); throw new errs.AuthError(`Invalid scope: ${data.scope}`);
} }
// Create a moment of the expiry expression // Create a moment of the expiry expression
let expiry = helpers.parseDatePeriod(data.expiry); const expiry = parseDatePeriod(data.expiry);
if (expiry === null) { if (expiry === null) {
throw new error.AuthError('Invalid expiry time: ' + data.expiry); throw new errs.AuthError(`Invalid expiry time: ${data.expiry}`);
} }
return Token.create({ const signed = await Token.create({
iss: issuer || 'api', iss: issuer || "api",
attrs: { attrs: {
id: user.id id: user.id,
}, },
scope: [data.scope], scope: [data.scope],
expiresIn: data.expiry expiresIn: data.expiry,
}) });
.then((signed) => {
return { return {
token: signed.token, token: signed.token,
expires: expiry.toISOString() expires: expiry.toISOString(),
}; };
});
} else {
throw new error.AuthError(ERROR_MESSAGE_INVALID_AUTH);
}
});
} else {
throw new error.AuthError(ERROR_MESSAGE_INVALID_AUTH);
}
});
} else {
throw new error.AuthError(ERROR_MESSAGE_INVALID_AUTH);
}
});
}, },
/** /**
@@ -91,74 +87,70 @@ module.exports = {
* @param {String} [data.scope] Only considered if existing token scope is admin * @param {String} [data.scope] Only considered if existing token scope is admin
* @returns {Promise} * @returns {Promise}
*/ */
getFreshToken: (access, data) => { getFreshToken: async (access, data) => {
let Token = new TokenModel(); const Token = TokenModel();
const thisData = data || {};
data = data || {}; thisData.expiry = thisData.expiry || "1d";
data.expiry = data.expiry || '1d';
if (access && access.token.getUserId(0)) {
if (access?.token.getUserId(0)) {
// Create a moment of the expiry expression // Create a moment of the expiry expression
let expiry = helpers.parseDatePeriod(data.expiry); const expiry = parseDatePeriod(thisData.expiry);
if (expiry === null) { if (expiry === null) {
throw new error.AuthError('Invalid expiry time: ' + data.expiry); throw new errs.AuthError(`Invalid expiry time: ${thisData.expiry}`);
} }
let token_attrs = { const token_attrs = {
id: access.token.getUserId(0) id: access.token.getUserId(0),
}; };
// Only admins can request otherwise scoped tokens // Only admins can request otherwise scoped tokens
let scope = access.token.get('scope'); let scope = access.token.get("scope");
if (data.scope && access.token.hasScope('admin')) { if (thisData.scope && access.token.hasScope("admin")) {
scope = [data.scope]; scope = [thisData.scope];
if (data.scope === 'job-board' || data.scope === 'worker') { if (thisData.scope === "job-board" || thisData.scope === "worker") {
token_attrs.id = 0; token_attrs.id = 0;
} }
} }
return Token.create({ const signed = await Token.create({
iss: 'api', iss: "api",
scope: scope, scope: scope,
attrs: token_attrs, attrs: token_attrs,
expiresIn: data.expiry expiresIn: thisData.expiry,
}) });
.then((signed) => {
return { return {
token: signed.token, token: signed.token,
expires: expiry.toISOString() expires: expiry.toISOString(),
}; };
});
} else {
throw new error.AssertionFailedError('Existing token contained invalid user data');
} }
throw new error.AssertionFailedError("Existing token contained invalid user data");
}, },
/** /**
* @param {Object} user * @param {Object} user
* @returns {Promise} * @returns {Promise}
*/ */
getTokenFromUser: (user) => { getTokenFromUser: async (user) => {
const expire = '1d'; const expire = "1d";
const Token = new TokenModel(); const Token = TokenModel();
const expiry = helpers.parseDatePeriod(expire); const expiry = parseDatePeriod(expire);
return Token.create({ const signed = await Token.create({
iss: 'api', iss: "api",
attrs: { attrs: {
id: user.id id: user.id,
}, },
scope: ['user'], scope: ["user"],
expiresIn: expire expiresIn: expire,
}) });
.then((signed) => {
return { return {
token: signed.token, token: signed.token,
expires: expiry.toISOString(), expires: expiry.toISOString(),
user: user user: user,
}; };
}); },
}
}; };
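getTokenFromEmail above is the clearest instance of the Promise-to-async/await refactor in this changeset: the nested .then()/else ladders become flat awaits with early throws. A minimal sketch of the pattern, using hypothetical findUser and verifyPassword helpers (not part of this codebase) purely to show the shape:

// Hypothetical helpers, for illustration only.
const findUser = async (identity) => ({ id: 1, identity });
const verifyPassword = async (_user, secret) => secret === "correct horse battery staple";

// Before: nested promise chain with an else branch for every failure case.
const loginPromiseStyle = (identity, secret) =>
	findUser(identity).then((user) => {
		if (!user) {
			throw new Error("Invalid email or password");
		}
		return verifyPassword(user, secret).then((valid) => {
			if (!valid) {
				throw new Error("Invalid email or password");
			}
			return user;
		});
	});

// After: the same control flow flattened with await and early throws.
const loginAsyncStyle = async (identity, secret) => {
	const user = await findUser(identity);
	if (!user) {
		throw new Error("Invalid email or password");
	}
	const valid = await verifyPassword(user, secret);
	if (!valid) {
		throw new Error("Invalid email or password");
	}
	return user;
};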


@@ -1,93 +1,76 @@
const _ = require('lodash'); import gravatar from "gravatar";
const error = require('../lib/error'); import _ from "lodash";
const utils = require('../lib/utils'); import errs from "../lib/error.js";
const userModel = require('../models/user'); import utils from "../lib/utils.js";
const userPermissionModel = require('../models/user_permission'); import authModel from "../models/auth.js";
const authModel = require('../models/auth'); import userModel from "../models/user.js";
const gravatar = require('gravatar'); import userPermissionModel from "../models/user_permission.js";
const internalToken = require('./token'); import internalAuditLog from "./audit-log.js";
const internalAuditLog = require('./audit-log'); import internalToken from "./token.js";
function omissions () { const omissions = () => {
return ['is_deleted']; return ["is_deleted", "permissions.id", "permissions.user_id", "permissions.created_on", "permissions.modified_on"];
} };
const DEFAULT_AVATAR = gravatar.url("admin@example.com", { default: "mm" });
const internalUser = { const internalUser = {
/** /**
* Create a user can happen unauthenticated only once and only when no active users exist.
* Otherwise, a valid auth method is required.
*
* @param {Access} access * @param {Access} access
* @param {Object} data * @param {Object} data
* @returns {Promise} * @returns {Promise}
*/ */
create: (access, data) => { create: async (access, data) => {
let auth = data.auth || null; const auth = data.auth || null;
delete data.auth; delete data.auth;
data.avatar = data.avatar || ''; data.avatar = data.avatar || "";
data.roles = data.roles || []; data.roles = data.roles || [];
if (typeof data.is_disabled !== 'undefined') { if (typeof data.is_disabled !== "undefined") {
data.is_disabled = data.is_disabled ? 1 : 0; data.is_disabled = data.is_disabled ? 1 : 0;
} }
return access.can('users:create', data) await access.can("users:create", data);
.then(() => { data.avatar = gravatar.url(data.email, { default: "mm" });
data.avatar = gravatar.url(data.email, {default: 'mm'});
return userModel let user = await userModel.query().insertAndFetch(data).then(utils.omitRow(omissions()));
.query()
.insertAndFetch(data)
.then(utils.omitRow(omissions()));
})
.then((user) => {
if (auth) { if (auth) {
return authModel user = await authModel.query().insert({
.query()
.insert({
user_id: user.id, user_id: user.id,
type: auth.type, type: auth.type,
secret: auth.secret, secret: auth.secret,
meta: {} meta: {},
})
.then(() => {
return user;
}); });
} else {
return user;
} }
})
.then((user) => {
// Create permissions row as well
let is_admin = data.roles.indexOf('admin') !== -1;
return userPermissionModel // Create permissions row as well
.query() const isAdmin = data.roles.indexOf("admin") !== -1;
.insert({
await userPermissionModel.query().insert({
user_id: user.id, user_id: user.id,
visibility: is_admin ? 'all' : 'user', visibility: isAdmin ? "all" : "user",
proxy_hosts: 'manage', proxy_hosts: "manage",
redirection_hosts: 'manage', redirection_hosts: "manage",
dead_hosts: 'manage', dead_hosts: "manage",
streams: 'manage', streams: "manage",
access_lists: 'manage', access_lists: "manage",
certificates: 'manage' certificates: "manage",
})
.then(() => {
return internalUser.get(access, {id: user.id, expand: ['permissions']});
}); });
})
.then((user) => { user = await internalUser.get(access, { id: user.id, expand: ["permissions"] });
// Add to audit log
return internalAuditLog.add(access, { await internalAuditLog.add(access, {
action: 'created', action: "created",
object_type: 'user', object_type: "user",
object_id: user.id, object_id: user.id,
meta: user meta: user,
}) });
.then(() => {
return user; return user;
});
});
}, },
/** /**
@@ -99,29 +82,25 @@ const internalUser = {
* @return {Promise} * @return {Promise}
*/ */
update: (access, data) => { update: (access, data) => {
if (typeof data.is_disabled !== 'undefined') { if (typeof data.is_disabled !== "undefined") {
data.is_disabled = data.is_disabled ? 1 : 0; data.is_disabled = data.is_disabled ? 1 : 0;
} }
return access.can('users:update', data.id) return access
.can("users:update", data.id)
.then(() => { .then(() => {
// Make sure that the user being updated doesn't change their email to another user that is already using it // Make sure that the user being updated doesn't change their email to another user that is already using it
// 1. get user we want to update // 1. get user we want to update
return internalUser.get(access, {id: data.id}) return internalUser.get(access, { id: data.id }).then((user) => {
.then((user) => {
// 2. if email is to be changed, find other users with that email // 2. if email is to be changed, find other users with that email
if (typeof data.email !== 'undefined') { if (typeof data.email !== "undefined") {
data.email = data.email.toLowerCase().trim(); data.email = data.email.toLowerCase().trim();
if (user.email !== data.email) { if (user.email !== data.email) {
return internalUser.isEmailAvailable(data.email, data.id) return internalUser.isEmailAvailable(data.email, data.id).then((available) => {
.then((available) => {
if (!available) { if (!available) {
throw new error.ValidationError('Email address already in use - ' + data.email); throw new errs.ValidationError(`Email address already in use - ${data.email}`);
} }
return user; return user;
}); });
} }
@@ -134,26 +113,25 @@ const internalUser = {
.then((user) => { .then((user) => {
if (user.id !== data.id) { if (user.id !== data.id) {
// Sanity check that something crazy hasn't happened // Sanity check that something crazy hasn't happened
throw new error.InternalValidationError('User could not be updated, IDs do not match: ' + user.id + ' !== ' + data.id); throw new errs.InternalValidationError(
`User could not be updated, IDs do not match: ${user.id} !== ${data.id}`,
);
} }
data.avatar = gravatar.url(data.email || user.email, {default: 'mm'}); data.avatar = gravatar.url(data.email || user.email, { default: "mm" });
return userModel.query().patchAndFetchById(user.id, data).then(utils.omitRow(omissions()));
return userModel
.query()
.patchAndFetchById(user.id, data)
.then(utils.omitRow(omissions()));
}) })
.then(() => { .then(() => {
return internalUser.get(access, { id: data.id }); return internalUser.get(access, { id: data.id });
}) })
.then((user) => { .then((user) => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog
action: 'updated', .add(access, {
object_type: 'user', action: "updated",
object_type: "user",
object_id: user.id, object_id: user.id,
meta: data meta: data,
}) })
.then(() => { .then(() => {
return user; return user;
@@ -170,37 +148,41 @@ const internalUser = {
* @return {Promise} * @return {Promise}
*/ */
get: (access, data) => { get: (access, data) => {
if (typeof data === 'undefined') { const thisData = data || {};
data = {};
if (typeof thisData.id === "undefined" || !thisData.id) {
thisData.id = access.token.getUserId(0);
} }
if (typeof data.id === 'undefined' || !data.id) { return access
data.id = access.token.getUserId(0); .can("users:get", thisData.id)
}
return access.can('users:get', data.id)
.then(() => { .then(() => {
let query = userModel const query = userModel
.query() .query()
.where('is_deleted', 0) .where("is_deleted", 0)
.andWhere('id', data.id) .andWhere("id", thisData.id)
.allowGraph('[permissions]') .allowGraph("[permissions]")
.first(); .first();
if (typeof data.expand !== 'undefined' && data.expand !== null) { if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
query.withGraphFetched('[' + data.expand.join(', ') + ']'); query.withGraphFetched(`[${thisData.expand.join(", ")}]`);
} }
return query.then(utils.omitRow(omissions())); return query.then(utils.omitRow(omissions()));
}) })
.then((row) => { .then((row) => {
if (!row || !row.id) { if (!row || !row.id) {
throw new error.ItemNotFoundError(data.id); throw new errs.ItemNotFoundError(thisData.id);
} }
// Custom omissions // Custom omissions
if (typeof data.omit !== 'undefined' && data.omit !== null) { if (typeof thisData.omit !== "undefined" && thisData.omit !== null) {
row = _.omit(row, data.omit); return _.omit(row, thisData.omit);
} }
if (row.avatar === "") {
row.avatar = DEFAULT_AVATAR;
}
return row; return row;
}); });
}, },
@@ -213,18 +195,13 @@ const internalUser = {
* @param user_id * @param user_id
*/ */
isEmailAvailable: (email, user_id) => { isEmailAvailable: (email, user_id) => {
let query = userModel const query = userModel.query().where("email", "=", email.toLowerCase().trim()).where("is_deleted", 0).first();
.query()
.where('email', '=', email.toLowerCase().trim())
.where('is_deleted', 0)
.first();
if (typeof user_id !== 'undefined') { if (typeof user_id !== "undefined") {
query.where('id', '!=', user_id); query.where("id", "!=", user_id);
} }
return query return query.then((user) => {
.then((user) => {
return !user; return !user;
}); });
}, },
@@ -237,33 +214,34 @@ const internalUser = {
* @returns {Promise} * @returns {Promise}
*/ */
delete: (access, data) => { delete: (access, data) => {
return access.can('users:delete', data.id) return access
.can("users:delete", data.id)
.then(() => { .then(() => {
return internalUser.get(access, { id: data.id }); return internalUser.get(access, { id: data.id });
}) })
.then((user) => { .then((user) => {
if (!user) { if (!user) {
throw new error.ItemNotFoundError(data.id); throw new errs.ItemNotFoundError(data.id);
} }
// Make sure user can't delete themselves // Make sure user can't delete themselves
if (user.id === access.token.getUserId(0)) { if (user.id === access.token.getUserId(0)) {
throw new error.PermissionError('You cannot delete yourself.'); throw new errs.PermissionError("You cannot delete yourself.");
} }
return userModel return userModel
.query() .query()
.where('id', user.id) .where("id", user.id)
.patch({ .patch({
is_deleted: 1 is_deleted: 1,
}) })
.then(() => { .then(() => {
// Add to audit log // Add to audit log
return internalAuditLog.add(access, { return internalAuditLog.add(access, {
action: 'deleted', action: "deleted",
object_type: 'user', object_type: "user",
object_id: user.id, object_id: user.id,
meta: _.omit(user, omissions()) meta: _.omit(user, omissions()),
}); });
}); });
}) })
@@ -272,6 +250,14 @@ const internalUser = {
}); });
}, },
deleteAll: async () => {
await userModel
.query()
.patch({
is_deleted: 1,
});
},
/** /**
* This will only count the users * This will only count the users
* *
@@ -280,26 +266,26 @@ const internalUser = {
* @returns {*} * @returns {*}
*/ */
getCount: (access, search_query) => { getCount: (access, search_query) => {
return access.can('users:list') return access
.can("users:list")
.then(() => { .then(() => {
let query = userModel const query = userModel.query().count("id as count").where("is_deleted", 0).first();
.query()
.count('id as count')
.where('is_deleted', 0)
.first();
// Query is used for searching // Query is used for searching
if (typeof search_query === 'string') { if (typeof search_query === "string") {
query.where(function () { query.where(function () {
this.where('user.name', 'like', '%' + search_query + '%') this.where("user.name", "like", `%${search_query}%`).orWhere(
.orWhere('user.email', 'like', '%' + search_query + '%'); "user.email",
"like",
`%${search_query}%`,
);
}); });
} }
return query; return query;
}) })
.then((row) => { .then((row) => {
return parseInt(row.count, 10); return Number.parseInt(row.count, 10);
}); });
}, },
@@ -311,30 +297,28 @@ const internalUser = {
* @param {String} [search_query] * @param {String} [search_query]
* @returns {Promise} * @returns {Promise}
*/ */
getAll: (access, expand, search_query) => { getAll: async (access, expand, search_query) => {
return access.can('users:list') await access.can("users:list");
.then(() => { const query = userModel
let query = userModel
.query() .query()
.where('is_deleted', 0) .where("is_deleted", 0)
.groupBy('id') .groupBy("id")
.allowGraph('[permissions]') .allowGraph("[permissions]")
.orderBy('name', 'ASC'); .orderBy("name", "ASC");
// Query is used for searching // Query is used for searching
if (typeof search_query === 'string') { if (typeof search_query === "string") {
query.where(function () { query.where(function () {
this.where('name', 'like', '%' + search_query + '%') this.where("name", "like", `%${search_query}%`).orWhere("email", "like", `%${search_query}%`);
.orWhere('email', 'like', '%' + search_query + '%');
}); });
} }
if (typeof expand !== 'undefined' && expand !== null) { if (typeof expand !== "undefined" && expand !== null) {
query.withGraphFetched('[' + expand.join(', ') + ']'); query.withGraphFetched(`[${expand.join(", ")}]`);
} }
return query.then(utils.omitRows(omissions())); const res = await query;
}); return utils.omitRows(omissions())(res);
}, },
/** /**
@@ -342,11 +326,11 @@ const internalUser = {
* @param {Integer} [id_requested] * @param {Integer} [id_requested]
* @returns {[String]} * @returns {[String]}
*/ */
getUserOmisionsByAccess: (access, id_requested) => { getUserOmisionsByAccess: (access, idRequested) => {
let response = []; // Admin response let response = []; // Admin response
if (!access.token.hasScope('admin') && access.token.getUserId(0) !== id_requested) { if (!access.token.hasScope("admin") && access.token.getUserId(0) !== idRequested) {
response = ['roles', 'is_deleted']; // Restricted response response = ["is_deleted"]; // Restricted response
} }
return response; return response;
@@ -361,25 +345,29 @@ const internalUser = {
* @return {Promise} * @return {Promise}
*/ */
setPassword: (access, data) => { setPassword: (access, data) => {
return access.can('users:password', data.id) return access
.can("users:password", data.id)
.then(() => { .then(() => {
return internalUser.get(access, { id: data.id }); return internalUser.get(access, { id: data.id });
}) })
.then((user) => { .then((user) => {
if (user.id !== data.id) { if (user.id !== data.id) {
// Sanity check that something crazy hasn't happened // Sanity check that something crazy hasn't happened
throw new error.InternalValidationError('User could not be updated, IDs do not match: ' + user.id + ' !== ' + data.id); throw new errs.InternalValidationError(
`User could not be updated, IDs do not match: ${user.id} !== ${data.id}`,
);
} }
if (user.id === access.token.getUserId(0)) { if (user.id === access.token.getUserId(0)) {
// they're setting their own password. Make sure their current password is correct // they're setting their own password. Make sure their current password is correct
if (typeof data.current === 'undefined' || !data.current) { if (typeof data.current === "undefined" || !data.current) {
throw new error.ValidationError('Current password was not supplied'); throw new errs.ValidationError("Current password was not supplied");
} }
return internalToken.getTokenFromEmail({ return internalToken
.getTokenFromEmail({
identity: user.email, identity: user.email,
secret: data.current secret: data.current,
}) })
.then(() => { .then(() => {
return user; return user;
@@ -392,43 +380,36 @@ const internalUser = {
// Get auth, patch if it exists // Get auth, patch if it exists
return authModel return authModel
.query() .query()
.where('user_id', user.id) .where("user_id", user.id)
.andWhere('type', data.type) .andWhere("type", data.type)
.first() .first()
.then((existing_auth) => { .then((existing_auth) => {
if (existing_auth) { if (existing_auth) {
// patch // patch
return authModel return authModel.query().where("user_id", user.id).andWhere("type", data.type).patch({
.query()
.where('user_id', user.id)
.andWhere('type', data.type)
.patch({
type: data.type, // This is required for the model to encrypt on save type: data.type, // This is required for the model to encrypt on save
secret: data.secret secret: data.secret,
}); });
} else { }
// insert // insert
return authModel return authModel.query().insert({
.query()
.insert({
user_id: user.id, user_id: user.id,
type: data.type, type: data.type,
secret: data.secret, secret: data.secret,
meta: {} meta: {},
}); });
}
}) })
.then(() => { .then(() => {
// Add to Audit Log // Add to Audit Log
return internalAuditLog.add(access, { return internalAuditLog.add(access, {
action: 'updated', action: "updated",
object_type: 'user', object_type: "user",
object_id: user.id, object_id: user.id,
meta: { meta: {
name: user.name, name: user.name,
password_changed: true, password_changed: true,
auth_type: data.type auth_type: data.type,
} },
}); });
}); });
}) })
@@ -443,14 +424,17 @@ const internalUser = {
* @return {Promise} * @return {Promise}
*/ */
setPermissions: (access, data) => { setPermissions: (access, data) => {
return access.can('users:permissions', data.id) return access
.can("users:permissions", data.id)
.then(() => { .then(() => {
return internalUser.get(access, { id: data.id }); return internalUser.get(access, { id: data.id });
}) })
.then((user) => { .then((user) => {
if (user.id !== data.id) { if (user.id !== data.id) {
// Sanity check that something crazy hasn't happened // Sanity check that something crazy hasn't happened
throw new error.InternalValidationError('User could not be updated, IDs do not match: ' + user.id + ' !== ' + data.id); throw new errs.InternalValidationError(
`User could not be updated, IDs do not match: ${user.id} !== ${data.id}`,
);
} }
return user; return user;
@@ -459,34 +443,30 @@ const internalUser = {
// Get perms row, patch if it exists // Get perms row, patch if it exists
return userPermissionModel return userPermissionModel
.query() .query()
.where('user_id', user.id) .where("user_id", user.id)
.first() .first()
.then((existing_auth) => { .then((existing_auth) => {
if (existing_auth) { if (existing_auth) {
// patch // patch
return userPermissionModel return userPermissionModel
.query() .query()
.where('user_id', user.id) .where("user_id", user.id)
.patchAndFetchById(existing_auth.id, _.assign({ user_id: user.id }, data)); .patchAndFetchById(existing_auth.id, _.assign({ user_id: user.id }, data));
} else {
// insert
return userPermissionModel
.query()
.insertAndFetch(_.assign({user_id: user.id}, data));
} }
// insert
return userPermissionModel.query().insertAndFetch(_.assign({ user_id: user.id }, data));
}) })
.then((permissions) => { .then((permissions) => {
// Add to Audit Log // Add to Audit Log
return internalAuditLog.add(access, { return internalAuditLog.add(access, {
action: 'updated', action: "updated",
object_type: 'user', object_type: "user",
object_id: user.id, object_id: user.id,
meta: { meta: {
name: user.name, name: user.name,
permissions: permissions permissions: permissions,
} },
}); });
}); });
}) })
.then(() => { .then(() => {
@@ -500,14 +480,15 @@ const internalUser = {
* @param {Integer} data.id * @param {Integer} data.id
*/ */
loginAs: (access, data) => { loginAs: (access, data) => {
return access.can('users:loginas', data.id) return access
.can("users:loginas", data.id)
.then(() => { .then(() => {
return internalUser.get(access, data); return internalUser.get(access, data);
}) })
.then((user) => { .then((user) => {
return internalToken.getTokenFromUser(user); return internalToken.getTokenFromUser(user);
}); });
} },
}; };
module.exports = internalUser; export default internalUser;

View File

@@ -4,27 +4,31 @@
* "scope" in this file means "where did this token come from and what is using it", so 99% of the time * "scope" in this file means "where did this token come from and what is using it", so 99% of the time
* the "scope" is going to be "user" because it would be a user token. This is not to be confused with * the "scope" is going to be "user" because it would be a user token. This is not to be confused with
* the "role" which could be "user" or "admin". The scope in fact, could be "worker" or anything else. * the "role" which could be "user" or "admin". The scope in fact, could be "worker" or anything else.
*
*
*/ */
const _ = require('lodash'); import fs from "node:fs";
const logger = require('../logger').access; import { dirname } from "node:path";
const Ajv = require('ajv/dist/2020'); import { fileURLToPath } from "node:url";
const error = require('./error'); import Ajv from "ajv/dist/2020.js";
const userModel = require('../models/user'); import _ from "lodash";
const proxyHostModel = require('../models/proxy_host'); import { access as logger } from "../logger.js";
const TokenModel = require('../models/token'); import proxyHostModel from "../models/proxy_host.js";
const roleSchema = require('./access/roles.json'); import TokenModel from "../models/token.js";
const permsSchema = require('./access/permissions.json'); import userModel from "../models/user.js";
import permsSchema from "./access/permissions.json" with { type: "json" };
import roleSchema from "./access/roles.json" with { type: "json" };
import errs from "./error.js";
module.exports = function (token_string) { const __filename = fileURLToPath(import.meta.url);
let Token = new TokenModel(); const __dirname = dirname(__filename);
let token_data = null;
export default function (tokenString) {
const Token = TokenModel();
let tokenData = null;
let initialised = false; let initialised = false;
let object_cache = {}; const objectCache = {};
let allow_internal_access = false; let allowInternalAccess = false;
let user_roles = []; let userRoles = [];
let permissions = {}; let permissions = {};
/** /**
@@ -32,63 +36,58 @@ module.exports = function (token_string) {
* *
* @returns {Promise} * @returns {Promise}
*/ */
this.init = () => { this.init = async () => {
return new Promise((resolve, reject) => {
if (initialised) { if (initialised) {
resolve(); return;
} else if (!token_string) { }
reject(new error.PermissionError('Permission Denied'));
} else { if (!tokenString) {
resolve(Token.load(token_string) throw new errs.PermissionError("Permission Denied");
.then((data) => { }
token_data = data;
tokenData = await Token.load(tokenString);
// At this point we need to load the user from the DB and make sure they: // At this point we need to load the user from the DB and make sure they:
// - exist (and not soft deleted) // - exist (and not soft deleted)
// - still have the appropriate scopes for this token // - still have the appropriate scopes for this token
// This is only required when the User ID is supplied or if the token scope has `user` // This is only required when the User ID is supplied or if the token scope has `user`
if (
if (token_data.attrs.id || (typeof token_data.scope !== 'undefined' && _.indexOf(token_data.scope, 'user') !== -1)) { tokenData.attrs.id ||
(typeof tokenData.scope !== "undefined" && _.indexOf(tokenData.scope, "user") !== -1)
) {
// Has token user id or token user scope // Has token user id or token user scope
return userModel const user = await userModel
.query() .query()
.where('id', token_data.attrs.id) .where("id", tokenData.attrs.id)
.andWhere('is_deleted', 0) .andWhere("is_deleted", 0)
.andWhere('is_disabled', 0) .andWhere("is_disabled", 0)
.allowGraph('[permissions]') .allowGraph("[permissions]")
.withGraphFetched('[permissions]') .withGraphFetched("[permissions]")
.first() .first();
.then((user) => {
if (user) { if (user) {
// make sure user has all scopes of the token // make sure user has all scopes of the token
// The `user` role is not added against the user row, so we have to just add it here to get past this check. // The `user` role is not added against the user row, so we have to just add it here to get past this check.
user.roles.push('user'); user.roles.push("user");
let is_ok = true; let ok = true;
_.forEach(token_data.scope, (scope_item) => { _.forEach(tokenData.scope, (scope_item) => {
if (_.indexOf(user.roles, scope_item) === -1) { if (_.indexOf(user.roles, scope_item) === -1) {
is_ok = false; ok = false;
} }
}); });
if (!is_ok) { if (!ok) {
throw new error.AuthError('Invalid token scope for User'); throw new errs.AuthError("Invalid token scope for User");
} else { }
initialised = true; initialised = true;
user_roles = user.roles; userRoles = user.roles;
permissions = user.permissions; permissions = user.permissions;
}
} else { } else {
throw new error.AuthError('User cannot be loaded for Token'); throw new errs.AuthError("User cannot be loaded for Token");
}
} }
});
} else {
initialised = true; initialised = true;
}
}));
}
});
}; };
/** /**
@@ -96,140 +95,121 @@ module.exports = function (token_string) {
* This only applies to USER token scopes, as all other tokens are not really bound * This only applies to USER token scopes, as all other tokens are not really bound
* by object scopes * by object scopes
* *
* @param {String} object_type * @param {String} objectType
* @returns {Promise} * @returns {Promise}
*/ */
this.loadObjects = (object_type) => { this.loadObjects = async (objectType) => {
return new Promise((resolve, reject) => { let objects = null;
if (Token.hasScope('user')) {
if (typeof token_data.attrs.id === 'undefined' || !token_data.attrs.id) { if (Token.hasScope("user")) {
reject(new error.AuthError('User Token supplied without a User ID')); if (typeof tokenData.attrs.id === "undefined" || !tokenData.attrs.id) {
throw new errs.AuthError("User Token supplied without a User ID");
}
const tokenUserId = tokenData.attrs.id ? tokenData.attrs.id : 0;
if (typeof objectCache[objectType] !== "undefined") {
objects = objectCache[objectType];
} else { } else {
let token_user_id = token_data.attrs.id ? token_data.attrs.id : 0; switch (objectType) {
let query;
if (typeof object_cache[object_type] === 'undefined') {
switch (object_type) {
// USERS - should only return yourself // USERS - should only return yourself
case 'users': case "users":
resolve(token_user_id ? [token_user_id] : []); objects = tokenUserId ? [tokenUserId] : [];
break; break;
// Proxy Hosts // Proxy Hosts
case 'proxy_hosts': case "proxy_hosts": {
query = proxyHostModel const query = proxyHostModel
.query() .query()
.select('id') .select("id")
.andWhere('is_deleted', 0); .andWhere("is_deleted", 0);
if (permissions.visibility === 'user') { if (permissions.visibility === "user") {
query.andWhere('owner_user_id', token_user_id); query.andWhere("owner_user_id", tokenUserId);
} }
resolve(query const rows = await query;
.then((rows) => { objects = [];
let result = []; _.forEach(rows, (ruleRow) => {
_.forEach(rows, (rule_row) => { objects.push(ruleRow.id);
result.push(rule_row.id);
}); });
// enum should not have less than 1 item // enum should not have less than 1 item
if (!result.length) { if (!objects.length) {
result.push(0); objects.push(0);
} }
return result;
})
);
break;
// DEFAULT: null
default:
resolve(null);
break; break;
} }
} else { }
resolve(object_cache[object_type]); objectCache[objectType] = objects;
} }
} }
} else {
resolve(null);
}
})
.then((objects) => {
object_cache[object_type] = objects;
return objects; return objects;
});
}; };
/** /**
* Creates a schema object on the fly with the IDs and other values required to be checked against the permissionSchema * Creates a schema object on the fly with the IDs and other values required to be checked against the permissionSchema
* *
* @param {String} permission_label * @param {String} permissionLabel
* @returns {Object} * @returns {Object}
*/ */
this.getObjectSchema = (permission_label) => { this.getObjectSchema = async (permissionLabel) => {
let base_object_type = permission_label.split(':').shift(); const baseObjectType = permissionLabel.split(":").shift();
let schema = { const schema = {
$id: 'objects', $id: "objects",
description: 'Actor Properties', description: "Actor Properties",
type: 'object', type: "object",
additionalProperties: false, additionalProperties: false,
properties: { properties: {
user_id: { user_id: {
anyOf: [ anyOf: [
{ {
type: 'number', type: "number",
enum: [Token.get('attrs').id] enum: [Token.get("attrs").id],
} },
] ],
}, },
scope: { scope: {
type: 'string', type: "string",
pattern: '^' + Token.get('scope') + '$' pattern: `^${Token.get("scope")}$`,
} },
} },
}; };
return this.loadObjects(base_object_type) const result = await this.loadObjects(baseObjectType);
.then((object_result) => { if (typeof result === "object" && result !== null) {
if (typeof object_result === 'object' && object_result !== null) { schema.properties[baseObjectType] = {
schema.properties[base_object_type] = { type: "number",
type: 'number', enum: result,
enum: object_result, minimum: 1,
minimum: 1
}; };
} else { } else {
schema.properties[base_object_type] = { schema.properties[baseObjectType] = {
type: 'number', type: "number",
minimum: 1 minimum: 1,
}; };
} }
return schema; return schema;
});
}; };
return {
return {
token: Token, token: Token,
/** /**
* *
* @param {Boolean} [allow_internal] * @param {Boolean} [allowInternal]
* @returns {Promise} * @returns {Promise}
*/ */
load: (allow_internal) => { load: async (allowInternal) => {
return new Promise(function (resolve/*, reject*/) { if (tokenString) {
if (token_string) { return await Token.load(tokenString);
resolve(Token.load(token_string));
} else {
allow_internal_access = allow_internal;
resolve(allow_internal_access || null);
} }
}); allowInternalAccess = allowInternal;
return allowInternal || null;
}, },
reloadObjects: this.loadObjects, reloadObjects: this.loadObjects,
@@ -240,68 +220,59 @@ module.exports = function (token_string) {
* @param {*} [data] * @param {*} [data]
* @returns {Promise} * @returns {Promise}
*/ */
can: (permission, data) => { can: async (permission, data) => {
if (allow_internal_access === true) { if (allowInternalAccess === true) {
return Promise.resolve(true); return true;
//return true; }
} else {
return this.init() try {
.then(() => { await this.init();
// Initialised, token decoded ok const objectSchema = await this.getObjectSchema(permission);
return this.getObjectSchema(permission)
.then((objectSchema) => { const dataSchema = {
const data_schema = {
[permission]: { [permission]: {
data: data, data: data,
scope: Token.get('scope'), scope: Token.get("scope"),
roles: user_roles, roles: userRoles,
permission_visibility: permissions.visibility, permission_visibility: permissions.visibility,
permission_proxy_hosts: permissions.proxy_hosts, permission_proxy_hosts: permissions.proxy_hosts,
permission_redirection_hosts: permissions.redirection_hosts, permission_redirection_hosts: permissions.redirection_hosts,
permission_dead_hosts: permissions.dead_hosts, permission_dead_hosts: permissions.dead_hosts,
permission_streams: permissions.streams, permission_streams: permissions.streams,
permission_access_lists: permissions.access_lists, permission_access_lists: permissions.access_lists,
permission_certificates: permissions.certificates permission_certificates: permissions.certificates,
} },
}; };
let permissionSchema = { const permissionSchema = {
$async: true, $async: true,
$id: 'permissions', $id: "permissions",
type: 'object', type: "object",
additionalProperties: false, additionalProperties: false,
properties: {} properties: {},
}; };
permissionSchema.properties[permission] = require('./access/' + permission.replace(/:/gim, '-') + '.json'); const rawData = fs.readFileSync(`${__dirname}/access/${permission.replace(/:/gim, "-")}.json`, {
encoding: "utf8",
});
permissionSchema.properties[permission] = JSON.parse(rawData);
const ajv = new Ajv({ const ajv = new Ajv({
verbose: true, verbose: true,
allErrors: true, allErrors: true,
breakOnError: true, breakOnError: true,
coerceTypes: true, coerceTypes: true,
schemas: [ schemas: [roleSchema, permsSchema, objectSchema, permissionSchema],
roleSchema,
permsSchema,
objectSchema,
permissionSchema
]
}); });
return ajv.validate('permissions', data_schema) const valid = await ajv.validate("permissions", dataSchema);
.then(() => { return valid && dataSchema[permission];
return data_schema[permission]; } catch (err) {
});
});
})
.catch((err) => {
err.permission = permission; err.permission = permission;
err.permission_data = data; err.permission_data = data;
logger.error(permission, data, err.message); logger.error(permission, data, err.message);
throw new errs.PermissionError("Permission Denied", err);
throw new error.PermissionError('Permission Denied', err);
});
} }
},
};
} }
};
};
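A minimal consumer sketch for the refactored access module, mirroring how the gatekeeper middleware and the internal routes use it; the token string and target user id below are placeholders, not values from this changeset:

import Access from "./lib/access.js";

const tokenString = "<bearer token from the Authorization header>"; // placeholder
const access = new Access(tokenString);
await access.load(false); // pass true only for trusted internal (non-user) calls
// Resolves with the validated permission payload, or throws errs.PermissionError.
const payload = await access.can("users:password", 1234);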

View File

@@ -1,17 +1,15 @@
const dnsPlugins = require('../global/certbot-dns-plugins.json'); import batchflow from "batchflow";
const utils = require('./utils'); import dnsPlugins from "../global/certbot-dns-plugins.json" with { type: "json" };
const error = require('./error'); import { certbot as logger } from "../logger.js";
const logger = require('../logger').certbot; import errs from "./error.js";
const batchflow = require('batchflow'); import utils from "./utils.js";
const CERTBOT_VERSION_REPLACEMENT = '$(certbot --version | grep -Eo \'[0-9](\\.[0-9]+)+\')'; const CERTBOT_VERSION_REPLACEMENT = "$(certbot --version | grep -Eo '[0-9](\\.[0-9]+)+')";
const certbot = {
/** /**
* @param {array} pluginKeys * @param {array} pluginKeys
*/ */
installPlugins: async (pluginKeys) => { const installPlugins = async (pluginKeys) => {
let hasErrors = false; let hasErrors = false;
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
@@ -20,9 +18,11 @@ const certbot = {
return; return;
} }
batchflow(pluginKeys).sequential() batchflow(pluginKeys)
.sequential()
.each((_i, pluginKey, next) => { .each((_i, pluginKey, next) => {
certbot.installPlugin(pluginKey) certbot
.installPlugin(pluginKey)
.then(() => { .then(() => {
next(); next();
}) })
@@ -36,13 +36,15 @@ const certbot = {
}) })
.end(() => { .end(() => {
if (hasErrors) { if (hasErrors) {
reject(new error.CommandError('Some plugins failed to install. Please check the logs above', 1)); reject(
new errs.CommandError("Some plugins failed to install. Please check the logs above", 1),
);
} else { } else {
resolve(); resolve();
} }
}); });
}); });
}, };
/** /**
 * Installs a certbot plugin given the key for the object from * Installs a certbot plugin given the key for the object from
@@ -51,10 +53,10 @@ const certbot = {
* @param {string} pluginKey * @param {string} pluginKey
* @returns {Object} * @returns {Object}
*/ */
installPlugin: async (pluginKey) => { const installPlugin = async (pluginKey) => {
if (typeof dnsPlugins[pluginKey] === 'undefined') { if (typeof dnsPlugins[pluginKey] === "undefined") {
// throw Error(`Certbot plugin ${pluginKey} not found`); // throw Error(`Certbot plugin ${pluginKey} not found`);
throw new error.ItemNotFoundError(pluginKey); throw new errs.ItemNotFoundError(pluginKey);
} }
const plugin = dnsPlugins[pluginKey]; const plugin = dnsPlugins[pluginKey];
@@ -65,13 +67,14 @@ const certbot = {
// SETUPTOOLS_USE_DISTUTILS is required for certbot plugins to install correctly // SETUPTOOLS_USE_DISTUTILS is required for certbot plugins to install correctly
// in new versions of Python // in new versions of Python
let env = Object.assign({}, process.env, {SETUPTOOLS_USE_DISTUTILS: 'stdlib'}); let env = Object.assign({}, process.env, { SETUPTOOLS_USE_DISTUTILS: "stdlib" });
if (typeof plugin.env === 'object') { if (typeof plugin.env === "object") {
env = Object.assign(env, plugin.env); env = Object.assign(env, plugin.env);
} }
const cmd = `. /opt/certbot/bin/activate && pip install --no-cache-dir ${plugin.dependencies} ${plugin.package_name}${plugin.version} && deactivate`; const cmd = `. /opt/certbot/bin/activate && pip install --no-cache-dir ${plugin.dependencies} ${plugin.package_name}${plugin.version} && deactivate`;
return utils.exec(cmd, {env}) return utils
.exec(cmd, { env })
.then((result) => { .then((result) => {
logger.complete(`Installed ${pluginKey}`); logger.complete(`Installed ${pluginKey}`);
return result; return result;
@@ -79,7 +82,6 @@ const certbot = {
.catch((err) => { .catch((err) => {
throw err; throw err;
}); });
},
}; };
module.exports = certbot; export { installPlugins, installPlugin };
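Sketch of the new named exports in use; the paths assume the backend root and the plugin key is illustrative, it must match an entry in global/certbot-dns-plugins.json:

import { certbot as logger } from "./logger.js";
import { installPlugins } from "./lib/certbot.js";

try {
    await installPlugins(["cloudflare"]); // keys come from certbot-dns-plugins.json
} catch (err) {
    logger.error(err.message); // CommandError: some plugins failed to install
}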

View File

@@ -1,6 +1,6 @@
const fs = require('fs'); import fs from "node:fs";
const NodeRSA = require('node-rsa'); import NodeRSA from "node-rsa";
const logger = require('../logger').global; import { global as logger } from "../logger.js";
const keysFile = '/data/keys.json'; const keysFile = '/data/keys.json';
const mysqlEngine = 'mysql2'; const mysqlEngine = 'mysql2';
@@ -12,16 +12,18 @@ let instance = null;
// 1. Load from config file first (not recommended anymore) // 1. Load from config file first (not recommended anymore)
// 2. Use config env variables next // 2. Use config env variables next
const configure = () => { const configure = () => {
const filename = (process.env.NODE_CONFIG_DIR || './config') + '/' + (process.env.NODE_ENV || 'default') + '.json'; const filename = `${process.env.NODE_CONFIG_DIR || "./config"}/${process.env.NODE_ENV || "default"}.json`;
if (fs.existsSync(filename)) { if (fs.existsSync(filename)) {
let configData; let configData;
try { try {
configData = require(filename); // Load this json synchronously
const rawData = fs.readFileSync(filename);
configData = JSON.parse(rawData);
} catch (_) { } catch (_) {
// do nothing // do nothing
} }
if (configData && configData.database) { if (configData?.database) {
logger.info(`Using configuration from file: ${filename}`); logger.info(`Using configuration from file: ${filename}`);
instance = configData; instance = configData;
instance.keys = getKeys(); instance.keys = getKeys();
@@ -34,7 +36,7 @@ const configure = () => {
const envMysqlName = process.env.DB_MYSQL_NAME || null; const envMysqlName = process.env.DB_MYSQL_NAME || null;
if (envMysqlHost && envMysqlUser && envMysqlName) { if (envMysqlHost && envMysqlUser && envMysqlName) {
// we have enough mysql creds to go with mysql // we have enough mysql creds to go with mysql
logger.info('Using MySQL configuration'); logger.info("Using MySQL configuration");
instance = { instance = {
database: { database: {
engine: mysqlEngine, engine: mysqlEngine,
@@ -54,7 +56,7 @@ const configure = () => {
const envPostgresName = process.env.DB_POSTGRES_NAME || null; const envPostgresName = process.env.DB_POSTGRES_NAME || null;
if (envPostgresHost && envPostgresUser && envPostgresName) { if (envPostgresHost && envPostgresUser && envPostgresName) {
// we have enough postgres creds to go with postgres // we have enough postgres creds to go with postgres
logger.info('Using Postgres configuration'); logger.info("Using Postgres configuration");
instance = { instance = {
database: { database: {
engine: postgresEngine, engine: postgresEngine,
@@ -69,18 +71,18 @@ const configure = () => {
return; return;
} }
const envSqliteFile = process.env.DB_SQLITE_FILE || '/data/database.sqlite'; const envSqliteFile = process.env.DB_SQLITE_FILE || "/data/database.sqlite";
logger.info(`Using Sqlite: ${envSqliteFile}`); logger.info(`Using Sqlite: ${envSqliteFile}`);
instance = { instance = {
database: { database: {
engine: 'knex-native', engine: "knex-native",
knex: { knex: {
client: sqliteClientName, client: sqliteClientName,
connection: { connection: {
filename: envSqliteFile filename: envSqliteFile,
},
useNullAsDefault: true,
}, },
useNullAsDefault: true
}
}, },
keys: getKeys(), keys: getKeys(),
}; };
@@ -88,54 +90,55 @@ const configure = () => {
const getKeys = () => { const getKeys = () => {
// Get keys from file // Get keys from file
logger.debug("Cheecking for keys file:", keysFile);
if (!fs.existsSync(keysFile)) { if (!fs.existsSync(keysFile)) {
generateKeys(); generateKeys();
} else if (process.env.DEBUG) { } else if (process.env.DEBUG) {
logger.info('Keys file exists OK'); logger.info("Keys file exists OK");
} }
try { try {
return require(keysFile); // Load this json keysFile synchronously and return the json object
const rawData = fs.readFileSync(keysFile);
return JSON.parse(rawData);
} catch (err) { } catch (err) {
logger.error('Could not read JWT key pair from config file: ' + keysFile, err); logger.error(`Could not read JWT key pair from config file: ${keysFile}`, err);
process.exit(1); process.exit(1);
} }
}; };
const generateKeys = () => { const generateKeys = () => {
logger.info('Creating a new JWT key pair...'); logger.info("Creating a new JWT key pair...");
// Now create the keys and save them in the config. // Now create the keys and save them in the config.
const key = new NodeRSA({ b: 2048 }); const key = new NodeRSA({ b: 2048 });
key.generateKeyPair(); key.generateKeyPair();
const keys = { const keys = {
key: key.exportKey('private').toString(), key: key.exportKey("private").toString(),
pub: key.exportKey('public').toString(), pub: key.exportKey("public").toString(),
}; };
// Write keys config // Write keys config
try { try {
fs.writeFileSync(keysFile, JSON.stringify(keys, null, 2)); fs.writeFileSync(keysFile, JSON.stringify(keys, null, 2));
} catch (err) { } catch (err) {
logger.error('Could not write JWT key pair to config file: ' + keysFile + ': ' + err.message); logger.error(`Could not write JWT key pair to config file: ${keysFile}: ${err.message}`);
process.exit(1); process.exit(1);
} }
logger.info('Wrote JWT key pair to config file: ' + keysFile); logger.info(`Wrote JWT key pair to config file: ${keysFile}`);
}; };
module.exports = {
/** /**
* *
* @param {string} key ie: 'database' or 'database.engine' * @param {string} key ie: 'database' or 'database.engine'
* @returns {boolean} * @returns {boolean}
*/ */
has: function(key) { const configHas = (key) => {
instance === null && configure(); instance === null && configure();
const keys = key.split('.'); const keys = key.split(".");
let level = instance; let level = instance;
let has = true; let has = true;
keys.forEach((keyItem) => { keys.forEach((keyItem) => {
if (typeof level[keyItem] === 'undefined') { if (typeof level[keyItem] === "undefined") {
has = false; has = false;
} else { } else {
level = level[keyItem]; level = level[keyItem];
@@ -143,7 +146,7 @@ module.exports = {
}); });
return has; return has;
}, };
/** /**
* Gets a specific key from the top level * Gets a specific key from the top level
@@ -151,87 +154,91 @@ module.exports = {
* @param {string} key * @param {string} key
* @returns {*} * @returns {*}
*/ */
get: function (key) { const configGet = (key) => {
instance === null && configure(); instance === null && configure();
if (key && typeof instance[key] !== 'undefined') { if (key && typeof instance[key] !== "undefined") {
return instance[key]; return instance[key];
} }
return instance; return instance;
}, };
/** /**
* Is this a sqlite configuration? * Is this a sqlite configuration?
* *
* @returns {boolean} * @returns {boolean}
*/ */
isSqlite: function () { const isSqlite = () => {
instance === null && configure(); instance === null && configure();
return instance.database.knex && instance.database.knex.client === sqliteClientName; return instance.database.knex && instance.database.knex.client === sqliteClientName;
}, };
/** /**
* Is this a mysql configuration? * Is this a mysql configuration?
* *
* @returns {boolean} * @returns {boolean}
*/ */
isMysql: function () { const isMysql = () => {
instance === null && configure(); instance === null && configure();
return instance.database.engine === mysqlEngine; return instance.database.engine === mysqlEngine;
}, };
/** /**
* Is this a postgres configuration? * Is this a postgres configuration?
* *
* @returns {boolean} * @returns {boolean}
*/ */
isPostgres: function () { const isPostgres = () => {
instance === null && configure(); instance === null && configure();
return instance.database.engine === postgresEngine; return instance.database.engine === postgresEngine;
}, };
/** /**
 * Are we running in debug mode? * Are we running in debug mode?
* *
* @returns {boolean} * @returns {boolean}
*/ */
debug: function () { const isDebugMode = () => !!process.env.DEBUG;
return !!process.env.DEBUG;
}, /**
* Are we running in CI?
*
* @returns {boolean}
*/
const isCI = () => process.env.CI === "true" && process.env.DEBUG === "true";
/** /**
* Returns a public key * Returns a public key
* *
* @returns {string} * @returns {string}
*/ */
getPublicKey: function () { const getPublicKey = () => {
instance === null && configure(); instance === null && configure();
return instance.keys.pub; return instance.keys.pub;
}, };
/** /**
* Returns a private key * Returns a private key
* *
* @returns {string} * @returns {string}
*/ */
getPrivateKey: function () { const getPrivateKey = () => {
instance === null && configure(); instance === null && configure();
return instance.keys.key; return instance.keys.key;
}, };
/** /**
* @returns {boolean} * @returns {boolean}
*/ */
useLetsencryptStaging: function () { const useLetsencryptStaging = () => !!process.env.LE_STAGING;
return !!process.env.LE_STAGING;
},
/** /**
* @returns {string|null} * @returns {string|null}
*/ */
useLetsencryptServer: function () { const useLetsencryptServer = () => {
if (process.env.LE_SERVER) { if (process.env.LE_SERVER) {
return process.env.LE_SERVER; return process.env.LE_SERVER;
} }
return null; return null;
}
}; };
export { isCI, configHas, configGet, isSqlite, isMysql, isPostgres, isDebugMode, getPrivateKey, getPublicKey, useLetsencryptStaging, useLetsencryptServer };
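Since config.js no longer exports a module object, callers pick the named helpers they need; a small sketch (the keys and branches are illustrative):

import { configGet, isSqlite, useLetsencryptServer } from "./lib/config.js";

const db = configGet("database"); // triggers configure() lazily on first use
if (isSqlite()) {
    // sqlite is the fallback when neither MySQL nor Postgres env vars are set
}
const leServer = useLetsencryptServer(); // null unless LE_SERVER is set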

View File

@@ -1,13 +1,11 @@
const _ = require('lodash'); import _ from "lodash";
const util = require('util');
module.exports = { const errs = {
PermissionError: function (_, previous) {
PermissionError: function (message, previous) {
Error.captureStackTrace(this, this.constructor); Error.captureStackTrace(this, this.constructor);
this.name = this.constructor.name; this.name = this.constructor.name;
this.previous = previous; this.previous = previous;
this.message = 'Permission Denied'; this.message = "Permission Denied";
this.public = true; this.public = true;
this.status = 403; this.status = 403;
}, },
@@ -16,18 +14,22 @@ module.exports = {
Error.captureStackTrace(this, this.constructor); Error.captureStackTrace(this, this.constructor);
this.name = this.constructor.name; this.name = this.constructor.name;
this.previous = previous; this.previous = previous;
this.message = 'Item Not Found - ' + id; this.message = "Not Found";
if (id) {
this.message = `Not Found - ${id}`;
}
this.public = true; this.public = true;
this.status = 404; this.status = 404;
}, },
AuthError: function (message, previous) { AuthError: function (message, messageI18n, previous) {
Error.captureStackTrace(this, this.constructor); Error.captureStackTrace(this, this.constructor);
this.name = this.constructor.name; this.name = this.constructor.name;
this.previous = previous; this.previous = previous;
this.message = message; this.message = message;
this.message_i18n = messageI18n;
this.public = true; this.public = true;
this.status = 401; this.status = 400;
}, },
InternalError: function (message, previous) { InternalError: function (message, previous) {
@@ -94,6 +96,8 @@ module.exports = {
}, },
}; };
_.forEach(module.exports, function (error) { _.forEach(errs, (err) => {
util.inherits(error, Error); err.prototype = Object.create(Error.prototype);
}); });
export default errs;
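The error constructors remain plain functions wired to Error.prototype, so instanceof checks still work; for example:

import errs from "./lib/error.js";

const notFound = new errs.ItemNotFoundError("certificate-12");
console.log(notFound instanceof Error); // true, via Object.create(Error.prototype)
console.log(notFound.message, notFound.status, notFound.public); // "Not Found - certificate-12" 404 true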

View File

@@ -1,12 +1,13 @@
module.exports = function (req, res, next) { export default (req, res, next) => {
if (req.headers.origin) { if (req.headers.origin) {
res.set({ res.set({
'Access-Control-Allow-Origin': req.headers.origin, "Access-Control-Allow-Origin": req.headers.origin,
'Access-Control-Allow-Credentials': true, "Access-Control-Allow-Credentials": true,
'Access-Control-Allow-Methods': 'OPTIONS, GET, POST', "Access-Control-Allow-Methods": "OPTIONS, GET, POST",
'Access-Control-Allow-Headers': 'Content-Type, Cache-Control, Pragma, Expires, Authorization, X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit', "Access-Control-Allow-Headers":
'Access-Control-Max-Age': 5 * 60, "Content-Type, Cache-Control, Pragma, Expires, Authorization, X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit",
'Access-Control-Expose-Headers': 'X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit' "Access-Control-Max-Age": 5 * 60,
"Access-Control-Expose-Headers": "X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit",
}); });
next(); next();
} else { } else {

View File

@@ -1,15 +1,15 @@
const Access = require('../access'); import Access from "../access.js";
module.exports = () => { export default () => {
return function (req, res, next) { return async (_, res, next) => {
try {
res.locals.access = null; res.locals.access = null;
let access = new Access(res.locals.token || null); const access = new Access(res.locals.token || null);
access.load() await access.load();
.then(() => {
res.locals.access = access; res.locals.access = access;
next(); next();
}) } catch (err) {
.catch(next); next(err);
}
}; };
}; };

View File

@@ -1,13 +1,13 @@
module.exports = function () { export default function () {
return function (req, res, next) { return (req, res, next) => {
if (req.headers.authorization) { if (req.headers.authorization) {
let parts = req.headers.authorization.split(' '); const parts = req.headers.authorization.split(" ");
if (parts && parts[0] === 'Bearer' && parts[1]) { if (parts && parts[0] === "Bearer" && parts[1]) {
res.locals.token = parts[1]; res.locals.token = parts[1];
} }
} }
next(); next();
}; };
}; }

View File

@@ -1,7 +1,6 @@
let _ = require('lodash'); import _ from "lodash";
module.exports = function (default_sort, default_offset, default_limit, max_limit) {
export default (default_sort, default_offset, default_limit, max_limit) => {
/** /**
* This will setup the req query params with filtered data and defaults * This will setup the req query params with filtered data and defaults
* *
@@ -11,34 +10,35 @@ module.exports = function (default_sort, default_offset, default_limit, max_limi
* *
*/ */
return function (req, res, next) { return (req, _res, next) => {
req.query.offset =
req.query.offset = typeof req.query.limit === 'undefined' ? default_offset || 0 : parseInt(req.query.offset, 10); typeof req.query.limit === "undefined" ? default_offset || 0 : Number.parseInt(req.query.offset, 10);
req.query.limit = typeof req.query.limit === 'undefined' ? default_limit || 50 : parseInt(req.query.limit, 10); req.query.limit =
typeof req.query.limit === "undefined" ? default_limit || 50 : Number.parseInt(req.query.limit, 10);
if (max_limit && req.query.limit > max_limit) { if (max_limit && req.query.limit > max_limit) {
req.query.limit = max_limit; req.query.limit = max_limit;
} }
// Sorting // Sorting
let sort = typeof req.query.sort === 'undefined' ? default_sort : req.query.sort; let sort = typeof req.query.sort === "undefined" ? default_sort : req.query.sort;
let myRegexp = /.*\.(asc|desc)$/ig; const myRegexp = /.*\.(asc|desc)$/gi;
let sort_array = []; const sort_array = [];
sort = sort.split(','); sort = sort.split(",");
_.map(sort, function (val) { _.map(sort, (val) => {
let matches = myRegexp.exec(val); const matches = myRegexp.exec(val);
if (matches !== null) { if (matches !== null) {
let dir = matches[1]; const dir = matches[1];
sort_array.push({ sort_array.push({
field: val.substr(0, val.length - (dir.length + 1)), field: val.substr(0, val.length - (dir.length + 1)),
dir: dir.toLowerCase() dir: dir.toLowerCase(),
}); });
} else { } else {
sort_array.push({ sort_array.push({
field: val, field: val,
dir: 'asc' dir: "asc",
}); });
} }
}); });

View File

@@ -1,9 +1,8 @@
module.exports = (req, res, next) => { export default (req, res, next) => {
if (req.params.user_id === 'me' && res.locals.access) { if (req.params.user_id === 'me' && res.locals.access) {
req.params.user_id = res.locals.access.token.get('attrs').id; req.params.user_id = res.locals.access.token.get('attrs').id;
} else { } else {
req.params.user_id = parseInt(req.params.user_id, 10); req.params.user_id = Number.parseInt(req.params.user_id, 10);
} }
next(); next();
}; };

View File

@@ -1,8 +1,6 @@
const moment = require('moment'); import moment from "moment";
const {isPostgres} = require('./config'); import { ref } from "objection";
const {ref} = require('objection'); import { isPostgres } from "./config.js";
module.exports = {
/** /**
* Takes an expression such as 30d and returns a moment object of that date in future * Takes an expression such as 30d and returns a moment object of that date in future
@@ -22,32 +20,32 @@ module.exports = {
* @param {String} expression * @param {String} expression
* @returns {Object} * @returns {Object}
*/ */
parseDatePeriod: function (expression) { const parseDatePeriod = (expression) => {
let matches = expression.match(/^([0-9]+)(y|Q|M|w|d|h|m|s|ms)$/m); const matches = expression.match(/^([0-9]+)(y|Q|M|w|d|h|m|s|ms)$/m);
if (matches) { if (matches) {
return moment().add(matches[1], matches[2]); return moment().add(matches[1], matches[2]);
} }
return null; return null;
}, };
convertIntFieldsToBool: function (obj, fields) { const convertIntFieldsToBool = (obj, fields) => {
fields.forEach(function (field) { fields.forEach((field) => {
if (typeof obj[field] !== 'undefined') { if (typeof obj[field] !== "undefined") {
obj[field] = obj[field] === 1; obj[field] = obj[field] === 1;
} }
}); });
return obj; return obj;
}, };
convertBoolFieldsToInt: function (obj, fields) { const convertBoolFieldsToInt = (obj, fields) => {
fields.forEach(function (field) { fields.forEach((field) => {
if (typeof obj[field] !== 'undefined') { if (typeof obj[field] !== "undefined") {
obj[field] = obj[field] ? 1 : 0; obj[field] = obj[field] ? 1 : 0;
} }
}); });
return obj; return obj;
}, };
/** /**
* Casts a column to json if using postgres * Casts a column to json if using postgres
@@ -55,8 +53,6 @@ module.exports = {
* @param {string} colName * @param {string} colName
* @returns {string|Objection.ReferenceBuilder} * @returns {string|Objection.ReferenceBuilder}
*/ */
castJsonIfNeed: function (colName) { const castJsonIfNeed = (colName) => (isPostgres() ? ref(colName).castText() : colName);
return isPostgres() ? ref(colName).castText() : colName;
}
}; export { parseDatePeriod, convertIntFieldsToBool, convertBoolFieldsToInt, castJsonIfNeed };
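Usage sketch for the now-named helper exports; the import path assumes this file stays at lib/helpers.js:

import { parseDatePeriod, convertBoolFieldsToInt } from "./lib/helpers.js";

const expiry = parseDatePeriod("30d"); // moment 30 days from now, or null for an invalid expression
const row = convertBoolFieldsToInt({ enabled: true }, ["enabled"]); // { enabled: 1 }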

View File

@@ -1,5 +1,6 @@
const migrate_name = 'identifier_for_migrate'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "identifier_for_migrate";
/** /**
* Migrate * Migrate
@@ -7,16 +8,15 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex, Promise) { const up = (_knex) => {
logger.info(`[${migrateName}] Migrating Up...`);
logger.info('[' + migrate_name + '] Migrating Up...');
// Create Table example: // Create Table example:
/*return knex.schema.createTable('notification', (table) => { /*
return knex.schema.createTable('notification', (table) => {
table.increments().primary(); table.increments().primary();
table.string('name').notNull(); table.string('name').notNull();
table.string('type').notNull(); table.string('type').notNull();
@@ -24,10 +24,11 @@ exports.up = function (knex, Promise) {
table.integer('modified_on').notNull(); table.integer('modified_on').notNull();
}) })
.then(function () { .then(function () {
logger.info('[' + migrate_name + '] Notification Table created'); logger.info('[' + migrateName + '] Notification Table created');
});*/ });
*/
logger.info('[' + migrate_name + '] Migrating Up Complete'); logger.info(`[${migrateName}] Migrating Up Complete`);
return Promise.resolve(true); return Promise.resolve(true);
}; };
@@ -36,20 +37,23 @@ exports.up = function (knex, Promise) {
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex, Promise) { const down = (_knex) => {
logger.info('[' + migrate_name + '] Migrating Down...'); logger.info(`[${migrateName}] Migrating Down...`);
// Drop table example: // Drop table example:
/*return knex.schema.dropTable('notification') /*
return knex.schema.dropTable('notification')
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] Notification Table dropped'); logger.info(`[${migrateName}] Notification Table dropped`);
});*/ });
*/
logger.info('[' + migrate_name + '] Migrating Down Complete'); logger.info(`[${migrateName}] Migrating Down Complete`);
return Promise.resolve(true); return Promise.resolve(true);
}; };
export { up, down };

View File

@@ -1,30 +1,31 @@
const _ = require('lodash'); import { exec as nodeExec, execFile as nodeExecFile } from "node:child_process";
const exec = require('node:child_process').exec; import { dirname } from "node:path";
const execFile = require('node:child_process').execFile; import { fileURLToPath } from "node:url";
const { Liquid } = require('liquidjs'); import { Liquid } from "liquidjs";
const logger = require('../logger').global; import _ from "lodash";
const error = require('./error'); import { global as logger } from "../logger.js";
import errs from "./error.js";
module.exports = { const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
exec: async (cmd, options = {}) => {
logger.debug('CMD:', cmd);
const exec = async (cmd, options = {}) => {
logger.debug("CMD:", cmd);
const { stdout, stderr } = await new Promise((resolve, reject) => { const { stdout, stderr } = await new Promise((resolve, reject) => {
const child = exec(cmd, options, (isError, stdout, stderr) => { const child = nodeExec(cmd, options, (isError, stdout, stderr) => {
if (isError) { if (isError) {
reject(new error.CommandError(stderr, isError)); reject(new errs.CommandError(stderr, isError));
} else { } else {
resolve({ stdout, stderr }); resolve({ stdout, stderr });
} }
}); });
child.on('error', (e) => { child.on("error", (e) => {
reject(new error.CommandError(stderr, 1, e)); reject(new errs.CommandError(stderr, 1, e));
}); });
}); });
return stdout; return stdout;
}, };
/** /**
* @param {String} cmd * @param {String} cmd
@@ -32,22 +33,20 @@ module.exports = {
* @param {Object|undefined} options * @param {Object|undefined} options
* @returns {Promise} * @returns {Promise}
*/ */
execFile: (cmd, args, options) => { const execFile = (cmd, args, options) => {
logger.debug(`CMD: ${cmd} ${args ? args.join(' ') : ''}`); logger.debug(`CMD: ${cmd} ${args ? args.join(" ") : ""}`);
if (typeof options === 'undefined') { const opts = options || {};
options = {};
}
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
execFile(cmd, args, options, (err, stdout, stderr) => { nodeExecFile(cmd, args, opts, (err, stdout, stderr) => {
if (err && typeof err === 'object') { if (err && typeof err === "object") {
reject(new error.CommandError(stderr, 1, err)); reject(new errs.CommandError(stderr, 1, err));
} else { } else {
resolve(stdout.trim()); resolve(stdout.trim());
} }
}); });
}); });
}, };
/** /**
* Used in objection query builder * Used in objection query builder
@@ -55,7 +54,7 @@ module.exports = {
* @param {Array} omissions * @param {Array} omissions
* @returns {Function} * @returns {Function}
*/ */
omitRow: (omissions) => { const omitRow = (omissions) => {
/** /**
* @param {Object} row * @param {Object} row
* @returns {Object} * @returns {Object}
@@ -63,7 +62,7 @@ module.exports = {
return (row) => { return (row) => {
return _.omit(row, omissions); return _.omit(row, omissions);
}; };
}, };
/** /**
* Used in objection query builder * Used in objection query builder
@@ -71,7 +70,7 @@ module.exports = {
* @param {Array} omissions * @param {Array} omissions
* @returns {Function} * @returns {Function}
*/ */
omitRows: (omissions) => { const omitRows = (omissions) => {
/** /**
* @param {Array} rows * @param {Array} rows
* @returns {Object} * @returns {Object}
@@ -82,14 +81,14 @@ module.exports = {
}); });
return rows; return rows;
}; };
}, };
/** /**
* @returns {Object} Liquid render engine * @returns {Object} Liquid render engine
*/ */
getRenderEngine: () => { const getRenderEngine = () => {
const renderEngine = new Liquid({ const renderEngine = new Liquid({
root: `${__dirname}/../templates/` root: `${__dirname}/../templates/`,
}); });
/** /**
@@ -98,13 +97,14 @@ module.exports = {
* directive string * directive string
* address string * address string
*/ */
renderEngine.registerFilter('nginxAccessRule', (v) => { renderEngine.registerFilter("nginxAccessRule", (v) => {
if (typeof v.directive !== 'undefined' && typeof v.address !== 'undefined' && v.directive && v.address) { if (typeof v.directive !== "undefined" && typeof v.address !== "undefined" && v.directive && v.address) {
return `${v.directive} ${v.address};`; return `${v.directive} ${v.address};`;
} }
return ''; return "";
}); });
return renderEngine; return renderEngine;
}
}; };
export default { exec, execFile, omitRow, omitRows, getRenderEngine };
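The default export keeps the same helpers as before; a short sketch with illustrative commands:

import utils from "./lib/utils.js";

// exec resolves with stdout and rejects with errs.CommandError on failure.
const out = await utils.exec("echo hello");
// execFile skips the shell; arguments are passed as an array.
const who = await utils.execFile("whoami", []);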

View File

@@ -1,5 +1,5 @@
const Ajv = require('ajv/dist/2020'); import Ajv from "ajv/dist/2020.js";
const error = require('../error'); import errs from "../error.js";
const ajv = new Ajv({ const ajv = new Ajv({
verbose: true, verbose: true,
@@ -14,30 +14,27 @@ const ajv = new Ajv({
* @param {Object} payload * @param {Object} payload
* @returns {Promise} * @returns {Promise}
*/ */
function apiValidator (schema, payload/*, description*/) { const apiValidator = async (schema, payload /*, description*/) => {
return new Promise(function Promise_apiValidator (resolve, reject) { if (!schema) {
if (schema === null) { throw new errs.ValidationError("Schema is undefined");
reject(new error.ValidationError('Schema is undefined'));
return;
} }
if (typeof payload === 'undefined') { // Can't use falsy check here as valid payload could be `0` or `false`
reject(new error.ValidationError('Payload is undefined')); if (typeof payload === "undefined") {
return; throw new errs.ValidationError("Payload is undefined");
} }
const validate = ajv.compile(schema); const validate = ajv.compile(schema);
const valid = validate(payload); const valid = validate(payload);
if (valid && !validate.errors) { if (valid && !validate.errors) {
resolve(payload); return payload;
} else {
let message = ajv.errorsText(validate.errors);
let err = new error.ValidationError(message);
err.debug = [validate.errors, payload];
reject(err);
}
});
} }
module.exports = apiValidator; const message = ajv.errorsText(validate.errors);
const err = new errs.ValidationError(message);
err.debug = [validate.errors, payload];
throw err;
};
export default apiValidator;
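apiValidator is now a plain async function; a sketch with an inline schema, assuming the module remains at lib/validator/api.js (the schema and payload here are illustrative, not one of the project's schema files):

import apiValidator from "./lib/validator/api.js";

const schema = { type: "object", required: ["name"], properties: { name: { type: "string" } } };
const payload = await apiValidator(schema, { name: "test" }); // throws errs.ValidationError on failure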

View File

@@ -1,7 +1,7 @@
const _ = require('lodash'); import Ajv from "ajv/dist/2020.js";
const Ajv = require('ajv/dist/2020'); import _ from "lodash";
const error = require('../error'); import commonDefinitions from "../../schema/common.json" with { type: "json" };
const commonDefinitions = require('../../schema/common.json'); import errs from "../error.js";
RegExp.prototype.toJSON = RegExp.prototype.toString; RegExp.prototype.toJSON = RegExp.prototype.toString;
@@ -11,7 +11,7 @@ const ajv = new Ajv({
allowUnionTypes: true, allowUnionTypes: true,
coerceTypes: true, coerceTypes: true,
strict: false, strict: false,
schemas: [commonDefinitions] schemas: [commonDefinitions],
}); });
/** /**
@@ -20,26 +20,26 @@ const ajv = new Ajv({
* @param {Object} payload * @param {Object} payload
* @returns {Promise} * @returns {Promise}
*/ */
function validator (schema, payload) { const validator = (schema, payload) => {
return new Promise(function (resolve, reject) { return new Promise((resolve, reject) => {
if (!payload) { if (!payload) {
reject(new error.InternalValidationError('Payload is falsy')); reject(new errs.InternalValidationError("Payload is falsy"));
} else { } else {
try { try {
let validate = ajv.compile(schema); const validate = ajv.compile(schema);
let valid = validate(payload); const valid = validate(payload);
if (valid && !validate.errors) { if (valid && !validate.errors) {
resolve(_.cloneDeep(payload)); resolve(_.cloneDeep(payload));
} else { } else {
let message = ajv.errorsText(validate.errors); const message = ajv.errorsText(validate.errors);
reject(new error.InternalValidationError(message)); reject(new errs.InternalValidationError(message));
} }
} catch (err) { } catch (err) {
reject(err); reject(err);
} }
} }
}); });
} };
module.exports = validator; export default validator;

View File

@@ -1,14 +1,18 @@
const {Signale} = require('signale'); import signale from "signale";
module.exports = { const opts = {
global: new Signale({scope: 'Global '}), logLevel: "info",
migrate: new Signale({scope: 'Migrate '}),
express: new Signale({scope: 'Express '}),
access: new Signale({scope: 'Access '}),
nginx: new Signale({scope: 'Nginx '}),
ssl: new Signale({scope: 'SSL '}),
certbot: new Signale({scope: 'Certbot '}),
import: new Signale({scope: 'Importer '}),
setup: new Signale({scope: 'Setup '}),
ip_ranges: new Signale({scope: 'IP Ranges'})
}; };
const global = new signale.Signale({ scope: "Global ", ...opts });
const migrate = new signale.Signale({ scope: "Migrate ", ...opts });
const express = new signale.Signale({ scope: "Express ", ...opts });
const access = new signale.Signale({ scope: "Access ", ...opts });
const nginx = new signale.Signale({ scope: "Nginx ", ...opts });
const ssl = new signale.Signale({ scope: "SSL ", ...opts });
const certbot = new signale.Signale({ scope: "Certbot ", ...opts });
const importer = new signale.Signale({ scope: "Importer ", ...opts });
const setup = new signale.Signale({ scope: "Setup ", ...opts });
const ipRanges = new signale.Signale({ scope: "IP Ranges", ...opts });
export { global, migrate, express, access, nginx, ssl, certbot, importer, setup, ipRanges };
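Each scope is now a named export, so modules import only the logger they need, e.g.:

import { setup as logger } from "./logger.js";

logger.info("Setup wizard ready"); // prefixed with the "Setup" scope
logger.error("Something went wrong");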

View File

@@ -1,15 +1,13 @@
const db = require('./db'); import db from "./db.js";
const logger = require('./logger').migrate; import { migrate as logger } from "./logger.js";
module.exports = { const migrateUp = async () => {
latest: function () { const version = await db.migrate.currentVersion();
return db.migrate.currentVersion() logger.info("Current database version:", version);
.then((version) => { return await db.migrate.latest({
logger.info('Current database version:', version); tableName: "migrations",
return db.migrate.latest({ directory: "migrations",
tableName: 'migrations',
directory: 'migrations'
}); });
});
}
}; };
export { migrateUp };
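A startup sketch for the new migrateUp export; the call site is illustrative:

import { migrateUp } from "./migrate.js";

// Logs the current schema version, then runs pending files from the migrations/ directory.
await migrateUp();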

View File

@@ -1,5 +1,6 @@
const migrate_name = 'initial-schema'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "initial-schema";
/** /**
* Migrate * Migrate
@@ -7,199 +8,199 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex/*, Promise*/) { const up = (knex) => {
logger.info('[' + migrate_name + '] Migrating Up...'); logger.info(`[${migrateName}] Migrating Up...`);
return knex.schema.createTable('auth', (table) => { return knex.schema
.createTable("auth", (table) => {
table.increments().primary(); table.increments().primary();
table.dateTime('created_on').notNull(); table.dateTime("created_on").notNull();
table.dateTime('modified_on').notNull(); table.dateTime("modified_on").notNull();
table.integer('user_id').notNull().unsigned(); table.integer("user_id").notNull().unsigned();
table.string('type', 30).notNull(); table.string("type", 30).notNull();
table.string('secret').notNull(); table.string("secret").notNull();
table.json('meta').notNull(); table.json("meta").notNull();
table.integer('is_deleted').notNull().unsigned().defaultTo(0); table.integer("is_deleted").notNull().unsigned().defaultTo(0);
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] auth Table created'); logger.info(`[${migrateName}] auth Table created`);
return knex.schema.createTable('user', (table) => { return knex.schema.createTable("user", (table) => {
table.increments().primary(); table.increments().primary();
table.dateTime('created_on').notNull(); table.dateTime("created_on").notNull();
table.dateTime('modified_on').notNull(); table.dateTime("modified_on").notNull();
table.integer('is_deleted').notNull().unsigned().defaultTo(0); table.integer("is_deleted").notNull().unsigned().defaultTo(0);
table.integer('is_disabled').notNull().unsigned().defaultTo(0); table.integer("is_disabled").notNull().unsigned().defaultTo(0);
table.string('email').notNull(); table.string("email").notNull();
table.string('name').notNull(); table.string("name").notNull();
table.string('nickname').notNull(); table.string("nickname").notNull();
table.string('avatar').notNull(); table.string("avatar").notNull();
table.json('roles').notNull(); table.json("roles").notNull();
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] user Table created'); logger.info(`[${migrateName}] user Table created`);
return knex.schema.createTable('user_permission', (table) => { return knex.schema.createTable("user_permission", (table) => {
table.increments().primary(); table.increments().primary();
table.dateTime('created_on').notNull(); table.dateTime("created_on").notNull();
table.dateTime('modified_on').notNull(); table.dateTime("modified_on").notNull();
table.integer('user_id').notNull().unsigned(); table.integer("user_id").notNull().unsigned();
table.string('visibility').notNull(); table.string("visibility").notNull();
table.string('proxy_hosts').notNull(); table.string("proxy_hosts").notNull();
table.string('redirection_hosts').notNull(); table.string("redirection_hosts").notNull();
table.string('dead_hosts').notNull(); table.string("dead_hosts").notNull();
table.string('streams').notNull(); table.string("streams").notNull();
table.string('access_lists').notNull(); table.string("access_lists").notNull();
table.string('certificates').notNull(); table.string("certificates").notNull();
table.unique('user_id'); table.unique("user_id");
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] user_permission Table created'); logger.info(`[${migrateName}] user_permission Table created`);
return knex.schema.createTable('proxy_host', (table) => { return knex.schema.createTable("proxy_host", (table) => {
table.increments().primary(); table.increments().primary();
table.dateTime('created_on').notNull(); table.dateTime("created_on").notNull();
table.dateTime('modified_on').notNull(); table.dateTime("modified_on").notNull();
table.integer('owner_user_id').notNull().unsigned(); table.integer("owner_user_id").notNull().unsigned();
table.integer('is_deleted').notNull().unsigned().defaultTo(0); table.integer("is_deleted").notNull().unsigned().defaultTo(0);
table.json('domain_names').notNull(); table.json("domain_names").notNull();
table.string('forward_ip').notNull(); table.string("forward_ip").notNull();
table.integer('forward_port').notNull().unsigned(); table.integer("forward_port").notNull().unsigned();
table.integer('access_list_id').notNull().unsigned().defaultTo(0); table.integer("access_list_id").notNull().unsigned().defaultTo(0);
table.integer('certificate_id').notNull().unsigned().defaultTo(0); table.integer("certificate_id").notNull().unsigned().defaultTo(0);
table.integer('ssl_forced').notNull().unsigned().defaultTo(0); table.integer("ssl_forced").notNull().unsigned().defaultTo(0);
table.integer('caching_enabled').notNull().unsigned().defaultTo(0); table.integer("caching_enabled").notNull().unsigned().defaultTo(0);
table.integer('block_exploits').notNull().unsigned().defaultTo(0); table.integer("block_exploits").notNull().unsigned().defaultTo(0);
table.text('advanced_config').notNull().defaultTo(''); table.text("advanced_config").notNull().defaultTo("");
table.json('meta').notNull(); table.json("meta").notNull();
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] proxy_host Table created'); logger.info(`[${migrateName}] proxy_host Table created`);
return knex.schema.createTable('redirection_host', (table) => { return knex.schema.createTable("redirection_host", (table) => {
table.increments().primary(); table.increments().primary();
table.dateTime('created_on').notNull(); table.dateTime("created_on").notNull();
table.dateTime('modified_on').notNull(); table.dateTime("modified_on").notNull();
table.integer('owner_user_id').notNull().unsigned(); table.integer("owner_user_id").notNull().unsigned();
table.integer('is_deleted').notNull().unsigned().defaultTo(0); table.integer("is_deleted").notNull().unsigned().defaultTo(0);
table.json('domain_names').notNull(); table.json("domain_names").notNull();
table.string('forward_domain_name').notNull(); table.string("forward_domain_name").notNull();
table.integer('preserve_path').notNull().unsigned().defaultTo(0); table.integer("preserve_path").notNull().unsigned().defaultTo(0);
table.integer('certificate_id').notNull().unsigned().defaultTo(0); table.integer("certificate_id").notNull().unsigned().defaultTo(0);
table.integer('ssl_forced').notNull().unsigned().defaultTo(0); table.integer("ssl_forced").notNull().unsigned().defaultTo(0);
table.integer('block_exploits').notNull().unsigned().defaultTo(0); table.integer("block_exploits").notNull().unsigned().defaultTo(0);
table.text('advanced_config').notNull().defaultTo(''); table.text("advanced_config").notNull().defaultTo("");
table.json('meta').notNull(); table.json("meta").notNull();
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] redirection_host Table created'); logger.info(`[${migrateName}] redirection_host Table created`);
return knex.schema.createTable('dead_host', (table) => { return knex.schema.createTable("dead_host", (table) => {
table.increments().primary(); table.increments().primary();
table.dateTime('created_on').notNull(); table.dateTime("created_on").notNull();
table.dateTime('modified_on').notNull(); table.dateTime("modified_on").notNull();
table.integer('owner_user_id').notNull().unsigned(); table.integer("owner_user_id").notNull().unsigned();
table.integer('is_deleted').notNull().unsigned().defaultTo(0); table.integer("is_deleted").notNull().unsigned().defaultTo(0);
table.json('domain_names').notNull(); table.json("domain_names").notNull();
table.integer('certificate_id').notNull().unsigned().defaultTo(0); table.integer("certificate_id").notNull().unsigned().defaultTo(0);
table.integer('ssl_forced').notNull().unsigned().defaultTo(0); table.integer("ssl_forced").notNull().unsigned().defaultTo(0);
table.text('advanced_config').notNull().defaultTo(''); table.text("advanced_config").notNull().defaultTo("");
table.json('meta').notNull(); table.json("meta").notNull();
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] dead_host Table created'); logger.info(`[${migrateName}] dead_host Table created`);
return knex.schema.createTable('stream', (table) => { return knex.schema.createTable("stream", (table) => {
table.increments().primary(); table.increments().primary();
table.dateTime('created_on').notNull(); table.dateTime("created_on").notNull();
table.dateTime('modified_on').notNull(); table.dateTime("modified_on").notNull();
table.integer('owner_user_id').notNull().unsigned(); table.integer("owner_user_id").notNull().unsigned();
table.integer('is_deleted').notNull().unsigned().defaultTo(0); table.integer("is_deleted").notNull().unsigned().defaultTo(0);
table.integer('incoming_port').notNull().unsigned(); table.integer("incoming_port").notNull().unsigned();
table.string('forward_ip').notNull(); table.string("forward_ip").notNull();
table.integer('forwarding_port').notNull().unsigned(); table.integer("forwarding_port").notNull().unsigned();
table.integer('tcp_forwarding').notNull().unsigned().defaultTo(0); table.integer("tcp_forwarding").notNull().unsigned().defaultTo(0);
table.integer('udp_forwarding').notNull().unsigned().defaultTo(0); table.integer("udp_forwarding").notNull().unsigned().defaultTo(0);
table.json('meta').notNull(); table.json("meta").notNull();
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] stream Table created'); logger.info(`[${migrateName}] stream Table created`);
return knex.schema.createTable('access_list', (table) => { return knex.schema.createTable("access_list", (table) => {
table.increments().primary(); table.increments().primary();
table.dateTime('created_on').notNull(); table.dateTime("created_on").notNull();
table.dateTime('modified_on').notNull(); table.dateTime("modified_on").notNull();
table.integer('owner_user_id').notNull().unsigned(); table.integer("owner_user_id").notNull().unsigned();
table.integer('is_deleted').notNull().unsigned().defaultTo(0); table.integer("is_deleted").notNull().unsigned().defaultTo(0);
table.string('name').notNull(); table.string("name").notNull();
table.json('meta').notNull(); table.json("meta").notNull();
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] access_list Table created'); logger.info(`[${migrateName}] access_list Table created`);
return knex.schema.createTable('certificate', (table) => { return knex.schema.createTable("certificate", (table) => {
table.increments().primary(); table.increments().primary();
table.dateTime('created_on').notNull(); table.dateTime("created_on").notNull();
table.dateTime('modified_on').notNull(); table.dateTime("modified_on").notNull();
table.integer('owner_user_id').notNull().unsigned(); table.integer("owner_user_id").notNull().unsigned();
table.integer('is_deleted').notNull().unsigned().defaultTo(0); table.integer("is_deleted").notNull().unsigned().defaultTo(0);
table.string('provider').notNull(); table.string("provider").notNull();
table.string('nice_name').notNull().defaultTo(''); table.string("nice_name").notNull().defaultTo("");
table.json('domain_names').notNull(); table.json("domain_names").notNull();
table.dateTime('expires_on').notNull(); table.dateTime("expires_on").notNull();
table.json('meta').notNull(); table.json("meta").notNull();
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] certificate Table created'); logger.info(`[${migrateName}] certificate Table created`);
return knex.schema.createTable('access_list_auth', (table) => { return knex.schema.createTable("access_list_auth", (table) => {
table.increments().primary(); table.increments().primary();
table.dateTime('created_on').notNull(); table.dateTime("created_on").notNull();
table.dateTime('modified_on').notNull(); table.dateTime("modified_on").notNull();
table.integer('access_list_id').notNull().unsigned(); table.integer("access_list_id").notNull().unsigned();
table.string('username').notNull(); table.string("username").notNull();
table.string('password').notNull(); table.string("password").notNull();
table.json('meta').notNull(); table.json("meta").notNull();
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] access_list_auth Table created'); logger.info(`[${migrateName}] access_list_auth Table created`);
return knex.schema.createTable('audit_log', (table) => { return knex.schema.createTable("audit_log", (table) => {
table.increments().primary(); table.increments().primary();
table.dateTime('created_on').notNull(); table.dateTime("created_on").notNull();
table.dateTime('modified_on').notNull(); table.dateTime("modified_on").notNull();
table.integer('user_id').notNull().unsigned(); table.integer("user_id").notNull().unsigned();
table.string('object_type').notNull().defaultTo(''); table.string("object_type").notNull().defaultTo("");
table.integer('object_id').notNull().unsigned().defaultTo(0); table.integer("object_id").notNull().unsigned().defaultTo(0);
table.string('action').notNull(); table.string("action").notNull();
table.json('meta').notNull(); table.json("meta").notNull();
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] audit_log Table created'); logger.info(`[${migrateName}] audit_log Table created`);
}); });
}; };
/** /**
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex, Promise) { const down = (_knex) => {
logger.warn('[' + migrate_name + '] You can\'t migrate down the initial data.'); logger.warn(`[${migrateName}] You can't migrate down the initial data.`);
return Promise.resolve(true); return Promise.resolve(true);
}; };
export { up, down };
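
Each of the converted migration files ends up with the same module shape: named `up`/`down` functions and a final named export pair, which knex can pick up once the backend itself is ESM. A minimal sketch (the `example` table is illustrative, not part of the schema above):

import { migrate as logger } from "../logger.js";

const migrateName = "example";

const up = (knex) => {
	logger.info(`[${migrateName}] Migrating Up...`);
	return knex.schema
		.createTable("example", (table) => {
			table.increments().primary();
			table.dateTime("created_on").notNull();
			table.json("meta").notNull();
		})
		.then(() => {
			logger.info(`[${migrateName}] example Table created`);
		});
};

const down = (_knex) => {
	logger.warn(`[${migrateName}] You can't migrate down this one.`);
	return Promise.resolve(true);
};

export { up, down };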


@@ -1,5 +1,6 @@
const migrate_name = 'websockets'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "websockets";
/** /**
* Migrate * Migrate
@@ -7,29 +8,29 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex/*, Promise*/) { const up = (knex) => {
logger.info('[' + migrate_name + '] Migrating Up...'); logger.info(`[${migrateName}] Migrating Up...`);
return knex.schema.table('proxy_host', function (proxy_host) { return knex.schema
proxy_host.integer('allow_websocket_upgrade').notNull().unsigned().defaultTo(0); .table("proxy_host", (proxy_host) => {
proxy_host.integer("allow_websocket_upgrade").notNull().unsigned().defaultTo(0);
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] proxy_host Table altered'); logger.info(`[${migrateName}] proxy_host Table altered`);
}); });
}; };
/** /**
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex, Promise) { const down = (_knex) => {
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.'); logger.warn(`[${migrateName}] You can't migrate down this one.`);
return Promise.resolve(true); return Promise.resolve(true);
}; };
export { up, down };
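
The same alteration written with async/await, in the spirit of the Promises-to-async/await commit (a sketch only; the shipped migration keeps the `.then()` chain):

const up = async (knex) => {
	logger.info(`[${migrateName}] Migrating Up...`);
	await knex.schema.table("proxy_host", (proxyHost) => {
		proxyHost.integer("allow_websocket_upgrade").notNull().unsigned().defaultTo(0);
	});
	logger.info(`[${migrateName}] proxy_host Table altered`);
};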


@@ -1,5 +1,6 @@
const migrate_name = 'forward_host'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "forward_host";
/** /**
* Migrate * Migrate
@@ -7,17 +8,17 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex/*, Promise*/) { const up = (knex) => {
logger.info('[' + migrate_name + '] Migrating Up...'); logger.info(`[${migrateName}] Migrating Up...`);
return knex.schema.table('proxy_host', function (proxy_host) { return knex.schema
proxy_host.renameColumn('forward_ip', 'forward_host'); .table("proxy_host", (proxy_host) => {
proxy_host.renameColumn("forward_ip", "forward_host");
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] proxy_host Table altered'); logger.info(`[${migrateName}] proxy_host Table altered`);
}); });
}; };
@@ -25,10 +26,11 @@ exports.up = function (knex/*, Promise*/) {
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex, Promise) { const down = (_knex) => {
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.'); logger.warn(`[${migrateName}] You can't migrate down this one.`);
return Promise.resolve(true); return Promise.resolve(true);
}; };
export { up, down };
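
If a reversible rollback were ever wanted here, the `down` step could simply mirror the rename — a sketch only; the project deliberately keeps this migration one-way:

const down = (knex) =>
	knex.schema
		.table("proxy_host", (proxyHost) => {
			proxyHost.renameColumn("forward_host", "forward_ip");
		})
		.then(() => {
			logger.info(`[${migrateName}] proxy_host Table reverted`);
		});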


@@ -1,5 +1,6 @@
const migrate_name = 'http2_support'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "http2_support";
/** /**
* Migrate * Migrate
@@ -7,31 +8,31 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex/*, Promise*/) { const up = (knex) => {
logger.info('[' + migrate_name + '] Migrating Up...'); logger.info(`[${migrateName}] Migrating Up...`);
return knex.schema.table('proxy_host', function (proxy_host) { return knex.schema
proxy_host.integer('http2_support').notNull().unsigned().defaultTo(0); .table("proxy_host", (proxy_host) => {
proxy_host.integer("http2_support").notNull().unsigned().defaultTo(0);
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] proxy_host Table altered'); logger.info(`[${migrateName}] proxy_host Table altered`);
return knex.schema.table('redirection_host', function (redirection_host) { return knex.schema.table("redirection_host", (redirection_host) => {
redirection_host.integer('http2_support').notNull().unsigned().defaultTo(0); redirection_host.integer("http2_support").notNull().unsigned().defaultTo(0);
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] redirection_host Table altered'); logger.info(`[${migrateName}] redirection_host Table altered`);
return knex.schema.table('dead_host', function (dead_host) { return knex.schema.table("dead_host", (dead_host) => {
dead_host.integer('http2_support').notNull().unsigned().defaultTo(0); dead_host.integer("http2_support").notNull().unsigned().defaultTo(0);
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] dead_host Table altered'); logger.info(`[${migrateName}] dead_host Table altered`);
}); });
}; };
@@ -39,11 +40,11 @@ exports.up = function (knex/*, Promise*/) {
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex, Promise) { const down = (_knex) => {
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.'); logger.warn(`[${migrateName}] You can't migrate down this one.`);
return Promise.resolve(true); return Promise.resolve(true);
}; };
export { up, down };
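
Since the same column is added to three tables, the chain could equally be expressed as a loop — a sketch under the assumption that the three alterations are independent of one another:

const up = async (knex) => {
	logger.info(`[${migrateName}] Migrating Up...`);
	for (const tableName of ["proxy_host", "redirection_host", "dead_host"]) {
		await knex.schema.table(tableName, (table) => {
			table.integer("http2_support").notNull().unsigned().defaultTo(0);
		});
		logger.info(`[${migrateName}] ${tableName} Table altered`);
	}
};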


@@ -1,5 +1,6 @@
const migrate_name = 'forward_scheme'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "forward_scheme";
/** /**
* Migrate * Migrate
@@ -7,17 +8,17 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex/*, Promise*/) { const up = (knex) => {
logger.info('[' + migrate_name + '] Migrating Up...'); logger.info(`[${migrateName}] Migrating Up...`);
return knex.schema.table('proxy_host', function (proxy_host) { return knex.schema
proxy_host.string('forward_scheme').notNull().defaultTo('http'); .table("proxy_host", (proxy_host) => {
proxy_host.string("forward_scheme").notNull().defaultTo("http");
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] proxy_host Table altered'); logger.info(`[${migrateName}] proxy_host Table altered`);
}); });
}; };
@@ -25,10 +26,11 @@ exports.up = function (knex/*, Promise*/) {
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex, Promise) { const down = (_knex) => {
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.'); logger.warn(`[${migrateName}] You can't migrate down this one.`);
return Promise.resolve(true); return Promise.resolve(true);
}; };
export { up, down };


@@ -1,5 +1,6 @@
const migrate_name = 'disabled'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "disabled";
/** /**
* Migrate * Migrate
@@ -7,38 +8,38 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex/*, Promise*/) { const up = (knex) => {
logger.info('[' + migrate_name + '] Migrating Up...'); logger.info(`[${migrateName}] Migrating Up...`);
return knex.schema.table('proxy_host', function (proxy_host) { return knex.schema
proxy_host.integer('enabled').notNull().unsigned().defaultTo(1); .table("proxy_host", (proxy_host) => {
proxy_host.integer("enabled").notNull().unsigned().defaultTo(1);
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] proxy_host Table altered'); logger.info(`[${migrateName}] proxy_host Table altered`);
return knex.schema.table('redirection_host', function (redirection_host) { return knex.schema.table("redirection_host", (redirection_host) => {
redirection_host.integer('enabled').notNull().unsigned().defaultTo(1); redirection_host.integer("enabled").notNull().unsigned().defaultTo(1);
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] redirection_host Table altered'); logger.info(`[${migrateName}] redirection_host Table altered`);
return knex.schema.table('dead_host', function (dead_host) { return knex.schema.table("dead_host", (dead_host) => {
dead_host.integer('enabled').notNull().unsigned().defaultTo(1); dead_host.integer("enabled").notNull().unsigned().defaultTo(1);
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] dead_host Table altered'); logger.info(`[${migrateName}] dead_host Table altered`);
return knex.schema.table('stream', function (stream) { return knex.schema.table("stream", (stream) => {
stream.integer('enabled').notNull().unsigned().defaultTo(1); stream.integer("enabled").notNull().unsigned().defaultTo(1);
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] stream Table altered'); logger.info(`[${migrateName}] stream Table altered`);
}); });
}; };
@@ -46,10 +47,11 @@ exports.up = function (knex/*, Promise*/) {
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex, Promise) { const down = (_knex) => {
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.'); logger.warn(`[${migrateName}] You can't migrate down this one.`);
return Promise.resolve(true); return Promise.resolve(true);
}; };
export { up, down };


@@ -1,5 +1,6 @@
const migrate_name = 'custom_locations'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "custom_locations";
/** /**
* Migrate * Migrate
@@ -8,17 +9,17 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex/*, Promise*/) { const up = (knex) => {
logger.info('[' + migrate_name + '] Migrating Up...'); logger.info(`[${migrateName}] Migrating Up...`);
return knex.schema.table('proxy_host', function (proxy_host) { return knex.schema
proxy_host.json('locations'); .table("proxy_host", (proxy_host) => {
proxy_host.json("locations");
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] proxy_host Table altered'); logger.info(`[${migrateName}] proxy_host Table altered`);
}); });
}; };
@@ -26,10 +27,11 @@ exports.up = function (knex/*, Promise*/) {
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex, Promise) { const down = (_knex) => {
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.'); logger.warn(`[${migrateName}] You can't migrate down this one.`);
return Promise.resolve(true); return Promise.resolve(true);
}; };
export { up, down };
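
Note that `locations` is added without `notNull()` or a default, so rows created before this migration read back as `null`; consumers need a guard along these lines (helper name illustrative):

const getLocations = (proxyHostRow) => {
	// pre-migration rows have no value for the new JSON column
	return proxyHostRow.locations || [];
};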


@@ -1,5 +1,6 @@
const migrate_name = 'hsts'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "hsts";
/** /**
* Migrate * Migrate
@@ -7,34 +8,34 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex/*, Promise*/) { const up = (knex) => {
logger.info('[' + migrate_name + '] Migrating Up...'); logger.info(`[${migrateName}] Migrating Up...`);
return knex.schema.table('proxy_host', function (proxy_host) { return knex.schema
proxy_host.integer('hsts_enabled').notNull().unsigned().defaultTo(0); .table("proxy_host", (proxy_host) => {
proxy_host.integer('hsts_subdomains').notNull().unsigned().defaultTo(0); proxy_host.integer("hsts_enabled").notNull().unsigned().defaultTo(0);
proxy_host.integer("hsts_subdomains").notNull().unsigned().defaultTo(0);
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] proxy_host Table altered'); logger.info(`[${migrateName}] proxy_host Table altered`);
return knex.schema.table('redirection_host', function (redirection_host) { return knex.schema.table("redirection_host", (redirection_host) => {
redirection_host.integer('hsts_enabled').notNull().unsigned().defaultTo(0); redirection_host.integer("hsts_enabled").notNull().unsigned().defaultTo(0);
redirection_host.integer('hsts_subdomains').notNull().unsigned().defaultTo(0); redirection_host.integer("hsts_subdomains").notNull().unsigned().defaultTo(0);
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] redirection_host Table altered'); logger.info(`[${migrateName}] redirection_host Table altered`);
return knex.schema.table('dead_host', function (dead_host) { return knex.schema.table("dead_host", (dead_host) => {
dead_host.integer('hsts_enabled').notNull().unsigned().defaultTo(0); dead_host.integer("hsts_enabled").notNull().unsigned().defaultTo(0);
dead_host.integer('hsts_subdomains').notNull().unsigned().defaultTo(0); dead_host.integer("hsts_subdomains").notNull().unsigned().defaultTo(0);
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] dead_host Table altered'); logger.info(`[${migrateName}] dead_host Table altered`);
}); });
}; };
@@ -42,10 +43,11 @@ exports.up = function (knex/*, Promise*/) {
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex, Promise) { const down = (_knex) => {
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.'); logger.warn(`[${migrateName}] You can't migrate down this one.`);
return Promise.resolve(true); return Promise.resolve(true);
}; };
export { up, down };


@@ -1,5 +1,6 @@
const migrate_name = 'settings'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "settings";
/** /**
* Migrate * Migrate
@@ -7,11 +8,10 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex/*, Promise*/) { const up = (knex) => {
logger.info('[' + migrate_name + '] Migrating Up...'); logger.info(`[${migrateName}] Migrating Up...`);
return knex.schema.createTable('setting', (table) => { return knex.schema.createTable('setting', (table) => {
table.string('id').notNull().primary(); table.string('id').notNull().primary();
@@ -21,7 +21,7 @@ exports.up = function (knex/*, Promise*/) {
table.json('meta').notNull(); table.json('meta').notNull();
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] setting Table created'); logger.info(`[${migrateName}] setting Table created`);
}); });
}; };
@@ -29,10 +29,11 @@ exports.up = function (knex/*, Promise*/) {
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex, Promise) { const down = (_knex) => {
logger.warn('[' + migrate_name + '] You can\'t migrate down the initial data.'); logger.warn(`[${migrateName}] You can't migrate down the initial data.`);
return Promise.resolve(true); return Promise.resolve(true);
}; };
export { up, down };
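
A create-then-seed variant of the same migration, using only the columns visible in the hunk above (a sketch; the shipped migration only creates the table, and the `default-site` id is the one read by the later default-host migration):

const up = (knex) =>
	knex.schema
		.createTable("setting", (table) => {
			table.string("id").notNull().primary();
			table.json("meta").notNull();
		})
		.then(() => knex("setting").insert({ id: "default-site", meta: JSON.stringify({}) }))
		.then(() => {
			logger.info(`[${migrateName}] setting Table created and seeded`);
		});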


@@ -1,5 +1,6 @@
const migrate_name = 'access_list_client'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "access_list_client";
/** /**
* Migrate * Migrate
@@ -7,32 +8,30 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex/*, Promise*/) { const up = (knex) => {
logger.info(`[${migrateName}] Migrating Up...`);
logger.info('[' + migrate_name + '] Migrating Up...'); return knex.schema
.createTable("access_list_client", (table) => {
return knex.schema.createTable('access_list_client', (table) => {
table.increments().primary(); table.increments().primary();
table.dateTime('created_on').notNull(); table.dateTime("created_on").notNull();
table.dateTime('modified_on').notNull(); table.dateTime("modified_on").notNull();
table.integer('access_list_id').notNull().unsigned(); table.integer("access_list_id").notNull().unsigned();
table.string('address').notNull(); table.string("address").notNull();
table.string('directive').notNull(); table.string("directive").notNull();
table.json('meta').notNull(); table.json("meta").notNull();
}) })
.then(function () { .then(() => {
logger.info('[' + migrate_name + '] access_list_client Table created'); logger.info(`[${migrateName}] access_list_client Table created`);
return knex.schema.table('access_list', function (access_list) { return knex.schema.table("access_list", (access_list) => {
access_list.integer('satify_any').notNull().defaultTo(0); access_list.integer("satify_any").notNull().defaultTo(0);
}); });
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] access_list Table altered'); logger.info(`[${migrateName}] access_list Table altered`);
}); });
}; };
@@ -40,14 +39,14 @@ exports.up = function (knex/*, Promise*/) {
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex/*, Promise*/) { const down = (knex) => {
logger.info('[' + migrate_name + '] Migrating Down...'); logger.info(`[${migrateName}] Migrating Down...`);
return knex.schema.dropTable('access_list_client') return knex.schema.dropTable("access_list_client").then(() => {
.then(() => { logger.info(`[${migrateName}] access_list_client Table dropped`);
logger.info('[' + migrate_name + '] access_list_client Table dropped');
}); });
}; };
export { up, down };
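
The shipped `down` only drops the new table; a fuller rollback would also remove the column added to `access_list` — a sketch (note the later fix migration renames this column, so a real revert would have to run after that one is rolled back):

const down = (knex) =>
	knex.schema
		.dropTable("access_list_client")
		.then(() =>
			knex.schema.table("access_list", (accessList) => {
				accessList.dropColumn("satify_any");
			}),
		)
		.then(() => {
			logger.info(`[${migrateName}] access_list_client dropped, access_list reverted`);
		});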


@@ -1,5 +1,6 @@
const migrate_name = 'access_list_client_fix'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "access_list_client_fix";
/** /**
* Migrate * Migrate
@@ -7,17 +8,17 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex/*, Promise*/) { const up = (knex) => {
logger.info('[' + migrate_name + '] Migrating Up...'); logger.info(`[${migrateName}] Migrating Up...`);
return knex.schema.table('access_list', function (access_list) { return knex.schema
access_list.renameColumn('satify_any', 'satisfy_any'); .table("access_list", (access_list) => {
access_list.renameColumn("satify_any", "satisfy_any");
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] access_list Table altered'); logger.info(`[${migrateName}] access_list Table altered`);
}); });
}; };
@@ -25,10 +26,11 @@ exports.up = function (knex/*, Promise*/) {
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex, Promise) { const down = (_knex) => {
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.'); logger.warn(`[${migrateName}] You can't migrate down this one.`);
return Promise.resolve(true); return Promise.resolve(true);
}; };
export { up, down };


@@ -1,5 +1,6 @@
const migrate_name = 'pass_auth'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "pass_auth";
/** /**
* Migrate * Migrate
@@ -7,18 +8,17 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex/*, Promise*/) { const up = (knex) => {
logger.info(`[${migrateName}] Migrating Up...`);
logger.info('[' + migrate_name + '] Migrating Up...'); return knex.schema
.table("access_list", (access_list) => {
return knex.schema.table('access_list', function (access_list) { access_list.integer("pass_auth").notNull().defaultTo(1);
access_list.integer('pass_auth').notNull().defaultTo(1);
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] access_list Table altered'); logger.info(`[${migrateName}] access_list Table altered`);
}); });
}; };
@@ -26,16 +26,18 @@ exports.up = function (knex/*, Promise*/) {
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex/*, Promise*/) { const down = (knex) => {
logger.info('[' + migrate_name + '] Migrating Down...'); logger.info(`[${migrateName}] Migrating Down...`);
return knex.schema.table('access_list', function (access_list) { return knex.schema
access_list.dropColumn('pass_auth'); .table("access_list", (access_list) => {
access_list.dropColumn("pass_auth");
}) })
.then(() => { .then(() => {
logger.info('[' + migrate_name + '] access_list pass_auth Column dropped'); logger.info(`[${migrateName}] access_list pass_auth Column dropped`);
}); });
}; };
export { up, down };


@@ -1,5 +1,6 @@
const migrate_name = 'redirection_scheme'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "redirection_scheme";
/** /**
* Migrate * Migrate
@@ -7,18 +8,17 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex/*, Promise*/) { const up = (knex) => {
logger.info(`[${migrateName}] Migrating Up...`);
logger.info('[' + migrate_name + '] Migrating Up...'); return knex.schema
.table("redirection_host", (table) => {
return knex.schema.table('redirection_host', (table) => { table.string("forward_scheme").notNull().defaultTo("$scheme");
table.string('forward_scheme').notNull().defaultTo('$scheme');
}) })
.then(function () { .then(() => {
logger.info('[' + migrate_name + '] redirection_host Table altered'); logger.info(`[${migrateName}] redirection_host Table altered`);
}); });
}; };
@@ -26,16 +26,18 @@ exports.up = function (knex/*, Promise*/) {
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex/*, Promise*/) { const down = (knex) => {
logger.info('[' + migrate_name + '] Migrating Down...'); logger.info(`[${migrateName}] Migrating Down...`);
return knex.schema.table('redirection_host', (table) => { return knex.schema
table.dropColumn('forward_scheme'); .table("redirection_host", (table) => {
table.dropColumn("forward_scheme");
}) })
.then(function () { .then(() => {
logger.info('[' + migrate_name + '] redirection_host Table altered'); logger.info(`[${migrateName}] redirection_host Table altered`);
}); });
}; };
export { up, down };


@@ -1,5 +1,6 @@
const migrate_name = 'redirection_status_code'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "redirection_status_code";
/** /**
* Migrate * Migrate
@@ -7,18 +8,17 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex/*, Promise*/) { const up = (knex) => {
logger.info(`[${migrateName}] Migrating Up...`);
logger.info('[' + migrate_name + '] Migrating Up...'); return knex.schema
.table("redirection_host", (table) => {
return knex.schema.table('redirection_host', (table) => { table.integer("forward_http_code").notNull().unsigned().defaultTo(302);
table.integer('forward_http_code').notNull().unsigned().defaultTo(302);
}) })
.then(function () { .then(() => {
logger.info('[' + migrate_name + '] redirection_host Table altered'); logger.info(`[${migrateName}] redirection_host Table altered`);
}); });
}; };
@@ -26,16 +26,18 @@ exports.up = function (knex/*, Promise*/) {
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex/*, Promise*/) { const down = (knex) => {
logger.info('[' + migrate_name + '] Migrating Down...'); logger.info(`[${migrateName}] Migrating Down...`);
return knex.schema.table('redirection_host', (table) => { return knex.schema
table.dropColumn('forward_http_code'); .table("redirection_host", (table) => {
table.dropColumn("forward_http_code");
}) })
.then(function () { .then(() => {
logger.info('[' + migrate_name + '] redirection_host Table altered'); logger.info(`[${migrateName}] redirection_host Table altered`);
}); });
}; };
export { up, down };


@@ -1,5 +1,6 @@
const migrate_name = 'stream_domain'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "stream_domain";
/** /**
* Migrate * Migrate
@@ -7,17 +8,17 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex/*, Promise*/) { const up = (knex) => {
logger.info('[' + migrate_name + '] Migrating Up...'); logger.info(`[${migrateName}] Migrating Up...`);
return knex.schema.table('stream', (table) => { return knex.schema
table.renameColumn('forward_ip', 'forwarding_host'); .table("stream", (table) => {
table.renameColumn("forward_ip", "forwarding_host");
}) })
.then(function () { .then(() => {
logger.info('[' + migrate_name + '] stream Table altered'); logger.info(`[${migrateName}] stream Table altered`);
}); });
}; };
@@ -25,16 +26,18 @@ exports.up = function (knex/*, Promise*/) {
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex/*, Promise*/) { const down = (knex) => {
logger.info('[' + migrate_name + '] Migrating Down...'); logger.info(`[${migrateName}] Migrating Down...`);
return knex.schema.table('stream', (table) => { return knex.schema
table.renameColumn('forwarding_host', 'forward_ip'); .table("stream", (table) => {
table.renameColumn("forwarding_host", "forward_ip");
}) })
.then(function () { .then(() => {
logger.info('[' + migrate_name + '] stream Table altered'); logger.info(`[${migrateName}] stream Table altered`);
}); });
}; };
export { up, down };


@@ -1,17 +1,19 @@
const migrate_name = 'stream_domain'; import internalNginx from "../internal/nginx.js";
const logger = require('../logger').migrate; import { migrate as logger } from "../logger.js";
const internalNginx = require('../internal/nginx');
const migrateName = "stream_domain";
async function regenerateDefaultHost(knex) { async function regenerateDefaultHost(knex) {
const row = await knex('setting').select('*').where('id', 'default-site').first(); const row = await knex("setting").select("*").where("id", "default-site").first();
if (!row) { if (!row) {
return Promise.resolve(); return Promise.resolve();
} }
return internalNginx.deleteConfig('default') return internalNginx
.deleteConfig("default")
.then(() => { .then(() => {
return internalNginx.generateConfig('default', row); return internalNginx.generateConfig("default", row);
}) })
.then(() => { .then(() => {
return internalNginx.test(); return internalNginx.test();
@@ -27,11 +29,10 @@ async function regenerateDefaultHost(knex) {
* @see http://knexjs.org/#Schema * @see http://knexjs.org/#Schema
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex) { const up = (knex) => {
logger.info('[' + migrate_name + '] Migrating Up...'); logger.info(`[${migrateName}] Migrating Up...`);
return regenerateDefaultHost(knex); return regenerateDefaultHost(knex);
}; };
@@ -40,11 +41,12 @@ exports.up = function (knex) {
* Undo Migrate * Undo Migrate
* *
* @param {Object} knex * @param {Object} knex
* @param {Promise} Promise
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex) { const down = (knex) => {
logger.info('[' + migrate_name + '] Migrating Down...'); logger.info(`[${migrateName}] Migrating Down...`);
return regenerateDefaultHost(knex); return regenerateDefaultHost(knex);
}; };
export { up, down };
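
The part of `regenerateDefaultHost` visible in this hunk, rewritten with await in the spirit of the async/await commit (the tail of the chain falls outside the hunk and is omitted):

const regenerateDefaultHost = async (knex) => {
	const row = await knex("setting").select("*").where("id", "default-site").first();
	if (!row) {
		return;
	}
	await internalNginx.deleteConfig("default");
	await internalNginx.generateConfig("default", row);
	await internalNginx.test();
	// remaining steps of the chain are elided in the hunk above
};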


@@ -1,5 +1,6 @@
const migrate_name = 'stream_ssl'; import { migrate as logger } from "../logger.js";
const logger = require('../logger').migrate;
const migrateName = "stream_ssl";
/** /**
* Migrate * Migrate
@@ -9,14 +10,15 @@ const logger = require('../logger').migrate;
* @param {Object} knex * @param {Object} knex
* @returns {Promise} * @returns {Promise}
*/ */
exports.up = function (knex) { const up = (knex) => {
logger.info('[' + migrate_name + '] Migrating Up...'); logger.info(`[${migrateName}] Migrating Up...`);
return knex.schema.table('stream', (table) => { return knex.schema
table.integer('certificate_id').notNull().unsigned().defaultTo(0); .table("stream", (table) => {
table.integer("certificate_id").notNull().unsigned().defaultTo(0);
}) })
.then(function () { .then(() => {
logger.info('[' + migrate_name + '] stream Table altered'); logger.info(`[${migrateName}] stream Table altered`);
}); });
}; };
@@ -26,13 +28,16 @@ exports.up = function (knex) {
* @param {Object} knex * @param {Object} knex
* @returns {Promise} * @returns {Promise}
*/ */
exports.down = function (knex) { const down = (knex) => {
logger.info('[' + migrate_name + '] Migrating Down...'); logger.info(`[${migrateName}] Migrating Down...`);
return knex.schema.table('stream', (table) => { return knex.schema
table.dropColumn('certificate_id'); .table("stream", (table) => {
table.dropColumn("certificate_id");
}) })
.then(function () { .then(() => {
logger.info('[' + migrate_name + '] stream Table altered'); logger.info(`[${migrateName}] stream Table altered`);
}); });
}; };
export { up, down };


@@ -1,21 +1,18 @@
// Objection Docs: // Objection Docs:
// http://vincit.github.io/objection.js/ // http://vincit.github.io/objection.js/
const db = require('../db'); import { Model } from "objection";
const helpers = require('../lib/helpers'); import db from "../db.js";
const Model = require('objection').Model; import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
const User = require('./user'); import AccessListAuth from "./access_list_auth.js";
const AccessListAuth = require('./access_list_auth'); import AccessListClient from "./access_list_client.js";
const AccessListClient = require('./access_list_client'); import now from "./now_helper.js";
const now = require('./now_helper'); import ProxyHostModel from "./proxy_host.js";
import User from "./user.js";
Model.knex(db); Model.knex(db);
const boolFields = [ const boolFields = ["is_deleted", "satisfy_any", "pass_auth"];
'is_deleted',
'satisfy_any',
'pass_auth',
];
class AccessList extends Model { class AccessList extends Model {
$beforeInsert() { $beforeInsert() {
@@ -23,7 +20,7 @@ class AccessList extends Model {
this.modified_on = now(); this.modified_on = now();
// Default for meta // Default for meta
if (typeof this.meta === 'undefined') { if (typeof this.meta === "undefined") {
this.meta = {}; this.meta = {};
} }
} }
@@ -33,71 +30,69 @@ class AccessList extends Model {
} }
$parseDatabaseJson(json) { $parseDatabaseJson(json) {
json = super.$parseDatabaseJson(json); const thisJson = super.$parseDatabaseJson(json);
return helpers.convertIntFieldsToBool(json, boolFields); return convertIntFieldsToBool(thisJson, boolFields);
} }
$formatDatabaseJson(json) { $formatDatabaseJson(json) {
json = helpers.convertBoolFieldsToInt(json, boolFields); const thisJson = convertBoolFieldsToInt(json, boolFields);
return super.$formatDatabaseJson(json); return super.$formatDatabaseJson(thisJson);
} }
static get name() { static get name() {
return 'AccessList'; return "AccessList";
} }
static get tableName() { static get tableName() {
return 'access_list'; return "access_list";
} }
static get jsonAttributes() { static get jsonAttributes() {
return ['meta']; return ["meta"];
} }
static get relationMappings() { static get relationMappings() {
const ProxyHost = require('./proxy_host');
return { return {
owner: { owner: {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: User, modelClass: User,
join: { join: {
from: 'access_list.owner_user_id', from: "access_list.owner_user_id",
to: 'user.id' to: "user.id",
},
modify: (qb) => {
qb.where("user.is_deleted", 0);
}, },
modify: function (qb) {
qb.where('user.is_deleted', 0);
}
}, },
items: { items: {
relation: Model.HasManyRelation, relation: Model.HasManyRelation,
modelClass: AccessListAuth, modelClass: AccessListAuth,
join: { join: {
from: 'access_list.id', from: "access_list.id",
to: 'access_list_auth.access_list_id' to: "access_list_auth.access_list_id",
} },
}, },
clients: { clients: {
relation: Model.HasManyRelation, relation: Model.HasManyRelation,
modelClass: AccessListClient, modelClass: AccessListClient,
join: { join: {
from: 'access_list.id', from: "access_list.id",
to: 'access_list_client.access_list_id' to: "access_list_client.access_list_id",
} },
}, },
proxy_hosts: { proxy_hosts: {
relation: Model.HasManyRelation, relation: Model.HasManyRelation,
modelClass: ProxyHost, modelClass: ProxyHostModel,
join: { join: {
from: 'access_list.id', from: "access_list.id",
to: 'proxy_host.access_list_id' to: "proxy_host.access_list_id",
},
modify: (qb) => {
qb.where("proxy_host.is_deleted", 0);
},
}, },
modify: function (qb) {
qb.where('proxy_host.is_deleted', 0);
}
}
}; };
} }
} }
module.exports = AccessList; export default AccessList;
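
A sketch of how the relations declared above can be consumed with Objection's eager loading (query shape only; the calling service code is not part of this changeset, and the import path is assumed):

import AccessList from "./models/access_list.js";

const loadAccessLists = () =>
	AccessList.query()
		.where("access_list.is_deleted", 0)
		.withGraphFetched("[owner, items, clients, proxy_hosts]");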


@@ -1,9 +1,10 @@
// Objection Docs: // Objection Docs:
// http://vincit.github.io/objection.js/ // http://vincit.github.io/objection.js/
const db = require('../db'); import { Model } from "objection";
const Model = require('objection').Model; import db from "../db.js";
const now = require('./now_helper'); import accessListModel from "./access_list.js";
import now from "./now_helper.js";
Model.knex(db); Model.knex(db);
@@ -13,7 +14,7 @@ class AccessListAuth extends Model {
this.modified_on = now(); this.modified_on = now();
// Default for meta // Default for meta
if (typeof this.meta === 'undefined') { if (typeof this.meta === "undefined") {
this.meta = {}; this.meta = {};
} }
} }
@@ -23,32 +24,32 @@ class AccessListAuth extends Model {
} }
static get name() { static get name() {
return 'AccessListAuth'; return "AccessListAuth";
} }
static get tableName() { static get tableName() {
return 'access_list_auth'; return "access_list_auth";
} }
static get jsonAttributes() { static get jsonAttributes() {
return ['meta']; return ["meta"];
} }
static get relationMappings() { static get relationMappings() {
return { return {
access_list: { access_list: {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: require('./access_list'), modelClass: accessListModel,
join: { join: {
from: 'access_list_auth.access_list_id', from: "access_list_auth.access_list_id",
to: 'access_list.id' to: "access_list.id",
},
modify: (qb) => {
qb.where("access_list.is_deleted", 0);
},
}, },
modify: function (qb) {
qb.where('access_list.is_deleted', 0);
}
}
}; };
} }
} }
module.exports = AccessListAuth; export default AccessListAuth;


@@ -1,9 +1,10 @@
// Objection Docs: // Objection Docs:
// http://vincit.github.io/objection.js/ // http://vincit.github.io/objection.js/
const db = require('../db'); import { Model } from "objection";
const Model = require('objection').Model; import db from "../db.js";
const now = require('./now_helper'); import accessListModel from "./access_list.js";
import now from "./now_helper.js";
Model.knex(db); Model.knex(db);
@@ -13,7 +14,7 @@ class AccessListClient extends Model {
this.modified_on = now(); this.modified_on = now();
// Default for meta // Default for meta
if (typeof this.meta === 'undefined') { if (typeof this.meta === "undefined") {
this.meta = {}; this.meta = {};
} }
} }
@@ -23,32 +24,32 @@ class AccessListClient extends Model {
} }
static get name() { static get name() {
return 'AccessListClient'; return "AccessListClient";
} }
static get tableName() { static get tableName() {
return 'access_list_client'; return "access_list_client";
} }
static get jsonAttributes() { static get jsonAttributes() {
return ['meta']; return ["meta"];
} }
static get relationMappings() { static get relationMappings() {
return { return {
access_list: { access_list: {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: require('./access_list'), modelClass: accessListModel,
join: { join: {
from: 'access_list_client.access_list_id', from: "access_list_client.access_list_id",
to: 'access_list.id' to: "access_list.id",
},
modify: (qb) => {
qb.where("access_list.is_deleted", 0);
},
}, },
modify: function (qb) {
qb.where('access_list.is_deleted', 0);
}
}
}; };
} }
} }
module.exports = AccessListClient; export default AccessListClient;


@@ -1,10 +1,10 @@
// Objection Docs: // Objection Docs:
// http://vincit.github.io/objection.js/ // http://vincit.github.io/objection.js/
const db = require('../db'); import { Model } from "objection";
const Model = require('objection').Model; import db from "../db.js";
const User = require('./user'); import now from "./now_helper.js";
const now = require('./now_helper'); import User from "./user.js";
Model.knex(db); Model.knex(db);
@@ -14,7 +14,7 @@ class AuditLog extends Model {
this.modified_on = now(); this.modified_on = now();
// Default for meta // Default for meta
if (typeof this.meta === 'undefined') { if (typeof this.meta === "undefined") {
this.meta = {}; this.meta = {};
} }
} }
@@ -24,15 +24,15 @@ class AuditLog extends Model {
} }
static get name() { static get name() {
return 'AuditLog'; return "AuditLog";
} }
static get tableName() { static get tableName() {
return 'audit_log'; return "audit_log";
} }
static get jsonAttributes() { static get jsonAttributes() {
return ['meta']; return ["meta"];
} }
static get relationMappings() { static get relationMappings() {
@@ -41,12 +41,12 @@ class AuditLog extends Model {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: User, modelClass: User,
join: { join: {
from: 'audit_log.user_id', from: "audit_log.user_id",
to: 'user.id' to: "user.id",
} },
} },
}; };
} }
} }
module.exports = AuditLog; export default AuditLog;
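
A sketch of writing an entry through this model — the column names follow the `audit_log` table from the initial migration, `created_on`/`modified_on` are filled by `$beforeInsert`, and the helper name and import path are illustrative:

import AuditLog from "./models/audit_log.js";

const recordAudit = (userId, objectType, objectId, action, meta = {}) =>
	AuditLog.query().insert({
		user_id: userId,
		object_type: objectType,
		object_id: objectId,
		action,
		meta,
	});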


@@ -1,27 +1,21 @@
// Objection Docs: // Objection Docs:
// http://vincit.github.io/objection.js/ // http://vincit.github.io/objection.js/
const bcrypt = require('bcrypt'); import bcrypt from "bcrypt";
const db = require('../db'); import { Model } from "objection";
const helpers = require('../lib/helpers'); import db from "../db.js";
const Model = require('objection').Model; import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
const User = require('./user'); import now from "./now_helper.js";
const now = require('./now_helper'); import User from "./user.js";
Model.knex(db); Model.knex(db);
const boolFields = [ const boolFields = ["is_deleted"];
'is_deleted',
];
function encryptPassword() { function encryptPassword() {
/* jshint -W040 */ if (this.type === "password" && this.secret) {
let _this = this; return bcrypt.hash(this.secret, 13).then((hash) => {
this.secret = hash;
if (_this.type === 'password' && _this.secret) {
return bcrypt.hash(_this.secret, 13)
.then(function (hash) {
_this.secret = hash;
}); });
} }
@@ -34,7 +28,7 @@ class Auth extends Model {
this.modified_on = now(); this.modified_on = now();
// Default for meta // Default for meta
if (typeof this.meta === 'undefined') { if (typeof this.meta === "undefined") {
this.meta = {}; this.meta = {};
} }
@@ -47,13 +41,13 @@ class Auth extends Model {
} }
$parseDatabaseJson(json) { $parseDatabaseJson(json) {
json = super.$parseDatabaseJson(json); const thisJson = super.$parseDatabaseJson(json);
return helpers.convertIntFieldsToBool(json, boolFields); return convertIntFieldsToBool(thisJson, boolFields);
} }
$formatDatabaseJson(json) { $formatDatabaseJson(json) {
json = helpers.convertBoolFieldsToInt(json, boolFields); const thisJson = convertBoolFieldsToInt(json, boolFields);
return super.$formatDatabaseJson(json); return super.$formatDatabaseJson(thisJson);
} }
/** /**
@@ -67,15 +61,15 @@ class Auth extends Model {
} }
static get name() { static get name() {
return 'Auth'; return "Auth";
} }
static get tableName() { static get tableName() {
return 'auth'; return "auth";
} }
static get jsonAttributes() { static get jsonAttributes() {
return ['meta']; return ["meta"];
} }
static get relationMappings() { static get relationMappings() {
@@ -84,15 +78,15 @@ class Auth extends Model {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: User, modelClass: User,
join: { join: {
from: 'auth.user_id', from: "auth.user_id",
to: 'user.id' to: "user.id",
}, },
filter: { filter: {
is_deleted: 0 is_deleted: 0,
} },
} },
}; };
} }
} }
module.exports = Auth; export default Auth;
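
The matching verification step for the hash produced by `encryptPassword()` would look roughly like this (a sketch; the function name is illustrative and not part of the model):

import bcrypt from "bcrypt";

const verifySecret = (auth, candidate) => {
	if (auth.type !== "password") {
		return Promise.resolve(false);
	}
	// compares the plain-text candidate against the stored bcrypt hash
	return bcrypt.compare(candidate, auth.secret);
};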


@@ -1,16 +1,18 @@
// Objection Docs: // Objection Docs:
// http://vincit.github.io/objection.js/ // http://vincit.github.io/objection.js/
const db = require('../db'); import { Model } from "objection";
const helpers = require('../lib/helpers'); import db from "../db.js";
const Model = require('objection').Model; import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
const now = require('./now_helper'); import deadHostModel from "./dead_host.js";
import now from "./now_helper.js";
import proxyHostModel from "./proxy_host.js";
import redirectionHostModel from "./redirection_host.js";
import userModel from "./user.js";
Model.knex(db); Model.knex(db);
const boolFields = [ const boolFields = ["is_deleted"];
'is_deleted',
];
class Certificate extends Model { class Certificate extends Model {
$beforeInsert() { $beforeInsert() {
@@ -18,17 +20,17 @@ class Certificate extends Model {
this.modified_on = now(); this.modified_on = now();
// Default for expires_on // Default for expires_on
if (typeof this.expires_on === 'undefined') { if (typeof this.expires_on === "undefined") {
this.expires_on = now(); this.expires_on = now();
} }
// Default for domain_names // Default for domain_names
if (typeof this.domain_names === 'undefined') { if (typeof this.domain_names === "undefined") {
this.domain_names = []; this.domain_names = [];
} }
// Default for meta // Default for meta
if (typeof this.meta === 'undefined') { if (typeof this.meta === "undefined") {
this.meta = {}; this.meta = {};
} }
@@ -39,86 +41,81 @@ class Certificate extends Model {
this.modified_on = now(); this.modified_on = now();
// Sort domain_names // Sort domain_names
if (typeof this.domain_names !== 'undefined') { if (typeof this.domain_names !== "undefined") {
this.domain_names.sort(); this.domain_names.sort();
} }
} }
$parseDatabaseJson(json) { $parseDatabaseJson(json) {
json = super.$parseDatabaseJson(json); const thisJson = super.$parseDatabaseJson(json);
return helpers.convertIntFieldsToBool(json, boolFields); return convertIntFieldsToBool(thisJson, boolFields);
} }
$formatDatabaseJson(json) { $formatDatabaseJson(json) {
json = helpers.convertBoolFieldsToInt(json, boolFields); const thisJson = convertBoolFieldsToInt(json, boolFields);
return super.$formatDatabaseJson(json); return super.$formatDatabaseJson(thisJson);
} }
static get name() { static get name() {
return 'Certificate'; return "Certificate";
} }
static get tableName() { static get tableName() {
return 'certificate'; return "certificate";
} }
static get jsonAttributes() { static get jsonAttributes() {
return ['domain_names', 'meta']; return ["domain_names", "meta"];
} }
static get relationMappings() { static get relationMappings() {
const ProxyHost = require('./proxy_host');
const DeadHost = require('./dead_host');
const User = require('./user');
const RedirectionHost = require('./redirection_host');
return { return {
owner: { owner: {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: User, modelClass: userModel,
join: { join: {
from: 'certificate.owner_user_id', from: "certificate.owner_user_id",
to: 'user.id' to: "user.id",
},
modify: (qb) => {
qb.where("user.is_deleted", 0);
}, },
modify: function (qb) {
qb.where('user.is_deleted', 0);
}
}, },
proxy_hosts: { proxy_hosts: {
relation: Model.HasManyRelation, relation: Model.HasManyRelation,
modelClass: ProxyHost, modelClass: proxyHostModel,
join: { join: {
from: 'certificate.id', from: "certificate.id",
to: 'proxy_host.certificate_id' to: "proxy_host.certificate_id",
},
modify: (qb) => {
qb.where("proxy_host.is_deleted", 0);
}, },
modify: function (qb) {
qb.where('proxy_host.is_deleted', 0);
}
}, },
dead_hosts: { dead_hosts: {
relation: Model.HasManyRelation, relation: Model.HasManyRelation,
modelClass: DeadHost, modelClass: deadHostModel,
join: { join: {
from: 'certificate.id', from: "certificate.id",
to: 'dead_host.certificate_id' to: "dead_host.certificate_id",
},
modify: (qb) => {
qb.where("dead_host.is_deleted", 0);
}, },
modify: function (qb) {
qb.where('dead_host.is_deleted', 0);
}
}, },
redirection_hosts: { redirection_hosts: {
relation: Model.HasManyRelation, relation: Model.HasManyRelation,
modelClass: RedirectionHost, modelClass: redirectionHostModel,
join: { join: {
from: 'certificate.id', from: "certificate.id",
to: 'redirection_host.certificate_id' to: "redirection_host.certificate_id",
},
modify: (qb) => {
qb.where("redirection_host.is_deleted", 0);
},
}, },
modify: function (qb) {
qb.where('redirection_host.is_deleted', 0);
}
}
}; };
} }
} }
module.exports = Certificate; export default Certificate;
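
The conversion helpers imported from `../lib/helpers.js` are not part of this diff; a plausible shape, shown only to make the `$parseDatabaseJson`/`$formatDatabaseJson` hooks above easier to read (the real implementation may differ):

const convertIntFieldsToBool = (obj, fields) => {
	for (const field of fields) {
		if (typeof obj[field] !== "undefined") {
			obj[field] = obj[field] === 1 || obj[field] === true;
		}
	}
	return obj;
};

const convertBoolFieldsToInt = (obj, fields) => {
	for (const field of fields) {
		if (typeof obj[field] !== "undefined") {
			obj[field] = obj[field] ? 1 : 0;
		}
	}
	return obj;
};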


@@ -1,23 +1,16 @@
// Objection Docs: // Objection Docs:
// http://vincit.github.io/objection.js/ // http://vincit.github.io/objection.js/
const db = require('../db'); import { Model } from "objection";
const helpers = require('../lib/helpers'); import db from "../db.js";
const Model = require('objection').Model; import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
const User = require('./user'); import Certificate from "./certificate.js";
const Certificate = require('./certificate'); import now from "./now_helper.js";
const now = require('./now_helper'); import User from "./user.js";
Model.knex(db); Model.knex(db);
const boolFields = [ const boolFields = ["is_deleted", "ssl_forced", "http2_support", "enabled", "hsts_enabled", "hsts_subdomains"];
'is_deleted',
'ssl_forced',
'http2_support',
'enabled',
'hsts_enabled',
'hsts_subdomains',
];
class DeadHost extends Model { class DeadHost extends Model {
$beforeInsert() { $beforeInsert() {
@@ -25,12 +18,12 @@ class DeadHost extends Model {
this.modified_on = now(); this.modified_on = now();
// Default for domain_names // Default for domain_names
if (typeof this.domain_names === 'undefined') { if (typeof this.domain_names === "undefined") {
this.domain_names = []; this.domain_names = [];
} }
// Default for meta // Default for meta
if (typeof this.meta === 'undefined') { if (typeof this.meta === "undefined") {
this.meta = {}; this.meta = {};
} }
@@ -41,31 +34,31 @@ class DeadHost extends Model {
this.modified_on = now(); this.modified_on = now();
// Sort domain_names // Sort domain_names
if (typeof this.domain_names !== 'undefined') { if (typeof this.domain_names !== "undefined") {
this.domain_names.sort(); this.domain_names.sort();
} }
} }
$parseDatabaseJson(json) { $parseDatabaseJson(json) {
json = super.$parseDatabaseJson(json); const thisJson = super.$parseDatabaseJson(json);
return helpers.convertIntFieldsToBool(json, boolFields); return convertIntFieldsToBool(thisJson, boolFields);
} }
$formatDatabaseJson(json) { $formatDatabaseJson(json) {
json = helpers.convertBoolFieldsToInt(json, boolFields); const thisJson = convertBoolFieldsToInt(json, boolFields);
return super.$formatDatabaseJson(json); return super.$formatDatabaseJson(thisJson);
} }
static get name() { static get name() {
return 'DeadHost'; return "DeadHost";
} }
static get tableName() { static get tableName() {
return 'dead_host'; return "dead_host";
} }
static get jsonAttributes() { static get jsonAttributes() {
return ['domain_names', 'meta']; return ["domain_names", "meta"];
} }
static get relationMappings() { static get relationMappings() {
@@ -74,26 +67,26 @@ class DeadHost extends Model {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: User, modelClass: User,
join: { join: {
from: 'dead_host.owner_user_id', from: "dead_host.owner_user_id",
to: 'user.id' to: "user.id",
},
modify: (qb) => {
qb.where("user.is_deleted", 0);
}, },
modify: function (qb) {
qb.where('user.is_deleted', 0);
}
}, },
certificate: { certificate: {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: Certificate, modelClass: Certificate,
join: { join: {
from: 'dead_host.certificate_id', from: "dead_host.certificate_id",
to: 'certificate.id' to: "certificate.id",
},
modify: (qb) => {
qb.where("certificate.is_deleted", 0);
},
}, },
modify: function (qb) {
qb.where('certificate.is_deleted', 0);
}
}
}; };
} }
} }
module.exports = DeadHost; export default DeadHost;
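The `$parseDatabaseJson` / `$formatDatabaseJson` pair round-trips the integer flags listed in `boolFields` to real booleans. The helpers themselves are not shown in this diff; a rough sketch of the behaviour they are assumed to provide:

// Assumed behaviour of the helpers imported from lib/helpers.js (not shown in this diff).
const convertIntFieldsToBool = (json, fields) => {
	for (const field of fields) {
		if (typeof json[field] !== "undefined") {
			json[field] = json[field] === 1 || json[field] === true;
		}
	}
	return json;
};

const convertBoolFieldsToInt = (json, fields) => {
	for (const field of fields) {
		if (typeof json[field] !== "undefined") {
			json[field] = json[field] ? 1 : 0;
		}
	}
	return json;
};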

View File

@@ -1,13 +1,12 @@
const db = require('../db'); import { Model } from "objection";
const config = require('../lib/config'); import db from "../db.js";
const Model = require('objection').Model; import { isSqlite } from "../lib/config.js";
Model.knex(db); Model.knex(db);
module.exports = function () { export default () => {
if (config.isSqlite()) { if (isSqlite()) {
// eslint-disable-next-line
return Model.raw("datetime('now','localtime')"); return Model.raw("datetime('now','localtime')");
} }
return Model.raw('NOW()'); return Model.raw("NOW()");
}; };

View File

@@ -1,26 +1,26 @@
// Objection Docs: // Objection Docs:
// http://vincit.github.io/objection.js/ // http://vincit.github.io/objection.js/
const db = require('../db'); import { Model } from "objection";
const helpers = require('../lib/helpers'); import db from "../db.js";
const Model = require('objection').Model; import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
const User = require('./user'); import AccessList from "./access_list.js";
const AccessList = require('./access_list'); import Certificate from "./certificate.js";
const Certificate = require('./certificate'); import now from "./now_helper.js";
const now = require('./now_helper'); import User from "./user.js";
Model.knex(db); Model.knex(db);
const boolFields = [ const boolFields = [
'is_deleted', "is_deleted",
'ssl_forced', "ssl_forced",
'caching_enabled', "caching_enabled",
'block_exploits', "block_exploits",
'allow_websocket_upgrade', "allow_websocket_upgrade",
'http2_support', "http2_support",
'enabled', "enabled",
'hsts_enabled', "hsts_enabled",
'hsts_subdomains', "hsts_subdomains",
]; ];
class ProxyHost extends Model { class ProxyHost extends Model {
@@ -29,12 +29,12 @@ class ProxyHost extends Model {
this.modified_on = now(); this.modified_on = now();
// Default for domain_names // Default for domain_names
if (typeof this.domain_names === 'undefined') { if (typeof this.domain_names === "undefined") {
this.domain_names = []; this.domain_names = [];
} }
// Default for meta // Default for meta
if (typeof this.meta === 'undefined') { if (typeof this.meta === "undefined") {
this.meta = {}; this.meta = {};
} }
@@ -45,31 +45,31 @@ class ProxyHost extends Model {
this.modified_on = now(); this.modified_on = now();
// Sort domain_names // Sort domain_names
if (typeof this.domain_names !== 'undefined') { if (typeof this.domain_names !== "undefined") {
this.domain_names.sort(); this.domain_names.sort();
} }
} }
$parseDatabaseJson(json) { $parseDatabaseJson(json) {
json = super.$parseDatabaseJson(json); const thisJson = super.$parseDatabaseJson(json);
return helpers.convertIntFieldsToBool(json, boolFields); return convertIntFieldsToBool(thisJson, boolFields);
} }
$formatDatabaseJson(json) { $formatDatabaseJson(json) {
json = helpers.convertBoolFieldsToInt(json, boolFields); const thisJson = convertBoolFieldsToInt(json, boolFields);
return super.$formatDatabaseJson(json); return super.$formatDatabaseJson(thisJson);
} }
static get name() { static get name() {
return 'ProxyHost'; return "ProxyHost";
} }
static get tableName() { static get tableName() {
return 'proxy_host'; return "proxy_host";
} }
static get jsonAttributes() { static get jsonAttributes() {
return ['domain_names', 'meta', 'locations']; return ["domain_names", "meta", "locations"];
} }
static get relationMappings() { static get relationMappings() {
@@ -78,37 +78,37 @@ class ProxyHost extends Model {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: User, modelClass: User,
join: { join: {
from: 'proxy_host.owner_user_id', from: "proxy_host.owner_user_id",
to: 'user.id' to: "user.id",
},
modify: (qb) => {
qb.where("user.is_deleted", 0);
}, },
modify: function (qb) {
qb.where('user.is_deleted', 0);
}
}, },
access_list: { access_list: {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: AccessList, modelClass: AccessList,
join: { join: {
from: 'proxy_host.access_list_id', from: "proxy_host.access_list_id",
to: 'access_list.id' to: "access_list.id",
},
modify: (qb) => {
qb.where("access_list.is_deleted", 0);
}, },
modify: function (qb) {
qb.where('access_list.is_deleted', 0);
}
}, },
certificate: { certificate: {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: Certificate, modelClass: Certificate,
join: { join: {
from: 'proxy_host.certificate_id', from: "proxy_host.certificate_id",
to: 'certificate.id' to: "certificate.id",
},
modify: (qb) => {
qb.where("certificate.is_deleted", 0);
},
}, },
modify: function (qb) {
qb.where('certificate.is_deleted', 0);
}
}
}; };
} }
} }
module.exports = ProxyHost; export default ProxyHost;
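The `jsonAttributes` getter tells Objection which columns hold JSON, so `domain_names`, `meta` and `locations` are serialised on write and parsed back on read. A small illustrative sketch (column values are made up and other required columns are omitted):

// Illustrative only: arrays and objects pass straight through the model;
// Objection handles JSON (de)serialisation for the columns listed above.
const host = await ProxyHost.query().insert({
	owner_user_id: 1,
	domain_names: ["example.com", "www.example.com"],
	meta: {},
	locations: [],
});
console.log(Array.isArray(host.domain_names)); // true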

View File

@@ -1,25 +1,24 @@
// Objection Docs: // Objection Docs:
// http://vincit.github.io/objection.js/ // http://vincit.github.io/objection.js/
const db = require('../db'); import { Model } from "objection";
const helpers = require('../lib/helpers'); import db from "../db.js";
const Model = require('objection').Model; import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
const User = require('./user'); import Certificate from "./certificate.js";
const Certificate = require('./certificate'); import now from "./now_helper.js";
const now = require('./now_helper'); import User from "./user.js";
Model.knex(db); Model.knex(db);
const boolFields = [ const boolFields = [
'is_deleted', "is_deleted",
'enabled', "enabled",
'preserve_path', "preserve_path",
'ssl_forced', "ssl_forced",
'block_exploits', "block_exploits",
'hsts_enabled', "hsts_enabled",
'hsts_subdomains', "hsts_subdomains",
'http2_support', "http2_support",
]; ];
class RedirectionHost extends Model { class RedirectionHost extends Model {
@@ -28,12 +27,12 @@ class RedirectionHost extends Model {
this.modified_on = now(); this.modified_on = now();
// Default for domain_names // Default for domain_names
if (typeof this.domain_names === 'undefined') { if (typeof this.domain_names === "undefined") {
this.domain_names = []; this.domain_names = [];
} }
// Default for meta // Default for meta
if (typeof this.meta === 'undefined') { if (typeof this.meta === "undefined") {
this.meta = {}; this.meta = {};
} }
@@ -44,31 +43,31 @@ class RedirectionHost extends Model {
this.modified_on = now(); this.modified_on = now();
// Sort domain_names // Sort domain_names
if (typeof this.domain_names !== 'undefined') { if (typeof this.domain_names !== "undefined") {
this.domain_names.sort(); this.domain_names.sort();
} }
} }
$parseDatabaseJson(json) { $parseDatabaseJson(json) {
json = super.$parseDatabaseJson(json); const thisJson = super.$parseDatabaseJson(json);
return helpers.convertIntFieldsToBool(json, boolFields); return convertIntFieldsToBool(thisJson, boolFields);
} }
$formatDatabaseJson(json) { $formatDatabaseJson(json) {
json = helpers.convertBoolFieldsToInt(json, boolFields); const thisJson = convertBoolFieldsToInt(json, boolFields);
return super.$formatDatabaseJson(json); return super.$formatDatabaseJson(thisJson);
} }
static get name() { static get name() {
return 'RedirectionHost'; return "RedirectionHost";
} }
static get tableName() { static get tableName() {
return 'redirection_host'; return "redirection_host";
} }
static get jsonAttributes() { static get jsonAttributes() {
return ['domain_names', 'meta']; return ["domain_names", "meta"];
} }
static get relationMappings() { static get relationMappings() {
@@ -77,26 +76,26 @@ class RedirectionHost extends Model {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: User, modelClass: User,
join: { join: {
from: 'redirection_host.owner_user_id', from: "redirection_host.owner_user_id",
to: 'user.id' to: "user.id",
},
modify: (qb) => {
qb.where("user.is_deleted", 0);
}, },
modify: function (qb) {
qb.where('user.is_deleted', 0);
}
}, },
certificate: { certificate: {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: Certificate, modelClass: Certificate,
join: { join: {
from: 'redirection_host.certificate_id', from: "redirection_host.certificate_id",
to: 'certificate.id' to: "certificate.id",
},
modify: (qb) => {
qb.where("certificate.is_deleted", 0);
},
}, },
modify: function (qb) {
qb.where('certificate.is_deleted', 0);
}
}
}; };
} }
} }
module.exports = RedirectionHost; export default RedirectionHost;

View File

@@ -1,8 +1,8 @@
// Objection Docs: // Objection Docs:
// http://vincit.github.io/objection.js/ // http://vincit.github.io/objection.js/
const db = require('../db'); import { Model } from "objection";
const Model = require('objection').Model; import db from "../db.js";
Model.knex(db); Model.knex(db);
@@ -27,4 +27,4 @@ class Setting extends Model {
} }
} }
module.exports = Setting; export default Setting;

View File

@@ -1,18 +1,13 @@
const Model = require('objection').Model; import { Model } from "objection";
const db = require('../db'); import db from "../db.js";
const helpers = require('../lib/helpers'); import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
const User = require('./user'); import Certificate from "./certificate.js";
const Certificate = require('./certificate'); import now from "./now_helper.js";
const now = require('./now_helper'); import User from "./user.js";
Model.knex(db); Model.knex(db);
const boolFields = [ const boolFields = ["is_deleted", "enabled", "tcp_forwarding", "udp_forwarding"];
'is_deleted',
'enabled',
'tcp_forwarding',
'udp_forwarding',
];
class Stream extends Model { class Stream extends Model {
$beforeInsert() { $beforeInsert() {
@@ -20,7 +15,7 @@ class Stream extends Model {
this.modified_on = now(); this.modified_on = now();
// Default for meta // Default for meta
if (typeof this.meta === 'undefined') { if (typeof this.meta === "undefined") {
this.meta = {}; this.meta = {};
} }
} }
@@ -30,25 +25,25 @@ class Stream extends Model {
} }
$parseDatabaseJson(json) { $parseDatabaseJson(json) {
json = super.$parseDatabaseJson(json); const thisJson = super.$parseDatabaseJson(json);
return helpers.convertIntFieldsToBool(json, boolFields); return convertIntFieldsToBool(thisJson, boolFields);
} }
$formatDatabaseJson(json) { $formatDatabaseJson(json) {
json = helpers.convertBoolFieldsToInt(json, boolFields); const thisJson = convertBoolFieldsToInt(json, boolFields);
return super.$formatDatabaseJson(json); return super.$formatDatabaseJson(thisJson);
} }
static get name() { static get name() {
return 'Stream'; return "Stream";
} }
static get tableName() { static get tableName() {
return 'stream'; return "stream";
} }
static get jsonAttributes() { static get jsonAttributes() {
return ['meta']; return ["meta"];
} }
static get relationMappings() { static get relationMappings() {
@@ -57,26 +52,26 @@ class Stream extends Model {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: User, modelClass: User,
join: { join: {
from: 'stream.owner_user_id', from: "stream.owner_user_id",
to: 'user.id' to: "user.id",
},
modify: (qb) => {
qb.where("user.is_deleted", 0);
}, },
modify: function (qb) {
qb.where('user.is_deleted', 0);
}
}, },
certificate: { certificate: {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: Certificate, modelClass: Certificate,
join: { join: {
from: 'stream.certificate_id', from: "stream.certificate_id",
to: 'certificate.id' to: "certificate.id",
},
modify: (qb) => {
qb.where("certificate.is_deleted", 0);
},
}, },
modify: function (qb) {
qb.where('certificate.is_deleted', 0);
}
}
}; };
} }
} }
module.exports = Stream; export default Stream;

View File

@@ -3,17 +3,17 @@
and then has abilities after that. and then has abilities after that.
*/ */
const _ = require('lodash'); import crypto from "node:crypto";
const jwt = require('jsonwebtoken'); import jwt from "jsonwebtoken";
const crypto = require('crypto'); import _ from "lodash";
const config = require('../lib/config'); import { getPrivateKey, getPublicKey } from "../lib/config.js";
const error = require('../lib/error'); import errs from "../lib/error.js";
const logger = require('../logger').global; import { global as logger } from "../logger.js";
const ALGO = 'RS256';
module.exports = function () { const ALGO = "RS256";
let token_data = {}; export default () => {
let tokenData = {};
const self = { const self = {
/** /**
@@ -21,28 +21,26 @@ module.exports = function () {
* @returns {Promise} * @returns {Promise}
*/ */
create: (payload) => { create: (payload) => {
if (!config.getPrivateKey()) { if (!getPrivateKey()) {
logger.error('Private key is empty!'); logger.error("Private key is empty!");
} }
// sign with RSA SHA256 // sign with RSA SHA256
const options = { const options = {
algorithm: ALGO, algorithm: ALGO,
expiresIn: payload.expiresIn || '1d' expiresIn: payload.expiresIn || "1d",
}; };
payload.jti = crypto.randomBytes(12) payload.jti = crypto.randomBytes(12).toString("base64").substring(-8);
.toString('base64')
.substring(-8);
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
jwt.sign(payload, config.getPrivateKey(), options, (err, token) => { jwt.sign(payload, getPrivateKey(), options, (err, token) => {
if (err) { if (err) {
reject(err); reject(err);
} else { } else {
token_data = payload; tokenData = payload;
resolve({ resolve({
token: token, token: token,
payload: payload payload: payload,
}); });
} }
}); });
@@ -53,42 +51,47 @@ module.exports = function () {
* @param {String} token * @param {String} token
* @returns {Promise} * @returns {Promise}
*/ */
load: function (token) { load: (token) => {
if (!config.getPublicKey()) { if (!getPublicKey()) {
logger.error('Public key is empty!'); logger.error("Public key is empty!");
} }
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
try { try {
if (!token || token === null || token === 'null') { if (!token || token === null || token === "null") {
reject(new error.AuthError('Empty token')); reject(new errs.AuthError("Empty token"));
} else { } else {
jwt.verify(token, config.getPublicKey(), {ignoreExpiration: false, algorithms: [ALGO]}, (err, result) => { jwt.verify(
token,
getPublicKey(),
{ ignoreExpiration: false, algorithms: [ALGO] },
(err, result) => {
if (err) { if (err) {
if (err.name === "TokenExpiredError") {
if (err.name === 'TokenExpiredError') { reject(new errs.AuthError("Token has expired", err));
reject(new error.AuthError('Token has expired', err));
} else { } else {
reject(err); reject(err);
} }
} else { } else {
token_data = result; tokenData = result;
// Hack: some tokens out in the wild have a scope of 'all' instead of 'user'. // Hack: some tokens out in the wild have a scope of 'all' instead of 'user'.
// For 30 days at least, we need to replace 'all' with user. // For 30 days at least, we need to replace 'all' with user.
if ((typeof token_data.scope !== 'undefined' && _.indexOf(token_data.scope, 'all') !== -1)) { if (
token_data.scope = ['user']; typeof tokenData.scope !== "undefined" &&
_.indexOf(tokenData.scope, "all") !== -1
) {
tokenData.scope = ["user"];
} }
resolve(token_data); resolve(tokenData);
} }
}); },
);
} }
} catch (err) { } catch (err) {
reject(err); reject(err);
} }
}); });
}, },
/** /**
@@ -97,17 +100,15 @@ module.exports = function () {
* @param {String} scope * @param {String} scope
* @returns {Boolean} * @returns {Boolean}
*/ */
hasScope: function (scope) { hasScope: (scope) => typeof tokenData.scope !== "undefined" && _.indexOf(tokenData.scope, scope) !== -1,
return typeof token_data.scope !== 'undefined' && _.indexOf(token_data.scope, scope) !== -1;
},
/** /**
* @param {String} key * @param {String} key
* @return {*} * @return {*}
*/ */
get: function (key) { get: (key) => {
if (typeof token_data[key] !== 'undefined') { if (typeof tokenData[key] !== "undefined") {
return token_data[key]; return tokenData[key];
} }
return null; return null;
@@ -117,22 +118,22 @@ module.exports = function () {
* @param {String} key * @param {String} key
* @param {*} value * @param {*} value
*/ */
set: function (key, value) { set: (key, value) => {
token_data[key] = value; tokenData[key] = value;
}, },
/** /**
* @param [default_value] * @param [defaultValue]
* @returns {Integer} * @returns {Integer}
*/ */
getUserId: (default_value) => { getUserId: (defaultValue) => {
const attrs = self.get('attrs'); const attrs = self.get("attrs");
if (attrs && typeof attrs.id !== 'undefined' && attrs.id) { if (attrs?.id) {
return attrs.id; return attrs.id;
} }
return default_value || 0; return defaultValue || 0;
} },
}; };
return self; return self;
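The token module keeps its create / load / hasScope / get / set / getUserId surface; only the internals changed. A hedged usage sketch (the import path is assumed, since the compare view omits file names):

// Path assumed to be backend/models/token.js; adjust to the actual location.
import TokenModel from "./models/token.js";

const token = TokenModel();

// Sign a payload with the configured RS256 private key (expiresIn defaults to "1d").
const { token: signed } = await token.create({ scope: ["user"], attrs: { id: 1 } });

// Verify it later; an expired token rejects with AuthError("Token has expired").
await token.load(signed);
console.log(token.hasScope("user")); // true
console.log(token.getUserId(0));     // 1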

View File

@@ -1,18 +1,15 @@
// Objection Docs: // Objection Docs:
// http://vincit.github.io/objection.js/ // http://vincit.github.io/objection.js/
const db = require('../db'); import { Model } from "objection";
const helpers = require('../lib/helpers'); import db from "../db.js";
const Model = require('objection').Model; import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
const UserPermission = require('./user_permission'); import now from "./now_helper.js";
const now = require('./now_helper'); import UserPermission from "./user_permission.js";
Model.knex(db); Model.knex(db);
const boolFields = [ const boolFields = ["is_deleted", "is_disabled"];
'is_deleted',
'is_disabled',
];
class User extends Model { class User extends Model {
$beforeInsert() { $beforeInsert() {
@@ -20,7 +17,7 @@ class User extends Model {
this.modified_on = now(); this.modified_on = now();
// Default for roles // Default for roles
if (typeof this.roles === 'undefined') { if (typeof this.roles === "undefined") {
this.roles = []; this.roles = [];
} }
} }
@@ -30,25 +27,25 @@ class User extends Model {
} }
$parseDatabaseJson(json) { $parseDatabaseJson(json) {
json = super.$parseDatabaseJson(json); const thisJson = super.$parseDatabaseJson(json);
return helpers.convertIntFieldsToBool(json, boolFields); return convertIntFieldsToBool(thisJson, boolFields);
} }
$formatDatabaseJson(json) { $formatDatabaseJson(json) {
json = helpers.convertBoolFieldsToInt(json, boolFields); const thisJson = convertBoolFieldsToInt(json, boolFields);
return super.$formatDatabaseJson(json); return super.$formatDatabaseJson(thisJson);
} }
static get name() { static get name() {
return 'User'; return "User";
} }
static get tableName() { static get tableName() {
return 'user'; return "user";
} }
static get jsonAttributes() { static get jsonAttributes() {
return ['roles']; return ["roles"];
} }
static get relationMappings() { static get relationMappings() {
@@ -57,13 +54,12 @@ class User extends Model {
relation: Model.HasOneRelation, relation: Model.HasOneRelation,
modelClass: UserPermission, modelClass: UserPermission,
join: { join: {
from: 'user.id', from: "user.id",
to: 'user_permission.user_id' to: "user_permission.user_id",
} },
} },
}; };
} }
} }
module.exports = User; export default User;

View File

@@ -1,9 +1,9 @@
// Objection Docs: // Objection Docs:
// http://vincit.github.io/objection.js/ // http://vincit.github.io/objection.js/
const db = require('../db'); import { Model } from "objection";
const Model = require('objection').Model; import db from "../db.js";
const now = require('./now_helper'); import now from "./now_helper.js";
Model.knex(db); Model.knex(db);
@@ -26,4 +26,4 @@ class UserPermission extends Model {
} }
} }
module.exports = UserPermission; export default UserPermission;

View File

@@ -3,5 +3,5 @@
"ignore": [ "ignore": [
"data" "data"
], ],
"ext": "js json ejs" "ext": "js json ejs cjs"
} }

View File

@@ -1,8 +1,16 @@
{ {
"name": "nginx-proxy-manager", "name": "nginx-proxy-manager",
"version": "0.0.0", "version": "2.0.0",
"description": "A beautiful interface for creating Nginx endpoints", "description": "A beautiful interface for creating Nginx endpoints",
"author": "Jamie Curnow <jc@jc21.com>",
"license": "MIT",
"main": "index.js", "main": "index.js",
"type": "module",
"scripts": {
"lint": "biome lint",
"prettier": "biome format --write .",
"validate-schema": "node validate-schema.js"
},
"dependencies": { "dependencies": {
"@apidevtools/json-schema-ref-parser": "^11.7.0", "@apidevtools/json-schema-ref-parser": "^11.7.0",
"ajv": "^8.17.1", "ajv": "^8.17.1",
@@ -28,21 +36,14 @@
"sqlite3": "5.1.6", "sqlite3": "5.1.6",
"temp-write": "^4.0.0" "temp-write": "^4.0.0"
}, },
"devDependencies": {
"@apidevtools/swagger-parser": "^10.1.0",
"@biomejs/biome": "^2.2.4",
"chalk": "4.1.2",
"nodemon": "^2.0.2"
},
"signale": { "signale": {
"displayDate": true, "displayDate": true,
"displayTimestamp": true "displayTimestamp": true
},
"author": "Jamie Curnow <jc@jc21.com>",
"license": "MIT",
"devDependencies": {
"@apidevtools/swagger-parser": "^10.1.0",
"chalk": "4.1.2",
"eslint": "^8.36.0",
"eslint-plugin-align-assignments": "^1.1.2",
"nodemon": "^2.0.2",
"prettier": "^2.0.4"
},
"scripts": {
"validate-schema": "node validate-schema.js"
} }
} }
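With "type": "module" set, every .js file in the backend is treated as ESM, which is what allows the import/export syntax throughout this changeset; any file that still needs CommonJS has to use the .cjs extension, matching the "ext" addition in the nodemon config above. A minimal illustration:

// some-module.js — ESM is now the default for .js files
export const answer = 42;

// legacy.cjs — CommonJS still works, but only under the .cjs extension
// module.exports = { answer: 42 };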

View File

@@ -1,19 +1,20 @@
const express = require('express'); import express from "express";
const validator = require('../lib/validator'); import internalAuditLog from "../internal/audit-log.js";
const jwtdecode = require('../lib/express/jwt-decode'); import jwtdecode from "../lib/express/jwt-decode.js";
const internalAuditLog = require('../internal/audit-log'); import validator from "../lib/validator/index.js";
import { express as logger } from "../logger.js";
let router = express.Router({ const router = express.Router({
caseSensitive: true, caseSensitive: true,
strict: true, strict: true,
mergeParams: true mergeParams: true,
}); });
/** /**
* /api/audit-log * /api/audit-log
*/ */
router router
.route('/') .route("/")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -24,29 +25,31 @@ router
* *
* Retrieve all logs * Retrieve all logs
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
validator({ try {
const data = await validator(
{
additionalProperties: false, additionalProperties: false,
properties: { properties: {
expand: { expand: {
$ref: 'common#/properties/expand' $ref: "common#/properties/expand",
}, },
query: { query: {
$ref: 'common#/properties/query' $ref: "common#/properties/query",
},
},
},
{
expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
query: typeof req.query.query === "string" ? req.query.query : null,
},
);
const rows = await internalAuditLog.getAll(res.locals.access, data.expand, data.query);
res.status(200).send(rows);
} catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
} }
}
}, {
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
query: (typeof req.query.query === 'string' ? req.query.query : null)
})
.then((data) => {
return internalAuditLog.getAll(res.locals.access, data.expand, data.query);
})
.then((rows) => {
res.status(200)
.send(rows);
})
.catch(next);
}); });
module.exports = router; export default router;
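Each converted handler repeats the same try / debug-log / next(err) shape. A possible follow-up (not part of this changeset) would be to hoist that into a small wrapper; a hedged sketch:

// Hypothetical helper, not in this diff: centralises the catch/next boilerplate.
const asyncHandler = (fn) => async (req, res, next) => {
	try {
		await fn(req, res, next);
	} catch (err) {
		logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
		next(err);
	}
};

// Usage:
// router.route("/").get(asyncHandler(async (req, res) => { ... }));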

View File

@@ -1,51 +1,66 @@
const express = require('express'); import express from "express";
const pjson = require('../package.json'); import errs from "../lib/error.js";
const error = require('../lib/error'); import pjson from "../package.json" with { type: "json" };
import { isSetup } from "../setup.js";
import auditLogRoutes from "./audit-log.js";
import accessListsRoutes from "./nginx/access_lists.js";
import certificatesHostsRoutes from "./nginx/certificates.js";
import deadHostsRoutes from "./nginx/dead_hosts.js";
import proxyHostsRoutes from "./nginx/proxy_hosts.js";
import redirectionHostsRoutes from "./nginx/redirection_hosts.js";
import streamsRoutes from "./nginx/streams.js";
import reportsRoutes from "./reports.js";
import schemaRoutes from "./schema.js";
import settingsRoutes from "./settings.js";
import tokensRoutes from "./tokens.js";
import usersRoutes from "./users.js";
let router = express.Router({ const router = express.Router({
caseSensitive: true, caseSensitive: true,
strict: true, strict: true,
mergeParams: true mergeParams: true,
}); });
/** /**
* Health Check * Health Check
* GET /api * GET /api
*/ */
router.get('/', (req, res/*, next*/) => { router.get("/", async (_, res /*, next*/) => {
let version = pjson.version.split('-').shift().split('.'); const version = pjson.version.split("-").shift().split(".");
const setup = await isSetup();
res.status(200).send({ res.status(200).send({
status: 'OK', status: "OK",
setup,
version: { version: {
major: parseInt(version.shift(), 10), major: Number.parseInt(version.shift(), 10),
minor: parseInt(version.shift(), 10), minor: Number.parseInt(version.shift(), 10),
revision: parseInt(version.shift(), 10) revision: Number.parseInt(version.shift(), 10),
} },
}); });
}); });
router.use('/schema', require('./schema')); router.use("/schema", schemaRoutes);
router.use('/tokens', require('./tokens')); router.use("/tokens", tokensRoutes);
router.use('/users', require('./users')); router.use("/users", usersRoutes);
router.use('/audit-log', require('./audit-log')); router.use("/audit-log", auditLogRoutes);
router.use('/reports', require('./reports')); router.use("/reports", reportsRoutes);
router.use('/settings', require('./settings')); router.use("/settings", settingsRoutes);
router.use('/nginx/proxy-hosts', require('./nginx/proxy_hosts')); router.use("/nginx/proxy-hosts", proxyHostsRoutes);
router.use('/nginx/redirection-hosts', require('./nginx/redirection_hosts')); router.use("/nginx/redirection-hosts", redirectionHostsRoutes);
router.use('/nginx/dead-hosts', require('./nginx/dead_hosts')); router.use("/nginx/dead-hosts", deadHostsRoutes);
router.use('/nginx/streams', require('./nginx/streams')); router.use("/nginx/streams", streamsRoutes);
router.use('/nginx/access-lists', require('./nginx/access_lists')); router.use("/nginx/access-lists", accessListsRoutes);
router.use('/nginx/certificates', require('./nginx/certificates')); router.use("/nginx/certificates", certificatesHostsRoutes);
/** /**
* API 404 for all other routes * API 404 for all other routes
* *
* ALL /api/* * ALL /api/*
*/ */
router.all(/(.+)/, function (req, _, next) { router.all(/(.+)/, (req, _, next) => {
req.params.page = req.params['0']; req.params.page = req.params["0"];
next(new error.ItemNotFoundError(req.params.page)); next(new errs.ItemNotFoundError(req.params.page));
}); });
module.exports = router; export default router;
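The health check now reports whether first-time setup has completed (via isSetup()) alongside the parsed semver. An illustrative response body (values are examples only):

// GET /api → example payload shape
{
	"status": "OK",
	"setup": true,
	"version": { "major": 2, "minor": 0, "revision": 0 }
}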

View File

@@ -1,22 +1,23 @@
const express = require('express'); import express from "express";
const validator = require('../../lib/validator'); import internalAccessList from "../../internal/access-list.js";
const jwtdecode = require('../../lib/express/jwt-decode'); import jwtdecode from "../../lib/express/jwt-decode.js";
const apiValidator = require('../../lib/validator/api'); import apiValidator from "../../lib/validator/api.js";
const internalAccessList = require('../../internal/access-list'); import validator from "../../lib/validator/index.js";
const schema = require('../../schema'); import { express as logger } from "../../logger.js";
import { getValidationSchema } from "../../schema/index.js";
let router = express.Router({ const router = express.Router({
caseSensitive: true, caseSensitive: true,
strict: true, strict: true,
mergeParams: true mergeParams: true,
}); });
/** /**
* /api/nginx/access-lists * /api/nginx/access-lists
*/ */
router router
.route('/') .route("/")
.options((req, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
.all(jwtdecode()) .all(jwtdecode())
@@ -26,29 +27,31 @@ router
* *
* Retrieve all access-lists * Retrieve all access-lists
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
validator({ try {
const data = await validator(
{
additionalProperties: false, additionalProperties: false,
properties: { properties: {
expand: { expand: {
$ref: 'common#/properties/expand' $ref: "common#/properties/expand",
}, },
query: { query: {
$ref: 'common#/properties/query' $ref: "common#/properties/query",
},
},
},
{
expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
query: typeof req.query.query === "string" ? req.query.query : null,
},
);
const rows = await internalAccessList.getAll(res.locals.access, data.expand, data.query);
res.status(200).send(rows);
} catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
} }
}
}, {
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
query: (typeof req.query.query === 'string' ? req.query.query : null)
})
.then((data) => {
return internalAccessList.getAll(res.locals.access, data.expand, data.query);
})
.then((rows) => {
res.status(200)
.send(rows);
})
.catch(next);
}) })
/** /**
@@ -56,16 +59,15 @@ router
* *
* Create a new access-list * Create a new access-list
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
apiValidator(schema.getValidationSchema('/nginx/access-lists', 'post'), req.body) try {
.then((payload) => { const payload = await apiValidator(getValidationSchema("/nginx/access-lists", "post"), req.body);
return internalAccessList.create(res.locals.access, payload); const result = await internalAccessList.create(res.locals.access, payload);
}) res.status(201).send(result);
.then((result) => { } catch (err) {
res.status(201) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(result); next(err);
}) }
.catch(next);
}); });
/** /**
@@ -74,7 +76,7 @@ router
* /api/nginx/access-lists/123 * /api/nginx/access-lists/123
*/ */
router router
.route('/:list_id') .route("/:list_id")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -85,33 +87,35 @@ router
* *
* Retrieve a specific access-list * Retrieve a specific access-list
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
validator({ try {
required: ['list_id'], const data = await validator(
{
required: ["list_id"],
additionalProperties: false, additionalProperties: false,
properties: { properties: {
list_id: { list_id: {
$ref: 'common#/properties/id' $ref: "common#/properties/id",
}, },
expand: { expand: {
$ref: 'common#/properties/expand' $ref: "common#/properties/expand",
} },
} },
}, { },
{
list_id: req.params.list_id, list_id: req.params.list_id,
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null) expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
}) },
.then((data) => { );
return internalAccessList.get(res.locals.access, { const row = await internalAccessList.get(res.locals.access, {
id: parseInt(data.list_id, 10), id: Number.parseInt(data.list_id, 10),
expand: data.expand expand: data.expand,
}); });
}) res.status(200).send(row);
.then((row) => { } catch (err) {
res.status(200) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(row); next(err);
}) }
.catch(next);
}) })
/** /**
@@ -119,17 +123,16 @@ router
* *
* Update an existing access-list * Update an existing access-list
*/ */
.put((req, res, next) => { .put(async (req, res, next) => {
apiValidator(schema.getValidationSchema('/nginx/access-lists/{listID}', 'put'), req.body) try {
.then((payload) => { const payload = await apiValidator(getValidationSchema("/nginx/access-lists/{listID}", "put"), req.body);
payload.id = parseInt(req.params.list_id, 10); payload.id = Number.parseInt(req.params.list_id, 10);
return internalAccessList.update(res.locals.access, payload); const result = await internalAccessList.update(res.locals.access, payload);
}) res.status(200).send(result);
.then((result) => { } catch (err) {
res.status(200) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(result); next(err);
}) }
.catch(next);
}) })
/** /**
@@ -137,13 +140,16 @@ router
* *
* Delete an existing access-list * Delete an existing access-list
*/ */
.delete((req, res, next) => { .delete(async (req, res, next) => {
internalAccessList.delete(res.locals.access, {id: parseInt(req.params.list_id, 10)}) try {
.then((result) => { const result = await internalAccessList.delete(res.locals.access, {
res.status(200) id: Number.parseInt(req.params.list_id, 10),
.send(result); });
}) res.status(200).send(result);
.catch(next); } catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
module.exports = router; export default router;

View File

@@ -1,22 +1,23 @@
const express = require('express'); import express from "express";
const error = require('../../lib/error'); import internalCertificate from "../../internal/certificate.js";
const validator = require('../../lib/validator'); import errs from "../../lib/error.js";
const jwtdecode = require('../../lib/express/jwt-decode'); import jwtdecode from "../../lib/express/jwt-decode.js";
const apiValidator = require('../../lib/validator/api'); import apiValidator from "../../lib/validator/api.js";
const internalCertificate = require('../../internal/certificate'); import validator from "../../lib/validator/index.js";
const schema = require('../../schema'); import { express as logger } from "../../logger.js";
import { getValidationSchema } from "../../schema/index.js";
const router = express.Router({ const router = express.Router({
caseSensitive: true, caseSensitive: true,
strict: true, strict: true,
mergeParams: true mergeParams: true,
}); });
/** /**
* /api/nginx/certificates * /api/nginx/certificates
*/ */
router router
.route('/') .route("/")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -27,29 +28,31 @@ router
* *
* Retrieve all certificates * Retrieve all certificates
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
validator({ try {
const data = await validator(
{
additionalProperties: false, additionalProperties: false,
properties: { properties: {
expand: { expand: {
$ref: 'common#/properties/expand' $ref: "common#/properties/expand",
}, },
query: { query: {
$ref: 'common#/properties/query' $ref: "common#/properties/query",
},
},
},
{
expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
query: typeof req.query.query === "string" ? req.query.query : null,
},
);
const rows = await internalCertificate.getAll(res.locals.access, data.expand, data.query);
res.status(200).send(rows);
} catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
} }
}
}, {
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
query: (typeof req.query.query === 'string' ? req.query.query : null)
})
.then((data) => {
return internalCertificate.getAll(res.locals.access, data.expand, data.query);
})
.then((rows) => {
res.status(200)
.send(rows);
})
.catch(next);
}) })
/** /**
@@ -57,17 +60,16 @@ router
* *
* Create a new certificate * Create a new certificate
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
apiValidator(schema.getValidationSchema('/nginx/certificates', 'post'), req.body) try {
.then((payload) => { const payload = await apiValidator(getValidationSchema("/nginx/certificates", "post"), req.body);
req.setTimeout(900000); // 15 minutes timeout req.setTimeout(900000); // 15 minutes timeout
return internalCertificate.create(res.locals.access, payload); const result = await internalCertificate.create(res.locals.access, payload);
}) res.status(201).send(result);
.then((result) => { } catch (err) {
res.status(201) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(result); next(err);
}) }
.catch(next);
}); });
/** /**
@@ -76,7 +78,7 @@ router
* /api/nginx/certificates/test-http * /api/nginx/certificates/test-http
*/ */
router router
.route('/test-http') .route("/test-http")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -87,18 +89,22 @@ router
* *
* Test HTTP challenge for domains * Test HTTP challenge for domains
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
if (req.query.domains === undefined) { if (req.query.domains === undefined) {
next(new error.ValidationError('Domains are required as query parameters')); next(new errs.ValidationError("Domains are required as query parameters"));
return; return;
} }
internalCertificate.testHttpsChallenge(res.locals.access, JSON.parse(req.query.domains)) try {
.then((result) => { const result = await internalCertificate.testHttpsChallenge(
res.status(200) res.locals.access,
.send(result); JSON.parse(req.query.domains),
}) );
.catch(next); res.status(200).send(result);
} catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
/** /**
@@ -107,7 +113,7 @@ router
* /api/nginx/certificates/123 * /api/nginx/certificates/123
*/ */
router router
.route('/:certificate_id') .route("/:certificate_id")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -118,33 +124,35 @@ router
* *
* Retrieve a specific certificate * Retrieve a specific certificate
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
validator({ try {
required: ['certificate_id'], const data = await validator(
{
required: ["certificate_id"],
additionalProperties: false, additionalProperties: false,
properties: { properties: {
certificate_id: { certificate_id: {
$ref: 'common#/properties/id' $ref: "common#/properties/id",
}, },
expand: { expand: {
$ref: 'common#/properties/expand' $ref: "common#/properties/expand",
} },
} },
}, { },
{
certificate_id: req.params.certificate_id, certificate_id: req.params.certificate_id,
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null) expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
}) },
.then((data) => { );
return internalCertificate.get(res.locals.access, { const row = await internalCertificate.get(res.locals.access, {
id: parseInt(data.certificate_id, 10), id: Number.parseInt(data.certificate_id, 10),
expand: data.expand expand: data.expand,
}); });
}) res.status(200).send(row);
.then((row) => { } catch (err) {
res.status(200) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(row); next(err);
}) }
.catch(next);
}) })
/** /**
@@ -152,13 +160,16 @@ router
* *
* Delete an existing certificate * Delete an existing certificate
*/ */
.delete((req, res, next) => { .delete(async (req, res, next) => {
internalCertificate.delete(res.locals.access, {id: parseInt(req.params.certificate_id, 10)}) try {
.then((result) => { const result = await internalCertificate.delete(res.locals.access, {
res.status(200) id: Number.parseInt(req.params.certificate_id, 10),
.send(result); });
}) res.status(200).send(result);
.catch(next); } catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
/** /**
@@ -167,7 +178,7 @@ router
* /api/nginx/certificates/123/upload * /api/nginx/certificates/123/upload
*/ */
router router
.route('/:certificate_id/upload') .route("/:certificate_id/upload")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -178,20 +189,21 @@ router
* *
* Upload certificates * Upload certificates
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
if (!req.files) { if (!req.files) {
res.status(400) res.status(400).send({ error: "No files were uploaded" });
.send({error: 'No files were uploaded'}); return;
} else { }
internalCertificate.upload(res.locals.access, {
id: parseInt(req.params.certificate_id, 10), try {
files: req.files const result = await internalCertificate.upload(res.locals.access, {
}) id: Number.parseInt(req.params.certificate_id, 10),
.then((result) => { files: req.files,
res.status(200) });
.send(result); res.status(200).send(result);
}) } catch (err) {
.catch(next); logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
} }
}); });
@@ -201,7 +213,7 @@ router
* /api/nginx/certificates/123/renew * /api/nginx/certificates/123/renew
*/ */
router router
.route('/:certificate_id/renew') .route("/:certificate_id/renew")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -212,16 +224,17 @@ router
* *
* Renew certificate * Renew certificate
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
req.setTimeout(900000); // 15 minutes timeout req.setTimeout(900000); // 15 minutes timeout
internalCertificate.renew(res.locals.access, { try {
id: parseInt(req.params.certificate_id, 10) const result = await internalCertificate.renew(res.locals.access, {
}) id: Number.parseInt(req.params.certificate_id, 10),
.then((result) => { });
res.status(200) res.status(200).send(result);
.send(result); } catch (err) {
}) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.catch(next); next(err);
}
}); });
/** /**
@@ -230,7 +243,7 @@ router
* /api/nginx/certificates/123/download * /api/nginx/certificates/123/download
*/ */
router router
.route('/:certificate_id/download') .route("/:certificate_id/download")
.options((_req, res) => { .options((_req, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -241,15 +254,16 @@ router
* *
* Download certificate * Download certificate
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
internalCertificate.download(res.locals.access, { try {
id: parseInt(req.params.certificate_id, 10) const result = await internalCertificate.download(res.locals.access, {
}) id: Number.parseInt(req.params.certificate_id, 10),
.then((result) => { });
res.status(200) res.status(200).download(result.fileName);
.download(result.fileName); } catch (err) {
}) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.catch(next); next(err);
}
}); });
/** /**
@@ -258,7 +272,7 @@ router
* /api/nginx/certificates/validate * /api/nginx/certificates/validate
*/ */
router router
.route('/validate') .route("/validate")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -269,20 +283,21 @@ router
* *
* Validate certificates * Validate certificates
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
if (!req.files) { if (!req.files) {
res.status(400) res.status(400).send({ error: "No files were uploaded" });
.send({error: 'No files were uploaded'}); return;
} else { }
internalCertificate.validate({
files: req.files try {
}) const result = await internalCertificate.validate({
.then((result) => { files: req.files,
res.status(200) });
.send(result); res.status(200).send(result);
}) } catch (err) {
.catch(next); logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
} }
}); });
module.exports = router; export default router;
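The upload and validate endpoints expect multipart bodies (express-fileupload is wired up in the app entry point). A hedged client-side sketch; the form field names are assumptions based on the handler's use of req.files and are not confirmed by this diff:

// Illustrative client call; field names are assumptions, certFile/keyFile are
// File/Blob objects obtained elsewhere.
const form = new FormData();
form.append("certificate", certFile);
form.append("certificate_key", keyFile);

await fetch("/api/nginx/certificates/123/upload", {
	method: "POST",
	headers: { Authorization: `Bearer ${jwt}` },
	body: form,
});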

View File

@@ -1,21 +1,22 @@
const express = require('express'); import express from "express";
const validator = require('../../lib/validator'); import internalDeadHost from "../../internal/dead-host.js";
const jwtdecode = require('../../lib/express/jwt-decode'); import jwtdecode from "../../lib/express/jwt-decode.js";
const apiValidator = require('../../lib/validator/api'); import apiValidator from "../../lib/validator/api.js";
const internalDeadHost = require('../../internal/dead-host'); import validator from "../../lib/validator/index.js";
const schema = require('../../schema'); import { express as logger } from "../../logger.js";
import { getValidationSchema } from "../../schema/index.js";
let router = express.Router({ const router = express.Router({
caseSensitive: true, caseSensitive: true,
strict: true, strict: true,
mergeParams: true mergeParams: true,
}); });
/** /**
* /api/nginx/dead-hosts * /api/nginx/dead-hosts
*/ */
router router
.route('/') .route("/")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -26,29 +27,31 @@ router
* *
* Retrieve all dead-hosts * Retrieve all dead-hosts
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
validator({ try {
const data = await validator(
{
additionalProperties: false, additionalProperties: false,
properties: { properties: {
expand: { expand: {
$ref: 'common#/properties/expand' $ref: "common#/properties/expand",
}, },
query: { query: {
$ref: 'common#/properties/query' $ref: "common#/properties/query",
},
},
},
{
expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
query: typeof req.query.query === "string" ? req.query.query : null,
},
);
const rows = await internalDeadHost.getAll(res.locals.access, data.expand, data.query);
res.status(200).send(rows);
} catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
} }
}
}, {
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
query: (typeof req.query.query === 'string' ? req.query.query : null)
})
.then((data) => {
return internalDeadHost.getAll(res.locals.access, data.expand, data.query);
})
.then((rows) => {
res.status(200)
.send(rows);
})
.catch(next);
}) })
/** /**
@@ -56,16 +59,15 @@ router
* *
* Create a new dead-host * Create a new dead-host
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
apiValidator(schema.getValidationSchema('/nginx/dead-hosts', 'post'), req.body) try {
.then((payload) => { const payload = await apiValidator(getValidationSchema("/nginx/dead-hosts", "post"), req.body);
return internalDeadHost.create(res.locals.access, payload); const result = await internalDeadHost.create(res.locals.access, payload);
}) res.status(201).send(result);
.then((result) => { } catch (err) {
res.status(201) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(result); next(err);
}) }
.catch(next);
}); });
/** /**
@@ -74,8 +76,8 @@ router
* /api/nginx/dead-hosts/123 * /api/nginx/dead-hosts/123
*/ */
router router
.route('/:host_id') .route("/:host_id")
.options((req, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
.all(jwtdecode()) .all(jwtdecode())
@@ -85,33 +87,35 @@ router
* *
* Retrieve a specific dead-host * Retrieve a specific dead-host
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
validator({ try {
required: ['host_id'], const data = await validator(
{
required: ["host_id"],
additionalProperties: false, additionalProperties: false,
properties: { properties: {
host_id: { host_id: {
$ref: 'common#/properties/id' $ref: "common#/properties/id",
}, },
expand: { expand: {
$ref: 'common#/properties/expand' $ref: "common#/properties/expand",
} },
} },
}, { },
{
host_id: req.params.host_id, host_id: req.params.host_id,
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null) expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
}) },
.then((data) => { );
return internalDeadHost.get(res.locals.access, { const row = await internalDeadHost.get(res.locals.access, {
id: parseInt(data.host_id, 10), id: Number.parseInt(data.host_id, 10),
expand: data.expand expand: data.expand,
}); });
}) res.status(200).send(row);
.then((row) => { } catch (err) {
res.status(200) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(row); next(err);
}) }
.catch(next);
}) })
/** /**
@@ -119,17 +123,16 @@ router
* *
* Update an existing dead-host * Update an existing dead-host
*/ */
.put((req, res, next) => { .put(async (req, res, next) => {
apiValidator(schema.getValidationSchema('/nginx/dead-hosts/{hostID}', 'put'), req.body) try {
.then((payload) => { const payload = await apiValidator(getValidationSchema("/nginx/dead-hosts/{hostID}", "put"), req.body);
payload.id = parseInt(req.params.host_id, 10); payload.id = Number.parseInt(req.params.host_id, 10);
return internalDeadHost.update(res.locals.access, payload); const result = await internalDeadHost.update(res.locals.access, payload);
}) res.status(200).send(result);
.then((result) => { } catch (err) {
res.status(200) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(result); next(err);
}) }
.catch(next);
}) })
/** /**
@@ -137,13 +140,16 @@ router
* *
* Delete an existing dead-host * Delete an existing dead-host
*/ */
.delete((req, res, next) => { .delete(async (req, res, next) => {
internalDeadHost.delete(res.locals.access, {id: parseInt(req.params.host_id, 10)}) try {
.then((result) => { const result = await internalDeadHost.delete(res.locals.access, {
res.status(200) id: Number.parseInt(req.params.host_id, 10),
.send(result); });
}) res.status(200).send(result);
.catch(next); } catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
/** /**
@@ -152,7 +158,7 @@ router
* /api/nginx/dead-hosts/123/enable * /api/nginx/dead-hosts/123/enable
*/ */
router router
.route('/:host_id/enable') .route("/:host_id/enable")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -161,13 +167,16 @@ router
/** /**
* POST /api/nginx/dead-hosts/123/enable * POST /api/nginx/dead-hosts/123/enable
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
internalDeadHost.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)}) try {
.then((result) => { const result = await internalDeadHost.enable(res.locals.access, {
res.status(200) id: Number.parseInt(req.params.host_id, 10),
.send(result); });
}) res.status(200).send(result);
.catch(next); } catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
/** /**
@@ -176,7 +185,7 @@ router
* /api/nginx/dead-hosts/123/disable * /api/nginx/dead-hosts/123/disable
*/ */
router router
.route('/:host_id/disable') .route("/:host_id/disable")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -186,12 +195,13 @@ router
* POST /api/nginx/dead-hosts/123/disable * POST /api/nginx/dead-hosts/123/disable
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
internalDeadHost.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)}) try {
.then((result) => { const result = await internalDeadHost.disable(res.locals.access, { id: Number.parseInt(req.params.host_id, 10) });
res.status(200) res.status(200).send(result);
.send(result); } catch (err) {
}) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.catch(next); next(err);
}
}); });
module.exports = router; export default router;

View File

@@ -1,22 +1,23 @@
const express = require('express'); import express from "express";
const validator = require('../../lib/validator'); import internalProxyHost from "../../internal/proxy-host.js";
const jwtdecode = require('../../lib/express/jwt-decode'); import jwtdecode from "../../lib/express/jwt-decode.js";
const apiValidator = require('../../lib/validator/api'); import apiValidator from "../../lib/validator/api.js";
const internalProxyHost = require('../../internal/proxy-host'); import validator from "../../lib/validator/index.js";
const schema = require('../../schema'); import { express as logger } from "../../logger.js";
import { getValidationSchema } from "../../schema/index.js";
let router = express.Router({ const router = express.Router({
caseSensitive: true, caseSensitive: true,
strict: true, strict: true,
mergeParams: true mergeParams: true,
}); });
/** /**
* /api/nginx/proxy-hosts * /api/nginx/proxy-hosts
*/ */
router router
.route('/') .route("/")
.options((req, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
.all(jwtdecode()) .all(jwtdecode())
@@ -26,29 +27,31 @@ router
* *
* Retrieve all proxy-hosts * Retrieve all proxy-hosts
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
validator({ try {
const data = await validator(
{
additionalProperties: false, additionalProperties: false,
properties: { properties: {
expand: { expand: {
$ref: 'common#/properties/expand' $ref: "common#/properties/expand",
}, },
query: { query: {
$ref: 'common#/properties/query' $ref: "common#/properties/query",
},
},
},
{
expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
query: typeof req.query.query === "string" ? req.query.query : null,
},
);
const rows = await internalProxyHost.getAll(res.locals.access, data.expand, data.query);
res.status(200).send(rows);
} catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
} }
}
}, {
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
query: (typeof req.query.query === 'string' ? req.query.query : null)
})
.then((data) => {
return internalProxyHost.getAll(res.locals.access, data.expand, data.query);
})
.then((rows) => {
res.status(200)
.send(rows);
})
.catch(next);
}) })
/** /**
@@ -56,16 +59,15 @@ router
* *
* Create a new proxy-host * Create a new proxy-host
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
apiValidator(schema.getValidationSchema('/nginx/proxy-hosts', 'post'), req.body) try {
.then((payload) => { const payload = await apiValidator(getValidationSchema("/nginx/proxy-hosts", "post"), req.body);
return internalProxyHost.create(res.locals.access, payload); const result = await internalProxyHost.create(res.locals.access, payload);
}) res.status(201).send(result);
.then((result) => { } catch (err) {
res.status(201) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(result); next(err);
}) }
.catch(next);
}); });
/** /**
@@ -74,8 +76,8 @@ router
* /api/nginx/proxy-hosts/123 * /api/nginx/proxy-hosts/123
*/ */
router router
.route('/:host_id') .route("/:host_id")
.options((req, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
.all(jwtdecode()) .all(jwtdecode())
@@ -85,33 +87,35 @@ router
* *
* Retrieve a specific proxy-host * Retrieve a specific proxy-host
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
validator({ try {
required: ['host_id'], const data = await validator(
{
required: ["host_id"],
additionalProperties: false, additionalProperties: false,
properties: { properties: {
host_id: { host_id: {
$ref: 'common#/properties/id' $ref: "common#/properties/id",
}, },
expand: { expand: {
$ref: 'common#/properties/expand' $ref: "common#/properties/expand",
} },
} },
}, { },
{
host_id: req.params.host_id, host_id: req.params.host_id,
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null) expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
}) },
.then((data) => { );
return internalProxyHost.get(res.locals.access, { const row = await internalProxyHost.get(res.locals.access, {
id: parseInt(data.host_id, 10), id: Number.parseInt(data.host_id, 10),
expand: data.expand expand: data.expand,
}); });
}) res.status(200).send(row);
.then((row) => { } catch (err) {
res.status(200) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(row); next(err);
}) }
.catch(next);
}) })
/** /**
@@ -119,17 +123,16 @@ router
* *
* Update an existing proxy-host * Update an existing proxy-host
*/ */
.put((req, res, next) => { .put(async (req, res, next) => {
apiValidator(schema.getValidationSchema('/nginx/proxy-hosts/{hostID}', 'put'), req.body) try {
.then((payload) => { const payload = await apiValidator(getValidationSchema("/nginx/proxy-hosts/{hostID}", "put"), req.body);
payload.id = parseInt(req.params.host_id, 10); payload.id = Number.parseInt(req.params.host_id, 10);
return internalProxyHost.update(res.locals.access, payload); const result = await internalProxyHost.update(res.locals.access, payload);
}) res.status(200).send(result);
.then((result) => { } catch (err) {
res.status(200) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(result); next(err);
}) }
.catch(next);
}) })
/** /**
@@ -137,13 +140,16 @@ router
* *
* Delete an existing proxy-host * Delete an existing proxy-host
*/ */
.delete((req, res, next) => { .delete(async (req, res, next) => {
internalProxyHost.delete(res.locals.access, {id: parseInt(req.params.host_id, 10)}) try {
.then((result) => { const result = await internalProxyHost.delete(res.locals.access, {
res.status(200) id: Number.parseInt(req.params.host_id, 10),
.send(result); });
}) res.status(200).send(result);
.catch(next); } catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
/** /**
@@ -152,7 +158,7 @@ router
* /api/nginx/proxy-hosts/123/enable * /api/nginx/proxy-hosts/123/enable
*/ */
router router
.route('/:host_id/enable') .route("/:host_id/enable")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -161,13 +167,16 @@ router
/** /**
* POST /api/nginx/proxy-hosts/123/enable * POST /api/nginx/proxy-hosts/123/enable
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
internalProxyHost.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)}) try {
.then((result) => { const result = await internalProxyHost.enable(res.locals.access, {
res.status(200) id: Number.parseInt(req.params.host_id, 10),
.send(result); });
}) res.status(200).send(result);
.catch(next); } catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
/** /**
@@ -176,7 +185,7 @@ router
* /api/nginx/proxy-hosts/123/disable * /api/nginx/proxy-hosts/123/disable
*/ */
router router
.route('/:host_id/disable') .route("/:host_id/disable")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -185,13 +194,16 @@ router
/** /**
* POST /api/nginx/proxy-hosts/123/disable * POST /api/nginx/proxy-hosts/123/disable
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
internalProxyHost.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)}) try {
.then((result) => { const result = await internalProxyHost.disable(res.locals.access, {
res.status(200) id: Number.parseInt(req.params.host_id, 10),
.send(result); });
}) res.status(200).send(result);
.catch(next); } catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
module.exports = router; export default router;
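
The same conversion is applied to every route file in this changeset. For reference, a condensed sketch of the new handler shape is below; `internalExample` and the `/nginx/example` schema path are illustrative placeholders, while `apiValidator`, `getValidationSchema` and the logger mirror the real backend helpers used above.

```js
// Sketch of the async/await handler pattern used throughout these routes (illustrative only).
import express from "express";
import apiValidator from "../../lib/validator/api.js";
import { express as logger } from "../../logger.js";
import { getValidationSchema } from "../../schema/index.js";

// Placeholder standing in for an internal module such as internal/proxy-host.js
const internalExample = {
	create: async (_access, payload) => payload,
};

const router = express.Router({ caseSensitive: true, strict: true, mergeParams: true });

router.route("/").post(async (req, res, next) => {
	try {
		// 1. Validate the body against the OpenAPI request schema
		const payload = await apiValidator(getValidationSchema("/nginx/example", "post"), req.body);
		// 2. Hand off to the internal layer with the caller's access object
		const result = await internalExample.create(res.locals.access, payload);
		res.status(201).send(result);
	} catch (err) {
		// 3. Log at debug level and defer to the Express error handler
		logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
		next(err);
	}
});

export default router;
```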


@@ -1,22 +1,23 @@
const express = require('express'); import express from "express";
const validator = require('../../lib/validator'); import internalRedirectionHost from "../../internal/redirection-host.js";
const jwtdecode = require('../../lib/express/jwt-decode'); import jwtdecode from "../../lib/express/jwt-decode.js";
const apiValidator = require('../../lib/validator/api'); import apiValidator from "../../lib/validator/api.js";
const internalRedirectionHost = require('../../internal/redirection-host'); import validator from "../../lib/validator/index.js";
const schema = require('../../schema'); import { express as logger } from "../../logger.js";
import { getValidationSchema } from "../../schema/index.js";
let router = express.Router({ const router = express.Router({
caseSensitive: true, caseSensitive: true,
strict: true, strict: true,
mergeParams: true mergeParams: true,
}); });
/** /**
* /api/nginx/redirection-hosts * /api/nginx/redirection-hosts
*/ */
router router
.route('/') .route("/")
.options((req, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
.all(jwtdecode()) .all(jwtdecode())
@@ -26,29 +27,31 @@ router
* *
* Retrieve all redirection-hosts * Retrieve all redirection-hosts
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
validator({ try {
const data = await validator(
{
additionalProperties: false, additionalProperties: false,
properties: { properties: {
expand: { expand: {
$ref: 'common#/properties/expand' $ref: "common#/properties/expand",
}, },
query: { query: {
$ref: 'common#/properties/query' $ref: "common#/properties/query",
},
},
},
{
expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
query: typeof req.query.query === "string" ? req.query.query : null,
},
);
const rows = await internalRedirectionHost.getAll(res.locals.access, data.expand, data.query);
res.status(200).send(rows);
} catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
} }
}
}, {
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
query: (typeof req.query.query === 'string' ? req.query.query : null)
})
.then((data) => {
return internalRedirectionHost.getAll(res.locals.access, data.expand, data.query);
})
.then((rows) => {
res.status(200)
.send(rows);
})
.catch(next);
}) })
/** /**
@@ -56,16 +59,15 @@ router
* *
* Create a new redirection-host * Create a new redirection-host
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
apiValidator(schema.getValidationSchema('/nginx/redirection-hosts', 'post'), req.body) try {
.then((payload) => { const payload = await apiValidator(getValidationSchema("/nginx/redirection-hosts", "post"), req.body);
return internalRedirectionHost.create(res.locals.access, payload); const result = await internalRedirectionHost.create(res.locals.access, payload);
}) res.status(201).send(result);
.then((result) => { } catch (err) {
res.status(201) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(result); next(err);
}) }
.catch(next);
}); });
/** /**
@@ -74,8 +76,8 @@ router
* /api/nginx/redirection-hosts/123 * /api/nginx/redirection-hosts/123
*/ */
router router
.route('/:host_id') .route("/:host_id")
.options((req, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
.all(jwtdecode()) .all(jwtdecode())
@@ -85,33 +87,35 @@ router
* *
* Retrieve a specific redirection-host * Retrieve a specific redirection-host
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
validator({ try {
required: ['host_id'], const data = await validator(
{
required: ["host_id"],
additionalProperties: false, additionalProperties: false,
properties: { properties: {
host_id: { host_id: {
$ref: 'common#/properties/id' $ref: "common#/properties/id",
}, },
expand: { expand: {
$ref: 'common#/properties/expand' $ref: "common#/properties/expand",
} },
} },
}, { },
{
host_id: req.params.host_id, host_id: req.params.host_id,
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null) expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
}) },
.then((data) => { );
return internalRedirectionHost.get(res.locals.access, { const row = await internalRedirectionHost.get(res.locals.access, {
id: parseInt(data.host_id, 10), id: Number.parseInt(data.host_id, 10),
expand: data.expand expand: data.expand,
}); });
}) res.status(200).send(row);
.then((row) => { } catch (err) {
res.status(200) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(row); next(err);
}) }
.catch(next);
}) })
/** /**
@@ -119,17 +123,19 @@ router
* *
 * Update an existing redirection-host * Update an existing redirection-host
*/ */
.put((req, res, next) => { .put(async (req, res, next) => {
apiValidator(schema.getValidationSchema('/nginx/redirection-hosts/{hostID}', 'put'), req.body) try {
.then((payload) => { const payload = await apiValidator(
payload.id = parseInt(req.params.host_id, 10); getValidationSchema("/nginx/redirection-hosts/{hostID}", "put"),
return internalRedirectionHost.update(res.locals.access, payload); req.body,
}) );
.then((result) => { payload.id = Number.parseInt(req.params.host_id, 10);
res.status(200) const result = await internalRedirectionHost.update(res.locals.access, payload);
.send(result); res.status(200).send(result);
}) } catch (err) {
.catch(next); logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}) })
/** /**
@@ -137,13 +143,16 @@ router
* *
 * Delete an existing redirection-host * Delete an existing redirection-host
*/ */
.delete((req, res, next) => { .delete(async (req, res, next) => {
internalRedirectionHost.delete(res.locals.access, {id: parseInt(req.params.host_id, 10)}) try {
.then((result) => { const result = await internalRedirectionHost.delete(res.locals.access, {
res.status(200) id: Number.parseInt(req.params.host_id, 10),
.send(result); });
}) res.status(200).send(result);
.catch(next); } catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
/** /**
@@ -152,8 +161,8 @@ router
* /api/nginx/redirection-hosts/123/enable * /api/nginx/redirection-hosts/123/enable
*/ */
router router
.route('/:host_id/enable') .route("/:host_id/enable")
.options((req, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
.all(jwtdecode()) .all(jwtdecode())
@@ -161,13 +170,16 @@ router
/** /**
* POST /api/nginx/redirection-hosts/123/enable * POST /api/nginx/redirection-hosts/123/enable
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
internalRedirectionHost.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)}) try {
.then((result) => { const result = await internalRedirectionHost.enable(res.locals.access, {
res.status(200) id: Number.parseInt(req.params.host_id, 10),
.send(result); });
}) res.status(200).send(result);
.catch(next); } catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
/** /**
@@ -176,8 +188,8 @@ router
* /api/nginx/redirection-hosts/123/disable * /api/nginx/redirection-hosts/123/disable
*/ */
router router
.route('/:host_id/disable') .route("/:host_id/disable")
.options((req, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
.all(jwtdecode()) .all(jwtdecode())
@@ -185,13 +197,16 @@ router
/** /**
* POST /api/nginx/redirection-hosts/123/disable * POST /api/nginx/redirection-hosts/123/disable
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
internalRedirectionHost.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)}) try {
.then((result) => { const result = await internalRedirectionHost.disable(res.locals.access, {
res.status(200) id: Number.parseInt(req.params.host_id, 10),
.send(result); });
}) res.status(200).send(result);
.catch(next); } catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
module.exports = router; export default router;


@@ -1,22 +1,23 @@
const express = require('express'); import express from "express";
const validator = require('../../lib/validator'); import internalStream from "../../internal/stream.js";
const jwtdecode = require('../../lib/express/jwt-decode'); import jwtdecode from "../../lib/express/jwt-decode.js";
const apiValidator = require('../../lib/validator/api'); import apiValidator from "../../lib/validator/api.js";
const internalStream = require('../../internal/stream'); import validator from "../../lib/validator/index.js";
const schema = require('../../schema'); import { express as logger } from "../../logger.js";
import { getValidationSchema } from "../../schema/index.js";
let router = express.Router({ const router = express.Router({
caseSensitive: true, caseSensitive: true,
strict: true, strict: true,
mergeParams: true mergeParams: true,
}); });
/** /**
* /api/nginx/streams * /api/nginx/streams
*/ */
router router
.route('/') .route("/")
.options((req, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
.all(jwtdecode()) // preferred so it doesn't apply to nonexistent routes .all(jwtdecode()) // preferred so it doesn't apply to nonexistent routes
@@ -26,29 +27,31 @@ router
* *
* Retrieve all streams * Retrieve all streams
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
validator({ try {
const data = await validator(
{
additionalProperties: false, additionalProperties: false,
properties: { properties: {
expand: { expand: {
$ref: 'common#/properties/expand' $ref: "common#/properties/expand",
}, },
query: { query: {
$ref: 'common#/properties/query' $ref: "common#/properties/query",
},
},
},
{
expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
query: typeof req.query.query === "string" ? req.query.query : null,
},
);
const rows = await internalStream.getAll(res.locals.access, data.expand, data.query);
res.status(200).send(rows);
} catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
} }
}
}, {
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
query: (typeof req.query.query === 'string' ? req.query.query : null)
})
.then((data) => {
return internalStream.getAll(res.locals.access, data.expand, data.query);
})
.then((rows) => {
res.status(200)
.send(rows);
})
.catch(next);
}) })
/** /**
@@ -56,16 +59,15 @@ router
* *
* Create a new stream * Create a new stream
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
apiValidator(schema.getValidationSchema('/nginx/streams', 'post'), req.body) try {
.then((payload) => { const payload = await apiValidator(getValidationSchema("/nginx/streams", "post"), req.body);
return internalStream.create(res.locals.access, payload); const result = await internalStream.create(res.locals.access, payload);
}) res.status(201).send(result);
.then((result) => { } catch (err) {
res.status(201) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(result); next(err);
}) }
.catch(next);
}); });
/** /**
@@ -74,8 +76,8 @@ router
* /api/nginx/streams/123 * /api/nginx/streams/123
*/ */
router router
.route('/:stream_id') .route("/:stream_id")
.options((req, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
.all(jwtdecode()) // preferred so it doesn't apply to nonexistent routes .all(jwtdecode()) // preferred so it doesn't apply to nonexistent routes
@@ -85,33 +87,35 @@ router
* *
* Retrieve a specific stream * Retrieve a specific stream
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
validator({ try {
required: ['stream_id'], const data = await validator(
{
required: ["stream_id"],
additionalProperties: false, additionalProperties: false,
properties: { properties: {
stream_id: { stream_id: {
$ref: 'common#/properties/id' $ref: "common#/properties/id",
}, },
expand: { expand: {
$ref: 'common#/properties/expand' $ref: "common#/properties/expand",
} },
} },
}, { },
{
stream_id: req.params.stream_id, stream_id: req.params.stream_id,
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null) expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
}) },
.then((data) => { );
return internalStream.get(res.locals.access, { const row = await internalStream.get(res.locals.access, {
id: parseInt(data.stream_id, 10), id: Number.parseInt(data.stream_id, 10),
expand: data.expand expand: data.expand,
}); });
}) res.status(200).send(row);
.then((row) => { } catch (err) {
res.status(200) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(row); next(err);
}) }
.catch(next);
}) })
/** /**
@@ -119,17 +123,16 @@ router
* *
 * Update an existing stream * Update an existing stream
*/ */
.put((req, res, next) => { .put(async (req, res, next) => {
apiValidator(schema.getValidationSchema('/nginx/streams/{streamID}', 'put'), req.body) try {
.then((payload) => { const payload = await apiValidator(getValidationSchema("/nginx/streams/{streamID}", "put"), req.body);
payload.id = parseInt(req.params.stream_id, 10); payload.id = Number.parseInt(req.params.stream_id, 10);
return internalStream.update(res.locals.access, payload); const result = await internalStream.update(res.locals.access, payload);
}) res.status(200).send(result);
.then((result) => { } catch (err) {
res.status(200) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(result); next(err);
}) }
.catch(next);
}) })
/** /**
@@ -137,13 +140,16 @@ router
* *
 * Delete an existing stream * Delete an existing stream
*/ */
.delete((req, res, next) => { .delete(async (req, res, next) => {
internalStream.delete(res.locals.access, {id: parseInt(req.params.stream_id, 10)}) try {
.then((result) => { const result = await internalStream.delete(res.locals.access, {
res.status(200) id: Number.parseInt(req.params.stream_id, 10),
.send(result); });
}) res.status(200).send(result);
.catch(next); } catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
/** /**
@@ -152,7 +158,7 @@ router
* /api/nginx/streams/123/enable * /api/nginx/streams/123/enable
*/ */
router router
.route('/:host_id/enable') .route("/:host_id/enable")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -161,13 +167,16 @@ router
/** /**
* POST /api/nginx/streams/123/enable * POST /api/nginx/streams/123/enable
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
internalStream.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)}) try {
.then((result) => { const result = await internalStream.enable(res.locals.access, {
res.status(200) id: Number.parseInt(req.params.host_id, 10),
.send(result); });
}) res.status(200).send(result);
.catch(next); } catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
/** /**
@@ -176,7 +185,7 @@ router
* /api/nginx/streams/123/disable * /api/nginx/streams/123/disable
*/ */
router router
.route('/:host_id/disable') .route("/:host_id/disable")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -185,13 +194,16 @@ router
/** /**
* POST /api/nginx/streams/123/disable * POST /api/nginx/streams/123/disable
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
internalStream.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)}) try {
.then((result) => { const result = await internalStream.disable(res.locals.access, {
res.status(200) id: Number.parseInt(req.params.host_id, 10),
.send(result); });
}) res.status(200).send(result);
.catch(next); } catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
module.exports = router; export default router;


@@ -1,15 +1,16 @@
const express = require('express'); import express from "express";
const jwtdecode = require('../lib/express/jwt-decode'); import internalReport from "../internal/report.js";
const internalReport = require('../internal/report'); import jwtdecode from "../lib/express/jwt-decode.js";
import { express as logger } from "../logger.js";
let router = express.Router({ const router = express.Router({
caseSensitive: true, caseSensitive: true,
strict: true, strict: true,
mergeParams: true mergeParams: true,
}); });
router router
.route('/hosts') .route("/hosts")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -17,13 +18,14 @@ router
/** /**
* GET /reports/hosts * GET /reports/hosts
*/ */
.get(jwtdecode(), (_, res, next) => { .get(jwtdecode(), async (req, res, next) => {
internalReport.getHostsReport(res.locals.access) try {
.then((data) => { const data = await internalReport.getHostsReport(res.locals.access);
res.status(200) res.status(200).send(data);
.send(data); } catch (err) {
}) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.catch(next); next(err);
}
}); });
module.exports = router; export default router;


@@ -1,15 +1,16 @@
const express = require('express'); import express from "express";
const schema = require('../schema'); import { express as logger } from "../logger.js";
const PACKAGE = require('../package.json'); import PACKAGE from "../package.json" with { type: "json" };
import { getCompiledSchema } from "../schema/index.js";
const router = express.Router({ const router = express.Router({
caseSensitive: true, caseSensitive: true,
strict: true, strict: true,
mergeParams: true mergeParams: true,
}); });
router router
.route('/') .route("/")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -18,21 +19,26 @@ router
* GET /schema * GET /schema
*/ */
.get(async (req, res) => { .get(async (req, res, next) => {
let swaggerJSON = await schema.getCompiledSchema(); try {
const swaggerJSON = await getCompiledSchema();
let proto = req.protocol; let proto = req.protocol;
if (typeof req.headers['x-forwarded-proto'] !== 'undefined' && req.headers['x-forwarded-proto']) { if (typeof req.headers["x-forwarded-proto"] !== "undefined" && req.headers["x-forwarded-proto"]) {
proto = req.headers['x-forwarded-proto']; proto = req.headers["x-forwarded-proto"];
} }
let origin = proto + '://' + req.hostname; let origin = `${proto}://${req.hostname}`;
if (typeof req.headers.origin !== 'undefined' && req.headers.origin) { if (typeof req.headers.origin !== "undefined" && req.headers.origin) {
origin = req.headers.origin; origin = req.headers.origin;
} }
swaggerJSON.info.version = PACKAGE.version; swaggerJSON.info.version = PACKAGE.version;
swaggerJSON.servers[0].url = origin + '/api'; swaggerJSON.servers[0].url = `${origin}/api`;
res.status(200).send(swaggerJSON); res.status(200).send(swaggerJSON);
} catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
module.exports = router; export default router;


@@ -1,21 +1,22 @@
const express = require('express'); import express from "express";
const validator = require('../lib/validator'); import internalSetting from "../internal/setting.js";
const jwtdecode = require('../lib/express/jwt-decode'); import jwtdecode from "../lib/express/jwt-decode.js";
const apiValidator = require('../lib/validator/api'); import apiValidator from "../lib/validator/api.js";
const internalSetting = require('../internal/setting'); import validator from "../lib/validator/index.js";
const schema = require('../schema'); import { express as logger } from "../logger.js";
import { getValidationSchema } from "../schema/index.js";
let router = express.Router({ const router = express.Router({
caseSensitive: true, caseSensitive: true,
strict: true, strict: true,
mergeParams: true mergeParams: true,
}); });
/** /**
* /api/settings * /api/settings
*/ */
router router
.route('/') .route("/")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -26,13 +27,14 @@ router
* *
* Retrieve all settings * Retrieve all settings
*/ */
.get((_, res, next) => { .get(async (req, res, next) => {
internalSetting.getAll(res.locals.access) try {
.then((rows) => { const rows = await internalSetting.getAll(res.locals.access);
res.status(200) res.status(200).send(rows);
.send(rows); } catch (err) {
}) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.catch(next); next(err);
}
}); });
/** /**
@@ -41,7 +43,7 @@ router
* /api/settings/something * /api/settings/something
*/ */
router router
.route('/:setting_id') .route("/:setting_id")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -52,29 +54,31 @@ router
* *
* Retrieve a specific setting * Retrieve a specific setting
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
validator({ try {
required: ['setting_id'], const data = await validator(
{
required: ["setting_id"],
additionalProperties: false, additionalProperties: false,
properties: { properties: {
setting_id: { setting_id: {
type: 'string', type: "string",
minLength: 1 minLength: 1,
} },
} },
}, { },
setting_id: req.params.setting_id {
}) setting_id: req.params.setting_id,
.then((data) => { },
return internalSetting.get(res.locals.access, { );
id: data.setting_id const row = await internalSetting.get(res.locals.access, {
id: data.setting_id,
}); });
}) res.status(200).send(row);
.then((row) => { } catch (err) {
res.status(200) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(row); next(err);
}) }
.catch(next);
}) })
/** /**
@@ -82,17 +86,16 @@ router
* *
 * Update an existing setting * Update an existing setting
*/ */
.put((req, res, next) => { .put(async (req, res, next) => {
apiValidator(schema.getValidationSchema('/settings/{settingID}', 'put'), req.body) try {
.then((payload) => { const payload = await apiValidator(getValidationSchema("/settings/{settingID}", "put"), req.body);
payload.id = req.params.setting_id; payload.id = req.params.setting_id;
return internalSetting.update(res.locals.access, payload); const result = await internalSetting.update(res.locals.access, payload);
}) res.status(200).send(result);
.then((result) => { } catch (err) {
res.status(200) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(result); next(err);
}) }
.catch(next);
}); });
module.exports = router; export default router;


@@ -1,17 +1,18 @@
const express = require('express'); import express from "express";
const jwtdecode = require('../lib/express/jwt-decode'); import internalToken from "../internal/token.js";
const apiValidator = require('../lib/validator/api'); import jwtdecode from "../lib/express/jwt-decode.js";
const internalToken = require('../internal/token'); import apiValidator from "../lib/validator/api.js";
const schema = require('../schema'); import { express as logger } from "../logger.js";
import { getValidationSchema } from "../schema/index.js";
let router = express.Router({ const router = express.Router({
caseSensitive: true, caseSensitive: true,
strict: true, strict: true,
mergeParams: true mergeParams: true,
}); });
router router
.route('/') .route("/")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -23,16 +24,17 @@ router
* We also piggy back on to this method, allowing admins to get tokens * We also piggy back on to this method, allowing admins to get tokens
* for services like Job board and Worker. * for services like Job board and Worker.
*/ */
.get(jwtdecode(), (req, res, next) => { .get(jwtdecode(), async (req, res, next) => {
internalToken.getFreshToken(res.locals.access, { try {
expiry: (typeof req.query.expiry !== 'undefined' ? req.query.expiry : null), const data = await internalToken.getFreshToken(res.locals.access, {
scope: (typeof req.query.scope !== 'undefined' ? req.query.scope : null) expiry: typeof req.query.expiry !== "undefined" ? req.query.expiry : null,
}) scope: typeof req.query.scope !== "undefined" ? req.query.scope : null,
.then((data) => { });
res.status(200) res.status(200).send(data);
.send(data); } catch (err) {
}) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.catch(next); next(err);
}
}) })
/** /**
@@ -41,13 +43,14 @@ router
* Create a new Token * Create a new Token
*/ */
.post(async (req, res, next) => { .post(async (req, res, next) => {
apiValidator(schema.getValidationSchema('/tokens', 'post'), req.body) try {
.then(internalToken.getTokenFromEmail) const data = await apiValidator(getValidationSchema("/tokens", "post"), req.body);
.then((data) => { const result = await internalToken.getTokenFromEmail(data);
res.status(200) res.status(200).send(result);
.send(data); } catch (err) {
}) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.catch(next); next(err);
}
}); });
module.exports = router; export default router;
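
For context, a hedged example of calling the refresh endpoint above from a client; only the `expiry`/`scope` query parameters and the bearer-token requirement come from the route itself, while the base URL, the `30d` expiry format and the response shape are assumptions.

```js
// Illustrative client-side call for GET /api/tokens (a sketch, not project code).
const refreshToken = async (currentToken) => {
	const res = await fetch("http://127.0.0.1:81/api/tokens?expiry=30d", {
		headers: { Authorization: `Bearer ${currentToken}` },
	});
	if (!res.ok) {
		throw new Error(`Token refresh failed with status ${res.status}`);
	}
	return res.json(); // assumed to contain the new token and its expiry
};
```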


@@ -1,22 +1,27 @@
const express = require('express'); import express from "express";
const validator = require('../lib/validator'); import internalUser from "../internal/user.js";
const jwtdecode = require('../lib/express/jwt-decode'); import Access from "../lib/access.js";
const userIdFromMe = require('../lib/express/user-id-from-me'); import { isCI } from "../lib/config.js";
const internalUser = require('../internal/user'); import errs from "../lib/error.js";
const apiValidator = require('../lib/validator/api'); import jwtdecode from "../lib/express/jwt-decode.js";
const schema = require('../schema'); import userIdFromMe from "../lib/express/user-id-from-me.js";
import apiValidator from "../lib/validator/api.js";
import validator from "../lib/validator/index.js";
import { express as logger } from "../logger.js";
import { getValidationSchema } from "../schema/index.js";
import { isSetup } from "../setup.js";
let router = express.Router({ const router = express.Router({
caseSensitive: true, caseSensitive: true,
strict: true, strict: true,
mergeParams: true mergeParams: true,
}); });
/** /**
* /api/users * /api/users
*/ */
router router
.route('/') .route("/")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -27,33 +32,38 @@ router
* *
* Retrieve all users * Retrieve all users
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
validator({ try {
const data = await validator(
{
additionalProperties: false, additionalProperties: false,
properties: { properties: {
expand: { expand: {
$ref: 'common#/properties/expand' $ref: "common#/properties/expand",
}, },
query: { query: {
$ref: 'common#/properties/query' $ref: "common#/properties/query",
} },
} },
}, { },
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null), {
query: (typeof req.query.query === 'string' ? req.query.query : null) expand:
}) typeof req.query.expand === "string"
.then((data) => { ? req.query.expand.split(",")
return internalUser.getAll(res.locals.access, data.expand, data.query); : null,
}) query: typeof req.query.query === "string" ? req.query.query : null,
.then((users) => { },
res.status(200) );
.send(users); const users = await internalUser.getAll(
}) res.locals.access,
.catch((err) => { data.expand,
console.log(err); data.query,
);
res.status(200).send(users);
} catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err); next(err);
}); }
//.catch(next);
}) })
/** /**
@@ -61,16 +71,66 @@ router
* *
* Create a new User * Create a new User
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
apiValidator(schema.getValidationSchema('/users', 'post'), req.body) const body = req.body;
.then((payload) => {
return internalUser.create(res.locals.access, payload); try {
// If we are in setup mode, we don't check access for current user
const setup = await isSetup();
if (!setup) {
logger.info("Creating a new user in setup mode");
const access = new Access(null);
await access.load(true);
res.locals.access = access;
// We are in setup mode, set some defaults for this first new user, such as making
// them an admin.
body.is_disabled = false;
if (typeof body.roles !== "object" || body.roles === null) {
body.roles = [];
}
if (body.roles.indexOf("admin") === -1) {
body.roles.push("admin");
}
}
const payload = await apiValidator(
getValidationSchema("/users", "post"),
body,
);
const user = await internalUser.create(res.locals.access, payload);
res.status(201).send(user);
} catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}) })
.then((result) => {
res.status(201) /**
.send(result); * DELETE /api/users
}) *
.catch(next); * Deletes ALL users. This is NOT GENERALLY AVAILABLE!
* (!) It is NOT an authenticated endpoint.
* (!) Only CI should be able to call this endpoint. As a result,
*
* it will only work when the env vars DEBUG=true and CI=true
*
* Do NOT set those env vars in a production environment!
*/
.delete(async (req, res, next) => {
if (isCI()) {
try {
logger.warn("Deleting all users - CI environment detected, allowing this operation");
await internalUser.deleteAll();
res.status(200).send(true);
} catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
return;
}
next(new errs.ItemNotFoundError());
}); });
/** /**
@@ -79,7 +139,7 @@ router
* /api/users/123 * /api/users/123
*/ */
router router
.route('/:user_id') .route("/:user_id")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -91,37 +151,43 @@ router
* *
* Retrieve a specific user * Retrieve a specific user
*/ */
.get((req, res, next) => { .get(async (req, res, next) => {
validator({ try {
required: ['user_id'], const data = await validator(
{
required: ["user_id"],
additionalProperties: false, additionalProperties: false,
properties: { properties: {
user_id: { user_id: {
$ref: 'common#/properties/id' $ref: "common#/properties/id",
}, },
expand: { expand: {
$ref: 'common#/properties/expand' $ref: "common#/properties/expand",
} },
} },
}, { },
{
user_id: req.params.user_id, user_id: req.params.user_id,
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null) expand:
}) typeof req.query.expand === "string"
.then((data) => { ? req.query.expand.split(",")
return internalUser.get(res.locals.access, { : null,
},
);
const user = await internalUser.get(res.locals.access, {
id: data.user_id, id: data.user_id,
expand: data.expand, expand: data.expand,
omit: internalUser.getUserOmisionsByAccess(res.locals.access, data.user_id) omit: internalUser.getUserOmisionsByAccess(
res.locals.access,
data.user_id,
),
}); });
}) res.status(200).send(user);
.then((user) => { } catch (err) {
res.status(200) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(user);
})
.catch((err) => {
console.log(err);
next(err); next(err);
}); }
}) })
/** /**
@@ -129,17 +195,19 @@ router
* *
 * Update an existing user * Update an existing user
*/ */
.put((req, res, next) => { .put(async (req, res, next) => {
apiValidator(schema.getValidationSchema('/users/{userID}', 'put'), req.body) try {
.then((payload) => { const payload = await apiValidator(
getValidationSchema("/users/{userID}", "put"),
req.body,
);
payload.id = req.params.user_id; payload.id = req.params.user_id;
return internalUser.update(res.locals.access, payload); const result = await internalUser.update(res.locals.access, payload);
}) res.status(200).send(result);
.then((result) => { } catch (err) {
res.status(200) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(result); next(err);
}) }
.catch(next);
}) })
/** /**
@@ -147,13 +215,16 @@ router
* *
 * Delete an existing user * Delete an existing user
*/ */
.delete((req, res, next) => { .delete(async (req, res, next) => {
internalUser.delete(res.locals.access, {id: req.params.user_id}) try {
.then((result) => { const result = await internalUser.delete(res.locals.access, {
res.status(200) id: req.params.user_id,
.send(result); });
}) res.status(200).send(result);
.catch(next); } catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
/** /**
@@ -162,8 +233,8 @@ router
* /api/users/123/auth * /api/users/123/auth
*/ */
router router
.route('/:user_id/auth') .route("/:user_id/auth")
.options((req, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
.all(jwtdecode()) .all(jwtdecode())
@@ -174,17 +245,19 @@ router
* *
* Update password for a user * Update password for a user
*/ */
.put((req, res, next) => { .put(async (req, res, next) => {
apiValidator(schema.getValidationSchema('/users/{userID}/auth', 'put'), req.body) try {
.then((payload) => { const payload = await apiValidator(
getValidationSchema("/users/{userID}/auth", "put"),
req.body,
);
payload.id = req.params.user_id; payload.id = req.params.user_id;
return internalUser.setPassword(res.locals.access, payload); const result = await internalUser.setPassword(res.locals.access, payload);
}) res.status(200).send(result);
.then((result) => { } catch (err) {
res.status(200) logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
.send(result); next(err);
}) }
.catch(next);
}); });
/** /**
@@ -193,8 +266,8 @@ router
* /api/users/123/permissions * /api/users/123/permissions
*/ */
router router
.route('/:user_id/permissions') .route("/:user_id/permissions")
.options((req, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
.all(jwtdecode()) .all(jwtdecode())
@@ -205,17 +278,22 @@ router
* *
* Set some or all permissions for a user * Set some or all permissions for a user
*/ */
.put((req, res, next) => { .put(async (req, res, next) => {
apiValidator(schema.getValidationSchema('/users/{userID}/permissions', 'put'), req.body) try {
.then((payload) => { const payload = await apiValidator(
getValidationSchema("/users/{userID}/permissions", "put"),
req.body,
);
payload.id = req.params.user_id; payload.id = req.params.user_id;
return internalUser.setPermissions(res.locals.access, payload); const result = await internalUser.setPermissions(
}) res.locals.access,
.then((result) => { payload,
res.status(200) );
.send(result); res.status(200).send(result);
}) } catch (err) {
.catch(next); logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
/** /**
@@ -224,7 +302,7 @@ router
* /api/users/123/login * /api/users/123/login
*/ */
router router
.route('/:user_id/login') .route("/:user_id/login")
.options((_, res) => { .options((_, res) => {
res.sendStatus(204); res.sendStatus(204);
}) })
@@ -235,13 +313,16 @@ router
* *
* Log in as a user * Log in as a user
*/ */
.post((req, res, next) => { .post(async (req, res, next) => {
internalUser.loginAs(res.locals.access, {id: parseInt(req.params.user_id, 10)}) try {
.then((result) => { const result = await internalUser.loginAs(res.locals.access, {
res.status(200) id: Number.parseInt(req.params.user_id, 10),
.send(result); });
}) res.status(200).send(result);
.catch(next); } catch (err) {
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
}); });
module.exports = router; export default router;


@@ -9,6 +9,11 @@
"description": "Healthy", "description": "Healthy",
"example": "OK" "example": "OK"
}, },
"setup": {
"type": "boolean",
"description": "Whether the initial setup has been completed",
"example": true
},
"version": { "version": {
"type": "object", "type": "object",
"description": "The version object", "description": "The version object",


@@ -54,6 +54,63 @@
"items": { "items": {
"type": "string" "type": "string"
} }
},
"permissions": {
"type": "object",
"description": "Permissions if expanded in request",
"required": [
"visibility",
"proxy_hosts",
"redirection_hosts",
"dead_hosts",
"streams",
"access_lists",
"certificates"
],
"properties": {
"visibility": {
"type": "string",
"description": "Visibility level",
"example": "all",
"pattern": "^(all|user)$"
},
"proxy_hosts": {
"type": "string",
"description": "Proxy Hosts access level",
"example": "all",
"pattern": "^(manage|view|hidden)$"
},
"redirection_hosts": {
"type": "string",
"description": "Redirection Hosts access level",
"example": "all",
"pattern": "^(manage|view|hidden)$"
},
"dead_hosts": {
"type": "string",
"description": "Dead Hosts access level",
"example": "all",
"pattern": "^(manage|view|hidden)$"
},
"streams": {
"type": "string",
"description": "Streams access level",
"example": "all",
"pattern": "^(manage|view|hidden)$"
},
"access_lists": {
"type": "string",
"description": "Access Lists access level",
"example": "all",
"pattern": "^(manage|view|hidden)$"
},
"certificates": {
"type": "string",
"description": "Certificates access level",
"example": "all",
"pattern": "^(manage|view|hidden)$"
}
}
} }
} }
} }
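
Based on the properties above, a user fetched with `expand=permissions` would carry a block along these lines (values chosen to satisfy the patterns defined in the schema; treat the exact response shape as an assumption):

```js
// Example "permissions" object as described by the schema above.
const permissions = {
	visibility: "all",            // "all" or "user"
	proxy_hosts: "manage",        // "manage", "view" or "hidden"
	redirection_hosts: "manage",
	dead_hosts: "manage",
	streams: "manage",
	access_lists: "manage",
	certificates: "manage",
};
```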


@@ -1,21 +1,24 @@
const refParser = require('@apidevtools/json-schema-ref-parser'); import { dirname } from "node:path";
import { fileURLToPath } from "node:url";
import $RefParser from "@apidevtools/json-schema-ref-parser";
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
let compiledSchema = null; let compiledSchema = null;
module.exports = {
/** /**
* Compiles the schema, by dereferencing it, only once * Compiles the schema, by dereferencing it, only once
* and returns the memory cached value * and returns the memory cached value
*/ */
getCompiledSchema: async () => { const getCompiledSchema = async () => {
if (compiledSchema === null) { if (compiledSchema === null) {
compiledSchema = await refParser.dereference(__dirname + '/swagger.json', { compiledSchema = await $RefParser.dereference(`${__dirname}/swagger.json`, {
mutateInputSchema: false, mutateInputSchema: false,
}); });
} }
return compiledSchema; return compiledSchema;
}, };
/** /**
* Scans the schema for the validation schema for the given path and method * Scans the schema for the validation schema for the given path and method
@@ -25,17 +28,19 @@ module.exports = {
* @param {string} method * @param {string} method
* @returns string|null * @returns string|null
*/ */
getValidationSchema: (path, method) => { const getValidationSchema = (path, method) => {
if (compiledSchema !== null && if (
typeof compiledSchema.paths[path] !== 'undefined' && compiledSchema !== null &&
typeof compiledSchema.paths[path][method] !== 'undefined' && typeof compiledSchema.paths[path] !== "undefined" &&
typeof compiledSchema.paths[path][method].requestBody !== 'undefined' && typeof compiledSchema.paths[path][method] !== "undefined" &&
typeof compiledSchema.paths[path][method].requestBody.content !== 'undefined' && typeof compiledSchema.paths[path][method].requestBody !== "undefined" &&
typeof compiledSchema.paths[path][method].requestBody.content['application/json'] !== 'undefined' && typeof compiledSchema.paths[path][method].requestBody.content !== "undefined" &&
typeof compiledSchema.paths[path][method].requestBody.content['application/json'].schema !== 'undefined' typeof compiledSchema.paths[path][method].requestBody.content["application/json"] !== "undefined" &&
typeof compiledSchema.paths[path][method].requestBody.content["application/json"].schema !== "undefined"
) { ) {
return compiledSchema.paths[path][method].requestBody.content['application/json'].schema; return compiledSchema.paths[path][method].requestBody.content["application/json"].schema;
} }
return null; return null;
}
}; };
export { getCompiledSchema, getValidationSchema };
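
Both exports are consumed elsewhere in the backend roughly as follows; this is a condensed sketch of the usage already visible in the routes above, with an illustrative payload.

```js
import apiValidator from "../lib/validator/api.js";
import { getCompiledSchema, getValidationSchema } from "../schema/index.js";

// Dereference the swagger document once; subsequent calls return the in-memory copy.
const swaggerJSON = await getCompiledSchema();
console.log(swaggerJSON.info.title);

// getValidationSchema() reads that cached copy, so getCompiledSchema() must have run first.
const schema = getValidationSchema("/settings/{settingID}", "put");

// Validate an (illustrative) request body against the extracted schema.
const payload = await apiValidator(schema, { value: "congratulations" });
```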


@@ -11,6 +11,7 @@
"default": { "default": {
"value": { "value": {
"status": "OK", "status": "OK",
"setup": true,
"version": { "version": {
"major": 2, "major": 2,
"minor": 1, "minor": 1,


@@ -10,10 +10,10 @@
// docker exec npm_core /command/s6-setuidgid 1000:1000 bash -c "/app/scripts/install-certbot-plugins" // docker exec npm_core /command/s6-setuidgid 1000:1000 bash -c "/app/scripts/install-certbot-plugins"
// //
const dnsPlugins = require('../global/certbot-dns-plugins.json'); import dnsPlugins from "../global/certbot-dns-plugins.json" with { type: "json" };
const certbot = require('../lib/certbot'); import { installPlugin } from "../lib/certbot.js";
const logger = require('../logger').certbot; import { certbot as logger } from "../logger.js";
const batchflow = require('batchflow'); import batchflow from "batchflow";
let hasErrors = false; let hasErrors = false;
let failingPlugins = []; let failingPlugins = [];
@@ -25,7 +25,7 @@ if (process.argv.length > 2) {
batchflow(pluginKeys).sequential() batchflow(pluginKeys).sequential()
.each((i, pluginKey, next) => { .each((i, pluginKey, next) => {
certbot.installPlugin(pluginKey) installPlugin(pluginKey)
.then(() => { .then(() => {
next(); next();
}) })


@@ -1,72 +1,74 @@
const config = require('./lib/config'); import { installPlugins } from "./lib/certbot.js";
const logger = require('./logger').setup; import utils from "./lib/utils.js";
const certificateModel = require('./models/certificate'); import { setup as logger } from "./logger.js";
const userModel = require('./models/user'); import authModel from "./models/auth.js";
const userPermissionModel = require('./models/user_permission'); import certificateModel from "./models/certificate.js";
const utils = require('./lib/utils'); import settingModel from "./models/setting.js";
const authModel = require('./models/auth'); import userModel from "./models/user.js";
const settingModel = require('./models/setting'); import userPermissionModel from "./models/user_permission.js";
const certbot = require('./lib/certbot');
export const isSetup = async () => {
const row = await userModel.query().select("id").where("is_deleted", 0).first();
return row?.id > 0;
}
/** /**
 * Creates a default admin user if one doesn't already exist in the database * Creates a default admin user if one doesn't already exist in the database
* *
* @returns {Promise} * @returns {Promise}
*/ */
const setupDefaultUser = () => { const setupDefaultUser = async () => {
return userModel const initialAdminEmail = process.env.INITIAL_ADMIN_EMAIL;
.query() const initialAdminPassword = process.env.INITIAL_ADMIN_PASSWORD;
.select('id', )
.where('is_deleted', 0)
.first()
.then((row) => {
if (!row || !row.id) {
// Create a new user and set password
const email = (process.env.INITIAL_ADMIN_EMAIL || 'admin@example.com').toLowerCase();
const password = process.env.INITIAL_ADMIN_PASSWORD || 'changeme';
logger.info(`Creating a new user: ${email} with password: ${password}`); // This will only create a new user when there are no active users in the database
// and the INITIAL_ADMIN_EMAIL and INITIAL_ADMIN_PASSWORD environment variables are set.
// Otherwise, users should be shown the setup wizard in the frontend.
// I'm keeping this legacy behavior in case some people are automating deployments.
if (!initialAdminEmail || !initialAdminPassword) {
return Promise.resolve();
}
const userIsetup = await isSetup();
if (!userIsetup) {
// Create a new user and set password
logger.info(`Creating a new user: ${initialAdminEmail} with password: ${initialAdminPassword}`);
const data = { const data = {
is_deleted: 0, is_deleted: 0,
email: email, email: initialAdminEmail,
name: 'Administrator', name: "Administrator",
nickname: 'Admin', nickname: "Admin",
avatar: '', avatar: "",
roles: ['admin'], roles: ["admin"],
}; };
return userModel const user = await userModel
.query() .query()
.insertAndFetch(data) .insertAndFetch(data);
.then((user) => {
return authModel await authModel
.query() .query()
.insert({ .insert({
user_id: user.id, user_id: user.id,
type: 'password', type: "password",
secret: password, secret: initialAdminPassword,
meta: {}, meta: {},
}) });
.then(() => {
return userPermissionModel.query().insert({ await userPermissionModel.query().insert({
user_id: user.id, user_id: user.id,
visibility: 'all', visibility: "all",
proxy_hosts: 'manage', proxy_hosts: "manage",
redirection_hosts: 'manage', redirection_hosts: "manage",
dead_hosts: 'manage', dead_hosts: "manage",
streams: 'manage', streams: "manage",
access_lists: 'manage', access_lists: "manage",
certificates: 'manage', certificates: "manage",
}); });
}); logger.info("Initial admin setup completed");
})
.then(() => {
logger.info('Initial admin setup completed');
});
} else if (config.debug()) {
logger.info('Admin user setup not required');
} }
});
}; };
/** /**
@@ -74,31 +76,25 @@ const setupDefaultUser = () => {
* *
* @returns {Promise} * @returns {Promise}
*/ */
const setupDefaultSettings = () => { const setupDefaultSettings = async () => {
return settingModel const row = await settingModel
.query() .query()
.select('id') .select("id")
.where({id: 'default-site'}) .where({ id: "default-site" })
.first() .first();
.then((row) => {
if (!row || !row.id) { if (!row?.id) {
settingModel await settingModel
.query() .query()
.insert({ .insert({
id: 'default-site', id: "default-site",
name: 'Default Site', name: "Default Site",
description: 'What to show when Nginx is hit with an unknown Host', description: "What to show when Nginx is hit with an unknown Host",
value: 'congratulations', value: "congratulations",
meta: {}, meta: {},
})
.then(() => {
logger.info('Default settings added');
}); });
logger.info("Default settings added");
} }
if (config.debug()) {
logger.info('Default setting setup not required');
}
});
}; };
/** /**
@@ -106,13 +102,13 @@ const setupDefaultSettings = () => {
* *
* @returns {Promise} * @returns {Promise}
*/ */
const setupCertbotPlugins = () => { const setupCertbotPlugins = async () => {
return certificateModel const certificates = await certificateModel
.query() .query()
.where('is_deleted', 0) .where("is_deleted", 0)
.andWhere('provider', 'letsencrypt') .andWhere("provider", "letsencrypt");
.then((certificates) => {
if (certificates && certificates.length) { if (certificates?.length) {
const plugins = []; const plugins = [];
const promises = []; const promises = [];
@@ -125,26 +121,24 @@ const setupCertbotPlugins = () => {
// Make sure credentials file exists // Make sure credentials file exists
const credentials_loc = `/etc/letsencrypt/credentials/credentials-${certificate.id}`; const credentials_loc = `/etc/letsencrypt/credentials/credentials-${certificate.id}`;
// Escape single quotes and backslashes // Escape single quotes and backslashes
const escapedCredentials = certificate.meta.dns_provider_credentials.replaceAll('\'', '\\\'').replaceAll('\\', '\\\\'); const escapedCredentials = certificate.meta.dns_provider_credentials
.replaceAll("'", "\\'")
.replaceAll("\\", "\\\\");
const credentials_cmd = `[ -f '${credentials_loc}' ] || { mkdir -p /etc/letsencrypt/credentials 2> /dev/null; echo '${escapedCredentials}' > '${credentials_loc}' && chmod 600 '${credentials_loc}'; }`; const credentials_cmd = `[ -f '${credentials_loc}' ] || { mkdir -p /etc/letsencrypt/credentials 2> /dev/null; echo '${escapedCredentials}' > '${credentials_loc}' && chmod 600 '${credentials_loc}'; }`;
promises.push(utils.exec(credentials_cmd)); promises.push(utils.exec(credentials_cmd));
} }
return true;
}); });
return certbot.installPlugins(plugins) await installPlugins(plugins);
.then(() => {
if (promises.length) { if (promises.length) {
return Promise.all(promises) await Promise.all(promises);
.then(() => { logger.info(`Added Certbot plugins ${plugins.join(", ")}`);
logger.info(`Added Certbot plugins ${plugins.join(', ')}`);
});
} }
});
} }
});
}; };
/** /**
* Starts a timer to call run the logrotation binary every two days * Starts a timer to call run the logrotation binary every two days
* @returns {Promise} * @returns {Promise}
@@ -154,18 +148,17 @@ const setupLogrotation = () => {
const runLogrotate = async () => { const runLogrotate = async () => {
try { try {
await utils.exec('logrotate /etc/logrotate.d/nginx-proxy-manager'); await utils.exec("logrotate /etc/logrotate.d/nginx-proxy-manager");
logger.info('Logrotate completed.'); logger.info("Logrotate completed.");
} catch (e) { logger.warn(e); } } catch (e) {
logger.warn(e);
}
}; };
logger.info('Logrotate Timer initialized'); logger.info("Logrotate Timer initialized");
setInterval(runLogrotate, intervalTimeout); setInterval(runLogrotate, intervalTimeout);
// And do this now as well // And do this now as well
return runLogrotate(); return runLogrotate();
}; };
module.exports = () => setupDefaultUser() export default () => setupDefaultUser().then(setupDefaultSettings).then(setupCertbotPlugins).then(setupLogrotation);
.then(setupDefaultSettings)
.then(setupCertbotPlugins)
.then(setupLogrotation);
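
The default export still chains the individual steps, so the entry point only needs to await it once; a minimal sketch of that call site (the exact location in the app bootstrap is an assumption):

```js
// Sketch: consuming the refactored setup module at startup (call site assumed).
import setup, { isSetup } from "./setup.js";

await setup(); // default user (env-driven), default settings, certbot plugins, logrotate timer

// isSetup() is exported separately so the API can report whether a first user exists,
// presumably feeding the new "setup" flag in the health response.
const wizardNeeded = !(await isSetup());
console.log(`Setup wizard required: ${wizardNeeded}`);
```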

backend/validate-schema.js Normal file → Executable file

@@ -1,16 +1,19 @@
const SwaggerParser = require('@apidevtools/swagger-parser'); #!/usr/bin/node
const chalk = require('chalk');
const schema = require('./schema'); import SwaggerParser from "@apidevtools/swagger-parser";
import chalk from "chalk";
import { getCompiledSchema } from "./schema/index.js";
const log = console.log; const log = console.log;
schema.getCompiledSchema().then(async (swaggerJSON) => { getCompiledSchema().then(async (swaggerJSON) => {
try { try {
const api = await SwaggerParser.validate(swaggerJSON); const api = await SwaggerParser.validate(swaggerJSON);
console.log('API name: %s, Version: %s', api.info.title, api.info.version); console.log("API name: %s, Version: %s", api.info.title, api.info.version);
log(chalk.green(' Schema is valid')); log(chalk.green(" Schema is valid"));
} catch (e) { } catch (e) {
console.error(e); console.error(e);
log(chalk.red('', e.message), '\n'); log(chalk.red("", e.message), "\n");
process.exit(1); process.exit(1);
} }
}); });

File diff suppressed because it is too large


@@ -7,7 +7,9 @@ services:
fullstack: fullstack:
image: "${IMAGE}:${BRANCH_LOWER}-ci-${BUILD_NUMBER}" image: "${IMAGE}:${BRANCH_LOWER}-ci-${BUILD_NUMBER}"
environment: environment:
TZ: "${TZ:-Australia/Brisbane}"
DEBUG: 'true' DEBUG: 'true'
CI: 'true'
FORCE_COLOR: 1 FORCE_COLOR: 1
# Required for DNS Certificate provisioning in CI # Required for DNS Certificate provisioning in CI
LE_SERVER: 'https://ca.internal/acme/acme/directory' LE_SERVER: 'https://ca.internal/acme/acme/directory'


@@ -18,6 +18,7 @@ services:
- website2.example.com - website2.example.com
- website3.example.com - website3.example.com
environment: environment:
TZ: "${TZ:-Australia/Brisbane}"
PUID: 1000 PUID: 1000
PGID: 1000 PGID: 1000
FORCE_COLOR: 1 FORCE_COLOR: 1
@@ -49,6 +50,7 @@ services:
- ../backend:/app - ../backend:/app
- ../frontend:/app/frontend - ../frontend:/app/frontend
- ../global:/app/global - ../global:/app/global
- '/etc/localtime:/etc/localtime:ro'
healthcheck: healthcheck:
test: ["CMD", "/usr/bin/check-health"] test: ["CMD", "/usr/bin/check-health"]
interval: 10s interval: 10s


@@ -12,6 +12,7 @@ server {
location /api/ { location /api/ {
add_header X-Served-By $host; add_header X-Served-By $host;
proxy_http_version 1.1;
proxy_set_header Host $host; proxy_set_header Host $host;
proxy_set_header X-Forwarded-Scheme $scheme; proxy_set_header X-Forwarded-Scheme $scheme;
proxy_set_header X-Forwarded-Proto $scheme; proxy_set_header X-Forwarded-Proto $scheme;
@@ -23,7 +24,14 @@ server {
} }
location / { location / {
index index.html; add_header X-Served-By $host;
try_files $uri $uri.html $uri/ /index.html; proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "Upgrade";
proxy_set_header X-Forwarded-Scheme $scheme;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-For $remote_addr;
proxy_pass http://127.0.0.1:5173;
} }
} }


@@ -15,7 +15,7 @@ if [ "$DEVELOPMENT" = 'true' ]; then
log_info 'Starting frontend ...' log_info 'Starting frontend ...'
s6-setuidgid "$PUID:$PGID" yarn install s6-setuidgid "$PUID:$PGID" yarn install
exec s6-setuidgid "$PUID:$PGID" yarn watch exec s6-setuidgid "$PUID:$PGID" yarn dev
else else
exit 0 exit 0
fi fi


@@ -5,7 +5,7 @@
"preview": "vitepress preview" "preview": "vitepress preview"
}, },
"devDependencies": { "devDependencies": {
"vitepress": "^1.4.0" "vitepress": "^1.6.4"
}, },
"dependencies": {} "dependencies": {}
} }


@@ -228,3 +228,13 @@ To enable the geoip2 module, you can create the custom configuration file `/data
load_module /usr/lib/nginx/modules/ngx_http_geoip2_module.so; load_module /usr/lib/nginx/modules/ngx_http_geoip2_module.so;
load_module /usr/lib/nginx/modules/ngx_stream_geoip2_module.so; load_module /usr/lib/nginx/modules/ngx_stream_geoip2_module.so;
``` ```
## Auto Initial User Creation
Setting these environment variables will create the default user on startup, skipping the UI first user setup screen:
```
environment:
INITIAL_ADMIN_EMAIL: my@example.com
INITIAL_ADMIN_PASSWORD: mypassword1
```


@@ -23,4 +23,10 @@ Your best bet is to ask the [Reddit community for support](https://www.reddit.co
## When adding username and password access control to a proxy host, I can no longer log in to the app. ## When adding username and password access control to a proxy host, I can no longer log in to the app.
Having an Access Control List (ACL) with username and password requires the browser to always send this username and password in the `Authorization` header on each request. If your proxied app also requires authentication (like Nginx Proxy Manager itself), most likely the app will also use the `Authorization` header to transmit this information, as this is the standardized header meant for this kind of information. However having multiples of the same headers is not allowed in the [internet standard](https://www.rfc-editor.org/rfc/rfc7230#section-3.2.2) and almost all apps do not support multiple values in the `Authorization` header. Hence one of the two logins will be broken. This can only be fixed by either removing one of the logins or by changing the app to use other non-standard headers for authorization. Having an Access Control List (ACL) with username and password requires the browser to always send this username
and password in the `Authorization` header on each request. If your proxied app also requires authentication (like
Nginx Proxy Manager itself), most likely the app will also use the `Authorization` header to transmit this information,
as this is the standardized header meant for this kind of information. However, having multiple instances of the same header
is not allowed by the [internet standard](https://www.rfc-editor.org/rfc/rfc7230#section-3.2.2), and almost all apps
do not support multiple values in the `Authorization` header. Hence, one of the two logins will be broken. This can
only be fixed by either removing one of the logins or by changing the app to use other non-standard headers for authorization.


@@ -35,7 +35,7 @@ so that the barrier for entry here is low.
## Features ## Features
- Beautiful and Secure Admin Interface based on [Tabler](https://tabler.github.io/) - Beautiful and Secure Admin Interface based on [Tabler](https://tabler.io/)
- Easily create forwarding domains, redirections, streams and 404 hosts without knowing anything about Nginx - Easily create forwarding domains, redirections, streams and 404 hosts without knowing anything about Nginx
- Free SSL using Let's Encrypt or provide your own custom SSL certificates - Free SSL using Let's Encrypt or provide your own custom SSL certificates
- Access Lists and basic HTTP Authentication for your hosts - Access Lists and basic HTTP Authentication for your hosts
@@ -66,6 +66,8 @@ services:
app: app:
image: 'jc21/nginx-proxy-manager:latest' image: 'jc21/nginx-proxy-manager:latest'
restart: unless-stopped restart: unless-stopped
environment:
TZ: "Australia/Brisbane"
ports: ports:
- '80:80' - '80:80'
- '81:81' - '81:81'
@@ -89,17 +91,10 @@ docker compose up -d
4. Log in to the Admin UI 4. Log in to the Admin UI
When your docker container is running, connect to it on port `81` for the admin interface. When your docker container is running, connect to it on port `81` for the admin interface.
Sometimes this can take a little bit because of the entropy of keys.
[http://127.0.0.1:81](http://127.0.0.1:81) [http://127.0.0.1:81](http://127.0.0.1:81)
Default Admin User: This startup can take a minute depending on your hardware.
```
Email: admin@example.com
Password: changeme
```
Immediately after logging in with this default user you will be asked to modify your details and change your password.
## Contributing ## Contributing


@@ -13,6 +13,7 @@ services:
app: app:
image: 'jc21/nginx-proxy-manager:latest' image: 'jc21/nginx-proxy-manager:latest'
restart: unless-stopped restart: unless-stopped
ports: ports:
# These ports are in format <host-port>:<container-port> # These ports are in format <host-port>:<container-port>
- '80:80' # Public HTTP Port - '80:80' # Public HTTP Port
@@ -21,7 +22,9 @@ services:
# Add any other Stream port you want to expose # Add any other Stream port you want to expose
# - '21:21' # FTP # - '21:21' # FTP
#environment: environment:
TZ: "Australia/Brisbane"
# Uncomment this if you want to change the location of # Uncomment this if you want to change the location of
# the SQLite DB file within the container # the SQLite DB file within the container
# DB_SQLITE_FILE: "/data/database.sqlite" # DB_SQLITE_FILE: "/data/database.sqlite"
@@ -65,6 +68,7 @@ services:
# Add any other Stream port you want to expose # Add any other Stream port you want to expose
# - '21:21' # FTP # - '21:21' # FTP
environment: environment:
TZ: "Australia/Brisbane"
# Mysql/Maria connection parameters: # Mysql/Maria connection parameters:
DB_MYSQL_HOST: "db" DB_MYSQL_HOST: "db"
DB_MYSQL_PORT: 3306 DB_MYSQL_PORT: 3306
@@ -115,6 +119,7 @@ services:
# Add any other Stream port you want to expose # Add any other Stream port you want to expose
# - '21:21' # FTP # - '21:21' # FTP
environment: environment:
TZ: "Australia/Brisbane"
# Postgres parameters: # Postgres parameters:
DB_POSTGRES_HOST: 'db' DB_POSTGRES_HOST: 'db'
DB_POSTGRES_PORT: '5432' DB_POSTGRES_PORT: '5432'
@@ -173,21 +178,3 @@ After the app is running for the first time, the following will happen:
3. A default admin user will be created 3. A default admin user will be created
This process can take a couple of minutes depending on your machine. This process can take a couple of minutes depending on your machine.
## Default Administrator User
```
Email: admin@example.com
Password: changeme
```
Immediately after logging in with this default user you will be asked to modify your details and change your password. You can change defaults with:
```
environment:
INITIAL_ADMIN_EMAIL: my@example.com
INITIAL_ADMIN_PASSWORD: mypassword1
```

Some files were not shown because too many files have changed in this diff