Mirror of https://github.com/NginxProxyManager/nginx-proxy-manager.git
Synced 2025-12-06 08:16:51 +00:00

Compare commits: v2.13.3 ... ebd9148813 (2 commits)

Commits: ebd9148813, a12553fec7
@@ -1,73 +0,0 @@
{
    "env": {
        "node": true,
        "es6": true
    },
    "extends": [
        "eslint:recommended"
    ],
    "globals": {
        "Atomics": "readonly",
        "SharedArrayBuffer": "readonly"
    },
    "parserOptions": {
        "ecmaVersion": 2018,
        "sourceType": "module"
    },
    "plugins": [
        "align-assignments"
    ],
    "rules": {
        "arrow-parens": [
            "error",
            "always"
        ],
        "indent": [
            "error",
            "tab"
        ],
        "linebreak-style": [
            "error",
            "unix"
        ],
        "quotes": [
            "error",
            "single"
        ],
        "semi": [
            "error",
            "always"
        ],
        "key-spacing": [
            "error",
            {
                "align": "value"
            }
        ],
        "comma-spacing": [
            "error",
            {
                "before": false,
                "after": true
            }
        ],
        "func-call-spacing": [
            "error",
            "never"
        ],
        "keyword-spacing": [
            "error",
            {
                "before": true
            }
        ],
        "no-irregular-whitespace": "error",
        "no-unused-expressions": 0,
        "align-assignments/align-assignments": [
            2,
            {
                "requiresOnly": false
            }
        ]
    }
}
@@ -1,11 +0,0 @@
{
    "printWidth": 320,
    "tabWidth": 4,
    "useTabs": true,
    "semi": true,
    "singleQuote": true,
    "bracketSpacing": true,
    "jsxBracketSameLine": true,
    "trailingComma": "all",
    "proseWrap": "always"
}
@@ -1,9 +1,12 @@
const express = require('express');
const bodyParser = require('body-parser');
const fileUpload = require('express-fileupload');
const compression = require('compression');
const config = require('./lib/config');
const log = require('./logger').express;
import bodyParser from "body-parser";
import compression from "compression";
import express from "express";
import fileUpload from "express-fileupload";
import { isDebugMode } from "./lib/config.js";
import cors from "./lib/express/cors.js";
import jwt from "./lib/express/jwt.js";
import { express as logger } from "./logger.js";
import mainRoutes from "./routes/main.js";

/**
 * App
@@ -11,7 +14,7 @@ const log = require('./logger').express;
const app = express();
app.use(fileUpload());
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({extended: true}));
app.use(bodyParser.urlencoded({ extended: true }));

// Gzip
app.use(compression());
@@ -20,71 +23,70 @@ app.use(compression());
 * General Logging, BEFORE routes
 */

app.disable('x-powered-by');
app.enable('trust proxy', ['loopback', 'linklocal', 'uniquelocal']);
app.enable('strict routing');
app.disable("x-powered-by");
app.enable("trust proxy", ["loopback", "linklocal", "uniquelocal"]);
app.enable("strict routing");

// pretty print JSON when not live
if (config.debug()) {
    app.set('json spaces', 2);
if (isDebugMode()) {
    app.set("json spaces", 2);
}

// CORS for everything
app.use(require('./lib/express/cors'));
app.use(cors);

// General security/cache related headers + server header
app.use(function (req, res, next) {
    let x_frame_options = 'DENY';
app.use((_, res, next) => {
    let x_frame_options = "DENY";

    if (typeof process.env.X_FRAME_OPTIONS !== 'undefined' && process.env.X_FRAME_OPTIONS) {
    if (typeof process.env.X_FRAME_OPTIONS !== "undefined" && process.env.X_FRAME_OPTIONS) {
        x_frame_options = process.env.X_FRAME_OPTIONS;
    }

    res.set({
        'X-XSS-Protection': '1; mode=block',
        'X-Content-Type-Options': 'nosniff',
        'X-Frame-Options': x_frame_options,
        'Cache-Control': 'no-cache, no-store, max-age=0, must-revalidate',
        Pragma: 'no-cache',
        Expires: 0
        "X-XSS-Protection": "1; mode=block",
        "X-Content-Type-Options": "nosniff",
        "X-Frame-Options": x_frame_options,
        "Cache-Control": "no-cache, no-store, max-age=0, must-revalidate",
        Pragma: "no-cache",
        Expires: 0,
    });
    next();
});

app.use(require('./lib/express/jwt')());
app.use('/', require('./routes/main'));
app.use(jwt());
app.use("/", mainRoutes);

// production error handler
// no stacktraces leaked to user
// eslint-disable-next-line
app.use(function (err, req, res, next) {

    let payload = {
app.use((err, req, res, _) => {
    const payload = {
        error: {
            code: err.status,
            message: err.public ? err.message : 'Internal Error'
        }
            message: err.public ? err.message : "Internal Error",
        },
    };

    if (config.debug() || (req.baseUrl + req.path).includes('nginx/certificates')) {
    if (typeof err.message_i18n !== "undefined") {
        payload.error.message_i18n = err.message_i18n;
    }

    if (isDebugMode() || (req.baseUrl + req.path).includes("nginx/certificates")) {
        payload.debug = {
            stack: typeof err.stack !== 'undefined' && err.stack ? err.stack.split('\n') : null,
            previous: err.previous
            stack: typeof err.stack !== "undefined" && err.stack ? err.stack.split("\n") : null,
            previous: err.previous,
        };
    }

    // Not every error is worth logging - but this is good for now until it gets annoying.
    if (typeof err.stack !== 'undefined' && err.stack) {
        if (config.debug()) {
            log.debug(err.stack);
        } else if (typeof err.public == 'undefined' || !err.public) {
            log.warn(err.message);
    if (typeof err.stack !== "undefined" && err.stack) {
        logger.debug(err.stack);
        if (typeof err.public === "undefined" || !err.public) {
            logger.warn(err.message);
        }
    }

    res
        .status(err.status || 500)
        .send(payload);
    res.status(err.status || 500).send(payload);
});

module.exports = app;
export default app;
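A note on the new import style above: `import { express as logger } from "./logger.js"` only works if logger.js exposes named ESM exports per log scope. A minimal sketch of that shape follows; it is illustrative only (the real module presumably wraps the project's actual logging library), but the exported names match the imports used in this diff.

// logger.js — sketch, assuming the named-export shape implied by the imports above
const makeLogger = (scope) => ({
    debug: (...args) => console.debug(`[${scope}]`, ...args),
    info: (...args) => console.info(`[${scope}]`, ...args),
    warn: (...args) => console.warn(`[${scope}]`, ...args),
    error: (...args) => console.error(`[${scope}]`, ...args),
    fatal: (...args) => console.error(`[${scope}] FATAL`, ...args),
});

export const global = makeLogger("global");
export const express = makeLogger("express");
export const access = makeLogger("access");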
backend/biome.json (new file, 91 lines)
@@ -0,0 +1,91 @@
{
    "$schema": "https://biomejs.dev/schemas/2.2.0/schema.json",
    "vcs": {
        "enabled": true,
        "clientKind": "git",
        "useIgnoreFile": true
    },
    "files": {
        "ignoreUnknown": false,
        "includes": [
            "**/*.ts",
            "**/*.tsx",
            "**/*.js",
            "**/*.jsx",
            "!**/dist/**/*"
        ]
    },
    "formatter": {
        "enabled": true,
        "indentStyle": "tab",
        "indentWidth": 4,
        "lineWidth": 120,
        "formatWithErrors": true
    },
    "assist": {
        "actions": {
            "source": {
                "organizeImports": {
                    "level": "on",
                    "options": {
                        "groups": [
                            ":BUN:",
                            ":NODE:",
                            [
                                "npm:*",
                                "npm:*/**"
                            ],
                            ":PACKAGE_WITH_PROTOCOL:",
                            ":URL:",
                            ":PACKAGE:",
                            [
                                "/src/*",
                                "/src/**"
                            ],
                            [
                                "/**"
                            ],
                            [
                                "#*",
                                "#*/**"
                            ],
                            ":PATH:"
                        ]
                    }
                }
            }
        }
    },
    "linter": {
        "enabled": true,
        "rules": {
            "recommended": true,
            "correctness": {
                "useUniqueElementIds": "off"
            },
            "suspicious": {
                "noExplicitAny": "off"
            },
            "performance": {
                "noDelete": "off"
            },
            "nursery": "off",
            "a11y": {
                "useSemanticElements": "off",
                "useValidAnchor": "off"
            },
            "style": {
                "noParameterAssign": "error",
                "useAsConstAssertion": "error",
                "useDefaultParameterLast": "error",
                "useEnumInitializers": "error",
                "useSelfClosingElements": "error",
                "useSingleVarDeclarator": "error",
                "noUnusedTemplateLiteral": "error",
                "useNumberNamespace": "error",
                "noInferrableTypes": "error",
                "noUselessElse": "error"
            }
        }
    }
}
@@ -1,14 +1,19 @@
const config = require('./lib/config');
import knex from "knex";
import {configGet, configHas} from "./lib/config.js";

if (!config.has('database')) {
    throw new Error('Database config does not exist! Please read the instructions: https://nginxproxymanager.com/setup/');
}
const generateDbConfig = () => {
    if (!configHas("database")) {
        throw new Error(
            "Database config does not exist! Please read the instructions: https://nginxproxymanager.com/setup/",
        );
    }

function generateDbConfig() {
    const cfg = config.get('database');
    if (cfg.engine === 'knex-native') {
    const cfg = configGet("database");

    if (cfg.engine === "knex-native") {
        return cfg.knex;
    }

    return {
        client: cfg.engine,
        connection: {
@@ -16,12 +21,12 @@ function generateDbConfig() {
            user: cfg.user,
            password: cfg.password,
            database: cfg.name,
            port: cfg.port
            port: cfg.port,
        },
        migrations: {
            tableName: 'migrations'
        }
            tableName: "migrations",
        },
    };
}
};

module.exports = require('knex')(generateDbConfig());
export default knex(generateDbConfig());
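The generateDbConfig() helper above reads a `database` block from the app config. A sketch of the shape it consumes is below; the key names come from the code in this hunk, while the values are purely illustrative and the `host` field is an assumption (that line sits just above the shown hunk).

// Illustrative config shape only — not the project's documented format.
const exampleDatabaseConfig = {
    engine: "mysql",   // or "knex-native", in which case `knex` below is passed straight through
    host: "localhost", // assumed field; not visible in the hunk shown above
    user: "npm",
    password: "changeme",
    name: "npm",       // mapped to connection.database
    port: 3306,
    // knex: { ... }   // only used when engine === "knex-native"
};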
@@ -1,48 +1,47 @@
#!/usr/bin/env node

const schema = require('./schema');
const logger = require('./logger').global;
import app from "./app.js";
import internalCertificate from "./internal/certificate.js";
import internalIpRanges from "./internal/ip_ranges.js";
import { global as logger } from "./logger.js";
import { migrateUp } from "./migrate.js";
import { getCompiledSchema } from "./schema/index.js";
import setup from "./setup.js";

const IP_RANGES_FETCH_ENABLED = process.env.IP_RANGES_FETCH_ENABLED !== 'false';
const IP_RANGES_FETCH_ENABLED = process.env.IP_RANGES_FETCH_ENABLED !== "false";

async function appStart () {
    const migrate = require('./migrate');
    const setup = require('./setup');
    const app = require('./app');
    const internalCertificate = require('./internal/certificate');
    const internalIpRanges = require('./internal/ip_ranges');

    return migrate.latest()
async function appStart() {
    return migrateUp()
        .then(setup)
        .then(schema.getCompiledSchema)
        .then(getCompiledSchema)
        .then(() => {
            if (IP_RANGES_FETCH_ENABLED) {
                logger.info('IP Ranges fetch is enabled');
                return internalIpRanges.fetch().catch((err) => {
                    logger.error('IP Ranges fetch failed, continuing anyway:', err.message);
                });
            } else {
                logger.info('IP Ranges fetch is disabled by environment variable');
            if (!IP_RANGES_FETCH_ENABLED) {
                logger.info("IP Ranges fetch is disabled by environment variable");
                return;
            }
            logger.info("IP Ranges fetch is enabled");
            return internalIpRanges.fetch().catch((err) => {
                logger.error("IP Ranges fetch failed, continuing anyway:", err.message);
            });
        })
        .then(() => {
            internalCertificate.initTimer();
            internalIpRanges.initTimer();

            const server = app.listen(3000, () => {
                logger.info('Backend PID ' + process.pid + ' listening on port 3000 ...');
                logger.info(`Backend PID ${process.pid} listening on port 3000 ...`);

                process.on('SIGTERM', () => {
                    logger.info('PID ' + process.pid + ' received SIGTERM');
                process.on("SIGTERM", () => {
                    logger.info(`PID ${process.pid} received SIGTERM`);
                    server.close(() => {
                        logger.info('Stopping.');
                        logger.info("Stopping.");
                        process.exit(0);
                    });
                });
            });
        })
        .catch((err) => {
            logger.error(err.message, err);
            logger.error(`Startup Error: ${err.message}`, err);
            setTimeout(appStart, 1000);
        });
}
@@ -50,7 +49,6 @@ async function appStart () {
try {
    appStart();
} catch (err) {
    logger.error(err.message, err);
    logger.fatal(err);
    process.exit(1);
}
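appStart() now opens with migrateUp() instead of requiring ./migrate and calling migrate.latest(). The named export suggests a thin wrapper over knex's standard migration API; a hedged sketch of what migrate.js might export (the real module may do more, such as logging the applied batch):

// migrate.js — minimal sketch, assuming knex's standard migration API
import db from "./db.js";

export const migrateUp = () => db.migrate.latest();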
@@ -1,29 +1,29 @@
|
||||
const _ = require('lodash');
|
||||
const fs = require('node:fs');
|
||||
const batchflow = require('batchflow');
|
||||
const logger = require('../logger').access;
|
||||
const error = require('../lib/error');
|
||||
const utils = require('../lib/utils');
|
||||
const accessListModel = require('../models/access_list');
|
||||
const accessListAuthModel = require('../models/access_list_auth');
|
||||
const accessListClientModel = require('../models/access_list_client');
|
||||
const proxyHostModel = require('../models/proxy_host');
|
||||
const internalAuditLog = require('./audit-log');
|
||||
const internalNginx = require('./nginx');
|
||||
import fs from "node:fs";
|
||||
import batchflow from "batchflow";
|
||||
import _ from "lodash";
|
||||
import errs from "../lib/error.js";
|
||||
import utils from "../lib/utils.js";
|
||||
import { access as logger } from "../logger.js";
|
||||
import accessListModel from "../models/access_list.js";
|
||||
import accessListAuthModel from "../models/access_list_auth.js";
|
||||
import accessListClientModel from "../models/access_list_client.js";
|
||||
import proxyHostModel from "../models/proxy_host.js";
|
||||
import internalAuditLog from "./audit-log.js";
|
||||
import internalNginx from "./nginx.js";
|
||||
|
||||
function omissions () {
|
||||
return ['is_deleted'];
|
||||
}
|
||||
const omissions = () => {
|
||||
return ["is_deleted"];
|
||||
};
|
||||
|
||||
const internalAccessList = {
|
||||
|
||||
/**
|
||||
* @param {Access} access
|
||||
* @param {Object} data
|
||||
* @returns {Promise}
|
||||
*/
|
||||
create: (access, data) => {
|
||||
return access.can('access_lists:create', data)
|
||||
return access
|
||||
.can("access_lists:create", data)
|
||||
.then((/*access_data*/) => {
|
||||
return accessListModel
|
||||
.query()
|
||||
@@ -31,7 +31,7 @@ const internalAccessList = {
|
||||
name: data.name,
|
||||
satisfy_any: data.satisfy_any,
|
||||
pass_auth: data.pass_auth,
|
||||
owner_user_id: access.token.getUserId(1)
|
||||
owner_user_id: access.token.getUserId(1),
|
||||
})
|
||||
.then(utils.omitRow(omissions()));
|
||||
})
|
||||
@@ -42,27 +42,27 @@ const internalAccessList = {
|
||||
|
||||
// Now add the items
|
||||
data.items.map((item) => {
|
||||
promises.push(accessListAuthModel
|
||||
.query()
|
||||
.insert({
|
||||
promises.push(
|
||||
accessListAuthModel.query().insert({
|
||||
access_list_id: row.id,
|
||||
username: item.username,
|
||||
password: item.password
|
||||
})
|
||||
password: item.password,
|
||||
}),
|
||||
);
|
||||
return true;
|
||||
});
|
||||
|
||||
// Now add the clients
|
||||
if (typeof data.clients !== 'undefined' && data.clients) {
|
||||
if (typeof data.clients !== "undefined" && data.clients) {
|
||||
data.clients.map((client) => {
|
||||
promises.push(accessListClientModel
|
||||
.query()
|
||||
.insert({
|
||||
promises.push(
|
||||
accessListClientModel.query().insert({
|
||||
access_list_id: row.id,
|
||||
address: client.address,
|
||||
directive: client.directive
|
||||
})
|
||||
directive: client.directive,
|
||||
}),
|
||||
);
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
@@ -70,28 +70,33 @@ const internalAccessList = {
|
||||
})
|
||||
.then(() => {
|
||||
// re-fetch with expansions
|
||||
return internalAccessList.get(access, {
|
||||
return internalAccessList.get(
|
||||
access,
|
||||
{
|
||||
id: data.id,
|
||||
expand: ['owner', 'items', 'clients', 'proxy_hosts.access_list.[clients,items]']
|
||||
}, true /* <- skip masking */);
|
||||
expand: ["owner", "items", "clients", "proxy_hosts.access_list.[clients,items]"],
|
||||
},
|
||||
true /* <- skip masking */,
|
||||
);
|
||||
})
|
||||
.then((row) => {
|
||||
// Audit log
|
||||
data.meta = _.assign({}, data.meta || {}, row.meta);
|
||||
|
||||
return internalAccessList.build(row)
|
||||
return internalAccessList
|
||||
.build(row)
|
||||
.then(() => {
|
||||
if (parseInt(row.proxy_host_count, 10)) {
|
||||
return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts);
|
||||
if (Number.parseInt(row.proxy_host_count, 10)) {
|
||||
return internalNginx.bulkGenerateConfigs("proxy_host", row.proxy_hosts);
|
||||
}
|
||||
})
|
||||
.then(() => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'created',
|
||||
object_type: 'access-list',
|
||||
action: "created",
|
||||
object_type: "access-list",
|
||||
object_id: row.id,
|
||||
meta: internalAccessList.maskItems(data)
|
||||
meta: internalAccessList.maskItems(data),
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
@@ -109,23 +114,23 @@ const internalAccessList = {
|
||||
* @return {Promise}
|
||||
*/
|
||||
update: (access, data) => {
|
||||
return access.can('access_lists:update', data.id)
|
||||
return access
|
||||
.can("access_lists:update", data.id)
|
||||
.then((/*access_data*/) => {
|
||||
return internalAccessList.get(access, {id: data.id});
|
||||
return internalAccessList.get(access, { id: data.id });
|
||||
})
|
||||
.then((row) => {
|
||||
if (row.id !== data.id) {
|
||||
// Sanity check that something crazy hasn't happened
|
||||
throw new error.InternalValidationError(`Access List could not be updated, IDs do not match: ${row.id} !== ${data.id}`);
|
||||
throw new errs.InternalValidationError(
|
||||
`Access List could not be updated, IDs do not match: ${row.id} !== ${data.id}`,
|
||||
);
|
||||
}
|
||||
})
|
||||
.then(() => {
|
||||
// patch name if specified
|
||||
if (typeof data.name !== 'undefined' && data.name) {
|
||||
return accessListModel
|
||||
.query()
|
||||
.where({id: data.id})
|
||||
.patch({
|
||||
if (typeof data.name !== "undefined" && data.name) {
|
||||
return accessListModel.query().where({ id: data.id }).patch({
|
||||
name: data.name,
|
||||
satisfy_any: data.satisfy_any,
|
||||
pass_auth: data.pass_auth,
|
||||
@@ -134,37 +139,33 @@ const internalAccessList = {
|
||||
})
|
||||
.then(() => {
|
||||
// Check for items and add/update/remove them
|
||||
if (typeof data.items !== 'undefined' && data.items) {
|
||||
if (typeof data.items !== "undefined" && data.items) {
|
||||
const promises = [];
|
||||
const items_to_keep = [];
|
||||
|
||||
data.items.map((item) => {
|
||||
if (item.password) {
|
||||
promises.push(accessListAuthModel
|
||||
.query()
|
||||
.insert({
|
||||
promises.push(
|
||||
accessListAuthModel.query().insert({
|
||||
access_list_id: data.id,
|
||||
username: item.username,
|
||||
password: item.password
|
||||
})
|
||||
password: item.password,
|
||||
}),
|
||||
);
|
||||
} else {
|
||||
// This was supplied with an empty password, which means keep it but don't change the password
|
||||
items_to_keep.push(item.username);
|
||||
}
|
||||
return true;
|
||||
});
|
||||
|
||||
const query = accessListAuthModel
|
||||
.query()
|
||||
.delete()
|
||||
.where('access_list_id', data.id);
|
||||
const query = accessListAuthModel.query().delete().where("access_list_id", data.id);
|
||||
|
||||
if (items_to_keep.length) {
|
||||
query.andWhere('username', 'NOT IN', items_to_keep);
|
||||
query.andWhere("username", "NOT IN", items_to_keep);
|
||||
}
|
||||
|
||||
return query
|
||||
.then(() => {
|
||||
return query.then(() => {
|
||||
// Add new items
|
||||
if (promises.length) {
|
||||
return Promise.all(promises);
|
||||
@@ -174,29 +175,25 @@ const internalAccessList = {
|
||||
})
|
||||
.then(() => {
|
||||
// Check for clients and add/update/remove them
|
||||
if (typeof data.clients !== 'undefined' && data.clients) {
|
||||
if (typeof data.clients !== "undefined" && data.clients) {
|
||||
const promises = [];
|
||||
|
||||
data.clients.map((client) => {
|
||||
if (client.address) {
|
||||
promises.push(accessListClientModel
|
||||
.query()
|
||||
.insert({
|
||||
promises.push(
|
||||
accessListClientModel.query().insert({
|
||||
access_list_id: data.id,
|
||||
address: client.address,
|
||||
directive: client.directive
|
||||
})
|
||||
directive: client.directive,
|
||||
}),
|
||||
);
|
||||
}
|
||||
return true;
|
||||
});
|
||||
|
||||
const query = accessListClientModel
|
||||
.query()
|
||||
.delete()
|
||||
.where('access_list_id', data.id);
|
||||
const query = accessListClientModel.query().delete().where("access_list_id", data.id);
|
||||
|
||||
return query
|
||||
.then(() => {
|
||||
return query.then(() => {
|
||||
// Add new items
|
||||
if (promises.length) {
|
||||
return Promise.all(promises);
|
||||
@@ -207,26 +204,32 @@ const internalAccessList = {
|
||||
.then(() => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'updated',
|
||||
object_type: 'access-list',
|
||||
action: "updated",
|
||||
object_type: "access-list",
|
||||
object_id: data.id,
|
||||
meta: internalAccessList.maskItems(data)
|
||||
meta: internalAccessList.maskItems(data),
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
// re-fetch with expansions
|
||||
return internalAccessList.get(access, {
|
||||
return internalAccessList.get(
|
||||
access,
|
||||
{
|
||||
id: data.id,
|
||||
expand: ['owner', 'items', 'clients', 'proxy_hosts.[certificate,access_list.[clients,items]]']
|
||||
}, true /* <- skip masking */);
|
||||
expand: ["owner", "items", "clients", "proxy_hosts.[certificate,access_list.[clients,items]]"],
|
||||
},
|
||||
true /* <- skip masking */,
|
||||
);
|
||||
})
|
||||
.then((row) => {
|
||||
return internalAccessList.build(row)
|
||||
return internalAccessList
|
||||
.build(row)
|
||||
.then(() => {
|
||||
if (parseInt(row.proxy_host_count, 10)) {
|
||||
return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts);
|
||||
if (Number.parseInt(row.proxy_host_count, 10)) {
|
||||
return internalNginx.bulkGenerateConfigs("proxy_host", row.proxy_hosts);
|
||||
}
|
||||
}).then(internalNginx.reload)
|
||||
})
|
||||
.then(internalNginx.reload)
|
||||
.then(() => {
|
||||
return internalAccessList.maskItems(row);
|
||||
});
|
||||
@@ -243,47 +246,50 @@ const internalAccessList = {
|
||||
* @return {Promise}
|
||||
*/
|
||||
get: (access, data, skip_masking) => {
|
||||
if (typeof data === 'undefined') {
|
||||
data = {};
|
||||
}
|
||||
const thisData = data || {};
|
||||
|
||||
return access.can('access_lists:get', data.id)
|
||||
.then((access_data) => {
|
||||
return access
|
||||
.can("access_lists:get", thisData.id)
|
||||
.then((accessData) => {
|
||||
const query = accessListModel
|
||||
.query()
|
||||
.select('access_list.*', accessListModel.raw('COUNT(proxy_host.id) as proxy_host_count'))
|
||||
.leftJoin('proxy_host', function() {
|
||||
this.on('proxy_host.access_list_id', '=', 'access_list.id')
|
||||
.andOn('proxy_host.is_deleted', '=', 0);
|
||||
.select("access_list.*", accessListModel.raw("COUNT(proxy_host.id) as proxy_host_count"))
|
||||
.leftJoin("proxy_host", function () {
|
||||
this.on("proxy_host.access_list_id", "=", "access_list.id").andOn(
|
||||
"proxy_host.is_deleted",
|
||||
"=",
|
||||
0,
|
||||
);
|
||||
})
|
||||
.where('access_list.is_deleted', 0)
|
||||
.andWhere('access_list.id', data.id)
|
||||
.groupBy('access_list.id')
|
||||
.allowGraph('[owner,items,clients,proxy_hosts.[certificate,access_list.[clients,items]]]')
|
||||
.where("access_list.is_deleted", 0)
|
||||
.andWhere("access_list.id", thisData.id)
|
||||
.groupBy("access_list.id")
|
||||
.allowGraph("[owner,items,clients,proxy_hosts.[certificate,access_list.[clients,items]]]")
|
||||
.first();
|
||||
|
||||
if (access_data.permission_visibility !== 'all') {
|
||||
query.andWhere('access_list.owner_user_id', access.token.getUserId(1));
|
||||
if (accessData.permission_visibility !== "all") {
|
||||
query.andWhere("access_list.owner_user_id", access.token.getUserId(1));
|
||||
}
|
||||
|
||||
if (typeof data.expand !== 'undefined' && data.expand !== null) {
|
||||
query.withGraphFetched(`[${data.expand.join(', ')}]`);
|
||||
if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
|
||||
query.withGraphFetched(`[${thisData.expand.join(", ")}]`);
|
||||
}
|
||||
|
||||
return query.then(utils.omitRow(omissions()));
|
||||
})
|
||||
.then((row) => {
|
||||
let thisRow = row;
|
||||
if (!row || !row.id) {
|
||||
throw new error.ItemNotFoundError(data.id);
|
||||
throw new errs.ItemNotFoundError(thisData.id);
|
||||
}
|
||||
if (!skip_masking && typeof row.items !== 'undefined' && row.items) {
|
||||
row = internalAccessList.maskItems(row);
|
||||
if (!skip_masking && typeof thisRow.items !== "undefined" && thisRow.items) {
|
||||
thisRow = internalAccessList.maskItems(thisRow);
|
||||
}
|
||||
// Custom omissions
|
||||
if (typeof data.omit !== 'undefined' && data.omit !== null) {
|
||||
row = _.omit(row, data.omit);
|
||||
if (typeof data.omit !== "undefined" && data.omit !== null) {
|
||||
thisRow = _.omit(thisRow, data.omit);
|
||||
}
|
||||
return row;
|
||||
return thisRow;
|
||||
});
|
||||
},
|
||||
|
||||
@@ -295,13 +301,14 @@ const internalAccessList = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
delete: (access, data) => {
|
||||
return access.can('access_lists:delete', data.id)
|
||||
return access
|
||||
.can("access_lists:delete", data.id)
|
||||
.then(() => {
|
||||
return internalAccessList.get(access, {id: data.id, expand: ['proxy_hosts', 'items', 'clients']});
|
||||
return internalAccessList.get(access, { id: data.id, expand: ["proxy_hosts", "items", "clients"] });
|
||||
})
|
||||
.then((row) => {
|
||||
if (!row || !row.id) {
|
||||
throw new error.ItemNotFoundError(data.id);
|
||||
throw new errs.ItemNotFoundError(data.id);
|
||||
}
|
||||
|
||||
// 1. update row to be deleted
|
||||
@@ -312,26 +319,27 @@ const internalAccessList = {
|
||||
// 1. update row to be deleted
|
||||
return accessListModel
|
||||
.query()
|
||||
.where('id', row.id)
|
||||
.where("id", row.id)
|
||||
.patch({
|
||||
is_deleted: 1
|
||||
is_deleted: 1,
|
||||
})
|
||||
.then(() => {
|
||||
// 2. update any proxy hosts that were using it (ignoring permissions)
|
||||
if (row.proxy_hosts) {
|
||||
return proxyHostModel
|
||||
.query()
|
||||
.where('access_list_id', '=', row.id)
|
||||
.patch({access_list_id: 0})
|
||||
.where("access_list_id", "=", row.id)
|
||||
.patch({ access_list_id: 0 })
|
||||
.then(() => {
|
||||
// 3. reconfigure those hosts, then reload nginx
|
||||
|
||||
// set the access_list_id to zero for these items
|
||||
row.proxy_hosts.map((_val, idx) => {
|
||||
row.proxy_hosts[idx].access_list_id = 0;
|
||||
return true;
|
||||
});
|
||||
|
||||
return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts);
|
||||
return internalNginx.bulkGenerateConfigs("proxy_host", row.proxy_hosts);
|
||||
})
|
||||
.then(() => {
|
||||
return internalNginx.reload();
|
||||
@@ -351,10 +359,10 @@ const internalAccessList = {
|
||||
.then(() => {
|
||||
// 4. audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'deleted',
|
||||
object_type: 'access-list',
|
||||
action: "deleted",
|
||||
object_type: "access-list",
|
||||
object_id: row.id,
|
||||
meta: _.omit(internalAccessList.maskItems(row), ['is_deleted', 'proxy_hosts'])
|
||||
meta: _.omit(internalAccessList.maskItems(row), ["is_deleted", "proxy_hosts"]),
|
||||
});
|
||||
});
|
||||
})
|
||||
@@ -372,33 +380,37 @@ const internalAccessList = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
getAll: (access, expand, search_query) => {
|
||||
return access.can('access_lists:list')
|
||||
return access
|
||||
.can("access_lists:list")
|
||||
.then((access_data) => {
|
||||
const query = accessListModel
|
||||
.query()
|
||||
.select('access_list.*', accessListModel.raw('COUNT(proxy_host.id) as proxy_host_count'))
|
||||
.leftJoin('proxy_host', function() {
|
||||
this.on('proxy_host.access_list_id', '=', 'access_list.id')
|
||||
.andOn('proxy_host.is_deleted', '=', 0);
|
||||
.select("access_list.*", accessListModel.raw("COUNT(proxy_host.id) as proxy_host_count"))
|
||||
.leftJoin("proxy_host", function () {
|
||||
this.on("proxy_host.access_list_id", "=", "access_list.id").andOn(
|
||||
"proxy_host.is_deleted",
|
||||
"=",
|
||||
0,
|
||||
);
|
||||
})
|
||||
.where('access_list.is_deleted', 0)
|
||||
.groupBy('access_list.id')
|
||||
.allowGraph('[owner,items,clients]')
|
||||
.orderBy('access_list.name', 'ASC');
|
||||
.where("access_list.is_deleted", 0)
|
||||
.groupBy("access_list.id")
|
||||
.allowGraph("[owner,items,clients]")
|
||||
.orderBy("access_list.name", "ASC");
|
||||
|
||||
if (access_data.permission_visibility !== 'all') {
|
||||
query.andWhere('access_list.owner_user_id', access.token.getUserId(1));
|
||||
if (access_data.permission_visibility !== "all") {
|
||||
query.andWhere("access_list.owner_user_id", access.token.getUserId(1));
|
||||
}
|
||||
|
||||
// Query is used for searching
|
||||
if (typeof search_query === 'string') {
|
||||
if (typeof search_query === "string") {
|
||||
query.where(function () {
|
||||
this.where('name', 'like', `%${search_query}%`);
|
||||
this.where("name", "like", `%${search_query}%`);
|
||||
});
|
||||
}
|
||||
|
||||
if (typeof expand !== 'undefined' && expand !== null) {
|
||||
query.withGraphFetched(`[${expand.join(', ')}]`);
|
||||
if (typeof expand !== "undefined" && expand !== null) {
|
||||
query.withGraphFetched(`[${expand.join(", ")}]`);
|
||||
}
|
||||
|
||||
return query.then(utils.omitRows(omissions()));
|
||||
@@ -406,9 +418,10 @@ const internalAccessList = {
|
||||
.then((rows) => {
|
||||
if (rows) {
|
||||
rows.map((row, idx) => {
|
||||
if (typeof row.items !== 'undefined' && row.items) {
|
||||
if (typeof row.items !== "undefined" && row.items) {
|
||||
rows[idx] = internalAccessList.maskItems(row);
|
||||
}
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
@@ -424,18 +437,14 @@ const internalAccessList = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
getCount: (user_id, visibility) => {
|
||||
const query = accessListModel
|
||||
.query()
|
||||
.count('id as count')
|
||||
.where('is_deleted', 0);
|
||||
const query = accessListModel.query().count("id as count").where("is_deleted", 0);
|
||||
|
||||
if (visibility !== 'all') {
|
||||
query.andWhere('owner_user_id', user_id);
|
||||
if (visibility !== "all") {
|
||||
query.andWhere("owner_user_id", user_id);
|
||||
}
|
||||
|
||||
return query.first()
|
||||
.then((row) => {
|
||||
return parseInt(row.count, 10);
|
||||
return query.first().then((row) => {
|
||||
return Number.parseInt(row.count, 10);
|
||||
});
|
||||
},
|
||||
|
||||
@@ -444,18 +453,19 @@ const internalAccessList = {
|
||||
* @returns {Object}
|
||||
*/
|
||||
maskItems: (list) => {
|
||||
if (list && typeof list.items !== 'undefined') {
|
||||
if (list && typeof list.items !== "undefined") {
|
||||
list.items.map((val, idx) => {
|
||||
let repeat_for = 8;
|
||||
let first_char = '*';
|
||||
let first_char = "*";
|
||||
|
||||
if (typeof val.password !== 'undefined' && val.password) {
|
||||
if (typeof val.password !== "undefined" && val.password) {
|
||||
repeat_for = val.password.length - 1;
|
||||
first_char = val.password.charAt(0);
|
||||
}
|
||||
|
||||
list.items[idx].hint = first_char + ('*').repeat(repeat_for);
|
||||
list.items[idx].password = '';
|
||||
list.items[idx].hint = first_char + "*".repeat(repeat_for);
|
||||
list.items[idx].password = "";
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
@@ -493,25 +503,28 @@ const internalAccessList = {
|
||||
|
||||
// 2. create empty access file
|
||||
try {
|
||||
fs.writeFileSync(htpasswd_file, '', {encoding: 'utf8'});
|
||||
fs.writeFileSync(htpasswd_file, "", { encoding: "utf8" });
|
||||
resolve(htpasswd_file);
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
})
|
||||
.then((htpasswd_file) => {
|
||||
}).then((htpasswd_file) => {
|
||||
// 3. generate password for each user
|
||||
if (list.items.length) {
|
||||
return new Promise((resolve, reject) => {
|
||||
batchflow(list.items).sequential()
|
||||
batchflow(list.items)
|
||||
.sequential()
|
||||
.each((_i, item, next) => {
|
||||
if (typeof item.password !== 'undefined' && item.password.length) {
|
||||
if (typeof item.password !== "undefined" && item.password.length) {
|
||||
logger.info(`Adding: ${item.username}`);
|
||||
|
||||
utils.execFile('openssl', ['passwd', '-apr1', item.password])
|
||||
utils
|
||||
.execFile("openssl", ["passwd", "-apr1", item.password])
|
||||
.then((res) => {
|
||||
try {
|
||||
fs.appendFileSync(htpasswd_file, `${item.username}:${res}\n`, {encoding: 'utf8'});
|
||||
fs.appendFileSync(htpasswd_file, `${item.username}:${res}\n`, {
|
||||
encoding: "utf8",
|
||||
});
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
@@ -534,7 +547,7 @@ const internalAccessList = {
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
module.exports = internalAccessList;
|
||||
export default internalAccessList;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
const error = require('../lib/error');
|
||||
const auditLogModel = require('../models/audit-log');
|
||||
const {castJsonIfNeed} = require('../lib/helpers');
|
||||
import errs from "../lib/error.js";
|
||||
import { castJsonIfNeed } from "../lib/helpers.js";
|
||||
import auditLogModel from "../models/audit-log.js";
|
||||
|
||||
const internalAuditLog = {
|
||||
|
||||
@@ -13,24 +13,23 @@ const internalAuditLog = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
getAll: (access, expand, search_query) => {
|
||||
return access.can('auditlog:list')
|
||||
.then(() => {
|
||||
let query = auditLogModel
|
||||
return access.can("auditlog:list").then(() => {
|
||||
const query = auditLogModel
|
||||
.query()
|
||||
.orderBy('created_on', 'DESC')
|
||||
.orderBy('id', 'DESC')
|
||||
.orderBy("created_on", "DESC")
|
||||
.orderBy("id", "DESC")
|
||||
.limit(100)
|
||||
.allowGraph('[user]');
|
||||
.allowGraph("[user]");
|
||||
|
||||
// Query is used for searching
|
||||
if (typeof search_query === 'string' && search_query.length > 0) {
|
||||
if (typeof search_query === "string" && search_query.length > 0) {
|
||||
query.where(function () {
|
||||
this.where(castJsonIfNeed('meta'), 'like', '%' + search_query + '%');
|
||||
this.where(castJsonIfNeed("meta"), "like", `%${search_query}`);
|
||||
});
|
||||
}
|
||||
|
||||
if (typeof expand !== 'undefined' && expand !== null) {
|
||||
query.withGraphFetched('[' + expand.join(', ') + ']');
|
||||
if (typeof expand !== "undefined" && expand !== null) {
|
||||
query.withGraphFetched(`[${expand.join(", ")}]`);
|
||||
}
|
||||
|
||||
return query;
|
||||
@@ -54,26 +53,26 @@ const internalAuditLog = {
|
||||
add: (access, data) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
// Default the user id
|
||||
if (typeof data.user_id === 'undefined' || !data.user_id) {
|
||||
if (typeof data.user_id === "undefined" || !data.user_id) {
|
||||
data.user_id = access.token.getUserId(1);
|
||||
}
|
||||
|
||||
if (typeof data.action === 'undefined' || !data.action) {
|
||||
reject(new error.InternalValidationError('Audit log entry must contain an Action'));
|
||||
if (typeof data.action === "undefined" || !data.action) {
|
||||
reject(new errs.InternalValidationError("Audit log entry must contain an Action"));
|
||||
} else {
|
||||
// Make sure at least 1 of the IDs are set and action
|
||||
resolve(auditLogModel
|
||||
.query()
|
||||
.insert({
|
||||
resolve(
|
||||
auditLogModel.query().insert({
|
||||
user_id: data.user_id,
|
||||
action: data.action,
|
||||
object_type: data.object_type || '',
|
||||
object_type: data.object_type || "",
|
||||
object_id: data.object_id || 0,
|
||||
meta: data.meta || {}
|
||||
}));
|
||||
meta: data.meta || {},
|
||||
}),
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
module.exports = internalAuditLog;
|
||||
export default internalAuditLog;
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,93 +1,90 @@
|
||||
const _ = require('lodash');
|
||||
const error = require('../lib/error');
|
||||
const utils = require('../lib/utils');
|
||||
const deadHostModel = require('../models/dead_host');
|
||||
const internalHost = require('./host');
|
||||
const internalNginx = require('./nginx');
|
||||
const internalAuditLog = require('./audit-log');
|
||||
const internalCertificate = require('./certificate');
|
||||
const {castJsonIfNeed} = require('../lib/helpers');
|
||||
import _ from "lodash";
|
||||
import errs from "../lib/error.js";
|
||||
import { castJsonIfNeed } from "../lib/helpers.js";
|
||||
import utils from "../lib/utils.js";
|
||||
import deadHostModel from "../models/dead_host.js";
|
||||
import internalAuditLog from "./audit-log.js";
|
||||
import internalCertificate from "./certificate.js";
|
||||
import internalHost from "./host.js";
|
||||
import internalNginx from "./nginx.js";
|
||||
|
||||
function omissions () {
|
||||
return ['is_deleted'];
|
||||
}
|
||||
const omissions = () => {
|
||||
return ["is_deleted"];
|
||||
};
|
||||
|
||||
const internalDeadHost = {
|
||||
|
||||
/**
|
||||
* @param {Access} access
|
||||
* @param {Object} data
|
||||
* @returns {Promise}
|
||||
*/
|
||||
create: (access, data) => {
|
||||
let create_certificate = data.certificate_id === 'new';
|
||||
const createCertificate = data.certificate_id === "new";
|
||||
|
||||
if (create_certificate) {
|
||||
if (createCertificate) {
|
||||
delete data.certificate_id;
|
||||
}
|
||||
|
||||
return access.can('dead_hosts:create', data)
|
||||
return access
|
||||
.can("dead_hosts:create", data)
|
||||
.then((/*access_data*/) => {
|
||||
// Get a list of the domain names and check each of them against existing records
|
||||
let domain_name_check_promises = [];
|
||||
const domain_name_check_promises = [];
|
||||
|
||||
data.domain_names.map(function (domain_name) {
|
||||
data.domain_names.map((domain_name) => {
|
||||
domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name));
|
||||
return true;
|
||||
});
|
||||
|
||||
return Promise.all(domain_name_check_promises)
|
||||
.then((check_results) => {
|
||||
check_results.map(function (result) {
|
||||
return Promise.all(domain_name_check_promises).then((check_results) => {
|
||||
check_results.map((result) => {
|
||||
if (result.is_taken) {
|
||||
throw new error.ValidationError(result.hostname + ' is already in use');
|
||||
throw new errs.ValidationError(`${result.hostname} is already in use`);
|
||||
}
|
||||
return true;
|
||||
});
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
// At this point the domains should have been checked
|
||||
data.owner_user_id = access.token.getUserId(1);
|
||||
data = internalHost.cleanSslHstsData(data);
|
||||
const thisData = internalHost.cleanSslHstsData(data);
|
||||
|
||||
// Fix for db field not having a default value
|
||||
// for this optional field.
|
||||
if (typeof data.advanced_config === 'undefined') {
|
||||
data.advanced_config = '';
|
||||
if (typeof data.advanced_config === "undefined") {
|
||||
thisData.advanced_config = "";
|
||||
}
|
||||
|
||||
return deadHostModel
|
||||
.query()
|
||||
.insertAndFetch(data)
|
||||
.then(utils.omitRow(omissions()));
|
||||
return deadHostModel.query().insertAndFetch(thisData).then(utils.omitRow(omissions()));
|
||||
})
|
||||
.then((row) => {
|
||||
if (create_certificate) {
|
||||
return internalCertificate.createQuickCertificate(access, data)
|
||||
if (createCertificate) {
|
||||
return internalCertificate
|
||||
.createQuickCertificate(access, data)
|
||||
.then((cert) => {
|
||||
// update host with cert id
|
||||
return internalDeadHost.update(access, {
|
||||
id: row.id,
|
||||
certificate_id: cert.id
|
||||
certificate_id: cert.id,
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
return row;
|
||||
});
|
||||
} else {
|
||||
return row;
|
||||
}
|
||||
return row;
|
||||
})
|
||||
.then((row) => {
|
||||
// re-fetch with cert
|
||||
return internalDeadHost.get(access, {
|
||||
id: row.id,
|
||||
expand: ['certificate', 'owner']
|
||||
expand: ["certificate", "owner"],
|
||||
});
|
||||
})
|
||||
.then((row) => {
|
||||
// Configure nginx
|
||||
return internalNginx.configure(deadHostModel, 'dead_host', row)
|
||||
.then(() => {
|
||||
return internalNginx.configure(deadHostModel, "dead_host", row).then(() => {
|
||||
return row;
|
||||
});
|
||||
})
|
||||
@@ -95,11 +92,12 @@ const internalDeadHost = {
|
||||
data.meta = _.assign({}, data.meta || {}, row.meta);
|
||||
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'created',
|
||||
object_type: 'dead-host',
|
||||
return internalAuditLog
|
||||
.add(access, {
|
||||
action: "created",
|
||||
object_type: "dead-host",
|
||||
object_id: row.id,
|
||||
meta: data
|
||||
meta: data,
|
||||
})
|
||||
.then(() => {
|
||||
return row;
|
||||
@@ -114,76 +112,86 @@ const internalDeadHost = {
|
||||
* @return {Promise}
|
||||
*/
|
||||
update: (access, data) => {
|
||||
let create_certificate = data.certificate_id === 'new';
|
||||
let thisData = data;
|
||||
const createCertificate = thisData.certificate_id === "new";
|
||||
|
||||
if (create_certificate) {
|
||||
delete data.certificate_id;
|
||||
if (createCertificate) {
|
||||
delete thisData.certificate_id;
|
||||
}
|
||||
|
||||
return access.can('dead_hosts:update', data.id)
|
||||
return access
|
||||
.can("dead_hosts:update", thisData.id)
|
||||
.then((/*access_data*/) => {
|
||||
// Get a list of the domain names and check each of them against existing records
|
||||
let domain_name_check_promises = [];
|
||||
const domain_name_check_promises = [];
|
||||
|
||||
if (typeof data.domain_names !== 'undefined') {
|
||||
data.domain_names.map(function (domain_name) {
|
||||
domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, 'dead', data.id));
|
||||
if (typeof thisData.domain_names !== "undefined") {
|
||||
thisData.domain_names.map((domain_name) => {
|
||||
domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, "dead", data.id));
|
||||
return true;
|
||||
});
|
||||
|
||||
return Promise.all(domain_name_check_promises)
|
||||
.then((check_results) => {
|
||||
check_results.map(function (result) {
|
||||
return Promise.all(domain_name_check_promises).then((check_results) => {
|
||||
check_results.map((result) => {
|
||||
if (result.is_taken) {
|
||||
throw new error.ValidationError(result.hostname + ' is already in use');
|
||||
throw new errs.ValidationError(`${result.hostname} is already in use`);
|
||||
}
|
||||
return true;
|
||||
});
|
||||
});
|
||||
}
|
||||
})
|
||||
.then(() => {
|
||||
return internalDeadHost.get(access, {id: data.id});
|
||||
return internalDeadHost.get(access, { id: thisData.id });
|
||||
})
|
||||
.then((row) => {
|
||||
if (row.id !== data.id) {
|
||||
if (row.id !== thisData.id) {
|
||||
// Sanity check that something crazy hasn't happened
|
||||
throw new error.InternalValidationError('404 Host could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
|
||||
throw new errs.InternalValidationError(
|
||||
`404 Host could not be updated, IDs do not match: ${row.id} !== ${thisData.id}`,
|
||||
);
|
||||
}
|
||||
|
||||
if (create_certificate) {
|
||||
return internalCertificate.createQuickCertificate(access, {
|
||||
domain_names: data.domain_names || row.domain_names,
|
||||
meta: _.assign({}, row.meta, data.meta)
|
||||
if (createCertificate) {
|
||||
return internalCertificate
|
||||
.createQuickCertificate(access, {
|
||||
domain_names: thisData.domain_names || row.domain_names,
|
||||
meta: _.assign({}, row.meta, thisData.meta),
|
||||
})
|
||||
.then((cert) => {
|
||||
// update host with cert id
|
||||
data.certificate_id = cert.id;
|
||||
thisData.certificate_id = cert.id;
|
||||
})
|
||||
.then(() => {
|
||||
return row;
|
||||
});
|
||||
} else {
|
||||
return row;
|
||||
}
|
||||
return row;
|
||||
})
|
||||
.then((row) => {
|
||||
// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
|
||||
data = _.assign({}, {
|
||||
domain_names: row.domain_names
|
||||
}, data);
|
||||
thisData = _.assign(
|
||||
{},
|
||||
{
|
||||
domain_names: row.domain_names,
|
||||
},
|
||||
data,
|
||||
);
|
||||
|
||||
data = internalHost.cleanSslHstsData(data, row);
|
||||
thisData = internalHost.cleanSslHstsData(thisData, row);
|
||||
|
||||
return deadHostModel
|
||||
.query()
|
||||
.where({id: data.id})
|
||||
.patch(data)
|
||||
.where({ id: thisData.id })
|
||||
.patch(thisData)
|
||||
.then((saved_row) => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'updated',
|
||||
object_type: 'dead-host',
|
||||
return internalAuditLog
|
||||
.add(access, {
|
||||
action: "updated",
|
||||
object_type: "dead-host",
|
||||
object_id: row.id,
|
||||
meta: data
|
||||
meta: thisData,
|
||||
})
|
||||
.then(() => {
|
||||
return _.omit(saved_row, omissions());
|
||||
@@ -191,17 +199,16 @@ const internalDeadHost = {
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
return internalDeadHost.get(access, {
|
||||
id: data.id,
|
||||
expand: ['owner', 'certificate']
|
||||
return internalDeadHost
|
||||
.get(access, {
|
||||
id: thisData.id,
|
||||
expand: ["owner", "certificate"],
|
||||
})
|
||||
.then((row) => {
|
||||
// Configure nginx
|
||||
return internalNginx.configure(deadHostModel, 'dead_host', row)
|
||||
.then((new_meta) => {
|
||||
return internalNginx.configure(deadHostModel, "dead_host", row).then((new_meta) => {
|
||||
row.meta = new_meta;
|
||||
row = internalHost.cleanRowCertificateMeta(row);
|
||||
return _.omit(row, omissions());
|
||||
return _.omit(internalHost.cleanRowCertificateMeta(row), omissions());
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -216,36 +223,35 @@ const internalDeadHost = {
|
||||
* @return {Promise}
|
||||
*/
|
||||
get: (access, data) => {
|
||||
if (typeof data === 'undefined') {
|
||||
data = {};
|
||||
}
|
||||
const thisData = data || {};
|
||||
|
||||
return access.can('dead_hosts:get', data.id)
|
||||
return access
|
||||
.can("dead_hosts:get", thisData.id)
|
||||
.then((access_data) => {
|
||||
let query = deadHostModel
|
||||
const query = deadHostModel
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.andWhere('id', data.id)
|
||||
.allowGraph('[owner,certificate]')
|
||||
.where("is_deleted", 0)
|
||||
.andWhere("id", dthisDataata.id)
|
||||
.allowGraph("[owner,certificate]")
|
||||
.first();
|
||||
|
||||
if (access_data.permission_visibility !== 'all') {
|
||||
query.andWhere('owner_user_id', access.token.getUserId(1));
|
||||
if (access_data.permission_visibility !== "all") {
|
||||
query.andWhere("owner_user_id", access.token.getUserId(1));
|
||||
}
|
||||
|
||||
if (typeof data.expand !== 'undefined' && data.expand !== null) {
|
||||
query.withGraphFetched('[' + data.expand.join(', ') + ']');
|
||||
if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
|
||||
query.withGraphFetched(`[${data.expand.join(", ")}]`);
|
||||
}
|
||||
|
||||
return query.then(utils.omitRow(omissions()));
|
||||
})
|
||||
.then((row) => {
|
||||
if (!row || !row.id) {
|
||||
throw new error.ItemNotFoundError(data.id);
|
||||
throw new errs.ItemNotFoundError(thisData.id);
|
||||
}
|
||||
// Custom omissions
|
||||
if (typeof data.omit !== 'undefined' && data.omit !== null) {
|
||||
row = _.omit(row, data.omit);
|
||||
if (typeof thisData.omit !== "undefined" && thisData.omit !== null) {
|
||||
return _.omit(row, thisData.omit);
|
||||
}
|
||||
return row;
|
||||
});
|
||||
@@ -259,35 +265,35 @@ const internalDeadHost = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
delete: (access, data) => {
|
||||
return access.can('dead_hosts:delete', data.id)
|
||||
return access
|
||||
.can("dead_hosts:delete", data.id)
|
||||
.then(() => {
|
||||
return internalDeadHost.get(access, {id: data.id});
|
||||
return internalDeadHost.get(access, { id: data.id });
|
||||
})
|
||||
.then((row) => {
|
||||
if (!row || !row.id) {
|
||||
throw new error.ItemNotFoundError(data.id);
|
||||
throw new errs.ItemNotFoundError(data.id);
|
||||
}
|
||||
|
||||
return deadHostModel
|
||||
.query()
|
||||
.where('id', row.id)
|
||||
.where("id", row.id)
|
||||
.patch({
|
||||
is_deleted: 1
|
||||
is_deleted: 1,
|
||||
})
|
||||
.then(() => {
|
||||
// Delete Nginx Config
|
||||
return internalNginx.deleteConfig('dead_host', row)
|
||||
.then(() => {
|
||||
return internalNginx.deleteConfig("dead_host", row).then(() => {
|
||||
return internalNginx.reload();
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'deleted',
|
||||
object_type: 'dead-host',
|
||||
action: "deleted",
|
||||
object_type: "dead-host",
|
||||
object_id: row.id,
|
||||
meta: _.omit(row, omissions())
|
||||
meta: _.omit(row, omissions()),
|
||||
});
|
||||
});
|
||||
})
|
||||
@@ -304,39 +310,41 @@ const internalDeadHost = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
enable: (access, data) => {
|
||||
return access.can('dead_hosts:update', data.id)
|
||||
return access
|
||||
.can("dead_hosts:update", data.id)
|
||||
.then(() => {
|
||||
return internalDeadHost.get(access, {
|
||||
id: data.id,
|
||||
expand: ['certificate', 'owner']
|
||||
expand: ["certificate", "owner"],
|
||||
});
|
||||
})
|
||||
.then((row) => {
|
||||
if (!row || !row.id) {
|
||||
throw new error.ItemNotFoundError(data.id);
|
||||
} else if (row.enabled) {
|
||||
throw new error.ValidationError('Host is already enabled');
|
||||
throw new errs.ItemNotFoundError(data.id);
|
||||
}
|
||||
if (row.enabled) {
|
||||
throw new errs.ValidationError("Host is already enabled");
|
||||
}
|
||||
|
||||
row.enabled = 1;
|
||||
|
||||
return deadHostModel
|
||||
.query()
|
||||
.where('id', row.id)
|
||||
.where("id", row.id)
|
||||
.patch({
|
||||
enabled: 1
|
||||
enabled: 1,
|
||||
})
|
||||
.then(() => {
|
||||
// Configure nginx
|
||||
return internalNginx.configure(deadHostModel, 'dead_host', row);
|
||||
return internalNginx.configure(deadHostModel, "dead_host", row);
|
||||
})
|
||||
.then(() => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'enabled',
|
||||
object_type: 'dead-host',
|
||||
action: "enabled",
|
||||
object_type: "dead-host",
|
||||
object_id: row.id,
|
||||
meta: _.omit(row, omissions())
|
||||
meta: _.omit(row, omissions()),
|
||||
});
|
||||
});
|
||||
})
|
||||
@@ -353,39 +361,40 @@ const internalDeadHost = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
disable: (access, data) => {
|
||||
return access.can('dead_hosts:update', data.id)
|
||||
return access
|
||||
.can("dead_hosts:update", data.id)
|
||||
.then(() => {
|
||||
return internalDeadHost.get(access, {id: data.id});
|
||||
return internalDeadHost.get(access, { id: data.id });
|
||||
})
|
||||
.then((row) => {
|
||||
if (!row || !row.id) {
|
||||
throw new error.ItemNotFoundError(data.id);
|
||||
} else if (!row.enabled) {
|
||||
throw new error.ValidationError('Host is already disabled');
|
||||
throw new errs.ItemNotFoundError(data.id);
|
||||
}
|
||||
if (!row.enabled) {
|
||||
throw new errs.ValidationError("Host is already disabled");
|
||||
}
|
||||
|
||||
row.enabled = 0;
|
||||
|
||||
return deadHostModel
|
||||
.query()
|
||||
.where('id', row.id)
|
||||
.where("id", row.id)
|
||||
.patch({
|
||||
enabled: 0
|
||||
enabled: 0,
|
||||
})
|
||||
.then(() => {
|
||||
// Delete Nginx Config
|
||||
return internalNginx.deleteConfig('dead_host', row)
|
||||
.then(() => {
|
||||
return internalNginx.deleteConfig("dead_host", row).then(() => {
|
||||
return internalNginx.reload();
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'disabled',
|
||||
object_type: 'dead-host',
|
||||
action: "disabled",
|
||||
object_type: "dead-host",
|
||||
object_id: row.id,
|
||||
meta: _.omit(row, omissions())
|
||||
meta: _.omit(row, omissions()),
|
||||
});
|
||||
});
|
||||
})
|
||||
@@ -403,34 +412,35 @@ const internalDeadHost = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
getAll: (access, expand, search_query) => {
|
||||
return access.can('dead_hosts:list')
|
||||
return access
|
||||
.can("dead_hosts:list")
|
||||
.then((access_data) => {
|
||||
let query = deadHostModel
|
||||
const query = deadHostModel
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.groupBy('id')
|
||||
.allowGraph('[owner,certificate]')
|
||||
.orderBy(castJsonIfNeed('domain_names'), 'ASC');
|
||||
.where("is_deleted", 0)
|
||||
.groupBy("id")
|
||||
.allowGraph("[owner,certificate]")
|
||||
.orderBy(castJsonIfNeed("domain_names"), "ASC");
|
||||
|
||||
if (access_data.permission_visibility !== 'all') {
|
||||
query.andWhere('owner_user_id', access.token.getUserId(1));
|
||||
if (access_data.permission_visibility !== "all") {
|
||||
query.andWhere("owner_user_id", access.token.getUserId(1));
|
||||
}
|
||||
|
||||
// Query is used for searching
|
||||
if (typeof search_query === 'string' && search_query.length > 0) {
|
||||
if (typeof search_query === "string" && search_query.length > 0) {
|
||||
query.where(function () {
|
||||
this.where(castJsonIfNeed('domain_names'), 'like', '%' + search_query + '%');
|
||||
this.where(castJsonIfNeed("domain_names"), "like", `%${search_query}%`);
|
||||
});
|
||||
}
|
||||
|
||||
if (typeof expand !== 'undefined' && expand !== null) {
|
||||
query.withGraphFetched('[' + expand.join(', ') + ']');
|
||||
if (typeof expand !== "undefined" && expand !== null) {
|
||||
query.withGraphFetched(`[${expand.join(", ")}]`);
|
||||
}
|
||||
|
||||
return query.then(utils.omitRows(omissions()));
|
||||
})
|
||||
.then((rows) => {
|
||||
if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
|
||||
if (typeof expand !== "undefined" && expand !== null && expand.indexOf("certificate") !== -1) {
|
||||
return internalHost.cleanAllRowsCertificateMeta(rows);
|
||||
}
|
||||
|
||||
@@ -446,20 +456,16 @@ const internalDeadHost = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
getCount: (user_id, visibility) => {
|
||||
let query = deadHostModel
|
||||
.query()
|
||||
.count('id as count')
|
||||
.where('is_deleted', 0);
|
||||
const query = deadHostModel.query().count("id as count").where("is_deleted", 0);
|
||||
|
||||
if (visibility !== 'all') {
|
||||
query.andWhere('owner_user_id', user_id);
|
||||
if (visibility !== "all") {
|
||||
query.andWhere("owner_user_id", user_id);
|
||||
}
|
||||
|
||||
return query.first()
|
||||
.then((row) => {
|
||||
return parseInt(row.count, 10);
|
||||
return query.first().then((row) => {
|
||||
return Number.parseInt(row.count, 10);
|
||||
});
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
module.exports = internalDeadHost;
|
||||
export default internalDeadHost;
|
||||
|
||||
@@ -1,11 +1,10 @@
|
||||
const _ = require('lodash');
|
||||
const proxyHostModel = require('../models/proxy_host');
|
||||
const redirectionHostModel = require('../models/redirection_host');
|
||||
const deadHostModel = require('../models/dead_host');
|
||||
const {castJsonIfNeed} = require('../lib/helpers');
|
||||
import _ from "lodash";
|
||||
import { castJsonIfNeed } from "../lib/helpers.js";
|
||||
import deadHostModel from "../models/dead_host.js";
|
||||
import proxyHostModel from "../models/proxy_host.js";
|
||||
import redirectionHostModel from "../models/redirection_host.js";

const internalHost = {

/**
* Makes sure that the ssl_* and hsts_* fields play nicely together.
* ie: if there is no cert, then force_ssl is off.
@@ -15,25 +14,23 @@ const internalHost = {
* @param {object} [existing_data]
* @returns {object}
*/
cleanSslHstsData: function (data, existing_data) {
existing_data = existing_data === undefined ? {} : existing_data;
cleanSslHstsData: (data, existingData) => {
const combinedData = _.assign({}, existingData || {}, data);

const combined_data = _.assign({}, existing_data, data);

if (!combined_data.certificate_id) {
combined_data.ssl_forced = false;
combined_data.http2_support = false;
if (!combinedData.certificate_id) {
combinedData.ssl_forced = false;
combinedData.http2_support = false;
}

if (!combined_data.ssl_forced) {
combined_data.hsts_enabled = false;
if (!combinedData.ssl_forced) {
combinedData.hsts_enabled = false;
}

if (!combined_data.hsts_enabled) {
combined_data.hsts_subdomains = false;
if (!combinedData.hsts_enabled) {
combinedData.hsts_subdomains = false;
}

return combined_data;
return combinedData;
},
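cleanSslHstsData enforces a one-way cascade: no certificate means no forced SSL and no HTTP/2, no forced SSL means no HSTS, and no HSTS means no HSTS for subdomains. A standalone restatement of that cascade with a worked example (hypothetical field values; the real function also merges the existing row's data first):

// Illustrative sketch of the cascade above, without lodash or the existing-row merge.
const cascade = (d) => {
	const out = { ...d };
	if (!out.certificate_id) {
		out.ssl_forced = false;
		out.http2_support = false;
	}
	if (!out.ssl_forced) out.hsts_enabled = false;
	if (!out.hsts_enabled) out.hsts_subdomains = false;
	return out;
};

// A host that lost its certificate cannot keep its force-SSL/HSTS flags:
console.log(cascade({ certificate_id: 0, ssl_forced: true, hsts_enabled: true, hsts_subdomains: true }));
// -> ssl_forced, http2_support, hsts_enabled and hsts_subdomains all end up false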
|
||||
/**
|
||||
@@ -42,11 +39,12 @@ const internalHost = {
|
||||
* @param {Array} rows
|
||||
* @returns {Array}
|
||||
*/
|
||||
cleanAllRowsCertificateMeta: function (rows) {
|
||||
rows.map(function (row, idx) {
|
||||
if (typeof rows[idx].certificate !== 'undefined' && rows[idx].certificate) {
|
||||
cleanAllRowsCertificateMeta: (rows) => {
|
||||
rows.map((_, idx) => {
|
||||
if (typeof rows[idx].certificate !== "undefined" && rows[idx].certificate) {
|
||||
rows[idx].certificate.meta = {};
|
||||
}
|
||||
return true;
|
||||
});
|
||||
|
||||
return rows;
|
||||
@@ -58,8 +56,8 @@ const internalHost = {
|
||||
* @param {Object} row
|
||||
* @returns {Object}
|
||||
*/
|
||||
cleanRowCertificateMeta: function (row) {
|
||||
if (typeof row.certificate !== 'undefined' && row.certificate) {
|
||||
cleanRowCertificateMeta: (row) => {
|
||||
if (typeof row.certificate !== "undefined" && row.certificate) {
|
||||
row.certificate.meta = {};
|
||||
}
|
||||
|
||||
@@ -73,26 +71,19 @@ const internalHost = {
|
||||
* @param {Array} domain_names
|
||||
* @returns {Promise}
|
||||
*/
|
||||
getHostsWithDomains: function (domain_names) {
|
||||
getHostsWithDomains: (domain_names) => {
|
||||
const promises = [
|
||||
proxyHostModel
|
||||
.query()
|
||||
.where('is_deleted', 0),
|
||||
redirectionHostModel
|
||||
.query()
|
||||
.where('is_deleted', 0),
|
||||
deadHostModel
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
proxyHostModel.query().where("is_deleted", 0),
|
||||
redirectionHostModel.query().where("is_deleted", 0),
|
||||
deadHostModel.query().where("is_deleted", 0),
|
||||
];
|
||||
|
||||
return Promise.all(promises)
|
||||
.then((promises_results) => {
|
||||
let response_object = {
|
||||
return Promise.all(promises).then((promises_results) => {
|
||||
const response_object = {
|
||||
total_count: 0,
|
||||
dead_hosts: [],
|
||||
proxy_hosts: [],
|
||||
redirection_hosts: []
|
||||
redirection_hosts: [],
|
||||
};
|
||||
|
||||
if (promises_results[0]) {
|
||||
@@ -103,7 +94,10 @@ const internalHost = {
|
||||
|
||||
if (promises_results[1]) {
|
||||
// Redirection Hosts
|
||||
response_object.redirection_hosts = internalHost._getHostsWithDomains(promises_results[1], domain_names);
|
||||
response_object.redirection_hosts = internalHost._getHostsWithDomains(
|
||||
promises_results[1],
|
||||
domain_names,
|
||||
);
|
||||
response_object.total_count += response_object.redirection_hosts.length;
|
||||
}
|
||||
|
||||
@@ -125,50 +119,67 @@ const internalHost = {
|
||||
* @param {Integer} [ignore_id] Must be supplied if type was also supplied
|
||||
* @returns {Promise}
|
||||
*/
|
||||
isHostnameTaken: function (hostname, ignore_type, ignore_id) {
|
||||
isHostnameTaken: (hostname, ignore_type, ignore_id) => {
|
||||
const promises = [
|
||||
proxyHostModel
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.andWhere(castJsonIfNeed('domain_names'), 'like', '%' + hostname + '%'),
|
||||
.where("is_deleted", 0)
|
||||
.andWhere(castJsonIfNeed("domain_names"), "like", `%${hostname}%`),
|
||||
redirectionHostModel
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.andWhere(castJsonIfNeed('domain_names'), 'like', '%' + hostname + '%'),
|
||||
.where("is_deleted", 0)
|
||||
.andWhere(castJsonIfNeed("domain_names"), "like", `%${hostname}%`),
|
||||
deadHostModel
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.andWhere(castJsonIfNeed('domain_names'), 'like', '%' + hostname + '%')
|
||||
.where("is_deleted", 0)
|
||||
.andWhere(castJsonIfNeed("domain_names"), "like", `%${hostname}%`),
|
||||
];
|
||||
|
||||
return Promise.all(promises)
|
||||
.then((promises_results) => {
|
||||
return Promise.all(promises).then((promises_results) => {
|
||||
let is_taken = false;
|
||||
|
||||
if (promises_results[0]) {
|
||||
// Proxy Hosts
|
||||
if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[0], ignore_type === 'proxy' && ignore_id ? ignore_id : 0)) {
|
||||
if (
|
||||
internalHost._checkHostnameRecordsTaken(
|
||||
hostname,
|
||||
promises_results[0],
|
||||
ignore_type === "proxy" && ignore_id ? ignore_id : 0,
|
||||
)
|
||||
) {
|
||||
is_taken = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (promises_results[1]) {
|
||||
// Redirection Hosts
|
||||
if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[1], ignore_type === 'redirection' && ignore_id ? ignore_id : 0)) {
|
||||
if (
|
||||
internalHost._checkHostnameRecordsTaken(
|
||||
hostname,
|
||||
promises_results[1],
|
||||
ignore_type === "redirection" && ignore_id ? ignore_id : 0,
|
||||
)
|
||||
) {
|
||||
is_taken = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (promises_results[2]) {
|
||||
// Dead Hosts
|
||||
if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[2], ignore_type === 'dead' && ignore_id ? ignore_id : 0)) {
|
||||
if (
|
||||
internalHost._checkHostnameRecordsTaken(
|
||||
hostname,
|
||||
promises_results[2],
|
||||
ignore_type === "dead" && ignore_id ? ignore_id : 0,
|
||||
)
|
||||
) {
|
||||
is_taken = true;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
hostname: hostname,
|
||||
is_taken: is_taken
|
||||
is_taken: is_taken,
|
||||
};
|
||||
});
|
||||
},
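isHostnameTaken fans out one LIKE-filtered query per host table and then does an exact, case-insensitive comparison in _checkHostnameRecordsTaken; passing ignore_type and ignore_id lets an update skip the record currently being edited. A usage sketch for the call shape only (it needs a configured database to actually run, and the import mirrors the style used elsewhere in this diff):

// Usage sketch, illustrative only.
import internalHost from "./host.js";

const demo = async () => {
	// New host: a match in proxy, redirection or 404 hosts makes it "taken".
	const fresh = await internalHost.isHostnameTaken("example.com");
	// Editing proxy host #12 (hypothetical id): matches on that row are ignored.
	const editing = await internalHost.isHostnameTaken("example.com", "proxy", 12);
	console.log(fresh.is_taken, editing.is_taken);
};
demo();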
@@ -177,60 +188,64 @@ const internalHost = {
|
||||
* Private call only
|
||||
*
|
||||
* @param {String} hostname
|
||||
* @param {Array} existing_rows
|
||||
* @param {Integer} [ignore_id]
|
||||
* @param {Array} existingRows
|
||||
* @param {Integer} [ignoreId]
|
||||
* @returns {Boolean}
|
||||
*/
|
||||
_checkHostnameRecordsTaken: function (hostname, existing_rows, ignore_id) {
|
||||
let is_taken = false;
|
||||
_checkHostnameRecordsTaken: (hostname, existingRows, ignoreId) => {
|
||||
let isTaken = false;
|
||||
|
||||
if (existing_rows && existing_rows.length) {
|
||||
existing_rows.map(function (existing_row) {
|
||||
existing_row.domain_names.map(function (existing_hostname) {
|
||||
if (existingRows?.length) {
|
||||
existingRows.map((existingRow) => {
|
||||
existingRow.domain_names.map((existingHostname) => {
|
||||
// Does this domain match?
|
||||
if (existing_hostname.toLowerCase() === hostname.toLowerCase()) {
|
||||
if (!ignore_id || ignore_id !== existing_row.id) {
|
||||
is_taken = true;
|
||||
if (existingHostname.toLowerCase() === hostname.toLowerCase()) {
|
||||
if (!ignoreId || ignoreId !== existingRow.id) {
|
||||
isTaken = true;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
});
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
return is_taken;
|
||||
return isTaken;
|
||||
},
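The private predicate above marks a hostname as taken when any existing row lists it, unless that row is the one identified by ignoreId. A standalone restatement with hypothetical rows:

// Standalone sketch of the ignoreId rule above (rows are hypothetical).
const taken = (hostname, rows, ignoreId) =>
	rows.some((row) =>
		row.domain_names.some(
			(d) => d.toLowerCase() === hostname.toLowerCase() && (!ignoreId || ignoreId !== row.id),
		),
	);

const rows = [{ id: 12, domain_names: ["Example.COM"] }];
console.log(taken("example.com", rows));     // true, case-insensitive match
console.log(taken("example.com", rows, 12)); // false, the edited row is ignored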
|
||||
/**
|
||||
* Private call only
|
||||
*
|
||||
* @param {Array} hosts
|
||||
* @param {Array} domain_names
|
||||
* @param {Array} domainNames
|
||||
* @returns {Array}
|
||||
*/
|
||||
_getHostsWithDomains: function (hosts, domain_names) {
|
||||
let response = [];
|
||||
_getHostsWithDomains: (hosts, domainNames) => {
|
||||
const response = [];
|
||||
|
||||
if (hosts && hosts.length) {
|
||||
hosts.map(function (host) {
|
||||
let host_matches = false;
|
||||
if (hosts?.length) {
|
||||
hosts.map((host) => {
|
||||
let hostMatches = false;
|
||||
|
||||
domain_names.map(function (domain_name) {
|
||||
host.domain_names.map(function (host_domain_name) {
|
||||
if (domain_name.toLowerCase() === host_domain_name.toLowerCase()) {
|
||||
host_matches = true;
|
||||
domainNames.map((domainName) => {
|
||||
host.domain_names.map((hostDomainName) => {
|
||||
if (domainName.toLowerCase() === hostDomainName.toLowerCase()) {
|
||||
hostMatches = true;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
return true;
|
||||
});
|
||||
|
||||
if (host_matches) {
|
||||
if (hostMatches) {
|
||||
response.push(host);
|
||||
}
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
},
|
||||
};
|
||||
|
||||
module.exports = internalHost;
|
||||
export default internalHost;
|
||||
|
||||
@@ -1,43 +1,49 @@
|
||||
const https = require('https');
|
||||
const fs = require('fs');
|
||||
const logger = require('../logger').ip_ranges;
|
||||
const error = require('../lib/error');
|
||||
const utils = require('../lib/utils');
|
||||
const internalNginx = require('./nginx');
|
||||
import fs from "node:fs";
|
||||
import https from "node:https";
|
||||
import { dirname } from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import errs from "../lib/error.js";
|
||||
import utils from "../lib/utils.js";
|
||||
import { ipRanges as logger } from "../logger.js";
|
||||
import internalNginx from "./nginx.js";
|
||||
|
||||
const CLOUDFRONT_URL = 'https://ip-ranges.amazonaws.com/ip-ranges.json';
|
||||
const CLOUDFARE_V4_URL = 'https://www.cloudflare.com/ips-v4';
|
||||
const CLOUDFARE_V6_URL = 'https://www.cloudflare.com/ips-v6';
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = dirname(__filename);
|
||||
|
||||
const CLOUDFRONT_URL = "https://ip-ranges.amazonaws.com/ip-ranges.json";
|
||||
const CLOUDFARE_V4_URL = "https://www.cloudflare.com/ips-v4";
|
||||
const CLOUDFARE_V6_URL = "https://www.cloudflare.com/ips-v6";
|
||||
|
||||
const regIpV4 = /^(\d+\.?){4}\/\d+/;
|
||||
const regIpV6 = /^(([\da-fA-F]+)?:)+\/\d+/;
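These two expressions act as a coarse filter for CIDR-looking lines in the Cloudflare plain-text lists; they are not strict address validators. A quick check of what they accept (sample inputs are illustrative):

// Same patterns as above, exercised on sample lines.
const regIpV4 = /^(\d+\.?){4}\/\d+/;
const regIpV6 = /^(([\da-fA-F]+)?:)+\/\d+/;

console.log(regIpV4.test("173.245.48.0/20")); // true
console.log(regIpV4.test("not an ip"));       // false
console.log(regIpV6.test("2400:cb00::/32"));  // true
console.log(regIpV6.test("fe80::1"));         // false, no "/prefix", so the line is filtered out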
|
||||
const internalIpRanges = {
|
||||
|
||||
interval_timeout: 1000 * 60 * 60 * 6, // 6 hours
|
||||
interval: null,
|
||||
interval_processing: false,
|
||||
iteration_count: 0,
|
||||
|
||||
initTimer: () => {
|
||||
logger.info('IP Ranges Renewal Timer initialized');
|
||||
logger.info("IP Ranges Renewal Timer initialized");
|
||||
internalIpRanges.interval = setInterval(internalIpRanges.fetch, internalIpRanges.interval_timeout);
|
||||
},
|
||||
|
||||
fetchUrl: (url) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
logger.info('Fetching ' + url);
|
||||
return https.get(url, (res) => {
|
||||
res.setEncoding('utf8');
|
||||
let raw_data = '';
|
||||
res.on('data', (chunk) => {
|
||||
logger.info(`Fetching ${url}`);
|
||||
return https
|
||||
.get(url, (res) => {
|
||||
res.setEncoding("utf8");
|
||||
let raw_data = "";
|
||||
res.on("data", (chunk) => {
|
||||
raw_data += chunk;
|
||||
});
|
||||
|
||||
res.on('end', () => {
|
||||
res.on("end", () => {
|
||||
resolve(raw_data);
|
||||
});
|
||||
}).on('error', (err) => {
|
||||
})
|
||||
.on("error", (err) => {
|
||||
reject(err);
|
||||
});
|
||||
});
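fetchUrl wraps https.get in a promise that buffers the whole response body and resolves it as a string; the caller decides how to parse it (JSON for the AWS list, newline-separated text for the Cloudflare lists). A usage sketch for the call shape, with minimal error handling (CLOUDFRONT_URL is the constant defined in this file):

// Usage sketch, illustrative only; requires network access to actually run.
internalIpRanges
	.fetchUrl(CLOUDFRONT_URL)
	.then((body) => {
		const data = JSON.parse(body); // the AWS ip-ranges document is JSON
		console.log(Array.isArray(data.prefixes) ? data.prefixes.length : 0, "IPv4 prefixes");
	})
	.catch((err) => console.error("fetch failed:", err.message));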
@@ -49,27 +55,30 @@ const internalIpRanges = {
|
||||
fetch: () => {
|
||||
if (!internalIpRanges.interval_processing) {
|
||||
internalIpRanges.interval_processing = true;
|
||||
logger.info('Fetching IP Ranges from online services...');
|
||||
logger.info("Fetching IP Ranges from online services...");
|
||||
|
||||
let ip_ranges = [];
|
||||
|
||||
return internalIpRanges.fetchUrl(CLOUDFRONT_URL)
|
||||
return internalIpRanges
|
||||
.fetchUrl(CLOUDFRONT_URL)
|
||||
.then((cloudfront_data) => {
|
||||
let data = JSON.parse(cloudfront_data);
|
||||
const data = JSON.parse(cloudfront_data);
|
||||
|
||||
if (data && typeof data.prefixes !== 'undefined') {
|
||||
if (data && typeof data.prefixes !== "undefined") {
|
||||
data.prefixes.map((item) => {
|
||||
if (item.service === 'CLOUDFRONT') {
|
||||
if (item.service === "CLOUDFRONT") {
|
||||
ip_ranges.push(item.ip_prefix);
|
||||
}
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
if (data && typeof data.ipv6_prefixes !== 'undefined') {
|
||||
if (data && typeof data.ipv6_prefixes !== "undefined") {
|
||||
data.ipv6_prefixes.map((item) => {
|
||||
if (item.service === 'CLOUDFRONT') {
|
||||
if (item.service === "CLOUDFRONT") {
|
||||
ip_ranges.push(item.ipv6_prefix);
|
||||
}
|
||||
return true;
|
||||
});
|
||||
}
|
||||
})
|
||||
@@ -77,26 +86,26 @@ const internalIpRanges = {
|
||||
return internalIpRanges.fetchUrl(CLOUDFARE_V4_URL);
|
||||
})
|
||||
.then((cloudfare_data) => {
|
||||
let items = cloudfare_data.split('\n').filter((line) => regIpV4.test(line));
|
||||
ip_ranges = [... ip_ranges, ... items];
|
||||
const items = cloudfare_data.split("\n").filter((line) => regIpV4.test(line));
|
||||
ip_ranges = [...ip_ranges, ...items];
|
||||
})
|
||||
.then(() => {
|
||||
return internalIpRanges.fetchUrl(CLOUDFARE_V6_URL);
|
||||
})
|
||||
.then((cloudfare_data) => {
|
||||
let items = cloudfare_data.split('\n').filter((line) => regIpV6.test(line));
|
||||
ip_ranges = [... ip_ranges, ... items];
|
||||
const items = cloudfare_data.split("\n").filter((line) => regIpV6.test(line));
|
||||
ip_ranges = [...ip_ranges, ...items];
|
||||
})
|
||||
.then(() => {
|
||||
let clean_ip_ranges = [];
|
||||
const clean_ip_ranges = [];
|
||||
ip_ranges.map((range) => {
|
||||
if (range) {
|
||||
clean_ip_ranges.push(range);
|
||||
}
|
||||
return true;
|
||||
});
|
||||
|
||||
return internalIpRanges.generateConfig(clean_ip_ranges)
|
||||
.then(() => {
|
||||
return internalIpRanges.generateConfig(clean_ip_ranges).then(() => {
|
||||
if (internalIpRanges.iteration_count) {
|
||||
// Reload nginx
|
||||
return internalNginx.reload();
|
||||
@@ -108,7 +117,7 @@ const internalIpRanges = {
|
||||
internalIpRanges.iteration_count++;
|
||||
})
|
||||
.catch((err) => {
|
||||
logger.error(err.message);
|
||||
logger.fatal(err.message);
|
||||
internalIpRanges.interval_processing = false;
|
||||
});
|
||||
}
|
||||
@@ -122,26 +131,26 @@ const internalIpRanges = {
|
||||
const renderEngine = utils.getRenderEngine();
|
||||
return new Promise((resolve, reject) => {
|
||||
let template = null;
|
||||
let filename = '/etc/nginx/conf.d/include/ip_ranges.conf';
|
||||
const filename = "/etc/nginx/conf.d/include/ip_ranges.conf";
|
||||
try {
|
||||
template = fs.readFileSync(__dirname + '/../templates/ip_ranges.conf', {encoding: 'utf8'});
|
||||
template = fs.readFileSync(`${__dirname}/../templates/ip_ranges.conf`, { encoding: "utf8" });
|
||||
} catch (err) {
|
||||
reject(new error.ConfigurationError(err.message));
|
||||
reject(new errs.ConfigurationError(err.message));
|
||||
return;
|
||||
}
|
||||
|
||||
renderEngine
|
||||
.parseAndRender(template, {ip_ranges: ip_ranges})
|
||||
.parseAndRender(template, { ip_ranges: ip_ranges })
|
||||
.then((config_text) => {
|
||||
fs.writeFileSync(filename, config_text, {encoding: 'utf8'});
|
||||
fs.writeFileSync(filename, config_text, { encoding: "utf8" });
|
||||
resolve(true);
|
||||
})
|
||||
.catch((err) => {
|
||||
logger.warn('Could not write ' + filename + ':', err.message);
|
||||
reject(new error.ConfigurationError(err.message));
|
||||
logger.warn(`Could not write ${filename}: ${err.message}`);
|
||||
reject(new errs.ConfigurationError(err.message));
|
||||
});
|
||||
});
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
module.exports = internalIpRanges;
|
||||
export default internalIpRanges;
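initTimer re-runs fetch every six hours; interval_processing is a simple re-entrancy guard so a slow fetch is not started twice, and iteration_count makes the very first run skip the nginx reload. A standalone sketch of that guard pattern, with the real work replaced by a short delay (timings and names are illustrative, not repo code):

// Standalone sketch of the interval_processing guard used above.
const job = { running: false, runs: 0 };

const tick = async () => {
	if (job.running) return;   // skip overlapping runs
	job.running = true;
	try {
		await new Promise((r) => setTimeout(r, 50)); // pretend to fetch and write the config
		if (job.runs) {
			// only after the first successful run would nginx be reloaded, as above
		}
		job.runs++;
	} finally {
		job.running = false;
	}
};

const demo = async () => {
	await Promise.all([tick(), tick()]); // the second call returns immediately: guard active
	console.log(job.runs);               // 1
};
demo();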
|
||||
@@ -1,12 +1,15 @@
|
||||
const _ = require('lodash');
|
||||
const fs = require('node:fs');
|
||||
const logger = require('../logger').nginx;
|
||||
const config = require('../lib/config');
|
||||
const utils = require('../lib/utils');
|
||||
const error = require('../lib/error');
|
||||
import fs from "node:fs";
|
||||
import { dirname } from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import _ from "lodash";
|
||||
import errs from "../lib/error.js";
|
||||
import utils from "../lib/utils.js";
|
||||
import { nginx as logger } from "../logger.js";
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = dirname(__filename);
|
||||
|
||||
const internalNginx = {
|
||||
|
||||
/**
|
||||
* This will:
|
||||
* - test the nginx config first to make sure it's OK
|
||||
@@ -24,7 +27,8 @@ const internalNginx = {
|
||||
configure: (model, host_type, host) => {
|
||||
let combined_meta = {};
|
||||
|
||||
return internalNginx.test()
|
||||
return internalNginx
|
||||
.test()
|
||||
.then(() => {
|
||||
// Nginx is OK
|
||||
// We're deleting this config regardless.
|
||||
@@ -37,19 +41,17 @@ const internalNginx = {
|
||||
})
|
||||
.then(() => {
|
||||
// Test nginx again and update meta with result
|
||||
return internalNginx.test()
|
||||
return internalNginx
|
||||
.test()
|
||||
.then(() => {
|
||||
// nginx is ok
|
||||
combined_meta = _.assign({}, host.meta, {
|
||||
nginx_online: true,
|
||||
nginx_err: null
|
||||
nginx_err: null,
|
||||
});
|
||||
|
||||
return model
|
||||
.query()
|
||||
.where('id', host.id)
|
||||
.patch({
|
||||
meta: combined_meta
|
||||
return model.query().where("id", host.id).patch({
|
||||
meta: combined_meta,
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
@@ -58,28 +60,27 @@ const internalNginx = {
|
||||
// nginx: [alert] could not open error log file: open() "/var/log/nginx/error.log" failed (6: No such device or address)
|
||||
|
||||
const valid_lines = [];
|
||||
const err_lines = err.message.split('\n');
|
||||
const err_lines = err.message.split("\n");
|
||||
err_lines.map((line) => {
|
||||
if (line.indexOf('/var/log/nginx/error.log') === -1) {
|
||||
if (line.indexOf("/var/log/nginx/error.log") === -1) {
|
||||
valid_lines.push(line);
|
||||
}
|
||||
return true;
|
||||
});
|
||||
|
||||
if (config.debug()) {
|
||||
logger.error('Nginx test failed:', valid_lines.join('\n'));
|
||||
}
|
||||
logger.debug("Nginx test failed:", valid_lines.join("\n"));
|
||||
|
||||
// config is bad, update meta and delete config
|
||||
combined_meta = _.assign({}, host.meta, {
|
||||
nginx_online: false,
|
||||
nginx_err: valid_lines.join('\n')
|
||||
nginx_err: valid_lines.join("\n"),
|
||||
});
|
||||
|
||||
return model
|
||||
.query()
|
||||
.where('id', host.id)
|
||||
.where("id", host.id)
|
||||
.patch({
|
||||
meta: combined_meta
|
||||
meta: combined_meta,
|
||||
})
|
||||
.then(() => {
|
||||
internalNginx.renameConfigAsError(host_type, host);
|
||||
@@ -101,21 +102,17 @@ const internalNginx = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
test: () => {
|
||||
if (config.debug()) {
|
||||
logger.info('Testing Nginx configuration');
|
||||
}
|
||||
|
||||
return utils.execFile('/usr/sbin/nginx', ['-t', '-g', 'error_log off;']);
|
||||
logger.debug("Testing Nginx configuration");
|
||||
return utils.execFile("/usr/sbin/nginx", ["-t", "-g", "error_log off;"]);
|
||||
},
|
||||
|
||||
/**
|
||||
* @returns {Promise}
|
||||
*/
|
||||
reload: () => {
|
||||
return internalNginx.test()
|
||||
.then(() => {
|
||||
logger.info('Reloading Nginx');
|
||||
return utils.execFile('/usr/sbin/nginx', ['-s', 'reload']);
|
||||
return internalNginx.test().then(() => {
|
||||
logger.info("Reloading Nginx");
|
||||
return utils.execFile("/usr/sbin/nginx", ["-s", "reload"]);
|
||||
});
|
||||
},
|
||||
|
||||
@@ -125,8 +122,8 @@ const internalNginx = {
|
||||
* @returns {String}
|
||||
*/
|
||||
getConfigName: (host_type, host_id) => {
|
||||
if (host_type === 'default') {
|
||||
return '/data/nginx/default_host/site.conf';
|
||||
if (host_type === "default") {
|
||||
return "/data/nginx/default_host/site.conf";
|
||||
}
|
||||
return `/data/nginx/${internalNginx.getFileFriendlyHostType(host_type)}/${host_id}.conf`;
|
||||
},
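getConfigName maps a host type and id to the on-disk nginx config path, with the "default" host as a special case, and getFileFriendlyHostType (further down in this file) turns dashed type names into the underscored directory names. A standalone restatement with the paths hard-coded in this file (host id 12 is hypothetical):

// Sketch of the two helpers above, not repo code.
const getFileFriendlyHostType = (hostType) => hostType.replace(/-/g, "_");
const getConfigName = (hostType, hostId) =>
	hostType === "default"
		? "/data/nginx/default_host/site.conf"
		: `/data/nginx/${getFileFriendlyHostType(hostType)}/${hostId}.conf`;

console.log(getConfigName("default", 0));     // /data/nginx/default_host/site.conf
console.log(getConfigName("proxy-host", 12)); // /data/nginx/proxy_host/12.conf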
@@ -141,38 +138,45 @@ const internalNginx = {
|
||||
let template;
|
||||
|
||||
try {
|
||||
template = fs.readFileSync(`${__dirname}/../templates/_location.conf`, {encoding: 'utf8'});
|
||||
template = fs.readFileSync(`${__dirname}/../templates/_location.conf`, { encoding: "utf8" });
|
||||
} catch (err) {
|
||||
reject(new error.ConfigurationError(err.message));
|
||||
reject(new errs.ConfigurationError(err.message));
|
||||
return;
|
||||
}
|
||||
|
||||
const renderEngine = utils.getRenderEngine();
|
||||
let renderedLocations = '';
|
||||
let renderedLocations = "";
|
||||
|
||||
const locationRendering = async () => {
|
||||
for (let i = 0; i < host.locations.length; i++) {
|
||||
const locationCopy = Object.assign({}, {access_list_id: host.access_list_id}, {certificate_id: host.certificate_id},
|
||||
{ssl_forced: host.ssl_forced}, {caching_enabled: host.caching_enabled}, {block_exploits: host.block_exploits},
|
||||
{allow_websocket_upgrade: host.allow_websocket_upgrade}, {http2_support: host.http2_support},
|
||||
{hsts_enabled: host.hsts_enabled}, {hsts_subdomains: host.hsts_subdomains}, {access_list: host.access_list},
|
||||
{certificate: host.certificate}, host.locations[i]);
|
||||
const locationCopy = Object.assign(
|
||||
{},
|
||||
{ access_list_id: host.access_list_id },
|
||||
{ certificate_id: host.certificate_id },
|
||||
{ ssl_forced: host.ssl_forced },
|
||||
{ caching_enabled: host.caching_enabled },
|
||||
{ block_exploits: host.block_exploits },
|
||||
{ allow_websocket_upgrade: host.allow_websocket_upgrade },
|
||||
{ http2_support: host.http2_support },
|
||||
{ hsts_enabled: host.hsts_enabled },
|
||||
{ hsts_subdomains: host.hsts_subdomains },
|
||||
{ access_list: host.access_list },
|
||||
{ certificate: host.certificate },
|
||||
host.locations[i],
|
||||
);
|
||||
|
||||
if (locationCopy.forward_host.indexOf('/') > -1) {
|
||||
const splitted = locationCopy.forward_host.split('/');
|
||||
if (locationCopy.forward_host.indexOf("/") > -1) {
|
||||
const splitted = locationCopy.forward_host.split("/");
|
||||
|
||||
locationCopy.forward_host = splitted.shift();
|
||||
locationCopy.forward_path = `/${splitted.join('/')}`;
|
||||
locationCopy.forward_path = `/${splitted.join("/")}`;
|
||||
}
|
||||
|
||||
// eslint-disable-next-line
|
||||
renderedLocations += await renderEngine.parseAndRender(template, locationCopy);
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
locationRendering().then(() => resolve(renderedLocations));
|
||||
|
||||
});
|
||||
},
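Inside the per-location rendering above, a forward_host that contains a path ("host/route") is split so the template receives a bare upstream host plus a separate forward_path. A standalone sketch of that split with a hypothetical value:

// Standalone restatement of the forward_host split above.
const splitForward = (forwardHost) => {
	if (forwardHost.indexOf("/") === -1) return { forward_host: forwardHost };
	const parts = forwardHost.split("/");
	return { forward_host: parts.shift(), forward_path: `/${parts.join("/")}` };
};

console.log(splitForward("backend.local/api/v2"));
// -> { forward_host: "backend.local", forward_path: "/api/v2" }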
|
||||
@@ -186,9 +190,7 @@ const internalNginx = {
|
||||
const host = JSON.parse(JSON.stringify(host_row));
|
||||
const nice_host_type = internalNginx.getFileFriendlyHostType(host_type);
|
||||
|
||||
if (config.debug()) {
|
||||
logger.info(`Generating ${nice_host_type} Config:`, JSON.stringify(host, null, 2));
|
||||
}
|
||||
logger.debug(`Generating ${nice_host_type} Config:`, JSON.stringify(host, null, 2));
|
||||
|
||||
const renderEngine = utils.getRenderEngine();
|
||||
|
||||
@@ -197,9 +199,9 @@ const internalNginx = {
|
||||
const filename = internalNginx.getConfigName(nice_host_type, host.id);
|
||||
|
||||
try {
|
||||
template = fs.readFileSync(`${__dirname}/../templates/${nice_host_type}.conf`, {encoding: 'utf8'});
|
||||
template = fs.readFileSync(`${__dirname}/../templates/${nice_host_type}.conf`, { encoding: "utf8" });
|
||||
} catch (err) {
|
||||
reject(new error.ConfigurationError(err.message));
|
||||
reject(new errs.ConfigurationError(err.message));
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -207,9 +209,9 @@ const internalNginx = {
|
||||
let origLocations;
|
||||
|
||||
// Manipulate the data a bit before sending it to the template
|
||||
if (nice_host_type !== 'default') {
|
||||
if (nice_host_type !== "default") {
|
||||
host.use_default_location = true;
|
||||
if (typeof host.advanced_config !== 'undefined' && host.advanced_config) {
|
||||
if (typeof host.advanced_config !== "undefined" && host.advanced_config) {
|
||||
host.use_default_location = !internalNginx.advancedConfigHasDefaultLocation(host.advanced_config);
|
||||
}
|
||||
}
|
||||
@@ -223,11 +225,10 @@ const internalNginx = {
|
||||
|
||||
// Allow someone who is using / custom location path to use it, and skip the default / location
|
||||
_.map(host.locations, (location) => {
|
||||
if (location.path === '/') {
|
||||
if (location.path === "/") {
|
||||
host.use_default_location = false;
|
||||
}
|
||||
});
|
||||
|
||||
} else {
|
||||
locationsPromise = Promise.resolve();
|
||||
}
|
||||
@@ -239,11 +240,8 @@ const internalNginx = {
|
||||
renderEngine
|
||||
.parseAndRender(template, host)
|
||||
.then((config_text) => {
|
||||
fs.writeFileSync(filename, config_text, {encoding: 'utf8'});
|
||||
|
||||
if (config.debug()) {
|
||||
logger.success('Wrote config:', filename, config_text);
|
||||
}
|
||||
fs.writeFileSync(filename, config_text, { encoding: "utf8" });
|
||||
logger.debug("Wrote config:", filename, config_text);
|
||||
|
||||
// Restore locations array
|
||||
host.locations = origLocations;
|
||||
@@ -251,11 +249,8 @@ const internalNginx = {
|
||||
resolve(true);
|
||||
})
|
||||
.catch((err) => {
|
||||
if (config.debug()) {
|
||||
logger.warn(`Could not write ${filename}:`, err.message);
|
||||
}
|
||||
|
||||
reject(new error.ConfigurationError(err.message));
|
||||
logger.debug(`Could not write ${filename}:`, err.message);
|
||||
reject(new errs.ConfigurationError(err.message));
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -270,10 +265,7 @@ const internalNginx = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
generateLetsEncryptRequestConfig: (certificate) => {
|
||||
if (config.debug()) {
|
||||
logger.info('Generating LetsEncrypt Request Config:', certificate);
|
||||
}
|
||||
|
||||
logger.debug("Generating LetsEncrypt Request Config:", certificate);
|
||||
const renderEngine = utils.getRenderEngine();
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
@@ -281,9 +273,9 @@ const internalNginx = {
|
||||
const filename = `/data/nginx/temp/letsencrypt_${certificate.id}.conf`;
|
||||
|
||||
try {
|
||||
template = fs.readFileSync(`${__dirname}/../templates/letsencrypt-request.conf`, {encoding: 'utf8'});
|
||||
template = fs.readFileSync(`${__dirname}/../templates/letsencrypt-request.conf`, { encoding: "utf8" });
|
||||
} catch (err) {
|
||||
reject(new error.ConfigurationError(err.message));
|
||||
reject(new errs.ConfigurationError(err.message));
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -292,20 +284,13 @@ const internalNginx = {
|
||||
renderEngine
|
||||
.parseAndRender(template, certificate)
|
||||
.then((config_text) => {
|
||||
fs.writeFileSync(filename, config_text, {encoding: 'utf8'});
|
||||
|
||||
if (config.debug()) {
|
||||
logger.success('Wrote config:', filename, config_text);
|
||||
}
|
||||
|
||||
fs.writeFileSync(filename, config_text, { encoding: "utf8" });
|
||||
logger.debug("Wrote config:", filename, config_text);
|
||||
resolve(true);
|
||||
})
|
||||
.catch((err) => {
|
||||
if (config.debug()) {
|
||||
logger.warn(`Could not write ${filename}:`, err.message);
|
||||
}
|
||||
|
||||
reject(new error.ConfigurationError(err.message));
|
||||
logger.debug(`Could not write ${filename}:`, err.message);
|
||||
reject(new errs.ConfigurationError(err.message));
|
||||
});
|
||||
});
|
||||
},
|
||||
@@ -320,7 +305,7 @@ const internalNginx = {
|
||||
try {
|
||||
fs.unlinkSync(filename);
|
||||
} catch (err) {
|
||||
logger.debug('Could not delete file:', JSON.stringify(err, null, 2));
|
||||
logger.debug("Could not delete file:", JSON.stringify(err, null, 2));
|
||||
}
|
||||
},
|
||||
|
||||
@@ -330,7 +315,7 @@ const internalNginx = {
|
||||
* @returns String
|
||||
*/
|
||||
getFileFriendlyHostType: (host_type) => {
|
||||
return host_type.replace(/-/g, '_');
|
||||
return host_type.replace(/-/g, "_");
|
||||
},
|
||||
|
||||
/**
|
||||
@@ -341,7 +326,7 @@ const internalNginx = {
|
||||
*/
|
||||
deleteLetsEncryptRequestConfig: (certificate) => {
|
||||
const config_file = `/data/nginx/temp/letsencrypt_${certificate.id}.conf`;
|
||||
return new Promise((resolve/*, reject*/) => {
|
||||
return new Promise((resolve /*, reject*/) => {
|
||||
internalNginx.deleteFile(config_file);
|
||||
resolve();
|
||||
});
|
||||
@@ -354,10 +339,13 @@ const internalNginx = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
deleteConfig: (host_type, host, delete_err_file) => {
|
||||
const config_file = internalNginx.getConfigName(internalNginx.getFileFriendlyHostType(host_type), typeof host === 'undefined' ? 0 : host.id);
|
||||
const config_file = internalNginx.getConfigName(
|
||||
internalNginx.getFileFriendlyHostType(host_type),
|
||||
typeof host === "undefined" ? 0 : host.id,
|
||||
);
|
||||
const config_file_err = `${config_file}.err`;
|
||||
|
||||
return new Promise((resolve/*, reject*/) => {
|
||||
return new Promise((resolve /*, reject*/) => {
|
||||
internalNginx.deleteFile(config_file);
|
||||
if (delete_err_file) {
|
||||
internalNginx.deleteFile(config_file_err);
|
||||
@@ -372,10 +360,13 @@ const internalNginx = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
renameConfigAsError: (host_type, host) => {
|
||||
const config_file = internalNginx.getConfigName(internalNginx.getFileFriendlyHostType(host_type), typeof host === 'undefined' ? 0 : host.id);
|
||||
const config_file = internalNginx.getConfigName(
|
||||
internalNginx.getFileFriendlyHostType(host_type),
|
||||
typeof host === "undefined" ? 0 : host.id,
|
||||
);
|
||||
const config_file_err = `${config_file}.err`;
|
||||
|
||||
return new Promise((resolve/*, reject*/) => {
|
||||
return new Promise((resolve /*, reject*/) => {
|
||||
fs.unlink(config_file, () => {
|
||||
// ignore result, continue
|
||||
fs.rename(config_file, config_file_err, () => {
|
||||
@@ -395,6 +386,7 @@ const internalNginx = {
|
||||
const promises = [];
|
||||
hosts.map((host) => {
|
||||
promises.push(internalNginx.generateConfig(host_type, host));
|
||||
return true;
|
||||
});
|
||||
|
||||
return Promise.all(promises);
|
||||
@@ -409,6 +401,7 @@ const internalNginx = {
|
||||
const promises = [];
|
||||
hosts.map((host) => {
|
||||
promises.push(internalNginx.deleteConfig(host_type, host, true));
|
||||
return true;
|
||||
});
|
||||
|
||||
return Promise.all(promises);
|
||||
@@ -424,13 +417,13 @@ const internalNginx = {
|
||||
* @returns {boolean}
|
||||
*/
|
||||
ipv6Enabled: () => {
|
||||
if (typeof process.env.DISABLE_IPV6 !== 'undefined') {
|
||||
if (typeof process.env.DISABLE_IPV6 !== "undefined") {
|
||||
const disabled = process.env.DISABLE_IPV6.toLowerCase();
|
||||
return !(disabled === 'on' || disabled === 'true' || disabled === '1' || disabled === 'yes');
|
||||
return !(disabled === "on" || disabled === "true" || disabled === "1" || disabled === "yes");
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
},
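ipv6Enabled defaults to true and only disables IPv6 when DISABLE_IPV6 is explicitly set to one of the accepted strings, compared case-insensitively. A quick truth-table sketch of the same decision, restated on an explicit value instead of the environment variable:

// Same decision logic as ipv6Enabled above, for illustration.
const ipv6EnabledFor = (value) => {
	if (typeof value === "undefined") return true;
	const disabled = value.toLowerCase();
	return !(disabled === "on" || disabled === "true" || disabled === "1" || disabled === "yes");
};

console.log(ipv6EnabledFor(undefined)); // true, variable not set
console.log(ipv6EnabledFor("TRUE"));    // false, case-insensitive match
console.log(ipv6EnabledFor("0"));       // true, only on/true/1/yes disable it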
};
|
||||
|
||||
module.exports = internalNginx;
|
||||
export default internalNginx;
|
||||
|
||||
@@ -1,106 +1,105 @@
|
||||
const _ = require('lodash');
|
||||
const error = require('../lib/error');
|
||||
const utils = require('../lib/utils');
|
||||
const proxyHostModel = require('../models/proxy_host');
|
||||
const internalHost = require('./host');
|
||||
const internalNginx = require('./nginx');
|
||||
const internalAuditLog = require('./audit-log');
|
||||
const internalCertificate = require('./certificate');
|
||||
const {castJsonIfNeed} = require('../lib/helpers');
|
||||
import _ from "lodash";
|
||||
import errs from "../lib/error.js";
|
||||
import { castJsonIfNeed } from "../lib/helpers.js";
|
||||
import utils from "../lib/utils.js";
|
||||
import proxyHostModel from "../models/proxy_host.js";
|
||||
import internalAuditLog from "./audit-log.js";
|
||||
import internalCertificate from "./certificate.js";
|
||||
import internalHost from "./host.js";
|
||||
import internalNginx from "./nginx.js";
|
||||
|
||||
function omissions () {
|
||||
return ['is_deleted', 'owner.is_deleted'];
|
||||
}
|
||||
const omissions = () => {
|
||||
return ["is_deleted", "owner.is_deleted"];
|
||||
};
|
||||
|
||||
const internalProxyHost = {
|
||||
|
||||
/**
|
||||
* @param {Access} access
|
||||
* @param {Object} data
|
||||
* @returns {Promise}
|
||||
*/
|
||||
create: (access, data) => {
|
||||
let create_certificate = data.certificate_id === 'new';
|
||||
let thisData = data;
|
||||
const createCertificate = thisData.certificate_id === "new";
|
||||
|
||||
if (create_certificate) {
|
||||
delete data.certificate_id;
|
||||
if (createCertificate) {
|
||||
delete thisData.certificate_id;
|
||||
}
|
||||
|
||||
return access.can('proxy_hosts:create', data)
|
||||
return access
|
||||
.can("proxy_hosts:create", thisData)
|
||||
.then(() => {
|
||||
// Get a list of the domain names and check each of them against existing records
|
||||
let domain_name_check_promises = [];
|
||||
const domain_name_check_promises = [];
|
||||
|
||||
data.domain_names.map(function (domain_name) {
|
||||
thisData.domain_names.map((domain_name) => {
|
||||
domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name));
|
||||
return true;
|
||||
});
|
||||
|
||||
return Promise.all(domain_name_check_promises)
|
||||
.then((check_results) => {
|
||||
check_results.map(function (result) {
|
||||
return Promise.all(domain_name_check_promises).then((check_results) => {
|
||||
check_results.map((result) => {
|
||||
if (result.is_taken) {
|
||||
throw new error.ValidationError(result.hostname + ' is already in use');
|
||||
throw new errs.ValidationError(`${result.hostname} is already in use`);
|
||||
}
|
||||
return true;
|
||||
});
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
// At this point the domains should have been checked
|
||||
data.owner_user_id = access.token.getUserId(1);
|
||||
data = internalHost.cleanSslHstsData(data);
|
||||
thisData.owner_user_id = access.token.getUserId(1);
|
||||
thisData = internalHost.cleanSslHstsData(thisData);
|
||||
|
||||
// Fix for db field not having a default value
|
||||
// for this optional field.
|
||||
if (typeof data.advanced_config === 'undefined') {
|
||||
data.advanced_config = '';
|
||||
if (typeof thisData.advanced_config === "undefined") {
|
||||
thisData.advanced_config = "";
|
||||
}
|
||||
|
||||
return proxyHostModel
|
||||
.query()
|
||||
.insertAndFetch(data)
|
||||
.then(utils.omitRow(omissions()));
|
||||
return proxyHostModel.query().insertAndFetch(thisData).then(utils.omitRow(omissions()));
|
||||
})
|
||||
.then((row) => {
|
||||
if (create_certificate) {
|
||||
return internalCertificate.createQuickCertificate(access, data)
|
||||
if (createCertificate) {
|
||||
return internalCertificate
|
||||
.createQuickCertificate(access, thisData)
|
||||
.then((cert) => {
|
||||
// update host with cert id
|
||||
return internalProxyHost.update(access, {
|
||||
id: row.id,
|
||||
certificate_id: cert.id
|
||||
certificate_id: cert.id,
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
return row;
|
||||
});
|
||||
} else {
|
||||
return row;
|
||||
}
|
||||
return row;
|
||||
})
|
||||
.then((row) => {
|
||||
// re-fetch with cert
|
||||
return internalProxyHost.get(access, {
|
||||
id: row.id,
|
||||
expand: ['certificate', 'owner', 'access_list.[clients,items]']
|
||||
expand: ["certificate", "owner", "access_list.[clients,items]"],
|
||||
});
|
||||
})
|
||||
.then((row) => {
|
||||
// Configure nginx
|
||||
return internalNginx.configure(proxyHostModel, 'proxy_host', row)
|
||||
.then(() => {
|
||||
return internalNginx.configure(proxyHostModel, "proxy_host", row).then(() => {
|
||||
return row;
|
||||
});
|
||||
})
|
||||
.then((row) => {
|
||||
// Audit log
|
||||
data.meta = _.assign({}, data.meta || {}, row.meta);
|
||||
thisData.meta = _.assign({}, thisData.meta || {}, row.meta);
|
||||
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'created',
|
||||
object_type: 'proxy-host',
|
||||
return internalAuditLog
|
||||
.add(access, {
|
||||
action: "created",
|
||||
object_type: "proxy-host",
|
||||
object_id: row.id,
|
||||
meta: data
|
||||
meta: thisData,
|
||||
})
|
||||
.then(() => {
|
||||
return row;
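When a proxy host is created with certificate_id set to "new", the id is stripped from the insert payload, the row is created first, a certificate is requested via createQuickCertificate, and the host is then updated with the resulting certificate id before being re-fetched with its relations. A condensed sketch of that ordering (the payload values are hypothetical):

// Condensed sketch of the create-with-new-certificate ordering handled above.
const payload = { domain_names: ["example.com"], forward_host: "127.0.0.1", forward_port: 8080, certificate_id: "new" };
const wantsCert = payload.certificate_id === "new";
if (wantsCert) delete payload.certificate_id; // insert the host without a certificate first
// 1. insertAndFetch(payload)                    -> row
// 2. createQuickCertificate(access, payload)    -> cert
// 3. update(access, { id: row.id, certificate_id: cert.id })
// 4. get(access, { id: row.id, expand: ["certificate", "owner", "access_list.[clients,items]"] })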
@@ -115,77 +114,88 @@ const internalProxyHost = {
|
||||
* @return {Promise}
|
||||
*/
|
||||
update: (access, data) => {
|
||||
let create_certificate = data.certificate_id === 'new';
|
||||
let thisData = data;
|
||||
const create_certificate = thisData.certificate_id === "new";
|
||||
|
||||
if (create_certificate) {
|
||||
delete data.certificate_id;
|
||||
delete thisData.certificate_id;
|
||||
}
|
||||
|
||||
return access.can('proxy_hosts:update', data.id)
|
||||
return access
|
||||
.can("proxy_hosts:update", thisData.id)
|
||||
.then((/*access_data*/) => {
|
||||
// Get a list of the domain names and check each of them against existing records
|
||||
let domain_name_check_promises = [];
|
||||
const domain_name_check_promises = [];
|
||||
|
||||
if (typeof data.domain_names !== 'undefined') {
|
||||
data.domain_names.map(function (domain_name) {
|
||||
domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, 'proxy', data.id));
|
||||
if (typeof thisData.domain_names !== "undefined") {
|
||||
thisData.domain_names.map((domain_name) => {
|
||||
return domain_name_check_promises.push(
|
||||
internalHost.isHostnameTaken(domain_name, "proxy", thisData.id),
|
||||
);
|
||||
});
|
||||
|
||||
return Promise.all(domain_name_check_promises)
|
||||
.then((check_results) => {
|
||||
check_results.map(function (result) {
|
||||
return Promise.all(domain_name_check_promises).then((check_results) => {
|
||||
check_results.map((result) => {
|
||||
if (result.is_taken) {
|
||||
throw new error.ValidationError(result.hostname + ' is already in use');
|
||||
throw new errs.ValidationError(`${result.hostname} is already in use`);
|
||||
}
|
||||
return true;
|
||||
});
|
||||
});
|
||||
}
|
||||
})
|
||||
.then(() => {
|
||||
return internalProxyHost.get(access, {id: data.id});
|
||||
return internalProxyHost.get(access, { id: thisData.id });
|
||||
})
|
||||
.then((row) => {
|
||||
if (row.id !== data.id) {
|
||||
if (row.id !== thisData.id) {
|
||||
// Sanity check that something crazy hasn't happened
|
||||
throw new error.InternalValidationError('Proxy Host could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
|
||||
throw new errs.InternalValidationError(
|
||||
`Proxy Host could not be updated, IDs do not match: ${row.id} !== ${thisData.id}`,
|
||||
);
|
||||
}
|
||||
|
||||
if (create_certificate) {
|
||||
return internalCertificate.createQuickCertificate(access, {
|
||||
domain_names: data.domain_names || row.domain_names,
|
||||
meta: _.assign({}, row.meta, data.meta)
|
||||
return internalCertificate
|
||||
.createQuickCertificate(access, {
|
||||
domain_names: thisData.domain_names || row.domain_names,
|
||||
meta: _.assign({}, row.meta, thisData.meta),
|
||||
})
|
||||
.then((cert) => {
|
||||
// update host with cert id
|
||||
data.certificate_id = cert.id;
|
||||
thisData.certificate_id = cert.id;
|
||||
})
|
||||
.then(() => {
|
||||
return row;
|
||||
});
|
||||
} else {
|
||||
return row;
|
||||
}
|
||||
return row;
|
||||
})
|
||||
.then((row) => {
|
||||
// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
|
||||
data = _.assign({}, {
|
||||
domain_names: row.domain_names
|
||||
}, data);
|
||||
thisData = _.assign(
|
||||
{},
|
||||
{
|
||||
domain_names: row.domain_names,
|
||||
},
|
||||
data,
|
||||
);
|
||||
|
||||
data = internalHost.cleanSslHstsData(data, row);
|
||||
thisData = internalHost.cleanSslHstsData(thisData, row);
|
||||
|
||||
return proxyHostModel
|
||||
.query()
|
||||
.where({id: data.id})
|
||||
.patch(data)
|
||||
.where({ id: thisData.id })
|
||||
.patch(thisData)
|
||||
.then(utils.omitRow(omissions()))
|
||||
.then((saved_row) => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'updated',
|
||||
object_type: 'proxy-host',
|
||||
return internalAuditLog
|
||||
.add(access, {
|
||||
action: "updated",
|
||||
object_type: "proxy-host",
|
||||
object_id: row.id,
|
||||
meta: data
|
||||
meta: thisData,
|
||||
})
|
||||
.then(() => {
|
||||
return saved_row;
|
||||
@@ -193,9 +203,10 @@ const internalProxyHost = {
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
return internalProxyHost.get(access, {
|
||||
id: data.id,
|
||||
expand: ['owner', 'certificate', 'access_list.[clients,items]']
|
||||
return internalProxyHost
|
||||
.get(access, {
|
||||
id: thisData.id,
|
||||
expand: ["owner", "certificate", "access_list.[clients,items]"],
|
||||
})
|
||||
.then((row) => {
|
||||
if (!row.enabled) {
|
||||
@@ -203,11 +214,9 @@ const internalProxyHost = {
|
||||
return row;
|
||||
}
|
||||
// Configure nginx
|
||||
return internalNginx.configure(proxyHostModel, 'proxy_host', row)
|
||||
.then((new_meta) => {
|
||||
return internalNginx.configure(proxyHostModel, "proxy_host", row).then((new_meta) => {
|
||||
row.meta = new_meta;
|
||||
row = internalHost.cleanRowCertificateMeta(row);
|
||||
return _.omit(row, omissions());
|
||||
return _.omit(internalHost.cleanRowCertificateMeta(row), omissions());
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -222,39 +231,38 @@ const internalProxyHost = {
|
||||
* @return {Promise}
|
||||
*/
|
||||
get: (access, data) => {
|
||||
if (typeof data === 'undefined') {
|
||||
data = {};
|
||||
}
|
||||
const thisData = data || {};
|
||||
|
||||
return access.can('proxy_hosts:get', data.id)
|
||||
return access
|
||||
.can("proxy_hosts:get", thisData.id)
|
||||
.then((access_data) => {
|
||||
let query = proxyHostModel
|
||||
const query = proxyHostModel
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.andWhere('id', data.id)
|
||||
.allowGraph('[owner,access_list.[clients,items],certificate]')
|
||||
.where("is_deleted", 0)
|
||||
.andWhere("id", thisData.id)
|
||||
.allowGraph("[owner,access_list.[clients,items],certificate]")
|
||||
.first();
|
||||
|
||||
if (access_data.permission_visibility !== 'all') {
|
||||
query.andWhere('owner_user_id', access.token.getUserId(1));
|
||||
if (access_data.permission_visibility !== "all") {
|
||||
query.andWhere("owner_user_id", access.token.getUserId(1));
|
||||
}
|
||||
|
||||
if (typeof data.expand !== 'undefined' && data.expand !== null) {
|
||||
query.withGraphFetched('[' + data.expand.join(', ') + ']');
|
||||
if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
|
||||
query.withGraphFetched(`[${thisData.expand.join(", ")}]`);
|
||||
}
|
||||
|
||||
return query.then(utils.omitRow(omissions()));
|
||||
})
|
||||
.then((row) => {
|
||||
if (!row || !row.id) {
|
||||
throw new error.ItemNotFoundError(data.id);
|
||||
throw new errs.ItemNotFoundError(thisData.id);
|
||||
}
|
||||
row = internalHost.cleanRowCertificateMeta(row);
|
||||
const thisRow = internalHost.cleanRowCertificateMeta(row);
|
||||
// Custom omissions
|
||||
if (typeof data.omit !== 'undefined' && data.omit !== null) {
|
||||
row = _.omit(row, data.omit);
|
||||
if (typeof thisData.omit !== "undefined" && thisData.omit !== null) {
|
||||
return _.omit(row, thisData.omit);
|
||||
}
|
||||
return row;
|
||||
return thisRow;
|
||||
});
|
||||
},
|
||||
|
||||
@@ -266,35 +274,35 @@ const internalProxyHost = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
delete: (access, data) => {
|
||||
return access.can('proxy_hosts:delete', data.id)
|
||||
return access
|
||||
.can("proxy_hosts:delete", data.id)
|
||||
.then(() => {
|
||||
return internalProxyHost.get(access, {id: data.id});
|
||||
return internalProxyHost.get(access, { id: data.id });
|
||||
})
|
||||
.then((row) => {
|
||||
if (!row || !row.id) {
|
||||
throw new error.ItemNotFoundError(data.id);
|
||||
throw new errs.ItemNotFoundError(data.id);
|
||||
}
|
||||
|
||||
return proxyHostModel
|
||||
.query()
|
||||
.where('id', row.id)
|
||||
.where("id", row.id)
|
||||
.patch({
|
||||
is_deleted: 1
|
||||
is_deleted: 1,
|
||||
})
|
||||
.then(() => {
|
||||
// Delete Nginx Config
|
||||
return internalNginx.deleteConfig('proxy_host', row)
|
||||
.then(() => {
|
||||
return internalNginx.deleteConfig("proxy_host", row).then(() => {
|
||||
return internalNginx.reload();
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'deleted',
|
||||
object_type: 'proxy-host',
|
||||
action: "deleted",
|
||||
object_type: "proxy-host",
|
||||
object_id: row.id,
|
||||
meta: _.omit(row, omissions())
|
||||
meta: _.omit(row, omissions()),
|
||||
});
|
||||
});
|
||||
})
|
||||
@@ -311,39 +319,41 @@ const internalProxyHost = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
enable: (access, data) => {
|
||||
return access.can('proxy_hosts:update', data.id)
|
||||
return access
|
||||
.can("proxy_hosts:update", data.id)
|
||||
.then(() => {
|
||||
return internalProxyHost.get(access, {
|
||||
id: data.id,
|
||||
expand: ['certificate', 'owner', 'access_list']
|
||||
expand: ["certificate", "owner", "access_list"],
|
||||
});
|
||||
})
|
||||
.then((row) => {
|
||||
if (!row || !row.id) {
|
||||
throw new error.ItemNotFoundError(data.id);
|
||||
} else if (row.enabled) {
|
||||
throw new error.ValidationError('Host is already enabled');
|
||||
throw new errs.ItemNotFoundError(data.id);
|
||||
}
|
||||
if (row.enabled) {
|
||||
throw new errs.ValidationError("Host is already enabled");
|
||||
}
|
||||
|
||||
row.enabled = 1;
|
||||
|
||||
return proxyHostModel
|
||||
.query()
|
||||
.where('id', row.id)
|
||||
.where("id", row.id)
|
||||
.patch({
|
||||
enabled: 1
|
||||
enabled: 1,
|
||||
})
|
||||
.then(() => {
|
||||
// Configure nginx
|
||||
return internalNginx.configure(proxyHostModel, 'proxy_host', row);
|
||||
return internalNginx.configure(proxyHostModel, "proxy_host", row);
|
||||
})
|
||||
.then(() => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'enabled',
|
||||
object_type: 'proxy-host',
|
||||
action: "enabled",
|
||||
object_type: "proxy-host",
|
||||
object_id: row.id,
|
||||
meta: _.omit(row, omissions())
|
||||
meta: _.omit(row, omissions()),
|
||||
});
|
||||
});
|
||||
})
|
||||
@@ -360,39 +370,40 @@ const internalProxyHost = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
disable: (access, data) => {
|
||||
return access.can('proxy_hosts:update', data.id)
|
||||
return access
|
||||
.can("proxy_hosts:update", data.id)
|
||||
.then(() => {
|
||||
return internalProxyHost.get(access, {id: data.id});
|
||||
return internalProxyHost.get(access, { id: data.id });
|
||||
})
|
||||
.then((row) => {
|
||||
if (!row || !row.id) {
|
||||
throw new error.ItemNotFoundError(data.id);
|
||||
} else if (!row.enabled) {
|
||||
throw new error.ValidationError('Host is already disabled');
|
||||
throw new errs.ItemNotFoundError(data.id);
|
||||
}
|
||||
if (!row.enabled) {
|
||||
throw new errs.ValidationError("Host is already disabled");
|
||||
}
|
||||
|
||||
row.enabled = 0;
|
||||
|
||||
return proxyHostModel
|
||||
.query()
|
||||
.where('id', row.id)
|
||||
.where("id", row.id)
|
||||
.patch({
|
||||
enabled: 0
|
||||
enabled: 0,
|
||||
})
|
||||
.then(() => {
|
||||
// Delete Nginx Config
|
||||
return internalNginx.deleteConfig('proxy_host', row)
|
||||
.then(() => {
|
||||
return internalNginx.deleteConfig("proxy_host", row).then(() => {
|
||||
return internalNginx.reload();
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'disabled',
|
||||
object_type: 'proxy-host',
|
||||
action: "disabled",
|
||||
object_type: "proxy-host",
|
||||
object_id: row.id,
|
||||
meta: _.omit(row, omissions())
|
||||
meta: _.omit(row, omissions()),
|
||||
});
|
||||
});
|
||||
})
|
||||
@@ -410,34 +421,35 @@ const internalProxyHost = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
getAll: (access, expand, search_query) => {
|
||||
return access.can('proxy_hosts:list')
|
||||
return access
|
||||
.can("proxy_hosts:list")
|
||||
.then((access_data) => {
|
||||
let query = proxyHostModel
|
||||
const query = proxyHostModel
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.groupBy('id')
|
||||
.allowGraph('[owner,access_list,certificate]')
|
||||
.orderBy(castJsonIfNeed('domain_names'), 'ASC');
|
||||
.where("is_deleted", 0)
|
||||
.groupBy("id")
|
||||
.allowGraph("[owner,access_list,certificate]")
|
||||
.orderBy(castJsonIfNeed("domain_names"), "ASC");
|
||||
|
||||
if (access_data.permission_visibility !== 'all') {
|
||||
query.andWhere('owner_user_id', access.token.getUserId(1));
|
||||
if (access_data.permission_visibility !== "all") {
|
||||
query.andWhere("owner_user_id", access.token.getUserId(1));
|
||||
}
|
||||
|
||||
// Query is used for searching
|
||||
if (typeof search_query === 'string' && search_query.length > 0) {
|
||||
if (typeof search_query === "string" && search_query.length > 0) {
|
||||
query.where(function () {
|
||||
this.where(castJsonIfNeed('domain_names'), 'like', `%${search_query}%`);
|
||||
this.where(castJsonIfNeed("domain_names"), "like", `%${search_query}%`);
|
||||
});
|
||||
}
|
||||
|
||||
if (typeof expand !== 'undefined' && expand !== null) {
|
||||
query.withGraphFetched('[' + expand.join(', ') + ']');
|
||||
if (typeof expand !== "undefined" && expand !== null) {
|
||||
query.withGraphFetched(`[${expand.join(", ")}]`);
|
||||
}
|
||||
|
||||
return query.then(utils.omitRows(omissions()));
|
||||
})
|
||||
.then((rows) => {
|
||||
if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
|
||||
if (typeof expand !== "undefined" && expand !== null && expand.indexOf("certificate") !== -1) {
|
||||
return internalHost.cleanAllRowsCertificateMeta(rows);
|
||||
}
|
||||
|
||||
@@ -453,20 +465,16 @@ const internalProxyHost = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
getCount: (user_id, visibility) => {
|
||||
let query = proxyHostModel
|
||||
.query()
|
||||
.count('id as count')
|
||||
.where('is_deleted', 0);
|
||||
const query = proxyHostModel.query().count("id as count").where("is_deleted", 0);
|
||||
|
||||
if (visibility !== 'all') {
|
||||
query.andWhere('owner_user_id', user_id);
|
||||
if (visibility !== "all") {
|
||||
query.andWhere("owner_user_id", user_id);
|
||||
}
|
||||
|
||||
return query.first()
|
||||
.then((row) => {
|
||||
return parseInt(row.count, 10);
|
||||
return query.first().then((row) => {
|
||||
return Number.parseInt(row.count, 10);
|
||||
});
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
module.exports = internalProxyHost;
|
||||
export default internalProxyHost;
|
||||
|
||||
@@ -1,73 +1,73 @@
|
||||
const _ = require('lodash');
|
||||
const error = require('../lib/error');
|
||||
const utils = require('../lib/utils');
|
||||
const redirectionHostModel = require('../models/redirection_host');
|
||||
const internalHost = require('./host');
|
||||
const internalNginx = require('./nginx');
|
||||
const internalAuditLog = require('./audit-log');
|
||||
const internalCertificate = require('./certificate');
|
||||
const {castJsonIfNeed} = require('../lib/helpers');
|
||||
import _ from "lodash";
|
||||
import errs from "../lib/error.js";
|
||||
import { castJsonIfNeed } from "../lib/helpers.js";
|
||||
import utils from "../lib/utils.js";
|
||||
import redirectionHostModel from "../models/redirection_host.js";
|
||||
import internalAuditLog from "./audit-log.js";
|
||||
import internalCertificate from "./certificate.js";
|
||||
import internalHost from "./host.js";
|
||||
import internalNginx from "./nginx.js";
|
||||
|
||||
function omissions () {
|
||||
return ['is_deleted'];
|
||||
}
|
||||
const omissions = () => {
|
||||
return ["is_deleted"];
|
||||
};
|
||||
|
||||
const internalRedirectionHost = {
|
||||
|
||||
/**
|
||||
* @param {Access} access
|
||||
* @param {Object} data
|
||||
* @returns {Promise}
|
||||
*/
|
||||
create: (access, data) => {
|
||||
let create_certificate = data.certificate_id === 'new';
|
||||
let thisData = data || {};
|
||||
const createCertificate = thisData.certificate_id === "new";
|
||||
|
||||
if (create_certificate) {
|
||||
delete data.certificate_id;
|
||||
if (createCertificate) {
|
||||
delete thisData.certificate_id;
|
||||
}
|
||||
|
||||
return access.can('redirection_hosts:create', data)
|
||||
return access
|
||||
.can("redirection_hosts:create", thisData)
|
||||
.then((/*access_data*/) => {
|
||||
// Get a list of the domain names and check each of them against existing records
|
||||
let domain_name_check_promises = [];
|
||||
const domain_name_check_promises = [];
|
||||
|
||||
data.domain_names.map(function (domain_name) {
|
||||
thisData.domain_names.map((domain_name) => {
|
||||
domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name));
|
||||
return true;
|
||||
});
|
||||
|
||||
return Promise.all(domain_name_check_promises)
|
||||
.then((check_results) => {
|
||||
check_results.map(function (result) {
|
||||
return Promise.all(domain_name_check_promises).then((check_results) => {
|
||||
check_results.map((result) => {
|
||||
if (result.is_taken) {
|
||||
throw new error.ValidationError(result.hostname + ' is already in use');
|
||||
throw new errs.ValidationError(`${result.hostname} is already in use`);
|
||||
}
|
||||
return true;
|
||||
});
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
// At this point the domains should have been checked
|
||||
data.owner_user_id = access.token.getUserId(1);
|
||||
data = internalHost.cleanSslHstsData(data);
|
||||
thisData.owner_user_id = access.token.getUserId(1);
|
||||
thisData = internalHost.cleanSslHstsData(thisData);
|
||||
|
||||
// Fix for db field not having a default value
|
||||
// for this optional field.
|
||||
if (typeof data.advanced_config === 'undefined') {
|
||||
data.advanced_config = '';
|
||||
if (typeof data.advanced_config === "undefined") {
|
||||
data.advanced_config = "";
|
||||
}
|
||||
|
||||
return redirectionHostModel
|
||||
.query()
|
||||
.insertAndFetch(data)
|
||||
.then(utils.omitRow(omissions()));
|
||||
return redirectionHostModel.query().insertAndFetch(thisData).then(utils.omitRow(omissions()));
|
||||
})
|
||||
.then((row) => {
|
||||
if (create_certificate) {
|
||||
return internalCertificate.createQuickCertificate(access, data)
|
||||
if (createCertificate) {
|
||||
return internalCertificate
|
||||
.createQuickCertificate(access, thisData)
|
||||
.then((cert) => {
|
||||
// update host with cert id
|
||||
return internalRedirectionHost.update(access, {
|
||||
id: row.id,
|
||||
certificate_id: cert.id
|
||||
certificate_id: cert.id,
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
@@ -80,25 +80,25 @@ const internalRedirectionHost = {
|
||||
// re-fetch with cert
|
||||
return internalRedirectionHost.get(access, {
|
||||
id: row.id,
|
||||
expand: ['certificate', 'owner']
|
||||
expand: ["certificate", "owner"],
|
||||
});
|
||||
})
|
||||
.then((row) => {
|
||||
// Configure nginx
|
||||
return internalNginx.configure(redirectionHostModel, 'redirection_host', row)
|
||||
.then(() => {
|
||||
return internalNginx.configure(redirectionHostModel, "redirection_host", row).then(() => {
|
||||
return row;
|
||||
});
|
||||
})
|
||||
.then((row) => {
|
||||
data.meta = _.assign({}, data.meta || {}, row.meta);
|
||||
thisData.meta = _.assign({}, thisData.meta || {}, row.meta);
|
||||
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'created',
|
||||
object_type: 'redirection-host',
|
||||
return internalAuditLog
|
||||
.add(access, {
|
||||
action: "created",
|
||||
object_type: "redirection-host",
|
||||
object_id: row.id,
|
||||
meta: data
|
||||
meta: thisData,
|
||||
})
|
||||
.then(() => {
|
||||
return row;
|
||||
@@ -113,76 +113,88 @@ const internalRedirectionHost = {
|
||||
* @return {Promise}
|
||||
*/
|
||||
update: (access, data) => {
|
||||
let create_certificate = data.certificate_id === 'new';
|
||||
let thisData = data || {};
|
||||
const createCertificate = thisData.certificate_id === "new";
|
||||
|
||||
if (create_certificate) {
|
||||
delete data.certificate_id;
|
||||
if (createCertificate) {
|
||||
delete thisData.certificate_id;
|
||||
}
|
||||
|
||||
return access.can('redirection_hosts:update', data.id)
|
||||
return access
|
||||
.can("redirection_hosts:update", thisData.id)
|
||||
.then((/*access_data*/) => {
|
||||
// Get a list of the domain names and check each of them against existing records
|
||||
let domain_name_check_promises = [];
|
||||
const domain_name_check_promises = [];
|
||||
|
||||
if (typeof data.domain_names !== 'undefined') {
|
||||
data.domain_names.map(function (domain_name) {
|
||||
domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, 'redirection', data.id));
|
||||
if (typeof thisData.domain_names !== "undefined") {
|
||||
thisData.domain_names.map((domain_name) => {
|
||||
domain_name_check_promises.push(
|
||||
internalHost.isHostnameTaken(domain_name, "redirection", thisData.id),
|
||||
);
|
||||
return true;
|
||||
});
|
||||
|
||||
return Promise.all(domain_name_check_promises)
|
||||
.then((check_results) => {
|
||||
check_results.map(function (result) {
|
||||
return Promise.all(domain_name_check_promises).then((check_results) => {
|
||||
check_results.map((result) => {
|
||||
if (result.is_taken) {
|
||||
throw new error.ValidationError(result.hostname + ' is already in use');
|
||||
throw new errs.ValidationError(`${result.hostname} is already in use`);
|
||||
}
|
||||
return true;
|
||||
});
|
||||
});
|
||||
}
|
||||
})
|
||||
.then(() => {
|
||||
return internalRedirectionHost.get(access, {id: data.id});
|
||||
return internalRedirectionHost.get(access, { id: thisData.id });
|
||||
})
|
||||
.then((row) => {
|
||||
if (row.id !== data.id) {
|
||||
if (row.id !== thisData.id) {
|
||||
// Sanity check that something crazy hasn't happened
|
||||
throw new error.InternalValidationError('Redirection Host could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
|
||||
throw new errs.InternalValidationError(
|
||||
`Redirection Host could not be updated, IDs do not match: ${row.id} !== ${thisData.id}`,
|
||||
);
|
||||
}
|
||||
|
||||
if (create_certificate) {
|
||||
return internalCertificate.createQuickCertificate(access, {
|
||||
domain_names: data.domain_names || row.domain_names,
|
||||
meta: _.assign({}, row.meta, data.meta)
|
||||
if (createCertificate) {
|
||||
return internalCertificate
|
||||
.createQuickCertificate(access, {
|
||||
domain_names: thisData.domain_names || row.domain_names,
|
||||
meta: _.assign({}, row.meta, thisData.meta),
|
||||
})
|
||||
.then((cert) => {
|
||||
// update host with cert id
|
||||
data.certificate_id = cert.id;
|
||||
thisData.certificate_id = cert.id;
|
||||
})
|
||||
.then(() => {
|
||||
return row;
|
||||
});
|
||||
} else {
|
||||
return row;
|
||||
}
|
||||
return row;
|
||||
})
|
||||
.then((row) => {
|
||||
// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
|
||||
data = _.assign({}, {
|
||||
domain_names: row.domain_names
|
||||
}, data);
|
||||
thisData = _.assign(
|
||||
{},
|
||||
{
|
||||
domain_names: row.domain_names,
|
||||
},
|
||||
thisData,
|
||||
);
|
||||
|
||||
data = internalHost.cleanSslHstsData(data, row);
|
||||
thisData = internalHost.cleanSslHstsData(thisData, row);
|
||||
|
||||
return redirectionHostModel
|
||||
.query()
|
||||
.where({id: data.id})
|
||||
.patch(data)
|
||||
.where({ id: thisData.id })
|
||||
.patch(thisData)
|
||||
.then((saved_row) => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'updated',
|
||||
object_type: 'redirection-host',
|
||||
return internalAuditLog
|
||||
.add(access, {
|
||||
action: "updated",
|
||||
object_type: "redirection-host",
|
||||
object_id: row.id,
|
||||
meta: data
|
||||
meta: thisData,
|
||||
})
|
||||
.then(() => {
|
||||
return _.omit(saved_row, omissions());
|
||||
@@ -190,17 +202,18 @@ const internalRedirectionHost = {
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
return internalRedirectionHost.get(access, {
|
||||
id: data.id,
|
||||
expand: ['owner', 'certificate']
|
||||
return internalRedirectionHost
|
||||
.get(access, {
|
||||
id: thisData.id,
|
||||
expand: ["owner", "certificate"],
|
||||
})
|
||||
.then((row) => {
|
||||
// Configure nginx
|
||||
return internalNginx.configure(redirectionHostModel, 'redirection_host', row)
|
||||
return internalNginx
|
||||
.configure(redirectionHostModel, "redirection_host", row)
|
||||
.then((new_meta) => {
|
||||
row.meta = new_meta;
|
||||
row = internalHost.cleanRowCertificateMeta(row);
|
||||
return _.omit(row, omissions());
|
||||
return _.omit(internalHost.cleanRowCertificateMeta(row), omissions());
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -215,39 +228,39 @@ const internalRedirectionHost = {
|
||||
* @return {Promise}
|
||||
*/
|
||||
get: (access, data) => {
|
||||
if (typeof data === 'undefined') {
|
||||
data = {};
|
||||
}
|
||||
const thisData = data || {};
|
||||
|
||||
return access.can('redirection_hosts:get', data.id)
|
||||
return access
|
||||
.can("redirection_hosts:get", thisData.id)
|
||||
.then((access_data) => {
|
||||
let query = redirectionHostModel
|
||||
const query = redirectionHostModel
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.andWhere('id', data.id)
|
||||
.allowGraph('[owner,certificate]')
|
||||
.where("is_deleted", 0)
|
||||
.andWhere("id", thisData.id)
|
||||
.allowGraph("[owner,certificate]")
|
||||
.first();
|
||||
|
||||
if (access_data.permission_visibility !== 'all') {
|
||||
query.andWhere('owner_user_id', access.token.getUserId(1));
|
||||
if (access_data.permission_visibility !== "all") {
|
||||
query.andWhere("owner_user_id", access.token.getUserId(1));
|
||||
}
|
||||
|
||||
if (typeof data.expand !== 'undefined' && data.expand !== null) {
|
||||
query.withGraphFetched('[' + data.expand.join(', ') + ']');
|
||||
if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
|
||||
query.withGraphFetched(`[${thisData.expand.join(", ")}]`);
|
||||
}
|
||||
|
||||
return query.then(utils.omitRow(omissions()));
|
||||
})
|
||||
.then((row) => {
|
||||
if (!row || !row.id) {
|
||||
throw new error.ItemNotFoundError(data.id);
|
||||
let thisRow = row;
|
||||
if (!thisRow || !thisRow.id) {
|
||||
throw new errs.ItemNotFoundError(thisData.id);
|
||||
}
|
||||
row = internalHost.cleanRowCertificateMeta(row);
|
||||
thisRow = internalHost.cleanRowCertificateMeta(thisRow);
|
||||
// Custom omissions
|
||||
if (typeof data.omit !== 'undefined' && data.omit !== null) {
|
||||
row = _.omit(row, data.omit);
|
||||
if (typeof thisData.omit !== "undefined" && thisData.omit !== null) {
|
||||
return _.omit(thisRow, thisData.omit);
|
||||
}
|
||||
return row;
|
||||
return thisRow;
|
||||
});
|
||||
},
|
||||
|
||||
@@ -259,35 +272,35 @@ const internalRedirectionHost = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
delete: (access, data) => {
|
||||
return access.can('redirection_hosts:delete', data.id)
|
||||
return access
|
||||
.can("redirection_hosts:delete", data.id)
|
||||
.then(() => {
|
||||
return internalRedirectionHost.get(access, {id: data.id});
|
||||
return internalRedirectionHost.get(access, { id: data.id });
|
||||
})
|
||||
.then((row) => {
|
||||
if (!row || !row.id) {
|
||||
throw new error.ItemNotFoundError(data.id);
|
||||
throw new errs.ItemNotFoundError(data.id);
|
||||
}
|
||||
|
||||
return redirectionHostModel
|
||||
.query()
|
||||
.where('id', row.id)
|
||||
.where("id", row.id)
|
||||
.patch({
|
||||
is_deleted: 1
|
||||
is_deleted: 1,
|
||||
})
|
||||
.then(() => {
|
||||
// Delete Nginx Config
|
||||
return internalNginx.deleteConfig('redirection_host', row)
|
||||
.then(() => {
|
||||
return internalNginx.deleteConfig("redirection_host", row).then(() => {
|
||||
return internalNginx.reload();
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'deleted',
|
||||
object_type: 'redirection-host',
|
||||
action: "deleted",
|
||||
object_type: "redirection-host",
|
||||
object_id: row.id,
|
||||
meta: _.omit(row, omissions())
|
||||
meta: _.omit(row, omissions()),
|
||||
});
|
||||
});
|
||||
})
|
||||
@@ -304,39 +317,41 @@ const internalRedirectionHost = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
enable: (access, data) => {
|
||||
return access.can('redirection_hosts:update', data.id)
|
||||
return access
|
||||
.can("redirection_hosts:update", data.id)
|
||||
.then(() => {
|
||||
return internalRedirectionHost.get(access, {
|
||||
id: data.id,
|
||||
expand: ['certificate', 'owner']
|
||||
expand: ["certificate", "owner"],
|
||||
});
|
||||
})
|
||||
.then((row) => {
|
||||
if (!row || !row.id) {
|
||||
throw new error.ItemNotFoundError(data.id);
|
||||
} else if (row.enabled) {
|
||||
throw new error.ValidationError('Host is already enabled');
|
||||
throw new errs.ItemNotFoundError(data.id);
|
||||
}
|
||||
if (row.enabled) {
|
||||
throw new errs.ValidationError("Host is already enabled");
|
||||
}
|
||||
|
||||
row.enabled = 1;
|
||||
|
||||
return redirectionHostModel
|
||||
.query()
|
||||
.where('id', row.id)
|
||||
.where("id", row.id)
|
||||
.patch({
|
||||
enabled: 1
|
||||
enabled: 1,
|
||||
})
|
||||
.then(() => {
|
||||
// Configure nginx
|
||||
return internalNginx.configure(redirectionHostModel, 'redirection_host', row);
|
||||
return internalNginx.configure(redirectionHostModel, "redirection_host", row);
|
||||
})
|
||||
.then(() => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'enabled',
|
||||
object_type: 'redirection-host',
|
||||
action: "enabled",
|
||||
object_type: "redirection-host",
|
||||
object_id: row.id,
|
||||
meta: _.omit(row, omissions())
|
||||
meta: _.omit(row, omissions()),
|
||||
});
|
||||
});
|
||||
})
|
||||
@@ -353,39 +368,40 @@ const internalRedirectionHost = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
disable: (access, data) => {
|
||||
return access.can('redirection_hosts:update', data.id)
|
||||
return access
|
||||
.can("redirection_hosts:update", data.id)
|
||||
.then(() => {
|
||||
return internalRedirectionHost.get(access, {id: data.id});
|
||||
return internalRedirectionHost.get(access, { id: data.id });
|
||||
})
|
||||
.then((row) => {
|
||||
if (!row || !row.id) {
|
||||
throw new error.ItemNotFoundError(data.id);
|
||||
} else if (!row.enabled) {
|
||||
throw new error.ValidationError('Host is already disabled');
|
||||
throw new errs.ItemNotFoundError(data.id);
|
||||
}
|
||||
if (!row.enabled) {
|
||||
throw new errs.ValidationError("Host is already disabled");
|
||||
}
|
||||
|
||||
row.enabled = 0;
|
||||
|
||||
return redirectionHostModel
|
||||
.query()
|
||||
.where('id', row.id)
|
||||
.where("id", row.id)
|
||||
.patch({
|
||||
enabled: 0
|
||||
enabled: 0,
|
||||
})
|
||||
.then(() => {
|
||||
// Delete Nginx Config
|
||||
return internalNginx.deleteConfig('redirection_host', row)
|
||||
.then(() => {
|
||||
return internalNginx.deleteConfig("redirection_host", row).then(() => {
|
||||
return internalNginx.reload();
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'disabled',
|
||||
object_type: 'redirection-host',
|
||||
action: "disabled",
|
||||
object_type: "redirection-host",
|
||||
object_id: row.id,
|
||||
meta: _.omit(row, omissions())
|
||||
meta: _.omit(row, omissions()),
|
||||
});
|
||||
});
|
||||
})
|
||||
@@ -403,34 +419,35 @@ const internalRedirectionHost = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
getAll: (access, expand, search_query) => {
|
||||
return access.can('redirection_hosts:list')
|
||||
return access
|
||||
.can("redirection_hosts:list")
|
||||
.then((access_data) => {
|
||||
let query = redirectionHostModel
|
||||
const query = redirectionHostModel
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.groupBy('id')
|
||||
.allowGraph('[owner,certificate]')
|
||||
.orderBy(castJsonIfNeed('domain_names'), 'ASC');
|
||||
.where("is_deleted", 0)
|
||||
.groupBy("id")
|
||||
.allowGraph("[owner,certificate]")
|
||||
.orderBy(castJsonIfNeed("domain_names"), "ASC");
|
||||
|
||||
if (access_data.permission_visibility !== 'all') {
|
||||
query.andWhere('owner_user_id', access.token.getUserId(1));
|
||||
if (access_data.permission_visibility !== "all") {
|
||||
query.andWhere("owner_user_id", access.token.getUserId(1));
|
||||
}
|
||||
|
||||
// Query is used for searching
|
||||
if (typeof search_query === 'string' && search_query.length > 0) {
|
||||
if (typeof search_query === "string" && search_query.length > 0) {
|
||||
query.where(function () {
|
||||
this.where(castJsonIfNeed('domain_names'), 'like', `%${search_query}%`);
|
||||
this.where(castJsonIfNeed("domain_names"), "like", `%${search_query}%`);
|
||||
});
|
||||
}
|
||||
|
||||
if (typeof expand !== 'undefined' && expand !== null) {
|
||||
query.withGraphFetched('[' + expand.join(', ') + ']');
|
||||
if (typeof expand !== "undefined" && expand !== null) {
|
||||
query.withGraphFetched(`[${expand.join(", ")}]`);
|
||||
}
|
||||
|
||||
return query.then(utils.omitRows(omissions()));
|
||||
})
|
||||
.then((rows) => {
|
||||
if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
|
||||
if (typeof expand !== "undefined" && expand !== null && expand.indexOf("certificate") !== -1) {
|
||||
return internalHost.cleanAllRowsCertificateMeta(rows);
|
||||
}
|
||||
|
||||
@@ -446,20 +463,16 @@ const internalRedirectionHost = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
getCount: (user_id, visibility) => {
|
||||
let query = redirectionHostModel
|
||||
.query()
|
||||
.count('id as count')
|
||||
.where('is_deleted', 0);
|
||||
const query = redirectionHostModel.query().count("id as count").where("is_deleted", 0);
|
||||
|
||||
if (visibility !== 'all') {
|
||||
query.andWhere('owner_user_id', user_id);
|
||||
if (visibility !== "all") {
|
||||
query.andWhere("owner_user_id", user_id);
|
||||
}
|
||||
|
||||
return query.first()
|
||||
.then((row) => {
|
||||
return parseInt(row.count, 10);
|
||||
return query.first().then((row) => {
|
||||
return Number.parseInt(row.count, 10);
|
||||
});
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
module.exports = internalRedirectionHost;
|
||||
export default internalRedirectionHost;
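The hunks above all apply the same mechanical conversion: CommonJS `require()` calls become ES module imports with explicit `.js` extensions, `module.exports` becomes `export default`, and string literals switch to double quotes. A minimal sketch of that pattern follows; the module name `internalExample` is hypothetical and only illustrates the shape, not part of the commit.

```js
// Before (CommonJS, single quotes):
// const utils = require('../lib/utils');
// const internalExample = { /* ... */ };
// module.exports = internalExample;

// After (ESM, double quotes, explicit .js extension):
import utils from "../lib/utils.js";

const internalExample = {
	// utils.omitRow(list) returns a function that strips those keys from a row,
	// which is how it is used in the .then() chains above
	omit: (row) => utils.omitRow(["is_deleted"])(row),
};

export default internalExample;
```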
@@ -1,24 +1,24 @@
const internalProxyHost = require('./proxy-host');
const internalRedirectionHost = require('./redirection-host');
const internalDeadHost = require('./dead-host');
const internalStream = require('./stream');
import internalDeadHost from "./dead-host.js";
import internalProxyHost from "./proxy-host.js";
import internalRedirectionHost from "./redirection-host.js";
import internalStream from "./stream.js";

const internalReport = {

/**
* @param {Access} access
* @return {Promise}
*/
getHostsReport: (access) => {
return access.can('reports:hosts', 1)
return access
.can("reports:hosts", 1)
.then((access_data) => {
let user_id = access.token.getUserId(1);
const userId = access.token.getUserId(1);

let promises = [
internalProxyHost.getCount(user_id, access_data.visibility),
internalRedirectionHost.getCount(user_id, access_data.visibility),
internalStream.getCount(user_id, access_data.visibility),
internalDeadHost.getCount(user_id, access_data.visibility)
const promises = [
internalProxyHost.getCount(userId, access_data.visibility),
internalRedirectionHost.getCount(userId, access_data.visibility),
internalStream.getCount(userId, access_data.visibility),
internalDeadHost.getCount(userId, access_data.visibility),
];

return Promise.all(promises);
@@ -28,11 +28,10 @@ const internalReport = {
proxy: counts.shift(),
redirection: counts.shift(),
stream: counts.shift(),
dead: counts.shift()
dead: counts.shift(),
};
});

}
},
};

module.exports = internalReport;
export default internalReport;
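For reference, the `counts.shift()` sequence above relies on the order of the `promises` array. A hedged, equivalent sketch using destructuring (not part of the commit) makes that coupling explicit:

```js
// Illustrative only: same report shape, with the Promise.all result
// destructured in the same order the promises are declared above.
const buildHostsReport = async (userId, visibility) => {
	const [proxy, redirection, stream, dead] = await Promise.all([
		internalProxyHost.getCount(userId, visibility),
		internalRedirectionHost.getCount(userId, visibility),
		internalStream.getCount(userId, visibility),
		internalDeadHost.getCount(userId, visibility),
	]);
	return { proxy, redirection, stream, dead };
};
```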
@@ -1,10 +1,9 @@
|
||||
const fs = require('fs');
|
||||
const error = require('../lib/error');
|
||||
const settingModel = require('../models/setting');
|
||||
const internalNginx = require('./nginx');
|
||||
import fs from "node:fs";
|
||||
import errs from "../lib/error.js";
|
||||
import settingModel from "../models/setting.js";
|
||||
import internalNginx from "./nginx.js";
|
||||
|
||||
const internalSetting = {
|
||||
|
||||
/**
|
||||
* @param {Access} access
|
||||
* @param {Object} data
|
||||
@@ -12,37 +11,38 @@ const internalSetting = {
|
||||
* @return {Promise}
|
||||
*/
|
||||
update: (access, data) => {
|
||||
return access.can('settings:update', data.id)
|
||||
return access
|
||||
.can("settings:update", data.id)
|
||||
.then((/*access_data*/) => {
|
||||
return internalSetting.get(access, {id: data.id});
|
||||
return internalSetting.get(access, { id: data.id });
|
||||
})
|
||||
.then((row) => {
|
||||
if (row.id !== data.id) {
|
||||
// Sanity check that something crazy hasn't happened
|
||||
throw new error.InternalValidationError('Setting could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
|
||||
throw new errs.InternalValidationError(
|
||||
`Setting could not be updated, IDs do not match: ${row.id} !== ${data.id}`,
|
||||
);
|
||||
}
|
||||
|
||||
return settingModel
|
||||
.query()
|
||||
.where({id: data.id})
|
||||
.patch(data);
|
||||
return settingModel.query().where({ id: data.id }).patch(data);
|
||||
})
|
||||
.then(() => {
|
||||
return internalSetting.get(access, {
|
||||
id: data.id
|
||||
id: data.id,
|
||||
});
|
||||
})
|
||||
.then((row) => {
|
||||
if (row.id === 'default-site') {
|
||||
if (row.id === "default-site") {
|
||||
// write the html if we need to
|
||||
if (row.value === 'html') {
|
||||
fs.writeFileSync('/data/nginx/default_www/index.html', row.meta.html, {encoding: 'utf8'});
|
||||
if (row.value === "html") {
|
||||
fs.writeFileSync("/data/nginx/default_www/index.html", row.meta.html, { encoding: "utf8" });
|
||||
}
|
||||
|
||||
// Configure nginx
|
||||
return internalNginx.deleteConfig('default')
|
||||
return internalNginx
|
||||
.deleteConfig("default")
|
||||
.then(() => {
|
||||
return internalNginx.generateConfig('default', row);
|
||||
return internalNginx.generateConfig("default", row);
|
||||
})
|
||||
.then(() => {
|
||||
return internalNginx.test();
|
||||
@@ -54,7 +54,8 @@ const internalSetting = {
|
||||
return row;
|
||||
})
|
||||
.catch((/*err*/) => {
|
||||
internalNginx.deleteConfig('default')
|
||||
internalNginx
|
||||
.deleteConfig("default")
|
||||
.then(() => {
|
||||
return internalNginx.test();
|
||||
})
|
||||
@@ -63,12 +64,11 @@ const internalSetting = {
|
||||
})
|
||||
.then(() => {
|
||||
// I'm being slack here I know..
|
||||
throw new error.ValidationError('Could not reconfigure Nginx. Please check logs.');
|
||||
throw new errs.ValidationError("Could not reconfigure Nginx. Please check logs.");
|
||||
});
|
||||
});
|
||||
} else {
|
||||
return row;
|
||||
}
|
||||
return row;
|
||||
});
|
||||
},
|
||||
|
||||
@@ -79,19 +79,16 @@ const internalSetting = {
|
||||
* @return {Promise}
|
||||
*/
|
||||
get: (access, data) => {
|
||||
return access.can('settings:get', data.id)
|
||||
return access
|
||||
.can("settings:get", data.id)
|
||||
.then(() => {
|
||||
return settingModel
|
||||
.query()
|
||||
.where('id', data.id)
|
||||
.first();
|
||||
return settingModel.query().where("id", data.id).first();
|
||||
})
|
||||
.then((row) => {
|
||||
if (row) {
|
||||
return row;
|
||||
} else {
|
||||
throw new error.ItemNotFoundError(data.id);
|
||||
}
|
||||
throw new errs.ItemNotFoundError(data.id);
|
||||
});
|
||||
},
|
||||
|
||||
@@ -102,15 +99,13 @@ const internalSetting = {
|
||||
* @returns {*}
|
||||
*/
|
||||
getCount: (access) => {
|
||||
return access.can('settings:list')
|
||||
return access
|
||||
.can("settings:list")
|
||||
.then(() => {
|
||||
return settingModel
|
||||
.query()
|
||||
.count('id as count')
|
||||
.first();
|
||||
return settingModel.query().count("id as count").first();
|
||||
})
|
||||
.then((row) => {
|
||||
return parseInt(row.count, 10);
|
||||
return Number.parseInt(row.count, 10);
|
||||
});
|
||||
},
|
||||
|
||||
@@ -121,13 +116,10 @@ const internalSetting = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
getAll: (access) => {
|
||||
return access.can('settings:list')
|
||||
.then(() => {
|
||||
return settingModel
|
||||
.query()
|
||||
.orderBy('description', 'ASC');
|
||||
return access.can("settings:list").then(() => {
|
||||
return settingModel.query().orderBy("description", "ASC");
|
||||
});
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
module.exports = internalSetting;
|
||||
export default internalSetting;
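The `default-site` branch above writes the custom HTML page and then regenerates the default server block. A hedged async sketch of that flow is shown below; the final `reload()` is an assumption, since the visible hunk ends at `test()`, and error handling is simplified compared to the catch block in the diff.

```js
// Sketch only: mirrors the promise chain above for the "default-site" setting.
const applyDefaultSite = async (row) => {
	if (row.value === "html") {
		// persist the custom landing page served by the default host
		fs.writeFileSync("/data/nginx/default_www/index.html", row.meta.html, { encoding: "utf8" });
	}
	await internalNginx.deleteConfig("default");
	await internalNginx.generateConfig("default", row);
	await internalNginx.test();
	await internalNginx.reload(); // assumed; not shown in the hunk above
	return row;
};
```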
@@ -1,87 +1,84 @@
|
||||
const _ = require('lodash');
|
||||
const error = require('../lib/error');
|
||||
const utils = require('../lib/utils');
|
||||
const streamModel = require('../models/stream');
|
||||
const internalNginx = require('./nginx');
|
||||
const internalAuditLog = require('./audit-log');
|
||||
const internalCertificate = require('./certificate');
|
||||
const internalHost = require('./host');
|
||||
const {castJsonIfNeed} = require('../lib/helpers');
|
||||
import _ from "lodash";
|
||||
import errs from "../lib/error.js";
|
||||
import { castJsonIfNeed } from "../lib/helpers.js";
|
||||
import utils from "../lib/utils.js";
|
||||
import streamModel from "../models/stream.js";
|
||||
import internalAuditLog from "./audit-log.js";
|
||||
import internalCertificate from "./certificate.js";
|
||||
import internalHost from "./host.js";
|
||||
import internalNginx from "./nginx.js";
|
||||
|
||||
function omissions () {
|
||||
return ['is_deleted', 'owner.is_deleted', 'certificate.is_deleted'];
|
||||
}
|
||||
const omissions = () => {
|
||||
return ["is_deleted", "owner.is_deleted", "certificate.is_deleted"];
|
||||
};
|
||||
|
||||
const internalStream = {
|
||||
|
||||
/**
|
||||
* @param {Access} access
|
||||
* @param {Object} data
|
||||
* @returns {Promise}
|
||||
*/
|
||||
create: (access, data) => {
|
||||
const create_certificate = data.certificate_id === 'new';
|
||||
const create_certificate = data.certificate_id === "new";
|
||||
|
||||
if (create_certificate) {
|
||||
delete data.certificate_id;
|
||||
}
|
||||
|
||||
return access.can('streams:create', data)
|
||||
return access
|
||||
.can("streams:create", data)
|
||||
.then((/*access_data*/) => {
|
||||
// TODO: At this point the existing ports should have been checked
|
||||
data.owner_user_id = access.token.getUserId(1);
|
||||
|
||||
if (typeof data.meta === 'undefined') {
|
||||
if (typeof data.meta === "undefined") {
|
||||
data.meta = {};
|
||||
}
|
||||
|
||||
// streams aren't routed by domain name so don't store domain names in the DB
|
||||
let data_no_domains = structuredClone(data);
|
||||
const data_no_domains = structuredClone(data);
|
||||
delete data_no_domains.domain_names;
|
||||
|
||||
return streamModel
|
||||
.query()
|
||||
.insertAndFetch(data_no_domains)
|
||||
.then(utils.omitRow(omissions()));
|
||||
return streamModel.query().insertAndFetch(data_no_domains).then(utils.omitRow(omissions()));
|
||||
})
|
||||
.then((row) => {
|
||||
if (create_certificate) {
|
||||
return internalCertificate.createQuickCertificate(access, data)
|
||||
return internalCertificate
|
||||
.createQuickCertificate(access, data)
|
||||
.then((cert) => {
|
||||
// update host with cert id
|
||||
return internalStream.update(access, {
|
||||
id: row.id,
|
||||
certificate_id: cert.id
|
||||
certificate_id: cert.id,
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
return row;
|
||||
});
|
||||
} else {
|
||||
return row;
|
||||
}
|
||||
return row;
|
||||
})
|
||||
.then((row) => {
|
||||
// re-fetch with cert
|
||||
return internalStream.get(access, {
|
||||
id: row.id,
|
||||
expand: ['certificate', 'owner']
|
||||
expand: ["certificate", "owner"],
|
||||
});
|
||||
})
|
||||
.then((row) => {
|
||||
// Configure nginx
|
||||
return internalNginx.configure(streamModel, 'stream', row)
|
||||
.then(() => {
|
||||
return internalNginx.configure(streamModel, "stream", row).then(() => {
|
||||
return row;
|
||||
});
|
||||
})
|
||||
.then((row) => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'created',
|
||||
object_type: 'stream',
|
||||
return internalAuditLog
|
||||
.add(access, {
|
||||
action: "created",
|
||||
object_type: "stream",
|
||||
object_id: row.id,
|
||||
meta: data
|
||||
meta: data,
|
||||
})
|
||||
.then(() => {
|
||||
return row;
|
||||
@@ -96,56 +93,65 @@ const internalStream = {
|
||||
* @return {Promise}
|
||||
*/
|
||||
update: (access, data) => {
|
||||
const create_certificate = data.certificate_id === 'new';
|
||||
let thisData = data;
|
||||
const create_certificate = thisData.certificate_id === "new";
|
||||
|
||||
if (create_certificate) {
|
||||
delete data.certificate_id;
|
||||
delete thisData.certificate_id;
|
||||
}
|
||||
|
||||
return access.can('streams:update', data.id)
|
||||
return access
|
||||
.can("streams:update", thisData.id)
|
||||
.then((/*access_data*/) => {
|
||||
// TODO: at this point the existing streams should have been checked
|
||||
return internalStream.get(access, {id: data.id});
|
||||
return internalStream.get(access, { id: thisData.id });
|
||||
})
|
||||
.then((row) => {
|
||||
if (row.id !== data.id) {
|
||||
if (row.id !== thisData.id) {
|
||||
// Sanity check that something crazy hasn't happened
|
||||
throw new error.InternalValidationError('Stream could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
|
||||
throw new errs.InternalValidationError(
|
||||
`Stream could not be updated, IDs do not match: ${row.id} !== ${thisData.id}`,
|
||||
);
|
||||
}
|
||||
|
||||
if (create_certificate) {
|
||||
return internalCertificate.createQuickCertificate(access, {
|
||||
domain_names: data.domain_names || row.domain_names,
|
||||
meta: _.assign({}, row.meta, data.meta)
|
||||
return internalCertificate
|
||||
.createQuickCertificate(access, {
|
||||
domain_names: thisData.domain_names || row.domain_names,
|
||||
meta: _.assign({}, row.meta, thisData.meta),
|
||||
})
|
||||
.then((cert) => {
|
||||
// update host with cert id
|
||||
data.certificate_id = cert.id;
|
||||
thisData.certificate_id = cert.id;
|
||||
})
|
||||
.then(() => {
|
||||
return row;
|
||||
});
|
||||
} else {
|
||||
return row;
|
||||
}
|
||||
return row;
|
||||
})
|
||||
.then((row) => {
|
||||
// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
|
||||
data = _.assign({}, {
|
||||
domain_names: row.domain_names
|
||||
}, data);
|
||||
thisData = _.assign(
|
||||
{},
|
||||
{
|
||||
domain_names: row.domain_names,
|
||||
},
|
||||
thisData,
|
||||
);
|
||||
|
||||
return streamModel
|
||||
.query()
|
||||
.patchAndFetchById(row.id, data)
|
||||
.patchAndFetchById(row.id, thisData)
|
||||
.then(utils.omitRow(omissions()))
|
||||
.then((saved_row) => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'updated',
|
||||
object_type: 'stream',
|
||||
return internalAuditLog
|
||||
.add(access, {
|
||||
action: "updated",
|
||||
object_type: "stream",
|
||||
object_id: row.id,
|
||||
meta: data
|
||||
meta: thisData,
|
||||
})
|
||||
.then(() => {
|
||||
return saved_row;
|
||||
@@ -153,13 +159,10 @@ const internalStream = {
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
return internalStream.get(access, {id: data.id, expand: ['owner', 'certificate']})
|
||||
.then((row) => {
|
||||
return internalNginx.configure(streamModel, 'stream', row)
|
||||
.then((new_meta) => {
|
||||
return internalStream.get(access, { id: thisData.id, expand: ["owner", "certificate"] }).then((row) => {
|
||||
return internalNginx.configure(streamModel, "stream", row).then((new_meta) => {
|
||||
row.meta = new_meta;
|
||||
row = internalHost.cleanRowCertificateMeta(row);
|
||||
return _.omit(row, omissions());
|
||||
return _.omit(internalHost.cleanRowCertificateMeta(row), omissions());
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -174,39 +177,39 @@ const internalStream = {
|
||||
* @return {Promise}
|
||||
*/
|
||||
get: (access, data) => {
|
||||
if (typeof data === 'undefined') {
|
||||
data = {};
|
||||
}
|
||||
const thisData = data || {};
|
||||
|
||||
return access.can('streams:get', data.id)
|
||||
return access
|
||||
.can("streams:get", thisData.id)
|
||||
.then((access_data) => {
|
||||
let query = streamModel
|
||||
const query = streamModel
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.andWhere('id', data.id)
|
||||
.allowGraph('[owner,certificate]')
|
||||
.where("is_deleted", 0)
|
||||
.andWhere("id", thisData.id)
|
||||
.allowGraph("[owner,certificate]")
|
||||
.first();
|
||||
|
||||
if (access_data.permission_visibility !== 'all') {
|
||||
query.andWhere('owner_user_id', access.token.getUserId(1));
|
||||
if (access_data.permission_visibility !== "all") {
|
||||
query.andWhere("owner_user_id", access.token.getUserId(1));
|
||||
}
|
||||
|
||||
if (typeof data.expand !== 'undefined' && data.expand !== null) {
|
||||
query.withGraphFetched('[' + data.expand.join(', ') + ']');
|
||||
if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
|
||||
query.withGraphFetched(`[${thisData.expand.join(", ")}]`);
|
||||
}
|
||||
|
||||
return query.then(utils.omitRow(omissions()));
|
||||
})
|
||||
.then((row) => {
|
||||
if (!row || !row.id) {
|
||||
throw new error.ItemNotFoundError(data.id);
|
||||
let thisRow = row;
|
||||
if (!thisRow || !thisRow.id) {
|
||||
throw new errs.ItemNotFoundError(thisData.id);
|
||||
}
|
||||
row = internalHost.cleanRowCertificateMeta(row);
|
||||
thisRow = internalHost.cleanRowCertificateMeta(thisRow);
|
||||
// Custom omissions
|
||||
if (typeof data.omit !== 'undefined' && data.omit !== null) {
|
||||
row = _.omit(row, data.omit);
|
||||
if (typeof thisData.omit !== "undefined" && thisData.omit !== null) {
|
||||
return _.omit(thisRow, thisData.omit);
|
||||
}
|
||||
return row;
|
||||
return thisRow;
|
||||
});
|
||||
},
|
||||
|
||||
@@ -218,35 +221,35 @@ const internalStream = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
delete: (access, data) => {
|
||||
return access.can('streams:delete', data.id)
|
||||
return access
|
||||
.can("streams:delete", data.id)
|
||||
.then(() => {
|
||||
return internalStream.get(access, {id: data.id});
|
||||
return internalStream.get(access, { id: data.id });
|
||||
})
|
||||
.then((row) => {
|
||||
if (!row || !row.id) {
|
||||
throw new error.ItemNotFoundError(data.id);
|
||||
throw new errs.ItemNotFoundError(data.id);
|
||||
}
|
||||
|
||||
return streamModel
|
||||
.query()
|
||||
.where('id', row.id)
|
||||
.where("id", row.id)
|
||||
.patch({
|
||||
is_deleted: 1
|
||||
is_deleted: 1,
|
||||
})
|
||||
.then(() => {
|
||||
// Delete Nginx Config
|
||||
return internalNginx.deleteConfig('stream', row)
|
||||
.then(() => {
|
||||
return internalNginx.deleteConfig("stream", row).then(() => {
|
||||
return internalNginx.reload();
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'deleted',
|
||||
object_type: 'stream',
|
||||
action: "deleted",
|
||||
object_type: "stream",
|
||||
object_id: row.id,
|
||||
meta: _.omit(row, omissions())
|
||||
meta: _.omit(row, omissions()),
|
||||
});
|
||||
});
|
||||
})
|
||||
@@ -263,39 +266,41 @@ const internalStream = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
enable: (access, data) => {
|
||||
return access.can('streams:update', data.id)
|
||||
return access
|
||||
.can("streams:update", data.id)
|
||||
.then(() => {
|
||||
return internalStream.get(access, {
|
||||
id: data.id,
|
||||
expand: ['certificate', 'owner']
|
||||
expand: ["certificate", "owner"],
|
||||
});
|
||||
})
|
||||
.then((row) => {
|
||||
if (!row || !row.id) {
|
||||
throw new error.ItemNotFoundError(data.id);
|
||||
} else if (row.enabled) {
|
||||
throw new error.ValidationError('Stream is already enabled');
|
||||
throw new errs.ItemNotFoundError(data.id);
|
||||
}
|
||||
if (row.enabled) {
|
||||
throw new errs.ValidationError("Stream is already enabled");
|
||||
}
|
||||
|
||||
row.enabled = 1;
|
||||
|
||||
return streamModel
|
||||
.query()
|
||||
.where('id', row.id)
|
||||
.where("id", row.id)
|
||||
.patch({
|
||||
enabled: 1
|
||||
enabled: 1,
|
||||
})
|
||||
.then(() => {
|
||||
// Configure nginx
|
||||
return internalNginx.configure(streamModel, 'stream', row);
|
||||
return internalNginx.configure(streamModel, "stream", row);
|
||||
})
|
||||
.then(() => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'enabled',
|
||||
object_type: 'stream',
|
||||
action: "enabled",
|
||||
object_type: "stream",
|
||||
object_id: row.id,
|
||||
meta: _.omit(row, omissions())
|
||||
meta: _.omit(row, omissions()),
|
||||
});
|
||||
});
|
||||
})
|
||||
@@ -312,39 +317,40 @@ const internalStream = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
disable: (access, data) => {
|
||||
return access.can('streams:update', data.id)
|
||||
return access
|
||||
.can("streams:update", data.id)
|
||||
.then(() => {
|
||||
return internalStream.get(access, {id: data.id});
|
||||
return internalStream.get(access, { id: data.id });
|
||||
})
|
||||
.then((row) => {
|
||||
if (!row || !row.id) {
|
||||
throw new error.ItemNotFoundError(data.id);
|
||||
} else if (!row.enabled) {
|
||||
throw new error.ValidationError('Stream is already disabled');
|
||||
throw new errs.ItemNotFoundError(data.id);
|
||||
}
|
||||
if (!row.enabled) {
|
||||
throw new errs.ValidationError("Stream is already disabled");
|
||||
}
|
||||
|
||||
row.enabled = 0;
|
||||
|
||||
return streamModel
|
||||
.query()
|
||||
.where('id', row.id)
|
||||
.where("id", row.id)
|
||||
.patch({
|
||||
enabled: 0
|
||||
enabled: 0,
|
||||
})
|
||||
.then(() => {
|
||||
// Delete Nginx Config
|
||||
return internalNginx.deleteConfig('stream', row)
|
||||
.then(() => {
|
||||
return internalNginx.deleteConfig("stream", row).then(() => {
|
||||
return internalNginx.reload();
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'disabled',
|
||||
object_type: 'stream-host',
|
||||
action: "disabled",
|
||||
object_type: "stream-host",
|
||||
object_id: row.id,
|
||||
meta: _.omit(row, omissions())
|
||||
meta: _.omit(row, omissions()),
|
||||
});
|
||||
});
|
||||
})
|
||||
@@ -362,34 +368,35 @@ const internalStream = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
getAll: (access, expand, search_query) => {
|
||||
return access.can('streams:list')
|
||||
return access
|
||||
.can("streams:list")
|
||||
.then((access_data) => {
|
||||
const query = streamModel
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.groupBy('id')
|
||||
.allowGraph('[owner,certificate]')
|
||||
.orderBy('incoming_port', 'ASC');
|
||||
.where("is_deleted", 0)
|
||||
.groupBy("id")
|
||||
.allowGraph("[owner,certificate]")
|
||||
.orderBy("incoming_port", "ASC");
|
||||
|
||||
if (access_data.permission_visibility !== 'all') {
|
||||
query.andWhere('owner_user_id', access.token.getUserId(1));
|
||||
if (access_data.permission_visibility !== "all") {
|
||||
query.andWhere("owner_user_id", access.token.getUserId(1));
|
||||
}
|
||||
|
||||
// Query is used for searching
|
||||
if (typeof search_query === 'string' && search_query.length > 0) {
|
||||
if (typeof search_query === "string" && search_query.length > 0) {
|
||||
query.where(function () {
|
||||
this.where(castJsonIfNeed('incoming_port'), 'like', `%${search_query}%`);
|
||||
this.where(castJsonIfNeed("incoming_port"), "like", `%${search_query}%`);
|
||||
});
|
||||
}
|
||||
|
||||
if (typeof expand !== 'undefined' && expand !== null) {
|
||||
query.withGraphFetched('[' + expand.join(', ') + ']');
|
||||
if (typeof expand !== "undefined" && expand !== null) {
|
||||
query.withGraphFetched(`[${expand.join(", ")}]`);
|
||||
}
|
||||
|
||||
return query.then(utils.omitRows(omissions()));
|
||||
})
|
||||
.then((rows) => {
|
||||
if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
|
||||
if (typeof expand !== "undefined" && expand !== null && expand.indexOf("certificate") !== -1) {
|
||||
return internalHost.cleanAllRowsCertificateMeta(rows);
|
||||
}
|
||||
|
||||
@@ -405,20 +412,16 @@ const internalStream = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
getCount: (user_id, visibility) => {
|
||||
const query = streamModel
|
||||
.query()
|
||||
.count('id AS count')
|
||||
.where('is_deleted', 0);
|
||||
const query = streamModel.query().count("id AS count").where("is_deleted", 0);
|
||||
|
||||
if (visibility !== 'all') {
|
||||
query.andWhere('owner_user_id', user_id);
|
||||
if (visibility !== "all") {
|
||||
query.andWhere("owner_user_id", user_id);
|
||||
}
|
||||
|
||||
return query.first()
|
||||
.then((row) => {
|
||||
return parseInt(row.count, 10);
|
||||
return query.first().then((row) => {
|
||||
return Number.parseInt(row.count, 10);
|
||||
});
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
module.exports = internalStream;
|
||||
export default internalStream;
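A short, hedged usage example for the stream listing path above; the port value and expansion list are placeholders, and the returned fields are only those the query above orders and filters on.

```js
// Illustrative call into internalStream.getAll(access, expand, search_query).
// The search term is matched against incoming_port via castJsonIfNeed(...) LIKE %term%.
const findStreamsOnPort = async (access, port) => {
	const rows = await internalStream.getAll(access, ["owner", "certificate"], String(port));
	return rows.map((row) => ({ id: row.id, incoming_port: row.incoming_port }));
};
```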
@@ -1,14 +1,14 @@
|
||||
const _ = require('lodash');
|
||||
const error = require('../lib/error');
|
||||
const userModel = require('../models/user');
|
||||
const authModel = require('../models/auth');
|
||||
const helpers = require('../lib/helpers');
|
||||
const TokenModel = require('../models/token');
|
||||
import _ from "lodash";
|
||||
import errs from "../lib/error.js";
|
||||
import { parseDatePeriod } from "../lib/helpers.js";
|
||||
import authModel from "../models/auth.js";
|
||||
import TokenModel from "../models/token.js";
|
||||
import userModel from "../models/user.js";
|
||||
|
||||
const ERROR_MESSAGE_INVALID_AUTH = 'Invalid email or password';
|
||||
|
||||
module.exports = {
|
||||
const ERROR_MESSAGE_INVALID_AUTH = "Invalid email or password";
|
||||
const ERROR_MESSAGE_INVALID_AUTH_I18N = "error.invalid-auth";
|
||||
|
||||
export default {
|
||||
/**
|
||||
* @param {Object} data
|
||||
* @param {String} data.identity
|
||||
@@ -19,68 +19,65 @@ module.exports = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
getTokenFromEmail: (data, issuer) => {
|
||||
let Token = new TokenModel();
|
||||
const Token = TokenModel();
|
||||
|
||||
data.scope = data.scope || 'user';
|
||||
data.expiry = data.expiry || '1d';
|
||||
data.scope = data.scope || "user";
|
||||
data.expiry = data.expiry || "1d";
|
||||
|
||||
return userModel
|
||||
.query()
|
||||
.where('email', data.identity.toLowerCase().trim())
|
||||
.andWhere('is_deleted', 0)
|
||||
.andWhere('is_disabled', 0)
|
||||
.where("email", data.identity.toLowerCase().trim())
|
||||
.andWhere("is_deleted", 0)
|
||||
.andWhere("is_disabled", 0)
|
||||
.first()
|
||||
.then((user) => {
|
||||
if (user) {
|
||||
// Get auth
|
||||
return authModel
|
||||
.query()
|
||||
.where('user_id', '=', user.id)
|
||||
.where('type', '=', 'password')
|
||||
.where("user_id", "=", user.id)
|
||||
.where("type", "=", "password")
|
||||
.first()
|
||||
.then((auth) => {
|
||||
if (auth) {
|
||||
return auth.verifyPassword(data.secret)
|
||||
.then((valid) => {
|
||||
return auth.verifyPassword(data.secret).then((valid) => {
|
||||
if (valid) {
|
||||
|
||||
if (data.scope !== 'user' && _.indexOf(user.roles, data.scope) === -1) {
|
||||
if (data.scope !== "user" && _.indexOf(user.roles, data.scope) === -1) {
|
||||
// The scope requested doesn't exist as a role against the user,
|
||||
// you shall not pass.
|
||||
throw new error.AuthError('Invalid scope: ' + data.scope);
|
||||
throw new errs.AuthError(`Invalid scope: ${data.scope}`);
|
||||
}
|
||||
|
||||
// Create a moment of the expiry expression
|
||||
let expiry = helpers.parseDatePeriod(data.expiry);
|
||||
const expiry = parseDatePeriod(data.expiry);
|
||||
if (expiry === null) {
|
||||
throw new error.AuthError('Invalid expiry time: ' + data.expiry);
|
||||
throw new errs.AuthError(`Invalid expiry time: ${data.expiry}`);
|
||||
}
|
||||
|
||||
return Token.create({
|
||||
iss: issuer || 'api',
|
||||
iss: issuer || "api",
|
||||
attrs: {
|
||||
id: user.id
|
||||
id: user.id,
|
||||
},
|
||||
scope: [data.scope],
|
||||
expiresIn: data.expiry
|
||||
})
|
||||
.then((signed) => {
|
||||
expiresIn: data.expiry,
|
||||
}).then((signed) => {
|
||||
return {
|
||||
token: signed.token,
|
||||
expires: expiry.toISOString()
|
||||
expires: expiry.toISOString(),
|
||||
};
|
||||
});
|
||||
} else {
|
||||
throw new error.AuthError(ERROR_MESSAGE_INVALID_AUTH);
|
||||
}
|
||||
throw new errs.AuthError(
|
||||
ERROR_MESSAGE_INVALID_AUTH,
|
||||
ERROR_MESSAGE_INVALID_AUTH_I18N,
|
||||
);
|
||||
});
|
||||
} else {
|
||||
throw new error.AuthError(ERROR_MESSAGE_INVALID_AUTH);
|
||||
}
|
||||
throw new errs.AuthError(ERROR_MESSAGE_INVALID_AUTH);
|
||||
});
|
||||
} else {
|
||||
throw new error.AuthError(ERROR_MESSAGE_INVALID_AUTH);
|
||||
}
|
||||
throw new errs.AuthError(ERROR_MESSAGE_INVALID_AUTH);
|
||||
});
|
||||
},
|
||||
|
||||
@@ -92,48 +89,45 @@ module.exports = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
getFreshToken: (access, data) => {
|
||||
let Token = new TokenModel();
|
||||
const Token = TokenModel();
|
||||
const thisData = data || {};
|
||||
|
||||
data = data || {};
|
||||
data.expiry = data.expiry || '1d';
|
||||
|
||||
if (access && access.token.getUserId(0)) {
|
||||
thisData.expiry = thisData.expiry || "1d";
|
||||
|
||||
if (access?.token.getUserId(0)) {
|
||||
// Create a moment of the expiry expression
|
||||
let expiry = helpers.parseDatePeriod(data.expiry);
|
||||
const expiry = parseDatePeriod(thisData.expiry);
|
||||
if (expiry === null) {
|
||||
throw new error.AuthError('Invalid expiry time: ' + data.expiry);
|
||||
throw new errs.AuthError(`Invalid expiry time: ${thisData.expiry}`);
|
||||
}
|
||||
|
||||
let token_attrs = {
|
||||
id: access.token.getUserId(0)
|
||||
const token_attrs = {
|
||||
id: access.token.getUserId(0),
|
||||
};
|
||||
|
||||
// Only admins can request otherwise scoped tokens
|
||||
let scope = access.token.get('scope');
|
||||
if (data.scope && access.token.hasScope('admin')) {
|
||||
scope = [data.scope];
|
||||
let scope = access.token.get("scope");
|
||||
if (thisData.scope && access.token.hasScope("admin")) {
|
||||
scope = [thisData.scope];
|
||||
|
||||
if (data.scope === 'job-board' || data.scope === 'worker') {
|
||||
if (thisData.scope === "job-board" || thisData.scope === "worker") {
|
||||
token_attrs.id = 0;
|
||||
}
|
||||
}
|
||||
|
||||
return Token.create({
|
||||
iss: 'api',
|
||||
iss: "api",
|
||||
scope: scope,
|
||||
attrs: token_attrs,
|
||||
expiresIn: data.expiry
|
||||
})
|
||||
.then((signed) => {
|
||||
expiresIn: thisData.expiry,
|
||||
}).then((signed) => {
|
||||
return {
|
||||
token: signed.token,
|
||||
expires: expiry.toISOString()
|
||||
expires: expiry.toISOString(),
|
||||
};
|
||||
});
|
||||
} else {
|
||||
throw new error.AssertionFailedError('Existing token contained invalid user data');
|
||||
}
|
||||
throw new error.AssertionFailedError("Existing token contained invalid user data");
|
||||
},
|
||||
|
||||
/**
|
||||
@@ -141,24 +135,23 @@ module.exports = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
getTokenFromUser: (user) => {
|
||||
const expire = '1d';
|
||||
const expire = "1d";
|
||||
const Token = new TokenModel();
|
||||
const expiry = helpers.parseDatePeriod(expire);
|
||||
const expiry = parseDatePeriod(expire);
|
||||
|
||||
return Token.create({
|
||||
iss: 'api',
|
||||
iss: "api",
|
||||
attrs: {
|
||||
id: user.id
|
||||
id: user.id,
|
||||
},
|
||||
scope: ['user'],
|
||||
expiresIn: expire
|
||||
})
|
||||
.then((signed) => {
|
||||
scope: ["user"],
|
||||
expiresIn: expire,
|
||||
}).then((signed) => {
|
||||
return {
|
||||
token: signed.token,
|
||||
expires: expiry.toISOString(),
|
||||
user: user
|
||||
user: user,
|
||||
};
|
||||
});
|
||||
}
|
||||
},
|
||||
};
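A hedged usage sketch for `getTokenFromEmail` above: the credentials and issuer are placeholders, and the import path assumes the file lives at `backend/internal/token.js`.

```js
import internalToken from "./internal/token.js"; // path assumed

const signIn = async () => {
	// scope defaults to "user" and expiry to "1d" when omitted
	const { token, expires } = await internalToken.getTokenFromEmail(
		{ identity: "admin@example.com", secret: "changeme" },
		"api",
	);
	return { token, expires }; // expires is an ISO-8601 string
};
```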
@@ -1,43 +1,41 @@
|
||||
const _ = require('lodash');
|
||||
const error = require('../lib/error');
|
||||
const utils = require('../lib/utils');
|
||||
const userModel = require('../models/user');
|
||||
const userPermissionModel = require('../models/user_permission');
|
||||
const authModel = require('../models/auth');
|
||||
const gravatar = require('gravatar');
|
||||
const internalToken = require('./token');
|
||||
const internalAuditLog = require('./audit-log');
|
||||
import gravatar from "gravatar";
|
||||
import _ from "lodash";
|
||||
import errs from "../lib/error.js";
|
||||
import utils from "../lib/utils.js";
|
||||
import authModel from "../models/auth.js";
|
||||
import userModel from "../models/user.js";
|
||||
import userPermissionModel from "../models/user_permission.js";
|
||||
import internalAuditLog from "./audit-log.js";
|
||||
import internalToken from "./token.js";
|
||||
|
||||
function omissions () {
|
||||
return ['is_deleted'];
|
||||
const omissions = () => {
|
||||
return ["is_deleted"];
|
||||
}
|
||||
|
||||
const internalUser = {
|
||||
const DEFAULT_AVATAR = 'https://gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=200&d=mp&r=g';
|
||||
|
||||
const internalUser = {
|
||||
/**
|
||||
* @param {Access} access
|
||||
* @param {Object} data
|
||||
* @returns {Promise}
|
||||
*/
|
||||
create: (access, data) => {
|
||||
let auth = data.auth || null;
|
||||
const auth = data.auth || null;
|
||||
delete data.auth;
|
||||
|
||||
data.avatar = data.avatar || '';
|
||||
data.avatar = data.avatar || "";
|
||||
data.roles = data.roles || [];
|
||||
|
||||
if (typeof data.is_disabled !== 'undefined') {
|
||||
if (typeof data.is_disabled !== "undefined") {
|
||||
data.is_disabled = data.is_disabled ? 1 : 0;
|
||||
}
|
||||
|
||||
return access.can('users:create', data)
|
||||
return access
|
||||
.can("users:create", data)
|
||||
.then(() => {
|
||||
data.avatar = gravatar.url(data.email, {default: 'mm'});
|
||||
|
||||
return userModel
|
||||
.query()
|
||||
.insertAndFetch(data)
|
||||
.then(utils.omitRow(omissions()));
|
||||
data.avatar = gravatar.url(data.email, { default: "mm" });
|
||||
return userModel.query().insertAndFetch(data).then(utils.omitRow(omissions()));
|
||||
})
|
||||
.then((user) => {
|
||||
if (auth) {
|
||||
@@ -47,42 +45,42 @@ const internalUser = {
|
||||
user_id: user.id,
|
||||
type: auth.type,
|
||||
secret: auth.secret,
|
||||
meta: {}
|
||||
meta: {},
|
||||
})
|
||||
.then(() => {
|
||||
return user;
|
||||
});
|
||||
} else {
|
||||
return user;
|
||||
}
|
||||
return user;
|
||||
})
|
||||
.then((user) => {
|
||||
// Create permissions row as well
|
||||
let is_admin = data.roles.indexOf('admin') !== -1;
|
||||
const is_admin = data.roles.indexOf("admin") !== -1;
|
||||
|
||||
return userPermissionModel
|
||||
.query()
|
||||
.insert({
|
||||
user_id: user.id,
|
||||
visibility: is_admin ? 'all' : 'user',
|
||||
proxy_hosts: 'manage',
|
||||
redirection_hosts: 'manage',
|
||||
dead_hosts: 'manage',
|
||||
streams: 'manage',
|
||||
access_lists: 'manage',
|
||||
certificates: 'manage'
|
||||
visibility: is_admin ? "all" : "user",
|
||||
proxy_hosts: "manage",
|
||||
redirection_hosts: "manage",
|
||||
dead_hosts: "manage",
|
||||
streams: "manage",
|
||||
access_lists: "manage",
|
||||
certificates: "manage",
|
||||
})
|
||||
.then(() => {
|
||||
return internalUser.get(access, {id: user.id, expand: ['permissions']});
|
||||
return internalUser.get(access, { id: user.id, expand: ["permissions"] });
|
||||
});
|
||||
})
|
||||
.then((user) => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'created',
|
||||
object_type: 'user',
|
||||
return internalAuditLog
|
||||
.add(access, {
|
||||
action: "created",
|
||||
object_type: "user",
|
||||
object_id: user.id,
|
||||
meta: user
|
||||
meta: user,
|
||||
})
|
||||
.then(() => {
|
||||
return user;
|
||||
@@ -99,29 +97,25 @@ const internalUser = {
|
||||
* @return {Promise}
|
||||
*/
|
||||
update: (access, data) => {
|
||||
if (typeof data.is_disabled !== 'undefined') {
|
||||
if (typeof data.is_disabled !== "undefined") {
|
||||
data.is_disabled = data.is_disabled ? 1 : 0;
|
||||
}
|
||||
|
||||
return access.can('users:update', data.id)
|
||||
return access
|
||||
.can("users:update", data.id)
|
||||
.then(() => {
|
||||
|
||||
// Make sure that the user being updated doesn't change their email to another user that is already using it
|
||||
// 1. get user we want to update
|
||||
return internalUser.get(access, {id: data.id})
|
||||
.then((user) => {
|
||||
|
||||
return internalUser.get(access, { id: data.id }).then((user) => {
|
||||
// 2. if email is to be changed, find other users with that email
|
||||
if (typeof data.email !== 'undefined') {
|
||||
if (typeof data.email !== "undefined") {
|
||||
data.email = data.email.toLowerCase().trim();
|
||||
|
||||
if (user.email !== data.email) {
|
||||
return internalUser.isEmailAvailable(data.email, data.id)
|
||||
.then((available) => {
|
||||
return internalUser.isEmailAvailable(data.email, data.id).then((available) => {
|
||||
if (!available) {
|
||||
throw new error.ValidationError('Email address already in use - ' + data.email);
|
||||
throw new errs.ValidationError(`Email address already in use - ${data.email}`);
|
||||
}
|
||||
|
||||
return user;
|
||||
});
|
||||
}
|
||||
@@ -134,26 +128,25 @@ const internalUser = {
|
||||
.then((user) => {
|
||||
if (user.id !== data.id) {
|
||||
// Sanity check that something crazy hasn't happened
|
||||
throw new error.InternalValidationError('User could not be updated, IDs do not match: ' + user.id + ' !== ' + data.id);
|
||||
throw new errs.InternalValidationError(
|
||||
`User could not be updated, IDs do not match: ${user.id} !== ${data.id}`,
|
||||
);
|
||||
}
|
||||
|
||||
data.avatar = gravatar.url(data.email || user.email, {default: 'mm'});
|
||||
|
||||
return userModel
|
||||
.query()
|
||||
.patchAndFetchById(user.id, data)
|
||||
.then(utils.omitRow(omissions()));
|
||||
data.avatar = gravatar.url(data.email || user.email, { default: "mm" });
|
||||
return userModel.query().patchAndFetchById(user.id, data).then(utils.omitRow(omissions()));
|
||||
})
|
||||
.then(() => {
|
||||
return internalUser.get(access, {id: data.id});
|
||||
return internalUser.get(access, { id: data.id });
|
||||
})
|
||||
.then((user) => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'updated',
|
||||
object_type: 'user',
|
||||
return internalAuditLog
|
||||
.add(access, {
|
||||
action: "updated",
|
||||
object_type: "user",
|
||||
object_id: user.id,
|
||||
meta: data
|
||||
meta: data,
|
||||
})
|
||||
.then(() => {
|
||||
return user;
|
||||
@@ -170,37 +163,41 @@ const internalUser = {
|
||||
* @return {Promise}
|
||||
*/
|
||||
get: (access, data) => {
|
||||
if (typeof data === 'undefined') {
|
||||
data = {};
|
||||
const thisData = data || {};
|
||||
|
||||
if (typeof thisData.id === "undefined" || !thisData.id) {
|
||||
thisData.id = access.token.getUserId(0);
|
||||
}
|
||||
|
||||
if (typeof data.id === 'undefined' || !data.id) {
|
||||
data.id = access.token.getUserId(0);
|
||||
}
|
||||
|
||||
return access.can('users:get', data.id)
|
||||
return access
|
||||
.can("users:get", thisData.id)
|
||||
.then(() => {
|
||||
let query = userModel
|
||||
const query = userModel
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.andWhere('id', data.id)
|
||||
.allowGraph('[permissions]')
|
||||
.where("is_deleted", 0)
|
||||
.andWhere("id", thisData.id)
|
||||
.allowGraph("[permissions]")
|
||||
.first();
|
||||
|
||||
if (typeof data.expand !== 'undefined' && data.expand !== null) {
|
||||
query.withGraphFetched('[' + data.expand.join(', ') + ']');
|
||||
if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
|
||||
query.withGraphFetched(`[${thisData.expand.join(", ")}]`);
|
||||
}
|
||||
|
||||
return query.then(utils.omitRow(omissions()));
|
||||
})
|
||||
.then((row) => {
|
||||
if (!row || !row.id) {
|
||||
throw new error.ItemNotFoundError(data.id);
|
||||
throw new errs.ItemNotFoundError(thisData.id);
|
||||
}
|
||||
// Custom omissions
|
||||
if (typeof data.omit !== 'undefined' && data.omit !== null) {
|
||||
row = _.omit(row, data.omit);
|
||||
if (typeof thisData.omit !== "undefined" && thisData.omit !== null) {
|
||||
return _.omit(row, thisData.omit);
|
||||
}
|
||||
|
||||
if (row.avatar === "") {
|
||||
row.avatar = DEFAULT_AVATAR;
|
||||
}
|
||||
|
||||
return row;
|
||||
});
|
||||
},
|
||||
@@ -213,18 +210,13 @@ const internalUser = {
|
||||
* @param user_id
|
||||
*/
|
||||
isEmailAvailable: (email, user_id) => {
|
||||
let query = userModel
|
||||
.query()
|
||||
.where('email', '=', email.toLowerCase().trim())
|
||||
.where('is_deleted', 0)
|
||||
.first();
|
||||
const query = userModel.query().where("email", "=", email.toLowerCase().trim()).where("is_deleted", 0).first();
|
||||
|
||||
if (typeof user_id !== 'undefined') {
|
||||
query.where('id', '!=', user_id);
|
||||
if (typeof user_id !== "undefined") {
|
||||
query.where("id", "!=", user_id);
|
||||
}
|
||||
|
||||
return query
|
||||
.then((user) => {
|
||||
return query.then((user) => {
|
||||
return !user;
|
||||
});
|
||||
},
|
||||
@@ -237,33 +229,34 @@ const internalUser = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
delete: (access, data) => {
|
||||
return access.can('users:delete', data.id)
|
||||
return access
|
||||
.can("users:delete", data.id)
|
||||
.then(() => {
|
||||
return internalUser.get(access, {id: data.id});
|
||||
return internalUser.get(access, { id: data.id });
|
||||
})
|
||||
.then((user) => {
|
||||
if (!user) {
|
||||
throw new error.ItemNotFoundError(data.id);
|
||||
throw new errs.ItemNotFoundError(data.id);
|
||||
}
|
||||
|
||||
// Make sure user can't delete themselves
|
||||
if (user.id === access.token.getUserId(0)) {
|
||||
throw new error.PermissionError('You cannot delete yourself.');
|
||||
throw new errs.PermissionError("You cannot delete yourself.");
|
||||
}
|
||||
|
||||
return userModel
|
||||
.query()
|
||||
.where('id', user.id)
|
||||
.where("id", user.id)
|
||||
.patch({
|
||||
is_deleted: 1
|
||||
is_deleted: 1,
|
||||
})
|
||||
.then(() => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'deleted',
|
||||
object_type: 'user',
|
||||
action: "deleted",
|
||||
object_type: "user",
|
||||
object_id: user.id,
|
||||
meta: _.omit(user, omissions())
|
||||
meta: _.omit(user, omissions()),
|
||||
});
|
||||
});
|
||||
})
|
||||
@@ -280,26 +273,26 @@ const internalUser = {
|
||||
* @returns {*}
|
||||
*/
|
||||
getCount: (access, search_query) => {
|
||||
return access.can('users:list')
|
||||
return access
|
||||
.can("users:list")
|
||||
.then(() => {
|
||||
let query = userModel
|
||||
.query()
|
||||
.count('id as count')
|
||||
.where('is_deleted', 0)
|
||||
.first();
|
||||
const query = userModel.query().count("id as count").where("is_deleted", 0).first();
|
||||
|
||||
// Query is used for searching
|
||||
if (typeof search_query === 'string') {
|
||||
if (typeof search_query === "string") {
|
||||
query.where(function () {
|
||||
this.where('user.name', 'like', '%' + search_query + '%')
|
||||
.orWhere('user.email', 'like', '%' + search_query + '%');
|
||||
this.where("user.name", "like", `%${search_query}%`).orWhere(
|
||||
"user.email",
|
||||
"like",
|
||||
`%${search_query}%`,
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
return query;
|
||||
})
|
||||
.then((row) => {
|
||||
return parseInt(row.count, 10);
|
||||
return Number.parseInt(row.count, 10);
|
||||
});
|
||||
},
|
||||
|
||||
@@ -311,30 +304,32 @@ const internalUser = {
|
||||
* @param {String} [search_query]
|
||||
* @returns {Promise}
|
||||
*/
|
||||
getAll: (access, expand, search_query) => {
|
||||
return access.can('users:list')
|
||||
.then(() => {
|
||||
let query = userModel
|
||||
getAll: async (access, expand, search_query) => {
|
||||
await access.can("users:list");
|
||||
const query = userModel
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.groupBy('id')
|
||||
.allowGraph('[permissions]')
|
||||
.orderBy('name', 'ASC');
|
||||
.where("is_deleted", 0)
|
||||
.groupBy("id")
|
||||
.allowGraph("[permissions]")
|
||||
.orderBy("name", "ASC");
|
||||
|
||||
// Query is used for searching
|
||||
if (typeof search_query === 'string') {
|
||||
if (typeof search_query === "string") {
|
||||
query.where(function () {
|
||||
this.where('name', 'like', '%' + search_query + '%')
|
||||
.orWhere('email', 'like', '%' + search_query + '%');
|
||||
this.where("name", "like", `%${search_query}%`).orWhere(
|
||||
"email",
|
||||
"like",
|
||||
`%${search_query}%`,
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
if (typeof expand !== 'undefined' && expand !== null) {
|
||||
query.withGraphFetched('[' + expand.join(', ') + ']');
|
||||
if (typeof expand !== "undefined" && expand !== null) {
|
||||
query.withGraphFetched(`[${expand.join(", ")}]`);
|
||||
}
|
||||
|
||||
return query.then(utils.omitRows(omissions()));
|
||||
});
|
||||
const res = await query;
|
||||
return utils.omitRows(omissions())(res);
|
||||
},
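`getAll` above is the one place in this commit where a promise chain is rewritten with `async`/`await`. A hedged sketch of that same pattern, for reference only:

```js
// Illustrative only: permission check first, then build and await the query,
// then strip the omitted columns, exactly as getAll does above.
const listUsersExample = async (access, expand) => {
	await access.can("users:list");
	const query = userModel.query().where("is_deleted", 0).orderBy("name", "ASC");
	if (expand) {
		query.withGraphFetched(`[${expand.join(", ")}]`);
	}
	const rows = await query;
	return utils.omitRows(omissions())(rows);
};
```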
/**
|
||||
@@ -345,8 +340,8 @@ const internalUser = {
|
||||
getUserOmisionsByAccess: (access, id_requested) => {
|
||||
let response = []; // Admin response
|
||||
|
||||
if (!access.token.hasScope('admin') && access.token.getUserId(0) !== id_requested) {
|
||||
response = ['roles', 'is_deleted']; // Restricted response
|
||||
if (!access.token.hasScope("admin") && access.token.getUserId(0) !== id_requested) {
|
||||
response = ["roles", "is_deleted"]; // Restricted response
|
||||
}
|
||||
|
||||
return response;
|
||||
@@ -361,25 +356,29 @@ const internalUser = {
|
||||
* @return {Promise}
|
||||
*/
|
||||
setPassword: (access, data) => {
|
||||
return access.can('users:password', data.id)
|
||||
return access
|
||||
.can("users:password", data.id)
|
||||
.then(() => {
|
||||
return internalUser.get(access, {id: data.id});
|
||||
return internalUser.get(access, { id: data.id });
|
||||
})
|
||||
.then((user) => {
|
||||
if (user.id !== data.id) {
|
||||
// Sanity check that something crazy hasn't happened
|
||||
throw new error.InternalValidationError('User could not be updated, IDs do not match: ' + user.id + ' !== ' + data.id);
|
||||
throw new errs.InternalValidationError(
|
||||
`User could not be updated, IDs do not match: ${user.id} !== ${data.id}`,
|
||||
);
|
||||
}
|
||||
|
||||
if (user.id === access.token.getUserId(0)) {
|
||||
// they're setting their own password. Make sure their current password is correct
|
||||
if (typeof data.current === 'undefined' || !data.current) {
|
||||
throw new error.ValidationError('Current password was not supplied');
|
||||
if (typeof data.current === "undefined" || !data.current) {
|
||||
throw new errs.ValidationError("Current password was not supplied");
|
||||
}
|
||||
|
||||
return internalToken.getTokenFromEmail({
|
||||
return internalToken
|
||||
.getTokenFromEmail({
|
||||
identity: user.email,
|
||||
secret: data.current
|
||||
secret: data.current,
|
||||
})
|
||||
.then(() => {
|
||||
return user;
|
||||
@@ -392,43 +391,36 @@ const internalUser = {
|
||||
// Get auth, patch if it exists
|
||||
return authModel
|
||||
.query()
|
||||
.where('user_id', user.id)
|
||||
.andWhere('type', data.type)
|
||||
.where("user_id", user.id)
|
||||
.andWhere("type", data.type)
|
||||
.first()
|
||||
.then((existing_auth) => {
|
||||
if (existing_auth) {
|
||||
// patch
|
||||
return authModel
|
||||
.query()
|
||||
.where('user_id', user.id)
|
||||
.andWhere('type', data.type)
|
||||
.patch({
|
||||
return authModel.query().where("user_id", user.id).andWhere("type", data.type).patch({
|
||||
type: data.type, // This is required for the model to encrypt on save
|
||||
secret: data.secret
|
||||
secret: data.secret,
|
||||
});
|
||||
} else {
|
||||
}
|
||||
// insert
|
||||
return authModel
|
||||
.query()
|
||||
.insert({
|
||||
return authModel.query().insert({
|
||||
user_id: user.id,
|
||||
type: data.type,
|
||||
secret: data.secret,
|
||||
meta: {}
|
||||
meta: {},
|
||||
});
|
||||
}
|
||||
})
|
||||
.then(() => {
|
||||
// Add to Audit Log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'updated',
|
||||
object_type: 'user',
|
||||
action: "updated",
|
||||
object_type: "user",
|
||||
object_id: user.id,
|
||||
meta: {
|
||||
name: user.name,
|
||||
password_changed: true,
|
||||
auth_type: data.type
|
||||
}
|
||||
auth_type: data.type,
|
||||
},
|
||||
});
|
||||
});
|
||||
})
|
||||
@@ -443,14 +435,17 @@ const internalUser = {
|
||||
* @return {Promise}
|
||||
*/
|
||||
setPermissions: (access, data) => {
|
||||
return access.can('users:permissions', data.id)
|
||||
return access
|
||||
.can("users:permissions", data.id)
|
||||
.then(() => {
|
||||
return internalUser.get(access, {id: data.id});
|
||||
return internalUser.get(access, { id: data.id });
|
||||
})
|
||||
.then((user) => {
|
||||
if (user.id !== data.id) {
|
||||
// Sanity check that something crazy hasn't happened
|
||||
throw new error.InternalValidationError('User could not be updated, IDs do not match: ' + user.id + ' !== ' + data.id);
|
||||
throw new errs.InternalValidationError(
|
||||
`User could not be updated, IDs do not match: ${user.id} !== ${data.id}`,
|
||||
);
|
||||
}
|
||||
|
||||
return user;
|
||||
@@ -459,34 +454,30 @@ const internalUser = {
|
||||
// Get perms row, patch if it exists
|
||||
return userPermissionModel
|
||||
.query()
|
||||
.where('user_id', user.id)
|
||||
.where("user_id", user.id)
|
||||
.first()
|
||||
.then((existing_auth) => {
|
||||
if (existing_auth) {
|
||||
// patch
|
||||
return userPermissionModel
|
||||
.query()
|
||||
.where('user_id', user.id)
|
||||
.patchAndFetchById(existing_auth.id, _.assign({user_id: user.id}, data));
|
||||
} else {
|
||||
// insert
|
||||
return userPermissionModel
|
||||
.query()
|
||||
.insertAndFetch(_.assign({user_id: user.id}, data));
|
||||
.where("user_id", user.id)
|
||||
.patchAndFetchById(existing_auth.id, _.assign({ user_id: user.id }, data));
|
||||
}
|
||||
// insert
|
||||
return userPermissionModel.query().insertAndFetch(_.assign({ user_id: user.id }, data));
|
||||
})
|
||||
.then((permissions) => {
|
||||
// Add to Audit Log
|
||||
return internalAuditLog.add(access, {
|
||||
action: 'updated',
|
||||
object_type: 'user',
|
||||
action: "updated",
|
||||
object_type: "user",
|
||||
object_id: user.id,
|
||||
meta: {
|
||||
name: user.name,
|
||||
permissions: permissions
|
||||
}
|
||||
permissions: permissions,
|
||||
},
|
||||
});
|
||||
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
@@ -500,14 +491,15 @@ const internalUser = {
|
||||
* @param {Integer} data.id
|
||||
*/
|
||||
loginAs: (access, data) => {
|
||||
return access.can('users:loginas', data.id)
|
||||
return access
|
||||
.can("users:loginas", data.id)
|
||||
.then(() => {
|
||||
return internalUser.get(access, data);
|
||||
})
|
||||
.then((user) => {
|
||||
return internalToken.getTokenFromUser(user);
|
||||
});
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
module.exports = internalUser;
|
||||
export default internalUser;
|
||||
|
||||
@@ -4,25 +4,29 @@
|
||||
* "scope" in this file means "where did this token come from and what is using it", so 99% of the time
|
||||
* the "scope" is going to be "user" because it would be a user token. This is not to be confused with
|
||||
* the "role" which could be "user" or "admin". The scope in fact, could be "worker" or anything else.
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
const _ = require('lodash');
|
||||
const logger = require('../logger').access;
|
||||
const Ajv = require('ajv/dist/2020');
|
||||
const error = require('./error');
|
||||
const userModel = require('../models/user');
|
||||
const proxyHostModel = require('../models/proxy_host');
|
||||
const TokenModel = require('../models/token');
|
||||
const roleSchema = require('./access/roles.json');
|
||||
const permsSchema = require('./access/permissions.json');
|
||||
import fs from "node:fs";
|
||||
import { dirname } from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import Ajv from "ajv/dist/2020.js";
|
||||
import _ from "lodash";
|
||||
import { access as logger } from "../logger.js";
|
||||
import proxyHostModel from "../models/proxy_host.js";
|
||||
import TokenModel from "../models/token.js";
|
||||
import userModel from "../models/user.js";
|
||||
import permsSchema from "./access/permissions.json" with { type: "json" };
|
||||
import roleSchema from "./access/roles.json" with { type: "json" };
|
||||
import errs from "./error.js";
|
||||
|
||||
module.exports = function (token_string) {
|
||||
let Token = new TokenModel();
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = dirname(__filename);
|
||||
|
||||
export default function (token_string) {
|
||||
const Token = TokenModel();
|
||||
let token_data = null;
|
||||
let initialised = false;
|
||||
let object_cache = {};
|
||||
const object_cache = {};
|
||||
let allow_internal_access = false;
|
||||
let user_roles = [];
|
||||
let permissions = {};
|
||||
@@ -37,10 +41,10 @@ module.exports = function (token_string) {
|
||||
if (initialised) {
|
||||
resolve();
|
||||
} else if (!token_string) {
|
||||
reject(new error.PermissionError('Permission Denied'));
|
||||
reject(new errs.PermissionError("Permission Denied"));
|
||||
} else {
|
||||
resolve(Token.load(token_string)
|
||||
.then((data) => {
|
||||
resolve(
|
||||
Token.load(token_string).then((data) => {
|
||||
token_data = data;
|
||||
|
||||
// At this point we need to load the user from the DB and make sure they:
|
||||
@@ -48,21 +52,25 @@ module.exports = function (token_string) {
|
||||
// - still have the appropriate scopes for this token
|
||||
// This is only required when the User ID is supplied or if the token scope has `user`
|
||||
|
||||
if (token_data.attrs.id || (typeof token_data.scope !== 'undefined' && _.indexOf(token_data.scope, 'user') !== -1)) {
|
||||
if (
|
||||
token_data.attrs.id ||
|
||||
(typeof token_data.scope !== "undefined" &&
|
||||
_.indexOf(token_data.scope, "user") !== -1)
|
||||
) {
|
||||
// Has token user id or token user scope
|
||||
return userModel
|
||||
.query()
|
||||
.where('id', token_data.attrs.id)
|
||||
.andWhere('is_deleted', 0)
|
||||
.andWhere('is_disabled', 0)
|
||||
.allowGraph('[permissions]')
|
||||
.withGraphFetched('[permissions]')
|
||||
.where("id", token_data.attrs.id)
|
||||
.andWhere("is_deleted", 0)
|
||||
.andWhere("is_disabled", 0)
|
||||
.allowGraph("[permissions]")
|
||||
.withGraphFetched("[permissions]")
|
||||
.first()
|
||||
.then((user) => {
|
||||
if (user) {
|
||||
// make sure user has all scopes of the token
|
||||
// The `user` role is not added against the user row, so we have to just add it here to get past this check.
|
||||
user.roles.push('user');
|
||||
user.roles.push("user");
|
||||
|
||||
let is_ok = true;
|
||||
_.forEach(token_data.scope, (scope_item) => {
|
||||
@@ -72,21 +80,19 @@ module.exports = function (token_string) {
|
||||
});
|
||||
|
||||
if (!is_ok) {
|
||||
throw new error.AuthError('Invalid token scope for User');
|
||||
} else {
|
||||
throw new errs.AuthError("Invalid token scope for User");
|
||||
}
|
||||
initialised = true;
|
||||
user_roles = user.roles;
|
||||
permissions = user.permissions;
|
||||
}
|
||||
|
||||
} else {
|
||||
throw new error.AuthError('User cannot be loaded for Token');
|
||||
throw new errs.AuthError("User cannot be loaded for Token");
|
||||
}
|
||||
});
|
||||
} else {
|
||||
initialised = true;
|
||||
}
|
||||
}));
|
||||
initialised = true;
|
||||
}),
|
||||
);
|
||||
}
|
||||
});
|
||||
};
|
||||
@@ -101,35 +107,37 @@ module.exports = function (token_string) {
|
||||
*/
|
||||
this.loadObjects = (object_type) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
if (Token.hasScope('user')) {
|
||||
if (typeof token_data.attrs.id === 'undefined' || !token_data.attrs.id) {
|
||||
reject(new error.AuthError('User Token supplied without a User ID'));
|
||||
if (Token.hasScope("user")) {
|
||||
if (
|
||||
typeof token_data.attrs.id === "undefined" ||
|
||||
!token_data.attrs.id
|
||||
) {
|
||||
reject(new errs.AuthError("User Token supplied without a User ID"));
|
||||
} else {
|
||||
let token_user_id = token_data.attrs.id ? token_data.attrs.id : 0;
|
||||
const token_user_id = token_data.attrs.id ? token_data.attrs.id : 0;
|
||||
let query;
|
||||
|
||||
if (typeof object_cache[object_type] === 'undefined') {
|
||||
if (typeof object_cache[object_type] === "undefined") {
|
||||
switch (object_type) {
|
||||
|
||||
// USERS - should only return yourself
|
||||
case 'users':
|
||||
case "users":
|
||||
resolve(token_user_id ? [token_user_id] : []);
|
||||
break;
|
||||
|
||||
// Proxy Hosts
|
||||
case 'proxy_hosts':
|
||||
case "proxy_hosts":
|
||||
query = proxyHostModel
|
||||
.query()
|
||||
.select('id')
|
||||
.andWhere('is_deleted', 0);
|
||||
.select("id")
|
||||
.andWhere("is_deleted", 0);
|
||||
|
||||
if (permissions.visibility === 'user') {
|
||||
query.andWhere('owner_user_id', token_user_id);
|
||||
if (permissions.visibility === "user") {
|
||||
query.andWhere("owner_user_id", token_user_id);
|
||||
}
|
||||
|
||||
resolve(query
|
||||
.then((rows) => {
|
||||
let result = [];
|
||||
resolve(
|
||||
query.then((rows) => {
|
||||
const result = [];
|
||||
_.forEach(rows, (rule_row) => {
|
||||
result.push(rule_row.id);
|
||||
});
|
||||
@@ -140,7 +148,7 @@ module.exports = function (token_string) {
|
||||
}
|
||||
|
||||
return result;
|
||||
})
|
||||
}),
|
||||
);
|
||||
break;
|
||||
|
||||
@@ -156,8 +164,7 @@ module.exports = function (token_string) {
|
||||
} else {
|
||||
resolve(null);
|
||||
}
|
||||
})
|
||||
.then((objects) => {
|
||||
}).then((objects) => {
|
||||
object_cache[object_type] = objects;
|
||||
return objects;
|
||||
});
|
||||
@@ -170,41 +177,40 @@ module.exports = function (token_string) {
|
||||
* @returns {Object}
|
||||
*/
|
||||
this.getObjectSchema = (permission_label) => {
|
||||
let base_object_type = permission_label.split(':').shift();
|
||||
const base_object_type = permission_label.split(":").shift();
|
||||
|
||||
let schema = {
|
||||
$id: 'objects',
|
||||
description: 'Actor Properties',
|
||||
type: 'object',
|
||||
const schema = {
|
||||
$id: "objects",
|
||||
description: "Actor Properties",
|
||||
type: "object",
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
user_id: {
|
||||
anyOf: [
|
||||
{
|
||||
type: 'number',
|
||||
enum: [Token.get('attrs').id]
|
||||
}
|
||||
]
|
||||
type: "number",
|
||||
enum: [Token.get("attrs").id],
|
||||
},
|
||||
],
|
||||
},
|
||||
scope: {
|
||||
type: 'string',
|
||||
pattern: '^' + Token.get('scope') + '$'
|
||||
}
|
||||
}
|
||||
type: "string",
|
||||
pattern: `^${Token.get("scope")}$`,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
return this.loadObjects(base_object_type)
|
||||
.then((object_result) => {
|
||||
if (typeof object_result === 'object' && object_result !== null) {
|
||||
return this.loadObjects(base_object_type).then((object_result) => {
|
||||
if (typeof object_result === "object" && object_result !== null) {
|
||||
schema.properties[base_object_type] = {
|
||||
type: 'number',
|
||||
type: "number",
|
||||
enum: object_result,
|
||||
minimum: 1
|
||||
minimum: 1,
|
||||
};
|
||||
} else {
|
||||
schema.properties[base_object_type] = {
|
||||
type: 'number',
|
||||
minimum: 1
|
||||
type: "number",
|
||||
minimum: 1,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -213,7 +219,6 @@ module.exports = function (token_string) {
|
||||
};
|
||||
|
||||
return {
|
||||
|
||||
token: Token,
|
||||
|
||||
/**
|
||||
@@ -222,7 +227,7 @@ module.exports = function (token_string) {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
load: (allow_internal) => {
|
||||
return new Promise(function (resolve/*, reject*/) {
|
||||
return new Promise((resolve /*, reject*/) => {
|
||||
if (token_string) {
|
||||
resolve(Token.load(token_string));
|
||||
} else {
|
||||
@@ -240,20 +245,19 @@ module.exports = function (token_string) {
|
||||
* @param {*} [data]
|
||||
* @returns {Promise}
|
||||
*/
|
||||
can: (permission, data) => {
|
||||
can: async (permission, data) => {
|
||||
if (allow_internal_access === true) {
|
||||
return Promise.resolve(true);
|
||||
//return true;
|
||||
} else {
|
||||
return this.init()
|
||||
.then(() => {
|
||||
// Initialised, token decoded ok
|
||||
return this.getObjectSchema(permission)
|
||||
.then((objectSchema) => {
|
||||
const data_schema = {
|
||||
return true;
|
||||
}
|
||||
|
||||
try {
|
||||
await this.init();
|
||||
const objectSchema = await this.getObjectSchema(permission);
|
||||
|
||||
const dataSchema = {
|
||||
[permission]: {
|
||||
data: data,
|
||||
scope: Token.get('scope'),
|
||||
scope: Token.get("scope"),
|
||||
roles: user_roles,
|
||||
permission_visibility: permissions.visibility,
|
||||
permission_proxy_hosts: permissions.proxy_hosts,
|
||||
@@ -261,47 +265,40 @@ module.exports = function (token_string) {
|
||||
permission_dead_hosts: permissions.dead_hosts,
|
||||
permission_streams: permissions.streams,
|
||||
permission_access_lists: permissions.access_lists,
|
||||
permission_certificates: permissions.certificates
|
||||
}
|
||||
permission_certificates: permissions.certificates,
|
||||
},
|
||||
};
|
||||
|
||||
let permissionSchema = {
|
||||
const permissionSchema = {
|
||||
$async: true,
|
||||
$id: 'permissions',
|
||||
type: 'object',
|
||||
$id: "permissions",
|
||||
type: "object",
|
||||
additionalProperties: false,
|
||||
properties: {}
|
||||
properties: {},
|
||||
};
|
||||
|
||||
permissionSchema.properties[permission] = require('./access/' + permission.replace(/:/gim, '-') + '.json');
|
||||
const rawData = fs.readFileSync(
|
||||
`${__dirname}/access/${permission.replace(/:/gim, "-")}.json`,
|
||||
{ encoding: "utf8" },
|
||||
);
|
||||
permissionSchema.properties[permission] = JSON.parse(rawData);
|
||||
|
||||
const ajv = new Ajv({
|
||||
verbose: true,
|
||||
allErrors: true,
|
||||
breakOnError: true,
|
||||
coerceTypes: true,
|
||||
schemas: [
|
||||
roleSchema,
|
||||
permsSchema,
|
||||
objectSchema,
|
||||
permissionSchema
|
||||
]
|
||||
schemas: [roleSchema, permsSchema, objectSchema, permissionSchema],
|
||||
});
|
||||
|
||||
return ajv.validate('permissions', data_schema)
|
||||
.then(() => {
|
||||
return data_schema[permission];
|
||||
});
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
const valid = ajv.validate("permissions", dataSchema);
|
||||
return valid && dataSchema[permission];
|
||||
} catch (err) {
|
||||
err.permission = permission;
|
||||
err.permission_data = data;
|
||||
logger.error(permission, data, err.message);
|
||||
|
||||
throw new error.PermissionError('Permission Denied', err);
|
||||
});
|
||||
}
|
||||
throw errs.PermissionError("Permission Denied", err);
|
||||
}
|
||||
},
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,17 +1,15 @@
|
||||
const dnsPlugins = require('../global/certbot-dns-plugins.json');
|
||||
const utils = require('./utils');
|
||||
const error = require('./error');
|
||||
const logger = require('../logger').certbot;
|
||||
const batchflow = require('batchflow');
|
||||
import batchflow from "batchflow";
|
||||
import dnsPlugins from "../global/certbot-dns-plugins.json" with { type: "json" };
|
||||
import { certbot as logger } from "../logger.js";
|
||||
import errs from "./error.js";
|
||||
import utils from "./utils.js";
|
||||
|
||||
const CERTBOT_VERSION_REPLACEMENT = '$(certbot --version | grep -Eo \'[0-9](\\.[0-9]+)+\')';
|
||||
const CERTBOT_VERSION_REPLACEMENT = "$(certbot --version | grep -Eo '[0-9](\\.[0-9]+)+')";
|
||||
|
||||
const certbot = {
|
||||
|
||||
/**
|
||||
/**
|
||||
* @param {array} pluginKeys
|
||||
*/
|
||||
installPlugins: async (pluginKeys) => {
|
||||
const installPlugins = async (pluginKeys) => {
|
||||
let hasErrors = false;
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
@@ -20,9 +18,11 @@ const certbot = {
|
||||
return;
|
||||
}
|
||||
|
||||
batchflow(pluginKeys).sequential()
|
||||
batchflow(pluginKeys)
|
||||
.sequential()
|
||||
.each((_i, pluginKey, next) => {
|
||||
certbot.installPlugin(pluginKey)
|
||||
certbot
|
||||
.installPlugin(pluginKey)
|
||||
.then(() => {
|
||||
next();
|
||||
})
|
||||
@@ -36,25 +36,27 @@ const certbot = {
|
||||
})
|
||||
.end(() => {
|
||||
if (hasErrors) {
|
||||
reject(new error.CommandError('Some plugins failed to install. Please check the logs above', 1));
|
||||
reject(
|
||||
new errs.CommandError("Some plugins failed to install. Please check the logs above", 1),
|
||||
);
|
||||
} else {
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
});
|
||||
},
|
||||
};
|
||||
|
||||
/**
|
||||
/**
|
||||
* Installs a cerbot plugin given the key for the object from
|
||||
* ../global/certbot-dns-plugins.json
|
||||
*
|
||||
* @param {string} pluginKey
|
||||
* @returns {Object}
|
||||
*/
|
||||
installPlugin: async (pluginKey) => {
|
||||
if (typeof dnsPlugins[pluginKey] === 'undefined') {
|
||||
const installPlugin = async (pluginKey) => {
|
||||
if (typeof dnsPlugins[pluginKey] === "undefined") {
|
||||
// throw Error(`Certbot plugin ${pluginKey} not found`);
|
||||
throw new error.ItemNotFoundError(pluginKey);
|
||||
throw new errs.ItemNotFoundError(pluginKey);
|
||||
}
|
||||
|
||||
const plugin = dnsPlugins[pluginKey];
|
||||
@@ -65,13 +67,14 @@ const certbot = {
|
||||
|
||||
// SETUPTOOLS_USE_DISTUTILS is required for certbot plugins to install correctly
|
||||
// in new versions of Python
|
||||
let env = Object.assign({}, process.env, {SETUPTOOLS_USE_DISTUTILS: 'stdlib'});
|
||||
if (typeof plugin.env === 'object') {
|
||||
let env = Object.assign({}, process.env, { SETUPTOOLS_USE_DISTUTILS: "stdlib" });
|
||||
if (typeof plugin.env === "object") {
|
||||
env = Object.assign(env, plugin.env);
|
||||
}
|
||||
|
||||
const cmd = `. /opt/certbot/bin/activate && pip install --no-cache-dir ${plugin.dependencies} ${plugin.package_name}${plugin.version} && deactivate`;
|
||||
return utils.exec(cmd, {env})
|
||||
return utils
|
||||
.exec(cmd, { env })
|
||||
.then((result) => {
|
||||
logger.complete(`Installed ${pluginKey}`);
|
||||
return result;
|
||||
@@ -79,7 +82,6 @@ const certbot = {
|
||||
.catch((err) => {
|
||||
throw err;
|
||||
});
|
||||
},
|
||||
};
|
||||
|
||||
module.exports = certbot;
|
||||
export { installPlugins, installPlugin };
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
const fs = require('fs');
|
||||
const NodeRSA = require('node-rsa');
|
||||
const logger = require('../logger').global;
|
||||
import fs from "node:fs";
|
||||
import NodeRSA from "node-rsa";
|
||||
import { global as logger } from "../logger.js";
|
||||
|
||||
const keysFile = '/data/keys.json';
|
||||
const mysqlEngine = 'mysql2';
|
||||
@@ -12,16 +12,18 @@ let instance = null;
|
||||
// 1. Load from config file first (not recommended anymore)
|
||||
// 2. Use config env variables next
|
||||
const configure = () => {
|
||||
const filename = (process.env.NODE_CONFIG_DIR || './config') + '/' + (process.env.NODE_ENV || 'default') + '.json';
|
||||
const filename = `${process.env.NODE_CONFIG_DIR || "./config"}/${process.env.NODE_ENV || "default"}.json`;
|
||||
if (fs.existsSync(filename)) {
|
||||
let configData;
|
||||
try {
|
||||
configData = require(filename);
|
||||
// Load this json synchronously
|
||||
const rawData = fs.readFileSync(filename);
|
||||
configData = JSON.parse(rawData);
|
||||
} catch (_) {
|
||||
// do nothing
|
||||
}
|
||||
|
||||
if (configData && configData.database) {
|
||||
if (configData?.database) {
|
||||
logger.info(`Using configuration from file: ${filename}`);
|
||||
instance = configData;
|
||||
instance.keys = getKeys();
|
||||
@@ -34,7 +36,7 @@ const configure = () => {
|
||||
const envMysqlName = process.env.DB_MYSQL_NAME || null;
|
||||
if (envMysqlHost && envMysqlUser && envMysqlName) {
|
||||
// we have enough mysql creds to go with mysql
|
||||
logger.info('Using MySQL configuration');
|
||||
logger.info("Using MySQL configuration");
|
||||
instance = {
|
||||
database: {
|
||||
engine: mysqlEngine,
|
||||
@@ -54,7 +56,7 @@ const configure = () => {
|
||||
const envPostgresName = process.env.DB_POSTGRES_NAME || null;
|
||||
if (envPostgresHost && envPostgresUser && envPostgresName) {
|
||||
// we have enough postgres creds to go with postgres
|
||||
logger.info('Using Postgres configuration');
|
||||
logger.info("Using Postgres configuration");
|
||||
instance = {
|
||||
database: {
|
||||
engine: postgresEngine,
|
||||
@@ -69,18 +71,18 @@ const configure = () => {
|
||||
return;
|
||||
}
|
||||
|
||||
const envSqliteFile = process.env.DB_SQLITE_FILE || '/data/database.sqlite';
|
||||
const envSqliteFile = process.env.DB_SQLITE_FILE || "/data/database.sqlite";
|
||||
logger.info(`Using Sqlite: ${envSqliteFile}`);
|
||||
instance = {
|
||||
database: {
|
||||
engine: 'knex-native',
|
||||
engine: "knex-native",
|
||||
knex: {
|
||||
client: sqliteClientName,
|
||||
connection: {
|
||||
filename: envSqliteFile
|
||||
filename: envSqliteFile,
|
||||
},
|
||||
useNullAsDefault: true,
|
||||
},
|
||||
useNullAsDefault: true
|
||||
}
|
||||
},
|
||||
keys: getKeys(),
|
||||
};
|
||||
@@ -88,54 +90,55 @@ const configure = () => {
|
||||
|
||||
const getKeys = () => {
|
||||
// Get keys from file
|
||||
logger.debug("Cheecking for keys file:", keysFile);
|
||||
if (!fs.existsSync(keysFile)) {
|
||||
generateKeys();
|
||||
} else if (process.env.DEBUG) {
|
||||
logger.info('Keys file exists OK');
|
||||
logger.info("Keys file exists OK");
|
||||
}
|
||||
try {
|
||||
return require(keysFile);
|
||||
// Load this json keysFile synchronously and return the json object
|
||||
const rawData = fs.readFileSync(keysFile);
|
||||
return JSON.parse(rawData);
|
||||
} catch (err) {
|
||||
logger.error('Could not read JWT key pair from config file: ' + keysFile, err);
|
||||
logger.error(`Could not read JWT key pair from config file: ${keysFile}`, err);
|
||||
process.exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
const generateKeys = () => {
|
||||
logger.info('Creating a new JWT key pair...');
|
||||
logger.info("Creating a new JWT key pair...");
|
||||
// Now create the keys and save them in the config.
|
||||
const key = new NodeRSA({ b: 2048 });
|
||||
key.generateKeyPair();
|
||||
|
||||
const keys = {
|
||||
key: key.exportKey('private').toString(),
|
||||
pub: key.exportKey('public').toString(),
|
||||
key: key.exportKey("private").toString(),
|
||||
pub: key.exportKey("public").toString(),
|
||||
};
|
||||
|
||||
// Write keys config
|
||||
try {
|
||||
fs.writeFileSync(keysFile, JSON.stringify(keys, null, 2));
|
||||
} catch (err) {
|
||||
logger.error('Could not write JWT key pair to config file: ' + keysFile + ': ' + err.message);
|
||||
logger.error(`Could not write JWT key pair to config file: ${keysFile}: ${err.message}`);
|
||||
process.exit(1);
|
||||
}
|
||||
logger.info('Wrote JWT key pair to config file: ' + keysFile);
|
||||
logger.info(`Wrote JWT key pair to config file: ${keysFile}`);
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
|
||||
/**
|
||||
/**
|
||||
*
|
||||
* @param {string} key ie: 'database' or 'database.engine'
|
||||
* @returns {boolean}
|
||||
*/
|
||||
has: function(key) {
|
||||
const configHas = (key) => {
|
||||
instance === null && configure();
|
||||
const keys = key.split('.');
|
||||
const keys = key.split(".");
|
||||
let level = instance;
|
||||
let has = true;
|
||||
keys.forEach((keyItem) =>{
|
||||
if (typeof level[keyItem] === 'undefined') {
|
||||
keys.forEach((keyItem) => {
|
||||
if (typeof level[keyItem] === "undefined") {
|
||||
has = false;
|
||||
} else {
|
||||
level = level[keyItem];
|
||||
@@ -143,95 +146,92 @@ module.exports = {
|
||||
});
|
||||
|
||||
return has;
|
||||
},
|
||||
};
|
||||
|
||||
/**
|
||||
/**
|
||||
* Gets a specific key from the top level
|
||||
*
|
||||
* @param {string} key
|
||||
* @returns {*}
|
||||
*/
|
||||
get: function (key) {
|
||||
const configGet = (key) => {
|
||||
instance === null && configure();
|
||||
if (key && typeof instance[key] !== 'undefined') {
|
||||
if (key && typeof instance[key] !== "undefined") {
|
||||
return instance[key];
|
||||
}
|
||||
return instance;
|
||||
},
|
||||
};
|
||||
|
||||
/**
|
||||
/**
|
||||
* Is this a sqlite configuration?
|
||||
*
|
||||
* @returns {boolean}
|
||||
*/
|
||||
isSqlite: function () {
|
||||
const isSqlite = () => {
|
||||
instance === null && configure();
|
||||
return instance.database.knex && instance.database.knex.client === sqliteClientName;
|
||||
},
|
||||
};
|
||||
|
||||
/**
|
||||
/**
|
||||
* Is this a mysql configuration?
|
||||
*
|
||||
* @returns {boolean}
|
||||
*/
|
||||
isMysql: function () {
|
||||
const isMysql = () => {
|
||||
instance === null && configure();
|
||||
return instance.database.engine === mysqlEngine;
|
||||
},
|
||||
};
|
||||
|
||||
/**
|
||||
/**
|
||||
* Is this a postgres configuration?
|
||||
*
|
||||
* @returns {boolean}
|
||||
*/
|
||||
isPostgres: function () {
|
||||
const isPostgres = () => {
|
||||
instance === null && configure();
|
||||
return instance.database.engine === postgresEngine;
|
||||
},
|
||||
};
|
||||
|
||||
/**
|
||||
/**
|
||||
* Are we running in debug mdoe?
|
||||
*
|
||||
* @returns {boolean}
|
||||
*/
|
||||
debug: function () {
|
||||
return !!process.env.DEBUG;
|
||||
},
|
||||
const isDebugMode = () => !!process.env.DEBUG;
|
||||
|
||||
/**
|
||||
/**
|
||||
* Returns a public key
|
||||
*
|
||||
* @returns {string}
|
||||
*/
|
||||
getPublicKey: function () {
|
||||
const getPublicKey = () => {
|
||||
instance === null && configure();
|
||||
return instance.keys.pub;
|
||||
},
|
||||
};
|
||||
|
||||
/**
|
||||
/**
|
||||
* Returns a private key
|
||||
*
|
||||
* @returns {string}
|
||||
*/
|
||||
getPrivateKey: function () {
|
||||
const getPrivateKey = () => {
|
||||
instance === null && configure();
|
||||
return instance.keys.key;
|
||||
},
|
||||
};
|
||||
|
||||
/**
|
||||
/**
|
||||
* @returns {boolean}
|
||||
*/
|
||||
useLetsencryptStaging: function () {
|
||||
return !!process.env.LE_STAGING;
|
||||
},
|
||||
const useLetsencryptStaging = () => !!process.env.LE_STAGING;
|
||||
|
||||
/**
|
||||
/**
|
||||
* @returns {string|null}
|
||||
*/
|
||||
useLetsencryptServer: function () {
|
||||
const useLetsencryptServer = () => {
|
||||
if (process.env.LE_SERVER) {
|
||||
return process.env.LE_SERVER;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
export { configHas, configGet, isSqlite, isMysql, isPostgres, isDebugMode, getPrivateKey, getPublicKey, useLetsencryptStaging, useLetsencryptServer };
|
||||
|
||||
@@ -1,13 +1,11 @@
|
||||
const _ = require('lodash');
|
||||
const util = require('util');
|
||||
import _ from "lodash";
|
||||
|
||||
module.exports = {
|
||||
|
||||
PermissionError: function (message, previous) {
|
||||
const errs = {
|
||||
PermissionError: function (_, previous) {
|
||||
Error.captureStackTrace(this, this.constructor);
|
||||
this.name = this.constructor.name;
|
||||
this.previous = previous;
|
||||
this.message = 'Permission Denied';
|
||||
this.message = "Permission Denied";
|
||||
this.public = true;
|
||||
this.status = 403;
|
||||
},
|
||||
@@ -16,18 +14,19 @@ module.exports = {
|
||||
Error.captureStackTrace(this, this.constructor);
|
||||
this.name = this.constructor.name;
|
||||
this.previous = previous;
|
||||
this.message = 'Item Not Found - ' + id;
|
||||
this.message = `Item Not Found - ${id}`;
|
||||
this.public = true;
|
||||
this.status = 404;
|
||||
},
|
||||
|
||||
AuthError: function (message, previous) {
|
||||
AuthError: function (message, messageI18n, previous) {
|
||||
Error.captureStackTrace(this, this.constructor);
|
||||
this.name = this.constructor.name;
|
||||
this.previous = previous;
|
||||
this.message = message;
|
||||
this.message_i18n = messageI18n;
|
||||
this.public = true;
|
||||
this.status = 401;
|
||||
this.status = 400;
|
||||
},
|
||||
|
||||
InternalError: function (message, previous) {
|
||||
@@ -94,6 +93,8 @@ module.exports = {
|
||||
},
|
||||
};
|
||||
|
||||
_.forEach(module.exports, function (error) {
|
||||
util.inherits(error, Error);
|
||||
_.forEach(errs, (err) => {
|
||||
err.prototype = Object.create(Error.prototype);
|
||||
});
|
||||
|
||||
export default errs;
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
module.exports = function (req, res, next) {
|
||||
export default (req, res, next) => {
|
||||
if (req.headers.origin) {
|
||||
res.set({
|
||||
'Access-Control-Allow-Origin': req.headers.origin,
|
||||
'Access-Control-Allow-Credentials': true,
|
||||
'Access-Control-Allow-Methods': 'OPTIONS, GET, POST',
|
||||
'Access-Control-Allow-Headers': 'Content-Type, Cache-Control, Pragma, Expires, Authorization, X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit',
|
||||
'Access-Control-Max-Age': 5 * 60,
|
||||
'Access-Control-Expose-Headers': 'X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit'
|
||||
"Access-Control-Allow-Origin": req.headers.origin,
|
||||
"Access-Control-Allow-Credentials": true,
|
||||
"Access-Control-Allow-Methods": "OPTIONS, GET, POST",
|
||||
"Access-Control-Allow-Headers":
|
||||
"Content-Type, Cache-Control, Pragma, Expires, Authorization, X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit",
|
||||
"Access-Control-Max-Age": 5 * 60,
|
||||
"Access-Control-Expose-Headers": "X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit",
|
||||
});
|
||||
next();
|
||||
} else {
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
const Access = require('../access');
|
||||
import Access from "../access.js";
|
||||
|
||||
module.exports = () => {
|
||||
return function (req, res, next) {
|
||||
export default () => {
|
||||
return (_, res, next) => {
|
||||
res.locals.access = null;
|
||||
let access = new Access(res.locals.token || null);
|
||||
access.load()
|
||||
const access = new Access(res.locals.token || null);
|
||||
access
|
||||
.load()
|
||||
.then(() => {
|
||||
res.locals.access = access;
|
||||
next();
|
||||
@@ -12,4 +13,3 @@ module.exports = () => {
|
||||
.catch(next);
|
||||
};
|
||||
};
|
||||
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
module.exports = function () {
|
||||
return function (req, res, next) {
|
||||
export default function () {
|
||||
return (req, res, next) => {
|
||||
if (req.headers.authorization) {
|
||||
let parts = req.headers.authorization.split(' ');
|
||||
const parts = req.headers.authorization.split(" ");
|
||||
|
||||
if (parts && parts[0] === 'Bearer' && parts[1]) {
|
||||
if (parts && parts[0] === "Bearer" && parts[1]) {
|
||||
res.locals.token = parts[1];
|
||||
}
|
||||
}
|
||||
|
||||
next();
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
let _ = require('lodash');
|
||||
|
||||
module.exports = function (default_sort, default_offset, default_limit, max_limit) {
|
||||
import _ from "lodash";
|
||||
|
||||
export default (default_sort, default_offset, default_limit, max_limit) => {
|
||||
/**
|
||||
* This will setup the req query params with filtered data and defaults
|
||||
*
|
||||
@@ -11,34 +10,35 @@ module.exports = function (default_sort, default_offset, default_limit, max_limi
|
||||
*
|
||||
*/
|
||||
|
||||
return function (req, res, next) {
|
||||
|
||||
req.query.offset = typeof req.query.limit === 'undefined' ? default_offset || 0 : parseInt(req.query.offset, 10);
|
||||
req.query.limit = typeof req.query.limit === 'undefined' ? default_limit || 50 : parseInt(req.query.limit, 10);
|
||||
return (req, _res, next) => {
|
||||
req.query.offset =
|
||||
typeof req.query.limit === "undefined" ? default_offset || 0 : Number.parseInt(req.query.offset, 10);
|
||||
req.query.limit =
|
||||
typeof req.query.limit === "undefined" ? default_limit || 50 : Number.parseInt(req.query.limit, 10);
|
||||
|
||||
if (max_limit && req.query.limit > max_limit) {
|
||||
req.query.limit = max_limit;
|
||||
}
|
||||
|
||||
// Sorting
|
||||
let sort = typeof req.query.sort === 'undefined' ? default_sort : req.query.sort;
|
||||
let myRegexp = /.*\.(asc|desc)$/ig;
|
||||
let sort_array = [];
|
||||
let sort = typeof req.query.sort === "undefined" ? default_sort : req.query.sort;
|
||||
const myRegexp = /.*\.(asc|desc)$/gi;
|
||||
const sort_array = [];
|
||||
|
||||
sort = sort.split(',');
|
||||
_.map(sort, function (val) {
|
||||
let matches = myRegexp.exec(val);
|
||||
sort = sort.split(",");
|
||||
_.map(sort, (val) => {
|
||||
const matches = myRegexp.exec(val);
|
||||
|
||||
if (matches !== null) {
|
||||
let dir = matches[1];
|
||||
const dir = matches[1];
|
||||
sort_array.push({
|
||||
field: val.substr(0, val.length - (dir.length + 1)),
|
||||
dir: dir.toLowerCase()
|
||||
dir: dir.toLowerCase(),
|
||||
});
|
||||
} else {
|
||||
sort_array.push({
|
||||
field: val,
|
||||
dir: 'asc'
|
||||
dir: "asc",
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
module.exports = (req, res, next) => {
|
||||
export default (req, res, next) => {
|
||||
if (req.params.user_id === 'me' && res.locals.access) {
|
||||
req.params.user_id = res.locals.access.token.get('attrs').id;
|
||||
} else {
|
||||
req.params.user_id = parseInt(req.params.user_id, 10);
|
||||
req.params.user_id = Number.parseInt(req.params.user_id, 10);
|
||||
}
|
||||
|
||||
next();
|
||||
};
|
||||
|
||||
@@ -1,10 +1,8 @@
|
||||
const moment = require('moment');
|
||||
const {isPostgres} = require('./config');
|
||||
const {ref} = require('objection');
|
||||
import moment from "moment";
|
||||
import { ref } from "objection";
|
||||
import { isPostgres } from "./config.js";
|
||||
|
||||
module.exports = {
|
||||
|
||||
/**
|
||||
/**
|
||||
* Takes an expression such as 30d and returns a moment object of that date in future
|
||||
*
|
||||
* Key Shorthand
|
||||
@@ -22,41 +20,39 @@ module.exports = {
|
||||
* @param {String} expression
|
||||
* @returns {Object}
|
||||
*/
|
||||
parseDatePeriod: function (expression) {
|
||||
let matches = expression.match(/^([0-9]+)(y|Q|M|w|d|h|m|s|ms)$/m);
|
||||
const parseDatePeriod = (expression) => {
|
||||
const matches = expression.match(/^([0-9]+)(y|Q|M|w|d|h|m|s|ms)$/m);
|
||||
if (matches) {
|
||||
return moment().add(matches[1], matches[2]);
|
||||
}
|
||||
|
||||
return null;
|
||||
},
|
||||
};
|
||||
|
||||
convertIntFieldsToBool: function (obj, fields) {
|
||||
fields.forEach(function (field) {
|
||||
if (typeof obj[field] !== 'undefined') {
|
||||
const convertIntFieldsToBool = (obj, fields) => {
|
||||
fields.forEach((field) => {
|
||||
if (typeof obj[field] !== "undefined") {
|
||||
obj[field] = obj[field] === 1;
|
||||
}
|
||||
});
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
|
||||
convertBoolFieldsToInt: function (obj, fields) {
|
||||
fields.forEach(function (field) {
|
||||
if (typeof obj[field] !== 'undefined') {
|
||||
const convertBoolFieldsToInt = (obj, fields) => {
|
||||
fields.forEach((field) => {
|
||||
if (typeof obj[field] !== "undefined") {
|
||||
obj[field] = obj[field] ? 1 : 0;
|
||||
}
|
||||
});
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
|
||||
/**
|
||||
/**
|
||||
* Casts a column to json if using postgres
|
||||
*
|
||||
* @param {string} colName
|
||||
* @returns {string|Objection.ReferenceBuilder}
|
||||
*/
|
||||
castJsonIfNeed: function (colName) {
|
||||
return isPostgres() ? ref(colName).castText() : colName;
|
||||
}
|
||||
const castJsonIfNeed = (colName) => (isPostgres() ? ref(colName).castText() : colName);
|
||||
|
||||
};
|
||||
export { parseDatePeriod, convertIntFieldsToBool, convertBoolFieldsToInt, castJsonIfNeed };
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
const migrate_name = 'identifier_for_migrate';
|
||||
const logger = require('../logger').migrate;
|
||||
import { migrate as logger } from "../logger.js";
|
||||
|
||||
const migrateName = "identifier_for_migrate";
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
@@ -7,16 +8,15 @@ const logger = require('../logger').migrate;
|
||||
* @see http://knexjs.org/#Schema
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex, Promise) {
|
||||
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
const up = (_knex) => {
|
||||
logger.info(`[${migrateName}] Migrating Up...`);
|
||||
|
||||
// Create Table example:
|
||||
|
||||
/*return knex.schema.createTable('notification', (table) => {
|
||||
/*
|
||||
return knex.schema.createTable('notification', (table) => {
|
||||
table.increments().primary();
|
||||
table.string('name').notNull();
|
||||
table.string('type').notNull();
|
||||
@@ -24,10 +24,11 @@ exports.up = function (knex, Promise) {
|
||||
table.integer('modified_on').notNull();
|
||||
})
|
||||
.then(function () {
|
||||
logger.info('[' + migrate_name + '] Notification Table created');
|
||||
});*/
|
||||
logger.info('[' + migrateName + '] Notification Table created');
|
||||
});
|
||||
*/
|
||||
|
||||
logger.info('[' + migrate_name + '] Migrating Up Complete');
|
||||
logger.info(`[${migrateName}] Migrating Up Complete`);
|
||||
|
||||
return Promise.resolve(true);
|
||||
};
|
||||
@@ -36,20 +37,23 @@ exports.up = function (knex, Promise) {
|
||||
* Undo Migrate
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex, Promise) {
|
||||
logger.info('[' + migrate_name + '] Migrating Down...');
|
||||
const down = (_knex) => {
|
||||
logger.info(`[${migrateName}] Migrating Down...`);
|
||||
|
||||
// Drop table example:
|
||||
|
||||
/*return knex.schema.dropTable('notification')
|
||||
/*
|
||||
return knex.schema.dropTable('notification')
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] Notification Table dropped');
|
||||
});*/
|
||||
logger.info(`[${migrateName}] Notification Table dropped`);
|
||||
});
|
||||
*/
|
||||
|
||||
logger.info('[' + migrate_name + '] Migrating Down Complete');
|
||||
logger.info(`[${migrateName}] Migrating Down Complete`);
|
||||
|
||||
return Promise.resolve(true);
|
||||
};
|
||||
|
||||
export { up, down };
|
||||
|
||||
@@ -1,61 +1,60 @@
|
||||
const _ = require('lodash');
|
||||
const exec = require('node:child_process').exec;
|
||||
const execFile = require('node:child_process').execFile;
|
||||
const { Liquid } = require('liquidjs');
|
||||
const logger = require('../logger').global;
|
||||
const error = require('./error');
|
||||
import { exec as nodeExec, execFile as nodeExecFile } from "node:child_process";
|
||||
import { dirname } from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import { Liquid } from "liquidjs";
|
||||
import _ from "lodash";
|
||||
import { global as logger } from "../logger.js";
|
||||
import errs from "./error.js";
|
||||
|
||||
module.exports = {
|
||||
|
||||
exec: async (cmd, options = {}) => {
|
||||
logger.debug('CMD:', cmd);
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = dirname(__filename);
|
||||
|
||||
const exec = async (cmd, options = {}) => {
|
||||
logger.debug("CMD:", cmd);
|
||||
const { stdout, stderr } = await new Promise((resolve, reject) => {
|
||||
const child = exec(cmd, options, (isError, stdout, stderr) => {
|
||||
const child = nodeExec(cmd, options, (isError, stdout, stderr) => {
|
||||
if (isError) {
|
||||
reject(new error.CommandError(stderr, isError));
|
||||
reject(new errs.CommandError(stderr, isError));
|
||||
} else {
|
||||
resolve({ stdout, stderr });
|
||||
}
|
||||
});
|
||||
|
||||
child.on('error', (e) => {
|
||||
reject(new error.CommandError(stderr, 1, e));
|
||||
child.on("error", (e) => {
|
||||
reject(new errs.CommandError(stderr, 1, e));
|
||||
});
|
||||
});
|
||||
return stdout;
|
||||
},
|
||||
};
|
||||
|
||||
/**
|
||||
/**
|
||||
* @param {String} cmd
|
||||
* @param {Array} args
|
||||
* @param {Object|undefined} options
|
||||
* @returns {Promise}
|
||||
*/
|
||||
execFile: (cmd, args, options) => {
|
||||
logger.debug(`CMD: ${cmd} ${args ? args.join(' ') : ''}`);
|
||||
if (typeof options === 'undefined') {
|
||||
options = {};
|
||||
}
|
||||
const execFile = (cmd, args, options) => {
|
||||
logger.debug(`CMD: ${cmd} ${args ? args.join(" ") : ""}`);
|
||||
const opts = options || {};
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
execFile(cmd, args, options, (err, stdout, stderr) => {
|
||||
if (err && typeof err === 'object') {
|
||||
reject(new error.CommandError(stderr, 1, err));
|
||||
nodeExecFile(cmd, args, opts, (err, stdout, stderr) => {
|
||||
if (err && typeof err === "object") {
|
||||
reject(new errs.CommandError(stderr, 1, err));
|
||||
} else {
|
||||
resolve(stdout.trim());
|
||||
}
|
||||
});
|
||||
});
|
||||
},
|
||||
};
|
||||
|
||||
/**
|
||||
/**
|
||||
* Used in objection query builder
|
||||
*
|
||||
* @param {Array} omissions
|
||||
* @returns {Function}
|
||||
*/
|
||||
omitRow: (omissions) => {
|
||||
const omitRow = (omissions) => {
|
||||
/**
|
||||
* @param {Object} row
|
||||
* @returns {Object}
|
||||
@@ -63,15 +62,15 @@ module.exports = {
|
||||
return (row) => {
|
||||
return _.omit(row, omissions);
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
/**
|
||||
/**
|
||||
* Used in objection query builder
|
||||
*
|
||||
* @param {Array} omissions
|
||||
* @returns {Function}
|
||||
*/
|
||||
omitRows: (omissions) => {
|
||||
const omitRows = (omissions) => {
|
||||
/**
|
||||
* @param {Array} rows
|
||||
* @returns {Object}
|
||||
@@ -82,14 +81,14 @@ module.exports = {
|
||||
});
|
||||
return rows;
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
/**
|
||||
/**
|
||||
* @returns {Object} Liquid render engine
|
||||
*/
|
||||
getRenderEngine: () => {
|
||||
const getRenderEngine = () => {
|
||||
const renderEngine = new Liquid({
|
||||
root: `${__dirname}/../templates/`
|
||||
root: `${__dirname}/../templates/`,
|
||||
});
|
||||
|
||||
/**
|
||||
@@ -98,13 +97,14 @@ module.exports = {
|
||||
* directive string
|
||||
* address string
|
||||
*/
|
||||
renderEngine.registerFilter('nginxAccessRule', (v) => {
|
||||
if (typeof v.directive !== 'undefined' && typeof v.address !== 'undefined' && v.directive && v.address) {
|
||||
renderEngine.registerFilter("nginxAccessRule", (v) => {
|
||||
if (typeof v.directive !== "undefined" && typeof v.address !== "undefined" && v.directive && v.address) {
|
||||
return `${v.directive} ${v.address};`;
|
||||
}
|
||||
return '';
|
||||
return "";
|
||||
});
|
||||
|
||||
return renderEngine;
|
||||
}
|
||||
};
|
||||
|
||||
export default { exec, execFile, omitRow, omitRows, getRenderEngine };
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
const Ajv = require('ajv/dist/2020');
|
||||
const error = require('../error');
|
||||
import Ajv from "ajv/dist/2020.js";
|
||||
import errs from "../error.js";
|
||||
|
||||
const ajv = new Ajv({
|
||||
verbose: true,
|
||||
@@ -14,15 +14,15 @@ const ajv = new Ajv({
|
||||
* @param {Object} payload
|
||||
* @returns {Promise}
|
||||
*/
|
||||
function apiValidator (schema, payload/*, description*/) {
|
||||
return new Promise(function Promise_apiValidator (resolve, reject) {
|
||||
function apiValidator(schema, payload /*, description*/) {
|
||||
return new Promise(function Promise_apiValidator(resolve, reject) {
|
||||
if (schema === null) {
|
||||
reject(new error.ValidationError('Schema is undefined'));
|
||||
reject(new errs.ValidationError("Schema is undefined"));
|
||||
return;
|
||||
}
|
||||
|
||||
if (typeof payload === 'undefined') {
|
||||
reject(new error.ValidationError('Payload is undefined'));
|
||||
if (typeof payload === "undefined") {
|
||||
reject(new errs.ValidationError("Payload is undefined"));
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -32,12 +32,12 @@ function apiValidator (schema, payload/*, description*/) {
|
||||
if (valid && !validate.errors) {
|
||||
resolve(payload);
|
||||
} else {
|
||||
let message = ajv.errorsText(validate.errors);
|
||||
let err = new error.ValidationError(message);
|
||||
const message = ajv.errorsText(validate.errors);
|
||||
const err = new errs.ValidationError(message);
|
||||
err.debug = [validate.errors, payload];
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = apiValidator;
|
||||
export default apiValidator;
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
const _ = require('lodash');
|
||||
const Ajv = require('ajv/dist/2020');
|
||||
const error = require('../error');
|
||||
const commonDefinitions = require('../../schema/common.json');
|
||||
import Ajv from 'ajv/dist/2020.js';
|
||||
import _ from "lodash";
|
||||
import commonDefinitions from "../../schema/common.json" with { type: "json" };
|
||||
import errs from "../error.js";
|
||||
|
||||
RegExp.prototype.toJSON = RegExp.prototype.toString;
|
||||
|
||||
@@ -11,7 +11,7 @@ const ajv = new Ajv({
|
||||
allowUnionTypes: true,
|
||||
coerceTypes: true,
|
||||
strict: false,
|
||||
schemas: [commonDefinitions]
|
||||
schemas: [commonDefinitions],
|
||||
});
|
||||
|
||||
/**
|
||||
@@ -20,26 +20,26 @@ const ajv = new Ajv({
|
||||
* @param {Object} payload
|
||||
* @returns {Promise}
|
||||
*/
|
||||
function validator (schema, payload) {
|
||||
return new Promise(function (resolve, reject) {
|
||||
const validator = (schema, payload) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
if (!payload) {
|
||||
reject(new error.InternalValidationError('Payload is falsy'));
|
||||
reject(new errs.InternalValidationError("Payload is falsy"));
|
||||
} else {
|
||||
try {
|
||||
let validate = ajv.compile(schema);
|
||||
let valid = validate(payload);
|
||||
const validate = ajv.compile(schema);
|
||||
const valid = validate(payload);
|
||||
|
||||
if (valid && !validate.errors) {
|
||||
resolve(_.cloneDeep(payload));
|
||||
} else {
|
||||
let message = ajv.errorsText(validate.errors);
|
||||
reject(new error.InternalValidationError(message));
|
||||
const message = ajv.errorsText(validate.errors);
|
||||
reject(new errs.InternalValidationError(message));
|
||||
}
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = validator;
|
||||
export default validator;
|
||||
|
||||
@@ -1,14 +1,18 @@
|
||||
const {Signale} = require('signale');
|
||||
import signale from "signale";
|
||||
|
||||
module.exports = {
|
||||
global: new Signale({scope: 'Global '}),
|
||||
migrate: new Signale({scope: 'Migrate '}),
|
||||
express: new Signale({scope: 'Express '}),
|
||||
access: new Signale({scope: 'Access '}),
|
||||
nginx: new Signale({scope: 'Nginx '}),
|
||||
ssl: new Signale({scope: 'SSL '}),
|
||||
certbot: new Signale({scope: 'Certbot '}),
|
||||
import: new Signale({scope: 'Importer '}),
|
||||
setup: new Signale({scope: 'Setup '}),
|
||||
ip_ranges: new Signale({scope: 'IP Ranges'})
|
||||
const opts = {
|
||||
logLevel: "info",
|
||||
};
|
||||
|
||||
const global = new signale.Signale({ scope: "Global ", ...opts });
|
||||
const migrate = new signale.Signale({ scope: "Migrate ", ...opts });
|
||||
const express = new signale.Signale({ scope: "Express ", ...opts });
|
||||
const access = new signale.Signale({ scope: "Access ", ...opts });
|
||||
const nginx = new signale.Signale({ scope: "Nginx ", ...opts });
|
||||
const ssl = new signale.Signale({ scope: "SSL ", ...opts });
|
||||
const certbot = new signale.Signale({ scope: "Certbot ", ...opts });
|
||||
const importer = new signale.Signale({ scope: "Importer ", ...opts });
|
||||
const setup = new signale.Signale({ scope: "Setup ", ...opts });
|
||||
const ipRanges = new signale.Signale({ scope: "IP Ranges", ...opts });
|
||||
|
||||
export { global, migrate, express, access, nginx, ssl, certbot, importer, setup, ipRanges };
|
||||
|
||||
@@ -1,15 +1,13 @@
|
||||
const db = require('./db');
|
||||
const logger = require('./logger').migrate;
|
||||
import db from "./db.js";
|
||||
import { migrate as logger } from "./logger.js";
|
||||
|
||||
module.exports = {
|
||||
latest: function () {
|
||||
return db.migrate.currentVersion()
|
||||
.then((version) => {
|
||||
logger.info('Current database version:', version);
|
||||
return db.migrate.latest({
|
||||
tableName: 'migrations',
|
||||
directory: 'migrations'
|
||||
const migrateUp = async () => {
|
||||
const version = await db.migrate.currentVersion();
|
||||
logger.info("Current database version:", version);
|
||||
return await db.migrate.latest({
|
||||
tableName: "migrations",
|
||||
directory: "migrations",
|
||||
});
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
export { migrateUp };
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
const migrate_name = 'initial-schema';
|
||||
const logger = require('../logger').migrate;
|
||||
import { migrate as logger } from "../logger.js";
|
||||
|
||||
const migrateName = "initial-schema";
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
@@ -7,199 +8,199 @@ const logger = require('../logger').migrate;
|
||||
* @see http://knexjs.org/#Schema
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex/*, Promise*/) {
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
const up = (knex) => {
|
||||
logger.info(`[${migrateName}] Migrating Up...`);
|
||||
|
||||
return knex.schema.createTable('auth', (table) => {
|
||||
return knex.schema
|
||||
.createTable("auth", (table) => {
|
||||
table.increments().primary();
|
||||
table.dateTime('created_on').notNull();
|
||||
table.dateTime('modified_on').notNull();
|
||||
table.integer('user_id').notNull().unsigned();
|
||||
table.string('type', 30).notNull();
|
||||
table.string('secret').notNull();
|
||||
table.json('meta').notNull();
|
||||
table.integer('is_deleted').notNull().unsigned().defaultTo(0);
|
||||
table.dateTime("created_on").notNull();
|
||||
table.dateTime("modified_on").notNull();
|
||||
table.integer("user_id").notNull().unsigned();
|
||||
table.string("type", 30).notNull();
|
||||
table.string("secret").notNull();
|
||||
table.json("meta").notNull();
|
||||
table.integer("is_deleted").notNull().unsigned().defaultTo(0);
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] auth Table created');
|
||||
logger.info(`[${migrateName}] auth Table created`);
|
||||
|
||||
return knex.schema.createTable('user', (table) => {
|
||||
return knex.schema.createTable("user", (table) => {
|
||||
table.increments().primary();
|
||||
table.dateTime('created_on').notNull();
|
||||
table.dateTime('modified_on').notNull();
|
||||
table.integer('is_deleted').notNull().unsigned().defaultTo(0);
|
||||
table.integer('is_disabled').notNull().unsigned().defaultTo(0);
|
||||
table.string('email').notNull();
|
||||
table.string('name').notNull();
|
||||
table.string('nickname').notNull();
|
||||
table.string('avatar').notNull();
|
||||
table.json('roles').notNull();
|
||||
table.dateTime("created_on").notNull();
|
||||
table.dateTime("modified_on").notNull();
|
||||
table.integer("is_deleted").notNull().unsigned().defaultTo(0);
|
||||
table.integer("is_disabled").notNull().unsigned().defaultTo(0);
|
||||
table.string("email").notNull();
|
||||
table.string("name").notNull();
|
||||
table.string("nickname").notNull();
|
||||
table.string("avatar").notNull();
|
||||
table.json("roles").notNull();
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] user Table created');
|
||||
logger.info(`[${migrateName}] user Table created`);
|
||||
|
||||
return knex.schema.createTable('user_permission', (table) => {
|
||||
return knex.schema.createTable("user_permission", (table) => {
|
||||
table.increments().primary();
|
||||
table.dateTime('created_on').notNull();
|
||||
table.dateTime('modified_on').notNull();
|
||||
table.integer('user_id').notNull().unsigned();
|
||||
table.string('visibility').notNull();
|
||||
table.string('proxy_hosts').notNull();
|
||||
table.string('redirection_hosts').notNull();
|
||||
table.string('dead_hosts').notNull();
|
||||
table.string('streams').notNull();
|
||||
table.string('access_lists').notNull();
|
||||
table.string('certificates').notNull();
|
||||
table.unique('user_id');
|
||||
table.dateTime("created_on").notNull();
|
||||
table.dateTime("modified_on").notNull();
|
||||
table.integer("user_id").notNull().unsigned();
|
||||
table.string("visibility").notNull();
|
||||
table.string("proxy_hosts").notNull();
|
||||
table.string("redirection_hosts").notNull();
|
||||
table.string("dead_hosts").notNull();
|
||||
table.string("streams").notNull();
|
||||
table.string("access_lists").notNull();
|
||||
table.string("certificates").notNull();
|
||||
table.unique("user_id");
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] user_permission Table created');
|
||||
logger.info(`[${migrateName}] user_permission Table created`);
|
||||
|
||||
return knex.schema.createTable('proxy_host', (table) => {
|
||||
return knex.schema.createTable("proxy_host", (table) => {
|
||||
table.increments().primary();
|
||||
table.dateTime('created_on').notNull();
|
||||
table.dateTime('modified_on').notNull();
|
||||
table.integer('owner_user_id').notNull().unsigned();
|
||||
table.integer('is_deleted').notNull().unsigned().defaultTo(0);
|
||||
table.json('domain_names').notNull();
|
||||
table.string('forward_ip').notNull();
|
||||
table.integer('forward_port').notNull().unsigned();
|
||||
table.integer('access_list_id').notNull().unsigned().defaultTo(0);
|
||||
table.integer('certificate_id').notNull().unsigned().defaultTo(0);
|
||||
table.integer('ssl_forced').notNull().unsigned().defaultTo(0);
|
||||
table.integer('caching_enabled').notNull().unsigned().defaultTo(0);
|
||||
table.integer('block_exploits').notNull().unsigned().defaultTo(0);
|
||||
table.text('advanced_config').notNull().defaultTo('');
|
||||
table.json('meta').notNull();
|
||||
table.dateTime("created_on").notNull();
|
||||
table.dateTime("modified_on").notNull();
|
||||
table.integer("owner_user_id").notNull().unsigned();
|
||||
table.integer("is_deleted").notNull().unsigned().defaultTo(0);
|
||||
table.json("domain_names").notNull();
|
||||
table.string("forward_ip").notNull();
|
||||
table.integer("forward_port").notNull().unsigned();
|
||||
table.integer("access_list_id").notNull().unsigned().defaultTo(0);
|
||||
table.integer("certificate_id").notNull().unsigned().defaultTo(0);
|
||||
table.integer("ssl_forced").notNull().unsigned().defaultTo(0);
|
||||
table.integer("caching_enabled").notNull().unsigned().defaultTo(0);
|
||||
table.integer("block_exploits").notNull().unsigned().defaultTo(0);
|
||||
table.text("advanced_config").notNull().defaultTo("");
|
||||
table.json("meta").notNull();
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] proxy_host Table created');
|
||||
logger.info(`[${migrateName}] proxy_host Table created`);
|
||||
|
||||
return knex.schema.createTable('redirection_host', (table) => {
|
||||
return knex.schema.createTable("redirection_host", (table) => {
|
||||
table.increments().primary();
|
||||
table.dateTime('created_on').notNull();
|
||||
table.dateTime('modified_on').notNull();
|
||||
table.integer('owner_user_id').notNull().unsigned();
|
||||
table.integer('is_deleted').notNull().unsigned().defaultTo(0);
|
||||
table.json('domain_names').notNull();
|
||||
table.string('forward_domain_name').notNull();
|
||||
table.integer('preserve_path').notNull().unsigned().defaultTo(0);
|
||||
table.integer('certificate_id').notNull().unsigned().defaultTo(0);
|
||||
table.integer('ssl_forced').notNull().unsigned().defaultTo(0);
|
||||
table.integer('block_exploits').notNull().unsigned().defaultTo(0);
|
||||
table.text('advanced_config').notNull().defaultTo('');
|
||||
table.json('meta').notNull();
|
||||
table.dateTime("created_on").notNull();
|
||||
table.dateTime("modified_on").notNull();
|
||||
table.integer("owner_user_id").notNull().unsigned();
|
||||
table.integer("is_deleted").notNull().unsigned().defaultTo(0);
|
||||
table.json("domain_names").notNull();
|
||||
table.string("forward_domain_name").notNull();
|
||||
table.integer("preserve_path").notNull().unsigned().defaultTo(0);
|
||||
table.integer("certificate_id").notNull().unsigned().defaultTo(0);
|
||||
table.integer("ssl_forced").notNull().unsigned().defaultTo(0);
|
||||
table.integer("block_exploits").notNull().unsigned().defaultTo(0);
|
||||
table.text("advanced_config").notNull().defaultTo("");
|
||||
table.json("meta").notNull();
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] redirection_host Table created');
|
||||
logger.info(`[${migrateName}] redirection_host Table created`);
|
||||
|
||||
return knex.schema.createTable('dead_host', (table) => {
|
||||
return knex.schema.createTable("dead_host", (table) => {
|
||||
table.increments().primary();
|
||||
table.dateTime('created_on').notNull();
|
||||
table.dateTime('modified_on').notNull();
|
||||
table.integer('owner_user_id').notNull().unsigned();
|
||||
table.integer('is_deleted').notNull().unsigned().defaultTo(0);
|
||||
table.json('domain_names').notNull();
|
||||
table.integer('certificate_id').notNull().unsigned().defaultTo(0);
|
||||
table.integer('ssl_forced').notNull().unsigned().defaultTo(0);
|
||||
table.text('advanced_config').notNull().defaultTo('');
|
||||
table.json('meta').notNull();
|
||||
table.dateTime("created_on").notNull();
|
||||
table.dateTime("modified_on").notNull();
|
||||
table.integer("owner_user_id").notNull().unsigned();
|
||||
table.integer("is_deleted").notNull().unsigned().defaultTo(0);
|
||||
table.json("domain_names").notNull();
|
||||
table.integer("certificate_id").notNull().unsigned().defaultTo(0);
|
||||
table.integer("ssl_forced").notNull().unsigned().defaultTo(0);
|
||||
table.text("advanced_config").notNull().defaultTo("");
|
||||
table.json("meta").notNull();
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] dead_host Table created');
|
||||
logger.info(`[${migrateName}] dead_host Table created`);
|
||||
|
||||
return knex.schema.createTable('stream', (table) => {
|
||||
return knex.schema.createTable("stream", (table) => {
|
||||
table.increments().primary();
|
||||
table.dateTime('created_on').notNull();
|
||||
table.dateTime('modified_on').notNull();
|
||||
table.integer('owner_user_id').notNull().unsigned();
|
||||
table.integer('is_deleted').notNull().unsigned().defaultTo(0);
|
||||
table.integer('incoming_port').notNull().unsigned();
|
||||
table.string('forward_ip').notNull();
|
||||
table.integer('forwarding_port').notNull().unsigned();
|
||||
table.integer('tcp_forwarding').notNull().unsigned().defaultTo(0);
|
||||
table.integer('udp_forwarding').notNull().unsigned().defaultTo(0);
|
||||
table.json('meta').notNull();
|
||||
table.dateTime("created_on").notNull();
|
||||
table.dateTime("modified_on").notNull();
|
||||
table.integer("owner_user_id").notNull().unsigned();
|
||||
table.integer("is_deleted").notNull().unsigned().defaultTo(0);
|
||||
table.integer("incoming_port").notNull().unsigned();
|
||||
table.string("forward_ip").notNull();
|
||||
table.integer("forwarding_port").notNull().unsigned();
|
||||
table.integer("tcp_forwarding").notNull().unsigned().defaultTo(0);
|
||||
table.integer("udp_forwarding").notNull().unsigned().defaultTo(0);
|
||||
table.json("meta").notNull();
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] stream Table created');
|
||||
logger.info(`[${migrateName}] stream Table created`);
|
||||
|
||||
return knex.schema.createTable('access_list', (table) => {
|
||||
return knex.schema.createTable("access_list", (table) => {
|
||||
table.increments().primary();
|
||||
table.dateTime('created_on').notNull();
|
||||
table.dateTime('modified_on').notNull();
|
||||
table.integer('owner_user_id').notNull().unsigned();
|
||||
table.integer('is_deleted').notNull().unsigned().defaultTo(0);
|
||||
table.string('name').notNull();
|
||||
table.json('meta').notNull();
|
||||
table.dateTime("created_on").notNull();
|
||||
table.dateTime("modified_on").notNull();
|
||||
table.integer("owner_user_id").notNull().unsigned();
|
||||
table.integer("is_deleted").notNull().unsigned().defaultTo(0);
|
||||
table.string("name").notNull();
|
||||
table.json("meta").notNull();
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] access_list Table created');
|
||||
logger.info(`[${migrateName}] access_list Table created`);
|
||||
|
||||
return knex.schema.createTable('certificate', (table) => {
|
||||
return knex.schema.createTable("certificate", (table) => {
|
||||
table.increments().primary();
|
||||
table.dateTime('created_on').notNull();
|
||||
table.dateTime('modified_on').notNull();
|
||||
table.integer('owner_user_id').notNull().unsigned();
|
||||
table.integer('is_deleted').notNull().unsigned().defaultTo(0);
|
||||
table.string('provider').notNull();
|
||||
table.string('nice_name').notNull().defaultTo('');
|
||||
table.json('domain_names').notNull();
|
||||
table.dateTime('expires_on').notNull();
|
||||
table.json('meta').notNull();
|
||||
table.dateTime("created_on").notNull();
|
||||
table.dateTime("modified_on").notNull();
|
||||
table.integer("owner_user_id").notNull().unsigned();
|
||||
table.integer("is_deleted").notNull().unsigned().defaultTo(0);
|
||||
table.string("provider").notNull();
|
||||
table.string("nice_name").notNull().defaultTo("");
|
||||
table.json("domain_names").notNull();
|
||||
table.dateTime("expires_on").notNull();
|
||||
table.json("meta").notNull();
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] certificate Table created');
|
||||
logger.info(`[${migrateName}] certificate Table created`);
|
||||
|
||||
return knex.schema.createTable('access_list_auth', (table) => {
|
||||
return knex.schema.createTable("access_list_auth", (table) => {
|
||||
table.increments().primary();
|
||||
table.dateTime('created_on').notNull();
|
||||
table.dateTime('modified_on').notNull();
|
||||
table.integer('access_list_id').notNull().unsigned();
|
||||
table.string('username').notNull();
|
||||
table.string('password').notNull();
|
||||
table.json('meta').notNull();
|
||||
table.dateTime("created_on").notNull();
|
||||
table.dateTime("modified_on").notNull();
|
||||
table.integer("access_list_id").notNull().unsigned();
|
||||
table.string("username").notNull();
|
||||
table.string("password").notNull();
|
||||
table.json("meta").notNull();
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] access_list_auth Table created');
|
||||
logger.info(`[${migrateName}] access_list_auth Table created`);
|
||||
|
||||
return knex.schema.createTable('audit_log', (table) => {
|
||||
return knex.schema.createTable("audit_log", (table) => {
|
||||
table.increments().primary();
|
||||
table.dateTime('created_on').notNull();
|
||||
table.dateTime('modified_on').notNull();
|
||||
table.integer('user_id').notNull().unsigned();
|
||||
table.string('object_type').notNull().defaultTo('');
|
||||
table.integer('object_id').notNull().unsigned().defaultTo(0);
|
||||
table.string('action').notNull();
|
||||
table.json('meta').notNull();
|
||||
table.dateTime("created_on").notNull();
|
||||
table.dateTime("modified_on").notNull();
|
||||
table.integer("user_id").notNull().unsigned();
|
||||
table.string("object_type").notNull().defaultTo("");
|
||||
table.integer("object_id").notNull().unsigned().defaultTo(0);
|
||||
table.string("action").notNull();
|
||||
table.json("meta").notNull();
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] audit_log Table created');
|
||||
logger.info(`[${migrateName}] audit_log Table created`);
|
||||
});
|
||||
|
||||
};
|
||||
|
||||
/**
* Undo Migrate
*
* @param {Object} knex
* @param {Promise} Promise
* @returns {Promise}
*/
exports.down = function (knex, Promise) {
logger.warn('[' + migrate_name + '] You can\'t migrate down the initial data.');
const down = (_knex) => {
logger.warn(`[${migrateName}] You can't migrate down the initial data.`);
return Promise.resolve(true);
};

export { up, down };
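For reference, the converted migrations export up and down as named ES-module exports rather than assigning to exports. A minimal sketch (not the project's actual configuration) of how such migration files might be loaded and run, assuming a knex version with ES-module support and "type": "module" in package.json; the client, connection and directory values are placeholders:

import knexLib from "knex";

const knex = knexLib({
    client: "sqlite3", // assumption: any supported client works the same way here
    connection: { filename: "./dev.sqlite" }, // hypothetical connection settings
    useNullAsDefault: true,
    migrations: {
        directory: "./migrations", // assumption: folder holding files like the ones in this diff
        loadExtensions: [".js"],
    },
});

// Apply all pending migrations, then release the connection pool.
await knex.migrate.latest();
await knex.destroy();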
@@ -1,5 +1,6 @@
const migrate_name = 'websockets';
const logger = require('../logger').migrate;
import { migrate as logger } from "../logger.js";

const migrateName = "websockets";

/**
* Migrate
@@ -7,29 +8,29 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema
*
* @param {Object} knex
* @param {Promise} Promise
* @returns {Promise}
*/
exports.up = function (knex/*, Promise*/) {
logger.info('[' + migrate_name + '] Migrating Up...');
const up = (knex) => {
logger.info(`[${migrateName}] Migrating Up...`);

return knex.schema.table('proxy_host', function (proxy_host) {
proxy_host.integer('allow_websocket_upgrade').notNull().unsigned().defaultTo(0);
return knex.schema
.table("proxy_host", (proxy_host) => {
proxy_host.integer("allow_websocket_upgrade").notNull().unsigned().defaultTo(0);
})
.then(() => {
logger.info('[' + migrate_name + '] proxy_host Table altered');
logger.info(`[${migrateName}] proxy_host Table altered`);
});

};

/**
* Undo Migrate
*
* @param {Object} knex
* @param {Promise} Promise
* @returns {Promise}
*/
exports.down = function (knex, Promise) {
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
const down = (_knex) => {
logger.warn(`[${migrateName}] You can't migrate down this one.`);
return Promise.resolve(true);
};

export { up, down };
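The converted migrations share one shape: an arrow-function up() that alters a table and logs, and a down() that either reverses the change or resolves immediately. A hypothetical new migration following that shape; the example_flag column and migration name are illustrative only, not part of the project:

import { migrate as logger } from "../logger.js";

const migrateName = "example_flag";

const up = (knex) => {
    logger.info(`[${migrateName}] Migrating Up...`);

    // Add a hypothetical flag column to proxy_host, mirroring the converted style above.
    return knex.schema
        .table("proxy_host", (proxy_host) => {
            proxy_host.integer("example_flag").notNull().unsigned().defaultTo(0);
        })
        .then(() => {
            logger.info(`[${migrateName}] proxy_host Table altered`);
        });
};

const down = (_knex) => {
    logger.warn(`[${migrateName}] You can't migrate down this one.`);
    return Promise.resolve(true);
};

export { up, down };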
@@ -1,5 +1,6 @@
const migrate_name = 'forward_host';
const logger = require('../logger').migrate;
import { migrate as logger } from "../logger.js";

const migrateName = "forward_host";

/**
* Migrate
@@ -7,17 +8,17 @@ const logger = require('../logger').migrate;
* @see http://knexjs.org/#Schema
*
* @param {Object} knex
* @param {Promise} Promise
* @returns {Promise}
*/
exports.up = function (knex/*, Promise*/) {
logger.info('[' + migrate_name + '] Migrating Up...');
const up = (knex) => {
logger.info(`[${migrateName}] Migrating Up...`);

return knex.schema.table('proxy_host', function (proxy_host) {
proxy_host.renameColumn('forward_ip', 'forward_host');
return knex.schema
.table("proxy_host", (proxy_host) => {
proxy_host.renameColumn("forward_ip", "forward_host");
})
.then(() => {
logger.info('[' + migrate_name + '] proxy_host Table altered');
logger.info(`[${migrateName}] proxy_host Table altered`);
});
};

@@ -25,10 +26,11 @@ exports.up = function (knex/*, Promise*/) {
* Undo Migrate
*
* @param {Object} knex
* @param {Promise} Promise
* @returns {Promise}
*/
exports.down = function (knex, Promise) {
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
const down = (_knex) => {
logger.warn(`[${migrateName}] You can't migrate down this one.`);
return Promise.resolve(true);
};

export { up, down };
@@ -1,5 +1,6 @@
|
||||
const migrate_name = 'http2_support';
|
||||
const logger = require('../logger').migrate;
|
||||
import { migrate as logger } from "../logger.js";
|
||||
|
||||
const migrateName = "http2_support";
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
@@ -7,31 +8,31 @@ const logger = require('../logger').migrate;
|
||||
* @see http://knexjs.org/#Schema
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex/*, Promise*/) {
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
const up = (knex) => {
|
||||
logger.info(`[${migrateName}] Migrating Up...`);
|
||||
|
||||
return knex.schema.table('proxy_host', function (proxy_host) {
|
||||
proxy_host.integer('http2_support').notNull().unsigned().defaultTo(0);
|
||||
return knex.schema
|
||||
.table("proxy_host", (proxy_host) => {
|
||||
proxy_host.integer("http2_support").notNull().unsigned().defaultTo(0);
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] proxy_host Table altered');
|
||||
logger.info(`[${migrateName}] proxy_host Table altered`);
|
||||
|
||||
return knex.schema.table('redirection_host', function (redirection_host) {
|
||||
redirection_host.integer('http2_support').notNull().unsigned().defaultTo(0);
|
||||
return knex.schema.table("redirection_host", (redirection_host) => {
|
||||
redirection_host.integer("http2_support").notNull().unsigned().defaultTo(0);
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] redirection_host Table altered');
|
||||
logger.info(`[${migrateName}] redirection_host Table altered`);
|
||||
|
||||
return knex.schema.table('dead_host', function (dead_host) {
|
||||
dead_host.integer('http2_support').notNull().unsigned().defaultTo(0);
|
||||
return knex.schema.table("dead_host", (dead_host) => {
|
||||
dead_host.integer("http2_support").notNull().unsigned().defaultTo(0);
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] dead_host Table altered');
|
||||
logger.info(`[${migrateName}] dead_host Table altered`);
|
||||
});
|
||||
};
|
||||
|
||||
@@ -39,11 +40,11 @@ exports.up = function (knex/*, Promise*/) {
|
||||
* Undo Migrate
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex, Promise) {
|
||||
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
|
||||
const down = (_knex) => {
|
||||
logger.warn(`[${migrateName}] You can't migrate down this one.`);
|
||||
return Promise.resolve(true);
|
||||
};
|
||||
|
||||
export { up, down };
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
const migrate_name = 'forward_scheme';
|
||||
const logger = require('../logger').migrate;
|
||||
import { migrate as logger } from "../logger.js";
|
||||
|
||||
const migrateName = "forward_scheme";
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
@@ -7,17 +8,17 @@ const logger = require('../logger').migrate;
|
||||
* @see http://knexjs.org/#Schema
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex/*, Promise*/) {
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
const up = (knex) => {
|
||||
logger.info(`[${migrateName}] Migrating Up...`);
|
||||
|
||||
return knex.schema.table('proxy_host', function (proxy_host) {
|
||||
proxy_host.string('forward_scheme').notNull().defaultTo('http');
|
||||
return knex.schema
|
||||
.table("proxy_host", (proxy_host) => {
|
||||
proxy_host.string("forward_scheme").notNull().defaultTo("http");
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] proxy_host Table altered');
|
||||
logger.info(`[${migrateName}] proxy_host Table altered`);
|
||||
});
|
||||
};
|
||||
|
||||
@@ -25,10 +26,11 @@ exports.up = function (knex/*, Promise*/) {
|
||||
* Undo Migrate
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex, Promise) {
|
||||
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
|
||||
const down = (_knex) => {
|
||||
logger.warn(`[${migrateName}] You can't migrate down this one.`);
|
||||
return Promise.resolve(true);
|
||||
};
|
||||
|
||||
export { up, down };
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
const migrate_name = 'disabled';
|
||||
const logger = require('../logger').migrate;
|
||||
import { migrate as logger } from "../logger.js";
|
||||
|
||||
const migrateName = "disabled";
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
@@ -7,38 +8,38 @@ const logger = require('../logger').migrate;
|
||||
* @see http://knexjs.org/#Schema
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex/*, Promise*/) {
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
const up = (knex) => {
|
||||
logger.info(`[${migrateName}] Migrating Up...`);
|
||||
|
||||
return knex.schema.table('proxy_host', function (proxy_host) {
|
||||
proxy_host.integer('enabled').notNull().unsigned().defaultTo(1);
|
||||
return knex.schema
|
||||
.table("proxy_host", (proxy_host) => {
|
||||
proxy_host.integer("enabled").notNull().unsigned().defaultTo(1);
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] proxy_host Table altered');
|
||||
logger.info(`[${migrateName}] proxy_host Table altered`);
|
||||
|
||||
return knex.schema.table('redirection_host', function (redirection_host) {
|
||||
redirection_host.integer('enabled').notNull().unsigned().defaultTo(1);
|
||||
return knex.schema.table("redirection_host", (redirection_host) => {
|
||||
redirection_host.integer("enabled").notNull().unsigned().defaultTo(1);
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] redirection_host Table altered');
|
||||
logger.info(`[${migrateName}] redirection_host Table altered`);
|
||||
|
||||
return knex.schema.table('dead_host', function (dead_host) {
|
||||
dead_host.integer('enabled').notNull().unsigned().defaultTo(1);
|
||||
return knex.schema.table("dead_host", (dead_host) => {
|
||||
dead_host.integer("enabled").notNull().unsigned().defaultTo(1);
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] dead_host Table altered');
|
||||
logger.info(`[${migrateName}] dead_host Table altered`);
|
||||
|
||||
return knex.schema.table('stream', function (stream) {
|
||||
stream.integer('enabled').notNull().unsigned().defaultTo(1);
|
||||
return knex.schema.table("stream", (stream) => {
|
||||
stream.integer("enabled").notNull().unsigned().defaultTo(1);
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] stream Table altered');
|
||||
logger.info(`[${migrateName}] stream Table altered`);
|
||||
});
|
||||
};
|
||||
|
||||
@@ -46,10 +47,11 @@ exports.up = function (knex/*, Promise*/) {
|
||||
* Undo Migrate
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex, Promise) {
|
||||
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
|
||||
const down = (_knex) => {
|
||||
logger.warn(`[${migrateName}] You can't migrate down this one.`);
|
||||
return Promise.resolve(true);
|
||||
};
|
||||
|
||||
export { up, down };
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
const migrate_name = 'custom_locations';
|
||||
const logger = require('../logger').migrate;
|
||||
import { migrate as logger } from "../logger.js";
|
||||
|
||||
const migrateName = "custom_locations";
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
@@ -8,17 +9,17 @@ const logger = require('../logger').migrate;
|
||||
* @see http://knexjs.org/#Schema
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex/*, Promise*/) {
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
const up = (knex) => {
|
||||
logger.info(`[${migrateName}] Migrating Up...`);
|
||||
|
||||
return knex.schema.table('proxy_host', function (proxy_host) {
|
||||
proxy_host.json('locations');
|
||||
return knex.schema
|
||||
.table("proxy_host", (proxy_host) => {
|
||||
proxy_host.json("locations");
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] proxy_host Table altered');
|
||||
logger.info(`[${migrateName}] proxy_host Table altered`);
|
||||
});
|
||||
};
|
||||
|
||||
@@ -26,10 +27,11 @@ exports.up = function (knex/*, Promise*/) {
|
||||
* Undo Migrate
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex, Promise) {
|
||||
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
|
||||
const down = (_knex) => {
|
||||
logger.warn(`[${migrateName}] You can't migrate down this one.`);
|
||||
return Promise.resolve(true);
|
||||
};
|
||||
|
||||
export { up, down };
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
const migrate_name = 'hsts';
|
||||
const logger = require('../logger').migrate;
|
||||
import { migrate as logger } from "../logger.js";
|
||||
|
||||
const migrateName = "hsts";
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
@@ -7,34 +8,34 @@ const logger = require('../logger').migrate;
|
||||
* @see http://knexjs.org/#Schema
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex/*, Promise*/) {
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
const up = (knex) => {
|
||||
logger.info(`[${migrateName}] Migrating Up...`);
|
||||
|
||||
return knex.schema.table('proxy_host', function (proxy_host) {
|
||||
proxy_host.integer('hsts_enabled').notNull().unsigned().defaultTo(0);
|
||||
proxy_host.integer('hsts_subdomains').notNull().unsigned().defaultTo(0);
|
||||
return knex.schema
|
||||
.table("proxy_host", (proxy_host) => {
|
||||
proxy_host.integer("hsts_enabled").notNull().unsigned().defaultTo(0);
|
||||
proxy_host.integer("hsts_subdomains").notNull().unsigned().defaultTo(0);
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] proxy_host Table altered');
|
||||
logger.info(`[${migrateName}] proxy_host Table altered`);
|
||||
|
||||
return knex.schema.table('redirection_host', function (redirection_host) {
|
||||
redirection_host.integer('hsts_enabled').notNull().unsigned().defaultTo(0);
|
||||
redirection_host.integer('hsts_subdomains').notNull().unsigned().defaultTo(0);
|
||||
return knex.schema.table("redirection_host", (redirection_host) => {
|
||||
redirection_host.integer("hsts_enabled").notNull().unsigned().defaultTo(0);
|
||||
redirection_host.integer("hsts_subdomains").notNull().unsigned().defaultTo(0);
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] redirection_host Table altered');
|
||||
logger.info(`[${migrateName}] redirection_host Table altered`);
|
||||
|
||||
return knex.schema.table('dead_host', function (dead_host) {
|
||||
dead_host.integer('hsts_enabled').notNull().unsigned().defaultTo(0);
|
||||
dead_host.integer('hsts_subdomains').notNull().unsigned().defaultTo(0);
|
||||
return knex.schema.table("dead_host", (dead_host) => {
|
||||
dead_host.integer("hsts_enabled").notNull().unsigned().defaultTo(0);
|
||||
dead_host.integer("hsts_subdomains").notNull().unsigned().defaultTo(0);
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] dead_host Table altered');
|
||||
logger.info(`[${migrateName}] dead_host Table altered`);
|
||||
});
|
||||
};
|
||||
|
||||
@@ -42,10 +43,11 @@ exports.up = function (knex/*, Promise*/) {
|
||||
* Undo Migrate
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex, Promise) {
|
||||
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
|
||||
const down = (_knex) => {
|
||||
logger.warn(`[${migrateName}] You can't migrate down this one.`);
|
||||
return Promise.resolve(true);
|
||||
};
|
||||
|
||||
export { up, down };
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
const migrate_name = 'settings';
|
||||
const logger = require('../logger').migrate;
|
||||
import { migrate as logger } from "../logger.js";
|
||||
|
||||
const migrateName = "settings";
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
@@ -7,11 +8,10 @@ const logger = require('../logger').migrate;
|
||||
* @see http://knexjs.org/#Schema
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex/*, Promise*/) {
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
const up = (knex) => {
|
||||
logger.info(`[${migrateName}] Migrating Up...`);
|
||||
|
||||
return knex.schema.createTable('setting', (table) => {
|
||||
table.string('id').notNull().primary();
|
||||
@@ -21,7 +21,7 @@ exports.up = function (knex/*, Promise*/) {
|
||||
table.json('meta').notNull();
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] setting Table created');
|
||||
logger.info(`[${migrateName}] setting Table created`);
|
||||
});
|
||||
};
|
||||
|
||||
@@ -29,10 +29,11 @@ exports.up = function (knex/*, Promise*/) {
|
||||
* Undo Migrate
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex, Promise) {
|
||||
logger.warn('[' + migrate_name + '] You can\'t migrate down the initial data.');
|
||||
const down = (_knex) => {
|
||||
logger.warn(`[${migrateName}] You can't migrate down the initial data.`);
|
||||
return Promise.resolve(true);
|
||||
};
|
||||
|
||||
export { up, down };
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
const migrate_name = 'access_list_client';
|
||||
const logger = require('../logger').migrate;
|
||||
import { migrate as logger } from "../logger.js";
|
||||
|
||||
const migrateName = "access_list_client";
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
@@ -7,32 +8,30 @@ const logger = require('../logger').migrate;
|
||||
* @see http://knexjs.org/#Schema
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex/*, Promise*/) {
|
||||
const up = (knex) => {
|
||||
logger.info(`[${migrateName}] Migrating Up...`);
|
||||
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
|
||||
return knex.schema.createTable('access_list_client', (table) => {
|
||||
return knex.schema
|
||||
.createTable("access_list_client", (table) => {
|
||||
table.increments().primary();
|
||||
table.dateTime('created_on').notNull();
|
||||
table.dateTime('modified_on').notNull();
|
||||
table.integer('access_list_id').notNull().unsigned();
|
||||
table.string('address').notNull();
|
||||
table.string('directive').notNull();
|
||||
table.json('meta').notNull();
|
||||
|
||||
table.dateTime("created_on").notNull();
|
||||
table.dateTime("modified_on").notNull();
|
||||
table.integer("access_list_id").notNull().unsigned();
|
||||
table.string("address").notNull();
|
||||
table.string("directive").notNull();
|
||||
table.json("meta").notNull();
|
||||
})
|
||||
.then(function () {
|
||||
logger.info('[' + migrate_name + '] access_list_client Table created');
|
||||
.then(() => {
|
||||
logger.info(`[${migrateName}] access_list_client Table created`);
|
||||
|
||||
return knex.schema.table('access_list', function (access_list) {
|
||||
access_list.integer('satify_any').notNull().defaultTo(0);
|
||||
return knex.schema.table("access_list", (access_list) => {
|
||||
access_list.integer("satify_any").notNull().defaultTo(0);
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] access_list Table altered');
|
||||
logger.info(`[${migrateName}] access_list Table altered`);
|
||||
});
|
||||
};
|
||||
|
||||
@@ -40,14 +39,14 @@ exports.up = function (knex/*, Promise*/) {
|
||||
* Undo Migrate
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex/*, Promise*/) {
|
||||
logger.info('[' + migrate_name + '] Migrating Down...');
|
||||
const down = (knex) => {
|
||||
logger.info(`[${migrateName}] Migrating Down...`);
|
||||
|
||||
return knex.schema.dropTable('access_list_client')
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] access_list_client Table dropped');
|
||||
return knex.schema.dropTable("access_list_client").then(() => {
|
||||
logger.info(`[${migrateName}] access_list_client Table dropped`);
|
||||
});
|
||||
};
|
||||
|
||||
export { up, down };
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
const migrate_name = 'access_list_client_fix';
|
||||
const logger = require('../logger').migrate;
|
||||
import { migrate as logger } from "../logger.js";
|
||||
|
||||
const migrateName = "access_list_client_fix";
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
@@ -7,17 +8,17 @@ const logger = require('../logger').migrate;
|
||||
* @see http://knexjs.org/#Schema
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex/*, Promise*/) {
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
const up = (knex) => {
|
||||
logger.info(`[${migrateName}] Migrating Up...`);
|
||||
|
||||
return knex.schema.table('access_list', function (access_list) {
|
||||
access_list.renameColumn('satify_any', 'satisfy_any');
|
||||
return knex.schema
|
||||
.table("access_list", (access_list) => {
|
||||
access_list.renameColumn("satify_any", "satisfy_any");
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] access_list Table altered');
|
||||
logger.info(`[${migrateName}] access_list Table altered`);
|
||||
});
|
||||
};
|
||||
|
||||
@@ -25,10 +26,11 @@ exports.up = function (knex/*, Promise*/) {
|
||||
* Undo Migrate
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex, Promise) {
|
||||
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
|
||||
const down = (_knex) => {
|
||||
logger.warn(`[${migrateName}] You can't migrate down this one.`);
|
||||
return Promise.resolve(true);
|
||||
};
|
||||
|
||||
export { up, down };
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
const migrate_name = 'pass_auth';
|
||||
const logger = require('../logger').migrate;
|
||||
import { migrate as logger } from "../logger.js";
|
||||
|
||||
const migrateName = "pass_auth";
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
@@ -7,18 +8,17 @@ const logger = require('../logger').migrate;
|
||||
* @see http://knexjs.org/#Schema
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex/*, Promise*/) {
|
||||
const up = (knex) => {
|
||||
logger.info(`[${migrateName}] Migrating Up...`);
|
||||
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
|
||||
return knex.schema.table('access_list', function (access_list) {
|
||||
access_list.integer('pass_auth').notNull().defaultTo(1);
|
||||
return knex.schema
|
||||
.table("access_list", (access_list) => {
|
||||
access_list.integer("pass_auth").notNull().defaultTo(1);
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] access_list Table altered');
|
||||
logger.info(`[${migrateName}] access_list Table altered`);
|
||||
});
|
||||
};
|
||||
|
||||
@@ -26,16 +26,18 @@ exports.up = function (knex/*, Promise*/) {
|
||||
* Undo Migrate
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex/*, Promise*/) {
|
||||
logger.info('[' + migrate_name + '] Migrating Down...');
|
||||
const down = (knex) => {
|
||||
logger.info(`[${migrateName}] Migrating Down...`);
|
||||
|
||||
return knex.schema.table('access_list', function (access_list) {
|
||||
access_list.dropColumn('pass_auth');
|
||||
return knex.schema
|
||||
.table("access_list", (access_list) => {
|
||||
access_list.dropColumn("pass_auth");
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('[' + migrate_name + '] access_list pass_auth Column dropped');
|
||||
logger.info(`[${migrateName}] access_list pass_auth Column dropped`);
|
||||
});
|
||||
};
|
||||
|
||||
export { up, down };
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
const migrate_name = 'redirection_scheme';
|
||||
const logger = require('../logger').migrate;
|
||||
import { migrate as logger } from "../logger.js";
|
||||
|
||||
const migrateName = "redirection_scheme";
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
@@ -7,18 +8,17 @@ const logger = require('../logger').migrate;
|
||||
* @see http://knexjs.org/#Schema
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex/*, Promise*/) {
|
||||
const up = (knex) => {
|
||||
logger.info(`[${migrateName}] Migrating Up...`);
|
||||
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
|
||||
return knex.schema.table('redirection_host', (table) => {
|
||||
table.string('forward_scheme').notNull().defaultTo('$scheme');
|
||||
return knex.schema
|
||||
.table("redirection_host", (table) => {
|
||||
table.string("forward_scheme").notNull().defaultTo("$scheme");
|
||||
})
|
||||
.then(function () {
|
||||
logger.info('[' + migrate_name + '] redirection_host Table altered');
|
||||
.then(() => {
|
||||
logger.info(`[${migrateName}] redirection_host Table altered`);
|
||||
});
|
||||
};
|
||||
|
||||
@@ -26,16 +26,18 @@ exports.up = function (knex/*, Promise*/) {
|
||||
* Undo Migrate
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex/*, Promise*/) {
|
||||
logger.info('[' + migrate_name + '] Migrating Down...');
|
||||
const down = (knex) => {
|
||||
logger.info(`[${migrateName}] Migrating Down...`);
|
||||
|
||||
return knex.schema.table('redirection_host', (table) => {
|
||||
table.dropColumn('forward_scheme');
|
||||
return knex.schema
|
||||
.table("redirection_host", (table) => {
|
||||
table.dropColumn("forward_scheme");
|
||||
})
|
||||
.then(function () {
|
||||
logger.info('[' + migrate_name + '] redirection_host Table altered');
|
||||
.then(() => {
|
||||
logger.info(`[${migrateName}] redirection_host Table altered`);
|
||||
});
|
||||
};
|
||||
|
||||
export { up, down };
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
const migrate_name = 'redirection_status_code';
|
||||
const logger = require('../logger').migrate;
|
||||
import { migrate as logger } from "../logger.js";
|
||||
|
||||
const migrateName = "redirection_status_code";
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
@@ -7,18 +8,17 @@ const logger = require('../logger').migrate;
|
||||
* @see http://knexjs.org/#Schema
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex/*, Promise*/) {
|
||||
const up = (knex) => {
|
||||
logger.info(`[${migrateName}] Migrating Up...`);
|
||||
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
|
||||
return knex.schema.table('redirection_host', (table) => {
|
||||
table.integer('forward_http_code').notNull().unsigned().defaultTo(302);
|
||||
return knex.schema
|
||||
.table("redirection_host", (table) => {
|
||||
table.integer("forward_http_code").notNull().unsigned().defaultTo(302);
|
||||
})
|
||||
.then(function () {
|
||||
logger.info('[' + migrate_name + '] redirection_host Table altered');
|
||||
.then(() => {
|
||||
logger.info(`[${migrateName}] redirection_host Table altered`);
|
||||
});
|
||||
};
|
||||
|
||||
@@ -26,16 +26,18 @@ exports.up = function (knex/*, Promise*/) {
|
||||
* Undo Migrate
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex/*, Promise*/) {
|
||||
logger.info('[' + migrate_name + '] Migrating Down...');
|
||||
const down = (knex) => {
|
||||
logger.info(`[${migrateName}] Migrating Down...`);
|
||||
|
||||
return knex.schema.table('redirection_host', (table) => {
|
||||
table.dropColumn('forward_http_code');
|
||||
return knex.schema
|
||||
.table("redirection_host", (table) => {
|
||||
table.dropColumn("forward_http_code");
|
||||
})
|
||||
.then(function () {
|
||||
logger.info('[' + migrate_name + '] redirection_host Table altered');
|
||||
.then(() => {
|
||||
logger.info(`[${migrateName}] redirection_host Table altered`);
|
||||
});
|
||||
};
|
||||
|
||||
export { up, down };
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
const migrate_name = 'stream_domain';
|
||||
const logger = require('../logger').migrate;
|
||||
import { migrate as logger } from "../logger.js";
|
||||
|
||||
const migrateName = "stream_domain";
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
@@ -7,17 +8,17 @@ const logger = require('../logger').migrate;
|
||||
* @see http://knexjs.org/#Schema
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex/*, Promise*/) {
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
const up = (knex) => {
|
||||
logger.info(`[${migrateName}] Migrating Up...`);
|
||||
|
||||
return knex.schema.table('stream', (table) => {
|
||||
table.renameColumn('forward_ip', 'forwarding_host');
|
||||
return knex.schema
|
||||
.table("stream", (table) => {
|
||||
table.renameColumn("forward_ip", "forwarding_host");
|
||||
})
|
||||
.then(function () {
|
||||
logger.info('[' + migrate_name + '] stream Table altered');
|
||||
.then(() => {
|
||||
logger.info(`[${migrateName}] stream Table altered`);
|
||||
});
|
||||
};
|
||||
|
||||
@@ -25,16 +26,18 @@ exports.up = function (knex/*, Promise*/) {
|
||||
* Undo Migrate
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex/*, Promise*/) {
|
||||
logger.info('[' + migrate_name + '] Migrating Down...');
|
||||
const down = (knex) => {
|
||||
logger.info(`[${migrateName}] Migrating Down...`);
|
||||
|
||||
return knex.schema.table('stream', (table) => {
|
||||
table.renameColumn('forwarding_host', 'forward_ip');
|
||||
return knex.schema
|
||||
.table("stream", (table) => {
|
||||
table.renameColumn("forwarding_host", "forward_ip");
|
||||
})
|
||||
.then(function () {
|
||||
logger.info('[' + migrate_name + '] stream Table altered');
|
||||
.then(() => {
|
||||
logger.info(`[${migrateName}] stream Table altered`);
|
||||
});
|
||||
};
|
||||
|
||||
export { up, down };
|
||||
|
||||
@@ -1,17 +1,19 @@
|
||||
const migrate_name = 'stream_domain';
|
||||
const logger = require('../logger').migrate;
|
||||
const internalNginx = require('../internal/nginx');
|
||||
import internalNginx from "../internal/nginx.js";
|
||||
import { migrate as logger } from "../logger.js";
|
||||
|
||||
const migrateName = "stream_domain";
|
||||
|
||||
async function regenerateDefaultHost(knex) {
|
||||
const row = await knex('setting').select('*').where('id', 'default-site').first();
|
||||
const row = await knex("setting").select("*").where("id", "default-site").first();
|
||||
|
||||
if (!row) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
return internalNginx.deleteConfig('default')
|
||||
return internalNginx
|
||||
.deleteConfig("default")
|
||||
.then(() => {
|
||||
return internalNginx.generateConfig('default', row);
|
||||
return internalNginx.generateConfig("default", row);
|
||||
})
|
||||
.then(() => {
|
||||
return internalNginx.test();
|
||||
@@ -27,11 +29,10 @@ async function regenerateDefaultHost(knex) {
|
||||
* @see http://knexjs.org/#Schema
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex) {
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
const up = (knex) => {
|
||||
logger.info(`[${migrateName}] Migrating Up...`);
|
||||
|
||||
return regenerateDefaultHost(knex);
|
||||
};
|
||||
@@ -40,11 +41,12 @@ exports.up = function (knex) {
|
||||
* Undo Migrate
|
||||
*
|
||||
* @param {Object} knex
|
||||
* @param {Promise} Promise
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex) {
|
||||
logger.info('[' + migrate_name + '] Migrating Down...');
|
||||
const down = (knex) => {
|
||||
logger.info(`[${migrateName}] Migrating Down...`);
|
||||
|
||||
return regenerateDefaultHost(knex);
|
||||
};
|
||||
|
||||
export { up, down };
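The regenerateDefaultHost helper above chains deleteConfig, generateConfig and test with .then(). An equivalent async/await restatement of just the steps visible in this excerpt (a sketch only, not a proposed change; any later steps in the real chain are omitted here):

import internalNginx from "../internal/nginx.js";

async function regenerateDefaultHostAwaited(knex) {
    // Same lookup as above: bail out when no default-site setting row exists.
    const row = await knex("setting").select("*").where("id", "default-site").first();
    if (!row) {
        return;
    }
    // Only the steps shown in this excerpt; the real chain may continue further.
    await internalNginx.deleteConfig("default");
    await internalNginx.generateConfig("default", row);
    await internalNginx.test();
}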
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
const migrate_name = 'stream_ssl';
|
||||
const logger = require('../logger').migrate;
|
||||
import { migrate as logger } from "../logger.js";
|
||||
|
||||
const migrateName = "stream_ssl";
|
||||
|
||||
/**
|
||||
* Migrate
|
||||
@@ -9,14 +10,15 @@ const logger = require('../logger').migrate;
|
||||
* @param {Object} knex
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.up = function (knex) {
|
||||
logger.info('[' + migrate_name + '] Migrating Up...');
|
||||
const up = (knex) => {
|
||||
logger.info(`[${migrateName}] Migrating Up...`);
|
||||
|
||||
return knex.schema.table('stream', (table) => {
|
||||
table.integer('certificate_id').notNull().unsigned().defaultTo(0);
|
||||
return knex.schema
|
||||
.table("stream", (table) => {
|
||||
table.integer("certificate_id").notNull().unsigned().defaultTo(0);
|
||||
})
|
||||
.then(function () {
|
||||
logger.info('[' + migrate_name + '] stream Table altered');
|
||||
.then(() => {
|
||||
logger.info(`[${migrateName}] stream Table altered`);
|
||||
});
|
||||
};
|
||||
|
||||
@@ -26,13 +28,16 @@ exports.up = function (knex) {
|
||||
* @param {Object} knex
|
||||
* @returns {Promise}
|
||||
*/
|
||||
exports.down = function (knex) {
|
||||
logger.info('[' + migrate_name + '] Migrating Down...');
|
||||
const down = (knex) => {
|
||||
logger.info(`[${migrateName}] Migrating Down...`);
|
||||
|
||||
return knex.schema.table('stream', (table) => {
|
||||
table.dropColumn('certificate_id');
|
||||
return knex.schema
|
||||
.table("stream", (table) => {
|
||||
table.dropColumn("certificate_id");
|
||||
})
|
||||
.then(function () {
|
||||
logger.info('[' + migrate_name + '] stream Table altered');
|
||||
.then(() => {
|
||||
logger.info(`[${migrateName}] stream Table altered`);
|
||||
});
|
||||
};
|
||||
|
||||
export { up, down };
|
||||
|
||||
@@ -1,103 +1,98 @@
|
||||
// Objection Docs:
|
||||
// http://vincit.github.io/objection.js/
|
||||
|
||||
const db = require('../db');
|
||||
const helpers = require('../lib/helpers');
|
||||
const Model = require('objection').Model;
|
||||
const User = require('./user');
|
||||
const AccessListAuth = require('./access_list_auth');
|
||||
const AccessListClient = require('./access_list_client');
|
||||
const now = require('./now_helper');
|
||||
import { Model } from "objection";
|
||||
import db from "../db.js";
|
||||
import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
|
||||
import AccessListAuth from "./access_list_auth.js";
|
||||
import AccessListClient from "./access_list_client.js";
|
||||
import now from "./now_helper.js";
|
||||
import ProxyHostModel from "./proxy_host.js";
|
||||
import User from "./user.js";
|
||||
|
||||
Model.knex(db);
|
||||
|
||||
const boolFields = [
|
||||
'is_deleted',
|
||||
'satisfy_any',
|
||||
'pass_auth',
|
||||
];
|
||||
const boolFields = ["is_deleted", "satisfy_any", "pass_auth"];
|
||||
|
||||
class AccessList extends Model {
|
||||
$beforeInsert () {
|
||||
$beforeInsert() {
|
||||
this.created_on = now();
|
||||
this.modified_on = now();
|
||||
|
||||
// Default for meta
|
||||
if (typeof this.meta === 'undefined') {
|
||||
if (typeof this.meta === "undefined") {
|
||||
this.meta = {};
|
||||
}
|
||||
}
|
||||
|
||||
$beforeUpdate () {
|
||||
$beforeUpdate() {
|
||||
this.modified_on = now();
|
||||
}
|
||||
|
||||
$parseDatabaseJson(json) {
|
||||
json = super.$parseDatabaseJson(json);
|
||||
return helpers.convertIntFieldsToBool(json, boolFields);
|
||||
const thisJson = super.$parseDatabaseJson(json);
|
||||
return convertIntFieldsToBool(thisJson, boolFields);
|
||||
}
|
||||
|
||||
$formatDatabaseJson(json) {
|
||||
json = helpers.convertBoolFieldsToInt(json, boolFields);
|
||||
return super.$formatDatabaseJson(json);
|
||||
const thisJson = convertBoolFieldsToInt(json, boolFields);
|
||||
return super.$formatDatabaseJson(thisJson);
|
||||
}
|
||||
|
||||
static get name () {
|
||||
return 'AccessList';
|
||||
static get name() {
|
||||
return "AccessList";
|
||||
}
|
||||
|
||||
static get tableName () {
|
||||
return 'access_list';
|
||||
static get tableName() {
|
||||
return "access_list";
|
||||
}
|
||||
|
||||
static get jsonAttributes () {
|
||||
return ['meta'];
|
||||
static get jsonAttributes() {
|
||||
return ["meta"];
|
||||
}
|
||||
|
||||
static get relationMappings () {
|
||||
const ProxyHost = require('./proxy_host');
|
||||
|
||||
static get relationMappings() {
|
||||
return {
|
||||
owner: {
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: User,
|
||||
join: {
|
||||
from: 'access_list.owner_user_id',
|
||||
to: 'user.id'
|
||||
from: "access_list.owner_user_id",
|
||||
to: "user.id",
|
||||
},
|
||||
modify: (qb) => {
|
||||
qb.where("user.is_deleted", 0);
|
||||
},
|
||||
modify: function (qb) {
|
||||
qb.where('user.is_deleted', 0);
|
||||
}
|
||||
},
|
||||
items: {
|
||||
relation: Model.HasManyRelation,
|
||||
modelClass: AccessListAuth,
|
||||
join: {
|
||||
from: 'access_list.id',
|
||||
to: 'access_list_auth.access_list_id'
|
||||
}
|
||||
from: "access_list.id",
|
||||
to: "access_list_auth.access_list_id",
|
||||
},
|
||||
},
|
||||
clients: {
|
||||
relation: Model.HasManyRelation,
|
||||
modelClass: AccessListClient,
|
||||
join: {
|
||||
from: 'access_list.id',
|
||||
to: 'access_list_client.access_list_id'
|
||||
}
|
||||
from: "access_list.id",
|
||||
to: "access_list_client.access_list_id",
|
||||
},
|
||||
},
|
||||
proxy_hosts: {
|
||||
relation: Model.HasManyRelation,
|
||||
modelClass: ProxyHost,
|
||||
modelClass: ProxyHostModel,
|
||||
join: {
|
||||
from: 'access_list.id',
|
||||
to: 'proxy_host.access_list_id'
|
||||
from: "access_list.id",
|
||||
to: "proxy_host.access_list_id",
|
||||
},
|
||||
modify: (qb) => {
|
||||
qb.where("proxy_host.is_deleted", 0);
|
||||
},
|
||||
},
|
||||
modify: function (qb) {
|
||||
qb.where('proxy_host.is_deleted', 0);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = AccessList;
|
||||
export default AccessList;
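The model relies on convertIntFieldsToBool and convertBoolFieldsToInt from lib/helpers.js, which this diff does not show. A sketch of what such helpers plausibly do, offered as an assumption rather than the project's actual implementation:

// Assumed behaviour: map 0/1 database integers to booleans on read, and back on write.
const convertIntFieldsToBool = (obj, fields) => {
    const result = { ...obj };
    for (const field of fields) {
        if (typeof result[field] !== "undefined") {
            result[field] = result[field] === 1 || result[field] === true;
        }
    }
    return result;
};

const convertBoolFieldsToInt = (obj, fields) => {
    const result = { ...obj };
    for (const field of fields) {
        if (typeof result[field] !== "undefined") {
            result[field] = result[field] ? 1 : 0;
        }
    }
    return result;
};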
|
||||
|
||||
@@ -1,54 +1,55 @@
|
||||
// Objection Docs:
|
||||
// http://vincit.github.io/objection.js/
|
||||
|
||||
const db = require('../db');
|
||||
const Model = require('objection').Model;
|
||||
const now = require('./now_helper');
|
||||
import { Model } from "objection";
|
||||
import db from "../db.js";
|
||||
import accessListModel from "./access_list.js";
|
||||
import now from "./now_helper.js";
|
||||
|
||||
Model.knex(db);
|
||||
|
||||
class AccessListAuth extends Model {
|
||||
$beforeInsert () {
|
||||
$beforeInsert() {
|
||||
this.created_on = now();
|
||||
this.modified_on = now();
|
||||
|
||||
// Default for meta
|
||||
if (typeof this.meta === 'undefined') {
|
||||
if (typeof this.meta === "undefined") {
|
||||
this.meta = {};
|
||||
}
|
||||
}
|
||||
|
||||
$beforeUpdate () {
|
||||
$beforeUpdate() {
|
||||
this.modified_on = now();
|
||||
}
|
||||
|
||||
static get name () {
|
||||
return 'AccessListAuth';
|
||||
static get name() {
|
||||
return "AccessListAuth";
|
||||
}
|
||||
|
||||
static get tableName () {
|
||||
return 'access_list_auth';
|
||||
static get tableName() {
|
||||
return "access_list_auth";
|
||||
}
|
||||
|
||||
static get jsonAttributes () {
|
||||
return ['meta'];
|
||||
static get jsonAttributes() {
|
||||
return ["meta"];
|
||||
}
|
||||
|
||||
static get relationMappings () {
|
||||
static get relationMappings() {
|
||||
return {
|
||||
access_list: {
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: require('./access_list'),
|
||||
modelClass: accessListModel,
|
||||
join: {
|
||||
from: 'access_list_auth.access_list_id',
|
||||
to: 'access_list.id'
|
||||
from: "access_list_auth.access_list_id",
|
||||
to: "access_list.id",
|
||||
},
|
||||
modify: (qb) => {
|
||||
qb.where("access_list.is_deleted", 0);
|
||||
},
|
||||
},
|
||||
modify: function (qb) {
|
||||
qb.where('access_list.is_deleted', 0);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = AccessListAuth;
|
||||
export default AccessListAuth;
|
||||
|
||||
@@ -1,54 +1,55 @@
|
||||
// Objection Docs:
|
||||
// http://vincit.github.io/objection.js/
|
||||
|
||||
const db = require('../db');
|
||||
const Model = require('objection').Model;
|
||||
const now = require('./now_helper');
|
||||
import { Model } from "objection";
|
||||
import db from "../db.js";
|
||||
import accessListModel from "./access_list.js";
|
||||
import now from "./now_helper.js";
|
||||
|
||||
Model.knex(db);
|
||||
|
||||
class AccessListClient extends Model {
|
||||
$beforeInsert () {
|
||||
$beforeInsert() {
|
||||
this.created_on = now();
|
||||
this.modified_on = now();
|
||||
|
||||
// Default for meta
|
||||
if (typeof this.meta === 'undefined') {
|
||||
if (typeof this.meta === "undefined") {
|
||||
this.meta = {};
|
||||
}
|
||||
}
|
||||
|
||||
$beforeUpdate () {
|
||||
$beforeUpdate() {
|
||||
this.modified_on = now();
|
||||
}
|
||||
|
||||
static get name () {
|
||||
return 'AccessListClient';
|
||||
static get name() {
|
||||
return "AccessListClient";
|
||||
}
|
||||
|
||||
static get tableName () {
|
||||
return 'access_list_client';
|
||||
static get tableName() {
|
||||
return "access_list_client";
|
||||
}
|
||||
|
||||
static get jsonAttributes () {
|
||||
return ['meta'];
|
||||
static get jsonAttributes() {
|
||||
return ["meta"];
|
||||
}
|
||||
|
||||
static get relationMappings () {
|
||||
static get relationMappings() {
|
||||
return {
|
||||
access_list: {
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: require('./access_list'),
|
||||
modelClass: accessListModel,
|
||||
join: {
|
||||
from: 'access_list_client.access_list_id',
|
||||
to: 'access_list.id'
|
||||
from: "access_list_client.access_list_id",
|
||||
to: "access_list.id",
|
||||
},
|
||||
modify: (qb) => {
|
||||
qb.where("access_list.is_deleted", 0);
|
||||
},
|
||||
},
|
||||
modify: function (qb) {
|
||||
qb.where('access_list.is_deleted', 0);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = AccessListClient;
|
||||
export default AccessListClient;
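Usage sketch (not part of the diff): fetching an access list together with the items, clients and proxy_hosts relations defined above, assuming Objection v2+ where withGraphFetched is available; the id is illustrative:

import AccessList from "./access_list.js";

// Eager-load the related auth entries, client rules and attached proxy hosts in one query graph.
const listWithRelations = await AccessList.query()
    .findById(1) // hypothetical id
    .withGraphFetched("[items, clients, proxy_hosts]");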
|
||||
|
||||
@@ -1,52 +1,52 @@
|
||||
// Objection Docs:
|
||||
// http://vincit.github.io/objection.js/
|
||||
|
||||
const db = require('../db');
|
||||
const Model = require('objection').Model;
|
||||
const User = require('./user');
|
||||
const now = require('./now_helper');
|
||||
import { Model } from "objection";
|
||||
import db from "../db.js";
|
||||
import now from "./now_helper.js";
|
||||
import User from "./user.js";
|
||||
|
||||
Model.knex(db);
|
||||
|
||||
class AuditLog extends Model {
|
||||
$beforeInsert () {
|
||||
$beforeInsert() {
|
||||
this.created_on = now();
|
||||
this.modified_on = now();
|
||||
|
||||
// Default for meta
|
||||
if (typeof this.meta === 'undefined') {
|
||||
if (typeof this.meta === "undefined") {
|
||||
this.meta = {};
|
||||
}
|
||||
}
|
||||
|
||||
$beforeUpdate () {
|
||||
$beforeUpdate() {
|
||||
this.modified_on = now();
|
||||
}
|
||||
|
||||
static get name () {
|
||||
return 'AuditLog';
|
||||
static get name() {
|
||||
return "AuditLog";
|
||||
}
|
||||
|
||||
static get tableName () {
|
||||
return 'audit_log';
|
||||
static get tableName() {
|
||||
return "audit_log";
|
||||
}
|
||||
|
||||
static get jsonAttributes () {
|
||||
return ['meta'];
|
||||
static get jsonAttributes() {
|
||||
return ["meta"];
|
||||
}
|
||||
|
||||
static get relationMappings () {
|
||||
static get relationMappings() {
|
||||
return {
|
||||
user: {
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: User,
|
||||
join: {
|
||||
from: 'audit_log.user_id',
|
||||
to: 'user.id'
|
||||
}
|
||||
}
|
||||
from: "audit_log.user_id",
|
||||
to: "user.id",
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = AuditLog;
|
||||
export default AuditLog;
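Usage sketch (not part of the diff): writing and reading an audit entry through the model above; the import path, field values and object_type string are assumptions, not values taken from the codebase:

import AuditLog from "./audit_log.js"; // path assumed for illustration

const entry = await AuditLog.query().insert({
    user_id: 1, // hypothetical user
    object_type: "proxy-host", // assumption: object_type is a free-form string
    object_id: 10,
    action: "created",
    meta: {},
});

// Pull the entry back with its related user via the mapping defined above.
const withUser = await AuditLog.query().findById(entry.id).withGraphFetched("user");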
|
||||
|
||||
@@ -1,27 +1,21 @@
|
||||
// Objection Docs:
|
||||
// http://vincit.github.io/objection.js/
|
||||
|
||||
const bcrypt = require('bcrypt');
|
||||
const db = require('../db');
|
||||
const helpers = require('../lib/helpers');
|
||||
const Model = require('objection').Model;
|
||||
const User = require('./user');
|
||||
const now = require('./now_helper');
|
||||
import bcrypt from "bcrypt";
|
||||
import { Model } from "objection";
|
||||
import db from "../db.js";
|
||||
import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
|
||||
import now from "./now_helper.js";
|
||||
import User from "./user.js";
|
||||
|
||||
Model.knex(db);
|
||||
|
||||
const boolFields = [
|
||||
'is_deleted',
|
||||
];
|
||||
const boolFields = ["is_deleted"];
|
||||
|
||||
function encryptPassword () {
|
||||
/* jshint -W040 */
|
||||
let _this = this;
|
||||
|
||||
if (_this.type === 'password' && _this.secret) {
|
||||
return bcrypt.hash(_this.secret, 13)
|
||||
.then(function (hash) {
|
||||
_this.secret = hash;
|
||||
function encryptPassword() {
|
||||
if (this.type === "password" && this.secret) {
|
||||
return bcrypt.hash(this.secret, 13).then((hash) => {
|
||||
this.secret = hash;
|
||||
});
|
||||
}
|
||||
|
||||
@@ -29,31 +23,31 @@ function encryptPassword () {
|
||||
}
|
||||
|
||||
class Auth extends Model {
|
||||
$beforeInsert (queryContext) {
|
||||
$beforeInsert(queryContext) {
|
||||
this.created_on = now();
|
||||
this.modified_on = now();
|
||||
|
||||
// Default for meta
|
||||
if (typeof this.meta === 'undefined') {
|
||||
if (typeof this.meta === "undefined") {
|
||||
this.meta = {};
|
||||
}
|
||||
|
||||
return encryptPassword.apply(this, queryContext);
|
||||
}
|
||||
|
||||
$beforeUpdate (queryContext) {
|
||||
$beforeUpdate(queryContext) {
|
||||
this.modified_on = now();
|
||||
return encryptPassword.apply(this, queryContext);
|
||||
}
|
||||
|
||||
$parseDatabaseJson(json) {
|
||||
json = super.$parseDatabaseJson(json);
|
||||
return helpers.convertIntFieldsToBool(json, boolFields);
|
||||
const thisJson = super.$parseDatabaseJson(json);
|
||||
return convertIntFieldsToBool(thisJson, boolFields);
|
||||
}
|
||||
|
||||
$formatDatabaseJson(json) {
|
||||
json = helpers.convertBoolFieldsToInt(json, boolFields);
|
||||
return super.$formatDatabaseJson(json);
|
||||
const thisJson = convertBoolFieldsToInt(json, boolFields);
|
||||
return super.$formatDatabaseJson(thisJson);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -62,37 +56,37 @@ class Auth extends Model {
|
||||
* @param {String} password
|
||||
* @returns {Promise}
|
||||
*/
|
||||
verifyPassword (password) {
|
||||
verifyPassword(password) {
|
||||
return bcrypt.compare(password, this.secret);
|
||||
}
|
||||
|
||||
static get name () {
|
||||
return 'Auth';
|
||||
static get name() {
|
||||
return "Auth";
|
||||
}
|
||||
|
||||
static get tableName () {
|
||||
return 'auth';
|
||||
static get tableName() {
|
||||
return "auth";
|
||||
}
|
||||
|
||||
static get jsonAttributes () {
|
||||
return ['meta'];
|
||||
static get jsonAttributes() {
|
||||
return ["meta"];
|
||||
}
|
||||
|
||||
static get relationMappings () {
|
||||
static get relationMappings() {
|
||||
return {
|
||||
user: {
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: User,
|
||||
join: {
|
||||
from: 'auth.user_id',
|
||||
to: 'user.id'
|
||||
from: "auth.user_id",
|
||||
to: "user.id",
|
||||
},
|
||||
filter: {
|
||||
is_deleted: 0
|
||||
}
|
||||
}
|
||||
is_deleted: 0,
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Auth;
|
||||
export default Auth;
|
||||
|
||||
@@ -1,124 +1,121 @@
|
||||
// Objection Docs:
|
||||
// http://vincit.github.io/objection.js/
|
||||
|
||||
const db = require('../db');
|
||||
const helpers = require('../lib/helpers');
|
||||
const Model = require('objection').Model;
|
||||
const now = require('./now_helper');
|
||||
import { Model } from "objection";
|
||||
import db from "../db.js";
|
||||
import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
|
||||
import deadHostModel from "./dead_host.js";
|
||||
import now from "./now_helper.js";
|
||||
import proxyHostModel from "./proxy_host.js";
|
||||
import redirectionHostModel from "./redirection_host.js";
|
||||
import userModel from "./user.js";
|
||||
|
||||
Model.knex(db);
|
||||
|
||||
const boolFields = [
|
||||
'is_deleted',
|
||||
];
|
||||
const boolFields = ["is_deleted"];
|
||||
|
||||
class Certificate extends Model {
|
||||
$beforeInsert () {
|
||||
$beforeInsert() {
|
||||
this.created_on = now();
|
||||
this.modified_on = now();
|
||||
|
||||
// Default for expires_on
|
||||
if (typeof this.expires_on === 'undefined') {
|
||||
if (typeof this.expires_on === "undefined") {
|
||||
this.expires_on = now();
|
||||
}
|
||||
|
||||
// Default for domain_names
|
||||
if (typeof this.domain_names === 'undefined') {
|
||||
if (typeof this.domain_names === "undefined") {
|
||||
this.domain_names = [];
|
||||
}
|
||||
|
||||
// Default for meta
|
||||
if (typeof this.meta === 'undefined') {
|
||||
if (typeof this.meta === "undefined") {
|
||||
this.meta = {};
|
||||
}
|
||||
|
||||
this.domain_names.sort();
|
||||
}
|
||||
|
||||
$beforeUpdate () {
|
||||
$beforeUpdate() {
|
||||
this.modified_on = now();
|
||||
|
||||
// Sort domain_names
|
||||
if (typeof this.domain_names !== 'undefined') {
|
||||
if (typeof this.domain_names !== "undefined") {
|
||||
this.domain_names.sort();
|
||||
}
|
||||
}
|
||||
|
||||
$parseDatabaseJson(json) {
|
||||
json = super.$parseDatabaseJson(json);
|
||||
return helpers.convertIntFieldsToBool(json, boolFields);
|
||||
const thisJson = super.$parseDatabaseJson(json);
|
||||
return convertIntFieldsToBool(thisJson, boolFields);
|
||||
}
|
||||
|
||||
$formatDatabaseJson(json) {
|
||||
json = helpers.convertBoolFieldsToInt(json, boolFields);
|
||||
return super.$formatDatabaseJson(json);
|
||||
const thisJson = convertBoolFieldsToInt(json, boolFields);
|
||||
return super.$formatDatabaseJson(thisJson);
|
||||
}
|
||||
|
||||
static get name () {
|
||||
return 'Certificate';
|
||||
static get name() {
|
||||
return "Certificate";
|
||||
}
|
||||
|
||||
static get tableName () {
|
||||
return 'certificate';
|
||||
static get tableName() {
|
||||
return "certificate";
|
||||
}
|
||||
|
||||
static get jsonAttributes () {
|
||||
return ['domain_names', 'meta'];
|
||||
static get jsonAttributes() {
|
||||
return ["domain_names", "meta"];
|
||||
}
|
||||
|
||||
static get relationMappings () {
|
||||
const ProxyHost = require('./proxy_host');
|
||||
const DeadHost = require('./dead_host');
|
||||
const User = require('./user');
|
||||
const RedirectionHost = require('./redirection_host');
|
||||
|
||||
static get relationMappings() {
|
||||
return {
|
||||
owner: {
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: User,
|
||||
modelClass: userModel,
|
||||
join: {
|
||||
from: 'certificate.owner_user_id',
|
||||
to: 'user.id'
|
||||
from: "certificate.owner_user_id",
|
||||
to: "user.id",
|
||||
},
|
||||
modify: (qb) => {
|
||||
qb.where("user.is_deleted", 0);
|
||||
},
|
||||
modify: function (qb) {
|
||||
qb.where('user.is_deleted', 0);
|
||||
}
|
||||
},
|
||||
proxy_hosts: {
|
||||
relation: Model.HasManyRelation,
|
||||
modelClass: ProxyHost,
|
||||
modelClass: proxyHostModel,
|
||||
join: {
|
||||
from: 'certificate.id',
|
||||
to: 'proxy_host.certificate_id'
|
||||
from: "certificate.id",
|
||||
to: "proxy_host.certificate_id",
|
||||
},
|
||||
modify: (qb) => {
|
||||
qb.where("proxy_host.is_deleted", 0);
|
||||
},
|
||||
modify: function (qb) {
|
||||
qb.where('proxy_host.is_deleted', 0);
|
||||
}
|
||||
},
|
||||
dead_hosts: {
|
||||
relation: Model.HasManyRelation,
|
||||
modelClass: DeadHost,
|
||||
modelClass: deadHostModel,
|
||||
join: {
|
||||
from: 'certificate.id',
|
||||
to: 'dead_host.certificate_id'
|
||||
from: "certificate.id",
|
||||
to: "dead_host.certificate_id",
|
||||
},
|
||||
modify: (qb) => {
|
||||
qb.where("dead_host.is_deleted", 0);
|
||||
},
|
||||
modify: function (qb) {
|
||||
qb.where('dead_host.is_deleted', 0);
|
||||
}
|
||||
},
|
||||
redirection_hosts: {
|
||||
relation: Model.HasManyRelation,
|
||||
modelClass: RedirectionHost,
|
||||
modelClass: redirectionHostModel,
|
||||
join: {
|
||||
from: 'certificate.id',
|
||||
to: 'redirection_host.certificate_id'
|
||||
from: "certificate.id",
|
||||
to: "redirection_host.certificate_id",
|
||||
},
|
||||
modify: (qb) => {
|
||||
qb.where("redirection_host.is_deleted", 0);
|
||||
},
|
||||
},
|
||||
modify: function (qb) {
|
||||
qb.where('redirection_host.is_deleted', 0);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Certificate;
|
||||
export default Certificate;
|
||||
|
||||
@@ -1,99 +1,92 @@
|
||||
// Objection Docs:
|
||||
// http://vincit.github.io/objection.js/
|
||||
|
||||
const db = require('../db');
|
||||
const helpers = require('../lib/helpers');
|
||||
const Model = require('objection').Model;
|
||||
const User = require('./user');
|
||||
const Certificate = require('./certificate');
|
||||
const now = require('./now_helper');
|
||||
import { Model } from "objection";
|
||||
import db from "../db.js";
|
||||
import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
|
||||
import Certificate from "./certificate.js";
|
||||
import now from "./now_helper.js";
|
||||
import User from "./user.js";
|
||||
|
||||
Model.knex(db);
|
||||
|
||||
const boolFields = [
|
||||
'is_deleted',
|
||||
'ssl_forced',
|
||||
'http2_support',
|
||||
'enabled',
|
||||
'hsts_enabled',
|
||||
'hsts_subdomains',
|
||||
];
|
||||
const boolFields = ["is_deleted", "ssl_forced", "http2_support", "enabled", "hsts_enabled", "hsts_subdomains"];
|
||||
|
||||
class DeadHost extends Model {
|
||||
$beforeInsert () {
|
||||
$beforeInsert() {
|
||||
this.created_on = now();
|
||||
this.modified_on = now();
|
||||
|
||||
// Default for domain_names
|
||||
if (typeof this.domain_names === 'undefined') {
|
||||
if (typeof this.domain_names === "undefined") {
|
||||
this.domain_names = [];
|
||||
}
|
||||
|
||||
// Default for meta
|
||||
if (typeof this.meta === 'undefined') {
|
||||
if (typeof this.meta === "undefined") {
|
||||
this.meta = {};
|
||||
}
|
||||
|
||||
this.domain_names.sort();
|
||||
}
|
||||
|
||||
$beforeUpdate () {
|
||||
$beforeUpdate() {
|
||||
this.modified_on = now();
|
||||
|
||||
// Sort domain_names
|
||||
if (typeof this.domain_names !== 'undefined') {
|
||||
if (typeof this.domain_names !== "undefined") {
|
||||
this.domain_names.sort();
|
||||
}
|
||||
}
|
||||
|
||||
$parseDatabaseJson(json) {
|
||||
json = super.$parseDatabaseJson(json);
|
||||
return helpers.convertIntFieldsToBool(json, boolFields);
|
||||
const thisJson = super.$parseDatabaseJson(json);
|
||||
return convertIntFieldsToBool(thisJson, boolFields);
|
||||
}
|
||||
|
||||
$formatDatabaseJson(json) {
|
||||
json = helpers.convertBoolFieldsToInt(json, boolFields);
|
||||
return super.$formatDatabaseJson(json);
|
||||
const thisJson = convertBoolFieldsToInt(json, boolFields);
|
||||
return super.$formatDatabaseJson(thisJson);
|
||||
}
|
||||
|
||||
static get name () {
|
||||
return 'DeadHost';
|
||||
static get name() {
|
||||
return "DeadHost";
|
||||
}
|
||||
|
||||
static get tableName () {
|
||||
return 'dead_host';
|
||||
static get tableName() {
|
||||
return "dead_host";
|
||||
}
|
||||
|
||||
static get jsonAttributes () {
|
||||
return ['domain_names', 'meta'];
|
||||
static get jsonAttributes() {
|
||||
return ["domain_names", "meta"];
|
||||
}
|
||||
|
||||
static get relationMappings () {
|
||||
static get relationMappings() {
|
||||
return {
|
||||
owner: {
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: User,
|
||||
join: {
|
||||
from: 'dead_host.owner_user_id',
|
||||
to: 'user.id'
|
||||
from: "dead_host.owner_user_id",
|
||||
to: "user.id",
|
||||
},
|
||||
modify: (qb) => {
|
||||
qb.where("user.is_deleted", 0);
|
||||
},
|
||||
modify: function (qb) {
|
||||
qb.where('user.is_deleted', 0);
|
||||
}
|
||||
},
|
||||
certificate: {
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: Certificate,
|
||||
join: {
|
||||
from: 'dead_host.certificate_id',
|
||||
to: 'certificate.id'
|
||||
from: "dead_host.certificate_id",
|
||||
to: "certificate.id",
|
||||
},
|
||||
modify: (qb) => {
|
||||
qb.where("certificate.is_deleted", 0);
|
||||
},
|
||||
},
|
||||
modify: function (qb) {
|
||||
qb.where('certificate.is_deleted', 0);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = DeadHost;
|
||||
export default DeadHost;
|
||||
|
||||
@@ -1,13 +1,12 @@
|
||||
const db = require('../db');
|
||||
const config = require('../lib/config');
|
||||
const Model = require('objection').Model;
|
||||
import { Model } from "objection";
|
||||
import db from "../db.js";
|
||||
import { isSqlite } from "../lib/config.js";
|
||||
|
||||
Model.knex(db);
|
||||
|
||||
module.exports = function () {
|
||||
if (config.isSqlite()) {
|
||||
// eslint-disable-next-line
|
||||
export default () => {
|
||||
if (isSqlite()) {
|
||||
return Model.raw("datetime('now','localtime')");
|
||||
}
|
||||
return Model.raw('NOW()');
|
||||
return Model.raw("NOW()");
|
||||
};
|
||||
|
||||
@@ -1,114 +1,114 @@
|
||||
// Objection Docs:
|
||||
// http://vincit.github.io/objection.js/
|
||||
|
||||
const db = require('../db');
|
||||
const helpers = require('../lib/helpers');
|
||||
const Model = require('objection').Model;
|
||||
const User = require('./user');
|
||||
const AccessList = require('./access_list');
|
||||
const Certificate = require('./certificate');
|
||||
const now = require('./now_helper');
|
||||
import { Model } from "objection";
|
||||
import db from "../db.js";
|
||||
import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
|
||||
import AccessList from "./access_list.js";
|
||||
import Certificate from "./certificate.js";
|
||||
import now from "./now_helper.js";
|
||||
import User from "./user.js";
|
||||
|
||||
Model.knex(db);
|
||||
|
||||
const boolFields = [
|
||||
'is_deleted',
|
||||
'ssl_forced',
|
||||
'caching_enabled',
|
||||
'block_exploits',
|
||||
'allow_websocket_upgrade',
|
||||
'http2_support',
|
||||
'enabled',
|
||||
'hsts_enabled',
|
||||
'hsts_subdomains',
|
||||
"is_deleted",
|
||||
"ssl_forced",
|
||||
"caching_enabled",
|
||||
"block_exploits",
|
||||
"allow_websocket_upgrade",
|
||||
"http2_support",
|
||||
"enabled",
|
||||
"hsts_enabled",
|
||||
"hsts_subdomains",
|
||||
];
|
||||
|
||||
class ProxyHost extends Model {
|
||||
$beforeInsert () {
|
||||
$beforeInsert() {
|
||||
this.created_on = now();
|
||||
this.modified_on = now();
|
||||
|
||||
// Default for domain_names
|
||||
if (typeof this.domain_names === 'undefined') {
|
||||
if (typeof this.domain_names === "undefined") {
|
||||
this.domain_names = [];
|
||||
}
|
||||
|
||||
// Default for meta
|
||||
if (typeof this.meta === 'undefined') {
|
||||
if (typeof this.meta === "undefined") {
|
||||
this.meta = {};
|
||||
}
|
||||
|
||||
this.domain_names.sort();
|
||||
}
|
||||
|
||||
$beforeUpdate () {
|
||||
$beforeUpdate() {
|
||||
this.modified_on = now();
|
||||
|
||||
// Sort domain_names
|
||||
if (typeof this.domain_names !== 'undefined') {
|
||||
if (typeof this.domain_names !== "undefined") {
|
||||
this.domain_names.sort();
|
||||
}
|
||||
}
|
||||
|
||||
$parseDatabaseJson(json) {
|
||||
json = super.$parseDatabaseJson(json);
|
||||
return helpers.convertIntFieldsToBool(json, boolFields);
|
||||
const thisJson = super.$parseDatabaseJson(json);
|
||||
return convertIntFieldsToBool(thisJson, boolFields);
|
||||
}
|
||||
|
||||
$formatDatabaseJson(json) {
|
||||
json = helpers.convertBoolFieldsToInt(json, boolFields);
|
||||
return super.$formatDatabaseJson(json);
|
||||
const thisJson = convertBoolFieldsToInt(json, boolFields);
|
||||
return super.$formatDatabaseJson(thisJson);
|
||||
}
|
||||
|
||||
static get name () {
|
||||
return 'ProxyHost';
|
||||
static get name() {
|
||||
return "ProxyHost";
|
||||
}
|
||||
|
||||
static get tableName () {
|
||||
return 'proxy_host';
|
||||
static get tableName() {
|
||||
return "proxy_host";
|
||||
}
|
||||
|
||||
static get jsonAttributes () {
|
||||
return ['domain_names', 'meta', 'locations'];
|
||||
static get jsonAttributes() {
|
||||
return ["domain_names", "meta", "locations"];
|
||||
}
|
||||
|
||||
static get relationMappings () {
|
||||
static get relationMappings() {
|
||||
return {
|
||||
owner: {
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: User,
|
||||
join: {
|
||||
from: 'proxy_host.owner_user_id',
|
||||
to: 'user.id'
|
||||
from: "proxy_host.owner_user_id",
|
||||
to: "user.id",
|
||||
},
|
||||
modify: (qb) => {
|
||||
qb.where("user.is_deleted", 0);
|
||||
},
|
||||
modify: function (qb) {
|
||||
qb.where('user.is_deleted', 0);
|
||||
}
|
||||
},
|
||||
access_list: {
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: AccessList,
|
||||
join: {
|
||||
from: 'proxy_host.access_list_id',
|
||||
to: 'access_list.id'
|
||||
from: "proxy_host.access_list_id",
|
||||
to: "access_list.id",
|
||||
},
|
||||
modify: (qb) => {
|
||||
qb.where("access_list.is_deleted", 0);
|
||||
},
|
||||
modify: function (qb) {
|
||||
qb.where('access_list.is_deleted', 0);
|
||||
}
|
||||
},
|
||||
certificate: {
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: Certificate,
|
||||
join: {
|
||||
from: 'proxy_host.certificate_id',
|
||||
to: 'certificate.id'
|
||||
from: "proxy_host.certificate_id",
|
||||
to: "certificate.id",
|
||||
},
|
||||
modify: (qb) => {
|
||||
qb.where("certificate.is_deleted", 0);
|
||||
},
|
||||
},
|
||||
modify: function (qb) {
|
||||
qb.where('certificate.is_deleted', 0);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = ProxyHost;
|
||||
export default ProxyHost;
|
||||
|
||||
@@ -1,102 +1,101 @@
|
||||
|
||||
// Objection Docs:
|
||||
// http://vincit.github.io/objection.js/
|
||||
|
||||
const db = require('../db');
|
||||
const helpers = require('../lib/helpers');
|
||||
const Model = require('objection').Model;
|
||||
const User = require('./user');
|
||||
const Certificate = require('./certificate');
|
||||
const now = require('./now_helper');
|
||||
import { Model } from "objection";
|
||||
import db from "../db.js";
|
||||
import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
|
||||
import Certificate from "./certificate.js";
|
||||
import now from "./now_helper.js";
|
||||
import User from "./user.js";
|
||||
|
||||
Model.knex(db);
|
||||
|
||||
const boolFields = [
|
||||
'is_deleted',
|
||||
'enabled',
|
||||
'preserve_path',
|
||||
'ssl_forced',
|
||||
'block_exploits',
|
||||
'hsts_enabled',
|
||||
'hsts_subdomains',
|
||||
'http2_support',
|
||||
"is_deleted",
|
||||
"enabled",
|
||||
"preserve_path",
|
||||
"ssl_forced",
|
||||
"block_exploits",
|
||||
"hsts_enabled",
|
||||
"hsts_subdomains",
|
||||
"http2_support",
|
||||
];
|
||||
|
||||
class RedirectionHost extends Model {
|
||||
$beforeInsert () {
|
||||
$beforeInsert() {
|
||||
this.created_on = now();
|
||||
this.modified_on = now();
|
||||
|
||||
// Default for domain_names
|
||||
if (typeof this.domain_names === 'undefined') {
|
||||
if (typeof this.domain_names === "undefined") {
|
||||
this.domain_names = [];
|
||||
}
|
||||
|
||||
// Default for meta
|
||||
if (typeof this.meta === 'undefined') {
|
||||
if (typeof this.meta === "undefined") {
|
||||
this.meta = {};
|
||||
}
|
||||
|
||||
this.domain_names.sort();
|
||||
}
|
||||
|
||||
$beforeUpdate () {
|
||||
$beforeUpdate() {
|
||||
this.modified_on = now();
|
||||
|
||||
// Sort domain_names
|
||||
if (typeof this.domain_names !== 'undefined') {
|
||||
if (typeof this.domain_names !== "undefined") {
|
||||
this.domain_names.sort();
|
||||
}
|
||||
}
|
||||
|
||||
$parseDatabaseJson(json) {
|
||||
json = super.$parseDatabaseJson(json);
|
||||
return helpers.convertIntFieldsToBool(json, boolFields);
|
||||
const thisJson = super.$parseDatabaseJson(json);
|
||||
return convertIntFieldsToBool(thisJson, boolFields);
|
||||
}
|
||||
|
||||
$formatDatabaseJson(json) {
|
||||
json = helpers.convertBoolFieldsToInt(json, boolFields);
|
||||
return super.$formatDatabaseJson(json);
|
||||
const thisJson = convertBoolFieldsToInt(json, boolFields);
|
||||
return super.$formatDatabaseJson(thisJson);
|
||||
}
|
||||
|
||||
static get name () {
|
||||
return 'RedirectionHost';
|
||||
static get name() {
|
||||
return "RedirectionHost";
|
||||
}
|
||||
|
||||
static get tableName () {
|
||||
return 'redirection_host';
|
||||
static get tableName() {
|
||||
return "redirection_host";
|
||||
}
|
||||
|
||||
static get jsonAttributes () {
|
||||
return ['domain_names', 'meta'];
|
||||
static get jsonAttributes() {
|
||||
return ["domain_names", "meta"];
|
||||
}
|
||||
|
||||
static get relationMappings () {
|
||||
static get relationMappings() {
|
||||
return {
|
||||
owner: {
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: User,
|
||||
join: {
|
||||
from: 'redirection_host.owner_user_id',
|
||||
to: 'user.id'
|
||||
from: "redirection_host.owner_user_id",
|
||||
to: "user.id",
|
||||
},
|
||||
modify: (qb) => {
|
||||
qb.where("user.is_deleted", 0);
|
||||
},
|
||||
modify: function (qb) {
|
||||
qb.where('user.is_deleted', 0);
|
||||
}
|
||||
},
|
||||
certificate: {
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: Certificate,
|
||||
join: {
|
||||
from: 'redirection_host.certificate_id',
|
||||
to: 'certificate.id'
|
||||
from: "redirection_host.certificate_id",
|
||||
to: "certificate.id",
|
||||
},
|
||||
modify: (qb) => {
|
||||
qb.where("certificate.is_deleted", 0);
|
||||
},
|
||||
},
|
||||
modify: function (qb) {
|
||||
qb.where('certificate.is_deleted', 0);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = RedirectionHost;
|
||||
export default RedirectionHost;
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
// Objection Docs:
|
||||
// http://vincit.github.io/objection.js/
|
||||
|
||||
const db = require('../db');
|
||||
const Model = require('objection').Model;
|
||||
import { Model } from "objection";
|
||||
import db from "../db.js";
|
||||
|
||||
Model.knex(db);
|
||||
|
||||
@@ -27,4 +27,4 @@ class Setting extends Model {
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Setting;
|
||||
export default Setting;
|
||||
|
||||
@@ -1,82 +1,77 @@
|
||||
const Model = require('objection').Model;
|
||||
const db = require('../db');
|
||||
const helpers = require('../lib/helpers');
|
||||
const User = require('./user');
|
||||
const Certificate = require('./certificate');
|
||||
const now = require('./now_helper');
|
||||
import { Model } from "objection";
|
||||
import db from "../db.js";
|
||||
import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
|
||||
import Certificate from "./certificate.js";
|
||||
import now from "./now_helper.js";
|
||||
import User from "./user.js";
|
||||
|
||||
Model.knex(db);
|
||||
|
||||
const boolFields = [
|
||||
'is_deleted',
|
||||
'enabled',
|
||||
'tcp_forwarding',
|
||||
'udp_forwarding',
|
||||
];
|
||||
const boolFields = ["is_deleted", "enabled", "tcp_forwarding", "udp_forwarding"];
|
||||
|
||||
class Stream extends Model {
|
||||
$beforeInsert () {
|
||||
$beforeInsert() {
|
||||
this.created_on = now();
|
||||
this.modified_on = now();
|
||||
|
||||
// Default for meta
|
||||
if (typeof this.meta === 'undefined') {
|
||||
if (typeof this.meta === "undefined") {
|
||||
this.meta = {};
|
||||
}
|
||||
}
|
||||
|
||||
$beforeUpdate () {
|
||||
$beforeUpdate() {
|
||||
this.modified_on = now();
|
||||
}
|
||||
|
||||
$parseDatabaseJson(json) {
|
||||
json = super.$parseDatabaseJson(json);
|
||||
return helpers.convertIntFieldsToBool(json, boolFields);
|
||||
const thisJson = super.$parseDatabaseJson(json);
|
||||
return convertIntFieldsToBool(thisJson, boolFields);
|
||||
}
|
||||
|
||||
$formatDatabaseJson(json) {
|
||||
json = helpers.convertBoolFieldsToInt(json, boolFields);
|
||||
return super.$formatDatabaseJson(json);
|
||||
const thisJson = convertBoolFieldsToInt(json, boolFields);
|
||||
return super.$formatDatabaseJson(thisJson);
|
||||
}
|
||||
|
||||
static get name () {
|
||||
return 'Stream';
|
||||
static get name() {
|
||||
return "Stream";
|
||||
}
|
||||
|
||||
static get tableName () {
|
||||
return 'stream';
|
||||
static get tableName() {
|
||||
return "stream";
|
||||
}
|
||||
|
||||
static get jsonAttributes () {
|
||||
return ['meta'];
|
||||
static get jsonAttributes() {
|
||||
return ["meta"];
|
||||
}
|
||||
|
||||
static get relationMappings () {
|
||||
static get relationMappings() {
|
||||
return {
|
||||
owner: {
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: User,
|
||||
join: {
|
||||
from: 'stream.owner_user_id',
|
||||
to: 'user.id'
|
||||
from: "stream.owner_user_id",
|
||||
to: "user.id",
|
||||
},
|
||||
modify: (qb) => {
|
||||
qb.where("user.is_deleted", 0);
|
||||
},
|
||||
modify: function (qb) {
|
||||
qb.where('user.is_deleted', 0);
|
||||
}
|
||||
},
|
||||
certificate: {
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: Certificate,
|
||||
join: {
|
||||
from: 'stream.certificate_id',
|
||||
to: 'certificate.id'
|
||||
from: "stream.certificate_id",
|
||||
to: "certificate.id",
|
||||
},
|
||||
modify: (qb) => {
|
||||
qb.where("certificate.is_deleted", 0);
|
||||
},
|
||||
},
|
||||
modify: function (qb) {
|
||||
qb.where('certificate.is_deleted', 0);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Stream;
|
||||
export default Stream;
|
||||
|
||||
@@ -3,16 +3,16 @@
|
||||
and then has abilities after that.
|
||||
*/
|
||||
|
||||
const _ = require('lodash');
|
||||
const jwt = require('jsonwebtoken');
|
||||
const crypto = require('crypto');
|
||||
const config = require('../lib/config');
|
||||
const error = require('../lib/error');
|
||||
const logger = require('../logger').global;
|
||||
const ALGO = 'RS256';
|
||||
import crypto from "node:crypto";
|
||||
import jwt from "jsonwebtoken";
|
||||
import _ from "lodash";
|
||||
import { getPrivateKey, getPublicKey } from "../lib/config.js";
|
||||
import errs from "../lib/error.js";
|
||||
import { global as logger } from "../logger.js";
|
||||
|
||||
module.exports = function () {
|
||||
const ALGO = "RS256";
|
||||
|
||||
export default () => {
|
||||
let token_data = {};
|
||||
|
||||
const self = {
|
||||
@@ -21,28 +21,26 @@ module.exports = function () {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
create: (payload) => {
|
||||
if (!config.getPrivateKey()) {
|
||||
logger.error('Private key is empty!');
|
||||
if (!getPrivateKey()) {
|
||||
logger.error("Private key is empty!");
|
||||
}
|
||||
// sign with RSA SHA256
|
||||
const options = {
|
||||
algorithm: ALGO,
|
||||
expiresIn: payload.expiresIn || '1d'
|
||||
expiresIn: payload.expiresIn || "1d",
|
||||
};
|
||||
|
||||
payload.jti = crypto.randomBytes(12)
|
||||
.toString('base64')
|
||||
.substring(-8);
|
||||
payload.jti = crypto.randomBytes(12).toString("base64").substring(-8);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
jwt.sign(payload, config.getPrivateKey(), options, (err, token) => {
|
||||
jwt.sign(payload, getPrivateKey(), options, (err, token) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
token_data = payload;
|
||||
resolve({
|
||||
token: token,
|
||||
payload: payload
|
||||
payload: payload,
|
||||
});
|
||||
}
|
||||
});
|
||||
@@ -53,42 +51,47 @@ module.exports = function () {
|
||||
* @param {String} token
|
||||
* @returns {Promise}
|
||||
*/
|
||||
load: function (token) {
|
||||
if (!config.getPublicKey()) {
|
||||
logger.error('Public key is empty!');
|
||||
load: (token) => {
|
||||
if (!getPublicKey()) {
|
||||
logger.error("Public key is empty!");
|
||||
}
|
||||
return new Promise((resolve, reject) => {
|
||||
try {
|
||||
if (!token || token === null || token === 'null') {
|
||||
reject(new error.AuthError('Empty token'));
|
||||
if (!token || token === null || token === "null") {
|
||||
reject(new errs.AuthError("Empty token"));
|
||||
} else {
|
||||
jwt.verify(token, config.getPublicKey(), {ignoreExpiration: false, algorithms: [ALGO]}, (err, result) => {
|
||||
jwt.verify(
|
||||
token,
|
||||
getPublicKey(),
|
||||
{ ignoreExpiration: false, algorithms: [ALGO] },
|
||||
(err, result) => {
|
||||
if (err) {
|
||||
|
||||
if (err.name === 'TokenExpiredError') {
|
||||
reject(new error.AuthError('Token has expired', err));
|
||||
if (err.name === "TokenExpiredError") {
|
||||
reject(new errs.AuthError("Token has expired", err));
|
||||
} else {
|
||||
reject(err);
|
||||
}
|
||||
|
||||
} else {
|
||||
token_data = result;
|
||||
|
||||
// Hack: some tokens out in the wild have a scope of 'all' instead of 'user'.
|
||||
// For 30 days at least, we need to replace 'all' with user.
|
||||
if ((typeof token_data.scope !== 'undefined' && _.indexOf(token_data.scope, 'all') !== -1)) {
|
||||
token_data.scope = ['user'];
|
||||
if (
|
||||
typeof token_data.scope !== "undefined" &&
|
||||
_.indexOf(token_data.scope, "all") !== -1
|
||||
) {
|
||||
token_data.scope = ["user"];
|
||||
}
|
||||
|
||||
resolve(token_data);
|
||||
}
|
||||
});
|
||||
},
|
||||
);
|
||||
}
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
|
||||
},
|
||||
|
||||
/**
|
||||
@@ -97,16 +100,14 @@ module.exports = function () {
|
||||
* @param {String} scope
|
||||
* @returns {Boolean}
|
||||
*/
|
||||
hasScope: function (scope) {
|
||||
return typeof token_data.scope !== 'undefined' && _.indexOf(token_data.scope, scope) !== -1;
|
||||
},
|
||||
hasScope: (scope) => typeof token_data.scope !== "undefined" && _.indexOf(token_data.scope, scope) !== -1,
|
||||
|
||||
/**
|
||||
* @param {String} key
|
||||
* @return {*}
|
||||
*/
|
||||
get: function (key) {
|
||||
if (typeof token_data[key] !== 'undefined') {
|
||||
get: (key) => {
|
||||
if (typeof token_data[key] !== "undefined") {
|
||||
return token_data[key];
|
||||
}
|
||||
|
||||
@@ -117,7 +118,7 @@ module.exports = function () {
|
||||
* @param {String} key
|
||||
* @param {*} value
|
||||
*/
|
||||
set: function (key, value) {
|
||||
set: (key, value) => {
|
||||
token_data[key] = value;
|
||||
},
|
||||
|
||||
@@ -126,13 +127,13 @@ module.exports = function () {
|
||||
* @returns {Integer}
|
||||
*/
|
||||
getUserId: (default_value) => {
|
||||
const attrs = self.get('attrs');
|
||||
if (attrs && typeof attrs.id !== 'undefined' && attrs.id) {
|
||||
const attrs = self.get("attrs");
|
||||
if (attrs && typeof attrs.id !== "undefined" && attrs.id) {
|
||||
return attrs.id;
|
||||
}
|
||||
|
||||
return default_value || 0;
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
return self;
|
||||
|
||||
@@ -1,69 +1,65 @@
|
||||
// Objection Docs:
|
||||
// http://vincit.github.io/objection.js/
|
||||
|
||||
const db = require('../db');
|
||||
const helpers = require('../lib/helpers');
|
||||
const Model = require('objection').Model;
|
||||
const UserPermission = require('./user_permission');
|
||||
const now = require('./now_helper');
|
||||
import { Model } from "objection";
|
||||
import db from "../db.js";
|
||||
import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
|
||||
import now from "./now_helper.js";
|
||||
import UserPermission from "./user_permission.js";
|
||||
|
||||
Model.knex(db);
|
||||
|
||||
const boolFields = [
|
||||
'is_deleted',
|
||||
'is_disabled',
|
||||
];
|
||||
const boolFields = ["is_deleted", "is_disabled"];
|
||||
|
||||
class User extends Model {
|
||||
$beforeInsert () {
|
||||
$beforeInsert() {
|
||||
this.created_on = now();
|
||||
this.modified_on = now();
|
||||
|
||||
// Default for roles
|
||||
if (typeof this.roles === 'undefined') {
|
||||
if (typeof this.roles === "undefined") {
|
||||
this.roles = [];
|
||||
}
|
||||
}
|
||||
|
||||
$beforeUpdate () {
|
||||
$beforeUpdate() {
|
||||
this.modified_on = now();
|
||||
}
|
||||
|
||||
$parseDatabaseJson(json) {
|
||||
json = super.$parseDatabaseJson(json);
|
||||
return helpers.convertIntFieldsToBool(json, boolFields);
|
||||
const thisJson = super.$parseDatabaseJson(json);
|
||||
return convertIntFieldsToBool(thisJson, boolFields);
|
||||
}
|
||||
|
||||
$formatDatabaseJson(json) {
|
||||
json = helpers.convertBoolFieldsToInt(json, boolFields);
|
||||
return super.$formatDatabaseJson(json);
|
||||
const thisJson = convertBoolFieldsToInt(json, boolFields);
|
||||
return super.$formatDatabaseJson(thisJson);
|
||||
}
|
||||
|
||||
static get name () {
|
||||
return 'User';
|
||||
static get name() {
|
||||
return "User";
|
||||
}
|
||||
|
||||
static get tableName () {
|
||||
return 'user';
|
||||
static get tableName() {
|
||||
return "user";
|
||||
}
|
||||
|
||||
static get jsonAttributes () {
|
||||
return ['roles'];
|
||||
static get jsonAttributes() {
|
||||
return ["roles"];
|
||||
}
|
||||
|
||||
static get relationMappings () {
|
||||
static get relationMappings() {
|
||||
return {
|
||||
permissions: {
|
||||
relation: Model.HasOneRelation,
|
||||
modelClass: UserPermission,
|
||||
join: {
|
||||
from: 'user.id',
|
||||
to: 'user_permission.user_id'
|
||||
}
|
||||
}
|
||||
from: "user.id",
|
||||
to: "user_permission.user_id",
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
module.exports = User;
|
||||
export default User;
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
// Objection Docs:
|
||||
// http://vincit.github.io/objection.js/
|
||||
|
||||
const db = require('../db');
|
||||
const Model = require('objection').Model;
|
||||
const now = require('./now_helper');
|
||||
import { Model } from "objection";
|
||||
import db from "../db.js";
|
||||
import now from "./now_helper.js";
|
||||
|
||||
Model.knex(db);
|
||||
|
||||
@@ -26,4 +26,4 @@ class UserPermission extends Model {
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = UserPermission;
|
||||
export default UserPermission;
|
||||
|
||||
@@ -1,8 +1,16 @@
|
||||
{
|
||||
"name": "nginx-proxy-manager",
|
||||
"version": "0.0.0",
|
||||
"version": "2.0.0",
|
||||
"description": "A beautiful interface for creating Nginx endpoints",
|
||||
"author": "Jamie Curnow <jc@jc21.com>",
|
||||
"license": "MIT",
|
||||
"main": "index.js",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"lint": "biome lint",
|
||||
"prettier": "biome format --write .",
|
||||
"validate-schema": "node validate-schema.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"@apidevtools/json-schema-ref-parser": "^11.7.0",
|
||||
"ajv": "^8.17.1",
|
||||
@@ -28,21 +36,14 @@
|
||||
"sqlite3": "5.1.6",
|
||||
"temp-write": "^4.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@apidevtools/swagger-parser": "^10.1.0",
|
||||
"@biomejs/biome": "2.2.0",
|
||||
"chalk": "4.1.2",
|
||||
"nodemon": "^2.0.2"
|
||||
},
|
||||
"signale": {
|
||||
"displayDate": true,
|
||||
"displayTimestamp": true
|
||||
},
|
||||
"author": "Jamie Curnow <jc@jc21.com>",
|
||||
"license": "MIT",
|
||||
"devDependencies": {
|
||||
"@apidevtools/swagger-parser": "^10.1.0",
|
||||
"chalk": "4.1.2",
|
||||
"eslint": "^8.36.0",
|
||||
"eslint-plugin-align-assignments": "^1.1.2",
|
||||
"nodemon": "^2.0.2",
|
||||
"prettier": "^2.0.4"
|
||||
},
|
||||
"scripts": {
|
||||
"validate-schema": "node validate-schema.js"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,19 +1,19 @@
|
||||
const express = require('express');
|
||||
const validator = require('../lib/validator');
|
||||
const jwtdecode = require('../lib/express/jwt-decode');
|
||||
const internalAuditLog = require('../internal/audit-log');
|
||||
import express from "express";
|
||||
import internalAuditLog from "../internal/audit-log.js";
|
||||
import jwtdecode from "../lib/express/jwt-decode.js";
|
||||
import validator from "../lib/validator/index.js";
|
||||
|
||||
let router = express.Router({
|
||||
const router = express.Router({
|
||||
caseSensitive: true,
|
||||
strict: true,
|
||||
mergeParams: true
|
||||
mergeParams: true,
|
||||
});
|
||||
|
||||
/**
|
||||
* /api/audit-log
|
||||
*/
|
||||
router
|
||||
.route('/')
|
||||
.route("/")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
@@ -25,28 +25,30 @@ router
|
||||
* Retrieve all logs
|
||||
*/
|
||||
.get((req, res, next) => {
|
||||
validator({
|
||||
validator(
|
||||
{
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
expand: {
|
||||
$ref: 'common#/properties/expand'
|
||||
$ref: "common#/properties/expand",
|
||||
},
|
||||
query: {
|
||||
$ref: 'common#/properties/query'
|
||||
}
|
||||
}
|
||||
}, {
|
||||
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
|
||||
query: (typeof req.query.query === 'string' ? req.query.query : null)
|
||||
})
|
||||
$ref: "common#/properties/query",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
|
||||
query: typeof req.query.query === "string" ? req.query.query : null,
|
||||
},
|
||||
)
|
||||
.then((data) => {
|
||||
return internalAuditLog.getAll(res.locals.access, data.expand, data.query);
|
||||
})
|
||||
.then((rows) => {
|
||||
res.status(200)
|
||||
.send(rows);
|
||||
res.status(200).send(rows);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
|
||||
module.exports = router;
|
||||
export default router;
|
||||
|
||||
@@ -1,51 +1,63 @@
|
||||
const express = require('express');
|
||||
const pjson = require('../package.json');
|
||||
const error = require('../lib/error');
|
||||
import express from "express";
|
||||
import errs from "../lib/error.js";
|
||||
import pjson from "../package.json" with { type: "json" };
|
||||
import auditLogRoutes from "./audit-log.js";
|
||||
import accessListsRoutes from "./nginx/access_lists.js";
|
||||
import certificatesHostsRoutes from "./nginx/certificates.js";
|
||||
import deadHostsRoutes from "./nginx/dead_hosts.js";
|
||||
import proxyHostsRoutes from "./nginx/proxy_hosts.js";
|
||||
import redirectionHostsRoutes from "./nginx/redirection_hosts.js";
|
||||
import streamsRoutes from "./nginx/streams.js";
|
||||
import reportsRoutes from "./reports.js";
|
||||
import schemaRoutes from "./schema.js";
|
||||
import settingsRoutes from "./settings.js";
|
||||
import tokensRoutes from "./tokens.js";
|
||||
import usersRoutes from "./users.js";
|
||||
|
||||
let router = express.Router({
|
||||
const router = express.Router({
|
||||
caseSensitive: true,
|
||||
strict: true,
|
||||
mergeParams: true
|
||||
mergeParams: true,
|
||||
});
|
||||
|
||||
/**
|
||||
* Health Check
|
||||
* GET /api
|
||||
*/
|
||||
router.get('/', (req, res/*, next*/) => {
|
||||
let version = pjson.version.split('-').shift().split('.');
|
||||
router.get("/", (_, res /*, next*/) => {
|
||||
const version = pjson.version.split("-").shift().split(".");
|
||||
|
||||
res.status(200).send({
|
||||
status: 'OK',
|
||||
status: "OK",
|
||||
version: {
|
||||
major: parseInt(version.shift(), 10),
|
||||
minor: parseInt(version.shift(), 10),
|
||||
revision: parseInt(version.shift(), 10)
|
||||
}
|
||||
major: Number.parseInt(version.shift(), 10),
|
||||
minor: Number.parseInt(version.shift(), 10),
|
||||
revision: Number.parseInt(version.shift(), 10),
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
router.use('/schema', require('./schema'));
|
||||
router.use('/tokens', require('./tokens'));
|
||||
router.use('/users', require('./users'));
|
||||
router.use('/audit-log', require('./audit-log'));
|
||||
router.use('/reports', require('./reports'));
|
||||
router.use('/settings', require('./settings'));
|
||||
router.use('/nginx/proxy-hosts', require('./nginx/proxy_hosts'));
|
||||
router.use('/nginx/redirection-hosts', require('./nginx/redirection_hosts'));
|
||||
router.use('/nginx/dead-hosts', require('./nginx/dead_hosts'));
|
||||
router.use('/nginx/streams', require('./nginx/streams'));
|
||||
router.use('/nginx/access-lists', require('./nginx/access_lists'));
|
||||
router.use('/nginx/certificates', require('./nginx/certificates'));
|
||||
router.use("/schema", schemaRoutes);
|
||||
router.use("/tokens", tokensRoutes);
|
||||
router.use("/users", usersRoutes);
|
||||
router.use("/audit-log", auditLogRoutes);
|
||||
router.use("/reports", reportsRoutes);
|
||||
router.use("/settings", settingsRoutes);
|
||||
router.use("/nginx/proxy-hosts", proxyHostsRoutes);
|
||||
router.use("/nginx/redirection-hosts", redirectionHostsRoutes);
|
||||
router.use("/nginx/dead-hosts", deadHostsRoutes);
|
||||
router.use("/nginx/streams", streamsRoutes);
|
||||
router.use("/nginx/access-lists", accessListsRoutes);
|
||||
router.use("/nginx/certificates", certificatesHostsRoutes);
|
||||
|
||||
/**
|
||||
* API 404 for all other routes
|
||||
*
|
||||
* ALL /api/*
|
||||
*/
|
||||
router.all(/(.+)/, function (req, _, next) {
|
||||
req.params.page = req.params['0'];
|
||||
next(new error.ItemNotFoundError(req.params.page));
|
||||
router.all(/(.+)/, (req, _, next) => {
|
||||
req.params.page = req.params["0"];
|
||||
next(new errs.ItemNotFoundError(req.params.page));
|
||||
});
|
||||
|
||||
module.exports = router;
|
||||
export default router;
|
||||
|
||||
@@ -1,22 +1,22 @@
|
||||
const express = require('express');
|
||||
const validator = require('../../lib/validator');
|
||||
const jwtdecode = require('../../lib/express/jwt-decode');
|
||||
const apiValidator = require('../../lib/validator/api');
|
||||
const internalAccessList = require('../../internal/access-list');
|
||||
const schema = require('../../schema');
|
||||
import express from "express";
|
||||
import internalAccessList from "../../internal/access-list.js";
|
||||
import jwtdecode from "../../lib/express/jwt-decode.js";
|
||||
import apiValidator from "../../lib/validator/api.js";
|
||||
import validator from "../../lib/validator/index.js";
|
||||
import { getValidationSchema } from "../../schema/index.js";
|
||||
|
||||
let router = express.Router({
|
||||
const router = express.Router({
|
||||
caseSensitive: true,
|
||||
strict: true,
|
||||
mergeParams: true
|
||||
mergeParams: true,
|
||||
});
|
||||
|
||||
/**
|
||||
* /api/nginx/access-lists
|
||||
*/
|
||||
router
|
||||
.route('/')
|
||||
.options((req, res) => {
|
||||
.route("/")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
.all(jwtdecode())
|
||||
@@ -27,26 +27,28 @@ router
|
||||
* Retrieve all access-lists
|
||||
*/
|
||||
.get((req, res, next) => {
|
||||
validator({
|
||||
validator(
|
||||
{
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
expand: {
|
||||
$ref: 'common#/properties/expand'
|
||||
$ref: "common#/properties/expand",
|
||||
},
|
||||
query: {
|
||||
$ref: 'common#/properties/query'
|
||||
}
|
||||
}
|
||||
}, {
|
||||
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
|
||||
query: (typeof req.query.query === 'string' ? req.query.query : null)
|
||||
})
|
||||
$ref: "common#/properties/query",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
|
||||
query: typeof req.query.query === "string" ? req.query.query : null,
|
||||
},
|
||||
)
|
||||
.then((data) => {
|
||||
return internalAccessList.getAll(res.locals.access, data.expand, data.query);
|
||||
})
|
||||
.then((rows) => {
|
||||
res.status(200)
|
||||
.send(rows);
|
||||
res.status(200).send(rows);
|
||||
})
|
||||
.catch(next);
|
||||
})
|
||||
@@ -57,13 +59,12 @@ router
|
||||
* Create a new access-list
|
||||
*/
|
||||
.post((req, res, next) => {
|
||||
apiValidator(schema.getValidationSchema('/nginx/access-lists', 'post'), req.body)
|
||||
apiValidator(getValidationSchema("/nginx/access-lists", "post"), req.body)
|
||||
.then((payload) => {
|
||||
return internalAccessList.create(res.locals.access, payload);
|
||||
})
|
||||
.then((result) => {
|
||||
res.status(201)
|
||||
.send(result);
|
||||
res.status(201).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
@@ -74,7 +75,7 @@ router
|
||||
* /api/nginx/access-lists/123
|
||||
*/
|
||||
router
|
||||
.route('/:list_id')
|
||||
.route("/:list_id")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
@@ -86,30 +87,32 @@ router
|
||||
* Retrieve a specific access-list
|
||||
*/
|
||||
.get((req, res, next) => {
|
||||
validator({
|
||||
required: ['list_id'],
|
||||
validator(
|
||||
{
|
||||
required: ["list_id"],
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
list_id: {
|
||||
$ref: 'common#/properties/id'
|
||||
$ref: "common#/properties/id",
|
||||
},
|
||||
expand: {
|
||||
$ref: 'common#/properties/expand'
|
||||
}
|
||||
}
|
||||
}, {
|
||||
$ref: "common#/properties/expand",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
list_id: req.params.list_id,
|
||||
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
|
||||
})
|
||||
expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
|
||||
},
|
||||
)
|
||||
.then((data) => {
|
||||
return internalAccessList.get(res.locals.access, {
|
||||
id: parseInt(data.list_id, 10),
|
||||
expand: data.expand
|
||||
id: Number.parseInt(data.list_id, 10),
|
||||
expand: data.expand,
|
||||
});
|
||||
})
|
||||
.then((row) => {
|
||||
res.status(200)
|
||||
.send(row);
|
||||
res.status(200).send(row);
|
||||
})
|
||||
.catch(next);
|
||||
})
|
||||
@@ -120,14 +123,13 @@ router
|
||||
* Update and existing access-list
|
||||
*/
|
||||
.put((req, res, next) => {
|
||||
apiValidator(schema.getValidationSchema('/nginx/access-lists/{listID}', 'put'), req.body)
|
||||
apiValidator(getValidationSchema("/nginx/access-lists/{listID}", "put"), req.body)
|
||||
.then((payload) => {
|
||||
payload.id = parseInt(req.params.list_id, 10);
|
||||
payload.id = Number.parseInt(req.params.list_id, 10);
|
||||
return internalAccessList.update(res.locals.access, payload);
|
||||
})
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
})
|
||||
@@ -138,12 +140,12 @@ router
|
||||
* Delete and existing access-list
|
||||
*/
|
||||
.delete((req, res, next) => {
|
||||
internalAccessList.delete(res.locals.access, {id: parseInt(req.params.list_id, 10)})
|
||||
internalAccessList
|
||||
.delete(res.locals.access, { id: Number.parseInt(req.params.list_id, 10) })
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
|
||||
module.exports = router;
|
||||
export default router;
|
||||
|
||||
@@ -1,22 +1,22 @@
|
||||
const express = require('express');
|
||||
const error = require('../../lib/error');
|
||||
const validator = require('../../lib/validator');
|
||||
const jwtdecode = require('../../lib/express/jwt-decode');
|
||||
const apiValidator = require('../../lib/validator/api');
|
||||
const internalCertificate = require('../../internal/certificate');
|
||||
const schema = require('../../schema');
|
||||
import express from "express";
|
||||
import internalCertificate from "../../internal/certificate.js";
|
||||
import errs from "../../lib/error.js";
|
||||
import jwtdecode from "../../lib/express/jwt-decode.js";
|
||||
import apiValidator from "../../lib/validator/api.js";
|
||||
import validator from "../../lib/validator/index.js";
|
||||
import { getValidationSchema } from "../../schema/index.js";
|
||||
|
||||
const router = express.Router({
|
||||
caseSensitive: true,
|
||||
strict: true,
|
||||
mergeParams: true
|
||||
mergeParams: true,
|
||||
});
|
||||
|
||||
/**
|
||||
* /api/nginx/certificates
|
||||
*/
|
||||
router
|
||||
.route('/')
|
||||
.route("/")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
@@ -28,26 +28,28 @@ router
|
||||
* Retrieve all certificates
|
||||
*/
|
||||
.get((req, res, next) => {
|
||||
validator({
|
||||
validator(
|
||||
{
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
expand: {
|
||||
$ref: 'common#/properties/expand'
|
||||
$ref: "common#/properties/expand",
|
||||
},
|
||||
query: {
|
||||
$ref: 'common#/properties/query'
|
||||
}
|
||||
}
|
||||
}, {
|
||||
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
|
||||
query: (typeof req.query.query === 'string' ? req.query.query : null)
|
||||
})
|
||||
$ref: "common#/properties/query",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
|
||||
query: typeof req.query.query === "string" ? req.query.query : null,
|
||||
},
|
||||
)
|
||||
.then((data) => {
|
||||
return internalCertificate.getAll(res.locals.access, data.expand, data.query);
|
||||
})
|
||||
.then((rows) => {
|
||||
res.status(200)
|
||||
.send(rows);
|
||||
res.status(200).send(rows);
|
||||
})
|
||||
.catch(next);
|
||||
})
|
||||
@@ -58,14 +60,13 @@ router
|
||||
* Create a new certificate
|
||||
*/
|
||||
.post((req, res, next) => {
|
||||
apiValidator(schema.getValidationSchema('/nginx/certificates', 'post'), req.body)
|
||||
apiValidator(getValidationSchema("/nginx/certificates", "post"), req.body)
|
||||
.then((payload) => {
|
||||
req.setTimeout(900000); // 15 minutes timeout
|
||||
return internalCertificate.create(res.locals.access, payload);
|
||||
})
|
||||
.then((result) => {
|
||||
res.status(201)
|
||||
.send(result);
|
||||
res.status(201).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
@@ -76,7 +77,7 @@ router
|
||||
* /api/nginx/certificates/test-http
|
||||
*/
|
||||
router
|
||||
.route('/test-http')
|
||||
.route("/test-http")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
@@ -89,14 +90,14 @@ router
|
||||
*/
|
||||
.get((req, res, next) => {
|
||||
if (req.query.domains === undefined) {
|
||||
next(new error.ValidationError('Domains are required as query parameters'));
|
||||
next(new errs.ValidationError("Domains are required as query parameters"));
|
||||
return;
|
||||
}
|
||||
|
||||
internalCertificate.testHttpsChallenge(res.locals.access, JSON.parse(req.query.domains))
|
||||
internalCertificate
|
||||
.testHttpsChallenge(res.locals.access, JSON.parse(req.query.domains))
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
@@ -107,7 +108,7 @@ router
|
||||
* /api/nginx/certificates/123
|
||||
*/
|
||||
router
|
||||
.route('/:certificate_id')
|
||||
.route("/:certificate_id")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
@@ -119,30 +120,32 @@ router
|
||||
* Retrieve a specific certificate
|
||||
*/
|
||||
.get((req, res, next) => {
|
||||
validator({
|
||||
required: ['certificate_id'],
|
||||
validator(
|
||||
{
|
||||
required: ["certificate_id"],
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
certificate_id: {
|
||||
$ref: 'common#/properties/id'
|
||||
$ref: "common#/properties/id",
|
||||
},
|
||||
expand: {
|
||||
$ref: 'common#/properties/expand'
|
||||
}
|
||||
}
|
||||
}, {
|
||||
$ref: "common#/properties/expand",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
certificate_id: req.params.certificate_id,
|
||||
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
|
||||
})
|
||||
expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
|
||||
},
|
||||
)
|
||||
.then((data) => {
|
||||
return internalCertificate.get(res.locals.access, {
|
||||
id: parseInt(data.certificate_id, 10),
|
||||
expand: data.expand
|
||||
id: Number.parseInt(data.certificate_id, 10),
|
||||
expand: data.expand,
|
||||
});
|
||||
})
|
||||
.then((row) => {
|
||||
res.status(200)
|
||||
.send(row);
|
||||
res.status(200).send(row);
|
||||
})
|
||||
.catch(next);
|
||||
})
|
||||
@@ -153,10 +156,10 @@ router
|
||||
* Update and existing certificate
|
||||
*/
|
||||
.delete((req, res, next) => {
|
||||
internalCertificate.delete(res.locals.access, {id: parseInt(req.params.certificate_id, 10)})
|
||||
internalCertificate
|
||||
.delete(res.locals.access, { id: Number.parseInt(req.params.certificate_id, 10) })
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
@@ -167,7 +170,7 @@ router
|
||||
* /api/nginx/certificates/123/upload
|
||||
*/
|
||||
router
|
||||
.route('/:certificate_id/upload')
|
||||
.route("/:certificate_id/upload")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
@@ -180,16 +183,15 @@ router
|
||||
*/
|
||||
.post((req, res, next) => {
|
||||
if (!req.files) {
|
||||
res.status(400)
|
||||
.send({error: 'No files were uploaded'});
|
||||
res.status(400).send({ error: "No files were uploaded" });
|
||||
} else {
|
||||
internalCertificate.upload(res.locals.access, {
|
||||
id: parseInt(req.params.certificate_id, 10),
|
||||
files: req.files
|
||||
internalCertificate
|
||||
.upload(res.locals.access, {
|
||||
id: Number.parseInt(req.params.certificate_id, 10),
|
||||
files: req.files,
|
||||
})
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
}
|
||||
@@ -201,7 +203,7 @@ router
|
||||
* /api/nginx/certificates/123/renew
|
||||
*/
|
||||
router
|
||||
.route('/:certificate_id/renew')
|
||||
.route("/:certificate_id/renew")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
@@ -214,12 +216,12 @@ router
|
||||
*/
|
||||
.post((req, res, next) => {
|
||||
req.setTimeout(900000); // 15 minutes timeout
|
||||
internalCertificate.renew(res.locals.access, {
|
||||
id: parseInt(req.params.certificate_id, 10)
|
||||
internalCertificate
|
||||
.renew(res.locals.access, {
|
||||
id: Number.parseInt(req.params.certificate_id, 10),
|
||||
})
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
@@ -230,7 +232,7 @@ router
|
||||
* /api/nginx/certificates/123/download
|
||||
*/
|
||||
router
|
||||
.route('/:certificate_id/download')
|
||||
.route("/:certificate_id/download")
|
||||
.options((_req, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
@@ -242,12 +244,12 @@ router
|
||||
* Renew certificate
|
||||
*/
|
||||
.get((req, res, next) => {
|
||||
internalCertificate.download(res.locals.access, {
|
||||
id: parseInt(req.params.certificate_id, 10)
|
||||
internalCertificate
|
||||
.download(res.locals.access, {
|
||||
id: Number.parseInt(req.params.certificate_id, 10),
|
||||
})
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.download(result.fileName);
|
||||
res.status(200).download(result.fileName);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
@@ -258,7 +260,7 @@ router
|
||||
* /api/nginx/certificates/validate
|
||||
*/
|
||||
router
|
||||
.route('/validate')
|
||||
.route("/validate")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
@@ -271,18 +273,17 @@ router
|
||||
*/
|
||||
.post((req, res, next) => {
|
||||
if (!req.files) {
|
||||
res.status(400)
|
||||
.send({error: 'No files were uploaded'});
|
||||
res.status(400).send({ error: "No files were uploaded" });
|
||||
} else {
|
||||
internalCertificate.validate({
|
||||
files: req.files
|
||||
internalCertificate
|
||||
.validate({
|
||||
files: req.files,
|
||||
})
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
}
|
||||
});
|
||||
|
||||
module.exports = router;
|
||||
export default router;
|
||||
|
||||
@@ -1,21 +1,21 @@
|
||||
const express = require('express');
|
||||
const validator = require('../../lib/validator');
|
||||
const jwtdecode = require('../../lib/express/jwt-decode');
|
||||
const apiValidator = require('../../lib/validator/api');
|
||||
const internalDeadHost = require('../../internal/dead-host');
|
||||
const schema = require('../../schema');
|
||||
import express from "express";
|
||||
import internalDeadHost from "../../internal/dead-host.js";
|
||||
import jwtdecode from "../../lib/express/jwt-decode.js";
|
||||
import apiValidator from "../../lib/validator/api.js";
|
||||
import validator from "../../lib/validator/index.js";
|
||||
import { getValidationSchema } from "../../schema/index.js";
|
||||
|
||||
let router = express.Router({
|
||||
const router = express.Router({
|
||||
caseSensitive: true,
|
||||
strict: true,
|
||||
mergeParams: true
|
||||
mergeParams: true,
|
||||
});
|
||||
|
||||
/**
|
||||
* /api/nginx/dead-hosts
|
||||
*/
|
||||
router
|
||||
.route('/')
|
||||
.route("/")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
@@ -27,26 +27,28 @@ router
|
||||
* Retrieve all dead-hosts
|
||||
*/
|
||||
.get((req, res, next) => {
|
||||
validator({
|
||||
validator(
|
||||
{
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
expand: {
|
||||
$ref: 'common#/properties/expand'
|
||||
$ref: "common#/properties/expand",
|
||||
},
|
||||
query: {
|
||||
$ref: 'common#/properties/query'
|
||||
}
|
||||
}
|
||||
}, {
|
||||
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
|
||||
query: (typeof req.query.query === 'string' ? req.query.query : null)
|
||||
})
|
||||
$ref: "common#/properties/query",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
|
||||
query: typeof req.query.query === "string" ? req.query.query : null,
|
||||
},
|
||||
)
|
||||
.then((data) => {
|
||||
return internalDeadHost.getAll(res.locals.access, data.expand, data.query);
|
||||
})
|
||||
.then((rows) => {
|
||||
res.status(200)
|
||||
.send(rows);
|
||||
res.status(200).send(rows);
|
||||
})
|
||||
.catch(next);
|
||||
})
|
||||
@@ -57,13 +59,12 @@ router
|
||||
* Create a new dead-host
|
||||
*/
|
||||
.post((req, res, next) => {
|
||||
apiValidator(schema.getValidationSchema('/nginx/dead-hosts', 'post'), req.body)
|
||||
apiValidator(getValidationSchema("/nginx/dead-hosts", "post"), req.body)
|
||||
.then((payload) => {
|
||||
return internalDeadHost.create(res.locals.access, payload);
|
||||
})
|
||||
.then((result) => {
|
||||
res.status(201)
|
||||
.send(result);
|
||||
res.status(201).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
@@ -74,8 +75,8 @@ router
|
||||
* /api/nginx/dead-hosts/123
|
||||
*/
|
||||
router
|
||||
.route('/:host_id')
|
||||
.options((req, res) => {
|
||||
.route("/:host_id")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
.all(jwtdecode())
|
||||
@@ -86,30 +87,32 @@ router
|
||||
* Retrieve a specific dead-host
|
||||
*/
|
||||
.get((req, res, next) => {
|
||||
validator({
|
||||
required: ['host_id'],
|
||||
validator(
|
||||
{
|
||||
required: ["host_id"],
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
host_id: {
|
||||
$ref: 'common#/properties/id'
|
||||
$ref: "common#/properties/id",
|
||||
},
|
||||
expand: {
|
||||
$ref: 'common#/properties/expand'
|
||||
}
|
||||
}
|
||||
}, {
|
||||
$ref: "common#/properties/expand",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
host_id: req.params.host_id,
|
||||
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
|
||||
})
|
||||
expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
|
||||
},
|
||||
)
|
||||
.then((data) => {
|
||||
return internalDeadHost.get(res.locals.access, {
|
||||
id: parseInt(data.host_id, 10),
|
||||
expand: data.expand
|
||||
id: Number.parseInt(data.host_id, 10),
|
||||
expand: data.expand,
|
||||
});
|
||||
})
|
||||
.then((row) => {
|
||||
res.status(200)
|
||||
.send(row);
|
||||
res.status(200).send(row);
|
||||
})
|
||||
.catch(next);
|
||||
})
|
||||
@@ -120,14 +123,13 @@ router
|
||||
* Update and existing dead-host
|
||||
*/
|
||||
.put((req, res, next) => {
|
||||
apiValidator(schema.getValidationSchema('/nginx/dead-hosts/{hostID}', 'put'), req.body)
|
||||
apiValidator(getValidationSchema("/nginx/dead-hosts/{hostID}", "put"), req.body)
|
||||
.then((payload) => {
|
||||
payload.id = parseInt(req.params.host_id, 10);
|
||||
payload.id = Number.parseInt(req.params.host_id, 10);
|
||||
return internalDeadHost.update(res.locals.access, payload);
|
||||
})
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
})
|
||||
@@ -138,10 +140,10 @@ router
|
||||
* Update and existing dead-host
|
||||
*/
|
||||
.delete((req, res, next) => {
|
||||
internalDeadHost.delete(res.locals.access, {id: parseInt(req.params.host_id, 10)})
|
||||
internalDeadHost
|
||||
.delete(res.locals.access, { id: Number.parseInt(req.params.host_id, 10) })
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
@@ -152,7 +154,7 @@ router
|
||||
* /api/nginx/dead-hosts/123/enable
|
||||
*/
|
||||
router
|
||||
.route('/:host_id/enable')
|
||||
.route("/:host_id/enable")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
@@ -162,10 +164,10 @@ router
|
||||
* POST /api/nginx/dead-hosts/123/enable
|
||||
*/
|
||||
.post((req, res, next) => {
|
||||
internalDeadHost.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
|
||||
internalDeadHost
|
||||
.enable(res.locals.access, { id: Number.parseInt(req.params.host_id, 10) })
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
@@ -176,7 +178,7 @@ router
|
||||
* /api/nginx/dead-hosts/123/disable
|
||||
*/
|
||||
router
|
||||
.route('/:host_id/disable')
|
||||
.route("/:host_id/disable")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
@@ -186,12 +188,12 @@ router
|
||||
* POST /api/nginx/dead-hosts/123/disable
|
||||
*/
|
||||
.post((req, res, next) => {
|
||||
internalDeadHost.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
|
||||
internalDeadHost
|
||||
.disable(res.locals.access, { id: Number.parseInt(req.params.host_id, 10) })
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
|
||||
module.exports = router;
|
||||
export default router;
|
||||
|
||||
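
Throughout these route diffs the bare global parseInt is replaced with Number.parseInt. The two names refer to the same built-in function, so behaviour is unchanged; the namespaced spelling is simply what the useNumberNamespace rule in the Biome configuration added later in this change enforces. A quick illustration of the equivalence:

// Number.parseInt and the global parseInt are the same function object.
console.log(Number.parseInt === parseInt); // true
console.log(Number.parseInt("42", 10));    // 42
console.log(parseInt("42", 10));           // 42
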
@@ -1,22 +1,22 @@
|
||||
const express = require('express');
|
||||
const validator = require('../../lib/validator');
|
||||
const jwtdecode = require('../../lib/express/jwt-decode');
|
||||
const apiValidator = require('../../lib/validator/api');
|
||||
const internalProxyHost = require('../../internal/proxy-host');
|
||||
const schema = require('../../schema');
|
||||
import express from "express";
|
||||
import internalProxyHost from "../../internal/proxy-host.js";
|
||||
import jwtdecode from "../../lib/express/jwt-decode.js";
|
||||
import apiValidator from "../../lib/validator/api.js";
|
||||
import validator from "../../lib/validator/index.js";
|
||||
import { getValidationSchema } from "../../schema/index.js";
|
||||
|
||||
let router = express.Router({
|
||||
const router = express.Router({
|
||||
caseSensitive: true,
|
||||
strict: true,
|
||||
mergeParams: true
|
||||
mergeParams: true,
|
||||
});
|
||||
|
||||
/**
|
||||
* /api/nginx/proxy-hosts
|
||||
*/
|
||||
router
|
||||
.route('/')
|
||||
.options((req, res) => {
|
||||
.route("/")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
.all(jwtdecode())
|
||||
@@ -27,26 +27,28 @@ router
|
||||
* Retrieve all proxy-hosts
|
||||
*/
|
||||
.get((req, res, next) => {
|
||||
validator({
|
||||
validator(
|
||||
{
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
expand: {
|
||||
$ref: 'common#/properties/expand'
|
||||
$ref: "common#/properties/expand",
|
||||
},
|
||||
query: {
|
||||
$ref: 'common#/properties/query'
|
||||
}
|
||||
}
|
||||
}, {
|
||||
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
|
||||
query: (typeof req.query.query === 'string' ? req.query.query : null)
|
||||
})
|
||||
$ref: "common#/properties/query",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
|
||||
query: typeof req.query.query === "string" ? req.query.query : null,
|
||||
},
|
||||
)
|
||||
.then((data) => {
|
||||
return internalProxyHost.getAll(res.locals.access, data.expand, data.query);
|
||||
})
|
||||
.then((rows) => {
|
||||
res.status(200)
|
||||
.send(rows);
|
||||
res.status(200).send(rows);
|
||||
})
|
||||
.catch(next);
|
||||
})
|
||||
@@ -57,13 +59,12 @@ router
|
||||
* Create a new proxy-host
|
||||
*/
|
||||
.post((req, res, next) => {
|
||||
apiValidator(schema.getValidationSchema('/nginx/proxy-hosts', 'post'), req.body)
|
||||
apiValidator(getValidationSchema("/nginx/proxy-hosts", "post"), req.body)
|
||||
.then((payload) => {
|
||||
return internalProxyHost.create(res.locals.access, payload);
|
||||
})
|
||||
.then((result) => {
|
||||
res.status(201)
|
||||
.send(result);
|
||||
res.status(201).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
@@ -74,8 +75,8 @@ router
|
||||
* /api/nginx/proxy-hosts/123
|
||||
*/
|
||||
router
|
||||
.route('/:host_id')
|
||||
.options((req, res) => {
|
||||
.route("/:host_id")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
.all(jwtdecode())
|
||||
@@ -86,30 +87,32 @@ router
|
||||
* Retrieve a specific proxy-host
|
||||
*/
|
||||
.get((req, res, next) => {
|
||||
validator({
|
||||
required: ['host_id'],
|
||||
validator(
|
||||
{
|
||||
required: ["host_id"],
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
host_id: {
|
||||
$ref: 'common#/properties/id'
|
||||
$ref: "common#/properties/id",
|
||||
},
|
||||
expand: {
|
||||
$ref: 'common#/properties/expand'
|
||||
}
|
||||
}
|
||||
}, {
|
||||
$ref: "common#/properties/expand",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
host_id: req.params.host_id,
|
||||
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
|
||||
})
|
||||
expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
|
||||
},
|
||||
)
|
||||
.then((data) => {
|
||||
return internalProxyHost.get(res.locals.access, {
|
||||
id: parseInt(data.host_id, 10),
|
||||
expand: data.expand
|
||||
id: Number.parseInt(data.host_id, 10),
|
||||
expand: data.expand,
|
||||
});
|
||||
})
|
||||
.then((row) => {
|
||||
res.status(200)
|
||||
.send(row);
|
||||
res.status(200).send(row);
|
||||
})
|
||||
.catch(next);
|
||||
})
|
||||
@@ -120,14 +123,13 @@ router
|
||||
* Update and existing proxy-host
|
||||
*/
|
||||
.put((req, res, next) => {
|
||||
apiValidator(schema.getValidationSchema('/nginx/proxy-hosts/{hostID}', 'put'), req.body)
|
||||
apiValidator(getValidationSchema("/nginx/proxy-hosts/{hostID}", "put"), req.body)
|
||||
.then((payload) => {
|
||||
payload.id = parseInt(req.params.host_id, 10);
|
||||
payload.id = Number.parseInt(req.params.host_id, 10);
|
||||
return internalProxyHost.update(res.locals.access, payload);
|
||||
})
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
})
|
||||
@@ -138,10 +140,10 @@ router
|
||||
* Update and existing proxy-host
|
||||
*/
|
||||
.delete((req, res, next) => {
|
||||
internalProxyHost.delete(res.locals.access, {id: parseInt(req.params.host_id, 10)})
|
||||
internalProxyHost
|
||||
.delete(res.locals.access, { id: Number.parseInt(req.params.host_id, 10) })
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
@@ -152,7 +154,7 @@ router
|
||||
* /api/nginx/proxy-hosts/123/enable
|
||||
*/
|
||||
router
|
||||
.route('/:host_id/enable')
|
||||
.route("/:host_id/enable")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
@@ -162,10 +164,10 @@ router
|
||||
* POST /api/nginx/proxy-hosts/123/enable
|
||||
*/
|
||||
.post((req, res, next) => {
|
||||
internalProxyHost.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
|
||||
internalProxyHost
|
||||
.enable(res.locals.access, { id: Number.parseInt(req.params.host_id, 10) })
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
@@ -176,7 +178,7 @@ router
|
||||
* /api/nginx/proxy-hosts/123/disable
|
||||
*/
|
||||
router
|
||||
.route('/:host_id/disable')
|
||||
.route("/:host_id/disable")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
@@ -186,12 +188,12 @@ router
|
||||
* POST /api/nginx/proxy-hosts/123/disable
|
||||
*/
|
||||
.post((req, res, next) => {
|
||||
internalProxyHost.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
|
||||
internalProxyHost
|
||||
.disable(res.locals.access, { id: Number.parseInt(req.params.host_id, 10) })
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
|
||||
module.exports = router;
|
||||
export default router;
|
||||
|
||||
@@ -1,22 +1,22 @@
|
||||
const express = require('express');
|
||||
const validator = require('../../lib/validator');
|
||||
const jwtdecode = require('../../lib/express/jwt-decode');
|
||||
const apiValidator = require('../../lib/validator/api');
|
||||
const internalRedirectionHost = require('../../internal/redirection-host');
|
||||
const schema = require('../../schema');
|
||||
import express from "express";
|
||||
import internalRedirectionHost from "../../internal/redirection-host.js";
|
||||
import jwtdecode from "../../lib/express/jwt-decode.js";
|
||||
import apiValidator from "../../lib/validator/api.js";
|
||||
import validator from "../../lib/validator/index.js";
|
||||
import { getValidationSchema } from "../../schema/index.js";
|
||||
|
||||
let router = express.Router({
|
||||
const router = express.Router({
|
||||
caseSensitive: true,
|
||||
strict: true,
|
||||
mergeParams: true
|
||||
mergeParams: true,
|
||||
});
|
||||
|
||||
/**
|
||||
* /api/nginx/redirection-hosts
|
||||
*/
|
||||
router
|
||||
.route('/')
|
||||
.options((req, res) => {
|
||||
.route("/")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
.all(jwtdecode())
|
||||
@@ -27,26 +27,28 @@ router
|
||||
* Retrieve all redirection-hosts
|
||||
*/
|
||||
.get((req, res, next) => {
|
||||
validator({
|
||||
validator(
|
||||
{
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
expand: {
|
||||
$ref: 'common#/properties/expand'
|
||||
$ref: "common#/properties/expand",
|
||||
},
|
||||
query: {
|
||||
$ref: 'common#/properties/query'
|
||||
}
|
||||
}
|
||||
}, {
|
||||
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
|
||||
query: (typeof req.query.query === 'string' ? req.query.query : null)
|
||||
})
|
||||
$ref: "common#/properties/query",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
|
||||
query: typeof req.query.query === "string" ? req.query.query : null,
|
||||
},
|
||||
)
|
||||
.then((data) => {
|
||||
return internalRedirectionHost.getAll(res.locals.access, data.expand, data.query);
|
||||
})
|
||||
.then((rows) => {
|
||||
res.status(200)
|
||||
.send(rows);
|
||||
res.status(200).send(rows);
|
||||
})
|
||||
.catch(next);
|
||||
})
|
||||
@@ -57,13 +59,12 @@ router
|
||||
* Create a new redirection-host
|
||||
*/
|
||||
.post((req, res, next) => {
|
||||
apiValidator(schema.getValidationSchema('/nginx/redirection-hosts', 'post'), req.body)
|
||||
apiValidator(getValidationSchema("/nginx/redirection-hosts", "post"), req.body)
|
||||
.then((payload) => {
|
||||
return internalRedirectionHost.create(res.locals.access, payload);
|
||||
})
|
||||
.then((result) => {
|
||||
res.status(201)
|
||||
.send(result);
|
||||
res.status(201).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
@@ -74,8 +75,8 @@ router
|
||||
* /api/nginx/redirection-hosts/123
|
||||
*/
|
||||
router
|
||||
.route('/:host_id')
|
||||
.options((req, res) => {
|
||||
.route("/:host_id")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
.all(jwtdecode())
|
||||
@@ -86,30 +87,32 @@ router
|
||||
* Retrieve a specific redirection-host
|
||||
*/
|
||||
.get((req, res, next) => {
|
||||
validator({
|
||||
required: ['host_id'],
|
||||
validator(
|
||||
{
|
||||
required: ["host_id"],
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
host_id: {
|
||||
$ref: 'common#/properties/id'
|
||||
$ref: "common#/properties/id",
|
||||
},
|
||||
expand: {
|
||||
$ref: 'common#/properties/expand'
|
||||
}
|
||||
}
|
||||
}, {
|
||||
$ref: "common#/properties/expand",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
host_id: req.params.host_id,
|
||||
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
|
||||
})
|
||||
expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
|
||||
},
|
||||
)
|
||||
.then((data) => {
|
||||
return internalRedirectionHost.get(res.locals.access, {
|
||||
id: parseInt(data.host_id, 10),
|
||||
expand: data.expand
|
||||
id: Number.parseInt(data.host_id, 10),
|
||||
expand: data.expand,
|
||||
});
|
||||
})
|
||||
.then((row) => {
|
||||
res.status(200)
|
||||
.send(row);
|
||||
res.status(200).send(row);
|
||||
})
|
||||
.catch(next);
|
||||
})
|
||||
@@ -120,14 +123,13 @@ router
|
||||
* Update and existing redirection-host
|
||||
*/
|
||||
.put((req, res, next) => {
|
||||
apiValidator(schema.getValidationSchema('/nginx/redirection-hosts/{hostID}', 'put'), req.body)
|
||||
apiValidator(getValidationSchema("/nginx/redirection-hosts/{hostID}", "put"), req.body)
|
||||
.then((payload) => {
|
||||
payload.id = parseInt(req.params.host_id, 10);
|
||||
payload.id = Number.parseInt(req.params.host_id, 10);
|
||||
return internalRedirectionHost.update(res.locals.access, payload);
|
||||
})
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
})
|
||||
@@ -138,10 +140,10 @@ router
|
||||
* Update and existing redirection-host
|
||||
*/
|
||||
.delete((req, res, next) => {
|
||||
internalRedirectionHost.delete(res.locals.access, {id: parseInt(req.params.host_id, 10)})
|
||||
internalRedirectionHost
|
||||
.delete(res.locals.access, { id: Number.parseInt(req.params.host_id, 10) })
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
@@ -152,8 +154,8 @@ router
|
||||
* /api/nginx/redirection-hosts/123/enable
|
||||
*/
|
||||
router
|
||||
.route('/:host_id/enable')
|
||||
.options((req, res) => {
|
||||
.route("/:host_id/enable")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
.all(jwtdecode())
|
||||
@@ -162,10 +164,10 @@ router
|
||||
* POST /api/nginx/redirection-hosts/123/enable
|
||||
*/
|
||||
.post((req, res, next) => {
|
||||
internalRedirectionHost.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
|
||||
internalRedirectionHost
|
||||
.enable(res.locals.access, { id: Number.parseInt(req.params.host_id, 10) })
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
@@ -176,8 +178,8 @@ router
|
||||
* /api/nginx/redirection-hosts/123/disable
|
||||
*/
|
||||
router
|
||||
.route('/:host_id/disable')
|
||||
.options((req, res) => {
|
||||
.route("/:host_id/disable")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
.all(jwtdecode())
|
||||
@@ -186,12 +188,12 @@ router
|
||||
* POST /api/nginx/redirection-hosts/123/disable
|
||||
*/
|
||||
.post((req, res, next) => {
|
||||
internalRedirectionHost.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
|
||||
internalRedirectionHost
|
||||
.disable(res.locals.access, { id: Number.parseInt(req.params.host_id, 10) })
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
|
||||
module.exports = router;
|
||||
export default router;
|
||||
|
||||
@@ -1,22 +1,22 @@
|
||||
const express = require('express');
|
||||
const validator = require('../../lib/validator');
|
||||
const jwtdecode = require('../../lib/express/jwt-decode');
|
||||
const apiValidator = require('../../lib/validator/api');
|
||||
const internalStream = require('../../internal/stream');
|
||||
const schema = require('../../schema');
|
||||
import express from "express";
|
||||
import internalStream from "../../internal/stream.js";
|
||||
import jwtdecode from "../../lib/express/jwt-decode.js";
|
||||
import apiValidator from "../../lib/validator/api.js";
|
||||
import validator from "../../lib/validator/index.js";
|
||||
import { getValidationSchema } from "../../schema/index.js";
|
||||
|
||||
let router = express.Router({
|
||||
const router = express.Router({
|
||||
caseSensitive: true,
|
||||
strict: true,
|
||||
mergeParams: true
|
||||
mergeParams: true,
|
||||
});
|
||||
|
||||
/**
|
||||
* /api/nginx/streams
|
||||
*/
|
||||
router
|
||||
.route('/')
|
||||
.options((req, res) => {
|
||||
.route("/")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
.all(jwtdecode()) // preferred so it doesn't apply to nonexistent routes
|
||||
@@ -27,26 +27,28 @@ router
|
||||
* Retrieve all streams
|
||||
*/
|
||||
.get((req, res, next) => {
|
||||
validator({
|
||||
validator(
|
||||
{
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
expand: {
|
||||
$ref: 'common#/properties/expand'
|
||||
$ref: "common#/properties/expand",
|
||||
},
|
||||
query: {
|
||||
$ref: 'common#/properties/query'
|
||||
}
|
||||
}
|
||||
}, {
|
||||
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
|
||||
query: (typeof req.query.query === 'string' ? req.query.query : null)
|
||||
})
|
||||
$ref: "common#/properties/query",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
|
||||
query: typeof req.query.query === "string" ? req.query.query : null,
|
||||
},
|
||||
)
|
||||
.then((data) => {
|
||||
return internalStream.getAll(res.locals.access, data.expand, data.query);
|
||||
})
|
||||
.then((rows) => {
|
||||
res.status(200)
|
||||
.send(rows);
|
||||
res.status(200).send(rows);
|
||||
})
|
||||
.catch(next);
|
||||
})
|
||||
@@ -57,13 +59,12 @@ router
|
||||
* Create a new stream
|
||||
*/
|
||||
.post((req, res, next) => {
|
||||
apiValidator(schema.getValidationSchema('/nginx/streams', 'post'), req.body)
|
||||
apiValidator(getValidationSchema("/nginx/streams", "post"), req.body)
|
||||
.then((payload) => {
|
||||
return internalStream.create(res.locals.access, payload);
|
||||
})
|
||||
.then((result) => {
|
||||
res.status(201)
|
||||
.send(result);
|
||||
res.status(201).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
@@ -74,8 +75,8 @@ router
|
||||
* /api/nginx/streams/123
|
||||
*/
|
||||
router
|
||||
.route('/:stream_id')
|
||||
.options((req, res) => {
|
||||
.route("/:stream_id")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
.all(jwtdecode()) // preferred so it doesn't apply to nonexistent routes
|
||||
@@ -86,30 +87,32 @@ router
|
||||
* Retrieve a specific stream
|
||||
*/
|
||||
.get((req, res, next) => {
|
||||
validator({
|
||||
required: ['stream_id'],
|
||||
validator(
|
||||
{
|
||||
required: ["stream_id"],
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
stream_id: {
|
||||
$ref: 'common#/properties/id'
|
||||
$ref: "common#/properties/id",
|
||||
},
|
||||
expand: {
|
||||
$ref: 'common#/properties/expand'
|
||||
}
|
||||
}
|
||||
}, {
|
||||
$ref: "common#/properties/expand",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
stream_id: req.params.stream_id,
|
||||
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
|
||||
})
|
||||
expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
|
||||
},
|
||||
)
|
||||
.then((data) => {
|
||||
return internalStream.get(res.locals.access, {
|
||||
id: parseInt(data.stream_id, 10),
|
||||
expand: data.expand
|
||||
id: Number.parseInt(data.stream_id, 10),
|
||||
expand: data.expand,
|
||||
});
|
||||
})
|
||||
.then((row) => {
|
||||
res.status(200)
|
||||
.send(row);
|
||||
res.status(200).send(row);
|
||||
})
|
||||
.catch(next);
|
||||
})
|
||||
@@ -120,14 +123,13 @@ router
|
||||
* Update and existing stream
|
||||
*/
|
||||
.put((req, res, next) => {
|
||||
apiValidator(schema.getValidationSchema('/nginx/streams/{streamID}', 'put'), req.body)
|
||||
apiValidator(getValidationSchema("/nginx/streams/{streamID}", "put"), req.body)
|
||||
.then((payload) => {
|
||||
payload.id = parseInt(req.params.stream_id, 10);
|
||||
payload.id = Number.parseInt(req.params.stream_id, 10);
|
||||
return internalStream.update(res.locals.access, payload);
|
||||
})
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
})
|
||||
@@ -138,10 +140,10 @@ router
|
||||
* Update and existing stream
|
||||
*/
|
||||
.delete((req, res, next) => {
|
||||
internalStream.delete(res.locals.access, {id: parseInt(req.params.stream_id, 10)})
|
||||
internalStream
|
||||
.delete(res.locals.access, { id: Number.parseInt(req.params.stream_id, 10) })
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
@@ -152,7 +154,7 @@ router
|
||||
* /api/nginx/streams/123/enable
|
||||
*/
|
||||
router
|
||||
.route('/:host_id/enable')
|
||||
.route("/:host_id/enable")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
@@ -162,10 +164,10 @@ router
|
||||
* POST /api/nginx/streams/123/enable
|
||||
*/
|
||||
.post((req, res, next) => {
|
||||
internalStream.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
|
||||
internalStream
|
||||
.enable(res.locals.access, { id: Number.parseInt(req.params.host_id, 10) })
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
@@ -176,7 +178,7 @@ router
|
||||
* /api/nginx/streams/123/disable
|
||||
*/
|
||||
router
|
||||
.route('/:host_id/disable')
|
||||
.route("/:host_id/disable")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
@@ -186,12 +188,12 @@ router
|
||||
* POST /api/nginx/streams/123/disable
|
||||
*/
|
||||
.post((req, res, next) => {
|
||||
internalStream.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
|
||||
internalStream
|
||||
.disable(res.locals.access, { id: Number.parseInt(req.params.host_id, 10) })
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
|
||||
module.exports = router;
|
||||
export default router;
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
const express = require('express');
|
||||
const jwtdecode = require('../lib/express/jwt-decode');
|
||||
const internalReport = require('../internal/report');
|
||||
import express from "express";
|
||||
import internalReport from "../internal/report.js";
|
||||
import jwtdecode from "../lib/express/jwt-decode.js";
|
||||
|
||||
let router = express.Router({
|
||||
const router = express.Router({
|
||||
caseSensitive: true,
|
||||
strict: true,
|
||||
mergeParams: true
|
||||
mergeParams: true,
|
||||
});
|
||||
|
||||
router
|
||||
.route('/hosts')
|
||||
.route("/hosts")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
@@ -18,12 +18,12 @@ router
|
||||
* GET /reports/hosts
|
||||
*/
|
||||
.get(jwtdecode(), (_, res, next) => {
|
||||
internalReport.getHostsReport(res.locals.access)
|
||||
internalReport
|
||||
.getHostsReport(res.locals.access)
|
||||
.then((data) => {
|
||||
res.status(200)
|
||||
.send(data);
|
||||
res.status(200).send(data);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
|
||||
module.exports = router;
|
||||
export default router;
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
const express = require('express');
|
||||
const schema = require('../schema');
|
||||
const PACKAGE = require('../package.json');
|
||||
import express from "express";
|
||||
import PACKAGE from "../package.json" with { type: "json" };
|
||||
import { getCompiledSchema } from "../schema/index.js";
|
||||
|
||||
const router = express.Router({
|
||||
caseSensitive: true,
|
||||
strict: true,
|
||||
mergeParams: true
|
||||
mergeParams: true,
|
||||
});
|
||||
|
||||
router
|
||||
.route('/')
|
||||
.route("/")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
@@ -18,21 +18,21 @@ router
|
||||
* GET /schema
|
||||
*/
|
||||
.get(async (req, res) => {
|
||||
let swaggerJSON = await schema.getCompiledSchema();
|
||||
const swaggerJSON = await getCompiledSchema();
|
||||
|
||||
let proto = req.protocol;
|
||||
if (typeof req.headers['x-forwarded-proto'] !== 'undefined' && req.headers['x-forwarded-proto']) {
|
||||
proto = req.headers['x-forwarded-proto'];
|
||||
if (typeof req.headers["x-forwarded-proto"] !== "undefined" && req.headers["x-forwarded-proto"]) {
|
||||
proto = req.headers["x-forwarded-proto"];
|
||||
}
|
||||
|
||||
let origin = proto + '://' + req.hostname;
|
||||
if (typeof req.headers.origin !== 'undefined' && req.headers.origin) {
|
||||
let origin = `${proto}://${req.hostname}`;
|
||||
if (typeof req.headers.origin !== "undefined" && req.headers.origin) {
|
||||
origin = req.headers.origin;
|
||||
}
|
||||
|
||||
swaggerJSON.info.version = PACKAGE.version;
|
||||
swaggerJSON.servers[0].url = origin + '/api';
|
||||
swaggerJSON.servers[0].url = `${origin}/api`;
|
||||
res.status(200).send(swaggerJSON);
|
||||
});
|
||||
|
||||
module.exports = router;
|
||||
export default router;
|
||||
|
||||
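
The schema route above also switches from require() to an ESM import of package.json with an import attribute. Recent Node.js releases only load a JSON file as a module when that attribute is present; a minimal sketch, assuming a JSON file exists at the illustrative path below:

// Without `with { type: "json" }` Node.js refuses to load a JSON file as an ES module.
import pkg from "./some-package.json" with { type: "json" };
console.log(pkg.version);
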
@@ -1,21 +1,21 @@
|
||||
const express = require('express');
|
||||
const validator = require('../lib/validator');
|
||||
const jwtdecode = require('../lib/express/jwt-decode');
|
||||
const apiValidator = require('../lib/validator/api');
|
||||
const internalSetting = require('../internal/setting');
|
||||
const schema = require('../schema');
|
||||
import express from "express";
|
||||
import internalSetting from "../internal/setting.js";
|
||||
import jwtdecode from "../lib/express/jwt-decode.js";
|
||||
import apiValidator from "../lib/validator/api.js";
|
||||
import validator from "../lib/validator/index.js";
|
||||
import { getValidationSchema } from "../schema/index.js";
|
||||
|
||||
let router = express.Router({
|
||||
const router = express.Router({
|
||||
caseSensitive: true,
|
||||
strict: true,
|
||||
mergeParams: true
|
||||
mergeParams: true,
|
||||
});
|
||||
|
||||
/**
|
||||
* /api/settings
|
||||
*/
|
||||
router
|
||||
.route('/')
|
||||
.route("/")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
@@ -27,10 +27,10 @@ router
|
||||
* Retrieve all settings
|
||||
*/
|
||||
.get((_, res, next) => {
|
||||
internalSetting.getAll(res.locals.access)
|
||||
internalSetting
|
||||
.getAll(res.locals.access)
|
||||
.then((rows) => {
|
||||
res.status(200)
|
||||
.send(rows);
|
||||
res.status(200).send(rows);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
@@ -41,7 +41,7 @@ router
|
||||
* /api/settings/something
|
||||
*/
|
||||
router
|
||||
.route('/:setting_id')
|
||||
.route("/:setting_id")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
@@ -53,26 +53,28 @@ router
|
||||
* Retrieve a specific setting
|
||||
*/
|
||||
.get((req, res, next) => {
|
||||
validator({
|
||||
required: ['setting_id'],
|
||||
validator(
|
||||
{
|
||||
required: ["setting_id"],
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
setting_id: {
|
||||
type: 'string',
|
||||
minLength: 1
|
||||
}
|
||||
}
|
||||
}, {
|
||||
setting_id: req.params.setting_id
|
||||
})
|
||||
type: "string",
|
||||
minLength: 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
setting_id: req.params.setting_id,
|
||||
},
|
||||
)
|
||||
.then((data) => {
|
||||
return internalSetting.get(res.locals.access, {
|
||||
id: data.setting_id
|
||||
id: data.setting_id,
|
||||
});
|
||||
})
|
||||
.then((row) => {
|
||||
res.status(200)
|
||||
.send(row);
|
||||
res.status(200).send(row);
|
||||
})
|
||||
.catch(next);
|
||||
})
|
||||
@@ -83,16 +85,15 @@ router
|
||||
* Update and existing setting
|
||||
*/
|
||||
.put((req, res, next) => {
|
||||
apiValidator(schema.getValidationSchema('/settings/{settingID}', 'put'), req.body)
|
||||
apiValidator(getValidationSchema("/settings/{settingID}", "put"), req.body)
|
||||
.then((payload) => {
|
||||
payload.id = req.params.setting_id;
|
||||
return internalSetting.update(res.locals.access, payload);
|
||||
})
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
|
||||
module.exports = router;
|
||||
export default router;
|
||||
|
||||
@@ -1,17 +1,17 @@
|
||||
const express = require('express');
|
||||
const jwtdecode = require('../lib/express/jwt-decode');
|
||||
const apiValidator = require('../lib/validator/api');
|
||||
const internalToken = require('../internal/token');
|
||||
const schema = require('../schema');
|
||||
import express from "express";
|
||||
import internalToken from "../internal/token.js";
|
||||
import jwtdecode from "../lib/express/jwt-decode.js";
|
||||
import apiValidator from "../lib/validator/api.js";
|
||||
import { getValidationSchema } from "../schema/index.js";
|
||||
|
||||
let router = express.Router({
|
||||
const router = express.Router({
|
||||
caseSensitive: true,
|
||||
strict: true,
|
||||
mergeParams: true
|
||||
mergeParams: true,
|
||||
});
|
||||
|
||||
router
|
||||
.route('/')
|
||||
.route("/")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
@@ -24,13 +24,13 @@ router
|
||||
* for services like Job board and Worker.
|
||||
*/
|
||||
.get(jwtdecode(), (req, res, next) => {
|
||||
internalToken.getFreshToken(res.locals.access, {
|
||||
expiry: (typeof req.query.expiry !== 'undefined' ? req.query.expiry : null),
|
||||
scope: (typeof req.query.scope !== 'undefined' ? req.query.scope : null)
|
||||
internalToken
|
||||
.getFreshToken(res.locals.access, {
|
||||
expiry: typeof req.query.expiry !== "undefined" ? req.query.expiry : null,
|
||||
scope: typeof req.query.scope !== "undefined" ? req.query.scope : null,
|
||||
})
|
||||
.then((data) => {
|
||||
res.status(200)
|
||||
.send(data);
|
||||
res.status(200).send(data);
|
||||
})
|
||||
.catch(next);
|
||||
})
|
||||
@@ -41,13 +41,12 @@ router
|
||||
* Create a new Token
|
||||
*/
|
||||
.post(async (req, res, next) => {
|
||||
apiValidator(schema.getValidationSchema('/tokens', 'post'), req.body)
|
||||
apiValidator(getValidationSchema("/tokens", "post"), req.body)
|
||||
.then(internalToken.getTokenFromEmail)
|
||||
.then((data) => {
|
||||
res.status(200)
|
||||
.send(data);
|
||||
res.status(200).send(data);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
|
||||
module.exports = router;
|
||||
export default router;
|
||||
|
||||
@@ -1,22 +1,22 @@
|
||||
const express = require('express');
|
||||
const validator = require('../lib/validator');
|
||||
const jwtdecode = require('../lib/express/jwt-decode');
|
||||
const userIdFromMe = require('../lib/express/user-id-from-me');
|
||||
const internalUser = require('../internal/user');
|
||||
const apiValidator = require('../lib/validator/api');
|
||||
const schema = require('../schema');
|
||||
import express from "express";
|
||||
import internalUser from "../internal/user.js";
|
||||
import jwtdecode from "../lib/express/jwt-decode.js";
|
||||
import userIdFromMe from "../lib/express/user-id-from-me.js";
|
||||
import apiValidator from "../lib/validator/api.js";
|
||||
import validator from "../lib/validator/index.js";
|
||||
import { getValidationSchema } from "../schema/index.js";
|
||||
|
||||
let router = express.Router({
|
||||
const router = express.Router({
|
||||
caseSensitive: true,
|
||||
strict: true,
|
||||
mergeParams: true
|
||||
mergeParams: true,
|
||||
});
|
||||
|
||||
/**
|
||||
* /api/users
|
||||
*/
|
||||
router
|
||||
.route('/')
|
||||
.route("/")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
@@ -28,26 +28,28 @@ router
|
||||
* Retrieve all users
|
||||
*/
|
||||
.get((req, res, next) => {
|
||||
validator({
|
||||
validator(
|
||||
{
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
expand: {
|
||||
$ref: 'common#/properties/expand'
|
||||
$ref: "common#/properties/expand",
|
||||
},
|
||||
query: {
|
||||
$ref: 'common#/properties/query'
|
||||
}
|
||||
}
|
||||
}, {
|
||||
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
|
||||
query: (typeof req.query.query === 'string' ? req.query.query : null)
|
||||
})
|
||||
$ref: "common#/properties/query",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
|
||||
query: typeof req.query.query === "string" ? req.query.query : null,
|
||||
},
|
||||
)
|
||||
.then((data) => {
|
||||
return internalUser.getAll(res.locals.access, data.expand, data.query);
|
||||
})
|
||||
.then((users) => {
|
||||
res.status(200)
|
||||
.send(users);
|
||||
res.status(200).send(users);
|
||||
})
|
||||
.catch((err) => {
|
||||
console.log(err);
|
||||
@@ -62,13 +64,12 @@ router
|
||||
* Create a new User
|
||||
*/
|
||||
.post((req, res, next) => {
|
||||
apiValidator(schema.getValidationSchema('/users', 'post'), req.body)
|
||||
apiValidator(getValidationSchema("/users", "post"), req.body)
|
||||
.then((payload) => {
|
||||
return internalUser.create(res.locals.access, payload);
|
||||
})
|
||||
.then((result) => {
|
||||
res.status(201)
|
||||
.send(result);
|
||||
res.status(201).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
@@ -79,7 +80,7 @@ router
|
||||
* /api/users/123
|
||||
*/
|
||||
router
|
||||
.route('/:user_id')
|
||||
.route("/:user_id")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
@@ -92,31 +93,33 @@ router
|
||||
* Retrieve a specific user
|
||||
*/
|
||||
.get((req, res, next) => {
|
||||
validator({
|
||||
required: ['user_id'],
|
||||
validator(
|
||||
{
|
||||
required: ["user_id"],
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
user_id: {
|
||||
$ref: 'common#/properties/id'
|
||||
$ref: "common#/properties/id",
|
||||
},
|
||||
expand: {
|
||||
$ref: 'common#/properties/expand'
|
||||
}
|
||||
}
|
||||
}, {
|
||||
$ref: "common#/properties/expand",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
user_id: req.params.user_id,
|
||||
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
|
||||
})
|
||||
expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
|
||||
},
|
||||
)
|
||||
.then((data) => {
|
||||
return internalUser.get(res.locals.access, {
|
||||
id: data.user_id,
|
||||
expand: data.expand,
|
||||
omit: internalUser.getUserOmisionsByAccess(res.locals.access, data.user_id)
|
||||
omit: internalUser.getUserOmisionsByAccess(res.locals.access, data.user_id),
|
||||
});
|
||||
})
|
||||
.then((user) => {
|
||||
res.status(200)
|
||||
.send(user);
|
||||
res.status(200).send(user);
|
||||
})
|
||||
.catch((err) => {
|
||||
console.log(err);
|
||||
@@ -130,14 +133,13 @@ router
|
||||
* Update and existing user
|
||||
*/
|
||||
.put((req, res, next) => {
|
||||
apiValidator(schema.getValidationSchema('/users/{userID}', 'put'), req.body)
|
||||
apiValidator(getValidationSchema("/users/{userID}", "put"), req.body)
|
||||
.then((payload) => {
|
||||
payload.id = req.params.user_id;
|
||||
return internalUser.update(res.locals.access, payload);
|
||||
})
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
})
|
||||
@@ -148,10 +150,10 @@ router
|
||||
* Update and existing user
|
||||
*/
|
||||
.delete((req, res, next) => {
|
||||
internalUser.delete(res.locals.access, {id: req.params.user_id})
|
||||
internalUser
|
||||
.delete(res.locals.access, { id: req.params.user_id })
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
@@ -162,8 +164,8 @@ router
|
||||
* /api/users/123/auth
|
||||
*/
|
||||
router
|
||||
.route('/:user_id/auth')
|
||||
.options((req, res) => {
|
||||
.route("/:user_id/auth")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
.all(jwtdecode())
|
||||
@@ -175,14 +177,13 @@ router
|
||||
* Update password for a user
|
||||
*/
|
||||
.put((req, res, next) => {
|
||||
apiValidator(schema.getValidationSchema('/users/{userID}/auth', 'put'), req.body)
|
||||
apiValidator(getValidationSchema("/users/{userID}/auth", "put"), req.body)
|
||||
.then((payload) => {
|
||||
payload.id = req.params.user_id;
|
||||
return internalUser.setPassword(res.locals.access, payload);
|
||||
})
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
@@ -193,8 +194,8 @@ router
|
||||
* /api/users/123/permissions
|
||||
*/
|
||||
router
|
||||
.route('/:user_id/permissions')
|
||||
.options((req, res) => {
|
||||
.route("/:user_id/permissions")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
.all(jwtdecode())
|
||||
@@ -206,14 +207,13 @@ router
|
||||
* Set some or all permissions for a user
|
||||
*/
|
||||
.put((req, res, next) => {
|
||||
apiValidator(schema.getValidationSchema('/users/{userID}/permissions', 'put'), req.body)
|
||||
apiValidator(getValidationSchema("/users/{userID}/permissions", "put"), req.body)
|
||||
.then((payload) => {
|
||||
payload.id = req.params.user_id;
|
||||
return internalUser.setPermissions(res.locals.access, payload);
|
||||
})
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
@@ -224,7 +224,7 @@ router
|
||||
* /api/users/123/login
|
||||
*/
|
||||
router
|
||||
.route('/:user_id/login')
|
||||
.route("/:user_id/login")
|
||||
.options((_, res) => {
|
||||
res.sendStatus(204);
|
||||
})
|
||||
@@ -236,12 +236,12 @@ router
|
||||
* Log in as a user
|
||||
*/
|
||||
.post((req, res, next) => {
|
||||
internalUser.loginAs(res.locals.access, {id: parseInt(req.params.user_id, 10)})
|
||||
internalUser
|
||||
.loginAs(res.locals.access, { id: Number.parseInt(req.params.user_id, 10) })
|
||||
.then((result) => {
|
||||
res.status(200)
|
||||
.send(result);
|
||||
res.status(200).send(result);
|
||||
})
|
||||
.catch(next);
|
||||
});
|
||||
|
||||
module.exports = router;
|
||||
export default router;
|
||||
|
||||
@@ -1,23 +1,26 @@
const refParser = require('@apidevtools/json-schema-ref-parser');
import { dirname } from "node:path";
import { fileURLToPath } from "node:url";
import $RefParser from "@apidevtools/json-schema-ref-parser";

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

let compiledSchema = null;

module.exports = {

/**
/**
* Compiles the schema, by dereferencing it, only once
* and returns the memory cached value
*/
getCompiledSchema: async () => {
const getCompiledSchema = async () => {
if (compiledSchema === null) {
compiledSchema = await refParser.dereference(__dirname + '/swagger.json', {
compiledSchema = await $RefParser.dereference(`${__dirname}/swagger.json`, {
mutateInputSchema: false,
});
}
return compiledSchema;
},
};

/**
/**
* Scans the schema for the validation schema for the given path and method
* and returns it.
*
@@ -25,17 +28,19 @@ module.exports = {
* @param {string} method
* @returns string|null
*/
getValidationSchema: (path, method) => {
if (compiledSchema !== null &&
typeof compiledSchema.paths[path] !== 'undefined' &&
typeof compiledSchema.paths[path][method] !== 'undefined' &&
typeof compiledSchema.paths[path][method].requestBody !== 'undefined' &&
typeof compiledSchema.paths[path][method].requestBody.content !== 'undefined' &&
typeof compiledSchema.paths[path][method].requestBody.content['application/json'] !== 'undefined' &&
typeof compiledSchema.paths[path][method].requestBody.content['application/json'].schema !== 'undefined'
const getValidationSchema = (path, method) => {
if (
compiledSchema !== null &&
typeof compiledSchema.paths[path] !== "undefined" &&
typeof compiledSchema.paths[path][method] !== "undefined" &&
typeof compiledSchema.paths[path][method].requestBody !== "undefined" &&
typeof compiledSchema.paths[path][method].requestBody.content !== "undefined" &&
typeof compiledSchema.paths[path][method].requestBody.content["application/json"] !== "undefined" &&
typeof compiledSchema.paths[path][method].requestBody.content["application/json"].schema !== "undefined"
) {
return compiledSchema.paths[path][method].requestBody.content['application/json'].schema;
return compiledSchema.paths[path][method].requestBody.content["application/json"].schema;
}
return null;
}
};

export { getCompiledSchema, getValidationSchema };

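
The rewritten schema module above keeps the original lazy-compile behaviour: the swagger document is dereferenced once, on the first call, and every later caller receives the same cached object. A minimal, self-contained sketch of that caching pattern (names here are illustrative, not from the repository):

let cached = null;

const getOnce = async (build) => {
	if (cached === null) {
		cached = await build(); // the expensive work runs only on the first call
	}
	return cached;
};

// The second call reuses the object produced by the first build.
const a = await getOnce(async () => ({ ready: true }));
const b = await getOnce(async () => ({ ready: false }));
console.log(a === b); // true
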
@@ -10,10 +10,10 @@
|
||||
// docker exec npm_core /command/s6-setuidgid 1000:1000 bash -c "/app/scripts/install-certbot-plugins"
|
||||
//
|
||||
|
||||
const dnsPlugins = require('../global/certbot-dns-plugins.json');
|
||||
const certbot = require('../lib/certbot');
|
||||
const logger = require('../logger').certbot;
|
||||
const batchflow = require('batchflow');
|
||||
import dnsPlugins from "../global/certbot-dns-plugins.json" with { type: "json" };
|
||||
import { installPlugin } from "../lib/certbot.js";
|
||||
import { certbot as logger } from "../logger.js";
|
||||
import batchflow from "batchflow";
|
||||
|
||||
let hasErrors = false;
|
||||
let failingPlugins = [];
|
||||
@@ -25,7 +25,7 @@ if (process.argv.length > 2) {
|
||||
|
||||
batchflow(pluginKeys).sequential()
|
||||
.each((i, pluginKey, next) => {
|
||||
certbot.installPlugin(pluginKey)
|
||||
installPlugin(pluginKey)
|
||||
.then(() => {
|
||||
next();
|
||||
})
|
||||
|
||||
backend/setup.js
@@ -1,12 +1,12 @@
|
||||
const config = require('./lib/config');
|
||||
const logger = require('./logger').setup;
|
||||
const certificateModel = require('./models/certificate');
|
||||
const userModel = require('./models/user');
|
||||
const userPermissionModel = require('./models/user_permission');
|
||||
const utils = require('./lib/utils');
|
||||
const authModel = require('./models/auth');
|
||||
const settingModel = require('./models/setting');
|
||||
const certbot = require('./lib/certbot');
|
||||
import { installPlugins } from "./lib/certbot.js";
|
||||
import utils from "./lib/utils.js";
|
||||
import { setup as logger } from "./logger.js";
|
||||
import authModel from "./models/auth.js";
|
||||
import certificateModel from "./models/certificate.js";
|
||||
import settingModel from "./models/setting.js";
|
||||
import userModel from "./models/user.js";
|
||||
import userPermissionModel from "./models/user_permission.js";
|
||||
|
||||
/**
|
||||
* Creates a default admin users if one doesn't already exist in the database
|
||||
*
|
||||
@@ -15,24 +15,24 @@ const certbot = require('./lib/certbot');
|
||||
const setupDefaultUser = () => {
|
||||
return userModel
|
||||
.query()
|
||||
.select('id', )
|
||||
.where('is_deleted', 0)
|
||||
.select("id")
|
||||
.where("is_deleted", 0)
|
||||
.first()
|
||||
.then((row) => {
|
||||
if (!row || !row.id) {
|
||||
// Create a new user and set password
|
||||
const email = (process.env.INITIAL_ADMIN_EMAIL || 'admin@example.com').toLowerCase();
|
||||
const password = process.env.INITIAL_ADMIN_PASSWORD || 'changeme';
|
||||
const password = process.env.INITIAL_ADMIN_PASSWORD || "changeme";
|
||||
|
||||
logger.info(`Creating a new user: ${email} with password: ${password}`);
|
||||
|
||||
const data = {
|
||||
is_deleted: 0,
|
||||
email: email,
|
||||
name: 'Administrator',
|
||||
nickname: 'Admin',
|
||||
avatar: '',
|
||||
roles: ['admin'],
|
||||
name: "Administrator",
|
||||
nickname: "Admin",
|
||||
avatar: "",
|
||||
roles: ["admin"],
|
||||
};
|
||||
|
||||
return userModel
|
||||
@@ -43,29 +43,28 @@ const setupDefaultUser = () => {
|
||||
.query()
|
||||
.insert({
|
||||
user_id: user.id,
|
||||
type: 'password',
|
||||
type: "password",
|
||||
secret: password,
|
||||
meta: {},
|
||||
})
|
||||
.then(() => {
|
||||
return userPermissionModel.query().insert({
|
||||
user_id: user.id,
|
||||
visibility: 'all',
|
||||
proxy_hosts: 'manage',
|
||||
redirection_hosts: 'manage',
|
||||
dead_hosts: 'manage',
|
||||
streams: 'manage',
|
||||
access_lists: 'manage',
|
||||
certificates: 'manage',
|
||||
visibility: "all",
|
||||
proxy_hosts: "manage",
|
||||
redirection_hosts: "manage",
|
||||
dead_hosts: "manage",
|
||||
streams: "manage",
|
||||
access_lists: "manage",
|
||||
certificates: "manage",
|
||||
});
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('Initial admin setup completed');
|
||||
logger.info("Initial admin setup completed");
|
||||
});
|
||||
} else if (config.debug()) {
|
||||
logger.info('Admin user setup not required');
|
||||
}
|
||||
logger.debug("Admin user setup not required");
|
||||
});
|
||||
};
|
||||
|
||||
@@ -77,27 +76,25 @@ const setupDefaultUser = () => {
|
||||
const setupDefaultSettings = () => {
|
||||
return settingModel
|
||||
.query()
|
||||
.select('id')
|
||||
.where({id: 'default-site'})
|
||||
.select("id")
|
||||
.where({ id: "default-site" })
|
||||
.first()
|
||||
.then((row) => {
|
||||
if (!row || !row.id) {
|
||||
settingModel
|
||||
.query()
|
||||
.insert({
|
||||
id: 'default-site',
|
||||
name: 'Default Site',
|
||||
description: 'What to show when Nginx is hit with an unknown Host',
|
||||
value: 'congratulations',
|
||||
id: "default-site",
|
||||
name: "Default Site",
|
||||
description: "What to show when Nginx is hit with an unknown Host",
|
||||
value: "congratulations",
|
||||
meta: {},
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('Default settings added');
|
||||
logger.info("Default settings added");
|
||||
});
|
||||
}
|
||||
if (config.debug()) {
|
||||
logger.info('Default setting setup not required');
|
||||
}
|
||||
logger.debug("Default setting setup not required");
|
||||
});
|
||||
};
|
||||
|
||||
@@ -109,10 +106,10 @@ const setupDefaultSettings = () => {
|
||||
const setupCertbotPlugins = () => {
|
||||
return certificateModel
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.andWhere('provider', 'letsencrypt')
|
||||
.where("is_deleted", 0)
|
||||
.andWhere("provider", "letsencrypt")
|
||||
.then((certificates) => {
|
||||
if (certificates && certificates.length) {
|
||||
if (certificates?.length) {
|
||||
const plugins = [];
|
||||
const promises = [];
|
||||
|
||||
@@ -125,18 +122,19 @@ const setupCertbotPlugins = () => {
|
||||
// Make sure credentials file exists
|
||||
const credentials_loc = `/etc/letsencrypt/credentials/credentials-${certificate.id}`;
|
||||
// Escape single quotes and backslashes
|
||||
const escapedCredentials = certificate.meta.dns_provider_credentials.replaceAll('\'', '\\\'').replaceAll('\\', '\\\\');
|
||||
const escapedCredentials = certificate.meta.dns_provider_credentials
|
||||
.replaceAll("'", "\\'")
|
||||
.replaceAll("\\", "\\\\");
|
||||
const credentials_cmd = `[ -f '${credentials_loc}' ] || { mkdir -p /etc/letsencrypt/credentials 2> /dev/null; echo '${escapedCredentials}' > '${credentials_loc}' && chmod 600 '${credentials_loc}'; }`;
|
||||
promises.push(utils.exec(credentials_cmd));
|
||||
}
|
||||
return true;
|
||||
});
|
||||
|
||||
return certbot.installPlugins(plugins)
|
||||
.then(() => {
|
||||
return installPlugins(plugins).then(() => {
|
||||
if (promises.length) {
|
||||
return Promise.all(promises)
|
||||
.then(() => {
|
||||
logger.info(`Added Certbot plugins ${plugins.join(', ')}`);
|
||||
return Promise.all(promises).then(() => {
|
||||
logger.info(`Added Certbot plugins ${plugins.join(", ")}`);
|
||||
});
|
||||
}
|
||||
});
|
||||
@@ -144,7 +142,6 @@ const setupCertbotPlugins = () => {
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Starts a timer to call run the logrotation binary every two days
|
||||
* @returns {Promise}
|
||||
@@ -154,18 +151,17 @@ const setupLogrotation = () => {
|
||||
|
||||
const runLogrotate = async () => {
|
||||
try {
|
||||
await utils.exec('logrotate /etc/logrotate.d/nginx-proxy-manager');
|
||||
logger.info('Logrotate completed.');
|
||||
} catch (e) { logger.warn(e); }
|
||||
await utils.exec("logrotate /etc/logrotate.d/nginx-proxy-manager");
|
||||
logger.info("Logrotate completed.");
|
||||
} catch (e) {
|
||||
logger.warn(e);
|
||||
}
|
||||
};
|
||||
|
||||
logger.info('Logrotate Timer initialized');
|
||||
logger.info("Logrotate Timer initialized");
|
||||
setInterval(runLogrotate, intervalTimeout);
|
||||
// And do this now as well
|
||||
return runLogrotate();
|
||||
};
|
||||
|
||||
module.exports = () => setupDefaultUser()
|
||||
.then(setupDefaultSettings)
|
||||
.then(setupCertbotPlugins)
|
||||
.then(setupLogrotation);
|
||||
export default () => setupDefaultUser().then(setupDefaultSettings).then(setupCertbotPlugins).then(setupLogrotation);
|
||||
|
||||
backend/validate-schema.js (Normal file → Executable file)
@@ -1,16 +1,19 @@
const SwaggerParser = require('@apidevtools/swagger-parser');
const chalk = require('chalk');
const schema = require('./schema');
#!/usr/bin/node

import SwaggerParser from "@apidevtools/swagger-parser";
import chalk from "chalk";
import { getCompiledSchema } from "./schema/index.js";

const log = console.log;

schema.getCompiledSchema().then(async (swaggerJSON) => {
getCompiledSchema().then(async (swaggerJSON) => {
try {
const api = await SwaggerParser.validate(swaggerJSON);
console.log('API name: %s, Version: %s', api.info.title, api.info.version);
log(chalk.green('❯ Schema is valid'));
console.log("API name: %s, Version: %s", api.info.title, api.info.version);
log(chalk.green("❯ Schema is valid"));
} catch (e) {
console.error(e);
log(chalk.red('❯', e.message), '\n');
log(chalk.red("❯", e.message), "\n");
process.exit(1);
}
});

backend/yarn.lock (file diff suppressed because it is too large)
@@ -12,6 +12,7 @@ server {

location /api/ {
add_header X-Served-By $host;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header X-Forwarded-Scheme $scheme;
proxy_set_header X-Forwarded-Proto $scheme;
@@ -23,7 +24,14 @@
}

location / {
index index.html;
try_files $uri $uri.html $uri/ /index.html;
add_header X-Served-By $host;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "Upgrade";
proxy_set_header X-Forwarded-Scheme $scheme;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-For $remote_addr;
proxy_pass http://127.0.0.1:5173;
}
}

@@ -15,7 +15,7 @@ if [ "$DEVELOPMENT" = 'true' ]; then

log_info 'Starting frontend ...'
s6-setuidgid "$PUID:$PGID" yarn install
exec s6-setuidgid "$PUID:$PGID" yarn watch
exec s6-setuidgid "$PUID:$PGID" yarn dev
else
exit 0
fi

@@ -1,17 +0,0 @@
{
"presets": [
[
"env",
{
"targets": {
"browsers": [
"Chrome >= 65"
]
},
"debug": false,
"modules": false,
"useBuiltIns": "usage"
}
]
]
}
frontend/.gitignore (vendored)
@@ -1,4 +1,22 @@
dist
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*

node_modules
webpack_stats.html
yarn-error.log
dist
dist-ssr
*.local

# Editor directories and files
.idea
.DS_Store
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?

frontend/biome.json (new file)
@@ -0,0 +1,91 @@
|
||||
{
|
||||
"$schema": "https://biomejs.dev/schemas/2.2.2/schema.json",
|
||||
"vcs": {
|
||||
"enabled": true,
|
||||
"clientKind": "git",
|
||||
"useIgnoreFile": true
|
||||
},
|
||||
"files": {
|
||||
"ignoreUnknown": false,
|
||||
"includes": [
|
||||
"**/*.ts",
|
||||
"**/*.tsx",
|
||||
"**/*.js",
|
||||
"**/*.jsx",
|
||||
"!**/dist/**/*"
|
||||
]
|
||||
},
|
||||
"formatter": {
|
||||
"enabled": true,
|
||||
"indentStyle": "tab",
|
||||
"indentWidth": 4,
|
||||
"lineWidth": 120,
|
||||
"formatWithErrors": true
|
||||
},
|
||||
"assist": {
|
||||
"actions": {
|
||||
"source": {
|
||||
"organizeImports": {
|
||||
"level": "on",
|
||||
"options": {
|
||||
"groups": [
|
||||
":BUN:",
|
||||
":NODE:",
|
||||
[
|
||||
"npm:*",
|
||||
"npm:*/**"
|
||||
],
|
||||
":PACKAGE_WITH_PROTOCOL:",
|
||||
":URL:",
|
||||
":PACKAGE:",
|
||||
[
|
||||
"/src/*",
|
||||
"/src/**"
|
||||
],
|
||||
[
|
||||
"/**"
|
||||
],
|
||||
[
|
||||
"#*",
|
||||
"#*/**"
|
||||
],
|
||||
":PATH:"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"linter": {
|
||||
"enabled": true,
|
||||
"rules": {
|
||||
"recommended": true,
|
||||
"correctness": {
|
||||
"useUniqueElementIds": "off"
|
||||
},
|
||||
"suspicious": {
|
||||
"noExplicitAny": "off"
|
||||
},
|
||||
"performance": {
|
||||
"noDelete": "off"
|
||||
},
|
||||
"nursery": "off",
|
||||
"a11y": {
|
||||
"useSemanticElements": "off",
|
||||
"useValidAnchor": "off"
|
||||
},
|
||||
"style": {
|
||||
"noParameterAssign": "error",
|
||||
"useAsConstAssertion": "error",
|
||||
"useDefaultParameterLast": "error",
|
||||
"useEnumInitializers": "error",
|
||||
"useSelfClosingElements": "error",
|
||||
"useSingleVarDeclarator": "error",
|
||||
"noUnusedTemplateLiteral": "error",
|
||||
"useNumberNamespace": "error",
|
||||
"noInferrableTypes": "error",
|
||||
"noUselessElse": "error"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
frontend/check-locales.cjs (new executable file)
@@ -0,0 +1,172 @@
|
||||
#!/usr/bin/env node

// This file does a few things to ensure that the Locales are present and valid:
// - Ensures that the name of the locale exists in the language list
// - Ensures that each locale contains the translations used in the application
// - Ensures that there are no unused translations in the locale files
// - Also checks the error messages returned by the backend

const allLocales = [
["en", "en-US"],
["de", "de-DE"],
["fa", "fa-IR"],
];

const ignoreUnused = [
/^capability\..*$/,
/^status\..*$/,
/^type\..*$/,
];

const { spawnSync } = require("child_process");
const fs = require("fs");

const tmp = require("tmp");

// Parse backend errors
const BACKEND_ERRORS_FILE = "../backend/internal/errors/errors.go";
const BACKEND_ERRORS = [];
/*
try {
const backendErrorsContent = fs.readFileSync(BACKEND_ERRORS_FILE, "utf8");
const backendErrorsContentRes = [
...backendErrorsContent.matchAll(/(?:errors|eris)\.New\("([^"]+)"\)/g),
];
backendErrorsContentRes.map((item) => {
BACKEND_ERRORS.push("error." + item[1]);
return null;
});
} catch (err) {
console.log("\x1b[31m%s\x1b[0m", err);
process.exit(1);
}
*/

// get all translations used in frontend code
const tmpobj = tmp.fileSync({ postfix: ".json" });
spawnSync("yarn", ["locale-extract", "--out-file", tmpobj.name]);

const allLocalesInProject = require(tmpobj.name);

// get list of language names and locales
const langList = require("./src/locale/src/lang-list.json");

// store a list of all validation errors
const allErrors = [];
const allWarnings = [];
const allKeys = [];

const checkLangList = (fullCode) => {
const key = "locale-" + fullCode;
if (typeof langList[key] === "undefined") {
allErrors.push(
"ERROR: `" + key + "` language does not exist in lang-list.json",
);
}
};

const compareLocale = (locale) => {
const projectLocaleKeys = Object.keys(allLocalesInProject);
// Check that locale contains the items used in the codebase
projectLocaleKeys.map((key) => {
if (typeof locale.data[key] === "undefined") {
allErrors.push(
"ERROR: `" + locale[0] + "` does not contain item: `" + key + "`",
);
}
return null;
});
// Check that locale contains all error.* items
BACKEND_ERRORS.forEach((key) => {
if (typeof locale.data[key] === "undefined") {
allErrors.push(
"ERROR: `" + locale[0] + "` does not contain item: `" + key + "`",
);
}
return null;
});

// Check that locale does not contain items not used in the codebase
const localeKeys = Object.keys(locale.data);
localeKeys.map((key) => {
let ignored = false;
ignoreUnused.map((regex) => {
if (key.match(regex)) {
ignored = true;
}
return null;
});

if (!ignored && typeof allLocalesInProject[key] === "undefined") {
// ensure this key doesn't exist in the backend errors either
if (!BACKEND_ERRORS.includes(key)) {
allErrors.push(
"ERROR: `" + locale[0] + "` contains unused item: `" + key + "`",
);
}
}

// Add this key to allKeys
if (allKeys.indexOf(key) === -1) {
allKeys.push(key);
}
return null;
});
};

// Checks for any keys missing from this locale, that
// have been defined in any other locales
const checkForMissing = (locale) => {
allKeys.forEach((key) => {
if (typeof locale.data[key] === "undefined") {
let ignored = false;
ignoreMissing.map((regex) => {
if (key.match(regex)) {
ignored = true;
}
return null;
});

if (!ignored) {
allWarnings.push(
"WARN: `" + locale[0] + "` does not contain item: `" + key + "`",
);
}
}
return null;
});
};

// Load all locale data
allLocales.map((locale, idx) => {
checkLangList(locale[1]);
allLocales[idx].data = require("./src/locale/src/" + locale[0] + ".json");
return null;
});

// Verify all locale data
allLocales.map((locale) => {
compareLocale(locale);
checkForMissing(locale);
return null;
});

if (allErrors.length) {
allErrors.map((err) => {
console.log("\x1b[31m%s\x1b[0m", err);
return null;
});
}
if (allWarnings.length) {
allWarnings.map((err) => {
console.log("\x1b[33m%s\x1b[0m", err);
return null;
});
}

if (allErrors.length) {
process.exit(1);
}

console.log("\x1b[32m%s\x1b[0m", "Locale check passed");
process.exit(0);
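
For illustration only (the keys below are hypothetical, not taken from this diff): a minimal sketch of the data shapes the checker above works against.

// Hypothetical sketch of the inputs check-locales.cjs validates.
// lang-list.json must contain a "locale-<fullCode>" entry for every locale in allLocales:
const langListExample = { "locale-en-US": "English", "locale-de-DE": "Deutsch" };
// Each <code>.json locale file maps message ids (as extracted by "yarn locale-extract")
// to translated strings:
const enExample = {
	"login.title": "Sign in",
	"error.invalid-auth": "Invalid email or password",
};
// A key present in en.json but never extracted from the frontend code is reported as an
// unused item, unless it matches one of the ignoreUnused patterns (capability.*, status.*, type.*).
console.log(Object.keys(langListExample), Object.keys(enExample));
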
@@ -1 +0,0 @@
../node_modules/tabler-ui/dist/assets/fonts/feather
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Some files were not shown because too many files have changed in this diff.