Mirror of https://github.com/NginxProxyManager/nginx-proxy-manager.git (synced 2025-09-14 10:52:34 +00:00)
Convert backend to ESM

- About 5 years overdue
- Remove eslint, use biomejs instead
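
Every hunk below applies the same mechanical conversion, so it helps to see the pattern once on its own. A minimal sketch, condensed from the hunks that follow; the doSomething name is only a placeholder and not part of the commit:

// Illustrative sketch of the CommonJS-to-ESM pattern used throughout this commit.
// Before: const logger = require('../logger').access;
//         const roleSchema = require('./access/roles.json');
//         module.exports = doSomething;
import { dirname } from "node:path";
import { fileURLToPath } from "node:url";
import { access as logger } from "../logger.js";
import roleSchema from "./access/roles.json" with { type: "json" };

// __dirname does not exist in ES modules, so files that need a path rebuild it:
const __dirname = dirname(fileURLToPath(import.meta.url));

// module.exports becomes a default (or named) export:
export default function doSomething() {
	logger.info("loaded roles schema from", __dirname, Object.keys(roleSchema).length);
}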
@@ -4,28 +4,32 @@
* "scope" in this file means "where did this token come from and what is using it", so 99% of the time
* the "scope" is going to be "user" because it would be a user token. This is not to be confused with
* the "role" which could be "user" or "admin". The scope in fact, could be "worker" or anything else.
*
*
*/

const _ = require('lodash');
const logger = require('../logger').access;
const Ajv = require('ajv/dist/2020');
const error = require('./error');
const userModel = require('../models/user');
const proxyHostModel = require('../models/proxy_host');
const TokenModel = require('../models/token');
const roleSchema = require('./access/roles.json');
const permsSchema = require('./access/permissions.json');
import fs from "node:fs";
import { dirname } from "node:path";
import { fileURLToPath } from "node:url";
import Ajv from "ajv/dist/2020.js";
import _ from "lodash";
import { access as logger } from "../logger.js";
import proxyHostModel from "../models/proxy_host.js";
import TokenModel from "../models/token.js";
import userModel from "../models/user.js";
import permsSchema from "./access/permissions.json" with { type: "json" };
import roleSchema from "./access/roles.json" with { type: "json" };
import errs from "./error.js";

module.exports = function (token_string) {
let Token = new TokenModel();
let token_data = null;
let initialised = false;
let object_cache = {};
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

export default function (token_string) {
const Token = TokenModel();
let token_data = null;
let initialised = false;
const object_cache = {};
let allow_internal_access = false;
let user_roles = [];
let permissions = {};
let user_roles = [];
let permissions = {};

/**
* Loads the Token object from the token string
@@ -37,10 +41,10 @@ module.exports = function (token_string) {
if (initialised) {
resolve();
} else if (!token_string) {
reject(new error.PermissionError('Permission Denied'));
reject(new errs.PermissionError("Permission Denied"));
} else {
resolve(Token.load(token_string)
.then((data) => {
resolve(
Token.load(token_string).then((data) => {
token_data = data;

// At this point we need to load the user from the DB and make sure they:
@@ -48,21 +52,25 @@ module.exports = function (token_string) {
// - still have the appropriate scopes for this token
// This is only required when the User ID is supplied or if the token scope has `user`

if (token_data.attrs.id || (typeof token_data.scope !== 'undefined' && _.indexOf(token_data.scope, 'user') !== -1)) {
if (
token_data.attrs.id ||
(typeof token_data.scope !== "undefined" &&
_.indexOf(token_data.scope, "user") !== -1)
) {
// Has token user id or token user scope
return userModel
.query()
.where('id', token_data.attrs.id)
.andWhere('is_deleted', 0)
.andWhere('is_disabled', 0)
.allowGraph('[permissions]')
.withGraphFetched('[permissions]')
.where("id", token_data.attrs.id)
.andWhere("is_deleted", 0)
.andWhere("is_disabled", 0)
.allowGraph("[permissions]")
.withGraphFetched("[permissions]")
.first()
.then((user) => {
if (user) {
// make sure user has all scopes of the token
// The `user` role is not added against the user row, so we have to just add it here to get past this check.
user.roles.push('user');
user.roles.push("user");

let is_ok = true;
_.forEach(token_data.scope, (scope_item) => {
@@ -72,21 +80,19 @@ module.exports = function (token_string) {
});

if (!is_ok) {
throw new error.AuthError('Invalid token scope for User');
} else {
initialised = true;
user_roles = user.roles;
permissions = user.permissions;
throw new errs.AuthError("Invalid token scope for User");
}

initialised = true;
user_roles = user.roles;
permissions = user.permissions;
} else {
throw new error.AuthError('User cannot be loaded for Token');
throw new errs.AuthError("User cannot be loaded for Token");
}
});
} else {
initialised = true;
}
}));
initialised = true;
}),
);
}
});
};
@@ -101,53 +107,55 @@ module.exports = function (token_string) {
*/
this.loadObjects = (object_type) => {
return new Promise((resolve, reject) => {
if (Token.hasScope('user')) {
if (typeof token_data.attrs.id === 'undefined' || !token_data.attrs.id) {
reject(new error.AuthError('User Token supplied without a User ID'));
if (Token.hasScope("user")) {
if (
typeof token_data.attrs.id === "undefined" ||
!token_data.attrs.id
) {
reject(new errs.AuthError("User Token supplied without a User ID"));
} else {
let token_user_id = token_data.attrs.id ? token_data.attrs.id : 0;
const token_user_id = token_data.attrs.id ? token_data.attrs.id : 0;
let query;

if (typeof object_cache[object_type] === 'undefined') {
if (typeof object_cache[object_type] === "undefined") {
switch (object_type) {

// USERS - should only return yourself
case 'users':
resolve(token_user_id ? [token_user_id] : []);
break;
// USERS - should only return yourself
case "users":
resolve(token_user_id ? [token_user_id] : []);
break;

// Proxy Hosts
case 'proxy_hosts':
query = proxyHostModel
.query()
.select('id')
.andWhere('is_deleted', 0);
case "proxy_hosts":
query = proxyHostModel
.query()
.select("id")
.andWhere("is_deleted", 0);

if (permissions.visibility === 'user') {
query.andWhere('owner_user_id', token_user_id);
}
if (permissions.visibility === "user") {
query.andWhere("owner_user_id", token_user_id);
}

resolve(query
.then((rows) => {
let result = [];
_.forEach(rows, (rule_row) => {
result.push(rule_row.id);
});
resolve(
query.then((rows) => {
const result = [];
_.forEach(rows, (rule_row) => {
result.push(rule_row.id);
});

// enum should not have less than 1 item
if (!result.length) {
result.push(0);
}
// enum should not have less than 1 item
if (!result.length) {
result.push(0);
}

return result;
})
);
break;
return result;
}),
);
break;

// DEFAULT: null
default:
resolve(null);
break;
default:
resolve(null);
break;
}
} else {
resolve(object_cache[object_type]);
@@ -156,11 +164,10 @@ module.exports = function (token_string) {
} else {
resolve(null);
}
})
.then((objects) => {
object_cache[object_type] = objects;
return objects;
});
}).then((objects) => {
object_cache[object_type] = objects;
return objects;
});
};

/**
@@ -170,50 +177,48 @@ module.exports = function (token_string) {
* @returns {Object}
*/
this.getObjectSchema = (permission_label) => {
let base_object_type = permission_label.split(':').shift();
const base_object_type = permission_label.split(":").shift();

let schema = {
$id: 'objects',
description: 'Actor Properties',
type: 'object',
const schema = {
$id: "objects",
description: "Actor Properties",
type: "object",
additionalProperties: false,
properties: {
properties: {
user_id: {
anyOf: [
{
type: 'number',
enum: [Token.get('attrs').id]
}
]
type: "number",
enum: [Token.get("attrs").id],
},
],
},
scope: {
type: 'string',
pattern: '^' + Token.get('scope') + '$'
}
}
type: "string",
pattern: `^${Token.get("scope")}$`,
},
},
};

return this.loadObjects(base_object_type)
.then((object_result) => {
if (typeof object_result === 'object' && object_result !== null) {
schema.properties[base_object_type] = {
type: 'number',
enum: object_result,
minimum: 1
};
} else {
schema.properties[base_object_type] = {
type: 'number',
minimum: 1
};
}
return this.loadObjects(base_object_type).then((object_result) => {
if (typeof object_result === "object" && object_result !== null) {
schema.properties[base_object_type] = {
type: "number",
enum: object_result,
minimum: 1,
};
} else {
schema.properties[base_object_type] = {
type: "number",
minimum: 1,
};
}

return schema;
});
return schema;
});
};

return {

token: Token,

/**
@@ -222,7 +227,7 @@ module.exports = function (token_string) {
* @returns {Promise}
*/
load: (allow_internal) => {
return new Promise(function (resolve/*, reject*/) {
return new Promise((resolve /*, reject*/) => {
if (token_string) {
resolve(Token.load(token_string));
} else {
@@ -240,68 +245,60 @@ module.exports = function (token_string) {
* @param {*} [data]
* @returns {Promise}
*/
can: (permission, data) => {
can: async (permission, data) => {
if (allow_internal_access === true) {
return Promise.resolve(true);
//return true;
} else {
return this.init()
.then(() => {
// Initialised, token decoded ok
return this.getObjectSchema(permission)
.then((objectSchema) => {
const data_schema = {
[permission]: {
data: data,
scope: Token.get('scope'),
roles: user_roles,
permission_visibility: permissions.visibility,
permission_proxy_hosts: permissions.proxy_hosts,
permission_redirection_hosts: permissions.redirection_hosts,
permission_dead_hosts: permissions.dead_hosts,
permission_streams: permissions.streams,
permission_access_lists: permissions.access_lists,
permission_certificates: permissions.certificates
}
};

let permissionSchema = {
$async: true,
$id: 'permissions',
type: 'object',
additionalProperties: false,
properties: {}
};

permissionSchema.properties[permission] = require('./access/' + permission.replace(/:/gim, '-') + '.json');

const ajv = new Ajv({
verbose: true,
allErrors: true,
breakOnError: true,
coerceTypes: true,
schemas: [
roleSchema,
permsSchema,
objectSchema,
permissionSchema
]
});

return ajv.validate('permissions', data_schema)
.then(() => {
return data_schema[permission];
});
});
})
.catch((err) => {
err.permission = permission;
err.permission_data = data;
logger.error(permission, data, err.message);

throw new error.PermissionError('Permission Denied', err);
});
return true;
}
}

try {
await this.init();
const objectSchema = await this.getObjectSchema(permission);

const dataSchema = {
[permission]: {
data: data,
scope: Token.get("scope"),
roles: user_roles,
permission_visibility: permissions.visibility,
permission_proxy_hosts: permissions.proxy_hosts,
permission_redirection_hosts: permissions.redirection_hosts,
permission_dead_hosts: permissions.dead_hosts,
permission_streams: permissions.streams,
permission_access_lists: permissions.access_lists,
permission_certificates: permissions.certificates,
},
};

const permissionSchema = {
$async: true,
$id: "permissions",
type: "object",
additionalProperties: false,
properties: {},
};

const rawData = fs.readFileSync(
`${__dirname}/access/${permission.replace(/:/gim, "-")}.json`,
{ encoding: "utf8" },
);
permissionSchema.properties[permission] = JSON.parse(rawData);

const ajv = new Ajv({
verbose: true,
allErrors: true,
breakOnError: true,
coerceTypes: true,
schemas: [roleSchema, permsSchema, objectSchema, permissionSchema],
});

const valid = ajv.validate("permissions", dataSchema);
return valid && dataSchema[permission];
} catch (err) {
err.permission = permission;
err.permission_data = data;
logger.error(permission, data, err.message);
throw errs.PermissionError("Permission Denied", err);
}
},
};
};
}
@@ -1,85 +1,87 @@
const dnsPlugins = require('../global/certbot-dns-plugins.json');
const utils = require('./utils');
const error = require('./error');
const logger = require('../logger').certbot;
const batchflow = require('batchflow');
import batchflow from "batchflow";
import dnsPlugins from "../global/certbot-dns-plugins.json" with { type: "json" };
import { certbot as logger } from "../logger.js";
import errs from "./error.js";
import utils from "./utils.js";

const CERTBOT_VERSION_REPLACEMENT = '$(certbot --version | grep -Eo \'[0-9](\\.[0-9]+)+\')';
const CERTBOT_VERSION_REPLACEMENT = "$(certbot --version | grep -Eo '[0-9](\\.[0-9]+)+')";

const certbot = {
/**
* @param {array} pluginKeys
*/
const installPlugins = async (pluginKeys) => {
let hasErrors = false;

/**
* @param {array} pluginKeys
*/
installPlugins: async (pluginKeys) => {
let hasErrors = false;

return new Promise((resolve, reject) => {
if (pluginKeys.length === 0) {
resolve();
return;
}

batchflow(pluginKeys).sequential()
.each((_i, pluginKey, next) => {
certbot.installPlugin(pluginKey)
.then(() => {
next();
})
.catch((err) => {
hasErrors = true;
next(err);
});
})
.error((err) => {
logger.error(err.message);
})
.end(() => {
if (hasErrors) {
reject(new error.CommandError('Some plugins failed to install. Please check the logs above', 1));
} else {
resolve();
}
});
});
},

/**
* Installs a cerbot plugin given the key for the object from
* ../global/certbot-dns-plugins.json
*
* @param {string} pluginKey
* @returns {Object}
*/
installPlugin: async (pluginKey) => {
if (typeof dnsPlugins[pluginKey] === 'undefined') {
// throw Error(`Certbot plugin ${pluginKey} not found`);
throw new error.ItemNotFoundError(pluginKey);
return new Promise((resolve, reject) => {
if (pluginKeys.length === 0) {
resolve();
return;
}

const plugin = dnsPlugins[pluginKey];
logger.start(`Installing ${pluginKey}...`);

plugin.version = plugin.version.replace(/{{certbot-version}}/g, CERTBOT_VERSION_REPLACEMENT);
plugin.dependencies = plugin.dependencies.replace(/{{certbot-version}}/g, CERTBOT_VERSION_REPLACEMENT);

// SETUPTOOLS_USE_DISTUTILS is required for certbot plugins to install correctly
// in new versions of Python
let env = Object.assign({}, process.env, {SETUPTOOLS_USE_DISTUTILS: 'stdlib'});
if (typeof plugin.env === 'object') {
env = Object.assign(env, plugin.env);
}

const cmd = `. /opt/certbot/bin/activate && pip install --no-cache-dir ${plugin.dependencies} ${plugin.package_name}${plugin.version} && deactivate`;
return utils.exec(cmd, {env})
.then((result) => {
logger.complete(`Installed ${pluginKey}`);
return result;
batchflow(pluginKeys)
.sequential()
.each((_i, pluginKey, next) => {
certbot
.installPlugin(pluginKey)
.then(() => {
next();
})
.catch((err) => {
hasErrors = true;
next(err);
});
})
.catch((err) => {
throw err;
.error((err) => {
logger.error(err.message);
})
.end(() => {
if (hasErrors) {
reject(
new errs.CommandError("Some plugins failed to install. Please check the logs above", 1),
);
} else {
resolve();
}
});
},
});
};

module.exports = certbot;
/**
* Installs a cerbot plugin given the key for the object from
* ../global/certbot-dns-plugins.json
*
* @param {string} pluginKey
* @returns {Object}
*/
const installPlugin = async (pluginKey) => {
if (typeof dnsPlugins[pluginKey] === "undefined") {
// throw Error(`Certbot plugin ${pluginKey} not found`);
throw new errs.ItemNotFoundError(pluginKey);
}

const plugin = dnsPlugins[pluginKey];
logger.start(`Installing ${pluginKey}...`);

plugin.version = plugin.version.replace(/{{certbot-version}}/g, CERTBOT_VERSION_REPLACEMENT);
plugin.dependencies = plugin.dependencies.replace(/{{certbot-version}}/g, CERTBOT_VERSION_REPLACEMENT);

// SETUPTOOLS_USE_DISTUTILS is required for certbot plugins to install correctly
// in new versions of Python
let env = Object.assign({}, process.env, { SETUPTOOLS_USE_DISTUTILS: "stdlib" });
if (typeof plugin.env === "object") {
env = Object.assign(env, plugin.env);
}

const cmd = `. /opt/certbot/bin/activate && pip install --no-cache-dir ${plugin.dependencies} ${plugin.package_name}${plugin.version} && deactivate`;
return utils
.exec(cmd, { env })
.then((result) => {
logger.complete(`Installed ${pluginKey}`);
return result;
})
.catch((err) => {
throw err;
});
};

export { installPlugins, installPlugin };
@@ -1,6 +1,6 @@
const fs = require('fs');
const NodeRSA = require('node-rsa');
const logger = require('../logger').global;
import fs from "node:fs";
import NodeRSA from "node-rsa";
import { global as logger } from "../logger.js";

const keysFile = '/data/keys.json';
const mysqlEngine = 'mysql2';
@@ -12,18 +12,20 @@ let instance = null;
// 1. Load from config file first (not recommended anymore)
// 2. Use config env variables next
const configure = () => {
const filename = (process.env.NODE_CONFIG_DIR || './config') + '/' + (process.env.NODE_ENV || 'default') + '.json';
const filename = `${process.env.NODE_CONFIG_DIR || "./config"}/${process.env.NODE_ENV || "default"}.json`;
if (fs.existsSync(filename)) {
let configData;
try {
configData = require(filename);
// Load this json synchronously
const rawData = fs.readFileSync(filename);
configData = JSON.parse(rawData);
} catch (_) {
// do nothing
}

if (configData && configData.database) {
if (configData?.database) {
logger.info(`Using configuration from file: ${filename}`);
instance = configData;
instance = configData;
instance.keys = getKeys();
return;
}
@@ -34,15 +36,15 @@ const configure = () => {
const envMysqlName = process.env.DB_MYSQL_NAME || null;
if (envMysqlHost && envMysqlUser && envMysqlName) {
// we have enough mysql creds to go with mysql
logger.info('Using MySQL configuration');
logger.info("Using MySQL configuration");
instance = {
database: {
engine: mysqlEngine,
host: envMysqlHost,
port: process.env.DB_MYSQL_PORT || 3306,
user: envMysqlUser,
engine: mysqlEngine,
host: envMysqlHost,
port: process.env.DB_MYSQL_PORT || 3306,
user: envMysqlUser,
password: process.env.DB_MYSQL_PASSWORD,
name: envMysqlName,
name: envMysqlName,
},
keys: getKeys(),
};
@@ -54,33 +56,33 @@ const configure = () => {
const envPostgresName = process.env.DB_POSTGRES_NAME || null;
if (envPostgresHost && envPostgresUser && envPostgresName) {
// we have enough postgres creds to go with postgres
logger.info('Using Postgres configuration');
logger.info("Using Postgres configuration");
instance = {
database: {
engine: postgresEngine,
host: envPostgresHost,
port: process.env.DB_POSTGRES_PORT || 5432,
user: envPostgresUser,
engine: postgresEngine,
host: envPostgresHost,
port: process.env.DB_POSTGRES_PORT || 5432,
user: envPostgresUser,
password: process.env.DB_POSTGRES_PASSWORD,
name: envPostgresName,
name: envPostgresName,
},
keys: getKeys(),
};
return;
}

const envSqliteFile = process.env.DB_SQLITE_FILE || '/data/database.sqlite';
const envSqliteFile = process.env.DB_SQLITE_FILE || "/data/database.sqlite";
logger.info(`Using Sqlite: ${envSqliteFile}`);
instance = {
database: {
engine: 'knex-native',
knex: {
client: sqliteClientName,
engine: "knex-native",
knex: {
client: sqliteClientName,
connection: {
filename: envSqliteFile
filename: envSqliteFile,
},
useNullAsDefault: true
}
useNullAsDefault: true,
},
},
keys: getKeys(),
};
@@ -88,150 +90,148 @@ const configure = () => {

const getKeys = () => {
// Get keys from file
logger.debug("Cheecking for keys file:", keysFile);
if (!fs.existsSync(keysFile)) {
generateKeys();
} else if (process.env.DEBUG) {
logger.info('Keys file exists OK');
logger.info("Keys file exists OK");
}
try {
return require(keysFile);
// Load this json keysFile synchronously and return the json object
const rawData = fs.readFileSync(keysFile);
return JSON.parse(rawData);
} catch (err) {
logger.error('Could not read JWT key pair from config file: ' + keysFile, err);
logger.error(`Could not read JWT key pair from config file: ${keysFile}`, err);
process.exit(1);
}
};

const generateKeys = () => {
logger.info('Creating a new JWT key pair...');
logger.info("Creating a new JWT key pair...");
// Now create the keys and save them in the config.
const key = new NodeRSA({ b: 2048 });
key.generateKeyPair();

const keys = {
key: key.exportKey('private').toString(),
pub: key.exportKey('public').toString(),
key: key.exportKey("private").toString(),
pub: key.exportKey("public").toString(),
};

// Write keys config
try {
fs.writeFileSync(keysFile, JSON.stringify(keys, null, 2));
} catch (err) {
logger.error('Could not write JWT key pair to config file: ' + keysFile + ': ' + err.message);
logger.error(`Could not write JWT key pair to config file: ${keysFile}: ${err.message}`);
process.exit(1);
}
logger.info('Wrote JWT key pair to config file: ' + keysFile);
logger.info(`Wrote JWT key pair to config file: ${keysFile}`);
};

module.exports = {

/**
*
* @param {string} key ie: 'database' or 'database.engine'
* @returns {boolean}
*/
has: function(key) {
instance === null && configure();
const keys = key.split('.');
let level = instance;
let has = true;
keys.forEach((keyItem) =>{
if (typeof level[keyItem] === 'undefined') {
has = false;
} else {
level = level[keyItem];
}
});

return has;
},

/**
* Gets a specific key from the top level
*
* @param {string} key
* @returns {*}
*/
get: function (key) {
instance === null && configure();
if (key && typeof instance[key] !== 'undefined') {
return instance[key];
/**
*
* @param {string} key ie: 'database' or 'database.engine'
* @returns {boolean}
*/
const configHas = (key) => {
instance === null && configure();
const keys = key.split(".");
let level = instance;
let has = true;
keys.forEach((keyItem) => {
if (typeof level[keyItem] === "undefined") {
has = false;
} else {
level = level[keyItem];
}
return instance;
},
});

/**
* Is this a sqlite configuration?
*
* @returns {boolean}
*/
isSqlite: function () {
instance === null && configure();
return instance.database.knex && instance.database.knex.client === sqliteClientName;
},
return has;
};

/**
* Is this a mysql configuration?
*
* @returns {boolean}
*/
isMysql: function () {
instance === null && configure();
return instance.database.engine === mysqlEngine;
},

/**
* Is this a postgres configuration?
*
* @returns {boolean}
*/
isPostgres: function () {
instance === null && configure();
return instance.database.engine === postgresEngine;
},

/**
* Are we running in debug mdoe?
*
* @returns {boolean}
*/
debug: function () {
return !!process.env.DEBUG;
},

/**
* Returns a public key
*
* @returns {string}
*/
getPublicKey: function () {
instance === null && configure();
return instance.keys.pub;
},

/**
* Returns a private key
*
* @returns {string}
*/
getPrivateKey: function () {
instance === null && configure();
return instance.keys.key;
},

/**
* @returns {boolean}
*/
useLetsencryptStaging: function () {
return !!process.env.LE_STAGING;
},

/**
* @returns {string|null}
*/
useLetsencryptServer: function () {
if (process.env.LE_SERVER) {
return process.env.LE_SERVER;
}
return null;
/**
* Gets a specific key from the top level
*
* @param {string} key
* @returns {*}
*/
const configGet = (key) => {
instance === null && configure();
if (key && typeof instance[key] !== "undefined") {
return instance[key];
}
return instance;
};

/**
* Is this a sqlite configuration?
*
* @returns {boolean}
*/
const isSqlite = () => {
instance === null && configure();
return instance.database.knex && instance.database.knex.client === sqliteClientName;
};

/**
* Is this a mysql configuration?
*
* @returns {boolean}
*/
const isMysql = () => {
instance === null && configure();
return instance.database.engine === mysqlEngine;
};

/**
* Is this a postgres configuration?
*
* @returns {boolean}
*/
const isPostgres = () => {
instance === null && configure();
return instance.database.engine === postgresEngine;
};

/**
* Are we running in debug mdoe?
*
* @returns {boolean}
*/
const isDebugMode = () => !!process.env.DEBUG;

/**
* Returns a public key
*
* @returns {string}
*/
const getPublicKey = () => {
instance === null && configure();
return instance.keys.pub;
};

/**
* Returns a private key
*
* @returns {string}
*/
const getPrivateKey = () => {
instance === null && configure();
return instance.keys.key;
};

/**
* @returns {boolean}
*/
const useLetsencryptStaging = () => !!process.env.LE_STAGING;

/**
* @returns {string|null}
*/
const useLetsencryptServer = () => {
if (process.env.LE_SERVER) {
return process.env.LE_SERVER;
}
return null;
};

export { configHas, configGet, isSqlite, isMysql, isPostgres, isDebugMode, getPrivateKey, getPublicKey, useLetsencryptStaging, useLetsencryptServer };
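
The config module now exposes named functions instead of one exported object, and call sites in this commit import only what they need. A condensed usage sketch, mirroring the hunk further down that does import { isPostgres } from "./config.js"; the branch body is illustrative:

// Named imports replace require('./config') at call sites.
import { isPostgres, useLetsencryptServer } from "./config.js";

if (isPostgres()) {
	// postgres-specific handling, e.g. casting JSON columns to text
}
const leServer = useLetsencryptServer(); // value of LE_SERVER, or null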
@@ -1,99 +1,100 @@
const _ = require('lodash');
const util = require('util');
import _ from "lodash";

module.exports = {

PermissionError: function (message, previous) {
const errs = {
PermissionError: function (_, previous) {
Error.captureStackTrace(this, this.constructor);
this.name = this.constructor.name;
this.name = this.constructor.name;
this.previous = previous;
this.message = 'Permission Denied';
this.public = true;
this.status = 403;
this.message = "Permission Denied";
this.public = true;
this.status = 403;
},

ItemNotFoundError: function (id, previous) {
Error.captureStackTrace(this, this.constructor);
this.name = this.constructor.name;
this.name = this.constructor.name;
this.previous = previous;
this.message = 'Item Not Found - ' + id;
this.public = true;
this.status = 404;
this.message = `Item Not Found - ${id}`;
this.public = true;
this.status = 404;
},

AuthError: function (message, previous) {
AuthError: function (message, messageI18n, previous) {
Error.captureStackTrace(this, this.constructor);
this.name = this.constructor.name;
this.name = this.constructor.name;
this.previous = previous;
this.message = message;
this.public = true;
this.status = 401;
this.message = message;
this.message_i18n = messageI18n;
this.public = true;
this.status = 400;
},

InternalError: function (message, previous) {
Error.captureStackTrace(this, this.constructor);
this.name = this.constructor.name;
this.name = this.constructor.name;
this.previous = previous;
this.message = message;
this.status = 500;
this.public = false;
this.message = message;
this.status = 500;
this.public = false;
},

InternalValidationError: function (message, previous) {
Error.captureStackTrace(this, this.constructor);
this.name = this.constructor.name;
this.name = this.constructor.name;
this.previous = previous;
this.message = message;
this.status = 400;
this.public = false;
this.message = message;
this.status = 400;
this.public = false;
},

ConfigurationError: function (message, previous) {
Error.captureStackTrace(this, this.constructor);
this.name = this.constructor.name;
this.name = this.constructor.name;
this.previous = previous;
this.message = message;
this.status = 400;
this.public = true;
this.message = message;
this.status = 400;
this.public = true;
},

CacheError: function (message, previous) {
Error.captureStackTrace(this, this.constructor);
this.name = this.constructor.name;
this.message = message;
this.name = this.constructor.name;
this.message = message;
this.previous = previous;
this.status = 500;
this.public = false;
this.status = 500;
this.public = false;
},

ValidationError: function (message, previous) {
Error.captureStackTrace(this, this.constructor);
this.name = this.constructor.name;
this.name = this.constructor.name;
this.previous = previous;
this.message = message;
this.public = true;
this.status = 400;
this.message = message;
this.public = true;
this.status = 400;
},

AssertionFailedError: function (message, previous) {
Error.captureStackTrace(this, this.constructor);
this.name = this.constructor.name;
this.name = this.constructor.name;
this.previous = previous;
this.message = message;
this.public = false;
this.status = 400;
this.message = message;
this.public = false;
this.status = 400;
},

CommandError: function (stdErr, code, previous) {
Error.captureStackTrace(this, this.constructor);
this.name = this.constructor.name;
this.name = this.constructor.name;
this.previous = previous;
this.message = stdErr;
this.code = code;
this.public = false;
this.message = stdErr;
this.code = code;
this.public = false;
},
};

_.forEach(module.exports, function (error) {
util.inherits(error, Error);
_.forEach(errs, (err) => {
err.prototype = Object.create(Error.prototype);
});

export default errs;
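
With the rewrite, the error constructors are consumed through a single default export, which is how the other hunks in this commit import them (import errs from "./error.js"). A minimal usage sketch:

// Minimal usage sketch of the converted error module.
import errs from "./error.js";

const denied = new errs.PermissionError("Permission Denied");
console.log(denied.status, denied instanceof Error); // 403 true - the prototype assignment above chains to Error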
@@ -1,12 +1,13 @@
module.exports = function (req, res, next) {
export default (req, res, next) => {
if (req.headers.origin) {
res.set({
'Access-Control-Allow-Origin': req.headers.origin,
'Access-Control-Allow-Credentials': true,
'Access-Control-Allow-Methods': 'OPTIONS, GET, POST',
'Access-Control-Allow-Headers': 'Content-Type, Cache-Control, Pragma, Expires, Authorization, X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit',
'Access-Control-Max-Age': 5 * 60,
'Access-Control-Expose-Headers': 'X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit'
"Access-Control-Allow-Origin": req.headers.origin,
"Access-Control-Allow-Credentials": true,
"Access-Control-Allow-Methods": "OPTIONS, GET, POST",
"Access-Control-Allow-Headers":
"Content-Type, Cache-Control, Pragma, Expires, Authorization, X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit",
"Access-Control-Max-Age": 5 * 60,
"Access-Control-Expose-Headers": "X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit",
});
next();
} else {
@@ -1,10 +1,11 @@
const Access = require('../access');
import Access from "../access.js";

module.exports = () => {
return function (req, res, next) {
export default () => {
return (_, res, next) => {
res.locals.access = null;
let access = new Access(res.locals.token || null);
access.load()
const access = new Access(res.locals.token || null);
access
.load()
.then(() => {
res.locals.access = access;
next();
@@ -12,4 +13,3 @@ module.exports = () => {
.catch(next);
};
};

@@ -1,13 +1,13 @@
module.exports = function () {
return function (req, res, next) {
export default function () {
return (req, res, next) => {
if (req.headers.authorization) {
let parts = req.headers.authorization.split(' ');
const parts = req.headers.authorization.split(" ");

if (parts && parts[0] === 'Bearer' && parts[1]) {
if (parts && parts[0] === "Bearer" && parts[1]) {
res.locals.token = parts[1];
}
}

next();
};
};
}
@@ -1,7 +1,6 @@
let _ = require('lodash');

module.exports = function (default_sort, default_offset, default_limit, max_limit) {
import _ from "lodash";

export default (default_sort, default_offset, default_limit, max_limit) => {
/**
* This will setup the req query params with filtered data and defaults
*
@@ -11,34 +10,35 @@ module.exports = function (default_sort, default_offset, default_limit, max_limi
*
*/

return function (req, res, next) {

req.query.offset = typeof req.query.limit === 'undefined' ? default_offset || 0 : parseInt(req.query.offset, 10);
req.query.limit = typeof req.query.limit === 'undefined' ? default_limit || 50 : parseInt(req.query.limit, 10);
return (req, _res, next) => {
req.query.offset =
typeof req.query.limit === "undefined" ? default_offset || 0 : Number.parseInt(req.query.offset, 10);
req.query.limit =
typeof req.query.limit === "undefined" ? default_limit || 50 : Number.parseInt(req.query.limit, 10);

if (max_limit && req.query.limit > max_limit) {
req.query.limit = max_limit;
}

// Sorting
let sort = typeof req.query.sort === 'undefined' ? default_sort : req.query.sort;
let myRegexp = /.*\.(asc|desc)$/ig;
let sort_array = [];
let sort = typeof req.query.sort === "undefined" ? default_sort : req.query.sort;
const myRegexp = /.*\.(asc|desc)$/gi;
const sort_array = [];

sort = sort.split(',');
_.map(sort, function (val) {
let matches = myRegexp.exec(val);
sort = sort.split(",");
_.map(sort, (val) => {
const matches = myRegexp.exec(val);

if (matches !== null) {
let dir = matches[1];
const dir = matches[1];
sort_array.push({
field: val.substr(0, val.length - (dir.length + 1)),
dir: dir.toLowerCase()
dir: dir.toLowerCase(),
});
} else {
sort_array.push({
field: val,
dir: 'asc'
dir: "asc",
});
}
});
@@ -1,9 +1,8 @@
module.exports = (req, res, next) => {
export default (req, res, next) => {
if (req.params.user_id === 'me' && res.locals.access) {
req.params.user_id = res.locals.access.token.get('attrs').id;
} else {
req.params.user_id = parseInt(req.params.user_id, 10);
req.params.user_id = Number.parseInt(req.params.user_id, 10);
}

next();
};
@@ -1,62 +1,58 @@
const moment = require('moment');
const {isPostgres} = require('./config');
const {ref} = require('objection');
import moment from "moment";
import { ref } from "objection";
import { isPostgres } from "./config.js";

module.exports = {

/**
* Takes an expression such as 30d and returns a moment object of that date in future
*
* Key Shorthand
* ==================
* years y
* quarters Q
* months M
* weeks w
* days d
* hours h
* minutes m
* seconds s
* milliseconds ms
*
* @param {String} expression
* @returns {Object}
*/
parseDatePeriod: function (expression) {
let matches = expression.match(/^([0-9]+)(y|Q|M|w|d|h|m|s|ms)$/m);
if (matches) {
return moment().add(matches[1], matches[2]);
}

return null;
},

convertIntFieldsToBool: function (obj, fields) {
fields.forEach(function (field) {
if (typeof obj[field] !== 'undefined') {
obj[field] = obj[field] === 1;
}
});
return obj;
},

convertBoolFieldsToInt: function (obj, fields) {
fields.forEach(function (field) {
if (typeof obj[field] !== 'undefined') {
obj[field] = obj[field] ? 1 : 0;
}
});
return obj;
},

/**
* Casts a column to json if using postgres
*
* @param {string} colName
* @returns {string|Objection.ReferenceBuilder}
*/
castJsonIfNeed: function (colName) {
return isPostgres() ? ref(colName).castText() : colName;
/**
* Takes an expression such as 30d and returns a moment object of that date in future
*
* Key Shorthand
* ==================
* years y
* quarters Q
* months M
* weeks w
* days d
* hours h
* minutes m
* seconds s
* milliseconds ms
*
* @param {String} expression
* @returns {Object}
*/
const parseDatePeriod = (expression) => {
const matches = expression.match(/^([0-9]+)(y|Q|M|w|d|h|m|s|ms)$/m);
if (matches) {
return moment().add(matches[1], matches[2]);
}

return null;
};

const convertIntFieldsToBool = (obj, fields) => {
fields.forEach((field) => {
if (typeof obj[field] !== "undefined") {
obj[field] = obj[field] === 1;
}
});
return obj;
};

const convertBoolFieldsToInt = (obj, fields) => {
fields.forEach((field) => {
if (typeof obj[field] !== "undefined") {
obj[field] = obj[field] ? 1 : 0;
}
});
return obj;
};

/**
* Casts a column to json if using postgres
*
* @param {string} colName
* @returns {string|Objection.ReferenceBuilder}
*/
const castJsonIfNeed = (colName) => (isPostgres() ? ref(colName).castText() : colName);

export { parseDatePeriod, convertIntFieldsToBool, convertBoolFieldsToInt, castJsonIfNeed };
@@ -1,33 +1,34 @@
const migrate_name = 'identifier_for_migrate';
const logger = require('../logger').migrate;
import { migrate as logger } from "../logger.js";

const migrateName = "identifier_for_migrate";

/**
* Migrate
*
* @see http://knexjs.org/#Schema
*
* @param {Object} knex
* @param {Promise} Promise
* @param {Object} knex
* @returns {Promise}
*/
exports.up = function (knex, Promise) {

logger.info('[' + migrate_name + '] Migrating Up...');
const up = (_knex) => {
logger.info(`[${migrateName}] Migrating Up...`);

// Create Table example:

/*return knex.schema.createTable('notification', (table) => {
/*
return knex.schema.createTable('notification', (table) => {
table.increments().primary();
table.string('name').notNull();
table.string('type').notNull();
table.integer('created_on').notNull();
table.integer('modified_on').notNull();
})
.then(function () {
logger.info('[' + migrate_name + '] Notification Table created');
});*/
.then(function () {
logger.info('[' + migrateName + '] Notification Table created');
});
*/

logger.info('[' + migrate_name + '] Migrating Up Complete');
logger.info(`[${migrateName}] Migrating Up Complete`);

return Promise.resolve(true);
};
@@ -35,21 +36,24 @@ exports.up = function (knex, Promise) {
/**
* Undo Migrate
*
* @param {Object} knex
* @param {Promise} Promise
* @param {Object} knex
* @returns {Promise}
*/
exports.down = function (knex, Promise) {
logger.info('[' + migrate_name + '] Migrating Down...');
const down = (_knex) => {
logger.info(`[${migrateName}] Migrating Down...`);

// Drop table example:

/*return knex.schema.dropTable('notification')
.then(() => {
logger.info('[' + migrate_name + '] Notification Table dropped');
});*/
/*
return knex.schema.dropTable('notification')
.then(() => {
logger.info(`[${migrateName}] Notification Table dropped`);
});
*/

logger.info('[' + migrate_name + '] Migrating Down Complete');
logger.info(`[${migrateName}] Migrating Down Complete`);

return Promise.resolve(true);
};

export { up, down };
@@ -1,110 +1,110 @@
const _ = require('lodash');
const exec = require('node:child_process').exec;
const execFile = require('node:child_process').execFile;
const { Liquid } = require('liquidjs');
const logger = require('../logger').global;
const error = require('./error');
import { exec as nodeExec, execFile as nodeExecFile } from "node:child_process";
import { dirname } from "node:path";
import { fileURLToPath } from "node:url";
import { Liquid } from "liquidjs";
import _ from "lodash";
import { global as logger } from "../logger.js";
import errs from "./error.js";

module.exports = {
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

exec: async (cmd, options = {}) => {
logger.debug('CMD:', cmd);

const { stdout, stderr } = await new Promise((resolve, reject) => {
const child = exec(cmd, options, (isError, stdout, stderr) => {
if (isError) {
reject(new error.CommandError(stderr, isError));
} else {
resolve({ stdout, stderr });
}
});

child.on('error', (e) => {
reject(new error.CommandError(stderr, 1, e));
});
});
return stdout;
},

/**
* @param {String} cmd
* @param {Array} args
* @param {Object|undefined} options
* @returns {Promise}
*/
execFile: (cmd, args, options) => {
logger.debug(`CMD: ${cmd} ${args ? args.join(' ') : ''}`);
if (typeof options === 'undefined') {
options = {};
}

return new Promise((resolve, reject) => {
execFile(cmd, args, options, (err, stdout, stderr) => {
if (err && typeof err === 'object') {
reject(new error.CommandError(stderr, 1, err));
} else {
resolve(stdout.trim());
}
});
});
},

/**
* Used in objection query builder
*
* @param {Array} omissions
* @returns {Function}
*/
omitRow: (omissions) => {
/**
* @param {Object} row
* @returns {Object}
*/
return (row) => {
return _.omit(row, omissions);
};
},

/**
* Used in objection query builder
*
* @param {Array} omissions
* @returns {Function}
*/
omitRows: (omissions) => {
/**
* @param {Array} rows
* @returns {Object}
*/
return (rows) => {
rows.forEach((row, idx) => {
rows[idx] = _.omit(row, omissions);
});
return rows;
};
},

/**
* @returns {Object} Liquid render engine
*/
getRenderEngine: () => {
const renderEngine = new Liquid({
root: `${__dirname}/../templates/`
});

/**
* nginxAccessRule expects the object given to have 2 properties:
*
* directive string
* address string
*/
renderEngine.registerFilter('nginxAccessRule', (v) => {
if (typeof v.directive !== 'undefined' && typeof v.address !== 'undefined' && v.directive && v.address) {
return `${v.directive} ${v.address};`;
const exec = async (cmd, options = {}) => {
logger.debug("CMD:", cmd);
const { stdout, stderr } = await new Promise((resolve, reject) => {
const child = nodeExec(cmd, options, (isError, stdout, stderr) => {
if (isError) {
reject(new errs.CommandError(stderr, isError));
} else {
resolve({ stdout, stderr });
}
return '';
});

return renderEngine;
}
child.on("error", (e) => {
reject(new errs.CommandError(stderr, 1, e));
});
});
return stdout;
};

/**
* @param {String} cmd
* @param {Array} args
* @param {Object|undefined} options
* @returns {Promise}
*/
const execFile = (cmd, args, options) => {
logger.debug(`CMD: ${cmd} ${args ? args.join(" ") : ""}`);
const opts = options || {};

return new Promise((resolve, reject) => {
nodeExecFile(cmd, args, opts, (err, stdout, stderr) => {
if (err && typeof err === "object") {
reject(new errs.CommandError(stderr, 1, err));
} else {
resolve(stdout.trim());
}
});
});
};

/**
* Used in objection query builder
*
* @param {Array} omissions
* @returns {Function}
*/
const omitRow = (omissions) => {
/**
* @param {Object} row
* @returns {Object}
*/
return (row) => {
return _.omit(row, omissions);
};
};

/**
* Used in objection query builder
*
* @param {Array} omissions
* @returns {Function}
*/
const omitRows = (omissions) => {
/**
* @param {Array} rows
* @returns {Object}
*/
return (rows) => {
rows.forEach((row, idx) => {
rows[idx] = _.omit(row, omissions);
});
return rows;
};
};

/**
* @returns {Object} Liquid render engine
*/
const getRenderEngine = () => {
const renderEngine = new Liquid({
root: `${__dirname}/../templates/`,
});

/**
* nginxAccessRule expects the object given to have 2 properties:
*
* directive string
* address string
*/
renderEngine.registerFilter("nginxAccessRule", (v) => {
if (typeof v.directive !== "undefined" && typeof v.address !== "undefined" && v.directive && v.address) {
return `${v.directive} ${v.address};`;
}
return "";
});

return renderEngine;
};

export default { exec, execFile, omitRow, omitRows, getRenderEngine };
@@ -1,12 +1,12 @@
const Ajv = require('ajv/dist/2020');
const error = require('../error');
import Ajv from "ajv/dist/2020.js";
import errs from "../error.js";

const ajv = new Ajv({
verbose: true,
allErrors: true,
verbose: true,
allErrors: true,
allowUnionTypes: true,
strict: false,
coerceTypes: true,
strict: false,
coerceTypes: true,
});

/**
@@ -14,30 +14,30 @@ const ajv = new Ajv({
* @param {Object} payload
* @returns {Promise}
*/
function apiValidator (schema, payload/*, description*/) {
return new Promise(function Promise_apiValidator (resolve, reject) {
function apiValidator(schema, payload /*, description*/) {
return new Promise(function Promise_apiValidator(resolve, reject) {
if (schema === null) {
reject(new error.ValidationError('Schema is undefined'));
reject(new errs.ValidationError("Schema is undefined"));
return;
}

if (typeof payload === 'undefined') {
reject(new error.ValidationError('Payload is undefined'));
if (typeof payload === "undefined") {
reject(new errs.ValidationError("Payload is undefined"));
return;
}

const validate = ajv.compile(schema);
const valid = validate(payload);
const valid = validate(payload);

if (valid && !validate.errors) {
resolve(payload);
} else {
let message = ajv.errorsText(validate.errors);
let err = new error.ValidationError(message);
err.debug = [validate.errors, payload];
const message = ajv.errorsText(validate.errors);
const err = new errs.ValidationError(message);
err.debug = [validate.errors, payload];
reject(err);
}
});
}

module.exports = apiValidator;
export default apiValidator;
@@ -1,17 +1,17 @@
const _ = require('lodash');
const Ajv = require('ajv/dist/2020');
const error = require('../error');
const commonDefinitions = require('../../schema/common.json');
import Ajv from 'ajv/dist/2020.js';
import _ from "lodash";
import commonDefinitions from "../../schema/common.json" with { type: "json" };
import errs from "../error.js";

RegExp.prototype.toJSON = RegExp.prototype.toString;

const ajv = new Ajv({
verbose: true,
allErrors: true,
verbose: true,
allErrors: true,
allowUnionTypes: true,
coerceTypes: true,
strict: false,
schemas: [commonDefinitions]
coerceTypes: true,
strict: false,
schemas: [commonDefinitions],
});

/**
@@ -20,26 +20,26 @@ const ajv = new Ajv({
* @param {Object} payload
* @returns {Promise}
*/
function validator (schema, payload) {
return new Promise(function (resolve, reject) {
const validator = (schema, payload) => {
return new Promise((resolve, reject) => {
if (!payload) {
reject(new error.InternalValidationError('Payload is falsy'));
reject(new errs.InternalValidationError("Payload is falsy"));
} else {
try {
let validate = ajv.compile(schema);
let valid = validate(payload);
const validate = ajv.compile(schema);
const valid = validate(payload);

if (valid && !validate.errors) {
resolve(_.cloneDeep(payload));
} else {
let message = ajv.errorsText(validate.errors);
reject(new error.InternalValidationError(message));
const message = ajv.errorsText(validate.errors);
reject(new errs.InternalValidationError(message));
}
} catch (err) {
reject(err);
}
}
});
}
};

module.exports = validator;
export default validator;