diff --git a/.gitignore b/.gitignore
index fbb8167e..5bf37c0d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,6 @@
 .DS_Store
 .idea
+.qodo
 ._*
 .vscode
 certbot-help.txt
diff --git a/.version b/.version
index e4643748..fb2c0766 100644
--- a/.version
+++ b/.version
@@ -1 +1 @@
-2.12.6
+2.13.0
diff --git a/Jenkinsfile b/Jenkinsfile
index af913c2e..fc249ab4 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -119,13 +119,13 @@ pipeline {
 				always {
 					// Dumps to analyze later
 					sh 'mkdir -p debug/sqlite'
-					sh 'docker logs $(docker-compose ps --all -q fullstack) > debug/sqlite/docker_fullstack.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q stepca) > debug/sqlite/docker_stepca.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q pdns) > debug/sqlite/docker_pdns.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q pdns-db) > debug/sqlite/docker_pdns-db.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q dnsrouter) > debug/sqlite/docker_dnsrouter.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q fullstack) > debug/sqlite/docker_fullstack.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q stepca) > debug/sqlite/docker_stepca.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q pdns) > debug/sqlite/docker_pdns.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q pdns-db) > debug/sqlite/docker_pdns-db.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q dnsrouter) > debug/sqlite/docker_dnsrouter.log 2>&1'
 					junit 'test/results/junit/*'
-					sh 'docker-compose down --remove-orphans --volumes -t 30 || true'
+					sh 'docker compose down --remove-orphans --volumes -t 30 || true'
 				}
 				unstable {
 					dir(path: 'test/results') {
@@ -152,13 +152,13 @@ pipeline {
 				always {
 					// Dumps to analyze later
 					sh 'mkdir -p debug/mysql'
-					sh 'docker logs $(docker-compose ps --all -q fullstack) > debug/mysql/docker_fullstack.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q stepca) > debug/mysql/docker_stepca.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q pdns) > debug/mysql/docker_pdns.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q pdns-db) > debug/mysql/docker_pdns-db.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q dnsrouter) > debug/mysql/docker_dnsrouter.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q fullstack) > debug/mysql/docker_fullstack.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q stepca) > debug/mysql/docker_stepca.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q pdns) > debug/mysql/docker_pdns.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q pdns-db) > debug/mysql/docker_pdns-db.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q dnsrouter) > debug/mysql/docker_dnsrouter.log 2>&1'
 					junit 'test/results/junit/*'
-					sh 'docker-compose down --remove-orphans --volumes -t 30 || true'
+					sh 'docker compose down --remove-orphans --volumes -t 30 || true'
 				}
 				unstable {
 					dir(path: 'test/results') {
@@ -185,18 +185,18 @@ pipeline {
 				always {
 					// Dumps to analyze later
 					sh 'mkdir -p debug/postgres'
-					sh 'docker logs $(docker-compose ps --all -q fullstack) > debug/postgres/docker_fullstack.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q stepca) > debug/postgres/docker_stepca.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q pdns) > debug/postgres/docker_pdns.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q pdns-db) > debug/postgres/docker_pdns-db.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q dnsrouter) > debug/postgres/docker_dnsrouter.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q db-postgres) > debug/postgres/docker_db-postgres.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q authentik) > debug/postgres/docker_authentik.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q authentik-redis) > debug/postgres/docker_authentik-redis.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q authentik-ldap) > debug/postgres/docker_authentik-ldap.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q fullstack) > debug/postgres/docker_fullstack.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q stepca) > debug/postgres/docker_stepca.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q pdns) > debug/postgres/docker_pdns.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q pdns-db) > debug/postgres/docker_pdns-db.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q dnsrouter) > debug/postgres/docker_dnsrouter.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q db-postgres) > debug/postgres/docker_db-postgres.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q authentik) > debug/postgres/docker_authentik.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q authentik-redis) > debug/postgres/docker_authentik-redis.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q authentik-ldap) > debug/postgres/docker_authentik-ldap.log 2>&1'
 
 					junit 'test/results/junit/*'
-					sh 'docker-compose down --remove-orphans --volumes -t 30 || true'
+					sh 'docker compose down --remove-orphans --volumes -t 30 || true'
 				}
 				unstable {
 					dir(path: 'test/results') {
diff --git a/README.md b/README.md
index 2116a55a..f48478ed 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
 
 	
 	
-	
+	
 	
 		
 	
@@ -74,11 +74,7 @@ This is the bare minimum configuration required. See the [documentation](https:/
 3. Bring up your stack by running
 
 ```bash
-docker-compose up -d
-
-# If using docker-compose-plugin
 docker compose up -d
-
 ```
 
 4. Log in to the Admin UI
@@ -88,14 +84,6 @@ Sometimes this can take a little bit because of the entropy of keys.
 
 [http://127.0.0.1:81](http://127.0.0.1:81)
 
-Default Admin User:
-```
-Email:    admin@example.com
-Password: changeme
-```
-
-Immediately after logging in with this default user you will be asked to modify your details and change your password.
-
 
 ## Contributing
 
diff --git a/backend/.eslintrc.json b/backend/.eslintrc.json
deleted file mode 100644
index 6d6172a4..00000000
--- a/backend/.eslintrc.json
+++ /dev/null
@@ -1,73 +0,0 @@
-{
-	"env": {
-		"node": true,
-		"es6": true
-	},
-	"extends": [
-		"eslint:recommended"
-	],
-	"globals": {
-		"Atomics": "readonly",
-		"SharedArrayBuffer": "readonly"
-	},
-	"parserOptions": {
-		"ecmaVersion": 2018,
-		"sourceType": "module"
-	},
-	"plugins": [
-		"align-assignments"
-	],
-	"rules": {
-		"arrow-parens": [
-			"error",
-			"always"
-		],
-		"indent": [
-			"error",
-			"tab"
-		],
-		"linebreak-style": [
-			"error",
-			"unix"
-		],
-		"quotes": [
-			"error",
-			"single"
-		],
-		"semi": [
-			"error",
-			"always"
-		],
-		"key-spacing": [
-			"error",
-			{
-				"align": "value"
-			}
-		],
-		"comma-spacing": [
-			"error",
-			{
-				"before": false,
-				"after": true
-			}
-		],
-		"func-call-spacing": [
-			"error",
-			"never"
-		],
-		"keyword-spacing": [
-			"error",
-			{
-				"before": true
-			}
-		],
-		"no-irregular-whitespace": "error",
-		"no-unused-expressions": 0,
-		"align-assignments/align-assignments": [
-			2,
-			{
-				"requiresOnly": false
-			}
-		]
-	}
-}
\ No newline at end of file
diff --git a/backend/.prettierrc b/backend/.prettierrc
deleted file mode 100644
index fefbcfa6..00000000
--- a/backend/.prettierrc
+++ /dev/null
@@ -1,11 +0,0 @@
-{
-	"printWidth": 320,
-	"tabWidth": 4,
-	"useTabs": true,
-	"semi": true,
-	"singleQuote": true,
-	"bracketSpacing": true,
-	"jsxBracketSameLine": true,
-	"trailingComma": "all",
-	"proseWrap": "always"
-}
diff --git a/backend/app.js b/backend/app.js
index 59f7def2..857db882 100644
--- a/backend/app.js
+++ b/backend/app.js
@@ -1,9 +1,12 @@
-const express     = require('express');
-const bodyParser  = require('body-parser');
-const fileUpload  = require('express-fileupload');
-const compression = require('compression');
-const config      = require('./lib/config');
-const log         = require('./logger').express;
+import bodyParser from "body-parser";
+import compression from "compression";
+import express from "express";
+import fileUpload from "express-fileupload";
+import { isDebugMode } from "./lib/config.js";
+import cors from "./lib/express/cors.js";
+import jwt from "./lib/express/jwt.js";
+import { express as logger } from "./logger.js";
+import mainRoutes from "./routes/main.js";
 
 /**
  * App
@@ -11,7 +14,7 @@ const log         = require('./logger').express;
 const app = express();
 app.use(fileUpload());
 app.use(bodyParser.json());
-app.use(bodyParser.urlencoded({extended: true}));
+app.use(bodyParser.urlencoded({ extended: true }));
 
 // Gzip
 app.use(compression());
@@ -20,71 +23,70 @@ app.use(compression());
  * General Logging, BEFORE routes
  */
 
-app.disable('x-powered-by');
-app.enable('trust proxy', ['loopback', 'linklocal', 'uniquelocal']);
-app.enable('strict routing');
+app.disable("x-powered-by");
+app.enable("trust proxy", ["loopback", "linklocal", "uniquelocal"]);
+app.enable("strict routing");
 
 // pretty print JSON when not live
-if (config.debug()) {
-	app.set('json spaces', 2);
+if (isDebugMode()) {
+	app.set("json spaces", 2);
 }
 
 // CORS for everything
-app.use(require('./lib/express/cors'));
+app.use(cors);
 
 // General security/cache related headers + server header
-app.use(function (req, res, next) {
-	let x_frame_options = 'DENY';
+app.use((_, res, next) => {
+	let x_frame_options = "DENY";
 
-	if (typeof process.env.X_FRAME_OPTIONS !== 'undefined' && process.env.X_FRAME_OPTIONS) {
+	if (typeof process.env.X_FRAME_OPTIONS !== "undefined" && process.env.X_FRAME_OPTIONS) {
 		x_frame_options = process.env.X_FRAME_OPTIONS;
 	}
 
 	res.set({
-		'X-XSS-Protection':       '1; mode=block',
-		'X-Content-Type-Options': 'nosniff',
-		'X-Frame-Options':        x_frame_options,
-		'Cache-Control':          'no-cache, no-store, max-age=0, must-revalidate',
-		Pragma:                   'no-cache',
-		Expires:                  0
+		"X-XSS-Protection": "1; mode=block",
+		"X-Content-Type-Options": "nosniff",
+		"X-Frame-Options": x_frame_options,
+		"Cache-Control": "no-cache, no-store, max-age=0, must-revalidate",
+		Pragma: "no-cache",
+		Expires: 0,
 	});
 	next();
 });
 
-app.use(require('./lib/express/jwt')());
-app.use('/', require('./routes/main'));
+app.use(jwt());
+app.use("/", mainRoutes);
 
 // production error handler
 // no stacktraces leaked to user
-// eslint-disable-next-line
-app.use(function (err, req, res, next) {
-
-	let payload = {
+app.use((err, req, res, _) => {
+	const payload = {
 		error: {
-			code:    err.status,
-			message: err.public ? err.message : 'Internal Error'
-		}
+			code: err.status,
+			message: err.public ? err.message : "Internal Error",
+		},
 	};
 
-	if (config.debug() || (req.baseUrl + req.path).includes('nginx/certificates')) {
+	if (typeof err.message_i18n !== "undefined") {
+		payload.error.message_i18n = err.message_i18n;
+	}
+
+	if (isDebugMode() || (req.baseUrl + req.path).includes("nginx/certificates")) {
 		payload.debug = {
-			stack:    typeof err.stack !== 'undefined' && err.stack ? err.stack.split('\n') : null,
-			previous: err.previous
+			stack: typeof err.stack !== "undefined" && err.stack ? err.stack.split("\n") : null,
+			previous: err.previous,
 		};
 	}
 
 	// Not every error is worth logging - but this is good for now until it gets annoying.
-	if (typeof err.stack !== 'undefined' && err.stack) {
-		if (config.debug()) {
-			log.debug(err.stack);
-		} else if (typeof err.public == 'undefined' || !err.public) {
-			log.warn(err.message);
+	if (typeof err.stack !== "undefined" && err.stack) {
+		logger.debug(err.stack);
+		if (typeof err.public === "undefined" || !err.public) {
+			logger.warn(err.message);
 		}
 	}
 
-	res
-		.status(err.status || 500)
-		.send(payload);
+	res.status(err.status || 500).send(payload);
 });
 
-module.exports = app;
+export default app;
diff --git a/backend/biome.json b/backend/biome.json
new file mode 100644
index 00000000..8bd8e2ab
--- /dev/null
+++ b/backend/biome.json
@@ -0,0 +1,91 @@
+{
+    "$schema": "https://biomejs.dev/schemas/2.3.1/schema.json",
+    "vcs": {
+        "enabled": true,
+        "clientKind": "git",
+        "useIgnoreFile": true
+    },
+    "files": {
+        "ignoreUnknown": false,
+        "includes": [
+            "**/*.ts",
+            "**/*.tsx",
+            "**/*.js",
+            "**/*.jsx",
+            "!**/dist/**/*"
+        ]
+    },
+    "formatter": {
+        "enabled": true,
+        "indentStyle": "tab",
+        "indentWidth": 4,
+        "lineWidth": 120,
+        "formatWithErrors": true
+    },
+    "assist": {
+        "actions": {
+            "source": {
+                "organizeImports": {
+                    "level": "on",
+                    "options": {
+                        "groups": [
+                            ":BUN:",
+                            ":NODE:",
+                            [
+                                "npm:*",
+                                "npm:*/**"
+                            ],
+                            ":PACKAGE_WITH_PROTOCOL:",
+                            ":URL:",
+                            ":PACKAGE:",
+                            [
+                                "/src/*",
+                                "/src/**"
+                            ],
+                            [
+                                "/**"
+                            ],
+                            [
+                                "#*",
+                                "#*/**"
+                            ],
+                            ":PATH:"
+                        ]
+                    }
+                }
+            }
+        }
+    },
+    "linter": {
+        "enabled": true,
+        "rules": {
+            "recommended": true,
+            "correctness": {
+                "useUniqueElementIds": "off"
+            },
+            "suspicious": {
+                "noExplicitAny": "off"
+            },
+            "performance": {
+                "noDelete": "off"
+            },
+            "nursery": "off",
+            "a11y": {
+                "useSemanticElements": "off",
+                "useValidAnchor": "off"
+            },
+            "style": {
+                "noParameterAssign": "error",
+                "useAsConstAssertion": "error",
+                "useDefaultParameterLast": "error",
+                "useEnumInitializers": "error",
+                "useSelfClosingElements": "error",
+                "useSingleVarDeclarator": "error",
+                "noUnusedTemplateLiteral": "error",
+                "useNumberNamespace": "error",
+                "noInferrableTypes": "error",
+                "noUselessElse": "error"
+            }
+        }
+    }
+}
diff --git a/global/README.md b/backend/certbot/README.md
similarity index 97%
rename from global/README.md
rename to backend/certbot/README.md
index 83e6c8c5..3c456462 100644
--- a/global/README.md
+++ b/backend/certbot/README.md
@@ -1,4 +1,4 @@
-# certbot-dns-plugins
+# Certbot dns-plugins
 
 This file contains info about available Certbot DNS plugins.
 This only works for plugins which use the standard argument structure, so:
diff --git a/global/certbot-dns-plugins.json b/backend/certbot/dns-plugins.json
similarity index 100%
rename from global/certbot-dns-plugins.json
rename to backend/certbot/dns-plugins.json
diff --git a/backend/db.js b/backend/db.js
index 1a8b1634..6fe47b6f 100644
--- a/backend/db.js
+++ b/backend/db.js
@@ -1,27 +1,32 @@
-const config = require('./lib/config');
+import knex from "knex";
+import { configGet, configHas } from "./lib/config.js";
 
-if (!config.has('database')) {
-	throw new Error('Database config does not exist! Please read the instructions: https://nginxproxymanager.com/setup/');
-}
+const generateDbConfig = () => {
+	if (!configHas("database")) {
+		throw new Error(
+			"Database config does not exist! Please read the instructions: https://nginxproxymanager.com/setup/",
+		);
+	}
 
-function generateDbConfig() {
-	const cfg = config.get('database');
-	if (cfg.engine === 'knex-native') {
+	const cfg = configGet("database");
+
+	if (cfg.engine === "knex-native") {
 		return cfg.knex;
 	}
+
 	return {
-		client:     cfg.engine,
+		client: cfg.engine,
 		connection: {
-			host:     cfg.host,
-			user:     cfg.user,
+			host: cfg.host,
+			user: cfg.user,
 			password: cfg.password,
 			database: cfg.name,
-			port:     cfg.port
+			port: cfg.port,
 		},
 		migrations: {
-			tableName: 'migrations'
-		}
+			tableName: "migrations",
+		},
 	};
-}
+};
 
-module.exports = require('knex')(generateDbConfig());
+export default knex(generateDbConfig());
diff --git a/backend/index.js b/backend/index.js
index d334a7c2..00285667 100644
--- a/backend/index.js
+++ b/backend/index.js
@@ -1,48 +1,47 @@
 #!/usr/bin/env node
 
-const schema = require('./schema');
-const logger = require('./logger').global;
+import app from "./app.js";
+import internalCertificate from "./internal/certificate.js";
+import internalIpRanges from "./internal/ip_ranges.js";
+import { global as logger } from "./logger.js";
+import { migrateUp } from "./migrate.js";
+import { getCompiledSchema } from "./schema/index.js";
+import setup from "./setup.js";
 
-const IP_RANGES_FETCH_ENABLED = process.env.IP_RANGES_FETCH_ENABLED !== 'false';
+const IP_RANGES_FETCH_ENABLED = process.env.IP_RANGES_FETCH_ENABLED !== "false";
 
-async function appStart () {
-	const migrate             = require('./migrate');
-	const setup               = require('./setup');
-	const app                 = require('./app');
-	const internalCertificate = require('./internal/certificate');
-	const internalIpRanges    = require('./internal/ip_ranges');
-
-	return migrate.latest()
+async function appStart() {
+	return migrateUp()
 		.then(setup)
-		.then(schema.getCompiledSchema)
+		.then(getCompiledSchema)
 		.then(() => {
-			if (IP_RANGES_FETCH_ENABLED) {
-				logger.info('IP Ranges fetch is enabled');
-				return internalIpRanges.fetch().catch((err) => {
-					logger.error('IP Ranges fetch failed, continuing anyway:', err.message);
-				});
-			} else {
-				logger.info('IP Ranges fetch is disabled by environment variable');
+			if (!IP_RANGES_FETCH_ENABLED) {
+				logger.info("IP Ranges fetch is disabled by environment variable");
+				return;
 			}
+			logger.info("IP Ranges fetch is enabled");
+			return internalIpRanges.fetch().catch((err) => {
+				logger.error("IP Ranges fetch failed, continuing anyway:", err.message);
+			});
 		})
 		.then(() => {
 			internalCertificate.initTimer();
 			internalIpRanges.initTimer();
 
 			const server = app.listen(3000, () => {
-				logger.info('Backend PID ' + process.pid + ' listening on port 3000 ...');
+				logger.info(`Backend PID ${process.pid} listening on port 3000 ...`);
 
-				process.on('SIGTERM', () => {
-					logger.info('PID ' + process.pid + ' received SIGTERM');
+				process.on("SIGTERM", () => {
+					logger.info(`PID ${process.pid} received SIGTERM`);
 					server.close(() => {
-						logger.info('Stopping.');
+						logger.info("Stopping.");
 						process.exit(0);
 					});
 				});
 			});
 		})
 		.catch((err) => {
-			logger.error(err.message, err);
+			logger.error(`Startup Error: ${err.message}`, err);
 			setTimeout(appStart, 1000);
 		});
 }
@@ -50,7 +49,6 @@ async function appStart () {
 try {
 	appStart();
 } catch (err) {
-	logger.error(err.message, err);
+	logger.fatal(err);
 	process.exit(1);
 }
-
diff --git a/backend/internal/access-list.js b/backend/internal/access-list.js
index 2407a0ac..60a7105d 100644
--- a/backend/internal/access-list.js
+++ b/backend/internal/access-list.js
@@ -1,103 +1,94 @@
-const _                     = require('lodash');
-const fs                    = require('node:fs');
-const batchflow             = require('batchflow');
-const logger                = require('../logger').access;
-const error                 = require('../lib/error');
-const utils                 = require('../lib/utils');
-const accessListModel       = require('../models/access_list');
-const accessListAuthModel   = require('../models/access_list_auth');
-const accessListClientModel = require('../models/access_list_client');
-const proxyHostModel        = require('../models/proxy_host');
-const internalAuditLog      = require('./audit-log');
-const internalNginx         = require('./nginx');
+import fs from "node:fs";
+import batchflow from "batchflow";
+import _ from "lodash";
+import errs from "../lib/error.js";
+import utils from "../lib/utils.js";
+import { access as logger } from "../logger.js";
+import accessListModel from "../models/access_list.js";
+import accessListAuthModel from "../models/access_list_auth.js";
+import accessListClientModel from "../models/access_list_client.js";
+import proxyHostModel from "../models/proxy_host.js";
+import internalAuditLog from "./audit-log.js";
+import internalNginx from "./nginx.js";
 
-function omissions () {
-	return ['is_deleted'];
-}
+const omissions = () => {
+	return ["is_deleted"];
+};
 
 const internalAccessList = {
-
 	/**
 	 * @param   {Access}  access
 	 * @param   {Object}  data
 	 * @returns {Promise}
 	 */
-	create: (access, data) => {
-		return access.can('access_lists:create', data)
-			.then((/*access_data*/) => {
-				return accessListModel
-					.query()
-					.insertAndFetch({
-						name:          data.name,
-						satisfy_any:   data.satisfy_any,
-						pass_auth:     data.pass_auth,
-						owner_user_id: access.token.getUserId(1)
-					})
-					.then(utils.omitRow(omissions()));
+	create: async (access, data) => {
+		await access.can("access_lists:create", data);
+		const row = await accessListModel
+			.query()
+			.insertAndFetch({
+				name: data.name,
+				satisfy_any: data.satisfy_any,
+				pass_auth: data.pass_auth,
+				owner_user_id: access.token.getUserId(1),
 			})
-			.then((row) => {
-				data.id = row.id;
+			.then(utils.omitRow(omissions()));
 
-				const promises = [];
+		data.id = row.id;
 
-				// Now add the items
-				data.items.map((item) => {
-					promises.push(accessListAuthModel
-						.query()
-						.insert({
-							access_list_id: row.id,
-							username:       item.username,
-							password:       item.password
-						})
-					);
-				});
+		const promises = [];
+		// Items
+		data.items.map((item) => {
+			promises.push(
+				accessListAuthModel.query().insert({
+					access_list_id: row.id,
+					username: item.username,
+					password: item.password,
+				}),
+			);
+			return true;
+		});
 
-				// Now add the clients
-				if (typeof data.clients !== 'undefined' && data.clients) {
-					data.clients.map((client) => {
-						promises.push(accessListClientModel
-							.query()
-							.insert({
-								access_list_id: row.id,
-								address:        client.address,
-								directive:      client.directive
-							})
-						);
-					});
-				}
+		// Clients
+		data.clients?.map((client) => {
+			promises.push(
+				accessListClientModel.query().insert({
+					access_list_id: row.id,
+					address: client.address,
+					directive: client.directive,
+				}),
+			);
+			return true;
+		});
 
-				return Promise.all(promises);
-			})
-			.then(() => {
-				// re-fetch with expansions
-				return internalAccessList.get(access, {
-					id:     data.id,
-					expand: ['owner', 'items', 'clients', 'proxy_hosts.access_list.[clients,items]']
-				}, true /* <- skip masking */);
-			})
-			.then((row) => {
-				// Audit log
-				data.meta = _.assign({}, data.meta || {}, row.meta);
+		await Promise.all(promises);
 
-				return internalAccessList.build(row)
-					.then(() => {
-						if (parseInt(row.proxy_host_count, 10)) {
-							return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts);
-						}
-					})
-					.then(() => {
-						// Add to audit log
-						return internalAuditLog.add(access, {
-							action:      'created',
-							object_type: 'access-list',
-							object_id:   row.id,
-							meta:        internalAccessList.maskItems(data)
-						});
-					})
-					.then(() => {
-						return internalAccessList.maskItems(row);
-					});
-			});
+		// re-fetch with expansions
+		const freshRow = await internalAccessList.get(
+			access,
+			{
+				id: data.id,
+				expand: ["owner", "items", "clients", "proxy_hosts.access_list.[clients,items]"],
+			},
+			true // skip masking
+		);
+
+		// Audit log
+		data.meta = _.assign({}, data.meta || {}, freshRow.meta);
+		await internalAccessList.build(freshRow);
+
+		if (Number.parseInt(freshRow.proxy_host_count, 10)) {
+			await internalNginx.bulkGenerateConfigs("proxy_host", freshRow.proxy_hosts);
+		}
+
+		// Add to audit log
+		await internalAuditLog.add(access, {
+			action: "created",
+			object_type: "access-list",
+			object_id: freshRow.id,
+			meta: internalAccessList.maskItems(data),
+		});
+
+		return internalAccessList.maskItems(freshRow);
 	},
 
 	/**
@@ -108,129 +99,107 @@ const internalAccessList = {
 	 * @param  {String}  [data.items]
 	 * @return {Promise}
 	 */
-	update: (access, data) => {
-		return access.can('access_lists:update', data.id)
-			.then((/*access_data*/) => {
-				return internalAccessList.get(access, {id: data.id});
-			})
-			.then((row) => {
-				if (row.id !== data.id) {
-					// Sanity check that something crazy hasn't happened
-					throw new error.InternalValidationError(`Access List could not be updated, IDs do not match: ${row.id} !== ${data.id}`);
-				}
-			})
-			.then(() => {
-				// patch name if specified
-				if (typeof data.name !== 'undefined' && data.name) {
-					return accessListModel
-						.query()
-						.where({id: data.id})
-						.patch({
-							name:        data.name,
-							satisfy_any: data.satisfy_any,
-							pass_auth:   data.pass_auth,
-						});
-				}
-			})
-			.then(() => {
-				// Check for items and add/update/remove them
-				if (typeof data.items !== 'undefined' && data.items) {
-					const promises      = [];
-					const items_to_keep = [];
+	update: async (access, data) => {
+		await access.can("access_lists:update", data.id);
+		const row = await internalAccessList.get(access, { id: data.id });
+		if (row.id !== data.id) {
+			// Sanity check that something crazy hasn't happened
+			throw new errs.InternalValidationError(
+				`Access List could not be updated, IDs do not match: ${row.id} !== ${data.id}`,
+			);
+		}
 
-					data.items.map((item) => {
-						if (item.password) {
-							promises.push(accessListAuthModel
-								.query()
-								.insert({
-									access_list_id: data.id,
-									username:       item.username,
-									password:       item.password
-								})
-							);
-						} else {
-							// This was supplied with an empty password, which means keep it but don't change the password
-							items_to_keep.push(item.username);
-						}
-					});
-
-					const query = accessListAuthModel
-						.query()
-						.delete()
-						.where('access_list_id', data.id);
-
-					if (items_to_keep.length) {
-						query.andWhere('username', 'NOT IN', items_to_keep);
-					}
-
-					return query
-						.then(() => {
-							// Add new items
-							if (promises.length) {
-								return Promise.all(promises);
-							}
-						});
-				}
-			})
-			.then(() => {
-				// Check for clients and add/update/remove them
-				if (typeof data.clients !== 'undefined' && data.clients) {
-					const promises = [];
-
-					data.clients.map((client) => {
-						if (client.address) {
-							promises.push(accessListClientModel
-								.query()
-								.insert({
-									access_list_id: data.id,
-									address:        client.address,
-									directive:      client.directive
-								})
-							);
-						}
-					});
-
-					const query = accessListClientModel
-						.query()
-						.delete()
-						.where('access_list_id', data.id);
-
-					return query
-						.then(() => {
-							// Add new items
-							if (promises.length) {
-								return Promise.all(promises);
-							}
-						});
-				}
-			})
-			.then(() => {
-				// Add to audit log
-				return internalAuditLog.add(access, {
-					action:      'updated',
-					object_type: 'access-list',
-					object_id:   data.id,
-					meta:        internalAccessList.maskItems(data)
-				});
-			})
-			.then(() => {
-				// re-fetch with expansions
-				return internalAccessList.get(access, {
-					id:     data.id,
-					expand: ['owner', 'items', 'clients', 'proxy_hosts.[certificate,access_list.[clients,items]]']
-				}, true /* <- skip masking */);
-			})
-			.then((row) => {
-				return internalAccessList.build(row)
-					.then(() => {
-						if (parseInt(row.proxy_host_count, 10)) {
-							return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts);
-						}
-					}).then(internalNginx.reload)
-					.then(() => {
-						return internalAccessList.maskItems(row);
-					});
+		// patch name if specified
+		if (typeof data.name !== "undefined" && data.name) {
+			await accessListModel.query().where({ id: data.id }).patch({
+				name: data.name,
+				satisfy_any: data.satisfy_any,
+				pass_auth: data.pass_auth,
 			});
+		}
+
+		// Check for items and add/update/remove them
+		if (typeof data.items !== "undefined" && data.items) {
+			const promises = [];
+			const itemsToKeep = [];
+
+			data.items.map((item) => {
+				if (item.password) {
+					promises.push(
+						accessListAuthModel.query().insert({
+							access_list_id: data.id,
+							username: item.username,
+							password: item.password,
+						}),
+					);
+				} else {
+					// This was supplied with an empty password, which means keep it but don't change the password
+					itemsToKeep.push(item.username);
+				}
+				return true;
+			});
+
+			const query = accessListAuthModel.query().delete().where("access_list_id", data.id);
+
+			if (itemsToKeep.length) {
+				query.andWhere("username", "NOT IN", itemsToKeep);
+			}
+
+			await query;
+			// Add new items
+			if (promises.length) {
+				await Promise.all(promises);
+			}
+		}
+
+		// Check for clients and add/update/remove them
+		if (typeof data.clients !== "undefined" && data.clients) {
+			const clientPromises = [];
+			data.clients.map((client) => {
+				if (client.address) {
+					clientPromises.push(
+						accessListClientModel.query().insert({
+							access_list_id: data.id,
+							address: client.address,
+							directive: client.directive,
+						}),
+					);
+				}
+				return true;
+			});
+
+			const query = accessListClientModel.query().delete().where("access_list_id", data.id);
+			await query;
+			// Add new clients
+			if (clientPromises.length) {
+				await Promise.all(clientPromises);
+			}
+		}
+
+		// Add to audit log
+		await internalAuditLog.add(access, {
+			action: "updated",
+			object_type: "access-list",
+			object_id: data.id,
+			meta: internalAccessList.maskItems(data),
+		});
+
+		// re-fetch with expansions
+		const freshRow = await internalAccessList.get(
+			access,
+			{
+				id: data.id,
+				expand: ["owner", "items", "clients", "proxy_hosts.[certificate,access_list.[clients,items]]"],
+			},
+			true // skip masking
+		);
+
+		await internalAccessList.build(freshRow);
+		if (Number.parseInt(freshRow.proxy_host_count, 10)) {
+			await internalNginx.bulkGenerateConfigs("proxy_host", freshRow.proxy_hosts);
+		}
+		await internalNginx.reload();
+		return internalAccessList.maskItems(freshRow);
 	},
 
 	/**
@@ -239,52 +208,50 @@ const internalAccessList = {
 	 * @param  {Integer}  data.id
 	 * @param  {Array}    [data.expand]
 	 * @param  {Array}    [data.omit]
-	 * @param  {Boolean}  [skip_masking]
+	 * @param  {Boolean}  [skipMasking]
 	 * @return {Promise}
 	 */
-	get: (access, data, skip_masking) => {
-		if (typeof data === 'undefined') {
-			data = {};
+	get: async (access, data, skipMasking) => {
+		const thisData = data || {};
+		const accessData = await access.can("access_lists:get", thisData.id);
+
+		const query = accessListModel
+			.query()
+			.select("access_list.*", accessListModel.raw("COUNT(proxy_host.id) as proxy_host_count"))
+			.leftJoin("proxy_host", function () {
+				this.on("proxy_host.access_list_id", "=", "access_list.id").andOn(
+					"proxy_host.is_deleted",
+					"=",
+					0,
+				);
+			})
+			.where("access_list.is_deleted", 0)
+			.andWhere("access_list.id", thisData.id)
+			.groupBy("access_list.id")
+			.allowGraph("[owner,items,clients,proxy_hosts.[certificate,access_list.[clients,items]]]")
+			.first();
+
+		if (accessData.permission_visibility !== "all") {
+			query.andWhere("access_list.owner_user_id", access.token.getUserId(1));
 		}
 
-		return access.can('access_lists:get', data.id)
-			.then((access_data) => {
-				const query = accessListModel
-					.query()
-					.select('access_list.*', accessListModel.raw('COUNT(proxy_host.id) as proxy_host_count'))
-					.leftJoin('proxy_host', function() {
-						this.on('proxy_host.access_list_id', '=', 'access_list.id')
-							.andOn('proxy_host.is_deleted', '=', 0);
-					})
-					.where('access_list.is_deleted', 0)
-					.andWhere('access_list.id', data.id)
-					.groupBy('access_list.id')
-					.allowGraph('[owner,items,clients,proxy_hosts.[certificate,access_list.[clients,items]]]')
-					.first();
+		if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
+			query.withGraphFetched(`[${thisData.expand.join(", ")}]`);
+		}
 
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('access_list.owner_user_id', access.token.getUserId(1));
-				}
+		let row = await query.then(utils.omitRow(omissions()));
 
-				if (typeof data.expand !== 'undefined' && data.expand !== null) {
-					query.withGraphFetched(`[${data.expand.join(', ')}]`);
-				}
-
-				return query.then(utils.omitRow(omissions()));
-			})
-			.then((row) => {
-				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				}
-				if (!skip_masking && typeof row.items !== 'undefined' && row.items) {
-					row = internalAccessList.maskItems(row);
-				}
-				// Custom omissions
-				if (typeof data.omit !== 'undefined' && data.omit !== null) {
-					row = _.omit(row, data.omit);
-				}
-				return row;
-			});
+		if (!row || !row.id) {
+			throw new errs.ItemNotFoundError(thisData.id);
+		}
+		if (!skipMasking && typeof row.items !== "undefined" && row.items) {
+			row = internalAccessList.maskItems(row);
+		}
+		// Custom omissions
+		if (typeof thisData.omit !== "undefined" && thisData.omit !== null) {
+			row = _.omit(row, thisData.omit);
+		}
+		return row;
 	},
 
 	/**
@@ -294,73 +261,64 @@ const internalAccessList = {
 	 * @param   {String}  [data.reason]
 	 * @returns {Promise}
 	 */
-	delete: (access, data) => {
-		return access.can('access_lists:delete', data.id)
-			.then(() => {
-				return internalAccessList.get(access, {id: data.id, expand: ['proxy_hosts', 'items', 'clients']});
-			})
-			.then((row) => {
-				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				}
+	delete: async (access, data) => {
+		await access.can("access_lists:delete", data.id);
+		const row = await internalAccessList.get(access, {
+			id: data.id,
+			expand: ["proxy_hosts", "items", "clients"],
+		});
 
-				// 1. update row to be deleted
-				// 2. update any proxy hosts that were using it (ignoring permissions)
-				// 3. reconfigure those hosts
-				// 4. audit log
+		if (!row || !row.id) {
+			throw new errs.ItemNotFoundError(data.id);
+		}
 
-				// 1. update row to be deleted
-				return accessListModel
-					.query()
-					.where('id', row.id)
-					.patch({
-						is_deleted: 1
-					})
-					.then(() => {
-						// 2. update any proxy hosts that were using it (ignoring permissions)
-						if (row.proxy_hosts) {
-							return proxyHostModel
-								.query()
-								.where('access_list_id', '=', row.id)
-								.patch({access_list_id: 0})
-								.then(() => {
-									// 3. reconfigure those hosts, then reload nginx
+		// 1. update row to be deleted
+		// 2. update any proxy hosts that were using it (ignoring permissions)
+		// 3. reconfigure those hosts
+		// 4. audit log
 
-									// set the access_list_id to zero for these items
-									row.proxy_hosts.map((_val, idx) => {
-										row.proxy_hosts[idx].access_list_id = 0;
-									});
+		// 1. update row to be deleted
+		await accessListModel
+			.query()
+			.where("id", row.id)
+			.patch({
+				is_deleted: 1,
+			});
 
-									return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts);
-								})
-								.then(() => {
-									return internalNginx.reload();
-								});
-						}
-					})
-					.then(() => {
-						// delete the htpasswd file
-						const htpasswd_file = internalAccessList.getFilename(row);
+		// 2. update any proxy hosts that were using it (ignoring permissions)
+		if (row.proxy_hosts) {
+			await proxyHostModel
+				.query()
+				.where("access_list_id", "=", row.id)
+				.patch({ access_list_id: 0 });
 
-						try {
-							fs.unlinkSync(htpasswd_file);
-						} catch (_err) {
-							// do nothing
-						}
-					})
-					.then(() => {
-						// 4. audit log
-						return internalAuditLog.add(access, {
-							action:      'deleted',
-							object_type: 'access-list',
-							object_id:   row.id,
-							meta:        _.omit(internalAccessList.maskItems(row), ['is_deleted', 'proxy_hosts'])
-						});
-					});
-			})
-			.then(() => {
+			// 3. reconfigure those hosts, then reload nginx
+			// set the access_list_id to zero for these items
+			row.proxy_hosts.map((_val, idx) => {
+				row.proxy_hosts[idx].access_list_id = 0;
 				return true;
 			});
+
+			await internalNginx.bulkGenerateConfigs("proxy_host", row.proxy_hosts);
+		}
+
+		await internalNginx.reload();
+
+		// delete the htpasswd file
+		try {
+			fs.unlinkSync(internalAccessList.getFilename(row));
+		} catch (_err) {
+			// do nothing
+		}
+
+		// 4. audit log
+		await internalAuditLog.add(access, {
+			action: "deleted",
+			object_type: "access-list",
+			object_id: row.id,
+			meta: _.omit(internalAccessList.maskItems(row), ["is_deleted", "proxy_hosts"]),
+		});
+		return true;
 	},
 
 	/**
@@ -368,75 +326,73 @@ const internalAccessList = {
 	 *
 	 * @param   {Access}  access
 	 * @param   {Array}   [expand]
-	 * @param   {String}  [search_query]
+	 * @param   {String}  [searchQuery]
 	 * @returns {Promise}
 	 */
-	getAll: (access, expand, search_query) => {
-		return access.can('access_lists:list')
-			.then((access_data) => {
-				const query = accessListModel
-					.query()
-					.select('access_list.*', accessListModel.raw('COUNT(proxy_host.id) as proxy_host_count'))
-					.leftJoin('proxy_host', function() {
-						this.on('proxy_host.access_list_id', '=', 'access_list.id')
-							.andOn('proxy_host.is_deleted', '=', 0);
-					})
-					.where('access_list.is_deleted', 0)
-					.groupBy('access_list.id')
-					.allowGraph('[owner,items,clients]')
-					.orderBy('access_list.name', 'ASC');
+	getAll: async (access, expand, searchQuery) => {
+		const accessData = await access.can("access_lists:list");
 
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('access_list.owner_user_id', access.token.getUserId(1));
-				}
-
-				// Query is used for searching
-				if (typeof search_query === 'string') {
-					query.where(function () {
-						this.where('name', 'like', `%${search_query}%`);
-					});
-				}
-
-				if (typeof expand !== 'undefined' && expand !== null) {
-					query.withGraphFetched(`[${expand.join(', ')}]`);
-				}
-
-				return query.then(utils.omitRows(omissions()));
+		const query = accessListModel
+			.query()
+			.select("access_list.*", accessListModel.raw("COUNT(proxy_host.id) as proxy_host_count"))
+			.leftJoin("proxy_host", function () {
+				this.on("proxy_host.access_list_id", "=", "access_list.id").andOn(
+					"proxy_host.is_deleted",
+					"=",
+					0,
+				);
 			})
-			.then((rows) => {
-				if (rows) {
-					rows.map((row, idx) => {
-						if (typeof row.items !== 'undefined' && row.items) {
-							rows[idx] = internalAccessList.maskItems(row);
-						}
-					});
-				}
+			.where("access_list.is_deleted", 0)
+			.groupBy("access_list.id")
+			.allowGraph("[owner,items,clients]")
+			.orderBy("access_list.name", "ASC");
 
-				return rows;
+		if (accessData.permission_visibility !== "all") {
+			query.andWhere("access_list.owner_user_id", access.token.getUserId(1));
+		}
+
+		// Query is used for searching
+		if (typeof searchQuery === "string") {
+			query.where(function () {
+				this.where("name", "like", `%${searchQuery}%`);
 			});
+		}
+
+		if (typeof expand !== "undefined" && expand !== null) {
+			query.withGraphFetched(`[${expand.join(", ")}]`);
+		}
+
+		const rows = await query.then(utils.omitRows(omissions()));
+		if (rows) {
+			rows.map((row, idx) => {
+				if (typeof row.items !== "undefined" && row.items) {
+					rows[idx] = internalAccessList.maskItems(row);
+				}
+				return true;
+			});
+		}
+		return rows;
 	},
 
 	/**
-	 * Report use
+	 * Count is used in reports
 	 *
-	 * @param   {Integer} user_id
+	 * @param   {Integer} userId
 	 * @param   {String}  visibility
 	 * @returns {Promise}
 	 */
-	getCount: (user_id, visibility) => {
+	getCount: async (userId, visibility) => {
 		const query = accessListModel
 			.query()
-			.count('id as count')
-			.where('is_deleted', 0);
+			.count("id as count")
+			.where("is_deleted", 0);
 
-		if (visibility !== 'all') {
-			query.andWhere('owner_user_id', user_id);
+		if (visibility !== "all") {
+			query.andWhere("owner_user_id", userId);
 		}
 
-		return query.first()
-			.then((row) => {
-				return parseInt(row.count, 10);
-			});
+		const row = await query.first();
+		return Number.parseInt(row.count, 10);
 	},
 
 	/**
@@ -444,21 +400,21 @@ const internalAccessList = {
 	 * @returns {Object}
 	 */
 	maskItems: (list) => {
-		if (list && typeof list.items !== 'undefined') {
+		if (list && typeof list.items !== "undefined") {
 			list.items.map((val, idx) => {
-				let repeat_for = 8;
-				let first_char = '*';
+				let repeatFor = 8;
+				let firstChar = "*";
 
-				if (typeof val.password !== 'undefined' && val.password) {
-					repeat_for = val.password.length - 1;
-					first_char = val.password.charAt(0);
+				if (typeof val.password !== "undefined" && val.password) {
+					repeatFor = val.password.length - 1;
+					firstChar = val.password.charAt(0);
 				}
 
-				list.items[idx].hint     = first_char + ('*').repeat(repeat_for);
-				list.items[idx].password = '';
+				list.items[idx].hint = firstChar + "*".repeat(repeatFor);
+				list.items[idx].password = "";
+				return true;
 			});
 		}
-
 		return list;
 	},
 
@@ -478,63 +434,55 @@ const internalAccessList = {
 	 * @param   {Array}   list.items
 	 * @returns {Promise}
 	 */
-	build: (list) => {
+	build: async (list) => {
 		logger.info(`Building Access file #${list.id} for: ${list.name}`);
 
-		return new Promise((resolve, reject) => {
-			const htpasswd_file = internalAccessList.getFilename(list);
+		const htpasswdFile = internalAccessList.getFilename(list);
 
-			// 1. remove any existing access file
-			try {
-				fs.unlinkSync(htpasswd_file);
-			} catch (_err) {
-				// do nothing
-			}
+		// 1. remove any existing access file
+		try {
+			fs.unlinkSync(htpasswdFile);
+		} catch (_err) {
+			// do nothing
+		}
 
-			// 2. create empty access file
-			try {
-				fs.writeFileSync(htpasswd_file, '', {encoding: 'utf8'});
-				resolve(htpasswd_file);
-			} catch (err) {
-				reject(err);
-			}
-		})
-			.then((htpasswd_file) => {
-				// 3. generate password for each user
-				if (list.items.length) {
-					return new Promise((resolve, reject) => {
-						batchflow(list.items).sequential()
-							.each((_i, item, next) => {
-								if (typeof item.password !== 'undefined' && item.password.length) {
-									logger.info(`Adding: ${item.username}`);
+		// 2. create empty access file
+		fs.writeFileSync(htpasswdFile, "", { encoding: "utf8" });
 
-									utils.execFile('openssl', ['passwd', '-apr1', item.password])
-										.then((res) => {
-											try {
-												fs.appendFileSync(htpasswd_file, `${item.username}:${res}\n`, {encoding: 'utf8'});
-											} catch (err) {
-												reject(err);
-											}
-											next();
-										})
-										.catch((err) => {
-											logger.error(err);
-											next(err);
-										});
-								}
-							})
-							.error((err) => {
-								logger.error(err);
-								reject(err);
-							})
-							.end((results) => {
-								logger.success(`Built Access file #${list.id} for: ${list.name}`);
-								resolve(results);
-							});
+		// 3. generate password for each user
+		if (list.items.length) {
+			await new Promise((resolve, reject) => {
+				batchflow(list.items).sequential()
+					.each((_i, item, next) => {
+						if (item.password?.length) {
+							logger.info(`Adding: ${item.username}`);
+
+							utils.execFile('openssl', ['passwd', '-apr1', item.password])
+								.then((res) => {
+									try {
+										fs.appendFileSync(htpasswdFile, `${item.username}:${res}\n`, {encoding: 'utf8'});
+									} catch (err) {
+										reject(err);
+									}
+									next();
+								})
+								.catch((err) => {
+									logger.error(err);
+									next(err);
+								});
+						}
+					})
+					.error((err) => {
+						logger.error(err);
+						reject(err);
+					})
+					.end((results) => {
+						logger.success(`Built Access file #${list.id} for: ${list.name}`);
+						resolve(results);
 					});
-				}
 			});
+		}
 	}
-};
+}
 
-module.exports = internalAccessList;
+export default internalAccessList;
diff --git a/backend/internal/audit-log.js b/backend/internal/audit-log.js
index 60bdd2ef..02700dc5 100644
--- a/backend/internal/audit-log.js
+++ b/backend/internal/audit-log.js
@@ -1,6 +1,6 @@
-const error            = require('../lib/error');
-const auditLogModel    = require('../models/audit-log');
-const {castJsonIfNeed} = require('../lib/helpers');
+import errs from "../lib/error.js";
+import { castJsonIfNeed } from "../lib/helpers.js";
+import auditLogModel from "../models/audit-log.js";
 
 const internalAuditLog = {
 
@@ -9,32 +9,60 @@ const internalAuditLog = {
 	 *
 	 * @param   {Access}  access
 	 * @param   {Array}   [expand]
-	 * @param   {String}  [search_query]
+	 * @param   {String}  [searchQuery]
 	 * @returns {Promise}
 	 */
-	getAll: (access, expand, search_query) => {
-		return access.can('auditlog:list')
-			.then(() => {
-				let query = auditLogModel
-					.query()
-					.orderBy('created_on', 'DESC')
-					.orderBy('id', 'DESC')
-					.limit(100)
-					.allowGraph('[user]');
+	getAll: async (access, expand, searchQuery) => {
+		await access.can("auditlog:list");
 
-				// Query is used for searching
-				if (typeof search_query === 'string' && search_query.length > 0) {
-					query.where(function () {
-						this.where(castJsonIfNeed('meta'), 'like', '%' + search_query + '%');
-					});
-				}
+		const query = auditLogModel
+			.query()
+			.orderBy("created_on", "DESC")
+			.orderBy("id", "DESC")
+			.limit(100)
+			.allowGraph("[user]");
 
-				if (typeof expand !== 'undefined' && expand !== null) {
-					query.withGraphFetched('[' + expand.join(', ') + ']');
-				}
-
-				return query;
+		// Query is used for searching
+		if (typeof searchQuery === "string" && searchQuery.length > 0) {
+			query.where(function () {
+				this.where(castJsonIfNeed("meta"), "like", `%${searchQuery}%`);
 			});
+		}
+
+		if (typeof expand !== "undefined" && expand !== null) {
+			query.withGraphFetched(`[${expand.join(", ")}]`);
+		}
+
+		return await query;
+	},
+
+	/**
+	 * @param  {Access}   access
+	 * @param  {Object}   [data]
+	 * @param  {Integer}  [data.id]          Defaults to the token user
+	 * @param  {Array}    [data.expand]
+	 * @return {Promise}
+	 */
+	get: async (access, data) => {
+		await access.can("auditlog:list");
+
+		const query = auditLogModel
+			.query()
+			.andWhere("id", data.id)
+			.allowGraph("[user]")
+			.first();
+
+		if (typeof data.expand !== "undefined" && data.expand !== null) {
+			query.withGraphFetched(`[${data.expand.join(", ")}]`);
+		}
+
+		const row = await query;
+
+		if (!row?.id) {
+			throw new errs.ItemNotFoundError(data.id);
+		}
+
+		return row;
 	},
 
 	/**
@@ -51,29 +79,24 @@ const internalAuditLog = {
 	 * @param   {Object}   [data.meta]
 	 * @returns {Promise}
 	 */
-	add: (access, data) => {
-		return new Promise((resolve, reject) => {
-			// Default the user id
-			if (typeof data.user_id === 'undefined' || !data.user_id) {
-				data.user_id = access.token.getUserId(1);
-			}
+	add: async (access, data) => {
+		if (typeof data.user_id === "undefined" || !data.user_id) {
+			data.user_id = access.token.getUserId(1);
+		}
 
-			if (typeof data.action === 'undefined' || !data.action) {
-				reject(new error.InternalValidationError('Audit log entry must contain an Action'));
-			} else {
-				// Make sure at least 1 of the IDs are set and action
-				resolve(auditLogModel
-					.query()
-					.insert({
-						user_id:     data.user_id,
-						action:      data.action,
-						object_type: data.object_type || '',
-						object_id:   data.object_id || 0,
-						meta:        data.meta || {}
-					}));
-			}
+		if (typeof data.action === "undefined" || !data.action) {
+			throw new errs.InternalValidationError("Audit log entry must contain an Action");
+		}
+
+		// Make sure at least 1 of the IDs are set and action
+		return await auditLogModel.query().insert({
+			user_id: data.user_id,
+			action: data.action,
+			object_type: data.object_type || "",
+			object_id: data.object_id || 0,
+			meta: data.meta || {},
 		});
-	}
+	},
 };
 
-module.exports = internalAuditLog;
+export default internalAuditLog;
diff --git a/backend/internal/certificate.js b/backend/internal/certificate.js
index 55e74c3e..ca02e2d9 100644
--- a/backend/internal/certificate.js
+++ b/backend/internal/certificate.js
@@ -1,44 +1,45 @@
-const _                = require('lodash');
-const fs               = require('node:fs');
-const https            = require('node:https');
-const tempWrite        = require('temp-write');
-const moment           = require('moment');
-const archiver         = require('archiver');
-const path             = require('path');
-const { isArray }      = require('lodash');
-const logger           = require('../logger').ssl;
-const config           = require('../lib/config');
-const error            = require('../lib/error');
-const utils            = require('../lib/utils');
-const certbot          = require('../lib/certbot');
-const certificateModel = require('../models/certificate');
-const tokenModel       = require('../models/token');
-const dnsPlugins       = require('../global/certbot-dns-plugins.json');
-const internalAuditLog = require('./audit-log');
-const internalNginx    = require('./nginx');
-const internalHost     = require('./host');
+import fs from "node:fs";
+import https from "node:https";
+import path from "node:path";
+import archiver from "archiver";
+import _ from "lodash";
+import moment from "moment";
+import tempWrite from "temp-write";
+import dnsPlugins from "../certbot/dns-plugins.json" with { type: "json" };
+import { installPlugin } from "../lib/certbot.js";
+import { useLetsencryptServer, useLetsencryptStaging } from "../lib/config.js";
+import error from "../lib/error.js";
+import utils from "../lib/utils.js";
+import { ssl as logger } from "../logger.js";
+import certificateModel from "../models/certificate.js";
+import tokenModel from "../models/token.js";
+import userModel from "../models/user.js";
+import internalAuditLog from "./audit-log.js";
+import internalHost from "./host.js";
+import internalNginx from "./nginx.js";
 
+const letsencryptConfig = "/etc/letsencrypt.ini";
+const certbotCommand = "certbot";
+const certbotLogsDir = "/data/logs";
+const certbotWorkDir = "/tmp/letsencrypt-lib";
 
-const letsencryptStaging = config.useLetsencryptStaging();
-const letsencryptServer  = config.useLetsencryptServer();
-const letsencryptConfig  = '/etc/letsencrypt.ini';
-const certbotCommand     = 'certbot';
-
-function omissions() {
-	return ['is_deleted', 'owner.is_deleted'];
-}
+const omissions = () => {
+	return ["is_deleted", "owner.is_deleted", "meta.dns_provider_credentials"];
+};
 
 const internalCertificate = {
-
-	allowedSslFiles:         ['certificate', 'certificate_key', 'intermediate_certificate'],
-	intervalTimeout:         1000 * 60 * 60, // 1 hour
-	interval:                null,
-	intervalProcessing:      false,
-	renewBeforeExpirationBy: [30, 'days'],
+	allowedSslFiles: ["certificate", "certificate_key", "intermediate_certificate"],
+	intervalTimeout: 1000 * 60 * 60, // 1 hour
+	interval: null,
+	intervalProcessing: false,
+	renewBeforeExpirationBy: [30, "days"],
 
 	initTimer: () => {
-		logger.info('Let\'s Encrypt Renewal Timer initialized');
-		internalCertificate.interval = setInterval(internalCertificate.processExpiringHosts, internalCertificate.intervalTimeout);
+		logger.info("Let's Encrypt Renewal Timer initialized");
+		internalCertificate.interval = setInterval(
+			internalCertificate.processExpiringHosts,
+			internalCertificate.intervalTimeout,
+		);
 		// And do this now as well
 		internalCertificate.processExpiringHosts();
 	},
@@ -49,16 +50,20 @@ const internalCertificate = {
 	processExpiringHosts: () => {
 		if (!internalCertificate.intervalProcessing) {
 			internalCertificate.intervalProcessing = true;
-			logger.info(`Renewing SSL certs expiring within ${internalCertificate.renewBeforeExpirationBy[0]} ${internalCertificate.renewBeforeExpirationBy[1]} ...`);
+			logger.info(
+				`Renewing SSL certs expiring within ${internalCertificate.renewBeforeExpirationBy[0]} ${internalCertificate.renewBeforeExpirationBy[1]} ...`,
+			);
 
-			const expirationThreshold = moment().add(internalCertificate.renewBeforeExpirationBy[0], internalCertificate.renewBeforeExpirationBy[1]).format('YYYY-MM-DD HH:mm:ss');
+			const expirationThreshold = moment()
+				.add(internalCertificate.renewBeforeExpirationBy[0], internalCertificate.renewBeforeExpirationBy[1])
+				.format("YYYY-MM-DD HH:mm:ss");
 
 			// Fetch all the letsencrypt certs from the db that will expire within the configured threshold
 			certificateModel
 				.query()
-				.where('is_deleted', 0)
-				.andWhere('provider', 'letsencrypt')
-				.andWhere('expires_on', '<', expirationThreshold)
+				.where("is_deleted", 0)
+				.andWhere("provider", "letsencrypt")
+				.andWhere("expires_on", "<", expirationThreshold)
 				.then((certificates) => {
 					if (!certificates || !certificates.length) {
 						return null;
@@ -77,9 +82,9 @@ const internalCertificate = {
 									{
 										can: () =>
 											Promise.resolve({
-												permission_visibility: 'all',
+												permission_visibility: "all",
 											}),
-										token: new tokenModel(),
+										token: tokenModel(),
 									},
 									{ id: certificate.id },
 								)
@@ -93,7 +98,7 @@ const internalCertificate = {
 					return sequence;
 				})
 				.then(() => {
-					logger.info('Completed SSL cert renew process');
+					logger.info("Completed SSL cert renew process");
 					internalCertificate.intervalProcessing = false;
 				})
 				.catch((err) => {
@@ -108,143 +113,126 @@ const internalCertificate = {
 	 * @param   {Object}  data
 	 * @returns {Promise}
 	 */
-	create: (access, data) => {
-		return access.can('certificates:create', data)
-			.then(() => {
-				data.owner_user_id = access.token.getUserId(1);
+	create: async (access, data) => {
+		await access.can("certificates:create", data);
+		data.owner_user_id = access.token.getUserId(1);
 
-				if (data.provider === 'letsencrypt') {
-					data.nice_name = data.domain_names.join(', ');
+		if (data.provider === "letsencrypt") {
+			data.nice_name = data.domain_names.join(", ");
+		}
+
+		// this command really should clean up and delete the cert if it can't fully succeed
+		const certificate = await certificateModel.query().insertAndFetch(data);
+
+		try {
+			if (certificate.provider === "letsencrypt") {
+				// Request a new Cert from LE. Let the fun begin.
+
+				// 1. Find out any hosts that are using any of the hostnames in this cert
+				// 2. Disable them in nginx temporarily
+				// 3. Generate the LE config
+				// 4. Request cert
+				// 5. Remove LE config
+				// 6. Re-instate previously disabled hosts
+
+				// 1. Find out any hosts that are using any of the hostnames in this cert
+				const inUseResult = await internalHost.getHostsWithDomains(certificate.domain_names);
+
+				// 2. Disable them in nginx temporarily
+				await internalCertificate.disableInUseHosts(inUseResult);
+
+				const user = await userModel.query().where("is_deleted", 0).andWhere("id", data.owner_user_id).first();
+				if (!user || !user.email) {
+					throw new error.ValidationError(
+						"A valid email address must be set on your user account to use Let's Encrypt",
+					);
 				}
 
-				return certificateModel
-					.query()
-					.insertAndFetch(data)
-					.then(utils.omitRow(omissions()));
-			})
-			.then((certificate) => {
-				if (certificate.provider === 'letsencrypt') {
-					// Request a new Cert from LE. Let the fun begin.
-
-					// 1. Find out any hosts that are using any of the hostnames in this cert
-					// 2. Disable them in nginx temporarily
-					// 3. Generate the LE config
-					// 4. Request cert
-					// 5. Remove LE config
-					// 6. Re-instate previously disabled hosts
-
-					// 1. Find out any hosts that are using any of the hostnames in this cert
-					return internalHost.getHostsWithDomains(certificate.domain_names)
-						.then((in_use_result) => {
-							// 2. Disable them in nginx temporarily
-							return internalCertificate.disableInUseHosts(in_use_result)
-								.then(() => {
-									return in_use_result;
-								});
-						})
-						.then((in_use_result) => {
-							// With DNS challenge no config is needed, so skip 3 and 5.
-							if (certificate.meta.dns_challenge) {
-								return internalNginx.reload().then(() => {
-									// 4. Request cert
-									return internalCertificate.requestLetsEncryptSslWithDnsChallenge(certificate);
-								})
-									.then(internalNginx.reload)
-									.then(() => {
-										// 6. Re-instate previously disabled hosts
-										return internalCertificate.enableInUseHosts(in_use_result);
-									})
-									.then(() => {
-										return certificate;
-									})
-									.catch((err) => {
-										// In the event of failure, revert things and throw err back
-										return internalCertificate.enableInUseHosts(in_use_result)
-											.then(internalNginx.reload)
-											.then(() => {
-												throw err;
-											});
-									});
-							} else {
-								// 3. Generate the LE config
-								return internalNginx.generateLetsEncryptRequestConfig(certificate)
-									.then(internalNginx.reload)
-									.then(async() => await new Promise((r) => setTimeout(r, 5000)))
-									.then(() => {
-										// 4. Request cert
-										return internalCertificate.requestLetsEncryptSsl(certificate);
-									})
-									.then(() => {
-										// 5. Remove LE config
-										return internalNginx.deleteLetsEncryptRequestConfig(certificate);
-									})
-									.then(internalNginx.reload)
-									.then(() => {
-										// 6. Re-instate previously disabled hosts
-										return internalCertificate.enableInUseHosts(in_use_result);
-									})
-									.then(() => {
-										return certificate;
-									})
-									.catch((err) => {
-										// In the event of failure, revert things and throw err back
-										return internalNginx.deleteLetsEncryptRequestConfig(certificate)
-											.then(() => {
-												return internalCertificate.enableInUseHosts(in_use_result);
-											})
-											.then(internalNginx.reload)
-											.then(() => {
-												throw err;
-											});
-									});
-							}
-						})
-						.then(() => {
-							// At this point, the letsencrypt cert should exist on disk.
-							// Lets get the expiry date from the file and update the row silently
-							return internalCertificate.getCertificateInfoFromFile(`${internalCertificate.getLiveCertPath(certificate.id)}/fullchain.pem`)
-								.then((cert_info) => {
-									return certificateModel
-										.query()
-										.patchAndFetchById(certificate.id, {
-											expires_on: moment(cert_info.dates.to, 'X').format('YYYY-MM-DD HH:mm:ss')
-										})
-										.then(utils.omitRow(omissions()))
-										.then((saved_row) => {
-											// Add cert data for audit log
-											saved_row.meta = _.assign({}, saved_row.meta, {
-												letsencrypt_certificate: cert_info
-											});
-
-											return saved_row;
-										});
-								});
-						}).catch(async (error) => {
-							// Delete the certificate from the database if it was not created successfully
-							await certificateModel
-								.query()
-								.deleteById(certificate.id);
-
-							throw error;
-						});
+				// With DNS challenge no config is needed, so skip 3 and 5.
+				if (certificate.meta?.dns_challenge) {
+					try {
+						await internalNginx.reload();
+						// 4. Request cert
+						await internalCertificate.requestLetsEncryptSslWithDnsChallenge(certificate, user.email);
+						await internalNginx.reload();
+						// 6. Re-instate previously disabled hosts
+						await internalCertificate.enableInUseHosts(inUseResult);
+					} catch (err) {
+						// In the event of failure, revert things and throw err back
+						await internalCertificate.enableInUseHosts(inUseResult);
+						await internalNginx.reload();
+						throw err;
+					}
 				} else {
-					return certificate;
+					// 3. Generate the LE config
+					try {
+						await internalNginx.generateLetsEncryptRequestConfig(certificate);
+						await internalNginx.reload();
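+						// Brief pause after the reload so nginx is serving the ACME challenge location before certbot runs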
+						await new Promise((resolve) => setTimeout(resolve, 5000));
+						// 4. Request cert
+						await internalCertificate.requestLetsEncryptSsl(certificate, user.email);
+						// 5. Remove LE config
+						await internalNginx.deleteLetsEncryptRequestConfig(certificate);
+						await internalNginx.reload();
+						// 6. Re-instate previously disabled hosts
+						await internalCertificate.enableInUseHosts(inUseResult);
+					} catch (err) {
+						// In the event of failure, revert things and throw err back
+						await internalNginx.deleteLetsEncryptRequestConfig(certificate);
+						await internalCertificate.enableInUseHosts(inUseResult);
+						await internalNginx.reload();
+						throw err;
+					}
 				}
-			}).then((certificate) => {
 
-				data.meta = _.assign({}, data.meta || {}, certificate.meta);
+				// At this point, the letsencrypt cert should exist on disk.
+				// Let's get the expiry date from the file and update the row silently
+				try {
+					const certInfo = await internalCertificate.getCertificateInfoFromFile(
+						`${internalCertificate.getLiveCertPath(certificate.id)}/fullchain.pem`,
+					);
+					const savedRow = await certificateModel
+						.query()
+						.patchAndFetchById(certificate.id, {
+							expires_on: moment(certInfo.dates.to, "X").format("YYYY-MM-DD HH:mm:ss"),
+						})
+						.then(utils.omitRow(omissions()));
 
-				// Add to audit log
-				return internalAuditLog.add(access, {
-					action:      'created',
-					object_type: 'certificate',
-					object_id:   certificate.id,
-					meta:        data
-				})
-					.then(() => {
-						return certificate;
+					// Add cert data for audit log
+					savedRow.meta = _.assign({}, savedRow.meta, {
+						letsencrypt_certificate: certInfo,
 					});
-			});
+
+					await internalCertificate.addCreatedAuditLog(access, certificate.id, savedRow);
+
+					return savedRow;
+				} catch (err) {
+					// Delete the certificate from the database if it was not created successfully
+					await certificateModel.query().deleteById(certificate.id);
+					throw err;
+				}
+			}
+		} catch (err) {
+			// Delete the certificate here. This is a hard delete, since it never existed properly
+			await certificateModel.query().deleteById(certificate.id);
+			throw err;
+		}
+
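+		// Non-letsencrypt (custom) certificates fall through to here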
+		data.meta = _.assign({}, data.meta || {}, certificate.meta);
+
+		// Add to audit log
+		await internalCertificate.addCreatedAuditLog(access, certificate.id, utils.omitRow(omissions())(data));
+
+		return utils.omitRow(omissions())(certificate);
+	},
+
+	addCreatedAuditLog: async (access, certificate_id, meta) => {
+		await internalAuditLog.add(access, {
+			action: "created",
+			object_type: "certificate",
+			object_id: certificate_id,
+			meta: meta,
+		});
 	},
 
 	/**
@@ -255,42 +243,39 @@ const internalCertificate = {
 	 * @param  {String}  [data.name]
 	 * @return {Promise}
 	 */
-	update: (access, data) => {
-		return access.can('certificates:update', data.id)
-			.then((/*access_data*/) => {
-				return internalCertificate.get(access, {id: data.id});
-			})
-			.then((row) => {
-				if (row.id !== data.id) {
-					// Sanity check that something crazy hasn't happened
-					throw new error.InternalValidationError(`Certificate could not be updated, IDs do not match: ${row.id} !== ${data.id}`);
-				}
+	update: async (access, data) => {
+		await access.can("certificates:update", data.id);
+		const row = await internalCertificate.get(access, { id: data.id });
 
-				return certificateModel
-					.query()
-					.patchAndFetchById(row.id, data)
-					.then(utils.omitRow(omissions()))
-					.then((saved_row) => {
-						saved_row.meta = internalCertificate.cleanMeta(saved_row.meta);
-						data.meta      = internalCertificate.cleanMeta(data.meta);
+		if (row.id !== data.id) {
+			// Sanity check that something crazy hasn't happened
+			throw new error.InternalValidationError(
+				`Certificate could not be updated, IDs do not match: ${row.id} !== ${data.id}`,
+			);
+		}
 
-						// Add row.nice_name for custom certs
-						if (saved_row.provider === 'other') {
-							data.nice_name = saved_row.nice_name;
-						}
+		const savedRow = await certificateModel
+			.query()
+			.patchAndFetchById(row.id, data)
+			.then(utils.omitRow(omissions()));
 
-						// Add to audit log
-						return internalAuditLog.add(access, {
-							action:      'updated',
-							object_type: 'certificate',
-							object_id:   row.id,
-							meta:        _.omit(data, ['expires_on']) // this prevents json circular reference because expires_on might be raw
-						})
-							.then(() => {
-								return saved_row;
-							});
-					});
-			});
+		savedRow.meta = internalCertificate.cleanMeta(savedRow.meta);
+		data.meta = internalCertificate.cleanMeta(data.meta);
+
+		// Add row.nice_name for custom certs
+		if (savedRow.provider === "other") {
+			data.nice_name = savedRow.nice_name;
+		}
+
+		// Add to audit log
+		await internalAuditLog.add(access, {
+			action: "updated",
+			object_type: "certificate",
+			object_id: row.id,
+			meta: _.omit(data, ["expires_on"]), // this prevents json circular reference because expires_on might be raw
+		});
+
+		return savedRow;
 	},
 
 	/**
@@ -301,43 +286,49 @@ const internalCertificate = {
 	 * @param  {Array}    [data.omit]
 	 * @return {Promise}
 	 */
-	get: (access, data) => {
-		if (typeof data === 'undefined') {
-			data = {};
+	get: async (access, data) => {
+		const accessData = await access.can("certificates:get", data.id);
+		const query = certificateModel
+			.query()
+			.where("is_deleted", 0)
+			.andWhere("id", data.id)
+			.allowGraph("[owner,proxy_hosts,redirection_hosts,dead_hosts,streams]")
+			.first();
+
+		if (accessData.permission_visibility !== "all") {
+			query.andWhere("owner_user_id", access.token.getUserId(1));
 		}
 
-		return access.can('certificates:get', data.id)
-			.then((access_data) => {
-				const query = certificateModel
-					.query()
-					.where('is_deleted', 0)
-					.andWhere('id', data.id)
-					.allowGraph('[owner]')
-					.allowGraph('[proxy_hosts]')
-					.allowGraph('[redirection_hosts]')
-					.allowGraph('[dead_hosts]')
-					.first();
+		if (typeof data.expand !== "undefined" && data.expand !== null) {
+			query.withGraphFetched(`[${data.expand.join(", ")}]`);
+		}
 
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('owner_user_id', access.token.getUserId(1));
-				}
+		const row = await query.then(utils.omitRow(omissions()));
+		if (!row || !row.id) {
+			throw new error.ItemNotFoundError(data.id);
+		}
+		// Custom omissions
+		if (typeof data.omit !== "undefined" && data.omit !== null) {
+			return _.omit(row, [...data.omit]);
+		}
 
-				if (typeof data.expand !== 'undefined' && data.expand !== null) {
-					query.withGraphFetched(`[${data.expand.join(', ')}]`);
-				}
+		return internalCertificate.cleanExpansions(row);
+	},
 
-				return query.then(utils.omitRow(omissions()));
-			})
-			.then((row) => {
-				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				}
-				// Custom omissions
-				if (typeof data.omit !== 'undefined' && data.omit !== null) {
-					row = _.omit(row, data.omit);
-				}
-				return row;
-			});
+	cleanExpansions: (row) => {
+		if (typeof row.proxy_hosts !== "undefined") {
+			row.proxy_hosts = utils.omitRows(["is_deleted"])(row.proxy_hosts);
+		}
+		if (typeof row.redirection_hosts !== "undefined") {
+			row.redirection_hosts = utils.omitRows(["is_deleted"])(row.redirection_hosts);
+		}
+		if (typeof row.dead_hosts !== "undefined") {
+			row.dead_hosts = utils.omitRows(["is_deleted"])(row.dead_hosts);
+		}
+		if (typeof row.streams !== "undefined") {
+			row.streams = utils.omitRows(["is_deleted"])(row.streams);
+		}
+		return row;
 	},
 
 	/**
@@ -346,61 +337,50 @@ const internalCertificate = {
 	 * @param   {Number}  data.id
 	 * @returns {Promise}
 	 */
-	download: (access, data) => {
-		return new Promise((resolve, reject) => {
-			access.can('certificates:get', data)
-				.then(() => {
-					return internalCertificate.get(access, data);
-				})
-				.then((certificate) => {
-					if (certificate.provider === 'letsencrypt') {
-						const zipDirectory = internalCertificate.getLiveCertPath(data.id);
+	download: async (access, data) => {
+		await access.can("certificates:get", data);
+		const certificate = await internalCertificate.get(access, data);
+		if (certificate.provider === "letsencrypt") {
+			const zipDirectory = internalCertificate.getLiveCertPath(data.id);
+			if (!fs.existsSync(zipDirectory)) {
+				throw new error.ItemNotFoundError(`Certificate ${certificate.nice_name} does not exist`);
+			}
 
-						if (!fs.existsSync(zipDirectory)) {
-							throw new error.ItemNotFoundError(`Certificate ${certificate.nice_name} does not exists`);
-						}
+			const certFiles = fs
+				.readdirSync(zipDirectory)
+				.filter((fn) => fn.endsWith(".pem"))
+				.map((fn) => fs.realpathSync(path.join(zipDirectory, fn)));
 
-						const certFiles    = fs.readdirSync(zipDirectory)
-							.filter((fn) => fn.endsWith('.pem'))
-							.map((fn) => fs.realpathSync(path.join(zipDirectory, fn)));
-						const downloadName = `npm-${data.id}-${Date.now()}.zip`;
-						const opName       = `/tmp/${downloadName}`;
-						internalCertificate.zipFiles(certFiles, opName)
-							.then(() => {
-								logger.debug('zip completed : ', opName);
-								const resp = {
-									fileName: opName
-								};
-								resolve(resp);
-							}).catch((err) => reject(err));
-					} else {
-						throw new error.ValidationError('Only Let\'sEncrypt certificates can be downloaded');
-					}
-				}).catch((err) => reject(err));
-		});
+			const downloadName = `npm-${data.id}-${Date.now()}.zip`;
+			const opName = `/tmp/${downloadName}`;
+
+			await internalCertificate.zipFiles(certFiles, opName);
+			logger.debug("zip completed : ", opName);
+			return {
+				fileName: opName,
+			};
+		}
+		throw new error.ValidationError("Only Let's Encrypt certificates can be downloaded");
 	},
 
 	/**
-	* @param   {String}  source
-	* @param   {String}  out
-	* @returns {Promise}
-	*/
-	zipFiles(source, out) {
-		const archive = archiver('zip', { zlib: { level: 9 } });
-		const stream  = fs.createWriteStream(out);
+	 * @param   {String}  source
+	 * @param   {String}  out
+	 * @returns {Promise}
+	 */
+	zipFiles: async (source, out) => {
+		const archive = archiver("zip", { zlib: { level: 9 } });
+		const stream = fs.createWriteStream(out);
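+		// Add each file to the archive and resolve once the output stream has been fully written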
 
 		return new Promise((resolve, reject) => {
-			source
-				.map((fl) => {
-					const fileName = path.basename(fl);
-					logger.debug(fl, 'added to certificate zip');
-					archive.file(fl, { name: fileName });
-				});
-			archive
-				.on('error', (err) => reject(err))
-				.pipe(stream);
-
-			stream.on('close', () => resolve());
+			source.map((fl) => {
+				const fileName = path.basename(fl);
+				logger.debug(fl, "added to certificate zip");
+				archive.file(fl, { name: fileName });
+				return true;
+			});
+			archive.on("error", (err) => reject(err)).pipe(stream);
+			stream.on("close", () => resolve());
 			archive.finalize();
 		});
 	},
@@ -412,43 +392,33 @@ const internalCertificate = {
 	 * @param {String}  [data.reason]
 	 * @returns {Promise}
 	 */
-	delete: (access, data) => {
-		return access.can('certificates:delete', data.id)
-			.then(() => {
-				return internalCertificate.get(access, {id: data.id});
-			})
-			.then((row) => {
-				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				}
+	delete: async (access, data) => {
+		await access.can("certificates:delete", data.id);
+		const row = await internalCertificate.get(access, { id: data.id });
 
-				return certificateModel
-					.query()
-					.where('id', row.id)
-					.patch({
-						is_deleted: 1
-					})
-					.then(() => {
-						// Add to audit log
-						row.meta = internalCertificate.cleanMeta(row.meta);
+		if (!row || !row.id) {
+			throw new error.ItemNotFoundError(data.id);
+		}
 
-						return internalAuditLog.add(access, {
-							action:      'deleted',
-							object_type: 'certificate',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
-						});
-					})
-					.then(() => {
-						if (row.provider === 'letsencrypt') {
-							// Revoke the cert
-							return internalCertificate.revokeLetsEncryptSsl(row);
-						}
-					});
-			})
-			.then(() => {
-				return true;
-			});
+		await certificateModel.query().where("id", row.id).patch({
+			is_deleted: 1,
+		});
+
+		// Add to audit log
+		row.meta = internalCertificate.cleanMeta(row.meta);
+
+		await internalAuditLog.add(access, {
+			action: "deleted",
+			object_type: "certificate",
+			object_id: row.id,
+			meta: _.omit(row, omissions()),
+		});
+
+		if (row.provider === "letsencrypt") {
+			// Revoke the cert
+			await internalCertificate.revokeLetsEncryptSsl(row);
+		}
+		return true;
 	},
 
 	/**
@@ -456,81 +426,76 @@ const internalCertificate = {
 	 *
 	 * @param   {Access}  access
 	 * @param   {Array}   [expand]
-	 * @param   {String}  [search_query]
+	 * @param   {String}  [searchQuery]
 	 * @returns {Promise}
 	 */
-	getAll: (access, expand, search_query) => {
-		return access.can('certificates:list')
-			.then((access_data) => {
-				const query = certificateModel
-					.query()
-					.where('is_deleted', 0)
-					.groupBy('id')
-					.allowGraph('[owner]')
-					.allowGraph('[proxy_hosts]')
-					.allowGraph('[redirection_hosts]')
-					.allowGraph('[dead_hosts]')
-					.orderBy('nice_name', 'ASC');
+	getAll: async (access, expand, searchQuery) => {
+		const accessData = await access.can("certificates:list");
 
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('owner_user_id', access.token.getUserId(1));
-				}
+		const query = certificateModel
+			.query()
+			.where("is_deleted", 0)
+			.groupBy("id")
+			.allowGraph("[owner,proxy_hosts,redirection_hosts,dead_hosts,streams]")
+			.orderBy("nice_name", "ASC");
 
-				// Query is used for searching
-				if (typeof search_query === 'string') {
-					query.where(function () {
-						this.where('nice_name', 'like', `%${search_query}%`);
-					});
-				}
+		if (accessData.permission_visibility !== "all") {
+			query.andWhere("owner_user_id", access.token.getUserId(1));
+		}
 
-				if (typeof expand !== 'undefined' && expand !== null) {
-					query.withGraphFetched(`[${expand.join(', ')}]`);
-				}
-
-				return query.then(utils.omitRows(omissions()));
+		// Query is used for searching
+		if (typeof searchQuery === "string") {
+			query.where(function () {
+				this.where("nice_name", "like", `%${searchQuery}%`);
 			});
+		}
+
+		if (typeof expand !== "undefined" && expand !== null) {
+			query.withGraphFetched(`[${expand.join(", ")}]`);
+		}
+
+		const r = await query.then(utils.omitRows(omissions()));
+		for (let i = 0; i < r.length; i++) {
+			r[i] = internalCertificate.cleanExpansions(r[i]);
+		}
+		return r;
 	},
 
 	/**
 	 * Report use
 	 *
-	 * @param   {Number}  user_id
+	 * @param   {Number}  userId
 	 * @param   {String}  visibility
 	 * @returns {Promise}
 	 */
-	getCount: (user_id, visibility) => {
-		const query = certificateModel
-			.query()
-			.count('id as count')
-			.where('is_deleted', 0);
+	getCount: async (userId, visibility) => {
+		const query = certificateModel.query().count("id as count").where("is_deleted", 0);
 
-		if (visibility !== 'all') {
-			query.andWhere('owner_user_id', user_id);
+		if (visibility !== "all") {
+			query.andWhere("owner_user_id", userId);
 		}
 
-		return query.first()
-			.then((row) => {
-				return parseInt(row.count, 10);
-			});
+		const row = await query.first();
+		return Number.parseInt(row.count, 10);
 	},
 
 	/**
 	 * @param   {Object} certificate
 	 * @returns {Promise}
 	 */
-	writeCustomCert: (certificate) => {
-		logger.info('Writing Custom Certificate:', certificate);
+	writeCustomCert: async (certificate) => {
+		logger.info("Writing Custom Certificate:", certificate);
 
 		const dir = `/data/custom_ssl/npm-${certificate.id}`;
 
 		return new Promise((resolve, reject) => {
-			if (certificate.provider === 'letsencrypt') {
-				reject(new Error('Refusing to write letsencrypt certs here'));
+			if (certificate.provider === "letsencrypt") {
+				reject(new Error("Refusing to write letsencrypt certs here"));
 				return;
 			}
 
 			let certData = certificate.meta.certificate;
-			if (typeof certificate.meta.intermediate_certificate !== 'undefined') {
+			if (typeof certificate.meta.intermediate_certificate !== "undefined") {
 				certData = `${certData}\n${certificate.meta.intermediate_certificate}`;
 			}
 
@@ -550,33 +515,30 @@ const internalCertificate = {
 					resolve();
 				}
 			});
-		})
-			.then(() => {
-				return new Promise((resolve, reject) => {
-					fs.writeFile(`${dir}/privkey.pem`, certificate.meta.certificate_key, (err) => {
-						if (err) {
-							reject(err);
-						} else {
-							resolve();
-						}
-					});
+		}).then(() => {
+			return new Promise((resolve, reject) => {
+				fs.writeFile(`${dir}/privkey.pem`, certificate.meta.certificate_key, (err) => {
+					if (err) {
+						reject(err);
+					} else {
+						resolve();
+					}
 				});
 			});
+		});
 	},
 
 	/**
 	 * @param   {Access}   access
 	 * @param   {Object}   data
 	 * @param   {Array}    data.domain_names
-	 * @param   {String}   data.meta.letsencrypt_email
-	 * @param   {Boolean}  data.meta.letsencrypt_agree
 	 * @returns {Promise}
 	 */
-	createQuickCertificate: (access, data) => {
-		return internalCertificate.create(access, {
-			provider:     'letsencrypt',
+	createQuickCertificate: async (access, data) => {
+		return await internalCertificate.create(access, {
+			provider: "letsencrypt",
 			domain_names: data.domain_names,
-			meta:         data.meta
+			meta: data.meta,
 		});
 	},
 
@@ -589,45 +551,39 @@ const internalCertificate = {
 	 * @returns {Promise}
 	 */
 	validate: (data) => {
-		return new Promise((resolve) => {
-			// Put file contents into an object
-			const files = {};
-			_.map(data.files, (file, name) => {
-				if (internalCertificate.allowedSslFiles.indexOf(name) !== -1) {
-					files[name] = file.data.toString();
-				}
-			});
-
-			resolve(files);
-		})
-			.then((files) => {
-				// For each file, create a temp file and write the contents to it
-				// Then test it depending on the file type
-				const promises = [];
-				_.map(files, (content, type) => {
-					promises.push(new Promise((resolve) => {
-						if (type === 'certificate_key') {
-							resolve(internalCertificate.checkPrivateKey(content));
-						} else {
-							// this should handle `certificate` and intermediate certificate
-							resolve(internalCertificate.getCertificateInfo(content, true));
-						}
-					}).then((res) => {
-						return {[type]: res};
-					}));
-				});
-
-				return Promise.all(promises)
-					.then((files) => {
-						let data = {};
-
-						_.each(files, (file) => {
-							data = _.assign({}, data, file);
-						});
-
-						return data;
-					});
+		// Put file contents into an object
+		const files = {};
+		_.map(data.files, (file, name) => {
+			if (internalCertificate.allowedSslFiles.indexOf(name) !== -1) {
+				files[name] = file.data.toString();
+			}
+		});
+
+		// For each file, create a temp file and write the contents to it
+		// Then test it depending on the file type
+		const promises = [];
+		_.map(files, (content, type) => {
+			promises.push(
+				new Promise((resolve) => {
+					if (type === "certificate_key") {
+						resolve(internalCertificate.checkPrivateKey(content));
+					} else {
+						// this should handle `certificate` and intermediate certificate
+						resolve(internalCertificate.getCertificateInfo(content, true));
+					}
+				}).then((res) => {
+					return { [type]: res };
+				}),
+			);
+		});
+
+		return Promise.all(promises).then((files) => {
+			let data = {};
+			_.each(files, (file) => {
+				data = _.assign({}, data, file);
 			});
+			return data;
+		});
 	},
 
 	/**
@@ -637,73 +593,62 @@ const internalCertificate = {
 	 * @param   {Object}  data.files
 	 * @returns {Promise}
 	 */
-	upload: (access, data) => {
-		return internalCertificate.get(access, {id: data.id})
-			.then((row) => {
-				if (row.provider !== 'other') {
-					throw new error.ValidationError('Cannot upload certificates for this type of provider');
-				}
+	upload: async (access, data) => {
+		const row = await internalCertificate.get(access, { id: data.id });
+		if (row.provider !== "other") {
+			throw new error.ValidationError("Cannot upload certificates for this type of provider");
+		}
 
-				return internalCertificate.validate(data)
-					.then((validations) => {
-						if (typeof validations.certificate === 'undefined') {
-							throw new error.ValidationError('Certificate file was not provided');
-						}
+		const validations = await internalCertificate.validate(data);
+		if (typeof validations.certificate === "undefined") {
+			throw new error.ValidationError("Certificate file was not provided");
+		}
 
-						_.map(data.files, (file, name) => {
-							if (internalCertificate.allowedSslFiles.indexOf(name) !== -1) {
-								row.meta[name] = file.data.toString();
-							}
-						});
+		_.map(data.files, (file, name) => {
+			if (internalCertificate.allowedSslFiles.indexOf(name) !== -1) {
+				row.meta[name] = file.data.toString();
+			}
+		});
 
-						// TODO: This uses a mysql only raw function that won't translate to postgres
-						return internalCertificate.update(access, {
-							id:           data.id,
-							expires_on:   moment(validations.certificate.dates.to, 'X').format('YYYY-MM-DD HH:mm:ss'),
-							domain_names: [validations.certificate.cn],
-							meta:         _.clone(row.meta) // Prevent the update method from changing this value that we'll use later
-						})
-							.then((certificate) => {
-								certificate.meta = row.meta;
-								return internalCertificate.writeCustomCert(certificate);
-							});
-					})
-					.then(() => {
-						return _.pick(row.meta, internalCertificate.allowedSslFiles);
-					});
-			});
+		const certificate = await internalCertificate.update(access, {
+			id: data.id,
+			expires_on: moment(validations.certificate.dates.to, "X").format("YYYY-MM-DD HH:mm:ss"),
+			domain_names: [validations.certificate.cn],
+			meta: _.clone(row.meta), // Prevent the update method from changing this value that we'll use later
+		});
+
+		certificate.meta = row.meta;
+		await internalCertificate.writeCustomCert(certificate);
+		return _.pick(row.meta, internalCertificate.allowedSslFiles);
 	},
 
 	/**
 	 * Uses the openssl command to validate the private key.
 	 * It will save the file to disk first, then run commands on it, then delete the file.
 	 *
-	 * @param {String}  private_key    This is the entire key contents as a string
+	 * @param {String}  privateKey    This is the entire key contents as a string
 	 */
-	checkPrivateKey: (private_key) => {
-		return tempWrite(private_key, '/tmp')
-			.then((filepath) => {
-				return new Promise((resolve, reject) => {
-					const failTimeout = setTimeout(() => {
-						reject(new error.ValidationError('Result Validation Error: Validation timed out. This could be due to the key being passphrase-protected.'));
-					}, 10000);
-					utils
-						.exec(`openssl pkey -in ${filepath} -check -noout 2>&1 `)
-						.then((result) => {
-							clearTimeout(failTimeout);
-							if (!result.toLowerCase().includes('key is valid')) {
-								reject(new error.ValidationError(`Result Validation Error: ${result}`));
-							}
-							fs.unlinkSync(filepath);
-							resolve(true);
-						})
-						.catch((err) => {
-							clearTimeout(failTimeout);
-							fs.unlinkSync(filepath);
-							reject(new error.ValidationError(`Certificate Key is not valid (${err.message})`, err));
-						});
-				});
-			});
+	checkPrivateKey: async (privateKey) => {
+		const filepath = await tempWrite(privateKey, "/tmp");
+		// Race the openssl check against a timeout so a passphrase-protected key cannot hang the request;
+		// throwing inside a setTimeout callback would be an uncaught exception rather than a rejection
+		let failTimeout;
+		const timeoutPromise = new Promise((_resolve, reject) => {
+			failTimeout = setTimeout(() => {
+				reject(
+					new error.ValidationError(
+						"Result Validation Error: Validation timed out. This could be due to the key being passphrase-protected.",
+					),
+				);
+			}, 10000);
+		});
+
+		try {
+			const result = await Promise.race([
+				utils.exec(`openssl pkey -in ${filepath} -check -noout 2>&1 `),
+				timeoutPromise,
+			]);
+			clearTimeout(failTimeout);
+			if (!result.toLowerCase().includes("key is valid")) {
+				throw new error.ValidationError(`Result Validation Error: ${result}`);
+			}
+			fs.unlinkSync(filepath);
+			return true;
+		} catch (err) {
+			clearTimeout(failTimeout);
+			fs.unlinkSync(filepath);
+			throw new error.ValidationError(`Certificate Key is not valid (${err.message})`, err);
+		}
 	},
 
 	/**
@@ -711,100 +656,92 @@ const internalCertificate = {
 	 * It will save the file to disk first, then run commands on it, then delete the file.
 	 *
 	 * @param {String}  certificate      This is the entire cert contents as a string
-	 * @param {Boolean} [throw_expired]  Throw when the certificate is out of date
+	 * @param {Boolean} [throwExpired]  Throw when the certificate is out of date
 	 */
-	getCertificateInfo: (certificate, throw_expired) => {
-		return tempWrite(certificate, '/tmp')
-			.then((filepath) => {
-				return internalCertificate.getCertificateInfoFromFile(filepath, throw_expired)
-					.then((certData) => {
-						fs.unlinkSync(filepath);
-						return certData;
-					}).catch((err) => {
-						fs.unlinkSync(filepath);
-						throw err;
-					});
-			});
+	getCertificateInfo: async (certificate, throwExpired) => {
+		const filepath = await tempWrite(certificate, "/tmp");
+		try {
+			const certData = await internalCertificate.getCertificateInfoFromFile(filepath, throwExpired);
+			fs.unlinkSync(filepath);
+			return certData;
+		} catch (err) {
+			fs.unlinkSync(filepath);
+			throw err;
+		}
 	},
 
 	/**
 	 * Uses the openssl command to both validate and get info out of the certificate.
 	 * It will save the file to disk first, then run commands on it, then delete the file.
 	 *
-	 * @param {String}  certificate_file The file location on disk
+	 * @param {String}  certificateFile The file location on disk
 	 * @param {Boolean} [throw_expired]  Throw when the certificate is out of date
 	 */
-	getCertificateInfoFromFile: (certificate_file, throw_expired) => {
+	getCertificateInfoFromFile: async (certificateFile, throw_expired) => {
 		const certData = {};
 
-		return utils.execFile('openssl', ['x509', '-in', certificate_file, '-subject', '-noout'])
-			.then((result) => {
-				// Examples:
-				// subject=CN = *.jc21.com
-				// subject=CN = something.example.com
-				const regex = /(?:subject=)?[^=]+=\s+(\S+)/gim;
-				const match = regex.exec(result);
-				if (match && typeof match[1] !== 'undefined') {
-					certData.cn = match[1];
-				}
-			})
-			.then(() => {
-				return utils.execFile('openssl', ['x509', '-in', certificate_file, '-issuer', '-noout']);
-			})
+		try {
+			const result = await utils.execFile("openssl", ["x509", "-in", certificateFile, "-subject", "-noout"]);
+			// Examples:
+			// subject=CN = *.jc21.com
+			// subject=CN = something.example.com
+			const regex = /(?:subject=)?[^=]+=\s+(\S+)/gim;
+			const match = regex.exec(result);
+			if (match && typeof match[1] !== "undefined") {
+				certData.cn = match[1];
+			}
 
-			.then((result) => {
-				// Examples:
-				// issuer=C = US, O = Let's Encrypt, CN = Let's Encrypt Authority X3
-				// issuer=C = US, O = Let's Encrypt, CN = E5
-				// issuer=O = NginxProxyManager, CN = NginxProxyManager Intermediate CA","O = NginxProxyManager, CN = NginxProxyManager Intermediate CA
-				const regex = /^(?:issuer=)?(.*)$/gim;
-				const match = regex.exec(result);
-				if (match && typeof match[1] !== 'undefined') {
-					certData.issuer = match[1];
-				}
-			})
-			.then(() => {
-				return utils.execFile('openssl', ['x509', '-in', certificate_file, '-dates', '-noout']);
-			})
-			.then((result) => {
-				// notBefore=Jul 14 04:04:29 2018 GMT
-				// notAfter=Oct 12 04:04:29 2018 GMT
-				let validFrom = null;
-				let validTo   = null;
+			const result2 = await utils.execFile("openssl", ["x509", "-in", certificateFile, "-issuer", "-noout"]);
+			// Examples:
+			// issuer=C = US, O = Let's Encrypt, CN = Let's Encrypt Authority X3
+			// issuer=C = US, O = Let's Encrypt, CN = E5
+			// issuer=O = NginxProxyManager, CN = NginxProxyManager Intermediate CA","O = NginxProxyManager, CN = NginxProxyManager Intermediate CA
+			const regex2 = /^(?:issuer=)?(.*)$/gim;
+			const match2 = regex2.exec(result2);
+			if (match2 && typeof match2[1] !== "undefined") {
+				certData.issuer = match2[1];
+			}
 
-				const lines = result.split('\n');
-				lines.map((str) => {
-					const regex = /^(\S+)=(.*)$/gim;
-					const match = regex.exec(str.trim());
+			const result3 = await utils.execFile("openssl", ["x509", "-in", certificateFile, "-dates", "-noout"]);
+			// notBefore=Jul 14 04:04:29 2018 GMT
+			// notAfter=Oct 12 04:04:29 2018 GMT
+			let validFrom = null;
+			let validTo = null;
 
-					if (match && typeof match[2] !== 'undefined') {
-						const date = parseInt(moment(match[2], 'MMM DD HH:mm:ss YYYY z').format('X'), 10);
+			const lines = result3.split("\n");
+			lines.map((str) => {
+				const regex = /^(\S+)=(.*)$/gim;
+				const match = regex.exec(str.trim());
 
-						if (match[1].toLowerCase() === 'notbefore') {
-							validFrom = date;
-						} else if (match[1].toLowerCase() === 'notafter') {
-							validTo = date;
-						}
+				if (match && typeof match[2] !== "undefined") {
+					const date = Number.parseInt(moment(match[2], "MMM DD HH:mm:ss YYYY z").format("X"), 10);
+
+					if (match[1].toLowerCase() === "notbefore") {
+						validFrom = date;
+					} else if (match[1].toLowerCase() === "notafter") {
+						validTo = date;
 					}
-				});
-
-				if (!validFrom || !validTo) {
-					throw new error.ValidationError(`Could not determine dates from certificate: ${result}`);
 				}
-
-				if (throw_expired && validTo < parseInt(moment().format('X'), 10)) {
-					throw new error.ValidationError('Certificate has expired');
-				}
-
-				certData.dates = {
-					from: validFrom,
-					to:   validTo
-				};
-
-				return certData;
-			}).catch((err) => {
-				throw new error.ValidationError(`Certificate is not valid (${err.message})`, err);
+				return true;
 			});
+
+			if (!validFrom || !validTo) {
+				throw new error.ValidationError(`Could not determine dates from certificate: ${result3}`);
+			}
+
+			if (throw_expired && validTo < Number.parseInt(moment().format("X"), 10)) {
+				throw new error.ValidationError("Certificate has expired");
+			}
+
+			certData.dates = {
+				from: validFrom,
+				to: validTo,
+			};
+
+			return certData;
+		} catch (err) {
+			throw new error.ValidationError(`Certificate is not valid (${err.message})`, err);
+		}
 	},
 
 	/**
@@ -816,94 +753,97 @@ const internalCertificate = {
 	 */
 	cleanMeta: (meta, remove) => {
 		internalCertificate.allowedSslFiles.map((key) => {
-			if (typeof meta[key] !== 'undefined' && meta[key]) {
+			if (typeof meta[key] !== "undefined" && meta[key]) {
 				if (remove) {
 					delete meta[key];
 				} else {
 					meta[key] = true;
 				}
 			}
+			return true;
 		});
-
 		return meta;
 	},
 
 	/**
 	 * Request a certificate using the http challenge
 	 * @param   {Object}  certificate   the certificate row
+	 * @param   {String}  email         the email address to use for registration
 	 * @returns {Promise}
 	 */
-	requestLetsEncryptSsl: (certificate) => {
-		logger.info(`Requesting LetsEncrypt certificates for Cert #${certificate.id}: ${certificate.domain_names.join(', ')}`);
+	requestLetsEncryptSsl: async (certificate, email) => {
+		logger.info(
+			`Requesting LetsEncrypt certificates for Cert #${certificate.id}: ${certificate.domain_names.join(", ")}`,
+		);
 
 		const args = [
-			'certonly',
-			'--config',
+			"certonly",
+			"--config",
 			letsencryptConfig,
-			'--work-dir',
-			'/tmp/letsencrypt-lib',
-			'--logs-dir',
-			'/tmp/letsencrypt-log',
-			'--cert-name',
+			"--work-dir",
+			certbotWorkDir,
+			"--logs-dir",
+			certbotLogsDir,
+			"--cert-name",
 			`npm-${certificate.id}`,
-			'--agree-tos',
-			'--authenticator',
-			'webroot',
-			'--email',
-			certificate.meta.letsencrypt_email,
-			'--preferred-challenges',
-			'dns,http',
-			'--domains',
-			certificate.domain_names.join(','),
+			"--agree-tos",
+			"--authenticator",
+			"webroot",
+			"-m",
+			email,
+			"--preferred-challenges",
+			"http",
+			"--domains",
+			certificate.domain_names.join(","),
 		];
 
 		const adds = internalCertificate.getAdditionalCertbotArgs(certificate.id);
 		args.push(...adds.args);
 
-		logger.info(`Command: ${certbotCommand} ${args ? args.join(' ') : ''}`);
+		logger.info(`Command: ${certbotCommand} ${args ? args.join(" ") : ""}`);
 
-		return utils.execFile(certbotCommand, args, adds.opts)
-			.then((result) => {
-				logger.success(result);
-				return result;
-			});
+		const result = await utils.execFile(certbotCommand, args, adds.opts);
+		logger.success(result);
+		return result;
 	},
 
 	/**
-	 * @param   {Object}         certificate          the certificate row
-	 * @param   {String}         dns_provider         the dns provider name (key used in `certbot-dns-plugins.json`)
-	 * @param   {String | null}  credentials          the content of this providers credentials file
-	 * @param   {String}         propagation_seconds
+	 * @param   {Object}   certificate  the certificate row
+	 * @param   {String}   email        the email address to use for registration
 	 * @returns {Promise}
 	 */
-	requestLetsEncryptSslWithDnsChallenge: async (certificate) => {
-		await certbot.installPlugin(certificate.meta.dns_provider);
+	requestLetsEncryptSslWithDnsChallenge: async (certificate, email) => {
+		await installPlugin(certificate.meta.dns_provider);
 		const dnsPlugin = dnsPlugins[certificate.meta.dns_provider];
-		logger.info(`Requesting LetsEncrypt certificates via ${dnsPlugin.name} for Cert #${certificate.id}: ${certificate.domain_names.join(', ')}`);
+		logger.info(
+			`Requesting LetsEncrypt certificates via ${dnsPlugin.name} for Cert #${certificate.id}: ${certificate.domain_names.join(", ")}`,
+		);
 
 		const credentialsLocation = `/etc/letsencrypt/credentials/credentials-${certificate.id}`;
-		fs.mkdirSync('/etc/letsencrypt/credentials', { recursive: true });
-		fs.writeFileSync(credentialsLocation, certificate.meta.dns_provider_credentials, {mode: 0o600});
+		fs.mkdirSync("/etc/letsencrypt/credentials", { recursive: true });
+		fs.writeFileSync(credentialsLocation, certificate.meta.dns_provider_credentials, { mode: 0o600 });
 
 		// Whether the plugin has a ---credentials argument
-		const hasConfigArg = certificate.meta.dns_provider !== 'route53';
+		const hasConfigArg = certificate.meta.dns_provider !== "route53";
 
 		const args = [
-			'certonly',
-			'--config',
+			"certonly",
+			"--config",
 			letsencryptConfig,
-			'--work-dir',
-			'/tmp/letsencrypt-lib',
-			'--logs-dir',
-			'/tmp/letsencrypt-log',
-			'--cert-name',
+			"--work-dir",
+			certbotWorkDir,
+			"--logs-dir",
+			certbotLogsDir,
+			"--cert-name",
 			`npm-${certificate.id}`,
-			'--agree-tos',
-			'--email',
-			certificate.meta.letsencrypt_email,
-			'--domains',
-			certificate.domain_names.join(','),
-			'--authenticator',
+			"--agree-tos",
+			"-m",
+			email,
+			"--preferred-challenges",
+			"dns",
+			"--domains",
+			certificate.domain_names.join(","),
+			"--authenticator",
 			dnsPlugin.full_plugin_name,
 		];
 
@@ -911,13 +851,16 @@ const internalCertificate = {
 			args.push(`--${dnsPlugin.full_plugin_name}-credentials`, credentialsLocation);
 		}
 		if (certificate.meta.propagation_seconds !== undefined) {
-			args.push(`--${dnsPlugin.full_plugin_name}-propagation-seconds`, certificate.meta.propagation_seconds.toString());
+			args.push(
+				`--${dnsPlugin.full_plugin_name}-propagation-seconds`,
+				certificate.meta.propagation_seconds.toString(),
+			);
 		}
 
 		const adds = internalCertificate.getAdditionalCertbotArgs(certificate.id, certificate.meta.dns_provider);
 		args.push(...adds.args);
 
-		logger.info(`Command: ${certbotCommand} ${args ? args.join(' ') : ''}`);
+		logger.info(`Command: ${certbotCommand} ${args ? args.join(" ") : ""}`);
 
 		try {
 			const result = await utils.execFile(certbotCommand, args, adds.opts);
@@ -930,166 +873,160 @@ const internalCertificate = {
 		}
 	},
 
-
 	/**
 	 * @param   {Access}  access
 	 * @param   {Object}  data
 	 * @param   {Number}  data.id
 	 * @returns {Promise}
 	 */
-	renew: (access, data) => {
-		return access.can('certificates:update', data)
-			.then(() => {
-				return internalCertificate.get(access, data);
-			})
-			.then((certificate) => {
-				if (certificate.provider === 'letsencrypt') {
-					const renewMethod = certificate.meta.dns_challenge ? internalCertificate.renewLetsEncryptSslWithDnsChallenge : internalCertificate.renewLetsEncryptSsl;
+	renew: async (access, data) => {
+		await access.can("certificates:update", data);
+		const certificate = await internalCertificate.get(access, data);
 
-					return renewMethod(certificate)
-						.then(() => {
-							return internalCertificate.getCertificateInfoFromFile(`${internalCertificate.getLiveCertPath(certificate.id)}/fullchain.pem`);
-						})
-						.then((cert_info) => {
-							return certificateModel
-								.query()
-								.patchAndFetchById(certificate.id, {
-									expires_on: moment(cert_info.dates.to, 'X').format('YYYY-MM-DD HH:mm:ss')
-								});
-						})
-						.then((updated_certificate) => {
-							// Add to audit log
-							return internalAuditLog.add(access, {
-								action:      'renewed',
-								object_type: 'certificate',
-								object_id:   updated_certificate.id,
-								meta:        updated_certificate
-							})
-								.then(() => {
-									return updated_certificate;
-								});
-						});
-				} else {
-					throw new error.ValidationError('Only Let\'sEncrypt certificates can be renewed');
-				}
+		if (certificate.provider === "letsencrypt") {
+			const renewMethod = certificate.meta.dns_challenge
+				? internalCertificate.renewLetsEncryptSslWithDnsChallenge
+				: internalCertificate.renewLetsEncryptSsl;
+
+			await renewMethod(certificate);
+			const certInfo = await internalCertificate.getCertificateInfoFromFile(
+				`${internalCertificate.getLiveCertPath(certificate.id)}/fullchain.pem`,
+			);
+
+			const updatedCertificate = await certificateModel.query().patchAndFetchById(certificate.id, {
+				expires_on: moment(certInfo.dates.to, "X").format("YYYY-MM-DD HH:mm:ss"),
 			});
+
+			// Add to audit log
+			await internalAuditLog.add(access, {
+				action: "renewed",
+				object_type: "certificate",
+				object_id: updatedCertificate.id,
+				meta: updatedCertificate,
+			});
+
+			return updatedCertificate;
+		}
+
+		throw new error.ValidationError("Only Let's Encrypt certificates can be renewed");
 	},
 
 	/**
 	 * @param   {Object}  certificate   the certificate row
 	 * @returns {Promise}
 	 */
-	renewLetsEncryptSsl: (certificate) => {
-		logger.info(`Renewing LetsEncrypt certificates for Cert #${certificate.id}: ${certificate.domain_names.join(', ')}`);
+	renewLetsEncryptSsl: async (certificate) => {
+		logger.info(
+			`Renewing LetsEncrypt certificates for Cert #${certificate.id}: ${certificate.domain_names.join(", ")}`,
+		);
 
 		const args = [
-			'renew',
-			'--force-renewal',
-			'--config',
+			"renew",
+			"--force-renewal",
+			"--config",
 			letsencryptConfig,
-			'--work-dir',
-			'/tmp/letsencrypt-lib',
-			'--logs-dir',
-			'/tmp/letsencrypt-log',
-			'--cert-name',
+			"--work-dir",
+			certbotWorkDir,
+			"--logs-dir",
+			certbotLogsDir,
+			"--cert-name",
 			`npm-${certificate.id}`,
-			'--preferred-challenges',
-			'dns,http',
-			'--no-random-sleep-on-renew',
-			'--disable-hook-validation',
+			"--preferred-challenges",
+			"http",
+			"--no-random-sleep-on-renew",
+			"--disable-hook-validation",
 		];
 
 		const adds = internalCertificate.getAdditionalCertbotArgs(certificate.id, certificate.meta.dns_provider);
 		args.push(...adds.args);
 
-		logger.info(`Command: ${certbotCommand} ${args ? args.join(' ') : ''}`);
+		logger.info(`Command: ${certbotCommand} ${args ? args.join(" ") : ""}`);
 
-		return utils.execFile(certbotCommand, args, adds.opts)
-			.then((result) => {
-				logger.info(result);
-				return result;
-			});
+		const result = await utils.execFile(certbotCommand, args, adds.opts);
+		logger.info(result);
+		return result;
 	},
 
 	/**
 	 * @param   {Object}  certificate   the certificate row
 	 * @returns {Promise}
 	 */
-	renewLetsEncryptSslWithDnsChallenge: (certificate) => {
+	renewLetsEncryptSslWithDnsChallenge: async (certificate) => {
 		const dnsPlugin = dnsPlugins[certificate.meta.dns_provider];
-
 		if (!dnsPlugin) {
 			throw Error(`Unknown DNS provider '${certificate.meta.dns_provider}'`);
 		}
 
-		logger.info(`Renewing LetsEncrypt certificates via ${dnsPlugin.name} for Cert #${certificate.id}: ${certificate.domain_names.join(', ')}`);
+		logger.info(
+			`Renewing LetsEncrypt certificates via ${dnsPlugin.name} for Cert #${certificate.id}: ${certificate.domain_names.join(", ")}`,
+		);
 
 		const args = [
-			'renew',
-			'--force-renewal',
-			'--config',
+			"renew",
+			"--force-renewal",
+			"--config",
 			letsencryptConfig,
-			'--work-dir',
-			'/tmp/letsencrypt-lib',
-			'--logs-dir',
-			'/tmp/letsencrypt-log',
-			'--cert-name',
+			"--work-dir",
+			certbotWorkDir,
+			"--logs-dir",
+			certbotLogsDir,
+			"--cert-name",
 			`npm-${certificate.id}`,
-			'--disable-hook-validation',
-			'--no-random-sleep-on-renew',
+			"--preferred-challenges",
+			"dns",
+			"--disable-hook-validation",
+			"--no-random-sleep-on-renew",
 		];
 
 		const adds = internalCertificate.getAdditionalCertbotArgs(certificate.id, certificate.meta.dns_provider);
 		args.push(...adds.args);
 
-		logger.info(`Command: ${certbotCommand} ${args ? args.join(' ') : ''}`);
+		logger.info(`Command: ${certbotCommand} ${args ? args.join(" ") : ""}`);
 
-		return utils.execFile(certbotCommand, args, adds.opts)
-			.then(async (result) => {
-				logger.info(result);
-				return result;
-			});
+		const result = await utils.execFile(certbotCommand, args, adds.opts);
+		logger.info(result);
+		return result;
 	},
 
 	/**
 	 * @param   {Object}  certificate    the certificate row
-	 * @param   {Boolean} [throw_errors]
+	 * @param   {Boolean} [throwErrors]
 	 * @returns {Promise}
 	 */
-	revokeLetsEncryptSsl: (certificate, throw_errors) => {
-		logger.info(`Revoking LetsEncrypt certificates for Cert #${certificate.id}: ${certificate.domain_names.join(', ')}`);
+	revokeLetsEncryptSsl: async (certificate, throwErrors) => {
+		logger.info(
+			`Revoking LetsEncrypt certificates for Cert #${certificate.id}: ${certificate.domain_names.join(", ")}`,
+		);
 
 		const args = [
-			'revoke',
-			'--config',
+			"revoke",
+			"--config",
 			letsencryptConfig,
-			'--work-dir',
-			'/tmp/letsencrypt-lib',
-			'--logs-dir',
-			'/tmp/letsencrypt-log',
-			'--cert-path',
+			"--work-dir",
+			certbotWorkDir,
+			"--logs-dir",
+			certbotLogsDir,
+			"--cert-path",
 			`${internalCertificate.getLiveCertPath(certificate.id)}/fullchain.pem`,
-			'--delete-after-revoke',
+			"--delete-after-revoke",
 		];
 
 		const adds = internalCertificate.getAdditionalCertbotArgs(certificate.id);
 		args.push(...adds.args);
 
-		logger.info(`Command: ${certbotCommand} ${args ? args.join(' ') : ''}`);
+		logger.info(`Command: ${certbotCommand} ${args ? args.join(" ") : ""}`);
 
-		return utils.execFile(certbotCommand, args, adds.opts)
-			.then(async (result) => {
-				await utils.exec(`rm -f '/etc/letsencrypt/credentials/credentials-${certificate.id}' || true`);
-				logger.info(result);
-				return result;
-			})
-			.catch((err) => {
-				logger.error(err.message);
-
-				if (throw_errors) {
-					throw err;
-				}
-			});
+		try {
+			const result = await utils.execFile(certbotCommand, args, adds.opts);
+			await utils.exec(`rm -f '/etc/letsencrypt/credentials/credentials-${certificate.id}' || true`);
+			logger.info(result);
+			return result;
+		} catch (err) {
+			logger.error(err.message);
+			if (throwErrors) {
+				throw err;
+			}
+		}
 	},
 
 	/**
@@ -1102,162 +1039,71 @@ const internalCertificate = {
 	},
 
 	/**
-	 * @param {Object}  in_use_result
-	 * @param {Number}  in_use_result.total_count
-	 * @param {Array}   in_use_result.proxy_hosts
-	 * @param {Array}   in_use_result.redirection_hosts
-	 * @param {Array}   in_use_result.dead_hosts
+	 * @param   {Object}  inUseResult
+	 * @param   {Number}  inUseResult.total_count
+	 * @param   {Array}   inUseResult.proxy_hosts
+	 * @param   {Array}   inUseResult.redirection_hosts
+	 * @param   {Array}   inUseResult.dead_hosts
+	 * @returns {Promise}
 	 */
-	disableInUseHosts: (in_use_result) => {
-		if (in_use_result.total_count) {
-			const promises = [];
-
-			if (in_use_result.proxy_hosts.length) {
-				promises.push(internalNginx.bulkDeleteConfigs('proxy_host', in_use_result.proxy_hosts));
+	disableInUseHosts: async (inUseResult) => {
+		if (inUseResult?.total_count) {
+			if (inUseResult?.proxy_hosts.length) {
+				await internalNginx.bulkDeleteConfigs("proxy_host", inUseResult.proxy_hosts);
 			}
 
-			if (in_use_result.redirection_hosts.length) {
-				promises.push(internalNginx.bulkDeleteConfigs('redirection_host', in_use_result.redirection_hosts));
+			if (inUseResult?.redirection_hosts.length) {
+				await internalNginx.bulkDeleteConfigs("redirection_host", inUseResult.redirection_hosts);
 			}
 
-			if (in_use_result.dead_hosts.length) {
-				promises.push(internalNginx.bulkDeleteConfigs('dead_host', in_use_result.dead_hosts));
+			if (inUseResult?.dead_hosts.length) {
+				await internalNginx.bulkDeleteConfigs("dead_host", inUseResult.dead_hosts);
 			}
-
-			return Promise.all(promises);
-
-		} else {
-			return Promise.resolve();
 		}
 	},
 
 	/**
-	 * @param {Object}  in_use_result
-	 * @param {Number}  in_use_result.total_count
-	 * @param {Array}   in_use_result.proxy_hosts
-	 * @param {Array}   in_use_result.redirection_hosts
-	 * @param {Array}   in_use_result.dead_hosts
+	 * @param   {Object}  inUseResult
+	 * @param   {Number}  inUseResult.total_count
+	 * @param   {Array}   inUseResult.proxy_hosts
+	 * @param   {Array}   inUseResult.redirection_hosts
+	 * @param   {Array}   inUseResult.dead_hosts
+	 * @returns {Promise}
 	 */
-	enableInUseHosts: (in_use_result) => {
-		if (in_use_result.total_count) {
-			const promises = [];
-
-			if (in_use_result.proxy_hosts.length) {
-				promises.push(internalNginx.bulkGenerateConfigs('proxy_host', in_use_result.proxy_hosts));
+	enableInUseHosts: async (inUseResult) => {
+		if (inUseResult.total_count) {
+			if (inUseResult.proxy_hosts.length) {
+				await internalNginx.bulkGenerateConfigs("proxy_host", inUseResult.proxy_hosts);
 			}
 
-			if (in_use_result.redirection_hosts.length) {
-				promises.push(internalNginx.bulkGenerateConfigs('redirection_host', in_use_result.redirection_hosts));
+			if (inUseResult.redirection_hosts.length) {
+				await internalNginx.bulkGenerateConfigs("redirection_host", inUseResult.redirection_hosts);
 			}
 
-			if (in_use_result.dead_hosts.length) {
-				promises.push(internalNginx.bulkGenerateConfigs('dead_host', in_use_result.dead_hosts));
+			if (inUseResult.dead_hosts.length) {
+				await internalNginx.bulkGenerateConfigs("dead_host", inUseResult.dead_hosts);
 			}
-
-			return Promise.all(promises);
-
-		} else {
-			return Promise.resolve();
 		}
 	},
 
-	testHttpsChallenge: async (access, domains) => {
-		await access.can('certificates:list');
-
-		if (!isArray(domains)) {
-			throw new error.InternalValidationError('Domains must be an array of strings');
-		}
-		if (domains.length === 0) {
-			throw new error.InternalValidationError('No domains provided');
-		}
+	/**
+	 *
+	 * @param   {Object}    payload
+	 * @param   {string[]}  payload.domains
+	 * @returns
+	 */
+	testHttpsChallenge: async (access, payload) => {
+		await access.can("certificates:list");
 
 		// Create a test challenge file
-		const testChallengeDir  = '/data/letsencrypt-acme-challenge/.well-known/acme-challenge';
+		const testChallengeDir = "/data/letsencrypt-acme-challenge/.well-known/acme-challenge";
 		const testChallengeFile = `${testChallengeDir}/test-challenge`;
-		fs.mkdirSync(testChallengeDir, {recursive: true});
-		fs.writeFileSync(testChallengeFile, 'Success', {encoding: 'utf8'});
-
-		async function performTestForDomain (domain) {
-			logger.info(`Testing http challenge for ${domain}`);
-			const url      = `http://${domain}/.well-known/acme-challenge/test-challenge`;
-			const formBody = `method=G&url=${encodeURI(url)}&bodytype=T&requestbody=&headername=User-Agent&headervalue=None&locationid=1&ch=false&cc=false`;
-			const options  = {
-				method:  'POST',
-				headers: {
-					'User-Agent':     'Mozilla/5.0',
-					'Content-Type':   'application/x-www-form-urlencoded',
-					'Content-Length': Buffer.byteLength(formBody)
-				}
-			};
-
-			const result = await new Promise((resolve) => {
-
-				const req = https.request('https://www.site24x7.com/tools/restapi-tester', options, (res) => {
-					let responseBody = '';
-
-					res.on('data', (chunk) => {
-						responseBody = responseBody + chunk;
-					});
-
-					res.on('end', () => {
-						try {
-							const parsedBody = JSON.parse(`${responseBody}`);
-							if (res.statusCode !== 200) {
-								logger.warn(`Failed to test HTTP challenge for domain ${domain} because HTTP status code ${res.statusCode} was returned: ${parsedBody.message}`);
-								resolve(undefined);
-							} else {
-								resolve(parsedBody);
-							}
-						} catch (err) {
-							if (res.statusCode !== 200) {
-								logger.warn(`Failed to test HTTP challenge for domain ${domain} because HTTP status code ${res.statusCode} was returned`);
-							} else {
-								logger.warn(`Failed to test HTTP challenge for domain ${domain} because response failed to be parsed: ${err.message}`);
-							}
-							resolve(undefined);
-						}
-					});
-				});
-
-				// Make sure to write the request body.
-				req.write(formBody);
-				req.end();
-				req.on('error', (e) => { logger.warn(`Failed to test HTTP challenge for domain ${domain}`, e);
-					resolve(undefined); });
-			});
-
-			if (!result) {
-				// Some error occurred while trying to get the data
-				return 'failed';
-			} else if (result.error) {
-				logger.info(`HTTP challenge test failed for domain ${domain} because error was returned: ${result.error.msg}`);
-				return `other:${result.error.msg}`;
-			} else if (`${result.responsecode}` === '200' && result.htmlresponse === 'Success') {
-				// Server exists and has responded with the correct data
-				return 'ok';
-			} else if (`${result.responsecode}` === '200') {
-				// Server exists but has responded with wrong data
-				logger.info(`HTTP challenge test failed for domain ${domain} because of invalid returned data:`, result.htmlresponse);
-				return 'wrong-data';
-			} else if (`${result.responsecode}` === '404') {
-				// Server exists but responded with a 404
-				logger.info(`HTTP challenge test failed for domain ${domain} because code 404 was returned`);
-				return '404';
-			} else if (`${result.responsecode}` === '0' || (typeof result.reason === 'string' && result.reason.toLowerCase() === 'host unavailable')) {
-				// Server does not exist at domain
-				logger.info(`HTTP challenge test failed for domain ${domain} the host was not found`);
-				return 'no-host';
-			} else {
-				// Other errors
-				logger.info(`HTTP challenge test failed for domain ${domain} because code ${result.responsecode} was returned`);
-				return `other:${result.responsecode}`;
-			}
-		}
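+		// Drop a known marker file into the acme-challenge webroot so the external
+		// tester used below can confirm each domain serves it over plain HTTP.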
+		fs.mkdirSync(testChallengeDir, { recursive: true });
+		fs.writeFileSync(testChallengeFile, "Success", { encoding: "utf8" });
 
 		const results = {};
-
-		for (const domain of domains){
-			results[domain] = await performTestForDomain(domain);
+		for (const domain of payload.domains) {
+			results[domain] = await internalCertificate.performTestForDomain(domain);
 		}
 
 		// Remove the test challenge file
@@ -1266,33 +1112,129 @@ const internalCertificate = {
 		return results;
 	},
 
+	performTestForDomain: async (domain) => {
+		logger.info(`Testing http challenge for ${domain}`);
+		const url = `http://${domain}/.well-known/acme-challenge/test-challenge`;
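+		// site24x7's public REST API tester is asked to fetch the challenge URL from
+		// the outside; "method=G" appears to request a plain GET of that URL.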
+		const formBody = `method=G&url=${encodeURI(url)}&bodytype=T&requestbody=&headername=User-Agent&headervalue=None&locationid=1&ch=false&cc=false`;
+		const options = {
+			method: "POST",
+			headers: {
+				"User-Agent": "Mozilla/5.0",
+				"Content-Type": "application/x-www-form-urlencoded",
+				"Content-Length": Buffer.byteLength(formBody),
+			},
+		};
+
+		const result = await new Promise((resolve) => {
+			const req = https.request("https://www.site24x7.com/tools/restapi-tester", options, (res) => {
+				let responseBody = "";
+
+				res.on("data", (chunk) => {
+					responseBody = responseBody + chunk;
+				});
+
+				res.on("end", () => {
+					try {
+						const parsedBody = JSON.parse(`${responseBody}`);
+						if (res.statusCode !== 200) {
+							logger.warn(
+								`Failed to test HTTP challenge for domain ${domain} because HTTP status code ${res.statusCode} was returned: ${parsedBody.message}`,
+							);
+							resolve(undefined);
+						} else {
+							resolve(parsedBody);
+						}
+					} catch (err) {
+						if (res.statusCode !== 200) {
+							logger.warn(
+								`Failed to test HTTP challenge for domain ${domain} because HTTP status code ${res.statusCode} was returned`,
+							);
+						} else {
+							logger.warn(
+								`Failed to test HTTP challenge for domain ${domain} because response failed to be parsed: ${err.message}`,
+							);
+						}
+						resolve(undefined);
+					}
+				});
+			});
+
+			// Make sure to write the request body.
+			req.write(formBody);
+			req.end();
+			req.on("error", (e) => {
+				logger.warn(`Failed to test HTTP challenge for domain ${domain}`, e);
+				resolve(undefined);
+			});
+		});
+
+		if (!result) {
+			// Some error occurred while trying to get the data
+			return "failed";
+		}
+		if (result.error) {
+			logger.info(
+				`HTTP challenge test failed for domain ${domain} because error was returned: ${result.error.msg}`,
+			);
+			return `other:${result.error.msg}`;
+		}
+		if (`${result.responsecode}` === "200" && result.htmlresponse === "Success") {
+			// Server exists and has responded with the correct data
+			return "ok";
+		}
+		if (`${result.responsecode}` === "200") {
+			// Server exists but has responded with wrong data
+			logger.info(
+				`HTTP challenge test failed for domain ${domain} because of invalid returned data:`,
+				result.htmlresponse,
+			);
+			return "wrong-data";
+		}
+		if (`${result.responsecode}` === "404") {
+			// Server exists but responded with a 404
+			logger.info(`HTTP challenge test failed for domain ${domain} because code 404 was returned`);
+			return "404";
+		}
+		if (
+			`${result.responsecode}` === "0" ||
+			(typeof result.reason === "string" && result.reason.toLowerCase() === "host unavailable")
+		) {
+			// Server does not exist at domain
+			logger.info(`HTTP challenge test failed for domain ${domain} because the host was not found`);
+			return "no-host";
+		}
+		// Other errors
+		logger.info(`HTTP challenge test failed for domain ${domain} because code ${result.responsecode} was returned`);
+		return `other:${result.responsecode}`;
+	},
+
 	getAdditionalCertbotArgs: (certificate_id, dns_provider) => {
 		const args = [];
-		if (letsencryptServer !== null) {
-			args.push('--server', letsencryptServer);
+		if (useLetsencryptServer() !== null) {
+			args.push("--server", useLetsencryptServer());
 		}
-		if (letsencryptStaging && letsencryptServer === null) {
-			args.push('--staging');
+		if (useLetsencryptStaging() && useLetsencryptServer() === null) {
+			args.push("--staging");
 		}
 
 		// For route53, add the credentials file as an environment variable,
 		// inheriting the process env
 		const opts = {};
-		if (certificate_id && dns_provider === 'route53') {
-			opts.env                 = process.env;
+		if (certificate_id && dns_provider === "route53") {
+			opts.env = process.env;
 			opts.env.AWS_CONFIG_FILE = `/etc/letsencrypt/credentials/credentials-${certificate_id}`;
 		}
 
-		if (dns_provider === 'duckdns') {
-			args.push('--dns-duckdns-no-txt-restore');
+		if (dns_provider === "duckdns") {
+			args.push("--dns-duckdns-no-txt-restore");
 		}
 
-		return {args: args, opts: opts};
+		return { args: args, opts: opts };
 	},
 
-	getLiveCertPath: (certificate_id) => {
-		return `/etc/letsencrypt/live/npm-${certificate_id}`;
-	}
+	getLiveCertPath: (certificateId) => {
+		return `/etc/letsencrypt/live/npm-${certificateId}`;
+	},
 };
 
-module.exports = internalCertificate;
+export default internalCertificate;
diff --git a/backend/internal/dead-host.js b/backend/internal/dead-host.js
index 6bbdf61b..21b12012 100644
--- a/backend/internal/dead-host.js
+++ b/backend/internal/dead-host.js
@@ -1,110 +1,96 @@
-const _                   = require('lodash');
-const error               = require('../lib/error');
-const utils               = require('../lib/utils');
-const deadHostModel       = require('../models/dead_host');
-const internalHost        = require('./host');
-const internalNginx       = require('./nginx');
-const internalAuditLog    = require('./audit-log');
-const internalCertificate = require('./certificate');
-const {castJsonIfNeed}    = require('../lib/helpers');
+import _ from "lodash";
+import errs from "../lib/error.js";
+import { castJsonIfNeed } from "../lib/helpers.js";
+import utils from "../lib/utils.js";
+import deadHostModel from "../models/dead_host.js";
+import internalAuditLog from "./audit-log.js";
+import internalCertificate from "./certificate.js";
+import internalHost from "./host.js";
+import internalNginx from "./nginx.js";
 
-function omissions () {
-	return ['is_deleted'];
-}
+const omissions = () => {
+	return ["is_deleted"];
+};
 
 const internalDeadHost = {
-
 	/**
 	 * @param   {Access}  access
 	 * @param   {Object}  data
 	 * @returns {Promise}
 	 */
-	create: (access, data) => {
-		let create_certificate = data.certificate_id === 'new';
+	create: async (access, data) => {
+		const createCertificate = data.certificate_id === "new";
 
-		if (create_certificate) {
+		if (createCertificate) {
 			delete data.certificate_id;
 		}
 
-		return access.can('dead_hosts:create', data)
-			.then((/*access_data*/) => {
-				// Get a list of the domain names and check each of them against existing records
-				let domain_name_check_promises = [];
+		await access.can("dead_hosts:create", data);
 
-				data.domain_names.map(function (domain_name) {
-					domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name));
-				});
+		// Get a list of the domain names and check each of them against existing records
+		const domainNameCheckPromises = [];
 
-				return Promise.all(domain_name_check_promises)
-					.then((check_results) => {
-						check_results.map(function (result) {
-							if (result.is_taken) {
-								throw new error.ValidationError(result.hostname + ' is already in use');
-							}
-						});
-					});
-			})
-			.then(() => {
-				// At this point the domains should have been checked
-				data.owner_user_id = access.token.getUserId(1);
-				data               = internalHost.cleanSslHstsData(data);
+		data.domain_names.map((domain_name) => {
+			domainNameCheckPromises.push(internalHost.isHostnameTaken(domain_name));
+			return true;
+		});
 
-				// Fix for db field not having a default value
-				// for this optional field.
-				if (typeof data.advanced_config === 'undefined') {
-					data.advanced_config = '';
+		await Promise.all(domainNameCheckPromises).then((check_results) => {
+			check_results.map((result) => {
+				if (result.is_taken) {
+					throw new errs.ValidationError(`${result.hostname} is already in use`);
 				}
-
-				return deadHostModel
-					.query()
-					.insertAndFetch(data)
-					.then(utils.omitRow(omissions()));
-			})
-			.then((row) => {
-				if (create_certificate) {
-					return internalCertificate.createQuickCertificate(access, data)
-						.then((cert) => {
-							// update host with cert id
-							return internalDeadHost.update(access, {
-								id:             row.id,
-								certificate_id: cert.id
-							});
-						})
-						.then(() => {
-							return row;
-						});
-				} else {
-					return row;
-				}
-			})
-			.then((row) => {
-				// re-fetch with cert
-				return internalDeadHost.get(access, {
-					id:     row.id,
-					expand: ['certificate', 'owner']
-				});
-			})
-			.then((row) => {
-				// Configure nginx
-				return internalNginx.configure(deadHostModel, 'dead_host', row)
-					.then(() => {
-						return row;
-					});
-			})
-			.then((row) => {
-				data.meta = _.assign({}, data.meta || {}, row.meta);
-
-				// Add to audit log
-				return internalAuditLog.add(access, {
-					action:      'created',
-					object_type: 'dead-host',
-					object_id:   row.id,
-					meta:        data
-				})
-					.then(() => {
-						return row;
-					});
+				return true;
 			});
+		});
+
+		// At this point the domains should have been checked
+		data.owner_user_id = access.token.getUserId(1);
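+		// cleanSslHstsData() forces the ssl_forced/http2/hsts flags off when their
+		// prerequisites (certificate, ssl_forced, hsts_enabled) are missing.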
+		const thisData = internalHost.cleanSslHstsData(data);
+
+		// Fix for db field not having a default value
+		// for this optional field.
+		if (typeof data.advanced_config === "undefined") {
+			thisData.advanced_config = "";
+		}
+
+		const row = await deadHostModel.query()
+			.insertAndFetch(thisData)
+			.then(utils.omitRow(omissions()));
+
+		// Add to audit log
+		await internalAuditLog.add(access, {
+			action: "created",
+			object_type: "dead-host",
+			object_id: row.id,
+			meta: thisData,
+		});
+
+		if (createCertificate) {
+			const cert = await internalCertificate.createQuickCertificate(access, data);
+
+			// update host with cert id
+			await internalDeadHost.update(access, {
+				id: row.id,
+				certificate_id: cert.id,
+			});
+		}
+
+		// re-fetch with cert
+		const freshRow = await internalDeadHost.get(access, {
+			id: row.id,
+			expand: ["certificate", "owner"],
+		});
+
+		// Sanity check
+		if (createCertificate && !freshRow.certificate_id) {
+			throw new errs.InternalValidationError("The host was created but the Certificate creation failed.");
+		}
+
+		// Configure nginx
+		await internalNginx.configure(deadHostModel, "dead_host", freshRow);
+
+		return freshRow;
 	},
 
 	/**
@@ -113,98 +99,85 @@ const internalDeadHost = {
 	 * @param  {Number}  data.id
 	 * @return {Promise}
 	 */
-	update: (access, data) => {
-		let create_certificate = data.certificate_id === 'new';
-
-		if (create_certificate) {
+	update: async (access, data) => {
+		const createCertificate = data.certificate_id === "new";
+		if (createCertificate) {
 			delete data.certificate_id;
 		}
 
-		return access.can('dead_hosts:update', data.id)
-			.then((/*access_data*/) => {
-				// Get a list of the domain names and check each of them against existing records
-				let domain_name_check_promises = [];
+		await access.can("dead_hosts:update", data.id);
 
-				if (typeof data.domain_names !== 'undefined') {
-					data.domain_names.map(function (domain_name) {
-						domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, 'dead', data.id));
-					});
-
-					return Promise.all(domain_name_check_promises)
-						.then((check_results) => {
-							check_results.map(function (result) {
-								if (result.is_taken) {
-									throw new error.ValidationError(result.hostname + ' is already in use');
-								}
-							});
-						});
-				}
-			})
-			.then(() => {
-				return internalDeadHost.get(access, {id: data.id});
-			})
-			.then((row) => {
-				if (row.id !== data.id) {
-					// Sanity check that something crazy hasn't happened
-					throw new error.InternalValidationError('404 Host could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
-				}
-
-				if (create_certificate) {
-					return internalCertificate.createQuickCertificate(access, {
-						domain_names: data.domain_names || row.domain_names,
-						meta:         _.assign({}, row.meta, data.meta)
-					})
-						.then((cert) => {
-							// update host with cert id
-							data.certificate_id = cert.id;
-						})
-						.then(() => {
-							return row;
-						});
-				} else {
-					return row;
-				}
-			})
-			.then((row) => {
-				// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
-				data = _.assign({}, {
-					domain_names: row.domain_names
-				}, data);
-
-				data = internalHost.cleanSslHstsData(data, row);
-
-				return deadHostModel
-					.query()
-					.where({id: data.id})
-					.patch(data)
-					.then((saved_row) => {
-						// Add to audit log
-						return internalAuditLog.add(access, {
-							action:      'updated',
-							object_type: 'dead-host',
-							object_id:   row.id,
-							meta:        data
-						})
-							.then(() => {
-								return _.omit(saved_row, omissions());
-							});
-					});
-			})
-			.then(() => {
-				return internalDeadHost.get(access, {
-					id:     data.id,
-					expand: ['owner', 'certificate']
-				})
-					.then((row) => {
-						// Configure nginx
-						return internalNginx.configure(deadHostModel, 'dead_host', row)
-							.then((new_meta) => {
-								row.meta = new_meta;
-								row      = internalHost.cleanRowCertificateMeta(row);
-								return _.omit(row, omissions());
-							});
-					});
+		// Get a list of the domain names and check each of them against existing records
+		const domainNameCheckPromises = [];
+		if (typeof data.domain_names !== "undefined") {
+			data.domain_names.map((domainName) => {
+				domainNameCheckPromises.push(internalHost.isHostnameTaken(domainName, "dead", data.id));
+				return true;
 			});
+
+			const checkResults = await Promise.all(domainNameCheckPromises);
+			checkResults.map((result) => {
+				if (result.is_taken) {
+					throw new errs.ValidationError(`${result.hostname} is already in use`);
+				}
+				return true;
+			});
+		}
+		const row = await internalDeadHost.get(access, { id: data.id });
+
+		if (row.id !== data.id) {
+			// Sanity check that something crazy hasn't happened
+			throw new errs.InternalValidationError(
+				`404 Host could not be updated, IDs do not match: ${row.id} !== ${data.id}`,
+			);
+		}
+
+		if (createCertificate) {
+			const cert = await internalCertificate.createQuickCertificate(access, {
+				domain_names: data.domain_names || row.domain_names,
+				meta: _.assign({}, row.meta, data.meta),
+			});
+
+			// update host with cert id
+			data.certificate_id = cert.id;
+		}
+
+		// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
+		let thisData = _.assign(
+			{},
+			{
+				domain_names: row.domain_names,
+			},
+			data,
+		);
+
+		thisData = internalHost.cleanSslHstsData(thisData, row);
+
+		// do the row update
+		await deadHostModel
+			.query()
+			.where({ id: data.id })
+			.patch(thisData);
+
+		// Add to audit log
+		await internalAuditLog.add(access, {
+			action: "updated",
+			object_type: "dead-host",
+			object_id: row.id,
+			meta: thisData,
+		});
+
+		const thisRow = await internalDeadHost
+			.get(access, {
+				id: thisData.id,
+				expand: ["owner", "certificate"],
+			});
+
+		// Configure nginx
+		const newMeta = await internalNginx.configure(deadHostModel, "dead_host", thisRow);
+		thisRow.meta = newMeta;
+		return _.omit(internalHost.cleanRowCertificateMeta(thisRow), omissions());
 	},
 
 	/**
@@ -215,40 +188,32 @@ const internalDeadHost = {
 	 * @param  {Array}    [data.omit]
 	 * @return {Promise}
 	 */
-	get: (access, data) => {
-		if (typeof data === 'undefined') {
-			data = {};
+	get: async (access, data) => {
+		const accessData = await access.can("dead_hosts:get", data.id);
+		const query = deadHostModel
+			.query()
+			.where("is_deleted", 0)
+			.andWhere("id", data.id)
+			.allowGraph("[owner,certificate]")
+			.first();
+
+		if (accessData.permission_visibility !== "all") {
+			query.andWhere("owner_user_id", access.token.getUserId(1));
 		}
 
-		return access.can('dead_hosts:get', data.id)
-			.then((access_data) => {
-				let query = deadHostModel
-					.query()
-					.where('is_deleted', 0)
-					.andWhere('id', data.id)
-					.allowGraph('[owner,certificate]')
-					.first();
+		if (typeof data.expand !== "undefined" && data.expand !== null) {
+			query.withGraphFetched(`[${data.expand.join(", ")}]`);
+		}
 
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('owner_user_id', access.token.getUserId(1));
-				}
-
-				if (typeof data.expand !== 'undefined' && data.expand !== null) {
-					query.withGraphFetched('[' + data.expand.join(', ') + ']');
-				}
-
-				return query.then(utils.omitRow(omissions()));
-			})
-			.then((row) => {
-				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				}
-				// Custom omissions
-				if (typeof data.omit !== 'undefined' && data.omit !== null) {
-					row = _.omit(row, data.omit);
-				}
-				return row;
-			});
+		const row = await query.then(utils.omitRow(omissions()));
+		if (!row || !row.id) {
+			throw new errs.ItemNotFoundError(data.id);
+		}
+		// Custom omissions
+		if (typeof data.omit !== "undefined" && data.omit !== null) {
+			return _.omit(row, data.omit);
+		}
+		return row;
 	},
 
 	/**
@@ -258,42 +223,32 @@ const internalDeadHost = {
 	 * @param {String}  [data.reason]
 	 * @returns {Promise}
 	 */
-	delete: (access, data) => {
-		return access.can('dead_hosts:delete', data.id)
-			.then(() => {
-				return internalDeadHost.get(access, {id: data.id});
-			})
-			.then((row) => {
-				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				}
+	delete: async (access, data) => {
+		await access.can("dead_hosts:delete", data.id)
+		const row = await internalDeadHost.get(access, { id: data.id });
+		if (!row || !row.id) {
+			throw new errs.ItemNotFoundError(data.id);
+		}
 
-				return deadHostModel
-					.query()
-					.where('id', row.id)
-					.patch({
-						is_deleted: 1
-					})
-					.then(() => {
-						// Delete Nginx Config
-						return internalNginx.deleteConfig('dead_host', row)
-							.then(() => {
-								return internalNginx.reload();
-							});
-					})
-					.then(() => {
-						// Add to audit log
-						return internalAuditLog.add(access, {
-							action:      'deleted',
-							object_type: 'dead-host',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
-						});
-					});
-			})
-			.then(() => {
-				return true;
+		await deadHostModel
+			.query()
+			.where("id", row.id)
+			.patch({
+				is_deleted: 1,
 			});
+
+		// Delete Nginx Config
+		await internalNginx.deleteConfig("dead_host", row);
+		await internalNginx.reload();
+
+		// Add to audit log
+		await internalAuditLog.add(access, {
+			action: "deleted",
+			object_type: "dead-host",
+			object_id: row.id,
+			meta: _.omit(row, omissions()),
+		});
+		return true;
 	},
 
 	/**
@@ -303,46 +258,39 @@ const internalDeadHost = {
 	 * @param {String}  [data.reason]
 	 * @returns {Promise}
 	 */
-	enable: (access, data) => {
-		return access.can('dead_hosts:update', data.id)
-			.then(() => {
-				return internalDeadHost.get(access, {
-					id:     data.id,
-					expand: ['certificate', 'owner']
-				});
-			})
-			.then((row) => {
-				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				} else if (row.enabled) {
-					throw new error.ValidationError('Host is already enabled');
-				}
+	enable: async (access, data) => {
+		await access.can("dead_hosts:update", data.id)
+		const row = await internalDeadHost.get(access, {
+			id: data.id,
+			expand: ["certificate", "owner"],
+		});
+		if (!row || !row.id) {
+			throw new errs.ItemNotFoundError(data.id);
+		}
+		if (row.enabled) {
+			throw new errs.ValidationError("Host is already enabled");
+		}
 
-				row.enabled = 1;
+		row.enabled = 1;
 
-				return deadHostModel
-					.query()
-					.where('id', row.id)
-					.patch({
-						enabled: 1
-					})
-					.then(() => {
-						// Configure nginx
-						return internalNginx.configure(deadHostModel, 'dead_host', row);
-					})
-					.then(() => {
-						// Add to audit log
-						return internalAuditLog.add(access, {
-							action:      'enabled',
-							object_type: 'dead-host',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
-						});
-					});
-			})
-			.then(() => {
-				return true;
+		await deadHostModel
+			.query()
+			.where("id", row.id)
+			.patch({
+				enabled: 1,
 			});
+
+		// Configure nginx
+		await internalNginx.configure(deadHostModel, "dead_host", row);
+
+		// Add to audit log
+		await internalAuditLog.add(access, {
+			action: "enabled",
+			object_type: "dead-host",
+			object_id: row.id,
+			meta: _.omit(row, omissions()),
+		});
+		return true;
 	},
 
 	/**
@@ -352,46 +300,37 @@ const internalDeadHost = {
 	 * @param {String}  [data.reason]
 	 * @returns {Promise}
 	 */
-	disable: (access, data) => {
-		return access.can('dead_hosts:update', data.id)
-			.then(() => {
-				return internalDeadHost.get(access, {id: data.id});
-			})
-			.then((row) => {
-				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				} else if (!row.enabled) {
-					throw new error.ValidationError('Host is already disabled');
-				}
+	disable: async (access, data) => {
+		await access.can("dead_hosts:update", data.id)
+		const row = await internalDeadHost.get(access, { id: data.id });
+		if (!row || !row.id) {
+			throw new errs.ItemNotFoundError(data.id);
+		}
+		if (!row.enabled) {
+			throw new errs.ValidationError("Host is already disabled");
+		}
 
-				row.enabled = 0;
+		row.enabled = 0;
 
-				return deadHostModel
-					.query()
-					.where('id', row.id)
-					.patch({
-						enabled: 0
-					})
-					.then(() => {
-						// Delete Nginx Config
-						return internalNginx.deleteConfig('dead_host', row)
-							.then(() => {
-								return internalNginx.reload();
-							});
-					})
-					.then(() => {
-						// Add to audit log
-						return internalAuditLog.add(access, {
-							action:      'disabled',
-							object_type: 'dead-host',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
-						});
-					});
-			})
-			.then(() => {
-				return true;
+		await deadHostModel
+			.query()
+			.where("id", row.id)
+			.patch({
+				enabled: 0,
 			});
+
+		// Delete Nginx Config
+		await internalNginx.deleteConfig("dead_host", row);
+		await internalNginx.reload();
+
+		// Add to audit log
+		await internalAuditLog.add(access, {
+			action: "disabled",
+			object_type: "dead-host",
+			object_id: row.id,
+			meta: _.omit(row, omissions()),
+		});
+		return true;
 	},
 
 	/**
@@ -399,43 +338,38 @@ const internalDeadHost = {
 	 *
 	 * @param   {Access}  access
 	 * @param   {Array}   [expand]
-	 * @param   {String}  [search_query]
+	 * @param   {String}  [searchQuery]
 	 * @returns {Promise}
 	 */
-	getAll: (access, expand, search_query) => {
-		return access.can('dead_hosts:list')
-			.then((access_data) => {
-				let query = deadHostModel
-					.query()
-					.where('is_deleted', 0)
-					.groupBy('id')
-					.allowGraph('[owner,certificate]')
-					.orderBy(castJsonIfNeed('domain_names'), 'ASC');
+	getAll: async (access, expand, searchQuery) => {
+		const accessData = await access.can("dead_hosts:list")
+		const query = deadHostModel
+			.query()
+			.where("is_deleted", 0)
+			.groupBy("id")
+			.allowGraph("[owner,certificate]")
+			.orderBy(castJsonIfNeed("domain_names"), "ASC");
 
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('owner_user_id', access.token.getUserId(1));
-				}
+		if (accessData.permission_visibility !== "all") {
+			query.andWhere("owner_user_id", access.token.getUserId(1));
+		}
 
-				// Query is used for searching
-				if (typeof search_query === 'string' && search_query.length > 0) {
-					query.where(function () {
-						this.where(castJsonIfNeed('domain_names'), 'like', '%' + search_query + '%');
-					});
-				}
-
-				if (typeof expand !== 'undefined' && expand !== null) {
-					query.withGraphFetched('[' + expand.join(', ') + ']');
-				}
-
-				return query.then(utils.omitRows(omissions()));
-			})
-			.then((rows) => {
-				if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
-					return internalHost.cleanAllRowsCertificateMeta(rows);
-				}
-
-				return rows;
+		// Query is used for searching
+		if (typeof searchQuery === "string" && searchQuery.length > 0) {
+			query.where(function () {
+				this.where(castJsonIfNeed("domain_names"), "like", `%${searchQuery}%`);
 			});
+		}
+
+		if (typeof expand !== "undefined" && expand !== null) {
+			query.withGraphFetched(`[${expand.join(", ")}]`);
+		}
+
+		const rows = await query.then(utils.omitRows(omissions()));
+		if (typeof expand !== "undefined" && expand !== null && expand.indexOf("certificate") !== -1) {
+			internalHost.cleanAllRowsCertificateMeta(rows);
+		}
+		return rows;
 	},
 
 	/**
@@ -445,21 +379,16 @@ const internalDeadHost = {
 	 * @param   {String}  visibility
 	 * @returns {Promise}
 	 */
-	getCount: (user_id, visibility) => {
-		let query = deadHostModel
-			.query()
-			.count('id as count')
-			.where('is_deleted', 0);
+	getCount: async (user_id, visibility) => {
+		const query = deadHostModel.query().count("id as count").where("is_deleted", 0);
 
-		if (visibility !== 'all') {
-			query.andWhere('owner_user_id', user_id);
+		if (visibility !== "all") {
+			query.andWhere("owner_user_id", user_id);
 		}
 
-		return query.first()
-			.then((row) => {
-				return parseInt(row.count, 10);
-			});
-	}
+		const row = await query.first();
+		return Number.parseInt(row.count, 10);
+	},
 };
 
-module.exports = internalDeadHost;
+export default internalDeadHost;
diff --git a/backend/internal/host.js b/backend/internal/host.js
index 52c6d2bd..74871626 100644
--- a/backend/internal/host.js
+++ b/backend/internal/host.js
@@ -1,11 +1,10 @@
-const _                    = require('lodash');
-const proxyHostModel       = require('../models/proxy_host');
-const redirectionHostModel = require('../models/redirection_host');
-const deadHostModel        = require('../models/dead_host');
-const {castJsonIfNeed}     = require('../lib/helpers');
+import _ from "lodash";
+import { castJsonIfNeed } from "../lib/helpers.js";
+import deadHostModel from "../models/dead_host.js";
+import proxyHostModel from "../models/proxy_host.js";
+import redirectionHostModel from "../models/redirection_host.js";
 
 const internalHost = {
-
 	/**
 	 * Makes sure that the ssl_* and hsts_* fields play nicely together.
 	 * ie: if there is no cert, then force_ssl is off.
@@ -15,25 +14,23 @@ const internalHost = {
 	 * @param   {object} [existing_data]
 	 * @returns {object}
 	 */
-	cleanSslHstsData: function (data, existing_data) {
-		existing_data = existing_data === undefined ? {} : existing_data;
+	cleanSslHstsData: (data, existingData) => {
+		const combinedData = _.assign({}, existingData || {}, data);
 
-		const combined_data = _.assign({}, existing_data, data);
-
-		if (!combined_data.certificate_id) {
-			combined_data.ssl_forced    = false;
-			combined_data.http2_support = false;
+		if (!combinedData.certificate_id) {
+			combinedData.ssl_forced = false;
+			combinedData.http2_support = false;
 		}
 
-		if (!combined_data.ssl_forced) {
-			combined_data.hsts_enabled = false;
+		if (!combinedData.ssl_forced) {
+			combinedData.hsts_enabled = false;
 		}
 
-		if (!combined_data.hsts_enabled) {
-			combined_data.hsts_subdomains = false;
+		if (!combinedData.hsts_enabled) {
+			combinedData.hsts_subdomains = false;
 		}
 
-		return combined_data;
+		return combinedData;
 	},
 
 	/**
@@ -42,11 +39,12 @@ const internalHost = {
 	 * @param   {Array}  rows
 	 * @returns {Array}
 	 */
-	cleanAllRowsCertificateMeta: function (rows) {
-		rows.map(function (row, idx) {
-			if (typeof rows[idx].certificate !== 'undefined' && rows[idx].certificate) {
+	cleanAllRowsCertificateMeta: (rows) => {
+		rows.map((_row, idx) => {
+			if (typeof rows[idx].certificate !== "undefined" && rows[idx].certificate) {
 				rows[idx].certificate.meta = {};
 			}
+			return true;
 		});
 
 		return rows;
@@ -58,8 +56,8 @@ const internalHost = {
 	 * @param   {Object}  row
 	 * @returns {Object}
 	 */
-	cleanRowCertificateMeta: function (row) {
-		if (typeof row.certificate !== 'undefined' && row.certificate) {
+	cleanRowCertificateMeta: (row) => {
+		if (typeof row.certificate !== "undefined" && row.certificate) {
 			row.certificate.meta = {};
 		}
 
@@ -67,54 +65,33 @@ const internalHost = {
 	},
 
 	/**
-	 * This returns all the host types with any domain listed in the provided domain_names array.
+	 * This returns all the host types with any domain listed in the provided domainNames array.
 	 * This is used by the certificates to temporarily disable any host that is using the domain
 	 *
-	 * @param   {Array}  domain_names
+	 * @param   {Array}  domainNames
 	 * @returns {Promise}
 	 */
-	getHostsWithDomains: function (domain_names) {
-		const promises = [
-			proxyHostModel
-				.query()
-				.where('is_deleted', 0),
-			redirectionHostModel
-				.query()
-				.where('is_deleted', 0),
-			deadHostModel
-				.query()
-				.where('is_deleted', 0)
-		];
+	getHostsWithDomains: async (domainNames) => {
+		const responseObject = {
+			total_count: 0,
+			dead_hosts: [],
+			proxy_hosts: [],
+			redirection_hosts: [],
+		};
 
-		return Promise.all(promises)
-			.then((promises_results) => {
-				let response_object = {
-					total_count:       0,
-					dead_hosts:        [],
-					proxy_hosts:       [],
-					redirection_hosts: []
-				};
+		const proxyRes = await proxyHostModel.query().where("is_deleted", 0);
+		responseObject.proxy_hosts = internalHost._getHostsWithDomains(proxyRes, domainNames);
+		responseObject.total_count += responseObject.proxy_hosts.length;
 
-				if (promises_results[0]) {
-					// Proxy Hosts
-					response_object.proxy_hosts  = internalHost._getHostsWithDomains(promises_results[0], domain_names);
-					response_object.total_count += response_object.proxy_hosts.length;
-				}
+		const redirRes = await redirectionHostModel.query().where("is_deleted", 0);
+		responseObject.redirection_hosts = internalHost._getHostsWithDomains(redirRes, domainNames);
+		responseObject.total_count += responseObject.redirection_hosts.length;
 
-				if (promises_results[1]) {
-					// Redirection Hosts
-					response_object.redirection_hosts = internalHost._getHostsWithDomains(promises_results[1], domain_names);
-					response_object.total_count      += response_object.redirection_hosts.length;
-				}
+		const deadRes = await deadHostModel.query().where("is_deleted", 0);
+		responseObject.dead_hosts = internalHost._getHostsWithDomains(deadRes, domainNames);
+		responseObject.total_count += responseObject.dead_hosts.length;
 
-				if (promises_results[2]) {
-					// Dead Hosts
-					response_object.dead_hosts   = internalHost._getHostsWithDomains(promises_results[2], domain_names);
-					response_object.total_count += response_object.dead_hosts.length;
-				}
-
-				return response_object;
-			});
+		return responseObject;
 	},
 
 	/**
@@ -125,112 +102,133 @@ const internalHost = {
 	 * @param   {Integer}  [ignore_id]     Must be supplied if type was also supplied
 	 * @returns {Promise}
 	 */
-	isHostnameTaken: function (hostname, ignore_type, ignore_id) {
+	isHostnameTaken: (hostname, ignore_type, ignore_id) => {
 		const promises = [
 			proxyHostModel
 				.query()
-				.where('is_deleted', 0)
-				.andWhere(castJsonIfNeed('domain_names'), 'like', '%' + hostname + '%'),
+				.where("is_deleted", 0)
+				.andWhere(castJsonIfNeed("domain_names"), "like", `%${hostname}%`),
 			redirectionHostModel
 				.query()
-				.where('is_deleted', 0)
-				.andWhere(castJsonIfNeed('domain_names'), 'like', '%' + hostname + '%'),
+				.where("is_deleted", 0)
+				.andWhere(castJsonIfNeed("domain_names"), "like", `%${hostname}%`),
 			deadHostModel
 				.query()
-				.where('is_deleted', 0)
-				.andWhere(castJsonIfNeed('domain_names'), 'like', '%' + hostname + '%')
+				.where("is_deleted", 0)
+				.andWhere(castJsonIfNeed("domain_names"), "like", `%${hostname}%`),
 		];
 
-		return Promise.all(promises)
-			.then((promises_results) => {
-				let is_taken = false;
+		return Promise.all(promises).then((promises_results) => {
+			let is_taken = false;
 
-				if (promises_results[0]) {
-					// Proxy Hosts
-					if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[0], ignore_type === 'proxy' && ignore_id ? ignore_id : 0)) {
-						is_taken = true;
-					}
+			if (promises_results[0]) {
+				// Proxy Hosts
+				if (
+					internalHost._checkHostnameRecordsTaken(
+						hostname,
+						promises_results[0],
+						ignore_type === "proxy" && ignore_id ? ignore_id : 0,
+					)
+				) {
+					is_taken = true;
 				}
+			}
 
-				if (promises_results[1]) {
-					// Redirection Hosts
-					if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[1], ignore_type === 'redirection' && ignore_id ? ignore_id : 0)) {
-						is_taken = true;
-					}
+			if (promises_results[1]) {
+				// Redirection Hosts
+				if (
+					internalHost._checkHostnameRecordsTaken(
+						hostname,
+						promises_results[1],
+						ignore_type === "redirection" && ignore_id ? ignore_id : 0,
+					)
+				) {
+					is_taken = true;
 				}
+			}
 
-				if (promises_results[2]) {
-					// Dead Hosts
-					if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[2], ignore_type === 'dead' && ignore_id ? ignore_id : 0)) {
-						is_taken = true;
-					}
+			if (promises_results[2]) {
+				// Dead Hosts
+				if (
+					internalHost._checkHostnameRecordsTaken(
+						hostname,
+						promises_results[2],
+						ignore_type === "dead" && ignore_id ? ignore_id : 0,
+					)
+				) {
+					is_taken = true;
 				}
+			}
 
-				return {
-					hostname: hostname,
-					is_taken: is_taken
-				};
-			});
+			return {
+				hostname: hostname,
+				is_taken: is_taken,
+			};
+		});
 	},
 
 	/**
 	 * Private call only
 	 *
 	 * @param   {String}  hostname
-	 * @param   {Array}   existing_rows
-	 * @param   {Integer} [ignore_id]
+	 * @param   {Array}   existingRows
+	 * @param   {Integer} [ignoreId]
 	 * @returns {Boolean}
 	 */
-	_checkHostnameRecordsTaken: function (hostname, existing_rows, ignore_id) {
-		let is_taken = false;
+	_checkHostnameRecordsTaken: (hostname, existingRows, ignoreId) => {
+		let isTaken = false;
 
-		if (existing_rows && existing_rows.length) {
-			existing_rows.map(function (existing_row) {
-				existing_row.domain_names.map(function (existing_hostname) {
+		if (existingRows?.length) {
+			existingRows.map((existingRow) => {
+				existingRow.domain_names.map((existingHostname) => {
 					// Does this domain match?
-					if (existing_hostname.toLowerCase() === hostname.toLowerCase()) {
-						if (!ignore_id || ignore_id !== existing_row.id) {
-							is_taken = true;
+					if (existingHostname.toLowerCase() === hostname.toLowerCase()) {
+						if (!ignoreId || ignoreId !== existingRow.id) {
+							isTaken = true;
 						}
 					}
+					return true;
 				});
+				return true;
 			});
 		}
 
-		return is_taken;
+		return isTaken;
 	},
 
 	/**
 	 * Private call only
 	 *
 	 * @param   {Array}   hosts
-	 * @param   {Array}   domain_names
+	 * @param   {Array}   domainNames
 	 * @returns {Array}
 	 */
-	_getHostsWithDomains: function (hosts, domain_names) {
-		let response = [];
+	_getHostsWithDomains: (hosts, domainNames) => {
+		const response = [];
 
-		if (hosts && hosts.length) {
-			hosts.map(function (host) {
-				let host_matches = false;
+		if (hosts?.length) {
+			hosts.map((host) => {
+				let hostMatches = false;
 
-				domain_names.map(function (domain_name) {
-					host.domain_names.map(function (host_domain_name) {
-						if (domain_name.toLowerCase() === host_domain_name.toLowerCase()) {
-							host_matches = true;
+				domainNames.map((domainName) => {
+					host.domain_names.map((hostDomainName) => {
+						if (domainName.toLowerCase() === hostDomainName.toLowerCase()) {
+							hostMatches = true;
 						}
+						return true;
 					});
+					return true;
 				});
 
-				if (host_matches) {
+				if (hostMatches) {
 					response.push(host);
 				}
+				return true;
 			});
 		}
 
 		return response;
-	}
-
+	},
 };
 
-module.exports = internalHost;
+export default internalHost;
diff --git a/backend/internal/ip_ranges.js b/backend/internal/ip_ranges.js
index d34ee5a1..662da8ed 100644
--- a/backend/internal/ip_ranges.js
+++ b/backend/internal/ip_ranges.js
@@ -1,45 +1,51 @@
-const https         = require('https');
-const fs            = require('fs');
-const logger        = require('../logger').ip_ranges;
-const error         = require('../lib/error');
-const utils         = require('../lib/utils');
-const internalNginx = require('./nginx');
+import fs from "node:fs";
+import https from "node:https";
+import { dirname } from "node:path";
+import { fileURLToPath } from "node:url";
+import errs from "../lib/error.js";
+import utils from "../lib/utils.js";
+import { ipRanges as logger } from "../logger.js";
+import internalNginx from "./nginx.js";
 
-const CLOUDFRONT_URL   = 'https://ip-ranges.amazonaws.com/ip-ranges.json';
-const CLOUDFARE_V4_URL = 'https://www.cloudflare.com/ips-v4';
-const CLOUDFARE_V6_URL = 'https://www.cloudflare.com/ips-v6';
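+// ES modules do not provide __dirname, so rebuild it from import.meta.url for the
+// template path reads further down.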
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = dirname(__filename);
+
+const CLOUDFRONT_URL = "https://ip-ranges.amazonaws.com/ip-ranges.json";
+const CLOUDFARE_V4_URL = "https://www.cloudflare.com/ips-v4";
+const CLOUDFARE_V6_URL = "https://www.cloudflare.com/ips-v6";
 
 const regIpV4 = /^(\d+\.?){4}\/\d+/;
 const regIpV6 = /^(([\da-fA-F]+)?:)+\/\d+/;
 
 const internalIpRanges = {
-
-	interval_timeout:    1000 * 60 * 60 * 6, // 6 hours
-	interval:            null,
+	interval_timeout: 1000 * 60 * 60 * 6, // 6 hours
+	interval: null,
 	interval_processing: false,
-	iteration_count:     0,
+	iteration_count: 0,
 
 	initTimer: () => {
-		logger.info('IP Ranges Renewal Timer initialized');
+		logger.info("IP Ranges Renewal Timer initialized");
 		internalIpRanges.interval = setInterval(internalIpRanges.fetch, internalIpRanges.interval_timeout);
 	},
 
 	fetchUrl: (url) => {
 		return new Promise((resolve, reject) => {
-			logger.info('Fetching ' + url);
-			return https.get(url, (res) => {
-				res.setEncoding('utf8');
-				let raw_data = '';
-				res.on('data', (chunk) => {
-					raw_data += chunk;
-				});
+			logger.info(`Fetching ${url}`);
+			return https
+				.get(url, (res) => {
+					res.setEncoding("utf8");
+					let raw_data = "";
+					res.on("data", (chunk) => {
+						raw_data += chunk;
+					});
 
-				res.on('end', () => {
-					resolve(raw_data);
+					res.on("end", () => {
+						resolve(raw_data);
+					});
+				})
+				.on("error", (err) => {
+					reject(err);
 				});
-			}).on('error', (err) => {
-				reject(err);
-			});
 		});
 	},
 
@@ -49,27 +55,30 @@ const internalIpRanges = {
 	fetch: () => {
 		if (!internalIpRanges.interval_processing) {
 			internalIpRanges.interval_processing = true;
-			logger.info('Fetching IP Ranges from online services...');
+			logger.info("Fetching IP Ranges from online services...");
 
 			let ip_ranges = [];
 
-			return internalIpRanges.fetchUrl(CLOUDFRONT_URL)
+			return internalIpRanges
+				.fetchUrl(CLOUDFRONT_URL)
 				.then((cloudfront_data) => {
-					let data = JSON.parse(cloudfront_data);
+					const data = JSON.parse(cloudfront_data);
 
-					if (data && typeof data.prefixes !== 'undefined') {
+					if (data && typeof data.prefixes !== "undefined") {
 						data.prefixes.map((item) => {
-							if (item.service === 'CLOUDFRONT') {
+							if (item.service === "CLOUDFRONT") {
 								ip_ranges.push(item.ip_prefix);
 							}
+							return true;
 						});
 					}
 
-					if (data && typeof data.ipv6_prefixes !== 'undefined') {
+					if (data && typeof data.ipv6_prefixes !== "undefined") {
 						data.ipv6_prefixes.map((item) => {
-							if (item.service === 'CLOUDFRONT') {
+							if (item.service === "CLOUDFRONT") {
 								ip_ranges.push(item.ipv6_prefix);
 							}
+							return true;
 						});
 					}
 				})
@@ -77,38 +86,38 @@ const internalIpRanges = {
 					return internalIpRanges.fetchUrl(CLOUDFARE_V4_URL);
 				})
 				.then((cloudfare_data) => {
-					let items = cloudfare_data.split('\n').filter((line) => regIpV4.test(line));
-					ip_ranges = [... ip_ranges, ... items];
+					const items = cloudfare_data.split("\n").filter((line) => regIpV4.test(line));
+					ip_ranges = [...ip_ranges, ...items];
 				})
 				.then(() => {
 					return internalIpRanges.fetchUrl(CLOUDFARE_V6_URL);
 				})
 				.then((cloudfare_data) => {
-					let items = cloudfare_data.split('\n').filter((line) => regIpV6.test(line));
-					ip_ranges = [... ip_ranges, ... items];
+					const items = cloudfare_data.split("\n").filter((line) => regIpV6.test(line));
+					ip_ranges = [...ip_ranges, ...items];
 				})
 				.then(() => {
-					let clean_ip_ranges = [];
+					const clean_ip_ranges = [];
 					ip_ranges.map((range) => {
 						if (range) {
 							clean_ip_ranges.push(range);
 						}
+						return true;
 					});
 
-					return internalIpRanges.generateConfig(clean_ip_ranges)
-						.then(() => {
-							if (internalIpRanges.iteration_count) {
-								// Reload nginx
-								return internalNginx.reload();
-							}
-						});
+					return internalIpRanges.generateConfig(clean_ip_ranges).then(() => {
+						if (internalIpRanges.iteration_count) {
+							// Reload nginx
+							return internalNginx.reload();
+						}
+					});
 				})
 				.then(() => {
 					internalIpRanges.interval_processing = false;
 					internalIpRanges.iteration_count++;
 				})
 				.catch((err) => {
-					logger.error(err.message);
+					logger.fatal(err.message);
 					internalIpRanges.interval_processing = false;
 				});
 		}
@@ -122,26 +131,26 @@ const internalIpRanges = {
 		const renderEngine = utils.getRenderEngine();
 		return new Promise((resolve, reject) => {
 			let template = null;
-			let filename = '/etc/nginx/conf.d/include/ip_ranges.conf';
+			const filename = "/etc/nginx/conf.d/include/ip_ranges.conf";
 			try {
-				template = fs.readFileSync(__dirname + '/../templates/ip_ranges.conf', {encoding: 'utf8'});
+				template = fs.readFileSync(`${__dirname}/../templates/ip_ranges.conf`, { encoding: "utf8" });
 			} catch (err) {
-				reject(new error.ConfigurationError(err.message));
+				reject(new errs.ConfigurationError(err.message));
 				return;
 			}
 
 			renderEngine
-				.parseAndRender(template, {ip_ranges: ip_ranges})
+				.parseAndRender(template, { ip_ranges: ip_ranges })
 				.then((config_text) => {
-					fs.writeFileSync(filename, config_text, {encoding: 'utf8'});
+					fs.writeFileSync(filename, config_text, { encoding: "utf8" });
 					resolve(true);
 				})
 				.catch((err) => {
-					logger.warn('Could not write ' + filename + ':', err.message);
-					reject(new error.ConfigurationError(err.message));
+					logger.warn(`Could not write ${filename}: ${err.message}`);
+					reject(new errs.ConfigurationError(err.message));
 				});
 		});
-	}
+	},
 };
 
-module.exports = internalIpRanges;
+export default internalIpRanges;
diff --git a/backend/internal/nginx.js b/backend/internal/nginx.js
index 59694d3c..573beeb1 100644
--- a/backend/internal/nginx.js
+++ b/backend/internal/nginx.js
@@ -1,12 +1,15 @@
-const _      = require('lodash');
-const fs     = require('node:fs');
-const logger = require('../logger').nginx;
-const config = require('../lib/config');
-const utils  = require('../lib/utils');
-const error  = require('../lib/error');
+import fs from "node:fs";
+import { dirname } from "node:path";
+import { fileURLToPath } from "node:url";
+import _ from "lodash";
+import errs from "../lib/error.js";
+import utils from "../lib/utils.js";
+import { nginx as logger } from "../logger.js";
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = dirname(__filename);
 
 const internalNginx = {
-
 	/**
 	 * This will:
 	 * - test the nginx config first to make sure it's OK
@@ -24,7 +27,8 @@ const internalNginx = {
 	configure: (model, host_type, host) => {
 		let combined_meta = {};
 
-		return internalNginx.test()
+		return internalNginx
+			.test()
 			.then(() => {
 				// Nginx is OK
 				// We're deleting this config regardless.
@@ -37,20 +41,18 @@ const internalNginx = {
 			})
 			.then(() => {
 				// Test nginx again and update meta with result
-				return internalNginx.test()
+				return internalNginx
+					.test()
 					.then(() => {
 						// nginx is ok
 						combined_meta = _.assign({}, host.meta, {
 							nginx_online: true,
-							nginx_err:    null
+							nginx_err: null,
 						});
 
-						return model
-							.query()
-							.where('id', host.id)
-							.patch({
-								meta: combined_meta
-							});
+						return model.query().where("id", host.id).patch({
+							meta: combined_meta,
+						});
 					})
 					.catch((err) => {
 						// Remove the error_log line because it's a docker-ism false positive that doesn't need to be reported.
@@ -58,28 +60,27 @@ const internalNginx = {
 						//   nginx: [alert] could not open error log file: open() "/var/log/nginx/error.log" failed (6: No such device or address)
 
 						const valid_lines = [];
-						const err_lines   = err.message.split('\n');
+						const err_lines = err.message.split("\n");
 						err_lines.map((line) => {
-							if (line.indexOf('/var/log/nginx/error.log') === -1) {
+							if (line.indexOf("/var/log/nginx/error.log") === -1) {
 								valid_lines.push(line);
 							}
+							return true;
 						});
 
-						if (config.debug()) {
-							logger.error('Nginx test failed:', valid_lines.join('\n'));
-						}
+						logger.debug("Nginx test failed:", valid_lines.join("\n"));
 
 						// config is bad, update meta and delete config
 						combined_meta = _.assign({}, host.meta, {
 							nginx_online: false,
-							nginx_err:    valid_lines.join('\n')
+							nginx_err: valid_lines.join("\n"),
 						});
 
 						return model
 							.query()
-							.where('id', host.id)
+							.where("id", host.id)
 							.patch({
-								meta: combined_meta
+								meta: combined_meta,
 							})
 							.then(() => {
 								internalNginx.renameConfigAsError(host_type, host);
@@ -101,22 +102,18 @@ const internalNginx = {
 	 * @returns {Promise}
 	 */
 	test: () => {
-		if (config.debug()) {
-			logger.info('Testing Nginx configuration');
-		}
-
-		return utils.execFile('/usr/sbin/nginx', ['-t', '-g', 'error_log off;']);
+		logger.debug("Testing Nginx configuration");
+		return utils.execFile("/usr/sbin/nginx", ["-t", "-g", "error_log off;"]);
 	},
 
 	/**
 	 * @returns {Promise}
 	 */
 	reload: () => {
-		return internalNginx.test()
-			.then(() => {
-				logger.info('Reloading Nginx');
-				return utils.execFile('/usr/sbin/nginx', ['-s', 'reload']);
-			});
+		return internalNginx.test().then(() => {
+			logger.info("Reloading Nginx");
+			return utils.execFile("/usr/sbin/nginx", ["-s", "reload"]);
+		});
 	},
 
 	/**
@@ -125,8 +122,8 @@ const internalNginx = {
 	 * @returns {String}
 	 */
 	getConfigName: (host_type, host_id) => {
-		if (host_type === 'default') {
-			return '/data/nginx/default_host/site.conf';
+		if (host_type === "default") {
+			return "/data/nginx/default_host/site.conf";
 		}
 		return `/data/nginx/${internalNginx.getFileFriendlyHostType(host_type)}/${host_id}.conf`;
 	},
@@ -141,38 +138,45 @@ const internalNginx = {
 			let template;
 
 			try {
-				template = fs.readFileSync(`${__dirname}/../templates/_location.conf`, {encoding: 'utf8'});
+				template = fs.readFileSync(`${__dirname}/../templates/_location.conf`, { encoding: "utf8" });
 			} catch (err) {
-				reject(new error.ConfigurationError(err.message));
+				reject(new errs.ConfigurationError(err.message));
 				return;
 			}
 
-			const renderEngine    = utils.getRenderEngine();
-			let renderedLocations = '';
+			const renderEngine = utils.getRenderEngine();
+			let renderedLocations = "";
 
 			const locationRendering = async () => {
 				for (let i = 0; i < host.locations.length; i++) {
-					const locationCopy = Object.assign({}, {access_list_id: host.access_list_id}, {certificate_id: host.certificate_id},
-						{ssl_forced: host.ssl_forced}, {caching_enabled: host.caching_enabled}, {block_exploits: host.block_exploits},
-						{allow_websocket_upgrade: host.allow_websocket_upgrade}, {http2_support: host.http2_support},
-						{hsts_enabled: host.hsts_enabled}, {hsts_subdomains: host.hsts_subdomains}, {access_list: host.access_list},
-						{certificate: host.certificate}, host.locations[i]);
+					const locationCopy = Object.assign(
+						{},
+						{ access_list_id: host.access_list_id },
+						{ certificate_id: host.certificate_id },
+						{ ssl_forced: host.ssl_forced },
+						{ caching_enabled: host.caching_enabled },
+						{ block_exploits: host.block_exploits },
+						{ allow_websocket_upgrade: host.allow_websocket_upgrade },
+						{ http2_support: host.http2_support },
+						{ hsts_enabled: host.hsts_enabled },
+						{ hsts_subdomains: host.hsts_subdomains },
+						{ access_list: host.access_list },
+						{ certificate: host.certificate },
+						host.locations[i],
+					);
 
-					if (locationCopy.forward_host.indexOf('/') > -1) {
-						const splitted = locationCopy.forward_host.split('/');
+					if (locationCopy.forward_host.indexOf("/") > -1) {
+						const splitted = locationCopy.forward_host.split("/");
 
 						locationCopy.forward_host = splitted.shift();
-						locationCopy.forward_path = `/${splitted.join('/')}`;
+						locationCopy.forward_path = `/${splitted.join("/")}`;
 					}
 
-					// eslint-disable-next-line
 					renderedLocations += await renderEngine.parseAndRender(template, locationCopy);
 				}
-
 			};
 
 			locationRendering().then(() => resolve(renderedLocations));
-
 		});
 	},
 
@@ -183,23 +187,21 @@ const internalNginx = {
 	 */
 	generateConfig: (host_type, host_row) => {
 		// Prevent modifying the original object:
-		const host           = JSON.parse(JSON.stringify(host_row));
+		const host = JSON.parse(JSON.stringify(host_row));
 		const nice_host_type = internalNginx.getFileFriendlyHostType(host_type);
 
-		if (config.debug()) {
-			logger.info(`Generating ${nice_host_type} Config:`, JSON.stringify(host, null, 2));
-		}
+		logger.debug(`Generating ${nice_host_type} Config:`, JSON.stringify(host, null, 2));
 
 		const renderEngine = utils.getRenderEngine();
 
 		return new Promise((resolve, reject) => {
-			let template   = null;
+			let template = null;
 			const filename = internalNginx.getConfigName(nice_host_type, host.id);
 
 			try {
-				template = fs.readFileSync(`${__dirname}/../templates/${nice_host_type}.conf`, {encoding: 'utf8'});
+				template = fs.readFileSync(`${__dirname}/../templates/${nice_host_type}.conf`, { encoding: "utf8" });
 			} catch (err) {
-				reject(new error.ConfigurationError(err.message));
+				reject(new errs.ConfigurationError(err.message));
 				return;
 			}
 
@@ -207,27 +209,26 @@ const internalNginx = {
 			let origLocations;
 
 			// Manipulate the data a bit before sending it to the template
-			if (nice_host_type !== 'default') {
+			if (nice_host_type !== "default") {
 				host.use_default_location = true;
-				if (typeof host.advanced_config !== 'undefined' && host.advanced_config) {
+				if (typeof host.advanced_config !== "undefined" && host.advanced_config) {
 					host.use_default_location = !internalNginx.advancedConfigHasDefaultLocation(host.advanced_config);
 				}
 			}
 
 			if (host.locations) {
 				//logger.info ('host.locations = ' + JSON.stringify(host.locations, null, 2));
-				origLocations    = [].concat(host.locations);
+				origLocations = [].concat(host.locations);
 				locationsPromise = internalNginx.renderLocations(host).then((renderedLocations) => {
 					host.locations = renderedLocations;
 				});
 
 				// Allow someone who is using / custom location path to use it, and skip the default / location
 				_.map(host.locations, (location) => {
-					if (location.path === '/') {
+					if (location.path === "/") {
 						host.use_default_location = false;
 					}
 				});
-
 			} else {
 				locationsPromise = Promise.resolve();
 			}
@@ -239,11 +240,8 @@ const internalNginx = {
 				renderEngine
 					.parseAndRender(template, host)
 					.then((config_text) => {
-						fs.writeFileSync(filename, config_text, {encoding: 'utf8'});
-
-						if (config.debug()) {
-							logger.success('Wrote config:', filename, config_text);
-						}
+						fs.writeFileSync(filename, config_text, { encoding: "utf8" });
+						logger.debug("Wrote config:", filename, config_text);
 
 						// Restore locations array
 						host.locations = origLocations;
@@ -251,11 +249,8 @@ const internalNginx = {
 						resolve(true);
 					})
 					.catch((err) => {
-						if (config.debug()) {
-							logger.warn(`Could not write ${filename}:`, err.message);
-						}
-
-						reject(new error.ConfigurationError(err.message));
+						logger.debug(`Could not write ${filename}:`, err.message);
+						reject(new errs.ConfigurationError(err.message));
 					});
 			});
 		});
@@ -270,20 +265,17 @@ const internalNginx = {
 	 * @returns {Promise}
 	 */
 	generateLetsEncryptRequestConfig: (certificate) => {
-		if (config.debug()) {
-			logger.info('Generating LetsEncrypt Request Config:', certificate);
-		}
-
+		logger.debug("Generating LetsEncrypt Request Config:", certificate);
 		const renderEngine = utils.getRenderEngine();
 
 		return new Promise((resolve, reject) => {
-			let template   = null;
+			let template = null;
 			const filename = `/data/nginx/temp/letsencrypt_${certificate.id}.conf`;
 
 			try {
-				template = fs.readFileSync(`${__dirname}/../templates/letsencrypt-request.conf`, {encoding: 'utf8'});
+				template = fs.readFileSync(`${__dirname}/../templates/letsencrypt-request.conf`, { encoding: "utf8" });
 			} catch (err) {
-				reject(new error.ConfigurationError(err.message));
+				reject(new errs.ConfigurationError(err.message));
 				return;
 			}
 
@@ -292,20 +284,13 @@ const internalNginx = {
 			renderEngine
 				.parseAndRender(template, certificate)
 				.then((config_text) => {
-					fs.writeFileSync(filename, config_text, {encoding: 'utf8'});
-
-					if (config.debug()) {
-						logger.success('Wrote config:', filename, config_text);
-					}
-
+					fs.writeFileSync(filename, config_text, { encoding: "utf8" });
+					logger.debug("Wrote config:", filename, config_text);
 					resolve(true);
 				})
 				.catch((err) => {
-					if (config.debug()) {
-						logger.warn(`Could not write ${filename}:`, err.message);
-					}
-
-					reject(new error.ConfigurationError(err.message));
+					logger.debug(`Could not write ${filename}:`, err.message);
+					reject(new errs.ConfigurationError(err.message));
 				});
 		});
 	},
@@ -316,11 +301,14 @@ const internalNginx = {
 	 * @param   {String}  filename
 	 */
 	deleteFile: (filename) => {
-		logger.debug(`Deleting file: ${filename}`);
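+		// Skip silently if the file is already gone so we do not log a spurious
+		// unlink failure below.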
+		if (!fs.existsSync(filename)) {
+			return;
+		}
 		try {
+			logger.debug(`Deleting file: ${filename}`);
 			fs.unlinkSync(filename);
 		} catch (err) {
-			logger.debug('Could not delete file:', JSON.stringify(err, null, 2));
+			logger.debug("Could not delete file:", JSON.stringify(err, null, 2));
 		}
 	},
 
@@ -330,7 +318,7 @@ const internalNginx = {
 	 * @returns String
 	 */
 	getFileFriendlyHostType: (host_type) => {
-		return host_type.replace(/-/g, '_');
+		return host_type.replace(/-/g, "_");
 	},
 
 	/**
@@ -341,7 +329,7 @@ const internalNginx = {
 	 */
 	deleteLetsEncryptRequestConfig: (certificate) => {
 		const config_file = `/data/nginx/temp/letsencrypt_${certificate.id}.conf`;
-		return new Promise((resolve/*, reject*/) => {
+		return new Promise((resolve /*, reject*/) => {
 			internalNginx.deleteFile(config_file);
 			resolve();
 		});
@@ -354,10 +342,13 @@ const internalNginx = {
 	 * @returns {Promise}
 	 */
 	deleteConfig: (host_type, host, delete_err_file) => {
-		const config_file     = internalNginx.getConfigName(internalNginx.getFileFriendlyHostType(host_type), typeof host === 'undefined' ? 0 : host.id);
+		const config_file = internalNginx.getConfigName(
+			internalNginx.getFileFriendlyHostType(host_type),
+			typeof host === "undefined" ? 0 : host.id,
+		);
 		const config_file_err = `${config_file}.err`;
 
-		return new Promise((resolve/*, reject*/) => {
+		return new Promise((resolve /*, reject*/) => {
 			internalNginx.deleteFile(config_file);
 			if (delete_err_file) {
 				internalNginx.deleteFile(config_file_err);
@@ -372,10 +363,13 @@ const internalNginx = {
 	 * @returns {Promise}
 	 */
 	renameConfigAsError: (host_type, host) => {
-		const config_file     = internalNginx.getConfigName(internalNginx.getFileFriendlyHostType(host_type), typeof host === 'undefined' ? 0 : host.id);
+		const config_file = internalNginx.getConfigName(
+			internalNginx.getFileFriendlyHostType(host_type),
+			typeof host === "undefined" ? 0 : host.id,
+		);
 		const config_file_err = `${config_file}.err`;
 
-		return new Promise((resolve/*, reject*/) => {
+		return new Promise((resolve /*, reject*/) => {
 			fs.unlink(config_file, () => {
 				// ignore result, continue
 				fs.rename(config_file, config_file_err, () => {
@@ -387,14 +381,15 @@ const internalNginx = {
 	},
 
 	/**
-	 * @param   {String}  host_type
+	 * @param   {String}  hostType
 	 * @param   {Array}   hosts
 	 * @returns {Promise}
 	 */
-	bulkGenerateConfigs: (host_type, hosts) => {
+	bulkGenerateConfigs: (hostType, hosts) => {
 		const promises = [];
 		hosts.map((host) => {
-			promises.push(internalNginx.generateConfig(host_type, host));
+			promises.push(internalNginx.generateConfig(hostType, host));
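+			// The return value is unused; the pushed promises are awaited via Promise.all below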
+			return true;
 		});
 
 		return Promise.all(promises);
@@ -409,6 +404,7 @@ const internalNginx = {
 		const promises = [];
 		hosts.map((host) => {
 			promises.push(internalNginx.deleteConfig(host_type, host, true));
+			return true;
 		});
 
 		return Promise.all(promises);
@@ -424,13 +420,13 @@ const internalNginx = {
 	 * @returns {boolean}
 	 */
 	ipv6Enabled: () => {
-		if (typeof process.env.DISABLE_IPV6 !== 'undefined') {
+		if (typeof process.env.DISABLE_IPV6 !== "undefined") {
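+			// Any of "on", "true", "1" or "yes" (case-insensitive) disables IPv6 in generated configs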
 			const disabled = process.env.DISABLE_IPV6.toLowerCase();
-			return !(disabled === 'on' || disabled === 'true' || disabled === '1' || disabled === 'yes');
+			return !(disabled === "on" || disabled === "true" || disabled === "1" || disabled === "yes");
 		}
 
 		return true;
-	}
+	},
 };
 
-module.exports = internalNginx;
+export default internalNginx;
diff --git a/backend/internal/proxy-host.js b/backend/internal/proxy-host.js
index 32f2bc0d..3299012a 100644
--- a/backend/internal/proxy-host.js
+++ b/backend/internal/proxy-host.js
@@ -1,107 +1,106 @@
-const _                   = require('lodash');
-const error               = require('../lib/error');
-const utils               = require('../lib/utils');
-const proxyHostModel      = require('../models/proxy_host');
-const internalHost        = require('./host');
-const internalNginx       = require('./nginx');
-const internalAuditLog    = require('./audit-log');
-const internalCertificate = require('./certificate');
-const {castJsonIfNeed}    = require('../lib/helpers');
+import _ from "lodash";
+import errs from "../lib/error.js";
+import { castJsonIfNeed } from "../lib/helpers.js";
+import utils from "../lib/utils.js";
+import proxyHostModel from "../models/proxy_host.js";
+import internalAuditLog from "./audit-log.js";
+import internalCertificate from "./certificate.js";
+import internalHost from "./host.js";
+import internalNginx from "./nginx.js";
 
-function omissions () {
-	return ['is_deleted', 'owner.is_deleted'];
-}
+const omissions = () => {
+	return ["is_deleted", "owner.is_deleted"];
+};
 
 const internalProxyHost = {
-
 	/**
 	 * @param   {Access}  access
 	 * @param   {Object}  data
 	 * @returns {Promise}
 	 */
 	create: (access, data) => {
-		let create_certificate = data.certificate_id === 'new';
+		let thisData = data;
+		const createCertificate = thisData.certificate_id === "new";
 
-		if (create_certificate) {
-			delete data.certificate_id;
+		if (createCertificate) {
+			delete thisData.certificate_id;
 		}
 
-		return access.can('proxy_hosts:create', data)
+		return access
+			.can("proxy_hosts:create", thisData)
 			.then(() => {
 				// Get a list of the domain names and check each of them against existing records
-				let domain_name_check_promises = [];
+				const domain_name_check_promises = [];
 
-				data.domain_names.map(function (domain_name) {
+				thisData.domain_names.map((domain_name) => {
 					domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name));
+					return true;
 				});
 
-				return Promise.all(domain_name_check_promises)
-					.then((check_results) => {
-						check_results.map(function (result) {
-							if (result.is_taken) {
-								throw new error.ValidationError(result.hostname + ' is already in use');
-							}
-						});
+				return Promise.all(domain_name_check_promises).then((check_results) => {
+					check_results.map((result) => {
+						if (result.is_taken) {
+							throw new errs.ValidationError(`${result.hostname} is already in use`);
+						}
+						return true;
 					});
+				});
 			})
 			.then(() => {
 				// At this point the domains should have been checked
-				data.owner_user_id = access.token.getUserId(1);
-				data               = internalHost.cleanSslHstsData(data);
+				thisData.owner_user_id = access.token.getUserId(1);
+				thisData = internalHost.cleanSslHstsData(thisData);
 
 				// Fix for db field not having a default value
 				// for this optional field.
-				if (typeof data.advanced_config === 'undefined') {
-					data.advanced_config = '';
+				if (typeof thisData.advanced_config === "undefined") {
+					thisData.advanced_config = "";
 				}
 
-				return proxyHostModel
-					.query()
-					.insertAndFetch(data)
-					.then(utils.omitRow(omissions()));
+				return proxyHostModel.query().insertAndFetch(thisData).then(utils.omitRow(omissions()));
 			})
 			.then((row) => {
-				if (create_certificate) {
-					return internalCertificate.createQuickCertificate(access, data)
+				if (createCertificate) {
+					return internalCertificate
+						.createQuickCertificate(access, thisData)
 						.then((cert) => {
 							// update host with cert id
 							return internalProxyHost.update(access, {
-								id:             row.id,
-								certificate_id: cert.id
+								id: row.id,
+								certificate_id: cert.id,
 							});
 						})
 						.then(() => {
 							return row;
 						});
-				} else {
-					return row;
 				}
+				return row;
 			})
 			.then((row) => {
 				// re-fetch with cert
 				return internalProxyHost.get(access, {
-					id:     row.id,
-					expand: ['certificate', 'owner', 'access_list.[clients,items]']
+					id: row.id,
+					expand: ["certificate", "owner", "access_list.[clients,items]"],
 				});
 			})
 			.then((row) => {
 				// Configure nginx
-				return internalNginx.configure(proxyHostModel, 'proxy_host', row)
-					.then(() => {
-						return row;
-					});
+				return internalNginx.configure(proxyHostModel, "proxy_host", row).then(() => {
+					return row;
+				});
 			})
 			.then((row) => {
 				// Audit log
-				data.meta = _.assign({}, data.meta || {}, row.meta);
+				thisData.meta = _.assign({}, thisData.meta || {}, row.meta);
 
 				// Add to audit log
-				return internalAuditLog.add(access, {
-					action:      'created',
-					object_type: 'proxy-host',
-					object_id:   row.id,
-					meta:        data
-				})
+				return internalAuditLog
+					.add(access, {
+						action: "created",
+						object_type: "proxy-host",
+						object_id: row.id,
+						meta: thisData,
+					})
 					.then(() => {
 						return row;
 					});
@@ -115,100 +114,110 @@ const internalProxyHost = {
 	 * @return {Promise}
 	 */
 	update: (access, data) => {
-		let create_certificate = data.certificate_id === 'new';
+		let thisData = data;
+		const create_certificate = thisData.certificate_id === "new";
 
 		if (create_certificate) {
-			delete data.certificate_id;
+			delete thisData.certificate_id;
 		}
 
-		return access.can('proxy_hosts:update', data.id)
+		return access
+			.can("proxy_hosts:update", thisData.id)
 			.then((/*access_data*/) => {
 				// Get a list of the domain names and check each of them against existing records
-				let domain_name_check_promises = [];
+				const domain_name_check_promises = [];
 
-				if (typeof data.domain_names !== 'undefined') {
-					data.domain_names.map(function (domain_name) {
-						domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, 'proxy', data.id));
+				if (typeof thisData.domain_names !== "undefined") {
+					thisData.domain_names.map((domain_name) => {
+						return domain_name_check_promises.push(
+							internalHost.isHostnameTaken(domain_name, "proxy", thisData.id),
+						);
 					});
 
-					return Promise.all(domain_name_check_promises)
-						.then((check_results) => {
-							check_results.map(function (result) {
-								if (result.is_taken) {
-									throw new error.ValidationError(result.hostname + ' is already in use');
-								}
-							});
+					return Promise.all(domain_name_check_promises).then((check_results) => {
+						check_results.map((result) => {
+							if (result.is_taken) {
+								throw new errs.ValidationError(`${result.hostname} is already in use`);
+							}
+							return true;
 						});
+					});
 				}
 			})
 			.then(() => {
-				return internalProxyHost.get(access, {id: data.id});
+				return internalProxyHost.get(access, { id: thisData.id });
 			})
 			.then((row) => {
-				if (row.id !== data.id) {
+				if (row.id !== thisData.id) {
 					// Sanity check that something crazy hasn't happened
-					throw new error.InternalValidationError('Proxy Host could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
+					throw new errs.InternalValidationError(
+						`Proxy Host could not be updated, IDs do not match: ${row.id} !== ${thisData.id}`,
+					);
 				}
 
 				if (create_certificate) {
-					return internalCertificate.createQuickCertificate(access, {
-						domain_names: data.domain_names || row.domain_names,
-						meta:         _.assign({}, row.meta, data.meta)
-					})
+					return internalCertificate
+						.createQuickCertificate(access, {
+							domain_names: thisData.domain_names || row.domain_names,
+							meta: _.assign({}, row.meta, thisData.meta),
+						})
 						.then((cert) => {
 							// update host with cert id
-							data.certificate_id = cert.id;
+							thisData.certificate_id = cert.id;
 						})
 						.then(() => {
 							return row;
 						});
-				} else {
-					return row;
 				}
+				return row;
 			})
 			.then((row) => {
 				// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
-				data = _.assign({}, {
-					domain_names: row.domain_names
-				}, data);
+				thisData = _.assign(
+					{},
+					{
+						domain_names: row.domain_names,
+					},
+					thisData,
+				);
 
-				data = internalHost.cleanSslHstsData(data, row);
+				thisData = internalHost.cleanSslHstsData(thisData, row);
 
 				return proxyHostModel
 					.query()
-					.where({id: data.id})
-					.patch(data)
+					.where({ id: thisData.id })
+					.patch(thisData)
 					.then(utils.omitRow(omissions()))
 					.then((saved_row) => {
 						// Add to audit log
-						return internalAuditLog.add(access, {
-							action:      'updated',
-							object_type: 'proxy-host',
-							object_id:   row.id,
-							meta:        data
-						})
+						return internalAuditLog
+							.add(access, {
+								action: "updated",
+								object_type: "proxy-host",
+								object_id: row.id,
+								meta: thisData,
+							})
 							.then(() => {
 								return saved_row;
 							});
 					});
 			})
 			.then(() => {
-				return internalProxyHost.get(access, {
-					id:     data.id,
-					expand: ['owner', 'certificate', 'access_list.[clients,items]']
-				})
+				return internalProxyHost
+					.get(access, {
+						id: thisData.id,
+						expand: ["owner", "certificate", "access_list.[clients,items]"],
+					})
 					.then((row) => {
 						if (!row.enabled) {
 							// No need to add nginx config if host is disabled
 							return row;
 						}
 						// Configure nginx
-						return internalNginx.configure(proxyHostModel, 'proxy_host', row)
-							.then((new_meta) => {
-								row.meta = new_meta;
-								row      = internalHost.cleanRowCertificateMeta(row);
-								return _.omit(row, omissions());
-							});
+						return internalNginx.configure(proxyHostModel, "proxy_host", row).then((new_meta) => {
+							row.meta = new_meta;
+							return _.omit(internalHost.cleanRowCertificateMeta(row), omissions());
+						});
 					});
 			});
 	},
@@ -222,39 +231,38 @@ const internalProxyHost = {
 	 * @return {Promise}
 	 */
 	get: (access, data) => {
-		if (typeof data === 'undefined') {
-			data = {};
-		}
+		const thisData = data || {};
 
-		return access.can('proxy_hosts:get', data.id)
+		return access
+			.can("proxy_hosts:get", thisData.id)
 			.then((access_data) => {
-				let query = proxyHostModel
+				const query = proxyHostModel
 					.query()
-					.where('is_deleted', 0)
-					.andWhere('id', data.id)
-					.allowGraph('[owner,access_list.[clients,items],certificate]')
+					.where("is_deleted", 0)
+					.andWhere("id", thisData.id)
+					.allowGraph("[owner,access_list.[clients,items],certificate]")
 					.first();
 
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('owner_user_id', access.token.getUserId(1));
+				if (access_data.permission_visibility !== "all") {
+					query.andWhere("owner_user_id", access.token.getUserId(1));
 				}
 
-				if (typeof data.expand !== 'undefined' && data.expand !== null) {
-					query.withGraphFetched('[' + data.expand.join(', ') + ']');
+				if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
+					query.withGraphFetched(`[${thisData.expand.join(", ")}]`);
 				}
 
 				return query.then(utils.omitRow(omissions()));
 			})
 			.then((row) => {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
+					throw new errs.ItemNotFoundError(thisData.id);
 				}
-				row = internalHost.cleanRowCertificateMeta(row);
+				const thisRow = internalHost.cleanRowCertificateMeta(row);
 				// Custom omissions
-				if (typeof data.omit !== 'undefined' && data.omit !== null) {
-					row = _.omit(row, data.omit);
+				if (typeof thisData.omit !== "undefined" && thisData.omit !== null) {
+					return _.omit(thisRow, thisData.omit);
 				}
-				return row;
+				return thisRow;
 			});
 	},
 
@@ -266,35 +274,35 @@ const internalProxyHost = {
 	 * @returns {Promise}
 	 */
 	delete: (access, data) => {
-		return access.can('proxy_hosts:delete', data.id)
+		return access
+			.can("proxy_hosts:delete", data.id)
 			.then(() => {
-				return internalProxyHost.get(access, {id: data.id});
+				return internalProxyHost.get(access, { id: data.id });
 			})
 			.then((row) => {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
+					throw new errs.ItemNotFoundError(data.id);
 				}
 
 				return proxyHostModel
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
-						is_deleted: 1
+						is_deleted: 1,
 					})
 					.then(() => {
 						// Delete Nginx Config
-						return internalNginx.deleteConfig('proxy_host', row)
-							.then(() => {
-								return internalNginx.reload();
-							});
+						return internalNginx.deleteConfig("proxy_host", row).then(() => {
+							return internalNginx.reload();
+						});
 					})
 					.then(() => {
 						// Add to audit log
 						return internalAuditLog.add(access, {
-							action:      'deleted',
-							object_type: 'proxy-host',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
+							action: "deleted",
+							object_type: "proxy-host",
+							object_id: row.id,
+							meta: _.omit(row, omissions()),
 						});
 					});
 			})
@@ -311,39 +319,41 @@ const internalProxyHost = {
 	 * @returns {Promise}
 	 */
 	enable: (access, data) => {
-		return access.can('proxy_hosts:update', data.id)
+		return access
+			.can("proxy_hosts:update", data.id)
 			.then(() => {
 				return internalProxyHost.get(access, {
-					id:     data.id,
-					expand: ['certificate', 'owner', 'access_list']
+					id: data.id,
+					expand: ["certificate", "owner", "access_list"],
 				});
 			})
 			.then((row) => {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				} else if (row.enabled) {
-					throw new error.ValidationError('Host is already enabled');
+					throw new errs.ItemNotFoundError(data.id);
+				}
+				if (row.enabled) {
+					throw new errs.ValidationError("Host is already enabled");
 				}
 
 				row.enabled = 1;
 
 				return proxyHostModel
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
-						enabled: 1
+						enabled: 1,
 					})
 					.then(() => {
 						// Configure nginx
-						return internalNginx.configure(proxyHostModel, 'proxy_host', row);
+						return internalNginx.configure(proxyHostModel, "proxy_host", row);
 					})
 					.then(() => {
 						// Add to audit log
 						return internalAuditLog.add(access, {
-							action:      'enabled',
-							object_type: 'proxy-host',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
+							action: "enabled",
+							object_type: "proxy-host",
+							object_id: row.id,
+							meta: _.omit(row, omissions()),
 						});
 					});
 			})
@@ -360,39 +370,40 @@ const internalProxyHost = {
 	 * @returns {Promise}
 	 */
 	disable: (access, data) => {
-		return access.can('proxy_hosts:update', data.id)
+		return access
+			.can("proxy_hosts:update", data.id)
 			.then(() => {
-				return internalProxyHost.get(access, {id: data.id});
+				return internalProxyHost.get(access, { id: data.id });
 			})
 			.then((row) => {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				} else if (!row.enabled) {
-					throw new error.ValidationError('Host is already disabled');
+					throw new errs.ItemNotFoundError(data.id);
+				}
+				if (!row.enabled) {
+					throw new errs.ValidationError("Host is already disabled");
 				}
 
 				row.enabled = 0;
 
 				return proxyHostModel
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
-						enabled: 0
+						enabled: 0,
 					})
 					.then(() => {
 						// Delete Nginx Config
-						return internalNginx.deleteConfig('proxy_host', row)
-							.then(() => {
-								return internalNginx.reload();
-							});
+						return internalNginx.deleteConfig("proxy_host", row).then(() => {
+							return internalNginx.reload();
+						});
 					})
 					.then(() => {
 						// Add to audit log
 						return internalAuditLog.add(access, {
-							action:      'disabled',
-							object_type: 'proxy-host',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
+							action: "disabled",
+							object_type: "proxy-host",
+							object_id: row.id,
+							meta: _.omit(row, omissions()),
 						});
 					});
 			})
@@ -409,40 +420,35 @@ const internalProxyHost = {
 	 * @param   {String}  [search_query]
 	 * @returns {Promise}
 	 */
-	getAll: (access, expand, search_query) => {
-		return access.can('proxy_hosts:list')
-			.then((access_data) => {
-				let query = proxyHostModel
-					.query()
-					.where('is_deleted', 0)
-					.groupBy('id')
-					.allowGraph('[owner,access_list,certificate]')
-					.orderBy(castJsonIfNeed('domain_names'), 'ASC');
+	getAll: async (access, expand, searchQuery) => {
+		const accessData = await access.can("proxy_hosts:list");
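+		// accessData.permission_visibility decides whether results are scoped to the requesting user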
+		const query = proxyHostModel
+			.query()
+			.where("is_deleted", 0)
+			.groupBy("id")
+			.allowGraph("[owner,access_list,certificate]")
+			.orderBy(castJsonIfNeed("domain_names"), "ASC");
 
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('owner_user_id', access.token.getUserId(1));
-				}
+		if (accessData.permission_visibility !== "all") {
+			query.andWhere("owner_user_id", access.token.getUserId(1));
+		}
 
-				// Query is used for searching
-				if (typeof search_query === 'string' && search_query.length > 0) {
-					query.where(function () {
-						this.where(castJsonIfNeed('domain_names'), 'like', `%${search_query}%`);
-					});
-				}
-
-				if (typeof expand !== 'undefined' && expand !== null) {
-					query.withGraphFetched('[' + expand.join(', ') + ']');
-				}
-
-				return query.then(utils.omitRows(omissions()));
-			})
-			.then((rows) => {
-				if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
-					return internalHost.cleanAllRowsCertificateMeta(rows);
-				}
-
-				return rows;
+		// Query is used for searching
+		if (typeof searchQuery === "string" && searchQuery.length > 0) {
+			query.where(function () {
+				this.where(castJsonIfNeed("domain_names"), "like", `%${searchQuery}%`);
 			});
+		}
+
+		if (typeof expand !== "undefined" && expand !== null) {
+			query.withGraphFetched(`[${expand.join(", ")}]`);
+		}
+
+		const rows = await query.then(utils.omitRows(omissions()));
+		if (typeof expand !== "undefined" && expand !== null && expand.indexOf("certificate") !== -1) {
+			return internalHost.cleanAllRowsCertificateMeta(rows);
+		}
+		return rows;
 	},
 
 	/**
@@ -453,20 +459,16 @@ const internalProxyHost = {
 	 * @returns {Promise}
 	 */
 	getCount: (user_id, visibility) => {
-		let query = proxyHostModel
-			.query()
-			.count('id as count')
-			.where('is_deleted', 0);
+		const query = proxyHostModel.query().count("id as count").where("is_deleted", 0);
 
-		if (visibility !== 'all') {
-			query.andWhere('owner_user_id', user_id);
+		if (visibility !== "all") {
+			query.andWhere("owner_user_id", user_id);
 		}
 
-		return query.first()
-			.then((row) => {
-				return parseInt(row.count, 10);
-			});
-	}
+		return query.first().then((row) => {
+			return Number.parseInt(row.count, 10);
+		});
+	},
 };
 
-module.exports = internalProxyHost;
+export default internalProxyHost;
diff --git a/backend/internal/redirection-host.js b/backend/internal/redirection-host.js
index 6a81b866..159ffd8b 100644
--- a/backend/internal/redirection-host.js
+++ b/backend/internal/redirection-host.js
@@ -1,73 +1,73 @@
-const _                    = require('lodash');
-const error                = require('../lib/error');
-const utils                = require('../lib/utils');
-const redirectionHostModel = require('../models/redirection_host');
-const internalHost         = require('./host');
-const internalNginx        = require('./nginx');
-const internalAuditLog     = require('./audit-log');
-const internalCertificate  = require('./certificate');
-const {castJsonIfNeed}     = require('../lib/helpers');
+import _ from "lodash";
+import errs from "../lib/error.js";
+import { castJsonIfNeed } from "../lib/helpers.js";
+import utils from "../lib/utils.js";
+import redirectionHostModel from "../models/redirection_host.js";
+import internalAuditLog from "./audit-log.js";
+import internalCertificate from "./certificate.js";
+import internalHost from "./host.js";
+import internalNginx from "./nginx.js";
 
-function omissions () {
-	return ['is_deleted'];
-}
+const omissions = () => {
+	return ["is_deleted"];
+};
 
 const internalRedirectionHost = {
-
 	/**
 	 * @param   {Access}  access
 	 * @param   {Object}  data
 	 * @returns {Promise}
 	 */
 	create: (access, data) => {
-		let create_certificate = data.certificate_id === 'new';
+		let thisData = data || {};
+		const createCertificate = thisData.certificate_id === "new";
 
-		if (create_certificate) {
-			delete data.certificate_id;
+		if (createCertificate) {
+			delete thisData.certificate_id;
 		}
 
-		return access.can('redirection_hosts:create', data)
+		return access
+			.can("redirection_hosts:create", thisData)
 			.then((/*access_data*/) => {
 				// Get a list of the domain names and check each of them against existing records
-				let domain_name_check_promises = [];
+				const domain_name_check_promises = [];
 
-				data.domain_names.map(function (domain_name) {
+				thisData.domain_names.map((domain_name) => {
 					domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name));
+					return true;
 				});
 
-				return Promise.all(domain_name_check_promises)
-					.then((check_results) => {
-						check_results.map(function (result) {
-							if (result.is_taken) {
-								throw new error.ValidationError(result.hostname + ' is already in use');
-							}
-						});
+				return Promise.all(domain_name_check_promises).then((check_results) => {
+					check_results.map((result) => {
+						if (result.is_taken) {
+							throw new errs.ValidationError(`${result.hostname} is already in use`);
+						}
+						return true;
 					});
+				});
 			})
 			.then(() => {
 				// At this point the domains should have been checked
-				data.owner_user_id = access.token.getUserId(1);
-				data               = internalHost.cleanSslHstsData(data);
+				thisData.owner_user_id = access.token.getUserId(1);
+				thisData = internalHost.cleanSslHstsData(thisData);
 
 				// Fix for db field not having a default value
 				// for this optional field.
-				if (typeof data.advanced_config === 'undefined') {
-					data.advanced_config = '';
+				if (typeof thisData.advanced_config === "undefined") {
+					thisData.advanced_config = "";
 				}
 
-				return redirectionHostModel
-					.query()
-					.insertAndFetch(data)
-					.then(utils.omitRow(omissions()));
+				return redirectionHostModel.query().insertAndFetch(thisData).then(utils.omitRow(omissions()));
 			})
 			.then((row) => {
-				if (create_certificate) {
-					return internalCertificate.createQuickCertificate(access, data)
+				if (createCertificate) {
+					return internalCertificate
+						.createQuickCertificate(access, thisData)
 						.then((cert) => {
 							// update host with cert id
 							return internalRedirectionHost.update(access, {
-								id:             row.id,
-								certificate_id: cert.id
+								id: row.id,
+								certificate_id: cert.id,
 							});
 						})
 						.then(() => {
@@ -79,27 +79,27 @@ const internalRedirectionHost = {
 			.then((row) => {
 				// re-fetch with cert
 				return internalRedirectionHost.get(access, {
-					id:     row.id,
-					expand: ['certificate', 'owner']
+					id: row.id,
+					expand: ["certificate", "owner"],
 				});
 			})
 			.then((row) => {
 				// Configure nginx
-				return internalNginx.configure(redirectionHostModel, 'redirection_host', row)
-					.then(() => {
-						return row;
-					});
+				return internalNginx.configure(redirectionHostModel, "redirection_host", row).then(() => {
+					return row;
+				});
 			})
 			.then((row) => {
-				data.meta = _.assign({}, data.meta || {}, row.meta);
+				thisData.meta = _.assign({}, thisData.meta || {}, row.meta);
 
 				// Add to audit log
-				return internalAuditLog.add(access, {
-					action:      'created',
-					object_type: 'redirection-host',
-					object_id:   row.id,
-					meta:        data
-				})
+				return internalAuditLog
+					.add(access, {
+						action: "created",
+						object_type: "redirection-host",
+						object_id: row.id,
+						meta: thisData,
+					})
 					.then(() => {
 						return row;
 					});
@@ -113,94 +113,107 @@ const internalRedirectionHost = {
 	 * @return {Promise}
 	 */
 	update: (access, data) => {
-		let create_certificate = data.certificate_id === 'new';
+		let thisData = data || {};
+		const createCertificate = thisData.certificate_id === "new";
 
-		if (create_certificate) {
-			delete data.certificate_id;
+		if (createCertificate) {
+			delete thisData.certificate_id;
 		}
 
-		return access.can('redirection_hosts:update', data.id)
+		return access
+			.can("redirection_hosts:update", thisData.id)
 			.then((/*access_data*/) => {
 				// Get a list of the domain names and check each of them against existing records
-				let domain_name_check_promises = [];
+				const domain_name_check_promises = [];
 
-				if (typeof data.domain_names !== 'undefined') {
-					data.domain_names.map(function (domain_name) {
-						domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, 'redirection', data.id));
+				if (typeof thisData.domain_names !== "undefined") {
+					thisData.domain_names.map((domain_name) => {
+						domain_name_check_promises.push(
+							internalHost.isHostnameTaken(domain_name, "redirection", thisData.id),
+						);
+						return true;
 					});
 
-					return Promise.all(domain_name_check_promises)
-						.then((check_results) => {
-							check_results.map(function (result) {
-								if (result.is_taken) {
-									throw new error.ValidationError(result.hostname + ' is already in use');
-								}
-							});
+					return Promise.all(domain_name_check_promises).then((check_results) => {
+						check_results.map((result) => {
+							if (result.is_taken) {
+								throw new errs.ValidationError(`${result.hostname} is already in use`);
+							}
+							return true;
 						});
+					});
 				}
 			})
 			.then(() => {
-				return internalRedirectionHost.get(access, {id: data.id});
+				return internalRedirectionHost.get(access, { id: thisData.id });
 			})
 			.then((row) => {
-				if (row.id !== data.id) {
+				if (row.id !== thisData.id) {
 					// Sanity check that something crazy hasn't happened
-					throw new error.InternalValidationError('Redirection Host could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
+					throw new errs.InternalValidationError(
+						`Redirection Host could not be updated, IDs do not match: ${row.id} !== ${thisData.id}`,
+					);
 				}
 
-				if (create_certificate) {
-					return internalCertificate.createQuickCertificate(access, {
-						domain_names: data.domain_names || row.domain_names,
-						meta:         _.assign({}, row.meta, data.meta)
-					})
+				if (createCertificate) {
+					return internalCertificate
+						.createQuickCertificate(access, {
+							domain_names: thisData.domain_names || row.domain_names,
+							meta: _.assign({}, row.meta, thisData.meta),
+						})
 						.then((cert) => {
 							// update host with cert id
-							data.certificate_id = cert.id;
+							thisData.certificate_id = cert.id;
 						})
 						.then(() => {
 							return row;
 						});
-				} else {
-					return row;
 				}
+				return row;
 			})
 			.then((row) => {
 				// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
-				data = _.assign({}, {
-					domain_names: row.domain_names
-				}, data);
+				thisData = _.assign(
+					{},
+					{
+						domain_names: row.domain_names,
+					},
+					thisData,
+				);
 
-				data = internalHost.cleanSslHstsData(data, row);
+				thisData = internalHost.cleanSslHstsData(thisData, row);
 
 				return redirectionHostModel
 					.query()
-					.where({id: data.id})
-					.patch(data)
+					.where({ id: thisData.id })
+					.patch(thisData)
 					.then((saved_row) => {
 						// Add to audit log
-						return internalAuditLog.add(access, {
-							action:      'updated',
-							object_type: 'redirection-host',
-							object_id:   row.id,
-							meta:        data
-						})
+						return internalAuditLog
+							.add(access, {
+								action: "updated",
+								object_type: "redirection-host",
+								object_id: row.id,
+								meta: thisData,
+							})
 							.then(() => {
 								return _.omit(saved_row, omissions());
 							});
 					});
 			})
 			.then(() => {
-				return internalRedirectionHost.get(access, {
-					id:     data.id,
-					expand: ['owner', 'certificate']
-				})
+				return internalRedirectionHost
+					.get(access, {
+						id: thisData.id,
+						expand: ["owner", "certificate"],
+					})
 					.then((row) => {
 						// Configure nginx
-						return internalNginx.configure(redirectionHostModel, 'redirection_host', row)
+						return internalNginx
+							.configure(redirectionHostModel, "redirection_host", row)
 							.then((new_meta) => {
 								row.meta = new_meta;
-								row      = internalHost.cleanRowCertificateMeta(row);
-								return _.omit(row, omissions());
+								return _.omit(internalHost.cleanRowCertificateMeta(row), omissions());
 							});
 					});
 			});
@@ -215,39 +228,39 @@ const internalRedirectionHost = {
 	 * @return {Promise}
 	 */
 	get: (access, data) => {
-		if (typeof data === 'undefined') {
-			data = {};
-		}
+		const thisData = data || {};
 
-		return access.can('redirection_hosts:get', data.id)
+		return access
+			.can("redirection_hosts:get", thisData.id)
 			.then((access_data) => {
-				let query = redirectionHostModel
+				const query = redirectionHostModel
 					.query()
-					.where('is_deleted', 0)
-					.andWhere('id', data.id)
-					.allowGraph('[owner,certificate]')
+					.where("is_deleted", 0)
+					.andWhere("id", thisData.id)
+					.allowGraph("[owner,certificate]")
 					.first();
 
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('owner_user_id', access.token.getUserId(1));
+				if (access_data.permission_visibility !== "all") {
+					query.andWhere("owner_user_id", access.token.getUserId(1));
 				}
 
-				if (typeof data.expand !== 'undefined' && data.expand !== null) {
-					query.withGraphFetched('[' + data.expand.join(', ') + ']');
+				if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
+					query.withGraphFetched(`[${thisData.expand.join(", ")}]`);
 				}
 
 				return query.then(utils.omitRow(omissions()));
 			})
 			.then((row) => {
-				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
+				let thisRow = row;
+				if (!thisRow || !thisRow.id) {
+					throw new errs.ItemNotFoundError(thisData.id);
 				}
-				row = internalHost.cleanRowCertificateMeta(row);
+				thisRow = internalHost.cleanRowCertificateMeta(thisRow);
 				// Custom omissions
-				if (typeof data.omit !== 'undefined' && data.omit !== null) {
-					row = _.omit(row, data.omit);
+				if (typeof thisData.omit !== "undefined" && thisData.omit !== null) {
+					return _.omit(thisRow, thisData.omit);
 				}
-				return row;
+				return thisRow;
 			});
 	},
 
@@ -259,35 +272,35 @@ const internalRedirectionHost = {
 	 * @returns {Promise}
 	 */
 	delete: (access, data) => {
-		return access.can('redirection_hosts:delete', data.id)
+		return access
+			.can("redirection_hosts:delete", data.id)
 			.then(() => {
-				return internalRedirectionHost.get(access, {id: data.id});
+				return internalRedirectionHost.get(access, { id: data.id });
 			})
 			.then((row) => {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
+					throw new errs.ItemNotFoundError(data.id);
 				}
 
 				return redirectionHostModel
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
-						is_deleted: 1
+						is_deleted: 1,
 					})
 					.then(() => {
 						// Delete Nginx Config
-						return internalNginx.deleteConfig('redirection_host', row)
-							.then(() => {
-								return internalNginx.reload();
-							});
+						return internalNginx.deleteConfig("redirection_host", row).then(() => {
+							return internalNginx.reload();
+						});
 					})
 					.then(() => {
 						// Add to audit log
 						return internalAuditLog.add(access, {
-							action:      'deleted',
-							object_type: 'redirection-host',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
+							action: "deleted",
+							object_type: "redirection-host",
+							object_id: row.id,
+							meta: _.omit(row, omissions()),
 						});
 					});
 			})
@@ -304,39 +317,41 @@ const internalRedirectionHost = {
 	 * @returns {Promise}
 	 */
 	enable: (access, data) => {
-		return access.can('redirection_hosts:update', data.id)
+		return access
+			.can("redirection_hosts:update", data.id)
 			.then(() => {
 				return internalRedirectionHost.get(access, {
-					id:     data.id,
-					expand: ['certificate', 'owner']
+					id: data.id,
+					expand: ["certificate", "owner"],
 				});
 			})
 			.then((row) => {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				} else if (row.enabled) {
-					throw new error.ValidationError('Host is already enabled');
+					throw new errs.ItemNotFoundError(data.id);
+				}
+				if (row.enabled) {
+					throw new errs.ValidationError("Host is already enabled");
 				}
 
 				row.enabled = 1;
 
 				return redirectionHostModel
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
-						enabled: 1
+						enabled: 1,
 					})
 					.then(() => {
 						// Configure nginx
-						return internalNginx.configure(redirectionHostModel, 'redirection_host', row);
+						return internalNginx.configure(redirectionHostModel, "redirection_host", row);
 					})
 					.then(() => {
 						// Add to audit log
 						return internalAuditLog.add(access, {
-							action:      'enabled',
-							object_type: 'redirection-host',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
+							action: "enabled",
+							object_type: "redirection-host",
+							object_id: row.id,
+							meta: _.omit(row, omissions()),
 						});
 					});
 			})
@@ -353,39 +368,40 @@ const internalRedirectionHost = {
 	 * @returns {Promise}
 	 */
 	disable: (access, data) => {
-		return access.can('redirection_hosts:update', data.id)
+		return access
+			.can("redirection_hosts:update", data.id)
 			.then(() => {
-				return internalRedirectionHost.get(access, {id: data.id});
+				return internalRedirectionHost.get(access, { id: data.id });
 			})
 			.then((row) => {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				} else if (!row.enabled) {
-					throw new error.ValidationError('Host is already disabled');
+					throw new errs.ItemNotFoundError(data.id);
+				}
+				if (!row.enabled) {
+					throw new errs.ValidationError("Host is already disabled");
 				}
 
 				row.enabled = 0;
 
 				return redirectionHostModel
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
-						enabled: 0
+						enabled: 0,
 					})
 					.then(() => {
 						// Delete Nginx Config
-						return internalNginx.deleteConfig('redirection_host', row)
-							.then(() => {
-								return internalNginx.reload();
-							});
+						return internalNginx.deleteConfig("redirection_host", row).then(() => {
+							return internalNginx.reload();
+						});
 					})
 					.then(() => {
 						// Add to audit log
 						return internalAuditLog.add(access, {
-							action:      'disabled',
-							object_type: 'redirection-host',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
+							action: "disabled",
+							object_type: "redirection-host",
+							object_id: row.id,
+							meta: _.omit(row, omissions()),
 						});
 					});
 			})
@@ -403,34 +419,35 @@ const internalRedirectionHost = {
 	 * @returns {Promise}
 	 */
 	getAll: (access, expand, search_query) => {
-		return access.can('redirection_hosts:list')
+		return access
+			.can("redirection_hosts:list")
 			.then((access_data) => {
-				let query = redirectionHostModel
+				const query = redirectionHostModel
 					.query()
-					.where('is_deleted', 0)
-					.groupBy('id')
-					.allowGraph('[owner,certificate]')
-					.orderBy(castJsonIfNeed('domain_names'), 'ASC');
+					.where("is_deleted", 0)
+					.groupBy("id")
+					.allowGraph("[owner,certificate]")
+					.orderBy(castJsonIfNeed("domain_names"), "ASC");
 
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('owner_user_id', access.token.getUserId(1));
+				if (access_data.permission_visibility !== "all") {
+					query.andWhere("owner_user_id", access.token.getUserId(1));
 				}
 
 				// Query is used for searching
-				if (typeof search_query === 'string' && search_query.length > 0) {
+				if (typeof search_query === "string" && search_query.length > 0) {
 					query.where(function () {
-						this.where(castJsonIfNeed('domain_names'), 'like', `%${search_query}%`);
+						this.where(castJsonIfNeed("domain_names"), "like", `%${search_query}%`);
 					});
 				}
 
-				if (typeof expand !== 'undefined' && expand !== null) {
-					query.withGraphFetched('[' + expand.join(', ') + ']');
+				if (typeof expand !== "undefined" && expand !== null) {
+					query.withGraphFetched(`[${expand.join(", ")}]`);
 				}
 
 				return query.then(utils.omitRows(omissions()));
 			})
 			.then((rows) => {
-				if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
+				if (typeof expand !== "undefined" && expand !== null && expand.indexOf("certificate") !== -1) {
 					return internalHost.cleanAllRowsCertificateMeta(rows);
 				}
 
@@ -446,20 +463,16 @@ const internalRedirectionHost = {
 	 * @returns {Promise}
 	 */
 	getCount: (user_id, visibility) => {
-		let query = redirectionHostModel
-			.query()
-			.count('id as count')
-			.where('is_deleted', 0);
+		const query = redirectionHostModel.query().count("id as count").where("is_deleted", 0);
 
-		if (visibility !== 'all') {
-			query.andWhere('owner_user_id', user_id);
+		if (visibility !== "all") {
+			query.andWhere("owner_user_id", user_id);
 		}
 
-		return query.first()
-			.then((row) => {
-				return parseInt(row.count, 10);
-			});
-	}
+		return query.first().then((row) => {
+			return Number.parseInt(row.count, 10);
+		});
+	},
 };
 
-module.exports = internalRedirectionHost;
+export default internalRedirectionHost;
diff --git a/backend/internal/report.js b/backend/internal/report.js
index 4dde659b..8bad7ec2 100644
--- a/backend/internal/report.js
+++ b/backend/internal/report.js
@@ -1,38 +1,37 @@
-const internalProxyHost       = require('./proxy-host');
-const internalRedirectionHost = require('./redirection-host');
-const internalDeadHost        = require('./dead-host');
-const internalStream          = require('./stream');
+import internalDeadHost from "./dead-host.js";
+import internalProxyHost from "./proxy-host.js";
+import internalRedirectionHost from "./redirection-host.js";
+import internalStream from "./stream.js";
 
 const internalReport = {
-
 	/**
 	 * @param  {Access}   access
 	 * @return {Promise}
 	 */
 	getHostsReport: (access) => {
-		return access.can('reports:hosts', 1)
+		return access
+			.can("reports:hosts", 1)
 			.then((access_data) => {
-				let user_id = access.token.getUserId(1);
+				const userId = access.token.getUserId(1);
 
-				let promises = [
-					internalProxyHost.getCount(user_id, access_data.visibility),
-					internalRedirectionHost.getCount(user_id, access_data.visibility),
-					internalStream.getCount(user_id, access_data.visibility),
-					internalDeadHost.getCount(user_id, access_data.visibility)
+				const promises = [
+					internalProxyHost.getCount(userId, access_data.visibility),
+					internalRedirectionHost.getCount(userId, access_data.visibility),
+					internalStream.getCount(userId, access_data.visibility),
+					internalDeadHost.getCount(userId, access_data.visibility),
 				];
 
 				return Promise.all(promises);
 			})
 			.then((counts) => {
 				return {
-					proxy:       counts.shift(),
+					proxy: counts.shift(),
 					redirection: counts.shift(),
-					stream:      counts.shift(),
-					dead:        counts.shift()
+					stream: counts.shift(),
+					dead: counts.shift(),
 				};
 			});
-
-	}
+	},
 };
 
-module.exports = internalReport;
+export default internalReport;
diff --git a/backend/internal/setting.js b/backend/internal/setting.js
index d4ac67d8..f8fc7114 100644
--- a/backend/internal/setting.js
+++ b/backend/internal/setting.js
@@ -1,10 +1,9 @@
-const fs            = require('fs');
-const error         = require('../lib/error');
-const settingModel  = require('../models/setting');
-const internalNginx = require('./nginx');
+import fs from "node:fs";
+import errs from "../lib/error.js";
+import settingModel from "../models/setting.js";
+import internalNginx from "./nginx.js";
 
 const internalSetting = {
-
 	/**
 	 * @param  {Access}  access
 	 * @param  {Object}  data
@@ -12,37 +11,38 @@ const internalSetting = {
 	 * @return {Promise}
 	 */
 	update: (access, data) => {
-		return access.can('settings:update', data.id)
+		return access
+			.can("settings:update", data.id)
 			.then((/*access_data*/) => {
-				return internalSetting.get(access, {id: data.id});
+				return internalSetting.get(access, { id: data.id });
 			})
 			.then((row) => {
 				if (row.id !== data.id) {
 					// Sanity check that something crazy hasn't happened
-					throw new error.InternalValidationError('Setting could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
+					throw new errs.InternalValidationError(
+						`Setting could not be updated, IDs do not match: ${row.id} !== ${data.id}`,
+					);
 				}
 
-				return settingModel
-					.query()
-					.where({id: data.id})
-					.patch(data);
+				return settingModel.query().where({ id: data.id }).patch(data);
 			})
 			.then(() => {
 				return internalSetting.get(access, {
-					id: data.id
+					id: data.id,
 				});
 			})
 			.then((row) => {
-				if (row.id === 'default-site') {
+				if (row.id === "default-site") {
 					// write the html if we need to
-					if (row.value === 'html') {
-						fs.writeFileSync('/data/nginx/default_www/index.html', row.meta.html, {encoding: 'utf8'});
+					if (row.value === "html") {
+						fs.writeFileSync("/data/nginx/default_www/index.html", row.meta.html, { encoding: "utf8" });
 					}
 
 					// Configure nginx
-					return internalNginx.deleteConfig('default')
+					return internalNginx
+						.deleteConfig("default")
 						.then(() => {
-							return internalNginx.generateConfig('default', row);
+							return internalNginx.generateConfig("default", row);
 						})
 						.then(() => {
 							return internalNginx.test();
@@ -54,7 +54,8 @@ const internalSetting = {
 							return row;
 						})
 						.catch((/*err*/) => {
-							internalNginx.deleteConfig('default')
+							internalNginx
+								.deleteConfig("default")
 								.then(() => {
 									return internalNginx.test();
 								})
@@ -63,12 +64,11 @@ const internalSetting = {
 								})
 								.then(() => {
 									// I'm being slack here I know..
-									throw new error.ValidationError('Could not reconfigure Nginx. Please check logs.');
+									throw new errs.ValidationError("Could not reconfigure Nginx. Please check logs.");
 								});
 						});
-				} else {
-					return row;
 				}
+				return row;
 			});
 	},
 
@@ -79,19 +79,16 @@ const internalSetting = {
 	 * @return {Promise}
 	 */
 	get: (access, data) => {
-		return access.can('settings:get', data.id)
+		return access
+			.can("settings:get", data.id)
 			.then(() => {
-				return settingModel
-					.query()
-					.where('id', data.id)
-					.first();
+				return settingModel.query().where("id", data.id).first();
 			})
 			.then((row) => {
 				if (row) {
 					return row;
-				} else {
-					throw new error.ItemNotFoundError(data.id);
 				}
+				throw new errs.ItemNotFoundError(data.id);
 			});
 	},
 
@@ -102,15 +99,13 @@ const internalSetting = {
 	 * @returns {*}
 	 */
 	getCount: (access) => {
-		return access.can('settings:list')
+		return access
+			.can("settings:list")
 			.then(() => {
-				return settingModel
-					.query()
-					.count('id as count')
-					.first();
+				return settingModel.query().count("id as count").first();
 			})
 			.then((row) => {
-				return parseInt(row.count, 10);
+				return Number.parseInt(row.count, 10);
 			});
 	},
 
@@ -121,13 +116,10 @@ const internalSetting = {
 	 * @returns {Promise}
 	 */
 	getAll: (access) => {
-		return access.can('settings:list')
-			.then(() => {
-				return settingModel
-					.query()
-					.orderBy('description', 'ASC');
-			});
-	}
+		return access.can("settings:list").then(() => {
+			return settingModel.query().orderBy("description", "ASC");
+		});
+	},
 };
 
-module.exports = internalSetting;
+export default internalSetting;
diff --git a/backend/internal/stream.js b/backend/internal/stream.js
index 50ce0832..805b6652 100644
--- a/backend/internal/stream.js
+++ b/backend/internal/stream.js
@@ -1,88 +1,85 @@
-const _                   = require('lodash');
-const error               = require('../lib/error');
-const utils               = require('../lib/utils');
-const streamModel         = require('../models/stream');
-const internalNginx       = require('./nginx');
-const internalAuditLog    = require('./audit-log');
-const internalCertificate = require('./certificate');
-const internalHost        = require('./host');
-const {castJsonIfNeed}    = require('../lib/helpers');
+import _ from "lodash";
+import errs from "../lib/error.js";
+import { castJsonIfNeed } from "../lib/helpers.js";
+import utils from "../lib/utils.js";
+import streamModel from "../models/stream.js";
+import internalAuditLog from "./audit-log.js";
+import internalCertificate from "./certificate.js";
+import internalHost from "./host.js";
+import internalNginx from "./nginx.js";
 
-function omissions () {
-	return ['is_deleted', 'owner.is_deleted', 'certificate.is_deleted'];
-}
+const omissions = () => {
+	return ["is_deleted", "owner.is_deleted", "certificate.is_deleted"];
+};
 
 const internalStream = {
-
 	/**
 	 * @param   {Access}  access
 	 * @param   {Object}  data
 	 * @returns {Promise}
 	 */
 	create: (access, data) => {
-		const create_certificate = data.certificate_id === 'new';
+		const create_certificate = data.certificate_id === "new";
 
 		if (create_certificate) {
 			delete data.certificate_id;
 		}
 
-		return access.can('streams:create', data)
+		return access
+			.can("streams:create", data)
 			.then((/*access_data*/) => {
 				// TODO: At this point the existing ports should have been checked
 				data.owner_user_id = access.token.getUserId(1);
 
-				if (typeof data.meta === 'undefined') {
+				if (typeof data.meta === "undefined") {
 					data.meta = {};
 				}
 
 				// streams aren't routed by domain name so don't store domain names in the DB
-				let data_no_domains = structuredClone(data);
+				const data_no_domains = structuredClone(data);
 				delete data_no_domains.domain_names;
 
-				return streamModel
-					.query()
-					.insertAndFetch(data_no_domains)
-					.then(utils.omitRow(omissions()));
+				return streamModel.query().insertAndFetch(data_no_domains).then(utils.omitRow(omissions()));
 			})
 			.then((row) => {
 				if (create_certificate) {
-					return internalCertificate.createQuickCertificate(access, data)
+					return internalCertificate
+						.createQuickCertificate(access, data)
 						.then((cert) => {
 							// update host with cert id
 							return internalStream.update(access, {
-								id:             row.id,
-								certificate_id: cert.id
+								id: row.id,
+								certificate_id: cert.id,
 							});
 						})
 						.then(() => {
 							return row;
 						});
-				} else {
-					return row;
 				}
+				return row;
 			})
 			.then((row) => {
 				// re-fetch with cert
 				return internalStream.get(access, {
-					id:     row.id,
-					expand: ['certificate', 'owner']
+					id: row.id,
+					expand: ["certificate", "owner"],
 				});
 			})
 			.then((row) => {
 				// Configure nginx
-				return internalNginx.configure(streamModel, 'stream', row)
-					.then(() => {
-						return row;
-					});
+				return internalNginx.configure(streamModel, "stream", row).then(() => {
+					return row;
+				});
 			})
 			.then((row) => {
 				// Add to audit log
-				return internalAuditLog.add(access, {
-					action:      'created',
-					object_type: 'stream',
-					object_id:   row.id,
-					meta:        data
-				})
+				return internalAuditLog
+					.add(access, {
+						action: "created",
+						object_type: "stream",
+						object_id: row.id,
+						meta: data,
+					})
 					.then(() => {
 						return row;
 					});
@@ -96,72 +93,78 @@ const internalStream = {
 	 * @return {Promise}
 	 */
 	update: (access, data) => {
-		const create_certificate = data.certificate_id === 'new';
+		let thisData = data;
+		const create_certificate = thisData.certificate_id === "new";
 
 		if (create_certificate) {
-			delete data.certificate_id;
+			delete thisData.certificate_id;
 		}
 
-		return access.can('streams:update', data.id)
+		return access
+			.can("streams:update", thisData.id)
 			.then((/*access_data*/) => {
 				// TODO: at this point the existing streams should have been checked
-				return internalStream.get(access, {id: data.id});
+				return internalStream.get(access, { id: thisData.id });
 			})
 			.then((row) => {
-				if (row.id !== data.id) {
+				if (row.id !== thisData.id) {
 					// Sanity check that something crazy hasn't happened
-					throw new error.InternalValidationError('Stream could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
+					throw new errs.InternalValidationError(
+						`Stream could not be updated, IDs do not match: ${row.id} !== ${thisData.id}`,
+					);
 				}
 
 				if (create_certificate) {
-					return internalCertificate.createQuickCertificate(access, {
-						domain_names: data.domain_names || row.domain_names,
-						meta:         _.assign({}, row.meta, data.meta)
-					})
+					return internalCertificate
+						.createQuickCertificate(access, {
+							domain_names: thisData.domain_names || row.domain_names,
+							meta: _.assign({}, row.meta, thisData.meta),
+						})
 						.then((cert) => {
 							// update host with cert id
-							data.certificate_id = cert.id;
+							thisData.certificate_id = cert.id;
 						})
 						.then(() => {
 							return row;
 						});
-				} else {
-					return row;
 				}
+				return row;
 			})
 			.then((row) => {
 				// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
-				data = _.assign({}, {
-					domain_names: row.domain_names
-				}, data);
+				thisData = _.assign(
+					{},
+					{
+						domain_names: row.domain_names,
+					},
+					thisData,
+				);
 
 				return streamModel
 					.query()
-					.patchAndFetchById(row.id, data)
+					.patchAndFetchById(row.id, thisData)
 					.then(utils.omitRow(omissions()))
 					.then((saved_row) => {
 						// Add to audit log
-						return internalAuditLog.add(access, {
-							action:      'updated',
-							object_type: 'stream',
-							object_id:   row.id,
-							meta:        data
-						})
+						return internalAuditLog
+							.add(access, {
+								action: "updated",
+								object_type: "stream",
+								object_id: row.id,
+								meta: thisData,
+							})
 							.then(() => {
 								return saved_row;
 							});
 					});
 			})
 			.then(() => {
-				return internalStream.get(access, {id: data.id, expand: ['owner', 'certificate']})
-					.then((row) => {
-						return internalNginx.configure(streamModel, 'stream', row)
-							.then((new_meta) => {
-								row.meta = new_meta;
-								row      = internalHost.cleanRowCertificateMeta(row);
-								return _.omit(row, omissions());
-							});
+				return internalStream.get(access, { id: thisData.id, expand: ["owner", "certificate"] }).then((row) => {
+					return internalNginx.configure(streamModel, "stream", row).then((new_meta) => {
+						row.meta = new_meta;
+						return _.omit(internalHost.cleanRowCertificateMeta(row), omissions());
 					});
+				});
 			});
 	},
 
@@ -174,39 +177,39 @@ const internalStream = {
 	 * @return {Promise}
 	 */
 	get: (access, data) => {
-		if (typeof data === 'undefined') {
-			data = {};
-		}
+		const thisData = data || {};
 
-		return access.can('streams:get', data.id)
+		return access
+			.can("streams:get", thisData.id)
 			.then((access_data) => {
-				let query = streamModel
+				const query = streamModel
 					.query()
-					.where('is_deleted', 0)
-					.andWhere('id', data.id)
-					.allowGraph('[owner,certificate]')
+					.where("is_deleted", 0)
+					.andWhere("id", thisData.id)
+					.allowGraph("[owner,certificate]")
 					.first();
 
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('owner_user_id', access.token.getUserId(1));
+				if (access_data.permission_visibility !== "all") {
+					query.andWhere("owner_user_id", access.token.getUserId(1));
 				}
 
-				if (typeof data.expand !== 'undefined' && data.expand !== null) {
-					query.withGraphFetched('[' + data.expand.join(', ') + ']');
+				if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
+					query.withGraphFetched(`[${thisData.expand.join(", ")}]`);
 				}
 
 				return query.then(utils.omitRow(omissions()));
 			})
 			.then((row) => {
-				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
+				let thisRow = row;
+				if (!thisRow || !thisRow.id) {
+					throw new errs.ItemNotFoundError(thisData.id);
 				}
-				row = internalHost.cleanRowCertificateMeta(row);
+				thisRow = internalHost.cleanRowCertificateMeta(thisRow);
 				// Custom omissions
-				if (typeof data.omit !== 'undefined' && data.omit !== null) {
-					row = _.omit(row, data.omit);
+				if (typeof thisData.omit !== "undefined" && thisData.omit !== null) {
+					return _.omit(thisRow, thisData.omit);
 				}
-				return row;
+				return thisRow;
 			});
 	},
 
@@ -218,35 +221,35 @@ const internalStream = {
 	 * @returns {Promise}
 	 */
 	delete: (access, data) => {
-		return access.can('streams:delete', data.id)
+		return access
+			.can("streams:delete", data.id)
 			.then(() => {
-				return internalStream.get(access, {id: data.id});
+				return internalStream.get(access, { id: data.id });
 			})
 			.then((row) => {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
+					throw new errs.ItemNotFoundError(data.id);
 				}
 
 				return streamModel
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
-						is_deleted: 1
+						is_deleted: 1,
 					})
 					.then(() => {
 						// Delete Nginx Config
-						return internalNginx.deleteConfig('stream', row)
-							.then(() => {
-								return internalNginx.reload();
-							});
+						return internalNginx.deleteConfig("stream", row).then(() => {
+							return internalNginx.reload();
+						});
 					})
 					.then(() => {
 						// Add to audit log
 						return internalAuditLog.add(access, {
-							action:      'deleted',
-							object_type: 'stream',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
+							action: "deleted",
+							object_type: "stream",
+							object_id: row.id,
+							meta: _.omit(row, omissions()),
 						});
 					});
 			})
@@ -263,39 +266,41 @@ const internalStream = {
 	 * @returns {Promise}
 	 */
 	enable: (access, data) => {
-		return access.can('streams:update', data.id)
+		return access
+			.can("streams:update", data.id)
 			.then(() => {
 				return internalStream.get(access, {
-					id:     data.id,
-					expand: ['certificate', 'owner']
+					id: data.id,
+					expand: ["certificate", "owner"],
 				});
 			})
 			.then((row) => {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				} else if (row.enabled) {
-					throw new error.ValidationError('Stream is already enabled');
+					throw new errs.ItemNotFoundError(data.id);
+				}
+				if (row.enabled) {
+					throw new errs.ValidationError("Stream is already enabled");
 				}
 
 				row.enabled = 1;
 
 				return streamModel
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
-						enabled: 1
+						enabled: 1,
 					})
 					.then(() => {
 						// Configure nginx
-						return internalNginx.configure(streamModel, 'stream', row);
+						return internalNginx.configure(streamModel, "stream", row);
 					})
 					.then(() => {
 						// Add to audit log
 						return internalAuditLog.add(access, {
-							action:      'enabled',
-							object_type: 'stream',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
+							action: "enabled",
+							object_type: "stream",
+							object_id: row.id,
+							meta: _.omit(row, omissions()),
 						});
 					});
 			})
@@ -312,39 +317,40 @@ const internalStream = {
 	 * @returns {Promise}
 	 */
 	disable: (access, data) => {
-		return access.can('streams:update', data.id)
+		return access
+			.can("streams:update", data.id)
 			.then(() => {
-				return internalStream.get(access, {id: data.id});
+				return internalStream.get(access, { id: data.id });
 			})
 			.then((row) => {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				} else if (!row.enabled) {
-					throw new error.ValidationError('Stream is already disabled');
+					throw new errs.ItemNotFoundError(data.id);
+				}
+				if (!row.enabled) {
+					throw new errs.ValidationError("Stream is already disabled");
 				}
 
 				row.enabled = 0;
 
 				return streamModel
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
-						enabled: 0
+						enabled: 0,
 					})
 					.then(() => {
 						// Delete Nginx Config
-						return internalNginx.deleteConfig('stream', row)
-							.then(() => {
-								return internalNginx.reload();
-							});
+						return internalNginx.deleteConfig("stream", row).then(() => {
+							return internalNginx.reload();
+						});
 					})
 					.then(() => {
 						// Add to audit log
 						return internalAuditLog.add(access, {
-							action:      'disabled',
-							object_type: 'stream-host',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
+							action: "disabled",
+							object_type: "stream",
+							object_id: row.id,
+							meta: _.omit(row, omissions()),
 						});
 					});
 			})
@@ -362,34 +368,35 @@ const internalStream = {
 	 * @returns {Promise}
 	 */
 	getAll: (access, expand, search_query) => {
-		return access.can('streams:list')
+		return access
+			.can("streams:list")
 			.then((access_data) => {
 				const query = streamModel
 					.query()
-					.where('is_deleted', 0)
-					.groupBy('id')
-					.allowGraph('[owner,certificate]')
-					.orderBy('incoming_port', 'ASC');
+					.where("is_deleted", 0)
+					.groupBy("id")
+					.allowGraph("[owner,certificate]")
+					.orderBy("incoming_port", "ASC");
 
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('owner_user_id', access.token.getUserId(1));
+				if (access_data.permission_visibility !== "all") {
+					query.andWhere("owner_user_id", access.token.getUserId(1));
 				}
 
 				// Query is used for searching
-				if (typeof search_query === 'string' && search_query.length > 0) {
+				if (typeof search_query === "string" && search_query.length > 0) {
 					query.where(function () {
-						this.where(castJsonIfNeed('incoming_port'), 'like', `%${search_query}%`);
+						this.where(castJsonIfNeed("incoming_port"), "like", `%${search_query}%`);
 					});
 				}
 
-				if (typeof expand !== 'undefined' && expand !== null) {
-					query.withGraphFetched('[' + expand.join(', ') + ']');
+				if (typeof expand !== "undefined" && expand !== null) {
+					query.withGraphFetched(`[${expand.join(", ")}]`);
 				}
 
 				return query.then(utils.omitRows(omissions()));
 			})
 			.then((rows) => {
-				if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
+				if (typeof expand !== "undefined" && expand !== null && expand.indexOf("certificate") !== -1) {
 					return internalHost.cleanAllRowsCertificateMeta(rows);
 				}
 
@@ -405,20 +412,16 @@ const internalStream = {
 	 * @returns {Promise}
 	 */
 	getCount: (user_id, visibility) => {
-		const query = streamModel
-			.query()
-			.count('id AS count')
-			.where('is_deleted', 0);
+		const query = streamModel.query().count("id AS count").where("is_deleted", 0);
 
-		if (visibility !== 'all') {
-			query.andWhere('owner_user_id', user_id);
+		if (visibility !== "all") {
+			query.andWhere("owner_user_id", user_id);
 		}
 
-		return query.first()
-			.then((row) => {
-				return parseInt(row.count, 10);
-			});
-	}
+		return query.first().then((row) => {
+			return Number.parseInt(row.count, 10);
+		});
+	},
 };
 
-module.exports = internalStream;
+export default internalStream;
diff --git a/backend/internal/token.js b/backend/internal/token.js
index 0e6dec5e..1935b16d 100644
--- a/backend/internal/token.js
+++ b/backend/internal/token.js
@@ -1,14 +1,14 @@
-const _          = require('lodash');
-const error      = require('../lib/error');
-const userModel  = require('../models/user');
-const authModel  = require('../models/auth');
-const helpers    = require('../lib/helpers');
-const TokenModel = require('../models/token');
+import _ from "lodash";
+import errs from "../lib/error.js";
+import { parseDatePeriod } from "../lib/helpers.js";
+import authModel from "../models/auth.js";
+import TokenModel from "../models/token.js";
+import userModel from "../models/user.js";
 
-const ERROR_MESSAGE_INVALID_AUTH = 'Invalid email or password';
-
-module.exports = {
+const ERROR_MESSAGE_INVALID_AUTH = "Invalid email or password";
+const ERROR_MESSAGE_INVALID_AUTH_I18N = "error.invalid-auth";
 
+export default {
 	/**
 	 * @param   {Object} data
 	 * @param   {String} data.identity
@@ -18,70 +18,66 @@ module.exports = {
 	 * @param   {String} [issuer]
 	 * @returns {Promise}
 	 */
-	getTokenFromEmail: (data, issuer) => {
-		let Token = new TokenModel();
+	getTokenFromEmail: async (data, issuer) => {
+		const Token = TokenModel();
 
-		data.scope  = data.scope || 'user';
-		data.expiry = data.expiry || '1d';
+		data.scope = data.scope || "user";
+		data.expiry = data.expiry || "1d";
 
-		return userModel
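+		// Look up an active (not deleted or disabled) user for this email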
+		const user = await userModel
 			.query()
-			.where('email', data.identity.toLowerCase().trim())
-			.andWhere('is_deleted', 0)
-			.andWhere('is_disabled', 0)
-			.first()
-			.then((user) => {
-				if (user) {
-					// Get auth
-					return authModel
-						.query()
-						.where('user_id', '=', user.id)
-						.where('type', '=', 'password')
-						.first()
-						.then((auth) => {
-							if (auth) {
-								return auth.verifyPassword(data.secret)
-									.then((valid) => {
-										if (valid) {
+			.where("email", data.identity.toLowerCase().trim())
+			.andWhere("is_deleted", 0)
+			.andWhere("is_disabled", 0)
+			.first();
 
-											if (data.scope !== 'user' && _.indexOf(user.roles, data.scope) === -1) {
-												// The scope requested doesn't exist as a role against the user,
-												// you shall not pass.
-												throw new error.AuthError('Invalid scope: ' + data.scope);
-											}
+		if (!user) {
+			throw new errs.AuthError(ERROR_MESSAGE_INVALID_AUTH);
+		}
 
-											// Create a moment of the expiry expression
-											let expiry = helpers.parseDatePeriod(data.expiry);
-											if (expiry === null) {
-												throw new error.AuthError('Invalid expiry time: ' + data.expiry);
-											}
+		const auth = await authModel
+			.query()
+			.where("user_id", "=", user.id)
+			.where("type", "=", "password")
+			.first();
 
-											return Token.create({
-												iss:   issuer || 'api',
-												attrs: {
-													id: user.id
-												},
-												scope:     [data.scope],
-												expiresIn: data.expiry
-											})
-												.then((signed) => {
-													return {
-														token:   signed.token,
-														expires: expiry.toISOString()
-													};
-												});
-										} else {
-											throw new error.AuthError(ERROR_MESSAGE_INVALID_AUTH);
-										}
-									});
-							} else {
-								throw new error.AuthError(ERROR_MESSAGE_INVALID_AUTH);
-							}
-						});
-				} else {
-					throw new error.AuthError(ERROR_MESSAGE_INVALID_AUTH);
-				}
-			});
+		if (!auth) {
+			throw new errs.AuthError(ERROR_MESSAGE_INVALID_AUTH);
+		}
+
+		const valid = await auth.verifyPassword(data.secret);
+		if (!valid) {
+			throw new errs.AuthError(
+				ERROR_MESSAGE_INVALID_AUTH,
+				ERROR_MESSAGE_INVALID_AUTH_I18N,
+			);
+		}
+
+		if (data.scope !== "user" && _.indexOf(user.roles, data.scope) === -1) {
+			// The scope requested doesn't exist as a role against the user,
+			// you shall not pass.
+			throw new errs.AuthError(`Invalid scope: ${data.scope}`);
+		}
+
+		// Create a moment of the expiry expression
+		const expiry = parseDatePeriod(data.expiry);
+		if (expiry === null) {
+			throw new errs.AuthError(`Invalid expiry time: ${data.expiry}`);
+		}
+
+		const signed = await Token.create({
+			iss: issuer || "api",
+			attrs: {
+				id: user.id,
+			},
+			scope: [data.scope],
+			expiresIn: data.expiry,
+		});
+
+		return {
+			token: signed.token,
+			expires: expiry.toISOString(),
+		};
 	},
 
 	/**
@@ -91,74 +87,70 @@ module.exports = {
 	 * @param {String} [data.scope]   Only considered if existing token scope is admin
 	 * @returns {Promise}
 	 */
-	getFreshToken: (access, data) => {
-		let Token = new TokenModel();
+	getFreshToken: async (access, data) => {
+		const Token = TokenModel();
+		const thisData = data || {};
 
-		data        = data || {};
-		data.expiry = data.expiry || '1d';
-
-		if (access && access.token.getUserId(0)) {
+		thisData.expiry = thisData.expiry || "1d";
 
+		if (access?.token.getUserId(0)) {
 			// Create a moment of the expiry expression
-			let expiry = helpers.parseDatePeriod(data.expiry);
+			const expiry = parseDatePeriod(thisData.expiry);
 			if (expiry === null) {
-				throw new error.AuthError('Invalid expiry time: ' + data.expiry);
+				throw new errs.AuthError(`Invalid expiry time: ${thisData.expiry}`);
 			}
 
-			let token_attrs = {
-				id: access.token.getUserId(0)
+			const token_attrs = {
+				id: access.token.getUserId(0),
 			};
 
 			// Only admins can request otherwise scoped tokens
-			let scope = access.token.get('scope');
-			if (data.scope && access.token.hasScope('admin')) {
-				scope = [data.scope];
+			let scope = access.token.get("scope");
+			if (thisData.scope && access.token.hasScope("admin")) {
+				scope = [thisData.scope];
 
-				if (data.scope === 'job-board' || data.scope === 'worker') {
+				if (thisData.scope === "job-board" || thisData.scope === "worker") {
 					token_attrs.id = 0;
 				}
 			}
 
-			return Token.create({
-				iss:       'api',
-				scope:     scope,
-				attrs:     token_attrs,
-				expiresIn: data.expiry
-			})
-				.then((signed) => {
-					return {
-						token:   signed.token,
-						expires: expiry.toISOString()
-					};
-				});
-		} else {
-			throw new error.AssertionFailedError('Existing token contained invalid user data');
+			const signed = await Token.create({
+				iss: "api",
+				scope: scope,
+				attrs: token_attrs,
+				expiresIn: thisData.expiry,
+			});
+
+			return {
+				token: signed.token,
+				expires: expiry.toISOString(),
+			};
 		}
+		throw new errs.AssertionFailedError("Existing token contained invalid user data");
 	},
 
 	/**
 	 * @param   {Object} user
 	 * @returns {Promise}
 	 */
-	getTokenFromUser: (user) => {
-		const expire = '1d';
-		const Token  = new TokenModel();
-		const expiry = helpers.parseDatePeriod(expire);
+	getTokenFromUser: async (user) => {
+		const expire = "1d";
+		const Token = TokenModel();
+		const expiry = parseDatePeriod(expire);
 
-		return Token.create({
-			iss:   'api',
+		const signed = await Token.create({
+			iss: "api",
 			attrs: {
-				id: user.id
+				id: user.id,
 			},
-			scope:     ['user'],
-			expiresIn: expire
-		})
-			.then((signed) => {
-				return {
-					token:   signed.token,
-					expires: expiry.toISOString(),
-					user:    user
-				};
-			});
-	}
+			scope: ["user"],
+			expiresIn: expire,
+		});
+
+		return {
+			token: signed.token,
+			expires: expiry.toISOString(),
+			user: user,
+		};
+	},
 };
diff --git a/backend/internal/user.js b/backend/internal/user.js
index 742ab65d..d13931d5 100644
--- a/backend/internal/user.js
+++ b/backend/internal/user.js
@@ -1,93 +1,76 @@
-const _                   = require('lodash');
-const error               = require('../lib/error');
-const utils               = require('../lib/utils');
-const userModel           = require('../models/user');
-const userPermissionModel = require('../models/user_permission');
-const authModel           = require('../models/auth');
-const gravatar            = require('gravatar');
-const internalToken       = require('./token');
-const internalAuditLog    = require('./audit-log');
+import gravatar from "gravatar";
+import _ from "lodash";
+import errs from "../lib/error.js";
+import utils from "../lib/utils.js";
+import authModel from "../models/auth.js";
+import userModel from "../models/user.js";
+import userPermissionModel from "../models/user_permission.js";
+import internalAuditLog from "./audit-log.js";
+import internalToken from "./token.js";
 
-function omissions () {
-	return ['is_deleted'];
-}
+const omissions = () => {
+	return ["is_deleted", "permissions.id", "permissions.user_id", "permissions.created_on", "permissions.modified_on"];
+};
+
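+// Fallback gravatar used when a user row has an empty avatar value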
+const DEFAULT_AVATAR = gravatar.url("admin@example.com", { default: "mm" });
 
 const internalUser = {
-
 	/**
+	 * Creating a user can happen unauthenticated only once, and only when no active users exist.
+	 * Otherwise, a valid auth method is required.
+	 *
 	 * @param   {Access}  access
 	 * @param   {Object}  data
 	 * @returns {Promise}
 	 */
-	create: (access, data) => {
-		let auth = data.auth || null;
+	create: async (access, data) => {
+		const auth = data.auth || null;
 		delete data.auth;
 
-		data.avatar = data.avatar || '';
-		data.roles  = data.roles || [];
+		data.avatar = data.avatar || "";
+		data.roles = data.roles || [];
 
-		if (typeof data.is_disabled !== 'undefined') {
+		if (typeof data.is_disabled !== "undefined") {
 			data.is_disabled = data.is_disabled ? 1 : 0;
 		}
 
-		return access.can('users:create', data)
-			.then(() => {
-				data.avatar = gravatar.url(data.email, {default: 'mm'});
+		await access.can("users:create", data);
+		data.avatar = gravatar.url(data.email, { default: "mm" });
 
-				return userModel
-					.query()
-					.insertAndFetch(data)
-					.then(utils.omitRow(omissions()));
-			})
-			.then((user) => {
-				if (auth) {
-					return authModel
-						.query()
-						.insert({
-							user_id: user.id,
-							type:    auth.type,
-							secret:  auth.secret,
-							meta:    {}
-						})
-						.then(() => {
-							return user;
-						});
-				} else {
-					return user;
-				}
-			})
-			.then((user) => {
-				// Create permissions row as well
-				let is_admin = data.roles.indexOf('admin') !== -1;
-
-				return userPermissionModel
-					.query()
-					.insert({
-						user_id:           user.id,
-						visibility:        is_admin ? 'all' : 'user',
-						proxy_hosts:       'manage',
-						redirection_hosts: 'manage',
-						dead_hosts:        'manage',
-						streams:           'manage',
-						access_lists:      'manage',
-						certificates:      'manage'
-					})
-					.then(() => {
-						return internalUser.get(access, {id: user.id, expand: ['permissions']});
-					});
-			})
-			.then((user) => {
-				// Add to audit log
-				return internalAuditLog.add(access, {
-					action:      'created',
-					object_type: 'user',
-					object_id:   user.id,
-					meta:        user
-				})
-					.then(() => {
-						return user;
-					});
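+		// Create the user row first, then the optional auth record and the permissions row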
+		let user = await userModel.query().insertAndFetch(data).then(utils.omitRow(omissions()));
+		if (auth) {
+			await authModel.query().insert({
+				user_id: user.id,
+				type: auth.type,
+				secret: auth.secret,
+				meta: {},
 			});
+		}
+
+		// Create permissions row as well
+		const isAdmin = data.roles.indexOf("admin") !== -1;
+
+		await userPermissionModel.query().insert({
+			user_id: user.id,
+			visibility: isAdmin ? "all" : "user",
+			proxy_hosts: "manage",
+			redirection_hosts: "manage",
+			dead_hosts: "manage",
+			streams: "manage",
+			access_lists: "manage",
+			certificates: "manage",
+		});
+
+		user = await internalUser.get(access, { id: user.id, expand: ["permissions"] });
+
+		await internalAuditLog.add(access, {
+			action: "created",
+			object_type: "user",
+			object_id: user.id,
+			meta: user,
+		});
+
+		return user;
 	},
 
 	/**
@@ -99,62 +82,57 @@ const internalUser = {
 	 * @return {Promise}
 	 */
 	update: (access, data) => {
-		if (typeof data.is_disabled !== 'undefined') {
+		if (typeof data.is_disabled !== "undefined") {
 			data.is_disabled = data.is_disabled ? 1 : 0;
 		}
 
-		return access.can('users:update', data.id)
+		return access
+			.can("users:update", data.id)
 			.then(() => {
-
 				// Make sure that the user being updated doesn't change their email to another user that is already using it
 				// 1. get user we want to update
-				return internalUser.get(access, {id: data.id})
-					.then((user) => {
+				return internalUser.get(access, { id: data.id }).then((user) => {
+					// 2. if email is to be changed, find other users with that email
+					if (typeof data.email !== "undefined") {
+						data.email = data.email.toLowerCase().trim();
 
-						// 2. if email is to be changed, find other users with that email
-						if (typeof data.email !== 'undefined') {
-							data.email = data.email.toLowerCase().trim();
-
-							if (user.email !== data.email) {
-								return internalUser.isEmailAvailable(data.email, data.id)
-									.then((available) => {
-										if (!available) {
-											throw new error.ValidationError('Email address already in use - ' + data.email);
-										}
-
-										return user;
-									});
-							}
+						if (user.email !== data.email) {
+							return internalUser.isEmailAvailable(data.email, data.id).then((available) => {
+								if (!available) {
+									throw new errs.ValidationError(`Email address already in use - ${data.email}`);
+								}
+								return user;
+							});
 						}
+					}
 
-						// No change to email:
-						return user;
-					});
+					// No change to email:
+					return user;
+				});
 			})
 			.then((user) => {
 				if (user.id !== data.id) {
 					// Sanity check that something crazy hasn't happened
-					throw new error.InternalValidationError('User could not be updated, IDs do not match: ' + user.id + ' !== ' + data.id);
+					throw new errs.InternalValidationError(
+						`User could not be updated, IDs do not match: ${user.id} !== ${data.id}`,
+					);
 				}
 
-				data.avatar = gravatar.url(data.email || user.email, {default: 'mm'});
-
-				return userModel
-					.query()
-					.patchAndFetchById(user.id, data)
-					.then(utils.omitRow(omissions()));
+				data.avatar = gravatar.url(data.email || user.email, { default: "mm" });
+				return userModel.query().patchAndFetchById(user.id, data).then(utils.omitRow(omissions()));
 			})
 			.then(() => {
-				return internalUser.get(access, {id: data.id});
+				return internalUser.get(access, { id: data.id });
 			})
 			.then((user) => {
 				// Add to audit log
-				return internalAuditLog.add(access, {
-					action:      'updated',
-					object_type: 'user',
-					object_id:   user.id,
-					meta:        data
-				})
+				return internalAuditLog
+					.add(access, {
+						action: "updated",
+						object_type: "user",
+						object_id: user.id,
+						meta: { ...data, id: user.id, name: user.name },
+					})
 					.then(() => {
 						return user;
 					});
@@ -170,37 +148,41 @@ const internalUser = {
 	 * @return {Promise}
 	 */
 	get: (access, data) => {
-		if (typeof data === 'undefined') {
-			data = {};
+		const thisData = data || {};
+
+		if (typeof thisData.id === "undefined" || !thisData.id) {
+			thisData.id = access.token.getUserId(0);
 		}
 
-		if (typeof data.id === 'undefined' || !data.id) {
-			data.id = access.token.getUserId(0);
-		}
-
-		return access.can('users:get', data.id)
+		return access
+			.can("users:get", thisData.id)
 			.then(() => {
-				let query = userModel
+				const query = userModel
 					.query()
-					.where('is_deleted', 0)
-					.andWhere('id', data.id)
-					.allowGraph('[permissions]')
+					.where("is_deleted", 0)
+					.andWhere("id", thisData.id)
+					.allowGraph("[permissions]")
 					.first();
 
-				if (typeof data.expand !== 'undefined' && data.expand !== null) {
-					query.withGraphFetched('[' + data.expand.join(', ') + ']');
+				if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
+					query.withGraphFetched(`[${thisData.expand.join(", ")}]`);
 				}
 
 				return query.then(utils.omitRow(omissions()));
 			})
 			.then((row) => {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
+					throw new errs.ItemNotFoundError(thisData.id);
 				}
 				// Custom omissions
-				if (typeof data.omit !== 'undefined' && data.omit !== null) {
-					row = _.omit(row, data.omit);
+				if (typeof thisData.omit !== "undefined" && thisData.omit !== null) {
+					return _.omit(row, thisData.omit);
 				}
+
+				if (row.avatar === "") {
+					row.avatar = DEFAULT_AVATAR;
+				}
+
 				return row;
 			});
 	},
@@ -213,20 +195,15 @@ const internalUser = {
 	 * @param user_id
 	 */
 	isEmailAvailable: (email, user_id) => {
-		let query = userModel
-			.query()
-			.where('email', '=', email.toLowerCase().trim())
-			.where('is_deleted', 0)
-			.first();
+		const query = userModel.query().where("email", "=", email.toLowerCase().trim()).where("is_deleted", 0).first();
 
-		if (typeof user_id !== 'undefined') {
-			query.where('id', '!=', user_id);
+		if (typeof user_id !== "undefined") {
+			query.where("id", "!=", user_id);
 		}
 
-		return query
-			.then((user) => {
-				return !user;
-			});
+		return query.then((user) => {
+			return !user;
+		});
 	},
 
 	/**
@@ -237,33 +214,34 @@ const internalUser = {
 	 * @returns {Promise}
 	 */
 	delete: (access, data) => {
-		return access.can('users:delete', data.id)
+		return access
+			.can("users:delete", data.id)
 			.then(() => {
-				return internalUser.get(access, {id: data.id});
+				return internalUser.get(access, { id: data.id });
 			})
 			.then((user) => {
 				if (!user) {
-					throw new error.ItemNotFoundError(data.id);
+					throw new errs.ItemNotFoundError(data.id);
 				}
 
 				// Make sure user can't delete themselves
 				if (user.id === access.token.getUserId(0)) {
-					throw new error.PermissionError('You cannot delete yourself.');
+					throw new errs.PermissionError("You cannot delete yourself.");
 				}
 
 				return userModel
 					.query()
-					.where('id', user.id)
+					.where("id", user.id)
 					.patch({
-						is_deleted: 1
+						is_deleted: 1,
 					})
 					.then(() => {
 						// Add to audit log
 						return internalAuditLog.add(access, {
-							action:      'deleted',
-							object_type: 'user',
-							object_id:   user.id,
-							meta:        _.omit(user, omissions())
+							action: "deleted",
+							object_type: "user",
+							object_id: user.id,
+							meta: _.omit(user, omissions()),
 						});
 					});
 			})
@@ -272,6 +250,14 @@ const internalUser = {
 			});
 	},
 
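+	/**
+	 * Soft-deletes every user by setting is_deleted
+	 *
+	 * @returns {Promise}
+	 */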
+	deleteAll: async () => {
+		await userModel
+			.query()
+			.patch({
+				is_deleted: 1,
+			});
+	},
+
 	/**
 	 * This will only count the users
 	 *
@@ -280,26 +266,26 @@ const internalUser = {
 	 * @returns {*}
 	 */
 	getCount: (access, search_query) => {
-		return access.can('users:list')
+		return access
+			.can("users:list")
 			.then(() => {
-				let query = userModel
-					.query()
-					.count('id as count')
-					.where('is_deleted', 0)
-					.first();
+				const query = userModel.query().count("id as count").where("is_deleted", 0).first();
 
 				// Query is used for searching
-				if (typeof search_query === 'string') {
+				if (typeof search_query === "string") {
 					query.where(function () {
-						this.where('user.name', 'like', '%' + search_query + '%')
-							.orWhere('user.email', 'like', '%' + search_query + '%');
+						this.where("user.name", "like", `%${search_query}%`).orWhere(
+							"user.email",
+							"like",
+							`%${search_query}%`,
+						);
 					});
 				}
 
 				return query;
 			})
 			.then((row) => {
-				return parseInt(row.count, 10);
+				return Number.parseInt(row.count, 10);
 			});
 	},
 
@@ -311,30 +297,28 @@ const internalUser = {
 	 * @param   {String}  [search_query]
 	 * @returns {Promise}
 	 */
-	getAll: (access, expand, search_query) => {
-		return access.can('users:list')
-			.then(() => {
-				let query = userModel
-					.query()
-					.where('is_deleted', 0)
-					.groupBy('id')
-					.allowGraph('[permissions]')
-					.orderBy('name', 'ASC');
+	getAll: async (access, expand, search_query) => {
+		await access.can("users:list");
+		const query = userModel
+			.query()
+			.where("is_deleted", 0)
+			.groupBy("id")
+			.allowGraph("[permissions]")
+			.orderBy("name", "ASC");
 
-				// Query is used for searching
-				if (typeof search_query === 'string') {
-					query.where(function () {
-						this.where('name', 'like', '%' + search_query + '%')
-							.orWhere('email', 'like', '%' + search_query + '%');
-					});
-				}
-
-				if (typeof expand !== 'undefined' && expand !== null) {
-					query.withGraphFetched('[' + expand.join(', ') + ']');
-				}
-
-				return query.then(utils.omitRows(omissions()));
+		// Query is used for searching
+		if (typeof search_query === "string") {
+			query.where(function () {
+				this.where("name", "like", `%${search_query}%`).orWhere("email", "like", `%${search_query}%`);
 			});
+		}
+
+		if (typeof expand !== "undefined" && expand !== null) {
+			query.withGraphFetched(`[${expand.join(", ")}]`);
+		}
+
+		const res = await query;
+		return utils.omitRows(omissions())(res);
 	},
 
 	/**
@@ -342,11 +326,11 @@ const internalUser = {
 	 * @param   {Integer} [id_requested]
 	 * @returns {[String]}
 	 */
-	getUserOmisionsByAccess: (access, id_requested) => {
+	getUserOmisionsByAccess: (access, idRequested) => {
 		let response = []; // Admin response
 
-		if (!access.token.hasScope('admin') && access.token.getUserId(0) !== id_requested) {
-			response = ['roles', 'is_deleted']; // Restricted response
+		if (!access.token.hasScope("admin") && access.token.getUserId(0) !== idRequested) {
+			response = ["is_deleted"]; // Restricted response
 		}
 
 		return response;
@@ -361,26 +345,30 @@ const internalUser = {
 	 * @return {Promise}
 	 */
 	setPassword: (access, data) => {
-		return access.can('users:password', data.id)
+		return access
+			.can("users:password", data.id)
 			.then(() => {
-				return internalUser.get(access, {id: data.id});
+				return internalUser.get(access, { id: data.id });
 			})
 			.then((user) => {
 				if (user.id !== data.id) {
 					// Sanity check that something crazy hasn't happened
-					throw new error.InternalValidationError('User could not be updated, IDs do not match: ' + user.id + ' !== ' + data.id);
+					throw new errs.InternalValidationError(
+						`User could not be updated, IDs do not match: ${user.id} !== ${data.id}`,
+					);
 				}
 
 				if (user.id === access.token.getUserId(0)) {
 					// they're setting their own password. Make sure their current password is correct
-					if (typeof data.current === 'undefined' || !data.current) {
-						throw new error.ValidationError('Current password was not supplied');
+					if (typeof data.current === "undefined" || !data.current) {
+						throw new errs.ValidationError("Current password was not supplied");
 					}
 
-					return internalToken.getTokenFromEmail({
-						identity: user.email,
-						secret:   data.current
-					})
+					return internalToken
+						.getTokenFromEmail({
+							identity: user.email,
+							secret: data.current,
+						})
 						.then(() => {
 							return user;
 						});
@@ -392,43 +380,36 @@ const internalUser = {
 				// Get auth, patch if it exists
 				return authModel
 					.query()
-					.where('user_id', user.id)
-					.andWhere('type', data.type)
+					.where("user_id", user.id)
+					.andWhere("type", data.type)
 					.first()
 					.then((existing_auth) => {
 						if (existing_auth) {
 							// patch
-							return authModel
-								.query()
-								.where('user_id', user.id)
-								.andWhere('type', data.type)
-								.patch({
-									type:   data.type, // This is required for the model to encrypt on save
-									secret: data.secret
-								});
-						} else {
-							// insert
-							return authModel
-								.query()
-								.insert({
-									user_id: user.id,
-									type:    data.type,
-									secret:  data.secret,
-									meta:    {}
-								});
+							return authModel.query().where("user_id", user.id).andWhere("type", data.type).patch({
+								type: data.type, // This is required for the model to encrypt on save
+								secret: data.secret,
+							});
 						}
+						// insert
+						return authModel.query().insert({
+							user_id: user.id,
+							type: data.type,
+							secret: data.secret,
+							meta: {},
+						});
 					})
 					.then(() => {
 						// Add to Audit Log
 						return internalAuditLog.add(access, {
-							action:      'updated',
-							object_type: 'user',
-							object_id:   user.id,
-							meta:        {
-								name:             user.name,
+							action: "updated",
+							object_type: "user",
+							object_id: user.id,
+							meta: {
+								name: user.name,
 								password_changed: true,
-								auth_type:        data.type
-							}
+								auth_type: data.type,
+							},
 						});
 					});
 			})
@@ -443,14 +424,17 @@ const internalUser = {
 	 * @return {Promise}
 	 */
 	setPermissions: (access, data) => {
-		return access.can('users:permissions', data.id)
+		return access
+			.can("users:permissions", data.id)
 			.then(() => {
-				return internalUser.get(access, {id: data.id});
+				return internalUser.get(access, { id: data.id });
 			})
 			.then((user) => {
 				if (user.id !== data.id) {
 					// Sanity check that something crazy hasn't happened
-					throw new error.InternalValidationError('User could not be updated, IDs do not match: ' + user.id + ' !== ' + data.id);
+					throw new errs.InternalValidationError(
+						`User could not be updated, IDs do not match: ${user.id} !== ${data.id}`,
+					);
 				}
 
 				return user;
@@ -459,34 +443,30 @@ const internalUser = {
 				// Get perms row, patch if it exists
 				return userPermissionModel
 					.query()
-					.where('user_id', user.id)
+					.where("user_id", user.id)
 					.first()
 					.then((existing_auth) => {
 						if (existing_auth) {
 							// patch
 							return userPermissionModel
 								.query()
-								.where('user_id', user.id)
-								.patchAndFetchById(existing_auth.id, _.assign({user_id: user.id}, data));
-						} else {
-							// insert
-							return userPermissionModel
-								.query()
-								.insertAndFetch(_.assign({user_id: user.id}, data));
+								.where("user_id", user.id)
+								.patchAndFetchById(existing_auth.id, _.assign({ user_id: user.id }, data));
 						}
+						// insert
+						return userPermissionModel.query().insertAndFetch(_.assign({ user_id: user.id }, data));
 					})
 					.then((permissions) => {
 						// Add to Audit Log
 						return internalAuditLog.add(access, {
-							action:      'updated',
-							object_type: 'user',
-							object_id:   user.id,
-							meta:        {
-								name:        user.name,
-								permissions: permissions
-							}
+							action: "updated",
+							object_type: "user",
+							object_id: user.id,
+							meta: {
+								name: user.name,
+								permissions: permissions,
+							},
 						});
-
 					});
 			})
 			.then(() => {
@@ -500,14 +480,15 @@ const internalUser = {
 	 * @param {Integer}  data.id
 	 */
 	loginAs: (access, data) => {
-		return access.can('users:loginas', data.id)
+		return access
+			.can("users:loginas", data.id)
 			.then(() => {
 				return internalUser.get(access, data);
 			})
 			.then((user) => {
 				return internalToken.getTokenFromUser(user);
 			});
-	}
+	},
 };
 
-module.exports = internalUser;
+export default internalUser;
diff --git a/backend/lib/access.js b/backend/lib/access.js
index 0e658a65..4c1672e1 100644
--- a/backend/lib/access.js
+++ b/backend/lib/access.js
@@ -4,91 +4,90 @@
  * "scope" in this file means "where did this token come from and what is using it", so 99% of the time
  * the "scope" is going to be "user" because it would be a user token. This is not to be confused with
  * the "role" which could be "user" or "admin". The scope in fact, could be "worker" or anything else.
- *
- *
  */
 
-const _              = require('lodash');
-const logger         = require('../logger').access;
-const Ajv            = require('ajv/dist/2020');
-const error          = require('./error');
-const userModel      = require('../models/user');
-const proxyHostModel = require('../models/proxy_host');
-const TokenModel     = require('../models/token');
-const roleSchema     = require('./access/roles.json');
-const permsSchema    = require('./access/permissions.json');
+import fs from "node:fs";
+import { dirname } from "node:path";
+import { fileURLToPath } from "node:url";
+import Ajv from "ajv/dist/2020.js";
+import _ from "lodash";
+import { access as logger } from "../logger.js";
+import proxyHostModel from "../models/proxy_host.js";
+import TokenModel from "../models/token.js";
+import userModel from "../models/user.js";
+import permsSchema from "./access/permissions.json" with { type: "json" };
+import roleSchema from "./access/roles.json" with { type: "json" };
+import errs from "./error.js";
 
-module.exports = function (token_string) {
-	let Token                 = new TokenModel();
-	let token_data            = null;
-	let initialised           = false;
-	let object_cache          = {};
-	let allow_internal_access = false;
-	let user_roles            = [];
-	let permissions           = {};
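+// ESM has no __dirname, so derive it from import.meta.url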
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = dirname(__filename);
+
+export default function (tokenString) {
+	const Token = TokenModel();
+	let tokenData = null;
+	let initialised = false;
+	const objectCache = {};
+	let allowInternalAccess = false;
+	let userRoles = [];
+	let permissions = {};
 
 	/**
 	 * Loads the Token object from the token string
 	 *
 	 * @returns {Promise}
 	 */
-	this.init = () => {
-		return new Promise((resolve, reject) => {
-			if (initialised) {
-				resolve();
-			} else if (!token_string) {
-				reject(new error.PermissionError('Permission Denied'));
+	this.init = async () => {
+		if (initialised) {
+			return;
+		}
+
+		if (!tokenString) {
+			throw new errs.PermissionError("Permission Denied");
+		}
+
+		tokenData = await Token.load(tokenString);
+
+		// At this point we need to load the user from the DB and make sure they:
+		// - exist (and not soft deleted)
+		// - still have the appropriate scopes for this token
+		// This is only required when the User ID is supplied or if the token scope has `user`
+		if (
+			tokenData.attrs.id ||
+			(typeof tokenData.scope !== "undefined" && _.indexOf(tokenData.scope, "user") !== -1)
+		) {
+			// Has token user id or token user scope
+			const user = await userModel
+				.query()
+				.where("id", tokenData.attrs.id)
+				.andWhere("is_deleted", 0)
+				.andWhere("is_disabled", 0)
+				.allowGraph("[permissions]")
+				.withGraphFetched("[permissions]")
+				.first();
+
+			if (user) {
+				// make sure user has all scopes of the token
+				// The `user` role is not added against the user row, so we have to just add it here to get past this check.
+				user.roles.push("user");
+
+				let ok = true;
+				_.forEach(tokenData.scope, (scope_item) => {
+					if (_.indexOf(user.roles, scope_item) === -1) {
+						ok = false;
+					}
+				});
+
+				if (!ok) {
+					throw new errs.AuthError("Invalid token scope for User");
+				}
+				initialised = true;
+				userRoles = user.roles;
+				permissions = user.permissions;
 			} else {
-				resolve(Token.load(token_string)
-					.then((data) => {
-						token_data = data;
-
-						// At this point we need to load the user from the DB and make sure they:
-						// - exist (and not soft deleted)
-						// - still have the appropriate scopes for this token
-						// This is only required when the User ID is supplied or if the token scope has `user`
-
-						if (token_data.attrs.id || (typeof token_data.scope !== 'undefined' && _.indexOf(token_data.scope, 'user') !== -1)) {
-							// Has token user id or token user scope
-							return userModel
-								.query()
-								.where('id', token_data.attrs.id)
-								.andWhere('is_deleted', 0)
-								.andWhere('is_disabled', 0)
-								.allowGraph('[permissions]')
-								.withGraphFetched('[permissions]')
-								.first()
-								.then((user) => {
-									if (user) {
-										// make sure user has all scopes of the token
-										// The `user` role is not added against the user row, so we have to just add it here to get past this check.
-										user.roles.push('user');
-
-										let is_ok = true;
-										_.forEach(token_data.scope, (scope_item) => {
-											if (_.indexOf(user.roles, scope_item) === -1) {
-												is_ok = false;
-											}
-										});
-
-										if (!is_ok) {
-											throw new error.AuthError('Invalid token scope for User');
-										} else {
-											initialised = true;
-											user_roles  = user.roles;
-											permissions = user.permissions;
-										}
-
-									} else {
-										throw new error.AuthError('User cannot be loaded for Token');
-									}
-								});
-						} else {
-							initialised = true;
-						}
-					}));
+				throw new errs.AuthError("User cannot be loaded for Token");
 			}
-		});
+		}
+		initialised = true;
 	};
 
 	/**
@@ -96,140 +95,121 @@ module.exports = function (token_string) {
 	 * This only applies to USER token scopes, as all other tokens are not really bound
 	 * by object scopes
 	 *
-	 * @param   {String} object_type
+	 * @param   {String} objectType
 	 * @returns {Promise}
 	 */
-	this.loadObjects = (object_type) => {
-		return new Promise((resolve, reject) => {
-			if (Token.hasScope('user')) {
-				if (typeof token_data.attrs.id === 'undefined' || !token_data.attrs.id) {
-					reject(new error.AuthError('User Token supplied without a User ID'));
-				} else {
-					let token_user_id = token_data.attrs.id ? token_data.attrs.id : 0;
-					let query;
+	this.loadObjects = async (objectType) => {
+		let objects = null;
 
-					if (typeof object_cache[object_type] === 'undefined') {
-						switch (object_type) {
+		if (Token.hasScope("user")) {
+			if (typeof tokenData.attrs.id === "undefined" || !tokenData.attrs.id) {
+				throw new errs.AuthError("User Token supplied without a User ID");
+			}
 
-						// USERS - should only return yourself
-						case 'users':
-							resolve(token_user_id ? [token_user_id] : []);
-							break;
+			const tokenUserId = tokenData.attrs.id ? tokenData.attrs.id : 0;
 
-							// Proxy Hosts
-						case 'proxy_hosts':
-							query = proxyHostModel
-								.query()
-								.select('id')
-								.andWhere('is_deleted', 0);
+			if (typeof objectCache[objectType] !== "undefined") {
+				objects = objectCache[objectType];
+			} else {
+				switch (objectType) {
+					// USERS - should only return yourself
+					case "users":
+						objects = tokenUserId ? [tokenUserId] : [];
+						break;
 
-							if (permissions.visibility === 'user') {
-								query.andWhere('owner_user_id', token_user_id);
-							}
+					// Proxy Hosts
+					case "proxy_hosts": {
+						const query = proxyHostModel
+							.query()
+							.select("id")
+							.andWhere("is_deleted", 0);
 
-							resolve(query
-								.then((rows) => {
-									let result = [];
-									_.forEach(rows, (rule_row) => {
-										result.push(rule_row.id);
-									});
-
-									// enum should not have less than 1 item
-									if (!result.length) {
-										result.push(0);
-									}
-
-									return result;
-								})
-							);
-							break;
-
-							// DEFAULT: null
-						default:
-							resolve(null);
-							break;
+						if (permissions.visibility === "user") {
+							query.andWhere("owner_user_id", tokenUserId);
 						}
-					} else {
-						resolve(object_cache[object_type]);
+
+						const rows = await query;
+						objects = [];
+						_.forEach(rows, (ruleRow) => {
+							objects.push(ruleRow.id);
+						});
+
+						// enum should not have less than 1 item
+						if (!objects.length) {
+							objects.push(0);
+						}
+						break;
 					}
 				}
-			} else {
-				resolve(null);
+				objectCache[objectType] = objects;
 			}
-		})
-			.then((objects) => {
-				object_cache[object_type] = objects;
-				return objects;
-			});
+		}
+		return objects;
 	};
 
 	/**
 	 * Creates a schema object on the fly with the IDs and other values required to be checked against the permissionSchema
 	 *
-	 * @param   {String} permission_label
+	 * @param   {String} permissionLabel
 	 * @returns {Object}
 	 */
-	this.getObjectSchema = (permission_label) => {
-		let base_object_type = permission_label.split(':').shift();
+	this.getObjectSchema = async (permissionLabel) => {
+		const baseObjectType = permissionLabel.split(":").shift();
 
-		let schema = {
-			$id:                  'objects',
-			description:          'Actor Properties',
-			type:                 'object',
+		const schema = {
+			$id: "objects",
+			description: "Actor Properties",
+			type: "object",
 			additionalProperties: false,
-			properties:           {
+			properties: {
 				user_id: {
 					anyOf: [
 						{
-							type: 'number',
-							enum: [Token.get('attrs').id]
-						}
-					]
+							type: "number",
+							enum: [Token.get("attrs").id],
+						},
+					],
 				},
 				scope: {
-					type:    'string',
-					pattern: '^' + Token.get('scope') + '$'
-				}
-			}
+					type: "string",
+					pattern: `^${Token.get("scope")}$`,
+				},
+			},
 		};
 
-		return this.loadObjects(base_object_type)
-			.then((object_result) => {
-				if (typeof object_result === 'object' && object_result !== null) {
-					schema.properties[base_object_type] = {
-						type:    'number',
-						enum:    object_result,
-						minimum: 1
-					};
-				} else {
-					schema.properties[base_object_type] = {
-						type:    'number',
-						minimum: 1
-					};
-				}
+		const result = await this.loadObjects(baseObjectType);
+		if (typeof result === "object" && result !== null) {
+			schema.properties[baseObjectType] = {
+				type: "number",
+				enum: result,
+				minimum: 1,
+			};
+		} else {
+			schema.properties[baseObjectType] = {
+				type: "number",
+				minimum: 1,
+			};
+		}
 
-				return schema;
-			});
+		return schema;
 	};
 
-	return {
 
+	return {
 		token: Token,
 
 		/**
 		 *
-		 * @param   {Boolean}  [allow_internal]
+		 * @param   {Boolean}  [allowInternal]
 		 * @returns {Promise}
 		 */
-		load: (allow_internal) => {
-			return new Promise(function (resolve/*, reject*/) {
-				if (token_string) {
-					resolve(Token.load(token_string));
-				} else {
-					allow_internal_access = allow_internal;
-					resolve(allow_internal_access || null);
-				}
-			});
+		load: async (allowInternal) => {
+			if (tokenString) {
+				return await Token.load(tokenString);
+			}
+			allowInternalAccess = allowInternal;
+			return allowInternal || null;
 		},
 
 		reloadObjects: this.loadObjects,
@@ -240,68 +220,59 @@ module.exports = function (token_string) {
 		 * @param {*}       [data]
 		 * @returns {Promise}
 		 */
-		can: (permission, data) => {
-			if (allow_internal_access === true) {
-				return Promise.resolve(true);
-				//return true;
-			} else {
-				return this.init()
-					.then(() => {
-						// Initialised, token decoded ok
-						return this.getObjectSchema(permission)
-							.then((objectSchema) => {
-								const data_schema = {
-									[permission]: {
-										data:                         data,
-										scope:                        Token.get('scope'),
-										roles:                        user_roles,
-										permission_visibility:        permissions.visibility,
-										permission_proxy_hosts:       permissions.proxy_hosts,
-										permission_redirection_hosts: permissions.redirection_hosts,
-										permission_dead_hosts:        permissions.dead_hosts,
-										permission_streams:           permissions.streams,
-										permission_access_lists:      permissions.access_lists,
-										permission_certificates:      permissions.certificates
-									}
-								};
-
-								let permissionSchema = {
-									$async:               true,
-									$id:                  'permissions',
-									type:                 'object',
-									additionalProperties: false,
-									properties:           {}
-								};
-
-								permissionSchema.properties[permission] = require('./access/' + permission.replace(/:/gim, '-') + '.json');
-
-								const ajv = new Ajv({
-									verbose:      true,
-									allErrors:    true,
-									breakOnError: true,
-									coerceTypes:  true,
-									schemas:      [
-										roleSchema,
-										permsSchema,
-										objectSchema,
-										permissionSchema
-									]
-								});
-
-								return ajv.validate('permissions', data_schema)
-									.then(() => {
-										return data_schema[permission];
-									});
-							});
-					})
-					.catch((err) => {
-						err.permission      = permission;
-						err.permission_data = data;
-						logger.error(permission, data, err.message);
-
-						throw new error.PermissionError('Permission Denied', err);
-					});
+		can: async (permission, data) => {
+			if (allowInternalAccess === true) {
+				return true;
 			}
-		}
+
+			try {
+				await this.init();
+				const objectSchema = await this.getObjectSchema(permission);
+
+				const dataSchema = {
+					[permission]: {
+						data: data,
+						scope: Token.get("scope"),
+						roles: userRoles,
+						permission_visibility: permissions.visibility,
+						permission_proxy_hosts: permissions.proxy_hosts,
+						permission_redirection_hosts: permissions.redirection_hosts,
+						permission_dead_hosts: permissions.dead_hosts,
+						permission_streams: permissions.streams,
+						permission_access_lists: permissions.access_lists,
+						permission_certificates: permissions.certificates,
+					},
+				};
+
+				const permissionSchema = {
+					$async: true,
+					$id: "permissions",
+					type: "object",
+					additionalProperties: false,
+					properties: {},
+				};
+
+				const rawData = fs.readFileSync(`${__dirname}/access/${permission.replace(/:/gim, "-")}.json`, {
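+				// Read the per-permission schema at runtime; dynamic require() is not available in ESM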
+					encoding: "utf8",
+				});
+				permissionSchema.properties[permission] = JSON.parse(rawData);
+
+				const ajv = new Ajv({
+					verbose: true,
+					allErrors: true,
+					breakOnError: true,
+					coerceTypes: true,
+					schemas: [roleSchema, permsSchema, objectSchema, permissionSchema],
+				});
+
+				// The schemas are $async, so validate() returns a promise; await it so failures reach the catch below
+				await ajv.validate("permissions", dataSchema);
+				return dataSchema[permission];
+			} catch (err) {
+				err.permission = permission;
+				err.permission_data = data;
+				logger.error(permission, data, err.message);
+				throw new errs.PermissionError("Permission Denied", err);
+			}
+		},
 	};
-};
+}
diff --git a/backend/lib/certbot.js b/backend/lib/certbot.js
index 96d94710..3a2dd072 100644
--- a/backend/lib/certbot.js
+++ b/backend/lib/certbot.js
@@ -1,85 +1,86 @@
-const dnsPlugins = require('../global/certbot-dns-plugins.json');
-const utils      = require('./utils');
-const error      = require('./error');
-const logger     = require('../logger').certbot;
-const batchflow  = require('batchflow');
+import batchflow from "batchflow";
+import dnsPlugins from "../certbot/dns-plugins.json" with { type: "json" };
+import { certbot as logger } from "../logger.js";
+import errs from "./error.js";
+import utils from "./utils.js";
 
-const CERTBOT_VERSION_REPLACEMENT = '$(certbot --version | grep -Eo \'[0-9](\\.[0-9]+)+\')';
+const CERTBOT_VERSION_REPLACEMENT = "$(certbot --version | grep -Eo '[0-9](\\.[0-9]+)+')";
 
-const certbot = {
+/**
+ * Installs a certbot plugin given the key for the object from
+ * ../certbot/dns-plugins.json
+ *
+ * @param   {string}  pluginKey
+ * @returns {Object}
+ */
+const installPlugin = async (pluginKey) => {
+	if (typeof dnsPlugins[pluginKey] === "undefined") {
+		// throw Error(`Certbot plugin ${pluginKey} not found`);
+		throw new errs.ItemNotFoundError(pluginKey);
+	}
 
-	/**
-	 * @param {array} pluginKeys
-	 */
-	installPlugins: async (pluginKeys) => {
-		let hasErrors = false;
+	const plugin = dnsPlugins[pluginKey];
+	logger.start(`Installing ${pluginKey}...`);
 
-		return new Promise((resolve, reject) => {
-			if (pluginKeys.length === 0) {
-				resolve();
-				return;
-			}
+	plugin.version = plugin.version.replace(/{{certbot-version}}/g, CERTBOT_VERSION_REPLACEMENT);
+	plugin.dependencies = plugin.dependencies.replace(/{{certbot-version}}/g, CERTBOT_VERSION_REPLACEMENT);
 
-			batchflow(pluginKeys).sequential()
-				.each((_i, pluginKey, next) => {
-					certbot.installPlugin(pluginKey)
-						.then(() => {
-							next();
-						})
-						.catch((err) => {
-							hasErrors = true;
-							next(err);
-						});
-				})
-				.error((err) => {
-					logger.error(err.message);
-				})
-				.end(() => {
-					if (hasErrors) {
-						reject(new error.CommandError('Some plugins failed to install. Please check the logs above', 1));
-					} else {
-						resolve();
-					}
-				});
+	// SETUPTOOLS_USE_DISTUTILS is required for certbot plugins to install correctly
+	// in new versions of Python
+	let env = Object.assign({}, process.env, { SETUPTOOLS_USE_DISTUTILS: "stdlib" });
+	if (typeof plugin.env === "object") {
+		env = Object.assign(env, plugin.env);
+	}
+
+	const cmd = `. /opt/certbot/bin/activate && pip install --no-cache-dir ${plugin.dependencies} ${plugin.package_name}${plugin.version}  && deactivate`;
+	return utils
+		.exec(cmd, { env })
+		.then((result) => {
+			logger.complete(`Installed ${pluginKey}`);
+			return result;
+		})
+		.catch((err) => {
+			throw err;
 		});
-	},
-
-	/**
-	 * Installs a cerbot plugin given the key for the object from
-	 * ../global/certbot-dns-plugins.json
-	 *
-	 * @param   {string}  pluginKey
-	 * @returns {Object}
-	 */
-	installPlugin: async (pluginKey) => {
-		if (typeof dnsPlugins[pluginKey] === 'undefined') {
-			// throw Error(`Certbot plugin ${pluginKey} not found`);
-			throw new error.ItemNotFoundError(pluginKey);
-		}
-
-		const plugin = dnsPlugins[pluginKey];
-		logger.start(`Installing ${pluginKey}...`);
-
-		plugin.version      = plugin.version.replace(/{{certbot-version}}/g, CERTBOT_VERSION_REPLACEMENT);
-		plugin.dependencies = plugin.dependencies.replace(/{{certbot-version}}/g, CERTBOT_VERSION_REPLACEMENT);
-
-		// SETUPTOOLS_USE_DISTUTILS is required for certbot plugins to install correctly
-		// in new versions of Python
-		let env = Object.assign({}, process.env, {SETUPTOOLS_USE_DISTUTILS: 'stdlib'});
-		if (typeof plugin.env === 'object') {
-			env = Object.assign(env, plugin.env);
-		}
-
-		const cmd = `. /opt/certbot/bin/activate && pip install --no-cache-dir ${plugin.dependencies} ${plugin.package_name}${plugin.version}  && deactivate`;
-		return utils.exec(cmd, {env})
-			.then((result) => {
-				logger.complete(`Installed ${pluginKey}`);
-				return result;
-			})
-			.catch((err) => {
-				throw err;
-			});
-	},
 };
 
-module.exports = certbot;
+/**
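+ * Installs the given certbot DNS plugins sequentially, rejecting at the end if any install failed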
+ * @param {array} pluginKeys
+ */
+const installPlugins = async (pluginKeys) => {
+	let hasErrors = false;
+
+	return new Promise((resolve, reject) => {
+		if (pluginKeys.length === 0) {
+			resolve();
+			return;
+		}
+
+		batchflow(pluginKeys)
+			.sequential()
+			.each((_i, pluginKey, next) => {
+				installPlugin(pluginKey)
+					.then(() => {
+						next();
+					})
+					.catch((err) => {
+						hasErrors = true;
+						next(err);
+					});
+			})
+			.error((err) => {
+				logger.error(err.message);
+			})
+			.end(() => {
+				if (hasErrors) {
+					reject(
+						new errs.CommandError("Some plugins failed to install. Please check the logs above", 1),
+					);
+				} else {
+					resolve();
+				}
+			});
+	});
+};
+
+export { installPlugins, installPlugin };
diff --git a/backend/lib/config.js b/backend/lib/config.js
index 23184f3e..ff3f2e12 100644
--- a/backend/lib/config.js
+++ b/backend/lib/config.js
@@ -1,6 +1,6 @@
-const fs      = require('fs');
-const NodeRSA = require('node-rsa');
-const logger  = require('../logger').global;
+import fs from "node:fs";
+import NodeRSA from "node-rsa";
+import { global as logger } from "../logger.js";
 
 const keysFile         = '/data/keys.json';
 const mysqlEngine      = 'mysql2';
@@ -12,18 +12,20 @@ let instance = null;
 // 1. Load from config file first (not recommended anymore)
 // 2. Use config env variables next
 const configure = () => {
-	const filename = (process.env.NODE_CONFIG_DIR || './config') + '/' + (process.env.NODE_ENV || 'default') + '.json';
+	const filename = `${process.env.NODE_CONFIG_DIR || "./config"}/${process.env.NODE_ENV || "default"}.json`;
 	if (fs.existsSync(filename)) {
 		let configData;
 		try {
-			configData = require(filename);
+			// Load this JSON file synchronously
+			const rawData = fs.readFileSync(filename);
+			configData = JSON.parse(rawData);
 		} catch (_) {
 			// do nothing
 		}
 
-		if (configData && configData.database) {
+		if (configData?.database) {
 			logger.info(`Using configuration from file: ${filename}`);
-			instance      = configData;
+			instance = configData;
 			instance.keys = getKeys();
 			return;
 		}
@@ -34,15 +36,15 @@ const configure = () => {
 	const envMysqlName = process.env.DB_MYSQL_NAME || null;
 	if (envMysqlHost && envMysqlUser && envMysqlName) {
 		// we have enough mysql creds to go with mysql
-		logger.info('Using MySQL configuration');
+		logger.info("Using MySQL configuration");
 		instance = {
 			database: {
-				engine:   mysqlEngine,
-				host:     envMysqlHost,
-				port:     process.env.DB_MYSQL_PORT || 3306,
-				user:     envMysqlUser,
+				engine: mysqlEngine,
+				host: envMysqlHost,
+				port: process.env.DB_MYSQL_PORT || 3306,
+				user: envMysqlUser,
 				password: process.env.DB_MYSQL_PASSWORD,
-				name:     envMysqlName,
+				name: envMysqlName,
 			},
 			keys: getKeys(),
 		};
@@ -54,33 +56,33 @@ const configure = () => {
 	const envPostgresName = process.env.DB_POSTGRES_NAME || null;
 	if (envPostgresHost && envPostgresUser && envPostgresName) {
 		// we have enough postgres creds to go with postgres
-		logger.info('Using Postgres configuration');
+		logger.info("Using Postgres configuration");
 		instance = {
 			database: {
-				engine:   postgresEngine,
-				host:     envPostgresHost,
-				port:     process.env.DB_POSTGRES_PORT || 5432,
-				user:     envPostgresUser,
+				engine: postgresEngine,
+				host: envPostgresHost,
+				port: process.env.DB_POSTGRES_PORT || 5432,
+				user: envPostgresUser,
 				password: process.env.DB_POSTGRES_PASSWORD,
-				name:     envPostgresName,
+				name: envPostgresName,
 			},
 			keys: getKeys(),
 		};
 		return;
 	}
 
-	const envSqliteFile = process.env.DB_SQLITE_FILE || '/data/database.sqlite';
+	const envSqliteFile = process.env.DB_SQLITE_FILE || "/data/database.sqlite";
 	logger.info(`Using Sqlite: ${envSqliteFile}`);
 	instance = {
 		database: {
-			engine: 'knex-native',
-			knex:   {
-				client:     sqliteClientName,
+			engine: "knex-native",
+			knex: {
+				client: sqliteClientName,
 				connection: {
-					filename: envSqliteFile
+					filename: envSqliteFile,
 				},
-				useNullAsDefault: true
-			}
+				useNullAsDefault: true,
+			},
 		},
 		keys: getKeys(),
 	};
@@ -88,150 +90,155 @@ const configure = () => {
 
 const getKeys = () => {
 	// Get keys from file
+	logger.debug("Cheecking for keys file:", keysFile);
 	if (!fs.existsSync(keysFile)) {
 		generateKeys();
 	} else if (process.env.DEBUG) {
-		logger.info('Keys file exists OK');
+		logger.info("Keys file exists OK");
 	}
 	try {
-		return require(keysFile);
+		// Load the keys JSON file synchronously and return the parsed object
+		const rawData = fs.readFileSync(keysFile);
+		return JSON.parse(rawData);
 	} catch (err) {
-		logger.error('Could not read JWT key pair from config file: ' + keysFile, err);
+		logger.error(`Could not read JWT key pair from config file: ${keysFile}`, err);
 		process.exit(1);
 	}
 };
 
 const generateKeys = () => {
-	logger.info('Creating a new JWT key pair...');
+	logger.info("Creating a new JWT key pair...");
 	// Now create the keys and save them in the config.
 	const key = new NodeRSA({ b: 2048 });
 	key.generateKeyPair();
 
 	const keys = {
-		key: key.exportKey('private').toString(),
-		pub: key.exportKey('public').toString(),
+		key: key.exportKey("private").toString(),
+		pub: key.exportKey("public").toString(),
 	};
 
 	// Write keys config
 	try {
 		fs.writeFileSync(keysFile, JSON.stringify(keys, null, 2));
 	} catch (err) {
-		logger.error('Could not write JWT key pair to config file: ' + keysFile + ': ' + err.message);
+		logger.error(`Could not write JWT key pair to config file: ${keysFile}: ${err.message}`);
 		process.exit(1);
 	}
-	logger.info('Wrote JWT key pair to config file: ' + keysFile);
+	logger.info(`Wrote JWT key pair to config file: ${keysFile}`);
 };
 
-module.exports = {
-
-	/**
-	 *
-	 * @param   {string}  key   ie: 'database' or 'database.engine'
-	 * @returns {boolean}
-	 */
-	has: function(key) {
-		instance === null && configure();
-		const keys = key.split('.');
-		let level  = instance;
-		let has    = true;
-		keys.forEach((keyItem) =>{
-			if (typeof level[keyItem] === 'undefined') {
-				has = false;
-			} else {
-				level = level[keyItem];
-			}
-		});
-
-		return has;
-	},
-
-	/**
-	 * Gets a specific key from the top level
-	 *
-	 * @param {string} key
-	 * @returns {*}
-	 */
-	get: function (key) {
-		instance === null && configure();
-		if (key && typeof instance[key] !== 'undefined') {
-			return instance[key];
+/**
+ *
+ * @param   {string}  key   ie: 'database' or 'database.engine'
+ * @returns {boolean}
+ */
+const configHas = (key) => {
+	instance === null && configure();
+	const keys = key.split(".");
+	let level = instance;
+	let has = true;
+	keys.forEach((keyItem) => {
+		if (typeof level[keyItem] === "undefined") {
+			has = false;
+		} else {
+			level = level[keyItem];
 		}
-		return instance;
-	},
+	});
 
-	/**
-	 * Is this a sqlite configuration?
-	 *
-	 * @returns {boolean}
-	 */
-	isSqlite: function () {
-		instance === null && configure();
-		return instance.database.knex && instance.database.knex.client === sqliteClientName;
-	},
+	return has;
+};
 
-	/**
-	 * Is this a mysql configuration?
-	 *
-	 * @returns {boolean}
-	 */
-	isMysql: function () {
-		instance === null && configure();
-		return instance.database.engine === mysqlEngine;
-	},
-	
-	/**
-		 * Is this a postgres configuration?
-		 *
-		 * @returns {boolean}
-		 */
-	isPostgres: function () {
-		instance === null && configure();
-		return instance.database.engine === postgresEngine;
-	},
-
-	/**
-	 * Are we running in debug mdoe?
-	 *
-	 * @returns {boolean}
-	 */
-	debug: function () {
-		return !!process.env.DEBUG;
-	},
-
-	/**
-	 * Returns a public key
-	 *
-	 * @returns {string}
-	 */
-	getPublicKey: function () {
-		instance === null && configure();
-		return instance.keys.pub;
-	},
-
-	/**
-	 * Returns a private key
-	 *
-	 * @returns {string}
-	 */
-	getPrivateKey: function () {
-		instance === null && configure();
-		return instance.keys.key;
-	},
-
-	/**
-	 * @returns {boolean}
-	 */
-	useLetsencryptStaging: function () {
-		return !!process.env.LE_STAGING;
-	},
-
-	/**
-	 * @returns {string|null}
-	 */
-	useLetsencryptServer: function () {
-		if (process.env.LE_SERVER) {
-			return process.env.LE_SERVER;
-		}
-		return null;
+/**
+ * Gets a specific key from the top level
+ *
+ * @param {string} key
+ * @returns {*}
+ */
+const configGet = (key) => {
+	instance === null && configure();
+	if (key && typeof instance[key] !== "undefined") {
+		return instance[key];
 	}
+	return instance;
 };
+
+/**
+ * Is this a sqlite configuration?
+ *
+ * @returns {boolean}
+ */
+const isSqlite = () => {
+	instance === null && configure();
+	return instance.database.knex && instance.database.knex.client === sqliteClientName;
+};
+
+/**
+ * Is this a mysql configuration?
+ *
+ * @returns {boolean}
+ */
+const isMysql = () => {
+	instance === null && configure();
+	return instance.database.engine === mysqlEngine;
+};
+
+/**
+ * Is this a postgres configuration?
+ *
+ * @returns {boolean}
+ */
+const isPostgres = () => {
+	instance === null && configure();
+	return instance.database.engine === postgresEngine;
+};
+
+/**
+ * Are we running in debug mode?
+ *
+ * @returns {boolean}
+ */
+const isDebugMode = () => !!process.env.DEBUG;
+
+/**
+ * Are we running in CI?
+ *
+ * @returns {boolean}
+ */
+const isCI = () => process.env.CI === "true" && process.env.DEBUG === "true";
+
+/**
+ * Returns a public key
+ *
+ * @returns {string}
+ */
+const getPublicKey = () => {
+	instance === null && configure();
+	return instance.keys.pub;
+};
+
+/**
+ * Returns a private key
+ *
+ * @returns {string}
+ */
+const getPrivateKey = () => {
+	instance === null && configure();
+	return instance.keys.key;
+};
+
+/**
+ * @returns {boolean}
+ */
+const useLetsencryptStaging = () => !!process.env.LE_STAGING;
+
+/**
+ * @returns {string|null}
+ */
+const useLetsencryptServer = () => {
+	if (process.env.LE_SERVER) {
+		return process.env.LE_SERVER;
+	}
+	return null;
+};
+
+export { isCI, configHas, configGet, isSqlite, isMysql, isPostgres, isDebugMode, getPrivateKey, getPublicKey, useLetsencryptStaging, useLetsencryptServer };
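Editor's note: as a usage sketch (not part of the diff), the named exports replace the old module-level object; the resolution order is the one documented above:

	// Hypothetical consumer of the refactored config module
	import { configGet, isSqlite, isPostgres } from "./lib/config.js";

	// Resolution order: config file, then DB_MYSQL_* env vars, then DB_POSTGRES_*,
	// finally falling back to SQLite at DB_SQLITE_FILE (default /data/database.sqlite).
	const db = configGet("database");
	if (isSqlite()) {
		console.log("SQLite file:", db.knex.connection.filename);
	} else if (isPostgres()) {
		console.log("Postgres host:", db.host);
	}
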
diff --git a/backend/lib/error.js b/backend/lib/error.js
index 413d6a7d..d7dbf0c9 100644
--- a/backend/lib/error.js
+++ b/backend/lib/error.js
@@ -1,99 +1,103 @@
-const _    = require('lodash');
-const util = require('util');
+import _ from "lodash";
 
-module.exports = {
-
-	PermissionError: function (message, previous) {
+const errs = {
+	PermissionError: function (_, previous) {
 		Error.captureStackTrace(this, this.constructor);
-		this.name     = this.constructor.name;
+		this.name = this.constructor.name;
 		this.previous = previous;
-		this.message  = 'Permission Denied';
-		this.public   = true;
-		this.status   = 403;
+		this.message = "Permission Denied";
+		this.public = true;
+		this.status = 403;
 	},
 
 	ItemNotFoundError: function (id, previous) {
 		Error.captureStackTrace(this, this.constructor);
-		this.name     = this.constructor.name;
+		this.name = this.constructor.name;
 		this.previous = previous;
-		this.message  = 'Item Not Found - ' + id;
-		this.public   = true;
-		this.status   = 404;
+		this.message = "Not Found";
+		if (id) {
+			this.message = `Not Found - ${id}`;
+		}
+		this.public = true;
+		this.status = 404;
 	},
 
-	AuthError: function (message, previous) {
+	AuthError: function (message, messageI18n, previous) {
 		Error.captureStackTrace(this, this.constructor);
-		this.name     = this.constructor.name;
+		this.name = this.constructor.name;
 		this.previous = previous;
-		this.message  = message;
-		this.public   = true;
-		this.status   = 401;
+		this.message = message;
+		this.message_i18n = messageI18n;
+		this.public = true;
+		this.status = 400;
 	},
 
 	InternalError: function (message, previous) {
 		Error.captureStackTrace(this, this.constructor);
-		this.name     = this.constructor.name;
+		this.name = this.constructor.name;
 		this.previous = previous;
-		this.message  = message;
-		this.status   = 500;
-		this.public   = false;
+		this.message = message;
+		this.status = 500;
+		this.public = false;
 	},
 
 	InternalValidationError: function (message, previous) {
 		Error.captureStackTrace(this, this.constructor);
-		this.name     = this.constructor.name;
+		this.name = this.constructor.name;
 		this.previous = previous;
-		this.message  = message;
-		this.status   = 400;
-		this.public   = false;
+		this.message = message;
+		this.status = 400;
+		this.public = false;
 	},
 
 	ConfigurationError: function (message, previous) {
 		Error.captureStackTrace(this, this.constructor);
-		this.name     = this.constructor.name;
+		this.name = this.constructor.name;
 		this.previous = previous;
-		this.message  = message;
-		this.status   = 400;
-		this.public   = true;
+		this.message = message;
+		this.status = 400;
+		this.public = true;
 	},
 
 	CacheError: function (message, previous) {
 		Error.captureStackTrace(this, this.constructor);
-		this.name     = this.constructor.name;
-		this.message  = message;
+		this.name = this.constructor.name;
+		this.message = message;
 		this.previous = previous;
-		this.status   = 500;
-		this.public   = false;
+		this.status = 500;
+		this.public = false;
 	},
 
 	ValidationError: function (message, previous) {
 		Error.captureStackTrace(this, this.constructor);
-		this.name     = this.constructor.name;
+		this.name = this.constructor.name;
 		this.previous = previous;
-		this.message  = message;
-		this.public   = true;
-		this.status   = 400;
+		this.message = message;
+		this.public = true;
+		this.status = 400;
 	},
 
 	AssertionFailedError: function (message, previous) {
 		Error.captureStackTrace(this, this.constructor);
-		this.name     = this.constructor.name;
+		this.name = this.constructor.name;
 		this.previous = previous;
-		this.message  = message;
-		this.public   = false;
-		this.status   = 400;
+		this.message = message;
+		this.public = false;
+		this.status = 400;
 	},
 
 	CommandError: function (stdErr, code, previous) {
 		Error.captureStackTrace(this, this.constructor);
-		this.name     = this.constructor.name;
+		this.name = this.constructor.name;
 		this.previous = previous;
-		this.message  = stdErr;
-		this.code     = code;
-		this.public   = false;
+		this.message = stdErr;
+		this.code = code;
+		this.public = false;
 	},
 };
 
-_.forEach(module.exports, function (error) {
-	util.inherits(error, Error);
+_.forEach(errs, (err) => {
+	err.prototype = Object.create(Error.prototype);
 });
+
+export default errs;
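Editor's note: a short sketch (not part of the diff) of the default-exported constructors; the prototype wiring above keeps them compatible with instanceof Error checks:

	import errs from "./lib/error.js";

	try {
		throw new errs.ItemNotFoundError(42);
	} catch (err) {
		// true, because each constructor's prototype is created from Error.prototype
		console.log(err instanceof Error);
		// 404 "Not Found - 42"; `public` and `status` are presumably consumed by the API error handling
		console.log(err.status, err.message);
	}
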
diff --git a/backend/lib/express/cors.js b/backend/lib/express/cors.js
index 6d5b8b5f..6fbf3baf 100644
--- a/backend/lib/express/cors.js
+++ b/backend/lib/express/cors.js
@@ -1,12 +1,13 @@
-module.exports = function (req, res, next) {
+export default (req, res, next) => {
 	if (req.headers.origin) {
 		res.set({
-			'Access-Control-Allow-Origin':      req.headers.origin,
-			'Access-Control-Allow-Credentials': true,
-			'Access-Control-Allow-Methods':     'OPTIONS, GET, POST',
-			'Access-Control-Allow-Headers':     'Content-Type, Cache-Control, Pragma, Expires, Authorization, X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit',
-			'Access-Control-Max-Age':           5 * 60,
-			'Access-Control-Expose-Headers':    'X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit'
+			"Access-Control-Allow-Origin": req.headers.origin,
+			"Access-Control-Allow-Credentials": true,
+			"Access-Control-Allow-Methods": "OPTIONS, GET, POST",
+			"Access-Control-Allow-Headers":
+				"Content-Type, Cache-Control, Pragma, Expires, Authorization, X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit",
+			"Access-Control-Max-Age": 5 * 60,
+			"Access-Control-Expose-Headers": "X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit",
 		});
 		next();
 	} else {
diff --git a/backend/lib/express/jwt-decode.js b/backend/lib/express/jwt-decode.js
index 17edccec..90fe241e 100644
--- a/backend/lib/express/jwt-decode.js
+++ b/backend/lib/express/jwt-decode.js
@@ -1,15 +1,15 @@
-const Access = require('../access');
+import Access from "../access.js";
 
-module.exports = () => {
-	return function (req, res, next) {
-		res.locals.access = null;
-		let access        = new Access(res.locals.token || null);
-		access.load()
-			.then(() => {
-				res.locals.access = access;
-				next();
-			})
-			.catch(next);
+export default () => {
+	return async (_, res, next) => {
+		try {
+			res.locals.access = null;
+			const access = new Access(res.locals.token || null);
+			await access.load();
+			res.locals.access = access;
+			next();
+		} catch (err) {
+			next(err);
+		}
 	};
 };
-
diff --git a/backend/lib/express/jwt.js b/backend/lib/express/jwt.js
index 44aa3693..ce907b6d 100644
--- a/backend/lib/express/jwt.js
+++ b/backend/lib/express/jwt.js
@@ -1,13 +1,13 @@
-module.exports = function () {
-	return function (req, res, next) {
+export default function () {
+	return (req, res, next) => {
 		if (req.headers.authorization) {
-			let parts = req.headers.authorization.split(' ');
+			const parts = req.headers.authorization.split(" ");
 
-			if (parts && parts[0] === 'Bearer' && parts[1]) {
+			if (parts && parts[0] === "Bearer" && parts[1]) {
 				res.locals.token = parts[1];
 			}
 		}
 
 		next();
 	};
-};
+}
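Editor's note: hypothetical wiring of the two middlewares (not part of the diff), only to show the order they are expected to run in:

	import express from "express";
	import jwt from "./lib/express/jwt.js";
	import jwtDecode from "./lib/express/jwt-decode.js";

	const app = express();
	// jwt() extracts a Bearer token into res.locals.token;
	// jwtDecode() then loads an Access object into res.locals.access (or passes the error to next)
	app.use(jwt());
	app.use(jwtDecode());
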
diff --git a/backend/lib/express/pagination.js b/backend/lib/express/pagination.js
index 24ffa58d..188df277 100644
--- a/backend/lib/express/pagination.js
+++ b/backend/lib/express/pagination.js
@@ -1,7 +1,6 @@
-let _ = require('lodash');
-
-module.exports = function (default_sort, default_offset, default_limit, max_limit) {
+import _  from "lodash";
 
+export default (default_sort, default_offset, default_limit, max_limit) => {
 	/**
 	 * This will setup the req query params with filtered data and defaults
 	 *
@@ -11,34 +10,35 @@ module.exports = function (default_sort, default_offset, default_limit, max_limi
 	 *
 	 */
 
-	return function (req, res, next) {
-
-		req.query.offset = typeof req.query.limit === 'undefined' ? default_offset || 0 : parseInt(req.query.offset, 10);
-		req.query.limit  = typeof req.query.limit === 'undefined' ? default_limit || 50 : parseInt(req.query.limit, 10);
+	return (req, _res, next) => {
+		req.query.offset =
+			typeof req.query.offset === "undefined" ? default_offset || 0 : Number.parseInt(req.query.offset, 10);
+		req.query.limit =
+			typeof req.query.limit === "undefined" ? default_limit || 50 : Number.parseInt(req.query.limit, 10);
 
 		if (max_limit && req.query.limit > max_limit) {
 			req.query.limit = max_limit;
 		}
 
 		// Sorting
-		let sort       = typeof req.query.sort === 'undefined' ? default_sort : req.query.sort;
-		let myRegexp   = /.*\.(asc|desc)$/ig;
-		let sort_array = [];
+		let sort = typeof req.query.sort === "undefined" ? default_sort : req.query.sort;
+		const myRegexp = /.*\.(asc|desc)$/gi;
+		const sort_array = [];
 
-		sort = sort.split(',');
-		_.map(sort, function (val) {
-			let matches = myRegexp.exec(val);
+		sort = sort.split(",");
+		_.map(sort, (val) => {
+			const matches = myRegexp.exec(val);
 
 			if (matches !== null) {
-				let dir = matches[1];
+				const dir = matches[1];
 				sort_array.push({
 					field: val.substr(0, val.length - (dir.length + 1)),
-					dir:   dir.toLowerCase()
+					dir: dir.toLowerCase(),
 				});
 			} else {
 				sort_array.push({
 					field: val,
-					dir:   'asc'
+					dir: "asc",
 				});
 			}
 		});
diff --git a/backend/lib/express/user-id-from-me.js b/backend/lib/express/user-id-from-me.js
index 4a37a406..9c29ba27 100644
--- a/backend/lib/express/user-id-from-me.js
+++ b/backend/lib/express/user-id-from-me.js
@@ -1,9 +1,8 @@
-module.exports = (req, res, next) => {
+export default (req, res, next) => {
 	if (req.params.user_id === 'me' && res.locals.access) {
 		req.params.user_id = res.locals.access.token.get('attrs').id;
 	} else {
-		req.params.user_id = parseInt(req.params.user_id, 10);
+		req.params.user_id = Number.parseInt(req.params.user_id, 10);
 	}
-
 	next();
 };
diff --git a/backend/lib/helpers.js b/backend/lib/helpers.js
index ad3df3c2..853a7a55 100644
--- a/backend/lib/helpers.js
+++ b/backend/lib/helpers.js
@@ -1,62 +1,58 @@
-const moment       = require('moment');
-const {isPostgres} = require('./config');
-const {ref}        = require('objection');
+import moment from "moment";
+import { ref } from "objection";
+import { isPostgres } from "./config.js";
 
-module.exports = {
-
-	/**
-	 * Takes an expression such as 30d and returns a moment object of that date in future
-	 *
-	 * Key      Shorthand
-	 * ==================
-	 * years         y
-	 * quarters      Q
-	 * months        M
-	 * weeks         w
-	 * days          d
-	 * hours         h
-	 * minutes       m
-	 * seconds       s
-	 * milliseconds  ms
-	 *
-	 * @param {String}  expression
-	 * @returns {Object}
-	 */
-	parseDatePeriod: function (expression) {
-		let matches = expression.match(/^([0-9]+)(y|Q|M|w|d|h|m|s|ms)$/m);
-		if (matches) {
-			return moment().add(matches[1], matches[2]);
-		}
-
-		return null;
-	},
-
-	convertIntFieldsToBool: function (obj, fields) {
-		fields.forEach(function (field) {
-			if (typeof obj[field] !== 'undefined') {
-				obj[field] = obj[field] === 1;
-			}
-		});
-		return obj;
-	},
-
-	convertBoolFieldsToInt: function (obj, fields) {
-		fields.forEach(function (field) {
-			if (typeof obj[field] !== 'undefined') {
-				obj[field] = obj[field] ? 1 : 0;
-			}
-		});
-		return obj;
-	},
-
-	/**
-	 * Casts a column to json if using postgres
-	 *
-	 * @param {string} colName
-	 * @returns {string|Objection.ReferenceBuilder}
-	 */
-	castJsonIfNeed: function (colName) {
-		return isPostgres() ? ref(colName).castText() : colName;
+/**
+ * Takes an expression such as 30d and returns a moment object of that date in future
+ *
+ * Key      Shorthand
+ * ==================
+ * years         y
+ * quarters      Q
+ * months        M
+ * weeks         w
+ * days          d
+ * hours         h
+ * minutes       m
+ * seconds       s
+ * milliseconds  ms
+ *
+ * @param {String}  expression
+ * @returns {Object}
+ */
+const parseDatePeriod = (expression) => {
+	const matches = expression.match(/^([0-9]+)(y|Q|M|w|d|h|m|s|ms)$/m);
+	if (matches) {
+		return moment().add(matches[1], matches[2]);
 	}
 
+	return null;
 };
+
+const convertIntFieldsToBool = (obj, fields) => {
+	fields.forEach((field) => {
+		if (typeof obj[field] !== "undefined") {
+			obj[field] = obj[field] === 1;
+		}
+	});
+	return obj;
+};
+
+const convertBoolFieldsToInt = (obj, fields) => {
+	fields.forEach((field) => {
+		if (typeof obj[field] !== "undefined") {
+			obj[field] = obj[field] ? 1 : 0;
+		}
+	});
+	return obj;
+};
+
+/**
+ * Casts a column to json if using postgres
+ *
+ * @param {string} colName
+ * @returns {string|Objection.ReferenceBuilder}
+ */
+const castJsonIfNeed = (colName) => (isPostgres() ? ref(colName).castText() : colName);
+
+export { parseDatePeriod, convertIntFieldsToBool, convertBoolFieldsToInt, castJsonIfNeed };
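Editor's note: a brief usage sketch (not part of the diff) for the now individually exported helpers:

	import { convertIntFieldsToBool, parseDatePeriod } from "./lib/helpers.js";

	// "30d" becomes a moment 30 days in the future; unrecognised expressions return null
	const expires = parseDatePeriod("30d");
	console.log(expires ? expires.toISOString() : "invalid period");

	// Database rows store booleans as 0/1; normalise them before returning via the API
	const row = convertIntFieldsToBool({ enabled: 1, ssl_forced: 0 }, ["enabled", "ssl_forced"]);
	// -> { enabled: true, ssl_forced: false }
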
diff --git a/backend/lib/migrate_template.js b/backend/lib/migrate_template.js
index f75f77ef..0b8e2840 100644
--- a/backend/lib/migrate_template.js
+++ b/backend/lib/migrate_template.js
@@ -1,33 +1,34 @@
-const migrate_name = 'identifier_for_migrate';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "identifier_for_migrate";
 
 /**
  * Migrate
  *
  * @see http://knexjs.org/#Schema
  *
- * @param {Object} knex
- * @param {Promise} Promise
+ * @param   {Object} knex
  * @returns {Promise}
  */
-exports.up = function (knex, Promise) {
-
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (_knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
 	// Create Table example:
 
-	/*return knex.schema.createTable('notification', (table) => {
+	/*
+	return knex.schema.createTable('notification', (table) => {
 		 table.increments().primary();
 		 table.string('name').notNull();
 		 table.string('type').notNull();
 		 table.integer('created_on').notNull();
 		 table.integer('modified_on').notNull();
 	 })
-	 .then(function () {
-		logger.info('[' + migrate_name + '] Notification Table created');
-	 });*/
+		.then(function () {
+			logger.info('[' + migrateName + '] Notification Table created');
+		});
+	 */
 
-	logger.info('[' + migrate_name + '] Migrating Up Complete');
+	logger.info(`[${migrateName}] Migrating Up Complete`);
 
 	return Promise.resolve(true);
 };
@@ -35,21 +36,24 @@ exports.up = function (knex, Promise) {
 /**
  * Undo Migrate
  *
- * @param {Object} knex
- * @param {Promise} Promise
+ * @param   {Object} knex
  * @returns {Promise}
  */
-exports.down = function (knex, Promise) {
-	logger.info('[' + migrate_name + '] Migrating Down...');
+const down = (_knex) => {
+	logger.info(`[${migrateName}] Migrating Down...`);
 
 	// Drop table example:
 
-	/*return knex.schema.dropTable('notification')
-	 .then(() => {
-		logger.info('[' + migrate_name + '] Notification Table dropped');
-	 });*/
+	/*
+	return knex.schema.dropTable('notification')
+		.then(() => {
+			logger.info(`[${migrateName}] Notification Table dropped`);
+		});
+	*/
 
-	logger.info('[' + migrate_name + '] Migrating Down Complete');
+	logger.info(`[${migrateName}] Migrating Down Complete`);
 
 	return Promise.resolve(true);
 };
+
+export { up, down };
diff --git a/backend/lib/utils.js b/backend/lib/utils.js
index e2d60778..21904df2 100644
--- a/backend/lib/utils.js
+++ b/backend/lib/utils.js
@@ -1,110 +1,110 @@
-const _          = require('lodash');
-const exec       = require('node:child_process').exec;
-const execFile   = require('node:child_process').execFile;
-const { Liquid } = require('liquidjs');
-const logger     = require('../logger').global;
-const error      = require('./error');
+import { exec as nodeExec, execFile as nodeExecFile } from "node:child_process";
+import { dirname } from "node:path";
+import { fileURLToPath } from "node:url";
+import { Liquid } from "liquidjs";
+import _ from "lodash";
+import { global as logger } from "../logger.js";
+import errs from "./error.js";
 
-module.exports = {
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = dirname(__filename);
 
-	exec: async (cmd, options = {}) => {
-		logger.debug('CMD:', cmd);
-
-		const { stdout, stderr } = await new Promise((resolve, reject) => {
-			const child = exec(cmd, options, (isError, stdout, stderr) => {
-				if (isError) {
-					reject(new error.CommandError(stderr, isError));
-				} else {
-					resolve({ stdout, stderr });
-				}
-			});
-
-			child.on('error', (e) => {
-				reject(new error.CommandError(stderr, 1, e));
-			});
-		});
-		return stdout;
-	},
-
-	/**
-	 * @param   {String} cmd
-	 * @param   {Array}  args
-	 * @param   {Object|undefined}  options
-	 * @returns {Promise}
-	 */
-	execFile: (cmd, args, options) => {
-		logger.debug(`CMD: ${cmd} ${args ? args.join(' ') : ''}`);
-		if (typeof options === 'undefined') {
-			options = {};
-		}
-
-		return new Promise((resolve, reject) => {
-			execFile(cmd, args, options, (err, stdout, stderr) => {
-				if (err && typeof err === 'object') {
-					reject(new error.CommandError(stderr, 1, err));
-				} else {
-					resolve(stdout.trim());
-				}
-			});
-		});
-	},
-
-	/**
-	 * Used in objection query builder
-	 *
-	 * @param   {Array}  omissions
-	 * @returns {Function}
-	 */
-	omitRow: (omissions) => {
-		/**
-		 * @param   {Object} row
-		 * @returns {Object}
-		 */
-		return (row) => {
-			return _.omit(row, omissions);
-		};
-	},
-
-	/**
-	 * Used in objection query builder
-	 *
-	 * @param   {Array}  omissions
-	 * @returns {Function}
-	 */
-	omitRows: (omissions) => {
-		/**
-		 * @param   {Array} rows
-		 * @returns {Object}
-		 */
-		return (rows) => {
-			rows.forEach((row, idx) => {
-				rows[idx] = _.omit(row, omissions);
-			});
-			return rows;
-		};
-	},
-
-	/**
-	 * @returns {Object} Liquid render engine
-	 */
-	getRenderEngine: () => {
-		const renderEngine = new Liquid({
-			root: `${__dirname}/../templates/`
-		});
-
-		/**
-		 * nginxAccessRule expects the object given to have 2 properties:
-		 *
-		 * directive  string
-		 * address    string
-		 */
-		renderEngine.registerFilter('nginxAccessRule', (v) => {
-			if (typeof v.directive !== 'undefined' && typeof v.address !== 'undefined' && v.directive && v.address) {
-				return `${v.directive} ${v.address};`;
+const exec = async (cmd, options = {}) => {
+	logger.debug("CMD:", cmd);
+	const { stdout, stderr } = await new Promise((resolve, reject) => {
+		const child = nodeExec(cmd, options, (isError, stdout, stderr) => {
+			if (isError) {
+				reject(new errs.CommandError(stderr, isError));
+			} else {
+				resolve({ stdout, stderr });
 			}
-			return '';
 		});
 
-		return renderEngine;
-	}
+		child.on("error", (e) => {
+			reject(new errs.CommandError(stderr, 1, e));
+		});
+	});
+	return stdout;
 };
+
+/**
+ * @param   {String} cmd
+ * @param   {Array}  args
+ * @param   {Object|undefined}  options
+ * @returns {Promise}
+ */
+const execFile = (cmd, args, options) => {
+	logger.debug(`CMD: ${cmd} ${args ? args.join(" ") : ""}`);
+	const opts = options || {};
+
+	return new Promise((resolve, reject) => {
+		nodeExecFile(cmd, args, opts, (err, stdout, stderr) => {
+			if (err && typeof err === "object") {
+				reject(new errs.CommandError(stderr, 1, err));
+			} else {
+				resolve(stdout.trim());
+			}
+		});
+	});
+};
+
+/**
+ * Used in objection query builder
+ *
+ * @param   {Array}  omissions
+ * @returns {Function}
+ */
+const omitRow = (omissions) => {
+	/**
+	 * @param   {Object} row
+	 * @returns {Object}
+	 */
+	return (row) => {
+		return _.omit(row, omissions);
+	};
+};
+
+/**
+ * Used in objection query builder
+ *
+ * @param   {Array}  omissions
+ * @returns {Function}
+ */
+const omitRows = (omissions) => {
+	/**
+	 * @param   {Array} rows
+	 * @returns {Object}
+	 */
+	return (rows) => {
+		rows.forEach((row, idx) => {
+			rows[idx] = _.omit(row, omissions);
+		});
+		return rows;
+	};
+};
+
+/**
+ * @returns {Object} Liquid render engine
+ */
+const getRenderEngine = () => {
+	const renderEngine = new Liquid({
+		root: `${__dirname}/../templates/`,
+	});
+
+	/**
+	 * nginxAccessRule expects the object given to have 2 properties:
+	 *
+	 * directive  string
+	 * address    string
+	 */
+	renderEngine.registerFilter("nginxAccessRule", (v) => {
+		if (typeof v.directive !== "undefined" && typeof v.address !== "undefined" && v.directive && v.address) {
+			return `${v.directive} ${v.address};`;
+		}
+		return "";
+	});
+
+	return renderEngine;
+};
+
+export default { exec, execFile, omitRow, omitRows, getRenderEngine };
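Editor's note: a minimal sketch (not part of the diff) of the default export in use; the template values are illustrative:

	import utils from "./lib/utils.js";

	// exec() resolves with stdout, or rejects with errs.CommandError carrying stderr
	const out = await utils.exec("echo hello");
	console.log(out.trim()); // "hello"

	// The Liquid engine is rooted at ../templates relative to lib/; nginxAccessRule
	// renders an access-list entry when both directive and address are present
	const engine = utils.getRenderEngine();
	const line = await engine.parseAndRender("{{ rule | nginxAccessRule }}", {
		rule: { directive: "allow", address: "10.0.0.0/8" },
	});
	// -> "allow 10.0.0.0/8;"
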
diff --git a/backend/lib/validator/api.js b/backend/lib/validator/api.js
index fb31e64c..6c738d50 100644
--- a/backend/lib/validator/api.js
+++ b/backend/lib/validator/api.js
@@ -1,12 +1,12 @@
-const Ajv   = require('ajv/dist/2020');
-const error = require('../error');
+import Ajv from "ajv/dist/2020.js";
+import errs from "../error.js";
 
 const ajv = new Ajv({
-	verbose:         true,
-	allErrors:       true,
+	verbose: true,
+	allErrors: true,
 	allowUnionTypes: true,
-	strict:          false,
-	coerceTypes:     true,
+	strict: false,
+	coerceTypes: true,
 });
 
 /**
@@ -14,30 +14,32 @@ const ajv = new Ajv({
  * @param {Object} payload
  * @returns {Promise}
  */
-function apiValidator (schema, payload/*, description*/) {
-	return new Promise(function Promise_apiValidator (resolve, reject) {
-		if (schema === null) {
-			reject(new error.ValidationError('Schema is undefined'));
-			return;
-		}
+const apiValidator = async (schema, payload /*, description*/) => {
+	if (!schema) {
+		throw new errs.ValidationError("Schema is undefined");
+	}
 
-		if (typeof payload === 'undefined') {
-			reject(new error.ValidationError('Payload is undefined'));
-			return;
-		}
+	// Can't use falsy check here as valid payload could be `0` or `false`
+	if (typeof payload === "undefined") {
+		throw new errs.ValidationError("Payload is undefined");
+	}
 
-		const validate = ajv.compile(schema);
-		const valid    = validate(payload);
 
-		if (valid && !validate.errors) {
-			resolve(payload);
-		} else {
-			let message = ajv.errorsText(validate.errors);
-			let err     = new error.ValidationError(message);
-			err.debug   = [validate.errors, payload];
-			reject(err);
-		}
-	});
-}
+	const validate = ajv.compile(schema);
 
-module.exports = apiValidator;
+	const valid = validate(payload);
+
+
+	if (valid && !validate.errors) {
+		return payload;
+	}
+
+
+
+	const message = ajv.errorsText(validate.errors);
+	const err = new errs.ValidationError(message);
+	err.debug = { validationErrors: validate.errors, payload };
+	throw err;
+};
+
+export default apiValidator;
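Editor's note: a hedged usage sketch (not part of the diff); the inline schema is illustrative, real schemas live under backend/schema:

	import apiValidator from "./lib/validator/api.js";

	const schema = {
		type: "object",
		required: ["name"],
		properties: { name: { type: "string" } },
		additionalProperties: false,
	};

	// Resolves with the (possibly type-coerced) payload; on failure it throws
	// errs.ValidationError with err.debug = { validationErrors, payload }
	const payload = await apiValidator(schema, { name: "example" });
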
diff --git a/backend/lib/validator/index.js b/backend/lib/validator/index.js
index c6d24096..5f2586fd 100644
--- a/backend/lib/validator/index.js
+++ b/backend/lib/validator/index.js
@@ -1,17 +1,17 @@
-const _                 = require('lodash');
-const Ajv               = require('ajv/dist/2020');
-const error             = require('../error');
-const commonDefinitions = require('../../schema/common.json');
+import Ajv from "ajv/dist/2020.js";
+import _ from "lodash";
+import commonDefinitions from "../../schema/common.json" with { type: "json" };
+import errs from "../error.js";
 
 RegExp.prototype.toJSON = RegExp.prototype.toString;
 
 const ajv = new Ajv({
-	verbose:         true,
-	allErrors:       true,
+	verbose: true,
+	allErrors: true,
 	allowUnionTypes: true,
-	coerceTypes:     true,
-	strict:          false,
-	schemas:         [commonDefinitions]
+	coerceTypes: true,
+	strict: false,
+	schemas: [commonDefinitions],
 });
 
 /**
@@ -20,26 +20,26 @@ const ajv = new Ajv({
  * @param   {Object} payload
  * @returns {Promise}
  */
-function validator (schema, payload) {
-	return new Promise(function (resolve, reject) {
+const validator = (schema, payload) => {
+	return new Promise((resolve, reject) => {
 		if (!payload) {
-			reject(new error.InternalValidationError('Payload is falsy'));
+			reject(new errs.InternalValidationError("Payload is falsy"));
 		} else {
 			try {
-				let validate = ajv.compile(schema);
-				let valid    = validate(payload);
+				const validate = ajv.compile(schema);
+				const valid = validate(payload);
 
 				if (valid && !validate.errors) {
 					resolve(_.cloneDeep(payload));
 				} else {
-					let message = ajv.errorsText(validate.errors);
-					reject(new error.InternalValidationError(message));
+					const message = ajv.errorsText(validate.errors);
+					reject(new errs.InternalValidationError(message));
 				}
 			} catch (err) {
 				reject(err);
 			}
 		}
 	});
-}
+};
 
-module.exports = validator;
+export default validator;
diff --git a/backend/logger.js b/backend/logger.js
index 0ebb07c5..6318a5fb 100644
--- a/backend/logger.js
+++ b/backend/logger.js
@@ -1,14 +1,18 @@
-const {Signale} = require('signale');
+import signale from "signale";
 
-module.exports = {
-	global:    new Signale({scope: 'Global   '}),
-	migrate:   new Signale({scope: 'Migrate  '}),
-	express:   new Signale({scope: 'Express  '}),
-	access:    new Signale({scope: 'Access   '}),
-	nginx:     new Signale({scope: 'Nginx    '}),
-	ssl:       new Signale({scope: 'SSL      '}),
-	certbot:   new Signale({scope: 'Certbot  '}),
-	import:    new Signale({scope: 'Importer '}),
-	setup:     new Signale({scope: 'Setup    '}),
-	ip_ranges: new Signale({scope: 'IP Ranges'})
+const opts = {
+	logLevel: "info",
 };
+
+const global = new signale.Signale({ scope: "Global   ", ...opts });
+const migrate = new signale.Signale({ scope: "Migrate  ", ...opts });
+const express = new signale.Signale({ scope: "Express  ", ...opts });
+const access = new signale.Signale({ scope: "Access   ", ...opts });
+const nginx = new signale.Signale({ scope: "Nginx    ", ...opts });
+const ssl = new signale.Signale({ scope: "SSL      ", ...opts });
+const certbot = new signale.Signale({ scope: "Certbot  ", ...opts });
+const importer = new signale.Signale({ scope: "Importer ", ...opts });
+const setup = new signale.Signale({ scope: "Setup    ", ...opts });
+const ipRanges = new signale.Signale({ scope: "IP Ranges", ...opts });
+
+export { global, migrate, express, access, nginx, ssl, certbot, importer, setup, ipRanges };
diff --git a/backend/migrate.js b/backend/migrate.js
index 263c8702..dd3f1b61 100644
--- a/backend/migrate.js
+++ b/backend/migrate.js
@@ -1,15 +1,13 @@
-const db     = require('./db');
-const logger = require('./logger').migrate;
+import db from "./db.js";
+import { migrate as logger } from "./logger.js";
 
-module.exports = {
-	latest: function () {
-		return db.migrate.currentVersion()
-			.then((version) => {
-				logger.info('Current database version:', version);
-				return db.migrate.latest({
-					tableName: 'migrations',
-					directory: 'migrations'
-				});
-			});
-	}
+const migrateUp = async () => {
+	const version = await db.migrate.currentVersion();
+	logger.info("Current database version:", version);
+	return await db.migrate.latest({
+		tableName: "migrations",
+		directory: "migrations",
+	});
 };
+
+export { migrateUp };
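Editor's note: usage is now a single named export (sketch, not part of the diff):

	import { migrateUp } from "./migrate.js";

	// Logs the current schema version, then applies any pending files in ./migrations
	await migrateUp();
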
diff --git a/backend/migrations/20180618015850_initial.js b/backend/migrations/20180618015850_initial.js
index a112e826..d3c55d9c 100644
--- a/backend/migrations/20180618015850_initial.js
+++ b/backend/migrations/20180618015850_initial.js
@@ -1,5 +1,6 @@
-const migrate_name = 'initial-schema';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "initial-schema";
 
 /**
  * Migrate
@@ -7,199 +8,199 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
-	return knex.schema.createTable('auth', (table) => {
-		table.increments().primary();
-		table.dateTime('created_on').notNull();
-		table.dateTime('modified_on').notNull();
-		table.integer('user_id').notNull().unsigned();
-		table.string('type', 30).notNull();
-		table.string('secret').notNull();
-		table.json('meta').notNull();
-		table.integer('is_deleted').notNull().unsigned().defaultTo(0);
-	})
+	return knex.schema
+		.createTable("auth", (table) => {
+			table.increments().primary();
+			table.dateTime("created_on").notNull();
+			table.dateTime("modified_on").notNull();
+			table.integer("user_id").notNull().unsigned();
+			table.string("type", 30).notNull();
+			table.string("secret").notNull();
+			table.json("meta").notNull();
+			table.integer("is_deleted").notNull().unsigned().defaultTo(0);
+		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] auth Table created');
+			logger.info(`[${migrateName}] auth Table created`);
 
-			return knex.schema.createTable('user', (table) => {
+			return knex.schema.createTable("user", (table) => {
 				table.increments().primary();
-				table.dateTime('created_on').notNull();
-				table.dateTime('modified_on').notNull();
-				table.integer('is_deleted').notNull().unsigned().defaultTo(0);
-				table.integer('is_disabled').notNull().unsigned().defaultTo(0);
-				table.string('email').notNull();
-				table.string('name').notNull();
-				table.string('nickname').notNull();
-				table.string('avatar').notNull();
-				table.json('roles').notNull();
+				table.dateTime("created_on").notNull();
+				table.dateTime("modified_on").notNull();
+				table.integer("is_deleted").notNull().unsigned().defaultTo(0);
+				table.integer("is_disabled").notNull().unsigned().defaultTo(0);
+				table.string("email").notNull();
+				table.string("name").notNull();
+				table.string("nickname").notNull();
+				table.string("avatar").notNull();
+				table.json("roles").notNull();
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] user Table created');
+			logger.info(`[${migrateName}] user Table created`);
 
-			return knex.schema.createTable('user_permission', (table) => {
+			return knex.schema.createTable("user_permission", (table) => {
 				table.increments().primary();
-				table.dateTime('created_on').notNull();
-				table.dateTime('modified_on').notNull();
-				table.integer('user_id').notNull().unsigned();
-				table.string('visibility').notNull();
-				table.string('proxy_hosts').notNull();
-				table.string('redirection_hosts').notNull();
-				table.string('dead_hosts').notNull();
-				table.string('streams').notNull();
-				table.string('access_lists').notNull();
-				table.string('certificates').notNull();
-				table.unique('user_id');
+				table.dateTime("created_on").notNull();
+				table.dateTime("modified_on").notNull();
+				table.integer("user_id").notNull().unsigned();
+				table.string("visibility").notNull();
+				table.string("proxy_hosts").notNull();
+				table.string("redirection_hosts").notNull();
+				table.string("dead_hosts").notNull();
+				table.string("streams").notNull();
+				table.string("access_lists").notNull();
+				table.string("certificates").notNull();
+				table.unique("user_id");
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] user_permission Table created');
+			logger.info(`[${migrateName}] user_permission Table created`);
 
-			return knex.schema.createTable('proxy_host', (table) => {
+			return knex.schema.createTable("proxy_host", (table) => {
 				table.increments().primary();
-				table.dateTime('created_on').notNull();
-				table.dateTime('modified_on').notNull();
-				table.integer('owner_user_id').notNull().unsigned();
-				table.integer('is_deleted').notNull().unsigned().defaultTo(0);
-				table.json('domain_names').notNull();
-				table.string('forward_ip').notNull();
-				table.integer('forward_port').notNull().unsigned();
-				table.integer('access_list_id').notNull().unsigned().defaultTo(0);
-				table.integer('certificate_id').notNull().unsigned().defaultTo(0);
-				table.integer('ssl_forced').notNull().unsigned().defaultTo(0);
-				table.integer('caching_enabled').notNull().unsigned().defaultTo(0);
-				table.integer('block_exploits').notNull().unsigned().defaultTo(0);
-				table.text('advanced_config').notNull().defaultTo('');
-				table.json('meta').notNull();
+				table.dateTime("created_on").notNull();
+				table.dateTime("modified_on").notNull();
+				table.integer("owner_user_id").notNull().unsigned();
+				table.integer("is_deleted").notNull().unsigned().defaultTo(0);
+				table.json("domain_names").notNull();
+				table.string("forward_ip").notNull();
+				table.integer("forward_port").notNull().unsigned();
+				table.integer("access_list_id").notNull().unsigned().defaultTo(0);
+				table.integer("certificate_id").notNull().unsigned().defaultTo(0);
+				table.integer("ssl_forced").notNull().unsigned().defaultTo(0);
+				table.integer("caching_enabled").notNull().unsigned().defaultTo(0);
+				table.integer("block_exploits").notNull().unsigned().defaultTo(0);
+				table.text("advanced_config").notNull().defaultTo("");
+				table.json("meta").notNull();
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] proxy_host Table created');
+			logger.info(`[${migrateName}] proxy_host Table created`);
 
-			return knex.schema.createTable('redirection_host', (table) => {
+			return knex.schema.createTable("redirection_host", (table) => {
 				table.increments().primary();
-				table.dateTime('created_on').notNull();
-				table.dateTime('modified_on').notNull();
-				table.integer('owner_user_id').notNull().unsigned();
-				table.integer('is_deleted').notNull().unsigned().defaultTo(0);
-				table.json('domain_names').notNull();
-				table.string('forward_domain_name').notNull();
-				table.integer('preserve_path').notNull().unsigned().defaultTo(0);
-				table.integer('certificate_id').notNull().unsigned().defaultTo(0);
-				table.integer('ssl_forced').notNull().unsigned().defaultTo(0);
-				table.integer('block_exploits').notNull().unsigned().defaultTo(0);
-				table.text('advanced_config').notNull().defaultTo('');
-				table.json('meta').notNull();
+				table.dateTime("created_on").notNull();
+				table.dateTime("modified_on").notNull();
+				table.integer("owner_user_id").notNull().unsigned();
+				table.integer("is_deleted").notNull().unsigned().defaultTo(0);
+				table.json("domain_names").notNull();
+				table.string("forward_domain_name").notNull();
+				table.integer("preserve_path").notNull().unsigned().defaultTo(0);
+				table.integer("certificate_id").notNull().unsigned().defaultTo(0);
+				table.integer("ssl_forced").notNull().unsigned().defaultTo(0);
+				table.integer("block_exploits").notNull().unsigned().defaultTo(0);
+				table.text("advanced_config").notNull().defaultTo("");
+				table.json("meta").notNull();
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] redirection_host Table created');
+			logger.info(`[${migrateName}] redirection_host Table created`);
 
-			return knex.schema.createTable('dead_host', (table) => {
+			return knex.schema.createTable("dead_host", (table) => {
 				table.increments().primary();
-				table.dateTime('created_on').notNull();
-				table.dateTime('modified_on').notNull();
-				table.integer('owner_user_id').notNull().unsigned();
-				table.integer('is_deleted').notNull().unsigned().defaultTo(0);
-				table.json('domain_names').notNull();
-				table.integer('certificate_id').notNull().unsigned().defaultTo(0);
-				table.integer('ssl_forced').notNull().unsigned().defaultTo(0);
-				table.text('advanced_config').notNull().defaultTo('');
-				table.json('meta').notNull();
+				table.dateTime("created_on").notNull();
+				table.dateTime("modified_on").notNull();
+				table.integer("owner_user_id").notNull().unsigned();
+				table.integer("is_deleted").notNull().unsigned().defaultTo(0);
+				table.json("domain_names").notNull();
+				table.integer("certificate_id").notNull().unsigned().defaultTo(0);
+				table.integer("ssl_forced").notNull().unsigned().defaultTo(0);
+				table.text("advanced_config").notNull().defaultTo("");
+				table.json("meta").notNull();
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] dead_host Table created');
+			logger.info(`[${migrateName}] dead_host Table created`);
 
-			return knex.schema.createTable('stream', (table) => {
+			return knex.schema.createTable("stream", (table) => {
 				table.increments().primary();
-				table.dateTime('created_on').notNull();
-				table.dateTime('modified_on').notNull();
-				table.integer('owner_user_id').notNull().unsigned();
-				table.integer('is_deleted').notNull().unsigned().defaultTo(0);
-				table.integer('incoming_port').notNull().unsigned();
-				table.string('forward_ip').notNull();
-				table.integer('forwarding_port').notNull().unsigned();
-				table.integer('tcp_forwarding').notNull().unsigned().defaultTo(0);
-				table.integer('udp_forwarding').notNull().unsigned().defaultTo(0);
-				table.json('meta').notNull();
+				table.dateTime("created_on").notNull();
+				table.dateTime("modified_on").notNull();
+				table.integer("owner_user_id").notNull().unsigned();
+				table.integer("is_deleted").notNull().unsigned().defaultTo(0);
+				table.integer("incoming_port").notNull().unsigned();
+				table.string("forward_ip").notNull();
+				table.integer("forwarding_port").notNull().unsigned();
+				table.integer("tcp_forwarding").notNull().unsigned().defaultTo(0);
+				table.integer("udp_forwarding").notNull().unsigned().defaultTo(0);
+				table.json("meta").notNull();
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] stream Table created');
+			logger.info(`[${migrateName}] stream Table created`);
 
-			return knex.schema.createTable('access_list', (table) => {
+			return knex.schema.createTable("access_list", (table) => {
 				table.increments().primary();
-				table.dateTime('created_on').notNull();
-				table.dateTime('modified_on').notNull();
-				table.integer('owner_user_id').notNull().unsigned();
-				table.integer('is_deleted').notNull().unsigned().defaultTo(0);
-				table.string('name').notNull();
-				table.json('meta').notNull();
+				table.dateTime("created_on").notNull();
+				table.dateTime("modified_on").notNull();
+				table.integer("owner_user_id").notNull().unsigned();
+				table.integer("is_deleted").notNull().unsigned().defaultTo(0);
+				table.string("name").notNull();
+				table.json("meta").notNull();
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] access_list Table created');
+			logger.info(`[${migrateName}] access_list Table created`);
 
-			return knex.schema.createTable('certificate', (table) => {
+			return knex.schema.createTable("certificate", (table) => {
 				table.increments().primary();
-				table.dateTime('created_on').notNull();
-				table.dateTime('modified_on').notNull();
-				table.integer('owner_user_id').notNull().unsigned();
-				table.integer('is_deleted').notNull().unsigned().defaultTo(0);
-				table.string('provider').notNull();
-				table.string('nice_name').notNull().defaultTo('');
-				table.json('domain_names').notNull();
-				table.dateTime('expires_on').notNull();
-				table.json('meta').notNull();
+				table.dateTime("created_on").notNull();
+				table.dateTime("modified_on").notNull();
+				table.integer("owner_user_id").notNull().unsigned();
+				table.integer("is_deleted").notNull().unsigned().defaultTo(0);
+				table.string("provider").notNull();
+				table.string("nice_name").notNull().defaultTo("");
+				table.json("domain_names").notNull();
+				table.dateTime("expires_on").notNull();
+				table.json("meta").notNull();
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] certificate Table created');
+			logger.info(`[${migrateName}] certificate Table created`);
 
-			return knex.schema.createTable('access_list_auth', (table) => {
+			return knex.schema.createTable("access_list_auth", (table) => {
 				table.increments().primary();
-				table.dateTime('created_on').notNull();
-				table.dateTime('modified_on').notNull();
-				table.integer('access_list_id').notNull().unsigned();
-				table.string('username').notNull();
-				table.string('password').notNull();
-				table.json('meta').notNull();
+				table.dateTime("created_on").notNull();
+				table.dateTime("modified_on").notNull();
+				table.integer("access_list_id").notNull().unsigned();
+				table.string("username").notNull();
+				table.string("password").notNull();
+				table.json("meta").notNull();
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] access_list_auth Table created');
+			logger.info(`[${migrateName}] access_list_auth Table created`);
 
-			return knex.schema.createTable('audit_log', (table) => {
+			return knex.schema.createTable("audit_log", (table) => {
 				table.increments().primary();
-				table.dateTime('created_on').notNull();
-				table.dateTime('modified_on').notNull();
-				table.integer('user_id').notNull().unsigned();
-				table.string('object_type').notNull().defaultTo('');
-				table.integer('object_id').notNull().unsigned().defaultTo(0);
-				table.string('action').notNull();
-				table.json('meta').notNull();
+				table.dateTime("created_on").notNull();
+				table.dateTime("modified_on").notNull();
+				table.integer("user_id").notNull().unsigned();
+				table.string("object_type").notNull().defaultTo("");
+				table.integer("object_id").notNull().unsigned().defaultTo(0);
+				table.string("action").notNull();
+				table.json("meta").notNull();
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] audit_log Table created');
+			logger.info(`[${migrateName}] audit_log Table created`);
 		});
-
 };
 
 /**
  * Undo Migrate
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.down = function (knex, Promise) {
-	logger.warn('[' + migrate_name + '] You can\'t migrate down the initial data.');
+const down = (_knex) => {
+	logger.warn(`[${migrateName}] You can't migrate down the initial data.`);
 	return Promise.resolve(true);
 };
+
+export { up, down };
diff --git a/backend/migrations/20180929054513_websockets.js b/backend/migrations/20180929054513_websockets.js
index 06054850..cce80d37 100644
--- a/backend/migrations/20180929054513_websockets.js
+++ b/backend/migrations/20180929054513_websockets.js
@@ -1,5 +1,6 @@
-const migrate_name = 'websockets';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "websockets";
 
 /**
  * Migrate
@@ -7,29 +8,29 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
-	return knex.schema.table('proxy_host', function (proxy_host) {
-		proxy_host.integer('allow_websocket_upgrade').notNull().unsigned().defaultTo(0);
-	})
+	return knex.schema
+		.table("proxy_host", (proxy_host) => {
+			proxy_host.integer("allow_websocket_upgrade").notNull().unsigned().defaultTo(0);
+		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] proxy_host Table altered');
+			logger.info(`[${migrateName}] proxy_host Table altered`);
 		});
-
 };
 
 /**
  * Undo Migrate
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.down = function (knex, Promise) {
-	logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
+const down = (_knex) => {
+	logger.warn(`[${migrateName}] You can't migrate down this one.`);
 	return Promise.resolve(true);
-};
\ No newline at end of file
+};
+
+export { up, down };
diff --git a/backend/migrations/20181019052346_forward_host.js b/backend/migrations/20181019052346_forward_host.js
index 05c27739..fe11edc5 100644
--- a/backend/migrations/20181019052346_forward_host.js
+++ b/backend/migrations/20181019052346_forward_host.js
@@ -1,5 +1,6 @@
-const migrate_name = 'forward_host';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "forward_host";
 
 /**
  * Migrate
@@ -7,17 +8,17 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
-	return knex.schema.table('proxy_host', function (proxy_host) {
-		proxy_host.renameColumn('forward_ip', 'forward_host');
-	})
+	return knex.schema
+		.table("proxy_host", (proxy_host) => {
+			proxy_host.renameColumn("forward_ip", "forward_host");
+		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] proxy_host Table altered');
+			logger.info(`[${migrateName}] proxy_host Table altered`);
 		});
 };
 
@@ -25,10 +26,11 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.down = function (knex, Promise) {
-	logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
+const down = (_knex) => {
+	logger.warn(`[${migrateName}] You can't migrate down this one.`);
 	return Promise.resolve(true);
-};
\ No newline at end of file
+};
+
+export { up, down };
diff --git a/backend/migrations/20181113041458_http2_support.js b/backend/migrations/20181113041458_http2_support.js
index 9f6b4336..cfa94a99 100644
--- a/backend/migrations/20181113041458_http2_support.js
+++ b/backend/migrations/20181113041458_http2_support.js
@@ -1,5 +1,6 @@
-const migrate_name = 'http2_support';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "http2_support";
 
 /**
  * Migrate
@@ -7,31 +8,31 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
-	return knex.schema.table('proxy_host', function (proxy_host) {
-		proxy_host.integer('http2_support').notNull().unsigned().defaultTo(0);
-	})
+	return knex.schema
+		.table("proxy_host", (proxy_host) => {
+			proxy_host.integer("http2_support").notNull().unsigned().defaultTo(0);
+		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] proxy_host Table altered');
+			logger.info(`[${migrateName}] proxy_host Table altered`);
 
-			return knex.schema.table('redirection_host', function (redirection_host) {
-				redirection_host.integer('http2_support').notNull().unsigned().defaultTo(0);
+			return knex.schema.table("redirection_host", (redirection_host) => {
+				redirection_host.integer("http2_support").notNull().unsigned().defaultTo(0);
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] redirection_host Table altered');
+			logger.info(`[${migrateName}] redirection_host Table altered`);
 
-			return knex.schema.table('dead_host', function (dead_host) {
-				dead_host.integer('http2_support').notNull().unsigned().defaultTo(0);
+			return knex.schema.table("dead_host", (dead_host) => {
+				dead_host.integer("http2_support").notNull().unsigned().defaultTo(0);
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] dead_host Table altered');
+			logger.info(`[${migrateName}] dead_host Table altered`);
 		});
 };
 
@@ -39,11 +40,11 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.down = function (knex, Promise) {
-	logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
+const down = (_knex) => {
+	logger.warn(`[${migrateName}] You can't migrate down this one.`);
 	return Promise.resolve(true);
 };
 
+export { up, down };
diff --git a/backend/migrations/20181213013211_forward_scheme.js b/backend/migrations/20181213013211_forward_scheme.js
index 22ae619e..ba3bc562 100644
--- a/backend/migrations/20181213013211_forward_scheme.js
+++ b/backend/migrations/20181213013211_forward_scheme.js
@@ -1,5 +1,6 @@
-const migrate_name = 'forward_scheme';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "forward_scheme";
 
 /**
  * Migrate
@@ -7,17 +8,17 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
-	return knex.schema.table('proxy_host', function (proxy_host) {
-		proxy_host.string('forward_scheme').notNull().defaultTo('http');
-	})
+	return knex.schema
+		.table("proxy_host", (proxy_host) => {
+			proxy_host.string("forward_scheme").notNull().defaultTo("http");
+		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] proxy_host Table altered');
+			logger.info(`[${migrateName}] proxy_host Table altered`);
 		});
 };
 
@@ -25,10 +26,11 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.down = function (knex, Promise) {
-	logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
+const down = (_knex) => {
+	logger.warn(`[${migrateName}] You can't migrate down this one.`);
 	return Promise.resolve(true);
 };
+
+export { up, down };
diff --git a/backend/migrations/20190104035154_disabled.js b/backend/migrations/20190104035154_disabled.js
index 2780c4df..28fcc7b5 100644
--- a/backend/migrations/20190104035154_disabled.js
+++ b/backend/migrations/20190104035154_disabled.js
@@ -1,5 +1,6 @@
-const migrate_name = 'disabled';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "disabled";
 
 /**
  * Migrate
@@ -7,38 +8,38 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
-	return knex.schema.table('proxy_host', function (proxy_host) {
-		proxy_host.integer('enabled').notNull().unsigned().defaultTo(1);
-	})
+	return knex.schema
+		.table("proxy_host", (proxy_host) => {
+			proxy_host.integer("enabled").notNull().unsigned().defaultTo(1);
+		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] proxy_host Table altered');
+			logger.info(`[${migrateName}] proxy_host Table altered`);
 
-			return knex.schema.table('redirection_host', function (redirection_host) {
-				redirection_host.integer('enabled').notNull().unsigned().defaultTo(1);
+			return knex.schema.table("redirection_host", (redirection_host) => {
+				redirection_host.integer("enabled").notNull().unsigned().defaultTo(1);
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] redirection_host Table altered');
+			logger.info(`[${migrateName}] redirection_host Table altered`);
 
-			return knex.schema.table('dead_host', function (dead_host) {
-				dead_host.integer('enabled').notNull().unsigned().defaultTo(1);
+			return knex.schema.table("dead_host", (dead_host) => {
+				dead_host.integer("enabled").notNull().unsigned().defaultTo(1);
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] dead_host Table altered');
+			logger.info(`[${migrateName}] dead_host Table altered`);
 
-			return knex.schema.table('stream', function (stream) {
-				stream.integer('enabled').notNull().unsigned().defaultTo(1);
+			return knex.schema.table("stream", (stream) => {
+				stream.integer("enabled").notNull().unsigned().defaultTo(1);
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] stream Table altered');
+			logger.info(`[${migrateName}] stream Table altered`);
 		});
 };
 
@@ -46,10 +47,11 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.down = function (knex, Promise) {
-	logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
+const down = (_knex) => {
+	logger.warn(`[${migrateName}] You can't migrate down this one.`);
 	return Promise.resolve(true);
 };
+
+export { up, down };
diff --git a/backend/migrations/20190215115310_customlocations.js b/backend/migrations/20190215115310_customlocations.js
index 4bcfd51a..c4f77977 100644
--- a/backend/migrations/20190215115310_customlocations.js
+++ b/backend/migrations/20190215115310_customlocations.js
@@ -1,5 +1,6 @@
-const migrate_name = 'custom_locations';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "custom_locations";
 
 /**
  * Migrate
@@ -8,17 +9,17 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
-	return knex.schema.table('proxy_host', function (proxy_host) {
-		proxy_host.json('locations');
-	})
+	return knex.schema
+		.table("proxy_host", (proxy_host) => {
+			proxy_host.json("locations");
+		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] proxy_host Table altered');
+			logger.info(`[${migrateName}] proxy_host Table altered`);
 		});
 };
 
@@ -26,10 +27,11 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.down = function (knex, Promise) {
-	logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
+const down = (_knex) => {
+	logger.warn(`[${migrateName}] You can't migrate down this one.`);
 	return Promise.resolve(true);
 };
+
+export { up, down };
diff --git a/backend/migrations/20190218060101_hsts.js b/backend/migrations/20190218060101_hsts.js
index 648b162a..1253130e 100644
--- a/backend/migrations/20190218060101_hsts.js
+++ b/backend/migrations/20190218060101_hsts.js
@@ -1,5 +1,6 @@
-const migrate_name = 'hsts';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "hsts";
 
 /**
  * Migrate
@@ -7,34 +8,34 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
-	return knex.schema.table('proxy_host', function (proxy_host) {
-		proxy_host.integer('hsts_enabled').notNull().unsigned().defaultTo(0);
-		proxy_host.integer('hsts_subdomains').notNull().unsigned().defaultTo(0);
-	})
+	return knex.schema
+		.table("proxy_host", (proxy_host) => {
+			proxy_host.integer("hsts_enabled").notNull().unsigned().defaultTo(0);
+			proxy_host.integer("hsts_subdomains").notNull().unsigned().defaultTo(0);
+		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] proxy_host Table altered');
+			logger.info(`[${migrateName}] proxy_host Table altered`);
 
-			return knex.schema.table('redirection_host', function (redirection_host) {
-				redirection_host.integer('hsts_enabled').notNull().unsigned().defaultTo(0);
-				redirection_host.integer('hsts_subdomains').notNull().unsigned().defaultTo(0);
+			return knex.schema.table("redirection_host", (redirection_host) => {
+				redirection_host.integer("hsts_enabled").notNull().unsigned().defaultTo(0);
+				redirection_host.integer("hsts_subdomains").notNull().unsigned().defaultTo(0);
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] redirection_host Table altered');
+			logger.info(`[${migrateName}] redirection_host Table altered`);
 
-			return knex.schema.table('dead_host', function (dead_host) {
-				dead_host.integer('hsts_enabled').notNull().unsigned().defaultTo(0);
-				dead_host.integer('hsts_subdomains').notNull().unsigned().defaultTo(0);
+			return knex.schema.table("dead_host", (dead_host) => {
+				dead_host.integer("hsts_enabled").notNull().unsigned().defaultTo(0);
+				dead_host.integer("hsts_subdomains").notNull().unsigned().defaultTo(0);
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] dead_host Table altered');
+			logger.info(`[${migrateName}] dead_host Table altered`);
 		});
 };
 
@@ -42,10 +43,11 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.down = function (knex, Promise) {
-	logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
+const down = (_knex) => {
+	logger.warn(`[${migrateName}] You can't migrate down this one.`);
 	return Promise.resolve(true);
 };
+
+export { up, down };
diff --git a/backend/migrations/20190227065017_settings.js b/backend/migrations/20190227065017_settings.js
index 7dc9c192..a6cbe2c9 100644
--- a/backend/migrations/20190227065017_settings.js
+++ b/backend/migrations/20190227065017_settings.js
@@ -1,5 +1,6 @@
-const migrate_name = 'settings';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "settings";
 
 /**
  * Migrate
@@ -7,11 +8,10 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
 	return knex.schema.createTable('setting', (table) => {
 		table.string('id').notNull().primary();
@@ -21,7 +21,7 @@ exports.up = function (knex/*, Promise*/) {
 		table.json('meta').notNull();
 	})
 		.then(() => {
-			logger.info('[' + migrate_name + '] setting Table created');
+			logger.info(`[${migrateName}] setting Table created`);
 		});
 };
 
@@ -29,10 +29,11 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.down = function (knex, Promise) {
-	logger.warn('[' + migrate_name + '] You can\'t migrate down the initial data.');
+const down = (_knex) => {
+	logger.warn(`[${migrateName}] You can't migrate down the initial data.`);
 	return Promise.resolve(true);
 };
+
+export { up, down };
diff --git a/backend/migrations/20200410143839_access_list_client.js b/backend/migrations/20200410143839_access_list_client.js
index 3511e35b..e6825048 100644
--- a/backend/migrations/20200410143839_access_list_client.js
+++ b/backend/migrations/20200410143839_access_list_client.js
@@ -1,5 +1,6 @@
-const migrate_name = 'access_list_client';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "access_list_client";
 
 /**
  * Migrate
@@ -7,32 +8,30 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.up = function (knex/*, Promise*/) {
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
-	logger.info('[' + migrate_name + '] Migrating Up...');
+	return knex.schema
+		.createTable("access_list_client", (table) => {
+			table.increments().primary();
+			table.dateTime("created_on").notNull();
+			table.dateTime("modified_on").notNull();
+			table.integer("access_list_id").notNull().unsigned();
+			table.string("address").notNull();
+			table.string("directive").notNull();
+			table.json("meta").notNull();
+		})
+		.then(() => {
+			logger.info(`[${migrateName}] access_list_client Table created`);
 
-	return knex.schema.createTable('access_list_client', (table) => {
-		table.increments().primary();
-		table.dateTime('created_on').notNull();
-		table.dateTime('modified_on').notNull();
-		table.integer('access_list_id').notNull().unsigned();
-		table.string('address').notNull();
-		table.string('directive').notNull();
-		table.json('meta').notNull();
-
-	})
-		.then(function () {
-			logger.info('[' + migrate_name + '] access_list_client Table created');
-
-			return knex.schema.table('access_list', function (access_list) {
-				access_list.integer('satify_any').notNull().defaultTo(0);
+			return knex.schema.table("access_list", (access_list) => {
+				access_list.integer("satify_any").notNull().defaultTo(0);
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] access_list Table altered');
+			logger.info(`[${migrateName}] access_list Table altered`);
 		});
 };
 
@@ -40,14 +39,14 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  *
  * @param {Object} knex
- * @param {Promise} Promise
  * @returns {Promise}
  */
-exports.down = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Down...');
+const down = (knex) => {
+	logger.info(`[${migrateName}] Migrating Down...`);
 
-	return knex.schema.dropTable('access_list_client')
-		.then(() => {
-			logger.info('[' + migrate_name + '] access_list_client Table dropped');
-		});
+	return knex.schema.dropTable("access_list_client").then(() => {
+		logger.info(`[${migrateName}] access_list_client Table dropped`);
+	});
 };
+
+export { up, down };
diff --git a/backend/migrations/20200410143840_access_list_client_fix.js b/backend/migrations/20200410143840_access_list_client_fix.js
index ee0f0906..6bdaedb6 100644
--- a/backend/migrations/20200410143840_access_list_client_fix.js
+++ b/backend/migrations/20200410143840_access_list_client_fix.js
@@ -1,5 +1,6 @@
-const migrate_name = 'access_list_client_fix';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "access_list_client_fix";
 
 /**
  * Migrate
@@ -7,17 +8,17 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
-	return knex.schema.table('access_list', function (access_list) {
-		access_list.renameColumn('satify_any', 'satisfy_any');
-	})
+	return knex.schema
+		.table("access_list", (access_list) => {
+			access_list.renameColumn("satify_any", "satisfy_any");
+		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] access_list Table altered');
+			logger.info(`[${migrateName}] access_list Table altered`);
 		});
 };
 
@@ -25,10 +26,11 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.down = function (knex, Promise) {
-	logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
+const down = (_knex) => {
+	logger.warn(`[${migrateName}] You can't migrate down this one.`);
 	return Promise.resolve(true);
 };
+
+export { up, down };
diff --git a/backend/migrations/20201014143841_pass_auth.js b/backend/migrations/20201014143841_pass_auth.js
index a7767eb1..dc57e2a7 100644
--- a/backend/migrations/20201014143841_pass_auth.js
+++ b/backend/migrations/20201014143841_pass_auth.js
@@ -1,5 +1,6 @@
-const migrate_name = 'pass_auth';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "pass_auth";
 
 /**
  * Migrate
@@ -7,18 +8,17 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.up = function (knex/*, Promise*/) {
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
-	logger.info('[' + migrate_name + '] Migrating Up...');
-
-	return knex.schema.table('access_list', function (access_list) {
-		access_list.integer('pass_auth').notNull().defaultTo(1);
-	})
+	return knex.schema
+		.table("access_list", (access_list) => {
+			access_list.integer("pass_auth").notNull().defaultTo(1);
+		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] access_list Table altered');
+			logger.info(`[${migrateName}] access_list Table altered`);
 		});
 };
 
@@ -26,16 +26,18 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  *
  * @param {Object} knex
- * @param {Promise} Promise
  * @returns {Promise}
  */
-exports.down = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Down...');
+const down = (knex) => {
+	logger.info(`[${migrateName}] Migrating Down...`);
 
-	return knex.schema.table('access_list', function (access_list) {
-		access_list.dropColumn('pass_auth');
-	})
+	return knex.schema
+		.table("access_list", (access_list) => {
+			access_list.dropColumn("pass_auth");
+		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] access_list pass_auth Column dropped');
+			logger.info(`[${migrateName}] access_list pass_auth Column dropped`);
 		});
 };
+
+export { up, down };
diff --git a/backend/migrations/20210210154702_redirection_scheme.js b/backend/migrations/20210210154702_redirection_scheme.js
index 0dad4876..b3f18aeb 100644
--- a/backend/migrations/20210210154702_redirection_scheme.js
+++ b/backend/migrations/20210210154702_redirection_scheme.js
@@ -1,5 +1,6 @@
-const migrate_name = 'redirection_scheme';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "redirection_scheme";
 
 /**
  * Migrate
@@ -7,18 +8,17 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  *
  * @param   {Object} knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.up = function (knex/*, Promise*/) {
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
-	logger.info('[' + migrate_name + '] Migrating Up...');
-
-	return knex.schema.table('redirection_host', (table) => {
-		table.string('forward_scheme').notNull().defaultTo('$scheme');
-	})
-		.then(function () {
-			logger.info('[' + migrate_name + '] redirection_host Table altered');
+	return knex.schema
+		.table("redirection_host", (table) => {
+			table.string("forward_scheme").notNull().defaultTo("$scheme");
+		})
+		.then(() => {
+			logger.info(`[${migrateName}] redirection_host Table altered`);
 		});
 };
 
@@ -26,16 +26,18 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  *
  * @param   {Object} knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.down = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Down...');
+const down = (knex) => {
+	logger.info(`[${migrateName}] Migrating Down...`);
 
-	return knex.schema.table('redirection_host', (table) => {
-		table.dropColumn('forward_scheme');
-	})
-		.then(function () {
-			logger.info('[' + migrate_name + '] redirection_host Table altered');
+	return knex.schema
+		.table("redirection_host", (table) => {
+			table.dropColumn("forward_scheme");
+		})
+		.then(() => {
+			logger.info(`[${migrateName}] redirection_host Table altered`);
 		});
 };
+
+export { up, down };
diff --git a/backend/migrations/20210210154703_redirection_status_code.js b/backend/migrations/20210210154703_redirection_status_code.js
index b9bea0b9..cf84298d 100644
--- a/backend/migrations/20210210154703_redirection_status_code.js
+++ b/backend/migrations/20210210154703_redirection_status_code.js
@@ -1,5 +1,6 @@
-const migrate_name = 'redirection_status_code';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "redirection_status_code";
 
 /**
  * Migrate
@@ -7,18 +8,17 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  *
  * @param   {Object} knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.up = function (knex/*, Promise*/) {
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
-	logger.info('[' + migrate_name + '] Migrating Up...');
-
-	return knex.schema.table('redirection_host', (table) => {
-		table.integer('forward_http_code').notNull().unsigned().defaultTo(302);
-	})
-		.then(function () {
-			logger.info('[' + migrate_name + '] redirection_host Table altered');
+	return knex.schema
+		.table("redirection_host", (table) => {
+			table.integer("forward_http_code").notNull().unsigned().defaultTo(302);
+		})
+		.then(() => {
+			logger.info(`[${migrateName}] redirection_host Table altered`);
 		});
 };
 
@@ -26,16 +26,18 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  *
  * @param   {Object} knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.down = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Down...');
+const down = (knex) => {
+	logger.info(`[${migrateName}] Migrating Down...`);
 
-	return knex.schema.table('redirection_host', (table) => {
-		table.dropColumn('forward_http_code');
-	})
-		.then(function () {
-			logger.info('[' + migrate_name + '] redirection_host Table altered');
+	return knex.schema
+		.table("redirection_host", (table) => {
+			table.dropColumn("forward_http_code");
+		})
+		.then(() => {
+			logger.info(`[${migrateName}] redirection_host Table altered`);
 		});
 };
+
+export { up, down };
diff --git a/backend/migrations/20210423103500_stream_domain.js b/backend/migrations/20210423103500_stream_domain.js
index a894ca5e..b4afabd4 100644
--- a/backend/migrations/20210423103500_stream_domain.js
+++ b/backend/migrations/20210423103500_stream_domain.js
@@ -1,40 +1,43 @@
-const migrate_name = 'stream_domain';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "stream_domain";
 
 /**
-	* Migrate
-	*
-	* @see http://knexjs.org/#Schema
-	*
-	* @param   {Object} knex
-	* @param   {Promise} Promise
-	* @returns {Promise}
-	*/
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+ * Migrate
+ *
+ * @see http://knexjs.org/#Schema
+ *
+ * @param   {Object} knex
+ * @returns {Promise}
+ */
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
-	return knex.schema.table('stream', (table) => {
-		table.renameColumn('forward_ip', 'forwarding_host');
-	})
-		.then(function () {
-			logger.info('[' + migrate_name + '] stream Table altered');
+	return knex.schema
+		.table("stream", (table) => {
+			table.renameColumn("forward_ip", "forwarding_host");
+		})
+		.then(() => {
+			logger.info(`[${migrateName}] stream Table altered`);
 		});
 };
 
 /**
-	* Undo Migrate
-	*
-	* @param   {Object} knex
-	* @param   {Promise} Promise
-	* @returns {Promise}
-	*/
-exports.down = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Down...');
+ * Undo Migrate
+ *
+ * @param   {Object} knex
+ * @returns {Promise}
+ */
+const down = (knex) => {
+	logger.info(`[${migrateName}] Migrating Down...`);
 
-	return knex.schema.table('stream', (table) => {
-		table.renameColumn('forwarding_host', 'forward_ip');
-	})
-		.then(function () {
-			logger.info('[' + migrate_name + '] stream Table altered');
+	return knex.schema
+		.table("stream", (table) => {
+			table.renameColumn("forwarding_host", "forward_ip");
+		})
+		.then(() => {
+			logger.info(`[${migrateName}] stream Table altered`);
 		});
 };
+
+export { up, down };
diff --git a/backend/migrations/20211108145214_regenerate_default_host.js b/backend/migrations/20211108145214_regenerate_default_host.js
index 4c50941f..c2805152 100644
--- a/backend/migrations/20211108145214_regenerate_default_host.js
+++ b/backend/migrations/20211108145214_regenerate_default_host.js
@@ -1,17 +1,19 @@
-const migrate_name  = 'stream_domain';
-const logger        = require('../logger').migrate;
-const internalNginx = require('../internal/nginx');
+import internalNginx from "../internal/nginx.js";
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "regenerate_default_host";
 
 async function regenerateDefaultHost(knex) {
-	const row = await knex('setting').select('*').where('id', 'default-site').first();
+	const row = await knex("setting").select("*").where("id", "default-site").first();
 
 	if (!row) {
 		return Promise.resolve();
 	}
 
-	return internalNginx.deleteConfig('default')
+	return internalNginx
+		.deleteConfig("default")
 		.then(() => {
-			return internalNginx.generateConfig('default', row);
+			return internalNginx.generateConfig("default", row);
 		})
 		.then(() => {
 			return internalNginx.test();
@@ -22,29 +24,29 @@ async function regenerateDefaultHost(knex) {
 }
 
 /**
-	* Migrate
-	*
-	* @see http://knexjs.org/#Schema
-	*
-	* @param   {Object} knex
-	* @param   {Promise} Promise
-	* @returns {Promise}
-	*/
-exports.up = function (knex) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+ * Migrate
+ *
+ * @see http://knexjs.org/#Schema
+ *
+ * @param   {Object} knex
+ * @returns {Promise}
+ */
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
 	return regenerateDefaultHost(knex);
 };
 
 /**
-	* Undo Migrate
-	*
-	* @param   {Object} knex
-	* @param   {Promise} Promise
-	* @returns {Promise}
-	*/
-exports.down = function (knex) {
-	logger.info('[' + migrate_name + '] Migrating Down...');
+ * Undo Migrate
+ *
+ * @param   {Object} knex
+ * @returns {Promise}
+ */
+const down = (knex) => {
+	logger.info(`[${migrateName}] Migrating Down...`);
 
 	return regenerateDefaultHost(knex);
-};
\ No newline at end of file
+};
+
+export { up, down };
diff --git a/backend/migrations/20240427161436_stream_ssl.js b/backend/migrations/20240427161436_stream_ssl.js
index 5f47b18e..0fbba110 100644
--- a/backend/migrations/20240427161436_stream_ssl.js
+++ b/backend/migrations/20240427161436_stream_ssl.js
@@ -1,5 +1,6 @@
-const migrate_name = 'stream_ssl';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "stream_ssl";
 
 /**
  * Migrate
@@ -9,14 +10,15 @@ const logger       = require('../logger').migrate;
  * @param   {Object} knex
  * @returns {Promise}
  */
-exports.up = function (knex) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
-	return knex.schema.table('stream', (table) => {
-		table.integer('certificate_id').notNull().unsigned().defaultTo(0);
-	})
-		.then(function () {
-			logger.info('[' + migrate_name + '] stream Table altered');
+	return knex.schema
+		.table("stream", (table) => {
+			table.integer("certificate_id").notNull().unsigned().defaultTo(0);
+		})
+		.then(() => {
+			logger.info(`[${migrateName}] stream Table altered`);
 		});
 };
 
@@ -26,13 +28,16 @@ exports.up = function (knex) {
  * @param   {Object} knex
  * @returns {Promise}
  */
-exports.down = function (knex) {
-	logger.info('[' + migrate_name + '] Migrating Down...');
+const down = (knex) => {
+	logger.info(`[${migrateName}] Migrating Down...`);
 
-	return knex.schema.table('stream', (table) => {
-		table.dropColumn('certificate_id');
-	})
-		.then(function () {
-			logger.info('[' + migrate_name + '] stream Table altered');
+	return knex.schema
+		.table("stream", (table) => {
+			table.dropColumn("certificate_id");
+		})
+		.then(() => {
+			logger.info(`[${migrateName}] stream Table altered`);
 		});
 };
+
+export { up, down };
diff --git a/backend/models/access_list.js b/backend/models/access_list.js
index 959df05f..98016a17 100644
--- a/backend/models/access_list.js
+++ b/backend/models/access_list.js
@@ -1,103 +1,98 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
-const db               = require('../db');
-const helpers          = require('../lib/helpers');
-const Model            = require('objection').Model;
-const User             = require('./user');
-const AccessListAuth   = require('./access_list_auth');
-const AccessListClient = require('./access_list_client');
-const now              = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
+import AccessListAuth from "./access_list_auth.js";
+import AccessListClient from "./access_list_client.js";
+import now from "./now_helper.js";
+import ProxyHostModel from "./proxy_host.js";
+import User from "./user.js";
 
 Model.knex(db);
 
-const boolFields = [
-	'is_deleted',
-	'satisfy_any',
-	'pass_auth',
-];
+const boolFields = ["is_deleted", "satisfy_any", "pass_auth"];
 
 class AccessList extends Model {
-	$beforeInsert () {
-		this.created_on  = now();
+	$beforeInsert() {
+		this.created_on = now();
 		this.modified_on = now();
 
 		// Default for meta
-		if (typeof this.meta === 'undefined') {
+		if (typeof this.meta === "undefined") {
 			this.meta = {};
 		}
 	}
 
-	$beforeUpdate () {
+	$beforeUpdate() {
 		this.modified_on = now();
 	}
 
 	$parseDatabaseJson(json) {
-		json = super.$parseDatabaseJson(json);
-		return helpers.convertIntFieldsToBool(json, boolFields);
+		const thisJson = super.$parseDatabaseJson(json);
+		return convertIntFieldsToBool(thisJson, boolFields);
 	}
 
 	$formatDatabaseJson(json) {
-		json = helpers.convertBoolFieldsToInt(json, boolFields);
-		return super.$formatDatabaseJson(json);
+		const thisJson = convertBoolFieldsToInt(json, boolFields);
+		return super.$formatDatabaseJson(thisJson);
 	}
 
-	static get name () {
-		return 'AccessList';
+	static get name() {
+		return "AccessList";
 	}
 
-	static get tableName () {
-		return 'access_list';
+	static get tableName() {
+		return "access_list";
 	}
 
-	static get jsonAttributes () {
-		return ['meta'];
+	static get jsonAttributes() {
+		return ["meta"];
 	}
 
-	static get relationMappings () {
-		const ProxyHost = require('./proxy_host');
-
+	static get relationMappings() {
 		return {
 			owner: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: User,
-				join:       {
-					from: 'access_list.owner_user_id',
-					to:   'user.id'
+				join: {
+					from: "access_list.owner_user_id",
+					to: "user.id",
+				},
+				modify: (qb) => {
+					qb.where("user.is_deleted", 0);
 				},
-				modify: function (qb) {
-					qb.where('user.is_deleted', 0);
-				}
 			},
 			items: {
-				relation:   Model.HasManyRelation,
+				relation: Model.HasManyRelation,
 				modelClass: AccessListAuth,
-				join:       {
-					from: 'access_list.id',
-					to:   'access_list_auth.access_list_id'
-				}
+				join: {
+					from: "access_list.id",
+					to: "access_list_auth.access_list_id",
+				},
 			},
 			clients: {
-				relation:   Model.HasManyRelation,
+				relation: Model.HasManyRelation,
 				modelClass: AccessListClient,
-				join:       {
-					from: 'access_list.id',
-					to:   'access_list_client.access_list_id'
-				}
+				join: {
+					from: "access_list.id",
+					to: "access_list_client.access_list_id",
+				},
 			},
 			proxy_hosts: {
-				relation:   Model.HasManyRelation,
-				modelClass: ProxyHost,
-				join:       {
-					from: 'access_list.id',
-					to:   'proxy_host.access_list_id'
+				relation: Model.HasManyRelation,
+				modelClass: ProxyHostModel,
+				join: {
+					from: "access_list.id",
+					to: "proxy_host.access_list_id",
 				},
-				modify: function (qb) {
-					qb.where('proxy_host.is_deleted', 0);
-				}
-			}
+				modify: (qb) => {
+					qb.where("proxy_host.is_deleted", 0);
+				},
+			},
 		};
 	}
 }
 
-module.exports = AccessList;
+export default AccessList;
diff --git a/backend/models/access_list_auth.js b/backend/models/access_list_auth.js
index 3895539c..a4fd85a5 100644
--- a/backend/models/access_list_auth.js
+++ b/backend/models/access_list_auth.js
@@ -1,54 +1,55 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
-const db    = require('../db');
-const Model = require('objection').Model;
-const now   = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import accessListModel from "./access_list.js";
+import now from "./now_helper.js";
 
 Model.knex(db);
 
 class AccessListAuth extends Model {
-	$beforeInsert () {
-		this.created_on  = now();
+	$beforeInsert() {
+		this.created_on = now();
 		this.modified_on = now();
 
 		// Default for meta
-		if (typeof this.meta === 'undefined') {
+		if (typeof this.meta === "undefined") {
 			this.meta = {};
 		}
 	}
 
-	$beforeUpdate () {
+	$beforeUpdate() {
 		this.modified_on = now();
 	}
 
-	static get name () {
-		return 'AccessListAuth';
+	static get name() {
+		return "AccessListAuth";
 	}
 
-	static get tableName () {
-		return 'access_list_auth';
+	static get tableName() {
+		return "access_list_auth";
 	}
 
-	static get jsonAttributes () {
-		return ['meta'];
+	static get jsonAttributes() {
+		return ["meta"];
 	}
 
-	static get relationMappings () {
+	static get relationMappings() {
 		return {
 			access_list: {
-				relation:   Model.HasOneRelation,
-				modelClass: require('./access_list'),
-				join:       {
-					from: 'access_list_auth.access_list_id',
-					to:   'access_list.id'
+				relation: Model.HasOneRelation,
+				modelClass: accessListModel,
+				join: {
+					from: "access_list_auth.access_list_id",
+					to: "access_list.id",
 				},
-				modify: function (qb) {
-					qb.where('access_list.is_deleted', 0);
-				}
-			}
+				modify: (qb) => {
+					qb.where("access_list.is_deleted", 0);
+				},
+			},
 		};
 	}
 }
 
-module.exports = AccessListAuth;
+export default AccessListAuth;
diff --git a/backend/models/access_list_client.js b/backend/models/access_list_client.js
index bffc0023..4b63aec9 100644
--- a/backend/models/access_list_client.js
+++ b/backend/models/access_list_client.js
@@ -1,54 +1,55 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
-const db    = require('../db');
-const Model = require('objection').Model;
-const now   = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import accessListModel from "./access_list.js";
+import now from "./now_helper.js";
 
 Model.knex(db);
 
 class AccessListClient extends Model {
-	$beforeInsert () {
-		this.created_on  = now();
+	$beforeInsert() {
+		this.created_on = now();
 		this.modified_on = now();
 
 		// Default for meta
-		if (typeof this.meta === 'undefined') {
+		if (typeof this.meta === "undefined") {
 			this.meta = {};
 		}
 	}
 
-	$beforeUpdate () {
+	$beforeUpdate() {
 		this.modified_on = now();
 	}
 
-	static get name () {
-		return 'AccessListClient';
+	static get name() {
+		return "AccessListClient";
 	}
 
-	static get tableName () {
-		return 'access_list_client';
+	static get tableName() {
+		return "access_list_client";
 	}
 
-	static get jsonAttributes () {
-		return ['meta'];
+	static get jsonAttributes() {
+		return ["meta"];
 	}
 
-	static get relationMappings () {
+	static get relationMappings() {
 		return {
 			access_list: {
-				relation:   Model.HasOneRelation,
-				modelClass: require('./access_list'),
-				join:       {
-					from: 'access_list_client.access_list_id',
-					to:   'access_list.id'
+				relation: Model.HasOneRelation,
+				modelClass: accessListModel,
+				join: {
+					from: "access_list_client.access_list_id",
+					to: "access_list.id",
 				},
-				modify: function (qb) {
-					qb.where('access_list.is_deleted', 0);
-				}
-			}
+				modify: (qb) => {
+					qb.where("access_list.is_deleted", 0);
+				},
+			},
 		};
 	}
 }
 
-module.exports = AccessListClient;
+export default AccessListClient;
diff --git a/backend/models/audit-log.js b/backend/models/audit-log.js
index 45a4b460..a9b2d563 100644
--- a/backend/models/audit-log.js
+++ b/backend/models/audit-log.js
@@ -1,52 +1,52 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
-const db    = require('../db');
-const Model = require('objection').Model;
-const User  = require('./user');
-const now   = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import now from "./now_helper.js";
+import User from "./user.js";
 
 Model.knex(db);
 
 class AuditLog extends Model {
-	$beforeInsert () {
-		this.created_on  = now();
+	$beforeInsert() {
+		this.created_on = now();
 		this.modified_on = now();
 
 		// Default for meta
-		if (typeof this.meta === 'undefined') {
+		if (typeof this.meta === "undefined") {
 			this.meta = {};
 		}
 	}
 
-	$beforeUpdate () {
+	$beforeUpdate() {
 		this.modified_on = now();
 	}
 
-	static get name () {
-		return 'AuditLog';
+	static get name() {
+		return "AuditLog";
 	}
 
-	static get tableName () {
-		return 'audit_log';
+	static get tableName() {
+		return "audit_log";
 	}
 
-	static get jsonAttributes () {
-		return ['meta'];
+	static get jsonAttributes() {
+		return ["meta"];
 	}
 
-	static get relationMappings () {
+	static get relationMappings() {
 		return {
 			user: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: User,
-				join:       {
-					from: 'audit_log.user_id',
-					to:   'user.id'
-				}
-			}
+				join: {
+					from: "audit_log.user_id",
+					to: "user.id",
+				},
+			},
 		};
 	}
 }
 
-module.exports = AuditLog;
+export default AuditLog;
diff --git a/backend/models/auth.js b/backend/models/auth.js
index 469e96bf..4ba50b41 100644
--- a/backend/models/auth.js
+++ b/backend/models/auth.js
@@ -1,59 +1,53 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
-const bcrypt  = require('bcrypt');
-const db      = require('../db');
-const helpers = require('../lib/helpers');
-const Model   = require('objection').Model;
-const User    = require('./user');
-const now     = require('./now_helper');
+import bcrypt from "bcrypt";
+import { Model } from "objection";
+import db from "../db.js";
+import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
+import now from "./now_helper.js";
+import User from "./user.js";
 
 Model.knex(db);
 
-const boolFields = [
-	'is_deleted',
-];
+const boolFields = ["is_deleted"];
 
-function encryptPassword () {
-	/* jshint -W040 */
-	let _this = this;
-
-	if (_this.type === 'password' && _this.secret) {
-		return bcrypt.hash(_this.secret, 13)
-			.then(function (hash) {
-				_this.secret = hash;
-			});
+function encryptPassword() {
+	if (this.type === "password" && this.secret) {
+		return bcrypt.hash(this.secret, 13).then((hash) => {
+			this.secret = hash;
+		});
 	}
 
 	return null;
 }
 
 class Auth extends Model {
-	$beforeInsert (queryContext) {
-		this.created_on  = now();
+	$beforeInsert(queryContext) {
+		this.created_on = now();
 		this.modified_on = now();
 
 		// Default for meta
-		if (typeof this.meta === 'undefined') {
+		if (typeof this.meta === "undefined") {
 			this.meta = {};
 		}
 
 		return encryptPassword.apply(this, queryContext);
 	}
 
-	$beforeUpdate (queryContext) {
+	$beforeUpdate(queryContext) {
 		this.modified_on = now();
 		return encryptPassword.apply(this, queryContext);
 	}
 
 	$parseDatabaseJson(json) {
-		json = super.$parseDatabaseJson(json);
-		return helpers.convertIntFieldsToBool(json, boolFields);
+		const thisJson = super.$parseDatabaseJson(json);
+		return convertIntFieldsToBool(thisJson, boolFields);
 	}
 
 	$formatDatabaseJson(json) {
-		json = helpers.convertBoolFieldsToInt(json, boolFields);
-		return super.$formatDatabaseJson(json);
+		const thisJson = convertBoolFieldsToInt(json, boolFields);
+		return super.$formatDatabaseJson(thisJson);
 	}
 
 	/**
@@ -62,37 +56,37 @@ class Auth extends Model {
 	 * @param {String} password
 	 * @returns {Promise}
 	 */
-	verifyPassword (password) {
+	verifyPassword(password) {
 		return bcrypt.compare(password, this.secret);
 	}
 
-	static get name () {
-		return 'Auth';
+	static get name() {
+		return "Auth";
 	}
 
-	static get tableName () {
-		return 'auth';
+	static get tableName() {
+		return "auth";
 	}
 
-	static get jsonAttributes () {
-		return ['meta'];
+	static get jsonAttributes() {
+		return ["meta"];
 	}
 
-	static get relationMappings () {
+	static get relationMappings() {
 		return {
 			user: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: User,
-				join:       {
-					from: 'auth.user_id',
-					to:   'user.id'
+				join: {
+					from: "auth.user_id",
+					to: "user.id",
 				},
 				filter: {
-					is_deleted: 0
-				}
-			}
+					is_deleted: 0,
+				},
+			},
 		};
 	}
 }
 
-module.exports = Auth;
+export default Auth;
diff --git a/backend/models/certificate.js b/backend/models/certificate.js
index d4ea21ad..9ad03c89 100644
--- a/backend/models/certificate.js
+++ b/backend/models/certificate.js
@@ -1,124 +1,133 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
-const db      = require('../db');
-const helpers = require('../lib/helpers');
-const Model   = require('objection').Model;
-const now     = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
+import deadHostModel from "./dead_host.js";
+import now from "./now_helper.js";
+import proxyHostModel from "./proxy_host.js";
+import redirectionHostModel from "./redirection_host.js";
+import streamModel from "./stream.js";
+import userModel from "./user.js";
 
 Model.knex(db);
 
-const boolFields = [
-	'is_deleted',
-];
+const boolFields = ["is_deleted"];
 
 class Certificate extends Model {
-	$beforeInsert () {
-		this.created_on  = now();
+	$beforeInsert() {
+		this.created_on = now();
 		this.modified_on = now();
 
 		// Default for expires_on
-		if (typeof this.expires_on === 'undefined') {
+		if (typeof this.expires_on === "undefined") {
 			this.expires_on = now();
 		}
 
 		// Default for domain_names
-		if (typeof this.domain_names === 'undefined') {
+		if (typeof this.domain_names === "undefined") {
 			this.domain_names = [];
 		}
 
 		// Default for meta
-		if (typeof this.meta === 'undefined') {
+		if (typeof this.meta === "undefined") {
 			this.meta = {};
 		}
 
 		this.domain_names.sort();
 	}
 
-	$beforeUpdate () {
+	$beforeUpdate() {
 		this.modified_on = now();
 
 		// Sort domain_names
-		if (typeof this.domain_names !== 'undefined') {
+		if (typeof this.domain_names !== "undefined") {
 			this.domain_names.sort();
 		}
 	}
 
 	$parseDatabaseJson(json) {
-		json = super.$parseDatabaseJson(json);
-		return helpers.convertIntFieldsToBool(json, boolFields);
+		const thisJson = super.$parseDatabaseJson(json);
+		return convertIntFieldsToBool(thisJson, boolFields);
 	}
 
 	$formatDatabaseJson(json) {
-		json = helpers.convertBoolFieldsToInt(json, boolFields);
-		return super.$formatDatabaseJson(json);
+		const thisJson = convertBoolFieldsToInt(json, boolFields);
+		return super.$formatDatabaseJson(thisJson);
 	}
 
-	static get name () {
-		return 'Certificate';
+	static get name() {
+		return "Certificate";
 	}
 
-	static get tableName () {
-		return 'certificate';
+	static get tableName() {
+		return "certificate";
 	}
 
-	static get jsonAttributes () {
-		return ['domain_names', 'meta'];
+	static get jsonAttributes() {
+		return ["domain_names", "meta"];
 	}
 
-	static get relationMappings () {
-		const ProxyHost       = require('./proxy_host');
-		const DeadHost        = require('./dead_host');
-		const User            = require('./user');
-		const RedirectionHost = require('./redirection_host');
-
+	static get relationMappings() {
 		return {
 			owner: {
-				relation:   Model.HasOneRelation,
-				modelClass: User,
-				join:       {
-					from: 'certificate.owner_user_id',
-					to:   'user.id'
+				relation: Model.HasOneRelation,
+				modelClass: userModel,
+				join: {
+					from: "certificate.owner_user_id",
+					to: "user.id",
+				},
+				modify: (qb) => {
+					qb.where("user.is_deleted", 0);
 				},
-				modify: function (qb) {
-					qb.where('user.is_deleted', 0);
-				}
 			},
 			proxy_hosts: {
-				relation:   Model.HasManyRelation,
-				modelClass: ProxyHost,
-				join:       {
-					from: 'certificate.id',
-					to:   'proxy_host.certificate_id'
+				relation: Model.HasManyRelation,
+				modelClass: proxyHostModel,
+				join: {
+					from: "certificate.id",
+					to: "proxy_host.certificate_id",
+				},
+				modify: (qb) => {
+					qb.where("proxy_host.is_deleted", 0);
 				},
-				modify: function (qb) {
-					qb.where('proxy_host.is_deleted', 0);
-				}
 			},
 			dead_hosts: {
-				relation:   Model.HasManyRelation,
-				modelClass: DeadHost,
-				join:       {
-					from: 'certificate.id',
-					to:   'dead_host.certificate_id'
+				relation: Model.HasManyRelation,
+				modelClass: deadHostModel,
+				join: {
+					from: "certificate.id",
+					to: "dead_host.certificate_id",
+				},
+				modify: (qb) => {
+					qb.where("dead_host.is_deleted", 0);
 				},
-				modify: function (qb) {
-					qb.where('dead_host.is_deleted', 0);
-				}
 			},
 			redirection_hosts: {
-				relation:   Model.HasManyRelation,
-				modelClass: RedirectionHost,
-				join:       {
-					from: 'certificate.id',
-					to:   'redirection_host.certificate_id'
+				relation: Model.HasManyRelation,
+				modelClass: redirectionHostModel,
+				join: {
+					from: "certificate.id",
+					to: "redirection_host.certificate_id",
 				},
-				modify: function (qb) {
-					qb.where('redirection_host.is_deleted', 0);
-				}
-			}
+				modify: (qb) => {
+					qb.where("redirection_host.is_deleted", 0);
+				},
+			},
+			streams: {
+				relation: Model.HasManyRelation,
+				modelClass: streamModel,
+				join: {
+					from: "certificate.id",
+					to: "stream.certificate_id",
+				},
+				modify: (qb) => {
+					qb.where("stream.is_deleted", 0);
+				},
+			},
 		};
 	}
 }
 
-module.exports = Certificate;
+export default Certificate;
diff --git a/backend/models/dead_host.js b/backend/models/dead_host.js
index 3386caab..56807012 100644
--- a/backend/models/dead_host.js
+++ b/backend/models/dead_host.js
@@ -1,99 +1,92 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
-const db          = require('../db');
-const helpers     = require('../lib/helpers');
-const Model       = require('objection').Model;
-const User        = require('./user');
-const Certificate = require('./certificate');
-const now         = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
+import Certificate from "./certificate.js";
+import now from "./now_helper.js";
+import User from "./user.js";
 
 Model.knex(db);
 
-const boolFields = [
-	'is_deleted',
-	'ssl_forced',
-	'http2_support',
-	'enabled',
-	'hsts_enabled',
-	'hsts_subdomains',
-];
+const boolFields = ["is_deleted", "ssl_forced", "http2_support", "enabled", "hsts_enabled", "hsts_subdomains"];
 
 class DeadHost extends Model {
-	$beforeInsert () {
-		this.created_on  = now();
+	$beforeInsert() {
+		this.created_on = now();
 		this.modified_on = now();
 
 		// Default for domain_names
-		if (typeof this.domain_names === 'undefined') {
+		if (typeof this.domain_names === "undefined") {
 			this.domain_names = [];
 		}
 
 		// Default for meta
-		if (typeof this.meta === 'undefined') {
+		if (typeof this.meta === "undefined") {
 			this.meta = {};
 		}
 
 		this.domain_names.sort();
 	}
 
-	$beforeUpdate () {
+	$beforeUpdate() {
 		this.modified_on = now();
 
 		// Sort domain_names
-		if (typeof this.domain_names !== 'undefined') {
+		if (typeof this.domain_names !== "undefined") {
 			this.domain_names.sort();
 		}
 	}
 
 	$parseDatabaseJson(json) {
-		json = super.$parseDatabaseJson(json);
-		return helpers.convertIntFieldsToBool(json, boolFields);
+		const thisJson = super.$parseDatabaseJson(json);
+		return convertIntFieldsToBool(thisJson, boolFields);
 	}
 
 	$formatDatabaseJson(json) {
-		json = helpers.convertBoolFieldsToInt(json, boolFields);
-		return super.$formatDatabaseJson(json);
+		const thisJson = convertBoolFieldsToInt(json, boolFields);
+		return super.$formatDatabaseJson(thisJson);
 	}
 
-	static get name () {
-		return 'DeadHost';
+	static get name() {
+		return "DeadHost";
 	}
 
-	static get tableName () {
-		return 'dead_host';
+	static get tableName() {
+		return "dead_host";
 	}
 
-	static get jsonAttributes () {
-		return ['domain_names', 'meta'];
+	static get jsonAttributes() {
+		return ["domain_names", "meta"];
 	}
 
-	static get relationMappings () {
+	static get relationMappings() {
 		return {
 			owner: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: User,
-				join:       {
-					from: 'dead_host.owner_user_id',
-					to:   'user.id'
+				join: {
+					from: "dead_host.owner_user_id",
+					to: "user.id",
+				},
+				modify: (qb) => {
+					qb.where("user.is_deleted", 0);
 				},
-				modify: function (qb) {
-					qb.where('user.is_deleted', 0);
-				}
 			},
 			certificate: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: Certificate,
-				join:       {
-					from: 'dead_host.certificate_id',
-					to:   'certificate.id'
+				join: {
+					from: "dead_host.certificate_id",
+					to: "certificate.id",
 				},
-				modify: function (qb) {
-					qb.where('certificate.is_deleted', 0);
-				}
-			}
+				modify: (qb) => {
+					qb.where("certificate.is_deleted", 0);
+				},
+			},
 		};
 	}
 }
 
-module.exports = DeadHost;
+export default DeadHost;
diff --git a/backend/models/now_helper.js b/backend/models/now_helper.js
index dec70c3d..4dc71cea 100644
--- a/backend/models/now_helper.js
+++ b/backend/models/now_helper.js
@@ -1,13 +1,12 @@
-const db     = require('../db');
-const config = require('../lib/config');
-const Model  = require('objection').Model;
+import { Model } from "objection";
+import db from "../db.js";
+import { isSqlite } from "../lib/config.js";
 
 Model.knex(db);
 
-module.exports = function () {
-	if (config.isSqlite()) {
-		// eslint-disable-next-line
+export default () => {
+	if (isSqlite()) {
 		return Model.raw("datetime('now','localtime')");
 	}
-	return Model.raw('NOW()');
+	return Model.raw("NOW()");
 };
diff --git a/backend/models/proxy_host.js b/backend/models/proxy_host.js
index 07aa5dd3..119fe2b7 100644
--- a/backend/models/proxy_host.js
+++ b/backend/models/proxy_host.js
@@ -1,114 +1,114 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
-const db          = require('../db');
-const helpers     = require('../lib/helpers');
-const Model       = require('objection').Model;
-const User        = require('./user');
-const AccessList  = require('./access_list');
-const Certificate = require('./certificate');
-const now         = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
+import AccessList from "./access_list.js";
+import Certificate from "./certificate.js";
+import now from "./now_helper.js";
+import User from "./user.js";
 
 Model.knex(db);
 
 const boolFields = [
-	'is_deleted',
-	'ssl_forced',
-	'caching_enabled',
-	'block_exploits',
-	'allow_websocket_upgrade',
-	'http2_support',
-	'enabled',
-	'hsts_enabled',
-	'hsts_subdomains',
+	"is_deleted",
+	"ssl_forced",
+	"caching_enabled",
+	"block_exploits",
+	"allow_websocket_upgrade",
+	"http2_support",
+	"enabled",
+	"hsts_enabled",
+	"hsts_subdomains",
 ];
 
 class ProxyHost extends Model {
-	$beforeInsert () {
-		this.created_on  = now();
+	$beforeInsert() {
+		this.created_on = now();
 		this.modified_on = now();
 
 		// Default for domain_names
-		if (typeof this.domain_names === 'undefined') {
+		if (typeof this.domain_names === "undefined") {
 			this.domain_names = [];
 		}
 
 		// Default for meta
-		if (typeof this.meta === 'undefined') {
+		if (typeof this.meta === "undefined") {
 			this.meta = {};
 		}
 
 		this.domain_names.sort();
 	}
 
-	$beforeUpdate () {
+	$beforeUpdate() {
 		this.modified_on = now();
 
 		// Sort domain_names
-		if (typeof this.domain_names !== 'undefined') {
+		if (typeof this.domain_names !== "undefined") {
 			this.domain_names.sort();
 		}
 	}
 
 	$parseDatabaseJson(json) {
-		json = super.$parseDatabaseJson(json);
-		return helpers.convertIntFieldsToBool(json, boolFields);
+		const thisJson = super.$parseDatabaseJson(json);
+		return convertIntFieldsToBool(thisJson, boolFields);
 	}
 
 	$formatDatabaseJson(json) {
-		json = helpers.convertBoolFieldsToInt(json, boolFields);
-		return super.$formatDatabaseJson(json);
+		const thisJson = convertBoolFieldsToInt(json, boolFields);
+		return super.$formatDatabaseJson(thisJson);
 	}
 
-	static get name () {
-		return 'ProxyHost';
+	static get name() {
+		return "ProxyHost";
 	}
 
-	static get tableName () {
-		return 'proxy_host';
+	static get tableName() {
+		return "proxy_host";
 	}
 
-	static get jsonAttributes () {
-		return ['domain_names', 'meta', 'locations'];
+	static get jsonAttributes() {
+		return ["domain_names", "meta", "locations"];
 	}
 
-	static get relationMappings () {
+	static get relationMappings() {
 		return {
 			owner: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: User,
-				join:       {
-					from: 'proxy_host.owner_user_id',
-					to:   'user.id'
+				join: {
+					from: "proxy_host.owner_user_id",
+					to: "user.id",
+				},
+				modify: (qb) => {
+					qb.where("user.is_deleted", 0);
 				},
-				modify: function (qb) {
-					qb.where('user.is_deleted', 0);
-				}
 			},
 			access_list: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: AccessList,
-				join:       {
-					from: 'proxy_host.access_list_id',
-					to:   'access_list.id'
+				join: {
+					from: "proxy_host.access_list_id",
+					to: "access_list.id",
+				},
+				modify: (qb) => {
+					qb.where("access_list.is_deleted", 0);
 				},
-				modify: function (qb) {
-					qb.where('access_list.is_deleted', 0);
-				}
 			},
 			certificate: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: Certificate,
-				join:       {
-					from: 'proxy_host.certificate_id',
-					to:   'certificate.id'
+				join: {
+					from: "proxy_host.certificate_id",
+					to: "certificate.id",
 				},
-				modify: function (qb) {
-					qb.where('certificate.is_deleted', 0);
-				}
-			}
+				modify: (qb) => {
+					qb.where("certificate.is_deleted", 0);
+				},
+			},
 		};
 	}
 }
 
-module.exports = ProxyHost;
+export default ProxyHost;
diff --git a/backend/models/redirection_host.js b/backend/models/redirection_host.js
index 80162791..bb397baa 100644
--- a/backend/models/redirection_host.js
+++ b/backend/models/redirection_host.js
@@ -1,102 +1,101 @@
-
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
-const db          = require('../db');
-const helpers     = require('../lib/helpers');
-const Model       = require('objection').Model;
-const User        = require('./user');
-const Certificate = require('./certificate');
-const now         = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
+import Certificate from "./certificate.js";
+import now from "./now_helper.js";
+import User from "./user.js";
 
 Model.knex(db);
 
 const boolFields = [
-	'is_deleted',
-	'enabled',
-	'preserve_path',
-	'ssl_forced',
-	'block_exploits',
-	'hsts_enabled',
-	'hsts_subdomains',
-	'http2_support',
+	"is_deleted",
+	"enabled",
+	"preserve_path",
+	"ssl_forced",
+	"block_exploits",
+	"hsts_enabled",
+	"hsts_subdomains",
+	"http2_support",
 ];
 
 class RedirectionHost extends Model {
-	$beforeInsert () {
-		this.created_on  = now();
+	$beforeInsert() {
+		this.created_on = now();
 		this.modified_on = now();
 
 		// Default for domain_names
-		if (typeof this.domain_names === 'undefined') {
+		if (typeof this.domain_names === "undefined") {
 			this.domain_names = [];
 		}
 
 		// Default for meta
-		if (typeof this.meta === 'undefined') {
+		if (typeof this.meta === "undefined") {
 			this.meta = {};
 		}
 
 		this.domain_names.sort();
 	}
 
-	$beforeUpdate () {
+	$beforeUpdate() {
 		this.modified_on = now();
 
 		// Sort domain_names
-		if (typeof this.domain_names !== 'undefined') {
+		if (typeof this.domain_names !== "undefined") {
 			this.domain_names.sort();
 		}
 	}
 
 	$parseDatabaseJson(json) {
-		json = super.$parseDatabaseJson(json);
-		return helpers.convertIntFieldsToBool(json, boolFields);
+		const thisJson = super.$parseDatabaseJson(json);
+		return convertIntFieldsToBool(thisJson, boolFields);
 	}
 
 	$formatDatabaseJson(json) {
-		json = helpers.convertBoolFieldsToInt(json, boolFields);
-		return super.$formatDatabaseJson(json);
+		const thisJson = convertBoolFieldsToInt(json, boolFields);
+		return super.$formatDatabaseJson(thisJson);
 	}
 
-	static get name () {
-		return 'RedirectionHost';
+	static get name() {
+		return "RedirectionHost";
 	}
 
-	static get tableName () {
-		return 'redirection_host';
+	static get tableName() {
+		return "redirection_host";
 	}
 
-	static get jsonAttributes () {
-		return ['domain_names', 'meta'];
+	static get jsonAttributes() {
+		return ["domain_names", "meta"];
 	}
 
-	static get relationMappings () {
+	static get relationMappings() {
 		return {
 			owner: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: User,
-				join:       {
-					from: 'redirection_host.owner_user_id',
-					to:   'user.id'
+				join: {
+					from: "redirection_host.owner_user_id",
+					to: "user.id",
+				},
+				modify: (qb) => {
+					qb.where("user.is_deleted", 0);
 				},
-				modify: function (qb) {
-					qb.where('user.is_deleted', 0);
-				}
 			},
 			certificate: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: Certificate,
-				join:       {
-					from: 'redirection_host.certificate_id',
-					to:   'certificate.id'
+				join: {
+					from: "redirection_host.certificate_id",
+					to: "certificate.id",
 				},
-				modify: function (qb) {
-					qb.where('certificate.is_deleted', 0);
-				}
-			}
+				modify: (qb) => {
+					qb.where("certificate.is_deleted", 0);
+				},
+			},
 		};
 	}
 }
 
-module.exports = RedirectionHost;
+export default RedirectionHost;
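
The models now pull `convertIntFieldsToBool` and `convertBoolFieldsToInt` in as named imports from `../lib/helpers.js`. The real implementations live in that file; the sketch below only illustrates the shape such helpers typically take, to show why `$parseDatabaseJson`/`$formatDatabaseJson` round-trip the `boolFields` list through them:

```js
// Illustrative sketch only — the real helpers live in backend/lib/helpers.js.
// Databases store the flags as 0/1 integers; the API works with booleans.
const convertIntFieldsToBool = (obj, fields) => {
	const out = { ...obj };
	for (const field of fields) {
		if (typeof out[field] !== "undefined") {
			out[field] = out[field] === 1 || out[field] === true;
		}
	}
	return out;
};

const convertBoolFieldsToInt = (obj, fields) => {
	const out = { ...obj };
	for (const field of fields) {
		if (typeof out[field] !== "undefined") {
			out[field] = out[field] ? 1 : 0;
		}
	}
	return out;
};

export { convertBoolFieldsToInt, convertIntFieldsToBool };
```
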
diff --git a/backend/models/setting.js b/backend/models/setting.js
index 75aa9007..0e0d6f4f 100644
--- a/backend/models/setting.js
+++ b/backend/models/setting.js
@@ -1,8 +1,8 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
-const db    = require('../db');
-const Model = require('objection').Model;
+import { Model } from "objection";
+import db from "../db.js";
 
 Model.knex(db);
 
@@ -27,4 +27,4 @@ class Setting extends Model {
 	}
 }
 
-module.exports = Setting;
+export default Setting;
diff --git a/backend/models/stream.js b/backend/models/stream.js
index 5d1cb6c1..92d335ff 100644
--- a/backend/models/stream.js
+++ b/backend/models/stream.js
@@ -1,82 +1,77 @@
-const Model       = require('objection').Model;
-const db          = require('../db');
-const helpers     = require('../lib/helpers');
-const User        = require('./user');
-const Certificate = require('./certificate');
-const now         = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
+import Certificate from "./certificate.js";
+import now from "./now_helper.js";
+import User from "./user.js";
 
 Model.knex(db);
 
-const boolFields = [
-	'is_deleted',
-	'enabled',
-	'tcp_forwarding',
-	'udp_forwarding',
-];
+const boolFields = ["is_deleted", "enabled", "tcp_forwarding", "udp_forwarding"];
 
 class Stream extends Model {
-	$beforeInsert () {
-		this.created_on  = now();
+	$beforeInsert() {
+		this.created_on = now();
 		this.modified_on = now();
 
 		// Default for meta
-		if (typeof this.meta === 'undefined') {
+		if (typeof this.meta === "undefined") {
 			this.meta = {};
 		}
 	}
 
-	$beforeUpdate () {
+	$beforeUpdate() {
 		this.modified_on = now();
 	}
 
 	$parseDatabaseJson(json) {
-		json = super.$parseDatabaseJson(json);
-		return helpers.convertIntFieldsToBool(json, boolFields);
+		const thisJson = super.$parseDatabaseJson(json);
+		return convertIntFieldsToBool(thisJson, boolFields);
 	}
 
 	$formatDatabaseJson(json) {
-		json = helpers.convertBoolFieldsToInt(json, boolFields);
-		return super.$formatDatabaseJson(json);
+		const thisJson = convertBoolFieldsToInt(json, boolFields);
+		return super.$formatDatabaseJson(thisJson);
 	}
 
-	static get name () {
-		return 'Stream';
+	static get name() {
+		return "Stream";
 	}
 
-	static get tableName () {
-		return 'stream';
+	static get tableName() {
+		return "stream";
 	}
 
-	static get jsonAttributes () {
-		return ['meta'];
+	static get jsonAttributes() {
+		return ["meta"];
 	}
 
-	static get relationMappings () {
+	static get relationMappings() {
 		return {
 			owner: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: User,
-				join:       {
-					from: 'stream.owner_user_id',
-					to:   'user.id'
+				join: {
+					from: "stream.owner_user_id",
+					to: "user.id",
+				},
+				modify: (qb) => {
+					qb.where("user.is_deleted", 0);
 				},
-				modify: function (qb) {
-					qb.where('user.is_deleted', 0);
-				}
 			},
 			certificate: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: Certificate,
-				join:       {
-					from: 'stream.certificate_id',
-					to:   'certificate.id'
+				join: {
+					from: "stream.certificate_id",
+					to: "certificate.id",
 				},
-				modify: function (qb) {
-					qb.where('certificate.is_deleted', 0);
-				}
-			}
+				modify: (qb) => {
+					qb.where("certificate.is_deleted", 0);
+				},
+			},
 		};
 	}
 }
 
-module.exports = Stream;
+export default Stream;
diff --git a/backend/models/token.js b/backend/models/token.js
index 7cf11e03..4edab5bb 100644
--- a/backend/models/token.js
+++ b/backend/models/token.js
@@ -3,17 +3,17 @@
  and then has abilities after that.
  */
 
-const _      = require('lodash');
-const jwt    = require('jsonwebtoken');
-const crypto = require('crypto');
-const config = require('../lib/config');
-const error  = require('../lib/error');
-const logger = require('../logger').global;
-const ALGO   = 'RS256';
+import crypto from "node:crypto";
+import jwt from "jsonwebtoken";
+import _ from "lodash";
+import { getPrivateKey, getPublicKey } from "../lib/config.js";
+import errs from "../lib/error.js";
+import { global as logger } from "../logger.js";
 
-module.exports = function () {
+const ALGO = "RS256";
 
-	let token_data = {};
+export default () => {
+	let tokenData = {};
 
 	const self = {
 		/**
@@ -21,28 +21,26 @@ module.exports = function () {
 		 * @returns {Promise}
 		 */
 		create: (payload) => {
-			if (!config.getPrivateKey()) {
-				logger.error('Private key is empty!');
+			if (!getPrivateKey()) {
+				logger.error("Private key is empty!");
 			}
 			// sign with RSA SHA256
 			const options = {
 				algorithm: ALGO,
-				expiresIn: payload.expiresIn || '1d'
+				expiresIn: payload.expiresIn || "1d",
 			};
 
-			payload.jti = crypto.randomBytes(12)
-				.toString('base64')
-				.substring(-8);
+			payload.jti = crypto.randomBytes(12).toString("base64").substring(-8);
 
 			return new Promise((resolve, reject) => {
-				jwt.sign(payload, config.getPrivateKey(), options, (err, token) => {
+				jwt.sign(payload, getPrivateKey(), options, (err, token) => {
 					if (err) {
 						reject(err);
 					} else {
-						token_data = payload;
+						tokenData = payload;
 						resolve({
-							token:   token,
-							payload: payload
+							token: token,
+							payload: payload,
 						});
 					}
 				});
@@ -53,42 +51,47 @@ module.exports = function () {
 		 * @param {String} token
 		 * @returns {Promise}
 		 */
-		load: function (token) {
-			if (!config.getPublicKey()) {
-				logger.error('Public key is empty!');
+		load: (token) => {
+			if (!getPublicKey()) {
+				logger.error("Public key is empty!");
 			}
 			return new Promise((resolve, reject) => {
 				try {
-					if (!token || token === null || token === 'null') {
-						reject(new error.AuthError('Empty token'));
+					if (!token || token === null || token === "null") {
+						reject(new errs.AuthError("Empty token"));
 					} else {
-						jwt.verify(token, config.getPublicKey(), {ignoreExpiration: false, algorithms: [ALGO]}, (err, result) => {
-							if (err) {
-
-								if (err.name === 'TokenExpiredError') {
-									reject(new error.AuthError('Token has expired', err));
+						jwt.verify(
+							token,
+							getPublicKey(),
+							{ ignoreExpiration: false, algorithms: [ALGO] },
+							(err, result) => {
+								if (err) {
+									if (err.name === "TokenExpiredError") {
+										reject(new errs.AuthError("Token has expired", err));
+									} else {
+										reject(err);
+									}
 								} else {
-									reject(err);
+									tokenData = result;
+
+									// Hack: some tokens out in the wild have a scope of 'all' instead of 'user'.
+									// For 30 days at least, we need to replace 'all' with user.
+									if (
+										typeof tokenData.scope !== "undefined" &&
+										_.indexOf(tokenData.scope, "all") !== -1
+									) {
+										tokenData.scope = ["user"];
+									}
+
+									resolve(tokenData);
 								}
-
-							} else {
-								token_data = result;
-
-								// Hack: some tokens out in the wild have a scope of 'all' instead of 'user'.
-								// For 30 days at least, we need to replace 'all' with user.
-								if ((typeof token_data.scope !== 'undefined' && _.indexOf(token_data.scope, 'all') !== -1)) {
-									token_data.scope = ['user'];
-								}
-
-								resolve(token_data);
-							}
-						});
+							},
+						);
 					}
 				} catch (err) {
 					reject(err);
 				}
 			});
-
 		},
 
 		/**
@@ -97,17 +100,15 @@ module.exports = function () {
 		 * @param   {String}  scope
 		 * @returns {Boolean}
 		 */
-		hasScope: function (scope) {
-			return typeof token_data.scope !== 'undefined' && _.indexOf(token_data.scope, scope) !== -1;
-		},
+		hasScope: (scope) => typeof tokenData.scope !== "undefined" && _.indexOf(tokenData.scope, scope) !== -1,
 
 		/**
 		 * @param  {String}  key
 		 * @return {*}
 		 */
-		get: function (key) {
-			if (typeof token_data[key] !== 'undefined') {
-				return token_data[key];
+		get: (key) => {
+			if (typeof tokenData[key] !== "undefined") {
+				return tokenData[key];
 			}
 
 			return null;
@@ -117,22 +118,22 @@ module.exports = function () {
 		 * @param  {String}  key
 		 * @param  {*}       value
 		 */
-		set: function (key, value) {
-			token_data[key] = value;
+		set: (key, value) => {
+			tokenData[key] = value;
 		},
 
 		/**
-		 * @param   [default_value]
+		 * @param   [defaultValue]
 		 * @returns {Integer}
 		 */
-		getUserId: (default_value) => {
-			const attrs = self.get('attrs');
-			if (attrs && typeof attrs.id !== 'undefined' && attrs.id) {
+		getUserId: (defaultValue) => {
+			const attrs = self.get("attrs");
+			if (attrs?.id) {
 				return attrs.id;
 			}
 
-			return default_value || 0;
-		}
+			return defaultValue || 0;
+		},
 	};
 
 	return self;
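
The token helper is now a default-exported factory whose methods close over a shared `tokenData` object. A hedged usage sketch (assumes the RSA key pair read by `getPrivateKey()`/`getPublicKey()` is already configured; the import path and payload fields are illustrative):

```js
// Assumed usage sketch, not part of the patch.
import TokenFactory from "./backend/models/token.js";

const token = TokenFactory();

// Sign a payload; expiresIn falls back to "1d" when omitted.
const { token: jwtString } = await token.create({
	iss: "api",
	scope: ["user"],
	attrs: { id: 1 },
});

// Verify and load it back into the same helper instance.
const payload = await token.load(jwtString);
console.log(payload.scope, token.hasScope("user"), token.getUserId(0)); // [ 'user' ] true 1
```
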
diff --git a/backend/models/user.js b/backend/models/user.js
index 78fd3dd6..64aed05d 100644
--- a/backend/models/user.js
+++ b/backend/models/user.js
@@ -1,69 +1,65 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
-const db             = require('../db');
-const helpers        = require('../lib/helpers');
-const Model          = require('objection').Model;
-const UserPermission = require('./user_permission');
-const now            = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
+import now from "./now_helper.js";
+import UserPermission from "./user_permission.js";
 
 Model.knex(db);
 
-const boolFields = [
-	'is_deleted',
-	'is_disabled',
-];
+const boolFields = ["is_deleted", "is_disabled"];
 
 class User extends Model {
-	$beforeInsert () {
-		this.created_on  = now();
+	$beforeInsert() {
+		this.created_on = now();
 		this.modified_on = now();
 
 		// Default for roles
-		if (typeof this.roles === 'undefined') {
+		if (typeof this.roles === "undefined") {
 			this.roles = [];
 		}
 	}
 
-	$beforeUpdate () {
+	$beforeUpdate() {
 		this.modified_on = now();
 	}
 
 	$parseDatabaseJson(json) {
-		json = super.$parseDatabaseJson(json);
-		return helpers.convertIntFieldsToBool(json, boolFields);
+		const thisJson = super.$parseDatabaseJson(json);
+		return convertIntFieldsToBool(thisJson, boolFields);
 	}
 
 	$formatDatabaseJson(json) {
-		json = helpers.convertBoolFieldsToInt(json, boolFields);
-		return super.$formatDatabaseJson(json);
+		const thisJson = convertBoolFieldsToInt(json, boolFields);
+		return super.$formatDatabaseJson(thisJson);
 	}
 
-	static get name () {
-		return 'User';
+	static get name() {
+		return "User";
 	}
 
-	static get tableName () {
-		return 'user';
+	static get tableName() {
+		return "user";
 	}
 
-	static get jsonAttributes () {
-		return ['roles'];
+	static get jsonAttributes() {
+		return ["roles"];
 	}
 
-	static get relationMappings () {
+	static get relationMappings() {
 		return {
 			permissions: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: UserPermission,
-				join:       {
-					from: 'user.id',
-					to:   'user_permission.user_id'
-				}
-			}
+				join: {
+					from: "user.id",
+					to: "user_permission.user_id",
+				},
+			},
 		};
 	}
-
 }
 
-module.exports = User;
+export default User;
diff --git a/backend/models/user_permission.js b/backend/models/user_permission.js
index bb87d5dc..49ea2d90 100644
--- a/backend/models/user_permission.js
+++ b/backend/models/user_permission.js
@@ -1,9 +1,9 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
-const db    = require('../db');
-const Model = require('objection').Model;
-const now   = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import now from "./now_helper.js";
 
 Model.knex(db);
 
@@ -26,4 +26,4 @@ class UserPermission extends Model {
 	}
 }
 
-module.exports = UserPermission;
+export default UserPermission;
diff --git a/backend/nodemon.json b/backend/nodemon.json
index 3d6d1342..90223a21 100644
--- a/backend/nodemon.json
+++ b/backend/nodemon.json
@@ -3,5 +3,5 @@
   "ignore": [
     "data"
   ],
-  "ext": "js json ejs"
+  "ext": "js json ejs cjs"
 }
diff --git a/backend/package.json b/backend/package.json
index 30984a33..1218fe3a 100644
--- a/backend/package.json
+++ b/backend/package.json
@@ -1,8 +1,16 @@
 {
 	"name": "nginx-proxy-manager",
-	"version": "0.0.0",
+	"version": "2.0.0",
 	"description": "A beautiful interface for creating Nginx endpoints",
+	"author": "Jamie Curnow ",
+	"license": "MIT",
 	"main": "index.js",
+	"type": "module",
+	"scripts": {
+		"lint": "biome lint",
+		"prettier": "biome format --write .",
+		"validate-schema": "node validate-schema.js"
+	},
 	"dependencies": {
 		"@apidevtools/json-schema-ref-parser": "^11.7.0",
 		"ajv": "^8.17.1",
@@ -18,31 +26,24 @@
 		"knex": "2.4.2",
 		"liquidjs": "10.6.1",
 		"lodash": "^4.17.21",
-		"moment": "^2.29.4",
-		"mysql2": "^3.11.1",
-		"node-rsa": "^1.0.8",
+		"moment": "^2.30.1",
+		"mysql2": "^3.15.3",
+		"node-rsa": "^1.1.1",
 		"objection": "3.0.1",
 		"path": "^0.12.7",
-		"pg": "^8.13.1",
+		"pg": "^8.16.3",
 		"signale": "1.4.0",
-		"sqlite3": "5.1.6",
+		"sqlite3": "^5.1.7",
 		"temp-write": "^4.0.0"
 	},
+	"devDependencies": {
+		"@apidevtools/swagger-parser": "^10.1.0",
+		"@biomejs/biome": "^2.3.1",
+		"chalk": "4.1.2",
+		"nodemon": "^2.0.2"
+	},
 	"signale": {
 		"displayDate": true,
 		"displayTimestamp": true
-	},
-	"author": "Jamie Curnow ",
-	"license": "MIT",
-	"devDependencies": {
-		"@apidevtools/swagger-parser": "^10.1.0",
-		"chalk": "4.1.2",
-		"eslint": "^8.36.0",
-		"eslint-plugin-align-assignments": "^1.1.2",
-		"nodemon": "^2.0.2",
-		"prettier": "^2.0.4"
-	},
-	"scripts": {
-		"validate-schema": "node validate-schema.js"
 	}
 }
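
With `"type": "module"` set, every `.js` file in the backend is treated as an ES module, which is presumably why nodemon now also watches the `cjs` extension: any file that still needs CommonJS has to opt out via `.cjs`. A small illustration (file names are hypothetical):

```js
// legacy-helper.cjs — opts back into CommonJS despite "type": "module".
module.exports = { answer: 42 };
```

```js
// consumer.js — plain ESM; a CommonJS module can still be imported, and its
// module.exports object arrives as the default export.
import legacy from "./legacy-helper.cjs";
console.log(legacy.answer); // 42
```
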
diff --git a/backend/routes/audit-log.js b/backend/routes/audit-log.js
index c68c7b35..7cd232df 100644
--- a/backend/routes/audit-log.js
+++ b/backend/routes/audit-log.js
@@ -1,19 +1,20 @@
-const express          = require('express');
-const validator        = require('../lib/validator');
-const jwtdecode        = require('../lib/express/jwt-decode');
-const internalAuditLog = require('../internal/audit-log');
+import express from "express";
+import internalAuditLog from "../internal/audit-log.js";
+import jwtdecode from "../lib/express/jwt-decode.js";
+import validator from "../lib/validator/index.js";
+import { express as logger } from "../logger.js";
 
-let router = express.Router({
+const router = express.Router({
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 
 /**
  * /api/audit-log
  */
 router
-	.route('/')
+	.route("/")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -24,29 +25,83 @@ router
 	 *
 	 * Retrieve all logs
 	 */
-	.get((req, res, next) => {
-		validator({
-			additionalProperties: false,
-			properties:           {
-				expand: {
-					$ref: 'common#/properties/expand'
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					additionalProperties: false,
+					properties: {
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+						query: {
+							$ref: "common#/properties/query",
+						},
+					},
 				},
-				query: {
-					$ref: 'common#/properties/query'
-				}
-			}
-		}, {
-			expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
-			query:  (typeof req.query.query === 'string' ? req.query.query : null)
-		})
-			.then((data) => {
-				return internalAuditLog.getAll(res.locals.access, data.expand, data.query);
-			})
-			.then((rows) => {
-				res.status(200)
-					.send(rows);
-			})
-			.catch(next);
+				{
+					expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+					query: typeof req.query.query === "string" ? req.query.query : null,
+				},
+			);
+			const rows = await internalAuditLog.getAll(res.locals.access, data.expand, data.query);
+			res.status(200).send(rows);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
-module.exports = router;
+/**
+ * Specific audit log entry
+ *
+ * /api/audit-log/123
+ */
+router
+	.route("/:event_id")
+	.options((_, res) => {
+		res.sendStatus(204);
+	})
+	.all(jwtdecode())
+
+	/**
+	 * GET /api/audit-log/123
+	 *
+	 * Retrieve a specific entry
+	 */
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					required: ["event_id"],
+					additionalProperties: false,
+					properties: {
+						event_id: {
+							$ref: "common#/properties/id",
+						},
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+					},
+				},
+				{
+					event_id: req.params.event_id,
+					expand:
+						typeof req.query.expand === "string"
+							? req.query.expand.split(",")
+							: null,
+				},
+			);
+
+			const item = await internalAuditLog.get(res.locals.access, {
+				id: data.event_id,
+				expand: data.expand,
+			});
+			res.status(200).send(item);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
+	});
+
+export default router;
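
The audit-log router gains a single-entry route alongside the list route. A hypothetical client call (base URL, port and token are assumptions for illustration only):

```js
// Assumed client sketch for the new GET /api/audit-log/:event_id route.
// Requires Node 18+ for the global fetch and a valid JWT in NPM_TOKEN.
const token = process.env.NPM_TOKEN;
const res = await fetch("http://localhost:81/api/audit-log/123?expand=user", {
	headers: { Authorization: `Bearer ${token}` },
});
console.log(res.status, await res.json());
```
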
diff --git a/backend/routes/main.js b/backend/routes/main.js
index b97096d0..7bc4323d 100644
--- a/backend/routes/main.js
+++ b/backend/routes/main.js
@@ -1,51 +1,66 @@
-const express = require('express');
-const pjson   = require('../package.json');
-const error   = require('../lib/error');
+import express from "express";
+import errs from "../lib/error.js";
+import pjson from "../package.json" with { type: "json" };
+import { isSetup } from "../setup.js";
+import auditLogRoutes from "./audit-log.js";
+import accessListsRoutes from "./nginx/access_lists.js";
+import certificatesHostsRoutes from "./nginx/certificates.js";
+import deadHostsRoutes from "./nginx/dead_hosts.js";
+import proxyHostsRoutes from "./nginx/proxy_hosts.js";
+import redirectionHostsRoutes from "./nginx/redirection_hosts.js";
+import streamsRoutes from "./nginx/streams.js";
+import reportsRoutes from "./reports.js";
+import schemaRoutes from "./schema.js";
+import settingsRoutes from "./settings.js";
+import tokensRoutes from "./tokens.js";
+import usersRoutes from "./users.js";
 
-let router = express.Router({
+const router = express.Router({
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 
 /**
  * Health Check
  * GET /api
  */
-router.get('/', (req, res/*, next*/) => {
-	let version = pjson.version.split('-').shift().split('.');
+router.get("/", async (_, res /*, next*/) => {
+	const version = pjson.version.split("-").shift().split(".");
+	const setup = await isSetup();
 
 	res.status(200).send({
-		status:  'OK',
+		status: "OK",
+		setup,
 		version: {
-			major:    parseInt(version.shift(), 10),
-			minor:    parseInt(version.shift(), 10),
-			revision: parseInt(version.shift(), 10)
-		}
+			major: Number.parseInt(version.shift(), 10),
+			minor: Number.parseInt(version.shift(), 10),
+			revision: Number.parseInt(version.shift(), 10),
+		},
 	});
 });
 
-router.use('/schema', require('./schema'));
-router.use('/tokens', require('./tokens'));
-router.use('/users', require('./users'));
-router.use('/audit-log', require('./audit-log'));
-router.use('/reports', require('./reports'));
-router.use('/settings', require('./settings'));
-router.use('/nginx/proxy-hosts', require('./nginx/proxy_hosts'));
-router.use('/nginx/redirection-hosts', require('./nginx/redirection_hosts'));
-router.use('/nginx/dead-hosts', require('./nginx/dead_hosts'));
-router.use('/nginx/streams', require('./nginx/streams'));
-router.use('/nginx/access-lists', require('./nginx/access_lists'));
-router.use('/nginx/certificates', require('./nginx/certificates'));
+router.use("/schema", schemaRoutes);
+router.use("/tokens", tokensRoutes);
+router.use("/users", usersRoutes);
+router.use("/audit-log", auditLogRoutes);
+router.use("/reports", reportsRoutes);
+router.use("/settings", settingsRoutes);
+router.use("/nginx/proxy-hosts", proxyHostsRoutes);
+router.use("/nginx/redirection-hosts", redirectionHostsRoutes);
+router.use("/nginx/dead-hosts", deadHostsRoutes);
+router.use("/nginx/streams", streamsRoutes);
+router.use("/nginx/access-lists", accessListsRoutes);
+router.use("/nginx/certificates", certificatesHostsRoutes);
 
 /**
  * API 404 for all other routes
  *
  * ALL /api/*
  */
-router.all(/(.+)/, function (req, _, next) {
-	req.params.page = req.params['0'];
-	next(new error.ItemNotFoundError(req.params.page));
+router.all(/(.+)/, (req, _, next) => {
+	req.params.page = req.params["0"];
+	next(new errs.ItemNotFoundError(req.params.page));
 });
 
-module.exports = router;
+export default router;
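
The health check handler is now async so it can report whether initial setup has completed. A sketch of what a client sees (path and field values are illustrative assumptions):

```js
// Assumed client sketch for the reworked health check.
const res = await fetch("http://localhost:81/api/");
const health = await res.json();
// e.g. { status: "OK", setup: true, version: { major: 2, minor: 13, revision: 0 } }
console.log(health.setup, health.version);
```
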
diff --git a/backend/routes/nginx/access_lists.js b/backend/routes/nginx/access_lists.js
index 38375127..a1c75f82 100644
--- a/backend/routes/nginx/access_lists.js
+++ b/backend/routes/nginx/access_lists.js
@@ -1,22 +1,23 @@
-const express            = require('express');
-const validator          = require('../../lib/validator');
-const jwtdecode          = require('../../lib/express/jwt-decode');
-const apiValidator       = require('../../lib/validator/api');
-const internalAccessList = require('../../internal/access-list');
-const schema             = require('../../schema');
+import express from "express";
+import internalAccessList from "../../internal/access-list.js";
+import jwtdecode from "../../lib/express/jwt-decode.js";
+import apiValidator from "../../lib/validator/api.js";
+import validator from "../../lib/validator/index.js";
+import { express as logger } from "../../logger.js";
+import { getValidationSchema } from "../../schema/index.js";
 
-let router = express.Router({
+const router = express.Router({
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 
 /**
  * /api/nginx/access-lists
  */
 router
-	.route('/')
-	.options((req, res) => {
+	.route("/")
+	.options((_, res) => {
 		res.sendStatus(204);
 	})
 	.all(jwtdecode())
@@ -26,29 +27,31 @@ router
 	 *
 	 * Retrieve all access-lists
 	 */
-	.get((req, res, next) => {
-		validator({
-			additionalProperties: false,
-			properties:           {
-				expand: {
-					$ref: 'common#/properties/expand'
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					additionalProperties: false,
+					properties: {
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+						query: {
+							$ref: "common#/properties/query",
+						},
+					},
 				},
-				query: {
-					$ref: 'common#/properties/query'
-				}
-			}
-		}, {
-			expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
-			query:  (typeof req.query.query === 'string' ? req.query.query : null)
-		})
-			.then((data) => {
-				return internalAccessList.getAll(res.locals.access, data.expand, data.query);
-			})
-			.then((rows) => {
-				res.status(200)
-					.send(rows);
-			})
-			.catch(next);
+				{
+					expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+					query: typeof req.query.query === "string" ? req.query.query : null,
+				},
+			);
+			const rows = await internalAccessList.getAll(res.locals.access, data.expand, data.query);
+			res.status(200).send(rows);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 
 	/**
@@ -56,16 +59,15 @@ router
 	 *
 	 * Create a new access-list
 	 */
-	.post((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/access-lists', 'post'), req.body)
-			.then((payload) => {
-				return internalAccessList.create(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(201)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(getValidationSchema("/nginx/access-lists", "post"), req.body);
+			const result = await internalAccessList.create(res.locals.access, payload);
+			res.status(201).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
 /**
@@ -74,7 +76,7 @@ router
  * /api/nginx/access-lists/123
  */
 router
-	.route('/:list_id')
+	.route("/:list_id")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -85,33 +87,35 @@ router
 	 *
 	 * Retrieve a specific access-list
 	 */
-	.get((req, res, next) => {
-		validator({
-			required:             ['list_id'],
-			additionalProperties: false,
-			properties:           {
-				list_id: {
-					$ref: 'common#/properties/id'
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					required: ["list_id"],
+					additionalProperties: false,
+					properties: {
+						list_id: {
+							$ref: "common#/properties/id",
+						},
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+					},
 				},
-				expand: {
-					$ref: 'common#/properties/expand'
-				}
-			}
-		}, {
-			list_id: req.params.list_id,
-			expand:  (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
-		})
-			.then((data) => {
-				return internalAccessList.get(res.locals.access, {
-					id:     parseInt(data.list_id, 10),
-					expand: data.expand
-				});
-			})
-			.then((row) => {
-				res.status(200)
-					.send(row);
-			})
-			.catch(next);
+				{
+					list_id: req.params.list_id,
+					expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+				},
+			);
+			const row = await internalAccessList.get(res.locals.access, {
+				id: Number.parseInt(data.list_id, 10),
+				expand: data.expand,
+			});
+			res.status(200).send(row);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 
 	/**
@@ -119,17 +123,16 @@ router
 	 *
-	 * Update and existing access-list
+	 * Update an existing access-list
 	 */
-	.put((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/access-lists/{listID}', 'put'), req.body)
-			.then((payload) => {
-				payload.id = parseInt(req.params.list_id, 10);
-				return internalAccessList.update(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.put(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(getValidationSchema("/nginx/access-lists/{listID}", "put"), req.body);
+			payload.id = Number.parseInt(req.params.list_id, 10);
+			const result = await internalAccessList.update(res.locals.access, payload);
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 
 	/**
@@ -137,13 +140,16 @@ router
 	 *
-	 * Delete and existing access-list
+	 * Delete an existing access-list
 	 */
-	.delete((req, res, next) => {
-		internalAccessList.delete(res.locals.access, {id: parseInt(req.params.list_id, 10)})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.delete(async (req, res, next) => {
+		try {
+			const result = await internalAccessList.delete(res.locals.access, {
+				id: Number.parseInt(req.params.list_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
-module.exports = router;
+export default router;
diff --git a/backend/routes/nginx/certificates.js b/backend/routes/nginx/certificates.js
index 4b10d137..0a2c4b1b 100644
--- a/backend/routes/nginx/certificates.js
+++ b/backend/routes/nginx/certificates.js
@@ -1,22 +1,24 @@
-const express             = require('express');
-const error               = require('../../lib/error');
-const validator           = require('../../lib/validator');
-const jwtdecode           = require('../../lib/express/jwt-decode');
-const apiValidator        = require('../../lib/validator/api');
-const internalCertificate = require('../../internal/certificate');
-const schema              = require('../../schema');
+import express from "express";
+import dnsPlugins from "../../certbot/dns-plugins.json" with { type: "json" };
+import internalCertificate from "../../internal/certificate.js";
+import errs from "../../lib/error.js";
+import jwtdecode from "../../lib/express/jwt-decode.js";
+import apiValidator from "../../lib/validator/api.js";
+import validator from "../../lib/validator/index.js";
+import { express as logger } from "../../logger.js";
+import { getValidationSchema } from "../../schema/index.js";
 
 const router = express.Router({
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 
 /**
  * /api/nginx/certificates
  */
 router
-	.route('/')
+	.route("/")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -27,29 +29,38 @@ router
 	 *
 	 * Retrieve all certificates
 	 */
-	.get((req, res, next) => {
-		validator({
-			additionalProperties: false,
-			properties:           {
-				expand: {
-					$ref: 'common#/properties/expand'
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					additionalProperties: false,
+					properties: {
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+						query: {
+							$ref: "common#/properties/query",
+						},
+					},
 				},
-				query: {
-					$ref: 'common#/properties/query'
-				}
-			}
-		}, {
-			expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
-			query:  (typeof req.query.query === 'string' ? req.query.query : null)
-		})
-			.then((data) => {
-				return internalCertificate.getAll(res.locals.access, data.expand, data.query);
-			})
-			.then((rows) => {
-				res.status(200)
-					.send(rows);
-			})
-			.catch(next);
+				{
+					expand:
+						typeof req.query.expand === "string"
+							? req.query.expand.split(",")
+							: null,
+					query: typeof req.query.query === "string" ? req.query.query : null,
+				},
+			);
+			const rows = await internalCertificate.getAll(
+				res.locals.access,
+				data.expand,
+				data.query,
+			);
+			res.status(200).send(rows);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 
 	/**
@@ -57,17 +68,56 @@ router
 	 *
 	 * Create a new certificate
 	 */
-	.post((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/certificates', 'post'), req.body)
-			.then((payload) => {
-				req.setTimeout(900000); // 15 minutes timeout
-				return internalCertificate.create(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(201)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(
+				getValidationSchema("/nginx/certificates", "post"),
+				req.body,
+			);
+			req.setTimeout(900000); // 15 minutes timeout
+			const result = await internalCertificate.create(
+				res.locals.access,
+				payload,
+			);
+			res.status(201).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
+	});
+
+/**
+ * /api/nginx/certificates/dns-providers
+ */
+router
+	.route("/dns-providers")
+	.options((_, res) => {
+		res.sendStatus(204);
+	})
+	.all(jwtdecode())
+
+	/**
+	 * GET /api/nginx/certificates/dns-providers
+	 *
+	 * Get list of all supported DNS providers
+	 */
+	.get(async (req, res, next) => {
+		try {
+			if (!res.locals.access.token.getUserId()) {
+				throw new errs.PermissionError("Login required");
+			}
+			const clean = Object.keys(dnsPlugins).map((key) => ({
+				id: key,
+				name: dnsPlugins[key].name,
+				credentials: dnsPlugins[key].credentials,
+			}));
+
+			clean.sort((a, b) => a.name.localeCompare(b.name));
+			res.status(200).send(clean);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
 /**
@@ -76,180 +126,34 @@ router
  * /api/nginx/certificates/test-http
  */
 router
-	.route('/test-http')
+	.route("/test-http")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
 	.all(jwtdecode())
 
 	/**
-	 * GET /api/nginx/certificates/test-http
+	 * POST /api/nginx/certificates/test-http
 	 *
 	 * Test HTTP challenge for domains
 	 */
-	.get((req, res, next) => {
-		if (req.query.domains === undefined) {
-			next(new error.ValidationError('Domains are required as query parameters'));
-			return;
+	.post(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(
+				getValidationSchema("/nginx/certificates/test-http", "post"),
+				req.body,
+			);
+			req.setTimeout(60000); // 1 minute timeout
+
+			const result = await internalCertificate.testHttpsChallenge(
+				res.locals.access,
+				payload,
+			);
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
 		}
-
-		internalCertificate.testHttpsChallenge(res.locals.access, JSON.parse(req.query.domains))
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
-	});
-
-/**
- * Specific certificate
- *
- * /api/nginx/certificates/123
- */
-router
-	.route('/:certificate_id')
-	.options((_, res) => {
-		res.sendStatus(204);
-	})
-	.all(jwtdecode())
-
-	/**
-	 * GET /api/nginx/certificates/123
-	 *
-	 * Retrieve a specific certificate
-	 */
-	.get((req, res, next) => {
-		validator({
-			required:             ['certificate_id'],
-			additionalProperties: false,
-			properties:           {
-				certificate_id: {
-					$ref: 'common#/properties/id'
-				},
-				expand: {
-					$ref: 'common#/properties/expand'
-				}
-			}
-		}, {
-			certificate_id: req.params.certificate_id,
-			expand:         (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
-		})
-			.then((data) => {
-				return internalCertificate.get(res.locals.access, {
-					id:     parseInt(data.certificate_id, 10),
-					expand: data.expand
-				});
-			})
-			.then((row) => {
-				res.status(200)
-					.send(row);
-			})
-			.catch(next);
-	})
-
-	/**
-	 * DELETE /api/nginx/certificates/123
-	 *
-	 * Update and existing certificate
-	 */
-	.delete((req, res, next) => {
-		internalCertificate.delete(res.locals.access, {id: parseInt(req.params.certificate_id, 10)})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
-	});
-
-/**
- * Upload Certs
- *
- * /api/nginx/certificates/123/upload
- */
-router
-	.route('/:certificate_id/upload')
-	.options((_, res) => {
-		res.sendStatus(204);
-	})
-	.all(jwtdecode())
-
-	/**
-	 * POST /api/nginx/certificates/123/upload
-	 *
-	 * Upload certificates
-	 */
-	.post((req, res, next) => {
-		if (!req.files) {
-			res.status(400)
-				.send({error: 'No files were uploaded'});
-		} else {
-			internalCertificate.upload(res.locals.access, {
-				id:    parseInt(req.params.certificate_id, 10),
-				files: req.files
-			})
-				.then((result) => {
-					res.status(200)
-						.send(result);
-				})
-				.catch(next);
-		}
-	});
-
-/**
- * Renew LE Certs
- *
- * /api/nginx/certificates/123/renew
- */
-router
-	.route('/:certificate_id/renew')
-	.options((_, res) => {
-		res.sendStatus(204);
-	})
-	.all(jwtdecode())
-
-	/**
-	 * POST /api/nginx/certificates/123/renew
-	 *
-	 * Renew certificate
-	 */
-	.post((req, res, next) => {
-		req.setTimeout(900000); // 15 minutes timeout
-		internalCertificate.renew(res.locals.access, {
-			id: parseInt(req.params.certificate_id, 10)
-		})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
-	});
-
-/**
- * Download LE Certs
- *
- * /api/nginx/certificates/123/download
- */
-router
-	.route('/:certificate_id/download')
-	.options((_req, res) => {
-		res.sendStatus(204);
-	})
-	.all(jwtdecode())
-
-	/**
-	 * GET /api/nginx/certificates/123/download
-	 *
-	 * Renew certificate
-	 */
-	.get((req, res, next) => {
-		internalCertificate.download(res.locals.access, {
-			id: parseInt(req.params.certificate_id, 10)
-		})
-			.then((result) => {
-				res.status(200)
-					.download(result.fileName);
-			})
-			.catch(next);
 	});
 
 /**
@@ -258,7 +162,7 @@ router
  * /api/nginx/certificates/validate
  */
 router
-	.route('/validate')
+	.route("/validate")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -269,20 +173,183 @@ router
 	 *
 	 * Validate certificates
 	 */
-	.post((req, res, next) => {
+	.post(async (req, res, next) => {
 		if (!req.files) {
-			res.status(400)
-				.send({error: 'No files were uploaded'});
-		} else {
-			internalCertificate.validate({
-				files: req.files
-			})
-				.then((result) => {
-					res.status(200)
-						.send(result);
-				})
-				.catch(next);
+			res.status(400).send({ error: "No files were uploaded" });
+			return;
+		}
+
+		try {
+			const result = await internalCertificate.validate({
+				files: req.files,
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
 		}
 	});
 
-module.exports = router;
+/**
+ * Specific certificate
+ *
+ * /api/nginx/certificates/123
+ */
+router
+	.route("/:certificate_id")
+	.options((_, res) => {
+		res.sendStatus(204);
+	})
+	.all(jwtdecode())
+
+	/**
+	 * GET /api/nginx/certificates/123
+	 *
+	 * Retrieve a specific certificate
+	 */
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					required: ["certificate_id"],
+					additionalProperties: false,
+					properties: {
+						certificate_id: {
+							$ref: "common#/properties/id",
+						},
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+					},
+				},
+				{
+					certificate_id: req.params.certificate_id,
+					expand:
+						typeof req.query.expand === "string"
+							? req.query.expand.split(",")
+							: null,
+				},
+			);
+			const row = await internalCertificate.get(res.locals.access, {
+				id: Number.parseInt(data.certificate_id, 10),
+				expand: data.expand,
+			});
+			res.status(200).send(row);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
+	})
+
+	/**
+	 * DELETE /api/nginx/certificates/123
+	 *
+	 * Update and existing certificate
+	 * Delete an existing certificate
+	.delete(async (req, res, next) => {
+		try {
+			const result = await internalCertificate.delete(res.locals.access, {
+				id: Number.parseInt(req.params.certificate_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
+	});
+
+/**
+ * Upload Certs
+ *
+ * /api/nginx/certificates/123/upload
+ */
+router
+	.route("/:certificate_id/upload")
+	.options((_, res) => {
+		res.sendStatus(204);
+	})
+	.all(jwtdecode())
+
+	/**
+	 * POST /api/nginx/certificates/123/upload
+	 *
+	 * Upload certificates
+	 */
+	.post(async (req, res, next) => {
+		if (!req.files) {
+			res.status(400).send({ error: "No files were uploaded" });
+			return;
+		}
+
+		try {
+			const result = await internalCertificate.upload(res.locals.access, {
+				id: Number.parseInt(req.params.certificate_id, 10),
+				files: req.files,
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
+	});
+
+/**
+ * Renew LE Certs
+ *
+ * /api/nginx/certificates/123/renew
+ */
+router
+	.route("/:certificate_id/renew")
+	.options((_, res) => {
+		res.sendStatus(204);
+	})
+	.all(jwtdecode())
+
+	/**
+	 * POST /api/nginx/certificates/123/renew
+	 *
+	 * Renew certificate
+	 */
+	.post(async (req, res, next) => {
+		req.setTimeout(900000); // 15 minutes timeout
+		try {
+			const result = await internalCertificate.renew(res.locals.access, {
+				id: Number.parseInt(req.params.certificate_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
+	});
+
+/**
+ * Download LE Certs
+ *
+ * /api/nginx/certificates/123/download
+ */
+router
+	.route("/:certificate_id/download")
+	.options((_req, res) => {
+		res.sendStatus(204);
+	})
+	.all(jwtdecode())
+
+	/**
+	 * GET /api/nginx/certificates/123/download
+	 *
+	 * Download certificate
+	 */
+	.get(async (req, res, next) => {
+		try {
+			const result = await internalCertificate.download(res.locals.access, {
+				id: Number.parseInt(req.params.certificate_id, 10),
+			});
+			res.status(200).download(result.fileName);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
+	});
+
+export default router;
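
Two behavioural changes in this router are easy to miss: a new `/dns-providers` listing endpoint, and `/test-http` switching from a GET with a `?domains=` query string to a schema-validated POST. A hypothetical client sketch (base URL, token and the exact body shape are assumptions; the authoritative shape is the `/nginx/certificates/test-http` schema):

```js
// Assumed client sketch, not part of the patch.
const token = process.env.NPM_TOKEN;
const headers = { Authorization: `Bearer ${token}`, "Content-Type": "application/json" };

// New endpoint: supported DNS providers for DNS-01 challenges, sorted by name.
const providers = await (
	await fetch("http://localhost:81/api/nginx/certificates/dns-providers", { headers })
).json();

// test-http is now a POST with a JSON body rather than a GET query parameter.
const test = await fetch("http://localhost:81/api/nginx/certificates/test-http", {
	method: "POST",
	headers,
	body: JSON.stringify({ domains: ["example.com"] }), // body shape assumed
});

console.log(providers.length, await test.json());
```
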
diff --git a/backend/routes/nginx/dead_hosts.js b/backend/routes/nginx/dead_hosts.js
index 83b37765..5323c731 100644
--- a/backend/routes/nginx/dead_hosts.js
+++ b/backend/routes/nginx/dead_hosts.js
@@ -1,21 +1,22 @@
-const express          = require('express');
-const validator        = require('../../lib/validator');
-const jwtdecode        = require('../../lib/express/jwt-decode');
-const apiValidator     = require('../../lib/validator/api');
-const internalDeadHost = require('../../internal/dead-host');
-const schema           = require('../../schema');
+import express from "express";
+import internalDeadHost from "../../internal/dead-host.js";
+import jwtdecode from "../../lib/express/jwt-decode.js";
+import apiValidator from "../../lib/validator/api.js";
+import validator from "../../lib/validator/index.js";
+import { express as logger } from "../../logger.js";
+import { getValidationSchema } from "../../schema/index.js";
 
-let router = express.Router({
+const router = express.Router({
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 
 /**
  * /api/nginx/dead-hosts
  */
 router
-	.route('/')
+	.route("/")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -26,29 +27,31 @@ router
 	 *
 	 * Retrieve all dead-hosts
 	 */
-	.get((req, res, next) => {
-		validator({
-			additionalProperties: false,
-			properties:           {
-				expand: {
-					$ref: 'common#/properties/expand'
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					additionalProperties: false,
+					properties: {
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+						query: {
+							$ref: "common#/properties/query",
+						},
+					},
 				},
-				query: {
-					$ref: 'common#/properties/query'
-				}
-			}
-		}, {
-			expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
-			query:  (typeof req.query.query === 'string' ? req.query.query : null)
-		})
-			.then((data) => {
-				return internalDeadHost.getAll(res.locals.access, data.expand, data.query);
-			})
-			.then((rows) => {
-				res.status(200)
-					.send(rows);
-			})
-			.catch(next);
+				{
+					expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+					query: typeof req.query.query === "string" ? req.query.query : null,
+				},
+			);
+			const rows = await internalDeadHost.getAll(res.locals.access, data.expand, data.query);
+			res.status(200).send(rows);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 
 	/**
@@ -56,16 +59,15 @@ router
 	 *
 	 * Create a new dead-host
 	 */
-	.post((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/dead-hosts', 'post'), req.body)
-			.then((payload) => {
-				return internalDeadHost.create(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(201)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(getValidationSchema("/nginx/dead-hosts", "post"), req.body);
+			const result = await internalDeadHost.create(res.locals.access, payload);
+			res.status(201).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
 /**
@@ -74,8 +76,8 @@ router
  * /api/nginx/dead-hosts/123
  */
 router
-	.route('/:host_id')
-	.options((req, res) => {
+	.route("/:host_id")
+	.options((_, res) => {
 		res.sendStatus(204);
 	})
 	.all(jwtdecode())
@@ -85,65 +87,69 @@ router
 	 *
 	 * Retrieve a specific dead-host
 	 */
-	.get((req, res, next) => {
-		validator({
-			required:             ['host_id'],
-			additionalProperties: false,
-			properties:           {
-				host_id: {
-					$ref: 'common#/properties/id'
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					required: ["host_id"],
+					additionalProperties: false,
+					properties: {
+						host_id: {
+							$ref: "common#/properties/id",
+						},
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+					},
 				},
-				expand: {
-					$ref: 'common#/properties/expand'
-				}
-			}
-		}, {
-			host_id: req.params.host_id,
-			expand:  (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
-		})
-			.then((data) => {
-				return internalDeadHost.get(res.locals.access, {
-					id:     parseInt(data.host_id, 10),
-					expand: data.expand
-				});
-			})
-			.then((row) => {
-				res.status(200)
-					.send(row);
-			})
-			.catch(next);
+				{
+					host_id: req.params.host_id,
+					expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+				},
+			);
+			const row = await internalDeadHost.get(res.locals.access, {
+				id: Number.parseInt(data.host_id, 10),
+				expand: data.expand,
+			});
+			res.status(200).send(row);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 
 	/**
 	 * PUT /api/nginx/dead-hosts/123
 	 *
-	 * Update and existing dead-host
+	 * Update an existing dead-host
 	 */
-	.put((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/dead-hosts/{hostID}', 'put'), req.body)
-			.then((payload) => {
-				payload.id = parseInt(req.params.host_id, 10);
-				return internalDeadHost.update(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.put(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(getValidationSchema("/nginx/dead-hosts/{hostID}", "put"), req.body);
+			payload.id = Number.parseInt(req.params.host_id, 10);
+			const result = await internalDeadHost.update(res.locals.access, payload);
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 
 	/**
 	 * DELETE /api/nginx/dead-hosts/123
 	 *
-	 * Update and existing dead-host
+	 * Delete a dead-host
 	 */
-	.delete((req, res, next) => {
-		internalDeadHost.delete(res.locals.access, {id: parseInt(req.params.host_id, 10)})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.delete(async (req, res, next) => {
+		try {
+			const result = await internalDeadHost.delete(res.locals.access, {
+				id: Number.parseInt(req.params.host_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
 /**
@@ -152,7 +158,7 @@ router
  * /api/nginx/dead-hosts/123/enable
  */
 router
-	.route('/:host_id/enable')
+	.route("/:host_id/enable")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -161,13 +167,16 @@ router
 	/**
 	 * POST /api/nginx/dead-hosts/123/enable
 	 */
-	.post((req, res, next) => {
-		internalDeadHost.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		try {
+			const result = await internalDeadHost.enable(res.locals.access, {
+				id: Number.parseInt(req.params.host_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
 /**
@@ -176,7 +185,7 @@ router
  * /api/nginx/dead-hosts/123/disable
  */
 router
-	.route('/:host_id/disable')
+	.route("/:host_id/disable")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -186,12 +195,13 @@ router
 	 * POST /api/nginx/dead-hosts/123/disable
 	 */
-	.post((req, res, next) => {
+	.post(async (req, res, next) => {
-		internalDeadHost.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+		try {
+			const result = await internalDeadHost.disable(res.locals.access, { id: Number.parseInt(req.params.host_id, 10) });
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
-module.exports = router;
+export default router;
diff --git a/backend/routes/nginx/proxy_hosts.js b/backend/routes/nginx/proxy_hosts.js
index 3be4582a..5b2a4178 100644
--- a/backend/routes/nginx/proxy_hosts.js
+++ b/backend/routes/nginx/proxy_hosts.js
@@ -1,22 +1,23 @@
-const express           = require('express');
-const validator         = require('../../lib/validator');
-const jwtdecode         = require('../../lib/express/jwt-decode');
-const apiValidator      = require('../../lib/validator/api');
-const internalProxyHost = require('../../internal/proxy-host');
-const schema            = require('../../schema');
+import express from "express";
+import internalProxyHost from "../../internal/proxy-host.js";
+import jwtdecode from "../../lib/express/jwt-decode.js";
+import apiValidator from "../../lib/validator/api.js";
+import validator from "../../lib/validator/index.js";
+import { express as logger } from "../../logger.js";
+import { getValidationSchema } from "../../schema/index.js";
 
-let router = express.Router({
+const router = express.Router({
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 
 /**
  * /api/nginx/proxy-hosts
  */
 router
-	.route('/')
-	.options((req, res) => {
+	.route("/")
+	.options((_, res) => {
 		res.sendStatus(204);
 	})
 	.all(jwtdecode())
@@ -26,29 +27,31 @@ router
 	 *
 	 * Retrieve all proxy-hosts
 	 */
-	.get((req, res, next) => {
-		validator({
-			additionalProperties: false,
-			properties:           {
-				expand: {
-					$ref: 'common#/properties/expand'
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					additionalProperties: false,
+					properties: {
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+						query: {
+							$ref: "common#/properties/query",
+						},
+					},
 				},
-				query: {
-					$ref: 'common#/properties/query'
-				}
-			}
-		}, {
-			expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
-			query:  (typeof req.query.query === 'string' ? req.query.query : null)
-		})
-			.then((data) => {
-				return internalProxyHost.getAll(res.locals.access, data.expand, data.query);
-			})
-			.then((rows) => {
-				res.status(200)
-					.send(rows);
-			})
-			.catch(next);
+				{
+					expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+					query: typeof req.query.query === "string" ? req.query.query : null,
+				},
+			);
+			const rows = await internalProxyHost.getAll(res.locals.access, data.expand, data.query);
+			res.status(200).send(rows);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 
 	/**
@@ -56,16 +59,15 @@ router
 	 *
 	 * Create a new proxy-host
 	 */
-	.post((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/proxy-hosts', 'post'), req.body)
-			.then((payload) => {
-				return internalProxyHost.create(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(201)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(getValidationSchema("/nginx/proxy-hosts", "post"), req.body);
+			const result = await internalProxyHost.create(res.locals.access, payload);
+			res.status(201).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err} ${JSON.stringify(err.debug, null, 2)}`);
+			next(err);
+		}
 	});
 
 /**
@@ -74,8 +76,8 @@ router
  * /api/nginx/proxy-hosts/123
  */
 router
-	.route('/:host_id')
-	.options((req, res) => {
+	.route("/:host_id")
+	.options((_, res) => {
 		res.sendStatus(204);
 	})
 	.all(jwtdecode())
@@ -85,33 +87,35 @@ router
 	 *
 	 * Retrieve a specific proxy-host
 	 */
-	.get((req, res, next) => {
-		validator({
-			required:             ['host_id'],
-			additionalProperties: false,
-			properties:           {
-				host_id: {
-					$ref: 'common#/properties/id'
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					required: ["host_id"],
+					additionalProperties: false,
+					properties: {
+						host_id: {
+							$ref: "common#/properties/id",
+						},
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+					},
 				},
-				expand: {
-					$ref: 'common#/properties/expand'
-				}
-			}
-		}, {
-			host_id: req.params.host_id,
-			expand:  (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
-		})
-			.then((data) => {
-				return internalProxyHost.get(res.locals.access, {
-					id:     parseInt(data.host_id, 10),
-					expand: data.expand
-				});
-			})
-			.then((row) => {
-				res.status(200)
-					.send(row);
-			})
-			.catch(next);
+				{
+					host_id: req.params.host_id,
+					expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+				},
+			);
+			const row = await internalProxyHost.get(res.locals.access, {
+				id: Number.parseInt(data.host_id, 10),
+				expand: data.expand,
+			});
+			res.status(200).send(row);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 
 	/**
@@ -119,17 +123,16 @@ router
 	 *
-	 * Update and existing proxy-host
+	 * Update an existing proxy-host
 	 */
-	.put((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/proxy-hosts/{hostID}', 'put'), req.body)
-			.then((payload) => {
-				payload.id = parseInt(req.params.host_id, 10);
-				return internalProxyHost.update(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.put(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(getValidationSchema("/nginx/proxy-hosts/{hostID}", "put"), req.body);
+			payload.id = Number.parseInt(req.params.host_id, 10);
+			const result = await internalProxyHost.update(res.locals.access, payload);
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 
 	/**
@@ -137,13 +140,16 @@ router
 	 *
 	 * Update and existing proxy-host
 	 */
-	.delete((req, res, next) => {
-		internalProxyHost.delete(res.locals.access, {id: parseInt(req.params.host_id, 10)})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.delete(async (req, res, next) => {
+		try {
+			const result = await internalProxyHost.delete(res.locals.access, {
+				id: Number.parseInt(req.params.host_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
 /**
@@ -152,7 +158,7 @@ router
  * /api/nginx/proxy-hosts/123/enable
  */
 router
-	.route('/:host_id/enable')
+	.route("/:host_id/enable")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -161,13 +167,16 @@ router
 	/**
 	 * POST /api/nginx/proxy-hosts/123/enable
 	 */
-	.post((req, res, next) => {
-		internalProxyHost.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		try {
+			const result = await internalProxyHost.enable(res.locals.access, {
+				id: Number.parseInt(req.params.host_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
 /**
@@ -176,7 +185,7 @@ router
  * /api/nginx/proxy-hosts/123/disable
  */
 router
-	.route('/:host_id/disable')
+	.route("/:host_id/disable")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -185,13 +194,16 @@ router
 	/**
 	 * POST /api/nginx/proxy-hosts/123/disable
 	 */
-	.post((req, res, next) => {
-		internalProxyHost.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		try {
+			const result = await internalProxyHost.disable(res.locals.access, {
+				id: Number.parseInt(req.params.host_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
-module.exports = router;
+export default router;
diff --git a/backend/routes/nginx/redirection_hosts.js b/backend/routes/nginx/redirection_hosts.js
index a46feb84..bbeccf6e 100644
--- a/backend/routes/nginx/redirection_hosts.js
+++ b/backend/routes/nginx/redirection_hosts.js
@@ -1,22 +1,23 @@
-const express                 = require('express');
-const validator               = require('../../lib/validator');
-const jwtdecode               = require('../../lib/express/jwt-decode');
-const apiValidator            = require('../../lib/validator/api');
-const internalRedirectionHost = require('../../internal/redirection-host');
-const schema                  = require('../../schema');
+import express from "express";
+import internalRedirectionHost from "../../internal/redirection-host.js";
+import jwtdecode from "../../lib/express/jwt-decode.js";
+import apiValidator from "../../lib/validator/api.js";
+import validator from "../../lib/validator/index.js";
+import { express as logger } from "../../logger.js";
+import { getValidationSchema } from "../../schema/index.js";
 
-let router = express.Router({
+const router = express.Router({
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 
 /**
  * /api/nginx/redirection-hosts
  */
 router
-	.route('/')
-	.options((req, res) => {
+	.route("/")
+	.options((_, res) => {
 		res.sendStatus(204);
 	})
 	.all(jwtdecode())
@@ -26,29 +27,31 @@ router
 	 *
 	 * Retrieve all redirection-hosts
 	 */
-	.get((req, res, next) => {
-		validator({
-			additionalProperties: false,
-			properties:           {
-				expand: {
-					$ref: 'common#/properties/expand'
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					additionalProperties: false,
+					properties: {
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+						query: {
+							$ref: "common#/properties/query",
+						},
+					},
 				},
-				query: {
-					$ref: 'common#/properties/query'
-				}
-			}
-		}, {
-			expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
-			query:  (typeof req.query.query === 'string' ? req.query.query : null)
-		})
-			.then((data) => {
-				return internalRedirectionHost.getAll(res.locals.access, data.expand, data.query);
-			})
-			.then((rows) => {
-				res.status(200)
-					.send(rows);
-			})
-			.catch(next);
+				{
+					expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+					query: typeof req.query.query === "string" ? req.query.query : null,
+				},
+			);
+			const rows = await internalRedirectionHost.getAll(res.locals.access, data.expand, data.query);
+			res.status(200).send(rows);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 
 	/**
@@ -56,16 +59,15 @@ router
 	 *
 	 * Create a new redirection-host
 	 */
-	.post((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/redirection-hosts', 'post'), req.body)
-			.then((payload) => {
-				return internalRedirectionHost.create(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(201)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(getValidationSchema("/nginx/redirection-hosts", "post"), req.body);
+			const result = await internalRedirectionHost.create(res.locals.access, payload);
+			res.status(201).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
 /**
@@ -74,8 +76,8 @@ router
  * /api/nginx/redirection-hosts/123
  */
 router
-	.route('/:host_id')
-	.options((req, res) => {
+	.route("/:host_id")
+	.options((_, res) => {
 		res.sendStatus(204);
 	})
 	.all(jwtdecode())
@@ -85,33 +87,35 @@ router
 	 *
 	 * Retrieve a specific redirection-host
 	 */
-	.get((req, res, next) => {
-		validator({
-			required:             ['host_id'],
-			additionalProperties: false,
-			properties:           {
-				host_id: {
-					$ref: 'common#/properties/id'
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					required: ["host_id"],
+					additionalProperties: false,
+					properties: {
+						host_id: {
+							$ref: "common#/properties/id",
+						},
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+					},
 				},
-				expand: {
-					$ref: 'common#/properties/expand'
-				}
-			}
-		}, {
-			host_id: req.params.host_id,
-			expand:  (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
-		})
-			.then((data) => {
-				return internalRedirectionHost.get(res.locals.access, {
-					id:     parseInt(data.host_id, 10),
-					expand: data.expand
-				});
-			})
-			.then((row) => {
-				res.status(200)
-					.send(row);
-			})
-			.catch(next);
+				{
+					host_id: req.params.host_id,
+					expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+				},
+			);
+			const row = await internalRedirectionHost.get(res.locals.access, {
+				id: Number.parseInt(data.host_id, 10),
+				expand: data.expand,
+			});
+			res.status(200).send(row);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 
 	/**
@@ -119,17 +123,19 @@ router
 	 *
 	 * Update and existing redirection-host
 	 */
-	.put((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/redirection-hosts/{hostID}', 'put'), req.body)
-			.then((payload) => {
-				payload.id = parseInt(req.params.host_id, 10);
-				return internalRedirectionHost.update(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.put(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(
+				getValidationSchema("/nginx/redirection-hosts/{hostID}", "put"),
+				req.body,
+			);
+			payload.id = Number.parseInt(req.params.host_id, 10);
+			const result = await internalRedirectionHost.update(res.locals.access, payload);
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 
 	/**
@@ -137,13 +143,16 @@ router
 	 *
 	 * Update and existing redirection-host
 	 */
-	.delete((req, res, next) => {
-		internalRedirectionHost.delete(res.locals.access, {id: parseInt(req.params.host_id, 10)})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.delete(async (req, res, next) => {
+		try {
+			const result = await internalRedirectionHost.delete(res.locals.access, {
+				id: Number.parseInt(req.params.host_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
 /**
@@ -152,8 +161,8 @@ router
  * /api/nginx/redirection-hosts/123/enable
  */
 router
-	.route('/:host_id/enable')
-	.options((req, res) => {
+	.route("/:host_id/enable")
+	.options((_, res) => {
 		res.sendStatus(204);
 	})
 	.all(jwtdecode())
@@ -161,13 +170,16 @@ router
 	/**
 	 * POST /api/nginx/redirection-hosts/123/enable
 	 */
-	.post((req, res, next) => {
-		internalRedirectionHost.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		try {
+			const result = await internalRedirectionHost.enable(res.locals.access, {
+				id: Number.parseInt(req.params.host_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
 /**
@@ -176,8 +188,8 @@ router
  * /api/nginx/redirection-hosts/123/disable
  */
 router
-	.route('/:host_id/disable')
-	.options((req, res) => {
+	.route("/:host_id/disable")
+	.options((_, res) => {
 		res.sendStatus(204);
 	})
 	.all(jwtdecode())
@@ -185,13 +197,16 @@ router
 	/**
 	 * POST /api/nginx/redirection-hosts/123/disable
 	 */
-	.post((req, res, next) => {
-		internalRedirectionHost.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		try {
+			const result = await internalRedirectionHost.disable(res.locals.access, {
+				id: Number.parseInt(req.params.host_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
-module.exports = router;
+export default router;
diff --git a/backend/routes/nginx/streams.js b/backend/routes/nginx/streams.js
index c033f2ef..878dd148 100644
--- a/backend/routes/nginx/streams.js
+++ b/backend/routes/nginx/streams.js
@@ -1,22 +1,23 @@
-const express        = require('express');
-const validator      = require('../../lib/validator');
-const jwtdecode      = require('../../lib/express/jwt-decode');
-const apiValidator   = require('../../lib/validator/api');
-const internalStream = require('../../internal/stream');
-const schema         = require('../../schema');
+import express from "express";
+import internalStream from "../../internal/stream.js";
+import jwtdecode from "../../lib/express/jwt-decode.js";
+import apiValidator from "../../lib/validator/api.js";
+import validator from "../../lib/validator/index.js";
+import { express as logger } from "../../logger.js";
+import { getValidationSchema } from "../../schema/index.js";
 
-let router = express.Router({
+const router = express.Router({
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 
 /**
  * /api/nginx/streams
  */
 router
-	.route('/')
-	.options((req, res) => {
+	.route("/")
+	.options((_, res) => {
 		res.sendStatus(204);
 	})
 	.all(jwtdecode()) // preferred so it doesn't apply to nonexistent routes
@@ -26,29 +27,31 @@ router
 	 *
 	 * Retrieve all streams
 	 */
-	.get((req, res, next) => {
-		validator({
-			additionalProperties: false,
-			properties:           {
-				expand: {
-					$ref: 'common#/properties/expand'
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					additionalProperties: false,
+					properties: {
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+						query: {
+							$ref: "common#/properties/query",
+						},
+					},
 				},
-				query: {
-					$ref: 'common#/properties/query'
-				}
-			}
-		}, {
-			expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
-			query:  (typeof req.query.query === 'string' ? req.query.query : null)
-		})
-			.then((data) => {
-				return internalStream.getAll(res.locals.access, data.expand, data.query);
-			})
-			.then((rows) => {
-				res.status(200)
-					.send(rows);
-			})
-			.catch(next);
+				{
+					expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+					query: typeof req.query.query === "string" ? req.query.query : null,
+				},
+			);
+			const rows = await internalStream.getAll(res.locals.access, data.expand, data.query);
+			res.status(200).send(rows);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 
 	/**
@@ -56,16 +59,15 @@ router
 	 *
 	 * Create a new stream
 	 */
-	.post((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/streams', 'post'), req.body)
-			.then((payload) => {
-				return internalStream.create(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(201)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(getValidationSchema("/nginx/streams", "post"), req.body);
+			const result = await internalStream.create(res.locals.access, payload);
+			res.status(201).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
 /**
@@ -74,8 +76,8 @@ router
  * /api/nginx/streams/123
  */
 router
-	.route('/:stream_id')
-	.options((req, res) => {
+	.route("/:stream_id")
+	.options((_, res) => {
 		res.sendStatus(204);
 	})
 	.all(jwtdecode()) // preferred so it doesn't apply to nonexistent routes
@@ -85,33 +87,35 @@ router
 	 *
 	 * Retrieve a specific stream
 	 */
-	.get((req, res, next) => {
-		validator({
-			required:             ['stream_id'],
-			additionalProperties: false,
-			properties:           {
-				stream_id: {
-					$ref: 'common#/properties/id'
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					required: ["stream_id"],
+					additionalProperties: false,
+					properties: {
+						stream_id: {
+							$ref: "common#/properties/id",
+						},
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+					},
 				},
-				expand: {
-					$ref: 'common#/properties/expand'
-				}
-			}
-		}, {
-			stream_id: req.params.stream_id,
-			expand:    (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
-		})
-			.then((data) => {
-				return internalStream.get(res.locals.access, {
-					id:     parseInt(data.stream_id, 10),
-					expand: data.expand
-				});
-			})
-			.then((row) => {
-				res.status(200)
-					.send(row);
-			})
-			.catch(next);
+				{
+					stream_id: req.params.stream_id,
+					expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+				},
+			);
+			const row = await internalStream.get(res.locals.access, {
+				id: Number.parseInt(data.stream_id, 10),
+				expand: data.expand,
+			});
+			res.status(200).send(row);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 
 	/**
@@ -119,17 +123,16 @@ router
 	 *
 	 * Update and existing stream
 	 */
-	.put((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/streams/{streamID}', 'put'), req.body)
-			.then((payload) => {
-				payload.id = parseInt(req.params.stream_id, 10);
-				return internalStream.update(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.put(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(getValidationSchema("/nginx/streams/{streamID}", "put"), req.body);
+			payload.id = Number.parseInt(req.params.stream_id, 10);
+			const result = await internalStream.update(res.locals.access, payload);
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 
 	/**
@@ -137,13 +140,16 @@ router
 	 *
 	 * Update and existing stream
 	 */
-	.delete((req, res, next) => {
-		internalStream.delete(res.locals.access, {id: parseInt(req.params.stream_id, 10)})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.delete(async (req, res, next) => {
+		try {
+			const result = await internalStream.delete(res.locals.access, {
+				id: Number.parseInt(req.params.stream_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
 /**
@@ -152,7 +158,7 @@ router
  * /api/nginx/streams/123/enable
  */
 router
-	.route('/:host_id/enable')
+	.route("/:host_id/enable")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -161,13 +167,16 @@ router
 	/**
 	 * POST /api/nginx/streams/123/enable
 	 */
-	.post((req, res, next) => {
-		internalStream.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		try {
+			const result = await internalStream.enable(res.locals.access, {
+				id: Number.parseInt(req.params.host_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
 /**
@@ -176,7 +185,7 @@ router
  * /api/nginx/streams/123/disable
  */
 router
-	.route('/:host_id/disable')
+	.route("/:host_id/disable")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -185,13 +194,16 @@ router
 	/**
 	 * POST /api/nginx/streams/123/disable
 	 */
-	.post((req, res, next) => {
-		internalStream.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		try {
+			const result = await internalStream.disable(res.locals.access, {
+				id: Number.parseInt(req.params.host_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
-module.exports = router;
+export default router;
diff --git a/backend/routes/reports.js b/backend/routes/reports.js
index 98c6cf86..bd3a91fe 100644
--- a/backend/routes/reports.js
+++ b/backend/routes/reports.js
@@ -1,29 +1,32 @@
-const express        = require('express');
-const jwtdecode      = require('../lib/express/jwt-decode');
-const internalReport = require('../internal/report');
+import express from "express";
+import internalReport from "../internal/report.js";
+import jwtdecode from "../lib/express/jwt-decode.js";
+import { express as logger } from "../logger.js";
 
-let router = express.Router({
+const router = express.Router({
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 
 router
-	.route('/hosts')
+	.route("/hosts")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
+	.all(jwtdecode())
 
 	/**
 	 * GET /reports/hosts
 	 */
-	.get(jwtdecode(), (_, res, next) => {
-		internalReport.getHostsReport(res.locals.access)
-			.then((data) => {
-				res.status(200)
-					.send(data);
-			})
-			.catch(next);
+	.get(async (req, res, next) => {
+		try {
+			const data = await internalReport.getHostsReport(res.locals.access);
+			res.status(200).send(data);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
-module.exports = router;
+export default router;
diff --git a/backend/routes/schema.js b/backend/routes/schema.js
index fc3e48b6..71003f21 100644
--- a/backend/routes/schema.js
+++ b/backend/routes/schema.js
@@ -1,15 +1,16 @@
-const express = require('express');
-const schema  = require('../schema');
-const PACKAGE = require('../package.json');
+import express from "express";
+import { express as logger } from "../logger.js";
+import PACKAGE from "../package.json" with { type: "json" };
+import { getCompiledSchema } from "../schema/index.js";
 
 const router = express.Router({
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 
 router
-	.route('/')
+	.route("/")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -18,21 +19,27 @@ router
 	 * GET /schema
 	 */
-	.get(async (req, res) => {
+	.get(async (req, res, next) => {
-		let swaggerJSON = await schema.getCompiledSchema();
+		try {
+			const swaggerJSON = await getCompiledSchema();
 
-		let proto = req.protocol;
-		if (typeof req.headers['x-forwarded-proto'] !== 'undefined' && req.headers['x-forwarded-proto']) {
-			proto = req.headers['x-forwarded-proto'];
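+			// Prefer the X-Forwarded-Proto and Origin headers when present so the advertised API base URL matches how the client reached this service (e.g. behind a reverse proxy).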
+			let proto = req.protocol;
+			if (typeof req.headers["x-forwarded-proto"] !== "undefined" && req.headers["x-forwarded-proto"]) {
+				proto = req.headers["x-forwarded-proto"];
+			}
+
+			let origin = `${proto}://${req.hostname}`;
+			if (typeof req.headers.origin !== "undefined" && req.headers.origin) {
+				origin = req.headers.origin;
+			}
+
+			swaggerJSON.info.version = PACKAGE.version;
+			swaggerJSON.servers[0].url = `${origin}/api`;
+			res.status(200).send(swaggerJSON);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
 		}
-
-		let origin = proto + '://' + req.hostname;
-		if (typeof req.headers.origin !== 'undefined' && req.headers.origin) {
-			origin = req.headers.origin;
-		}
-
-		swaggerJSON.info.version   = PACKAGE.version;
-		swaggerJSON.servers[0].url = origin + '/api';
-		res.status(200).send(swaggerJSON);
 	});
 
-module.exports = router;
+export default router;
diff --git a/backend/routes/settings.js b/backend/routes/settings.js
index dac4c3d1..0439992c 100644
--- a/backend/routes/settings.js
+++ b/backend/routes/settings.js
@@ -1,21 +1,22 @@
-const express         = require('express');
-const validator       = require('../lib/validator');
-const jwtdecode       = require('../lib/express/jwt-decode');
-const apiValidator    = require('../lib/validator/api');
-const internalSetting = require('../internal/setting');
-const schema          = require('../schema');
+import express from "express";
+import internalSetting from "../internal/setting.js";
+import jwtdecode from "../lib/express/jwt-decode.js";
+import apiValidator from "../lib/validator/api.js";
+import validator from "../lib/validator/index.js";
+import { express as logger } from "../logger.js";
+import { getValidationSchema } from "../schema/index.js";
 
-let router = express.Router({
+const router = express.Router({
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 
 /**
  * /api/settings
  */
 router
-	.route('/')
+	.route("/")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -26,13 +27,14 @@ router
 	 *
 	 * Retrieve all settings
 	 */
-	.get((_, res, next) => {
-		internalSetting.getAll(res.locals.access)
-			.then((rows) => {
-				res.status(200)
-					.send(rows);
-			})
-			.catch(next);
+	.get(async (req, res, next) => {
+		try {
+			const rows = await internalSetting.getAll(res.locals.access);
+			res.status(200).send(rows);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
 /**
@@ -41,7 +43,7 @@ router
  * /api/settings/something
  */
 router
-	.route('/:setting_id')
+	.route("/:setting_id")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -52,29 +54,31 @@ router
 	 *
 	 * Retrieve a specific setting
 	 */
-	.get((req, res, next) => {
-		validator({
-			required:             ['setting_id'],
-			additionalProperties: false,
-			properties:           {
-				setting_id: {
-					type:      'string',
-					minLength: 1
-				}
-			}
-		}, {
-			setting_id: req.params.setting_id
-		})
-			.then((data) => {
-				return internalSetting.get(res.locals.access, {
-					id: data.setting_id
-				});
-			})
-			.then((row) => {
-				res.status(200)
-					.send(row);
-			})
-			.catch(next);
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					required: ["setting_id"],
+					additionalProperties: false,
+					properties: {
+						setting_id: {
+							type: "string",
+							minLength: 1,
+						},
+					},
+				},
+				{
+					setting_id: req.params.setting_id,
+				},
+			);
+			const row = await internalSetting.get(res.locals.access, {
+				id: data.setting_id,
+			});
+			res.status(200).send(row);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 
 	/**
@@ -82,17 +86,16 @@ router
 	 *
 	 * Update and existing setting
 	 */
-	.put((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/settings/{settingID}', 'put'), req.body)
-			.then((payload) => {
-				payload.id = req.params.setting_id;
-				return internalSetting.update(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.put(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(getValidationSchema("/settings/{settingID}", "put"), req.body);
+			payload.id = req.params.setting_id;
+			const result = await internalSetting.update(res.locals.access, payload);
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
-module.exports = router;
+export default router;
diff --git a/backend/routes/tokens.js b/backend/routes/tokens.js
index 72d01d41..76e0dac0 100644
--- a/backend/routes/tokens.js
+++ b/backend/routes/tokens.js
@@ -1,17 +1,18 @@
-const express       = require('express');
-const jwtdecode     = require('../lib/express/jwt-decode');
-const apiValidator  = require('../lib/validator/api');
-const internalToken = require('../internal/token');
-const schema        = require('../schema');
+import express from "express";
+import internalToken from "../internal/token.js";
+import jwtdecode from "../lib/express/jwt-decode.js";
+import apiValidator from "../lib/validator/api.js";
+import { express as logger } from "../logger.js";
+import { getValidationSchema } from "../schema/index.js";
 
-let router = express.Router({
+const router = express.Router({
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 
 router
-	.route('/')
+	.route("/")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -23,16 +24,17 @@ router
 	 * We also piggy back on to this method, allowing admins to get tokens
 	 * for services like Job board and Worker.
 	 */
-	.get(jwtdecode(), (req, res, next) => {
-		internalToken.getFreshToken(res.locals.access, {
-			expiry: (typeof req.query.expiry !== 'undefined' ? req.query.expiry : null),
-			scope:  (typeof req.query.scope !== 'undefined' ? req.query.scope : null)
-		})
-			.then((data) => {
-				res.status(200)
-					.send(data);
-			})
-			.catch(next);
+	.get(jwtdecode(), async (req, res, next) => {
+		try {
+			const data = await internalToken.getFreshToken(res.locals.access, {
+				expiry: typeof req.query.expiry !== "undefined" ? req.query.expiry : null,
+				scope: typeof req.query.scope !== "undefined" ? req.query.scope : null,
+			});
+			res.status(200).send(data);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 
 	/**
@@ -41,13 +43,14 @@ router
 	 * Create a new Token
 	 */
 	.post(async (req, res, next) => {
-		apiValidator(schema.getValidationSchema('/tokens', 'post'), req.body)
-			.then(internalToken.getTokenFromEmail)
-			.then((data) => {
-				res.status(200)
-					.send(data);
-			})
-			.catch(next);
+		try {
+			const data = await apiValidator(getValidationSchema("/tokens", "post"), req.body);
+			const result = await internalToken.getTokenFromEmail(data);
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
-module.exports = router;
+export default router;
diff --git a/backend/routes/users.js b/backend/routes/users.js
index e41bf6cf..db6656d5 100644
--- a/backend/routes/users.js
+++ b/backend/routes/users.js
@@ -1,22 +1,27 @@
-const express      = require('express');
-const validator    = require('../lib/validator');
-const jwtdecode    = require('../lib/express/jwt-decode');
-const userIdFromMe = require('../lib/express/user-id-from-me');
-const internalUser = require('../internal/user');
-const apiValidator = require('../lib/validator/api');
-const schema       = require('../schema');
+import express from "express";
+import internalUser from "../internal/user.js";
+import Access from "../lib/access.js";
+import { isCI } from "../lib/config.js";
+import errs from "../lib/error.js";
+import jwtdecode from "../lib/express/jwt-decode.js";
+import userIdFromMe from "../lib/express/user-id-from-me.js";
+import apiValidator from "../lib/validator/api.js";
+import validator from "../lib/validator/index.js";
+import { express as logger } from "../logger.js";
+import { getValidationSchema } from "../schema/index.js";
+import { isSetup } from "../setup.js";
 
-let router = express.Router({
+const router = express.Router({
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 
 /**
  * /api/users
  */
 router
-	.route('/')
+	.route("/")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -27,33 +32,38 @@ router
 	 *
 	 * Retrieve all users
 	 */
-	.get((req, res, next) => {
-		validator({
-			additionalProperties: false,
-			properties:           {
-				expand: {
-					$ref: 'common#/properties/expand'
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					additionalProperties: false,
+					properties: {
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+						query: {
+							$ref: "common#/properties/query",
+						},
+					},
 				},
-				query: {
-					$ref: 'common#/properties/query'
-				}
-			}
-		}, {
-			expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
-			query:  (typeof req.query.query === 'string' ? req.query.query : null)
-		})
-			.then((data) => {
-				return internalUser.getAll(res.locals.access, data.expand, data.query);
-			})
-			.then((users) => {
-				res.status(200)
-					.send(users);
-			})
-			.catch((err) => {
-				console.log(err);
-				next(err);
-			});
-		//.catch(next);
+				{
+					expand:
+						typeof req.query.expand === "string"
+							? req.query.expand.split(",")
+							: null,
+					query: typeof req.query.query === "string" ? req.query.query : null,
+				},
+			);
+			const users = await internalUser.getAll(
+				res.locals.access,
+				data.expand,
+				data.query,
+			);
+			res.status(200).send(users);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 
 	/**
@@ -61,16 +71,66 @@ router
 	 *
 	 * Create a new User
 	 */
-	.post((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/users', 'post'), req.body)
-			.then((payload) => {
-				return internalUser.create(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(201)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		const body = req.body;
+
+		try {
+			// If we are in setup mode, we don't check access for current user
+			const setup = await isSetup();
+			if (!setup) {
+				logger.info("Creating a new user in setup mode");
+				const access = new Access(null);
+				await access.load(true);
+				res.locals.access = access;
+
+				// We are in setup mode, set some defaults for this first new user, such as making
+				// them an admin.
+				body.is_disabled = false;
+				if (typeof body.roles !== "object" || body.roles === null) {
+					body.roles = [];
+				}
+				if (body.roles.indexOf("admin") === -1) {
+					body.roles.push("admin");
+				}
+			}
+
+			const payload = await apiValidator(
+				getValidationSchema("/users", "post"),
+				body,
+			);
+			const user = await internalUser.create(res.locals.access, payload);
+			res.status(201).send(user);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
+	})
+
+	/**
+	 * DELETE /api/users
+	 *
+	 * Deletes ALL users. This is NOT GENERALLY AVAILABLE!
+	 * (!) It is NOT an authenticated endpoint.
+	 * (!) Only CI should be able to call this endpoint.
+	 *
+	 * As a result, it will only work when the env vars DEBUG=true and CI=true are set.
+	 *
+	 * Do NOT set those env vars in a production environment!
+	 */
+	.delete(async (req, res, next) => {
+		if (isCI()) {
+			try {
+				logger.warn("Deleting all users - CI environment detected, allowing this operation");
+				await internalUser.deleteAll();
+				res.status(200).send(true);
+			} catch (err) {
+				logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+				next(err);
+			}
+			return;
+		}
+
+		next(new errs.ItemNotFoundError());
 	});
 
 /**
@@ -79,7 +139,7 @@ router
  * /api/users/123
  */
 router
-	.route('/:user_id')
+	.route("/:user_id")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -91,37 +151,43 @@ router
 	 *
 	 * Retrieve a specific user
 	 */
-	.get((req, res, next) => {
-		validator({
-			required:             ['user_id'],
-			additionalProperties: false,
-			properties:           {
-				user_id: {
-					$ref: 'common#/properties/id'
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					required: ["user_id"],
+					additionalProperties: false,
+					properties: {
+						user_id: {
+							$ref: "common#/properties/id",
+						},
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+					},
 				},
-				expand: {
-					$ref: 'common#/properties/expand'
-				}
-			}
-		}, {
-			user_id: req.params.user_id,
-			expand:  (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
-		})
-			.then((data) => {
-				return internalUser.get(res.locals.access, {
-					id:     data.user_id,
-					expand: data.expand,
-					omit:   internalUser.getUserOmisionsByAccess(res.locals.access, data.user_id)
-				});
-			})
-			.then((user) => {
-				res.status(200)
-					.send(user);
-			})
-			.catch((err) => {
-				console.log(err);
-				next(err);
+				{
+					user_id: req.params.user_id,
+					expand:
+						typeof req.query.expand === "string"
+							? req.query.expand.split(",")
+							: null,
+				},
+			);
+
+			const user = await internalUser.get(res.locals.access, {
+				id: data.user_id,
+				expand: data.expand,
+				omit: internalUser.getUserOmisionsByAccess(
+					res.locals.access,
+					data.user_id,
+				),
 			});
+			res.status(200).send(user);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 
 	/**
@@ -129,17 +195,19 @@ router
 	 *
 	 * Update and existing user
 	 */
-	.put((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/users/{userID}', 'put'), req.body)
-			.then((payload) => {
-				payload.id = req.params.user_id;
-				return internalUser.update(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.put(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(
+				getValidationSchema("/users/{userID}", "put"),
+				req.body,
+			);
+			payload.id = req.params.user_id;
+			const result = await internalUser.update(res.locals.access, payload);
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 
 	/**
@@ -147,13 +215,16 @@ router
 	 *
 	 * Update and existing user
 	 */
-	.delete((req, res, next) => {
-		internalUser.delete(res.locals.access, {id: req.params.user_id})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.delete(async (req, res, next) => {
+		try {
+			const result = await internalUser.delete(res.locals.access, {
+				id: req.params.user_id,
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
 /**
@@ -162,8 +233,8 @@ router
  * /api/users/123/auth
  */
 router
-	.route('/:user_id/auth')
-	.options((req, res) => {
+	.route("/:user_id/auth")
+	.options((_, res) => {
 		res.sendStatus(204);
 	})
 	.all(jwtdecode())
@@ -174,17 +245,19 @@ router
 	 *
 	 * Update password for a user
 	 */
-	.put((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/users/{userID}/auth', 'put'), req.body)
-			.then((payload) => {
-				payload.id = req.params.user_id;
-				return internalUser.setPassword(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.put(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(
+				getValidationSchema("/users/{userID}/auth", "put"),
+				req.body,
+			);
+			payload.id = req.params.user_id;
+			const result = await internalUser.setPassword(res.locals.access, payload);
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
 /**
@@ -193,8 +266,8 @@ router
  * /api/users/123/permissions
  */
 router
-	.route('/:user_id/permissions')
-	.options((req, res) => {
+	.route("/:user_id/permissions")
+	.options((_, res) => {
 		res.sendStatus(204);
 	})
 	.all(jwtdecode())
@@ -205,17 +278,22 @@ router
 	 *
 	 * Set some or all permissions for a user
 	 */
-	.put((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/users/{userID}/permissions', 'put'), req.body)
-			.then((payload) => {
-				payload.id = req.params.user_id;
-				return internalUser.setPermissions(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.put(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(
+				getValidationSchema("/users/{userID}/permissions", "put"),
+				req.body,
+			);
+			payload.id = req.params.user_id;
+			const result = await internalUser.setPermissions(
+				res.locals.access,
+				payload,
+			);
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
 /**
@@ -224,7 +302,7 @@ router
  * /api/users/123/login
  */
 router
-	.route('/:user_id/login')
+	.route("/:user_id/login")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -235,13 +313,16 @@ router
 	 *
 	 * Log in as a user
 	 */
-	.post((req, res, next) => {
-		internalUser.loginAs(res.locals.access, {id: parseInt(req.params.user_id, 10)})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		try {
+			const result = await internalUser.loginAs(res.locals.access, {
+				id: Number.parseInt(req.params.user_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
-module.exports = router;
+export default router;
diff --git a/backend/schema/components/audit-log-list.json b/backend/schema/components/audit-log-list.json
new file mode 100644
index 00000000..74368528
--- /dev/null
+++ b/backend/schema/components/audit-log-list.json
@@ -0,0 +1,7 @@
+{
+	"type": "array",
+	"description": "Audit Log list",
+	"items": {
+		"$ref": "./audit-log-object.json"
+	}
+}
diff --git a/backend/schema/components/audit-log-object.json b/backend/schema/components/audit-log-object.json
index 3e5e8594..4ed25e5c 100644
--- a/backend/schema/components/audit-log-object.json
+++ b/backend/schema/components/audit-log-object.json
@@ -1,7 +1,16 @@
 {
 	"type": "object",
 	"description": "Audit Log object",
-	"required": ["id", "created_on", "modified_on", "user_id", "object_type", "object_id", "action", "meta"],
+	"required": [
+		"id",
+		"created_on",
+		"modified_on",
+		"user_id",
+		"object_type",
+		"object_id",
+		"action",
+		"meta"
+	],
 	"additionalProperties": false,
 	"properties": {
 		"id": {
@@ -27,6 +36,9 @@
 		},
 		"meta": {
 			"type": "object"
+		},
+		"user": {
+			"$ref": "./user-object.json"
 		}
 	}
 }
diff --git a/backend/schema/components/certificate-object.json b/backend/schema/components/certificate-object.json
index dcc2a834..ef3553d9 100644
--- a/backend/schema/components/certificate-object.json
+++ b/backend/schema/components/certificate-object.json
@@ -62,15 +62,9 @@
 				"dns_provider_credentials": {
 					"type": "string"
 				},
-				"letsencrypt_agree": {
-					"type": "boolean"
-				},
 				"letsencrypt_certificate": {
 					"type": "object"
 				},
-				"letsencrypt_email": {
-					"$ref": "../common.json#/properties/email"
-				},
 				"propagation_seconds": {
 					"type": "integer",
 					"minimum": 0
diff --git a/backend/schema/components/dns-providers-list.json b/backend/schema/components/dns-providers-list.json
new file mode 100644
index 00000000..c240db18
--- /dev/null
+++ b/backend/schema/components/dns-providers-list.json
@@ -0,0 +1,23 @@
+{
+	"type": "array",
+	"description": "DNS Providers list",
+	"items": {
+		"type": "object",
+		"required": ["id", "name", "credentials"],
+		"additionalProperties": false,
+		"properties": {
+			"id": {
+				"type": "string",
+				"description": "Unique identifier for the DNS provider, matching the Python package"
+			},
+			"name": {
+				"type": "string",
+				"description": "Human-readable name of the DNS provider"
+			},
+			"credentials": {
+				"type": "string",
+				"description": "Instructions on how to format the credentials for this DNS provider"
+			}
+		}
+	}
+}
diff --git a/backend/schema/components/health-object.json b/backend/schema/components/health-object.json
index 8d223417..f6398094 100644
--- a/backend/schema/components/health-object.json
+++ b/backend/schema/components/health-object.json
@@ -9,6 +9,11 @@
 			"description": "Healthy",
 			"example": "OK"
 		},
+		"setup": {
+			"type": "boolean",
+			"description": "Whether the initial setup has been completed",
+			"example": true
+		},
 		"version": {
 			"type": "object",
 			"description": "The version object",
diff --git a/backend/schema/components/stream-object.json b/backend/schema/components/stream-object.json
index 848c30e6..d4ba0a27 100644
--- a/backend/schema/components/stream-object.json
+++ b/backend/schema/components/stream-object.json
@@ -31,7 +31,7 @@
 				},
 				{
 					"type": "string",
-					"format": "ipv4"
+					"format": "^[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}$"
 				},
 				{
 					"type": "string",
diff --git a/backend/schema/components/user-object.json b/backend/schema/components/user-object.json
index 180e8f19..eec02a86 100644
--- a/backend/schema/components/user-object.json
+++ b/backend/schema/components/user-object.json
@@ -54,6 +54,63 @@
 			"items": {
 				"type": "string"
 			}
+		},
+		"permissions": {
+			"type": "object",
+			"description": "Permissions if expanded in request",
+			"required": [
+				"visibility",
+				"proxy_hosts",
+				"redirection_hosts",
+				"dead_hosts",
+				"streams",
+				"access_lists",
+				"certificates"
+			],
+			"properties": {
+				"visibility": {
+					"type": "string",
+					"description": "Visibility level",
+					"example": "all",
+					"pattern": "^(all|user)$"
+				},
+				"proxy_hosts": {
+					"type": "string",
+					"description": "Proxy Hosts access level",
+					"example": "manage",
+					"pattern": "^(manage|view|hidden)$"
+				},
+				"redirection_hosts": {
+					"type": "string",
+					"description": "Redirection Hosts access level",
+					"example": "manage",
+					"pattern": "^(manage|view|hidden)$"
+				},
+				"dead_hosts": {
+					"type": "string",
+					"description": "Dead Hosts access level",
+					"example": "manage",
+					"pattern": "^(manage|view|hidden)$"
+				},
+				"streams": {
+					"type": "string",
+					"description": "Streams access level",
+					"example": "manage",
+					"pattern": "^(manage|view|hidden)$"
+				},
+				"access_lists": {
+					"type": "string",
+					"description": "Access Lists access level",
+					"example": "manage",
+					"pattern": "^(manage|view|hidden)$"
+				},
+				"certificates": {
+					"type": "string",
+					"description": "Certificates access level",
+					"example": "manage",
+					"pattern": "^(manage|view|hidden)$"
+				}
+			}
 		}
 	}
 }
diff --git a/backend/schema/index.js b/backend/schema/index.js
index 87b75f25..0478486b 100644
--- a/backend/schema/index.js
+++ b/backend/schema/index.js
@@ -1,41 +1,46 @@
-const refParser = require('@apidevtools/json-schema-ref-parser');
+import { dirname } from "node:path";
+import { fileURLToPath } from "node:url";
+import $RefParser from "@apidevtools/json-schema-ref-parser";
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = dirname(__filename);
 
 let compiledSchema = null;
 
-module.exports = {
-
-	/**
-	 * Compiles the schema, by dereferencing it, only once
-	 * and returns the memory cached value
-	 */
-	getCompiledSchema: async () => {
-		if (compiledSchema === null) {
-			compiledSchema = await refParser.dereference(__dirname + '/swagger.json', {
-				mutateInputSchema: false,
-			});
-		}
-		return compiledSchema;
-	},
-
-	/**
-	 * Scans the schema for the validation schema for the given path and method
-	 * and returns it.
-	 *
-	 * @param {string} path
-	 * @param {string} method
-	 * @returns string|null
-	 */
-	getValidationSchema: (path, method) => {
-		if (compiledSchema !== null &&
-			typeof compiledSchema.paths[path] !== 'undefined' &&
-			typeof compiledSchema.paths[path][method] !== 'undefined' &&
-			typeof compiledSchema.paths[path][method].requestBody !== 'undefined' &&
-			typeof compiledSchema.paths[path][method].requestBody.content !== 'undefined' &&
-			typeof compiledSchema.paths[path][method].requestBody.content['application/json'] !== 'undefined' &&
-			typeof compiledSchema.paths[path][method].requestBody.content['application/json'].schema !== 'undefined'
-		) {
-			return compiledSchema.paths[path][method].requestBody.content['application/json'].schema;
-		}
-		return null;
+/**
+ * Compiles the schema, by dereferencing it, only once
+ * and returns the memory cached value
+ */
+const getCompiledSchema = async () => {
+	if (compiledSchema === null) {
+		compiledSchema = await $RefParser.dereference(`${__dirname}/swagger.json`, {
+			mutateInputSchema: false,
+		});
 	}
+	return compiledSchema;
 };
+
+/**
+ * Scans the schema for the validation schema for the given path and method
+ * and returns it.
+ *
+ * @param {string} path
+ * @param {string} method
+ * @returns {object|null}
+ */
+const getValidationSchema = (path, method) => {
+	if (
+		compiledSchema !== null &&
+		typeof compiledSchema.paths[path] !== "undefined" &&
+		typeof compiledSchema.paths[path][method] !== "undefined" &&
+		typeof compiledSchema.paths[path][method].requestBody !== "undefined" &&
+		typeof compiledSchema.paths[path][method].requestBody.content !== "undefined" &&
+		typeof compiledSchema.paths[path][method].requestBody.content["application/json"] !== "undefined" &&
+		typeof compiledSchema.paths[path][method].requestBody.content["application/json"].schema !== "undefined"
+	) {
+		return compiledSchema.paths[path][method].requestBody.content["application/json"].schema;
+	}
+	return null;
+};
+
+export { getCompiledSchema, getValidationSchema };
diff --git a/backend/schema/paths/audit-log/get.json b/backend/schema/paths/audit-log/get.json
index bc43e29d..ecda9cef 100644
--- a/backend/schema/paths/audit-log/get.json
+++ b/backend/schema/paths/audit-log/get.json
@@ -1,6 +1,6 @@
 {
-	"operationId": "getAuditLog",
-	"summary": "Get Audit Log",
+	"operationId": "getAuditLogs",
+	"summary": "Get Audit Logs",
 	"tags": ["Audit Log"],
 	"security": [
 		{
@@ -44,7 +44,7 @@
 						}
 					},
 					"schema": {
-						"$ref": "../../components/audit-log-object.json"
+						"$ref": "../../components/audit-log-list.json"
 					}
 				}
 			}
diff --git a/backend/schema/paths/audit-log/id/get.json b/backend/schema/paths/audit-log/id/get.json
new file mode 100644
index 00000000..74f59723
--- /dev/null
+++ b/backend/schema/paths/audit-log/id/get.json
@@ -0,0 +1,73 @@
+{
+	"operationId": "getAuditLog",
+	"summary": "Get Audit Log Event",
+	"tags": [
+		"Audit Log"
+	],
+	"security": [
+		{
+			"BearerAuth": [
+				"audit-log"
+			]
+		}
+	],
+	"parameters": [
+		{
+			"in": "path",
+			"name": "id",
+			"schema": {
+				"type": "integer",
+				"minimum": 1
+			},
+			"required": true,
+			"example": 1
+		}
+	],
+	"responses": {
+		"200": {
+			"description": "200 response",
+			"content": {
+				"application/json": {
+					"examples": {
+						"default": {
+							"value": {
+								"id": 1,
+								"created_on": "2025-09-15T17:27:45.000Z",
+								"modified_on": "2025-09-15T17:27:45.000Z",
+								"user_id": 1,
+								"object_type": "user",
+								"object_id": 1,
+								"action": "created",
+								"meta": {
+									"id": 1,
+									"created_on": "2025-09-15T17:27:45.000Z",
+									"modified_on": "2025-09-15T17:27:45.000Z",
+									"is_disabled": false,
+									"email": "jc@jc21.com",
+									"name": "Jamie",
+									"nickname": "Jamie",
+									"avatar": "//www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?default=mm",
+									"roles": [
+										"admin"
+									],
+									"permissions": {
+										"visibility": "all",
+										"proxy_hosts": "manage",
+										"redirection_hosts": "manage",
+										"dead_hosts": "manage",
+										"streams": "manage",
+										"access_lists": "manage",
+										"certificates": "manage"
+									}
+								}
+							}
+						}
+					},
+					"schema": {
+						"$ref": "../../../components/audit-log-object.json"
+					}
+				}
+			}
+		}
+	}
+}
diff --git a/backend/schema/paths/get.json b/backend/schema/paths/get.json
index 8c3a4e02..9ca1b8d3 100644
--- a/backend/schema/paths/get.json
+++ b/backend/schema/paths/get.json
@@ -11,6 +11,7 @@
 						"default": {
 							"value": {
 								"status": "OK",
+								"setup": true,
 								"version": {
 									"major": 2,
 									"minor": 1,
diff --git a/backend/schema/paths/nginx/certificates/certID/get.json b/backend/schema/paths/nginx/certificates/certID/get.json
index 22317b33..bc289573 100644
--- a/backend/schema/paths/nginx/certificates/certID/get.json
+++ b/backend/schema/paths/nginx/certificates/certID/get.json
@@ -36,8 +36,6 @@
 								"domain_names": ["test.example.com"],
 								"expires_on": "2025-01-07T04:34:18.000Z",
 								"meta": {
-									"letsencrypt_email": "jc@jc21.com",
-									"letsencrypt_agree": true,
 									"dns_challenge": false
 								}
 							}
diff --git a/backend/schema/paths/nginx/certificates/certID/renew/post.json b/backend/schema/paths/nginx/certificates/certID/renew/post.json
index ef4d20e5..b2c1dcd6 100644
--- a/backend/schema/paths/nginx/certificates/certID/renew/post.json
+++ b/backend/schema/paths/nginx/certificates/certID/renew/post.json
@@ -37,8 +37,6 @@
 								"nice_name": "My Test Cert",
 								"domain_names": ["test.jc21.supernerd.pro"],
 								"meta": {
-									"letsencrypt_email": "jc@jc21.com",
-									"letsencrypt_agree": true,
 									"dns_challenge": false
 								}
 							}
diff --git a/backend/schema/paths/nginx/certificates/dns-providers/get.json b/backend/schema/paths/nginx/certificates/dns-providers/get.json
new file mode 100644
index 00000000..ec064627
--- /dev/null
+++ b/backend/schema/paths/nginx/certificates/dns-providers/get.json
@@ -0,0 +1,52 @@
+{
+	"operationId": "getDNSProviders",
+	"summary": "Get DNS Providers for Certificates",
+	"tags": [
+		"Certificates"
+	],
+	"security": [
+		{
+			"BearerAuth": [
+				"certificates"
+			]
+		}
+	],
+	"responses": {
+		"200": {
+			"description": "200 response",
+			"content": {
+				"application/json": {
+					"examples": {
+						"default": {
+							"value": [
+								{
+									"id": "vultr",
+									"name": "Vultr",
+									"credentials": "dns_vultr_key = YOUR_VULTR_API_KEY"
+								},
+								{
+									"id": "websupport",
+									"name": "Websupport.sk",
+									"credentials": "dns_websupport_identifier = \ndns_websupport_secret_key = "
+								},
+								{
+									"id": "wedos",
+									"name": "Wedos",
+									"credentials": "dns_wedos_user = \ndns_wedos_auth = "
+								},
+								{
+									"id": "zoneedit",
+									"name": "ZoneEdit",
+									"credentials": "dns_zoneedit_user = \ndns_zoneedit_token = "
+								}
+							]
+						}
+					},
+					"schema": {
+						"$ref": "../../../../components/dns-providers-list.json"
+					}
+				}
+			}
+		}
+	}
+}
diff --git a/backend/schema/paths/nginx/certificates/get.json b/backend/schema/paths/nginx/certificates/get.json
index 2f4b556a..bd45e62a 100644
--- a/backend/schema/paths/nginx/certificates/get.json
+++ b/backend/schema/paths/nginx/certificates/get.json
@@ -36,8 +36,6 @@
 									"domain_names": ["test.example.com"],
 									"expires_on": "2025-01-07T04:34:18.000Z",
 									"meta": {
-										"letsencrypt_email": "jc@jc21.com",
-										"letsencrypt_agree": true,
 										"dns_challenge": false
 									}
 								}
diff --git a/backend/schema/paths/nginx/certificates/post.json b/backend/schema/paths/nginx/certificates/post.json
index 5a3306c2..f2bb2fa2 100644
--- a/backend/schema/paths/nginx/certificates/post.json
+++ b/backend/schema/paths/nginx/certificates/post.json
@@ -52,8 +52,6 @@
 								"nice_name": "test.example.com",
 								"domain_names": ["test.example.com"],
 								"meta": {
-									"letsencrypt_email": "jc@jc21.com",
-									"letsencrypt_agree": true,
 									"dns_challenge": false,
 									"letsencrypt_certificate": {
 										"cn": "test.example.com",
diff --git a/backend/schema/paths/nginx/certificates/test-http/get.json b/backend/schema/paths/nginx/certificates/test-http/post.json
similarity index 59%
rename from backend/schema/paths/nginx/certificates/test-http/get.json
rename to backend/schema/paths/nginx/certificates/test-http/post.json
index 2b9a8dd3..f4f82e3f 100644
--- a/backend/schema/paths/nginx/certificates/test-http/get.json
+++ b/backend/schema/paths/nginx/certificates/test-http/post.json
@@ -7,18 +7,24 @@
 			"BearerAuth": ["certificates"]
 		}
 	],
-	"parameters": [
-		{
-			"in": "query",
-			"name": "domains",
-			"description": "Expansions",
-			"required": true,
-			"schema": {
-				"type": "string",
-				"example": "[\"test.example.ord\",\"test.example.com\",\"nonexistent.example.com\"]"
+	"requestBody": {
+		"description": "Test Payload",
+		"required": true,
+		"content": {
+			"application/json": {
+				"schema": {
+					"type": "object",
+					"additionalProperties": false,
+					"required": ["domains"],
+					"properties": {
+						"domains": {
+							"$ref": "../../../../common.json#/properties/domain_names"
+						}
+					}
+				}
 			}
 		}
-	],
+	},
 	"responses": {
 		"200": {
 			"description": "200 response",
diff --git a/backend/schema/paths/nginx/streams/post.json b/backend/schema/paths/nginx/streams/post.json
index d26996b6..0ee6c6eb 100644
--- a/backend/schema/paths/nginx/streams/post.json
+++ b/backend/schema/paths/nginx/streams/post.json
@@ -37,6 +37,9 @@
 						},
 						"meta": {
 							"$ref": "../../../components/stream-object.json#/properties/meta"
+						},
+						"domain_names": {
+							"$ref": "../../../components/dead-host-object.json#/properties/domain_names"
 						}
 					}
 				}
diff --git a/backend/schema/swagger.json b/backend/schema/swagger.json
index 4a502b4e..7feb4ebc 100644
--- a/backend/schema/swagger.json
+++ b/backend/schema/swagger.json
@@ -29,6 +29,11 @@
 				"$ref": "./paths/audit-log/get.json"
 			}
 		},
+		"/audit-log/{id}": {
+			"get": {
+				"$ref": "./paths/audit-log/id/get.json"
+			}
+		},
 		"/nginx/access-lists": {
 			"get": {
 				"$ref": "./paths/nginx/access-lists/get.json"
@@ -56,14 +61,19 @@
 				"$ref": "./paths/nginx/certificates/post.json"
 			}
 		},
+		"/nginx/certificates/dns-providers": {
+			"get": {
+				"$ref": "./paths/nginx/certificates/dns-providers/get.json"
+			}
+		},
 		"/nginx/certificates/validate": {
 			"post": {
 				"$ref": "./paths/nginx/certificates/validate/post.json"
 			}
 		},
 		"/nginx/certificates/test-http": {
-			"get": {
-				"$ref": "./paths/nginx/certificates/test-http/get.json"
+			"post": {
+				"$ref": "./paths/nginx/certificates/test-http/post.json"
 			}
 		},
 		"/nginx/certificates/{certID}": {
diff --git a/backend/scripts/install-certbot-plugins b/backend/scripts/install-certbot-plugins
index bf995410..6acb0226 100755
--- a/backend/scripts/install-certbot-plugins
+++ b/backend/scripts/install-certbot-plugins
@@ -1,7 +1,7 @@
 #!/usr/bin/node
 
 // Usage:
-//   Install all plugins defined in `certbot-dns-plugins.json`:
+//   Install all plugins defined in `../certbot/dns-plugins.json`:
 //    ./install-certbot-plugins
 //   Install one or more specific plugins:
 //    ./install-certbot-plugins route53 cloudflare
@@ -10,22 +10,23 @@
 //    docker exec npm_core /command/s6-setuidgid 1000:1000 bash -c "/app/scripts/install-certbot-plugins"
 //
 
-const dnsPlugins = require('../global/certbot-dns-plugins.json');
-const certbot    = require('../lib/certbot');
-const logger     = require('../logger').certbot;
-const batchflow  = require('batchflow');
+import batchflow from "batchflow";
+import dnsPlugins from "../certbot/dns-plugins.json" with { type: "json" };
+import { installPlugin } from "../lib/certbot.js";
+import { certbot as logger } from "../logger.js";
 
-let hasErrors      = false;
-let failingPlugins = [];
+let hasErrors = false;
+const failingPlugins = [];
 
 let pluginKeys = Object.keys(dnsPlugins);
 if (process.argv.length > 2) {
 	pluginKeys = process.argv.slice(2);
 }
 
-batchflow(pluginKeys).sequential()
+batchflow(pluginKeys)
+	.sequential()
 	.each((i, pluginKey, next) => {
-		certbot.installPlugin(pluginKey)
+		installPlugin(pluginKey)
 			.then(() => {
 				next();
 			})
@@ -40,10 +41,14 @@ batchflow(pluginKeys).sequential()
 	})
 	.end(() => {
 		if (hasErrors) {
-			logger.error('Some plugins failed to install. Please check the logs above. Failing plugins: ' + '\n - ' + failingPlugins.join('\n - '));
+			logger.error(
+				"Some plugins failed to install. Please check the logs above. Failing plugins: " +
+					"\n - " +
+					failingPlugins.join("\n - "),
+			);
 			process.exit(1);
 		} else {
-			logger.complete('Plugins installed successfully');
+			logger.complete("Plugins installed successfully");
 			process.exit(0);
 		}
 	});
diff --git a/backend/setup.js b/backend/setup.js
index 29208a0d..b2c0dcb7 100644
--- a/backend/setup.js
+++ b/backend/setup.js
@@ -1,72 +1,74 @@
-const config              = require('./lib/config');
-const logger              = require('./logger').setup;
-const certificateModel    = require('./models/certificate');
-const userModel           = require('./models/user');
-const userPermissionModel = require('./models/user_permission');
-const utils               = require('./lib/utils');
-const authModel           = require('./models/auth');
-const settingModel        = require('./models/setting');
-const certbot             = require('./lib/certbot');
+import { installPlugins } from "./lib/certbot.js";
+import utils from "./lib/utils.js";
+import { setup as logger } from "./logger.js";
+import authModel from "./models/auth.js";
+import certificateModel from "./models/certificate.js";
+import settingModel from "./models/setting.js";
+import userModel from "./models/user.js";
+import userPermissionModel from "./models/user_permission.js";
+
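+/**
+ * Determines whether initial setup has already been completed,
+ * i.e. whether at least one non-deleted user exists in the database.
+ *
+ * @returns {Promise<boolean>}
+ */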
+export const isSetup = async () => {
+	const row = await userModel.query().select("id").where("is_deleted", 0).first();
+	return row?.id > 0;
+};
+
 /**
  * Creates a default admin user if one doesn't already exist in the database
  *
  * @returns {Promise}
  */
-const setupDefaultUser = () => {
-	return userModel
-		.query()
-		.select('id', )
-		.where('is_deleted', 0)
-		.first()
-		.then((row) => {
-			if (!row || !row.id) {
-				// Create a new user and set password
-				const email    = (process.env.INITIAL_ADMIN_EMAIL || 'admin@example.com').toLowerCase();
-				const password = process.env.INITIAL_ADMIN_PASSWORD || 'changeme';
+const setupDefaultUser = async () => {
+	const initialAdminEmail = process.env.INITIAL_ADMIN_EMAIL;
+	const initialAdminPassword = process.env.INITIAL_ADMIN_PASSWORD;
 
-				logger.info(`Creating a new user: ${email} with password: ${password}`);
+	// This will only create a new user when there are no active users in the database
+	// and the INITIAL_ADMIN_EMAIL and INITIAL_ADMIN_PASSWORD environment variables are set.
+	// Otherwise, users should be shown the setup wizard in the frontend.
+	// I'm keeping this legacy behavior in case some people are automating deployments.
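+	//
+	// Illustrative example (docker-compose environment, values are placeholders):
+	//   environment:
+	//     INITIAL_ADMIN_EMAIL: "admin@example.com"
+	//     INITIAL_ADMIN_PASSWORD: "changeme"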
 
-				const data = {
-					is_deleted: 0,
-					email:      email,
-					name:       'Administrator',
-					nickname:   'Admin',
-					avatar:     '',
-					roles:      ['admin'],
-				};
+	if (!initialAdminEmail || !initialAdminPassword) {
+		return Promise.resolve();
+	}
 
-				return userModel
-					.query()
-					.insertAndFetch(data)
-					.then((user) => {
-						return authModel
-							.query()
-							.insert({
-								user_id: user.id,
-								type:    'password',
-								secret:  password,
-								meta:    {},
-							})
-							.then(() => {
-								return userPermissionModel.query().insert({
-									user_id:           user.id,
-									visibility:        'all',
-									proxy_hosts:       'manage',
-									redirection_hosts: 'manage',
-									dead_hosts:        'manage',
-									streams:           'manage',
-									access_lists:      'manage',
-									certificates:      'manage',
-								});
-							});
-					})
-					.then(() => {
-						logger.info('Initial admin setup completed');
-					});
-			} else if (config.debug()) {
-				logger.info('Admin user setup not required');
-			}
+	const userIsSetup = await isSetup();
+	if (!userIsSetup) {
+		// Create a new user and set password
+		logger.info(`Creating a new user: ${initialAdminEmail} with password: ${initialAdminPassword}`);
+
+		const data = {
+			is_deleted: 0,
+			email: initialAdminEmail,
+			name: "Administrator",
+			nickname: "Admin",
+			avatar: "",
+			roles: ["admin"],
+		};
+
+		const user = await userModel
+			.query()
+			.insertAndFetch(data);
+
+		await authModel
+			.query()
+			.insert({
+				user_id: user.id,
+				type: "password",
+				secret: initialAdminPassword,
+				meta: {},
+			});
+
+		await userPermissionModel.query().insert({
+			user_id: user.id,
+			visibility: "all",
+			proxy_hosts: "manage",
+			redirection_hosts: "manage",
+			dead_hosts: "manage",
+			streams: "manage",
+			access_lists: "manage",
+			certificates: "manage",
 		});
+		logger.info("Initial admin setup completed");
+	}
 };
 
 /**
@@ -74,31 +76,25 @@ const setupDefaultUser = () => {
  *
  * @returns {Promise}
  */
-const setupDefaultSettings = () => {
-	return settingModel
+const setupDefaultSettings = async () => {
+	const row = await settingModel
 		.query()
-		.select('id')
-		.where({id: 'default-site'})
-		.first()
-		.then((row) => {
-			if (!row || !row.id) {
-				settingModel
-					.query()
-					.insert({
-						id:          'default-site',
-						name:        'Default Site',
-						description: 'What to show when Nginx is hit with an unknown Host',
-						value:       'congratulations',
-						meta:        {},
-					})
-					.then(() => {
-						logger.info('Default settings added');
-					});
-			}
-			if (config.debug()) {
-				logger.info('Default setting setup not required');
-			}
-		});
+		.select("id")
+		.where({ id: "default-site" })
+		.first();
+
+	if (!row?.id) {
+		await settingModel
+			.query()
+			.insert({
+				id: "default-site",
+				name: "Default Site",
+				description: "What to show when Nginx is hit with an unknown Host",
+				value: "congratulations",
+				meta: {},
+			});
+		logger.info("Default settings added");
+	}
 };
 
 /**
@@ -106,44 +102,44 @@ const setupDefaultSettings = () => {
  *
  * @returns {Promise}
  */
-const setupCertbotPlugins = () => {
-	return certificateModel
+const setupCertbotPlugins = async () => {
+	const certificates = await certificateModel
 		.query()
-		.where('is_deleted', 0)
-		.andWhere('provider', 'letsencrypt')
-		.then((certificates) => {
-			if (certificates && certificates.length) {
-				const plugins  = [];
-				const promises = [];
+		.where("is_deleted", 0)
+		.andWhere("provider", "letsencrypt");
 
-				certificates.map((certificate) => {
-					if (certificate.meta && certificate.meta.dns_challenge === true) {
-						if (plugins.indexOf(certificate.meta.dns_provider) === -1) {
-							plugins.push(certificate.meta.dns_provider);
-						}
+	if (certificates?.length) {
+		const plugins = [];
+		const promises = [];
 
-						// Make sure credentials file exists
-						const credentials_loc = `/etc/letsencrypt/credentials/credentials-${certificate.id}`;
-						// Escape single quotes and backslashes
-						const escapedCredentials = certificate.meta.dns_provider_credentials.replaceAll('\'', '\\\'').replaceAll('\\', '\\\\');
-						const credentials_cmd    = `[ -f '${credentials_loc}' ] || { mkdir -p /etc/letsencrypt/credentials 2> /dev/null; echo '${escapedCredentials}' > '${credentials_loc}' && chmod 600 '${credentials_loc}'; }`;
-						promises.push(utils.exec(credentials_cmd));
-					}
-				});
+		certificates.map((certificate) => {
+			if (certificate.meta && certificate.meta.dns_challenge === true) {
+				if (plugins.indexOf(certificate.meta.dns_provider) === -1) {
+					plugins.push(certificate.meta.dns_provider);
+				}
 
-				return certbot.installPlugins(plugins)
-					.then(() => {
-						if (promises.length) {
-							return Promise.all(promises)
-								.then(() => {
-									logger.info(`Added Certbot plugins ${plugins.join(', ')}`);
-								});
-						}
-					});
+				// Make sure credentials file exists
+				const credentials_loc = `/etc/letsencrypt/credentials/credentials-${certificate.id}`;
+				if (typeof certificate.meta.dns_provider_credentials === "string") {
+					// Escape single quotes and backslashes
+					const escapedCredentials = certificate.meta.dns_provider_credentials
+						.replaceAll("'", "\\'")
+						.replaceAll("\\", "\\\\");
+					const credentials_cmd = `[ -f '${credentials_loc}' ] || { mkdir -p /etc/letsencrypt/credentials 2> /dev/null; echo '${escapedCredentials}' > '${credentials_loc}' && chmod 600 '${credentials_loc}'; }`;
+					promises.push(utils.exec(credentials_cmd));
+				}
 			}
+			return true;
 		});
-};
 
+		await installPlugins(plugins);
+
+		if (promises.length) {
+			await Promise.all(promises);
+			logger.info(`Added Certbot plugins ${plugins.join(", ")}`);
+		}
+	}
+};
 
 /**
  * Starts a timer to call run the logrotation binary every two days
@@ -154,18 +150,17 @@ const setupLogrotation = () => {
 
 	const runLogrotate = async () => {
 		try {
-			await utils.exec('logrotate /etc/logrotate.d/nginx-proxy-manager');
-			logger.info('Logrotate completed.');
-		} catch (e) { logger.warn(e); }
+			await utils.exec("logrotate /etc/logrotate.d/nginx-proxy-manager");
+			logger.info("Logrotate completed.");
+		} catch (e) {
+			logger.warn(e);
+		}
 	};
 
-	logger.info('Logrotate Timer initialized');
+	logger.info("Logrotate Timer initialized");
 	setInterval(runLogrotate, intervalTimeout);
 	// And do this now as well
 	return runLogrotate();
 };
 
-module.exports = () => setupDefaultUser()
-	.then(setupDefaultSettings)
-	.then(setupCertbotPlugins)
-	.then(setupLogrotation);
+export default () => setupDefaultUser().then(setupDefaultSettings).then(setupCertbotPlugins).then(setupLogrotation);
diff --git a/backend/validate-schema.js b/backend/validate-schema.js
old mode 100644
new mode 100755
index 71a05c81..b1870665
--- a/backend/validate-schema.js
+++ b/backend/validate-schema.js
@@ -1,16 +1,19 @@
-const SwaggerParser = require('@apidevtools/swagger-parser');
-const chalk         = require('chalk');
-const schema        = require('./schema');
-const log           = console.log;
+#!/usr/bin/node
 
-schema.getCompiledSchema().then(async (swaggerJSON) => {
+import SwaggerParser from "@apidevtools/swagger-parser";
+import chalk from "chalk";
+import { getCompiledSchema } from "./schema/index.js";
+
+const log = console.log;
+
+getCompiledSchema().then(async (swaggerJSON) => {
 	try {
 		const api = await SwaggerParser.validate(swaggerJSON);
-		console.log('API name: %s, Version: %s', api.info.title, api.info.version);
-		log(chalk.green('❯ Schema is valid'));
+		console.log("API name: %s, Version: %s", api.info.title, api.info.version);
+		log(chalk.green("❯ Schema is valid"));
 	} catch (e) {
 		console.error(e);
-		log(chalk.red('❯', e.message), '\n');
+		log(chalk.red("❯", e.message), "\n");
 		process.exit(1);
 	}
 });
diff --git a/backend/yarn.lock b/backend/yarn.lock
index bae734b4..4681d527 100644
--- a/backend/yarn.lock
+++ b/backend/yarn.lock
@@ -2,19 +2,19 @@
 # yarn lockfile v1
 
 
-"@apidevtools/json-schema-ref-parser@9.0.6":
-  version "9.0.6"
-  resolved "https://registry.yarnpkg.com/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-9.0.6.tgz#5d9000a3ac1fd25404da886da6b266adcd99cf1c"
-  integrity sha512-M3YgsLjI0lZxvrpeGVk9Ap032W6TPQkH6pRAZz81Ac3WUNF79VQooAFnp8umjvVzUmD93NkogxEwbSce7qMsUg==
+"@apidevtools/json-schema-ref-parser@11.7.2":
+  version "11.7.2"
+  resolved "https://registry.yarnpkg.com/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-11.7.2.tgz#cdf3e0aded21492364a70e193b45b7cf4177f031"
+  integrity sha512-4gY54eEGEstClvEkGnwVkTkrx0sqwemEFG5OSRRn3tD91XH0+Q8XIkYIfo7IwEWPpJZwILb9GUXeShtplRc/eA==
   dependencies:
     "@jsdevtools/ono" "^7.1.3"
-    call-me-maybe "^1.0.1"
-    js-yaml "^3.13.1"
+    "@types/json-schema" "^7.0.15"
+    js-yaml "^4.1.0"
 
 "@apidevtools/json-schema-ref-parser@^11.7.0":
-  version "11.7.0"
-  resolved "https://registry.yarnpkg.com/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-11.7.0.tgz#228d72018a0e7cbee744b677eaa01a8968f302d9"
-  integrity sha512-pRrmXMCwnmrkS3MLgAIW5dXRzeTv6GLjkjb4HmxNnvAKXN1Nfzp4KmGADBQvlVUcqi+a5D+hfGDLLnd5NnYxog==
+  version "11.9.3"
+  resolved "https://registry.yarnpkg.com/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-11.9.3.tgz#0e0c9061fc41cf03737d499a4e6a8299fdd2bfa7"
+  integrity sha512-60vepv88RwcJtSHrD6MjIL6Ta3SOYbgfnkHb+ppAVK+o9mXprRtulx7VlRl3lN3bbvysAfCS7WMVfhUYemB0IQ==
   dependencies:
     "@jsdevtools/ono" "^7.1.3"
     "@types/json-schema" "^7.0.15"
@@ -31,83 +31,86 @@
   integrity sha512-QAkD5kK2b1WfjDS/UQn/qQkbwF31uqRjPTrsCs5ZG9BQGAkjwvqGFjjPqAuzac/IYzpPtRzjCP1WrTuAIjMrXg==
 
 "@apidevtools/swagger-parser@^10.1.0":
-  version "10.1.0"
-  resolved "https://registry.yarnpkg.com/@apidevtools/swagger-parser/-/swagger-parser-10.1.0.tgz#a987d71e5be61feb623203be0c96e5985b192ab6"
-  integrity sha512-9Kt7EuS/7WbMAUv2gSziqjvxwDbFSg3Xeyfuj5laUODX8o/k/CpsAKiQ8W7/R88eXFTMbJYg6+7uAmOWNKmwnw==
+  version "10.1.1"
+  resolved "https://registry.yarnpkg.com/@apidevtools/swagger-parser/-/swagger-parser-10.1.1.tgz#e29bf17cf94b487a340e06784e9fbe20cb671c45"
+  integrity sha512-u/kozRnsPO/x8QtKYJOqoGtC4kH6yg1lfYkB9Au0WhYB0FNLpyFusttQtvhlwjtG3rOwiRz4D8DnnXa8iEpIKA==
   dependencies:
-    "@apidevtools/json-schema-ref-parser" "9.0.6"
+    "@apidevtools/json-schema-ref-parser" "11.7.2"
     "@apidevtools/openapi-schemas" "^2.1.0"
     "@apidevtools/swagger-methods" "^3.0.2"
     "@jsdevtools/ono" "^7.1.3"
-    ajv "^8.6.3"
+    ajv "^8.17.1"
     ajv-draft-04 "^1.0.0"
-    call-me-maybe "^1.0.1"
+    call-me-maybe "^1.0.2"
 
-"@eslint-community/eslint-utils@^4.2.0":
-  version "4.3.0"
-  resolved "https://registry.yarnpkg.com/@eslint-community/eslint-utils/-/eslint-utils-4.3.0.tgz#a556790523a351b4e47e9d385f47265eaaf9780a"
-  integrity sha512-v3oplH6FYCULtFuCeqyuTd9D2WKO937Dxdq+GmHOLL72TTRriLxz2VLlNfkZRsvj6PKnOPAtuT6dwrs/pA5DvA==
-  dependencies:
-    eslint-visitor-keys "^3.3.0"
+"@biomejs/biome@^2.3.1":
+  version "2.3.1"
+  resolved "https://registry.yarnpkg.com/@biomejs/biome/-/biome-2.3.1.tgz#d1a9284f52986324f288cdaf450331a0f3fb1da7"
+  integrity sha512-A29evf1R72V5bo4o2EPxYMm5mtyGvzp2g+biZvRFx29nWebGyyeOSsDWGx3tuNNMFRepGwxmA9ZQ15mzfabK2w==
+  optionalDependencies:
+    "@biomejs/cli-darwin-arm64" "2.3.1"
+    "@biomejs/cli-darwin-x64" "2.3.1"
+    "@biomejs/cli-linux-arm64" "2.3.1"
+    "@biomejs/cli-linux-arm64-musl" "2.3.1"
+    "@biomejs/cli-linux-x64" "2.3.1"
+    "@biomejs/cli-linux-x64-musl" "2.3.1"
+    "@biomejs/cli-win32-arm64" "2.3.1"
+    "@biomejs/cli-win32-x64" "2.3.1"
 
-"@eslint-community/regexpp@^4.4.0":
-  version "4.4.0"
-  resolved "https://registry.yarnpkg.com/@eslint-community/regexpp/-/regexpp-4.4.0.tgz#3e61c564fcd6b921cb789838631c5ee44df09403"
-  integrity sha512-A9983Q0LnDGdLPjxyXQ00sbV+K+O+ko2Dr+CZigbHWtX9pNfxlaBkMR8X1CztI73zuEyEBXTVjx7CE+/VSwDiQ==
+"@biomejs/cli-darwin-arm64@2.3.1":
+  version "2.3.1"
+  resolved "https://registry.yarnpkg.com/@biomejs/cli-darwin-arm64/-/cli-darwin-arm64-2.3.1.tgz#607835f8ef043e1a80f9ad2a232c9e860941ab60"
+  integrity sha512-ombSf3MnTUueiYGN1SeI9tBCsDUhpWzOwS63Dove42osNh0PfE1cUtHFx6eZ1+MYCCLwXzlFlYFdrJ+U7h6LcA==
 
-"@eslint/eslintrc@^2.0.1":
-  version "2.0.1"
-  resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-2.0.1.tgz#7888fe7ec8f21bc26d646dbd2c11cd776e21192d"
-  integrity sha512-eFRmABvW2E5Ho6f5fHLqgena46rOj7r7OKHYfLElqcBfGFHHpjBhivyi5+jOEQuSpdc/1phIZJlbC2te+tZNIw==
-  dependencies:
-    ajv "^6.12.4"
-    debug "^4.3.2"
-    espree "^9.5.0"
-    globals "^13.19.0"
-    ignore "^5.2.0"
-    import-fresh "^3.2.1"
-    js-yaml "^4.1.0"
-    minimatch "^3.1.2"
-    strip-json-comments "^3.1.1"
+"@biomejs/cli-darwin-x64@2.3.1":
+  version "2.3.1"
+  resolved "https://registry.yarnpkg.com/@biomejs/cli-darwin-x64/-/cli-darwin-x64-2.3.1.tgz#654fe4aaa8ea5d5bde5457db4961ad5d214713ac"
+  integrity sha512-pcOfwyoQkrkbGvXxRvZNe5qgD797IowpJPovPX5biPk2FwMEV+INZqfCaz4G5bVq9hYnjwhRMamg11U4QsRXrQ==
 
-"@eslint/js@8.36.0":
-  version "8.36.0"
-  resolved "https://registry.yarnpkg.com/@eslint/js/-/js-8.36.0.tgz#9837f768c03a1e4a30bd304a64fb8844f0e72efe"
-  integrity sha512-lxJ9R5ygVm8ZWgYdUweoq5ownDlJ4upvoWmO4eLxBYHdMo+vZ/Rx0EN6MbKWDJOSUGrqJy2Gt+Dyv/VKml0fjg==
+"@biomejs/cli-linux-arm64-musl@2.3.1":
+  version "2.3.1"
+  resolved "https://registry.yarnpkg.com/@biomejs/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.3.1.tgz#5fe502082a575c31ef808cf080cbcd4485964167"
+  integrity sha512-+DZYv8l7FlUtTrWs1Tdt1KcNCAmRO87PyOnxKGunbWm5HKg1oZBSbIIPkjrCtDZaeqSG1DiGx7qF+CPsquQRcg==
+
+"@biomejs/cli-linux-arm64@2.3.1":
+  version "2.3.1"
+  resolved "https://registry.yarnpkg.com/@biomejs/cli-linux-arm64/-/cli-linux-arm64-2.3.1.tgz#81c02547905d379dbb312e6ff24b04908c2e320f"
+  integrity sha512-td5O8pFIgLs8H1sAZsD6v+5quODihyEw4nv2R8z7swUfIK1FKk+15e4eiYVLcAE4jUqngvh4j3JCNgg0Y4o4IQ==
+
+"@biomejs/cli-linux-x64-musl@2.3.1":
+  version "2.3.1"
+  resolved "https://registry.yarnpkg.com/@biomejs/cli-linux-x64-musl/-/cli-linux-x64-musl-2.3.1.tgz#c7c00beb5eda1ad25185544897e66eeec6be3b0b"
+  integrity sha512-Y3Ob4nqgv38Mh+6EGHltuN+Cq8aj/gyMTJYzkFZV2AEj+9XzoXB9VNljz9pjfFNHUxvLEV4b55VWyxozQTBaUQ==
+
+"@biomejs/cli-linux-x64@2.3.1":
+  version "2.3.1"
+  resolved "https://registry.yarnpkg.com/@biomejs/cli-linux-x64/-/cli-linux-x64-2.3.1.tgz#7481d2e7be98d4de574df233766a5bdda037c897"
+  integrity sha512-PYWgEO7up7XYwSAArOpzsVCiqxBCXy53gsReAb1kKYIyXaoAlhBaBMvxR/k2Rm9aTuZ662locXUmPk/Aj+Xu+Q==
+
+"@biomejs/cli-win32-arm64@2.3.1":
+  version "2.3.1"
+  resolved "https://registry.yarnpkg.com/@biomejs/cli-win32-arm64/-/cli-win32-arm64-2.3.1.tgz#dac8c7c7223e97f86cd0eed7aa95584984761481"
+  integrity sha512-RHIG/zgo+69idUqVvV3n8+j58dKYABRpMyDmfWu2TITC+jwGPiEaT0Q3RKD+kQHiS80mpBrST0iUGeEXT0bU9A==
+
+"@biomejs/cli-win32-x64@2.3.1":
+  version "2.3.1"
+  resolved "https://registry.yarnpkg.com/@biomejs/cli-win32-x64/-/cli-win32-x64-2.3.1.tgz#f8818ab2c1e3a6e2ed8a656935173e5ce4c720be"
+  integrity sha512-izl30JJ5Dp10mi90Eko47zhxE6pYyWPcnX1NQxKpL/yMhXxf95oLTzfpu4q+MDBh/gemNqyJEwjBpe0MT5iWPA==
 
 "@gar/promisify@^1.0.1":
   version "1.1.3"
   resolved "https://registry.yarnpkg.com/@gar/promisify/-/promisify-1.1.3.tgz#555193ab2e3bb3b6adc3d551c9c030d9e860daf6"
   integrity sha512-k2Ty1JcVojjJFwrg/ThKi2ujJ7XNLYaFGNB/bWT9wGR+oSMJHMa5w+CUq6p/pVrKeNNgA7pCqEcjSnHVoqJQFw==
 
-"@humanwhocodes/config-array@^0.11.8":
-  version "0.11.8"
-  resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.11.8.tgz#03595ac2075a4dc0f191cc2131de14fbd7d410b9"
-  integrity sha512-UybHIJzJnR5Qc/MsD9Kr+RpO2h+/P1GhOwdiLPXK5TWk5sgTdu88bTD9UP+CKbPPh5Rni1u0GjAdYQLemG8g+g==
-  dependencies:
-    "@humanwhocodes/object-schema" "^1.2.1"
-    debug "^4.1.1"
-    minimatch "^3.0.5"
-
-"@humanwhocodes/module-importer@^1.0.1":
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz#af5b2691a22b44be847b0ca81641c5fb6ad0172c"
-  integrity sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==
-
-"@humanwhocodes/object-schema@^1.2.1":
-  version "1.2.1"
-  resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45"
-  integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==
-
 "@jsdevtools/ono@^7.1.3":
   version "7.1.3"
   resolved "https://registry.yarnpkg.com/@jsdevtools/ono/-/ono-7.1.3.tgz#9df03bbd7c696a5c58885c34aa06da41c8543796"
   integrity sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg==
 
-"@mapbox/node-pre-gyp@^1.0.0":
-  version "1.0.10"
-  resolved "https://registry.yarnpkg.com/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.10.tgz#8e6735ccebbb1581e5a7e652244cadc8a844d03c"
-  integrity sha512-4ySo4CjzStuprMwk35H5pPbkymjv1SF3jGLj6rAHp/xT/RF7TL7bd9CTm1xDY49K2qF7jmR/g7k+SkLETP6opA==
+"@mapbox/node-pre-gyp@^1.0.11":
+  version "1.0.11"
+  resolved "https://registry.yarnpkg.com/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.11.tgz#417db42b7f5323d79e93b34a6d7a2a12c0df43fa"
+  integrity sha512-Yhlar6v9WQgUp/He7BdgzOz8lqMQ8sU+jkCq7Wx8Myc5YFJLbEe7lgui/V7G1qB1DJykHSGwreceSaD60Y0PUQ==
   dependencies:
     detect-libc "^2.0.0"
     https-proxy-agent "^5.0.0"
@@ -119,27 +122,6 @@
     semver "^7.3.5"
     tar "^6.1.11"
 
-"@nodelib/fs.scandir@2.1.5":
-  version "2.1.5"
-  resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5"
-  integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==
-  dependencies:
-    "@nodelib/fs.stat" "2.0.5"
-    run-parallel "^1.1.9"
-
-"@nodelib/fs.stat@2.0.5":
-  version "2.0.5"
-  resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b"
-  integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==
-
-"@nodelib/fs.walk@^1.2.8":
-  version "1.2.8"
-  resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a"
-  integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==
-  dependencies:
-    "@nodelib/fs.scandir" "2.1.5"
-    fastq "^1.6.0"
-
 "@npmcli/fs@^1.0.0":
   version "1.1.1"
   resolved "https://registry.yarnpkg.com/@npmcli/fs/-/fs-1.1.1.tgz#72f719fe935e687c56a4faecf3c03d06ba593257"
@@ -156,28 +138,11 @@
     mkdirp "^1.0.4"
     rimraf "^3.0.2"
 
-"@sindresorhus/is@^0.14.0":
-  version "0.14.0"
-  resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-0.14.0.tgz#9fb3a3cf3132328151f353de4632e01e52102bea"
-  integrity sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ==
-
-"@szmarczak/http-timer@^1.1.2":
-  version "1.1.2"
-  resolved "https://registry.yarnpkg.com/@szmarczak/http-timer/-/http-timer-1.1.2.tgz#b1665e2c461a2cd92f4c1bbf50d5454de0d4b421"
-  integrity sha512-XIB2XbzHTN6ieIjfIMV9hlVcfPU26s2vafYWQcZHWXHOxiaRZYEDKEwdl129Zyg50+foYV2jCgtrqSA6qNuNSA==
-  dependencies:
-    defer-to-connect "^1.0.1"
-
 "@tootallnate/once@1":
   version "1.1.2"
   resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82"
   integrity sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==
 
-"@types/color-name@^1.1.1":
-  version "1.1.1"
-  resolved "https://registry.yarnpkg.com/@types/color-name/-/color-name-1.1.1.tgz#1c1261bbeaa10a8055bbc5d8ab84b7b2afc846a0"
-  integrity sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==
-
 "@types/json-schema@^7.0.15":
   version "7.0.15"
   resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.15.tgz#596a1747233694d50f6ad8a7869fcb6f56cf5841"
@@ -188,7 +153,7 @@ abbrev@1:
   resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8"
   integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==
 
-accepts@~1.3.5, accepts@~1.3.8:
+accepts@~1.3.8:
   version "1.3.8"
   resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e"
   integrity sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==
@@ -196,16 +161,6 @@ accepts@~1.3.5, accepts@~1.3.8:
     mime-types "~2.1.34"
     negotiator "0.6.3"
 
-acorn-jsx@^5.3.2:
-  version "5.3.2"
-  resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937"
-  integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==
-
-acorn@^8.8.0:
-  version "8.8.2"
-  resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.8.2.tgz#1b2f25db02af965399b9776b0c2c391276d37c4a"
-  integrity sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw==
-
 agent-base@6, agent-base@^6.0.2:
   version "6.0.2"
   resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77"
@@ -214,12 +169,10 @@ agent-base@6, agent-base@^6.0.2:
     debug "4"
 
 agentkeepalive@^4.1.3:
-  version "4.3.0"
-  resolved "https://registry.yarnpkg.com/agentkeepalive/-/agentkeepalive-4.3.0.tgz#bb999ff07412653c1803b3ced35e50729830a255"
-  integrity sha512-7Epl1Blf4Sy37j4v9f9FjICCh4+KAQOyXgHEwlyBiAQLbhKdq/i2QQU3amQalS/wPhdPzDXPL5DMR5bkn+YeWg==
+  version "4.6.0"
+  resolved "https://registry.yarnpkg.com/agentkeepalive/-/agentkeepalive-4.6.0.tgz#35f73e94b3f40bf65f105219c623ad19c136ea6a"
+  integrity sha512-kja8j7PjmncONqaTsB8fQ+wE2mSU2DJ9D4XKoJ5PFWIdRMa6SLSN1ff4mOr4jCbfRSsxR4keIiySJU0N9T5hIQ==
   dependencies:
-    debug "^4.1.0"
-    depd "^2.0.0"
     humanize-ms "^1.2.1"
 
 aggregate-error@^3.0.0:
@@ -235,17 +188,7 @@ ajv-draft-04@^1.0.0:
   resolved "https://registry.yarnpkg.com/ajv-draft-04/-/ajv-draft-04-1.0.0.tgz#3b64761b268ba0b9e668f0b41ba53fce0ad77fc8"
   integrity sha512-mv00Te6nmYbRp5DCwclxtt7yV/joXJPGS7nM+97GdxvuttCOfgI3K4U25zboyeX0O+myI8ERluxQe5wljMmVIw==
 
-ajv@^6.10.0, ajv@^6.12.4:
-  version "6.12.6"
-  resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4"
-  integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==
-  dependencies:
-    fast-deep-equal "^3.1.1"
-    fast-json-stable-stringify "^2.0.0"
-    json-schema-traverse "^0.4.1"
-    uri-js "^4.2.2"
-
-ajv@^8.17.1, ajv@^8.6.3:
+ajv@^8.17.1, ajv@^8.6.2:
   version "8.17.1"
   resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.17.1.tgz#37d9a5c776af6bc92d7f4f9510eba4c0a60d11a6"
   integrity sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==
@@ -255,43 +198,6 @@ ajv@^8.17.1, ajv@^8.6.3:
     json-schema-traverse "^1.0.0"
     require-from-string "^2.0.2"
 
-ajv@^8.6.2:
-  version "8.12.0"
-  resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.12.0.tgz#d1a0527323e22f53562c567c00991577dfbe19d1"
-  integrity sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==
-  dependencies:
-    fast-deep-equal "^3.1.1"
-    json-schema-traverse "^1.0.0"
-    require-from-string "^2.0.2"
-    uri-js "^4.2.2"
-
-ansi-align@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/ansi-align/-/ansi-align-3.0.0.tgz#b536b371cf687caaef236c18d3e21fe3797467cb"
-  integrity sha512-ZpClVKqXN3RGBmKibdfWzqCY4lnjEuoNzU5T0oEFpfd/z5qJHVarukridD4juLO2FXMiwUQxr9WqQtaYa8XRYw==
-  dependencies:
-    string-width "^3.0.0"
-
-ansi-regex@^2.0.0:
-  version "2.1.1"
-  resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df"
-  integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8=
-
-ansi-regex@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998"
-  integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=
-
-ansi-regex@^4.1.0:
-  version "4.1.0"
-  resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.0.tgz#8b9f8f08cf1acb843756a839ca8c7e3168c51997"
-  integrity sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==
-
-ansi-regex@^5.0.0:
-  version "5.0.0"
-  resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.0.tgz#388539f55179bf39339c81af30a654d69f87cb75"
-  integrity sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==
-
 ansi-regex@^5.0.1:
   version "5.0.1"
   resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304"
@@ -305,30 +211,24 @@ ansi-styles@^3.2.1:
     color-convert "^1.9.0"
 
 ansi-styles@^4.0.0, ansi-styles@^4.1.0:
-  version "4.2.1"
-  resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.2.1.tgz#90ae75c424d008d2624c5bf29ead3177ebfcf359"
-  integrity sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==
+  version "4.3.0"
+  resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937"
+  integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==
   dependencies:
-    "@types/color-name" "^1.1.1"
     color-convert "^2.0.1"
 
-anymatch@~3.1.1:
-  version "3.1.1"
-  resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.1.tgz#c55ecf02185e2469259399310c173ce31233b142"
-  integrity sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg==
+anymatch@~3.1.2:
+  version "3.1.3"
+  resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.3.tgz#790c58b19ba1720a84205b57c618d5ad8524973e"
+  integrity sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==
   dependencies:
     normalize-path "^3.0.0"
     picomatch "^2.0.4"
 
-aproba@^1.0.3:
-  version "1.2.0"
-  resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a"
-  integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==
-
 "aproba@^1.0.3 || ^2.0.0":
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/aproba/-/aproba-2.0.0.tgz#52520b8ae5b569215b354efc0caa3fe1e45a8adc"
-  integrity sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==
+  version "2.1.0"
+  resolved "https://registry.yarnpkg.com/aproba/-/aproba-2.1.0.tgz#75500a190313d95c64e871e7e4284c6ac219f0b1"
+  integrity sha512-tLIEcj5GuR2RSTnxNKdkK0dJ/GrC7P38sUkiDmDuHfsHmbagTFAxDVIBltoklXEVIQ/f14IL8IMJ5pn9Hez1Ew==
 
 archiver-utils@^2.1.0:
   version "2.1.0"
@@ -346,16 +246,32 @@ archiver-utils@^2.1.0:
     normalize-path "^3.0.0"
     readable-stream "^2.0.0"
 
+archiver-utils@^3.0.4:
+  version "3.0.4"
+  resolved "https://registry.yarnpkg.com/archiver-utils/-/archiver-utils-3.0.4.tgz#a0d201f1cf8fce7af3b5a05aea0a337329e96ec7"
+  integrity sha512-KVgf4XQVrTjhyWmx6cte4RxonPLR9onExufI1jhvw/MQ4BB6IsZD5gT8Lq+u/+pRkWna/6JoHpiQioaqFP5Rzw==
+  dependencies:
+    glob "^7.2.3"
+    graceful-fs "^4.2.0"
+    lazystream "^1.0.0"
+    lodash.defaults "^4.2.0"
+    lodash.difference "^4.5.0"
+    lodash.flatten "^4.4.0"
+    lodash.isplainobject "^4.0.6"
+    lodash.union "^4.6.0"
+    normalize-path "^3.0.0"
+    readable-stream "^3.6.0"
+
 archiver@^5.3.0:
-  version "5.3.0"
-  resolved "https://registry.yarnpkg.com/archiver/-/archiver-5.3.0.tgz#dd3e097624481741df626267564f7dd8640a45ba"
-  integrity sha512-iUw+oDwK0fgNpvveEsdQ0Ase6IIKztBJU2U0E9MzszMfmVVUyv1QJhS2ITW9ZCqx8dktAxVAjWWkKehuZE8OPg==
+  version "5.3.2"
+  resolved "https://registry.yarnpkg.com/archiver/-/archiver-5.3.2.tgz#99991d5957e53bd0303a392979276ac4ddccf3b0"
+  integrity sha512-+25nxyyznAXF7Nef3y0EbBeqmGZgeN/BxHX29Rs39djAfaFalmQ89SE6CWyDCHzGL0yt/ycBtNOmGTW0FyGWNw==
   dependencies:
     archiver-utils "^2.1.0"
-    async "^3.2.0"
+    async "^3.2.4"
     buffer-crc32 "^0.2.1"
     readable-stream "^3.6.0"
-    readdir-glob "^1.0.0"
+    readdir-glob "^1.1.2"
     tar-stream "^2.2.0"
     zip-stream "^4.1.0"
 
@@ -375,21 +291,6 @@ are-we-there-yet@^3.0.0:
     delegates "^1.0.0"
     readable-stream "^3.6.0"
 
-are-we-there-yet@~1.1.2:
-  version "1.1.5"
-  resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz#4b35c2944f062a8bfcda66410760350fe9ddfc21"
-  integrity sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w==
-  dependencies:
-    delegates "^1.0.0"
-    readable-stream "^2.0.6"
-
-argparse@^1.0.7:
-  version "1.0.10"
-  resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911"
-  integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==
-  dependencies:
-    sprintf-js "~1.0.2"
-
 argparse@^2.0.1:
   version "2.0.1"
   resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38"
@@ -398,19 +299,19 @@ argparse@^2.0.1:
 array-flatten@1.1.1:
   version "1.1.1"
   resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2"
-  integrity sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=
+  integrity sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==
 
 asn1@^0.2.4:
-  version "0.2.4"
-  resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.4.tgz#8d2475dfab553bb33e77b54e59e880bb8ce23136"
-  integrity sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==
+  version "0.2.6"
+  resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.6.tgz#0d3a7bb6e64e02a90c0303b31f292868ea09a08d"
+  integrity sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==
   dependencies:
     safer-buffer "~2.1.0"
 
-async@^3.2.0:
-  version "3.2.4"
-  resolved "https://registry.yarnpkg.com/async/-/async-3.2.4.tgz#2d22e00f8cddeb5fde5dd33522b56d1cf569a81c"
-  integrity sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ==
+async@^3.2.4:
+  version "3.2.6"
+  resolved "https://registry.yarnpkg.com/async/-/async-3.2.6.tgz#1b0728e14929d51b85b449b7f06e27c1145e38ce"
+  integrity sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==
 
 aws-ssl-profiles@^1.1.1:
   version "1.1.2"
@@ -418,9 +319,9 @@ aws-ssl-profiles@^1.1.1:
   integrity sha512-NZKeq9AfyQvEeNlN0zSYAaWrmBffJh3IELMZfRpJVWgrpEbtEpnjvzqBPf+mxoI287JohRDoa+/nsfqqiZmF6g==
 
 balanced-match@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767"
-  integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c=
+  version "1.0.2"
+  resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee"
+  integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==
 
 base64-js@^1.3.1:
   version "1.5.1"
@@ -430,20 +331,27 @@ base64-js@^1.3.1:
 batchflow@^0.4.0:
   version "0.4.0"
   resolved "https://registry.yarnpkg.com/batchflow/-/batchflow-0.4.0.tgz#7d419df79b6b7587b06f9ea34f96ccef6f74e5b5"
-  integrity sha1-fUGd95trdYewb56jT5bM72905bU=
+  integrity sha512-XwQQoCGPUjdLWzmpAvRNZc91wnBYuKLmj52d9LLZ1Ww06ow5RBqBt8kUmU9/3ZvPq88j7Elh3V4cEhgNKXbIlQ==
 
 bcrypt@^5.0.0:
-  version "5.0.0"
-  resolved "https://registry.yarnpkg.com/bcrypt/-/bcrypt-5.0.0.tgz#051407c7cd5ffbfb773d541ca3760ea0754e37e2"
-  integrity sha512-jB0yCBl4W/kVHM2whjfyqnxTmOHkCX4kHEa5nYKSoGeYe8YrjTYTc87/6bwt1g8cmV0QrbhKriETg9jWtcREhg==
+  version "5.1.1"
+  resolved "https://registry.yarnpkg.com/bcrypt/-/bcrypt-5.1.1.tgz#0f732c6dcb4e12e5b70a25e326a72965879ba6e2"
+  integrity sha512-AGBHOG5hPYZ5Xl9KXzU5iKq9516yEmvCKDg3ecP5kX2aB6UqTeXZxk2ELnDgDm6BQSMlLt9rDB4LoSMx0rYwww==
   dependencies:
-    node-addon-api "^3.0.0"
-    node-pre-gyp "0.15.0"
+    "@mapbox/node-pre-gyp" "^1.0.11"
+    node-addon-api "^5.0.0"
 
 binary-extensions@^2.0.0:
-  version "2.1.0"
-  resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.1.0.tgz#30fa40c9e7fe07dbc895678cd287024dea241dd9"
-  integrity sha512-1Yj8h9Q+QDF5FzhMs/c9+6UntbD5MkRfRwac8DoEm9ZfUBZ7tZ55YcGVAzEe4bXsdQHEk+s9S5wsOKVdZrw0tQ==
+  version "2.3.0"
+  resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.3.0.tgz#f6e14a97858d327252200242d4ccfe522c445522"
+  integrity sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==
+
+bindings@^1.5.0:
+  version "1.5.0"
+  resolved "https://registry.yarnpkg.com/bindings/-/bindings-1.5.0.tgz#10353c9e945334bc0511a6d90b38fbc7c9c504df"
+  integrity sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==
+  dependencies:
+    file-uri-to-path "1.0.0"
 
 bl@^4.0.3:
   version "4.1.0"
@@ -455,9 +363,9 @@ bl@^4.0.3:
     readable-stream "^3.4.0"
 
 blueimp-md5@^2.16.0:
-  version "2.17.0"
-  resolved "https://registry.yarnpkg.com/blueimp-md5/-/blueimp-md5-2.17.0.tgz#f4fcac088b115f7b4045f19f5da59e9d01b1bb96"
-  integrity sha512-x5PKJHY5rHQYaADj6NwPUR2QRCUVSggPzrUKkeENpj871o9l9IefJbO2jkT5UvYykeOK9dx0VmkIo6dZ+vThYw==
+  version "2.19.0"
+  resolved "https://registry.yarnpkg.com/blueimp-md5/-/blueimp-md5-2.19.0.tgz#b53feea5498dcb53dc6ec4b823adb84b729c4af0"
+  integrity sha512-DRQrD6gJyy8FbiE4s+bDoXS9hiW3Vbx5uCdwvcCf3zLHL+Iv7LtGHLpr+GZV8rHG8tK766FGYBwRbu8pELTt+w==
 
 body-parser@1.20.3, body-parser@^1.20.3:
   version "1.20.3"
@@ -477,20 +385,6 @@ body-parser@1.20.3, body-parser@^1.20.3:
     type-is "~1.6.18"
     unpipe "1.0.0"
 
-boxen@^4.2.0:
-  version "4.2.0"
-  resolved "https://registry.yarnpkg.com/boxen/-/boxen-4.2.0.tgz#e411b62357d6d6d36587c8ac3d5d974daa070e64"
-  integrity sha512-eB4uT9RGzg2odpER62bBwSLvUeGC+WbRjjyyFhGsKnc8wp/m0+hQsMUvUe3H2V0D5vw0nBdO1hCJoZo5mKeuIQ==
-  dependencies:
-    ansi-align "^3.0.0"
-    camelcase "^5.3.1"
-    chalk "^3.0.0"
-    cli-boxes "^2.2.0"
-    string-width "^4.1.0"
-    term-size "^2.1.0"
-    type-fest "^0.8.1"
-    widest-line "^3.1.0"
-
 brace-expansion@^1.1.7:
   version "1.1.12"
   resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.12.tgz#ab9b454466e5a8cc3a187beaad580412a9c5b843"
@@ -499,6 +393,13 @@ brace-expansion@^1.1.7:
     balanced-match "^1.0.0"
     concat-map "0.0.1"
 
+brace-expansion@^2.0.1:
+  version "2.0.2"
+  resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.2.tgz#54fc53237a613d854c7bd37463aad17df87214e7"
+  integrity sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==
+  dependencies:
+    balanced-match "^1.0.0"
+
 braces@~3.0.2:
   version "3.0.3"
   resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.3.tgz#490332f40919452272d55a8480adc0c441358789"
@@ -509,12 +410,12 @@ braces@~3.0.2:
 buffer-crc32@^0.2.1, buffer-crc32@^0.2.13:
   version "0.2.13"
   resolved "https://registry.yarnpkg.com/buffer-crc32/-/buffer-crc32-0.2.13.tgz#0d333e3f00eac50aa1454abd30ef8c2a5d9a7242"
-  integrity sha1-DTM+PwDqxQqhRUq9MO+MKl2ackI=
+  integrity sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==
 
-buffer-equal-constant-time@1.0.1:
+buffer-equal-constant-time@^1.0.1:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz#f8e71132f7ffe6e01a5c9697a4c6f3e48d5cc819"
-  integrity sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk=
+  integrity sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==
 
 buffer@^5.5.0:
   version "5.7.1"
@@ -524,17 +425,12 @@ buffer@^5.5.0:
     base64-js "^1.3.1"
     ieee754 "^1.1.13"
 
-busboy@^0.3.1:
-  version "0.3.1"
-  resolved "https://registry.yarnpkg.com/busboy/-/busboy-0.3.1.tgz#170899274c5bf38aae27d5c62b71268cd585fd1b"
-  integrity sha512-y7tTxhGKXcyBxRKAni+awqx8uqaJKrSFSNFSeRG5CsWNdmy2BIK+6VGWEW7TZnIO/533mtMEA4rOevQV815YJw==
+busboy@^1.6.0:
+  version "1.6.0"
+  resolved "https://registry.yarnpkg.com/busboy/-/busboy-1.6.0.tgz#966ea36a9502e43cdb9146962523b92f531f6893"
+  integrity sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==
   dependencies:
-    dicer "0.3.0"
-
-bytes@3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048"
-  integrity sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=
+    streamsearch "^1.1.0"
 
 bytes@3.1.2:
   version "3.1.2"
@@ -565,46 +461,33 @@ cacache@^15.2.0:
     tar "^6.0.2"
     unique-filename "^1.1.1"
 
-cacheable-request@^6.0.0:
-  version "6.1.0"
-  resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-6.1.0.tgz#20ffb8bd162ba4be11e9567d823db651052ca912"
-  integrity sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg==
+call-bind-apply-helpers@^1.0.1, call-bind-apply-helpers@^1.0.2:
+  version "1.0.2"
+  resolved "https://registry.yarnpkg.com/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz#4b5428c222be985d79c3d82657479dbe0b59b2d6"
+  integrity sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==
   dependencies:
-    clone-response "^1.0.2"
-    get-stream "^5.1.0"
-    http-cache-semantics "^4.0.0"
-    keyv "^3.0.0"
-    lowercase-keys "^2.0.0"
-    normalize-url "^4.1.0"
-    responselike "^1.0.2"
-
-call-bind@^1.0.7:
-  version "1.0.7"
-  resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.7.tgz#06016599c40c56498c18769d2730be242b6fa3b9"
-  integrity sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==
-  dependencies:
-    es-define-property "^1.0.0"
     es-errors "^1.3.0"
     function-bind "^1.1.2"
-    get-intrinsic "^1.2.4"
-    set-function-length "^1.2.1"
 
-call-me-maybe@^1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/call-me-maybe/-/call-me-maybe-1.0.1.tgz#26d208ea89e37b5cbde60250a15f031c16a4d66b"
-  integrity sha1-JtII6onje1y95gJQoV8DHBak1ms=
+call-bound@^1.0.2:
+  version "1.0.4"
+  resolved "https://registry.yarnpkg.com/call-bound/-/call-bound-1.0.4.tgz#238de935d2a2a692928c538c7ccfa91067fd062a"
+  integrity sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==
+  dependencies:
+    call-bind-apply-helpers "^1.0.2"
+    get-intrinsic "^1.3.0"
 
-callsites@^3.0.0:
-  version "3.1.0"
-  resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73"
-  integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==
+call-me-maybe@^1.0.2:
+  version "1.0.2"
+  resolved "https://registry.yarnpkg.com/call-me-maybe/-/call-me-maybe-1.0.2.tgz#03f964f19522ba643b1b0693acb9152fe2074baa"
+  integrity sha512-HpX65o1Hnr9HH25ojC1YGs7HCQLq0GCOibSaWER0eNpgJ/Z1MZv2mTc7+xh6WOPxbRVcmgbv4hGU+uSQ/2xFZQ==
 
-camelcase@^5.0.0, camelcase@^5.3.1:
+camelcase@^5.0.0:
   version "5.3.1"
   resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320"
   integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==
 
-chalk@4.1.2, chalk@^4.0.0:
+chalk@4.1.2:
   version "4.1.2"
   resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01"
   integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==
@@ -621,30 +504,22 @@ chalk@^2.3.2:
     escape-string-regexp "^1.0.5"
     supports-color "^5.3.0"
 
-chalk@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/chalk/-/chalk-3.0.0.tgz#3f73c2bf526591f574cc492c51e2456349f844e4"
-  integrity sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==
+chokidar@^3.5.2:
+  version "3.6.0"
+  resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.6.0.tgz#197c6cc669ef2a8dc5e7b4d97ee4e092c3eb0d5b"
+  integrity sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==
   dependencies:
-    ansi-styles "^4.1.0"
-    supports-color "^7.1.0"
-
-chokidar@^3.2.2:
-  version "3.4.1"
-  resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.4.1.tgz#e905bdecf10eaa0a0b1db0c664481cc4cbc22ba1"
-  integrity sha512-TQTJyr2stihpC4Sya9hs2Xh+O2wf+igjL36Y75xx2WdHuiICcn/XJza46Jwt0eT5hVpQOzo3FpY3cj3RVYLX0g==
-  dependencies:
-    anymatch "~3.1.1"
+    anymatch "~3.1.2"
     braces "~3.0.2"
-    glob-parent "~5.1.0"
+    glob-parent "~5.1.2"
     is-binary-path "~2.1.0"
     is-glob "~4.0.1"
     normalize-path "~3.0.0"
-    readdirp "~3.4.0"
+    readdirp "~3.6.0"
   optionalDependencies:
-    fsevents "~2.1.2"
+    fsevents "~2.3.2"
 
-chownr@^1.1.4:
+chownr@^1.1.1:
   version "1.1.4"
   resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b"
   integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==
@@ -654,21 +529,11 @@ chownr@^2.0.0:
   resolved "https://registry.yarnpkg.com/chownr/-/chownr-2.0.0.tgz#15bfbe53d2eab4cf70f18a8cd68ebe5b3cb1dece"
   integrity sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==
 
-ci-info@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-2.0.0.tgz#67a9e964be31a51e15e5010d58e6f12834002f46"
-  integrity sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==
-
 clean-stack@^2.0.0:
   version "2.2.0"
   resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b"
   integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==
 
-cli-boxes@^2.2.0:
-  version "2.2.0"
-  resolved "https://registry.yarnpkg.com/cli-boxes/-/cli-boxes-2.2.0.tgz#538ecae8f9c6ca508e3c3c95b453fe93cb4c168d"
-  integrity sha512-gpaBrMAizVEANOpfZp/EEUixTXDyGt7DFzdK5hU+UbWt/J0lB0w20ncZj59Z9a93xHb9u12zF5BS6i9RKbtg4w==
-
 cliui@^6.0.0:
   version "6.0.0"
   resolved "https://registry.yarnpkg.com/cliui/-/cliui-6.0.0.tgz#511d702c0c4e41ca156d7d0e96021f23e13225b1"
@@ -678,18 +543,6 @@ cliui@^6.0.0:
     strip-ansi "^6.0.0"
     wrap-ansi "^6.2.0"
 
-clone-response@^1.0.2:
-  version "1.0.2"
-  resolved "https://registry.yarnpkg.com/clone-response/-/clone-response-1.0.2.tgz#d1dc973920314df67fbeb94223b4ee350239e96b"
-  integrity sha1-0dyXOSAxTfZ/vrlCI7TuNQI56Ws=
-  dependencies:
-    mimic-response "^1.0.0"
-
-code-point-at@^1.0.0:
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77"
-  integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=
-
 color-convert@^1.9.0:
   version "1.9.3"
   resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8"
@@ -707,7 +560,7 @@ color-convert@^2.0.1:
 color-name@1.1.3:
   version "1.1.3"
   resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25"
-  integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=
+  integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==
 
 color-name@~1.1.4:
   version "1.1.4"
@@ -725,26 +578,26 @@ colorette@2.0.19:
   integrity sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ==
 
 commander@^10.0.0:
-  version "10.0.0"
-  resolved "https://registry.yarnpkg.com/commander/-/commander-10.0.0.tgz#71797971162cd3cf65f0b9d24eb28f8d303acdf1"
-  integrity sha512-zS5PnTI22FIRM6ylNW8G4Ap0IEOyk62fhLSD0+uHRT9McRCLGpkVNvao4bjimpK/GShynyQkFFxHhwMcETmduA==
+  version "10.0.1"
+  resolved "https://registry.yarnpkg.com/commander/-/commander-10.0.1.tgz#881ee46b4f77d1c1dccc5823433aa39b022cbe06"
+  integrity sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==
 
 commander@^9.1.0:
   version "9.5.0"
   resolved "https://registry.yarnpkg.com/commander/-/commander-9.5.0.tgz#bc08d1eb5cedf7ccb797a96199d41c7bc3e60d30"
   integrity sha512-KRs7WVDKg86PWiuAqhDrAQnTXZKraVcCc6vFdL14qrZ/DcWwuRo7VoiYXalXO7S5GKpqYiVEwCbgFDfxNHKJBQ==
 
-compress-commons@^4.1.0:
-  version "4.1.1"
-  resolved "https://registry.yarnpkg.com/compress-commons/-/compress-commons-4.1.1.tgz#df2a09a7ed17447642bad10a85cc9a19e5c42a7d"
-  integrity sha512-QLdDLCKNV2dtoTorqgxngQCMA+gWXkM/Nwu7FpeBhk/RdkzimqC3jueb/FDmaZeXh+uby1jkBqE3xArsLBE5wQ==
+compress-commons@^4.1.2:
+  version "4.1.2"
+  resolved "https://registry.yarnpkg.com/compress-commons/-/compress-commons-4.1.2.tgz#6542e59cb63e1f46a8b21b0e06f9a32e4c8b06df"
+  integrity sha512-D3uMHtGc/fcO1Gt1/L7i1e33VOvD4A9hfQLP+6ewd+BvG/gQ84Yh4oftEhAdjSMgBgwGL+jsppT7JYNpo6MHHg==
   dependencies:
     buffer-crc32 "^0.2.13"
     crc32-stream "^4.0.2"
     normalize-path "^3.0.0"
     readable-stream "^3.6.0"
 
-compressible@~2.0.16:
+compressible@~2.0.18:
   version "2.0.18"
   resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba"
   integrity sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==
@@ -752,39 +605,27 @@ compressible@~2.0.16:
     mime-db ">= 1.43.0 < 2"
 
 compression@^1.7.4:
-  version "1.7.4"
-  resolved "https://registry.yarnpkg.com/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f"
-  integrity sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ==
+  version "1.8.1"
+  resolved "https://registry.yarnpkg.com/compression/-/compression-1.8.1.tgz#4a45d909ac16509195a9a28bd91094889c180d79"
+  integrity sha512-9mAqGPHLakhCLeNyxPkK4xVo746zQ/czLH1Ky+vkitMnWfWZps8r0qXuwhwizagCRttsL4lfG4pIOvaWLpAP0w==
   dependencies:
-    accepts "~1.3.5"
-    bytes "3.0.0"
-    compressible "~2.0.16"
+    bytes "3.1.2"
+    compressible "~2.0.18"
     debug "2.6.9"
-    on-headers "~1.0.2"
-    safe-buffer "5.1.2"
+    negotiator "~0.6.4"
+    on-headers "~1.1.0"
+    safe-buffer "5.2.1"
     vary "~1.1.2"
 
 concat-map@0.0.1:
   version "0.0.1"
   resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
-  integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=
+  integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==
 
-configstore@^5.0.1:
-  version "5.0.1"
-  resolved "https://registry.yarnpkg.com/configstore/-/configstore-5.0.1.tgz#d365021b5df4b98cdd187d6a3b0e3f6a7cc5ed96"
-  integrity sha512-aMKprgk5YhBNyH25hj8wGt2+D52Sw1DRRIzqBwLp2Ya9mFmY8KPvvtvmna8SxVR9JMZ4kzMD68N22vlaRpkeFA==
-  dependencies:
-    dot-prop "^5.2.0"
-    graceful-fs "^4.1.2"
-    make-dir "^3.0.0"
-    unique-string "^2.0.0"
-    write-file-atomic "^3.0.0"
-    xdg-basedir "^4.0.0"
-
-console-control-strings@^1.0.0, console-control-strings@^1.1.0, console-control-strings@~1.1.0:
+console-control-strings@^1.0.0, console-control-strings@^1.1.0:
   version "1.1.0"
   resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e"
-  integrity sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=
+  integrity sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==
 
 content-disposition@0.5.4:
   version "0.5.4"
@@ -801,121 +642,92 @@ content-type@~1.0.4, content-type@~1.0.5:
 cookie-signature@1.0.6:
   version "1.0.6"
   resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c"
-  integrity sha1-4wOogrNCzD7oylE6eZmXNNqzriw=
+  integrity sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==
 
-cookie@0.6.0:
-  version "0.6.0"
-  resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.6.0.tgz#2798b04b071b0ecbff0dbb62a505a8efa4e19051"
-  integrity sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==
+cookie@0.7.1:
+  version "0.7.1"
+  resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.7.1.tgz#2f73c42142d5d5cf71310a74fc4ae61670e5dbc9"
+  integrity sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==
 
 core-util-is@~1.0.0:
-  version "1.0.2"
-  resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7"
-  integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=
+  version "1.0.3"
+  resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85"
+  integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==
 
 crc-32@^1.2.0:
-  version "1.2.0"
-  resolved "https://registry.yarnpkg.com/crc-32/-/crc-32-1.2.0.tgz#cb2db6e29b88508e32d9dd0ec1693e7b41a18208"
-  integrity sha512-1uBwHxF+Y/4yF5G48fwnKq6QsIXheor3ZLPT80yGBV1oEUwpPojlEhQbWKVw1VwcTQyMGHK1/XMmTjmlsmTTGA==
-  dependencies:
-    exit-on-epipe "~1.0.1"
-    printj "~1.1.0"
+  version "1.2.2"
+  resolved "https://registry.yarnpkg.com/crc-32/-/crc-32-1.2.2.tgz#3cad35a934b8bf71f25ca524b6da51fb7eace2ff"
+  integrity sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==
 
 crc32-stream@^4.0.2:
-  version "4.0.2"
-  resolved "https://registry.yarnpkg.com/crc32-stream/-/crc32-stream-4.0.2.tgz#c922ad22b38395abe9d3870f02fa8134ed709007"
-  integrity sha512-DxFZ/Hk473b/muq1VJ///PMNLj0ZMnzye9thBpmjpJKCc5eMgB95aK8zCGrGfQ90cWo561Te6HK9D+j4KPdM6w==
+  version "4.0.3"
+  resolved "https://registry.yarnpkg.com/crc32-stream/-/crc32-stream-4.0.3.tgz#85dd677eb78fa7cad1ba17cc506a597d41fc6f33"
+  integrity sha512-NT7w2JVU7DFroFdYkeq8cywxrgjPHWkdX1wjpRQXPX5Asews3tA+Ght6lddQO5Mkumffp3X7GEqku3epj2toIw==
   dependencies:
     crc-32 "^1.2.0"
     readable-stream "^3.4.0"
 
-cross-spawn@^7.0.2:
-  version "7.0.6"
-  resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.6.tgz#8a58fe78f00dcd70c370451759dfbfaf03e8ee9f"
-  integrity sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==
-  dependencies:
-    path-key "^3.1.0"
-    shebang-command "^2.0.0"
-    which "^2.0.1"
-
-crypto-random-string@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-2.0.0.tgz#ef2a7a966ec11083388369baa02ebead229b30d5"
-  integrity sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==
-
 db-errors@^0.2.3:
   version "0.2.3"
   resolved "https://registry.yarnpkg.com/db-errors/-/db-errors-0.2.3.tgz#a6a38952e00b20e790f2695a6446b3c65497ffa2"
   integrity sha512-OOgqgDuCavHXjYSJoV2yGhv6SeG8nk42aoCSoyXLZUH7VwFG27rxbavU1z+VrZbZjphw5UkDQwUlD21MwZpUng==
 
-debug@2.6.9, debug@^2.2.0:
+debug@2.6.9:
   version "2.6.9"
   resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f"
   integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==
   dependencies:
     ms "2.0.0"
 
-debug@4, debug@4.3.4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.2, debug@^4.3.3:
+debug@4, debug@^4.3.3:
+  version "4.4.1"
+  resolved "https://registry.yarnpkg.com/debug/-/debug-4.4.1.tgz#e5a8bc6cbc4c6cd3e64308b0693a3d4fa550189b"
+  integrity sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==
+  dependencies:
+    ms "^2.1.3"
+
+debug@4.3.4:
   version "4.3.4"
   resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865"
   integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==
   dependencies:
     ms "2.1.2"
 
-debug@^3.2.6:
-  version "3.2.6"
-  resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b"
-  integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==
+debug@^3.2.7:
+  version "3.2.7"
+  resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a"
+  integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==
   dependencies:
     ms "^2.1.1"
 
 decamelize@^1.2.0:
   version "1.2.0"
   resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290"
-  integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=
+  integrity sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==
 
-decompress-response@^3.3.0:
-  version "3.3.0"
-  resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-3.3.0.tgz#80a4dd323748384bfa248083622aedec982adff3"
-  integrity sha1-gKTdMjdIOEv6JICDYirt7Jgq3/M=
+decompress-response@^6.0.0:
+  version "6.0.0"
+  resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-6.0.0.tgz#ca387612ddb7e104bd16d85aab00d5ecf09c66fc"
+  integrity sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==
   dependencies:
-    mimic-response "^1.0.0"
+    mimic-response "^3.1.0"
 
 deep-extend@^0.6.0:
   version "0.6.0"
   resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac"
   integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==
 
-deep-is@^0.1.3:
-  version "0.1.4"
-  resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831"
-  integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==
-
-defer-to-connect@^1.0.1:
-  version "1.1.3"
-  resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-1.1.3.tgz#331ae050c08dcf789f8c83a7b81f0ed94f4ac591"
-  integrity sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ==
-
-define-data-property@^1.1.4:
-  version "1.1.4"
-  resolved "https://registry.yarnpkg.com/define-data-property/-/define-data-property-1.1.4.tgz#894dc141bb7d3060ae4366f6a0107e68fbe48c5e"
-  integrity sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==
-  dependencies:
-    es-define-property "^1.0.0"
-    es-errors "^1.3.0"
-    gopd "^1.0.1"
-
 delegates@^1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a"
-  integrity sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o=
+  integrity sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==
 
 denque@^2.1.0:
   version "2.1.0"
   resolved "https://registry.yarnpkg.com/denque/-/denque-2.1.0.tgz#e93e1a6569fb5e66f16a3c2a2964617d349d6ab1"
   integrity sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw==
 
-depd@2.0.0, depd@^2.0.0:
+depd@2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df"
   integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==
@@ -925,41 +737,19 @@ destroy@1.2.0:
   resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.2.0.tgz#4803735509ad8be552934c67df614f94e66fa015"
   integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==
 
-detect-libc@^1.0.2:
-  version "1.0.3"
-  resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b"
-  integrity sha1-+hN8S9aY7fVc1c0CrFWfkaTEups=
-
 detect-libc@^2.0.0:
-  version "2.0.1"
-  resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-2.0.1.tgz#e1897aa88fa6ad197862937fbc0441ef352ee0cd"
-  integrity sha512-463v3ZeIrcWtdgIg6vI6XUncguvr2TnGl4SzDXinkt9mSLpBJKXT3mW6xT3VQdDN11+WVs29pgvivTc4Lp8v+w==
+  version "2.0.4"
+  resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-2.0.4.tgz#f04715b8ba815e53b4d8109655b6508a6865a7e8"
+  integrity sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA==
 
-dicer@0.3.0:
-  version "0.3.0"
-  resolved "https://registry.yarnpkg.com/dicer/-/dicer-0.3.0.tgz#eacd98b3bfbf92e8ab5c2fdb71aaac44bb06b872"
-  integrity sha512-MdceRRWqltEG2dZqO769g27N/3PXfcKl04VhYnBlo2YhH7zPi88VebsjTKclaOyiuMaGU72hTfw3VkUitGcVCA==
+dunder-proto@^1.0.1:
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/dunder-proto/-/dunder-proto-1.0.1.tgz#d7ae667e1dc83482f8b70fd0f6eefc50da30f58a"
+  integrity sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==
   dependencies:
-    streamsearch "0.1.2"
-
-doctrine@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961"
-  integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==
-  dependencies:
-    esutils "^2.0.2"
-
-dot-prop@^5.2.0:
-  version "5.2.0"
-  resolved "https://registry.yarnpkg.com/dot-prop/-/dot-prop-5.2.0.tgz#c34ecc29556dc45f1f4c22697b6f4904e0cc4fcb"
-  integrity sha512-uEUyaDKoSQ1M4Oq8l45hSE26SnTxL6snNnqvK/VWx5wJhmff5z0FUVJDKDanor/6w3kzE3i7XZOk+7wC0EXr1A==
-  dependencies:
-    is-obj "^2.0.0"
-
-duplexer3@^0.1.4:
-  version "0.1.4"
-  resolved "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.4.tgz#ee01dd1cac0ed3cbc7fdbea37dc0a8f1ce002ce2"
-  integrity sha1-7gHdHKwO08vH/b6jfcCo8c4ALOI=
+    call-bind-apply-helpers "^1.0.1"
+    es-errors "^1.3.0"
+    gopd "^1.2.0"
 
 ecdsa-sig-formatter@1.0.11:
   version "1.0.11"
@@ -971,18 +761,13 @@ ecdsa-sig-formatter@1.0.11:
 ee-first@1.1.1:
   version "1.1.1"
   resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d"
-  integrity sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=
+  integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==
 
 email-validator@^2.0.4:
   version "2.0.4"
   resolved "https://registry.yarnpkg.com/email-validator/-/email-validator-2.0.4.tgz#b8dfaa5d0dae28f1b03c95881d904d4e40bfe7ed"
   integrity sha512-gYCwo7kh5S3IDyZPLZf6hSS0MnZT8QmJFqYvbqlDZSbwdZlY6QZWxJ4i/6UhITOJ4XzyI647Bm2MXKCLqnJ4nQ==
 
-emoji-regex@^7.0.1:
-  version "7.0.3"
-  resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156"
-  integrity sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==
-
 emoji-regex@^8.0.0:
   version "8.0.0"
   resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37"
@@ -991,7 +776,7 @@ emoji-regex@^8.0.0:
 encodeurl@~1.0.2:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59"
-  integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=
+  integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==
 
 encodeurl@~2.0.0:
   version "2.0.0"
@@ -1006,9 +791,9 @@ encoding@^0.1.12:
     iconv-lite "^0.6.2"
 
 end-of-stream@^1.1.0, end-of-stream@^1.4.1:
-  version "1.4.4"
-  resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0"
-  integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==
+  version "1.4.5"
+  resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.5.tgz#7344d711dea40e0b74abc2ed49778743ccedb08c"
+  integrity sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==
   dependencies:
     once "^1.4.0"
 
@@ -1029,249 +814,118 @@ error-ex@^1.3.1:
   dependencies:
     is-arrayish "^0.2.1"
 
-es-define-property@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/es-define-property/-/es-define-property-1.0.0.tgz#c7faefbdff8b2696cf5f46921edfb77cc4ba3845"
-  integrity sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==
-  dependencies:
-    get-intrinsic "^1.2.4"
+es-define-property@^1.0.1:
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/es-define-property/-/es-define-property-1.0.1.tgz#983eb2f9a6724e9303f61addf011c72e09e0b0fa"
+  integrity sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==
 
 es-errors@^1.3.0:
   version "1.3.0"
   resolved "https://registry.yarnpkg.com/es-errors/-/es-errors-1.3.0.tgz#05f75a25dab98e4fb1dcd5e1472c0546d5057c8f"
   integrity sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==
 
-escalade@^3.1.1:
-  version "3.1.1"
-  resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40"
-  integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==
+es-object-atoms@^1.0.0, es-object-atoms@^1.1.1:
+  version "1.1.1"
+  resolved "https://registry.yarnpkg.com/es-object-atoms/-/es-object-atoms-1.1.1.tgz#1c4f2c4837327597ce69d2ca190a7fdd172338c1"
+  integrity sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==
+  dependencies:
+    es-errors "^1.3.0"
 
-escape-goat@^2.0.0:
-  version "2.1.1"
-  resolved "https://registry.yarnpkg.com/escape-goat/-/escape-goat-2.1.1.tgz#1b2dc77003676c457ec760b2dc68edb648188675"
-  integrity sha512-8/uIhbG12Csjy2JEW7D9pHbreaVaS/OpN3ycnyvElTdwM5n6GY6W6e2IPemfvGZeUMqZ9A/3GqIZMgKnBhAw/Q==
+escalade@^3.1.1:
+  version "3.2.0"
+  resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.2.0.tgz#011a3f69856ba189dffa7dc8fcce99d2a87903e5"
+  integrity sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==
 
 escape-html@~1.0.3:
   version "1.0.3"
   resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988"
-  integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=
+  integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==
 
 escape-string-regexp@^1.0.5:
   version "1.0.5"
   resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4"
-  integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=
-
-escape-string-regexp@^4.0.0:
-  version "4.0.0"
-  resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34"
-  integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==
-
-eslint-plugin-align-assignments@^1.1.2:
-  version "1.1.2"
-  resolved "https://registry.yarnpkg.com/eslint-plugin-align-assignments/-/eslint-plugin-align-assignments-1.1.2.tgz#83e1a8a826d4adf29e82b52d0bb39c88b301b576"
-  integrity sha512-I1ZJgk9EjHfGVU9M2Ex8UkVkkjLL5Y9BS6VNnQHq79eHj2H4/Cgxf36lQSUTLgm2ntB03A2NtF+zg9fyi5vChg==
-
-eslint-scope@^7.1.1:
-  version "7.1.1"
-  resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-7.1.1.tgz#fff34894c2f65e5226d3041ac480b4513a163642"
-  integrity sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw==
-  dependencies:
-    esrecurse "^4.3.0"
-    estraverse "^5.2.0"
-
-eslint-visitor-keys@^3.3.0:
-  version "3.3.0"
-  resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz#f6480fa6b1f30efe2d1968aa8ac745b862469826"
-  integrity sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==
-
-eslint@^8.36.0:
-  version "8.36.0"
-  resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.36.0.tgz#1bd72202200a5492f91803b113fb8a83b11285cf"
-  integrity sha512-Y956lmS7vDqomxlaaQAHVmeb4tNMp2FWIvU/RnU5BD3IKMD/MJPr76xdyr68P8tV1iNMvN2mRK0yy3c+UjL+bw==
-  dependencies:
-    "@eslint-community/eslint-utils" "^4.2.0"
-    "@eslint-community/regexpp" "^4.4.0"
-    "@eslint/eslintrc" "^2.0.1"
-    "@eslint/js" "8.36.0"
-    "@humanwhocodes/config-array" "^0.11.8"
-    "@humanwhocodes/module-importer" "^1.0.1"
-    "@nodelib/fs.walk" "^1.2.8"
-    ajv "^6.10.0"
-    chalk "^4.0.0"
-    cross-spawn "^7.0.2"
-    debug "^4.3.2"
-    doctrine "^3.0.0"
-    escape-string-regexp "^4.0.0"
-    eslint-scope "^7.1.1"
-    eslint-visitor-keys "^3.3.0"
-    espree "^9.5.0"
-    esquery "^1.4.2"
-    esutils "^2.0.2"
-    fast-deep-equal "^3.1.3"
-    file-entry-cache "^6.0.1"
-    find-up "^5.0.0"
-    glob-parent "^6.0.2"
-    globals "^13.19.0"
-    grapheme-splitter "^1.0.4"
-    ignore "^5.2.0"
-    import-fresh "^3.0.0"
-    imurmurhash "^0.1.4"
-    is-glob "^4.0.0"
-    is-path-inside "^3.0.3"
-    js-sdsl "^4.1.4"
-    js-yaml "^4.1.0"
-    json-stable-stringify-without-jsonify "^1.0.1"
-    levn "^0.4.1"
-    lodash.merge "^4.6.2"
-    minimatch "^3.1.2"
-    natural-compare "^1.4.0"
-    optionator "^0.9.1"
-    strip-ansi "^6.0.1"
-    strip-json-comments "^3.1.0"
-    text-table "^0.2.0"
+  integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==
 
 esm@^3.2.25:
   version "3.2.25"
   resolved "https://registry.yarnpkg.com/esm/-/esm-3.2.25.tgz#342c18c29d56157688ba5ce31f8431fbb795cc10"
   integrity sha512-U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA==
 
-espree@^9.5.0:
-  version "9.5.0"
-  resolved "https://registry.yarnpkg.com/espree/-/espree-9.5.0.tgz#3646d4e3f58907464edba852fa047e6a27bdf113"
-  integrity sha512-JPbJGhKc47++oo4JkEoTe2wjy4fmMwvFpgJT9cQzmfXKp22Dr6Hf1tdCteLz1h0P3t+mGvWZ+4Uankvh8+c6zw==
-  dependencies:
-    acorn "^8.8.0"
-    acorn-jsx "^5.3.2"
-    eslint-visitor-keys "^3.3.0"
-
-esprima@^4.0.0:
-  version "4.0.1"
-  resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71"
-  integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==
-
-esquery@^1.4.2:
-  version "1.5.0"
-  resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.5.0.tgz#6ce17738de8577694edd7361c57182ac8cb0db0b"
-  integrity sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==
-  dependencies:
-    estraverse "^5.1.0"
-
-esrecurse@^4.3.0:
-  version "4.3.0"
-  resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921"
-  integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==
-  dependencies:
-    estraverse "^5.2.0"
-
-estraverse@^5.1.0:
-  version "5.2.0"
-  resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.2.0.tgz#307df42547e6cc7324d3cf03c155d5cdb8c53880"
-  integrity sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ==
-
-estraverse@^5.2.0:
-  version "5.3.0"
-  resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123"
-  integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==
-
-esutils@^2.0.2:
-  version "2.0.3"
-  resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64"
-  integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==
-
 etag@~1.8.1:
   version "1.8.1"
   resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887"
-  integrity sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=
+  integrity sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==
 
-exit-on-epipe@~1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/exit-on-epipe/-/exit-on-epipe-1.0.1.tgz#0bdd92e87d5285d267daa8171d0eb06159689692"
-  integrity sha512-h2z5mrROTxce56S+pnvAV890uu7ls7f1kEvVGJbw1OlFH3/mlJ5bkXu0KRyW94v37zzHPiUd55iLn3DA7TjWpw==
+expand-template@^2.0.3:
+  version "2.0.3"
+  resolved "https://registry.yarnpkg.com/expand-template/-/expand-template-2.0.3.tgz#6e14b3fcee0f3a6340ecb57d2e8918692052a47c"
+  integrity sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==
 
 express-fileupload@^1.1.9:
-  version "1.1.9"
-  resolved "https://registry.yarnpkg.com/express-fileupload/-/express-fileupload-1.1.9.tgz#e798e9318394ed5083e56217ad6cda576da465d2"
-  integrity sha512-f2w0aoe7lj3NeD8a4MXmYQsqir3Z66I08l9AKq04QbFUAjeZNmPwTlR5Lx2NGwSu/PslsAjGC38MWzo5tTjoBg==
+  version "1.5.2"
+  resolved "https://registry.yarnpkg.com/express-fileupload/-/express-fileupload-1.5.2.tgz#4da70ba6f2ffd4c736eab0776445865a9dbd9bfa"
+  integrity sha512-wxUJn2vTHvj/kZCVmc5/bJO15C7aSMyHeuXYY3geKpeKibaAoQGcEv5+sM6nHS2T7VF+QHS4hTWPiY2mKofEdg==
   dependencies:
-    busboy "^0.3.1"
+    busboy "^1.6.0"
 
 express@^4.20.0:
-  version "4.20.0"
-  resolved "https://registry.yarnpkg.com/express/-/express-4.20.0.tgz#f1d08e591fcec770c07be4767af8eb9bcfd67c48"
-  integrity sha512-pLdae7I6QqShF5PnNTCVn4hI91Dx0Grkn2+IAsMTgMIKuQVte2dN9PeGSSAME2FR8anOhVA62QDIUaWVfEXVLw==
+  version "4.21.2"
+  resolved "https://registry.yarnpkg.com/express/-/express-4.21.2.tgz#cf250e48362174ead6cea4a566abef0162c1ec32"
+  integrity sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==
   dependencies:
     accepts "~1.3.8"
     array-flatten "1.1.1"
     body-parser "1.20.3"
     content-disposition "0.5.4"
     content-type "~1.0.4"
-    cookie "0.6.0"
+    cookie "0.7.1"
     cookie-signature "1.0.6"
     debug "2.6.9"
     depd "2.0.0"
     encodeurl "~2.0.0"
     escape-html "~1.0.3"
     etag "~1.8.1"
-    finalhandler "1.2.0"
+    finalhandler "1.3.1"
     fresh "0.5.2"
     http-errors "2.0.0"
     merge-descriptors "1.0.3"
     methods "~1.1.2"
     on-finished "2.4.1"
     parseurl "~1.3.3"
-    path-to-regexp "0.1.10"
+    path-to-regexp "0.1.12"
     proxy-addr "~2.0.7"
-    qs "6.11.0"
+    qs "6.13.0"
     range-parser "~1.2.1"
     safe-buffer "5.2.1"
     send "0.19.0"
-    serve-static "1.16.0"
+    serve-static "1.16.2"
     setprototypeof "1.2.0"
     statuses "2.0.1"
     type-is "~1.6.18"
     utils-merge "1.0.1"
     vary "~1.1.2"
 
-fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3:
+fast-deep-equal@^3.1.3:
   version "3.1.3"
   resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525"
   integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==
 
-fast-json-stable-stringify@^2.0.0:
-  version "2.1.0"
-  resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633"
-  integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==
-
-fast-levenshtein@^2.0.6:
-  version "2.0.6"
-  resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917"
-  integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==
-
 fast-uri@^3.0.1:
-  version "3.0.2"
-  resolved "https://registry.yarnpkg.com/fast-uri/-/fast-uri-3.0.2.tgz#d78b298cf70fd3b752fd951175a3da6a7b48f024"
-  integrity sha512-GR6f0hD7XXyNJa25Tb9BuIdN0tdr+0BMi6/CJPH3wJO1JjNG3n/VsSw38AwRdKZABm8lGbPfakLRkYzx2V9row==
-
-fastq@^1.6.0:
-  version "1.15.0"
-  resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.15.0.tgz#d04d07c6a2a68fe4599fea8d2e103a937fae6b3a"
-  integrity sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==
-  dependencies:
-    reusify "^1.0.4"
+  version "3.1.0"
+  resolved "https://registry.yarnpkg.com/fast-uri/-/fast-uri-3.1.0.tgz#66eecff6c764c0df9b762e62ca7edcfb53b4edfa"
+  integrity sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==
 
 figures@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/figures/-/figures-2.0.0.tgz#3ab1a2d2a62c8bfb431a0c94cb797a2fce27c962"
-  integrity sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=
+  integrity sha512-Oa2M9atig69ZkfwiApY8F2Yy+tzMbazyvqv21R0NsSC8floSOC09BbT1ITWAdoMGQvJ/aZnR1KMwdx9tvHnTNA==
   dependencies:
     escape-string-regexp "^1.0.5"
 
-file-entry-cache@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027"
-  integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==
-  dependencies:
-    flat-cache "^3.0.4"
+file-uri-to-path@1.0.0:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz#553a7b8446ff6f684359c445f1e37a05dacc33dd"
+  integrity sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==
 
 fill-range@^7.1.1:
   version "7.1.1"
@@ -1280,13 +934,13 @@ fill-range@^7.1.1:
   dependencies:
     to-regex-range "^5.0.1"
 
-finalhandler@1.2.0:
-  version "1.2.0"
-  resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.2.0.tgz#7d23fe5731b207b4640e4fcd00aec1f9207a7b32"
-  integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==
+finalhandler@1.3.1:
+  version "1.3.1"
+  resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.3.1.tgz#0c575f1d1d324ddd1da35ad7ece3df7d19088019"
+  integrity sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==
   dependencies:
     debug "2.6.9"
-    encodeurl "~1.0.2"
+    encodeurl "~2.0.0"
     escape-html "~1.0.3"
     on-finished "2.4.1"
     parseurl "~1.3.3"
@@ -1296,7 +950,7 @@ finalhandler@1.2.0:
 find-up@^2.0.0:
   version "2.1.0"
   resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7"
-  integrity sha1-RdG35QbHF93UgndaK3eSCjwMV6c=
+  integrity sha512-NWzkk0jSJtTt08+FBFMvXoeZnOJD+jTtsRmBYbAIzJdX6l7dLgR7CTubCM5/eDdPUBvLCeVasP1brfVR/9/EZQ==
   dependencies:
     locate-path "^2.0.0"
 
@@ -1308,27 +962,6 @@ find-up@^4.1.0:
     locate-path "^5.0.0"
     path-exists "^4.0.0"
 
-find-up@^5.0.0:
-  version "5.0.0"
-  resolved "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc"
-  integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==
-  dependencies:
-    locate-path "^6.0.0"
-    path-exists "^4.0.0"
-
-flat-cache@^3.0.4:
-  version "3.0.4"
-  resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11"
-  integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==
-  dependencies:
-    flatted "^3.1.0"
-    rimraf "^3.0.2"
-
-flatted@^3.1.0:
-  version "3.2.7"
-  resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.7.tgz#609f39207cb614b89d0765b477cb2d437fbf9787"
-  integrity sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==
-
 forwarded@0.2.0:
   version "0.2.0"
   resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811"
@@ -1337,20 +970,13 @@ forwarded@0.2.0:
 fresh@0.5.2:
   version "0.5.2"
   resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7"
-  integrity sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=
+  integrity sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==
 
 fs-constants@^1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad"
   integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==
 
-fs-minipass@^1.2.7:
-  version "1.2.7"
-  resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-1.2.7.tgz#ccff8570841e7fe4265693da88936c55aed7f7c7"
-  integrity sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA==
-  dependencies:
-    minipass "^2.6.0"
-
 fs-minipass@^2.0.0:
   version "2.1.0"
   resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb"
@@ -1361,17 +987,12 @@ fs-minipass@^2.0.0:
 fs.realpath@^1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f"
-  integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8=
+  integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==
 
-fsevents@~2.1.2:
-  version "2.1.3"
-  resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.1.3.tgz#fb738703ae8d2f9fe900c33836ddebee8b97f23e"
-  integrity sha512-Auw9a4AxqWpa9GUfj370BMPzzyncfBABW8Mab7BGWBYDj4Isgq+cDKtx0i6u9jcX9pQDnswsaaOTgTmA5pEjuQ==
-
-function-bind@^1.1.1:
-  version "1.1.1"
-  resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d"
-  integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==
+fsevents@~2.3.2:
+  version "2.3.3"
+  resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6"
+  integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==
 
 function-bind@^1.1.2:
   version "1.1.2"
@@ -1407,20 +1028,6 @@ gauge@^4.0.3:
     strip-ansi "^6.0.1"
     wide-align "^1.1.5"
 
-gauge@~2.7.3:
-  version "2.7.4"
-  resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7"
-  integrity sha1-LANAXHU4w51+s3sxcCLjJfsBi/c=
-  dependencies:
-    aproba "^1.0.3"
-    console-control-strings "^1.0.0"
-    has-unicode "^2.0.0"
-    object-assign "^4.1.0"
-    signal-exit "^3.0.0"
-    string-width "^1.0.1"
-    strip-ansi "^3.0.1"
-    wide-align "^1.1.0"
-
 generate-function@^2.3.1:
   version "2.3.1"
   resolved "https://registry.yarnpkg.com/generate-function/-/generate-function-2.3.1.tgz#f069617690c10c868e73b8465746764f97c3479f"
@@ -1433,141 +1040,78 @@ get-caller-file@^2.0.1:
   resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e"
   integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==
 
-get-intrinsic@^1.1.3, get-intrinsic@^1.2.4:
-  version "1.2.4"
-  resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.4.tgz#e385f5a4b5227d449c3eabbad05494ef0abbeadd"
-  integrity sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==
+get-intrinsic@^1.2.5, get-intrinsic@^1.3.0:
+  version "1.3.0"
+  resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.3.0.tgz#743f0e3b6964a93a5491ed1bffaae054d7f98d01"
+  integrity sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==
   dependencies:
+    call-bind-apply-helpers "^1.0.2"
+    es-define-property "^1.0.1"
     es-errors "^1.3.0"
+    es-object-atoms "^1.1.1"
     function-bind "^1.1.2"
-    has-proto "^1.0.1"
-    has-symbols "^1.0.3"
-    hasown "^2.0.0"
+    get-proto "^1.0.1"
+    gopd "^1.2.0"
+    has-symbols "^1.1.0"
+    hasown "^2.0.2"
+    math-intrinsics "^1.1.0"
 
 get-package-type@^0.1.0:
   version "0.1.0"
   resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a"
   integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==
 
-get-stream@^4.1.0:
-  version "4.1.0"
-  resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5"
-  integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==
+get-proto@^1.0.1:
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/get-proto/-/get-proto-1.0.1.tgz#150b3f2743869ef3e851ec0c49d15b1d14d00ee1"
+  integrity sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==
   dependencies:
-    pump "^3.0.0"
-
-get-stream@^5.1.0:
-  version "5.1.0"
-  resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-5.1.0.tgz#01203cdc92597f9b909067c3e656cc1f4d3c4dc9"
-  integrity sha512-EXr1FOzrzTfGeL0gQdeFEvOMm2mzMOglyiOXSTpPC+iAjAKftbr3jpCMWynogwYnM+eSj9sHGc6wjIcDvYiygw==
-  dependencies:
-    pump "^3.0.0"
+    dunder-proto "^1.0.1"
+    es-object-atoms "^1.0.0"
 
 getopts@2.3.0:
   version "2.3.0"
   resolved "https://registry.yarnpkg.com/getopts/-/getopts-2.3.0.tgz#71e5593284807e03e2427449d4f6712a268666f4"
   integrity sha512-5eDf9fuSXwxBL6q5HX+dhDj+dslFGWzU5thZ9kNKUkcPtaPdatmUFKwHFrLb/uf/WpA4BHET+AX3Scl56cAjpA==
 
-glob-parent@^6.0.2:
-  version "6.0.2"
-  resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3"
-  integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==
-  dependencies:
-    is-glob "^4.0.3"
+github-from-package@0.0.0:
+  version "0.0.0"
+  resolved "https://registry.yarnpkg.com/github-from-package/-/github-from-package-0.0.0.tgz#97fb5d96bfde8973313f20e8288ef9a167fa64ce"
+  integrity sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==
 
-glob-parent@~5.1.0:
+glob-parent@~5.1.2:
   version "5.1.2"
   resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4"
   integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==
   dependencies:
     is-glob "^4.0.1"
 
-glob@^7.1.3:
-  version "7.1.6"
-  resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6"
-  integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==
+glob@^7.1.3, glob@^7.1.4, glob@^7.2.3:
+  version "7.2.3"
+  resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b"
+  integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==
   dependencies:
     fs.realpath "^1.0.0"
     inflight "^1.0.4"
     inherits "2"
-    minimatch "^3.0.4"
+    minimatch "^3.1.1"
     once "^1.3.0"
     path-is-absolute "^1.0.0"
 
-glob@^7.1.4:
-  version "7.1.7"
-  resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.7.tgz#3b193e9233f01d42d0b3f78294bbeeb418f94a90"
-  integrity sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==
-  dependencies:
-    fs.realpath "^1.0.0"
-    inflight "^1.0.4"
-    inherits "2"
-    minimatch "^3.0.4"
-    once "^1.3.0"
-    path-is-absolute "^1.0.0"
+gopd@^1.2.0:
+  version "1.2.0"
+  resolved "https://registry.yarnpkg.com/gopd/-/gopd-1.2.0.tgz#89f56b8217bdbc8802bd299df6d7f1081d7e51a1"
+  integrity sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==
 
-global-dirs@^2.0.1:
-  version "2.0.1"
-  resolved "https://registry.yarnpkg.com/global-dirs/-/global-dirs-2.0.1.tgz#acdf3bb6685bcd55cb35e8a052266569e9469201"
-  integrity sha512-5HqUqdhkEovj2Of/ms3IeS/EekcO54ytHRLV4PEY2rhRwrHXLQjeVEES0Lhka0xwNDtGYn58wyC4s5+MHsOO6A==
-  dependencies:
-    ini "^1.3.5"
-
-globals@^13.19.0:
-  version "13.20.0"
-  resolved "https://registry.yarnpkg.com/globals/-/globals-13.20.0.tgz#ea276a1e508ffd4f1612888f9d1bad1e2717bf82"
-  integrity sha512-Qg5QtVkCy/kv3FUSlu4ukeZDVf9ee0iXLAUYX13gbR17bnejFTzr4iS9bY7kwCf1NztRNm1t91fjOiyx4CSwPQ==
-  dependencies:
-    type-fest "^0.20.2"
-
-gopd@^1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/gopd/-/gopd-1.0.1.tgz#29ff76de69dac7489b7c0918a5788e56477c332c"
-  integrity sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==
-  dependencies:
-    get-intrinsic "^1.1.3"
-
-got@^9.6.0:
-  version "9.6.0"
-  resolved "https://registry.yarnpkg.com/got/-/got-9.6.0.tgz#edf45e7d67f99545705de1f7bbeeeb121765ed85"
-  integrity sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q==
-  dependencies:
-    "@sindresorhus/is" "^0.14.0"
-    "@szmarczak/http-timer" "^1.1.2"
-    cacheable-request "^6.0.0"
-    decompress-response "^3.3.0"
-    duplexer3 "^0.1.4"
-    get-stream "^4.1.0"
-    lowercase-keys "^1.0.1"
-    mimic-response "^1.0.1"
-    p-cancelable "^1.0.0"
-    to-readable-stream "^1.0.0"
-    url-parse-lax "^3.0.0"
-
-graceful-fs@^4.1.15, graceful-fs@^4.1.2:
-  version "4.2.4"
-  resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.4.tgz#2256bde14d3632958c465ebc96dc467ca07a29fb"
-  integrity sha512-WjKPNJF79dtJAVniUlGGWHYGz2jWxT6VhN/4m1NdkbZ2nOsEF+cI1Edgql5zCRhs/VsQYRvrXctxktVXZUkixw==
-
-graceful-fs@^4.2.0:
-  version "4.2.8"
-  resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.8.tgz#e412b8d33f5e006593cbd3cee6df9f2cebbe802a"
-  integrity sha512-qkIilPUYcNhJpd33n0GBXTB1MMPp14TxEsEs0pTrsSVucApsYzW5V+Q8Qxhik6KU3evy+qkAAowTByymK0avdg==
-
-graceful-fs@^4.2.6:
+graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.2.0, graceful-fs@^4.2.6:
   version "4.2.11"
   resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3"
   integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==
 
-grapheme-splitter@^1.0.4:
-  version "1.0.4"
-  resolved "https://registry.yarnpkg.com/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz#9cf3a665c6247479896834af35cf1dbb4400767e"
-  integrity sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==
-
 gravatar@^1.8.0:
-  version "1.8.1"
-  resolved "https://registry.yarnpkg.com/gravatar/-/gravatar-1.8.1.tgz#743bbdf3185c3433172e00e0e6ff5f6b30c58997"
-  integrity sha512-18frnfVp4kRYkM/eQW32Mfwlsh/KMbwd3S6nkescBZHioobflFEFHsvM71qZAkUSLNifyi2uoI+TuGxJAnQIOA==
+  version "1.8.2"
+  resolved "https://registry.yarnpkg.com/gravatar/-/gravatar-1.8.2.tgz#f298642b1562ed685af2ae938dbe31ec0c542cc1"
+  integrity sha512-GdRwLM3oYpFQKy47MKuluw9hZ2gaCtiKPbDGdcDEuYDKlc8eNnW27KYL9LVbIDzEsx88WtDWQm2ClBcsgBnj6w==
   dependencies:
     blueimp-md5 "^2.16.0"
     email-validator "^2.0.4"
@@ -1577,58 +1121,34 @@ gravatar@^1.8.0:
 has-flag@^3.0.0:
   version "3.0.0"
   resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd"
-  integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0=
+  integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==
 
 has-flag@^4.0.0:
   version "4.0.0"
   resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b"
   integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==
 
-has-property-descriptors@^1.0.2:
-  version "1.0.2"
-  resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz#963ed7d071dc7bf5f084c5bfbe0d1b6222586854"
-  integrity sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==
-  dependencies:
-    es-define-property "^1.0.0"
+has-symbols@^1.1.0:
+  version "1.1.0"
+  resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.1.0.tgz#fc9c6a783a084951d0b971fe1018de813707a338"
+  integrity sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==
 
-has-proto@^1.0.1:
-  version "1.0.3"
-  resolved "https://registry.yarnpkg.com/has-proto/-/has-proto-1.0.3.tgz#b31ddfe9b0e6e9914536a6ab286426d0214f77fd"
-  integrity sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==
-
-has-symbols@^1.0.3:
-  version "1.0.3"
-  resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8"
-  integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==
-
-has-unicode@^2.0.0, has-unicode@^2.0.1:
+has-unicode@^2.0.1:
   version "2.0.1"
   resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9"
-  integrity sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=
+  integrity sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==
 
-has-yarn@^2.1.0:
-  version "2.1.0"
-  resolved "https://registry.yarnpkg.com/has-yarn/-/has-yarn-2.1.0.tgz#137e11354a7b5bf11aa5cb649cf0c6f3ff2b2e77"
-  integrity sha512-UqBRqi4ju7T+TqGNdqAO0PaSVGsDGJUBQvk9eUWNGRY1CFGDzYhLWoM7JQEemnlvVcv/YEmc2wNW8BC24EnUsw==
-
-has@^1.0.3:
-  version "1.0.3"
-  resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796"
-  integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==
-  dependencies:
-    function-bind "^1.1.1"
-
-hasown@^2.0.0:
+hasown@^2.0.2:
   version "2.0.2"
   resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.2.tgz#003eaf91be7adc372e84ec59dc37252cedb80003"
   integrity sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==
   dependencies:
     function-bind "^1.1.2"
 
-http-cache-semantics@^4.0.0, http-cache-semantics@^4.1.0:
-  version "4.1.1"
-  resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz#abe02fcb2985460bf0323be664436ec3476a6d5a"
-  integrity sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==
+http-cache-semantics@^4.1.0:
+  version "4.2.0"
+  resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.2.0.tgz#205f4db64f8562b76a4ff9235aa5279839a09dd5"
+  integrity sha512-dTxcvPXqPvXBQpq5dUr6mEMJX4oIEFv6bwom3FDwKRDsuIjjJGANqhBuoAn9c1RQJIdAKav33ED65E2ys+87QQ==
 
 http-errors@2.0.0:
   version "2.0.0"
@@ -1665,20 +1185,27 @@ humanize-ms@^1.2.1:
   dependencies:
     ms "^2.0.0"
 
-iconv-lite@0.4.24, iconv-lite@^0.4.4:
+iconv-lite@0.4.24:
   version "0.4.24"
   resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b"
   integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==
   dependencies:
     safer-buffer ">= 2.1.2 < 3"
 
-iconv-lite@^0.6.2, iconv-lite@^0.6.3:
+iconv-lite@^0.6.2:
   version "0.6.3"
   resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.6.3.tgz#a52f80bf38da1952eb5c681790719871a1a72501"
   integrity sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==
   dependencies:
     safer-buffer ">= 2.1.2 < 3.0.0"
 
+iconv-lite@^0.7.0:
+  version "0.7.0"
+  resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.7.0.tgz#c50cd80e6746ca8115eb98743afa81aa0e147a3e"
+  integrity sha512-cf6L2Ds3h57VVmkZe+Pn+5APsT7FpqJtEhhieDCvrE2MK5Qk9MyffgQyuxQTm6BChfeZNtcOLHp9IcWRVcIcBQ==
+  dependencies:
+    safer-buffer ">= 2.1.2 < 3.0.0"
+
 ieee754@^1.1.13:
   version "1.2.1"
   resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352"
@@ -1687,45 +1214,12 @@ ieee754@^1.1.13:
 ignore-by-default@^1.0.1:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/ignore-by-default/-/ignore-by-default-1.0.1.tgz#48ca6d72f6c6a3af00a9ad4ae6876be3889e2b09"
-  integrity sha1-SMptcvbGo68Aqa1K5odr44ieKwk=
-
-ignore-walk@^3.0.1:
-  version "3.0.3"
-  resolved "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-3.0.3.tgz#017e2447184bfeade7c238e4aefdd1e8f95b1e37"
-  integrity sha512-m7o6xuOaT1aqheYHKf8W6J5pYH85ZI9w077erOzLje3JsB1gkafkAhHHY19dqjulgIZHFm32Cp5uNZgcQqdJKw==
-  dependencies:
-    minimatch "^3.0.4"
-
-ignore@^5.2.0:
-  version "5.2.4"
-  resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.4.tgz#a291c0c6178ff1b960befe47fcdec301674a6324"
-  integrity sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==
-
-import-fresh@^3.0.0:
-  version "3.2.1"
-  resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.2.1.tgz#633ff618506e793af5ac91bf48b72677e15cbe66"
-  integrity sha512-6e1q1cnWP2RXD9/keSkxHScg508CdXqXWgWBaETNhyuBFz+kUZlKboh+ISK+bU++DmbHimVBrOz/zzPe0sZ3sQ==
-  dependencies:
-    parent-module "^1.0.0"
-    resolve-from "^4.0.0"
-
-import-fresh@^3.2.1:
-  version "3.3.0"
-  resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b"
-  integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==
-  dependencies:
-    parent-module "^1.0.0"
-    resolve-from "^4.0.0"
-
-import-lazy@^2.1.0:
-  version "2.1.0"
-  resolved "https://registry.yarnpkg.com/import-lazy/-/import-lazy-2.1.0.tgz#05698e3d45c88e8d7e9d92cb0584e77f096f3e43"
-  integrity sha1-BWmOPUXIjo1+nZLLBYTnfwlvPkM=
+  integrity sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA==
 
 imurmurhash@^0.1.4:
   version "0.1.4"
   resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea"
-  integrity sha1-khi5srkoojixPcT7a21XbyMUU+o=
+  integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==
 
 indent-string@^4.0.0:
   version "4.0.0"
@@ -1740,7 +1234,7 @@ infer-owner@^1.0.4:
 inflight@^1.0.4:
   version "1.0.6"
   resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9"
-  integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=
+  integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==
   dependencies:
     once "^1.3.0"
     wrappy "1"
@@ -1753,9 +1247,9 @@ inherits@2, inherits@2.0.4, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.3:
 inherits@2.0.3:
   version "2.0.3"
   resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de"
-  integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=
+  integrity sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw==
 
-ini@^1.3.5, ini@~1.3.0:
+ini@~1.3.0:
   version "1.3.8"
   resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c"
   integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==
@@ -1765,10 +1259,10 @@ interpret@^2.2.0:
   resolved "https://registry.yarnpkg.com/interpret/-/interpret-2.2.0.tgz#1a78a0b5965c40a5416d007ad6f50ad27c417df9"
   integrity sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==
 
-ip@^2.0.0:
-  version "2.0.1"
-  resolved "https://registry.yarnpkg.com/ip/-/ip-2.0.1.tgz#e8f3595d33a3ea66490204234b77636965307105"
-  integrity sha512-lJUL9imLTNi1ZfXT+DU6rBBdbiKGBuay9B6xGSPVjUeQwaH1RIGqef8RZkUtHioLmSNpPR5M4HVKJGm1j8FWVQ==
+ip-address@^10.0.1:
+  version "10.0.1"
+  resolved "https://registry.yarnpkg.com/ip-address/-/ip-address-10.0.1.tgz#a8180b783ce7788777d796286d61bce4276818ed"
+  integrity sha512-NWv9YLW4PoW2B7xtzaS3NCot75m6nK7Icdv0o3lfMceJVRfSoQwqD4wEH5rLwoKJwUiZ/rfpiVBhnaF0FK4HoA==
 
 ipaddr.js@1.9.1:
   version "1.9.1"
@@ -1778,7 +1272,7 @@ ipaddr.js@1.9.1:
 is-arrayish@^0.2.1:
   version "0.2.1"
   resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d"
-  integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=
+  integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==
 
 is-binary-path@~2.1.0:
   version "2.1.0"
@@ -1787,136 +1281,59 @@ is-binary-path@~2.1.0:
   dependencies:
     binary-extensions "^2.0.0"
 
-is-ci@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/is-ci/-/is-ci-2.0.0.tgz#6bc6334181810e04b5c22b3d589fdca55026404c"
-  integrity sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==
+is-core-module@^2.16.0:
+  version "2.16.1"
+  resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.16.1.tgz#2a98801a849f43e2add644fbb6bc6229b19a4ef4"
+  integrity sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==
   dependencies:
-    ci-info "^2.0.0"
-
-is-core-module@^2.9.0:
-  version "2.11.0"
-  resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.11.0.tgz#ad4cb3e3863e814523c96f3f58d26cc570ff0144"
-  integrity sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw==
-  dependencies:
-    has "^1.0.3"
+    hasown "^2.0.2"
 
 is-extglob@^2.1.1:
   version "2.1.1"
   resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2"
-  integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=
-
-is-fullwidth-code-point@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb"
-  integrity sha1-754xOG8DGn8NZDr4L95QxFfvAMs=
-  dependencies:
-    number-is-nan "^1.0.0"
-
-is-fullwidth-code-point@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f"
-  integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=
+  integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==
 
 is-fullwidth-code-point@^3.0.0:
   version "3.0.0"
   resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d"
   integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==
 
-is-glob@^4.0.0, is-glob@^4.0.1, is-glob@~4.0.1:
-  version "4.0.1"
-  resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc"
-  integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==
-  dependencies:
-    is-extglob "^2.1.1"
-
-is-glob@^4.0.3:
+is-glob@^4.0.1, is-glob@~4.0.1:
   version "4.0.3"
   resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084"
   integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==
   dependencies:
     is-extglob "^2.1.1"
 
-is-installed-globally@^0.3.1:
-  version "0.3.2"
-  resolved "https://registry.yarnpkg.com/is-installed-globally/-/is-installed-globally-0.3.2.tgz#fd3efa79ee670d1187233182d5b0a1dd00313141"
-  integrity sha512-wZ8x1js7Ia0kecP/CHM/3ABkAmujX7WPvQk6uu3Fly/Mk44pySulQpnHG46OMjHGXApINnV4QhY3SWnECO2z5g==
-  dependencies:
-    global-dirs "^2.0.1"
-    is-path-inside "^3.0.1"
-
 is-lambda@^1.0.1:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/is-lambda/-/is-lambda-1.0.1.tgz#3d9877899e6a53efc0160504cde15f82e6f061d5"
   integrity sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==
 
-is-npm@^4.0.0:
-  version "4.0.0"
-  resolved "https://registry.yarnpkg.com/is-npm/-/is-npm-4.0.0.tgz#c90dd8380696df87a7a6d823c20d0b12bbe3c84d"
-  integrity sha512-96ECIfh9xtDDlPylNPXhzjsykHsMJZ18ASpaWzQyBr4YRTcVjUvzaHayDAES2oU/3KpljhHUjtSRNiDwi0F0ig==
-
 is-number@^7.0.0:
   version "7.0.0"
   resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b"
   integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==
 
-is-obj@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-2.0.0.tgz#473fb05d973705e3fd9620545018ca8e22ef4982"
-  integrity sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==
-
-is-path-inside@^3.0.1:
-  version "3.0.2"
-  resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.2.tgz#f5220fc82a3e233757291dddc9c5877f2a1f3017"
-  integrity sha512-/2UGPSgmtqwo1ktx8NDHjuPwZWmHhO+gj0f93EkhLB5RgW9RZevWYYlIkS6zePc6U2WpOdQYIwHe9YC4DWEBVg==
-
-is-path-inside@^3.0.3:
-  version "3.0.3"
-  resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283"
-  integrity sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==
-
 is-property@^1.0.2:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/is-property/-/is-property-1.0.2.tgz#57fe1c4e48474edd65b09911f26b1cd4095dda84"
   integrity sha512-Ks/IoX00TtClbGQr4TWXemAnktAQvYB7HzcCxDGqEZU6oCmb2INHuOoKxbtR+HFkmYWBKv/dOZtGRiAjDhj92g==
 
 is-stream@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.0.tgz#bde9c32680d6fae04129d6ac9d921ce7815f78e3"
-  integrity sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw==
-
-is-typedarray@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a"
-  integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=
-
-is-yarn-global@^0.3.0:
-  version "0.3.0"
-  resolved "https://registry.yarnpkg.com/is-yarn-global/-/is-yarn-global-0.3.0.tgz#d502d3382590ea3004893746754c89139973e232"
-  integrity sha512-VjSeb/lHmkoyd8ryPVIKvOCn4D1koMqY+vqyjjUfc3xyKtP4dYOxM44sZrnqQSzSds3xyOrUTLTC9LVCVgLngw==
+  version "2.0.1"
+  resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077"
+  integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==
 
 isarray@~1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11"
-  integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=
+  integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==
 
 isexe@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
-  integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=
-
-js-sdsl@^4.1.4:
-  version "4.3.0"
-  resolved "https://registry.yarnpkg.com/js-sdsl/-/js-sdsl-4.3.0.tgz#aeefe32a451f7af88425b11fdb5f58c90ae1d711"
-  integrity sha512-mifzlm2+5nZ+lEcLJMoBK0/IH/bDg8XnJfd/Wq6IP+xoCjLZsTOnV2QpxlVbX9bMnkl5PdEjNtBJ9Cj1NjifhQ==
-
-js-yaml@^3.13.1:
-  version "3.14.0"
-  resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.0.tgz#a7a34170f26a21bb162424d8adacb4113a69e482"
-  integrity sha512-/4IbIeHcD9VMHFqDR/gQ7EdZdLimOvW2DdcxFjdyyZ9NsbS+ccrXqVWDtab/lRl5AlUqmpBx8EhPaWR+OtY17A==
-  dependencies:
-    argparse "^1.0.7"
-    esprima "^4.0.0"
+  integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==
 
 js-yaml@^4.1.0:
   version "4.1.0"
@@ -1925,47 +1342,38 @@ js-yaml@^4.1.0:
   dependencies:
     argparse "^2.0.1"
 
-json-buffer@3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.0.tgz#5b1f397afc75d677bde8bcfc0e47e1f9a3d9a898"
-  integrity sha1-Wx85evx11ne96Lz8Dkfh+aPZqJg=
-
 json-parse-better-errors@^1.0.1:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9"
   integrity sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==
 
-json-schema-traverse@^0.4.1:
-  version "0.4.1"
-  resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660"
-  integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==
-
 json-schema-traverse@^1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2"
   integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==
 
-json-stable-stringify-without-jsonify@^1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651"
-  integrity sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=
-
 jsonwebtoken@^9.0.0:
-  version "9.0.0"
-  resolved "https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-9.0.0.tgz#d0faf9ba1cc3a56255fe49c0961a67e520c1926d"
-  integrity sha512-tuGfYXxkQGDPnLJ7SibiQgVgeDgfbPq2k2ICcbgqW8WxWLBAxKQM/ZCu/IT8SOSwmaYl4dpTFCW5xZv7YbbWUw==
+  version "9.0.2"
+  resolved "https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-9.0.2.tgz#65ff91f4abef1784697d40952bb1998c504caaf3"
+  integrity sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==
   dependencies:
     jws "^3.2.2"
-    lodash "^4.17.21"
+    lodash.includes "^4.3.0"
+    lodash.isboolean "^3.0.3"
+    lodash.isinteger "^4.0.4"
+    lodash.isnumber "^3.0.3"
+    lodash.isplainobject "^4.0.6"
+    lodash.isstring "^4.0.1"
+    lodash.once "^4.0.0"
     ms "^2.1.1"
-    semver "^7.3.8"
+    semver "^7.5.4"
 
 jwa@^1.4.1:
-  version "1.4.1"
-  resolved "https://registry.yarnpkg.com/jwa/-/jwa-1.4.1.tgz#743c32985cb9e98655530d53641b66c8645b039a"
-  integrity sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==
+  version "1.4.2"
+  resolved "https://registry.yarnpkg.com/jwa/-/jwa-1.4.2.tgz#16011ac6db48de7b102777e57897901520eec7b9"
+  integrity sha512-eeH5JO+21J78qMvTIDdBXidBd6nG2kZjg5Ohz/1fpa28Z4CcsWUzJ1ZZyFq/3z3N17aZy+ZuBoHljASbL1WfOw==
   dependencies:
-    buffer-equal-constant-time "1.0.1"
+    buffer-equal-constant-time "^1.0.1"
     ecdsa-sig-formatter "1.0.11"
     safe-buffer "^5.0.1"
 
@@ -1977,13 +1385,6 @@ jws@^3.2.2:
     jwa "^1.4.1"
     safe-buffer "^5.0.1"
 
-keyv@^3.0.0:
-  version "3.1.0"
-  resolved "https://registry.yarnpkg.com/keyv/-/keyv-3.1.0.tgz#ecc228486f69991e49e9476485a5be1e8fc5c4d9"
-  integrity sha512-9ykJ/46SN/9KPM/sichzQ7OvXyGDYKGTaDlKMGCAlg2UK8KRy4jb0d8sFc+0Tt0YYnThq8X2RZgCg74RPxgcVA==
-  dependencies:
-    json-buffer "3.0.0"
-
 knex@2.4.2:
   version "2.4.2"
   resolved "https://registry.yarnpkg.com/knex/-/knex-2.4.2.tgz#a34a289d38406dc19a0447a78eeaf2d16ebedd61"
@@ -2004,28 +1405,13 @@ knex@2.4.2:
     tarn "^3.0.2"
     tildify "2.0.0"
 
-latest-version@^5.0.0:
-  version "5.1.0"
-  resolved "https://registry.yarnpkg.com/latest-version/-/latest-version-5.1.0.tgz#119dfe908fe38d15dfa43ecd13fa12ec8832face"
-  integrity sha512-weT+r0kTkRQdCdYCNtkMwWXQTMEswKrFBkm4ckQOMVhhqhIMI1UT2hMj+1iigIhgSZm5gTmrRXBNoGUgaTY1xA==
-  dependencies:
-    package-json "^6.3.0"
-
 lazystream@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/lazystream/-/lazystream-1.0.0.tgz#f6995fe0f820392f61396be89462407bb77168e4"
-  integrity sha1-9plf4PggOS9hOWvolGJAe7dxaOQ=
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/lazystream/-/lazystream-1.0.1.tgz#494c831062f1f9408251ec44db1cba29242a2638"
+  integrity sha512-b94GiNHQNy6JNTrt5w6zNyffMrNkXZb3KTkCZJb2V1xaEGCk093vkZ2jk3tpaeP33/OiXC+WvK9AxUebnf5nbw==
   dependencies:
     readable-stream "^2.0.5"
 
-levn@^0.4.1:
-  version "0.4.1"
-  resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade"
-  integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==
-  dependencies:
-    prelude-ls "^1.2.1"
-    type-check "~0.4.0"
-
 liquidjs@10.6.1:
   version "10.6.1"
   resolved "https://registry.yarnpkg.com/liquidjs/-/liquidjs-10.6.1.tgz#b401662cb8f0cca59b42f79fc08e411c86d92dab"
@@ -2036,7 +1422,7 @@ liquidjs@10.6.1:
 load-json-file@^4.0.0:
   version "4.0.0"
   resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-4.0.0.tgz#2f5f45ab91e33216234fd53adab668eb4ec0993b"
-  integrity sha1-L19Fq5HjMhYjT9U62rZo607AmTs=
+  integrity sha512-Kx8hMakjX03tiGTLAIdJ+lL0htKnXjEZN6hk/tozf/WOuYGdZBJrZ+rCJRbVCugsjB3jMLn9746NsQIf5VjBMw==
   dependencies:
     graceful-fs "^4.1.2"
     parse-json "^4.0.0"
@@ -2046,7 +1432,7 @@ load-json-file@^4.0.0:
 locate-path@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e"
-  integrity sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=
+  integrity sha512-NCI2kiDkyR7VeEKm27Kda/iQHyKJe1Bu0FlTbYp3CqJu+9IFe9bLyAjMxf5ZDDbEg+iMPzB5zYyUTSm8wVTKmA==
   dependencies:
     p-locate "^2.0.0"
     path-exists "^3.0.0"
@@ -2058,42 +1444,60 @@ locate-path@^5.0.0:
   dependencies:
     p-locate "^4.1.0"
 
-locate-path@^6.0.0:
-  version "6.0.0"
-  resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286"
-  integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==
-  dependencies:
-    p-locate "^5.0.0"
-
 lodash.defaults@^4.2.0:
   version "4.2.0"
   resolved "https://registry.yarnpkg.com/lodash.defaults/-/lodash.defaults-4.2.0.tgz#d09178716ffea4dde9e5fb7b37f6f0802274580c"
-  integrity sha1-0JF4cW/+pN3p5ft7N/bwgCJ0WAw=
+  integrity sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ==
 
 lodash.difference@^4.5.0:
   version "4.5.0"
   resolved "https://registry.yarnpkg.com/lodash.difference/-/lodash.difference-4.5.0.tgz#9ccb4e505d486b91651345772885a2df27fd017c"
-  integrity sha1-nMtOUF1Ia5FlE0V3KIWi3yf9AXw=
+  integrity sha512-dS2j+W26TQ7taQBGN8Lbbq04ssV3emRw4NY58WErlTO29pIqS0HmoT5aJ9+TUQ1N3G+JOZSji4eugsWwGp9yPA==
 
 lodash.flatten@^4.4.0:
   version "4.4.0"
   resolved "https://registry.yarnpkg.com/lodash.flatten/-/lodash.flatten-4.4.0.tgz#f31c22225a9632d2bbf8e4addbef240aa765a61f"
-  integrity sha1-8xwiIlqWMtK7+OSt2+8kCqdlph8=
+  integrity sha512-C5N2Z3DgnnKr0LOpv/hKCgKdb7ZZwafIrsesve6lmzvZIRZRGaZ/l6Q8+2W7NaT+ZwO3fFlSCzCzrDCFdJfZ4g==
+
+lodash.includes@^4.3.0:
+  version "4.3.0"
+  resolved "https://registry.yarnpkg.com/lodash.includes/-/lodash.includes-4.3.0.tgz#60bb98a87cb923c68ca1e51325483314849f553f"
+  integrity sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==
+
+lodash.isboolean@^3.0.3:
+  version "3.0.3"
+  resolved "https://registry.yarnpkg.com/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz#6c2e171db2a257cd96802fd43b01b20d5f5870f6"
+  integrity sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==
+
+lodash.isinteger@^4.0.4:
+  version "4.0.4"
+  resolved "https://registry.yarnpkg.com/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz#619c0af3d03f8b04c31f5882840b77b11cd68343"
+  integrity sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==
+
+lodash.isnumber@^3.0.3:
+  version "3.0.3"
+  resolved "https://registry.yarnpkg.com/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz#3ce76810c5928d03352301ac287317f11c0b1ffc"
+  integrity sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==
 
 lodash.isplainobject@^4.0.6:
   version "4.0.6"
   resolved "https://registry.yarnpkg.com/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz#7c526a52d89b45c45cc690b88163be0497f550cb"
-  integrity sha1-fFJqUtibRcRcxpC4gWO+BJf1UMs=
+  integrity sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==
 
-lodash.merge@^4.6.2:
-  version "4.6.2"
-  resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a"
-  integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==
+lodash.isstring@^4.0.1:
+  version "4.0.1"
+  resolved "https://registry.yarnpkg.com/lodash.isstring/-/lodash.isstring-4.0.1.tgz#d527dfb5456eca7cc9bb95d5daeaf88ba54a5451"
+  integrity sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==
+
+lodash.once@^4.0.0:
+  version "4.1.1"
+  resolved "https://registry.yarnpkg.com/lodash.once/-/lodash.once-4.1.1.tgz#0dd3971213c7c56df880977d504c88fb471a97ac"
+  integrity sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==
 
 lodash.union@^4.6.0:
   version "4.6.0"
   resolved "https://registry.yarnpkg.com/lodash.union/-/lodash.union-4.6.0.tgz#48bb5088409f16f1821666641c44dd1aaae3cd88"
-  integrity sha1-SLtQiECfFvGCFmZkHETdGqrjzYg=
+  integrity sha512-c4pB2CdGrGdjMKYLA+XiRDO7Y0PRQbm/Gzg8qMj+QH+pFVAoTp5sBpO0odL3FjoPCGjK96p6qsP+yQoiLoOBcw==
 
 lodash@^4.17.21:
   version "4.17.21"
@@ -2101,19 +1505,9 @@ lodash@^4.17.21:
   integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
 
 long@^5.2.1:
-  version "5.2.3"
-  resolved "https://registry.yarnpkg.com/long/-/long-5.2.3.tgz#a3ba97f3877cf1d778eccbcb048525ebb77499e1"
-  integrity sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q==
-
-lowercase-keys@^1.0.0, lowercase-keys@^1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.1.tgz#6f9e30b47084d971a7c820ff15a6c5167b74c26f"
-  integrity sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA==
-
-lowercase-keys@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-2.0.0.tgz#2603e78b7b4b0006cbca2fbcc8a3202558ac9479"
-  integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==
+  version "5.3.2"
+  resolved "https://registry.yarnpkg.com/long/-/long-5.3.2.tgz#1d84463095999262d7d7b7f8bfd4a8cc55167f83"
+  integrity sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==
 
 lru-cache@^6.0.0:
   version "6.0.0"
@@ -2127,10 +1521,10 @@ lru-cache@^7.14.1:
   resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-7.18.3.tgz#f793896e0fd0e954a59dfdd82f0773808df6aa89"
   integrity sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==
 
-lru-cache@^8.0.0:
-  version "8.0.5"
-  resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-8.0.5.tgz#983fe337f3e176667f8e567cfcce7cb064ea214e"
-  integrity sha512-MhWWlVnuab1RG5/zMRRcVGXZLCXrZTgfwMikgzCegsPnG62yDQo5JnqKkrK4jO5iKqDAZGItAqN5CtKBCBWRUA==
+lru.min@^1.0.0:
+  version "1.1.2"
+  resolved "https://registry.yarnpkg.com/lru.min/-/lru.min-1.1.2.tgz#01ce1d72cc50c7faf8bd1f809ebf05d4331021eb"
+  integrity sha512-Nv9KddBcQSlQopmBHXSsZVY5xsdlZkdH/Iey0BlcBYggMd4two7cZnKOK9vmy3nY0O5RGH99z1PCeTpPqszUYg==
 
 make-dir@^3.0.0, make-dir@^3.1.0:
   version "3.1.0"
@@ -2161,10 +1555,15 @@ make-fetch-happen@^9.1.0:
     socks-proxy-agent "^6.0.0"
     ssri "^8.0.0"
 
+math-intrinsics@^1.1.0:
+  version "1.1.0"
+  resolved "https://registry.yarnpkg.com/math-intrinsics/-/math-intrinsics-1.1.0.tgz#a0dd74be81e2aa5c2f27e65ce283605ee4e2b7f9"
+  integrity sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==
+
 media-typer@0.3.0:
   version "0.3.0"
   resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748"
-  integrity sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=
+  integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==
 
 merge-descriptors@1.0.3:
   version "1.0.3"
@@ -2174,26 +1573,19 @@ merge-descriptors@1.0.3:
 methods@~1.1.2:
   version "1.1.2"
   resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee"
-  integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=
-
-mime-db@1.44.0, "mime-db@>= 1.43.0 < 2":
-  version "1.44.0"
-  resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.44.0.tgz#fa11c5eb0aca1334b4233cb4d52f10c5a6272f92"
-  integrity sha512-/NOTfLrsPBVeH7YtFPgsVWveuL+4SjjYxaQ1xtM1KMFj7HdxlBlxeyNLzhyJVx7r4rZGJAZ/6lkKCitSc/Nmpg==
+  integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==
 
 mime-db@1.52.0:
   version "1.52.0"
   resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70"
   integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==
 
-mime-types@~2.1.24:
-  version "2.1.27"
-  resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.27.tgz#47949f98e279ea53119f5722e0f34e529bec009f"
-  integrity sha512-JIhqnCasI9yD+SsmkquHBxTSEuZdQX5BuQnS2Vc7puQQQ+8yiP5AY5uWhpdv4YL4VM5c6iliiYWPgJ/nJQLp7w==
-  dependencies:
-    mime-db "1.44.0"
+"mime-db@>= 1.43.0 < 2":
+  version "1.54.0"
+  resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.54.0.tgz#cddb3ee4f9c64530dff640236661d42cb6a314f5"
+  integrity sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==
 
-mime-types@~2.1.34:
+mime-types@~2.1.24, mime-types@~2.1.34:
   version "2.1.35"
   resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a"
   integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==
@@ -2205,26 +1597,26 @@ mime@1.6.0:
   resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1"
   integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==
 
-mimic-response@^1.0.0, mimic-response@^1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-1.0.1.tgz#4923538878eef42063cb8a3e3b0798781487ab1b"
-  integrity sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==
+mimic-response@^3.1.0:
+  version "3.1.0"
+  resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-3.1.0.tgz#2d1d59af9c1b129815accc2c46a022a5ce1fa3c9"
+  integrity sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==
 
-minimatch@^3.0.4:
-  version "3.0.4"
-  resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083"
-  integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==
-  dependencies:
-    brace-expansion "^1.1.7"
-
-minimatch@^3.0.5, minimatch@^3.1.2:
+minimatch@^3.1.1, minimatch@^3.1.2:
   version "3.1.2"
   resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b"
   integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==
   dependencies:
     brace-expansion "^1.1.7"
 
-minimist@^1.2.0, minimist@^1.2.5:
+minimatch@^5.1.0:
+  version "5.1.6"
+  resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-5.1.6.tgz#1cfcb8cf5522ea69952cd2af95ae09477f122a96"
+  integrity sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==
+  dependencies:
+    brace-expansion "^2.0.1"
+
+minimist@^1.2.0, minimist@^1.2.3:
   version "1.2.8"
   resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c"
   integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==
@@ -2268,14 +1660,6 @@ minipass-sized@^1.0.3:
   dependencies:
     minipass "^3.0.0"
 
-minipass@^2.6.0, minipass@^2.9.0:
-  version "2.9.0"
-  resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.9.0.tgz#e713762e7d3e32fed803115cf93e04bca9fcc9a6"
-  integrity sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg==
-  dependencies:
-    safe-buffer "^5.1.2"
-    yallist "^3.0.0"
-
 minipass@^3.0.0, minipass@^3.1.0, minipass@^3.1.1, minipass@^3.1.3:
   version "3.3.6"
   resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.3.6.tgz#7bba384db3a1520d18c9c0e5251c3444e95dd94a"
@@ -2283,17 +1667,10 @@ minipass@^3.0.0, minipass@^3.1.0, minipass@^3.1.1, minipass@^3.1.3:
   dependencies:
     yallist "^4.0.0"
 
-minipass@^4.0.0:
-  version "4.2.5"
-  resolved "https://registry.yarnpkg.com/minipass/-/minipass-4.2.5.tgz#9e0e5256f1e3513f8c34691dd68549e85b2c8ceb"
-  integrity sha512-+yQl7SX3bIT83Lhb4BVorMAHVuqsskxRdlmO9kTpyukp8vsm2Sn/fUOV9xlnG8/a5JsypJzap21lz/y3FBMJ8Q==
-
-minizlib@^1.3.3:
-  version "1.3.3"
-  resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.3.3.tgz#2290de96818a34c29551c8a8d301216bd65a861d"
-  integrity sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q==
-  dependencies:
-    minipass "^2.9.0"
+minipass@^5.0.0:
+  version "5.0.0"
+  resolved "https://registry.yarnpkg.com/minipass/-/minipass-5.0.0.tgz#3e9788ffb90b694a5d0ec94479a45b5d8738133d"
+  integrity sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==
 
 minizlib@^2.0.0, minizlib@^2.1.1:
   version "2.1.2"
@@ -2303,49 +1680,47 @@ minizlib@^2.0.0, minizlib@^2.1.1:
     minipass "^3.0.0"
     yallist "^4.0.0"
 
-mkdirp@^0.5.3, mkdirp@^0.5.5:
-  version "0.5.5"
-  resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def"
-  integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==
-  dependencies:
-    minimist "^1.2.5"
+mkdirp-classic@^0.5.2, mkdirp-classic@^0.5.3:
+  version "0.5.3"
+  resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113"
+  integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==
 
 mkdirp@^1.0.3, mkdirp@^1.0.4:
   version "1.0.4"
   resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e"
   integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==
 
-moment@^2.29.4:
-  version "2.29.4"
-  resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.4.tgz#3dbe052889fe7c1b2ed966fcb3a77328964ef108"
-  integrity sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w==
+moment@^2.30.1:
+  version "2.30.1"
+  resolved "https://registry.yarnpkg.com/moment/-/moment-2.30.1.tgz#f8c91c07b7a786e30c59926df530b4eac96974ae"
+  integrity sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how==
 
 ms@2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8"
-  integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=
+  integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==
 
-ms@2.1.2, ms@^2.1.1:
+ms@2.1.2:
   version "2.1.2"
   resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
   integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==
 
-ms@2.1.3, ms@^2.0.0:
+ms@2.1.3, ms@^2.0.0, ms@^2.1.1, ms@^2.1.3:
   version "2.1.3"
   resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2"
   integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==
 
-mysql2@^3.11.1:
-  version "3.11.1"
-  resolved "https://registry.yarnpkg.com/mysql2/-/mysql2-3.11.1.tgz#edfb856e2176fcf43d2cc066dd4959e9fc76ea85"
-  integrity sha512-Oc8Zffd0gpIJnJ/NOMp6IiiJJDdWc7nmWpS+UE3K9feTpYia8XkbgL6EaOJYz52f6+2pAoC0eAQqUzal4lnNGQ==
+mysql2@^3.15.3:
+  version "3.15.3"
+  resolved "https://registry.yarnpkg.com/mysql2/-/mysql2-3.15.3.tgz#f0348d9c7401bb98cb1f45ffc5a773b109f70808"
+  integrity sha512-FBrGau0IXmuqg4haEZRBfHNWB5mUARw6hNwPDXXGg0XzVJ50mr/9hb267lvpVMnhZ1FON3qNd4Xfcez1rbFwSg==
   dependencies:
     aws-ssl-profiles "^1.1.1"
     denque "^2.1.0"
     generate-function "^2.3.1"
-    iconv-lite "^0.6.3"
+    iconv-lite "^0.7.0"
     long "^5.2.1"
-    lru-cache "^8.0.0"
+    lru.min "^1.0.0"
     named-placeholders "^1.1.3"
     seq-queue "^0.0.5"
     sqlstring "^2.3.2"
@@ -2357,39 +1732,42 @@ named-placeholders@^1.1.3:
   dependencies:
     lru-cache "^7.14.1"
 
-natural-compare@^1.4.0:
-  version "1.4.0"
-  resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7"
-  integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=
+napi-build-utils@^2.0.0:
+  version "2.0.0"
+  resolved "https://registry.yarnpkg.com/napi-build-utils/-/napi-build-utils-2.0.0.tgz#13c22c0187fcfccce1461844136372a47ddc027e"
+  integrity sha512-GEbrYkbfF7MoNaoh2iGG84Mnf/WZfB0GdGEsM8wz7Expx/LlWf5U8t9nvJKXSp3qr5IsEbK04cBGhol/KwOsWA==
 
-needle@^2.5.0:
-  version "2.5.0"
-  resolved "https://registry.yarnpkg.com/needle/-/needle-2.5.0.tgz#e6fc4b3cc6c25caed7554bd613a5cf0bac8c31c0"
-  integrity sha512-o/qITSDR0JCyCKEQ1/1bnUXMmznxabbwi/Y4WwJElf+evwJNFNwIDMCCt5IigFVxgeGBJESLohGtIS9gEzo1fA==
-  dependencies:
-    debug "^3.2.6"
-    iconv-lite "^0.4.4"
-    sax "^1.2.4"
-
-negotiator@0.6.3, negotiator@^0.6.2:
+negotiator@0.6.3:
   version "0.6.3"
   resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd"
   integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==
 
-node-addon-api@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-3.0.0.tgz#812446a1001a54f71663bed188314bba07e09247"
-  integrity sha512-sSHCgWfJ+Lui/u+0msF3oyCgvdkhxDbkCS6Q8uiJquzOimkJBvX6hl5aSSA7DR1XbMpdM8r7phjcF63sF4rkKg==
+negotiator@^0.6.2, negotiator@~0.6.4:
+  version "0.6.4"
+  resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.4.tgz#777948e2452651c570b712dd01c23e262713fff7"
+  integrity sha512-myRT3DiWPHqho5PrJaIRyaMv2kgYf0mUVgBNOYMuCH5Ki1yEiQaf/ZJuQ62nvpc44wL5WDbTX7yGJi1Neevw8w==
 
-node-addon-api@^4.2.0:
-  version "4.3.0"
-  resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-4.3.0.tgz#52a1a0b475193e0928e98e0426a0d1254782b77f"
-  integrity sha512-73sE9+3UaLYYFmDsFZnqCInzPyh3MqIwZO9cw58yIqAZhONrrabrYyYe3TuIqtIiOuTXVhsGau8hcrhhwSsDIQ==
+node-abi@^3.3.0:
+  version "3.78.0"
+  resolved "https://registry.yarnpkg.com/node-abi/-/node-abi-3.78.0.tgz#fd0ecbd0aa89857b98da06bd3909194abb0821ba"
+  integrity sha512-E2wEyrgX/CqvicaQYU3Ze1PFGjc4QYPGsjUrlYkqAE0WjHEZwgOsGMPMzkMse4LjJbDmaEuDX3CM036j5K2DSQ==
+  dependencies:
+    semver "^7.3.5"
+
+node-addon-api@^5.0.0:
+  version "5.1.0"
+  resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-5.1.0.tgz#49da1ca055e109a23d537e9de43c09cca21eb762"
+  integrity sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA==
+
+node-addon-api@^7.0.0:
+  version "7.1.1"
+  resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-7.1.1.tgz#1aba6693b0f255258a049d621329329322aad558"
+  integrity sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ==
 
 node-fetch@^2.6.7:
-  version "2.6.9"
-  resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.9.tgz#7c7f744b5cc6eb5fd404e0c7a9fec630a55657e6"
-  integrity sha512-DJm/CJkZkRjKKj4Zi4BsKVZh3ValV5IR5s7LVZnW+6YMh0W1BfNA8XSs6DLMGYlId5F3KnA70uu2qepcR08Qqg==
+  version "2.7.0"
+  resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d"
+  integrity sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==
   dependencies:
     whatwg-url "^5.0.0"
 
@@ -2409,23 +1787,7 @@ node-gyp@8.x:
     tar "^6.1.2"
     which "^2.0.2"
 
-node-pre-gyp@0.15.0:
-  version "0.15.0"
-  resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.15.0.tgz#c2fc383276b74c7ffa842925241553e8b40f1087"
-  integrity sha512-7QcZa8/fpaU/BKenjcaeFF9hLz2+7S9AqyXFhlH/rilsQ/hPZKK32RtR5EQHJElgu+q5RfbJ34KriI79UWaorA==
-  dependencies:
-    detect-libc "^1.0.2"
-    mkdirp "^0.5.3"
-    needle "^2.5.0"
-    nopt "^4.0.1"
-    npm-packlist "^1.1.6"
-    npmlog "^4.0.2"
-    rc "^1.2.7"
-    rimraf "^2.6.1"
-    semver "^5.3.0"
-    tar "^4.4.2"
-
-node-rsa@^1.0.8:
+node-rsa@^1.1.1:
   version "1.1.1"
   resolved "https://registry.yarnpkg.com/node-rsa/-/node-rsa-1.1.1.tgz#efd9ad382097782f506153398496f79e4464434d"
   integrity sha512-Jd4cvbJMryN21r5HgxQOpMEqv+ooke/korixNNK3mGqfGJmy0M77WDDzo/05969+OkMy3XW1UuZsSmW9KQm7Fw==
@@ -2433,28 +1795,20 @@ node-rsa@^1.0.8:
     asn1 "^0.2.4"
 
 nodemon@^2.0.2:
-  version "2.0.4"
-  resolved "https://registry.yarnpkg.com/nodemon/-/nodemon-2.0.4.tgz#55b09319eb488d6394aa9818148c0c2d1c04c416"
-  integrity sha512-Ltced+hIfTmaS28Zjv1BM552oQ3dbwPqI4+zI0SLgq+wpJhSyqgYude/aZa/3i31VCQWMfXJVxvu86abcam3uQ==
+  version "2.0.22"
+  resolved "https://registry.yarnpkg.com/nodemon/-/nodemon-2.0.22.tgz#182c45c3a78da486f673d6c1702e00728daf5258"
+  integrity sha512-B8YqaKMmyuCO7BowF1Z1/mkPqLk6cs/l63Ojtd6otKjMx47Dq1utxfRxcavH1I7VSaL8n5BUaoutadnsX3AAVQ==
   dependencies:
-    chokidar "^3.2.2"
-    debug "^3.2.6"
+    chokidar "^3.5.2"
+    debug "^3.2.7"
     ignore-by-default "^1.0.1"
-    minimatch "^3.0.4"
-    pstree.remy "^1.1.7"
+    minimatch "^3.1.2"
+    pstree.remy "^1.1.8"
     semver "^5.7.1"
+    simple-update-notifier "^1.0.7"
     supports-color "^5.5.0"
     touch "^3.1.0"
-    undefsafe "^2.0.2"
-    update-notifier "^4.0.0"
-
-nopt@^4.0.1:
-  version "4.0.3"
-  resolved "https://registry.yarnpkg.com/nopt/-/nopt-4.0.3.tgz#a375cad9d02fd921278d954c2254d5aa57e15e48"
-  integrity sha512-CvaGwVMztSMJLOeXPrez7fyfObdZqNUK1cPAEzLHrTybIua9pMdmmPR5YwtfNftIOMv3DPUhFaxsZMNTQO20Kg==
-  dependencies:
-    abbrev "1"
-    osenv "^0.1.4"
+    undefsafe "^2.0.5"
 
 nopt@^5.0.0:
   version "5.0.0"
@@ -2463,54 +1817,11 @@ nopt@^5.0.0:
   dependencies:
     abbrev "1"
 
-nopt@~1.0.10:
-  version "1.0.10"
-  resolved "https://registry.yarnpkg.com/nopt/-/nopt-1.0.10.tgz#6ddd21bd2a31417b92727dd585f8a6f37608ebee"
-  integrity sha1-bd0hvSoxQXuScn3Vhfim83YI6+4=
-  dependencies:
-    abbrev "1"
-
 normalize-path@^3.0.0, normalize-path@~3.0.0:
   version "3.0.0"
   resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65"
   integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==
 
-normalize-url@^4.1.0:
-  version "4.5.1"
-  resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-4.5.1.tgz#0dd90cf1288ee1d1313b87081c9a5932ee48518a"
-  integrity sha512-9UZCFRHQdNrfTpGg8+1INIg93B6zE0aXMVFkw1WFwvO4SlZywU6aLg5Of0Ap/PgcbSw4LNxvMWXMeugwMCX0AA==
-
-npm-bundled@^1.0.1:
-  version "1.1.1"
-  resolved "https://registry.yarnpkg.com/npm-bundled/-/npm-bundled-1.1.1.tgz#1edd570865a94cdb1bc8220775e29466c9fb234b"
-  integrity sha512-gqkfgGePhTpAEgUsGEgcq1rqPXA+tv/aVBlgEzfXwA1yiUJF7xtEt3CtVwOjNYQOVknDk0F20w58Fnm3EtG0fA==
-  dependencies:
-    npm-normalize-package-bin "^1.0.1"
-
-npm-normalize-package-bin@^1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/npm-normalize-package-bin/-/npm-normalize-package-bin-1.0.1.tgz#6e79a41f23fd235c0623218228da7d9c23b8f6e2"
-  integrity sha512-EPfafl6JL5/rU+ot6P3gRSCpPDW5VmIzX959Ob1+ySFUuuYHWHekXpwdUZcKP5C+DS4GEtdJluwBjnsNDl+fSA==
-
-npm-packlist@^1.1.6:
-  version "1.4.8"
-  resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-1.4.8.tgz#56ee6cc135b9f98ad3d51c1c95da22bbb9b2ef3e"
-  integrity sha512-5+AZgwru5IevF5ZdnFglB5wNlHG1AOOuw28WhUq8/8emhBmLv6jX5by4WJCh7lW0uSYZYS6DXqIsyZVIXRZU9A==
-  dependencies:
-    ignore-walk "^3.0.1"
-    npm-bundled "^1.0.1"
-    npm-normalize-package-bin "^1.0.1"
-
-npmlog@^4.0.2:
-  version "4.1.2"
-  resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b"
-  integrity sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==
-  dependencies:
-    are-we-there-yet "~1.1.2"
-    console-control-strings "~1.1.0"
-    gauge "~2.7.3"
-    set-blocking "~2.0.0"
-
 npmlog@^5.0.1:
   version "5.0.1"
   resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-5.0.1.tgz#f06678e80e29419ad67ab964e0fa69959c1eb8b0"
@@ -2531,20 +1842,15 @@ npmlog@^6.0.0:
     gauge "^4.0.3"
     set-blocking "^2.0.0"
 
-number-is-nan@^1.0.0:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d"
-  integrity sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=
-
-object-assign@^4.1.0, object-assign@^4.1.1:
+object-assign@^4.1.1:
   version "4.1.1"
   resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863"
-  integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=
+  integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==
 
-object-inspect@^1.13.1:
-  version "1.13.1"
-  resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.13.1.tgz#b96c6109324ccfef6b12216a956ca4dc2ff94bc2"
-  integrity sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==
+object-inspect@^1.13.3:
+  version "1.13.4"
+  resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.13.4.tgz#8375265e21bc20d0fa582c22e1b13485d6e00213"
+  integrity sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==
 
 objection@3.0.1:
   version "3.0.1"
@@ -2561,53 +1867,18 @@ on-finished@2.4.1:
   dependencies:
     ee-first "1.1.1"
 
-on-headers@~1.0.2:
-  version "1.0.2"
-  resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f"
-  integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==
+on-headers@~1.1.0:
+  version "1.1.0"
+  resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.1.0.tgz#59da4f91c45f5f989c6e4bcedc5a3b0aed70ff65"
+  integrity sha512-737ZY3yNnXy37FHkQxPzt4UZ2UWPWiCZWLvFZ4fu5cueciegX0zGPnrlY6bwRg4FdQOe9YU8MkmJwGhoMybl8A==
 
 once@^1.3.0, once@^1.3.1, once@^1.4.0:
   version "1.4.0"
   resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
-  integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E=
+  integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==
   dependencies:
     wrappy "1"
 
-optionator@^0.9.1:
-  version "0.9.1"
-  resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499"
-  integrity sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==
-  dependencies:
-    deep-is "^0.1.3"
-    fast-levenshtein "^2.0.6"
-    levn "^0.4.1"
-    prelude-ls "^1.2.1"
-    type-check "^0.4.0"
-    word-wrap "^1.2.3"
-
-os-homedir@^1.0.0:
-  version "1.0.2"
-  resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3"
-  integrity sha1-/7xJiDNuDoM94MFox+8VISGqf7M=
-
-os-tmpdir@^1.0.0:
-  version "1.0.2"
-  resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274"
-  integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=
-
-osenv@^0.1.4:
-  version "0.1.5"
-  resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.5.tgz#85cdfafaeb28e8677f416e287592b5f3f49ea410"
-  integrity sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g==
-  dependencies:
-    os-homedir "^1.0.0"
-    os-tmpdir "^1.0.0"
-
-p-cancelable@^1.0.0:
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-1.1.0.tgz#d078d15a3af409220c886f1d9a0ca2e441ab26cc"
-  integrity sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw==
-
 p-limit@^1.1.0:
   version "1.3.0"
   resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.3.0.tgz#b86bd5f0c25690911c7590fcbfc2010d54b3ccb8"
@@ -2622,17 +1893,10 @@ p-limit@^2.2.0:
   dependencies:
     p-try "^2.0.0"
 
-p-limit@^3.0.2:
-  version "3.1.0"
-  resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b"
-  integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==
-  dependencies:
-    yocto-queue "^0.1.0"
-
 p-locate@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43"
-  integrity sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=
+  integrity sha512-nQja7m7gSKuewoVRen45CtVfODR3crN3goVQ0DDZ9N3yHxgpkuBhZqsaiotSQRrADUrne346peY7kT3TSACykg==
   dependencies:
     p-limit "^1.1.0"
 
@@ -2643,13 +1907,6 @@ p-locate@^4.1.0:
   dependencies:
     p-limit "^2.2.0"
 
-p-locate@^5.0.0:
-  version "5.0.0"
-  resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834"
-  integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==
-  dependencies:
-    p-limit "^3.0.2"
-
 p-map@^4.0.0:
   version "4.0.0"
   resolved "https://registry.yarnpkg.com/p-map/-/p-map-4.0.0.tgz#bb2f95a5eda2ec168ec9274e06a747c3e2904d2b"
@@ -2660,34 +1917,17 @@ p-map@^4.0.0:
 p-try@^1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/p-try/-/p-try-1.0.0.tgz#cbc79cdbaf8fd4228e13f621f2b1a237c1b207b3"
-  integrity sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=
+  integrity sha512-U1etNYuMJoIz3ZXSrrySFjsXQTWOx2/jdi86L+2pRvph/qMKL6sbcCYdH23fqsbm8TH2Gn0OybpT4eSFlCVHww==
 
 p-try@^2.0.0:
   version "2.2.0"
   resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6"
   integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==
 
-package-json@^6.3.0:
-  version "6.5.0"
-  resolved "https://registry.yarnpkg.com/package-json/-/package-json-6.5.0.tgz#6feedaca35e75725876d0b0e64974697fed145b0"
-  integrity sha512-k3bdm2n25tkyxcjSKzB5x8kfVxlMdgsbPr0GkZcwHsLpba6cBjqCt1KlcChKEvxHIcTB1FVMuwoijZ26xex5MQ==
-  dependencies:
-    got "^9.6.0"
-    registry-auth-token "^4.0.0"
-    registry-url "^5.0.0"
-    semver "^6.2.0"
-
-parent-module@^1.0.0:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2"
-  integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==
-  dependencies:
-    callsites "^3.0.0"
-
 parse-json@^4.0.0:
   version "4.0.0"
   resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-4.0.0.tgz#be35f5425be1f7f6c747184f98a788cb99477ee0"
-  integrity sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=
+  integrity sha512-aOIos8bujGN93/8Ox/jPLh7RwVnPEysynVFE+fQZyg6jKELEHwzgKdLRFHUgXJL6kylijVSBC4BvN9OmsB48Rw==
   dependencies:
     error-ex "^1.3.1"
     json-parse-better-errors "^1.0.1"
@@ -2700,7 +1940,7 @@ parseurl@~1.3.3:
 path-exists@^3.0.0:
   version "3.0.0"
   resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515"
-  integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=
+  integrity sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==
 
 path-exists@^4.0.0:
   version "4.0.0"
@@ -2710,62 +1950,57 @@ path-exists@^4.0.0:
 path-is-absolute@^1.0.0:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f"
-  integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18=
-
-path-key@^3.1.0:
-  version "3.1.1"
-  resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375"
-  integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==
+  integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==
 
 path-parse@^1.0.7:
   version "1.0.7"
   resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735"
   integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==
 
-path-to-regexp@0.1.10:
-  version "0.1.10"
-  resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.10.tgz#67e9108c5c0551b9e5326064387de4763c4d5f8b"
-  integrity sha512-7lf7qcQidTku0Gu3YDPc8DJ1q7OOucfa/BSsIwjuh56VU7katFvuM8hULfkwB3Fns/rsVF7PwPKVw1sl5KQS9w==
+path-to-regexp@0.1.12:
+  version "0.1.12"
+  resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.12.tgz#d5e1a12e478a976d432ef3c58d534b9923164bb7"
+  integrity sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==
 
 path@^0.12.7:
   version "0.12.7"
   resolved "https://registry.yarnpkg.com/path/-/path-0.12.7.tgz#d4dc2a506c4ce2197eb481ebfcd5b36c0140b10f"
-  integrity sha1-1NwqUGxM4hl+tIHr/NWzbAFAsQ8=
+  integrity sha512-aXXC6s+1w7otVF9UletFkFcDsJeO7lSZBPUQhtb5O0xJe8LtYhj/GxldoL09bBj9+ZmE2hNoHqQSFMN5fikh4Q==
   dependencies:
     process "^0.11.1"
     util "^0.10.3"
 
-pg-cloudflare@^1.1.1:
-  version "1.1.1"
-  resolved "https://registry.yarnpkg.com/pg-cloudflare/-/pg-cloudflare-1.1.1.tgz#e6d5833015b170e23ae819e8c5d7eaedb472ca98"
-  integrity sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q==
+pg-cloudflare@^1.2.7:
+  version "1.2.7"
+  resolved "https://registry.yarnpkg.com/pg-cloudflare/-/pg-cloudflare-1.2.7.tgz#a1f3d226bab2c45ae75ea54d65ec05ac6cfafbef"
+  integrity sha512-YgCtzMH0ptvZJslLM1ffsY4EuGaU0cx4XSdXLRFae8bPP4dS5xL1tNB3k2o/N64cHJpwU7dxKli/nZ2lUa5fLg==
 
 pg-connection-string@2.5.0:
   version "2.5.0"
   resolved "https://registry.yarnpkg.com/pg-connection-string/-/pg-connection-string-2.5.0.tgz#538cadd0f7e603fc09a12590f3b8a452c2c0cf34"
   integrity sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ==
 
-pg-connection-string@^2.7.0:
-  version "2.7.0"
-  resolved "https://registry.yarnpkg.com/pg-connection-string/-/pg-connection-string-2.7.0.tgz#f1d3489e427c62ece022dba98d5262efcb168b37"
-  integrity sha512-PI2W9mv53rXJQEOb8xNR8lH7Hr+EKa6oJa38zsK0S/ky2er16ios1wLKhZyxzD7jUReiWokc9WK5nxSnC7W1TA==
+pg-connection-string@^2.9.1:
+  version "2.9.1"
+  resolved "https://registry.yarnpkg.com/pg-connection-string/-/pg-connection-string-2.9.1.tgz#bb1fd0011e2eb76ac17360dc8fa183b2d3465238"
+  integrity sha512-nkc6NpDcvPVpZXxrreI/FOtX3XemeLl8E0qFr6F2Lrm/I8WOnaWNhIPK2Z7OHpw7gh5XJThi6j6ppgNoaT1w4w==
 
 pg-int8@1.0.1:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/pg-int8/-/pg-int8-1.0.1.tgz#943bd463bf5b71b4170115f80f8efc9a0c0eb78c"
   integrity sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==
 
-pg-pool@^3.7.0:
-  version "3.7.0"
-  resolved "https://registry.yarnpkg.com/pg-pool/-/pg-pool-3.7.0.tgz#d4d3c7ad640f8c6a2245adc369bafde4ebb8cbec"
-  integrity sha512-ZOBQForurqh4zZWjrgSwwAtzJ7QiRX0ovFkZr2klsen3Nm0aoh33Ls0fzfv3imeH/nw/O27cjdz5kzYJfeGp/g==
+pg-pool@^3.10.1:
+  version "3.10.1"
+  resolved "https://registry.yarnpkg.com/pg-pool/-/pg-pool-3.10.1.tgz#481047c720be2d624792100cac1816f8850d31b2"
+  integrity sha512-Tu8jMlcX+9d8+QVzKIvM/uJtp07PKr82IUOYEphaWcoBhIYkoHpLXN3qO59nAI11ripznDsEzEv8nUxBVWajGg==
 
-pg-protocol@^1.7.0:
-  version "1.7.0"
-  resolved "https://registry.yarnpkg.com/pg-protocol/-/pg-protocol-1.7.0.tgz#ec037c87c20515372692edac8b63cf4405448a93"
-  integrity sha512-hTK/mE36i8fDDhgDFjy6xNOG+LCorxLG3WO17tku+ij6sVHXh1jQUJ8hYAnRhNla4QVD2H8er/FOjc/+EgC6yQ==
+pg-protocol@^1.10.3:
+  version "1.10.3"
+  resolved "https://registry.yarnpkg.com/pg-protocol/-/pg-protocol-1.10.3.tgz#ac9e4778ad3f84d0c5670583bab976ea0a34f69f"
+  integrity sha512-6DIBgBQaTKDJyxnXaLiLR8wBpQQcGWuAESkRBX/t6OwA8YsqP+iVSiond2EDy6Y/dsGk8rh/jtax3js5NeV7JQ==
 
-pg-types@^2.1.0:
+pg-types@2.2.0:
   version "2.2.0"
   resolved "https://registry.yarnpkg.com/pg-types/-/pg-types-2.2.0.tgz#2d0250d636454f7cfa3b6ae0382fdfa8063254a3"
   integrity sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==
@@ -2776,20 +2011,20 @@ pg-types@^2.1.0:
     postgres-date "~1.0.4"
     postgres-interval "^1.1.0"
 
-pg@^8.13.1:
-  version "8.13.1"
-  resolved "https://registry.yarnpkg.com/pg/-/pg-8.13.1.tgz#6498d8b0a87ff76c2df7a32160309d3168c0c080"
-  integrity sha512-OUir1A0rPNZlX//c7ksiu7crsGZTKSOXJPgtNiHGIlC9H0lO+NC6ZDYksSgBYY/thSWhnSRBv8w1lieNNGATNQ==
+pg@^8.16.3:
+  version "8.16.3"
+  resolved "https://registry.yarnpkg.com/pg/-/pg-8.16.3.tgz#160741d0b44fdf64680e45374b06d632e86c99fd"
+  integrity sha512-enxc1h0jA/aq5oSDMvqyW3q89ra6XIIDZgCX9vkMrnz5DFTw/Ny3Li2lFQ+pt3L6MCgm/5o2o8HW9hiJji+xvw==
   dependencies:
-    pg-connection-string "^2.7.0"
-    pg-pool "^3.7.0"
-    pg-protocol "^1.7.0"
-    pg-types "^2.1.0"
-    pgpass "1.x"
+    pg-connection-string "^2.9.1"
+    pg-pool "^3.10.1"
+    pg-protocol "^1.10.3"
+    pg-types "2.2.0"
+    pgpass "1.0.5"
   optionalDependencies:
-    pg-cloudflare "^1.1.1"
+    pg-cloudflare "^1.2.7"
 
-pgpass@1.x:
+pgpass@1.0.5:
   version "1.0.5"
   resolved "https://registry.yarnpkg.com/pgpass/-/pgpass-1.0.5.tgz#9b873e4a564bb10fa7a7dbd55312728d422a223d"
   integrity sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==
@@ -2797,19 +2032,19 @@ pgpass@1.x:
     split2 "^4.1.0"
 
 picomatch@^2.0.4, picomatch@^2.2.1:
-  version "2.2.2"
-  resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.2.tgz#21f333e9b6b8eaff02468f5146ea406d345f4dad"
-  integrity sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==
+  version "2.3.1"
+  resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42"
+  integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==
 
 pify@^3.0.0:
   version "3.0.0"
   resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176"
-  integrity sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=
+  integrity sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==
 
 pkg-conf@^2.1.0:
   version "2.1.0"
   resolved "https://registry.yarnpkg.com/pkg-conf/-/pkg-conf-2.1.0.tgz#2126514ca6f2abfebd168596df18ba57867f0058"
-  integrity sha1-ISZRTKbyq/69FoWW3xi6V4Z/AFg=
+  integrity sha512-C+VUP+8jis7EsQZIhDYmS5qlNtjv2yP4SNtjXK9AP1ZcTRlnSfuumaTnRfYZnYgUUYVIKqL0fRvmUGDV2fmp6g==
   dependencies:
     find-up "^2.0.0"
     load-json-file "^4.0.0"
@@ -2836,25 +2071,23 @@ postgres-interval@^1.1.0:
   dependencies:
     xtend "^4.0.0"
 
-prelude-ls@^1.2.1:
-  version "1.2.1"
-  resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396"
-  integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==
-
-prepend-http@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897"
-  integrity sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc=
-
-prettier@^2.0.4:
-  version "2.0.5"
-  resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.0.5.tgz#d6d56282455243f2f92cc1716692c08aa31522d4"
-  integrity sha512-7PtVymN48hGcO4fGjybyBSIWDsLU4H4XlvOHfq91pz9kkGlonzwTfYkaIEwiRg/dAJF9YlbsduBAgtYLi+8cFg==
-
-printj@~1.1.0:
-  version "1.1.2"
-  resolved "https://registry.yarnpkg.com/printj/-/printj-1.1.2.tgz#d90deb2975a8b9f600fb3a1c94e3f4c53c78a222"
-  integrity sha512-zA2SmoLaxZyArQTOPj5LXecR+RagfPSU5Kw1qP+jkWeNlrq+eJZyY2oS68SU1Z/7/myXM4lo9716laOFAVStCQ==
+prebuild-install@^7.1.1:
+  version "7.1.3"
+  resolved "https://registry.yarnpkg.com/prebuild-install/-/prebuild-install-7.1.3.tgz#d630abad2b147443f20a212917beae68b8092eec"
+  integrity sha512-8Mf2cbV7x1cXPUILADGI3wuhfqWvtiLA1iclTDbFRZkgRQS0NqsPZphna9V+HyTEadheuPmjaJMsbzKQFOzLug==
+  dependencies:
+    detect-libc "^2.0.0"
+    expand-template "^2.0.3"
+    github-from-package "0.0.0"
+    minimist "^1.2.3"
+    mkdirp-classic "^0.5.3"
+    napi-build-utils "^2.0.0"
+    node-abi "^3.3.0"
+    pump "^3.0.0"
+    rc "^1.2.7"
+    simple-get "^4.0.0"
+    tar-fs "^2.0.0"
+    tunnel-agent "^0.6.0"
 
 process-nextick-args@~2.0.0:
   version "2.0.1"
@@ -2864,7 +2097,7 @@ process-nextick-args@~2.0.0:
 process@^0.11.1:
   version "0.11.10"
   resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182"
-  integrity sha1-czIwDoQBYb2j5podHZGn1LwW8YI=
+  integrity sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==
 
 promise-inflight@^1.0.1:
   version "1.0.1"
@@ -2887,38 +2120,19 @@ proxy-addr@~2.0.7:
     forwarded "0.2.0"
     ipaddr.js "1.9.1"
 
-pstree.remy@^1.1.7:
+pstree.remy@^1.1.8:
   version "1.1.8"
   resolved "https://registry.yarnpkg.com/pstree.remy/-/pstree.remy-1.1.8.tgz#c242224f4a67c21f686839bbdb4ac282b8373d3a"
   integrity sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==
 
 pump@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64"
-  integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==
+  version "3.0.3"
+  resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.3.tgz#151d979f1a29668dc0025ec589a455b53282268d"
+  integrity sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA==
   dependencies:
     end-of-stream "^1.1.0"
     once "^1.3.1"
 
-punycode@^2.1.0:
-  version "2.1.1"
-  resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec"
-  integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==
-
-pupa@^2.0.1:
-  version "2.0.1"
-  resolved "https://registry.yarnpkg.com/pupa/-/pupa-2.0.1.tgz#dbdc9ff48ffbea4a26a069b6f9f7abb051008726"
-  integrity sha512-hEJH0s8PXLY/cdXh66tNEQGndDrIKNqNC5xmrysZy3i5C3oEoLna7YAOad+7u125+zH1HNXUmGEkrhb3c2VriA==
-  dependencies:
-    escape-goat "^2.0.0"
-
-qs@6.11.0:
-  version "6.11.0"
-  resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.0.tgz#fd0d963446f7a65e1367e01abd85429453f0c37a"
-  integrity sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==
-  dependencies:
-    side-channel "^1.0.4"
-
 qs@6.13.0:
   version "6.13.0"
   resolved "https://registry.yarnpkg.com/qs/-/qs-6.13.0.tgz#6ca3bd58439f7e245655798997787b0d88a51906"
@@ -2929,12 +2143,7 @@ qs@6.13.0:
 querystring@0.2.0:
   version "0.2.0"
   resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620"
-  integrity sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=
-
-queue-microtask@^1.2.2:
-  version "1.2.3"
-  resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243"
-  integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==
+  integrity sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g==
 
 range-parser@~1.2.1:
   version "1.2.1"
@@ -2951,7 +2160,7 @@ raw-body@2.5.2:
     iconv-lite "0.4.24"
     unpipe "1.0.0"
 
-rc@^1.2.7, rc@^1.2.8:
+rc@^1.2.7:
   version "1.2.8"
   resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed"
   integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==
@@ -2961,10 +2170,10 @@ rc@^1.2.7, rc@^1.2.8:
     minimist "^1.2.0"
     strip-json-comments "~2.0.1"
 
-readable-stream@^2.0.0, readable-stream@^2.0.5, readable-stream@^2.0.6:
-  version "2.3.7"
-  resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57"
-  integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==
+readable-stream@^2.0.0, readable-stream@^2.0.5:
+  version "2.3.8"
+  resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.8.tgz#91125e8042bba1b9887f49345f6277027ce8be9b"
+  integrity sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==
   dependencies:
     core-util-is "~1.0.0"
     inherits "~2.0.3"
@@ -2975,25 +2184,25 @@ readable-stream@^2.0.0, readable-stream@^2.0.5, readable-stream@^2.0.6:
     util-deprecate "~1.0.1"
 
 readable-stream@^3.1.1, readable-stream@^3.4.0, readable-stream@^3.6.0:
-  version "3.6.0"
-  resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198"
-  integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==
+  version "3.6.2"
+  resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967"
+  integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==
   dependencies:
     inherits "^2.0.3"
     string_decoder "^1.1.1"
     util-deprecate "^1.0.1"
 
-readdir-glob@^1.0.0:
-  version "1.1.1"
-  resolved "https://registry.yarnpkg.com/readdir-glob/-/readdir-glob-1.1.1.tgz#f0e10bb7bf7bfa7e0add8baffdc54c3f7dbee6c4"
-  integrity sha512-91/k1EzZwDx6HbERR+zucygRFfiPl2zkIYZtv3Jjr6Mn7SkKcVct8aVO+sSRiGMc6fLf72du3d92/uY63YPdEA==
+readdir-glob@^1.1.2:
+  version "1.1.3"
+  resolved "https://registry.yarnpkg.com/readdir-glob/-/readdir-glob-1.1.3.tgz#c3d831f51f5e7bfa62fa2ffbe4b508c640f09584"
+  integrity sha512-v05I2k7xN8zXvPD9N+z/uhXPaj0sUFCe2rcWZIpBsqxfP7xXFQ0tipAd/wjj1YxWyWtUS5IDJpOG82JKt2EAVA==
   dependencies:
-    minimatch "^3.0.4"
+    minimatch "^5.1.0"
 
-readdirp@~3.4.0:
-  version "3.4.0"
-  resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.4.0.tgz#9fdccdf9e9155805449221ac645e8303ab5b9ada"
-  integrity sha512-0xe001vZBnJEK+uKcj8qOhyAKPzIT+gStxWr3LCB0DwcXR5NZJ3IaC+yGnHCYzB/S7ov3m3EEbZI2zeNvX+hGQ==
+readdirp@~3.6.0:
+  version "3.6.0"
+  resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7"
+  integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==
   dependencies:
     picomatch "^2.2.1"
 
@@ -3004,24 +2213,10 @@ rechoir@^0.8.0:
   dependencies:
     resolve "^1.20.0"
 
-registry-auth-token@^4.0.0:
-  version "4.2.0"
-  resolved "https://registry.yarnpkg.com/registry-auth-token/-/registry-auth-token-4.2.0.tgz#1d37dffda72bbecd0f581e4715540213a65eb7da"
-  integrity sha512-P+lWzPrsgfN+UEpDS3U8AQKg/UjZX6mQSJueZj3EK+vNESoqBSpBUD3gmu4sF9lOsjXWjF11dQKUqemf3veq1w==
-  dependencies:
-    rc "^1.2.8"
-
-registry-url@^5.0.0:
-  version "5.1.0"
-  resolved "https://registry.yarnpkg.com/registry-url/-/registry-url-5.1.0.tgz#e98334b50d5434b81136b44ec638d9c2009c5009"
-  integrity sha512-8acYXXTI0AkQv6RAOjE3vOaIXZkT9wo4LOFbBKYQEEnnMNBpKqdUrI6S4NT0KPIo/WVvJ5tE/X5LF/TQUf0ekw==
-  dependencies:
-    rc "^1.2.8"
-
 require-directory@^2.1.1:
   version "2.1.1"
   resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42"
-  integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I=
+  integrity sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==
 
 require-from-string@^2.0.2:
   version "2.0.2"
@@ -3033,49 +2228,25 @@ require-main-filename@^2.0.0:
   resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b"
   integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==
 
-resolve-from@^4.0.0:
-  version "4.0.0"
-  resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6"
-  integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==
-
 resolve-from@^5.0.0:
   version "5.0.0"
   resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69"
   integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==
 
 resolve@^1.20.0:
-  version "1.22.1"
-  resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.1.tgz#27cb2ebb53f91abb49470a928bba7558066ac177"
-  integrity sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==
+  version "1.22.10"
+  resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.10.tgz#b663e83ffb09bbf2386944736baae803029b8b39"
+  integrity sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==
   dependencies:
-    is-core-module "^2.9.0"
+    is-core-module "^2.16.0"
     path-parse "^1.0.7"
     supports-preserve-symlinks-flag "^1.0.0"
 
-responselike@^1.0.2:
-  version "1.0.2"
-  resolved "https://registry.yarnpkg.com/responselike/-/responselike-1.0.2.tgz#918720ef3b631c5642be068f15ade5a46f4ba1e7"
-  integrity sha1-kYcg7ztjHFZCvgaPFa3lpG9Loec=
-  dependencies:
-    lowercase-keys "^1.0.0"
-
 retry@^0.12.0:
   version "0.12.0"
   resolved "https://registry.yarnpkg.com/retry/-/retry-0.12.0.tgz#1b42a6266a21f07421d1b0b54b7dc167b01c013b"
   integrity sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==
 
-reusify@^1.0.4:
-  version "1.0.4"
-  resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76"
-  integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==
-
-rimraf@^2.6.1:
-  version "2.7.1"
-  resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec"
-  integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==
-  dependencies:
-    glob "^7.1.3"
-
 rimraf@^3.0.2:
   version "3.0.2"
   resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a"
@@ -3083,75 +2254,40 @@ rimraf@^3.0.2:
   dependencies:
     glob "^7.1.3"
 
-run-parallel@^1.1.9:
-  version "1.2.0"
-  resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee"
-  integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==
-  dependencies:
-    queue-microtask "^1.2.2"
-
-safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1:
-  version "5.1.2"
-  resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d"
-  integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==
-
-safe-buffer@5.2.1, safe-buffer@^5.0.1, safe-buffer@^5.1.2, safe-buffer@^5.2.1, safe-buffer@~5.2.0:
+safe-buffer@5.2.1, safe-buffer@^5.0.1, safe-buffer@~5.2.0:
   version "5.2.1"
   resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6"
   integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==
 
+safe-buffer@~5.1.0, safe-buffer@~5.1.1:
+  version "5.1.2"
+  resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d"
+  integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==
+
 "safer-buffer@>= 2.1.2 < 3", "safer-buffer@>= 2.1.2 < 3.0.0", safer-buffer@~2.1.0:
   version "2.1.2"
   resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a"
   integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==
 
-sax@^1.2.4:
-  version "1.2.4"
-  resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9"
-  integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==
-
-semver-diff@^3.1.1:
-  version "3.1.1"
-  resolved "https://registry.yarnpkg.com/semver-diff/-/semver-diff-3.1.1.tgz#05f77ce59f325e00e2706afd67bb506ddb1ca32b"
-  integrity sha512-GX0Ix/CJcHyB8c4ykpHGIAvLyOwOobtM/8d+TQkAd81/bEjgPHrfba41Vpesr7jX/t8Uh+R3EX9eAS5be+jQYg==
-  dependencies:
-    semver "^6.3.0"
-
-semver@^5.3.0, semver@^5.7.1:
+semver@^5.7.1:
   version "5.7.2"
   resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.2.tgz#48d55db737c3287cd4835e17fa13feace1c41ef8"
   integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==
 
-semver@^6.0.0, semver@^6.2.0, semver@^6.3.0:
+semver@^6.0.0:
   version "6.3.1"
   resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4"
   integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==
 
-semver@^7.3.5, semver@^7.3.8:
-  version "7.5.4"
-  resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e"
-  integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==
-  dependencies:
-    lru-cache "^6.0.0"
+semver@^7.3.5, semver@^7.5.4:
+  version "7.7.2"
+  resolved "https://registry.yarnpkg.com/semver/-/semver-7.7.2.tgz#67d99fdcd35cec21e6f8b87a7fd515a33f982b58"
+  integrity sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==
 
-send@0.18.0:
-  version "0.18.0"
-  resolved "https://registry.yarnpkg.com/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be"
-  integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==
-  dependencies:
-    debug "2.6.9"
-    depd "2.0.0"
-    destroy "1.2.0"
-    encodeurl "~1.0.2"
-    escape-html "~1.0.3"
-    etag "~1.8.1"
-    fresh "0.5.2"
-    http-errors "2.0.0"
-    mime "1.6.0"
-    ms "2.1.3"
-    on-finished "2.4.1"
-    range-parser "~1.2.1"
-    statuses "2.0.1"
+semver@~7.0.0:
+  version "7.0.0"
+  resolved "https://registry.yarnpkg.com/semver/-/semver-7.0.0.tgz#5f3ca35761e47e05b206c6daff2cf814f0316b8e"
+  integrity sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==
 
 send@0.19.0:
   version "0.19.0"
@@ -3177,66 +2313,67 @@ seq-queue@^0.0.5:
   resolved "https://registry.yarnpkg.com/seq-queue/-/seq-queue-0.0.5.tgz#d56812e1c017a6e4e7c3e3a37a1da6d78dd3c93e"
   integrity sha512-hr3Wtp/GZIc/6DAGPDcV4/9WoZhjrkXsi5B/07QgX8tsdc6ilr7BFM6PM6rbdAX1kFSDYeZGLipIZZKyQP0O5Q==
 
-serve-static@1.16.0:
-  version "1.16.0"
-  resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.16.0.tgz#2bf4ed49f8af311b519c46f272bf6ac3baf38a92"
-  integrity sha512-pDLK8zwl2eKaYrs8mrPZBJua4hMplRWJ1tIFksVC3FtBEBnl8dxgeHtsaMS8DhS9i4fLObaon6ABoc4/hQGdPA==
+serve-static@1.16.2:
+  version "1.16.2"
+  resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.16.2.tgz#b6a5343da47f6bdd2673848bf45754941e803296"
+  integrity sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==
   dependencies:
-    encodeurl "~1.0.2"
+    encodeurl "~2.0.0"
     escape-html "~1.0.3"
     parseurl "~1.3.3"
-    send "0.18.0"
+    send "0.19.0"
 
-set-blocking@^2.0.0, set-blocking@~2.0.0:
+set-blocking@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7"
-  integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc=
-
-set-function-length@^1.2.1:
-  version "1.2.2"
-  resolved "https://registry.yarnpkg.com/set-function-length/-/set-function-length-1.2.2.tgz#aac72314198eaed975cf77b2c3b6b880695e5449"
-  integrity sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==
-  dependencies:
-    define-data-property "^1.1.4"
-    es-errors "^1.3.0"
-    function-bind "^1.1.2"
-    get-intrinsic "^1.2.4"
-    gopd "^1.0.1"
-    has-property-descriptors "^1.0.2"
+  integrity sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==
 
 setprototypeof@1.2.0:
   version "1.2.0"
   resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424"
   integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==
 
-shebang-command@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea"
-  integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==
+side-channel-list@^1.0.0:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/side-channel-list/-/side-channel-list-1.0.0.tgz#10cb5984263115d3b7a0e336591e290a830af8ad"
+  integrity sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==
   dependencies:
-    shebang-regex "^3.0.0"
-
-shebang-regex@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172"
-  integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==
-
-side-channel@^1.0.4, side-channel@^1.0.6:
-  version "1.0.6"
-  resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.6.tgz#abd25fb7cd24baf45466406b1096b7831c9215f2"
-  integrity sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==
-  dependencies:
-    call-bind "^1.0.7"
     es-errors "^1.3.0"
-    get-intrinsic "^1.2.4"
-    object-inspect "^1.13.1"
+    object-inspect "^1.13.3"
 
-signal-exit@^3.0.0, signal-exit@^3.0.2:
-  version "3.0.3"
-  resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.3.tgz#a1410c2edd8f077b08b4e253c8eacfcaf057461c"
-  integrity sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==
+side-channel-map@^1.0.1:
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/side-channel-map/-/side-channel-map-1.0.1.tgz#d6bb6b37902c6fef5174e5f533fab4c732a26f42"
+  integrity sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==
+  dependencies:
+    call-bound "^1.0.2"
+    es-errors "^1.3.0"
+    get-intrinsic "^1.2.5"
+    object-inspect "^1.13.3"
 
-signal-exit@^3.0.7:
+side-channel-weakmap@^1.0.2:
+  version "1.0.2"
+  resolved "https://registry.yarnpkg.com/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz#11dda19d5368e40ce9ec2bdc1fb0ecbc0790ecea"
+  integrity sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==
+  dependencies:
+    call-bound "^1.0.2"
+    es-errors "^1.3.0"
+    get-intrinsic "^1.2.5"
+    object-inspect "^1.13.3"
+    side-channel-map "^1.0.1"
+
+side-channel@^1.0.6:
+  version "1.1.0"
+  resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.1.0.tgz#c3fcff9c4da932784873335ec9765fa94ff66bc9"
+  integrity sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==
+  dependencies:
+    es-errors "^1.3.0"
+    object-inspect "^1.13.3"
+    side-channel-list "^1.0.0"
+    side-channel-map "^1.0.1"
+    side-channel-weakmap "^1.0.2"
+
+signal-exit@^3.0.0, signal-exit@^3.0.7:
   version "3.0.7"
   resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9"
   integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==
@@ -3250,6 +2387,27 @@ signale@1.4.0:
     figures "^2.0.0"
     pkg-conf "^2.1.0"
 
+simple-concat@^1.0.0:
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/simple-concat/-/simple-concat-1.0.1.tgz#f46976082ba35c2263f1c8ab5edfe26c41c9552f"
+  integrity sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==
+
+simple-get@^4.0.0:
+  version "4.0.1"
+  resolved "https://registry.yarnpkg.com/simple-get/-/simple-get-4.0.1.tgz#4a39db549287c979d352112fa03fd99fd6bc3543"
+  integrity sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==
+  dependencies:
+    decompress-response "^6.0.0"
+    once "^1.3.1"
+    simple-concat "^1.0.0"
+
+simple-update-notifier@^1.0.7:
+  version "1.1.0"
+  resolved "https://registry.yarnpkg.com/simple-update-notifier/-/simple-update-notifier-1.1.0.tgz#67694c121de354af592b347cdba798463ed49c82"
+  integrity sha512-VpsrsJSUcJEseSbMHkrsrAVSdvVS5I96Qo1QAQ4FxQ9wXFcB+pjj7FB7/us9+GcgfW4ziHtYMc1J0PLczb55mg==
+  dependencies:
+    semver "~7.0.0"
+
 smart-buffer@^4.2.0:
   version "4.2.0"
   resolved "https://registry.yarnpkg.com/smart-buffer/-/smart-buffer-4.2.0.tgz#6e1d71fa4f18c05f7d0ff216dd16a481d0e8d9ae"
@@ -3265,11 +2423,11 @@ socks-proxy-agent@^6.0.0:
     socks "^2.6.2"
 
 socks@^2.6.2:
-  version "2.7.1"
-  resolved "https://registry.yarnpkg.com/socks/-/socks-2.7.1.tgz#d8e651247178fde79c0663043e07240196857d55"
-  integrity sha512-7maUZy1N7uo6+WVEX6psASxtNlKaNVMlGQKkG/63nEDdLOWNbiUMoLK7X4uYoLhQstau72mLgfEWcXcwsaHbYQ==
+  version "2.8.7"
+  resolved "https://registry.yarnpkg.com/socks/-/socks-2.8.7.tgz#e2fb1d9a603add75050a2067db8c381a0b5669ea"
+  integrity sha512-HLpt+uLy/pxB+bum/9DzAgiKS8CX1EvbWxI4zlmgGCExImLdiad2iCwXT5Z4c9c3Eq8rP2318mPW2c+QbtjK8A==
   dependencies:
-    ip "^2.0.0"
+    ip-address "^10.0.1"
     smart-buffer "^4.2.0"
 
 split2@^4.1.0:
@@ -3277,18 +2435,14 @@ split2@^4.1.0:
   resolved "https://registry.yarnpkg.com/split2/-/split2-4.2.0.tgz#c9c5920904d148bab0b9f67145f245a86aadbfa4"
   integrity sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==
 
-sprintf-js@~1.0.2:
-  version "1.0.3"
-  resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c"
-  integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=
-
-sqlite3@5.1.6:
-  version "5.1.6"
-  resolved "https://registry.yarnpkg.com/sqlite3/-/sqlite3-5.1.6.tgz#1d4fbc90fe4fbd51e952e0a90fd8f6c2b9098e97"
-  integrity sha512-olYkWoKFVNSSSQNvxVUfjiVbz3YtBwTJj+mfV5zpHmqW3sELx2Cf4QCdirMelhM5Zh+KDVaKgQHqCxrqiWHybw==
+sqlite3@^5.1.7:
+  version "5.1.7"
+  resolved "https://registry.yarnpkg.com/sqlite3/-/sqlite3-5.1.7.tgz#59ca1053c1ab38647396586edad019b1551041b7"
+  integrity sha512-GGIyOiFaG+TUra3JIfkI/zGP8yZYLPQ0pl1bH+ODjiX57sPhrLU5sQJn1y9bDKZUFYkX1crlrPfSYt0BKKdkog==
   dependencies:
-    "@mapbox/node-pre-gyp" "^1.0.0"
-    node-addon-api "^4.2.0"
+    bindings "^1.5.0"
+    node-addon-api "^7.0.0"
+    prebuild-install "^7.1.1"
     tar "^6.1.11"
   optionalDependencies:
     node-gyp "8.x"
@@ -3310,29 +2464,12 @@ statuses@2.0.1:
   resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63"
   integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==
 
-streamsearch@0.1.2:
-  version "0.1.2"
-  resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-0.1.2.tgz#808b9d0e56fc273d809ba57338e929919a1a9f1a"
-  integrity sha1-gIudDlb8Jz2Am6VzOOkpkZoanxo=
+streamsearch@^1.1.0:
+  version "1.1.0"
+  resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-1.1.0.tgz#404dd1e2247ca94af554e841a8ef0eaa238da764"
+  integrity sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==
 
-string-width@^1.0.1:
-  version "1.0.2"
-  resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3"
-  integrity sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=
-  dependencies:
-    code-point-at "^1.0.0"
-    is-fullwidth-code-point "^1.0.0"
-    strip-ansi "^3.0.0"
-
-"string-width@^1.0.2 || 2":
-  version "2.1.1"
-  resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e"
-  integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==
-  dependencies:
-    is-fullwidth-code-point "^2.0.0"
-    strip-ansi "^4.0.0"
-
-"string-width@^1.0.2 || 2 || 3 || 4", string-width@^4.2.3:
+"string-width@^1.0.2 || 2 || 3 || 4", string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3:
   version "4.2.3"
   resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010"
   integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==
@@ -3341,24 +2478,6 @@ string-width@^1.0.1:
     is-fullwidth-code-point "^3.0.0"
     strip-ansi "^6.0.1"
 
-string-width@^3.0.0:
-  version "3.1.0"
-  resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961"
-  integrity sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==
-  dependencies:
-    emoji-regex "^7.0.1"
-    is-fullwidth-code-point "^2.0.0"
-    strip-ansi "^5.1.0"
-
-string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0:
-  version "4.2.0"
-  resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.0.tgz#952182c46cc7b2c313d1596e623992bd163b72b5"
-  integrity sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==
-  dependencies:
-    emoji-regex "^8.0.0"
-    is-fullwidth-code-point "^3.0.0"
-    strip-ansi "^6.0.0"
-
 string_decoder@^1.1.1:
   version "1.3.0"
   resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e"
@@ -3373,35 +2492,7 @@ string_decoder@~1.1.1:
   dependencies:
     safe-buffer "~5.1.0"
 
-strip-ansi@^3.0.0, strip-ansi@^3.0.1:
-  version "3.0.1"
-  resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf"
-  integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=
-  dependencies:
-    ansi-regex "^2.0.0"
-
-strip-ansi@^4.0.0:
-  version "4.0.0"
-  resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f"
-  integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8=
-  dependencies:
-    ansi-regex "^3.0.0"
-
-strip-ansi@^5.1.0:
-  version "5.2.0"
-  resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae"
-  integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==
-  dependencies:
-    ansi-regex "^4.1.0"
-
-strip-ansi@^6.0.0:
-  version "6.0.0"
-  resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.0.tgz#0b1571dd7669ccd4f3e06e14ef1eed26225ae532"
-  integrity sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==
-  dependencies:
-    ansi-regex "^5.0.0"
-
-strip-ansi@^6.0.1:
+strip-ansi@^6.0.0, strip-ansi@^6.0.1:
   version "6.0.1"
   resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9"
   integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==
@@ -3411,17 +2502,12 @@ strip-ansi@^6.0.1:
 strip-bom@^3.0.0:
   version "3.0.0"
   resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3"
-  integrity sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=
-
-strip-json-comments@^3.1.0, strip-json-comments@^3.1.1:
-  version "3.1.1"
-  resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006"
-  integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==
+  integrity sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==
 
 strip-json-comments@~2.0.1:
   version "2.0.1"
   resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a"
-  integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo=
+  integrity sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==
 
 supports-color@^5.3.0, supports-color@^5.5.0:
   version "5.5.0"
@@ -3431,9 +2517,9 @@ supports-color@^5.3.0, supports-color@^5.5.0:
     has-flag "^3.0.0"
 
 supports-color@^7.1.0:
-  version "7.1.0"
-  resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.1.0.tgz#68e32591df73e25ad1c4b49108a2ec507962bfd1"
-  integrity sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==
+  version "7.2.0"
+  resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da"
+  integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==
   dependencies:
     has-flag "^4.0.0"
 
@@ -3442,7 +2528,17 @@ supports-preserve-symlinks-flag@^1.0.0:
   resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09"
   integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==
 
-tar-stream@^2.2.0:
+tar-fs@^2.0.0:
+  version "2.1.4"
+  resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.1.4.tgz#800824dbf4ef06ded9afea4acafe71c67c76b930"
+  integrity sha512-mDAjwmZdh7LTT6pNleZ05Yt65HC3E+NiQzl672vQG38jIrehtJk/J3mNwIg+vShQPcLF/LV7CMnDW6vjj6sfYQ==
+  dependencies:
+    chownr "^1.1.1"
+    mkdirp-classic "^0.5.2"
+    pump "^3.0.0"
+    tar-stream "^2.1.4"
+
+tar-stream@^2.1.4, tar-stream@^2.2.0:
   version "2.2.0"
   resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-2.2.0.tgz#acad84c284136b060dc3faa64474aa9aebd77287"
   integrity sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==
@@ -3453,27 +2549,14 @@ tar-stream@^2.2.0:
     inherits "^2.0.3"
     readable-stream "^3.1.1"
 
-tar@^4.4.2:
-  version "4.4.19"
-  resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.19.tgz#2e4d7263df26f2b914dee10c825ab132123742f3"
-  integrity sha512-a20gEsvHnWe0ygBY8JbxoM4w3SJdhc7ZAuxkLqh+nvNQN2IOt0B5lLgM490X5Hl8FF0dl0tOf2ewFYAlIFgzVA==
-  dependencies:
-    chownr "^1.1.4"
-    fs-minipass "^1.2.7"
-    minipass "^2.9.0"
-    minizlib "^1.3.3"
-    mkdirp "^0.5.5"
-    safe-buffer "^5.2.1"
-    yallist "^3.1.1"
-
 tar@^6.0.2, tar@^6.1.11, tar@^6.1.2:
-  version "6.1.13"
-  resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.13.tgz#46e22529000f612180601a6fe0680e7da508847b"
-  integrity sha512-jdIBIN6LTIe2jqzay/2vtYLlBHa3JF42ot3h1dW8Q0PaAG4v8rm0cvpVePtau5C6OKXGGcgO9q2AMNSWxiLqKw==
+  version "6.2.1"
+  resolved "https://registry.yarnpkg.com/tar/-/tar-6.2.1.tgz#717549c541bc3c2af15751bea94b1dd068d4b03a"
+  integrity sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==
   dependencies:
     chownr "^2.0.0"
     fs-minipass "^2.0.0"
-    minipass "^4.0.0"
+    minipass "^5.0.0"
     minizlib "^2.1.1"
     mkdirp "^1.0.3"
     yallist "^4.0.0"
@@ -3486,7 +2569,7 @@ tarn@^3.0.2:
 temp-dir@^1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/temp-dir/-/temp-dir-1.0.0.tgz#0a7c0ea26d3a39afa7e0ebea9c1fc0bc4daa011d"
-  integrity sha1-CnwOom06Oa+n4OvqnB/AvE2qAR0=
+  integrity sha512-xZFXEGbG7SNC3itwBzI3RYjq/cEhBkx2hJuKGIUOcEULmkQExXiHat2z/qkISYsuR+IKumhEfKKbV5qXmhICFQ==
 
 temp-write@^4.0.0:
   version "4.0.0"
@@ -3499,26 +2582,11 @@ temp-write@^4.0.0:
     temp-dir "^1.0.0"
     uuid "^3.3.2"
 
-term-size@^2.1.0:
-  version "2.2.0"
-  resolved "https://registry.yarnpkg.com/term-size/-/term-size-2.2.0.tgz#1f16adedfe9bdc18800e1776821734086fcc6753"
-  integrity sha512-a6sumDlzyHVJWb8+YofY4TW112G6p2FCPEAFk+59gIYHv3XHRhm9ltVQ9kli4hNWeQBwSpe8cRN25x0ROunMOw==
-
-text-table@^0.2.0:
-  version "0.2.0"
-  resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4"
-  integrity sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=
-
 tildify@2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/tildify/-/tildify-2.0.0.tgz#f205f3674d677ce698b7067a99e949ce03b4754a"
   integrity sha512-Cc+OraorugtXNfs50hU9KS369rFXCfgGLpfCfvlc+Ud5u6VWmUQsOAa9HbTvheQdYnrdJqqv1e5oIqXppMYnSw==
 
-to-readable-stream@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/to-readable-stream/-/to-readable-stream-1.0.0.tgz#ce0aa0c2f3df6adf852efb404a783e77c0475771"
-  integrity sha512-Iq25XBt6zD5npPhlLVXGFN3/gyR2/qODcKNNyTMd4vbm39HUaOiAM4PMq0eMVC/Tkxz+Zjdsc55g9yyz+Yq00Q==
-
 to-regex-range@^5.0.1:
   version "5.0.1"
   resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4"
@@ -3532,33 +2600,21 @@ toidentifier@1.0.1:
   integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==
 
 touch@^3.1.0:
-  version "3.1.0"
-  resolved "https://registry.yarnpkg.com/touch/-/touch-3.1.0.tgz#fe365f5f75ec9ed4e56825e0bb76d24ab74af83b"
-  integrity sha512-WBx8Uy5TLtOSRtIq+M03/sKDrXCLHxwDcquSP2c43Le03/9serjQBIztjRz6FkJez9D/hleyAXTBGLwwZUw9lA==
-  dependencies:
-    nopt "~1.0.10"
+  version "3.1.1"
+  resolved "https://registry.yarnpkg.com/touch/-/touch-3.1.1.tgz#097a23d7b161476435e5c1344a95c0f75b4a5694"
+  integrity sha512-r0eojU4bI8MnHr8c5bNo7lJDdI2qXlWWJk6a9EAFG7vbhTjElYhBVS3/miuE0uOuoLdb8Mc/rVfsmm6eo5o9GA==
 
 tr46@~0.0.3:
   version "0.0.3"
   resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a"
   integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==
 
-type-check@^0.4.0, type-check@~0.4.0:
-  version "0.4.0"
-  resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1"
-  integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==
+tunnel-agent@^0.6.0:
+  version "0.6.0"
+  resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd"
+  integrity sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==
   dependencies:
-    prelude-ls "^1.2.1"
-
-type-fest@^0.20.2:
-  version "0.20.2"
-  resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4"
-  integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==
-
-type-fest@^0.8.1:
-  version "0.8.1"
-  resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d"
-  integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==
+    safe-buffer "^5.0.1"
 
 type-is@~1.6.18:
   version "1.6.18"
@@ -3568,19 +2624,10 @@ type-is@~1.6.18:
     media-typer "0.3.0"
     mime-types "~2.1.24"
 
-typedarray-to-buffer@^3.1.5:
-  version "3.1.5"
-  resolved "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080"
-  integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==
-  dependencies:
-    is-typedarray "^1.0.0"
-
-undefsafe@^2.0.2:
-  version "2.0.3"
-  resolved "https://registry.yarnpkg.com/undefsafe/-/undefsafe-2.0.3.tgz#6b166e7094ad46313b2202da7ecc2cd7cc6e7aae"
-  integrity sha512-nrXZwwXrD/T/JXeygJqdCO6NZZ1L66HrxM/Z7mIq2oPanoN0F1nLx3lwJMu6AwJY69hdixaFQOuoYsMjE5/C2A==
-  dependencies:
-    debug "^2.2.0"
+undefsafe@^2.0.5:
+  version "2.0.5"
+  resolved "https://registry.yarnpkg.com/undefsafe/-/undefsafe-2.0.5.tgz#38733b9327bdcd226db889fb723a6efd162e6e2c"
+  integrity sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==
 
 unique-filename@^1.1.1:
   version "1.1.1"
@@ -3596,55 +2643,15 @@ unique-slug@^2.0.0:
   dependencies:
     imurmurhash "^0.1.4"
 
-unique-string@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/unique-string/-/unique-string-2.0.0.tgz#39c6451f81afb2749de2b233e3f7c5e8843bd89d"
-  integrity sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==
-  dependencies:
-    crypto-random-string "^2.0.0"
-
 unpipe@1.0.0, unpipe@~1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec"
-  integrity sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=
-
-update-notifier@^4.0.0:
-  version "4.1.0"
-  resolved "https://registry.yarnpkg.com/update-notifier/-/update-notifier-4.1.0.tgz#4866b98c3bc5b5473c020b1250583628f9a328f3"
-  integrity sha512-w3doE1qtI0/ZmgeoDoARmI5fjDoT93IfKgEGqm26dGUOh8oNpaSTsGNdYRN/SjOuo10jcJGwkEL3mroKzktkew==
-  dependencies:
-    boxen "^4.2.0"
-    chalk "^3.0.0"
-    configstore "^5.0.1"
-    has-yarn "^2.1.0"
-    import-lazy "^2.1.0"
-    is-ci "^2.0.0"
-    is-installed-globally "^0.3.1"
-    is-npm "^4.0.0"
-    is-yarn-global "^0.3.0"
-    latest-version "^5.0.0"
-    pupa "^2.0.1"
-    semver-diff "^3.1.1"
-    xdg-basedir "^4.0.0"
-
-uri-js@^4.2.2:
-  version "4.2.2"
-  resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.2.2.tgz#94c540e1ff772956e2299507c010aea6c8838eb0"
-  integrity sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ==
-  dependencies:
-    punycode "^2.1.0"
-
-url-parse-lax@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-3.0.0.tgz#16b5cafc07dbe3676c1b1999177823d6503acb0c"
-  integrity sha1-FrXK/Afb42dsGxmZF3gj1lA6yww=
-  dependencies:
-    prepend-http "^2.0.0"
+  integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==
 
 util-deprecate@^1.0.1, util-deprecate@~1.0.1:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
-  integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=
+  integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==
 
 util@^0.10.3:
   version "0.10.4"
@@ -3656,7 +2663,7 @@ util@^0.10.3:
 utils-merge@1.0.1:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713"
-  integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=
+  integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==
 
 uuid@^3.3.2:
   version "3.4.0"
@@ -3666,7 +2673,7 @@ uuid@^3.3.2:
 vary@~1.1.2:
   version "1.1.2"
   resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc"
-  integrity sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=
+  integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==
 
 webidl-conversions@^3.0.0:
   version "3.0.1"
@@ -3682,24 +2689,17 @@ whatwg-url@^5.0.0:
     webidl-conversions "^3.0.0"
 
 which-module@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a"
-  integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=
+  version "2.0.1"
+  resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.1.tgz#776b1fe35d90aebe99e8ac15eb24093389a4a409"
+  integrity sha512-iBdZ57RDvnOR9AGBhML2vFZf7h8vmBjhoaZqODJBFWHVtKkDmKuHai3cx5PgVMrX5YDNp27AofYbAwctSS+vhQ==
 
-which@^2.0.1, which@^2.0.2:
+which@^2.0.2:
   version "2.0.2"
   resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1"
   integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==
   dependencies:
     isexe "^2.0.0"
 
-wide-align@^1.1.0:
-  version "1.1.3"
-  resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.3.tgz#ae074e6bdc0c14a431e804e624549c633b000457"
-  integrity sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==
-  dependencies:
-    string-width "^1.0.2 || 2"
-
 wide-align@^1.1.2, wide-align@^1.1.5:
   version "1.1.5"
   resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.5.tgz#df1d4c206854369ecf3c9a4898f1b23fbd9d15d3"
@@ -3707,18 +2707,6 @@ wide-align@^1.1.2, wide-align@^1.1.5:
   dependencies:
     string-width "^1.0.2 || 2 || 3 || 4"
 
-widest-line@^3.1.0:
-  version "3.1.0"
-  resolved "https://registry.yarnpkg.com/widest-line/-/widest-line-3.1.0.tgz#8292333bbf66cb45ff0de1603b136b7ae1496eca"
-  integrity sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg==
-  dependencies:
-    string-width "^4.0.0"
-
-word-wrap@^1.2.3:
-  version "1.2.4"
-  resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.4.tgz#cb4b50ec9aca570abd1f52f33cd45b6c61739a9f"
-  integrity sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==
-
 wrap-ansi@^6.2.0:
   version "6.2.0"
   resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53"
@@ -3731,22 +2719,7 @@ wrap-ansi@^6.2.0:
 wrappy@1:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
-  integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=
-
-write-file-atomic@^3.0.0:
-  version "3.0.3"
-  resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8"
-  integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==
-  dependencies:
-    imurmurhash "^0.1.4"
-    is-typedarray "^1.0.0"
-    signal-exit "^3.0.2"
-    typedarray-to-buffer "^3.1.5"
-
-xdg-basedir@^4.0.0:
-  version "4.0.0"
-  resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-4.0.0.tgz#4bc8d9984403696225ef83a1573cbbcb4e79db13"
-  integrity sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q==
+  integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==
 
 xtend@^4.0.0:
   version "4.0.2"
@@ -3754,14 +2727,9 @@ xtend@^4.0.0:
   integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==
 
 y18n@^4.0.0:
-  version "4.0.1"
-  resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.1.tgz#8db2b83c31c5d75099bb890b23f3094891e247d4"
-  integrity sha512-wNcy4NvjMYL8gogWWYAO7ZFWFfHcbdbE57tZO8e4cbpj8tfUcwrwqSl3ad8HxpYWCdXcJUCeKKZS62Av1affwQ==
-
-yallist@^3.0.0, yallist@^3.1.1:
-  version "3.1.1"
-  resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd"
-  integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==
+  version "4.0.3"
+  resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.3.tgz#b5f259c82cd6e336921efd7bfd8bf560de9eeedf"
+  integrity sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==
 
 yallist@^4.0.0:
   version "4.0.0"
@@ -3793,16 +2761,11 @@ yargs@^15.4.1:
     y18n "^4.0.0"
     yargs-parser "^18.1.2"
 
-yocto-queue@^0.1.0:
-  version "0.1.0"
-  resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b"
-  integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==
-
 zip-stream@^4.1.0:
-  version "4.1.0"
-  resolved "https://registry.yarnpkg.com/zip-stream/-/zip-stream-4.1.0.tgz#51dd326571544e36aa3f756430b313576dc8fc79"
-  integrity sha512-zshzwQW7gG7hjpBlgeQP9RuyPGNxvJdzR8SUM3QhxCnLjWN2E7j3dOvpeDcQoETfHx0urRS7EtmVToql7YpU4A==
+  version "4.1.1"
+  resolved "https://registry.yarnpkg.com/zip-stream/-/zip-stream-4.1.1.tgz#1337fe974dbaffd2fa9a1ba09662a66932bd7135"
+  integrity sha512-9qv4rlDiopXg4E69k+vMHjNN63YFMe9sZMrdlvKnCjlCRWeCBswPPMPUfx+ipsAWq1LXHe70RcbaHdJJpS6hyQ==
   dependencies:
-    archiver-utils "^2.1.0"
-    compress-commons "^4.1.0"
+    archiver-utils "^3.0.4"
+    compress-commons "^4.1.2"
     readable-stream "^3.6.0"
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 0603e2de..913f79d5 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -39,7 +39,6 @@ EXPOSE 80 81 443
 
 COPY backend       /app
 COPY frontend/dist /app/frontend
-COPY global        /app/global
 
 WORKDIR /app
 RUN yarn install \
diff --git a/docker/dev/Dockerfile b/docker/dev/Dockerfile
index dcb1f1f9..45d97a32 100644
--- a/docker/dev/Dockerfile
+++ b/docker/dev/Dockerfile
@@ -15,7 +15,7 @@ ENV SUPPRESS_NO_CONFIG_WARNING=1 \
 
 RUN echo "fs.file-max = 65535" > /etc/sysctl.conf \
 	&& apt-get update \
-	&& apt-get install -y jq python3-pip logrotate \
+	&& apt-get install -y jq python3-pip logrotate moreutils \
 	&& apt-get clean \
 	&& rm -rf /var/lib/apt/lists/*
 
diff --git a/docker/docker-compose.ci.postgres.yml b/docker/docker-compose.ci.postgres.yml
index 5d5150c3..e9eb4bc2 100644
--- a/docker/docker-compose.ci.postgres.yml
+++ b/docker/docker-compose.ci.postgres.yml
@@ -1,17 +1,16 @@
 # WARNING: This is a CI docker-compose file used for building and testing of the entire app, it should not be used for production.
 services:
-
   cypress:
     environment:
-      CYPRESS_stack: 'postgres'
+      CYPRESS_stack: "postgres"
 
   fullstack:
     environment:
-      DB_POSTGRES_HOST: 'db-postgres'
-      DB_POSTGRES_PORT: '5432'
-      DB_POSTGRES_USER: 'npm'
-      DB_POSTGRES_PASSWORD: 'npmpass'
-      DB_POSTGRES_NAME: 'npm'
+      DB_POSTGRES_HOST: "db-postgres"
+      DB_POSTGRES_PORT: "5432"
+      DB_POSTGRES_USER: "npm"
+      DB_POSTGRES_PASSWORD: "npmpass"
+      DB_POSTGRES_NAME: "npm"
     depends_on:
       - db-postgres
       - authentik
@@ -21,9 +20,9 @@ services:
   db-postgres:
     image: postgres:17
     environment:
-      POSTGRES_USER: 'npm'
-      POSTGRES_PASSWORD: 'npmpass'
-      POSTGRES_DB: 'npm'
+      POSTGRES_USER: "npm"
+      POSTGRES_PASSWORD: "npmpass"
+      POSTGRES_DB: "npm"
     volumes:
       - psql_vol:/var/lib/postgresql/data
       - ./ci/postgres:/docker-entrypoint-initdb.d
@@ -31,11 +30,11 @@ services:
       - fulltest
 
   authentik-redis:
-    image: 'redis:alpine'
+    image: "redis:alpine"
     command: --save 60 1 --loglevel warning
     restart: unless-stopped
     healthcheck:
-      test: ['CMD-SHELL', 'redis-cli ping | grep PONG']
+      test: ["CMD-SHELL", "redis-cli ping | grep PONG"]
       start_period: 20s
       interval: 30s
       retries: 5
@@ -66,9 +65,9 @@ services:
   authentik-ldap:
     image: ghcr.io/goauthentik/ldap:2024.10.1
     environment:
-      AUTHENTIK_HOST: 'http://authentik:9000'
-      AUTHENTIK_INSECURE: 'true'
-      AUTHENTIK_TOKEN: 'wKYZuRcI0ETtb8vWzMCr04oNbhrQUUICy89hSpDln1OEKLjiNEuQ51044Vkp'
+      AUTHENTIK_HOST: "http://authentik:9000"
+      AUTHENTIK_INSECURE: "true"
+      AUTHENTIK_TOKEN: "wKYZuRcI0ETtb8vWzMCr04oNbhrQUUICy89hSpDln1OEKLjiNEuQ51044Vkp"
     restart: unless-stopped
     depends_on:
       - authentik
diff --git a/docker/docker-compose.ci.yml b/docker/docker-compose.ci.yml
index 280a0546..8070aa42 100644
--- a/docker/docker-compose.ci.yml
+++ b/docker/docker-compose.ci.yml
@@ -7,7 +7,9 @@ services:
   fullstack:
     image: "${IMAGE}:${BRANCH_LOWER}-ci-${BUILD_NUMBER}"
     environment:
+      TZ: "${TZ:-Australia/Brisbane}"
       DEBUG: 'true'
+      CI: 'true'
       FORCE_COLOR: 1
       # Required for DNS Certificate provisioning in CI
       LE_SERVER: 'https://ca.internal/acme/acme/directory'
diff --git a/docker/docker-compose.dev.yml b/docker/docker-compose.dev.yml
index 5ad831bf..57c30e07 100644
--- a/docker/docker-compose.dev.yml
+++ b/docker/docker-compose.dev.yml
@@ -1,6 +1,5 @@
 # WARNING: This is a DEVELOPMENT docker-compose file, it should not be used for production.
 services:
-
   fullstack:
     image: npm2dev:core
     container_name: npm2dev.core
@@ -18,13 +17,14 @@ services:
           - website2.example.com
           - website3.example.com
     environment:
+      TZ: "${TZ:-Australia/Brisbane}"
       PUID: 1000
       PGID: 1000
       FORCE_COLOR: 1
       # specifically for dev:
-      DEBUG: 'true'
-      DEVELOPMENT: 'true'
-      LE_STAGING: 'true'
+      DEBUG: "true"
+      DEVELOPMENT: "true"
+      LE_STAGING: "true"
       # db:
       # DB_MYSQL_HOST: 'db'
       # DB_MYSQL_PORT: '3306'
@@ -32,23 +32,23 @@ services:
       # DB_MYSQL_PASSWORD: 'npm'
       # DB_MYSQL_NAME: 'npm'
       # db-postgres:
-      DB_POSTGRES_HOST: 'db-postgres'
-      DB_POSTGRES_PORT: '5432'
-      DB_POSTGRES_USER: 'npm'
-      DB_POSTGRES_PASSWORD: 'npmpass'
-      DB_POSTGRES_NAME: 'npm'
+      DB_POSTGRES_HOST: "db-postgres"
+      DB_POSTGRES_PORT: "5432"
+      DB_POSTGRES_USER: "npm"
+      DB_POSTGRES_PASSWORD: "npmpass"
+      DB_POSTGRES_NAME: "npm"
       # DB_SQLITE_FILE: "/data/database.sqlite"
       # DISABLE_IPV6: "true"
       # Required for DNS Certificate provisioning testing:
-      LE_SERVER: 'https://ca.internal/acme/acme/directory'
-      REQUESTS_CA_BUNDLE: '/etc/ssl/certs/NginxProxyManager.crt'
+      LE_SERVER: "https://ca.internal/acme/acme/directory"
+      REQUESTS_CA_BUNDLE: "/etc/ssl/certs/NginxProxyManager.crt"
     volumes:
       - npm_data:/data
       - le_data:/etc/letsencrypt
-      - './dev/resolv.conf:/etc/resolv.conf:ro'
+      - "./dev/resolv.conf:/etc/resolv.conf:ro"
       - ../backend:/app
-      - ../frontend:/app/frontend
-      - ../global:/app/global
+      - ../frontend:/frontend
+      - "/etc/localtime:/etc/localtime:ro"
     healthcheck:
       test: ["CMD", "/usr/bin/check-health"]
       interval: 10s
@@ -69,12 +69,14 @@ services:
     networks:
       - nginx_proxy_manager
     environment:
-      MYSQL_ROOT_PASSWORD: 'npm'
-      MYSQL_DATABASE: 'npm'
-      MYSQL_USER: 'npm'
-      MYSQL_PASSWORD: 'npm'
+      TZ: "${TZ:-Australia/Brisbane}"
+      MYSQL_ROOT_PASSWORD: "npm"
+      MYSQL_DATABASE: "npm"
+      MYSQL_USER: "npm"
+      MYSQL_PASSWORD: "npm"
     volumes:
       - db_data:/var/lib/mysql
+      - "/etc/localtime:/etc/localtime:ro"
 
   db-postgres:
     image: postgres:17
@@ -82,9 +84,9 @@ services:
     networks:
       - nginx_proxy_manager
     environment:
-      POSTGRES_USER: 'npm'
-      POSTGRES_PASSWORD: 'npmpass'
-      POSTGRES_DB: 'npm'
+      POSTGRES_USER: "npm"
+      POSTGRES_PASSWORD: "npmpass"
+      POSTGRES_DB: "npm"
     volumes:
       - psql_data:/var/lib/postgresql/data
       - ./ci/postgres:/docker-entrypoint-initdb.d
@@ -93,8 +95,8 @@ services:
     image: jc21/testca
     container_name: npm2dev.stepca
     volumes:
-      - './dev/resolv.conf:/etc/resolv.conf:ro'
-      - '/etc/localtime:/etc/localtime:ro'
+      - "./dev/resolv.conf:/etc/resolv.conf:ro"
+      - "/etc/localtime:/etc/localtime:ro"
     networks:
       nginx_proxy_manager:
         aliases:
@@ -115,7 +117,7 @@ services:
       - 3082:80
     environment:
       URL: "http://npm:81/api/schema"
-      PORT: '80'
+      PORT: "80"
     depends_on:
       - fullstack
 
@@ -123,9 +125,9 @@ services:
     image: ubuntu/squid
     container_name: npm2dev.squid
     volumes:
-      - './dev/squid.conf:/etc/squid/squid.conf:ro'
-      - './dev/resolv.conf:/etc/resolv.conf:ro'
-      - '/etc/localtime:/etc/localtime:ro'
+      - "./dev/squid.conf:/etc/squid/squid.conf:ro"
+      - "./dev/resolv.conf:/etc/resolv.conf:ro"
+      - "/etc/localtime:/etc/localtime:ro"
     networks:
       - nginx_proxy_manager
     ports:
@@ -135,18 +137,18 @@ services:
     image: pschiffe/pdns-mysql:4.8
     container_name: npm2dev.pdns
     volumes:
-      - '/etc/localtime:/etc/localtime:ro'
+      - "/etc/localtime:/etc/localtime:ro"
     environment:
-      PDNS_master: 'yes'
-      PDNS_api: 'yes'
-      PDNS_api_key: 'npm'
-      PDNS_webserver: 'yes'
-      PDNS_webserver_address: '0.0.0.0'
-      PDNS_webserver_password: 'npm'
-      PDNS_webserver-allow-from: '127.0.0.0/8,192.0.0.0/8,10.0.0.0/8,172.0.0.0/8'
-      PDNS_version_string: 'anonymous'
+      PDNS_master: "yes"
+      PDNS_api: "yes"
+      PDNS_api_key: "npm"
+      PDNS_webserver: "yes"
+      PDNS_webserver_address: "0.0.0.0"
+      PDNS_webserver_password: "npm"
+      PDNS_webserver-allow-from: "127.0.0.0/8,192.0.0.0/8,10.0.0.0/8,172.0.0.0/8"
+      PDNS_version_string: "anonymous"
       PDNS_default_ttl: 1500
-      PDNS_allow_axfr_ips: '127.0.0.0/8,192.0.0.0/8,10.0.0.0/8,172.0.0.0/8'
+      PDNS_allow_axfr_ips: "127.0.0.0/8,192.0.0.0/8,10.0.0.0/8,172.0.0.0/8"
       PDNS_gmysql_host: pdns-db
       PDNS_gmysql_port: 3306
       PDNS_gmysql_user: pdns
@@ -164,14 +166,14 @@ services:
     image: mariadb
     container_name: npm2dev.pdns-db
     environment:
-      MYSQL_ROOT_PASSWORD: 'pdns'
-      MYSQL_DATABASE: 'pdns'
-      MYSQL_USER: 'pdns'
-      MYSQL_PASSWORD: 'pdns'
+      MYSQL_ROOT_PASSWORD: "pdns"
+      MYSQL_DATABASE: "pdns"
+      MYSQL_USER: "pdns"
+      MYSQL_PASSWORD: "pdns"
     volumes:
-      - 'pdns_mysql:/var/lib/mysql'
-      - '/etc/localtime:/etc/localtime:ro'
-      - './dev/pdns-db.sql:/docker-entrypoint-initdb.d/01_init.sql:ro'
+      - "pdns_mysql:/var/lib/mysql"
+      - "/etc/localtime:/etc/localtime:ro"
+      - "./dev/pdns-db.sql:/docker-entrypoint-initdb.d/01_init.sql:ro"
     networks:
       - nginx_proxy_manager
 
@@ -182,25 +184,25 @@ services:
       context: ../
       dockerfile: test/cypress/Dockerfile
     environment:
-      HTTP_PROXY: 'squid:3128'
-      HTTPS_PROXY: 'squid:3128'
+      HTTP_PROXY: "squid:3128"
+      HTTPS_PROXY: "squid:3128"
     volumes:
-      - '../test/results:/results'
-      - './dev/resolv.conf:/etc/resolv.conf:ro'
-      - '/etc/localtime:/etc/localtime:ro'
+      - "../test/results:/results"
+      - "./dev/resolv.conf:/etc/resolv.conf:ro"
+      - "/etc/localtime:/etc/localtime:ro"
     command: cypress run --browser chrome --config-file=cypress/config/ci.js
     networks:
       - nginx_proxy_manager
 
   authentik-redis:
-    image: 'redis:alpine'
+    image: "redis:alpine"
     container_name: npm2dev.authentik-redis
     command: --save 60 1 --loglevel warning
     networks:
       - nginx_proxy_manager
     restart: unless-stopped
     healthcheck:
-      test: ['CMD-SHELL', 'redis-cli ping | grep PONG']
+      test: ["CMD-SHELL", "redis-cli ping | grep PONG"]
       start_period: 20s
       interval: 30s
       retries: 5
@@ -242,9 +244,9 @@ services:
     networks:
       - nginx_proxy_manager
     environment:
-      AUTHENTIK_HOST: 'http://authentik:9000'
-      AUTHENTIK_INSECURE: 'true'
-      AUTHENTIK_TOKEN: 'wKYZuRcI0ETtb8vWzMCr04oNbhrQUUICy89hSpDln1OEKLjiNEuQ51044Vkp'
+      AUTHENTIK_HOST: "http://authentik:9000"
+      AUTHENTIK_INSECURE: "true"
+      AUTHENTIK_TOKEN: "wKYZuRcI0ETtb8vWzMCr04oNbhrQUUICy89hSpDln1OEKLjiNEuQ51044Vkp"
     restart: unless-stopped
     depends_on:
       - authentik
diff --git a/docker/rootfs/etc/nginx/conf.d/dev.conf b/docker/rootfs/etc/nginx/conf.d/dev.conf
index edbdec8a..67efc0f8 100644
--- a/docker/rootfs/etc/nginx/conf.d/dev.conf
+++ b/docker/rootfs/etc/nginx/conf.d/dev.conf
@@ -12,6 +12,7 @@ server {
 
 	location /api/ {
 		add_header            X-Served-By $host;
+		proxy_http_version    1.1;
 		proxy_set_header Host $host;
 		proxy_set_header      X-Forwarded-Scheme $scheme;
 		proxy_set_header      X-Forwarded-Proto  $scheme;
@@ -23,7 +24,14 @@ server {
 	}
 
 	location / {
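+		# Proxy the UI to the frontend dev server; the Upgrade/Connection headers below keep websocket connections (e.g. hot reload) working.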
-		index index.html;
-		try_files $uri $uri.html $uri/ /index.html;
+		add_header            X-Served-By $host;
+		proxy_http_version    1.1;
+		proxy_set_header Host $host;
+		proxy_set_header      Upgrade $http_upgrade;
+		proxy_set_header      Connection "Upgrade";
+		proxy_set_header      X-Forwarded-Scheme $scheme;
+		proxy_set_header      X-Forwarded-Proto  $scheme;
+		proxy_set_header      X-Forwarded-For    $remote_addr;
+		proxy_pass            http://127.0.0.1:5173;
 	}
 }
diff --git a/docker/rootfs/etc/s6-overlay/s6-rc.d/frontend/run b/docker/rootfs/etc/s6-overlay/s6-rc.d/frontend/run
index 4f203555..91ed3fa8 100755
--- a/docker/rootfs/etc/s6-overlay/s6-rc.d/frontend/run
+++ b/docker/rootfs/etc/s6-overlay/s6-rc.d/frontend/run
@@ -7,15 +7,15 @@ set -e
 
 if [ "$DEVELOPMENT" = 'true' ]; then
 	. /usr/bin/common.sh
-	cd /app/frontend || exit 1
+	cd /frontend || exit 1
 	HOME=$NPMHOME
 	export HOME
-	mkdir -p /app/frontend/dist
-	chown -R "$PUID:$PGID" /app/frontend/dist
+	mkdir -p /frontend/dist
+	chown -R "$PUID:$PGID" /frontend/dist
 
 	log_info 'Starting frontend ...'
 	s6-setuidgid "$PUID:$PGID" yarn install
-	exec s6-setuidgid "$PUID:$PGID" yarn watch
+	exec s6-setuidgid "$PUID:$PGID" yarn dev
 else
 	exit 0
 fi
diff --git a/docs/package.json b/docs/package.json
index 3e3dcba2..d4523250 100644
--- a/docs/package.json
+++ b/docs/package.json
@@ -5,7 +5,7 @@
     "preview": "vitepress preview"
   },
   "devDependencies": {
-    "vitepress": "^1.4.0"
+    "vitepress": "^1.6.4"
   },
   "dependencies": {}
 }
diff --git a/docs/src/advanced-config/index.md b/docs/src/advanced-config/index.md
index 4a7c260e..e4a9594e 100644
--- a/docs/src/advanced-config/index.md
+++ b/docs/src/advanced-config/index.md
@@ -228,3 +228,13 @@ To enable the geoip2 module, you can create the custom configuration file `/data
 load_module /usr/lib/nginx/modules/ngx_http_geoip2_module.so;
 load_module /usr/lib/nginx/modules/ngx_stream_geoip2_module.so;
 ```
+
+## Auto Initial User Creation
+
+Setting these environment variables will create the default admin user on startup, skipping the first-user setup screen in the UI:
+
+```
+    environment:
+      INITIAL_ADMIN_EMAIL: my@example.com
+      INITIAL_ADMIN_PASSWORD: mypassword1
+```
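+
+The same variables can also be passed directly to `docker run`; a minimal sketch (image tag and port mappings are only illustrative, data volumes omitted):
+
+```bash
+docker run -d \
+  -e INITIAL_ADMIN_EMAIL=my@example.com \
+  -e INITIAL_ADMIN_PASSWORD=mypassword1 \
+  -p 80:80 -p 81:81 -p 443:443 \
+  jc21/nginx-proxy-manager:latest
+```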
diff --git a/docs/src/faq/index.md b/docs/src/faq/index.md
index ea375f47..b3668562 100644
--- a/docs/src/faq/index.md
+++ b/docs/src/faq/index.md
@@ -23,4 +23,10 @@ Your best bet is to ask the [Reddit community for support](https://www.reddit.co
 
 ## When adding username and password access control to a proxy host, I can no longer login into the app.
 
-Having an Access Control List (ACL) with username and password requires the browser to always send this username and password in the `Authorization` header on each request. If your proxied app also requires authentication (like Nginx Proxy Manager itself), most likely the app will also use the `Authorization` header to transmit this information, as this is the standardized header meant for this kind of information. However having multiples of the same headers is not allowed in the [internet standard](https://www.rfc-editor.org/rfc/rfc7230#section-3.2.2) and almost all apps do not support multiple values in the `Authorization` header. Hence one of the two logins will be broken. This can only be fixed by either removing one of the logins or by changing the app to use other non-standard headers for authorization.
+Having an Access Control List (ACL) with username and password requires the browser to always send this username
+and password in the `Authorization` header on each request. If your proxied app also requires authentication (like
+Nginx Proxy Manager itself), most likely the app will also use the `Authorization` header to transmit this information,
+as this is the standardized header meant for this kind of information. However, having multiple headers with the
+same name is not allowed by the [internet standard](https://www.rfc-editor.org/rfc/rfc7230#section-3.2.2), and almost all apps
+do not support multiple values in the `Authorization` header. Hence one of the two logins will break. This can
+only be fixed by either removing one of the logins or by changing the app to use a different, non-standard header for authorization.
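+
+For example, once the ACL is active, each request the browser sends can only carry one set of credentials, roughly (host and credentials are placeholders):
+
+```
+GET /some/page HTTP/1.1
+Host: app.example.com
+Authorization: Basic <base64 of username:password>
+```
+
+Whichever login those credentials belong to, the other one cannot authenticate on the same request.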
diff --git a/docs/src/guide/index.md b/docs/src/guide/index.md
index fcf176fa..a9de63a9 100644
--- a/docs/src/guide/index.md
+++ b/docs/src/guide/index.md
@@ -35,7 +35,7 @@ so that the barrier for entry here is low.
 
 ## Features
 
-- Beautiful and Secure Admin Interface based on [Tabler](https://tabler.github.io/)
+- Beautiful and Secure Admin Interface based on [Tabler](https://tabler.io/)
 - Easily create forwarding domains, redirections, streams and 404 hosts without knowing anything about Nginx
 - Free SSL using Let's Encrypt or provide your own custom SSL certificates
 - Access Lists and basic HTTP Authentication for your hosts
@@ -66,6 +66,8 @@ services:
   app:
     image: 'jc21/nginx-proxy-manager:latest'
     restart: unless-stopped
+    environment:
+      TZ: "Australia/Brisbane"
     ports:
       - '80:80'
       - '81:81'
@@ -80,26 +82,16 @@ This is the bare minimum configuration required. See the [documentation](https:/
 3. Bring up your stack by running
 
 ```bash
-docker-compose up -d
-
-# If using docker-compose-plugin
 docker compose up -d
 ```
 
 4. Log in to the Admin UI
 
 When your docker container is running, connect to it on port `81` for the admin interface.
-Sometimes this can take a little bit because of the entropy of keys.
 
 [http://127.0.0.1:81](http://127.0.0.1:81)
 
-Default Admin User:
-```
-Email:    admin@example.com
-Password: changeme
-```
-
-Immediately after logging in with this default user you will be asked to modify your details and change your password.
+This startup can take a minute depending on your hardware.
 
 
 ## Contributing
diff --git a/docs/src/setup/index.md b/docs/src/setup/index.md
index 520f2b10..fd5a16f7 100644
--- a/docs/src/setup/index.md
+++ b/docs/src/setup/index.md
@@ -13,6 +13,7 @@ services:
   app:
     image: 'jc21/nginx-proxy-manager:latest'
     restart: unless-stopped
+
     ports:
       # These ports are in format :
       - '80:80' # Public HTTP Port
@@ -21,7 +22,9 @@ services:
       # Add any other Stream port you want to expose
       # - '21:21' # FTP
 
-    #environment:
+    environment:
+      TZ: "Australia/Brisbane"
+
       # Uncomment this if you want to change the location of
       # the SQLite DB file within the container
       # DB_SQLITE_FILE: "/data/database.sqlite"
@@ -65,6 +68,7 @@ services:
       # Add any other Stream port you want to expose
       # - '21:21' # FTP
     environment:
+      TZ: "Australia/Brisbane"
       # Mysql/Maria connection parameters:
       DB_MYSQL_HOST: "db"
       DB_MYSQL_PORT: 3306
@@ -115,6 +119,7 @@ services:
       # Add any other Stream port you want to expose
       # - '21:21' # FTP
     environment:
+      TZ: "Australia/Brisbane"
       # Postgres parameters:
       DB_POSTGRES_HOST: 'db'
       DB_POSTGRES_PORT: '5432'
@@ -173,21 +178,3 @@ After the app is running for the first time, the following will happen:
 3. A default admin user will be created
 
 This process can take a couple of minutes depending on your machine.
-
-## Default Administrator User
-
-```
-Email:    admin@example.com
-Password: changeme
-```
-
-Immediately after logging in with this default user you will be asked to modify your details and change your password. You can change defaults with:
-
-
-```
-    environment:
-      INITIAL_ADMIN_EMAIL: my@example.com
-      INITIAL_ADMIN_PASSWORD: mypassword1
-```
-
-
diff --git a/frontend/.babelrc b/frontend/.babelrc
deleted file mode 100644
index 54071ecd..00000000
--- a/frontend/.babelrc
+++ /dev/null
@@ -1,17 +0,0 @@
-{
-	"presets": [
-		[
-			"env",
-			{
-				"targets": {
-					"browsers": [
-						"Chrome >= 65"
-					]
-				},
-				"debug": false,
-				"modules": false,
-				"useBuiltIns": "usage"
-			}
-		]
-	]
-}
\ No newline at end of file
diff --git a/frontend/.gitignore b/frontend/.gitignore
index c8f4b4f9..8b7e5021 100644
--- a/frontend/.gitignore
+++ b/frontend/.gitignore
@@ -1,4 +1,22 @@
-dist
+# Logs
+logs
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+pnpm-debug.log*
+lerna-debug.log*
+
 node_modules
-webpack_stats.html
-yarn-error.log
+dist
+dist-ssr
+*.local
+
+# Editor directories and files
+.idea
+.DS_Store
+*.suo
+*.ntvs*
+*.njsproj
+*.sln
+*.sw?
diff --git a/frontend/biome.json b/frontend/biome.json
new file mode 100644
index 00000000..ae628126
--- /dev/null
+++ b/frontend/biome.json
@@ -0,0 +1,92 @@
+{
+    "$schema": "https://biomejs.dev/schemas/2.3.1/schema.json",
+    "vcs": {
+        "enabled": true,
+        "clientKind": "git",
+        "useIgnoreFile": true
+    },
+    "files": {
+        "ignoreUnknown": false,
+        "includes": [
+            "**/*.ts",
+            "**/*.tsx",
+            "**/*.js",
+            "**/*.jsx",
+            "!**/dist/**/*"
+        ]
+    },
+    "formatter": {
+        "enabled": true,
+        "indentStyle": "tab",
+        "indentWidth": 4,
+        "lineWidth": 120,
+        "formatWithErrors": true
+    },
+    "assist": {
+        "actions": {
+            "source": {
+                "organizeImports": {
+                    "level": "on",
+                    "options": {
+                        "groups": [
+                            ":BUN:",
+                            ":NODE:",
+                            [
+                                "npm:*",
+                                "npm:*/**"
+                            ],
+                            ":PACKAGE_WITH_PROTOCOL:",
+                            ":URL:",
+                            ":PACKAGE:",
+                            [
+                                "/src/*",
+                                "/src/**"
+                            ],
+                            [
+                                "/**"
+                            ],
+                            [
+                                "#*",
+                                "#*/**"
+                            ],
+                            ":PATH:"
+                        ]
+                    }
+                }
+            }
+        }
+    },
+    "linter": {
+        "enabled": true,
+        "rules": {
+            "recommended": true,
+            "correctness": {
+                "useUniqueElementIds": "off"
+            },
+            "suspicious": {
+                "noExplicitAny": "off",
+                "noArrayIndexKey": "off"
+            },
+            "performance": {
+                "noDelete": "off"
+            },
+            "nursery": "off",
+            "a11y": {
+                "useSemanticElements": "off",
+                "useValidAnchor": "off"
+            },
+            "style": {
+                "noParameterAssign": "error",
+                "useAsConstAssertion": "error",
+                "useDefaultParameterLast": "error",
+                "useEnumInitializers": "error",
+                "useSelfClosingElements": "error",
+                "useSingleVarDeclarator": "error",
+                "noUnusedTemplateLiteral": "error",
+                "useNumberNamespace": "error",
+                "noInferrableTypes": "error",
+                "noUselessElse": "error"
+            }
+        }
+    }
+}
diff --git a/frontend/check-locales.cjs b/frontend/check-locales.cjs
new file mode 100755
index 00000000..03a8be14
--- /dev/null
+++ b/frontend/check-locales.cjs
@@ -0,0 +1,159 @@
+#!/usr/bin/env node
+
+// This file does a few things to ensure that the Locales are present and valid:
+// - Ensures that the name of the locale exists in the language list
+// - Ensures that each locale contains the translations used in the application
+// - Ensures that there are no unused translations in the locale files
+// - Also checks the error messages returned by the backend
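+//
+// Usage (run from the frontend directory, assuming its dependencies are installed):
+//   node check-locales.cjs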
+
+const allLocales = [
+	["en", "en-US"],
+	["fa", "fa-IR"],
+];
+
+const ignoreUnused = [
+	/^.*$/,
+];
+
+const { spawnSync } = require("child_process");
+const fs = require("fs");
+
+const tmp = require("tmp");
+
+// Parse backend errors
+const BACKEND_ERRORS_FILE = "../backend/internal/errors/errors.go";
+const BACKEND_ERRORS = [];
+/*
+try {
+	const backendErrorsContent = fs.readFileSync(BACKEND_ERRORS_FILE, "utf8");
+	const backendErrorsContentRes = [
+		...backendErrorsContent.matchAll(/(?:errors|eris)\.New\("([^"]+)"\)/g),
+	];
+	backendErrorsContentRes.map((item) => {
+		BACKEND_ERRORS.push("error." + item[1]);
+		return null;
+	});
+} catch (err) {
+	console.log("\x1b[31m%s\x1b[0m", err);
+	process.exit(1);
+}
+*/
+
+// get all translations used in frontend code
+const tmpobj = tmp.fileSync({ postfix: ".json" });
+spawnSync("yarn", ["locale-extract", "--out-file", tmpobj.name]);
+
+const allLocalesInProject = require(tmpobj.name);
+
+// get list of language names and locales
+const langList = require("./src/locale/src/lang-list.json");
+
+// store a list of all validation errors
+const allErrors = [];
+const allWarnings = [];
+const allKeys = [];
+
+const checkLangList = (fullCode) => {
+	const key = "locale-" + fullCode;
+	if (typeof langList[key] === "undefined") {
+		allErrors.push(
+			"ERROR: `" + key + "` language does not exist in lang-list.json",
+		);
+	}
+};
+
+const compareLocale = (locale) => {
+	const projectLocaleKeys = Object.keys(allLocalesInProject);
+	// Check that locale contains the items used in the codebase
+	projectLocaleKeys.map((key) => {
+		if (typeof locale.data[key] === "undefined") {
+			allErrors.push(
+				"ERROR: `" + locale[0] + "` does not contain item: `" + key + "`",
+			);
+		}
+		return null;
+	});
+	// Check that locale contains all error.* items
+	BACKEND_ERRORS.forEach((key) => {
+		if (typeof locale.data[key] === "undefined") {
+			allErrors.push(
+				"ERROR: `" + locale[0] + "` does not contain item: `" + key + "`",
+			);
+		}
+		return null;
+	});
+
+	// Check that locale does not contain items not used in the codebase
+	const localeKeys = Object.keys(locale.data);
+	localeKeys.map((key) => {
+		let ignored = false;
+		ignoreUnused.map((regex) => {
+			if (key.match(regex)) {
+				ignored = true;
+			}
+			return null;
+		});
+
+		if (!ignored && typeof allLocalesInProject[key] === "undefined") {
+			// ensure this key doesn't exist in the backend errors either
+			if (!BACKEND_ERRORS.includes(key)) {
+				allErrors.push(
+					"ERROR: `" + locale[0] + "` contains unused item: `" + key + "`",
+				);
+			}
+		}
+
+		// Add this key to allKeys
+		if (allKeys.indexOf(key) === -1) {
+			allKeys.push(key);
+		}
+		return null;
+	});
+};
+
+// Checks for any keys missing from this locale that
+// have been defined in any other locales
+const checkForMissing = (locale) => {
+	allKeys.forEach((key) => {
+		if (typeof locale.data[key] === "undefined") {
+			allWarnings.push(
+				"WARN: `" + locale[0] + "` does not contain item: `" + key + "`",
+			);
+		}
+		return null;
+	});
+};
+
+// Load all locale data
+allLocales.map((locale, idx) => {
+	checkLangList(locale[1]);
+	allLocales[idx].data = require("./src/locale/src/" + locale[0] + ".json");
+	return null;
+});
+
+// Verify all locale data
+allLocales.map((locale) => {
+	compareLocale(locale);
+	checkForMissing(locale);
+	return null;
+});
+
+if (allErrors.length) {
+	allErrors.map((err) => {
+		console.log("\x1b[31m%s\x1b[0m", err);
+		return null;
+	});
+}
+if (allWarnings.length) {
+	allWarnings.map((err) => {
+		console.log("\x1b[33m%s\x1b[0m", err);
+		return null;
+	});
+}
+
+if (allErrors.length) {
+	process.exit(1);
+}
+
+console.log("\x1b[32m%s\x1b[0m", "Locale check passed");
+process.exit(0);
diff --git a/frontend/fonts/feather b/frontend/fonts/feather
deleted file mode 120000
index 440203ba..00000000
--- a/frontend/fonts/feather
+++ /dev/null
@@ -1 +0,0 @@
-../node_modules/tabler-ui/dist/assets/fonts/feather
\ No newline at end of file
diff --git a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-700.woff b/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-700.woff
deleted file mode 100644
index 96d8768e..00000000
Binary files a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-700.woff and /dev/null differ
diff --git a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-700.woff2 b/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-700.woff2
deleted file mode 100644
index e97a2218..00000000
Binary files a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-700.woff2 and /dev/null differ
diff --git a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-700italic.woff b/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-700italic.woff
deleted file mode 100644
index 0829caef..00000000
Binary files a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-700italic.woff and /dev/null differ
diff --git a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-700italic.woff2 b/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-700italic.woff2
deleted file mode 100644
index 7c901cd8..00000000
Binary files a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-700italic.woff2 and /dev/null differ
diff --git a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-italic.woff b/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-italic.woff
deleted file mode 100644
index 99652481..00000000
Binary files a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-italic.woff and /dev/null differ
diff --git a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-italic.woff2 b/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-italic.woff2
deleted file mode 100644
index 343e5ba8..00000000
Binary files a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-italic.woff2 and /dev/null differ
diff --git a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-regular.woff b/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-regular.woff
deleted file mode 100644
index 92c3260e..00000000
Binary files a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-regular.woff and /dev/null differ
diff --git a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-regular.woff2 b/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-regular.woff2
deleted file mode 100644
index d552543b..00000000
Binary files a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-regular.woff2 and /dev/null differ
diff --git a/frontend/html/index.ejs b/frontend/html/index.ejs
deleted file mode 100644
index ae08b012..00000000
--- a/frontend/html/index.ejs
+++ /dev/null
@@ -1,9 +0,0 @@
-<% var title = 'Nginx Proxy Manager' %>
-<%- include partials/header.ejs %>
-
-
-	
-
-
-
-<%- include partials/footer.ejs %>
diff --git a/frontend/html/login.ejs b/frontend/html/login.ejs
deleted file mode 100644
index bc4b9a27..00000000
--- a/frontend/html/login.ejs
+++ /dev/null
@@ -1,9 +0,0 @@
-<% var title = 'Login – Nginx Proxy Manager' %>
-<%- include partials/header.ejs %>
-
-
-	
-
-
-
-<%- include partials/footer.ejs %>
diff --git a/frontend/html/partials/footer.ejs b/frontend/html/partials/footer.ejs
deleted file mode 100644
index 7fb2bd61..00000000
--- a/frontend/html/partials/footer.ejs
+++ /dev/null
@@ -1,2 +0,0 @@
-