Compare commits

..

2 Commits

Author        SHA1        Message                    Date
Jamie Curnow  32208f3864  More Persian lang updates  2025-11-03 08:12:52 +10:00
Jamie Curnow  52ab4844dc  Persian Locale             2025-11-02 22:52:43 +10:00
380 changed files with 4624 additions and 20646 deletions

.github/dependabot.yml vendored (104 lines changed)

@@ -1,104 +0,0 @@
version: 2
updates:
- package-ecosystem: "npm"
directory: "/backend"
schedule:
interval: "weekly"
groups:
dev-patch-updates:
dependency-type: "development"
update-types:
- "patch"
dev-minor-updates:
dependency-type: "development"
update-types:
- "minor"
prod-patch-updates:
dependency-type: "production"
update-types:
- "patch"
prod-minor-updates:
dependency-type: "production"
update-types:
- "minor"
- package-ecosystem: "npm"
directory: "/frontend"
schedule:
interval: "weekly"
groups:
dev-patch-updates:
dependency-type: "development"
update-types:
- "patch"
dev-minor-updates:
dependency-type: "development"
update-types:
- "minor"
prod-patch-updates:
dependency-type: "production"
update-types:
- "patch"
prod-minor-updates:
dependency-type: "production"
update-types:
- "minor"
- package-ecosystem: "npm"
directory: "/docs"
schedule:
interval: "weekly"
groups:
dev-patch-updates:
dependency-type: "development"
update-types:
- "patch"
dev-minor-updates:
dependency-type: "development"
update-types:
- "minor"
prod-patch-updates:
dependency-type: "production"
update-types:
- "patch"
prod-minor-updates:
dependency-type: "production"
update-types:
- "minor"
- package-ecosystem: "npm"
directory: "/test"
schedule:
interval: "weekly"
groups:
dev-patch-updates:
dependency-type: "development"
update-types:
- "patch"
dev-minor-updates:
dependency-type: "development"
update-types:
- "minor"
prod-patch-updates:
dependency-type: "production"
update-types:
- "patch"
prod-minor-updates:
dependency-type: "production"
update-types:
- "minor"
- package-ecosystem: "docker"
directory: "/docker"
schedule:
interval: "weekly"
groups:
updates:
update-types:
- "patch"
- "minor"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"


@@ -8,7 +8,7 @@ jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v10
- uses: actions/stale@v9
with:
stale-issue-label: 'stale'
stale-pr-label: 'stale'


@@ -1 +1 @@
2.14.0
2.13.0

Jenkinsfile vendored, new file (285 lines changed)

@@ -0,0 +1,285 @@
import groovy.transform.Field
@Field
def shOutput = ""
def buildxPushTags = ""
pipeline {
agent {
label 'docker-multiarch'
}
options {
buildDiscarder(logRotator(numToKeepStr: '5'))
disableConcurrentBuilds()
ansiColor('xterm')
}
environment {
IMAGE = 'nginx-proxy-manager'
BUILD_VERSION = getVersion()
MAJOR_VERSION = '2'
BRANCH_LOWER = "${BRANCH_NAME.toLowerCase().replaceAll('\\\\', '-').replaceAll('/', '-').replaceAll('\\.', '-')}"
BUILDX_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}"
COMPOSE_INTERACTIVE_NO_CLI = 1
}
stages {
stage('Environment') {
parallel {
stage('Master') {
when {
branch 'master'
}
steps {
script {
buildxPushTags = "-t docker.io/jc21/${IMAGE}:${BUILD_VERSION} -t docker.io/jc21/${IMAGE}:${MAJOR_VERSION} -t docker.io/jc21/${IMAGE}:latest"
}
}
}
stage('Other') {
when {
not {
branch 'master'
}
}
steps {
script {
// Defaults to the branch name, which applies to all branches AND PRs
buildxPushTags = "-t docker.io/nginxproxymanager/${IMAGE}-dev:${BRANCH_LOWER}"
}
}
}
stage('Versions') {
steps {
sh 'cat frontend/package.json | jq --arg BUILD_VERSION "${BUILD_VERSION}" \'.version = $BUILD_VERSION\' | sponge frontend/package.json'
sh 'echo -e "\\E[1;36mFrontend Version is:\\E[1;33m $(cat frontend/package.json | jq -r .version)\\E[0m"'
sh 'cat backend/package.json | jq --arg BUILD_VERSION "${BUILD_VERSION}" \'.version = $BUILD_VERSION\' | sponge backend/package.json'
sh 'echo -e "\\E[1;36mBackend Version is:\\E[1;33m $(cat backend/package.json | jq -r .version)\\E[0m"'
sh 'sed -i -E "s/(version-)[0-9]+\\.[0-9]+\\.[0-9]+(-green)/\\1${BUILD_VERSION}\\2/" README.md'
}
}
stage('Docker Login') {
steps {
withCredentials([usernamePassword(credentialsId: 'jc21-dockerhub', passwordVariable: 'dpass', usernameVariable: 'duser')]) {
sh 'docker login -u "${duser}" -p "${dpass}"'
}
}
}
}
}
stage('Builds') {
parallel {
stage('Project') {
steps {
script {
// Frontend and Backend
def shStatusCode = sh(label: 'Checking and Building', returnStatus: true, script: '''
set -e
./scripts/ci/frontend-build > ${WORKSPACE}/tmp-sh-build 2>&1
./scripts/ci/test-and-build > ${WORKSPACE}/tmp-sh-build 2>&1
''')
shOutput = readFile "${env.WORKSPACE}/tmp-sh-build"
if (shStatusCode != 0) {
error "${shOutput}"
}
}
}
post {
always {
sh 'rm -f ${WORKSPACE}/tmp-sh-build'
}
failure {
npmGithubPrComment("CI Error:\n\n```\n${shOutput}\n```", true)
}
}
}
stage('Docs') {
steps {
dir(path: 'docs') {
sh 'yarn install'
sh 'yarn build'
}
}
}
}
}
stage('Test Sqlite') {
environment {
COMPOSE_PROJECT_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}_sqlite"
COMPOSE_FILE = 'docker/docker-compose.ci.yml:docker/docker-compose.ci.sqlite.yml'
}
when {
not {
equals expected: 'UNSTABLE', actual: currentBuild.result
}
}
steps {
sh 'rm -rf ./test/results/junit/*'
sh './scripts/ci/fulltest-cypress'
}
post {
always {
// Dumps to analyze later
sh 'mkdir -p debug/sqlite'
sh 'docker logs $(docker compose ps --all -q fullstack) > debug/sqlite/docker_fullstack.log 2>&1'
sh 'docker logs $(docker compose ps --all -q stepca) > debug/sqlite/docker_stepca.log 2>&1'
sh 'docker logs $(docker compose ps --all -q pdns) > debug/sqlite/docker_pdns.log 2>&1'
sh 'docker logs $(docker compose ps --all -q pdns-db) > debug/sqlite/docker_pdns-db.log 2>&1'
sh 'docker logs $(docker compose ps --all -q dnsrouter) > debug/sqlite/docker_dnsrouter.log 2>&1'
junit 'test/results/junit/*'
sh 'docker compose down --remove-orphans --volumes -t 30 || true'
}
unstable {
dir(path: 'test/results') {
archiveArtifacts(allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml')
}
}
}
}
stage('Test Mysql') {
environment {
COMPOSE_PROJECT_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}_mysql"
COMPOSE_FILE = 'docker/docker-compose.ci.yml:docker/docker-compose.ci.mysql.yml'
}
when {
not {
equals expected: 'UNSTABLE', actual: currentBuild.result
}
}
steps {
sh 'rm -rf ./test/results/junit/*'
sh './scripts/ci/fulltest-cypress'
}
post {
always {
// Dumps to analyze later
sh 'mkdir -p debug/mysql'
sh 'docker logs $(docker compose ps --all -q fullstack) > debug/mysql/docker_fullstack.log 2>&1'
sh 'docker logs $(docker compose ps --all -q stepca) > debug/mysql/docker_stepca.log 2>&1'
sh 'docker logs $(docker compose ps --all -q pdns) > debug/mysql/docker_pdns.log 2>&1'
sh 'docker logs $(docker compose ps --all -q pdns-db) > debug/mysql/docker_pdns-db.log 2>&1'
sh 'docker logs $(docker compose ps --all -q dnsrouter) > debug/mysql/docker_dnsrouter.log 2>&1'
junit 'test/results/junit/*'
sh 'docker compose down --remove-orphans --volumes -t 30 || true'
}
unstable {
dir(path: 'test/results') {
archiveArtifacts(allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml')
}
}
}
}
stage('Test Postgres') {
environment {
COMPOSE_PROJECT_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}_postgres"
COMPOSE_FILE = 'docker/docker-compose.ci.yml:docker/docker-compose.ci.postgres.yml'
}
when {
not {
equals expected: 'UNSTABLE', actual: currentBuild.result
}
}
steps {
sh 'rm -rf ./test/results/junit/*'
sh './scripts/ci/fulltest-cypress'
}
post {
always {
// Dumps to analyze later
sh 'mkdir -p debug/postgres'
sh 'docker logs $(docker compose ps --all -q fullstack) > debug/postgres/docker_fullstack.log 2>&1'
sh 'docker logs $(docker compose ps --all -q stepca) > debug/postgres/docker_stepca.log 2>&1'
sh 'docker logs $(docker compose ps --all -q pdns) > debug/postgres/docker_pdns.log 2>&1'
sh 'docker logs $(docker compose ps --all -q pdns-db) > debug/postgres/docker_pdns-db.log 2>&1'
sh 'docker logs $(docker compose ps --all -q dnsrouter) > debug/postgres/docker_dnsrouter.log 2>&1'
sh 'docker logs $(docker compose ps --all -q db-postgres) > debug/postgres/docker_db-postgres.log 2>&1'
sh 'docker logs $(docker compose ps --all -q authentik) > debug/postgres/docker_authentik.log 2>&1'
sh 'docker logs $(docker compose ps --all -q authentik-redis) > debug/postgres/docker_authentik-redis.log 2>&1'
sh 'docker logs $(docker compose ps --all -q authentik-ldap) > debug/postgres/docker_authentik-ldap.log 2>&1'
junit 'test/results/junit/*'
sh 'docker compose down --remove-orphans --volumes -t 30 || true'
}
unstable {
dir(path: 'test/results') {
archiveArtifacts(allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml')
}
}
}
}
stage('MultiArch Build') {
when {
not {
equals expected: 'UNSTABLE', actual: currentBuild.result
}
}
steps {
sh "./scripts/buildx --push ${buildxPushTags}"
}
}
stage('Docs / Comment') {
parallel {
stage('Docs Job') {
when {
allOf {
branch pattern: "^(develop|master)\$", comparator: "REGEXP"
not {
equals expected: 'UNSTABLE', actual: currentBuild.result
}
}
}
steps {
build wait: false, job: 'nginx-proxy-manager-docs', parameters: [string(name: 'docs_branch', value: "$BRANCH_NAME")]
}
}
stage('PR Comment') {
when {
allOf {
changeRequest()
not {
equals expected: 'UNSTABLE', actual: currentBuild.result
}
}
}
steps {
script {
npmGithubPrComment("""Docker Image for build ${BUILD_NUMBER} is available on [DockerHub](https://cloud.docker.com/repository/docker/nginxproxymanager/${IMAGE}-dev):
```
nginxproxymanager/${IMAGE}-dev:${BRANCH_LOWER}
```
> [!NOTE]
> Ensure you back up your NPM instance before testing this image! Especially if there are database changes.
> This is a different Docker image namespace than the official image.
> [!WARNING]
> Changes and additions to DNS Providers require verification by at least 2 members of the community!
""", true)
}
}
}
}
}
}
post {
always {
sh 'echo Reverting ownership'
sh 'docker run --rm -v "$(pwd):/data" jc21/ci-tools chown -R "$(id -u):$(id -g)" /data'
printResult(true)
}
failure {
archiveArtifacts(artifacts: 'debug/**/*.*', allowEmptyArchive: true)
}
unstable {
archiveArtifacts(artifacts: 'debug/**/*.*', allowEmptyArchive: true)
}
}
}
def getVersion() {
ver = sh(script: 'cat .version', returnStdout: true)
return ver.trim()
}
def getCommit() {
ver = sh(script: 'git log -n 1 --format=%h', returnStdout: true)
return ver.trim()
}


@@ -1,7 +1,7 @@
<p align="center">
<img src="https://nginxproxymanager.com/github.png">
<br><br>
<img src="https://img.shields.io/badge/version-2.14.0-green.svg?style=for-the-badge">
<img src="https://img.shields.io/badge/version-2.13.0-green.svg?style=for-the-badge">
<a href="https://hub.docker.com/repository/docker/jc21/nginx-proxy-manager">
<img src="https://img.shields.io/docker/stars/jc21/nginx-proxy-manager.svg?style=for-the-badge">
</a>
@@ -36,10 +36,6 @@ so that the barrier for entry here is low.
- Advanced Nginx configuration available for super users
- User management, permissions and audit log
::: warning
`armv7` is no longer supported in version 2.14+. This is due to Node.js dropping support for armhf. Please
use the `2.13.7` image tag if this applies to you.
:::
## Hosting your home network
@@ -47,15 +43,16 @@ I won't go into too much detail here but here are the basics for someone new to
1. Your home router will have a Port Forwarding section somewhere. Log in and find it
2. Add port forwarding for port 80 and 443 to the server hosting this project
3. Configure your domain name details to point to your home, either with a static IP or a service like
- DuckDNS
- [Amazon Route53](https://github.com/jc21/route53-ddns)
- [Cloudflare](https://github.com/jc21/cloudflare-ddns)
3. Configure your domain name details to point to your home, either with a static IP or a service like DuckDNS or [Amazon Route53](https://github.com/jc21/route53-ddns)
4. Use the Nginx Proxy Manager as your gateway to forward to your other web based services
## Quick Setup
1. [Install Docker](https://docs.docker.com/install/)
1. Install Docker and Docker-Compose
- [Docker Install documentation](https://docs.docker.com/install/)
- [Docker-Compose Install documentation](https://docs.docker.com/compose/install/)
2. Create a docker-compose.yml file similar to this:
```yml
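# (The compose snippet itself is truncated in this diff view. Below is a
# minimal sketch of the commonly documented quick-setup file; the image tag,
# ports and volume paths are the usual defaults and may differ from the
# repo's exact example.)
services:
  app:
    image: 'jc21/nginx-proxy-manager:latest'
    restart: unless-stopped
    ports:
      - '80:80'   # public HTTP
      - '443:443' # public HTTPS
      - '81:81'   # admin web UI
    volumes:
      - ./data:/data
      - ./letsencrypt:/etc/letsencrypt
```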


@@ -5,7 +5,7 @@ import fileUpload from "express-fileupload";
import { isDebugMode } from "./lib/config.js";
import cors from "./lib/express/cors.js";
import jwt from "./lib/express/jwt.js";
import { debug, express as logger } from "./logger.js";
import { express as logger } from "./logger.js";
import mainRoutes from "./routes/main.js";
/**
@@ -80,7 +80,7 @@ app.use((err, req, res, _) => {
// Not every error is worth logging - but this is good for now until it gets annoying.
if (typeof err.stack !== "undefined" && err.stack) {
debug(logger, err.stack);
logger.debug(err.stack);
if (typeof err.public === "undefined" || !err.public) {
logger.warn(err.message);
}


@@ -1,5 +1,5 @@
{
"$schema": "https://biomejs.dev/schemas/2.3.14/schema.json",
"$schema": "https://biomejs.dev/schemas/2.3.1/schema.json",
"vcs": {
"enabled": true,
"clientKind": "git",


@@ -22,20 +22,12 @@
"dependencies": "",
"credentials": "dns_aliyun_access_key = 12345678\ndns_aliyun_access_key_secret = 1234567890abcdef1234567890abcdef",
"full_plugin_name": "dns-aliyun"
},
"arvan": {
"name": "ArvanCloud",
"package_name": "certbot-dns-arvan",
"version": ">=0.1.0",
"dependencies": "",
"credentials": "dns_arvan_key = Apikey xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
"full_plugin_name": "dns-arvan"
},
"azure": {
"name": "Azure",
"package_name": "certbot-dns-azure",
"version": "~=2.6.1",
"dependencies": "azure-mgmt-dns==8.2.0",
"version": "~=1.2.0",
"dependencies": "",
"credentials": "# This plugin supports API authentication using either Service Principals or utilizing a Managed Identity assigned to the virtual machine.\n# Regardless of which authentication method is used, the identity will need the “DNS Zone Contributor” role assigned to it.\n# As multiple Azure DNS Zones in multiple resource groups can exist, the config file needs a mapping of zone to resource group ID. Multiple zones -> ID mappings can be listed by using the key dns_azure_zoneX where X is a unique number. At least 1 zone mapping is required.\n\n# Using a service principal (option 1)\ndns_azure_sp_client_id = 912ce44a-0156-4669-ae22-c16a17d34ca5\ndns_azure_sp_client_secret = E-xqXU83Y-jzTI6xe9fs2YC~mck3ZzUih9\ndns_azure_tenant_id = ed1090f3-ab18-4b12-816c-599af8a88cf7\n\n# Using user assigned MSI (option 2)\n# dns_azure_msi_client_id = 912ce44a-0156-4669-ae22-c16a17d34ca5\n\n# Using system assigned MSI (option 3)\n# dns_azure_msi_system_assigned = true\n\n# Zones (at least one always required)\ndns_azure_zone1 = example.com:/subscriptions/c135abce-d87d-48df-936c-15596c6968a5/resourceGroups/dns1\ndns_azure_zone2 = example.org:/subscriptions/99800903-fb14-4992-9aff-12eaf2744622/resourceGroups/dns2",
"full_plugin_name": "dns-azure"
},
@@ -82,7 +74,7 @@
"cloudns": {
"name": "ClouDNS",
"package_name": "certbot-dns-cloudns",
"version": "~=0.7.0",
"version": "~=0.6.0",
"dependencies": "",
"credentials": "# Target user ID (see https://www.cloudns.net/api-settings/)\n\tdns_cloudns_auth_id=1234\n\t# Alternatively, one of the following two options can be set:\n\t# dns_cloudns_sub_auth_id=1234\n\t# dns_cloudns_sub_auth_user=foobar\n\n\t# API password\n\tdns_cloudns_auth_password=password1",
"full_plugin_name": "dns-cloudns"
@@ -263,14 +255,6 @@
"credentials": "dns_gcore_apitoken = 0123456789abcdef0123456789abcdef01234567",
"full_plugin_name": "dns-gcore"
},
"glesys": {
"name": "Glesys",
"package_name": "certbot-dns-glesys",
"version": "~=2.1.0",
"dependencies": "",
"credentials": "dns_glesys_user = CL00000\ndns_glesys_password = apikeyvalue",
"full_plugin_name": "dns-glesys"
},
"godaddy": {
"name": "GoDaddy",
"package_name": "certbot-dns-godaddy",
@@ -303,14 +287,6 @@
"credentials": "dns_he_user = Me\ndns_he_pass = my HE password",
"full_plugin_name": "dns-he"
},
"he-ddns": {
"name": "Hurricane Electric - DDNS",
"package_name": "certbot-dns-he-ddns",
"version": "~=0.1.0",
"dependencies": "",
"credentials": "dns_he_ddns_password = verysecurepassword",
"full_plugin_name": "dns-he-ddns"
},
"hetzner": {
"name": "Hetzner",
"package_name": "certbot-dns-hetzner",
@@ -318,14 +294,6 @@
"dependencies": "",
"credentials": "dns_hetzner_api_token = 0123456789abcdef0123456789abcdef",
"full_plugin_name": "dns-hetzner"
},
"hetzner-cloud": {
"name": "Hetzner Cloud",
"package_name": "certbot-dns-hetzner-cloud",
"version": "~=1.0.4",
"dependencies": "",
"credentials": "dns_hetzner_cloud_api_token = your_api_token_here",
"full_plugin_name": "dns-hetzner-cloud"
},
"hostingnl": {
"name": "Hosting.nl",
@@ -390,19 +358,11 @@
"dependencies": "",
"credentials": "dns_joker_username = <Dynamic DNS Authentication Username>\ndns_joker_password = <Dynamic DNS Authentication Password>\ndns_joker_domain = <Dynamic DNS Domain>",
"full_plugin_name": "dns-joker"
},
"kas": {
"name": "All-Inkl",
"package_name": "certbot-dns-kas",
"version": "~=0.1.1",
"dependencies": "kasserver",
"credentials": "dns_kas_user = your_kas_user\ndns_kas_password = your_kas_password",
"full_plugin_name": "dns-kas"
},
"leaseweb": {
"name": "LeaseWeb",
"package_name": "certbot-dns-leaseweb",
"version": "~=1.0.3",
"version": "~=1.0.1",
"dependencies": "",
"credentials": "dns_leaseweb_api_token = 01234556789",
"full_plugin_name": "dns-leaseweb"
@@ -431,14 +391,6 @@
"credentials": "dns_luadns_email = user@example.com\ndns_luadns_token = 0123456789abcdef0123456789abcdef",
"full_plugin_name": "dns-luadns"
},
"mchost24": {
"name": "MC-HOST24",
"package_name": "certbot-dns-mchost24",
"version": "",
"dependencies": "",
"credentials": "# Obtain API token using https://github.com/JoeJoeTV/mchost24-api-python\ndns_mchost24_api_token=<insert obtained API token here>",
"full_plugin_name": "dns-mchost24"
},
"mijnhost": {
"name": "mijn.host",
"package_name": "certbot-dns-mijn-host",
@@ -514,7 +466,7 @@
"porkbun": {
"name": "Porkbun",
"package_name": "certbot-dns-porkbun",
"version": "~=0.11.0",
"version": "~=0.9",
"dependencies": "",
"credentials": "dns_porkbun_key=your-porkbun-api-key\ndns_porkbun_secret=your-porkbun-api-secret",
"full_plugin_name": "dns-porkbun"
@@ -559,14 +511,6 @@
"credentials": "[default]\naws_access_key_id=AKIAIOSFODNN7EXAMPLE\naws_secret_access_key=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY",
"full_plugin_name": "dns-route53"
},
"simply": {
"name": "Simply",
"package_name": "certbot-dns-simply",
"version": "~=0.1.2",
"dependencies": "",
"credentials": "dns_simply_account_name = UExxxxxx\ndns_simply_api_key = DsHJdsjh2812872sahj",
"full_plugin_name": "dns-simply"
},
"spaceship": {
"name": "Spaceship",
"package_name": "certbot-dns-spaceship",


@@ -2,7 +2,7 @@
"database": {
"engine": "knex-native",
"knex": {
"client": "better-sqlite3",
"client": "sqlite3",
"connection": {
"filename": "/app/config/mydb.sqlite"
},


@@ -1,8 +1,6 @@
import knex from "knex";
import {configGet, configHas} from "./lib/config.js";
let instance = null;
const generateDbConfig = () => {
if (!configHas("database")) {
throw new Error(
@@ -24,7 +22,6 @@ const generateDbConfig = () => {
password: cfg.password,
database: cfg.name,
port: cfg.port,
...(cfg.ssl ? { ssl: cfg.ssl } : {})
},
migrations: {
tableName: "migrations",
@@ -32,11 +29,4 @@ const generateDbConfig = () => {
};
};
const getInstance = () => {
if (!instance) {
instance = knex(generateDbConfig());
}
return instance;
}
export default getInstance;
export default knex(generateDbConfig());


@@ -1,305 +0,0 @@
import crypto from "node:crypto";
import bcrypt from "bcrypt";
import { createGuardrails, generateSecret, generateURI, verify } from "otplib";
import errs from "../lib/error.js";
import authModel from "../models/auth.js";
import internalUser from "./user.js";
const APP_NAME = "Nginx Proxy Manager";
const BACKUP_CODE_COUNT = 8;
/**
* Generate backup codes
* @returns {Promise<{plain: string[], hashed: string[]}>}
*/
const generateBackupCodes = async () => {
const plain = [];
const hashed = [];
for (let i = 0; i < BACKUP_CODE_COUNT; i++) {
const code = crypto.randomBytes(4).toString("hex").toUpperCase();
plain.push(code);
const hash = await bcrypt.hash(code, 10);
hashed.push(hash);
}
return { plain, hashed };
};
const internal2fa = {
/**
* Check if user has 2FA enabled
* @param {number} userId
* @returns {Promise<boolean>}
*/
isEnabled: async (userId) => {
const auth = await internal2fa.getUserPasswordAuth(userId);
return auth?.meta?.totp_enabled === true;
},
/**
* Get 2FA status for user
* @param {Access} access
* @param {number} userId
* @returns {Promise<{enabled: boolean, backup_codes_remaining: number}>}
*/
getStatus: async (access, userId) => {
await access.can("users:password", userId);
await internalUser.get(access, { id: userId });
const auth = await internal2fa.getUserPasswordAuth(userId);
const enabled = auth?.meta?.totp_enabled === true;
let backup_codes_remaining = 0;
if (enabled) {
const backupCodes = auth.meta.backup_codes || [];
backup_codes_remaining = backupCodes.length;
}
return {
enabled,
backup_codes_remaining,
};
},
/**
* Start 2FA setup - store pending secret
*
* @param {Access} access
* @param {number} userId
* @returns {Promise<{secret: string, otpauth_url: string}>}
*/
startSetup: async (access, userId) => {
await access.can("users:password", userId);
const user = await internalUser.get(access, { id: userId });
const secret = generateSecret();
const otpauth_url = generateURI({
issuer: APP_NAME,
label: user.email,
secret: secret,
});
const auth = await internal2fa.getUserPasswordAuth(userId);
// ensure user isn't already setup for 2fa
const enabled = auth?.meta?.totp_enabled === true;
if (enabled) {
throw new errs.ValidationError("2FA is already enabled");
}
const meta = auth.meta || {};
meta.totp_pending_secret = secret;
await authModel
.query()
.where("id", auth.id)
.andWhere("user_id", userId)
.andWhere("type", "password")
.patch({ meta });
return { secret, otpauth_url };
},
/**
* Enable 2FA after verifying code
*
* @param {Access} access
* @param {number} userId
* @param {string} code
* @returns {Promise<{backup_codes: string[]}>}
*/
enable: async (access, userId, code) => {
await access.can("users:password", userId);
await internalUser.get(access, { id: userId });
const auth = await internal2fa.getUserPasswordAuth(userId);
const secret = auth?.meta?.totp_pending_secret || false;
if (!secret) {
throw new errs.ValidationError("No pending 2FA setup found");
}
const result = await verify({ token: code, secret });
if (!result.valid) {
throw new errs.ValidationError("Invalid verification code");
}
const { plain, hashed } = await generateBackupCodes();
const meta = {
...auth.meta,
totp_secret: secret,
totp_enabled: true,
totp_enabled_at: new Date().toISOString(),
backup_codes: hashed,
};
delete meta.totp_pending_secret;
await authModel
.query()
.where("id", auth.id)
.andWhere("user_id", userId)
.andWhere("type", "password")
.patch({ meta });
return { backup_codes: plain };
},
/**
* Disable 2FA
*
* @param {Access} access
* @param {number} userId
* @param {string} code
* @returns {Promise<void>}
*/
disable: async (access, userId, code) => {
await access.can("users:password", userId);
await internalUser.get(access, { id: userId });
const auth = await internal2fa.getUserPasswordAuth(userId);
const enabled = auth?.meta?.totp_enabled === true;
if (!enabled) {
throw new errs.ValidationError("2FA is not enabled");
}
const result = await verify({
token: code,
secret: auth.meta.totp_secret,
guardrails: createGuardrails({
MIN_SECRET_BYTES: 10,
}),
});
if (!result.valid) {
throw new errs.AuthError("Invalid verification code");
}
const meta = { ...auth.meta };
delete meta.totp_secret;
delete meta.totp_enabled;
delete meta.totp_enabled_at;
delete meta.backup_codes;
await authModel
.query()
.where("id", auth.id)
.andWhere("user_id", userId)
.andWhere("type", "password")
.patch({ meta });
},
/**
* Verify 2FA code for login
*
* @param {number} userId
* @param {string} token
* @returns {Promise<boolean>}
*/
verifyForLogin: async (userId, token) => {
const auth = await internal2fa.getUserPasswordAuth(userId);
const secret = auth?.meta?.totp_secret || false;
if (!secret) {
return false;
}
// Try the TOTP code first if it's 6 chars; verification throws when the token
// isn't 6 chars, and backup codes are 8 chars.
if (token.length === 6) {
const result = await verify({
token,
secret,
// These guardrails lower the minimum length requirement for secrets.
// In v12 of otplib the default minimum length is 10 and in v13 it is 16.
// Since there are 2fa secrets in the wild generated with v12 we need to allow shorter secrets
// so people won't be locked out when upgrading.
guardrails: createGuardrails({
MIN_SECRET_BYTES: 10,
}),
});
if (result.valid) {
return true;
}
}
// Try backup codes
const backupCodes = auth?.meta?.backup_codes || [];
for (let i = 0; i < backupCodes.length; i++) {
const match = await bcrypt.compare(token.toUpperCase(), backupCodes[i]);
if (match) {
// Remove used backup code
const updatedCodes = [...backupCodes];
updatedCodes.splice(i, 1);
const meta = { ...auth.meta, backup_codes: updatedCodes };
await authModel
.query()
.where("id", auth.id)
.andWhere("user_id", userId)
.andWhere("type", "password")
.patch({ meta });
return true;
}
}
return false;
},
/**
* Regenerate backup codes
*
* @param {Access} access
* @param {number} userId
* @param {string} token
* @returns {Promise<{backup_codes: string[]}>}
*/
regenerateBackupCodes: async (access, userId, token) => {
await access.can("users:password", userId);
await internalUser.get(access, { id: userId });
const auth = await internal2fa.getUserPasswordAuth(userId);
const enabled = auth?.meta?.totp_enabled === true;
const secret = auth?.meta?.totp_secret || false;
if (!enabled) {
throw new errs.ValidationError("2FA is not enabled");
}
if (!secret) {
throw new errs.ValidationError("No 2FA secret found");
}
const result = await verify({
token,
secret,
});
if (!result.valid) {
throw new errs.ValidationError("Invalid verification code");
}
const { plain, hashed } = await generateBackupCodes();
const meta = { ...auth.meta, backup_codes: hashed };
await authModel
.query()
.where("id", auth.id)
.andWhere("user_id", userId)
.andWhere("type", "password")
.patch({ meta });
return { backup_codes: plain };
},
getUserPasswordAuth: async (userId) => {
const auth = await authModel
.query()
.where("user_id", userId)
.andWhere("type", "password")
.first();
if (!auth) {
throw new errs.ItemNotFoundError("Auth not found");
}
return auth;
},
};
export default internal2fa;
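The JSDoc in this module describes a two-step enrolment (`startSetup` stores a pending secret, `enable` verifies a code and commits it) followed by a login-time check. A minimal sketch of how a caller might drive that flow; `access`, `userId` and `promptForCode` are assumed to come from the surrounding request context:

```js
import internal2fa from "./internal/2fa.js";

// Hypothetical enrolment driver for the flow described above.
const enrol2fa = async (access, userId, promptForCode) => {
	// 1. Store a pending secret and get the otpauth:// URI for the QR code.
	const { otpauth_url } = await internal2fa.startSetup(access, userId);
	// 2. The user scans the QR code and enters the 6-digit TOTP code it shows.
	const code = await promptForCode(otpauth_url);
	// 3. A valid code flips totp_enabled and returns one-time backup codes.
	const { backup_codes } = await internal2fa.enable(access, userId, code);
	return backup_codes; // show once; only bcrypt hashes are persisted
};
```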


@@ -4,14 +4,13 @@ import path from "path";
import archiver from "archiver";
import _ from "lodash";
import moment from "moment";
import { ProxyAgent } from "proxy-agent";
import tempWrite from "temp-write";
import dnsPlugins from "../certbot/dns-plugins.json" with { type: "json" };
import { installPlugin } from "../lib/certbot.js";
import { useLetsencryptServer, useLetsencryptStaging } from "../lib/config.js";
import error from "../lib/error.js";
import utils from "../lib/utils.js";
import { debug, ssl as logger } from "../logger.js";
import { ssl as logger } from "../logger.js";
import certificateModel from "../models/certificate.js";
import tokenModel from "../models/token.js";
import userModel from "../models/user.js";
@@ -356,7 +355,7 @@ const internalCertificate = {
const opName = `/tmp/${downloadName}`;
await internalCertificate.zipFiles(certFiles, opName);
debug(logger, "zip completed : ", opName);
logger.debug("zip completed : ", opName);
return {
fileName: opName,
};
@@ -376,7 +375,7 @@ const internalCertificate = {
return new Promise((resolve, reject) => {
source.map((fl) => {
const fileName = path.basename(fl);
debug(logger, fl, "added to certificate zip");
logger.debug(fl, "added to certificate zip");
archive.file(fl, { name: fileName });
return true;
});
@@ -630,7 +629,7 @@ const internalCertificate = {
* @param {String} privateKey This is the entire key contents as a string
*/
checkPrivateKey: async (privateKey) => {
const filepath = await tempWrite(privateKey);
const filepath = await tempWrite(privateKey, "/tmp");
const failTimeout = setTimeout(() => {
throw new error.ValidationError(
"Result Validation Error: Validation timed out. This could be due to the key being passphrase-protected.",
@@ -660,8 +659,8 @@ const internalCertificate = {
* @param {Boolean} [throwExpired] Throw when the certificate is out of date
*/
getCertificateInfo: async (certificate, throwExpired) => {
const filepath = await tempWrite(certificate);
try {
const filepath = await tempWrite(certificate, "/tmp");
const certData = await internalCertificate.getCertificateInfoFromFile(filepath, throwExpired);
fs.unlinkSync(filepath);
return certData;
@@ -798,11 +797,6 @@ const internalCertificate = {
certificate.domain_names.join(","),
];
// Add key-type parameter if specified
if (certificate.meta?.key_type) {
args.push("--key-type", certificate.meta.key_type);
}
const adds = internalCertificate.getAdditionalCertbotArgs(certificate.id);
args.push(...adds.args);
@@ -863,11 +857,6 @@ const internalCertificate = {
);
}
// Add key-type parameter if specified
if (certificate.meta?.key_type) {
args.push("--key-type", certificate.meta.key_type);
}
const adds = internalCertificate.getAdditionalCertbotArgs(certificate.id, certificate.meta.dns_provider);
args.push(...adds.args);
@@ -948,11 +937,6 @@ const internalCertificate = {
"--disable-hook-validation",
];
// Add key-type parameter if specified
if (certificate.meta?.key_type) {
args.push("--key-type", certificate.meta.key_type);
}
const adds = internalCertificate.getAdditionalCertbotArgs(certificate.id, certificate.meta.dns_provider);
args.push(...adds.args);
@@ -994,11 +978,6 @@ const internalCertificate = {
"--no-random-sleep-on-renew",
];
// Add key-type parameter if specified
if (certificate.meta?.key_type) {
args.push("--key-type", certificate.meta.key_type);
}
const adds = internalCertificate.getAdditionalCertbotArgs(certificate.id, certificate.meta.dns_provider);
args.push(...adds.args);
@@ -1135,7 +1114,6 @@ const internalCertificate = {
performTestForDomain: async (domain) => {
logger.info(`Testing http challenge for ${domain}`);
const agent = new ProxyAgent();
const url = `http://${domain}/.well-known/acme-challenge/test-challenge`;
const formBody = `method=G&url=${encodeURI(url)}&bodytype=T&requestbody=&headername=User-Agent&headervalue=None&locationid=1&ch=false&cc=false`;
const options = {
@@ -1145,7 +1123,6 @@ const internalCertificate = {
"Content-Type": "application/x-www-form-urlencoded",
"Content-Length": Buffer.byteLength(formBody),
},
agent,
};
const result = await new Promise((resolve) => {


@@ -2,7 +2,6 @@ import fs from "node:fs";
import https from "node:https";
import { dirname } from "node:path";
import { fileURLToPath } from "node:url";
import { ProxyAgent } from "proxy-agent";
import errs from "../lib/error.js";
import utils from "../lib/utils.js";
import { ipRanges as logger } from "../logger.js";
@@ -30,11 +29,10 @@ const internalIpRanges = {
},
fetchUrl: (url) => {
const agent = new ProxyAgent();
return new Promise((resolve, reject) => {
logger.info(`Fetching ${url}`);
return https
.get(url, { agent }, (res) => {
.get(url, (res) => {
res.setEncoding("utf8");
let raw_data = "";
res.on("data", (chunk) => {


@@ -4,7 +4,7 @@ import { fileURLToPath } from "node:url";
import _ from "lodash";
import errs from "../lib/error.js";
import utils from "../lib/utils.js";
import { debug, nginx as logger } from "../logger.js";
import { nginx as logger } from "../logger.js";
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
@@ -68,7 +68,7 @@ const internalNginx = {
return true;
});
debug(logger, "Nginx test failed:", valid_lines.join("\n"));
logger.debug("Nginx test failed:", valid_lines.join("\n"));
// config is bad, update meta and delete config
combined_meta = _.assign({}, host.meta, {
@@ -102,7 +102,7 @@ const internalNginx = {
* @returns {Promise}
*/
test: () => {
debug(logger, "Testing Nginx configuration");
logger.debug("Testing Nginx configuration");
return utils.execFile("/usr/sbin/nginx", ["-t", "-g", "error_log off;"]);
},
@@ -190,7 +190,7 @@ const internalNginx = {
const host = JSON.parse(JSON.stringify(host_row));
const nice_host_type = internalNginx.getFileFriendlyHostType(host_type);
debug(logger, `Generating ${nice_host_type} Config:`, JSON.stringify(host, null, 2));
logger.debug(`Generating ${nice_host_type} Config:`, JSON.stringify(host, null, 2));
const renderEngine = utils.getRenderEngine();
@@ -216,11 +216,6 @@ const internalNginx = {
}
}
// For redirection hosts, if the scheme is not http or https, set it to $scheme
if (nice_host_type === "redirection_host" && ['http', 'https'].indexOf(host.forward_scheme.toLowerCase()) === -1) {
host.forward_scheme = "$scheme";
}
if (host.locations) {
//logger.info ('host.locations = ' + JSON.stringify(host.locations, null, 2));
origLocations = [].concat(host.locations);
@@ -246,7 +241,7 @@ const internalNginx = {
.parseAndRender(template, host)
.then((config_text) => {
fs.writeFileSync(filename, config_text, { encoding: "utf8" });
debug(logger, "Wrote config:", filename, config_text);
logger.debug("Wrote config:", filename, config_text);
// Restore locations array
host.locations = origLocations;
@@ -254,7 +249,7 @@ const internalNginx = {
resolve(true);
})
.catch((err) => {
debug(logger, `Could not write ${filename}:`, err.message);
logger.debug(`Could not write ${filename}:`, err.message);
reject(new errs.ConfigurationError(err.message));
});
});
@@ -270,7 +265,7 @@ const internalNginx = {
* @returns {Promise}
*/
generateLetsEncryptRequestConfig: (certificate) => {
debug(logger, "Generating LetsEncrypt Request Config:", certificate);
logger.debug("Generating LetsEncrypt Request Config:", certificate);
const renderEngine = utils.getRenderEngine();
return new Promise((resolve, reject) => {
@@ -290,11 +285,11 @@ const internalNginx = {
.parseAndRender(template, certificate)
.then((config_text) => {
fs.writeFileSync(filename, config_text, { encoding: "utf8" });
debug(logger, "Wrote config:", filename, config_text);
logger.debug("Wrote config:", filename, config_text);
resolve(true);
})
.catch((err) => {
debug(logger, `Could not write ${filename}:`, err.message);
logger.debug(`Could not write ${filename}:`, err.message);
reject(new errs.ConfigurationError(err.message));
});
});
@@ -310,10 +305,10 @@ const internalNginx = {
return;
}
try {
debug(logger, `Deleting file: ${filename}`);
logger.debug(`Deleting file: ${filename}`);
fs.unlinkSync(filename);
} catch (err) {
debug(logger, "Could not delete file:", JSON.stringify(err, null, 2));
logger.debug("Could not delete file:", JSON.stringify(err, null, 2));
}
},


@@ -1,84 +0,0 @@
import https from "node:https";
import { ProxyAgent } from "proxy-agent";
import { debug, remoteVersion as logger } from "../logger.js";
import pjson from "../package.json" with { type: "json" };
const VERSION_URL = "https://api.github.com/repos/NginxProxyManager/nginx-proxy-manager/releases/latest";
const internalRemoteVersion = {
cache_timeout: 1000 * 60 * 15, // 15 minutes
last_result: null,
last_fetch_time: null,
/**
* Fetch the latest version info, using a cached result if within the cache timeout period.
* @return {Promise<{current: string, latest: string, update_available: boolean}>} Version info
*/
get: async () => {
if (
!internalRemoteVersion.last_result ||
!internalRemoteVersion.last_fetch_time ||
Date.now() - internalRemoteVersion.last_fetch_time > internalRemoteVersion.cache_timeout
) {
const raw = await internalRemoteVersion.fetchUrl(VERSION_URL);
const data = JSON.parse(raw);
internalRemoteVersion.last_result = data;
internalRemoteVersion.last_fetch_time = Date.now();
} else {
debug(logger, "Using cached remote version result");
}
const latestVersion = internalRemoteVersion.last_result.tag_name;
const version = pjson.version.split("-").shift().split(".");
const currentVersion = `v${version[0]}.${version[1]}.${version[2]}`;
return {
current: currentVersion,
latest: latestVersion,
update_available: internalRemoteVersion.compareVersions(currentVersion, latestVersion),
};
},
fetchUrl: (url) => {
const agent = new ProxyAgent();
const headers = {
"User-Agent": `NginxProxyManager v${pjson.version}`,
};
return new Promise((resolve, reject) => {
logger.info(`Fetching ${url}`);
return https
.get(url, { agent, headers }, (res) => {
res.setEncoding("utf8");
let raw_data = "";
res.on("data", (chunk) => {
raw_data += chunk;
});
res.on("end", () => {
resolve(raw_data);
});
})
.on("error", (err) => {
reject(err);
});
});
},
compareVersions: (current, latest) => {
const cleanCurrent = current.replace(/^v/, "");
const cleanLatest = latest.replace(/^v/, "");
const currentParts = cleanCurrent.split(".").map(Number);
const latestParts = cleanLatest.split(".").map(Number);
for (let i = 0; i < Math.max(currentParts.length, latestParts.length); i++) {
const curr = currentParts[i] || 0;
const lat = latestParts[i] || 0;
if (lat > curr) return true;
if (lat < curr) return false;
}
return false;
},
};
export default internalRemoteVersion;
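`compareVersions` above compares release tags numerically, segment by segment, which matters once any segment reaches two digits. A quick sketch:

```js
internalRemoteVersion.compareVersions("v2.9.0", "v2.14.0");  // true: update available
internalRemoteVersion.compareVersions("v2.14.0", "v2.14.0"); // false: already current
// A plain string comparison would misorder these ("9" sorts after "1"):
"v2.9.0" < "v2.14.0"; // false
```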


@@ -15,10 +15,10 @@ const internalReport = {
const userId = access.token.getUserId(1);
const promises = [
internalProxyHost.getCount(userId, access_data.permission_visibility),
internalRedirectionHost.getCount(userId, access_data.permission_visibility),
internalStream.getCount(userId, access_data.permission_visibility),
internalDeadHost.getCount(userId, access_data.permission_visibility),
internalProxyHost.getCount(userId, access_data.visibility),
internalRedirectionHost.getCount(userId, access_data.visibility),
internalStream.getCount(userId, access_data.visibility),
internalDeadHost.getCount(userId, access_data.visibility),
];
return Promise.all(promises);


@@ -4,12 +4,9 @@ import { parseDatePeriod } from "../lib/helpers.js";
import authModel from "../models/auth.js";
import TokenModel from "../models/token.js";
import userModel from "../models/user.js";
import twoFactor from "./2fa.js";
const ERROR_MESSAGE_INVALID_AUTH = "Invalid email or password";
const ERROR_MESSAGE_INVALID_AUTH_I18N = "error.invalid-auth";
const ERROR_MESSAGE_INVALID_2FA = "Invalid verification code";
const ERROR_MESSAGE_INVALID_2FA_I18N = "error.invalid-2fa";
export default {
/**
@@ -62,25 +59,6 @@ export default {
throw new errs.AuthError(`Invalid scope: ${data.scope}`);
}
// Check if 2FA is enabled
const has2FA = await twoFactor.isEnabled(user.id);
if (has2FA) {
// Return challenge token instead of full token
const challengeToken = await Token.create({
iss: issuer || "api",
attrs: {
id: user.id,
},
scope: ["2fa-challenge"],
expiresIn: "5m",
});
return {
requires_2fa: true,
challenge_token: challengeToken.token,
};
}
// Create a moment of the expiry expression
const expiry = parseDatePeriod(data.expiry);
if (expiry === null) {
@@ -151,65 +129,6 @@ export default {
throw new error.AssertionFailedError("Existing token contained invalid user data");
},
/**
* Verify 2FA code and return full token
* @param {string} challengeToken
* @param {string} code
* @param {string} [expiry]
* @returns {Promise}
*/
verify2FA: async (challengeToken, code, expiry) => {
const Token = TokenModel();
const tokenExpiry = expiry || "1d";
// Verify challenge token
let tokenData;
try {
tokenData = await Token.load(challengeToken);
} catch {
throw new errs.AuthError("Invalid or expired challenge token");
}
// Check scope
if (!tokenData.scope || tokenData.scope[0] !== "2fa-challenge") {
throw new errs.AuthError("Invalid challenge token");
}
const userId = tokenData.attrs?.id;
if (!userId) {
throw new errs.AuthError("Invalid challenge token");
}
// Verify 2FA code
const valid = await twoFactor.verifyForLogin(userId, code);
if (!valid) {
throw new errs.AuthError(
ERROR_MESSAGE_INVALID_2FA,
ERROR_MESSAGE_INVALID_2FA_I18N,
);
}
// Create full token
const expiryDate = parseDatePeriod(tokenExpiry);
if (expiryDate === null) {
throw new errs.AuthError(`Invalid expiry time: ${tokenExpiry}`);
}
const signed = await Token.create({
iss: "api",
attrs: {
id: userId,
},
scope: ["user"],
expiresIn: tokenExpiry,
});
return {
token: signed.token,
expires: expiryDate.toISOString(),
};
},
/**
* @param {Object} user
* @returns {Promise}


@@ -5,7 +5,7 @@ import { global as logger } from "../logger.js";
const keysFile = '/data/keys.json';
const mysqlEngine = 'mysql2';
const postgresEngine = 'pg';
const sqliteClientName = 'better-sqlite3';
const sqliteClientName = 'sqlite3';
let instance = null;
@@ -25,26 +25,15 @@ const configure = () => {
if (configData?.database) {
logger.info(`Using configuration from file: ${filename}`);
// Migrate those who have "mysql" engine to "mysql2"
if (configData.database.engine === "mysql") {
configData.database.engine = mysqlEngine;
}
instance = configData;
instance.keys = getKeys();
return;
}
}
const toBool = (v) => /^(1|true|yes|on)$/i.test((v || '').trim());
const envMysqlHost = process.env.DB_MYSQL_HOST || null;
const envMysqlUser = process.env.DB_MYSQL_USER || null;
const envMysqlName = process.env.DB_MYSQL_NAME || null;
const envMysqlSSL = toBool(process.env.DB_MYSQL_SSL);
const envMysqlSSLRejectUnauthorized = process.env.DB_MYSQL_SSL_REJECT_UNAUTHORIZED === undefined ? true : toBool(process.env.DB_MYSQL_SSL_REJECT_UNAUTHORIZED);
const envMysqlSSLVerifyIdentity = process.env.DB_MYSQL_SSL_VERIFY_IDENTITY === undefined ? true : toBool(process.env.DB_MYSQL_SSL_VERIFY_IDENTITY);
if (envMysqlHost && envMysqlUser && envMysqlName) {
// we have enough mysql creds to go with mysql
logger.info("Using MySQL configuration");
@@ -56,7 +45,6 @@ const configure = () => {
user: envMysqlUser,
password: process.env.DB_MYSQL_PASSWORD,
name: envMysqlName,
ssl: envMysqlSSL ? { rejectUnauthorized: envMysqlSSLRejectUnauthorized, verifyIdentity: envMysqlSSLVerifyIdentity } : false,
},
keys: getKeys(),
};
@@ -84,7 +72,6 @@ const configure = () => {
}
const envSqliteFile = process.env.DB_SQLITE_FILE || "/data/database.sqlite";
logger.info(`Using Sqlite: ${envSqliteFile}`);
instance = {
database: {
@@ -103,9 +90,7 @@ const configure = () => {
const getKeys = () => {
// Get keys from file
if (isDebugMode()) {
logger.debug("Checking for keys file:", keysFile);
}
logger.debug("Checking for keys file:", keysFile);
if (!fs.existsSync(keysFile)) {
generateKeys();
} else if (process.env.DEBUG) {


@@ -3,14 +3,14 @@ import { dirname } from "node:path";
import { fileURLToPath } from "node:url";
import { Liquid } from "liquidjs";
import _ from "lodash";
import { debug, global as logger } from "../logger.js";
import { global as logger } from "../logger.js";
import errs from "./error.js";
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const exec = async (cmd, options = {}) => {
debug(logger, "CMD:", cmd);
logger.debug("CMD:", cmd);
const { stdout, stderr } = await new Promise((resolve, reject) => {
const child = nodeExec(cmd, options, (isError, stdout, stderr) => {
if (isError) {
@@ -34,7 +34,7 @@ const exec = async (cmd, options = {}) => {
* @returns {Promise}
*/
const execFile = (cmd, args, options) => {
debug(logger, `CMD: ${cmd} ${args ? args.join(" ") : ""}`);
logger.debug(`CMD: ${cmd} ${args ? args.join(" ") : ""}`);
const opts = options || {};
return new Promise((resolve, reject) => {


@@ -1,5 +1,4 @@
import signale from "signale";
import { isDebugMode } from "./lib/config.js";
const opts = {
logLevel: "info",
@@ -15,12 +14,5 @@ const certbot = new signale.Signale({ scope: "Certbot ", ...opts });
const importer = new signale.Signale({ scope: "Importer ", ...opts });
const setup = new signale.Signale({ scope: "Setup ", ...opts });
const ipRanges = new signale.Signale({ scope: "IP Ranges", ...opts });
const remoteVersion = new signale.Signale({ scope: "Remote Version", ...opts });
const debug = (logger, ...args) => {
if (isDebugMode()) {
logger.debug(...args);
}
};
export { debug, global, migrate, express, access, nginx, ssl, certbot, importer, setup, ipRanges, remoteVersion };
export { global, migrate, express, access, nginx, ssl, certbot, importer, setup, ipRanges };
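One side of this hunk defines a `debug(logger, ...)` helper gated on `isDebugMode()` while the other drops it; this is the pattern behind the many matching `debug(logger, ...)` / `logger.debug(...)` call-site swaps across the files in this diff. The two styles side by side, as a sketch:

```js
// Gated centrally: the message is only emitted when isDebugMode() is true.
debug(logger, "CMD:", cmd);
// Direct call: any filtering is left to the logger's own configuration.
logger.debug("CMD:", cmd);
```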


@@ -2,9 +2,9 @@ import db from "./db.js";
import { migrate as logger } from "./logger.js";
const migrateUp = async () => {
const version = await db().migrate.currentVersion();
const version = await db.migrate.currentVersion();
logger.info("Current database version:", version);
return await db().migrate.latest({
return await db.migrate.latest({
tableName: "migrations",
directory: "migrations",
});


@@ -1,50 +0,0 @@
import { migrate as logger } from "../logger.js";
const migrateName = "redirect_auto_scheme";
/**
* Migrate
*
* @see http://knexjs.org/#Schema
*
* @param {Object} knex
* @returns {Promise}
*/
const up = (knex) => {
logger.info(`[${migrateName}] Migrating Up...`);
return knex.schema
.table("redirection_host", async (table) => {
// change the column default from $scheme to auto
await table.string("forward_scheme").notNull().defaultTo("auto").alter();
await knex('redirection_host')
.where('forward_scheme', '$scheme')
.update({ forward_scheme: 'auto' });
})
.then(() => {
logger.info(`[${migrateName}] redirection_host Table altered`);
});
};
/**
* Undo Migrate
*
* @param {Object} knex
* @returns {Promise}
*/
const down = (knex) => {
logger.info(`[${migrateName}] Migrating Down...`);
return knex.schema
.table("redirection_host", async (table) => {
await table.string("forward_scheme").notNull().defaultTo("$scheme").alter();
await knex('redirection_host')
.where('forward_scheme', 'auto')
.update({ forward_scheme: '$scheme' });
})
.then(() => {
logger.info(`[${migrateName}] redirection_host Table altered`);
});
};
export { up, down };


@@ -1,43 +0,0 @@
import { migrate as logger } from "../logger.js";
const migrateName = "trust_forwarded_proto";
/**
* Migrate
*
* @see http://knexjs.org/#Schema
*
* @param {Object} knex
* @returns {Promise}
*/
const up = function (knex) {
logger.info(`[${migrateName}] Migrating Up...`);
return knex.schema
.alterTable('proxy_host', (table) => {
table.tinyint('trust_forwarded_proto').notNullable().defaultTo(0);
})
.then(() => {
logger.info(`[${migrateName}] proxy_host Table altered`);
});
};
/**
* Undo Migrate
*
* @param {Object} knex
* @returns {Promise}
*/
const down = function (knex) {
logger.info(`[${migrateName}] Migrating Down...`);
return knex.schema
.alterTable('proxy_host', (table) => {
table.dropColumn('trust_forwarded_proto');
})
.then(() => {
logger.info(`[${migrateName}] proxy_host Table altered`);
});
};
export { up, down };


@@ -10,7 +10,7 @@ import now from "./now_helper.js";
import ProxyHostModel from "./proxy_host.js";
import User from "./user.js";
Model.knex(db());
Model.knex(db);
const boolFields = ["is_deleted", "satisfy_any", "pass_auth"];


@@ -6,7 +6,7 @@ import db from "../db.js";
import accessListModel from "./access_list.js";
import now from "./now_helper.js";
Model.knex(db());
Model.knex(db);
class AccessListAuth extends Model {
$beforeInsert() {


@@ -6,7 +6,7 @@ import db from "../db.js";
import accessListModel from "./access_list.js";
import now from "./now_helper.js";
Model.knex(db());
Model.knex(db);
class AccessListClient extends Model {
$beforeInsert() {


@@ -6,7 +6,7 @@ import db from "../db.js";
import now from "./now_helper.js";
import User from "./user.js";
Model.knex(db());
Model.knex(db);
class AuditLog extends Model {
$beforeInsert() {


@@ -8,7 +8,7 @@ import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.j
import now from "./now_helper.js";
import User from "./user.js";
Model.knex(db());
Model.knex(db);
const boolFields = ["is_deleted"];


@@ -11,7 +11,7 @@ import redirectionHostModel from "./redirection_host.js";
import streamModel from "./stream.js";
import userModel from "./user.js";
Model.knex(db());
Model.knex(db);
const boolFields = ["is_deleted"];


@@ -8,7 +8,7 @@ import Certificate from "./certificate.js";
import now from "./now_helper.js";
import User from "./user.js";
Model.knex(db());
Model.knex(db);
const boolFields = ["is_deleted", "ssl_forced", "http2_support", "enabled", "hsts_enabled", "hsts_subdomains"];


@@ -2,7 +2,7 @@ import { Model } from "objection";
import db from "../db.js";
import { isSqlite } from "../lib/config.js";
Model.knex(db());
Model.knex(db);
export default () => {
if (isSqlite()) {


@@ -9,7 +9,7 @@ import Certificate from "./certificate.js";
import now from "./now_helper.js";
import User from "./user.js";
Model.knex(db());
Model.knex(db);
const boolFields = [
"is_deleted",
@@ -21,7 +21,6 @@ const boolFields = [
"enabled",
"hsts_enabled",
"hsts_subdomains",
"trust_forwarded_proto",
];
class ProxyHost extends Model {


@@ -8,7 +8,7 @@ import Certificate from "./certificate.js";
import now from "./now_helper.js";
import User from "./user.js";
Model.knex(db());
Model.knex(db);
const boolFields = [
"is_deleted",


@@ -4,7 +4,7 @@
import { Model } from "objection";
import db from "../db.js";
Model.knex(db());
Model.knex(db);
class Setting extends Model {
$beforeInsert () {


@@ -5,7 +5,7 @@ import Certificate from "./certificate.js";
import now from "./now_helper.js";
import User from "./user.js";
Model.knex(db());
Model.knex(db);
const boolFields = ["is_deleted", "enabled", "tcp_forwarding", "udp_forwarding"];


@@ -7,7 +7,7 @@ import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.j
import now from "./now_helper.js";
import UserPermission from "./user_permission.js";
Model.knex(db());
Model.knex(db);
const boolFields = ["is_deleted", "is_disabled"];


@@ -5,7 +5,7 @@ import { Model } from "objection";
import db from "../db.js";
import now from "./now_helper.js";
Model.knex(db());
Model.knex(db);
class UserPermission extends Model {
$beforeInsert () {


@@ -12,38 +12,35 @@
"validate-schema": "node validate-schema.js"
},
"dependencies": {
"@apidevtools/json-schema-ref-parser": "^14.1.1",
"ajv": "^8.18.0",
"archiver": "^7.0.1",
"@apidevtools/json-schema-ref-parser": "^11.7.0",
"ajv": "^8.17.1",
"archiver": "^5.3.0",
"batchflow": "^0.4.0",
"bcrypt": "^6.0.0",
"better-sqlite3": "^12.6.2",
"body-parser": "^2.2.2",
"bcrypt": "^5.0.0",
"body-parser": "^1.20.3",
"compression": "^1.7.4",
"express": "^5.2.1",
"express": "^4.20.0",
"express-fileupload": "^1.5.2",
"gravatar": "^1.8.2",
"jsonwebtoken": "^9.0.3",
"knex": "3.1.0",
"liquidjs": "10.24.0",
"lodash": "^4.17.23",
"jsonwebtoken": "^9.0.2",
"knex": "2.4.2",
"liquidjs": "10.6.1",
"lodash": "^4.17.21",
"moment": "^2.30.1",
"mysql2": "^3.17.1",
"mysql2": "^3.15.3",
"node-rsa": "^1.1.1",
"objection": "3.1.5",
"otplib": "^13.3.0",
"objection": "3.0.1",
"path": "^0.12.7",
"pg": "^8.18.0",
"proxy-agent": "^6.5.0",
"pg": "^8.16.3",
"signale": "1.4.0",
"sqlite3": "^5.1.7",
"temp-write": "^4.0.0"
},
"devDependencies": {
"@apidevtools/swagger-parser": "^12.1.0",
"@biomejs/biome": "^2.3.14",
"chalk": "5.6.2",
"nodemon": "^3.1.11"
"@apidevtools/swagger-parser": "^10.1.0",
"@biomejs/biome": "^2.3.2",
"chalk": "4.1.2",
"nodemon": "^2.0.2"
},
"signale": {
"displayDate": true,


@@ -2,7 +2,7 @@ import express from "express";
import internalAuditLog from "../internal/audit-log.js";
import jwtdecode from "../lib/express/jwt-decode.js";
import validator from "../lib/validator/index.js";
import { debug, express as logger } from "../logger.js";
import { express as logger } from "../logger.js";
const router = express.Router({
caseSensitive: true,
@@ -47,7 +47,7 @@ router
const rows = await internalAuditLog.getAll(res.locals.access, data.expand, data.query);
res.status(200).send(rows);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
@@ -99,7 +99,7 @@ router
});
res.status(200).send(item);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});


@@ -14,7 +14,6 @@ import schemaRoutes from "./schema.js";
import settingsRoutes from "./settings.js";
import tokensRoutes from "./tokens.js";
import usersRoutes from "./users.js";
import versionRoutes from "./version.js";
const router = express.Router({
caseSensitive: true,
@@ -47,7 +46,6 @@ router.use("/users", usersRoutes);
router.use("/audit-log", auditLogRoutes);
router.use("/reports", reportsRoutes);
router.use("/settings", settingsRoutes);
router.use("/version", versionRoutes);
router.use("/nginx/proxy-hosts", proxyHostsRoutes);
router.use("/nginx/redirection-hosts", redirectionHostsRoutes);
router.use("/nginx/dead-hosts", deadHostsRoutes);


@@ -3,7 +3,7 @@ import internalAccessList from "../../internal/access-list.js";
import jwtdecode from "../../lib/express/jwt-decode.js";
import apiValidator from "../../lib/validator/api.js";
import validator from "../../lib/validator/index.js";
import { debug, express as logger } from "../../logger.js";
import { express as logger } from "../../logger.js";
import { getValidationSchema } from "../../schema/index.js";
const router = express.Router({
@@ -49,7 +49,7 @@ router
const rows = await internalAccessList.getAll(res.locals.access, data.expand, data.query);
res.status(200).send(rows);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
})
@@ -65,7 +65,7 @@ router
const result = await internalAccessList.create(res.locals.access, payload);
res.status(201).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
@@ -113,7 +113,7 @@ router
});
res.status(200).send(row);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
})
@@ -130,7 +130,7 @@ router
const result = await internalAccessList.update(res.locals.access, payload);
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
})
@@ -147,7 +147,7 @@ router
});
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});


@@ -5,7 +5,7 @@ import errs from "../../lib/error.js";
import jwtdecode from "../../lib/express/jwt-decode.js";
import apiValidator from "../../lib/validator/api.js";
import validator from "../../lib/validator/index.js";
import { debug, express as logger } from "../../logger.js";
import { express as logger } from "../../logger.js";
import { getValidationSchema } from "../../schema/index.js";
const router = express.Router({
@@ -58,7 +58,7 @@ router
);
res.status(200).send(rows);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
})
@@ -81,7 +81,7 @@ router
);
res.status(201).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
@@ -115,7 +115,7 @@ router
clean.sort((a, b) => a.name.localeCompare(b.name));
res.status(200).send(clean);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
@@ -151,7 +151,7 @@ router
);
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
@@ -185,7 +185,7 @@ router
});
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
@@ -236,7 +236,7 @@ router
});
res.status(200).send(row);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
})
@@ -253,7 +253,7 @@ router
});
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
@@ -288,7 +288,7 @@ router
});
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
@@ -318,7 +318,7 @@ router
});
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
@@ -347,7 +347,7 @@ router
});
res.status(200).download(result.fileName);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
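
Every handler above ends with the same tail: catch, log at debug level, forward to next(err). A small wrapper could factor out that repetition; this is an editor's sketch, not code from this changeset:

// Hypothetical helper: run an async handler, log failures, and delegate
// the error to Express' error-handling middleware.
const catchErrors = (logger, handler) => async (req, res, next) => {
    try {
        await handler(req, res);
    } catch (err) {
        logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
        next(err);
    }
};

// Usage sketch (handler body assumed for illustration):
// router.get("/", catchErrors(logger, async (_req, res) => {
//     res.status(200).send(await internalCertificate.getAll(res.locals.access));
// }));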


@@ -3,7 +3,7 @@ import internalDeadHost from "../../internal/dead-host.js";
import jwtdecode from "../../lib/express/jwt-decode.js";
import apiValidator from "../../lib/validator/api.js";
import validator from "../../lib/validator/index.js";
import { debug, express as logger } from "../../logger.js";
import { express as logger } from "../../logger.js";
import { getValidationSchema } from "../../schema/index.js";
const router = express.Router({
@@ -49,7 +49,7 @@ router
const rows = await internalDeadHost.getAll(res.locals.access, data.expand, data.query);
res.status(200).send(rows);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
})
@@ -65,7 +65,7 @@ router
const result = await internalDeadHost.create(res.locals.access, payload);
res.status(201).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
@@ -113,7 +113,7 @@ router
});
res.status(200).send(row);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
})
@@ -130,7 +130,7 @@ router
const result = await internalDeadHost.update(res.locals.access, payload);
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
})
@@ -147,7 +147,7 @@ router
});
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
@@ -174,7 +174,7 @@ router
});
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
@@ -199,7 +199,7 @@ router
const result = internalDeadHost.disable(res.locals.access, { id: Number.parseInt(req.params.host_id, 10) });
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});


@@ -3,7 +3,7 @@ import internalProxyHost from "../../internal/proxy-host.js";
import jwtdecode from "../../lib/express/jwt-decode.js";
import apiValidator from "../../lib/validator/api.js";
import validator from "../../lib/validator/index.js";
import { debug, express as logger } from "../../logger.js";
import { express as logger } from "../../logger.js";
import { getValidationSchema } from "../../schema/index.js";
const router = express.Router({
@@ -49,7 +49,7 @@ router
const rows = await internalProxyHost.getAll(res.locals.access, data.expand, data.query);
res.status(200).send(rows);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
})
@@ -65,7 +65,7 @@ router
const result = await internalProxyHost.create(res.locals.access, payload);
res.status(201).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err} ${JSON.stringify(err.debug, null, 2)}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err} ${JSON.stringify(err.debug, null, 2)}`);
next(err);
}
});
@@ -113,7 +113,7 @@ router
});
res.status(200).send(row);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
})
@@ -130,7 +130,7 @@ router
const result = await internalProxyHost.update(res.locals.access, payload);
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
})
@@ -147,7 +147,7 @@ router
});
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
@@ -174,7 +174,7 @@ router
});
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
@@ -201,7 +201,7 @@ router
});
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});


@@ -3,7 +3,7 @@ import internalRedirectionHost from "../../internal/redirection-host.js";
import jwtdecode from "../../lib/express/jwt-decode.js";
import apiValidator from "../../lib/validator/api.js";
import validator from "../../lib/validator/index.js";
import { debug, express as logger } from "../../logger.js";
import { express as logger } from "../../logger.js";
import { getValidationSchema } from "../../schema/index.js";
const router = express.Router({
@@ -49,7 +49,7 @@ router
const rows = await internalRedirectionHost.getAll(res.locals.access, data.expand, data.query);
res.status(200).send(rows);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
})
@@ -65,7 +65,7 @@ router
const result = await internalRedirectionHost.create(res.locals.access, payload);
res.status(201).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
@@ -113,7 +113,7 @@ router
});
res.status(200).send(row);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
})
@@ -133,7 +133,7 @@ router
const result = await internalRedirectionHost.update(res.locals.access, payload);
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
})
@@ -150,7 +150,7 @@ router
});
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
@@ -177,7 +177,7 @@ router
});
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
@@ -204,7 +204,7 @@ router
});
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});


@@ -3,7 +3,7 @@ import internalStream from "../../internal/stream.js";
import jwtdecode from "../../lib/express/jwt-decode.js";
import apiValidator from "../../lib/validator/api.js";
import validator from "../../lib/validator/index.js";
import { debug, express as logger } from "../../logger.js";
import { express as logger } from "../../logger.js";
import { getValidationSchema } from "../../schema/index.js";
const router = express.Router({
@@ -49,7 +49,7 @@ router
const rows = await internalStream.getAll(res.locals.access, data.expand, data.query);
res.status(200).send(rows);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
})
@@ -65,7 +65,7 @@ router
const result = await internalStream.create(res.locals.access, payload);
res.status(201).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
@@ -113,7 +113,7 @@ router
});
res.status(200).send(row);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
})
@@ -130,7 +130,7 @@ router
const result = await internalStream.update(res.locals.access, payload);
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
})
@@ -147,7 +147,7 @@ router
});
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
@@ -174,7 +174,7 @@ router
});
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
@@ -201,7 +201,7 @@ router
});
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});


@@ -1,7 +1,7 @@
import express from "express";
import internalReport from "../internal/report.js";
import jwtdecode from "../lib/express/jwt-decode.js";
import { debug, express as logger } from "../logger.js";
import { express as logger } from "../logger.js";
const router = express.Router({
caseSensitive: true,
@@ -24,7 +24,7 @@ router
const data = await internalReport.getHostsReport(res.locals.access);
res.status(200).send(data);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});


@@ -1,5 +1,5 @@
import express from "express";
import { debug, express as logger } from "../logger.js";
import { express as logger } from "../logger.js";
import PACKAGE from "../package.json" with { type: "json" };
import { getCompiledSchema } from "../schema/index.js";
@@ -36,7 +36,7 @@ router
swaggerJSON.servers[0].url = `${origin}/api`;
res.status(200).send(swaggerJSON);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});


@@ -3,7 +3,7 @@ import internalSetting from "../internal/setting.js";
import jwtdecode from "../lib/express/jwt-decode.js";
import apiValidator from "../lib/validator/api.js";
import validator from "../lib/validator/index.js";
import { debug, express as logger } from "../logger.js";
import { express as logger } from "../logger.js";
import { getValidationSchema } from "../schema/index.js";
const router = express.Router({
@@ -32,7 +32,7 @@ router
const rows = await internalSetting.getAll(res.locals.access);
res.status(200).send(rows);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
@@ -76,7 +76,7 @@ router
});
res.status(200).send(row);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
})
@@ -93,7 +93,7 @@ router
const result = await internalSetting.update(res.locals.access, payload);
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});


@@ -2,7 +2,7 @@ import express from "express";
import internalToken from "../internal/token.js";
import jwtdecode from "../lib/express/jwt-decode.js";
import apiValidator from "../lib/validator/api.js";
import { debug, express as logger } from "../logger.js";
import { express as logger } from "../logger.js";
import { getValidationSchema } from "../schema/index.js";
const router = express.Router({
@@ -32,7 +32,7 @@ router
});
res.status(200).send(data);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
})
@@ -48,29 +48,7 @@ router
const result = await internalToken.getTokenFromEmail(data);
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
router
.route("/2fa")
.options((_, res) => {
res.sendStatus(204);
})
/**
* POST /tokens/2fa
*
* Verify 2FA code and get full token
*/
.post(async (req, res, next) => {
try {
const { challenge_token, code } = await apiValidator(getValidationSchema("/tokens/2fa", "post"), req.body);
const result = await internalToken.verify2FA(challenge_token, code);
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
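
Together with the token-challenge component schema removed further down, the deleted /tokens/2fa route implies a two-step login: POST /api/tokens may answer with requires_2fa and a challenge_token instead of a full token, which is then exchanged along with a TOTP code. A client-side sketch (the request field names for the first step are assumed, not shown in this diff):

// Step 1: normal login. May return { requires_2fa, challenge_token }.
const login = await fetch("/api/tokens", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ identity: "admin@example.com", secret: "changeme" }),
}).then((r) => r.json());

if (login.requires_2fa) {
    // Step 2: trade the challenge token plus a 6-8 digit code for a full token.
    const full = await fetch("/api/tokens/2fa", {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ challenge_token: login.challenge_token, code: "012345" }),
    }).then((r) => r.json());
    console.log(full.expires, full.token);
}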


@@ -1,5 +1,4 @@
import express from "express";
import internal2FA from "../internal/2fa.js";
import internalUser from "../internal/user.js";
import Access from "../lib/access.js";
import { isCI } from "../lib/config.js";
@@ -8,7 +7,7 @@ import jwtdecode from "../lib/express/jwt-decode.js";
import userIdFromMe from "../lib/express/user-id-from-me.js";
import apiValidator from "../lib/validator/api.js";
import validator from "../lib/validator/index.js";
import { debug, express as logger } from "../logger.js";
import { express as logger } from "../logger.js";
import { getValidationSchema } from "../schema/index.js";
import { isSetup } from "../setup.js";
@@ -62,7 +61,7 @@ router
);
res.status(200).send(users);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
})
@@ -102,7 +101,7 @@ router
const user = await internalUser.create(res.locals.access, payload);
res.status(201).send(user);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
})
@@ -125,7 +124,7 @@ router
await internalUser.deleteAll();
res.status(200).send(true);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
return;
@@ -186,7 +185,7 @@ router
});
res.status(200).send(user);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
})
@@ -206,7 +205,7 @@ router
const result = await internalUser.update(res.locals.access, payload);
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
})
@@ -223,7 +222,7 @@ router
});
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
@@ -256,7 +255,7 @@ router
const result = await internalUser.setPassword(res.locals.access, payload);
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
@@ -292,7 +291,7 @@ router
);
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
@@ -321,133 +320,7 @@ router
});
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
/**
* User 2FA status
*
* /api/users/123/2fa
*/
router
.route("/:user_id/2fa")
.options((_, res) => {
res.sendStatus(204);
})
.all(jwtdecode())
.all(userIdFromMe)
/**
* POST /api/users/123/2fa
*
* Start 2FA setup, returns QR code URL
*/
.post(async (req, res, next) => {
try {
const result = await internal2FA.startSetup(res.locals.access, req.params.user_id);
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
})
/**
* GET /api/users/123/2fa
*
* Get 2FA status for a user
*/
.get(async (req, res, next) => {
try {
const status = await internal2FA.getStatus(res.locals.access, req.params.user_id);
res.status(200).send(status);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
})
/**
* DELETE /api/users/123/2fa?code=XXXXXX
*
* Disable 2FA for a user
*/
.delete(async (req, res, next) => {
try {
const code = typeof req.query.code === "string" ? req.query.code : null;
if (!code) {
throw new errs.ValidationError("Missing required parameter: code");
}
await internal2FA.disable(res.locals.access, req.params.user_id, code);
res.status(200).send(true);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
/**
* User 2FA enable
*
* /api/users/123/2fa/enable
*/
router
.route("/:user_id/2fa/enable")
.options((_, res) => {
res.sendStatus(204);
})
.all(jwtdecode())
.all(userIdFromMe)
/**
* POST /api/users/123/2fa/enable
*
* Verify code and enable 2FA
*/
.post(async (req, res, next) => {
try {
const { code } = await apiValidator(
getValidationSchema("/users/{userID}/2fa/enable", "post"),
req.body,
);
const result = await internal2FA.enable(res.locals.access, req.params.user_id, code);
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
/**
* User 2FA backup codes
*
* /api/users/123/2fa/backup-codes
*/
router
.route("/:user_id/2fa/backup-codes")
.options((_, res) => {
res.sendStatus(204);
})
.all(jwtdecode())
.all(userIdFromMe)
/**
* POST /api/users/123/2fa/backup-codes
*
* Regenerate backup codes
*/
.post(async (req, res, next) => {
try {
const { code } = await apiValidator(
getValidationSchema("/users/{userID}/2fa/backup-codes", "post"),
req.body,
);
const result = await internal2FA.regenerateBackupCodes(res.locals.access, req.params.user_id, code);
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
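
The per-user 2FA routes removed above form a complete lifecycle: POST /api/users/:id/2fa starts setup and returns an otpauth URL, POST /api/users/:id/2fa/enable verifies the first code and returns backup codes, GET reports status, and DELETE with a ?code= query disables it. A round-trip sketch (the bearer token variable is assumed to exist already):

const headers = { "Content-Type": "application/json", Authorization: `Bearer ${token}` };

// Start setup: returns { secret, otpauth_url } for QR display.
const setup = await fetch("/api/users/2/2fa", { method: "POST", headers }).then((r) => r.json());

// After scanning the QR code, verify the first TOTP code to enable 2FA;
// the response contains one-time backup codes.
const { backup_codes } = await fetch("/api/users/2/2fa/enable", {
    method: "POST",
    headers,
    body: JSON.stringify({ code: "123456" }),
}).then((r) => r.json());

// Later, disable with a current code passed as a query parameter.
await fetch("/api/users/2/2fa?code=123456", { method: "DELETE", headers });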


@@ -1,40 +0,0 @@
import express from "express";
import internalRemoteVersion from "../internal/remote-version.js";
import { debug, express as logger } from "../logger.js";
const router = express.Router({
caseSensitive: true,
strict: true,
mergeParams: true,
});
/**
* /api/version/check
*/
router
.route("/check")
.options((_, res) => {
res.sendStatus(204);
})
/**
* GET /api/version/check
*
* Check for available updates
*/
.get(async (req, res, _next) => {
try {
const data = await internalRemoteVersion.get();
res.status(200).send(data);
} catch (error) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${error}`);
// Send 200 even though there's an error to avoid triggering update checks repeatedly
res.status(200).send({
current: null,
latest: null,
update_available: false,
});
}
});
export default router;
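
Because the handler above deliberately answers 200 with null fields on failure, a consumer only needs to branch on update_available (response shape per the check-version-object schema below):

const { current, latest, update_available } = await fetch("/api/version/check").then((r) => r.json());
if (update_available) {
    console.log(`Update available: ${current} -> ${latest}`);
}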


@@ -71,11 +71,6 @@
"propagation_seconds": {
"type": "integer",
"minimum": 0
},
"key_type": {
"type": "string",
"enum": ["rsa", "ecdsa"],
"default": "rsa"
}
},
"example": {


@@ -1,23 +0,0 @@
{
"type": "object",
"description": "Check Version object",
"additionalProperties": false,
"required": ["current", "latest", "update_available"],
"properties": {
"current": {
"type": ["string", "null"],
"description": "Current version string",
"example": "v2.10.1"
},
"latest": {
"type": ["string", "null"],
"description": "Latest version string",
"example": "v2.13.4"
},
"update_available": {
"type": "boolean",
"description": "Whether there's an update available",
"example": true
}
}
}


@@ -22,8 +22,7 @@
"enabled",
"locations",
"hsts_enabled",
"hsts_subdomains",
"trust_forwarded_proto"
"hsts_subdomains"
],
"properties": {
"id": {
@@ -142,11 +141,6 @@
"hsts_subdomains": {
"$ref": "../common.json#/properties/hsts_subdomains"
},
"trust_forwarded_proto":{
"type": "boolean",
"description": "Trust the forwarded headers",
"example": false
},
"certificate": {
"oneOf": [
{


@@ -1,18 +0,0 @@
{
"type": "object",
"description": "Token object",
"required": ["requires_2fa", "challenge_token"],
"additionalProperties": false,
"properties": {
"requires_2fa": {
"description": "Whether this token request requires two-factor authentication",
"example": true,
"type": "boolean"
},
"challenge_token": {
"description": "Challenge Token used in subsequent 2FA verification",
"example": "eyJhbGciOiJSUzUxMiIsInR5cCI6IkpXVCJ9.ey...xaHKYr3Kk6MvkUjcC4",
"type": "string"
}
}
}


@@ -58,8 +58,7 @@
"enabled": true,
"locations": [],
"hsts_enabled": false,
"hsts_subdomains": false,
"trust_forwarded_proto": false
"hsts_subdomains": false
}
]
}


@@ -56,7 +56,6 @@
"locations": [],
"hsts_enabled": false,
"hsts_subdomains": false,
"trust_forwarded_proto": false,
"owner": {
"id": 1,
"created_on": "2025-10-28T00:50:24.000Z",


@@ -56,9 +56,6 @@
"hsts_subdomains": {
"$ref": "../../../../components/proxy-host-object.json#/properties/hsts_subdomains"
},
"trust_forwarded_proto": {
"$ref": "../../../../components/proxy-host-object.json#/properties/trust_forwarded_proto"
},
"http2_support": {
"$ref": "../../../../components/proxy-host-object.json#/properties/http2_support"
},
@@ -125,7 +122,6 @@
"locations": [],
"hsts_enabled": false,
"hsts_subdomains": false,
"trust_forwarded_proto": false,
"owner": {
"id": 1,
"created_on": "2025-10-28T00:50:24.000Z",


@@ -48,9 +48,6 @@
"hsts_subdomains": {
"$ref": "../../../components/proxy-host-object.json#/properties/hsts_subdomains"
},
"trust_forwarded_proto": {
"$ref": "../../../components/proxy-host-object.json#/properties/trust_forwarded_proto"
},
"http2_support": {
"$ref": "../../../components/proxy-host-object.json#/properties/http2_support"
},
@@ -122,7 +119,6 @@
"locations": [],
"hsts_enabled": false,
"hsts_subdomains": false,
"trust_forwarded_proto": false,
"certificate": null,
"owner": {
"id": 1,


@@ -1,55 +0,0 @@
{
"operationId": "loginWith2FA",
"summary": "Verify 2FA code and get full token",
"tags": ["tokens"],
"requestBody": {
"description": "2fa Challenge Payload",
"required": true,
"content": {
"application/json": {
"schema": {
"additionalProperties": false,
"properties": {
"challenge_token": {
"minLength": 1,
"type": "string",
"example": "eyJhbGciOiJSUzUxMiIsInR5cCI6IkpXVCJ9.ey...xaHKYr3Kk6MvkUjcC4"
},
"code": {
"minLength": 6,
"maxLength": 8,
"type": "string",
"example": "012345"
}
},
"required": ["challenge_token", "code"],
"type": "object"
},
"example": {
"challenge_token": "eyJhbGciOiJSUzUxMiIsInR5cCI6IkpXVCJ9.ey...xaHKYr3Kk6MvkUjcC4",
"code": "012345"
}
}
}
},
"responses": {
"200": {
"content": {
"application/json": {
"examples": {
"default": {
"value": {
"expires": "2025-02-04T20:40:46.340Z",
"token": "eyJhbGciOiJSUzUxMiIsInR5cCI6IkpXVCJ9.ey...xaHKYr3Kk6MvkUjcC4"
}
}
},
"schema": {
"$ref": "../../../components/token-object.json"
}
}
},
"description": "200 response"
}
}
}


@@ -50,14 +50,7 @@
}
},
"schema": {
"oneOf": [
{
"$ref": "../../components/token-object.json"
},
{
"$ref": "../../components/token-challenge.json"
}
]
}
}
},


@@ -1,92 +0,0 @@
{
"operationId": "regenUser2faCodes",
"summary": "Regenerate 2FA backup codes",
"tags": ["users"],
"parameters": [
{
"in": "path",
"name": "userID",
"schema": {
"type": "integer",
"minimum": 1
},
"required": true,
"description": "User ID",
"example": 2
}
],
"requestBody": {
"description": "Verification Payload",
"required": true,
"content": {
"application/json": {
"schema": {
"additionalProperties": false,
"properties": {
"code": {
"minLength": 6,
"maxLength": 8,
"type": "string",
"example": "123456"
}
},
"required": ["code"],
"type": "object"
},
"example": {
"code": "123456"
}
}
}
},
"responses": {
"200": {
"content": {
"application/json": {
"examples": {
"default": {
"value": {
"backup_codes": [
"6CD7CB06",
"495302F3",
"D8037852",
"A6FFC956",
"BC1A1851",
"A05E644F",
"A406D2E8",
"0AE3C522"
]
}
}
},
"schema": {
"type": "object",
"required": ["backup_codes"],
"additionalProperties": false,
"properties": {
"backup_codes": {
"description": "Backup codes",
"example": [
"6CD7CB06",
"495302F3",
"D8037852",
"A6FFC956",
"BC1A1851",
"A05E644F",
"A406D2E8",
"0AE3C522"
],
"type": "array",
"items": {
"type": "string",
"example": "6CD7CB06"
}
}
}
}
}
},
"description": "200 response"
}
}
}


@@ -1,48 +0,0 @@
{
"operationId": "disableUser2fa",
"summary": "Disable 2fa for user",
"tags": ["users"],
"parameters": [
{
"in": "path",
"name": "userID",
"schema": {
"type": "integer",
"minimum": 1
},
"required": true,
"description": "User ID",
"example": 2
},
{
"in": "query",
"name": "code",
"schema": {
"type": "string",
"minLength": 6,
"maxLength": 6,
"example": "012345"
},
"required": true,
"description": "2fa Code",
"example": "012345"
}
],
"responses": {
"200": {
"content": {
"application/json": {
"examples": {
"default": {
"value": true
}
},
"schema": {
"type": "boolean"
}
}
},
"description": "200 response"
}
}
}


@@ -1,92 +0,0 @@
{
"operationId": "enableUser2fa",
"summary": "Verify code and enable 2FA",
"tags": ["users"],
"parameters": [
{
"in": "path",
"name": "userID",
"schema": {
"type": "integer",
"minimum": 1
},
"required": true,
"description": "User ID",
"example": 2
}
],
"requestBody": {
"description": "Verification Payload",
"required": true,
"content": {
"application/json": {
"schema": {
"additionalProperties": false,
"properties": {
"code": {
"minLength": 6,
"maxLength": 8,
"type": "string",
"example": "123456"
}
},
"required": ["code"],
"type": "object"
},
"example": {
"code": "123456"
}
}
}
},
"responses": {
"200": {
"content": {
"application/json": {
"examples": {
"default": {
"value": {
"backup_codes": [
"6CD7CB06",
"495302F3",
"D8037852",
"A6FFC956",
"BC1A1851",
"A05E644F",
"A406D2E8",
"0AE3C522"
]
}
}
},
"schema": {
"type": "object",
"required": ["backup_codes"],
"additionalProperties": false,
"properties": {
"backup_codes": {
"description": "Backup codes",
"example": [
"6CD7CB06",
"495302F3",
"D8037852",
"A6FFC956",
"BC1A1851",
"A05E644F",
"A406D2E8",
"0AE3C522"
],
"type": "array",
"items": {
"type": "string",
"example": "6CD7CB06"
}
}
}
}
}
},
"description": "200 response"
}
}
}


@@ -1,57 +0,0 @@
{
"operationId": "getUser2faStatus",
"summary": "Get user 2fa Status",
"tags": ["users"],
"security": [
{
"bearerAuth": []
}
],
"parameters": [
{
"in": "path",
"name": "userID",
"schema": {
"type": "integer",
"minimum": 1
},
"required": true,
"description": "User ID",
"example": 2
}
],
"responses": {
"200": {
"description": "200 response",
"content": {
"application/json": {
"examples": {
"default": {
"value": {
"enabled": false,
"backup_codes_remaining": 0
}
}
},
"schema": {
"type": "object",
"additionalProperties": false,
"required": ["enabled", "backup_codes_remaining"],
"properties": {
"enabled": {
"type": "boolean",
"description": "Is 2FA enabled for this user",
"example": true
},
"backup_codes_remaining": {
"type": "integer",
"description": "Number of remaining backup codes for this user",
"example": 5
}
}
}
}
}
}
}
}


@@ -1,52 +0,0 @@
{
"operationId": "setupUser2fa",
"summary": "Start 2FA setup, returns QR code URL",
"tags": ["users"],
"parameters": [
{
"in": "path",
"name": "userID",
"schema": {
"type": "integer",
"minimum": 1
},
"required": true,
"description": "User ID",
"example": 2
}
],
"responses": {
"200": {
"content": {
"application/json": {
"examples": {
"default": {
"value": {
"secret": "JZYCEBIEEJYUGPQM",
"otpauth_url": "otpauth://totp/Nginx%20Proxy%20Manager:jc%40jc21.com?secret=JZYCEBIEEJYUGPQM&period=30&digits=6&algorithm=SHA1&issuer=Nginx%20Proxy%20Manager"
}
}
},
"schema": {
"type": "object",
"required": ["secret", "otpauth_url"],
"additionalProperties": false,
"properties": {
"secret": {
"description": "TOTP Secret",
"example": "JZYCEBIEEJYUGPQM",
"type": "string"
},
"otpauth_url": {
"description": "OTP Auth URL for QR Code generation",
"example": "otpauth://totp/Nginx%20Proxy%20Manager:jc%40jc21.com?secret=JZYCEBIEEJYUGPQM&period=30&digits=6&algorithm=SHA1&issuer=Nginx%20Proxy%20Manager",
"type": "string"
}
}
}
}
},
"description": "200 response"
}
}
}
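
The otpauth_url above is the standard TOTP provisioning URI that authenticator apps consume; a frontend would typically render it as a QR code. A sketch using the qrcode npm package (an assumption; it is not a dependency shown in this diff):

import QRCode from "qrcode";

// setup is the response body of POST /api/users/{userID}/2fa (see the flow sketched earlier).
const dataUrl = await QRCode.toDataURL(setup.otpauth_url);
// dataUrl can now be used as the src of an <img> element.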


@@ -1,26 +0,0 @@
{
"operationId": "checkVersion",
"summary": "Returns any new version data from github",
"tags": ["public"],
"responses": {
"200": {
"description": "200 response",
"content": {
"application/json": {
"examples": {
"default": {
"value": {
"current": "v2.12.0",
"latest": "v2.13.4",
"update_available": true
}
}
},
"schema": {
"$ref": "../../../components/check-version-object.json"
}
}
}
}
}
}


@@ -293,16 +293,6 @@
"$ref": "./paths/tokens/post.json"
}
},
"/tokens/2fa": {
"post": {
"$ref": "./paths/tokens/2fa/post.json"
}
},
"/version/check": {
"get": {
"$ref": "./paths/version/check/get.json"
}
},
"/users": {
"get": {
"$ref": "./paths/users/get.json"
@@ -322,27 +312,6 @@
"$ref": "./paths/users/userID/delete.json"
}
},
"/users/{userID}/2fa": {
"post": {
"$ref": "./paths/users/userID/2fa/post.json"
},
"get": {
"$ref": "./paths/users/userID/2fa/get.json"
},
"delete": {
"$ref": "./paths/users/userID/2fa/delete.json"
}
},
"/users/{userID}/2fa/enable": {
"post": {
"$ref": "./paths/users/userID/2fa/enable/post.json"
}
},
"/users/{userID}/2fa/backup-codes": {
"post": {
"$ref": "./paths/users/userID/2fa/backup-codes/post.json"
}
},
"/users/{userID}/auth": {
"put": {
"$ref": "./paths/users/userID/auth/put.json"


@@ -37,7 +37,7 @@ const setupDefaultUser = async () => {
const data = {
is_deleted: 0,
email: initialAdminEmail,
email: email,
name: "Administrator",
nickname: "Admin",
avatar: "",
@@ -53,7 +53,7 @@ const setupDefaultUser = async () => {
.insert({
user_id: user.id,
type: "password",
secret: initialAdminPassword,
secret: password,
meta: {},
});
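
The hunks above appear to rename initialAdminEmail/initialAdminPassword back to plain email/password locals. The removed definitions are not part of this excerpt; a plausible reconstruction, assuming they read the documented INITIAL_ADMIN_EMAIL and INITIAL_ADMIN_PASSWORD environment variables with the usual defaults:

// Hypothetical reconstruction of the removed definitions (not shown in this diff):
const initialAdminEmail = process.env.INITIAL_ADMIN_EMAIL || "admin@example.com";
const initialAdminPassword = process.env.INITIAL_ADMIN_PASSWORD || "changeme";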


@@ -4,7 +4,7 @@
auth_basic "Authorization required";
auth_basic_user_file /data/access/{{ access_list_id }};
{% if access_list.pass_auth == 0 or access_list.pass_auth == false %}
{% if access_list.pass_auth == 0 or access_list.pass_auth == true %}
proxy_set_header Authorization "";
{% endif %}


@@ -1,11 +1,6 @@
{% if certificate and certificate_id > 0 -%}
{% if ssl_forced == 1 or ssl_forced == true %}
# Force SSL
{% if trust_forwarded_proto == true %}
set $trust_forwarded_proto "T";
{% else %}
set $trust_forwarded_proto "F";
{% endif %}
include conf.d/include/force-ssl.conf;
{% endif %}
{% endif %}


@@ -12,9 +12,6 @@ server {
proxy_pass {{ forwarding_host }}:{{ forwarding_port }};
access_log /data/logs/stream-{{ id }}_access.log stream;
error_log /data/logs/stream-{{ id }}_error.log warn;
# Custom
include /data/nginx/custom/server_stream[.]conf;
include /data/nginx/custom/server_stream_tcp[.]conf;
@@ -28,9 +25,6 @@ server {
proxy_pass {{ forwarding_host }}:{{ forwarding_port }};
access_log /data/logs/stream-{{ id }}_access.log stream;
error_log /data/logs/stream-{{ id }}_error.log warn;
# Custom
include /data/nginx/custom/server_stream[.]conf;
include /data/nginx/custom/server_stream_udp[.]conf;

File diff suppressed because it is too large.


@@ -4,6 +4,7 @@
# This file assumes that the frontend has been built using ./scripts/frontend-build
FROM nginxproxymanager/testca AS testca
FROM letsencrypt/pebble AS pebbleca
FROM nginxproxymanager/nginx-full:certbot-node
ARG TARGETPLATFORM
@@ -45,6 +46,7 @@ RUN yarn install \
# add late to limit cache-busting by modifications
COPY docker/rootfs /
COPY --from=pebbleca /test/certs/pebble.minica.pem /etc/ssl/certs/pebble.minica.pem
COPY --from=testca /home/step/certs/root_ca.crt /etc/ssl/certs/NginxProxyManager.crt
# Remove frontend service not required for prod, dev nginx config as well


@@ -1,6 +1,6 @@
AUTHENTIK_SECRET_KEY=gl8woZe8L6IIX8SC0c5Ocsj0xPkX5uJo5DVZCFl+L/QGbzuplfutYuua2ODNLEiDD3aFd9H2ylJmrke0
AUTHENTIK_REDIS__HOST=authentik-redis
AUTHENTIK_POSTGRESQL__HOST=pgdb.internal
AUTHENTIK_POSTGRESQL__HOST=db-postgres
AUTHENTIK_POSTGRESQL__USER=authentik
AUTHENTIK_POSTGRESQL__NAME=authentik
AUTHENTIK_POSTGRESQL__PASSWORD=07EKS5NLI6Tpv68tbdvrxfvj


@@ -1,4 +1,5 @@
FROM nginxproxymanager/testca AS testca
FROM letsencrypt/pebble AS pebbleca
FROM nginxproxymanager/nginx-full:certbot-node
LABEL maintainer="Jamie Curnow <jc@jc21.com>"
@@ -32,6 +33,7 @@ RUN rm -f /etc/nginx/conf.d/production.conf \
&& chmod 644 -R /root/.cache
# Certs for testing purposes
COPY --from=pebbleca /test/certs/pebble.minica.pem /etc/ssl/certs/pebble.minica.pem
COPY --from=testca /home/step/certs/root_ca.crt /etc/ssl/certs/NginxProxyManager.crt
EXPOSE 80 81 443


@@ -0,0 +1,12 @@
{
"pebble": {
"listenAddress": "0.0.0.0:443",
"managementListenAddress": "0.0.0.0:15000",
"certificate": "test/certs/localhost/cert.pem",
"privateKey": "test/certs/localhost/key.pem",
"httpPort": 80,
"tlsPort": 443,
"ocspResponderURL": "",
"externalAccountBindingRequired": false
}
}


@@ -6,7 +6,7 @@ services:
fullstack:
environment:
DB_POSTGRES_HOST: "pgdb.internal"
DB_POSTGRES_HOST: "db-postgres"
DB_POSTGRES_PORT: "5432"
DB_POSTGRES_USER: "npm"
DB_POSTGRES_PASSWORD: "npmpass"
@@ -27,9 +27,7 @@ services:
- psql_vol:/var/lib/postgresql/data
- ./ci/postgres:/docker-entrypoint-initdb.d
networks:
fulltest:
aliases:
- pgdb.internal
- fulltest
authentik-redis:
image: "redis:alpine"
@@ -43,8 +41,6 @@ services:
timeout: 3s
volumes:
- redis_vol:/data
networks:
- fulltest
authentik:
image: ghcr.io/goauthentik/server:2024.10.1
@@ -55,8 +51,6 @@ services:
depends_on:
- authentik-redis
- db-postgres
networks:
- fulltest
authentik-worker:
image: ghcr.io/goauthentik/server:2024.10.1
@@ -67,8 +61,6 @@ services:
depends_on:
- authentik-redis
- db-postgres
networks:
- fulltest
authentik-ldap:
image: ghcr.io/goauthentik/ldap:2024.10.1
@@ -79,8 +71,6 @@ services:
restart: unless-stopped
depends_on:
- authentik
networks:
- fulltest
volumes:
psql_vol:


@@ -3,34 +3,31 @@
# This is a base compose file, it should be extended with a
# docker-compose.ci.*.yml file
services:
fullstack:
image: "${IMAGE}:${BRANCH_LOWER}-ci-${BUILD_NUMBER}"
environment:
TZ: "${TZ:-Australia/Brisbane}"
DEBUG: "true"
CI: "true"
DEBUG: 'true'
CI: 'true'
FORCE_COLOR: 1
# Required for DNS Certificate provisioning in CI
LE_SERVER: "https://ca.internal/acme/acme/directory"
REQUESTS_CA_BUNDLE: "/etc/ssl/certs/NginxProxyManager.crt"
LE_SERVER: 'https://ca.internal/acme/acme/directory'
REQUESTS_CA_BUNDLE: '/etc/ssl/certs/NginxProxyManager.crt'
volumes:
- "npm_data_ci:/data"
- "npm_le_ci:/etc/letsencrypt"
- "./dev/letsencrypt.ini:/etc/letsencrypt.ini:ro"
- "./dev/resolv.conf:/etc/resolv.conf:ro"
- "/etc/localtime:/etc/localtime:ro"
- 'npm_data_ci:/data'
- 'npm_le_ci:/etc/letsencrypt'
- './dev/letsencrypt.ini:/etc/letsencrypt.ini:ro'
- './dev/resolv.conf:/etc/resolv.conf:ro'
- '/etc/localtime:/etc/localtime:ro'
healthcheck:
test: ["CMD", "/usr/bin/check-health"]
interval: 10s
timeout: 3s
expose:
- "80/tcp"
- "81/tcp"
- "443/tcp"
- "1500/tcp"
- "1501/tcp"
- "1502/tcp"
- "1503/tcp"
- '80-81/tcp'
- '443/tcp'
- '1500-1503/tcp'
networks:
fulltest:
aliases:
@@ -41,8 +38,8 @@ services:
stepca:
image: jc21/testca
volumes:
- "./dev/resolv.conf:/etc/resolv.conf:ro"
- "/etc/localtime:/etc/localtime:ro"
- './dev/resolv.conf:/etc/resolv.conf:ro'
- '/etc/localtime:/etc/localtime:ro'
networks:
fulltest:
aliases:
@@ -51,18 +48,18 @@ services:
pdns:
image: pschiffe/pdns-mysql:4.8
volumes:
- "/etc/localtime:/etc/localtime:ro"
- '/etc/localtime:/etc/localtime:ro'
environment:
PDNS_master: "yes"
PDNS_api: "yes"
PDNS_api_key: "npm"
PDNS_webserver: "yes"
PDNS_webserver_address: "0.0.0.0"
PDNS_webserver_password: "npm"
PDNS_webserver-allow-from: "127.0.0.0/8,192.0.0.0/8,10.0.0.0/8,172.0.0.0/8"
PDNS_version_string: "anonymous"
PDNS_master: 'yes'
PDNS_api: 'yes'
PDNS_api_key: 'npm'
PDNS_webserver: 'yes'
PDNS_webserver_address: '0.0.0.0'
PDNS_webserver_password: 'npm'
PDNS_webserver-allow-from: '127.0.0.0/8,192.0.0.0/8,10.0.0.0/8,172.0.0.0/8'
PDNS_version_string: 'anonymous'
PDNS_default_ttl: 1500
PDNS_allow_axfr_ips: "127.0.0.0/8,192.0.0.0/8,10.0.0.0/8,172.0.0.0/8"
PDNS_allow_axfr_ips: '127.0.0.0/8,192.0.0.0/8,10.0.0.0/8,172.0.0.0/8'
PDNS_gmysql_host: pdns-db
PDNS_gmysql_port: 3306
PDNS_gmysql_user: pdns
@@ -79,14 +76,14 @@ services:
pdns-db:
image: mariadb
environment:
MYSQL_ROOT_PASSWORD: "pdns"
MYSQL_DATABASE: "pdns"
MYSQL_USER: "pdns"
MYSQL_PASSWORD: "pdns"
MYSQL_ROOT_PASSWORD: 'pdns'
MYSQL_DATABASE: 'pdns'
MYSQL_USER: 'pdns'
MYSQL_PASSWORD: 'pdns'
volumes:
- "pdns_mysql_vol:/var/lib/mysql"
- "/etc/localtime:/etc/localtime:ro"
- "./dev/pdns-db.sql:/docker-entrypoint-initdb.d/01_init.sql:ro"
- 'pdns_mysql_vol:/var/lib/mysql'
- '/etc/localtime:/etc/localtime:ro'
- './dev/pdns-db.sql:/docker-entrypoint-initdb.d/01_init.sql:ro'
networks:
- fulltest
@@ -103,22 +100,22 @@ services:
context: ../
dockerfile: test/cypress/Dockerfile
environment:
HTTP_PROXY: "squid:3128"
HTTPS_PROXY: "squid:3128"
HTTP_PROXY: 'squid:3128'
HTTPS_PROXY: 'squid:3128'
volumes:
- "cypress_logs:/test/results"
- "./dev/resolv.conf:/etc/resolv.conf:ro"
- "/etc/localtime:/etc/localtime:ro"
command: cypress run --browser chrome --config-file=cypress/config/ci.mjs
- 'cypress_logs:/test/results'
- './dev/resolv.conf:/etc/resolv.conf:ro'
- '/etc/localtime:/etc/localtime:ro'
command: cypress run --browser chrome --config-file=cypress/config/ci.js
networks:
- fulltest
squid:
image: ubuntu/squid
volumes:
- "./dev/squid.conf:/etc/squid/squid.conf:ro"
- "./dev/resolv.conf:/etc/resolv.conf:ro"
- "/etc/localtime:/etc/localtime:ro"
- './dev/squid.conf:/etc/squid/squid.conf:ro'
- './dev/resolv.conf:/etc/resolv.conf:ro'
- '/etc/localtime:/etc/localtime:ro'
networks:
- fulltest


@@ -32,7 +32,7 @@ services:
# DB_MYSQL_PASSWORD: 'npm'
# DB_MYSQL_NAME: 'npm'
# db-postgres:
DB_POSTGRES_HOST: "pgdb.internal"
DB_POSTGRES_HOST: "db-postgres"
DB_POSTGRES_PORT: "5432"
DB_POSTGRES_USER: "npm"
DB_POSTGRES_PASSWORD: "npmpass"
@@ -81,6 +81,8 @@ services:
db-postgres:
image: postgres:17
container_name: npm2dev.db-postgres
networks:
- nginx_proxy_manager
environment:
POSTGRES_USER: "npm"
POSTGRES_PASSWORD: "npmpass"
@@ -88,10 +90,6 @@ services:
volumes:
- psql_data:/var/lib/postgresql/data
- ./ci/postgres:/docker-entrypoint-initdb.d
networks:
nginx_proxy_manager:
aliases:
- pgdb.internal
stepca:
image: jc21/testca
@@ -192,7 +190,7 @@ services:
- "../test/results:/results"
- "./dev/resolv.conf:/etc/resolv.conf:ro"
- "/etc/localtime:/etc/localtime:ro"
command: cypress run --browser chrome --config-file=cypress/config/ci.mjs
command: cypress run --browser chrome --config-file=cypress/config/ci.js
networks:
- nginx_proxy_manager


@@ -8,8 +8,8 @@ server {
set $port "80";
server_name localhost-nginx-proxy-manager;
access_log /data/logs/fallback_http_access.log standard;
error_log /data/logs/fallback_http_error.log warn;
access_log /data/logs/fallback_access.log standard;
error_log /data/logs/fallback_error.log warn;
include conf.d/include/assets.conf;
include conf.d/include/block-exploits.conf;
include conf.d/include/letsencrypt-acme-challenge.conf;
@@ -30,7 +30,7 @@ server {
set $port "443";
server_name localhost;
access_log /data/logs/fallback_http_access.log standard;
access_log /data/logs/fallback_access.log standard;
error_log /dev/null crit;
include conf.d/include/ssl-ciphers.conf;
ssl_reject_handshake on;


@@ -5,28 +5,6 @@ if ($scheme = "http") {
if ($request_uri = /.well-known/acme-challenge/test-challenge) {
set $test "${test}T";
}
# Check if the ssl staff has been handled
set $test_ssl_handled "";
if ($trust_forwarded_proto = "") {
set $trust_forwarded_proto "F";
}
if ($trust_forwarded_proto = "T") {
set $test_ssl_handled "${test_ssl_handled}T";
}
if ($http_x_forwarded_proto = "https") {
set $test_ssl_handled "${test_ssl_handled}S";
}
if ($http_x_forwarded_scheme = "https") {
set $test_ssl_handled "${test_ssl_handled}S";
}
if ($test_ssl_handled = "TSS") {
set $test_ssl_handled "TS";
}
if ($test_ssl_handled = "TS") {
set $test "${test}S";
}
if ($test = H) {
return 301 https://$host$request_uri;
}


@@ -1,3 +0,0 @@
log_format stream '[$time_local] [Client $remote_addr:$remote_port] $protocol $status $bytes_sent $bytes_received $session_time [Sent-to $upstream_addr] [Sent $upstream_bytes_sent] [Received $upstream_bytes_received] [Time $upstream_connect_time] $ssl_protocol $ssl_cipher';
access_log /data/logs/fallback_stream_access.log stream;


@@ -1,4 +1,4 @@
log_format proxy '[$time_local] $upstream_cache_status $upstream_status $status - $request_method $scheme $host "$request_uri" [Client $remote_addr] [Length $body_bytes_sent] [Gzip $gzip_ratio] [Sent-to $server] "$http_user_agent" "$http_referer"';
log_format standard '[$time_local] $status - $request_method $scheme $host "$request_uri" [Client $remote_addr] [Length $body_bytes_sent] [Gzip $gzip_ratio] "$http_user_agent" "$http_referer"';
access_log /data/logs/fallback_http_access.log proxy;
access_log /data/logs/fallback_access.log proxy;


@@ -1,7 +1,7 @@
add_header X-Served-By $host;
proxy_set_header Host $host;
proxy_set_header X-Forwarded-Scheme $x_forwarded_scheme;
proxy_set_header X-Forwarded-Proto $x_forwarded_proto;
proxy_set_header X-Forwarded-Scheme $scheme;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Real-IP $remote_addr;
proxy_pass $forward_scheme://$server:$port$request_uri;


@@ -47,7 +47,7 @@ http {
proxy_cache_path /var/lib/nginx/cache/private levels=1:2 keys_zone=private-cache:5m max_size=1024m;
# Log format and fallback log file
include /etc/nginx/conf.d/include/log-proxy.conf;
include /etc/nginx/conf.d/include/log.conf;
# Dynamically generated resolvers file
include /etc/nginx/conf.d/include/resolvers.conf;
@@ -57,18 +57,6 @@ http {
default http;
}
# Handle upstream X-Forwarded-Proto and X-Forwarded-Scheme header
map $http_x_forwarded_proto $x_forwarded_proto {
"http" "http";
"https" "https";
default $scheme;
}
map $http_x_forwarded_scheme $x_forwarded_scheme {
"http" "http";
"https" "https";
default $scheme;
}
# Real IP Determination
# Local subnets:
@@ -97,9 +85,6 @@ http {
}
stream {
# Log format and fallback log file
include /etc/nginx/conf.d/include/log-stream.conf;
# Files generated by NPM
include /data/nginx/stream/*.conf;
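
The map blocks this diff drops sanitize the client-supplied X-Forwarded-Proto and X-Forwarded-Scheme headers: only the literal values "http" and "https" pass through, and anything else falls back to the scheme of the current connection; the companion proxy.conf hunk above switches the forwarded headers between these sanitized variables and plain $scheme. The same mapping expressed in JavaScript (illustrative only):

// Equivalent of the dropped nginx map blocks (sketch):
const sanitizeForwarded = (headerValue, connectionScheme) =>
    headerValue === "http" || headerValue === "https" ? headerValue : connectionScheme;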


@@ -17,6 +17,10 @@ case $TARGETPLATFORM in
S6_ARCH=aarch64
;;
linux/arm/v7)
S6_ARCH=armhf
;;
*)
S6_ARCH=x86_64
;;


@@ -24,5 +24,4 @@
.inline-img img {
display: inline;
margin-right: 8px;
}

Binary image files changed: 4 added, 7 removed (previews not shown).

Some files were not shown because too many files have changed in this diff.