Compare commits

..

7 Commits

174 changed files with 6582 additions and 4781 deletions


@@ -1 +1 @@
2.10.2
2.9.9

Jenkinsfile (vendored): 111 changed lines

@@ -1,9 +1,3 @@
import groovy.transform.Field
@Field
def shOutput = ""
def buildxPushTags = ""
pipeline {
agent {
label 'docker-multiarch'
@@ -14,16 +8,14 @@ pipeline {
ansiColor('xterm')
}
environment {
IMAGE = 'nginx-proxy-manager'
IMAGE = "nginx-proxy-manager"
BUILD_VERSION = getVersion()
MAJOR_VERSION = '2'
MAJOR_VERSION = "2"
BRANCH_LOWER = "${BRANCH_NAME.toLowerCase().replaceAll('/', '-')}"
COMPOSE_PROJECT_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}"
COMPOSE_FILE = 'docker/docker-compose.ci.yml'
COMPOSE_INTERACTIVE_NO_CLI = 1
BUILDX_NAME = "${COMPOSE_PROJECT_NAME}"
DOCS_BUCKET = 'jc21-npm-site'
DOCS_CDN = 'EN1G6DEWZUTDT'
}
stages {
stage('Environment') {
@@ -34,7 +26,7 @@ pipeline {
}
steps {
script {
buildxPushTags = "-t docker.io/jc21/${IMAGE}:${BUILD_VERSION} -t docker.io/jc21/${IMAGE}:${MAJOR_VERSION} -t docker.io/jc21/${IMAGE}:latest"
env.BUILDX_PUSH_TAGS = "-t docker.io/jc21/${IMAGE}:${BUILD_VERSION} -t docker.io/jc21/${IMAGE}:${MAJOR_VERSION} -t docker.io/jc21/${IMAGE}:latest"
}
}
}
@@ -47,7 +39,7 @@ pipeline {
steps {
script {
// Defaults to the Branch name, which applies to all branches AND PRs
buildxPushTags = "-t docker.io/jc21/${IMAGE}:github-${BRANCH_LOWER}"
env.BUILDX_PUSH_TAGS = "-t docker.io/jc21/${IMAGE}:github-${BRANCH_LOWER}"
}
}
}
@@ -62,52 +54,54 @@ pipeline {
}
}
}
stage('Build and Test') {
stage('Frontend') {
steps {
script {
// Frontend and Backend
def shStatusCode = sh(label: 'Checking and Building', returnStatus: true, script: '''
set -e
./scripts/ci/frontend-build > ${WORKSPACE}/tmp-sh-build 2>&1
./scripts/ci/test-and-build > ${WORKSPACE}/tmp-sh-build 2>&1
''')
shOutput = readFile "${env.WORKSPACE}/tmp-sh-build"
if (shStatusCode != 0) {
error "${shOutput}"
sh './scripts/frontend-build'
}
}
}
post {
always {
sh 'rm -f ${WORKSPACE}/tmp-sh-build'
}
failure {
npmGithubPrComment("CI Error:\n\n```\n${shOutput}\n```", true)
}
stage('Backend') {
steps {
echo 'Checking Syntax ...'
// See: https://github.com/yarnpkg/yarn/issues/3254
sh '''docker run --rm \\
-v "$(pwd)/backend:/app" \\
-v "$(pwd)/global:/app/global" \\
-w /app \\
node:latest \\
sh -c "yarn install && yarn eslint . && rm -rf node_modules"
'''
echo 'Docker Build ...'
sh '''docker build --pull --no-cache --squash --compress \\
-t "${IMAGE}:ci-${BUILD_NUMBER}" \\
-f docker/Dockerfile \\
--build-arg TARGETPLATFORM=linux/amd64 \\
--build-arg BUILDPLATFORM=linux/amd64 \\
--build-arg BUILD_VERSION="${BUILD_VERSION}" \\
--build-arg BUILD_COMMIT="${BUILD_COMMIT}" \\
--build-arg BUILD_DATE="$(date '+%Y-%m-%d %T %Z')" \\
.
'''
}
}
stage('Integration Tests Sqlite') {
steps {
// Bring up a stack
sh 'docker-compose up -d fullstack-sqlite'
sh './scripts/wait-healthy $(docker-compose ps --all -q fullstack-sqlite) 120'
// Stop and Start it, as this will test its ability to restart with existing data
sh 'docker-compose stop fullstack-sqlite'
sh 'docker-compose start fullstack-sqlite'
sh './scripts/wait-healthy $(docker-compose ps --all -q fullstack-sqlite) 120'
sh './scripts/wait-healthy $(docker-compose ps -q fullstack-sqlite) 120'
// Run tests
sh 'rm -rf test/results'
sh 'docker-compose up cypress-sqlite'
// Get results
sh 'docker cp -L "$(docker-compose ps --all -q cypress-sqlite):/test/results" test/'
sh 'docker cp -L "$(docker-compose ps -q cypress-sqlite):/test/results" test/'
}
post {
always {
// Dumps to analyze later
sh 'mkdir -p debug'
sh 'docker-compose logs fullstack-sqlite > debug/docker_fullstack_sqlite.log'
sh 'docker-compose logs db > debug/docker_db.log'
sh 'docker-compose logs fullstack-sqlite | gzip > debug/docker_fullstack_sqlite.log.gz'
sh 'docker-compose logs db | gzip > debug/docker_db.log.gz'
// Cypress videos and screenshot artifacts
dir(path: 'test/results') {
archiveArtifacts allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml'
@@ -120,20 +114,20 @@ pipeline {
steps {
// Bring up a stack
sh 'docker-compose up -d fullstack-mysql'
sh './scripts/wait-healthy $(docker-compose ps --all -q fullstack-mysql) 120'
sh './scripts/wait-healthy $(docker-compose ps -q fullstack-mysql) 120'
// Run tests
sh 'rm -rf test/results'
sh 'docker-compose up cypress-mysql'
// Get results
sh 'docker cp -L "$(docker-compose ps --all -q cypress-mysql):/test/results" test/'
sh 'docker cp -L "$(docker-compose ps -q cypress-mysql):/test/results" test/'
}
post {
always {
// Dumps to analyze later
sh 'mkdir -p debug'
sh 'docker-compose logs fullstack-mysql > debug/docker_fullstack_mysql.log'
sh 'docker-compose logs db > debug/docker_db.log'
sh 'docker-compose logs fullstack-mysql | gzip > debug/docker_fullstack_mysql.log.gz'
sh 'docker-compose logs db | gzip > debug/docker_db.log.gz'
// Cypress videos and screenshot artifacts
dir(path: 'test/results') {
archiveArtifacts allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml'
@@ -169,8 +163,10 @@ pipeline {
}
steps {
withCredentials([usernamePassword(credentialsId: 'jc21-dockerhub', passwordVariable: 'dpass', usernameVariable: 'duser')]) {
sh 'docker login -u "${duser}" -p "${dpass}"'
sh "./scripts/buildx --push ${buildxPushTags}"
// Docker Login
sh "docker login -u '${duser}' -p '${dpass}'"
// Buildx with push from cache
sh "./scripts/buildx --push ${BUILDX_PUSH_TAGS}"
}
}
}
@@ -184,7 +180,26 @@ pipeline {
}
}
steps {
npmDocsRelease("$DOCS_BUCKET", "$DOCS_CDN")
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'npm-s3-docs', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
sh """docker run --rm \\
--name \${COMPOSE_PROJECT_NAME}-docs-upload \\
-e S3_BUCKET=jc21-npm-site \\
-e AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID \\
-e AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY \\
-v \$(pwd):/app \\
-w /app \\
jc21/ci-tools \\
scripts/docs-upload /app/docs/.vuepress/dist/
"""
sh """docker run --rm \\
--name \${COMPOSE_PROJECT_NAME}-docs-invalidate \\
-e AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID \\
-e AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY \\
jc21/ci-tools \\
aws cloudfront create-invalidation --distribution-id EN1G6DEWZUTDT --paths '/*'
"""
}
}
}
stage('PR Comment') {
@@ -198,14 +213,14 @@ pipeline {
}
steps {
script {
npmGithubPrComment("Docker Image for build ${BUILD_NUMBER} is available on [DockerHub](https://cloud.docker.com/repository/docker/jc21/${IMAGE}) as `jc21/${IMAGE}:github-${BRANCH_LOWER}`\n\n**Note:** ensure you backup your NPM instance before testing this PR image! Especially if this PR contains database changes.", true)
def comment = pullRequest.comment("This is an automated message from CI:\n\nDocker Image for build ${BUILD_NUMBER} is available on [DockerHub](https://cloud.docker.com/repository/docker/jc21/${IMAGE}) as `jc21/${IMAGE}:github-${BRANCH_LOWER}`\n\n**Note:** ensure you backup your NPM instance before testing this PR image! Especially if this PR contains database changes.")
}
}
}
}
post {
always {
sh 'docker-compose down --remove-orphans --volumes -t 30'
sh 'docker-compose down --rmi all --remove-orphans --volumes -t 30'
sh 'echo Reverting ownership'
sh 'docker run --rm -v $(pwd):/data jc21/ci-tools chown -R $(id -u):$(id -g) /data'
}

README.md: 436 changed lines

@@ -1,13 +1,22 @@
<p align="center">
<img src="https://nginxproxymanager.com/github.png">
<br><br>
<img src="https://img.shields.io/badge/version-2.10.2-green.svg?style=for-the-badge">
<img src="https://img.shields.io/badge/version-2.9.9-green.svg?style=for-the-badge">
<a href="https://hub.docker.com/repository/docker/jc21/nginx-proxy-manager">
<img src="https://img.shields.io/docker/stars/jc21/nginx-proxy-manager.svg?style=for-the-badge">
</a>
<a href="https://hub.docker.com/repository/docker/jc21/nginx-proxy-manager">
<img src="https://img.shields.io/docker/pulls/jc21/nginx-proxy-manager.svg?style=for-the-badge">
</a>
<a href="https://ci.nginxproxymanager.com/blue/organizations/jenkins/nginx-proxy-manager/branches/">
<img src="https://img.shields.io/jenkins/build?jobUrl=https%3A%2F%2Fci.nginxproxymanager.com%2Fjob%2Fnginx-proxy-manager%2Fjob%2Fmaster&style=for-the-badge">
</a>
<a href="https://gitter.im/nginx-proxy-manager/community">
<img alt="Gitter" src="https://img.shields.io/gitter/room/nginx-proxy-manager/community?style=for-the-badge">
</a>
<a href="https://reddit.com/r/nginxproxymanager">
<img alt="Reddit" src="https://img.shields.io/reddit/subreddit-subscribers/nginxproxymanager?label=Reddit%20Community&style=for-the-badge">
</a>
</p>
This project comes as a pre-built docker image that enables you to easily forward to your websites
@@ -56,7 +65,7 @@ I won't go in to too much detail here but here are the basics for someone new to
2. Create a docker-compose.yml file similar to this:
```yml
version: '3.8'
version: '3'
services:
app:
image: 'jc21/nginx-proxy-manager:latest'
@@ -65,21 +74,31 @@ services:
- '80:80'
- '81:81'
- '443:443'
environment:
DB_MYSQL_HOST: "db"
DB_MYSQL_PORT: 3306
DB_MYSQL_USER: "npm"
DB_MYSQL_PASSWORD: "npm"
DB_MYSQL_NAME: "npm"
volumes:
- ./data:/data
- ./letsencrypt:/etc/letsencrypt
db:
image: 'jc21/mariadb-aria:latest'
restart: unless-stopped
environment:
MYSQL_ROOT_PASSWORD: 'npm'
MYSQL_DATABASE: 'npm'
MYSQL_USER: 'npm'
MYSQL_PASSWORD: 'npm'
volumes:
- ./data/mysql:/var/lib/mysql
```
This is the bare minimum configuration required. See the [documentation](https://nginxproxymanager.com/setup/) for more.
3. Bring up your stack by running
3. Bring up your stack
```bash
docker-compose up -d
# If using docker-compose-plugin
docker compose up -d
```
4. Log in to the Admin UI
@@ -100,12 +119,395 @@ Immediately after logging in with this default user you will be asked to modify
## Contributors
Special thanks to [all of our contributors](https://github.com/NginxProxyManager/nginx-proxy-manager/graphs/contributors).
Special thanks to the following contributors:
## Getting Support
1. [Found a bug?](https://github.com/NginxProxyManager/nginx-proxy-manager/issues)
2. [Discussions](https://github.com/NginxProxyManager/nginx-proxy-manager/discussions)
3. [Development Gitter](https://gitter.im/nginx-proxy-manager/community)
4. [Reddit](https://reddit.com/r/nginxproxymanager)
<!-- prettier-ignore-start -->
<!-- markdownlint-disable -->
<table>
<tr>
<td align="center">
<a href="https://github.com/Subv">
<img src="https://avatars1.githubusercontent.com/u/357072?s=460&u=d8adcdc91d749ae53e177973ed9b6bb6c4c894a3&v=4" width="80" alt=""/>
<br /><sub><b>Sebastian Valle</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/Indemnity83">
<img src="https://avatars3.githubusercontent.com/u/35218?s=460&u=7082004ff35138157c868d7d9c683ccebfce5968&v=4" width="80" alt=""/>
<br /><sub><b>Kyle Klaus</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/theraw">
<img src="https://avatars1.githubusercontent.com/u/32969774?s=460&u=6b359971e15685fb0359e6a8c065a399b40dc228&v=4" width="80" alt=""/>
<br /><sub><b>ƬHE ЯAW</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/spalger">
<img src="https://avatars2.githubusercontent.com/u/1329312?s=400&u=565223e38f1c052afb4c5dcca3fcf1c63ba17ae7&v=4" width="80" alt=""/>
<br /><sub><b>Spencer</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/Xantios">
<img src="https://avatars3.githubusercontent.com/u/1507836?s=460&v=4" width="80" alt=""/>
<br /><sub><b>Xantios Krugor</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/dpanesso">
<img src="https://avatars2.githubusercontent.com/u/2687121?s=460&v=4" width="80" alt=""/>
<br /><sub><b>David Panesso</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/IronTooch">
<img src="https://avatars3.githubusercontent.com/u/27360514?s=460&u=69bf854a6647c55725f62ecb8d39249c6c0b2602&v=4" width="80" alt=""/>
<br /><sub><b>IronTooch</b></sub>
</a>
</td>
</tr>
<tr>
<td align="center">
<a href="https://github.com/damianog">
<img src="https://avatars1.githubusercontent.com/u/2786682?s=460&u=76c6136fae797abb76b951cd8a246dcaecaf21af&v=4" width="80" alt=""/>
<br /><sub><b>Damiano</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/tfmm">
<img src="https://avatars3.githubusercontent.com/u/6880538?s=460&u=ce0160821cc4aa802df8395200f2d4956a5bc541&v=4" width="80" alt=""/>
<br /><sub><b>Russ</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/margaale">
<img src="https://avatars3.githubusercontent.com/u/20794934?s=460&v=4" width="80" alt=""/>
<br /><sub><b>Marcelo Castagna</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/Steven-Harris">
<img src="https://avatars2.githubusercontent.com/u/7720242?s=460&v=4" width="80" alt=""/>
<br /><sub><b>Steven Harris</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/jlesage">
<img src="https://avatars0.githubusercontent.com/u/1791123?s=460&v=4" width="80" alt=""/>
<br /><sub><b>Jocelyn Le Sage</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/cmer">
<img src="https://avatars0.githubusercontent.com/u/412?s=460&u=67dd8b2e3661bfd6f68ec1eaa5b9821bd8a321cd&v=4" width="80" alt=""/>
<br /><sub><b>Carl Mercier</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/the1ts">
<img src="https://avatars1.githubusercontent.com/u/84956?s=460&v=4" width="80" alt=""/>
<br /><sub><b>Paul Mansfield</b></sub>
</a>
</td>
</tr>
<tr>
<td align="center">
<a href="https://github.com/OhHeyAlan">
<img src="https://avatars0.githubusercontent.com/u/11955126?s=460&u=fbaa5a1a4f73ef8960132c703349bfd037fe2630&v=4" width="80" alt=""/>
<br /><sub><b>OhHeyAlan</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/dogmatic69">
<img src="https://avatars2.githubusercontent.com/u/94674?s=460&u=ca7647de53145c6283b6373ade5dc94ba99347db&v=4" width="80" alt=""/>
<br /><sub><b>Carl Sutton</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/tg44">
<img src="https://avatars0.githubusercontent.com/u/31839?s=460&u=ad32f4cadfef5e5fb09cdfa4b7b7b36a99ba6811&v=4" width="80" alt=""/>
<br /><sub><b>Gergő Törcsvári</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/vrenjith">
<img src="https://avatars3.githubusercontent.com/u/2093241?s=460&u=96ce93a9bebabdd0a60a2dc96cd093a41d5edaba&v=4" width="80" alt=""/>
<br /><sub><b>vrenjith</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/duhruh">
<img src="https://avatars2.githubusercontent.com/u/1133969?s=460&u=c0691e6131ec6d516416c1c6fcedb5034f877bbe&v=4" width="80" alt=""/>
<br /><sub><b>David Rivera</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/jipjan">
<img src="https://avatars2.githubusercontent.com/u/1384618?s=460&v=4" width="80" alt=""/>
<br /><sub><b>Jaap-Jan de Wit</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/jmwebslave">
<img src="https://avatars2.githubusercontent.com/u/6118262?s=460&u=7db409c47135b1e141c366bbb03ed9fae6ac2638&v=4" width="80" alt=""/>
<br /><sub><b>James Morgan</b></sub>
</a>
</td>
</tr>
<tr>
<td align="center">
<a href="https://github.com/chaptergy">
<img src="https://avatars2.githubusercontent.com/u/26956711?s=460&u=7d9adebabb6b4e7af7cb05d98d751087a372304b&v=4" width="80" alt=""/>
<br /><sub><b>chaptergy</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/Philip-Mooney">
<img src="https://avatars0.githubusercontent.com/u/48624631?s=460&v=4" width="80" alt=""/>
<br /><sub><b>Philip Mooney</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/WaterCalm">
<img src="https://avatars1.githubusercontent.com/u/23502129?s=400&v=4" width="80" alt=""/>
<br /><sub><b>WaterCalm</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/lebrou34">
<img src="https://avatars1.githubusercontent.com/u/16373103?s=460&v=4" width="80" alt=""/>
<br /><sub><b>lebrou34</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/lightglitch">
<img src="https://avatars0.githubusercontent.com/u/196953?s=460&v=4" width="80" alt=""/>
<br /><sub><b>Mário Franco</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/klutchell">
<img src="https://avatars3.githubusercontent.com/u/20458272?s=460&v=4" width="80" alt=""/>
<br /><sub><b>Kyle Harding</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/ahgraber">
<img src="https://avatars.githubusercontent.com/u/24922003?s=460&u=8376c9f00af9b6057ba4d2fb03b4f1b20a75277f&v=4" width="80" alt=""/>
<br /><sub><b>Alex Graber</b></sub>
</a>
</td>
</tr>
<tr>
<td align="center">
<a href="https://github.com/MooBaloo">
<img src="https://avatars.githubusercontent.com/u/9493496?s=460&v=4" width="80" alt=""/>
<br /><sub><b>MooBaloo</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/Shuro">
<img src="https://avatars.githubusercontent.com/u/944030?s=460&v=4" width="80" alt=""/>
<br /><sub><b>Shuro</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/lorisbergeron">
<img src="https://avatars.githubusercontent.com/u/51918567?s=460&u=778e4ff284b7d7304450f98421c99f79298371fb&v=4" width="80" alt=""/>
<br /><sub><b>Loris Bergeron</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/hepelayo">
<img src="https://avatars.githubusercontent.com/u/8243119?v=4" width="80" alt=""/>
<br /><sub><b>hepelayo</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/jonasled">
<img src="https://avatars.githubusercontent.com/u/46790650?v=4" width="80" alt=""/>
<br /><sub><b>Jonas Leder</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/stegmannb">
<img src="https://avatars.githubusercontent.com/u/12850482?v=4" width="80" alt=""/>
<br /><sub><b>Bastian Stegmann</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/Stealthii">
<img src="https://avatars.githubusercontent.com/u/998920?v=4" width="80" alt=""/>
<br /><sub><b>Stealthii</b></sub>
</a>
</td>
</tr>
<tr>
<td align="center">
<a href="https://github.com/thegamingninja">
<img src="https://avatars.githubusercontent.com/u/8020534?v=4" width="80" alt=""/>
<br /><sub><b>THEGamingninja</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/italobb">
<img src="https://avatars.githubusercontent.com/u/1801687?v=4" width="80" alt=""/>
<br /><sub><b>Italo Borssatto</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/GurjinderSingh">
<img src="https://avatars.githubusercontent.com/u/3470709?v=4" width="80" alt=""/>
<br /><sub><b>Gurjinder Singh</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/phantomski77">
<img src="https://avatars.githubusercontent.com/u/69464125?v=4" width="80" alt=""/>
<br /><sub><b>David Dosoudil</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/ijaron">
<img src="https://avatars.githubusercontent.com/u/5156472?v=4" width="80" alt=""/>
<br /><sub><b>ijaron</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/nielscil">
<img src="https://avatars.githubusercontent.com/u/9073152?v=4" width="80" alt=""/>
<br /><sub><b>Niels Bouma</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/ogarai">
<img src="https://avatars.githubusercontent.com/u/2949572?v=4" width="80" alt=""/>
<br /><sub><b>Orko Garai</b></sub>
</a>
</td>
</tr>
<tr>
<td align="center">
<a href="https://github.com/baruffaldi">
<img src="https://avatars.githubusercontent.com/u/36949?v=4" width="80" alt=""/>
<br /><sub><b>Filippo Baruffaldi</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/bikram990">
<img src="https://avatars.githubusercontent.com/u/6782131?v=4" width="80" alt=""/>
<br /><sub><b>Bikramjeet Singh</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/razvanstoica89">
<img src="https://avatars.githubusercontent.com/u/28236583?v=4" width="80" alt=""/>
<br /><sub><b>Razvan Stoica</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/psharma04">
<img src="https://avatars.githubusercontent.com/u/22587474?v=4" width="80" alt=""/>
<br /><sub><b>RBXII3</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/demize">
<img src="https://avatars.githubusercontent.com/u/264914?v=4" width="80" alt=""/>
<br /><sub><b>demize</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/PUP-Loki">
<img src="https://avatars.githubusercontent.com/u/75944209?v=4" width="80" alt=""/>
<br /><sub><b>PUP-Loki</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/DSorlov">
<img src="https://avatars.githubusercontent.com/u/8133650?v=4" width="80" alt=""/>
<br /><sub><b>Daniel Sörlöv</b></sub>
</a>
</td>
</tr>
<tr>
<td align="center">
<a href="https://github.com/Theyooo">
<img src="https://avatars.githubusercontent.com/u/58510131?v=4" width="80" alt=""/>
<br /><sub><b>Theyooo</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/mrdink">
<img src="https://avatars.githubusercontent.com/u/514751?v=4" width="80" alt=""/>
<br /><sub><b>Justin Peacock</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/ChrisTracy">
<img src="https://avatars.githubusercontent.com/u/58871574?v=4" width="80" alt=""/>
<br /><sub><b>Chris Tracy</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/Fuechslein">
<img src="https://avatars.githubusercontent.com/u/15112818?v=4" width="80" alt=""/>
<br /><sub><b>Fuechslein</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/nightah">
<img src="https://avatars.githubusercontent.com/u/3339418?v=4" width="80" alt=""/>
<br /><sub><b>Amir Zarrinkafsh</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/gabbe">
<img src="https://avatars.githubusercontent.com/u/156397?v=4" width="80" alt=""/>
<br /><sub><b>gabbe</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/bmbvenom">
<img src="https://avatars.githubusercontent.com/u/20530371?v=4" width="80" alt=""/>
<br /><sub><b>bmbvenom</b></sub>
</a>
</td>
</tr>
<tr>
<td align="center">
<a href="https://github.com/FMeinicke">
<img src="https://avatars.githubusercontent.com/u/42121639?v=4" width="80" alt=""/>
<br /><sub><b>Florian Meinicke</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/ssrahul96">
<img src="https://avatars.githubusercontent.com/u/15570570?v=4" width="80" alt=""/>
<br /><sub><b>Rahul Somasundaram</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/BjoernAkAManf">
<img src="https://avatars.githubusercontent.com/u/833043?v=4" width="80" alt=""/>
<br /><sub><b>Björn Heinrichs</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/realJoshByrnes">
<img src="https://avatars.githubusercontent.com/u/204185?v=4" width="80" alt=""/>
<br /><sub><b>Josh Byrnes</b></sub>
</a>
</td>
<td align="center">
<a href="https://github.com/bergi9">
<img src="https://avatars.githubusercontent.com/u/5556750?v=4" width="80" alt=""/>
<br /><sub><b>bergi9</b></sub>
</a>
</td>
</tr>
</table>
<!-- markdownlint-enable -->
<!-- prettier-ignore-end -->


@@ -2,7 +2,6 @@ const express = require('express');
const bodyParser = require('body-parser');
const fileUpload = require('express-fileupload');
const compression = require('compression');
const config = require('./lib/config');
const log = require('./logger').express;
/**
@@ -25,7 +24,7 @@ app.enable('trust proxy', ['loopback', 'linklocal', 'uniquelocal']);
app.enable('strict routing');
// pretty print JSON when not live
if (config.debug()) {
if (process.env.NODE_ENV !== 'production') {
app.set('json spaces', 2);
}
@@ -41,6 +40,7 @@ app.use(function (req, res, next) {
}
res.set({
'Strict-Transport-Security': 'includeSubDomains; max-age=631138519; preload',
'X-XSS-Protection': '1; mode=block',
'X-Content-Type-Options': 'nosniff',
'X-Frame-Options': x_frame_options,
@@ -66,7 +66,7 @@ app.use(function (err, req, res, next) {
}
};
if (config.debug() || (req.baseUrl + req.path).includes('nginx/certificates')) {
if (process.env.NODE_ENV === 'development' || (req.baseUrl + req.path).includes('nginx/certificates')) {
payload.debug = {
stack: typeof err.stack !== 'undefined' && err.stack ? err.stack.split('\n') : null,
previous: err.previous
@@ -74,13 +74,11 @@ app.use(function (err, req, res, next) {
}
// Not every error is worth logging - but this is good for now until it gets annoying.
if (typeof err.stack !== 'undefined' && err.stack) {
if (config.debug()) {
log.debug(err.stack);
} else if (typeof err.public == 'undefined' || !err.public) {
if (process.env.NODE_ENV === 'development' || process.env.DEBUG) {
log.debug(err);
} else if (typeof err.stack !== 'undefined' && err.stack && (typeof err.public == 'undefined' || !err.public)) {
log.warn(err.message);
}
}
res
.status(err.status || 500)


@@ -1,22 +1,21 @@
const config = require('./lib/config');
const config = require('config');
if (!config.has('database')) {
throw new Error('Database config does not exist! Please read the instructions: https://nginxproxymanager.com/setup/');
throw new Error('Database config does not exist! Please read the instructions: https://github.com/jc21/nginx-proxy-manager/blob/master/doc/INSTALL.md');
}
function generateDbConfig() {
const cfg = config.get('database');
if (cfg.engine === 'knex-native') {
return cfg.knex;
}
if (config.database.engine === 'knex-native') {
return config.database.knex;
} else
return {
client: cfg.engine,
client: config.database.engine,
connection: {
host: cfg.host,
user: cfg.user,
password: cfg.password,
database: cfg.name,
port: cfg.port
host: config.database.host,
user: config.database.user,
password: config.database.password,
database: config.database.name,
port: config.database.port
},
migrations: {
tableName: 'migrations'
@@ -24,4 +23,11 @@ function generateDbConfig() {
};
}
module.exports = require('knex')(generateDbConfig());
let data = generateDbConfig();
if (typeof config.database.version !== 'undefined') {
data.version = config.database.version;
}
module.exports = require('knex')(data);
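
For orientation, below is a minimal usage sketch of the knex instance this module exports. The relative path `./db`, the `user` table name, and the query itself are assumptions for illustration only; they are not part of the diff.

```js
// Sketch only: the module above exports a configured knex query builder.
// Table and column names here are assumed for the example.
const db = require('./db');

db('user')                    // build a query against the "user" table
    .where('is_deleted', 0)   // knex chainable filter
    .then((rows) => {
        console.log('rows found:', rows.length);
    })
    .catch((err) => {
        console.error('query failed:', err.message);
    });
```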


@@ -40,210 +40,6 @@
}
}
},
"/nginx/proxy-hosts": {
"get": {
"operationId": "getProxyHosts",
"summary": "Get all proxy hosts",
"tags": ["Proxy Hosts"],
"security": [
{
"BearerAuth": ["users"]
}
],
"parameters": [
{
"in": "query",
"name": "expand",
"description": "Expansions",
"schema": {
"type": "string",
"enum": ["access_list", "owner", "certificate"]
}
}
],
"responses": {
"200": {
"description": "200 response",
"content": {
"application/json": {
"examples": {
"default": {
"value": [
{
"id": 1,
"created_on": "2023-03-30T01:12:23.000Z",
"modified_on": "2023-03-30T02:15:40.000Z",
"owner_user_id": 1,
"domain_names": ["aasdasdad"],
"forward_host": "asdasd",
"forward_port": 80,
"access_list_id": 0,
"certificate_id": 0,
"ssl_forced": 0,
"caching_enabled": 0,
"block_exploits": 0,
"advanced_config": "sdfsdfsdf",
"meta": {
"letsencrypt_agree": false,
"dns_challenge": false,
"nginx_online": false,
"nginx_err": "Command failed: /usr/sbin/nginx -t -g \"error_log off;\"\nnginx: [emerg] unknown directive \"sdfsdfsdf\" in /data/nginx/proxy_host/1.conf:37\nnginx: configuration file /etc/nginx/nginx.conf test failed\n"
},
"allow_websocket_upgrade": 0,
"http2_support": 0,
"forward_scheme": "http",
"enabled": 1,
"locations": [],
"hsts_enabled": 0,
"hsts_subdomains": 0,
"owner": {
"id": 1,
"created_on": "2023-03-30T01:11:50.000Z",
"modified_on": "2023-03-30T01:11:50.000Z",
"is_deleted": 0,
"is_disabled": 0,
"email": "admin@example.com",
"name": "Administrator",
"nickname": "Admin",
"avatar": "",
"roles": ["admin"]
},
"access_list": null,
"certificate": null
},
{
"id": 2,
"created_on": "2023-03-30T02:11:49.000Z",
"modified_on": "2023-03-30T02:11:49.000Z",
"owner_user_id": 1,
"domain_names": ["test.example.com"],
"forward_host": "1.1.1.1",
"forward_port": 80,
"access_list_id": 0,
"certificate_id": 0,
"ssl_forced": 0,
"caching_enabled": 0,
"block_exploits": 0,
"advanced_config": "",
"meta": {
"letsencrypt_agree": false,
"dns_challenge": false,
"nginx_online": true,
"nginx_err": null
},
"allow_websocket_upgrade": 0,
"http2_support": 0,
"forward_scheme": "http",
"enabled": 1,
"locations": [],
"hsts_enabled": 0,
"hsts_subdomains": 0,
"owner": {
"id": 1,
"created_on": "2023-03-30T01:11:50.000Z",
"modified_on": "2023-03-30T01:11:50.000Z",
"is_deleted": 0,
"is_disabled": 0,
"email": "admin@example.com",
"name": "Administrator",
"nickname": "Admin",
"avatar": "",
"roles": ["admin"]
},
"access_list": null,
"certificate": null
}
]
}
},
"schema": {
"$ref": "#/components/schemas/ProxyHostsList"
}
}
}
}
}
},
"post": {
"operationId": "createProxyHost",
"summary": "Create a Proxy Host",
"tags": ["Proxy Hosts"],
"security": [
{
"BearerAuth": ["users"]
}
],
"parameters": [
{
"in": "body",
"name": "proxyhost",
"description": "Proxy Host Payload",
"required": true,
"schema": {
"$ref": "#/components/schemas/ProxyHostObject"
}
}
],
"responses": {
"201": {
"description": "201 response",
"content": {
"application/json": {
"examples": {
"default": {
"value": {
"id": 3,
"created_on": "2023-03-30T02:31:27.000Z",
"modified_on": "2023-03-30T02:31:27.000Z",
"owner_user_id": 1,
"domain_names": ["test2.example.com"],
"forward_host": "1.1.1.1",
"forward_port": 80,
"access_list_id": 0,
"certificate_id": 0,
"ssl_forced": 0,
"caching_enabled": 0,
"block_exploits": 0,
"advanced_config": "",
"meta": {
"letsencrypt_agree": false,
"dns_challenge": false
},
"allow_websocket_upgrade": 0,
"http2_support": 0,
"forward_scheme": "http",
"enabled": 1,
"locations": [],
"hsts_enabled": 0,
"hsts_subdomains": 0,
"certificate": null,
"owner": {
"id": 1,
"created_on": "2023-03-30T01:11:50.000Z",
"modified_on": "2023-03-30T01:11:50.000Z",
"is_deleted": 0,
"is_disabled": 0,
"email": "admin@example.com",
"name": "Administrator",
"nickname": "Admin",
"avatar": "",
"roles": ["admin"]
},
"access_list": null,
"use_default_location": true,
"ipv6": true
}
}
},
"schema": {
"$ref": "#/components/schemas/ProxyHostObject"
}
}
}
}
}
}
},
"/schema": {
"get": {
"operationId": "schema",
@@ -259,10 +55,14 @@
"get": {
"operationId": "refreshToken",
"summary": "Refresh your access token",
"tags": ["Tokens"],
"tags": [
"Tokens"
],
"security": [
{
"BearerAuth": ["tokens"]
"BearerAuth": [
"tokens"
]
}
],
"responses": {
@@ -304,14 +104,19 @@
"scope": {
"minLength": 1,
"type": "string",
"enum": ["user"]
"enum": [
"user"
]
},
"secret": {
"minLength": 1,
"type": "string"
}
},
"required": ["identity", "secret"],
"required": [
"identity",
"secret"
],
"type": "object"
}
}
@@ -339,17 +144,23 @@
}
},
"summary": "Request a new access token from credentials",
"tags": ["Tokens"]
"tags": [
"Tokens"
]
}
},
"/settings": {
"get": {
"operationId": "getSettings",
"summary": "Get all settings",
"tags": ["Settings"],
"tags": [
"Settings"
],
"security": [
{
"BearerAuth": ["settings"]
"BearerAuth": [
"settings"
]
}
],
"responses": {
@@ -383,10 +194,14 @@
"get": {
"operationId": "getSetting",
"summary": "Get a setting",
"tags": ["Settings"],
"tags": [
"Settings"
],
"security": [
{
"BearerAuth": ["settings"]
"BearerAuth": [
"settings"
]
}
],
"parameters": [
@@ -429,10 +244,14 @@
"put": {
"operationId": "updateSetting",
"summary": "Update a setting",
"tags": ["Settings"],
"tags": [
"Settings"
],
"security": [
{
"BearerAuth": ["settings"]
"BearerAuth": [
"settings"
]
}
],
"parameters": [
@@ -486,10 +305,14 @@
"get": {
"operationId": "getUsers",
"summary": "Get all users",
"tags": ["Users"],
"tags": [
"Users"
],
"security": [
{
"BearerAuth": ["users"]
"BearerAuth": [
"users"
]
}
],
"parameters": [
@@ -499,7 +322,9 @@
"description": "Expansions",
"schema": {
"type": "string",
"enum": ["permissions"]
"enum": [
"permissions"
]
}
}
],
@@ -520,7 +345,9 @@
"name": "Jamie Curnow",
"nickname": "James",
"avatar": "//www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?default=mm",
"roles": ["admin"]
"roles": [
"admin"
]
}
]
},
@@ -535,7 +362,9 @@
"name": "Jamie Curnow",
"nickname": "James",
"avatar": "//www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?default=mm",
"roles": ["admin"],
"roles": [
"admin"
],
"permissions": {
"visibility": "all",
"proxy_hosts": "manage",
@@ -560,10 +389,14 @@
"post": {
"operationId": "createUser",
"summary": "Create a User",
"tags": ["Users"],
"tags": [
"Users"
],
"security": [
{
"BearerAuth": ["users"]
"BearerAuth": [
"users"
]
}
],
"parameters": [
@@ -593,7 +426,9 @@
"name": "Jamie Curnow",
"nickname": "James",
"avatar": "//www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?default=mm",
"roles": ["admin"],
"roles": [
"admin"
],
"permissions": {
"visibility": "all",
"proxy_hosts": "manage",
@@ -619,10 +454,14 @@
"get": {
"operationId": "getUser",
"summary": "Get a user",
"tags": ["Users"],
"tags": [
"Users"
],
"security": [
{
"BearerAuth": ["users"]
"BearerAuth": [
"users"
]
}
],
"parameters": [
@@ -662,7 +501,9 @@
"name": "Jamie Curnow",
"nickname": "James",
"avatar": "//www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?default=mm",
"roles": ["admin"]
"roles": [
"admin"
]
}
}
},
@@ -677,10 +518,14 @@
"put": {
"operationId": "updateUser",
"summary": "Update a User",
"tags": ["Users"],
"tags": [
"Users"
],
"security": [
{
"BearerAuth": ["users"]
"BearerAuth": [
"users"
]
}
],
"parameters": [
@@ -729,7 +574,9 @@
"name": "Jamie Curnow",
"nickname": "James",
"avatar": "//www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?default=mm",
"roles": ["admin"]
"roles": [
"admin"
]
}
}
},
@@ -744,10 +591,14 @@
"delete": {
"operationId": "deleteUser",
"summary": "Delete a User",
"tags": ["Users"],
"tags": [
"Users"
],
"security": [
{
"BearerAuth": ["users"]
"BearerAuth": [
"users"
]
}
],
"parameters": [
@@ -786,10 +637,14 @@
"put": {
"operationId": "updateUserAuth",
"summary": "Update a User's Authentication",
"tags": ["Users"],
"tags": [
"Users"
],
"security": [
{
"BearerAuth": ["users"]
"BearerAuth": [
"users"
]
}
],
"parameters": [
@@ -845,10 +700,14 @@
"put": {
"operationId": "updateUserPermissions",
"summary": "Update a User's Permissions",
"tags": ["Users"],
"tags": [
"Users"
],
"security": [
{
"BearerAuth": ["users"]
"BearerAuth": [
"users"
]
}
],
"parameters": [
@@ -896,10 +755,14 @@
"put": {
"operationId": "loginAsUser",
"summary": "Login as this user",
"tags": ["Users"],
"tags": [
"Users"
],
"security": [
{
"BearerAuth": ["users"]
"BearerAuth": [
"users"
]
}
],
"parameters": [
@@ -934,7 +797,9 @@
"name": "Jamie Curnow",
"nickname": "James",
"avatar": "//www.gravatar.com/avatar/3c8d73f45fd8763f827b964c76e6032a?default=mm",
"roles": ["admin"]
"roles": [
"admin"
]
}
}
}
@@ -942,7 +807,11 @@
"schema": {
"type": "object",
"description": "Login object",
"required": ["expires", "token", "user"],
"required": [
"expires",
"token",
"user"
],
"additionalProperties": false,
"properties": {
"expires": {
@@ -971,10 +840,14 @@
"get": {
"operationId": "reportsHosts",
"summary": "Report on Host Statistics",
"tags": ["Reports"],
"tags": [
"Reports"
],
"security": [
{
"BearerAuth": ["reports"]
"BearerAuth": [
"reports"
]
}
],
"responses": {
@@ -1005,10 +878,14 @@
"get": {
"operationId": "getAuditLog",
"summary": "Get Audit Log",
"tags": ["Audit Log"],
"tags": [
"Audit Log"
],
"security": [
{
"BearerAuth": ["audit-log"]
"BearerAuth": [
"audit-log"
]
}
],
"responses": {
@@ -1048,7 +925,10 @@
"type": "object",
"description": "Health object",
"additionalProperties": false,
"required": ["status", "version"],
"required": [
"status",
"version"
],
"properties": {
"status": {
"type": "string",
@@ -1064,7 +944,11 @@
"revision": 0
},
"additionalProperties": false,
"required": ["major", "minor", "revision"],
"required": [
"major",
"minor",
"revision"
],
"properties": {
"major": {
"type": "integer",
@@ -1085,7 +969,10 @@
"TokenObject": {
"type": "object",
"description": "Token object",
"required": ["expires", "token"],
"required": [
"expires",
"token"
],
"additionalProperties": false,
"properties": {
"expires": {
@@ -1101,147 +988,16 @@
}
}
},
"ProxyHostObject": {
"type": "object",
"description": "Proxy Host object",
"required": [
"id",
"created_on",
"modified_on",
"owner_user_id",
"domain_names",
"forward_host",
"forward_port",
"access_list_id",
"certificate_id",
"ssl_forced",
"caching_enabled",
"block_exploits",
"advanced_config",
"meta",
"allow_websocket_upgrade",
"http2_support",
"forward_scheme",
"enabled",
"locations",
"hsts_enabled",
"hsts_subdomains",
"certificate",
"use_default_location",
"ipv6"
],
"additionalProperties": false,
"properties": {
"id": {
"type": "integer",
"description": "Proxy Host ID",
"minimum": 1,
"example": 1
},
"created_on": {
"type": "string",
"description": "Created Date",
"example": "2020-01-30T09:36:08.000Z"
},
"modified_on": {
"type": "string",
"description": "Modified Date",
"example": "2020-01-30T09:41:04.000Z"
},
"owner_user_id": {
"type": "integer",
"minimum": 1,
"example": 1
},
"domain_names": {
"type": "array",
"minItems": 1,
"items": {
"type": "string",
"minLength": 1
}
},
"forward_host": {
"type": "string",
"minLength": 1
},
"forward_port": {
"type": "integer",
"minimum": 1
},
"access_list_id": {
"type": "integer"
},
"certificate_id": {
"type": "integer"
},
"ssl_forced": {
"type": "integer"
},
"caching_enabled": {
"type": "integer"
},
"block_exploits": {
"type": "integer"
},
"advanced_config": {
"type": "string"
},
"meta": {
"type": "object"
},
"allow_websocket_upgrade": {
"type": "integer"
},
"http2_support": {
"type": "integer"
},
"forward_scheme": {
"type": "string"
},
"enabled": {
"type": "integer"
},
"locations": {
"type": "array"
},
"hsts_enabled": {
"type": "integer"
},
"hsts_subdomains": {
"type": "integer"
},
"certificate": {
"type": "object",
"nullable": true
},
"owner": {
"type": "object",
"nullable": true
},
"access_list": {
"type": "object",
"nullable": true
},
"use_default_location": {
"type": "boolean"
},
"ipv6": {
"type": "boolean"
}
}
},
"ProxyHostsList": {
"type": "array",
"description": "Proxyn Hosts list",
"items": {
"$ref": "#/components/schemas/ProxyHostObject"
}
},
"SettingObject": {
"type": "object",
"description": "Setting object",
"required": ["id", "name", "description", "value", "meta"],
"required": [
"id",
"name",
"description",
"value",
"meta"
],
"additionalProperties": false,
"properties": {
"id": {
@@ -1301,7 +1057,17 @@
"UserObject": {
"type": "object",
"description": "User object",
"required": ["id", "created_on", "modified_on", "is_disabled", "email", "name", "nickname", "avatar", "roles"],
"required": [
"id",
"created_on",
"modified_on",
"is_disabled",
"email",
"name",
"nickname",
"avatar",
"roles"
],
"additionalProperties": false,
"properties": {
"id": {
@@ -1351,7 +1117,9 @@
},
"roles": {
"description": "Roles applied",
"example": ["admin"],
"example": [
"admin"
],
"type": "array",
"items": {
"type": "string"
@@ -1369,7 +1137,10 @@
"AuthObject": {
"type": "object",
"description": "Authentication Object",
"required": ["type", "secret"],
"required": [
"type",
"secret"
],
"properties": {
"type": {
"type": "string",
@@ -1396,37 +1167,64 @@
"visibility": {
"type": "string",
"description": "Visibility Type",
"enum": ["all", "user"]
"enum": [
"all",
"user"
]
},
"access_lists": {
"type": "string",
"description": "Access Lists Permissions",
"enum": ["hidden", "view", "manage"]
"enum": [
"hidden",
"view",
"manage"
]
},
"dead_hosts": {
"type": "string",
"description": "404 Hosts Permissions",
"enum": ["hidden", "view", "manage"]
"enum": [
"hidden",
"view",
"manage"
]
},
"proxy_hosts": {
"type": "string",
"description": "Proxy Hosts Permissions",
"enum": ["hidden", "view", "manage"]
"enum": [
"hidden",
"view",
"manage"
]
},
"redirection_hosts": {
"type": "string",
"description": "Redirection Permissions",
"enum": ["hidden", "view", "manage"]
"enum": [
"hidden",
"view",
"manage"
]
},
"streams": {
"type": "string",
"description": "Streams Permissions",
"enum": ["hidden", "view", "manage"]
"enum": [
"hidden",
"view",
"manage"
]
},
"certificates": {
"type": "string",
"description": "Certificates Permissions",
"enum": ["hidden", "view", "manage"]
"enum": [
"hidden",
"view",
"manage"
]
}
}
},


@@ -3,6 +3,9 @@
const logger = require('./logger').global;
async function appStart () {
// Create config file db settings if environment variables have been set
await createDbConfigFromEnvironment();
const migrate = require('./migrate');
const setup = require('./setup');
const app = require('./app');
@@ -39,6 +42,89 @@ async function appStart () {
});
}
async function createDbConfigFromEnvironment() {
return new Promise((resolve, reject) => {
const envMysqlHost = process.env.DB_MYSQL_HOST || null;
const envMysqlPort = process.env.DB_MYSQL_PORT || null;
const envMysqlUser = process.env.DB_MYSQL_USER || null;
const envMysqlName = process.env.DB_MYSQL_NAME || null;
const envSqliteFile = process.env.DB_SQLITE_FILE || null;
if ((envMysqlHost && envMysqlPort && envMysqlUser && envMysqlName) || envSqliteFile) {
const fs = require('fs');
const filename = (process.env.NODE_CONFIG_DIR || './config') + '/' + (process.env.NODE_ENV || 'default') + '.json';
let configData = {};
try {
configData = require(filename);
} catch (err) {
// do nothing
}
if (configData.database && configData.database.engine && !configData.database.fromEnv) {
logger.info('Manual db configuration already exists, skipping config creation from environment variables');
resolve();
return;
}
if (envMysqlHost && envMysqlPort && envMysqlUser && envMysqlName) {
const newConfig = {
fromEnv: true,
engine: 'mysql',
host: envMysqlHost,
port: envMysqlPort,
user: envMysqlUser,
password: process.env.DB_MYSQL_PASSWORD,
name: envMysqlName,
};
if (JSON.stringify(configData.database) === JSON.stringify(newConfig)) {
// Config is unchanged, skip overwrite
resolve();
return;
}
logger.info('Generating MySQL db configuration from environment variables');
configData.database = newConfig;
} else {
const newConfig = {
fromEnv: true,
engine: 'knex-native',
knex: {
client: 'sqlite3',
connection: {
filename: envSqliteFile
},
useNullAsDefault: true
}
};
if (JSON.stringify(configData.database) === JSON.stringify(newConfig)) {
// Config is unchanged, skip overwrite
resolve();
return;
}
logger.info('Generating Sqlite db configuration from environment variables');
configData.database = newConfig;
}
// Write config
fs.writeFile(filename, JSON.stringify(configData, null, 2), (err) => {
if (err) {
logger.error('Could not write db config to config file: ' + filename);
reject(err);
} else {
logger.info('Wrote db configuration to config file: ' + filename);
resolve();
}
});
} else {
resolve();
}
});
}
try {
appStart();
} catch (err) {
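
For reference, a sketch of the database block that `createDbConfigFromEnvironment()` above writes into the config file (`./config/default.json` by default, or `<NODE_ENV>.json` when `NODE_ENV` is set) when the `DB_MYSQL_*` variables are present. The values below simply mirror the docker-compose example in the README diff and are placeholders, not real credentials.

```js
// Sketch only: approximate shape of the generated config when the
// DB_MYSQL_* environment variables are set. Values are placeholders that
// mirror the docker-compose example above; port stays a string because it
// comes straight from process.env.
const exampleGeneratedConfig = {
    database: {
        fromEnv:  true,     // marks this block as generated from the environment
        engine:   'mysql',
        host:     'db',
        port:     '3306',
        user:     'npm',
        password: 'npm',
        name:     'npm'
    }
};

// The function writes the file with JSON.stringify(configData, null, 2),
// so on disk it is simply the JSON form of the object above.
console.log(JSON.stringify(exampleGeneratedConfig, null, 2));
```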


@@ -3,13 +3,13 @@ const fs = require('fs');
const batchflow = require('batchflow');
const logger = require('../logger').access;
const error = require('../lib/error');
const utils = require('../lib/utils');
const accessListModel = require('../models/access_list');
const accessListAuthModel = require('../models/access_list_auth');
const accessListClientModel = require('../models/access_list_client');
const proxyHostModel = require('../models/proxy_host');
const internalAuditLog = require('./audit-log');
const internalNginx = require('./nginx');
const utils = require('../lib/utils');
function omissions () {
return ['is_deleted'];
@@ -27,13 +27,13 @@ const internalAccessList = {
.then((/*access_data*/) => {
return accessListModel
.query()
.omit(omissions())
.insertAndFetch({
name: data.name,
satisfy_any: data.satisfy_any,
pass_auth: data.pass_auth,
owner_user_id: access.token.getUserId(1)
})
.then(utils.omitRow(omissions()));
});
})
.then((row) => {
data.id = row.id;
@@ -218,7 +218,7 @@ const internalAccessList = {
// re-fetch with expansions
return internalAccessList.get(access, {
id: data.id,
expand: ['owner', 'items', 'clients', 'proxy_hosts.[certificate,access_list.[clients,items]]']
expand: ['owner', 'items', 'clients', 'proxy_hosts.access_list.[clients,items]']
}, true /* <- skip masking */);
})
.then((row) => {
@@ -256,31 +256,35 @@ const internalAccessList = {
.joinRaw('LEFT JOIN `proxy_host` ON `proxy_host`.`access_list_id` = `access_list`.`id` AND `proxy_host`.`is_deleted` = 0')
.where('access_list.is_deleted', 0)
.andWhere('access_list.id', data.id)
.allowGraph('[owner,items,clients,proxy_hosts.[certificate,access_list.[clients,items]]]')
.allowEager('[owner,items,clients,proxy_hosts.[*, access_list.[clients,items]]]')
.omit(['access_list.is_deleted'])
.first();
if (access_data.permission_visibility !== 'all') {
query.andWhere('access_list.owner_user_id', access.token.getUserId(1));
}
if (typeof data.expand !== 'undefined' && data.expand !== null) {
query.withGraphFetched('[' + data.expand.join(', ') + ']');
// Custom omissions
if (typeof data.omit !== 'undefined' && data.omit !== null) {
query.omit(data.omit);
}
return query.then(utils.omitRow(omissions()));
if (typeof data.expand !== 'undefined' && data.expand !== null) {
query.eager('[' + data.expand.join(', ') + ']');
}
return query;
})
.then((row) => {
if (!row) {
throw new error.ItemNotFoundError(data.id);
}
if (row) {
if (!skip_masking && typeof row.items !== 'undefined' && row.items) {
row = internalAccessList.maskItems(row);
}
// Custom omissions
if (typeof data.omit !== 'undefined' && data.omit !== null) {
row = _.omit(row, data.omit);
return _.omit(row, omissions());
} else {
throw new error.ItemNotFoundError(data.id);
}
return row;
});
},
@@ -377,7 +381,8 @@ const internalAccessList = {
.joinRaw('LEFT JOIN `proxy_host` ON `proxy_host`.`access_list_id` = `access_list`.`id` AND `proxy_host`.`is_deleted` = 0')
.where('access_list.is_deleted', 0)
.groupBy('access_list.id')
.allowGraph('[owner,items,clients]')
.omit(['access_list.is_deleted'])
.allowEager('[owner,items,clients]')
.orderBy('access_list.name', 'ASC');
if (access_data.permission_visibility !== 'all') {
@@ -392,10 +397,10 @@ const internalAccessList = {
}
if (typeof expand !== 'undefined' && expand !== null) {
query.withGraphFetched('[' + expand.join(', ') + ']');
query.eager('[' + expand.join(', ') + ']');
}
return query.then(utils.omitRows(omissions()));
return query;
})
.then((rows) => {
if (rows) {
@@ -502,7 +507,7 @@ const internalAccessList = {
if (typeof item.password !== 'undefined' && item.password.length) {
logger.info('Adding: ' + item.username);
utils.execFile('/usr/bin/htpasswd', ['-b', htpasswd_file, item.username, item.password])
utils.exec('/usr/bin/htpasswd -b "' + htpasswd_file + '" "' + item.username + '" "' + item.password + '"')
.then((/*result*/) => {
next();
})


@@ -19,7 +19,7 @@ const internalAuditLog = {
.orderBy('created_on', 'DESC')
.orderBy('id', 'DESC')
.limit(100)
.allowGraph('[user]');
.allowEager('[user]');
// Query is used for searching
if (typeof search_query === 'string') {
@@ -29,7 +29,7 @@ const internalAuditLog = {
}
if (typeof expand !== 'undefined' && expand !== null) {
query.withGraphFetched('[' + expand.join(', ') + ']');
query.eager('[' + expand.join(', ') + ']');
}
return query;


@@ -1,10 +1,8 @@
const _ = require('lodash');
const fs = require('fs');
const https = require('https');
const tempWrite = require('temp-write');
const moment = require('moment');
const logger = require('../logger').ssl;
const config = require('../lib/config');
const error = require('../lib/error');
const utils = require('../lib/utils');
const certificateModel = require('../models/certificate');
@@ -12,13 +10,11 @@ const dnsPlugins = require('../global/certbot-dns-plugins');
const internalAuditLog = require('./audit-log');
const internalNginx = require('./nginx');
const internalHost = require('./host');
const archiver = require('archiver');
const path = require('path');
const { isArray } = require('lodash');
const letsencryptStaging = config.useLetsencryptStaging();
const letsencryptStaging = process.env.NODE_ENV !== 'production';
const letsencryptConfig = '/etc/letsencrypt.ini';
const certbotCommand = 'certbot';
const archiver = require('archiver');
const path = require('path');
function omissions() {
return ['is_deleted'];
@@ -48,8 +44,6 @@ const internalCertificate = {
const cmd = certbotCommand + ' renew --non-interactive --quiet ' +
'--config "' + letsencryptConfig + '" ' +
'--work-dir "/tmp/letsencrypt-lib" ' +
'--logs-dir "/tmp/letsencrypt-log" ' +
'--preferred-challenges "dns,http" ' +
'--disable-hook-validation ' +
(letsencryptStaging ? '--staging' : '');
@@ -120,13 +114,13 @@
data.owner_user_id = access.token.getUserId(1);
if (data.provider === 'letsencrypt') {
data.nice_name = data.domain_names.join(', ');
data.nice_name = data.domain_names.sort().join(', ');
}
return certificateModel
.query()
.insertAndFetch(data)
.then(utils.omitRow(omissions()));
.omit(omissions())
.insertAndFetch(data);
})
.then((certificate) => {
if (certificate.provider === 'letsencrypt') {
@@ -175,7 +169,6 @@
// 3. Generate the LE config
return internalNginx.generateLetsEncryptRequestConfig(certificate)
.then(internalNginx.reload)
.then(async() => await new Promise((r) => setTimeout(r, 5000)))
.then(() => {
// 4. Request cert
return internalCertificate.requestLetsEncryptSsl(certificate);
@@ -273,8 +266,8 @@
return certificateModel
.query()
.omit(omissions())
.patchAndFetchById(row.id, data)
.then(utils.omitRow(omissions()))
.then((saved_row) => {
saved_row.meta = internalCertificate.cleanMeta(saved_row.meta);
data.meta = internalCertificate.cleanMeta(data.meta);
@ -292,7 +285,7 @@ const internalCertificate = {
meta: _.omit(data, ['expires_on']) // this prevents json circular reference because expires_on might be raw
})
.then(() => {
return saved_row;
return _.omit(saved_row, omissions());
});
});
});
@@ -317,28 +310,30 @@
.query()
.where('is_deleted', 0)
.andWhere('id', data.id)
.allowGraph('[owner]')
.allowEager('[owner]')
.first();
if (access_data.permission_visibility !== 'all') {
query.andWhere('owner_user_id', access.token.getUserId(1));
}
if (typeof data.expand !== 'undefined' && data.expand !== null) {
query.withGraphFetched('[' + data.expand.join(', ') + ']');
}
return query.then(utils.omitRow(omissions()));
})
.then((row) => {
if (!row) {
throw new error.ItemNotFoundError(data.id);
}
// Custom omissions
if (typeof data.omit !== 'undefined' && data.omit !== null) {
row = _.omit(row, data.omit);
query.omit(data.omit);
}
if (typeof data.expand !== 'undefined' && data.expand !== null) {
query.eager('[' + data.expand.join(', ') + ']');
}
return query;
})
.then((row) => {
if (row) {
return _.omit(row, omissions());
} else {
throw new error.ItemNotFoundError(data.id);
}
return row;
});
},
@@ -468,7 +463,8 @@
.query()
.where('is_deleted', 0)
.groupBy('id')
.allowGraph('[owner]')
.omit(['is_deleted'])
.allowEager('[owner]')
.orderBy('nice_name', 'ASC');
if (access_data.permission_visibility !== 'all') {
@@ -478,15 +474,15 @@
// Query is used for searching
if (typeof search_query === 'string') {
query.where(function () {
this.where('nice_name', 'like', '%' + search_query + '%');
this.where('name', 'like', '%' + search_query + '%');
});
}
if (typeof expand !== 'undefined' && expand !== null) {
query.withGraphFetched('[' + expand.join(', ') + ']');
query.eager('[' + expand.join(', ') + ']');
}
return query.then(utils.omitRows(omissions()));
return query;
});
},
@@ -663,6 +659,7 @@
meta: _.clone(row.meta) // Prevent the update method from changing this value that we'll use later
})
.then((certificate) => {
console.log('ROWMETA:', row.meta);
certificate.meta = row.meta;
return internalCertificate.writeCustomCert(certificate);
});
@@ -835,10 +832,8 @@
requestLetsEncryptSsl: (certificate) => {
logger.info('Requesting Let\'sEncrypt certificates for Cert #' + certificate.id + ': ' + certificate.domain_names.join(', '));
const cmd = certbotCommand + ' certonly ' +
const cmd = certbotCommand + ' certonly --non-interactive ' +
'--config "' + letsencryptConfig + '" ' +
'--work-dir "/tmp/letsencrypt-lib" ' +
'--logs-dir "/tmp/letsencrypt-log" ' +
'--cert-name "npm-' + certificate.id + '" ' +
'--agree-tos ' +
'--authenticator webroot ' +
@@ -873,19 +868,13 @@
logger.info(`Requesting Let'sEncrypt certificates via ${dns_plugin.display_name} for Cert #${certificate.id}: ${certificate.domain_names.join(', ')}`);
const credentialsLocation = '/etc/letsencrypt/credentials/credentials-' + certificate.id;
// Escape single quotes and backslashes
const escapedCredentials = certificate.meta.dns_provider_credentials.replaceAll('\'', '\\\'').replaceAll('\\', '\\\\');
const credentialsCmd = 'mkdir -p /etc/letsencrypt/credentials 2> /dev/null; echo \'' + escapedCredentials + '\' > \'' + credentialsLocation + '\' && chmod 600 \'' + credentialsLocation + '\'';
// we call `. /opt/certbot/bin/activate` (`.` is alternative to `source` in dash) to access certbot venv
const prepareCmd = '. /opt/certbot/bin/activate && pip install --no-cache-dir --user ' + dns_plugin.package_name + (dns_plugin.version_requirement || '') + ' ' + dns_plugin.dependencies + ' && deactivate';
const credentialsCmd = 'mkdir -p /etc/letsencrypt/credentials 2> /dev/null; echo \'' + certificate.meta.dns_provider_credentials.replace('\'', '\\\'') + '\' > \'' + credentialsLocation + '\' && chmod 600 \'' + credentialsLocation + '\'';
const prepareCmd = 'pip install ' + dns_plugin.package_name + (dns_plugin.version_requirement || '') + ' ' + dns_plugin.dependencies;
// Whether the plugin has a --<name>-credentials argument
const hasConfigArg = certificate.meta.dns_provider !== 'route53';
let mainCmd = certbotCommand + ' certonly ' +
'--config "' + letsencryptConfig + '" ' +
'--work-dir "/tmp/letsencrypt-lib" ' +
'--logs-dir "/tmp/letsencrypt-log" ' +
let mainCmd = certbotCommand + ' certonly --non-interactive ' +
'--cert-name "npm-' + certificate.id + '" ' +
'--agree-tos ' +
'--email "' + certificate.meta.letsencrypt_email + '" ' +
@@ -980,13 +969,10 @@
renewLetsEncryptSsl: (certificate) => {
logger.info('Renewing Let\'sEncrypt certificates for Cert #' + certificate.id + ': ' + certificate.domain_names.join(', '));
const cmd = certbotCommand + ' renew --force-renewal ' +
const cmd = certbotCommand + ' renew --force-renewal --non-interactive ' +
'--config "' + letsencryptConfig + '" ' +
'--work-dir "/tmp/letsencrypt-lib" ' +
'--logs-dir "/tmp/letsencrypt-log" ' +
'--cert-name "npm-' + certificate.id + '" ' +
'--preferred-challenges "dns,http" ' +
'--no-random-sleep-on-renew ' +
'--disable-hook-validation ' +
(letsencryptStaging ? '--staging' : '');
@@ -1012,13 +998,9 @@
logger.info(`Renewing Let'sEncrypt certificates via ${dns_plugin.display_name} for Cert #${certificate.id}: ${certificate.domain_names.join(', ')}`);
let mainCmd = certbotCommand + ' renew ' +
'--config "' + letsencryptConfig + '" ' +
'--work-dir "/tmp/letsencrypt-lib" ' +
'--logs-dir "/tmp/letsencrypt-log" ' +
let mainCmd = certbotCommand + ' renew --non-interactive ' +
'--cert-name "npm-' + certificate.id + '" ' +
'--disable-hook-validation' +
'--no-random-sleep-on-renew ' +
(letsencryptStaging ? ' --staging' : '');
// Prepend the path to the credentials file as an environment variable
@@ -1044,8 +1026,7 @@
revokeLetsEncryptSsl: (certificate, throw_errors) => {
logger.info('Revoking Let\'sEncrypt certificates for Cert #' + certificate.id + ': ' + certificate.domain_names.join(', '));
const mainCmd = certbotCommand + ' revoke ' +
'--config "' + letsencryptConfig + '" ' +
const mainCmd = certbotCommand + ' revoke --non-interactive ' +
'--cert-path "/etc/letsencrypt/live/npm-' + certificate.id + '/fullchain.pem" ' +
'--delete-after-revoke ' +
(letsencryptStaging ? '--staging' : '');
@@ -1138,94 +1119,6 @@
} else {
return Promise.resolve();
}
},
testHttpsChallenge: async (access, domains) => {
await access.can('certificates:list');
if (!isArray(domains)) {
throw new error.InternalValidationError('Domains must be an array of strings');
}
if (domains.length === 0) {
throw new error.InternalValidationError('No domains provided');
}
// Create a test challenge file
const testChallengeDir = '/data/letsencrypt-acme-challenge/.well-known/acme-challenge';
const testChallengeFile = testChallengeDir + '/test-challenge';
fs.mkdirSync(testChallengeDir, {recursive: true});
fs.writeFileSync(testChallengeFile, 'Success', {encoding: 'utf8'});
async function performTestForDomain (domain) {
logger.info('Testing http challenge for ' + domain);
const url = `http://${domain}/.well-known/acme-challenge/test-challenge`;
const formBody = `method=G&url=${encodeURI(url)}&bodytype=T&requestbody=&headername=User-Agent&headervalue=None&locationid=1&ch=false&cc=false`;
const options = {
method: 'POST',
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
'Content-Length': Buffer.byteLength(formBody)
}
};
const result = await new Promise((resolve) => {
const req = https.request('https://www.site24x7.com/tools/restapi-tester', options, function (res) {
let responseBody = '';
res.on('data', (chunk) => responseBody = responseBody + chunk);
res.on('end', function () {
const parsedBody = JSON.parse(responseBody + '');
if (res.statusCode !== 200) {
logger.warn(`Failed to test HTTP challenge for domain ${domain}`, res);
resolve(undefined);
return;
}
resolve(parsedBody);
});
});
// Make sure to write the request body.
req.write(formBody);
req.end();
req.on('error', function (e) {
logger.warn(`Failed to test HTTP challenge for domain ${domain}`, e);
resolve(undefined);
});
});
if (!result) {
// Some error occurred while trying to get the data
return 'failed';
} else if (`${result.responsecode}` === '200' && result.htmlresponse === 'Success') {
// Server exists and has responded with the correct data
return 'ok';
} else if (`${result.responsecode}` === '200') {
// Server exists but has responded with wrong data
logger.info(`HTTP challenge test failed for domain ${domain} because of invalid returned data:`, result.htmlresponse);
return 'wrong-data';
} else if (`${result.responsecode}` === '404') {
// Server exists but responded with a 404
logger.info(`HTTP challenge test failed for domain ${domain} because code 404 was returned`);
return '404';
} else if (`${result.responsecode}` === '0' || (typeof result.reason === 'string' && result.reason.toLowerCase() === 'host unavailable')) {
// Server does not exist at domain
logger.info(`HTTP challenge test failed for domain ${domain} because the host was not found`);
return 'no-host';
} else {
// Other errors
logger.info(`HTTP challenge test failed for domain ${domain} because code ${result.responsecode} was returned`);
return `other:${result.responsecode}`;
}
}
const results = {};
for (const domain of domains){
results[domain] = await performTestForDomain(domain);
}
// Remove the test challenge file
fs.unlinkSync(testChallengeFile);
return results;
}
};
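Each domain in the request resolves to a short status string ('ok', 'wrong-data', '404', 'no-host', 'failed' or 'other:<code>'), so the promise resolves to a plain map keyed by domain. A hypothetical result, assuming two test domains:
// testHttpsChallenge(access, ['example.com', 'broken.example.com']) might resolve to:
// {
//     'example.com': 'ok',           // challenge file served with the expected body
//     'broken.example.com': '404'    // server reachable, but the challenge path was missing
// }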

View File

@ -1,6 +1,5 @@
const _ = require('lodash');
const error = require('../lib/error');
const utils = require('../lib/utils');
const deadHostModel = require('../models/dead_host');
const internalHost = require('./host');
const internalNginx = require('./nginx');
@ -50,8 +49,8 @@ const internalDeadHost = {
return deadHostModel
.query()
.insertAndFetch(data)
.then(utils.omitRow(omissions()));
.omit(omissions())
.insertAndFetch(data);
})
.then((row) => {
if (create_certificate) {
@ -219,28 +218,31 @@ const internalDeadHost = {
.query()
.where('is_deleted', 0)
.andWhere('id', data.id)
.allowGraph('[owner,certificate]')
.allowEager('[owner,certificate]')
.first();
if (access_data.permission_visibility !== 'all') {
query.andWhere('owner_user_id', access.token.getUserId(1));
}
if (typeof data.expand !== 'undefined' && data.expand !== null) {
query.withGraphFetched('[' + data.expand.join(', ') + ']');
}
return query.then(utils.omitRow(omissions()));
})
.then((row) => {
if (!row) {
throw new error.ItemNotFoundError(data.id);
}
// Custom omissions
if (typeof data.omit !== 'undefined' && data.omit !== null) {
row = _.omit(row, data.omit);
query.omit(data.omit);
}
if (typeof data.expand !== 'undefined' && data.expand !== null) {
query.eager('[' + data.expand.join(', ') + ']');
}
return query;
})
.then((row) => {
if (row) {
row = internalHost.cleanRowCertificateMeta(row);
return _.omit(row, omissions());
} else {
throw new error.ItemNotFoundError(data.id);
}
return row;
});
},
@ -402,7 +404,8 @@ const internalDeadHost = {
.query()
.where('is_deleted', 0)
.groupBy('id')
.allowGraph('[owner,certificate]')
.omit(['is_deleted'])
.allowEager('[owner,certificate]')
.orderBy('domain_names', 'ASC');
if (access_data.permission_visibility !== 'all') {
@ -417,10 +420,10 @@ const internalDeadHost = {
}
if (typeof expand !== 'undefined' && expand !== null) {
query.withGraphFetched('[' + expand.join(', ') + ']');
query.eager('[' + expand.join(', ') + ']');
}
return query.then(utils.omitRows(omissions()));
return query;
})
.then((rows) => {
if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {

View File

@ -2,6 +2,7 @@ const _ = require('lodash');
const proxyHostModel = require('../models/proxy_host');
const redirectionHostModel = require('../models/redirection_host');
const deadHostModel = require('../models/dead_host');
const sslPassthroughHostModel = require('../models/ssl_passthrough_host');
const internalHost = {
@ -81,6 +82,9 @@ const internalHost = {
.query()
.where('is_deleted', 0),
deadHostModel
.query()
.where('is_deleted', 0),
sslPassthroughHostModel
.query()
.where('is_deleted', 0)
];
@ -112,6 +116,12 @@ const internalHost = {
response_object.total_count += response_object.dead_hosts.length;
}
if (promises_results[3]) {
// SSL Passthrough Hosts
response_object.ssl_passthrough_hosts = internalHost._getHostsWithDomains(promises_results[3], domain_names);
response_object.total_count += response_object.ssl_passthrough_hosts.length;
}
return response_object;
});
},
@ -137,7 +147,11 @@ const internalHost = {
deadHostModel
.query()
.where('is_deleted', 0)
.andWhere('domain_names', 'like', '%' + hostname + '%')
.andWhere('domain_names', 'like', '%' + hostname + '%'),
sslPassthroughHostModel
.query()
.where('is_deleted', 0)
.andWhere('domain_name', '=', hostname),
];
return Promise.all(promises)
@ -165,6 +179,13 @@ const internalHost = {
}
}
if (promises_results[3]) {
// SSL Passthrough Hosts
if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[3], ignore_type === 'ssl_passthrough' && ignore_id ? ignore_id : 0)) {
is_taken = true;
}
}
return {
hostname: hostname,
is_taken: is_taken
@ -185,14 +206,21 @@ const internalHost = {
if (existing_rows && existing_rows.length) {
existing_rows.map(function (existing_row) {
existing_row.domain_names.map(function (existing_hostname) {
function checkHostname(existing_hostname) {
// Does this domain match?
if (existing_hostname.toLowerCase() === hostname.toLowerCase()) {
if (!ignore_id || ignore_id !== existing_row.id) {
is_taken = true;
}
}
});
}
if (existing_row.domain_names) {
existing_row.domain_names.map(checkHostname);
} else if (existing_row.domain_name) {
checkHostname(existing_row.domain_name);
}
});
}
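The two branches above exist because SSL passthrough rows store a single domain_name string, while proxy, redirection and 404 hosts store a domain_names array. A rough sketch of the row shapes the check has to handle (values are hypothetical):
// { id: 1, domain_names: ['example.com', 'www.example.com'] }   // proxy/redirection/404 host row
// { id: 7, domain_name: 'passthrough.example.com' }             // SSL passthrough host row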

View File

@ -2,16 +2,13 @@ const https = require('https');
const fs = require('fs');
const logger = require('../logger').ip_ranges;
const error = require('../lib/error');
const utils = require('../lib/utils');
const internalNginx = require('./nginx');
const { Liquid } = require('liquidjs');
const CLOUDFRONT_URL = 'https://ip-ranges.amazonaws.com/ip-ranges.json';
const CLOUDFARE_V4_URL = 'https://www.cloudflare.com/ips-v4';
const CLOUDFARE_V6_URL = 'https://www.cloudflare.com/ips-v6';
const regIpV4 = /^(\d+\.?){4}\/\d+/;
const regIpV6 = /^(([\da-fA-F]+)?:)+\/\d+/;
const internalIpRanges = {
interval_timeout: 1000 * 60 * 60 * 6, // 6 hours
@ -77,14 +74,14 @@ const internalIpRanges = {
return internalIpRanges.fetchUrl(CLOUDFARE_V4_URL);
})
.then((cloudfare_data) => {
let items = cloudfare_data.split('\n').filter((line) => regIpV4.test(line));
let items = cloudfare_data.split('\n');
ip_ranges = [... ip_ranges, ... items];
})
.then(() => {
return internalIpRanges.fetchUrl(CLOUDFARE_V6_URL);
})
.then((cloudfare_data) => {
let items = cloudfare_data.split('\n').filter((line) => regIpV6.test(line));
let items = cloudfare_data.split('\n');
ip_ranges = [... ip_ranges, ... items];
})
.then(() => {
@ -119,7 +116,10 @@ const internalIpRanges = {
* @returns {Promise}
*/
generateConfig: (ip_ranges) => {
const renderEngine = utils.getRenderEngine();
let renderEngine = new Liquid({
root: __dirname + '/../templates/'
});
return new Promise((resolve, reject) => {
let template = null;
let filename = '/etc/nginx/conf.d/include/ip_ranges.conf';

View File

@ -1,9 +1,11 @@
const _ = require('lodash');
const fs = require('fs');
const logger = require('../logger').nginx;
const config = require('../lib/config');
const utils = require('../lib/utils');
const error = require('../lib/error');
const { Liquid } = require('liquidjs');
const passthroughHostModel = require('../models/ssl_passthrough_host');
const debug_mode = process.env.NODE_ENV !== 'production' || !!process.env.DEBUG;
const internalNginx = {
@ -23,17 +25,34 @@ const internalNginx = {
*/
configure: (model, host_type, host) => {
let combined_meta = {};
const sslPassthroughEnabled = internalNginx.sslPassthroughEnabled();
return internalNginx.test()
.then(() => {
// Nginx is OK
// We're deleting this config regardless.
// Don't throw errors, as the file may not exist at all
// Delete the .err file too
return internalNginx.deleteConfig(host_type, host, false, true);
return internalNginx.deleteConfig(host_type, host); // Don't throw errors, as the file may not exist at all
})
.then(() => {
if (host_type === 'ssl_passthrough_host' && !sslPassthroughEnabled){
// ssl passthrough is disabled
const meta = {
nginx_online: false,
nginx_err: 'SSL passthrough is not enabled in environment'
};
return passthroughHostModel
.query()
.where('is_deleted', 0)
.andWhere('enabled', 1)
.patch({
meta
}).then(() => {
return internalNginx.deleteConfig('ssl_passthrough_host', host, false);
});
} else {
return internalNginx.generateConfig(host_type, host);
}
})
.then(() => {
// Test nginx again and update meta with result
@ -45,12 +64,27 @@ const internalNginx = {
nginx_err: null
});
if (host_type === 'ssl_passthrough_host'){
// If passthrough is disabled we have already marked the hosts as offline
if (sslPassthroughEnabled) {
return passthroughHostModel
.query()
.where('is_deleted', 0)
.andWhere('enabled', 1)
.patch({
meta: combined_meta
});
}
return Promise.resolve();
}
return model
.query()
.where('id', host.id)
.patch({
meta: combined_meta
});
})
.catch((err) => {
// Remove the error_log line because it's a docker-ism false positive that doesn't need to be reported.
@ -65,7 +99,7 @@ const internalNginx = {
}
});
if (config.debug()) {
if (debug_mode) {
logger.error('Nginx test failed:', valid_lines.join('\n'));
}
@ -75,15 +109,24 @@ const internalNginx = {
nginx_err: valid_lines.join('\n')
});
if (host_type === 'ssl_passthrough_host'){
return passthroughHostModel
.query()
.where('is_deleted', 0)
.andWhere('enabled', 1)
.patch({
meta: combined_meta
}).then(() => {
return internalNginx.deleteConfig('ssl_passthrough_host', host, true);
});
}
return model
.query()
.where('id', host.id)
.patch({
meta: combined_meta
})
.then(() => {
internalNginx.renameConfigAsError(host_type, host);
})
.then(() => {
return internalNginx.deleteConfig(host_type, host, true);
});
@ -101,7 +144,7 @@ const internalNginx = {
* @returns {Promise}
*/
test: () => {
if (config.debug()) {
if (debug_mode) {
logger.info('Testing Nginx configuration');
}
@ -125,10 +168,15 @@ const internalNginx = {
* @returns {String}
*/
getConfigName: (host_type, host_id) => {
host_type = host_type.replace(new RegExp('-', 'g'), '_');
if (host_type === 'default') {
return '/data/nginx/default_host/site.conf';
} else if (host_type === 'ssl_passthrough_host') {
return '/data/nginx/ssl_passthrough_host/hosts.conf';
}
return '/data/nginx/' + internalNginx.getFileFriendlyHostType(host_type) + '/' + host_id + '.conf';
return '/data/nginx/' + host_type + '/' + host_id + '.conf';
},
/**
@ -137,6 +185,8 @@ const internalNginx = {
* @returns {Promise}
*/
renderLocations: (host) => {
//logger.info('host = ' + JSON.stringify(host, null, 2));
return new Promise((resolve, reject) => {
let template;
@ -147,7 +197,9 @@ const internalNginx = {
return;
}
const renderEngine = utils.getRenderEngine();
let renderer = new Liquid({
root: __dirname + '/../templates/'
});
let renderedLocations = '';
const locationRendering = async () => {
@ -165,8 +217,10 @@ const internalNginx = {
locationCopy.forward_path = `/${splitted.join('/')}`;
}
//logger.info('locationCopy = ' + JSON.stringify(locationCopy, null, 2));
// eslint-disable-next-line
renderedLocations += await renderEngine.parseAndRender(template, locationCopy);
renderedLocations += await renderer.parseAndRender(template, locationCopy);
}
};
@ -181,31 +235,51 @@ const internalNginx = {
* @param {Object} host
* @returns {Promise}
*/
generateConfig: (host_type, host) => {
const nice_host_type = internalNginx.getFileFriendlyHostType(host_type);
generateConfig: async (host_type, host) => {
host_type = host_type.replace(new RegExp('-', 'g'), '_');
if (config.debug()) {
logger.info('Generating ' + nice_host_type + ' Config:', JSON.stringify(host, null, 2));
if (debug_mode) {
logger.info('Generating ' + host_type + ' Config:', host);
}
const renderEngine = utils.getRenderEngine();
// logger.info('host = ' + JSON.stringify(host, null, 2));
let renderEngine = new Liquid({
root: __dirname + '/../templates/'
});
return new Promise((resolve, reject) => {
let template = null;
let filename = internalNginx.getConfigName(nice_host_type, host.id);
let filename = internalNginx.getConfigName(host_type, host.id);
try {
template = fs.readFileSync(__dirname + '/../templates/' + nice_host_type + '.conf', {encoding: 'utf8'});
template = fs.readFileSync(__dirname + '/../templates/' + host_type + '.conf', {encoding: 'utf8'});
} catch (err) {
reject(new error.ConfigurationError(err.message));
return;
throw new error.ConfigurationError(err.message);
}
let locationsPromise;
let origLocations;
// Manipulate the data a bit before sending it to the template
if (nice_host_type !== 'default') {
if (host_type === 'ssl_passthrough_host') {
if (internalNginx.sslPassthroughEnabled()){
const allHosts = await passthroughHostModel
.query()
.where('is_deleted', 0)
.groupBy('id')
.omit(['is_deleted']);
host = {
all_passthrough_hosts: allHosts.map((host) => {
// Wrap IPv6 forwarding hosts in brackets
host.forwarding_host = internalNginx.addIpv6Brackets(host.forwarding_host);
return host;
}),
};
} else {
internalNginx.deleteConfig(host_type, host, false);
}
} else if (host_type !== 'default') {
host.use_default_location = true;
if (typeof host.advanced_config !== 'undefined' && host.advanced_config) {
host.use_default_location = !internalNginx.advancedConfigHasDefaultLocation(host.advanced_config);
@ -233,28 +307,27 @@ const internalNginx = {
// Set the IPv6 setting for the host
host.ipv6 = internalNginx.ipv6Enabled();
locationsPromise.then(() => {
return locationsPromise.then(() => {
renderEngine
.parseAndRender(template, host)
.then((config_text) => {
fs.writeFileSync(filename, config_text, {encoding: 'utf8'});
if (config.debug()) {
if (debug_mode) {
logger.success('Wrote config:', filename, config_text);
}
// Restore locations array
host.locations = origLocations;
resolve(true);
return true;
})
.catch((err) => {
if (config.debug()) {
if (debug_mode) {
logger.warn('Could not write ' + filename + ':', err.message);
}
reject(new error.ConfigurationError(err.message));
});
throw new error.ConfigurationError(err.message);
});
});
},
@ -268,11 +341,13 @@ const internalNginx = {
* @returns {Promise}
*/
generateLetsEncryptRequestConfig: (certificate) => {
if (config.debug()) {
if (debug_mode) {
logger.info('Generating LetsEncrypt Request Config:', certificate);
}
const renderEngine = utils.getRenderEngine();
let renderEngine = new Liquid({
root: __dirname + '/../templates/'
});
return new Promise((resolve, reject) => {
let template = null;
@ -292,14 +367,14 @@ const internalNginx = {
.then((config_text) => {
fs.writeFileSync(filename, config_text, {encoding: 'utf8'});
if (config.debug()) {
if (debug_mode) {
logger.success('Wrote config:', filename, config_text);
}
resolve(true);
})
.catch((err) => {
if (config.debug()) {
if (debug_mode) {
logger.warn('Could not write ' + filename + ':', err.message);
}
@ -308,58 +383,33 @@ const internalNginx = {
});
},
/**
* A simple wrapper around unlinkSync that writes to the logger
*
* @param {String} filename
*/
deleteFile: (filename) => {
logger.debug('Deleting file: ' + filename);
try {
fs.unlinkSync(filename);
} catch (err) {
logger.debug('Could not delete file:', JSON.stringify(err, null, 2));
}
},
/**
*
* @param {String} host_type
* @returns String
*/
getFileFriendlyHostType: (host_type) => {
return host_type.replace(new RegExp('-', 'g'), '_');
},
/**
* This removes the temporary nginx config file generated by `generateLetsEncryptRequestConfig`
*
* @param {Object} certificate
* @param {Boolean} [throw_errors]
* @returns {Promise}
*/
deleteLetsEncryptRequestConfig: (certificate) => {
const config_file = '/data/nginx/temp/letsencrypt_' + certificate.id + '.conf';
return new Promise((resolve/*, reject*/) => {
internalNginx.deleteFile(config_file);
resolve();
});
},
deleteLetsEncryptRequestConfig: (certificate, throw_errors) => {
return new Promise((resolve, reject) => {
try {
let config_file = '/data/nginx/temp/letsencrypt_' + certificate.id + '.conf';
/**
* @param {String} host_type
* @param {Object} [host]
* @param {Boolean} [delete_err_file]
* @returns {Promise}
*/
deleteConfig: (host_type, host, delete_err_file) => {
const config_file = internalNginx.getConfigName(internalNginx.getFileFriendlyHostType(host_type), typeof host === 'undefined' ? 0 : host.id);
const config_file_err = config_file + '.err';
return new Promise((resolve/*, reject*/) => {
internalNginx.deleteFile(config_file);
if (delete_err_file) {
internalNginx.deleteFile(config_file_err);
if (debug_mode) {
logger.warn('Deleting nginx config: ' + config_file);
}
fs.unlinkSync(config_file);
} catch (err) {
if (debug_mode) {
logger.warn('Could not delete config:', err.message);
}
if (throw_errors) {
reject(err);
}
}
resolve();
});
},
@ -367,21 +417,33 @@ const internalNginx = {
/**
* @param {String} host_type
* @param {Object} [host]
* @param {Boolean} [throw_errors]
* @returns {Promise}
*/
renameConfigAsError: (host_type, host) => {
const config_file = internalNginx.getConfigName(internalNginx.getFileFriendlyHostType(host_type), typeof host === 'undefined' ? 0 : host.id);
const config_file_err = config_file + '.err';
deleteConfig: (host_type, host, throw_errors) => {
host_type = host_type.replace(new RegExp('-', 'g'), '_');
return new Promise((resolve, reject) => {
try {
let config_file = internalNginx.getConfigName(host_type, typeof host === 'undefined' ? 0 : host.id);
if (debug_mode) {
logger.warn('Deleting nginx config: ' + config_file);
}
fs.unlinkSync(config_file);
} catch (err) {
if (debug_mode) {
logger.warn('Could not delete config:', err.message);
}
if (throw_errors) {
reject(err);
}
}
return new Promise((resolve/*, reject*/) => {
fs.unlink(config_file, () => {
// ignore result, continue
fs.rename(config_file, config_file_err, () => {
// also ignore result, as this is a debugging informative file anyway
resolve();
});
});
});
},
/**
@ -401,12 +463,13 @@ const internalNginx = {
/**
* @param {String} host_type
* @param {Array} hosts
* @param {Boolean} [throw_errors]
* @returns {Promise}
*/
bulkDeleteConfigs: (host_type, hosts) => {
bulkDeleteConfigs: (host_type, hosts, throw_errors) => {
let promises = [];
hosts.map(function (host) {
promises.push(internalNginx.deleteConfig(host_type, host, true));
promises.push(internalNginx.deleteConfig(host_type, host, throw_errors));
});
return Promise.all(promises);
@ -416,8 +479,8 @@ const internalNginx = {
* @param {string} config
* @returns {boolean}
*/
advancedConfigHasDefaultLocation: function (cfg) {
return !!cfg.match(/^(?:.*;)?\s*?location\s*?\/\s*?{/im);
advancedConfigHasDefaultLocation: function (config) {
return !!config.match(/^(?:.*;)?\s*?location\s*?\/\s*?{/im);
},
/**
@ -430,6 +493,33 @@ const internalNginx = {
}
return true;
},
/**
* @returns {boolean}
*/
sslPassthroughEnabled: function () {
if (typeof process.env.ENABLE_SSL_PASSTHROUGH !== 'undefined') {
const enabled = process.env.ENABLE_SSL_PASSTHROUGH.toLowerCase();
return (enabled === 'on' || enabled === 'true' || enabled === '1' || enabled === 'yes');
}
return false;
},
/**
* Helper function to add brackets to an IP if it is IPv6
* @returns {string}
*/
addIpv6Brackets: function (ip) {
// Only run check if ipv6 is enabled
if (internalNginx.ipv6Enabled()) {
const ipv6Regex = /^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$/gi;
if (ipv6Regex.test(ip)){
return `[${ip}]`;
}
}
return ip;
}
};
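For reference, a small usage sketch of the two helpers above, under the assumption that ENABLE_SSL_PASSTHROUGH is set to one of 'on', 'true', '1' or 'yes' and IPv6 support is not disabled:
// internalNginx.sslPassthroughEnabled();           // -> true
// internalNginx.addIpv6Brackets('2001:db8::10');   // -> '[2001:db8::10]'
// internalNginx.addIpv6Brackets('192.0.2.10');     // -> '192.0.2.10' (returned unchanged)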

View File

@ -1,6 +1,5 @@
const _ = require('lodash');
const error = require('../lib/error');
const utils = require('../lib/utils');
const proxyHostModel = require('../models/proxy_host');
const internalHost = require('./host');
const internalNginx = require('./nginx');
@ -50,8 +49,8 @@ const internalProxyHost = {
return proxyHostModel
.query()
.insertAndFetch(data)
.then(utils.omitRow(omissions()));
.omit(omissions())
.insertAndFetch(data);
})
.then((row) => {
if (create_certificate) {
@ -171,7 +170,6 @@ const internalProxyHost = {
.query()
.where({id: data.id})
.patch(data)
.then(utils.omitRow(omissions()))
.then((saved_row) => {
// Add to audit log
return internalAuditLog.add(access, {
@ -181,7 +179,7 @@ const internalProxyHost = {
meta: data
})
.then(() => {
return saved_row;
return _.omit(saved_row, omissions());
});
});
})
@ -225,29 +223,31 @@ const internalProxyHost = {
.query()
.where('is_deleted', 0)
.andWhere('id', data.id)
.allowGraph('[owner,access_list,access_list.[clients,items],certificate]')
.allowEager('[owner,access_list,access_list.[clients,items],certificate]')
.first();
if (access_data.permission_visibility !== 'all') {
query.andWhere('owner_user_id', access.token.getUserId(1));
}
if (typeof data.expand !== 'undefined' && data.expand !== null) {
query.withGraphFetched('[' + data.expand.join(', ') + ']');
}
return query.then(utils.omitRow(omissions()));
})
.then((row) => {
if (!row) {
throw new error.ItemNotFoundError(data.id);
}
row = internalHost.cleanRowCertificateMeta(row);
// Custom omissions
if (typeof data.omit !== 'undefined' && data.omit !== null) {
row = _.omit(row, data.omit);
query.omit(data.omit);
}
if (typeof data.expand !== 'undefined' && data.expand !== null) {
query.eager('[' + data.expand.join(', ') + ']');
}
return query;
})
.then((row) => {
if (row) {
row = internalHost.cleanRowCertificateMeta(row);
return _.omit(row, omissions());
} else {
throw new error.ItemNotFoundError(data.id);
}
return row;
});
},
@ -409,7 +409,8 @@ const internalProxyHost = {
.query()
.where('is_deleted', 0)
.groupBy('id')
.allowGraph('[owner,access_list,certificate]')
.omit(['is_deleted'])
.allowEager('[owner,access_list,certificate]')
.orderBy('domain_names', 'ASC');
if (access_data.permission_visibility !== 'all') {
@ -424,10 +425,10 @@ const internalProxyHost = {
}
if (typeof expand !== 'undefined' && expand !== null) {
query.withGraphFetched('[' + expand.join(', ') + ']');
query.eager('[' + expand.join(', ') + ']');
}
return query.then(utils.omitRows(omissions()));
return query;
})
.then((rows) => {
if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {

View File

@ -1,6 +1,5 @@
const _ = require('lodash');
const error = require('../lib/error');
const utils = require('../lib/utils');
const redirectionHostModel = require('../models/redirection_host');
const internalHost = require('./host');
const internalNginx = require('./nginx');
@ -50,8 +49,8 @@ const internalRedirectionHost = {
return redirectionHostModel
.query()
.insertAndFetch(data)
.then(utils.omitRow(omissions()));
.omit(omissions())
.insertAndFetch(data);
})
.then((row) => {
if (create_certificate) {
@ -66,8 +65,9 @@ const internalRedirectionHost = {
.then(() => {
return row;
});
}
} else {
return row;
}
})
.then((row) => {
// re-fetch with cert
@ -218,29 +218,31 @@ const internalRedirectionHost = {
.query()
.where('is_deleted', 0)
.andWhere('id', data.id)
.allowGraph('[owner,certificate]')
.allowEager('[owner,certificate]')
.first();
if (access_data.permission_visibility !== 'all') {
query.andWhere('owner_user_id', access.token.getUserId(1));
}
if (typeof data.expand !== 'undefined' && data.expand !== null) {
query.withGraphFetched('[' + data.expand.join(', ') + ']');
}
return query.then(utils.omitRow(omissions()));
})
.then((row) => {
if (!row) {
throw new error.ItemNotFoundError(data.id);
}
row = internalHost.cleanRowCertificateMeta(row);
// Custom omissions
if (typeof data.omit !== 'undefined' && data.omit !== null) {
row = _.omit(row, data.omit);
query.omit(data.omit);
}
if (typeof data.expand !== 'undefined' && data.expand !== null) {
query.eager('[' + data.expand.join(', ') + ']');
}
return query;
})
.then((row) => {
if (row) {
row = internalHost.cleanRowCertificateMeta(row);
return _.omit(row, omissions());
} else {
throw new error.ItemNotFoundError(data.id);
}
return row;
});
},
@ -402,7 +404,8 @@ const internalRedirectionHost = {
.query()
.where('is_deleted', 0)
.groupBy('id')
.allowGraph('[owner,certificate]')
.omit(['is_deleted'])
.allowEager('[owner,certificate]')
.orderBy('domain_names', 'ASC');
if (access_data.permission_visibility !== 'all') {
@ -417,10 +420,10 @@ const internalRedirectionHost = {
}
if (typeof expand !== 'undefined' && expand !== null) {
query.withGraphFetched('[' + expand.join(', ') + ']');
query.eager('[' + expand.join(', ') + ']');
}
return query.then(utils.omitRows(omissions()));
return query;
})
.then((rows) => {
if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {

View File

@ -0,0 +1,365 @@
const _ = require('lodash');
const error = require('../lib/error');
const passthroughHostModel = require('../models/ssl_passthrough_host');
const internalHost = require('./host');
const internalNginx = require('./nginx');
const internalAuditLog = require('./audit-log');
function omissions () {
return ['is_deleted'];
}
const internalPassthroughHost = {
/**
* @param {Access} access
* @param {Object} data
* @returns {Promise}
*/
create: (access, data) => {
return access.can('ssl_passthrough_hosts:create', data)
.then(() => {
// Get the domain name and check it against existing records
return internalHost.isHostnameTaken(data.domain_name)
.then((result) => {
if (result.is_taken) {
throw new error.ValidationError(result.hostname + ' is already in use');
}
});
}).then((/*access_data*/) => {
data.owner_user_id = access.token.getUserId(1);
if (typeof data.meta === 'undefined') {
data.meta = {};
}
return passthroughHostModel
.query()
.omit(omissions())
.insertAndFetch(data);
})
.then((row) => {
// Configure nginx
return internalNginx.configure(passthroughHostModel, 'ssl_passthrough_host', {})
.then(() => {
return internalPassthroughHost.get(access, {id: row.id, expand: ['owner']});
});
})
.then((row) => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'created',
object_type: 'ssl-passthrough-host',
object_id: row.id,
meta: data
})
.then(() => {
return row;
});
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Number} data.id
* @return {Promise}
*/
update: (access, data) => {
return access.can('ssl_passthrough_hosts:update', data.id)
.then((/*access_data*/) => {
// Get the domain name and check it against existing records
if (typeof data.domain_name !== 'undefined') {
return internalHost.isHostnameTaken(data.domain_name, 'ssl_passthrough', data.id)
.then((result) => {
if (result.is_taken) {
throw new error.ValidationError(result.hostname + ' is already in use');
}
});
}
}).then((/*access_data*/) => {
return internalPassthroughHost.get(access, {id: data.id});
})
.then((row) => {
if (row.id !== data.id) {
// Sanity check that something crazy hasn't happened
throw new error.InternalValidationError('SSL Passthrough Host could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
}
return passthroughHostModel
.query()
.omit(omissions())
.patchAndFetchById(row.id, data)
.then(() => {
return internalNginx.configure(passthroughHostModel, 'ssl_passthrough_host', {})
.then(() => {
return internalPassthroughHost.get(access, {id: row.id, expand: ['owner']});
});
})
.then((saved_row) => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'updated',
object_type: 'ssl-passthrough-host',
object_id: row.id,
meta: data
})
.then(() => {
return _.omit(saved_row, omissions());
});
});
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Number} data.id
* @param {Array} [data.expand]
* @param {Array} [data.omit]
* @return {Promise}
*/
get: (access, data) => {
if (typeof data === 'undefined') {
data = {};
}
return access.can('ssl_passthrough_hosts:get', data.id)
.then((access_data) => {
let query = passthroughHostModel
.query()
.where('is_deleted', 0)
.andWhere('id', data.id)
.allowEager('[owner]')
.first();
if (access_data.permission_visibility !== 'all') {
query.andWhere('owner_user_id', access.token.getUserId(1));
}
// Custom omissions
if (typeof data.omit !== 'undefined' && data.omit !== null) {
query.omit(data.omit);
}
if (typeof data.expand !== 'undefined' && data.expand !== null) {
query.eager('[' + data.expand.join(', ') + ']');
}
return query;
})
.then((row) => {
if (row) {
return _.omit(row, omissions());
} else {
throw new error.ItemNotFoundError(data.id);
}
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Number} data.id
* @param {String} [data.reason]
* @returns {Promise}
*/
delete: (access, data) => {
return access.can('ssl_passthrough_hosts:delete', data.id)
.then(() => {
return internalPassthroughHost.get(access, {id: data.id});
})
.then((row) => {
if (!row) {
throw new error.ItemNotFoundError(data.id);
}
return passthroughHostModel
.query()
.where('id', row.id)
.patch({
is_deleted: 1
})
.then(() => {
// Update Nginx Config
return internalNginx.configure(passthroughHostModel, 'ssl_passthrough_host', {})
.then(() => {
return internalNginx.reload();
});
})
.then(() => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'deleted',
object_type: 'ssl-passthrough-host',
object_id: row.id,
meta: _.omit(row, omissions())
});
});
})
.then(() => {
return true;
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Number} data.id
* @param {String} [data.reason]
* @returns {Promise}
*/
enable: (access, data) => {
return access.can('ssl_passthrough_hosts:update', data.id)
.then(() => {
return internalPassthroughHost.get(access, {
id: data.id,
expand: ['owner']
});
})
.then((row) => {
if (!row) {
throw new error.ItemNotFoundError(data.id);
} else if (row.enabled) {
throw new error.ValidationError('Host is already enabled');
}
row.enabled = 1;
return passthroughHostModel
.query()
.where('id', row.id)
.patch({
enabled: 1
})
.then(() => {
// Configure nginx
return internalNginx.configure(passthroughHostModel, 'ssl_passthrough_host', {});
})
.then(() => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'enabled',
object_type: 'ssl-passthrough-host',
object_id: row.id,
meta: _.omit(row, omissions())
});
});
})
.then(() => {
return true;
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Number} data.id
* @param {String} [data.reason]
* @returns {Promise}
*/
disable: (access, data) => {
return access.can('ssl_passthrough_hosts:update', data.id)
.then(() => {
return internalPassthroughHost.get(access, {id: data.id});
})
.then((row) => {
if (!row) {
throw new error.ItemNotFoundError(data.id);
} else if (!row.enabled) {
throw new error.ValidationError('Host is already disabled');
}
row.enabled = 0;
return passthroughHostModel
.query()
.where('id', row.id)
.patch({
enabled: 0
})
.then(() => {
// Update Nginx Config
return internalNginx.configure(passthroughHostModel, 'ssl_passthrough_host', {})
.then(() => {
return internalNginx.reload();
});
})
.then(() => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'disabled',
object_type: 'ssl-passthrough-host',
object_id: row.id,
meta: _.omit(row, omissions())
});
});
})
.then(() => {
return true;
});
},
/**
* All SSL Passthrough Hosts
*
* @param {Access} access
* @param {Array} [expand]
* @param {String} [search_query]
* @returns {Promise}
*/
getAll: (access, expand, search_query) => {
return access.can('ssl_passthrough_hosts:list')
.then((access_data) => {
let query = passthroughHostModel
.query()
.where('is_deleted', 0)
.groupBy('id')
.omit(['is_deleted'])
.allowEager('[owner]')
.orderBy('domain_name', 'ASC');
if (access_data.permission_visibility !== 'all') {
query.andWhere('owner_user_id', access.token.getUserId(1));
}
// Query is used for searching
if (typeof search_query === 'string') {
query.where(function () {
this.where('domain_name', 'like', '%' + search_query + '%');
});
}
if (typeof expand !== 'undefined' && expand !== null) {
query.eager('[' + expand.join(', ') + ']');
}
return query;
});
},
/**
* Report use
*
* @param {Number} user_id
* @param {String} visibility
* @returns {Promise}
*/
getCount: (user_id, visibility) => {
let query = passthroughHostModel
.query()
.count('id as count')
.where('is_deleted', 0);
if (visibility !== 'all') {
query.andWhere('owner_user_id', user_id);
}
return query.first()
.then((row) => {
return parseInt(row.count, 10);
});
}
};
module.exports = internalPassthroughHost;
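A hypothetical payload for internalPassthroughHost.create(), matching the columns defined by the ssl_passthrough_host migration (all values illustrative):
// internalPassthroughHost.create(access, {
//     domain_name: 'secure.example.com',
//     forwarding_host: '2001:db8::10',   // IPv6 targets get bracketed when rendered into the nginx config
//     forwarding_port: 8443,
//     meta: {}
// });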

View File

@ -1,6 +1,5 @@
const _ = require('lodash');
const error = require('../lib/error');
const utils = require('../lib/utils');
const streamModel = require('../models/stream');
const internalNginx = require('./nginx');
const internalAuditLog = require('./audit-log');
@ -28,8 +27,8 @@ const internalStream = {
return streamModel
.query()
.insertAndFetch(data)
.then(utils.omitRow(omissions()));
.omit(omissions())
.insertAndFetch(data);
})
.then((row) => {
// Configure nginx
@ -72,8 +71,8 @@ const internalStream = {
return streamModel
.query()
.omit(omissions())
.patchAndFetchById(row.id, data)
.then(utils.omitRow(omissions()))
.then((saved_row) => {
return internalNginx.configure(streamModel, 'stream', saved_row)
.then(() => {
@ -89,7 +88,7 @@ const internalStream = {
meta: data
})
.then(() => {
return saved_row;
return _.omit(saved_row, omissions());
});
});
});
@ -114,28 +113,30 @@ const internalStream = {
.query()
.where('is_deleted', 0)
.andWhere('id', data.id)
.allowGraph('[owner]')
.allowEager('[owner]')
.first();
if (access_data.permission_visibility !== 'all') {
query.andWhere('owner_user_id', access.token.getUserId(1));
}
if (typeof data.expand !== 'undefined' && data.expand !== null) {
query.withGraphFetched('[' + data.expand.join(', ') + ']');
}
return query.then(utils.omitRow(omissions()));
})
.then((row) => {
if (!row) {
throw new error.ItemNotFoundError(data.id);
}
// Custom omissions
if (typeof data.omit !== 'undefined' && data.omit !== null) {
row = _.omit(row, data.omit);
query.omit(data.omit);
}
if (typeof data.expand !== 'undefined' && data.expand !== null) {
query.eager('[' + data.expand.join(', ') + ']');
}
return query;
})
.then((row) => {
if (row) {
return _.omit(row, omissions());
} else {
throw new error.ItemNotFoundError(data.id);
}
return row;
});
},
@ -297,7 +298,8 @@ const internalStream = {
.query()
.where('is_deleted', 0)
.groupBy('id')
.allowGraph('[owner]')
.omit(['is_deleted'])
.allowEager('[owner]')
.orderBy('incoming_port', 'ASC');
if (access_data.permission_visibility !== 'all') {
@ -312,10 +314,10 @@ const internalStream = {
}
if (typeof expand !== 'undefined' && expand !== null) {
query.withGraphFetched('[' + expand.join(', ') + ']');
query.eager('[' + expand.join(', ') + ']');
}
return query.then(utils.omitRows(omissions()));
return query;
});
},

View File

@ -24,7 +24,7 @@ module.exports = {
return userModel
.query()
.where('email', data.identity.toLowerCase().trim())
.where('email', data.identity)
.andWhere('is_deleted', 0)
.andWhere('is_disabled', 0)
.first()

View File

@ -1,6 +1,5 @@
const _ = require('lodash');
const error = require('../lib/error');
const utils = require('../lib/utils');
const userModel = require('../models/user');
const userPermissionModel = require('../models/user_permission');
const authModel = require('../models/auth');
@ -36,8 +35,8 @@ const internalUser = {
return userModel
.query()
.insertAndFetch(data)
.then(utils.omitRow(omissions()));
.omit(omissions())
.insertAndFetch(data);
})
.then((user) => {
if (auth) {
@ -68,6 +67,7 @@ const internalUser = {
proxy_hosts: 'manage',
redirection_hosts: 'manage',
dead_hosts: 'manage',
ssl_passthrough_hosts: 'manage',
streams: 'manage',
access_lists: 'manage',
certificates: 'manage'
@ -141,8 +141,11 @@ const internalUser = {
return userModel
.query()
.omit(omissions())
.patchAndFetchById(user.id, data)
.then(utils.omitRow(omissions()));
.then((saved_user) => {
return _.omit(saved_user, omissions());
});
})
.then(() => {
return internalUser.get(access, {id: data.id});
@ -184,24 +187,26 @@ const internalUser = {
.query()
.where('is_deleted', 0)
.andWhere('id', data.id)
.allowGraph('[permissions]')
.allowEager('[permissions]')
.first();
if (typeof data.expand !== 'undefined' && data.expand !== null) {
query.withGraphFetched('[' + data.expand.join(', ') + ']');
}
return query.then(utils.omitRow(omissions()));
})
.then((row) => {
if (!row) {
throw new error.ItemNotFoundError(data.id);
}
// Custom omissions
if (typeof data.omit !== 'undefined' && data.omit !== null) {
row = _.omit(row, data.omit);
query.omit(data.omit);
}
if (typeof data.expand !== 'undefined' && data.expand !== null) {
query.eager('[' + data.expand.join(', ') + ']');
}
return query;
})
.then((row) => {
if (row) {
return _.omit(row, omissions());
} else {
throw new error.ItemNotFoundError(data.id);
}
return row;
});
},
@ -318,7 +323,8 @@ const internalUser = {
.query()
.where('is_deleted', 0)
.groupBy('id')
.allowGraph('[permissions]')
.omit(['is_deleted'])
.allowEager('[permissions]')
.orderBy('name', 'ASC');
// Query is used for searching
@ -330,10 +336,10 @@ const internalUser = {
}
if (typeof expand !== 'undefined' && expand !== null) {
query.withGraphFetched('[' + expand.join(', ') + ']');
query.eager('[' + expand.join(', ') + ']');
}
return query.then(utils.omitRows(omissions()));
return query;
});
},

View File

@ -55,8 +55,8 @@ module.exports = function (token_string) {
.where('id', token_data.attrs.id)
.andWhere('is_deleted', 0)
.andWhere('is_disabled', 0)
.allowGraph('[permissions]')
.withGraphFetched('[permissions]')
.allowEager('[permissions]')
.eager('[permissions]')
.first()
.then((user) => {
if (user) {

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_ssl_passthrough_hosts", "roles"],
"properties": {
"permission_ssl_passthrough_hosts": {
"$ref": "perms#/definitions/manage"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_ssl_passthrough_hosts", "roles"],
"properties": {
"permission_ssl_passthrough_hosts": {
"$ref": "perms#/definitions/manage"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_ssl_passthrough_hosts", "roles"],
"properties": {
"permission_ssl_passthrough_hosts": {
"$ref": "perms#/definitions/view"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_ssl_passthrough_hosts", "roles"],
"properties": {
"permission_ssl_passthrough_hosts": {
"$ref": "perms#/definitions/view"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_ssl_passthrough_hosts", "roles"],
"properties": {
"permission_ssl_passthrough_hosts": {
"$ref": "perms#/definitions/manage"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -1,184 +0,0 @@
const fs = require('fs');
const NodeRSA = require('node-rsa');
const logger = require('../logger').global;
const keysFile = '/data/keys.json';
let instance = null;
// 1. Load from config file first (not recommended anymore)
// 2. Use config env variables next
const configure = () => {
const filename = (process.env.NODE_CONFIG_DIR || './config') + '/' + (process.env.NODE_ENV || 'default') + '.json';
if (fs.existsSync(filename)) {
let configData;
try {
configData = require(filename);
} catch (err) {
// do nothing
}
if (configData && configData.database) {
logger.info(`Using configuration from file: ${filename}`);
instance = configData;
instance.keys = getKeys();
return;
}
}
const envMysqlHost = process.env.DB_MYSQL_HOST || null;
const envMysqlUser = process.env.DB_MYSQL_USER || null;
const envMysqlName = process.env.DB_MYSQL_NAME || null;
if (envMysqlHost && envMysqlUser && envMysqlName) {
// we have enough mysql creds to go with mysql
logger.info('Using MySQL configuration');
instance = {
database: {
engine: 'mysql',
host: envMysqlHost,
port: process.env.DB_MYSQL_PORT || 3306,
user: envMysqlUser,
password: process.env.DB_MYSQL_PASSWORD,
name: envMysqlName,
},
keys: getKeys(),
};
return;
}
const envSqliteFile = process.env.DB_SQLITE_FILE || '/data/database.sqlite';
logger.info(`Using Sqlite: ${envSqliteFile}`);
instance = {
database: {
engine: 'knex-native',
knex: {
client: 'sqlite3',
connection: {
filename: envSqliteFile
},
useNullAsDefault: true
}
},
keys: getKeys(),
};
};
const getKeys = () => {
// Get keys from file
if (!fs.existsSync(keysFile)) {
generateKeys();
} else if (process.env.DEBUG) {
logger.info('Keys file exists OK');
}
try {
return require(keysFile);
} catch (err) {
logger.error('Could not read JWT key pair from config file: ' + keysFile, err);
process.exit(1);
}
};
const generateKeys = () => {
logger.info('Creating a new JWT key pair...');
// Now create the keys and save them in the config.
const key = new NodeRSA({ b: 2048 });
key.generateKeyPair();
const keys = {
key: key.exportKey('private').toString(),
pub: key.exportKey('public').toString(),
};
// Write keys config
try {
fs.writeFileSync(keysFile, JSON.stringify(keys, null, 2));
} catch (err) {
logger.error('Could not write JWT key pair to config file: ' + keysFile + ': ' + err.message);
process.exit(1);
}
logger.info('Wrote JWT key pair to config file: ' + keysFile);
};
module.exports = {
/**
*
* @param {string} key ie: 'database' or 'database.engine'
* @returns {boolean}
*/
has: function(key) {
instance === null && configure();
const keys = key.split('.');
let level = instance;
let has = true;
keys.forEach((keyItem) =>{
if (typeof level[keyItem] === 'undefined') {
has = false;
} else {
level = level[keyItem];
}
});
return has;
},
/**
* Gets a specific key from the top level
*
* @param {string} key
* @returns {*}
*/
get: function (key) {
instance === null && configure();
if (key && typeof instance[key] !== 'undefined') {
return instance[key];
}
return instance;
},
/**
* Is this a sqlite configuration?
*
* @returns {boolean}
*/
isSqlite: function () {
instance === null && configure();
return instance.database.knex && instance.database.knex.client === 'sqlite3';
},
/**
* Are we running in debug mode?
*
* @returns {boolean}
*/
debug: function () {
return !!process.env.DEBUG;
},
/**
* Returns a public key
*
* @returns {string}
*/
getPublicKey: function () {
instance === null && configure();
return instance.keys.pub;
},
/**
* Returns a private key
*
* @returns {string}
*/
getPrivateKey: function () {
instance === null && configure();
return instance.keys.key;
},
/**
* @returns {boolean}
*/
useLetsencryptStaging: function () {
return !!process.env.LE_STAGING;
}
};

View File

@ -1,8 +1,4 @@
const _ = require('lodash');
const exec = require('child_process').exec;
const execFile = require('child_process').execFile;
const { Liquid } = require('liquidjs');
const logger = require('../logger').global;
module.exports = {
@ -20,82 +16,5 @@ module.exports = {
}
});
});
},
/**
* @param {String} cmd
* @param {Array} args
* @returns {Promise}
*/
execFile: function (cmd, args) {
logger.debug('CMD: ' + cmd + ' ' + (args ? args.join(' ') : ''));
return new Promise((resolve, reject) => {
execFile(cmd, args, function (err, stdout, /*stderr*/) {
if (err && typeof err === 'object') {
reject(err);
} else {
resolve(stdout.trim());
}
});
});
},
/**
* Used in objection query builder
*
* @param {Array} omissions
* @returns {Function}
*/
omitRow: function (omissions) {
/**
* @param {Object} row
* @returns {Object}
*/
return (row) => {
return _.omit(row, omissions);
};
},
/**
* Used in objection query builder
*
* @param {Array} omissions
* @returns {Function}
*/
omitRows: function (omissions) {
/**
* @param {Array} rows
* @returns {Object}
*/
return (rows) => {
rows.forEach((row, idx) => {
rows[idx] = _.omit(row, omissions);
});
return rows;
};
},
/**
* @returns {Object} Liquid render engine
*/
getRenderEngine: function () {
const renderEngine = new Liquid({
root: __dirname + '/../templates/'
});
/**
* nginxAccessRule expects the object given to have 2 properties:
*
* directive string
* address string
*/
renderEngine.registerFilter('nginxAccessRule', (v) => {
if (typeof v.directive !== 'undefined' && typeof v.address !== 'undefined' && v.directive && v.address) {
return `${v.directive} ${v.address};`;
}
return '';
});
return renderEngine;
}
};

View File

@ -5,7 +5,7 @@ const definitions = require('../../schema/definitions.json');
RegExp.prototype.toJSON = RegExp.prototype.toString;
const ajv = require('ajv')({
verbose: true,
verbose: true, //process.env.NODE_ENV === 'development',
allErrors: true,
format: 'full', // strict regexes for format checks
coerceTypes: true,

View File

@ -0,0 +1,85 @@
const migrate_name = 'ssl_passthrough_host';
const logger = require('../logger').migrate;
/**
* Migrate
*
* @see http://knexjs.org/#Schema
*
* @param {Object} knex
* @param {Promise} Promise
* @returns {Promise}
*/
exports.up = async function (knex/*, Promise*/) {
logger.info('[' + migrate_name + '] Migrating Up...');
await knex.schema.createTable('ssl_passthrough_host', (table) => {
table.increments().primary();
table.dateTime('created_on').notNull();
table.dateTime('modified_on').notNull();
table.integer('owner_user_id').notNull().unsigned();
table.integer('is_deleted').notNull().unsigned().defaultTo(0);
table.string('domain_name').notNull();
table.string('forwarding_host').notNull();
table.integer('forwarding_port').notNull().unsigned();
table.integer('enabled').notNull().unsigned().defaultTo(1);
table.json('meta').notNull();
});
logger.info('[' + migrate_name + '] Table created');
// Remove unique constraint so name can be used for new table
await knex.schema.alterTable('user_permission', (table) => {
table.dropUnique('user_id');
});
await knex.schema.renameTable('user_permission', 'user_permission_old');
// We need to recreate the table since sqlite does not support altering columns
await knex.schema.createTable('user_permission', (table) => {
table.increments().primary();
table.dateTime('created_on').notNull();
table.dateTime('modified_on').notNull();
table.integer('user_id').notNull().unsigned();
table.string('visibility').notNull();
table.string('proxy_hosts').notNull();
table.string('redirection_hosts').notNull();
table.string('dead_hosts').notNull();
table.string('streams').notNull();
table.string('ssl_passthrough_hosts').notNull();
table.string('access_lists').notNull();
table.string('certificates').notNull();
table.unique('user_id');
});
await knex('user_permission_old').select('*', 'streams as ssl_passthrough_hosts').then((data) => {
if (data.length) {
return knex('user_permission').insert(data);
}
return Promise.resolve();
});
await knex.schema.dropTableIfExists('user_permission_old');
logger.info('[' + migrate_name + '] permissions updated');
};
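The select/insert pair above copies every old permission row and seeds the new ssl_passthrough_hosts permission from the existing streams value; for example, a hypothetical old row
// { user_id: 1, visibility: 'all', streams: 'manage', ... }
is re-inserted into the rebuilt table as
// { user_id: 1, visibility: 'all', streams: 'manage', ssl_passthrough_hosts: 'manage', ... }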
/**
* Undo Migrate
*
* @param {Object} knex
* @param {Promise} Promise
* @returns {Promise}
*/
exports.down = function (knex/*, Promise*/) {
logger.info('[' + migrate_name + '] Migrating Down...');
return knex.schema.dropTable('ssl_passthrough_host').then(() => {
return knex.schema.table('user_permission', (table) => {
table.dropColumn('ssl_passthrough_hosts');
});
})
.then(function () {
logger.info('[' + migrate_name + '] Table altered and permissions updated');
});
};

View File

@ -1,50 +0,0 @@
const migrate_name = 'stream_domain';
const logger = require('../logger').migrate;
const internalNginx = require('../internal/nginx');
async function regenerateDefaultHost(knex) {
const row = await knex('setting').select('*').where('id', 'default-site').first();
if (!row) {
return Promise.resolve();
}
return internalNginx.deleteConfig('default')
.then(() => {
return internalNginx.generateConfig('default', row);
})
.then(() => {
return internalNginx.test();
})
.then(() => {
return internalNginx.reload();
});
}
/**
* Migrate
*
* @see http://knexjs.org/#Schema
*
* @param {Object} knex
* @param {Promise} Promise
* @returns {Promise}
*/
exports.up = function (knex) {
logger.info('[' + migrate_name + '] Migrating Up...');
return regenerateDefaultHost(knex);
};
/**
* Undo Migrate
*
* @param {Object} knex
* @param {Promise} Promise
* @returns {Promise}
*/
exports.down = function (knex) {
logger.info('[' + migrate_name + '] Migrating Down...');
return regenerateDefaultHost(knex);
};

View File

@ -50,6 +50,7 @@ class AccessList extends Model {
},
modify: function (qb) {
qb.where('user.is_deleted', 0);
qb.omit(['id', 'created_on', 'modified_on', 'is_deleted', 'email', 'roles']);
}
},
items: {
@ -58,6 +59,9 @@ class AccessList extends Model {
join: {
from: 'access_list.id',
to: 'access_list_auth.access_list_id'
},
modify: function (qb) {
qb.omit(['id', 'created_on', 'modified_on', 'access_list_id', 'meta']);
}
},
clients: {
@ -66,6 +70,9 @@ class AccessList extends Model {
join: {
from: 'access_list.id',
to: 'access_list_client.access_list_id'
},
modify: function (qb) {
qb.omit(['id', 'created_on', 'modified_on', 'access_list_id', 'meta']);
}
},
proxy_hosts: {
@ -77,10 +84,19 @@ class AccessList extends Model {
},
modify: function (qb) {
qb.where('proxy_host.is_deleted', 0);
qb.omit(['is_deleted', 'meta']);
}
}
};
}
get satisfy() {
return this.satisfy_any ? 'satisfy any' : 'satisfy all';
}
get passauth() {
return this.pass_auth ? '' : 'proxy_set_header Authorization "";';
}
}
module.exports = AccessList;

View File

@ -45,6 +45,7 @@ class AccessListAuth extends Model {
},
modify: function (qb) {
qb.where('access_list.is_deleted', 0);
qb.omit(['created_on', 'modified_on', 'is_deleted', 'access_list_id']);
}
}
};

View File

@ -45,10 +45,15 @@ class AccessListClient extends Model {
},
modify: function (qb) {
qb.where('access_list.is_deleted', 0);
qb.omit(['created_on', 'modified_on', 'is_deleted', 'access_list_id']);
}
}
};
}
get rule() {
return `${this.directive} ${this.address}`;
}
}
module.exports = AccessListClient;

View File

@ -43,6 +43,9 @@ class AuditLog extends Model {
join: {
from: 'audit_log.user_id',
to: 'user.id'
},
modify: function (qb) {
qb.omit(['id', 'created_on', 'modified_on', 'roles']);
}
}
};

View File

@ -74,6 +74,9 @@ class Auth extends Model {
},
filter: {
is_deleted: 0
},
modify: function (qb) {
qb.omit(['is_deleted']);
}
}
};

View File

@ -63,6 +63,7 @@ class Certificate extends Model {
},
modify: function (qb) {
qb.where('user.is_deleted', 0);
qb.omit(['id', 'created_on', 'modified_on', 'is_deleted', 'email', 'roles']);
}
}
};

View File

@ -59,6 +59,7 @@ class DeadHost extends Model {
},
modify: function (qb) {
qb.where('user.is_deleted', 0);
qb.omit(['id', 'created_on', 'modified_on', 'is_deleted', 'email', 'roles']);
}
},
certificate: {
@ -70,6 +71,7 @@ class DeadHost extends Model {
},
modify: function (qb) {
qb.where('certificate.is_deleted', 0);
qb.omit(['id', 'created_on', 'modified_on', 'is_deleted']);
}
}
};

View File

@ -1,13 +1,13 @@
const db = require('../db');
const config = require('../lib/config');
const config = require('config');
const Model = require('objection').Model;
Model.knex(db);
module.exports = function () {
if (config.isSqlite()) {
// eslint-disable-next-line
return Model.raw("datetime('now','localtime')");
}
if (config.database.knex && config.database.knex.client === 'sqlite3') {
return Model.raw('datetime(\'now\',\'localtime\')');
} else {
return Model.raw('NOW()');
}
};

View File

@ -60,6 +60,7 @@ class ProxyHost extends Model {
},
modify: function (qb) {
qb.where('user.is_deleted', 0);
qb.omit(['id', 'created_on', 'modified_on', 'is_deleted', 'email', 'roles']);
}
},
access_list: {
@ -71,6 +72,7 @@ class ProxyHost extends Model {
},
modify: function (qb) {
qb.where('access_list.is_deleted', 0);
qb.omit(['id', 'created_on', 'modified_on', 'is_deleted']);
}
},
certificate: {
@ -82,6 +84,7 @@ class ProxyHost extends Model {
},
modify: function (qb) {
qb.where('certificate.is_deleted', 0);
qb.omit(['id', 'created_on', 'modified_on', 'is_deleted']);
}
}
};

View File

@ -1,4 +1,3 @@
// Objection Docs:
// http://vincit.github.io/objection.js/
@ -60,6 +59,7 @@ class RedirectionHost extends Model {
},
modify: function (qb) {
qb.where('user.is_deleted', 0);
qb.omit(['id', 'created_on', 'modified_on', 'is_deleted', 'email', 'roles']);
}
},
certificate: {
@ -71,6 +71,7 @@ class RedirectionHost extends Model {
},
modify: function (qb) {
qb.where('certificate.is_deleted', 0);
qb.omit(['id', 'created_on', 'modified_on', 'is_deleted']);
}
}
};

View File

@ -0,0 +1,56 @@
// Objection Docs:
// http://vincit.github.io/objection.js/
const db = require('../db');
const Model = require('objection').Model;
const User = require('./user');
const now = require('./now_helper');
Model.knex(db);
class SslPassthroughHost extends Model {
$beforeInsert () {
this.created_on = now();
this.modified_on = now();
// Default for meta
if (typeof this.meta === 'undefined') {
this.meta = {};
}
}
$beforeUpdate () {
this.modified_on = now();
}
static get name () {
return 'SslPassthroughHost';
}
static get tableName () {
return 'ssl_passthrough_host';
}
static get jsonAttributes () {
return ['meta'];
}
static get relationMappings () {
return {
owner: {
relation: Model.HasOneRelation,
modelClass: User,
join: {
from: 'ssl_passthrough_host.owner_user_id',
to: 'user.id'
},
modify: function (qb) {
qb.where('user.is_deleted', 0);
qb.omit(['id', 'created_on', 'modified_on', 'is_deleted', 'email', 'roles']);
}
}
};
}
}
module.exports = SslPassthroughHost;

View File

@ -46,6 +46,7 @@ class Stream extends Model {
},
modify: function (qb) {
qb.where('user.is_deleted', 0);
qb.omit(['id', 'created_on', 'modified_on', 'is_deleted', 'email', 'roles']);
}
}
};

View File

@ -6,36 +6,44 @@
const _ = require('lodash');
const jwt = require('jsonwebtoken');
const crypto = require('crypto');
const config = require('../lib/config');
const error = require('../lib/error');
const logger = require('../logger').global;
const ALGO = 'RS256';
let public_key = null;
let private_key = null;
function checkJWTKeyPair() {
if (!public_key || !private_key) {
let config = require('config');
public_key = config.get('jwt.pub');
private_key = config.get('jwt.key');
}
}
module.exports = function () {
let token_data = {};
const self = {
let self = {
/**
* @param {Object} payload
* @returns {Promise}
*/
create: (payload) => {
if (!config.getPrivateKey()) {
logger.error('Private key is empty!');
}
// sign with RSA SHA256
const options = {
let options = {
algorithm: ALGO,
expiresIn: payload.expiresIn || '1d'
};
payload.jti = crypto.randomBytes(12)
.toString('base64')
.substring(-8);
.substr(-8);
checkJWTKeyPair();
return new Promise((resolve, reject) => {
jwt.sign(payload, config.getPrivateKey(), options, (err, token) => {
jwt.sign(payload, private_key, options, (err, token) => {
if (err) {
reject(err);
} else {
@ -54,15 +62,13 @@ module.exports = function () {
* @returns {Promise}
*/
load: function (token) {
if (!config.getPublicKey()) {
logger.error('Public key is empty!');
}
return new Promise((resolve, reject) => {
checkJWTKeyPair();
try {
if (!token || token === null || token === 'null') {
reject(new error.AuthError('Empty token'));
} else {
jwt.verify(token, config.getPublicKey(), {ignoreExpiration: false, algorithms: [ALGO]}, (err, result) => {
jwt.verify(token, public_key, {ignoreExpiration: false, algorithms: [ALGO]}, (err, result) => {
if (err) {
if (err.name === 'TokenExpiredError') {
@ -77,6 +83,8 @@ module.exports = function () {
// Hack: some tokens out in the wild have a scope of 'all' instead of 'user'.
// For 30 days at least, we need to replace 'all' with user.
if ((typeof token_data.scope !== 'undefined' && _.indexOf(token_data.scope, 'all') !== -1)) {
//console.log('Warning! Replacing "all" scope with "user"');
token_data.scope = ['user'];
}
@ -126,7 +134,7 @@ module.exports = function () {
* @returns {Integer}
*/
getUserId: (default_value) => {
const attrs = self.get('attrs');
let attrs = self.get('attrs');
if (attrs && typeof attrs.id !== 'undefined' && attrs.id) {
return attrs.id;
}
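For reference, a minimal sketch of the sign/verify round trip the helper above performs, using the same jsonwebtoken calls and the jwt.key/jwt.pub config values read by checkJWTKeyPair(); the payload contents are illustrative only.
const jwt    = require('jsonwebtoken');
const config = require('config');

const private_key = config.get('jwt.key');
const public_key  = config.get('jwt.pub');

// Sign with RS256, then verify against the matching public key
jwt.sign({scope: ['user'], attrs: {id: 1}}, private_key, {algorithm: 'RS256', expiresIn: '1d'}, (signErr, token) => {
    if (signErr) { throw signErr; }
    jwt.verify(token, public_key, {ignoreExpiration: false, algorithms: ['RS256']}, (verifyErr, decoded) => {
        if (verifyErr) { throw verifyErr; }
        console.log(decoded.attrs.id); // 1
    });
});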

View File

@ -43,6 +43,9 @@ class User extends Model {
join: {
from: 'user.id',
to: 'user_permission.user_id'
},
modify: function (qb) {
qb.omit(['id', 'created_on', 'modified_on', 'user_id']);
}
}
};

View File

@ -10,22 +10,29 @@
"bcrypt": "^5.0.0",
"body-parser": "^1.19.0",
"compression": "^1.7.4",
"express": "^4.17.3",
"config": "^3.3.1",
"diskdb": "^0.1.17",
"express": "^4.17.1",
"express-fileupload": "^1.1.9",
"gravatar": "^1.8.0",
"html-entities": "^1.2.1",
"json-schema-ref-parser": "^8.0.0",
"jsonwebtoken": "^9.0.0",
"knex": "2.4.2",
"liquidjs": "10.6.1",
"jsonwebtoken": "^8.5.1",
"knex": "^0.20.13",
"liquidjs": "^9.11.10",
"lodash": "^4.17.21",
"moment": "^2.29.4",
"moment": "^2.24.0",
"mysql": "^2.18.1",
"node-rsa": "^1.0.8",
"objection": "3.0.1",
"nodemon": "^2.0.2",
"objection": "^2.2.16",
"path": "^0.12.7",
"signale": "1.4.0",
"sqlite3": "5.1.6",
"temp-write": "^4.0.0"
"pg": "^7.12.1",
"restler": "^3.4.0",
"signale": "^1.4.0",
"sqlite3": "^4.1.1",
"temp-write": "^4.0.0",
"unix-timestamp": "^0.2.0"
},
"signale": {
"displayDate": true,
@ -34,9 +41,8 @@
"author": "Jamie Curnow <jc@jc21.com>",
"license": "MIT",
"devDependencies": {
"eslint": "^8.36.0",
"eslint": "^6.8.0",
"eslint-plugin-align-assignments": "^1.1.2",
"nodemon": "^2.0.2",
"prettier": "^2.0.4"
}
}

View File

@ -1,6 +1,7 @@
const express = require('express');
const pjson = require('../../package.json');
const error = require('../../lib/error');
const internalNginx = require('../../internal/nginx');
let router = express.Router({
caseSensitive: true,
@ -34,10 +35,18 @@ router.use('/settings', require('./settings'));
router.use('/nginx/proxy-hosts', require('./nginx/proxy_hosts'));
router.use('/nginx/redirection-hosts', require('./nginx/redirection_hosts'));
router.use('/nginx/dead-hosts', require('./nginx/dead_hosts'));
router.use('/nginx/ssl-passthrough-hosts', require('./nginx/ssl_passthrough_hosts'));
router.use('/nginx/streams', require('./nginx/streams'));
router.use('/nginx/access-lists', require('./nginx/access_lists'));
router.use('/nginx/certificates', require('./nginx/certificates'));
router.get('/ssl-passthrough-enabled', (req, res/*, next*/) => {
res.status(200).send({
status: 'OK',
ssl_passthrough_enabled: internalNginx.sslPassthroughEnabled()
});
});
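As a rough usage sketch, a client could probe the new endpoint before exposing any passthrough UI. The /api prefix matches the mount point used for the other endpoints here, and a global fetch (Node 18+) is assumed; both are illustrative rather than part of this changeset.
async function isSslPassthroughEnabled(baseUrl) {
    // The route above is registered without jwtdecode(), so no token is required
    const res  = await fetch(baseUrl + '/api/ssl-passthrough-enabled');
    const body = await res.json();
    return body.status === 'OK' && body.ssl_passthrough_enabled === true;
}

// isSslPassthroughEnabled('http://127.0.0.1:81').then(console.log);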
/**
* API 404 for all other routes
*

View File

@ -68,32 +68,6 @@ router
.catch(next);
});
/**
* Test HTTP challenge for domains
*
* /api/nginx/certificates/test-http
*/
router
.route('/test-http')
.options((req, res) => {
res.sendStatus(204);
})
.all(jwtdecode())
/**
* GET /api/nginx/certificates/test-http
*
* Test HTTP challenge for domains
*/
.get((req, res, next) => {
internalCertificate.testHttpsChallenge(res.locals.access, JSON.parse(req.query.domains))
.then((result) => {
res.status(200)
.send(result);
})
.catch(next);
});
/**
* Specific certificate
*
@ -235,6 +209,7 @@ router
.catch(next);
});
/**
* Download LE Certs
*

View File

@ -0,0 +1,196 @@
const express = require('express');
const validator = require('../../../lib/validator');
const jwtdecode = require('../../../lib/express/jwt-decode');
const internalSslPassthrough = require('../../../internal/ssl-passthrough-host');
const apiValidator = require('../../../lib/validator/api');
let router = express.Router({
caseSensitive: true,
strict: true,
mergeParams: true
});
/**
* /api/nginx/ssl-passthrough-hosts
*/
router
.route('/')
.options((req, res) => {
res.sendStatus(204);
})
.all(jwtdecode()) // preferred so it doesn't apply to nonexistent routes
/**
* GET /api/nginx/ssl-passthrough-hosts
*
* Retrieve all ssl passthrough hosts
*/
.get((req, res, next) => {
validator({
additionalProperties: false,
properties: {
expand: {
$ref: 'definitions#/definitions/expand'
},
query: {
$ref: 'definitions#/definitions/query'
}
}
}, {
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
query: (typeof req.query.query === 'string' ? req.query.query : null)
})
.then((data) => {
return internalSslPassthrough.getAll(res.locals.access, data.expand, data.query);
})
.then((rows) => {
res.status(200)
.send(rows);
})
.catch(next);
})
/**
* POST /api/nginx/ssl-passthrough-hosts
*
* Create a new ssl passthrough host
*/
.post((req, res, next) => {
apiValidator({$ref: 'endpoints/ssl-passthrough-hosts#/links/1/schema'}, req.body)
.then((payload) => {
return internalSslPassthrough.create(res.locals.access, payload);
})
.then((result) => {
res.status(201)
.send(result);
})
.catch(next);
});
/**
* Specific ssl passthrough host
*
* /api/nginx/ssl-passthrough-hosts/123
*/
router
.route('/:host_id')
.options((req, res) => {
res.sendStatus(204);
})
.all(jwtdecode()) // preferred so it doesn't apply to nonexistent routes
/**
* GET /api/nginx/ssl-passthrough-hosts/123
*
* Retrieve a specific ssl passthrough host
*/
.get((req, res, next) => {
validator({
required: ['host_id'],
additionalProperties: false,
properties: {
host_id: {
$ref: 'definitions#/definitions/id'
},
expand: {
$ref: 'definitions#/definitions/expand'
}
}
}, {
host_id: req.params.host_id,
expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
})
.then((data) => {
return internalSslPassthrough.get(res.locals.access, {
id: parseInt(data.host_id, 10),
expand: data.expand
});
})
.then((row) => {
res.status(200)
.send(row);
})
.catch(next);
})
/**
* PUT /api/nginx/ssl-passthrough-hosts/123
*
* Update an existing ssl passthrough host
*/
.put((req, res, next) => {
apiValidator({$ref: 'endpoints/ssl-passthrough-hosts#/links/2/schema'}, req.body)
.then((payload) => {
payload.id = parseInt(req.params.host_id, 10);
return internalSslPassthrough.update(res.locals.access, payload);
})
.then((result) => {
res.status(200)
.send(result);
})
.catch(next);
})
/**
* DELETE /api/nginx/ssl-passthrough-hosts/123
*
* Delete an ssl passthrough host
*/
.delete((req, res, next) => {
internalSslPassthrough.delete(res.locals.access, {id: parseInt(req.params.host_id, 10)})
.then((result) => {
res.status(200)
.send(result);
})
.catch(next);
});
/**
* Enable ssl passthrough host
*
* /api/nginx/ssl-passthrough-hosts/123/enable
*/
router
.route('/:host_id/enable')
.options((req, res) => {
res.sendStatus(204);
})
.all(jwtdecode())
/**
* POST /api/nginx/ssl-passthrough-hosts/123/enable
*/
.post((req, res, next) => {
internalSslPassthrough.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
.then((result) => {
res.status(200)
.send(result);
})
.catch(next);
});
/**
* Disable ssl passthrough host
*
* /api/nginx/ssl-passthrough-hosts/123/disable
*/
router
.route('/:host_id/disable')
.options((req, res) => {
res.sendStatus(204);
})
.all(jwtdecode())
/**
* POST /api/nginx/ssl-passthrough-hosts/123/disable
*/
.post((req, res, next) => {
internalSslPassthrough.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
.then((result) => {
res.status(200)
.send(result);
})
.catch(next);
});
module.exports = router;
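A hedged end-to-end sketch of driving these routes: create a host with the three required fields from the endpoint schema, then hit its enable action. The /api prefix, the admin token and the host values are illustrative, and a global fetch (Node 18+) is assumed.
async function createAndEnablePassthroughHost(baseUrl, token) {
    const headers = {'Content-Type': 'application/json', Authorization: 'Bearer ' + token};

    // POST /api/nginx/ssl-passthrough-hosts
    const created = await fetch(baseUrl + '/api/nginx/ssl-passthrough-hosts', {
        method:  'POST',
        headers: headers,
        body:    JSON.stringify({
            domain_name:     'service.example.com',
            forwarding_host: '192.168.0.10',
            forwarding_port: 8443
        })
    }).then((res) => res.json());

    // POST /api/nginx/ssl-passthrough-hosts/:host_id/enable
    await fetch(baseUrl + '/api/nginx/ssl-passthrough-hosts/' + created.id + '/enable', {
        method:  'POST',
        headers: headers
    });

    return created;
}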

View File

@ -157,17 +157,6 @@
"targetSchema": {
"type": "boolean"
}
},
{
"title": "Test HTTP Challenge",
"description": "Tests whether the HTTP challenge should work",
"href": "/nginx/certificates/{definitions.identity.example}/test-http",
"access": "private",
"method": "GET",
"rel": "info",
"http_header": {
"$ref": "../examples.json#/definitions/auth_header"
}
}
]
}

View File

@ -0,0 +1,208 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"$id": "endpoints/ssl-passthrough-hosts",
"title": "SSL Passthrough Hosts",
"description": "Endpoints relating to SSL Passthrough Hosts",
"stability": "stable",
"type": "object",
"definitions": {
"id": {
"$ref": "../definitions.json#/definitions/id"
},
"created_on": {
"$ref": "../definitions.json#/definitions/created_on"
},
"modified_on": {
"$ref": "../definitions.json#/definitions/modified_on"
},
"domain_name": {
"$ref": "../definitions.json#/definitions/domain_name"
},
"forwarding_host": {
"anyOf": [
{
"$ref": "../definitions.json#/definitions/domain_name"
},
{
"type": "string",
"format": "ipv4"
},
{
"type": "string",
"format": "ipv6"
}
]
},
"forwarding_port": {
"type": "integer",
"minimum": 1,
"maximum": 65535
},
"enabled": {
"$ref": "../definitions.json#/definitions/enabled"
},
"meta": {
"type": "object"
}
},
"properties": {
"id": {
"$ref": "#/definitions/id"
},
"created_on": {
"$ref": "#/definitions/created_on"
},
"modified_on": {
"$ref": "#/definitions/modified_on"
},
"domain_name": {
"$ref": "#/definitions/domain_name"
},
"forwarding_host": {
"$ref": "#/definitions/forwarding_host"
},
"forwarding_port": {
"$ref": "#/definitions/forwarding_port"
},
"enabled": {
"$ref": "#/definitions/enabled"
},
"meta": {
"$ref": "#/definitions/meta"
}
},
"links": [
{
"title": "List",
"description": "Returns a list of SSL Passthrough Hosts",
"href": "/nginx/ssl-passthrough-hosts",
"access": "private",
"method": "GET",
"rel": "self",
"http_header": {
"$ref": "../examples.json#/definitions/auth_header"
},
"targetSchema": {
"type": "array",
"items": {
"$ref": "#/properties"
}
}
},
{
"title": "Create",
"description": "Creates a new SSL Passthrough Host",
"href": "/nginx/ssl-passthrough-hosts",
"access": "private",
"method": "POST",
"rel": "create",
"http_header": {
"$ref": "../examples.json#/definitions/auth_header"
},
"schema": {
"type": "object",
"additionalProperties": false,
"required": [
"domain_name",
"forwarding_host",
"forwarding_port"
],
"properties": {
"domain_name": {
"$ref": "#/definitions/domain_name"
},
"forwarding_host": {
"$ref": "#/definitions/forwarding_host"
},
"forwarding_port": {
"$ref": "#/definitions/forwarding_port"
},
"meta": {
"$ref": "#/definitions/meta"
}
}
},
"targetSchema": {
"properties": {
"$ref": "#/properties"
}
}
},
{
"title": "Update",
"description": "Updates a existing SSL Passthrough Host",
"href": "/nginx/ssl-passthrough-hosts/{definitions.identity.example}",
"access": "private",
"method": "PUT",
"rel": "update",
"http_header": {
"$ref": "../examples.json#/definitions/auth_header"
},
"schema": {
"type": "object",
"additionalProperties": false,
"properties": {
"domain_name": {
"$ref": "#/definitions/domain_name"
},
"forwarding_host": {
"$ref": "#/definitions/forwarding_host"
},
"forwarding_port": {
"$ref": "#/definitions/forwarding_port"
},
"meta": {
"$ref": "#/definitions/meta"
}
}
},
"targetSchema": {
"properties": {
"$ref": "#/properties"
}
}
},
{
"title": "Delete",
"description": "Deletes a existing SSL Passthrough Host",
"href": "/nginx/ssl-passthrough-hosts/{definitions.identity.example}",
"access": "private",
"method": "DELETE",
"rel": "delete",
"http_header": {
"$ref": "../examples.json#/definitions/auth_header"
},
"targetSchema": {
"type": "boolean"
}
},
{
"title": "Enable",
"description": "Enables a existing SSL Passthrough Host",
"href": "/nginx/ssl-passthrough-hosts/{definitions.identity.example}/enable",
"access": "private",
"method": "POST",
"rel": "update",
"http_header": {
"$ref": "../examples.json#/definitions/auth_header"
},
"targetSchema": {
"type": "boolean"
}
},
{
"title": "Disable",
"description": "Disables a existing SSL Passthrough Host",
"href": "/nginx/ssl-passthrough-hosts/{definitions.identity.example}/disable",
"access": "private",
"method": "POST",
"rel": "update",
"http_header": {
"$ref": "../examples.json#/definitions/auth_header"
},
"targetSchema": {
"type": "boolean"
}
}
]
}
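To make the forwarding_host anyOf above concrete, each of the following illustrative bodies would satisfy the Create schema: a domain name, an IPv4 address or an IPv6 address, always with the three required properties.
// Illustrative payloads only; the hostnames and addresses are placeholders
const byDomain = {domain_name: 'svc.example.com', forwarding_host: 'backend.internal', forwarding_port: 8443};
const byIpv4   = {domain_name: 'svc.example.com', forwarding_host: '192.168.0.10',     forwarding_port: 8443};
const byIpv6   = {domain_name: 'svc.example.com', forwarding_host: '2001:db8::10',     forwarding_port: 8443};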

View File

@ -26,6 +26,9 @@
"dead-hosts": {
"$ref": "endpoints/dead-hosts.json"
},
"ssl-passthrough-hosts": {
"$ref": "endpoints/ssl-passthrough-hosts.json"
},
"streams": {
"$ref": "endpoints/streams.json"
},

View File

@ -1,4 +1,6 @@
const config = require('./lib/config');
const fs = require('fs');
const NodeRSA = require('node-rsa');
const config = require('config');
const logger = require('./logger').setup;
const certificateModel = require('./models/certificate');
const userModel = require('./models/user');
@ -6,7 +8,65 @@ const userPermissionModel = require('./models/user_permission');
const utils = require('./lib/utils');
const authModel = require('./models/auth');
const settingModel = require('./models/setting');
const passthroughHostModel = require('./models/ssl_passthrough_host');
const dns_plugins = require('./global/certbot-dns-plugins');
const internalNginx = require('./internal/nginx');
const debug_mode = process.env.NODE_ENV !== 'production' || !!process.env.DEBUG;
/**
* Creates a new JWT RSA keypair if not already set in the config
*
* @returns {Promise}
*/
const setupJwt = () => {
return new Promise((resolve, reject) => {
// Now go and check if the jwt RSA keys have been created and if not, create them
if (!config.has('jwt') || !config.has('jwt.key') || !config.has('jwt.pub')) {
logger.info('Creating a new JWT key pair...');
// jwt keys are not configured properly
const filename = config.util.getEnv('NODE_CONFIG_DIR') + '/' + (config.util.getEnv('NODE_ENV') || 'default') + '.json';
let config_data = {};
try {
config_data = require(filename);
} catch (err) {
// do nothing
if (debug_mode) {
logger.debug(filename + ' config file could not be required');
}
}
// Now create the keys and save them in the config.
let key = new NodeRSA({ b: 2048 });
key.generateKeyPair();
config_data.jwt = {
key: key.exportKey('private').toString(),
pub: key.exportKey('public').toString(),
};
// Write config
fs.writeFile(filename, JSON.stringify(config_data, null, 2), (err) => {
if (err) {
logger.error('Could not write JWT key pair to config file: ' + filename);
reject(err);
} else {
logger.info('Wrote JWT key pair to config file: ' + filename);
delete require.cache[require.resolve('config')];
resolve();
}
});
} else {
// JWT key pair exists
if (debug_mode) {
logger.debug('JWT Keypair already exists');
}
resolve();
}
});
};
/**
* Creates a default admin user if one doesn't already exist in the database
@ -52,6 +112,7 @@ const setupDefaultUser = () => {
proxy_hosts: 'manage',
redirection_hosts: 'manage',
dead_hosts: 'manage',
ssl_passthrough_hosts: 'manage',
streams: 'manage',
access_lists: 'manage',
certificates: 'manage',
@ -61,8 +122,8 @@ const setupDefaultUser = () => {
.then(() => {
logger.info('Initial admin setup completed');
});
} else if (config.debug()) {
logger.info('Admin user setup not required');
} else if (debug_mode) {
logger.debug('Admin user setup not required');
}
});
};
@ -93,8 +154,8 @@ const setupDefaultSettings = () => {
logger.info('Default settings added');
});
}
if (config.debug()) {
logger.info('Default setting setup not required');
if (debug_mode) {
logger.debug('Default setting setup not required');
}
});
};
@ -117,21 +178,19 @@ const setupCertbotPlugins = () => {
certificates.map(function (certificate) {
if (certificate.meta && certificate.meta.dns_challenge === true) {
const dns_plugin = dns_plugins[certificate.meta.dns_provider];
const packages_to_install = `${dns_plugin.package_name}${dns_plugin.version_requirement || ''} ${dns_plugin.dependencies}`;
if (plugins.indexOf(packages_to_install) === -1) plugins.push(packages_to_install);
// Make sure credentials file exists
const credentials_loc = '/etc/letsencrypt/credentials/credentials-' + certificate.id;
// Escape single quotes and backslashes
const escapedCredentials = certificate.meta.dns_provider_credentials.replaceAll('\'', '\\\'').replaceAll('\\', '\\\\');
const credentials_cmd = '[ -f \'' + credentials_loc + '\' ] || { mkdir -p /etc/letsencrypt/credentials 2> /dev/null; echo \'' + escapedCredentials + '\' > \'' + credentials_loc + '\' && chmod 600 \'' + credentials_loc + '\'; }';
const credentials_cmd = '[ -f \'' + credentials_loc + '\' ] || { mkdir -p /etc/letsencrypt/credentials 2> /dev/null; echo \'' + certificate.meta.dns_provider_credentials.replace('\'', '\\\'') + '\' > \'' + credentials_loc + '\' && chmod 600 \'' + credentials_loc + '\'; }';
promises.push(utils.exec(credentials_cmd));
}
});
if (plugins.length) {
const install_cmd = '. /opt/certbot/bin/activate && pip install --no-cache-dir --user ' + plugins.join(' ') + ' && deactivate';
const install_cmd = 'pip install ' + plugins.join(' ');
promises.push(utils.exec(install_cmd));
}
@ -166,9 +225,19 @@ const setupLogrotation = () => {
return runLogrotate();
};
/**
* Makes sure the ssl passthrough option is reflected in the nginx config
* @returns {Promise}
*/
const setupSslPassthrough = () => {
return internalNginx.configure(passthroughHostModel, 'ssl_passthrough_host', {});
};
module.exports = function () {
return setupDefaultUser()
return setupJwt()
.then(setupDefaultUser)
.then(setupDefaultSettings)
.then(setupCertbotPlugins)
.then(setupLogrotation);
.then(setupLogrotation)
.then(setupSslPassthrough);
};
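A condensed sketch of the key material setupJwt() above generates and persists, using the same node-rsa calls from this file; the console output line is illustrative.
const NodeRSA = require('node-rsa');

const key = new NodeRSA({b: 2048});
key.generateKeyPair();

// Shape written into the NODE_CONFIG_DIR json file by setupJwt()
const config_data = {
    jwt: {
        key: key.exportKey('private').toString(),
        pub: key.exportKey('public').toString()
    }
};

console.log(config_data.jwt.pub.split('\n')[0]); // -----BEGIN PUBLIC KEY-----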

View File

@ -1,25 +0,0 @@
{% if access_list_id > 0 %}
{% if access_list.items.length > 0 %}
# Authorization
auth_basic "Authorization required";
auth_basic_user_file /data/access/{{ access_list_id }};
{% if access_list.pass_auth == 0 %}
proxy_set_header Authorization "";
{% endif %}
{% endif %}
# Access Rules: {{ access_list.clients | size }} total
{% for client in access_list.clients %}
{{client | nginxAccessRule}}
{% endfor %}
deny all;
# Access checks must...
{% if access_list.satisfy_any == 1 %}
satisfy any;
{% else %}
satisfy all;
{% endif %}
{% endif %}

View File

@ -1,14 +1,36 @@
location {{ path }} {
set $upstream {{ forward_scheme }}://{{ forward_host }}:{{ forward_port }}{{ forward_path }};
proxy_set_header Host $host;
proxy_set_header X-Forwarded-Scheme $scheme;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-For $remote_addr;
proxy_set_header X-Real-IP $remote_addr;
proxy_pass {{ forward_scheme }}://{{ forward_host }}:{{ forward_port }}{{ forward_path }};
proxy_pass $upstream;
{% if access_list_id > 0 %}
{% if access_list.items.length > 0 %}
# Authorization
auth_basic "Authorization required";
auth_basic_user_file /data/access/{{ access_list_id }};
{{ access_list.passauth }}
{% endif %}
# Access Rules
{% for client in access_list.clients %}
{{- client.rule -}};
{% endfor %}deny all;
# Access checks must...
{% if access_list.satisfy %}
{{ access_list.satisfy }};
{% endif %}
{% endif %}
{% include "_access.conf" %}
{% include "_assets.conf" %}
{% include "_exploits.conf" %}
{% include "_forced_ssl.conf" %}
{% include "_hsts.conf" %}

View File

@ -7,9 +7,9 @@
server {
listen 80 default;
{% if ipv6 -%}
listen [::]:80 default;
listen [::]:80;
{% else -%}
#listen [::]:80 default;
#listen [::]:80;
{% endif %}
server_name default-host.localhost;
access_log /data/logs/default-host_access.log combined;

View File

@ -30,7 +30,27 @@ proxy_http_version 1.1;
location / {
{% include "_access.conf" %}
{% if access_list_id > 0 %}
{% if access_list.items.length > 0 %}
# Authorization
auth_basic "Authorization required";
auth_basic_user_file /data/access/{{ access_list_id }};
{{ access_list.passauth }}
{% endif %}
# Access Rules
{% for client in access_list.clients %}
{{- client.rule -}};
{% endfor %}deny all;
# Access checks must...
{% if access_list.satisfy %}
{{ access_list.satisfy }};
{% endif %}
{% endif %}
{% include "_hsts.conf" %}
{% if allow_websocket_upgrade == 1 or allow_websocket_upgrade == true %}

View File

@ -0,0 +1,41 @@
# ------------------------------------------------------------
# SSL Passthrough hosts
# ------------------------------------------------------------
map $ssl_preread_server_name $name {
{% for host in all_passthrough_hosts %}
{% if host.enabled %}
{{ host.domain_name }} ssl_passthrough_{{ host.domain_name }};
{% endif %}
{% endfor %}
default https_default_backend;
}
{% for host in all_passthrough_hosts %}
{% if host.enabled %}
upstream ssl_passthrough_{{ host.domain_name }} {
server {{host.forwarding_host}}:{{host.forwarding_port}};
}
{% endif %}
{% endfor %}
upstream https_default_backend {
server 127.0.0.1:443;
}
server {
listen 444;
{% if ipv6 -%}
listen [::]:444;
{% else -%}
#listen [::]:444;
{% endif %}
proxy_pass $name;
ssl_preread on;
error_log /data/logs/ssl-passthrough-hosts_error.log warn;
# Custom
include /data/nginx/custom/server_ssl_passthrough[.]conf;
}
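For reference, a hedged sketch of how a template like the one above can be rendered with liquidjs, which the backend already uses for its nginx config generation; the template path, engine options and host values are assumptions, and in this changeset the actual rendering is triggered by internalNginx.configure(passthroughHostModel, 'ssl_passthrough_host', {}) during setup.
const {Liquid} = require('liquidjs');

const engine = new Liquid({root: '/app/templates', extname: '.conf'}); // path is illustrative

engine.renderFile('ssl_passthrough', {
    ipv6: true,
    all_passthrough_hosts: [
        {enabled: true, domain_name: 'service.example.com', forwarding_host: '192.168.0.10', forwarding_port: 8443}
    ]
}).then((conf) => {
    // conf holds the map/upstream/server blocks destined for /data/nginx/ssl_passthrough_host/hosts.conf
    console.log(conf);
});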

File diff suppressed because it is too large

View File

@ -3,7 +3,7 @@
# This file assumes that the frontend has been built using ./scripts/frontend-build
FROM jc21/nginx-full:certbot-node
FROM nginxproxymanager/nginx-full:node
ARG TARGETPLATFORM
ARG BUILD_VERSION
@ -25,7 +25,7 @@ RUN echo "fs.file-max = 65535" > /etc/sysctl.conf \
&& rm -rf /var/lib/apt/lists/*
# s6 overlay
COPY docker/scripts/install-s6 /tmp/install-s6
COPY scripts/install-s6 /tmp/install-s6
RUN /tmp/install-s6 "${TARGETPLATFORM}" && rm -f /tmp/install-s6
EXPOSE 80 81 443
@ -35,17 +35,16 @@ COPY frontend/dist /app/frontend
COPY global /app/global
WORKDIR /app
RUN yarn install \
&& yarn cache clean
RUN yarn install
# add late to limit cache-busting by modifications
COPY docker/rootfs /
# Remove frontend service not required for prod, dev nginx config as well
RUN rm -rf /etc/services.d/frontend /etc/nginx/conf.d/dev.conf \
&& chmod 644 /etc/logrotate.d/nginx-proxy-manager \
&& pip uninstall --yes setuptools \
&& pip install --no-cache-dir "setuptools==58.0.0"
RUN rm -rf /etc/services.d/frontend /etc/nginx/conf.d/dev.conf
# Change permission of logrotate config file
RUN chmod 644 /etc/logrotate.d/nginx-proxy-manager
VOLUME [ "/data", "/etc/letsencrypt" ]
ENTRYPOINT [ "/init" ]

View File

@ -1,4 +1,4 @@
FROM jc21/nginx-full:certbot-node
FROM nginxproxymanager/nginx-full:node
LABEL maintainer="Jamie Curnow <jc@jc21.com>"
ENV S6_LOGGING=0 \
@ -7,7 +7,7 @@ ENV S6_LOGGING=0 \
RUN echo "fs.file-max = 65535" > /etc/sysctl.conf \
&& apt-get update \
&& apt-get install -y jq python3-pip logrotate \
&& apt-get install -y certbot jq python3-pip logrotate \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
@ -21,8 +21,9 @@ RUN rm -f /etc/nginx/conf.d/production.conf
RUN chmod 644 /etc/logrotate.d/nginx-proxy-manager
# s6 overlay
COPY scripts/install-s6 /tmp/install-s6
RUN /tmp/install-s6 "${TARGETPLATFORM}" && rm -f /tmp/install-s6
RUN curl -L -o /tmp/s6-overlay-amd64.tar.gz "https://github.com/just-containers/s6-overlay/releases/download/v1.22.1.0/s6-overlay-amd64.tar.gz" \
&& tar -xzf /tmp/s6-overlay-amd64.tar.gz -C /
EXPOSE 80 81 443
ENTRYPOINT [ "/init" ]

View File

@ -1,18 +1,17 @@
# WARNING: This is a CI docker-compose file used for building and testing of the entire app, it should not be used for production.
version: '3.8'
version: "3"
services:
fullstack-mysql:
image: "${IMAGE}:ci-${BUILD_NUMBER}"
image: ${IMAGE}:ci-${BUILD_NUMBER}
environment:
DEBUG: 'true'
LE_STAGING: 'true'
NODE_ENV: "development"
FORCE_COLOR: 1
DB_MYSQL_HOST: 'db'
DB_MYSQL_PORT: '3306'
DB_MYSQL_USER: 'npm'
DB_MYSQL_PASSWORD: 'npm'
DB_MYSQL_NAME: 'npm'
DB_MYSQL_HOST: "db"
DB_MYSQL_PORT: 3306
DB_MYSQL_USER: "npm"
DB_MYSQL_PASSWORD: "npm"
DB_MYSQL_NAME: "npm"
volumes:
- npm_data:/data
expose:
@ -27,14 +26,11 @@ services:
timeout: 3s
fullstack-sqlite:
image: "${IMAGE}:ci-${BUILD_NUMBER}"
image: ${IMAGE}:ci-${BUILD_NUMBER}
environment:
DEBUG: 'true'
LE_STAGING: 'true'
NODE_ENV: "development"
FORCE_COLOR: 1
DB_SQLITE_FILE: '/data/mydb.sqlite'
PUID: 1000
PGID: 1000
DB_SQLITE_FILE: "/data/database.sqlite"
volumes:
- npm_data:/data
expose:
@ -49,26 +45,26 @@ services:
db:
image: jc21/mariadb-aria
environment:
MYSQL_ROOT_PASSWORD: 'npm'
MYSQL_DATABASE: 'npm'
MYSQL_USER: 'npm'
MYSQL_PASSWORD: 'npm'
MYSQL_ROOT_PASSWORD: "npm"
MYSQL_DATABASE: "npm"
MYSQL_USER: "npm"
MYSQL_PASSWORD: "npm"
volumes:
- db_data:/var/lib/mysql
cypress-mysql:
image: "${IMAGE}-cypress:ci-${BUILD_NUMBER}"
image: ${IMAGE}-cypress:ci-${BUILD_NUMBER}
build:
context: ../test/
dockerfile: cypress/Dockerfile
environment:
CYPRESS_baseUrl: 'http://fullstack-mysql:81'
CYPRESS_baseUrl: "http://fullstack-mysql:81"
volumes:
- cypress-logs:/results
command: cypress run --browser chrome --config-file=${CYPRESS_CONFIG:-cypress/config/ci.json}
cypress-sqlite:
image: "${IMAGE}-cypress:ci-${BUILD_NUMBER}"
image: ${IMAGE}-cypress:ci-${BUILD_NUMBER}
build:
context: ../test/
dockerfile: cypress/Dockerfile

View File

@ -1,7 +1,6 @@
# WARNING: This is a DEVELOPMENT docker-compose file, it should not be used for production.
version: '3.8'
version: "3.5"
services:
npm:
image: nginxproxymanager:dev
container_name: npm_core
@ -11,23 +10,20 @@ services:
ports:
- 3080:80
- 3081:81
- 3443:443
- 3443:443 # Usually you would only have this one
- 3444:444 # This is to test ssl passthrough
networks:
- nginx_proxy_manager
environment:
PUID: 1000
PGID: 1000
NODE_ENV: "development"
FORCE_COLOR: 1
# specifically for dev:
DEBUG: 'true'
DEVELOPMENT: 'true'
LE_STAGING: 'true'
# db:
DB_MYSQL_HOST: 'db'
DB_MYSQL_PORT: '3306'
DB_MYSQL_USER: 'npm'
DB_MYSQL_PASSWORD: 'npm'
DB_MYSQL_NAME: 'npm'
DEVELOPMENT: "true"
DB_MYSQL_HOST: "db"
DB_MYSQL_PORT: 3306
DB_MYSQL_USER: "npm"
DB_MYSQL_PASSWORD: "npm"
DB_MYSQL_NAME: "npm"
# ENABLE_SSL_PASSTHROUGH: "true"
# DB_SQLITE_FILE: "/data/database.sqlite"
# DISABLE_IPV6: "true"
volumes:
@ -43,17 +39,30 @@ services:
db:
image: jc21/mariadb-aria
container_name: npm_db
networks:
- nginx_proxy_manager
ports:
- 33306:3306
environment:
MYSQL_ROOT_PASSWORD: "npm"
MYSQL_DATABASE: "npm"
MYSQL_USER: "npm"
MYSQL_PASSWORD: "npm"
volumes:
- db_data:/var/lib/mysql
swagger:
image: "swaggerapi/swagger-ui:latest"
container_name: npm_swagger
ports:
- 3001:80
networks:
- nginx_proxy_manager
environment:
MYSQL_ROOT_PASSWORD: 'npm'
MYSQL_DATABASE: 'npm'
MYSQL_USER: 'npm'
MYSQL_PASSWORD: 'npm'
volumes:
- db_data:/var/lib/mysql
URL: "http://127.0.0.1:3081/api/schema"
PORT: "80"
depends_on:
- npm
volumes:
npm_data:

View File

@ -1,42 +0,0 @@
#!/bin/bash
set -e
CYAN='\E[1;36m'
BLUE='\E[1;34m'
YELLOW='\E[1;33m'
RED='\E[1;31m'
RESET='\E[0m'
export CYAN BLUE YELLOW RED RESET
PUID=${PUID:-0}
PGID=${PGID:-0}
if [[ "$PUID" -ne '0' ]] && [ "$PGID" = '0' ]; then
# set group id to same as user id,
# the user probably forgot to specify the group id and
# it would be ridiculous to intentionally use the root group
# for a non-root user
PGID=$PUID
fi
export PUID PGID
log_info () {
echo -e "${BLUE} ${CYAN}$1${RESET}"
}
log_error () {
echo -e "${RED} $1${RESET}"
}
# The `run` file will only execute 1 line so this helps keep things
# logically separated
log_fatal () {
echo -e "${RED}--------------------------------------${RESET}"
echo -e "${RED}ERROR: $1${RESET}"
echo -e "${RED}--------------------------------------${RESET}"
/run/s6/basedir/bin/halt
exit 1
}

View File

@ -0,0 +1,46 @@
#!/bin/bash
# This command reads the `DISABLE_IPV6` env var and will either enable
# or disable ipv6 in all nginx configs based on this setting.
# Lowercase
DISABLE_IPV6=$(echo "${DISABLE_IPV6:-}" | tr '[:upper:]' '[:lower:]')
CYAN='\E[1;36m'
BLUE='\E[1;34m'
YELLOW='\E[1;33m'
RED='\E[1;31m'
RESET='\E[0m'
FOLDER=$1
if [ "$FOLDER" == "" ]; then
echo -e "${RED} $0 requires a absolute folder path as the first argument!${RESET}"
echo -e "${YELLOW} ie: $0 /data/nginx${RESET}"
exit 1
fi
FILES=$(find "$FOLDER" -type f -name "*.conf")
if [ "$DISABLE_IPV6" == "true" ] || [ "$DISABLE_IPV6" == "on" ] || [ "$DISABLE_IPV6" == "1" ] || [ "$DISABLE_IPV6" == "yes" ]; then
# IPV6 is disabled
echo "Disabling IPV6 in hosts"
echo -e "${BLUE} ${CYAN}Disabling IPV6 in hosts: ${YELLOW}${FOLDER}${RESET}"
# Iterate over configs and run the regex
for FILE in $FILES
do
echo -e " ${BLUE} ${YELLOW}${FILE}${RESET}"
sed -E -i 's/^([^#]*)listen \[::\]/\1#listen [::]/g' "$FILE"
done
else
# IPV6 is enabled
echo -e "${BLUE} ${CYAN}Enabling IPV6 in hosts: ${YELLOW}${FOLDER}${RESET}"
# Iterate over configs and run the regex
for FILE in $FILES
do
echo -e " ${BLUE} ${YELLOW}${FILE}${RESET}"
sed -E -i 's/^(\s*)#listen \[::\]/\1listen [::]/g' "$FILE"
done
fi

View File

@ -0,0 +1,2 @@
*
!.gitignore

View File

@ -0,0 +1,3 @@
*
!.gitignore
!*.sh

View File

@ -0,0 +1,7 @@
#!/usr/bin/with-contenv bash
set -e
mkdir -p /data/logs
echo "Changing ownership of /data/logs to $(id -u):$(id -g)"
chown -R "$(id -u):$(id -g)" /data/logs

View File

@ -0,0 +1,29 @@
#!/usr/bin/with-contenv bash
# ref: https://github.com/linuxserver/docker-baseimage-alpine/blob/master/root/etc/cont-init.d/01-envfile
# in s6, environment variables are written as text files for s6 to monitor
# search through full-path filenames for files ending in "__FILE"
for FILENAME in $(find /var/run/s6/container_environment/ | grep "__FILE$"); do
echo "[secret-init] Evaluating ${FILENAME##*/} ..."
# set SECRETFILE to the contents of the full-path textfile
SECRETFILE=$(cat ${FILENAME})
# SECRETFILE=${FILENAME}
# echo "[secret-init] Set SECRETFILE to ${SECRETFILE}" # DEBUG - rm for prod!
# if SECRETFILE exists / is not null
if [[ -f ${SECRETFILE} ]]; then
# strip the appended "__FILE" from environmental variable name ...
STRIPFILE=$(echo ${FILENAME} | sed "s/__FILE//g")
# echo "[secret-init] Set STRIPFILE to ${STRIPFILE}" # DEBUG - rm for prod!
# ... and set value to contents of secretfile
# since s6 uses text files, this is effectively "export ..."
printf $(cat ${SECRETFILE}) > ${STRIPFILE}
# echo "[secret-init] Set ${STRIPFILE##*/} to $(cat ${STRIPFILE})" # DEBUG - rm for prod!"
echo "[secret-init] Success! ${STRIPFILE##*/} set from ${FILENAME##*/}"
else
echo "[secret-init] cannot find secret in ${FILENAME}"
fi
done

View File

@ -0,0 +1,2 @@
*
!.gitignore

View File

@ -3,4 +3,3 @@ non-interactive = True
webroot-path = /data/letsencrypt-acme-challenge
key-type = ecdsa
elliptic-curve = secp384r1
preferred-chain = ISRG Root X1

View File

@ -30,9 +30,11 @@ server {
set $port "443";
server_name localhost;
access_log /data/logs/fallback_access.log standard;
access_log /data/logs/fallback-access.log standard;
error_log /dev/null crit;
ssl_reject_handshake on;
ssl_certificate /data/nginx/dummycert.pem;
ssl_certificate_key /data/nginx/dummykey.pem;
include conf.d/include/ssl-ciphers.conf;
return 444;
}

View File

@ -1,4 +1,4 @@
location ~* ^.*\.(css|js|jpe?g|gif|png|webp|woff|eot|ttf|svg|ico|css\.map|js\.map)$ {
location ~* ^.*\.(css|js|jpe?g|gif|png|woff|eot|ttf|svg|ico|css\.map|js\.map)$ {
if_modified_since off;
# use the public cache

View File

@ -2,7 +2,7 @@ add_header X-Served-By $host;
proxy_set_header Host $host;
proxy_set_header X-Forwarded-Scheme $scheme;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-For $remote_addr;
proxy_set_header X-Real-IP $remote_addr;
proxy_pass $forward_scheme://$server:$port$request_uri;
proxy_pass $forward_scheme://$server:$port;

View File

@ -1,7 +1,7 @@
# run nginx in foreground
daemon off;
pid /run/nginx/nginx.pid;
user npmuser;
user root;
# Set number of worker processes automatically based on number of CPU cores.
worker_processes auto;
@ -15,7 +15,7 @@ error_log /data/logs/fallback_error.log warn;
include /etc/nginx/modules/*.conf;
events {
include /data/nginx/custom/events[.]conf;
worker_connections 1024;
}
http {
@ -85,6 +85,7 @@ http {
stream {
# Files generated by NPM
include /data/nginx/ssl_passthrough_host/hosts[.]conf;
include /data/nginx/stream/*.conf;
# Custom

View File

@ -1,21 +0,0 @@
#!/command/with-contenv bash
# shellcheck shell=bash
set -e
. /bin/common.sh
cd /app || exit 1
log_info 'Starting backend ...'
if [ "${DEVELOPMENT:-}" = 'true' ]; then
s6-setuidgid npmuser yarn install
exec s6-setuidgid npmuser bash -c 'export HOME=/tmp/npmuserhome;node --max_old_space_size=250 --abort_on_uncaught_exception node_modules/nodemon/bin/nodemon.js'
else
while :
do
s6-setuidgid npmuser bash -c 'export HOME=/tmp/npmuserhome;node --abort_on_uncaught_exception --max_old_space_size=250 index.js'
sleep 1
done
fi

View File

@ -1 +0,0 @@
longrun

View File

@ -1,21 +0,0 @@
#!/command/with-contenv bash
# shellcheck shell=bash
set -e
# This service is DEVELOPMENT only.
if [ "$DEVELOPMENT" = 'true' ]; then
. /bin/common.sh
cd /app/frontend || exit 1
HOME=/tmp/npmuserhome
export HOME
mkdir -p /app/frontend/dist
chown -R "$PUID:$PGID" /app/frontend/dist
log_info 'Starting frontend ...'
s6-setuidgid npmuser yarn install
exec s6-setuidgid npmuser yarn watch
else
exit 0
fi

View File

@ -1 +0,0 @@
longrun

View File

@ -1,9 +0,0 @@
#!/command/with-contenv bash
# shellcheck shell=bash
set -e
. /bin/common.sh
log_info 'Starting nginx ...'
exec s6-setuidgid npmuser nginx

View File

@ -1 +0,0 @@
longrun

View File

@ -1,18 +0,0 @@
#!/command/with-contenv bash
# shellcheck shell=bash
set -e
. /bin/common.sh
if [ "$(id -u)" != "0" ]; then
log_fatal "This docker container must be run as root, do not specify a user.\nYou can specify PUID and PGID env vars to run processes as that user and group after initialization."
fi
. /etc/s6-overlay/s6-rc.d/prepare/10-npmuser.sh
. /etc/s6-overlay/s6-rc.d/prepare/20-paths.sh
. /etc/s6-overlay/s6-rc.d/prepare/30-ownership.sh
. /etc/s6-overlay/s6-rc.d/prepare/40-dynamic.sh
. /etc/s6-overlay/s6-rc.d/prepare/50-ipv6.sh
. /etc/s6-overlay/s6-rc.d/prepare/60-secrets.sh
. /etc/s6-overlay/s6-rc.d/prepare/90-banner.sh

View File

@ -1,20 +0,0 @@
#!/command/with-contenv bash
# shellcheck shell=bash
set -e
log_info 'Configuring npmuser ...'
if id -u npmuser; then
# user already exists
usermod -u "$PUID" npmuser || exit 1
else
# Add npmuser user
useradd -o -u "$PUID" -U -d /tmp/npmuserhome -s /bin/false npmuser || exit 1
fi
usermod -G "$PGID" npmuser || exit 1
groupmod -o -g "$PGID" npmuser || exit 1
# Home for npmuser
mkdir -p /tmp/npmuserhome
chown -R "$PUID:$PGID" /tmp/npmuserhome

View File

@ -1,41 +0,0 @@
#!/command/with-contenv bash
# shellcheck shell=bash
set -e
log_info 'Checking paths ...'
# Ensure /data is mounted
if [ ! -d '/data' ]; then
log_fatal '/data is not mounted! Check your docker configuration.'
fi
# Ensure /etc/letsencrypt is mounted
if [ ! -d '/etc/letsencrypt' ]; then
log_fatal '/etc/letsencrypt is not mounted! Check your docker configuration.'
fi
# Create required folders
mkdir -p \
/data/nginx \
/data/custom_ssl \
/data/logs \
/data/access \
/data/nginx/default_host \
/data/nginx/default_www \
/data/nginx/proxy_host \
/data/nginx/redirection_host \
/data/nginx/stream \
/data/nginx/dead_host \
/data/nginx/temp \
/data/letsencrypt-acme-challenge \
/run/nginx \
/tmp/nginx/body \
/var/log/nginx \
/var/lib/nginx/cache/public \
/var/lib/nginx/cache/private \
/var/cache/nginx/proxy_temp
touch /var/log/nginx/error.log || true
chmod 777 /var/log/nginx/error.log || true
chmod -R 777 /var/cache/nginx || true
chmod 644 /etc/logrotate.d/nginx-proxy-manager

View File

@ -1,24 +0,0 @@
#!/command/with-contenv bash
# shellcheck shell=bash
set -e
log_info 'Setting ownership ...'
# root
chown root /tmp/nginx
# npmuser
chown -R "$PUID:$PGID" /data \
/etc/letsencrypt \
/run/nginx \
/tmp/nginx \
/var/cache/nginx \
/var/lib/logrotate \
/var/lib/nginx \
/var/log/nginx
# Don't chown entire /etc/nginx folder as this causes crashes on some systems
chown -R "$PUID:$PGID" /etc/nginx/nginx \
/etc/nginx/nginx.conf \
/etc/nginx/conf.d

View File

@ -1,17 +0,0 @@
#!/command/with-contenv bash
# shellcheck shell=bash
set -e
log_info 'Dynamic resolvers ...'
DISABLE_IPV6=$(echo "${DISABLE_IPV6:-}" | tr '[:upper:]' '[:lower:]')
# Dynamically generate resolvers file, if resolver is IPv6, enclose in `[]`
# thanks @tfmm
if [ "$DISABLE_IPV6" == "true" ] || [ "$DISABLE_IPV6" == "on" ] || [ "$DISABLE_IPV6" == "1" ] || [ "$DISABLE_IPV6" == "yes" ];
then
echo resolver "$(awk 'BEGIN{ORS=" "} $1=="nameserver" { sub(/%.*$/,"",$2); print ($2 ~ ":")? "["$2"]": $2}' /etc/resolv.conf) ipv6=off valid=10s;" > /etc/nginx/conf.d/include/resolvers.conf
else
echo resolver "$(awk 'BEGIN{ORS=" "} $1=="nameserver" { sub(/%.*$/,"",$2); print ($2 ~ ":")? "["$2"]": $2}' /etc/resolv.conf) valid=10s;" > /etc/nginx/conf.d/include/resolvers.conf
fi

View File

@ -1,36 +0,0 @@
#!/bin/bash
# This command reads the `DISABLE_IPV6` env var and will either enable
# or disable ipv6 in all nginx configs based on this setting.
log_info 'IPv6 ...'
# Lowercase
DISABLE_IPV6=$(echo "${DISABLE_IPV6:-}" | tr '[:upper:]' '[:lower:]')
process_folder () {
FILES=$(find "$1" -type f -name "*.conf")
SED_REGEX=
if [ "$DISABLE_IPV6" == "true" ] || [ "$DISABLE_IPV6" == "on" ] || [ "$DISABLE_IPV6" == "1" ] || [ "$DISABLE_IPV6" == "yes" ]; then
# IPV6 is disabled
echo "Disabling IPV6 in hosts in: $1"
SED_REGEX='s/^([^#]*)listen \[::\]/\1#listen [::]/g'
else
# IPV6 is enabled
echo "Enabling IPV6 in hosts in: $1"
SED_REGEX='s/^(\s*)#listen \[::\]/\1listen [::]/g'
fi
for FILE in $FILES
do
echo "- ${FILE}"
sed -E -i "$SED_REGEX" "$FILE"
done
# ensure the files are still owned by the npmuser
chown -R "$PUID:$PGID" "$1"
}
process_folder /etc/nginx/conf.d
process_folder /data/nginx

View File

@ -1,30 +0,0 @@
#!/command/with-contenv bash
# shellcheck shell=bash
set -e
# in s6, environment variables are written as text files for s6 to monitor
# search through full-path filenames for files ending in "__FILE"
log_info 'Docker secrets ...'
for FILENAME in $(find /var/run/s6/container_environment/ | grep "__FILE$"); do
echo "[secret-init] Evaluating ${FILENAME##*/} ..."
# set SECRETFILE to the contents of the full-path textfile
SECRETFILE=$(cat "${FILENAME}")
# if SECRETFILE exists / is not null
if [[ -f "${SECRETFILE}" ]]; then
# strip the appended "__FILE" from environmental variable name ...
STRIPFILE=$(echo "${FILENAME}" | sed "s/__FILE//g")
# echo "[secret-init] Set STRIPFILE to ${STRIPFILE}" # DEBUG - rm for prod!
# ... and set value to contents of secretfile
# since s6 uses text files, this is effectively "export ..."
printf $(cat "${SECRETFILE}") > "${STRIPFILE}"
# echo "[secret-init] Set ${STRIPFILE##*/} to $(cat ${STRIPFILE})" # DEBUG - rm for prod!"
echo "Success: ${STRIPFILE##*/} set from ${FILENAME##*/}"
else
echo "Cannot find secret in ${FILENAME}"
fi
done

View File

@ -1,17 +0,0 @@
#!/command/with-contenv bash
# shellcheck shell=bash
set -e
echo "
-------------------------------------
_ _ ____ __ __
| \ | | _ \| \/ |
| \| | |_) | |\/| |
| |\ | __/| | | |
|_| \_|_| |_| |_|
-------------------------------------
User ID: $PUID
Group ID: $PGID
-------------------------------------
"

View File

@ -1 +0,0 @@
oneshot

View File

@ -1,2 +0,0 @@
# shellcheck shell=bash
/etc/s6-overlay/s6-rc.d/prepare/00-all.sh

View File

@ -0,0 +1,6 @@
#!/usr/bin/execlineb -S1
if { s6-test ${1} -ne 0 }
if { s6-test ${1} -ne 256 }
s6-svscanctl -t /var/run/s6/services

Some files were not shown because too many files have changed in this diff