Compare commits


12 Commits

SHA1 Message Date
e677bfa2e8 Merge pull request #4073 from NginxProxyManager/develop
v2.12.0
2024-10-16 15:41:55 +10:00
fe2d8895d6 Cypress test for http and dns cert provision 2024-10-16 14:53:57 +10:00
5bdc05878f Fix issues with certbot command when using LE_SERVER 2024-10-16 11:23:58 +10:00
929ac3bd7c Adds env var to set certbot acme server
this is required for test suite to use dns certbot request
without talking to live or staging letsencrypt servers or
production level dns providers. This is a backwards port
from the v3 branch and opens the door for a full certificate
cypress test
2024-10-16 11:06:29 +10:00
f48e1b46a8 Updated swagger cypress package,
which works with proxies
2024-10-16 08:32:49 +10:00
351ba8dacd More tests for certificates, fixed schema problems 2024-10-16 08:32:49 +10:00
3b89d5f380 Merge pull request #4068 from Hadatko/fixWedosParamDescription
fixed wedos password description
2024-10-15 10:23:33 +10:00
e5aa880ec4 fixed wedos password description
Signed-off-by: Dusan Cervenka <cervenka.dusan@gmail.com>
2024-10-15 01:58:15 +02:00
7322d35bd7 Fix CI
2024-10-14 07:39:50 +10:00
81b89185f2 Squid ci fixes
2024-10-13 22:15:18 +10:00
f2bb8f2b3d Squid ci fixes 2024-10-13 22:04:07 +10:00
b01817bc7f Adds squid to dev/CI stacks
- for testing forwarded ip address later
2024-10-13 21:54:58 +10:00
26 changed files with 540 additions and 127 deletions

Jenkinsfile vendored
View File

@@ -127,6 +127,11 @@ pipeline {
junit 'test/results/junit/*'
sh 'docker-compose down --remove-orphans --volumes -t 30 || true'
}
unstable {
dir(path: 'testing/results') {
archiveArtifacts(allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml')
}
}
}
}
stage('Test Mysql') {
@@ -155,6 +160,11 @@ pipeline {
junit 'test/results/junit/*'
sh 'docker-compose down --remove-orphans --volumes -t 30 || true'
}
unstable {
dir(path: 'testing/results') {
archiveArtifacts(allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml')
}
}
}
}
stage('MultiArch Build') {
@@ -204,20 +214,13 @@ pipeline {
always {
sh 'echo Reverting ownership'
sh 'docker run --rm -v "$(pwd):/data" jc21/ci-tools chown -R "$(id -u):$(id -g)" /data'
}
success {
juxtapose event: 'success'
sh 'figlet "SUCCESS"'
printResult(true)
}
failure {
archiveArtifacts(artifacts: 'debug/**/*.*', allowEmptyArchive: true)
juxtapose event: 'failure'
sh 'figlet "FAILURE"'
}
unstable {
archiveArtifacts(artifacts: 'debug/**/*.*', allowEmptyArchive: true)
juxtapose event: 'unstable'
sh 'figlet "UNSTABLE"'
}
}
}

View File

@@ -3,27 +3,29 @@ const fs = require('fs');
const https = require('https');
const tempWrite = require('temp-write');
const moment = require('moment');
const archiver = require('archiver');
const path = require('path');
const { isArray } = require('lodash');
const logger = require('../logger').ssl;
const config = require('../lib/config');
const error = require('../lib/error');
const utils = require('../lib/utils');
const certbot = require('../lib/certbot');
const certificateModel = require('../models/certificate');
const tokenModel = require('../models/token');
const dnsPlugins = require('../global/certbot-dns-plugins.json');
const internalAuditLog = require('./audit-log');
const internalNginx = require('./nginx');
const internalHost = require('./host');
const certbot = require('../lib/certbot');
const archiver = require('archiver');
const path = require('path');
const { isArray } = require('lodash');
const letsencryptStaging = config.useLetsencryptStaging();
const letsencryptServer = config.useLetsencryptServer();
const letsencryptConfig = '/etc/letsencrypt.ini';
const certbotCommand = 'certbot';
function omissions() {
return ['is_deleted'];
return ['is_deleted', 'owner.is_deleted'];
}
const internalCertificate = {
@@ -207,6 +209,7 @@ const internalCertificate = {
.patchAndFetchById(certificate.id, {
expires_on: moment(cert_info.dates.to, 'X').format('YYYY-MM-DD HH:mm:ss')
})
.then(utils.omitRow(omissions()))
.then((saved_row) => {
// Add cert data for audit log
saved_row.meta = _.assign({}, saved_row.meta, {
@@ -730,29 +733,29 @@ const internalCertificate = {
return utils.exec('openssl x509 -in ' + certificate_file + ' -subject -noout')
.then((result) => {
// Examples:
// subject=CN = *.jc21.com
// subject=CN = something.example.com
const regex = /(?:subject=)?[^=]+=\s+(\S+)/gim;
const match = regex.exec(result);
if (typeof match[1] === 'undefined') {
throw new error.ValidationError('Could not determine subject from certificate: ' + result);
if (match && typeof match[1] !== 'undefined') {
certData['cn'] = match[1];
}
certData['cn'] = match[1];
})
.then(() => {
return utils.exec('openssl x509 -in ' + certificate_file + ' -issuer -noout');
})
.then((result) => {
// Examples:
// issuer=C = US, O = Let's Encrypt, CN = Let's Encrypt Authority X3
// issuer=C = US, O = Let's Encrypt, CN = E5
// issuer=O = NginxProxyManager, CN = NginxProxyManager Intermediate CA","O = NginxProxyManager, CN = NginxProxyManager Intermediate CA
const regex = /^(?:issuer=)?(.*)$/gim;
const match = regex.exec(result);
if (typeof match[1] === 'undefined') {
throw new error.ValidationError('Could not determine issuer from certificate: ' + result);
if (match && typeof match[1] !== 'undefined') {
certData['issuer'] = match[1];
}
certData['issuer'] = match[1];
})
.then(() => {
return utils.exec('openssl x509 -in ' + certificate_file + ' -dates -noout');
@@ -827,17 +830,18 @@ const internalCertificate = {
requestLetsEncryptSsl: (certificate) => {
logger.info('Requesting Let\'sEncrypt certificates for Cert #' + certificate.id + ': ' + certificate.domain_names.join(', '));
const cmd = certbotCommand + ' certonly ' +
'--config "' + letsencryptConfig + '" ' +
const cmd = `${certbotCommand} certonly ` +
`--config '${letsencryptConfig}' ` +
'--work-dir "/tmp/letsencrypt-lib" ' +
'--logs-dir "/tmp/letsencrypt-log" ' +
'--cert-name "npm-' + certificate.id + '" ' +
`--cert-name "npm-${certificate.id}" ` +
'--agree-tos ' +
'--authenticator webroot ' +
'--email "' + certificate.meta.letsencrypt_email + '" ' +
`--email '${certificate.meta.letsencrypt_email}' ` +
'--preferred-challenges "dns,http" ' +
'--domains "' + certificate.domain_names.join(',') + '" ' +
(letsencryptStaging ? '--staging' : '');
`--domains "${certificate.domain_names.join(',')}" ` +
(letsencryptServer !== null ? `--server '${letsencryptServer}' ` : '') +
(letsencryptStaging && letsencryptServer === null ? '--staging ' : '');
logger.info('Command:', cmd);
@@ -868,25 +872,26 @@ const internalCertificate = {
const hasConfigArg = certificate.meta.dns_provider !== 'route53';
let mainCmd = certbotCommand + ' certonly ' +
'--config "' + letsencryptConfig + '" ' +
`--config '${letsencryptConfig}' ` +
'--work-dir "/tmp/letsencrypt-lib" ' +
'--logs-dir "/tmp/letsencrypt-log" ' +
'--cert-name "npm-' + certificate.id + '" ' +
`--cert-name 'npm-${certificate.id}' ` +
'--agree-tos ' +
'--email "' + certificate.meta.letsencrypt_email + '" ' +
'--domains "' + certificate.domain_names.join(',') + '" ' +
'--authenticator ' + dnsPlugin.full_plugin_name + ' ' +
`--email '${certificate.meta.letsencrypt_email}' ` +
`--domains '${certificate.domain_names.join(',')}' ` +
`--authenticator '${dnsPlugin.full_plugin_name}' ` +
(
hasConfigArg
? '--' + dnsPlugin.full_plugin_name + '-credentials "' + credentialsLocation + '"'
? `--${dnsPlugin.full_plugin_name}-credentials '${credentialsLocation}' `
: ''
) +
(
certificate.meta.propagation_seconds !== undefined
? ' --' + dnsPlugin.full_plugin_name + '-propagation-seconds ' + certificate.meta.propagation_seconds
? `--${dnsPlugin.full_plugin_name}-propagation-seconds '${certificate.meta.propagation_seconds}' `
: ''
) +
(letsencryptStaging ? ' --staging' : '');
(letsencryptServer !== null ? `--server '${letsencryptServer}' ` : '') +
(letsencryptStaging && letsencryptServer === null ? '--staging ' : '');
// Prepend the path to the credentials file as an environment variable
if (certificate.meta.dns_provider === 'route53') {
@@ -963,14 +968,15 @@ const internalCertificate = {
logger.info('Renewing Let\'sEncrypt certificates for Cert #' + certificate.id + ': ' + certificate.domain_names.join(', '));
const cmd = certbotCommand + ' renew --force-renewal ' +
'--config "' + letsencryptConfig + '" ' +
`--config '${letsencryptConfig}' ` +
'--work-dir "/tmp/letsencrypt-lib" ' +
'--logs-dir "/tmp/letsencrypt-log" ' +
'--cert-name "npm-' + certificate.id + '" ' +
`--cert-name 'npm-${certificate.id}' ` +
'--preferred-challenges "dns,http" ' +
'--no-random-sleep-on-renew ' +
'--disable-hook-validation ' +
(letsencryptStaging ? '--staging' : '');
(letsencryptServer !== null ? `--server '${letsencryptServer}' ` : '') +
(letsencryptStaging && letsencryptServer === null ? '--staging ' : '');
logger.info('Command:', cmd);
@@ -995,13 +1001,14 @@ const internalCertificate = {
logger.info(`Renewing Let'sEncrypt certificates via ${dnsPlugin.name} for Cert #${certificate.id}: ${certificate.domain_names.join(', ')}`);
let mainCmd = certbotCommand + ' renew --force-renewal ' +
'--config "' + letsencryptConfig + '" ' +
`--config "${letsencryptConfig}" ` +
'--work-dir "/tmp/letsencrypt-lib" ' +
'--logs-dir "/tmp/letsencrypt-log" ' +
'--cert-name "npm-' + certificate.id + '" ' +
`--cert-name 'npm-${certificate.id}' ` +
'--disable-hook-validation ' +
'--no-random-sleep-on-renew ' +
(letsencryptStaging ? ' --staging' : '');
(letsencryptServer !== null ? `--server '${letsencryptServer}' ` : '') +
(letsencryptStaging && letsencryptServer === null ? '--staging ' : '');
// Prepend the path to the credentials file as an environment variable
if (certificate.meta.dns_provider === 'route53') {
@@ -1027,12 +1034,13 @@ const internalCertificate = {
logger.info('Revoking Let\'sEncrypt certificates for Cert #' + certificate.id + ': ' + certificate.domain_names.join(', '));
const mainCmd = certbotCommand + ' revoke ' +
'--config "' + letsencryptConfig + '" ' +
`--config '${letsencryptConfig}' ` +
'--work-dir "/tmp/letsencrypt-lib" ' +
'--logs-dir "/tmp/letsencrypt-log" ' +
'--cert-path "/etc/letsencrypt/live/npm-' + certificate.id + '/fullchain.pem" ' +
`--cert-path '/etc/letsencrypt/live/npm-${certificate.id}/fullchain.pem' ` +
'--delete-after-revoke ' +
(letsencryptStaging ? '--staging' : '');
(letsencryptServer !== null ? `--server '${letsencryptServer}' ` : '') +
(letsencryptStaging && letsencryptServer === null ? '--staging ' : '');
// Don't fail command if file does not exist
const delete_credentialsCmd = `rm -f '/etc/letsencrypt/credentials/credentials-${certificate.id}' || true`;

View File

@@ -180,5 +180,15 @@ module.exports = {
*/
useLetsencryptStaging: function () {
return !!process.env.LE_STAGING;
},
/**
* @returns {string|null}
*/
useLetsencryptServer: function () {
if (process.env.LE_SERVER) {
return process.env.LE_SERVER;
}
return null;
}
};
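A short usage sketch of the new helper (the LE_SERVER value shown is the one used by the dev/CI compose files later in this diff):

const config = require('../lib/config');

// With LE_SERVER=https://ca.internal/acme/acme/directory in the environment,
// useLetsencryptServer() returns that URL; with LE_SERVER unset it returns
// null and the existing LE_STAGING behaviour is unchanged.
const letsencryptServer = config.useLetsencryptServer();
const letsencryptStaging = config.useLetsencryptStaging();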

View File

@@ -24,22 +24,34 @@
"description": "Nice Name for the custom certificate"
},
"domain_names": {
"$ref": "../common.json#/properties/domain_names"
"description": "Domain Names separated by a comma",
"type": "array",
"maxItems": 100,
"uniqueItems": true,
"items": {
"type": "string",
"pattern": "^[^&| @!#%^();:/\\\\}{=+?<>,~`'\"]+$"
}
},
"expires_on": {
"description": "Date and time of expiration",
"readOnly": true,
"type": "string"
},
"owner": {
"$ref": "./user-object.json"
},
"meta": {
"type": "object",
"additionalProperties": false,
"properties": {
"letsencrypt_email": {
"type": "string"
"certificate": {
"type": "string",
"minLength": 1
},
"letsencrypt_agree": {
"type": "boolean"
"certificate_key": {
"type": "string",
"minLength": 1
},
"dns_challenge": {
"type": "boolean"
@@ -50,13 +62,18 @@
"dns_provider_credentials": {
"type": "string"
},
"letsencrypt_agree": {
"type": "boolean"
},
"letsencrypt_certificate": {
"type": "object"
},
"letsencrypt_email": {
"type": "string"
},
"propagation_seconds": {
"anyOf": [
{
"type": "integer",
"minimum": 0
}
]
"type": "integer",
"minimum": 0
}
}
}
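A quick sketch of what the domain_names item pattern added above accepts and rejects (example values are illustrative only):

// The character class excludes whitespace and shell/URL metacharacters.
const pattern = /^[^&| @!#%^();:\/\\}{=+?<>,~`'"]+$/;
pattern.test('website1.example.com'); // true
pattern.test('*.example.com');        // true, wildcards are not excluded
pattern.test('bad domain.com');       // false, contains a space
pattern.test('evil.com;ls');          // false, contains ";"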

View File

@@ -55,6 +55,25 @@
"certificate_key": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQC1n9j9C5Bes1nd\nqACDckERauxXVNKCnUlUM1buGBx1xc+j2e2Ar23wUJJuWBY18VfT8yqfqVDktO2w\nrbmvZvLuPmXePOKbIKS+XXh+2NG9L5bDG9rwGFCRXnbQj+GWCdMfzx14+CR1IHge\nYz6Cv/Si2/LJPCh/CoBfM4hUQJON3lxAWrWBpdbZnKYMrxuPBRfW9OuzTbCVXToQ\noxRAHiOR9081Xn1WeoKr7kVBIa5UphlvWXa12w1YmUwJu7YndnJGIavLWeNCVc7Z\nEo+nS8Wr/4QWicatIWZXpVaEOPhRoeplQDxNWg5b/Q26rYoVd7PrCmRs7sVcH79X\nzGONeH1PAgMBAAECggEAANb3Wtwl07pCjRrMvc7WbC0xYIn82yu8/g2qtjkYUJcU\nia5lQbYN7RGCS85Oc/tkq48xQEG5JQWNH8b918jDEMTrFab0aUEyYcru1q9L8PL6\nYHaNgZSrMrDcHcS8h0QOXNRJT5jeGkiHJaTR0irvB526tqF3knbK9yW22KTfycUe\na0Z9voKn5xRk1DCbHi/nk2EpT7xnjeQeLFaTIRXbS68omkr4YGhwWm5OizoyEGZu\nW0Zum5BkQyMr6kor3wdxOTG97ske2rcyvvHi+ErnwL0xBv0qY0Dhe8DpuXpDezqw\no72yY8h31Fu84i7sAj24YuE5Df8DozItFXQpkgbQ6QKBgQDPrufhvIFm2S/MzBdW\nH8JxY7CJlJPyxOvc1NIl9RczQGAQR90kx52cgIcuIGEG6/wJ/xnGfMmW40F0DnQ+\nN+oLgB9SFxeLkRb7s9Z/8N3uIN8JJFYcerEOiRQeN2BXEEWJ7bUThNtsVrAcKoUh\nELsDmnHW/3V+GKwhd0vpk842+wKBgQDf4PGLG9PTE5tlAoyHFodJRd2RhTJQkwsU\nMDNjLJ+KecLv+Nl+QiJhoflG1ccqtSFlBSCG067CDQ5LV0xm3mLJ7pfJoMgjcq31\nqjEmX4Ls91GuVOPtbwst3yFKjsHaSoKB5fBvWRcKFpBUezM7Qcw2JP3+dQT+bQIq\ncMTkRWDSvQKBgQDOdCQFDjxg/lR7NQOZ1PaZe61aBz5P3pxNqa7ClvMaOsuEQ7w9\nvMYcdtRq8TsjA2JImbSI0TIg8gb2FQxPcYwTJKl+FICOeIwtaSg5hTtJZpnxX5LO\nutTaC0DZjNkTk5RdOdWA8tihyUdGqKoxJY2TVmwGe2rUEDjFB++J4inkEwKBgB6V\ng0nmtkxanFrzOzFlMXwgEEHF+Xaqb9QFNa/xs6XeNnREAapO7JV75Cr6H2hFMFe1\nmJjyqCgYUoCWX3iaHtLJRnEkBtNY4kzyQB6m46LtsnnnXO/dwKA2oDyoPfFNRoDq\nYatEd3JIXNU9s2T/+x7WdOBjKhh72dTkbPFmTPDdAoGAU6rlPBevqOFdObYxdPq8\nEQWu44xqky3Mf5sBpOwtu6rqCYuziLiN7K4sjN5GD5mb1cEU+oS92ZiNcUQ7MFXk\n8yTYZ7U0VcXyAcpYreWwE8thmb0BohJBr+Mp3wLTx32x0HKdO6vpUa0d35LUTUmM\nRrKmPK/msHKK/sVHiL+NFqo=\n-----END PRIVATE KEY-----\n"
}
}
},
"schema": {
"type": "object",
"additionalProperties": false,
"required": ["certificate", "certificate_key"],
"properties": {
"certificate": {
"type": "string",
"minLength": 1
},
"certificate_key": {
"type": "string",
"minLength": 1
},
"intermediate_certificate": {
"type": "string",
"minLength": 1
}
}
}
}
}

View File

@@ -3,6 +3,8 @@
# This file assumes that the frontend has been built using ./scripts/frontend-build
FROM nginxproxymanager/testca AS testca
FROM letsencrypt/pebble AS pebbleca
FROM nginxproxymanager/nginx-full:certbot-node
ARG TARGETPLATFORM
@@ -45,6 +47,8 @@ RUN yarn install \
# add late to limit cache-busting by modifications
COPY docker/rootfs /
COPY --from=pebbleca /test/certs/pebble.minica.pem /etc/ssl/certs/pebble.minica.pem
COPY --from=testca /home/step/certs/root_ca.crt /etc/ssl/certs/NginxProxyManager.crt
# Remove frontend service not required for prod, dev nginx config as well
RUN rm -rf /etc/s6-overlay/s6-rc.d/user/contents.d/frontend /etc/nginx/conf.d/dev.conf \

View File

@@ -1,7 +1,10 @@
FROM nginxproxymanager/testca AS testca
FROM letsencrypt/pebble AS pebbleca
FROM nginxproxymanager/nginx-full:certbot-node
LABEL maintainer="Jamie Curnow <jc@jc21.com>"
# See: https://github.com/just-containers/s6-overlay/blob/master/README.md
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
ENV SUPPRESS_NO_CONFIG_WARNING=1 \
S6_BEHAVIOUR_IF_STAGE2_FAILS=1 \
S6_CMD_WAIT_FOR_SERVICES_MAXTIME=0 \
@@ -17,17 +20,20 @@ RUN echo "fs.file-max = 65535" > /etc/sysctl.conf \
&& rm -rf /var/lib/apt/lists/*
# Task
RUN cd /usr \
&& curl -sL https://taskfile.dev/install.sh | sh \
&& cd /root
WORKDIR /usr
RUN curl -sL https://taskfile.dev/install.sh | sh
WORKDIR /root
COPY rootfs /
RUN rm -f /etc/nginx/conf.d/production.conf
RUN chmod 644 /etc/logrotate.d/nginx-proxy-manager
# s6 overlay
COPY scripts/install-s6 /tmp/install-s6
RUN /tmp/install-s6 "${TARGETPLATFORM}" && rm -f /tmp/install-s6
RUN rm -f /etc/nginx/conf.d/production.conf \
&& chmod 644 /etc/logrotate.d/nginx-proxy-manager \
&& /tmp/install-s6 "${TARGETPLATFORM}" \
&& rm -f /tmp/install-s6
# Certs for testing purposes
COPY --from=pebbleca /test/certs/pebble.minica.pem /etc/ssl/certs/pebble.minica.pem
COPY --from=testca /home/step/certs/root_ca.crt /etc/ssl/certs/NginxProxyManager.crt
EXPOSE 80 81 443
ENTRYPOINT [ "/init" ]

docker/dev/squid.conf Normal file
View File

@@ -0,0 +1,92 @@
# WELCOME TO SQUID 6.6
# ----------------------------
#
# This is the documentation for the Squid configuration file.
# This documentation can also be found online at:
# http://www.squid-cache.org/Doc/config/
#
# You may wish to look at the Squid home page and wiki for the
# FAQ and other documentation:
# http://www.squid-cache.org/
# https://wiki.squid-cache.org/SquidFaq
# https://wiki.squid-cache.org/ConfigExamples
#
# Example rule allowing access from your local networks.
# Adapt to list your (internal) IP networks from where browsing
# should be allowed
acl localnet src 0.0.0.1-0.255.255.255 # RFC 1122 "this" network (LAN)
acl localnet src 10.0.0.0/8 # RFC 1918 local private network (LAN)
acl localnet src 100.64.0.0/10 # RFC 6598 shared address space (CGN)
acl localnet src 169.254.0.0/16 # RFC 3927 link-local (directly plugged) machines
acl localnet src 172.0.0.0/8
acl localnet src 192.168.0.0/16 # RFC 1918 local private network (LAN)
acl localnet src fc00::/7 # RFC 4193 local private network range
acl localnet src fe80::/10 # RFC 4291 link-local (directly plugged) machines
acl SSL_ports port 443
acl Safe_ports port 80 # http
acl Safe_ports port 81
acl Safe_ports port 443 # https
#
# Recommended minimum Access Permission configuration:
#
# Deny requests to certain unsafe ports
http_access deny !Safe_ports
# Deny CONNECT to other than secure SSL ports
http_access deny CONNECT !SSL_ports
# Only allow cachemgr access from localhost
http_access allow localhost manager
http_access deny manager
# This default configuration only allows localhost requests because a more
# permissive Squid installation could introduce new attack vectors into the
# network by proxying external TCP connections to unprotected services.
http_access allow localhost
# The two deny rules below are unnecessary in this default configuration
# because they are followed by a "deny all" rule. However, they may become
# critically important when you start allowing external requests below them.
# Protect web applications running on the same server as Squid. They often
# assume that only local users can access them at "localhost" ports.
http_access deny to_localhost
# Protect cloud servers that provide local users with sensitive info about
# their server via certain well-known link-local (a.k.a. APIPA) addresses.
http_access deny to_linklocal
#
# INSERT YOUR OWN RULE(S) HERE TO ALLOW ACCESS FROM YOUR CLIENTS
#
include /etc/squid/conf.d/*.conf
# For example, to allow access from your local networks, you may uncomment the
# following rule (and/or add rules that match your definition of "local"):
# http_access allow localnet
# And finally deny all other access to this proxy
http_access deny all
# Squid normally listens to port 3128
http_port 3128
# Leave coredumps in the first cache dir
coredump_dir /var/spool/squid
#
# Add any of your own refresh_pattern entries above these.
#
refresh_pattern ^ftp: 1440 20% 10080
refresh_pattern -i (/cgi-bin/|\?) 0 0% 0
refresh_pattern \/(Packages|Sources)(|\.bz2|\.gz|\.xz)$ 0 0% 0 refresh-ims
refresh_pattern \/Release(|\.gpg)$ 0 0% 0 refresh-ims
refresh_pattern \/InRelease$ 0 0% 0 refresh-ims
refresh_pattern \/(Translation-.*)(|\.bz2|\.gz|\.xz)$ 0 0% 0 refresh-ims
# example pattern for deb packages
#refresh_pattern (\.deb|\.udeb)$ 129600 100% 129600
refresh_pattern . 0 20% 4320
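The compose changes below point the cypress containers at this proxy via HTTP_PROXY/HTTPS_PROXY set to squid:3128, which is what the "forwarded ip address" testing mentioned in the commit message relies on. A rough Node equivalent (a sketch, not part of the diff; the target URL is the CI baseUrl used later):

const axios = require('axios');

// Route a request through the squid container the same way the cypress
// containers do when HTTP_PROXY/HTTPS_PROXY are set.
axios.get('http://fullstack:81/api/schema', {
	proxy: { protocol: 'http', host: 'squid', port: 3128 },
}).then((res) => console.log(res.status));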

View File

@@ -9,6 +9,9 @@ services:
environment:
DEBUG: 'true'
FORCE_COLOR: 1
# Required for DNS Certificate provisioning in CI
LE_SERVER: 'https://ca.internal/acme/acme/directory'
REQUESTS_CA_BUNDLE: '/etc/ssl/certs/NginxProxyManager.crt'
volumes:
- 'npm_data_ci:/data'
- 'npm_le_ci:/etc/letsencrypt'
@@ -91,14 +94,25 @@ services:
context: ../
dockerfile: test/cypress/Dockerfile
environment:
CYPRESS_baseUrl: 'http://fullstack:81'
HTTP_PROXY: 'squid:3128'
HTTPS_PROXY: 'squid:3128'
volumes:
- 'cypress_logs:/results'
- './dev/resolv.conf:/etc/resolv.conf:ro'
- '/etc/localtime:/etc/localtime:ro'
command: cypress run --browser chrome --config-file=cypress/config/ci.js
networks:
- fulltest
squid:
image: ubuntu/squid
volumes:
- './dev/squid.conf:/etc/squid/squid.conf:ro'
- './dev/resolv.conf:/etc/resolv.conf:ro'
- '/etc/localtime:/etc/localtime:ro'
networks:
- fulltest
volumes:
cypress_logs:
npm_data_ci:

View File

@@ -1,7 +1,7 @@
# WARNING: This is a DEVELOPMENT docker-compose file, it should not be used for production.
services:
npm:
fullstack:
image: nginxproxymanager:dev
container_name: npm_core
build:
@@ -12,7 +12,11 @@ services:
- 3081:81
- 3443:443
networks:
- nginx_proxy_manager
nginx_proxy_manager:
aliases:
- website1.example.com
- website2.example.com
- website3.example.com
environment:
PUID: 1000
PGID: 1000
@@ -29,12 +33,20 @@ services:
DB_MYSQL_NAME: 'npm'
# DB_SQLITE_FILE: "/data/database.sqlite"
# DISABLE_IPV6: "true"
# Required for DNS Certificate provisioning testing:
LE_SERVER: 'https://ca.internal/acme/acme/directory'
REQUESTS_CA_BUNDLE: '/etc/ssl/certs/NginxProxyManager.crt'
volumes:
- npm_data:/data
- le_data:/etc/letsencrypt
- './dev/resolv.conf:/etc/resolv.conf:ro'
- ../backend:/app
- ../frontend:/app/frontend
- ../global:/app/global
healthcheck:
test: ["CMD", "/usr/bin/check-health"]
interval: 10s
timeout: 3s
depends_on:
- db
working_dir: /app
@@ -54,6 +66,23 @@ services:
volumes:
- db_data:/var/lib/mysql
stepca:
image: jc21/testca
volumes:
- './dev/resolv.conf:/etc/resolv.conf:ro'
- '/etc/localtime:/etc/localtime:ro'
networks:
nginx_proxy_manager:
aliases:
- ca.internal
dnsrouter:
image: jc21/dnsrouter
volumes:
- ./dev/dnsrouter-config.json.tmp:/dnsrouter-config.json:ro
networks:
- nginx_proxy_manager
swagger:
image: swaggerapi/swagger-ui:latest
container_name: npm_swagger
@@ -63,7 +92,77 @@ services:
URL: "http://npm:81/api/schema"
PORT: '80'
depends_on:
- npm
- fullstack
squid:
image: ubuntu/squid
container_name: npm_squid
volumes:
- './dev/squid.conf:/etc/squid/squid.conf:ro'
- './dev/resolv.conf:/etc/resolv.conf:ro'
- '/etc/localtime:/etc/localtime:ro'
networks:
- nginx_proxy_manager
ports:
- 8128:3128
pdns:
image: pschiffe/pdns-mysql
volumes:
- '/etc/localtime:/etc/localtime:ro'
environment:
PDNS_master: 'yes'
PDNS_api: 'yes'
PDNS_api_key: 'npm'
PDNS_webserver: 'yes'
PDNS_webserver_address: '0.0.0.0'
PDNS_webserver_password: 'npm'
PDNS_webserver-allow-from: '127.0.0.0/8,192.0.0.0/8,10.0.0.0/8,172.0.0.0/8'
PDNS_version_string: 'anonymous'
PDNS_default_ttl: 1500
PDNS_allow_axfr_ips: '127.0.0.0/8,192.0.0.0/8,10.0.0.0/8,172.0.0.0/8'
PDNS_gmysql_host: pdns-db
PDNS_gmysql_port: 3306
PDNS_gmysql_user: pdns
PDNS_gmysql_password: pdns
PDNS_gmysql_dbname: pdns
depends_on:
- pdns-db
networks:
nginx_proxy_manager:
aliases:
- ns1.pdns
- ns2.pdns
pdns-db:
image: mariadb
environment:
MYSQL_ROOT_PASSWORD: 'pdns'
MYSQL_DATABASE: 'pdns'
MYSQL_USER: 'pdns'
MYSQL_PASSWORD: 'pdns'
volumes:
- 'pdns_mysql:/var/lib/mysql'
- '/etc/localtime:/etc/localtime:ro'
- './dev/pdns-db.sql:/docker-entrypoint-initdb.d/01_init.sql:ro'
networks:
- nginx_proxy_manager
cypress:
image: "npm_dev_cypress"
build:
context: ../
dockerfile: test/cypress/Dockerfile
environment:
HTTP_PROXY: 'squid:3128'
HTTPS_PROXY: 'squid:3128'
volumes:
- '../test/results:/results'
- './dev/resolv.conf:/etc/resolv.conf:ro'
- '/etc/localtime:/etc/localtime:ro'
command: cypress run --browser chrome --config-file=cypress/config/ci.js
networks:
- nginx_proxy_manager
volumes:
npm_data:
@@ -72,6 +171,8 @@ volumes:
name: npm_le_data
db_data:
name: npm_db_data
pdns_mysql:
name: npm_pdns_mysql
networks:
nginx_proxy_manager:

View File

@@ -492,7 +492,7 @@
"package_name": "certbot-dns-wedos",
"version": "~=2.2",
"dependencies": "",
"credentials": "dns_wedos_user = <wedos_registration>\ndns_wedos_auth = <wapi_sha256_password>",
"credentials": "dns_wedos_user = <wedos_registration>\ndns_wedos_auth = <wapi_password>",
"full_plugin_name": "dns-wedos"
},
"edgedns": {

View File

@@ -15,3 +15,13 @@ COMPOSE_PROJECT_NAME="npmdev"
COMPOSE_FILE="docker/docker-compose.dev.yml"
export COMPOSE_FILE COMPOSE_PROJECT_NAME
# $1: container_name
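# e.g.: PDNS_IP=$(get_container_ip "pdns")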
get_container_ip () {
local container_name=$1
local container
local ip
container=$(docker-compose ps --all -q "${container_name}" | tail -n1)
ip=$(docker inspect --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' "$container")
echo "$ip"
}

View File

@@ -65,7 +65,7 @@ rm -rf "${LOCAL_RESOLVE}"
printf "nameserver %s\noptions ndots:0" "${DNSROUTER_IP}" > "${LOCAL_RESOLVE}"
# bring up all remaining containers, except cypress!
docker-compose up -d --remove-orphans stepca
docker-compose up -d --remove-orphans stepca squid
docker-compose pull db-mysql || true # ok to fail
docker-compose up -d --remove-orphans --pull=never fullstack

scripts/cypress-dev Executable file
View File

@@ -0,0 +1,13 @@
#!/bin/bash -e
DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
. "$DIR/.common.sh"
# Ensure docker-compose exists
if hash docker-compose 2>/dev/null; then
cd "${DIR}/.."
rm -rf "$DIR/../test/results"
docker-compose up --build cypress
else
echo -e "${RED} docker-compose command is not available${RESET}"
fi

View File

@@ -7,8 +7,43 @@ DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
if hash docker-compose 2>/dev/null; then
cd "${DIR}/.."
echo -e "${BLUE} ${CYAN}Starting Dev Stack ...${RESET}"
echo -e "${BLUE} $(docker-compose config)${RESET}"
docker-compose up -d --remove-orphans --force-recreate --build
# Bring up a stack, in steps so we can inject IPs everywhere
docker-compose up -d pdns pdns-db
PDNS_IP=$(get_container_ip "pdns")
echo -e "${BLUE} ${YELLOW}PDNS IP is ${PDNS_IP}${RESET}"
# adjust the dnsrouter config
LOCAL_DNSROUTER_CONFIG="$DIR/../docker/dev/dnsrouter-config.json"
rm -rf "$LOCAL_DNSROUTER_CONFIG.tmp"
# IMPORTANT: changes to dnsrouter-config.json will affect this line:
jq --arg a "$PDNS_IP" '.servers[0].upstreams[1].upstream = $a' "$LOCAL_DNSROUTER_CONFIG" > "$LOCAL_DNSROUTER_CONFIG.tmp"
docker-compose up -d dnsrouter
DNSROUTER_IP=$(get_container_ip "dnsrouter")
echo -e "${BLUE} ${YELLOW}DNS Router IP is ${DNSROUTER_IP}"
if [ "${DNSROUTER_IP:-}" = "" ]; then
echo -e "${RED} ERROR: DNS Router IP is not set${RESET}"
exit 1
fi
# mount the resolver
LOCAL_RESOLVE="$DIR/../docker/dev/resolv.conf"
rm -rf "${LOCAL_RESOLVE}"
printf "nameserver %s\noptions ndots:0" "${DNSROUTER_IP}" > "${LOCAL_RESOLVE}"
# bring up all remaining containers, except cypress!
docker-compose up -d --remove-orphans stepca squid
docker-compose pull db
docker-compose up -d --remove-orphans --pull=never fullstack
docker-compose up -d --remove-orphans swagger
# docker-compose up -d --remove-orphans --force-recreate --build
# wait for main container to be healthy
bash "$DIR/wait-healthy" "$(docker-compose ps --all -q fullstack)" 120
echo ""
echo -e "${CYAN}Admin UI: http://127.0.0.1:3081${RESET}"

View File

@@ -15,8 +15,8 @@ module.exports = defineConfig({
return require("../plugins/index.js")(on, config);
},
env: {
swaggerBase: '{{baseUrl}}/api/schema',
swaggerBase: '{{baseUrl}}/api/schema?ts=' + Date.now(),
},
baseUrl: 'http://localhost:1234',
baseUrl: 'http://fullstack:81',
}
});

View File

@@ -1,22 +0,0 @@
const { defineConfig } = require('cypress');
module.exports = defineConfig({
requestTimeout: 30000,
defaultCommandTimeout: 20000,
reporter: 'cypress-multi-reporters',
reporterOptions: {
configFile: 'multi-reporter.json'
},
video: false,
videosFolder: 'results/videos',
screenshotsFolder: 'results/screenshots',
e2e: {
setupNodeEvents(on, config) {
return require("../plugins/index.js")(on, config);
},
env: {
swaggerBase: '{{baseUrl}}/api/schema',
},
baseUrl: 'http://localhost:1234',
}
});

View File

@@ -1,7 +1,8 @@
/// <reference types="Cypress" />
/// <reference types="cypress" />
describe('Certificates endpoints', () => {
let token;
let certID;
before(() => {
cy.getToken().then((tok) => {
@@ -24,6 +25,54 @@ describe('Certificates endpoints', () => {
});
});
it('Custom certificate lifecycle', function() {
// Create custom cert
cy.task('backendApiPost', {
token: token,
path: '/api/nginx/certificates',
data: {
provider: "other",
nice_name: "Test Certificate",
},
}).then((data) => {
cy.validateSwaggerSchema('post', 201, '/nginx/certificates', data);
expect(data).to.have.property('id');
certID = data.id;
// Upload files
cy.task('backendApiPostFiles', {
token: token,
path: `/api/nginx/certificates/${certID}/upload`,
files: {
certificate: 'test.example.com.pem',
certificate_key: 'test.example.com-key.pem',
},
}).then((data) => {
cy.validateSwaggerSchema('post', 200, '/nginx/certificates/{certID}/upload', data);
expect(data).to.have.property('certificate');
expect(data).to.have.property('certificate_key');
// Get all certs
cy.task('backendApiGet', {
token: token,
path: '/api/nginx/certificates?expand=owner'
}).then((data) => {
cy.validateSwaggerSchema('get', 200, '/nginx/certificates', data);
expect(data.length).to.be.greaterThan(0);
// Delete cert
cy.task('backendApiDelete', {
token: token,
path: `/api/nginx/certificates/${certID}`
}).then((data) => {
cy.validateSwaggerSchema('delete', 200, '/nginx/certificates/{certID}', data);
expect(data).to.be.equal(true);
});
});
});
});
});
it('Request Certificate - CVE-2024-46256/CVE-2024-46257', function() {
cy.task('backendApiPost', {
token: token,

View File

@@ -0,0 +1,61 @@
/// <reference types="cypress" />
describe('Full Certificate Provisions', () => {
let token;
before(() => {
cy.getToken().then((tok) => {
token = tok;
});
});
it.only('Should be able to create new http certificate', function() {
cy.task('backendApiPost', {
token: token,
path: '/api/nginx/certificates',
data: {
domain_names: [
'website1.example.com'
],
meta: {
letsencrypt_email: 'admin@example.com',
letsencrypt_agree: true,
dns_challenge: false
},
provider: 'letsencrypt'
}
}).then((data) => {
cy.validateSwaggerSchema('post', 201, '/nginx/certificates', data);
expect(data).to.have.property('id');
expect(data.id).to.be.greaterThan(0);
expect(data.provider).to.be.equal('letsencrypt');
});
});
it('Should be able to create new DNS certificate with Powerdns', function() {
cy.task('backendApiPost', {
token: token,
path: '/api/certificates',
data: {
domain_names: [
'website2.example.com'
],
meta: {
letsencrypt_email: "admin@example.com",
dns_challenge: true,
dns_provider: 'powerdns',
dns_provider_credentials: 'dns_powerdns_api_url = http://ns1.pdns:8081\r\ndns_powerdns_api_key = npm',
letsencrypt_agree: true
},
provider: 'letsencrypt'
}
}).then((data) => {
cy.validateSwaggerSchema('post', 201, '/nginx/certificates', data);
expect(data).to.have.property('id');
expect(data.id).to.be.greaterThan(0);
expect(data.provider).to.be.equal('letsencrypt');
expect(data.meta.dns_provider).to.be.equal('powerdns');
});
});
});

View File

@@ -1,4 +1,4 @@
/// <reference types="Cypress" />
/// <reference types="cypress" />
describe('Basic API checks', () => {
it('Should return a valid health payload', function () {
@@ -12,7 +12,7 @@ describe('Basic API checks', () => {
it('Should return a valid schema payload', function () {
cy.task('backendApiGet', {
path: '/api/schema',
path: '/api/schema?ts=' + Date.now(),
}).then((data) => {
expect(data.openapi).to.be.equal('3.1.0');
});

View File

@@ -1,4 +1,4 @@
/// <reference types="Cypress" />
/// <reference types="cypress" />
describe('Hosts endpoints', () => {
let token;

View File

@@ -1,4 +1,4 @@
/// <reference types="Cypress" />
/// <reference types="cypress" />
describe('Users endpoints', () => {
let token;

View File

@@ -7,7 +7,7 @@ const BackendApi = function(config, token) {
this.axios = axios.create({
baseURL: config.baseUrl,
timeout: 5000,
timeout: 60000,
});
};
@@ -80,7 +80,7 @@ BackendApi.prototype._handleError = function(err, resolve, reject, returnOnError
* @returns {Promise<object>}
*/
BackendApi.prototype.request = function (method, path, returnOnError, data) {
logger(method.toUpperCase(), this.config.baseUrl + path);
logger(method.toUpperCase(), path);
const options = this._prepareOptions(returnOnError);
return new Promise((resolve, reject) => {

View File

@@ -1,4 +1,4 @@
const {SwaggerValidation} = require('@jc21/cypress-swagger-validation');
const { SwaggerValidation } = require('@jc21/cypress-swagger-validation');
module.exports = (on, config) => {
// Replace swaggerBase config var wildcard

View File

@@ -4,7 +4,7 @@
"description": "",
"main": "index.js",
"dependencies": {
"@jc21/cypress-swagger-validation": "^0.2.7",
"@jc21/cypress-swagger-validation": "^0.3.1",
"axios": "^1.7.7",
"cypress": "^13.15.0",
"cypress-multi-reporters": "^1.6.4",
@@ -19,8 +19,8 @@
"mocha-junit-reporter": "^2.2.1"
},
"scripts": {
"cypress": "cypress open --config-file=cypress/config/dev.js --config baseUrl=${BASE_URL:-http://127.0.0.1:3081}",
"cypress:headless": "cypress run --config-file=cypress/config/dev.js --config baseUrl=${BASE_URL:-http://127.0.0.1:3081}"
"cypress": "HTTP_PROXY=127.0.0.1:8128 HTTPS_PROXY=127.0.0.1:8128 cypress open --config-file=cypress/config/ci.js",
"cypress:headless": "HTTP_PROXY=127.0.0.1:8128 HTTPS_PROXY=127.0.0.1:8128 cypress run --config-file=cypress/config/ci.js"
},
"author": "",
"license": "ISC"

View File

@@ -11,14 +11,13 @@
call-me-maybe "^1.0.1"
js-yaml "^3.13.1"
"@apidevtools/json-schema-ref-parser@9.0.9":
version "9.0.9"
resolved "https://registry.yarnpkg.com/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-9.0.9.tgz#d720f9256e3609621280584f2b47ae165359268b"
integrity sha512-GBD2Le9w2+lVFoc4vswGI/TjkNIZSVp7+9xPf+X3uidBfWnAeUWmquteSyt0+VCrhNMWj/FTABISQrD3Z/YA+w==
"@apidevtools/json-schema-ref-parser@^11.7.2":
version "11.7.2"
resolved "https://registry.yarnpkg.com/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-11.7.2.tgz#cdf3e0aded21492364a70e193b45b7cf4177f031"
integrity sha512-4gY54eEGEstClvEkGnwVkTkrx0sqwemEFG5OSRRn3tD91XH0+Q8XIkYIfo7IwEWPpJZwILb9GUXeShtplRc/eA==
dependencies:
"@jsdevtools/ono" "^7.1.3"
"@types/json-schema" "^7.0.6"
call-me-maybe "^1.0.1"
"@types/json-schema" "^7.0.15"
js-yaml "^4.1.0"
"@apidevtools/openapi-schemas@^2.1.0":
@@ -167,15 +166,16 @@
resolved "https://registry.yarnpkg.com/@humanwhocodes/retry/-/retry-0.3.1.tgz#c72a5c76a9fbaf3488e231b13dc52c0da7bab42a"
integrity sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==
"@jc21/cypress-swagger-validation@^0.2.7":
version "0.2.7"
resolved "https://registry.yarnpkg.com/@jc21/cypress-swagger-validation/-/cypress-swagger-validation-0.2.7.tgz#64642b12d98b884df8c30b72852162941285d2af"
integrity sha512-4EQ0gfigRwVVl3DnVYbR48/EKGnn7oH5YYdMzf6zqypO+bqYvDHu9kgk/WqkGlT/aauGQ7e0YGMo8ZvR7mL0Ng==
"@jc21/cypress-swagger-validation@^0.3.1":
version "0.3.1"
resolved "https://registry.yarnpkg.com/@jc21/cypress-swagger-validation/-/cypress-swagger-validation-0.3.1.tgz#1cdd49850a20f876ed62149623f99988264751be"
integrity sha512-Vdt1gLfj8p0tJhA42Cfn43XBbsKocNfVCEVSwkn7RmZgWUyRKjqhBBRTVa9cKZTozyg8Co/yhBMsNyjmHFVXtQ==
dependencies:
"@apidevtools/json-schema-ref-parser" "^11.7.2"
"@apidevtools/swagger-parser" "^10.1.0"
ajv "^8.17.1"
axios "^1.7.7"
json-schema "^0.4.0"
json-schema-ref-parser "^9.0.9"
jsonpath "^1.1.1"
lodash "^4.17.21"
openapi-types "^12.1.3"
@@ -196,7 +196,7 @@
resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.6.tgz#628effeeae2064a1b4e79f78e81d87b7e5fc7b50"
integrity sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==
"@types/json-schema@^7.0.15", "@types/json-schema@^7.0.6":
"@types/json-schema@^7.0.15":
version "7.0.15"
resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.15.tgz#596a1747233694d50f6ad8a7869fcb6f56cf5841"
integrity sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==
@@ -1468,13 +1468,6 @@ json-buffer@3.0.1:
resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.1.tgz#9338802a30d3b6605fbe0613e094008ca8c05a13"
integrity sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==
json-schema-ref-parser@^9.0.9:
version "9.0.9"
resolved "https://registry.yarnpkg.com/json-schema-ref-parser/-/json-schema-ref-parser-9.0.9.tgz#66ea538e7450b12af342fa3d5b8458bc1e1e013f"
integrity sha512-qcP2lmGy+JUoQJ4DOQeLaZDqH9qSkeGCK3suKWxJXS82dg728Mn3j97azDMaOUmJAN4uCq91LdPx4K7E8F1a7Q==
dependencies:
"@apidevtools/json-schema-ref-parser" "9.0.9"
json-schema-traverse@^0.4.1:
version "0.4.1"
resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660"