Compare commits


1 Commit

Author SHA1 Message Date
c2965789a0 Bump freedns plugin 2024-10-17 12:57:49 +10:00
51 changed files with 153 additions and 1070 deletions

View File

@ -1 +1 @@
2.12.2
2.12.1

42
Jenkinsfile vendored
View File

@ -128,7 +128,7 @@ pipeline {
sh 'docker-compose down --remove-orphans --volumes -t 30 || true'
}
unstable {
dir(path: 'test/results') {
dir(path: 'testing/results') {
archiveArtifacts(allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml')
}
}
@ -161,45 +161,7 @@ pipeline {
sh 'docker-compose down --remove-orphans --volumes -t 30 || true'
}
unstable {
dir(path: 'test/results') {
archiveArtifacts(allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml')
}
}
}
}
stage('Test Postgres') {
environment {
COMPOSE_PROJECT_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}_postgres"
COMPOSE_FILE = 'docker/docker-compose.ci.yml:docker/docker-compose.ci.postgres.yml'
}
when {
not {
equals expected: 'UNSTABLE', actual: currentBuild.result
}
}
steps {
sh 'rm -rf ./test/results/junit/*'
sh './scripts/ci/fulltest-cypress'
}
post {
always {
// Dumps to analyze later
sh 'mkdir -p debug/postgres'
sh 'docker logs $(docker-compose ps --all -q fullstack) > debug/postgres/docker_fullstack.log 2>&1'
sh 'docker logs $(docker-compose ps --all -q stepca) > debug/postgres/docker_stepca.log 2>&1'
sh 'docker logs $(docker-compose ps --all -q pdns) > debug/postgres/docker_pdns.log 2>&1'
sh 'docker logs $(docker-compose ps --all -q pdns-db) > debug/postgres/docker_pdns-db.log 2>&1'
sh 'docker logs $(docker-compose ps --all -q dnsrouter) > debug/postgres/docker_dnsrouter.log 2>&1'
sh 'docker logs $(docker-compose ps --all -q db-postgres) > debug/postgres/docker_db-postgres.log 2>&1'
sh 'docker logs $(docker-compose ps --all -q authentik) > debug/postgres/docker_authentik.log 2>&1'
sh 'docker logs $(docker-compose ps --all -q authentik-redis) > debug/postgres/docker_authentik-redis.log 2>&1'
sh 'docker logs $(docker-compose ps --all -q authentik-ldap) > debug/postgres/docker_authentik-ldap.log 2>&1'
junit 'test/results/junit/*'
sh 'docker-compose down --remove-orphans --volumes -t 30 || true'
}
unstable {
dir(path: 'test/results') {
dir(path: 'testing/results') {
archiveArtifacts(allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml')
}
}

View File

@ -1,7 +1,7 @@
<p align="center">
<img src="https://nginxproxymanager.com/github.png">
<br><br>
<img src="https://img.shields.io/badge/version-2.12.2-green.svg?style=for-the-badge">
<img src="https://img.shields.io/badge/version-2.12.1-green.svg?style=for-the-badge">
<a href="https://hub.docker.com/repository/docker/jc21/nginx-proxy-manager">
<img src="https://img.shields.io/docker/stars/jc21/nginx-proxy-manager.svg?style=for-the-badge">
</a>

View File

@ -81,7 +81,7 @@ const internalAccessList = {
return internalAccessList.build(row)
.then(() => {
if (parseInt(row.proxy_host_count, 10)) {
if (row.proxy_host_count) {
return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts);
}
})
@ -223,7 +223,7 @@ const internalAccessList = {
.then((row) => {
return internalAccessList.build(row)
.then(() => {
if (parseInt(row.proxy_host_count, 10)) {
if (row.proxy_host_count) {
return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts);
}
}).then(internalNginx.reload)
@ -252,13 +252,9 @@ const internalAccessList = {
let query = accessListModel
.query()
.select('access_list.*', accessListModel.raw('COUNT(proxy_host.id) as proxy_host_count'))
.leftJoin('proxy_host', function() {
this.on('proxy_host.access_list_id', '=', 'access_list.id')
.andOn('proxy_host.is_deleted', '=', 0);
})
.joinRaw('LEFT JOIN `proxy_host` ON `proxy_host`.`access_list_id` = `access_list`.`id` AND `proxy_host`.`is_deleted` = 0')
.where('access_list.is_deleted', 0)
.andWhere('access_list.id', data.id)
.groupBy('access_list.id')
.allowGraph('[owner,items,clients,proxy_hosts.[certificate,access_list.[clients,items]]]')
.first();
@ -377,10 +373,7 @@ const internalAccessList = {
let query = accessListModel
.query()
.select('access_list.*', accessListModel.raw('COUNT(proxy_host.id) as proxy_host_count'))
.leftJoin('proxy_host', function() {
this.on('proxy_host.access_list_id', '=', 'access_list.id')
.andOn('proxy_host.is_deleted', '=', 0);
})
.joinRaw('LEFT JOIN `proxy_host` ON `proxy_host`.`access_list_id` = `access_list`.`id` AND `proxy_host`.`is_deleted` = 0')
.where('access_list.is_deleted', 0)
.groupBy('access_list.id')
.allowGraph('[owner,items,clients]')

View File

@ -1,6 +1,5 @@
const error = require('../lib/error');
const auditLogModel = require('../models/audit-log');
const {castJsonIfNeed} = require('../lib/helpers');
const error = require('../lib/error');
const auditLogModel = require('../models/audit-log');
const internalAuditLog = {
@ -23,9 +22,9 @@ const internalAuditLog = {
.allowGraph('[user]');
// Query is used for searching
if (typeof search_query === 'string' && search_query.length > 0) {
if (typeof search_query === 'string') {
query.where(function () {
this.where(castJsonIfNeed('meta'), 'like', '%' + search_query + '%');
this.where('meta', 'like', '%' + search_query + '%');
});
}

View File

@ -6,7 +6,6 @@ const internalHost = require('./host');
const internalNginx = require('./nginx');
const internalAuditLog = require('./audit-log');
const internalCertificate = require('./certificate');
const {castJsonIfNeed} = require('../lib/helpers');
function omissions () {
return ['is_deleted'];
@ -410,16 +409,16 @@ const internalDeadHost = {
.where('is_deleted', 0)
.groupBy('id')
.allowGraph('[owner,certificate]')
.orderBy(castJsonIfNeed('domain_names'), 'ASC');
.orderBy('domain_names', 'ASC');
if (access_data.permission_visibility !== 'all') {
query.andWhere('owner_user_id', access.token.getUserId(1));
}
// Query is used for searching
if (typeof search_query === 'string' && search_query.length > 0) {
if (typeof search_query === 'string') {
query.where(function () {
this.where(castJsonIfNeed('domain_names'), 'like', '%' + search_query + '%');
this.where('domain_names', 'like', '%' + search_query + '%');
});
}

View File

@ -2,7 +2,6 @@ const _ = require('lodash');
const proxyHostModel = require('../models/proxy_host');
const redirectionHostModel = require('../models/redirection_host');
const deadHostModel = require('../models/dead_host');
const {castJsonIfNeed} = require('../lib/helpers');
const internalHost = {
@ -18,7 +17,7 @@ const internalHost = {
cleanSslHstsData: function (data, existing_data) {
existing_data = existing_data === undefined ? {} : existing_data;
const combined_data = _.assign({}, existing_data, data);
let combined_data = _.assign({}, existing_data, data);
if (!combined_data.certificate_id) {
combined_data.ssl_forced = false;
@ -74,7 +73,7 @@ const internalHost = {
* @returns {Promise}
*/
getHostsWithDomains: function (domain_names) {
const promises = [
let promises = [
proxyHostModel
.query()
.where('is_deleted', 0),
@ -126,19 +125,19 @@ const internalHost = {
* @returns {Promise}
*/
isHostnameTaken: function (hostname, ignore_type, ignore_id) {
const promises = [
let promises = [
proxyHostModel
.query()
.where('is_deleted', 0)
.andWhere(castJsonIfNeed('domain_names'), 'like', '%' + hostname + '%'),
.andWhere('domain_names', 'like', '%' + hostname + '%'),
redirectionHostModel
.query()
.where('is_deleted', 0)
.andWhere(castJsonIfNeed('domain_names'), 'like', '%' + hostname + '%'),
.andWhere('domain_names', 'like', '%' + hostname + '%'),
deadHostModel
.query()
.where('is_deleted', 0)
.andWhere(castJsonIfNeed('domain_names'), 'like', '%' + hostname + '%')
.andWhere('domain_names', 'like', '%' + hostname + '%')
];
return Promise.all(promises)

View File

@ -6,7 +6,6 @@ const internalHost = require('./host');
const internalNginx = require('./nginx');
const internalAuditLog = require('./audit-log');
const internalCertificate = require('./certificate');
const {castJsonIfNeed} = require('../lib/helpers');
function omissions () {
return ['is_deleted', 'owner.is_deleted'];
@ -417,16 +416,16 @@ const internalProxyHost = {
.where('is_deleted', 0)
.groupBy('id')
.allowGraph('[owner,access_list,certificate]')
.orderBy(castJsonIfNeed('domain_names'), 'ASC');
.orderBy('domain_names', 'ASC');
if (access_data.permission_visibility !== 'all') {
query.andWhere('owner_user_id', access.token.getUserId(1));
}
// Query is used for searching
if (typeof search_query === 'string' && search_query.length > 0) {
if (typeof search_query === 'string') {
query.where(function () {
this.where(castJsonIfNeed('domain_names'), 'like', `%${search_query}%`);
this.where('domain_names', 'like', '%' + search_query + '%');
});
}

View File

@ -6,7 +6,6 @@ const internalHost = require('./host');
const internalNginx = require('./nginx');
const internalAuditLog = require('./audit-log');
const internalCertificate = require('./certificate');
const {castJsonIfNeed} = require('../lib/helpers');
function omissions () {
return ['is_deleted'];
@ -410,16 +409,16 @@ const internalRedirectionHost = {
.where('is_deleted', 0)
.groupBy('id')
.allowGraph('[owner,certificate]')
.orderBy(castJsonIfNeed('domain_names'), 'ASC');
.orderBy('domain_names', 'ASC');
if (access_data.permission_visibility !== 'all') {
query.andWhere('owner_user_id', access.token.getUserId(1));
}
// Query is used for searching
if (typeof search_query === 'string' && search_query.length > 0) {
if (typeof search_query === 'string') {
query.where(function () {
this.where(castJsonIfNeed('domain_names'), 'like', `%${search_query}%`);
this.where('domain_names', 'like', '%' + search_query + '%');
});
}

View File

@ -4,7 +4,6 @@ const utils = require('../lib/utils');
const streamModel = require('../models/stream');
const internalNginx = require('./nginx');
const internalAuditLog = require('./audit-log');
const {castJsonIfNeed} = require('../lib/helpers');
function omissions () {
return ['is_deleted'];
@ -294,21 +293,21 @@ const internalStream = {
getAll: (access, expand, search_query) => {
return access.can('streams:list')
.then((access_data) => {
const query = streamModel
let query = streamModel
.query()
.where('is_deleted', 0)
.groupBy('id')
.allowGraph('[owner]')
.orderByRaw('CAST(incoming_port AS INTEGER) ASC');
.orderBy('incoming_port', 'ASC');
if (access_data.permission_visibility !== 'all') {
query.andWhere('owner_user_id', access.token.getUserId(1));
}
// Query is used for searching
if (typeof search_query === 'string' && search_query.length > 0) {
if (typeof search_query === 'string') {
query.where(function () {
this.where(castJsonIfNeed('incoming_port'), 'like', `%${search_query}%`);
this.where('incoming_port', 'like', '%' + search_query + '%');
});
}
@ -328,9 +327,9 @@ const internalStream = {
* @returns {Promise}
*/
getCount: (user_id, visibility) => {
const query = streamModel
let query = streamModel
.query()
.count('id AS count')
.count('id as count')
.where('is_deleted', 0);
if (visibility !== 'all') {

View File

@ -5,8 +5,6 @@ const authModel = require('../models/auth');
const helpers = require('../lib/helpers');
const TokenModel = require('../models/token');
const ERROR_MESSAGE_INVALID_AUTH = 'Invalid email or password';
module.exports = {
/**
@ -71,15 +69,15 @@ module.exports = {
};
});
} else {
throw new error.AuthError(ERROR_MESSAGE_INVALID_AUTH);
throw new error.AuthError('Invalid password');
}
});
} else {
throw new error.AuthError(ERROR_MESSAGE_INVALID_AUTH);
throw new error.AuthError('No password auth for user');
}
});
} else {
throw new error.AuthError(ERROR_MESSAGE_INVALID_AUTH);
throw new error.AuthError('No relevant user found');
}
});
},

View File

@ -2,10 +2,7 @@ const fs = require('fs');
const NodeRSA = require('node-rsa');
const logger = require('../logger').global;
const keysFile = '/data/keys.json';
const mysqlEngine = 'mysql2';
const postgresEngine = 'pg';
const sqliteClientName = 'sqlite3';
const keysFile = '/data/keys.json';
let instance = null;
@ -17,7 +14,7 @@ const configure = () => {
let configData;
try {
configData = require(filename);
} catch (_) {
} catch (err) {
// do nothing
}
@ -37,7 +34,7 @@ const configure = () => {
logger.info('Using MySQL configuration');
instance = {
database: {
engine: mysqlEngine,
engine: 'mysql2',
host: envMysqlHost,
port: process.env.DB_MYSQL_PORT || 3306,
user: envMysqlUser,
@ -49,33 +46,13 @@ const configure = () => {
return;
}
const envPostgresHost = process.env.DB_POSTGRES_HOST || null;
const envPostgresUser = process.env.DB_POSTGRES_USER || null;
const envPostgresName = process.env.DB_POSTGRES_NAME || null;
if (envPostgresHost && envPostgresUser && envPostgresName) {
// we have enough postgres creds to go with postgres
logger.info('Using Postgres configuration');
instance = {
database: {
engine: postgresEngine,
host: envPostgresHost,
port: process.env.DB_POSTGRES_PORT || 5432,
user: envPostgresUser,
password: process.env.DB_POSTGRES_PASSWORD,
name: envPostgresName,
},
keys: getKeys(),
};
return;
}
const envSqliteFile = process.env.DB_SQLITE_FILE || '/data/database.sqlite';
logger.info(`Using Sqlite: ${envSqliteFile}`);
instance = {
database: {
engine: 'knex-native',
knex: {
client: sqliteClientName,
client: 'sqlite3',
connection: {
filename: envSqliteFile
},
@ -166,27 +143,7 @@ module.exports = {
*/
isSqlite: function () {
instance === null && configure();
return instance.database.knex && instance.database.knex.client === sqliteClientName;
},
/**
* Is this a mysql configuration?
*
* @returns {boolean}
*/
isMysql: function () {
instance === null && configure();
return instance.database.engine === mysqlEngine;
},
/**
* Is this a postgres configuration?
*
* @returns {boolean}
*/
isPostgres: function () {
instance === null && configure();
return instance.database.engine === postgresEngine;
return instance.database.knex && instance.database.knex.client === 'sqlite3';
},
/**

View File

@ -1,6 +1,4 @@
const moment = require('moment');
const {isPostgres} = require('./config');
const {ref} = require('objection');
const moment = require('moment');
module.exports = {
@ -47,16 +45,6 @@ module.exports = {
}
});
return obj;
},
/**
* Casts a column to json if using postgres
*
* @param {string} colName
* @returns {string|Objection.ReferenceBuilder}
*/
castJsonIfNeed: function (colName) {
return isPostgres() ? ref(colName).castText() : colName;
}
};

View File

@ -17,9 +17,6 @@ const boolFields = [
'preserve_path',
'ssl_forced',
'block_exploits',
'hsts_enabled',
'hsts_subdomains',
'http2_support',
];
class RedirectionHost extends Model {

View File

@ -23,7 +23,6 @@
"node-rsa": "^1.0.8",
"objection": "3.0.1",
"path": "^0.12.7",
"pg": "^8.13.1",
"signale": "1.4.0",
"sqlite3": "5.1.6",
"temp-write": "^4.0.0"

View File

@ -19,9 +19,7 @@
"incoming_port": {
"type": "integer",
"minimum": 1,
"maximum": 65535,
"if": {"properties": {"tcp_forwarding": {"const": true}}},
"then": {"not": {"oneOf": [{"const": 80}, {"const": 443}]}}
"maximum": 65535
},
"forwarding_host": {
"anyOf": [

View File

@ -49,7 +49,8 @@
"minLength": 1
},
"password": {
"type": "string"
"type": "string",
"minLength": 1
}
}
}

View File

@ -15,18 +15,18 @@ const certbot = require('./lib/certbot');
const setupDefaultUser = () => {
return userModel
.query()
.select('id', )
.select(userModel.raw('COUNT(`id`) as `count`'))
.where('is_deleted', 0)
.first()
.then((row) => {
if (!row || !row.id) {
if (!row.count) {
// Create a new user and set password
const email = process.env.INITIAL_ADMIN_EMAIL || 'admin@example.com';
const password = process.env.INITIAL_ADMIN_PASSWORD || 'changeme';
let email = process.env.INITIAL_ADMIN_EMAIL || 'admin@example.com';
let password = process.env.INITIAL_ADMIN_PASSWORD || 'changeme';
logger.info('Creating a new user: ' + email + ' with password: ' + password);
const data = {
let data = {
is_deleted: 0,
email: email,
name: 'Administrator',
@ -77,11 +77,11 @@ const setupDefaultUser = () => {
const setupDefaultSettings = () => {
return settingModel
.query()
.select('id')
.select(settingModel.raw('COUNT(`id`) as `count`'))
.where({id: 'default-site'})
.first()
.then((row) => {
if (!row || !row.id) {
if (!row.count) {
settingModel
.query()
.insert({

View File

@ -4,7 +4,7 @@
auth_basic "Authorization required";
auth_basic_user_file /data/access/{{ access_list_id }};
{% if access_list.pass_auth == 0 or access_list.pass_auth == true %}
{% if access_list.pass_auth == 0 %}
proxy_set_header Authorization "";
{% endif %}
@ -17,7 +17,7 @@
deny all;
# Access checks must...
{% if access_list.satisfy_any == 1 or access_list.satisfy_any == true %}
{% if access_list.satisfy_any == 1 %}
satisfy any;
{% else %}
satisfy all;

View File

@ -5,16 +5,11 @@
#listen [::]:80;
{% endif %}
{% if certificate -%}
listen 443 ssl;
listen 443 ssl{% if http2_support == 1 or http2_support == true %} http2{% endif %};
{% if ipv6 -%}
listen [::]:443 ssl;
listen [::]:443 ssl{% if http2_support == 1 or http2_support == true %} http2{% endif %};
{% else -%}
#listen [::]:443;
{% endif %}
{% endif %}
server_name {{ domain_names | join: " " }};
{% if http2_support == 1 or http2_support == true %}
http2 on;
{% else -%}
http2 off;
{% endif %}

View File

@ -7,7 +7,11 @@
proxy_set_header X-Forwarded-For $remote_addr;
proxy_set_header X-Real-IP $remote_addr;
proxy_pass {{ forward_scheme }}://{{ forward_host }}:{{ forward_port }}{{ forward_path }};
set $proxy_forward_scheme {{ forward_scheme }};
set $proxy_server "{{ forward_host }}";
set $proxy_port {{ forward_port }};
proxy_pass $proxy_forward_scheme://$proxy_server:$proxy_port{{ forward_path }};
{% include "_access.conf" %}
{% include "_assets.conf" %}

View File

@ -22,7 +22,5 @@ server {
}
{% endif %}
# Custom
include /data/nginx/custom/server_dead[.]conf;
}
{% endif %}

View File

@ -830,9 +830,9 @@ crc32-stream@^4.0.2:
readable-stream "^3.4.0"
cross-spawn@^7.0.2:
version "7.0.6"
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.6.tgz#8a58fe78f00dcd70c370451759dfbfaf03e8ee9f"
integrity sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==
version "7.0.3"
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6"
integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==
dependencies:
path-key "^3.1.0"
shebang-command "^2.0.0"
@ -2735,67 +2735,11 @@ path@^0.12.7:
process "^0.11.1"
util "^0.10.3"
pg-cloudflare@^1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/pg-cloudflare/-/pg-cloudflare-1.1.1.tgz#e6d5833015b170e23ae819e8c5d7eaedb472ca98"
integrity sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q==
pg-connection-string@2.5.0:
version "2.5.0"
resolved "https://registry.yarnpkg.com/pg-connection-string/-/pg-connection-string-2.5.0.tgz#538cadd0f7e603fc09a12590f3b8a452c2c0cf34"
integrity sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ==
pg-connection-string@^2.7.0:
version "2.7.0"
resolved "https://registry.yarnpkg.com/pg-connection-string/-/pg-connection-string-2.7.0.tgz#f1d3489e427c62ece022dba98d5262efcb168b37"
integrity sha512-PI2W9mv53rXJQEOb8xNR8lH7Hr+EKa6oJa38zsK0S/ky2er16ios1wLKhZyxzD7jUReiWokc9WK5nxSnC7W1TA==
pg-int8@1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/pg-int8/-/pg-int8-1.0.1.tgz#943bd463bf5b71b4170115f80f8efc9a0c0eb78c"
integrity sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==
pg-pool@^3.7.0:
version "3.7.0"
resolved "https://registry.yarnpkg.com/pg-pool/-/pg-pool-3.7.0.tgz#d4d3c7ad640f8c6a2245adc369bafde4ebb8cbec"
integrity sha512-ZOBQForurqh4zZWjrgSwwAtzJ7QiRX0ovFkZr2klsen3Nm0aoh33Ls0fzfv3imeH/nw/O27cjdz5kzYJfeGp/g==
pg-protocol@^1.7.0:
version "1.7.0"
resolved "https://registry.yarnpkg.com/pg-protocol/-/pg-protocol-1.7.0.tgz#ec037c87c20515372692edac8b63cf4405448a93"
integrity sha512-hTK/mE36i8fDDhgDFjy6xNOG+LCorxLG3WO17tku+ij6sVHXh1jQUJ8hYAnRhNla4QVD2H8er/FOjc/+EgC6yQ==
pg-types@^2.1.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/pg-types/-/pg-types-2.2.0.tgz#2d0250d636454f7cfa3b6ae0382fdfa8063254a3"
integrity sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==
dependencies:
pg-int8 "1.0.1"
postgres-array "~2.0.0"
postgres-bytea "~1.0.0"
postgres-date "~1.0.4"
postgres-interval "^1.1.0"
pg@^8.13.1:
version "8.13.1"
resolved "https://registry.yarnpkg.com/pg/-/pg-8.13.1.tgz#6498d8b0a87ff76c2df7a32160309d3168c0c080"
integrity sha512-OUir1A0rPNZlX//c7ksiu7crsGZTKSOXJPgtNiHGIlC9H0lO+NC6ZDYksSgBYY/thSWhnSRBv8w1lieNNGATNQ==
dependencies:
pg-connection-string "^2.7.0"
pg-pool "^3.7.0"
pg-protocol "^1.7.0"
pg-types "^2.1.0"
pgpass "1.x"
optionalDependencies:
pg-cloudflare "^1.1.1"
pgpass@1.x:
version "1.0.5"
resolved "https://registry.yarnpkg.com/pgpass/-/pgpass-1.0.5.tgz#9b873e4a564bb10fa7a7dbd55312728d422a223d"
integrity sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==
dependencies:
split2 "^4.1.0"
picomatch@^2.0.4, picomatch@^2.2.1:
version "2.2.2"
resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.2.tgz#21f333e9b6b8eaff02468f5146ea406d345f4dad"
@ -2814,28 +2758,6 @@ pkg-conf@^2.1.0:
find-up "^2.0.0"
load-json-file "^4.0.0"
postgres-array@~2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/postgres-array/-/postgres-array-2.0.0.tgz#48f8fce054fbc69671999329b8834b772652d82e"
integrity sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==
postgres-bytea@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/postgres-bytea/-/postgres-bytea-1.0.0.tgz#027b533c0aa890e26d172d47cf9ccecc521acd35"
integrity sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==
postgres-date@~1.0.4:
version "1.0.7"
resolved "https://registry.yarnpkg.com/postgres-date/-/postgres-date-1.0.7.tgz#51bc086006005e5061c591cee727f2531bf641a8"
integrity sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==
postgres-interval@^1.1.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/postgres-interval/-/postgres-interval-1.2.0.tgz#b460c82cb1587507788819a06aa0fffdb3544695"
integrity sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==
dependencies:
xtend "^4.0.0"
prelude-ls@^1.2.1:
version "1.2.1"
resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396"
@ -3272,11 +3194,6 @@ socks@^2.6.2:
ip "^2.0.0"
smart-buffer "^4.2.0"
split2@^4.1.0:
version "4.2.0"
resolved "https://registry.yarnpkg.com/split2/-/split2-4.2.0.tgz#c9c5920904d148bab0b9f67145f245a86aadbfa4"
integrity sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==
sprintf-js@~1.0.2:
version "1.0.3"
resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c"
@ -3748,11 +3665,6 @@ xdg-basedir@^4.0.0:
resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-4.0.0.tgz#4bc8d9984403696225ef83a1573cbbcb4e79db13"
integrity sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q==
xtend@^4.0.0:
version "4.0.2"
resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54"
integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==
y18n@^4.0.0:
version "4.0.1"
resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.1.tgz#8db2b83c31c5d75099bb890b23f3094891e247d4"

View File

@ -1,8 +0,0 @@
AUTHENTIK_SECRET_KEY=gl8woZe8L6IIX8SC0c5Ocsj0xPkX5uJo5DVZCFl+L/QGbzuplfutYuua2ODNLEiDD3aFd9H2ylJmrke0
AUTHENTIK_REDIS__HOST=authentik-redis
AUTHENTIK_POSTGRESQL__HOST=db-postgres
AUTHENTIK_POSTGRESQL__USER=authentik
AUTHENTIK_POSTGRESQL__NAME=authentik
AUTHENTIK_POSTGRESQL__PASSWORD=07EKS5NLI6Tpv68tbdvrxfvj
AUTHENTIK_BOOTSTRAP_PASSWORD=admin
AUTHENTIK_BOOTSTRAP_EMAIL=admin@example.com

Binary file not shown.

View File

@ -29,8 +29,7 @@ COPY scripts/install-s6 /tmp/install-s6
RUN rm -f /etc/nginx/conf.d/production.conf \
&& chmod 644 /etc/logrotate.d/nginx-proxy-manager \
&& /tmp/install-s6 "${TARGETPLATFORM}" \
&& rm -f /tmp/install-s6 \
&& chmod 644 -R /root/.cache
&& rm -f /tmp/install-s6
# Certs for testing purposes
COPY --from=pebbleca /test/certs/pebble.minica.pem /etc/ssl/certs/pebble.minica.pem

View File

@ -1,78 +0,0 @@
# WARNING: This is a CI docker-compose file used for building and testing of the entire app, it should not be used for production.
services:
cypress:
environment:
CYPRESS_stack: 'postgres'
fullstack:
environment:
DB_POSTGRES_HOST: 'db-postgres'
DB_POSTGRES_PORT: '5432'
DB_POSTGRES_USER: 'npm'
DB_POSTGRES_PASSWORD: 'npmpass'
DB_POSTGRES_NAME: 'npm'
depends_on:
- db-postgres
- authentik
- authentik-worker
- authentik-ldap
db-postgres:
image: postgres:latest
environment:
POSTGRES_USER: 'npm'
POSTGRES_PASSWORD: 'npmpass'
POSTGRES_DB: 'npm'
volumes:
- psql_vol:/var/lib/postgresql/data
- ./ci/postgres:/docker-entrypoint-initdb.d
networks:
- fulltest
authentik-redis:
image: 'redis:alpine'
command: --save 60 1 --loglevel warning
restart: unless-stopped
healthcheck:
test: ['CMD-SHELL', 'redis-cli ping | grep PONG']
start_period: 20s
interval: 30s
retries: 5
timeout: 3s
volumes:
- redis_vol:/data
authentik:
image: ghcr.io/goauthentik/server:2024.10.1
restart: unless-stopped
command: server
env_file:
- ci.env
depends_on:
- authentik-redis
- db-postgres
authentik-worker:
image: ghcr.io/goauthentik/server:2024.10.1
restart: unless-stopped
command: worker
env_file:
- ci.env
depends_on:
- authentik-redis
- db-postgres
authentik-ldap:
image: ghcr.io/goauthentik/ldap:2024.10.1
environment:
AUTHENTIK_HOST: 'http://authentik:9000'
AUTHENTIK_INSECURE: 'true'
AUTHENTIK_TOKEN: 'wKYZuRcI0ETtb8vWzMCr04oNbhrQUUICy89hSpDln1OEKLjiNEuQ51044Vkp'
restart: unless-stopped
depends_on:
- authentik
volumes:
psql_vol:
redis_vol:

View File

@ -40,7 +40,7 @@ services:
- ca.internal
pdns:
image: pschiffe/pdns-mysql:4.8
image: pschiffe/pdns-mysql
volumes:
- '/etc/localtime:/etc/localtime:ro'
environment:

View File

@ -2,8 +2,8 @@
services:
fullstack:
image: npm2dev:core
container_name: npm2dev.core
image: nginxproxymanager:dev
container_name: npm_core
build:
context: ./
dockerfile: ./dev/Dockerfile
@ -26,17 +26,11 @@ services:
DEVELOPMENT: 'true'
LE_STAGING: 'true'
# db:
# DB_MYSQL_HOST: 'db'
# DB_MYSQL_PORT: '3306'
# DB_MYSQL_USER: 'npm'
# DB_MYSQL_PASSWORD: 'npm'
# DB_MYSQL_NAME: 'npm'
# db-postgres:
DB_POSTGRES_HOST: 'db-postgres'
DB_POSTGRES_PORT: '5432'
DB_POSTGRES_USER: 'npm'
DB_POSTGRES_PASSWORD: 'npmpass'
DB_POSTGRES_NAME: 'npm'
DB_MYSQL_HOST: 'db'
DB_MYSQL_PORT: '3306'
DB_MYSQL_USER: 'npm'
DB_MYSQL_PASSWORD: 'npm'
DB_MYSQL_NAME: 'npm'
# DB_SQLITE_FILE: "/data/database.sqlite"
# DISABLE_IPV6: "true"
# Required for DNS Certificate provisioning testing:
@ -55,15 +49,11 @@ services:
timeout: 3s
depends_on:
- db
- db-postgres
- authentik
- authentik-worker
- authentik-ldap
working_dir: /app
db:
image: jc21/mariadb-aria
container_name: npm2dev.db
container_name: npm_db
ports:
- 33306:3306
networks:
@ -76,22 +66,8 @@ services:
volumes:
- db_data:/var/lib/mysql
db-postgres:
image: postgres:latest
container_name: npm2dev.db-postgres
networks:
- nginx_proxy_manager
environment:
POSTGRES_USER: 'npm'
POSTGRES_PASSWORD: 'npmpass'
POSTGRES_DB: 'npm'
volumes:
- psql_data:/var/lib/postgresql/data
- ./ci/postgres:/docker-entrypoint-initdb.d
stepca:
image: jc21/testca
container_name: npm2dev.stepca
volumes:
- './dev/resolv.conf:/etc/resolv.conf:ro'
- '/etc/localtime:/etc/localtime:ro'
@ -102,7 +78,6 @@ services:
dnsrouter:
image: jc21/dnsrouter
container_name: npm2dev.dnsrouter
volumes:
- ./dev/dnsrouter-config.json.tmp:/dnsrouter-config.json:ro
networks:
@ -110,7 +85,7 @@ services:
swagger:
image: swaggerapi/swagger-ui:latest
container_name: npm2dev.swagger
container_name: npm_swagger
ports:
- 3082:80
environment:
@ -121,7 +96,7 @@ services:
squid:
image: ubuntu/squid
container_name: npm2dev.squid
container_name: npm_squid
volumes:
- './dev/squid.conf:/etc/squid/squid.conf:ro'
- './dev/resolv.conf:/etc/resolv.conf:ro'
@ -132,8 +107,7 @@ services:
- 8128:3128
pdns:
image: pschiffe/pdns-mysql:4.8
container_name: npm2dev.pdns
image: pschiffe/pdns-mysql
volumes:
- '/etc/localtime:/etc/localtime:ro'
environment:
@ -162,7 +136,6 @@ services:
pdns-db:
image: mariadb
container_name: npm2dev.pdns-db
environment:
MYSQL_ROOT_PASSWORD: 'pdns'
MYSQL_DATABASE: 'pdns'
@ -176,8 +149,7 @@ services:
- nginx_proxy_manager
cypress:
image: npm2dev:cypress
container_name: npm2dev.cypress
image: "npm_dev_cypress"
build:
context: ../
dockerfile: test/cypress/Dockerfile
@ -192,77 +164,16 @@ services:
networks:
- nginx_proxy_manager
authentik-redis:
image: 'redis:alpine'
container_name: npm2dev.authentik-redis
command: --save 60 1 --loglevel warning
networks:
- nginx_proxy_manager
restart: unless-stopped
healthcheck:
test: ['CMD-SHELL', 'redis-cli ping | grep PONG']
start_period: 20s
interval: 30s
retries: 5
timeout: 3s
volumes:
- redis_data:/data
authentik:
image: ghcr.io/goauthentik/server:2024.10.1
container_name: npm2dev.authentik
restart: unless-stopped
command: server
networks:
- nginx_proxy_manager
env_file:
- ci.env
ports:
- 9000:9000
depends_on:
- authentik-redis
- db-postgres
authentik-worker:
image: ghcr.io/goauthentik/server:2024.10.1
container_name: npm2dev.authentik-worker
restart: unless-stopped
command: worker
networks:
- nginx_proxy_manager
env_file:
- ci.env
depends_on:
- authentik-redis
- db-postgres
authentik-ldap:
image: ghcr.io/goauthentik/ldap:2024.10.1
container_name: npm2dev.authentik-ldap
networks:
- nginx_proxy_manager
environment:
AUTHENTIK_HOST: 'http://authentik:9000'
AUTHENTIK_INSECURE: 'true'
AUTHENTIK_TOKEN: 'wKYZuRcI0ETtb8vWzMCr04oNbhrQUUICy89hSpDln1OEKLjiNEuQ51044Vkp'
restart: unless-stopped
depends_on:
- authentik
volumes:
npm_data:
name: npm2dev_core_data
name: npm_core_data
le_data:
name: npm2dev_le_data
name: npm_le_data
db_data:
name: npm2dev_db_data
name: npm_db_data
pdns_mysql:
name: npnpm2dev_pdns_mysql
psql_data:
name: npm2dev_psql_data
redis_data:
name: npm2dev_redis_data
name: npm_pdns_mysql
networks:
nginx_proxy_manager:
name: npm2dev_network
name: npm_network

View File

@ -1,4 +1,4 @@
location ~* ^.*\.(css|js|jpe?g|gif|png|webp|woff|woff2|eot|ttf|svg|ico|css\.map|js\.map)$ {
location ~* ^.*\.(css|js|jpe?g|gif|png|webp|woff|eot|ttf|svg|ico|css\.map|js\.map)$ {
if_modified_since off;
# use the public cache

View File

@ -50,6 +50,7 @@ networks:
Let's look at a Portainer example:
```yml
version: '3.8'
services:
portainer:
@ -91,6 +92,8 @@ This image supports the use of Docker secrets to import from files and keep sens
You can set any environment variable from a file by appending `__FILE` (double-underscore FILE) to the environmental variable name.
```yml
version: '3.8'
secrets:
# Secrets are single-line text files where the sole content is the secret
# Paths in this example assume that secrets are kept in local folder called ".secrets"
@ -181,7 +184,6 @@ You can add your custom configuration snippet files at `/data/nginx/custom` as f
- `/data/nginx/custom/server_stream.conf`: Included at the end of every stream server block
- `/data/nginx/custom/server_stream_tcp.conf`: Included at the end of every TCP stream server block
- `/data/nginx/custom/server_stream_udp.conf`: Included at the end of every UDP stream server block
- `/data/nginx/custom/server_dead.conf`: Included at the end of every 404 server block
Every file is optional.

View File

@ -9,6 +9,7 @@ outline: deep
Create a `docker-compose.yml` file:
```yml
version: '3.8'
services:
app:
image: 'jc21/nginx-proxy-manager:latest'
@ -21,7 +22,8 @@ services:
# Add any other Stream port you want to expose
# - '21:21' # FTP
environment:
# Uncomment the next line if you uncomment anything in the section
# environment:
# Uncomment this if you want to change the location of
# the SQLite DB file within the container
# DB_SQLITE_FILE: "/data/database.sqlite"
@ -53,6 +55,7 @@ are going to use.
Here is an example of what your `docker-compose.yml` will look like when using a MariaDB container:
```yml
version: '3.8'
services:
app:
image: 'jc21/nginx-proxy-manager:latest'
@ -98,53 +101,6 @@ Please note, that `DB_MYSQL_*` environment variables will take precedent over `D
:::
## Using Postgres database
Similar to the MySQL server setup:
```yml
services:
app:
image: 'jc21/nginx-proxy-manager:latest'
restart: unless-stopped
ports:
# These ports are in format <host-port>:<container-port>
- '80:80' # Public HTTP Port
- '443:443' # Public HTTPS Port
- '81:81' # Admin Web Port
# Add any other Stream port you want to expose
# - '21:21' # FTP
environment:
# Postgres parameters:
DB_POSTGRES_HOST: 'db'
DB_POSTGRES_PORT: '5432'
DB_POSTGRES_USER: 'npm'
DB_POSTGRES_PASSWORD: 'npmpass'
DB_POSTGRES_NAME: 'npm'
# Uncomment this if IPv6 is not enabled on your host
# DISABLE_IPV6: 'true'
volumes:
- ./data:/data
- ./letsencrypt:/etc/letsencrypt
depends_on:
- db
db:
image: postgres:latest
environment:
POSTGRES_USER: 'npm'
POSTGRES_PASSWORD: 'npmpass'
POSTGRES_DB: 'npm'
volumes:
- ./postgres:/var/lib/postgresql/data
```
::: warning
Custom Postgres schema is not supported, as such `public` will be used.
:::
## Running on Raspberry PI / ARM devices
The docker images support the following architectures:
@ -181,13 +137,5 @@ Email: admin@example.com
Password: changeme
```
Immediately after logging in with this default user you will be asked to modify your details and change your password. You can change defaults with:
```
environment:
INITIAL_ADMIN_EMAIL: my@example.com
INITIAL_ADMIN_PASSWORD: mypassword1
```
Immediately after logging in with this default user you will be asked to modify your details and change your password.

View File

@ -12,7 +12,6 @@ Known integrations:
- [HomeAssistant Hass.io plugin](https://github.com/hassio-addons/addon-nginx-proxy-manager)
- [UnRaid / Synology](https://github.com/jlesage/docker-nginx-proxy-manager)
- [Proxmox Scripts](https://github.com/ej52/proxmox-scripts/tree/main/apps/nginx-proxy-manager)
- [Proxmox VE Helper-Scripts](https://community-scripts.github.io/ProxmoxVE/scripts?id=nginxproxymanager)
- [nginxproxymanagerGraf](https://github.com/ma-karai/nginxproxymanagerGraf)

View File

@ -873,9 +873,9 @@ mitt@^3.0.1:
integrity sha512-vKivATfr97l2/QBCYAkXYDbrIWPM2IIKEl7YPhjCvKlG3kE2gm+uBo6nEXK3M5/Ffh/FLpKExzOQ3JJoJGFKBw==
nanoid@^3.3.7:
version "3.3.8"
resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.8.tgz#b1be3030bee36aaff18bacb375e5cce521684baf"
integrity sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==
version "3.3.7"
resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.7.tgz#d0c301a691bc8d54efa0a2226ccf3fe2fd656bd8"
integrity sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==
oniguruma-to-js@0.4.3:
version "0.4.3"
@ -1065,9 +1065,9 @@ vfile@^6.0.0:
vfile-message "^4.0.0"
vite@^5.4.8:
version "5.4.14"
resolved "https://registry.yarnpkg.com/vite/-/vite-5.4.14.tgz#ff8255edb02134df180dcfca1916c37a6abe8408"
integrity sha512-EK5cY7Q1D8JNhSaPKVK4pwBFvaTmZxEnoKXLG/U9gmdDcihQGNzFlgIvaxezFR4glP1LsuiedwMBqCXH3wZccA==
version "5.4.8"
resolved "https://registry.yarnpkg.com/vite/-/vite-5.4.8.tgz#af548ce1c211b2785478d3ba3e8da51e39a287e8"
integrity sha512-FqrItQ4DT1NC4zCUqMB4c4AZORMKIa0m8/URVCZ77OZ/QSNeJ54bU1vrFADbDsuwfIPcgknRkmqakQcgnL4GiQ==
dependencies:
esbuild "^0.21.3"
postcss "^8.4.43"

View File

@ -1,6 +1,6 @@
<td class="text-center">
<div class="avatar d-block" style="background-image: url(<%- (owner && owner.avatar) || '/images/default-avatar.jpg' %>)" title="Owned by <%- (owner && owner.name) || 'a deleted user' %>">
<span class="avatar-status <%- owner && !owner.is_disabled ? 'bg-green' : 'bg-red' %>"></span>
<div class="avatar d-block" style="background-image: url(<%- owner.avatar || '/images/default-avatar.jpg' %>)" title="Owned by <%- owner.name %>">
<span class="avatar-status <%- owner.is_disabled ? 'bg-red' : 'bg-green' %>"></span>
</div>
</td>
<td>

View File

@ -1,6 +1,6 @@
<td class="text-center">
<div class="avatar d-block" style="background-image: url(<%- (owner && owner.avatar) || '/images/default-avatar.jpg' %>)" title="Owned by <%- (owner && owner.name) || 'a deleted user' %>">
<span class="avatar-status <%- owner && !owner.is_disabled ? 'bg-green' : 'bg-red' %>"></span>
<div class="avatar d-block" style="background-image: url(<%- owner.avatar || '/images/default-avatar.jpg' %>)" title="Owned by <%- owner.name %>">
<span class="avatar-status <%- owner.is_disabled ? 'bg-red' : 'bg-green' %>"></span>
</div>
</td>
<td>

View File

@ -1,6 +1,6 @@
<td class="text-center">
<div class="avatar d-block" style="background-image: url(<%- (owner && owner.avatar) || '/images/default-avatar.jpg' %>)" title="Owned by <%- (owner && owner.name) || 'a deleted user' %>">
<span class="avatar-status <%- owner && !owner.is_disabled ? 'bg-green' : 'bg-red' %>"></span>
<div class="avatar d-block" style="background-image: url(<%- owner.avatar || '/images/default-avatar.jpg' %>)" title="Owned by <%- owner.name %>">
<span class="avatar-status <%- owner.is_disabled ? 'bg-red' : 'bg-green' %>"></span>
</div>
</td>
<td>

View File

@ -1,6 +1,6 @@
<td class="text-center">
<div class="avatar d-block" style="background-image: url(<%- (owner && owner.avatar) || '/images/default-avatar.jpg' %>)" title="Owned by <%- (owner && owner.name) || 'a deleted user' %>">
<span class="avatar-status <%- owner && !owner.is_disabled ? 'bg-green' : 'bg-red' %>"></span>
<div class="avatar d-block" style="background-image: url(<%- owner.avatar || '/images/default-avatar.jpg' %>)" title="Owned by <%- owner.name %>">
<span class="avatar-status <%- owner.is_disabled ? 'bg-red' : 'bg-green' %>"></span>
</div>
</td>
<td>

View File

@ -1,6 +1,6 @@
<td class="text-center">
<div class="avatar d-block" style="background-image: url(<%- (owner && owner.avatar) || '/images/default-avatar.jpg' %>)" title="Owned by <%- (owner && owner.name) || 'a deleted user' %>">
<span class="avatar-status <%- owner && !owner.is_disabled ? 'bg-green' : 'bg-red' %>"></span>
<div class="avatar d-block" style="background-image: url(<%- owner.avatar || '/images/default-avatar.jpg' %>)" title="Owned by <%- owner.name %>">
<span class="avatar-status <%- owner.is_disabled ? 'bg-red' : 'bg-green' %>"></span>
</div>
</td>
<td>

View File

@ -1,6 +1,6 @@
<td class="text-center">
<div class="avatar d-block" style="background-image: url(<%- (owner && owner.avatar) || '/images/default-avatar.jpg' %>)" title="Owned by <%- (owner && owner.name) || 'a deleted user' %>">
<span class="avatar-status <%- owner && !owner.is_disabled ? 'bg-green' : 'bg-red' %>"></span>
<div class="avatar d-block" style="background-image: url(<%- owner.avatar || '/images/default-avatar.jpg' %>)" title="Owned by <%- owner.name %>">
<span class="avatar-status <%- owner.is_disabled ? 'bg-red' : 'bg-green' %>"></span>
</div>
</td>
<td>

View File

@ -1,10 +1,10 @@
<div class="modal-content">
<form>
<div class="modal-header">
<h5 class="modal-title"><%- i18n('users', 'form-title', {id: id}) %></h5>
<button type="button" class="close cancel" aria-label="Close" data-dismiss="modal">&nbsp;</button>
</div>
<div class="modal-body">
<div class="modal-header">
<h5 class="modal-title"><%- i18n('users', 'form-title', {id: id}) %></h5>
<button type="button" class="close cancel" aria-label="Close" data-dismiss="modal">&nbsp;</button>
</div>
<div class="modal-body">
<form>
<div class="row">
<div class="col-sm-6 col-md-6">
<div class="form-group">
@ -49,10 +49,10 @@
</div>
<% } %>
</div>
</div>
<div class="modal-footer">
<button type="button" class="btn btn-secondary cancel" data-dismiss="modal"><%- i18n('str', 'cancel') %></button>
<button type="submit" class="btn btn-teal save"><%- i18n('str', 'save') %></button>
</div>
</form>
</form>
</div>
<div class="modal-footer">
<button type="button" class="btn btn-secondary cancel" data-dismiss="modal"><%- i18n('str', 'cancel') %></button>
<button type="button" class="btn btn-teal save"><%- i18n('str', 'save') %></button>
</div>
</div>

View File

@ -19,7 +19,7 @@ module.exports = Mn.View.extend({
events: {
'submit @ui.form': function (e) {
'click @ui.save': function (e) {
e.preventDefault();
this.ui.error.hide();
let view = this;

View File

@ -292,297 +292,5 @@
"default-site-html": "Custom Page",
"default-site-redirect": "Redirect"
}
},
"zh": {
"str": {
"email-address": "邮箱地址",
"username": "账号",
"password": "密码",
"sign-in": "登录",
"sign-out": "退出",
"try-again": "重试",
"name": "名字",
"email": "邮箱",
"roles": "角色",
"created-on": "创建:{date}",
"save": "保存",
"cancel": "取消",
"close": "关闭",
"enable": "启用",
"disable": "禁用",
"sure": "是的,我确定",
"disabled": "禁用",
"choose-file": "选择文件",
"source": "来源",
"destination": "目标地址",
"ssl": "SSL",
"access": "接入",
"public": "公开",
"edit": "编辑",
"delete": "删除",
"logs": "日志",
"status": "状态",
"online": "在线",
"offline": "离线",
"unknown": "未知",
"expires": "过期",
"value": "值",
"please-wait": "请稍等...",
"all": "全部",
"any": "任意"
},
"login": {
"title": "登录到您的账户"
},
"main": {
"app": "Nginx代理管理器",
"version": "v{version}",
"welcome": "欢迎来到Nginx代理管理器",
"logged-in": "您的登录身份是 {name}",
"unknown-error": "加载出错,请重新加载应用程序。",
"unknown-user": "未知用户",
"sign-in-as": "重新登录为 {name}"
},
"roles": {
"title": "角色",
"admin": "管理员",
"user": "Apache Helicopter"
},
"menu": {
"dashboard": "仪表盘",
"hosts": "主机"
},
"footer": {
"fork-me": "在Github上Fork项目",
"copy": "&copy; 2022 <a href=\"{url}\" target=\"_blank\">jc21.com</a>.",
"theme": "Theme by <a href=\"{url}\" target=\"_blank\">Tabler</a>"
},
"dashboard": {
"title": "你好 {name}"
},
"all-hosts": {
"empty-subtitle": "{manage, select, true{Why don't you create one?} other{And you don't have permission to create one.}}",
"details": "详细内容",
"enable-ssl": "启用SSL",
"force-ssl": "强制SSL",
"http2-support": "支持HTTP/2",
"domain-names": "域名",
"cert-provider": "证书提供商",
"block-exploits": "阻止常见漏洞",
"caching-enabled": "缓存资源",
"ssl-certificate": "SSL证书",
"none": "无",
"new-cert": "申请一个新的SSL证书",
"with-le": "使用Let's Encrypt",
"no-ssl": "该主机将不使用HTTPS",
"advanced": "高级",
"advanced-warning": "在此输入你的自定义Nginx配置风险自负!",
"advanced-config": "自定义Nginx配置",
"advanced-config-var-headline": "这些代理详情可以作为nginx的变量。",
"advanced-config-header-info": "请注意这里添加的任何add_header或set_header配置都不会被nginx使用。你将不得不添加一个自定义的位置'/',并在那里的自定义配置中添加头信息。",
"hsts-enabled": "启用了HSTS",
"hsts-subdomains": "HSTS子域",
"locations": "自定义位置"
},
"locations": {
"new_location": "添加位置",
"path": "/path",
"location_label": "定义位置",
"delete": "删除"
},
"ssl": {
"letsencrypt": "Let's Encrypt",
"other": "自定义",
"none": "仅HTTP",
"letsencrypt-email": "Let's Encrypt ",
"letsencrypt-agree": "我同意 <a href=\"{url}\" target=\"_blank\">Let's Encrypt 服务条款</a>",
"delete-ssl": "附加的SSL证书将不会被删除它们需要手动删除。",
"hosts-warning": "这些域名必须配置为指向本设备。",
"no-wildcard-without-dns": "不使用DNS认证时不能用通配符域名申请Let's Encrypt证书",
"dns-challenge": "使用DNS认证",
"certbot-warning": "本节需要一些关于Certbot及其DNS扩展的知识。请查阅相关扩展的文档。",
"dns-provider": "DNS提供者",
"please-choose": "选择...",
"credentials-file-content": "证书内容",
"credentials-file-content-info": "这个插件需要一个包含API令牌或其他供应商凭证的配置文件。",
"stored-as-plaintext-info": "这些数据将以明文形式存储在数据库和文件中",
"propagation-seconds": "等待时间(秒)",
"propagation-seconds-info": "留空为默认值。等待DNS生效的时间。",
"processing-info": "处理中... 这可能需要几分钟的时间。",
"passphrase-protection-support-info": "不支持使用密码保护密钥文件。"
},
"proxy-hosts": {
"title": "代理主机",
"empty": "无代理主机",
"add": "添加代理主机",
"form-title": "{id, select, undefined{New} other{Edit}} 代理主机",
"forward-scheme": "协议",
"forward-host": "转发主机/IP",
"forward-port": "转发端口",
"delete": "删除代理主机",
"delete-confirm": "你确定要删除代理主机 <strong>{domains}</strong> 吗?",
"help-title": "什么是代理主机?",
"help-content": "代理主机是你想转发网络应用的接入主机。\n代理主机可以为没有SSL服务的网络应用提供SSL服务可选。\n代理主机是Nginx代理管理器的最常见用途之一。",
"access-list": "接入列表",
"allow-websocket-upgrade": "支持WebSockets",
"ignore-invalid-upstream-ssl": "忽略无效的SSL",
"custom-forward-host-help": "为子目录转发添加路径。\n例如203.0.113.25/路径",
"search": "搜索主机…"
},
"redirection-hosts": {
"title": "重定向主机",
"empty": "无重定向主机",
"add": "添加重定向主机",
"form-title": "{id, select, undefined{New} other{Edit}} 重定向主机",
"forward-scheme": "协议",
"forward-http-status-code": "HTTP 代码",
"forward-domain": "转发域名",
"preserve-path": "保留路径",
"delete": "删除重定向主机",
"delete-confirm": "确定要删除 <strong>{domains}</strong> 的重定向主机吗?",
"help-title": "什么是重定向主机?",
"help-content": "重定向主机是将接入域名的请求推送到另一个域名。\n使用这种类型的主机最常见的原因是当你的网站改变了域名但你仍然有链接指向旧域名的应用。",
"search": "搜索主机…"
},
"dead-hosts": {
"title": "404主机",
"empty": "没有404主机",
"add": "添加404主机",
"form-title": "{id, select, undefined{New} other{Edit}} 404主机",
"delete": "删除404主机",
"delete-confirm": "确定要删除404主机吗",
"help-title": "什么是404主机",
"help-content": "404主机是一个简单的主机设置显示404页面。\n当你的域名被列入搜索引擎而你想提供一个更好的错误页面或特别是告诉搜索索引者域名页面不再存在时这可能是有用的。\n拥有这种主机的另一个好处是可以跟踪点击它的日志并查看访问来源。",
"search": "搜索主机…"
},
"streams": {
"title": "Stream模块",
"empty": "没有Stream模块",
"add": "添加Stream",
"form-title": "{id, select, undefined{New} other{Edit}} Stream",
"incoming-port": "入站端口",
"forwarding-host": "转发主机",
"forwarding-port": "转发端口",
"tcp-forwarding": "TCP转发",
"udp-forwarding": "UDP转发",
"forward-type-error": "至少有一种协议必须被启用",
"protocol": "协议",
"tcp": "TCP",
"udp": "UDP",
"delete": "删除Stream",
"delete-confirm": "你确定你要删除这个Stream吗",
"help-title": "什么是Stream",
"help-content": "Stream作为Nginx的一个相对较新的功能可以直接转发TCP/UDP流量到网络上的另一台计算机。\n如果你正在运行游戏服务器、FTP或SSH服务器这个功能就会很有用。",
"search": "搜索入站端口…"
},
"certificates": {
"title": "SSL证书",
"empty": "没有SSL证书",
"add": "添加SSL证书",
"form-title": "添加 {provider, select, letsencrypt{Let's Encrypt} other{Custom}} 证书",
"delete": "删除SSL证书",
"delete-confirm": "你确定要删除这个SSL证书吗任何使用该证书的主机之后都需要进行更新。",
"help-title": "SSL证书",
"help-content": "SSL证书TLS证书是一种加密密钥的形式它允许你的网站为终端用户进行数据加密。\n目前使用Let's Encrypt的服务来免费发放SSL证书。\n如果你的反向代理后面有个人信息、密码或敏感数据使用证书是个非常好的安全措施。\n如果你的网站不是面向互联网运行或者你只是想要一个通配符证书需要使用DNS认证获取证书。",
"other-certificate": "证书",
"other-certificate-key": "证书密钥",
"other-intermediate-certificate": "中间证书",
"force-renew": "现在更新",
"test-reachability": "测试服务器的可用性",
"reachability-title": "测试服务器的可用性",
"reachability-info": "使用Site24x7测试域名是否可以从公共互联网访问。在使用DNS认证时没有必要这样做。",
"reachability-failed-to-reach-api": "与API的通信失败请检查NPM是否正确运行",
"reachability-failed-to-check": "由于与site24x7.com的通信错误检查可用性失败。",
"reachability-ok": "您的服务器是可用的,创建证书应该是可能的。",
"reachability-404": "在这个域名中发现了一个服务器但它似乎指向的不是本Nginx代理管理器。请确保你的域名指向本NPM实例的IP。",
"reachability-not-resolved": "在这个域名中没有可用的服务器。请确保你的域名存在并且指向本NPM实例运行的IP如果有必要请检查80端口在路由器中是否被映射到外网。",
"reachability-wrong-data": "在这个域名中发现了一个服务器但它返回了一个意外的数据。它是指向本NPM服务器吗请确保你的域名指向本NPM实例的IP。",
"reachability-other": "在这个域名中发现了一个服务器,但它返回了一个意外的状态代码{code}。它是指向本NPM服务器吗请确保你的域名指向本NPM实例的IP。",
"download": "下载",
"renew-title": "更新Let'sEncrypt证书",
"search": "搜索证书…"
},
"access-lists": {
"title": "接入列表",
"empty": "没有接入项目",
"add": "添加接入",
"form-title": "{id, select, undefined{New} other{Edit}} 接入列表",
"delete": "删除接入列表",
"delete-confirm": "您确定要删除这个接入列表吗?",
"public": "公开接入",
"public-sub": "没有接入限制",
"help-title": "什么是接入列表?",
"help-content": "接入列表提供了一个特定客户IP地址的黑名单或白名单以及通过基本HTTP认证对代理主机的认证。\n你可以为一个接入列表配置多个客户规则、用户名和密码然后将其应用于代理主机。\n这对那些没有内置认证机制的转发网络服务或你想保护其免受未知客户的访问是最有用的。",
"item-count": "{count} {count, select, 1{User} other{Users}}",
"client-count": "{count} {count, select, 1{Rule} other{Rules}}",
"proxy-host-count": "{count} {count, select, 1{Proxy Host} other{Proxy Hosts}}",
"delete-has-hosts": "该接入列表与{count}代理主机有关,在被删除后将成为公开的。",
"details": "详情",
"authorization": "授权",
"access": "接入",
"satisfy": "满足",
"satisfy-any": "满足任何要求",
"pass-auth": "授权访问主机",
"access-add": "添加",
"auth-add": "添加",
"search": "搜索接入…"
},
"users": {
"title": "用户",
"default_error": "必须改变默认的邮箱地址",
"add": "添加用户",
"nickname": "昵称",
"full-name": "名称",
"edit-details": "编辑详情",
"change-password": "修改密码",
"edit-permissions": "编辑权限",
"sign-in-as": "以用户身份登录",
"form-title": "{id, select, undefined{New} other{Edit}} 用户",
"delete": "删除 {name, select, undefined{User} other{{name}}}",
"delete-confirm": "您确定要删除 <strong>{name}</strong> 吗?",
"password-title": "修改密码{self, select, false{ for {name}} other{}}",
"current-password": "修改密码",
"new-password": "新密码",
"confirm-password": "确认密码",
"permissions-title": "{name} 的权限",
"admin-perms": "该用户是管理员,一些项目不能被修改。",
"perms-visibility": "项目可见性",
"perms-visibility-user": "仅限创建的项目",
"perms-visibility-all": "所有项目",
"perm-manage": "管理项目",
"perm-view": "仅限查看",
"perm-hidden": "隐藏",
"search": "搜索用户…"
},
"audit-log": {
"title": "检查日志",
"empty": "没有日志。",
"empty-subtitle": "只要你或其他用户修改了什么,这些历史就会在这里显示出来。",
"proxy-host": "代理主机",
"redirection-host": "重定向主机",
"dead-host": "404主机",
"stream": "Stream",
"user": "用户",
"certificate": "证书",
"access-list": "访问列表",
"created": "创建 {name}",
"updated": "更新 {name}",
"deleted": "删除 {name}",
"enabled": "启用 {name}",
"disabled": "禁用 {name}",
"renewed": "续约 {name}",
"meta-title": "事件详情",
"view-meta": "查看详情",
"date": "日期",
"search": "搜索日志…"
},
"settings": {
"title": "设置",
"default-site": "默认站点",
"default-site-congratulations": "成功页面",
"default-site-404": "404页面",
"default-site-html": "自定义页面",
"default-site-redirect": "重定向"
}
}
}

View File

@ -2648,9 +2648,9 @@ electron-to-chromium@^1.3.47:
integrity sha512-67V62Z4CFOiAtox+o+tosGfVk0QX4DJgH609tjT8QymbJZVAI/jWnAthnr8c5hnRNziIRwkc9EMQYejiVz3/9Q==
elliptic@^6.5.3, elliptic@^6.5.4:
version "6.6.0"
resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.6.0.tgz#5919ec723286c1edf28685aa89261d4761afa210"
integrity sha512-dpwoQcLc/2WLQvJvLRHKZ+f9FgOdjnq11rurqwekGQygGPsYSK29OMMD2WalatiqQ+XGFDglTNixpPfI+lpaAA==
version "6.5.7"
resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.7.tgz#8ec4da2cb2939926a1b9a73619d768207e647c8b"
integrity sha512-ESVCtTwiA+XhY3wyh24QqRGBoP3rEdDUl3EDUUo9tft074fi19IrdpH7hLCMMP3CIj7jb3W96rn8lt/BqIlt5Q==
dependencies:
bn.js "^4.11.9"
brorand "^1.1.0"

View File

@ -7,7 +7,7 @@
"credentials": "dns_acmedns_api_url = http://acmedns-server/\ndns_acmedns_registration_file = /data/acme-registration.json",
"full_plugin_name": "dns-acmedns"
},
"active24": {
"active24":{
"name": "Active24",
"package_name": "certbot-dns-active24",
"version": "~=1.5.1",
@ -18,7 +18,7 @@
"aliyun": {
"name": "Aliyun",
"package_name": "certbot-dns-aliyun",
"version": "~=2.0.0",
"version": "~=0.38.1",
"dependencies": "",
"credentials": "dns_aliyun_access_key = 12345678\ndns_aliyun_access_key_secret = 1234567890abcdef1234567890abcdef",
"full_plugin_name": "dns-aliyun"
@ -31,14 +31,6 @@
"credentials": "# This plugin supported API authentication using either Service Principals or utilizing a Managed Identity assigned to the virtual machine.\n# Regardless which authentication method used, the identity will need the “DNS Zone Contributor” role assigned to it.\n# As multiple Azure DNS Zones in multiple resource groups can exist, the config file needs a mapping of zone to resource group ID. Multiple zones -> ID mappings can be listed by using the key dns_azure_zoneX where X is a unique number. At least 1 zone mapping is required.\n\n# Using a service principal (option 1)\ndns_azure_sp_client_id = 912ce44a-0156-4669-ae22-c16a17d34ca5\ndns_azure_sp_client_secret = E-xqXU83Y-jzTI6xe9fs2YC~mck3ZzUih9\ndns_azure_tenant_id = ed1090f3-ab18-4b12-816c-599af8a88cf7\n\n# Using used assigned MSI (option 2)\n# dns_azure_msi_client_id = 912ce44a-0156-4669-ae22-c16a17d34ca5\n\n# Using system assigned MSI (option 3)\n# dns_azure_msi_system_assigned = true\n\n# Zones (at least one always required)\ndns_azure_zone1 = example.com:/subscriptions/c135abce-d87d-48df-936c-15596c6968a5/resourceGroups/dns1\ndns_azure_zone2 = example.org:/subscriptions/99800903-fb14-4992-9aff-12eaf2744622/resourceGroups/dns2",
"full_plugin_name": "dns-azure"
},
"beget": {
"name":"Beget",
"package_name": "certbot-beget-plugin",
"version": "~=1.0.0.dev9",
"dependencies": "",
"credentials": "# Beget API credentials used by Certbot\nbeget_plugin_username = username\nbeget_plugin_password = password",
"full_plugin_name": "beget-plugin"
},
"bunny": {
"name": "bunny.net",
"package_name": "certbot-dns-bunny",
@ -202,7 +194,7 @@
"freedns": {
"name": "FreeDNS",
"package_name": "certbot-dns-freedns",
"version": "~=0.1.0",
"version": "~=0.2.0",
"dependencies": "",
"credentials": "dns_freedns_username = myremoteuser\ndns_freedns_password = verysecureremoteuserpassword",
"full_plugin_name": "dns-freedns"
@ -215,14 +207,6 @@
"credentials": "# Gandi personal access token\ndns_gandi_token=PERSONAL_ACCESS_TOKEN",
"full_plugin_name": "dns-gandi"
},
"gcore": {
"name": "Gcore DNS",
"package_name": "certbot-dns-gcore",
"version": "~=0.1.8",
"dependencies": "",
"credentials": "dns_gcore_apitoken = 0123456789abcdef0123456789abcdef01234567",
"full_plugin_name": "dns-gcore"
},
"godaddy": {
"name": "GoDaddy",
"package_name": "certbot-dns-godaddy",
@ -263,14 +247,6 @@
"credentials": "dns_hetzner_api_token = 0123456789abcdef0123456789abcdef",
"full_plugin_name": "dns-hetzner"
},
"hostingnl": {
"name": "Hosting.nl",
"package_name": "certbot-dns-hostingnl",
"version": "~=0.1.5",
"dependencies": "",
"credentials": "dns_hostingnl_api_key = 0123456789abcdef0123456789abcdef",
"full_plugin_name": "dns-hostingnl"
},
"hover": {
"name": "Hover",
"package_name": "certbot-dns-hover",
@ -327,14 +303,6 @@
"credentials": "dns_joker_username = <Dynamic DNS Authentication Username>\ndns_joker_password = <Dynamic DNS Authentication Password>\ndns_joker_domain = <Dynamic DNS Domain>",
"full_plugin_name": "dns-joker"
},
"leaseweb": {
"name": "LeaseWeb",
"package_name": "certbot-dns-leaseweb",
"version": "~=1.0.1",
"dependencies": "",
"credentials": "dns_leaseweb_api_token = 01234556789",
"full_plugin_name": "dns-leaseweb"
},
"linode": {
"name": "Linode",
"package_name": "certbot-dns-linode",
@ -426,7 +394,7 @@
"porkbun": {
"name": "Porkbun",
"package_name": "certbot-dns-porkbun",
"version": "~=0.9",
"version": "~=0.2",
"dependencies": "",
"credentials": "dns_porkbun_key=your-porkbun-api-key\ndns_porkbun_secret=your-porkbun-api-secret",
"full_plugin_name": "dns-porkbun"
@ -456,13 +424,13 @@
"full_plugin_name": "dns-rfc2136"
},
"rockenstein": {
"name": "rockenstein AG",
"package_name": "certbot-dns-rockenstein",
"version": "~=1.0.0",
"dependencies": "",
"credentials": "dns_rockenstein_token=<token>",
"full_plugin_name": "dns-rockenstein"
},
"name": "rockenstein AG",
"package_name": "certbot-dns-rockenstein",
"version": "~=1.0.0",
"dependencies": "",
"credentials": "dns_rockenstein_token=<token>",
"full_plugin_name": "dns-rockenstein"
},
"route53": {
"name": "Route 53 (Amazon)",
"package_name": "certbot-dns-route53",
@ -519,7 +487,7 @@
"credentials": "dns_websupport_identifier = <api_key>\ndns_websupport_secret_key = <secret>",
"full_plugin_name": "dns-websupport"
},
"wedos": {
"wedos":{
"name": "Wedos",
"package_name": "certbot-dns-wedos",
"version": "~=2.2",
@ -535,4 +503,4 @@
"credentials": "edgedns_client_secret = as3d1asd5d1a32sdfsdfs2d1asd5=\nedgedns_host = sdflskjdf-dfsdfsdf-sdfsdfsdf.luna.akamaiapis.net\nedgedns_access_token = kjdsi3-34rfsdfsdf-234234fsdfsdf\nedgedns_client_token = dkfjdf-342fsdfsd-23fsdfsdfsdf",
"full_plugin_name": "edgedns"
}
}
}

View File

@ -11,7 +11,7 @@ YELLOW='\E[1;33m'
export BLUE CYAN GREEN RED RESET YELLOW
# Docker Compose
COMPOSE_PROJECT_NAME="npm2dev"
COMPOSE_PROJECT_NAME="npmdev"
COMPOSE_FILE="docker/docker-compose.dev.yml"
export COMPOSE_FILE COMPOSE_PROJECT_NAME

View File

@ -67,8 +67,6 @@ printf "nameserver %s\noptions ndots:0" "${DNSROUTER_IP}" > "${LOCAL_RESOLVE}"
# bring up all remaining containers, except cypress!
docker-compose up -d --remove-orphans stepca squid
docker-compose pull db-mysql || true # ok to fail
docker-compose pull db-postgres || true # ok to fail
docker-compose pull authentik authentik-redis authentik-ldap || true # ok to fail
docker-compose up -d --remove-orphans --pull=never fullstack
# wait for main container to be healthy

View File

@ -36,11 +36,12 @@ if hash docker-compose 2>/dev/null; then
# bring up all remaining containers, except cypress!
docker-compose up -d --remove-orphans stepca squid
docker-compose pull db db-postgres authentik-redis authentik authentik-worker authentik-ldap
docker-compose build --pull --parallel fullstack
docker-compose up -d --remove-orphans fullstack
docker-compose pull db
docker-compose up -d --remove-orphans --pull=never fullstack
docker-compose up -d --remove-orphans swagger
# docker-compose up -d --remove-orphans --force-recreate --build
# wait for main container to be healthy
bash "$DIR/wait-healthy" "$(docker-compose ps --all -q fullstack)" 120
@ -52,10 +53,10 @@ if hash docker-compose 2>/dev/null; then
if [ "$1" == "-f" ]; then
echo -e "${BLUE} ${YELLOW}Following Backend Container:${RESET}"
docker logs -f npm2dev.core
docker logs -f npm_core
else
echo -e "${YELLOW}Hint:${RESET} You can follow the output of some of the containers with:"
echo " docker logs -f npm2dev.core"
echo " docker logs -f npm_core"
fi
else
echo -e "${RED} docker-compose command is not available${RESET}"

View File

@ -1,64 +0,0 @@
/// <reference types="cypress" />
describe('LDAP with Authentik', () => {
let token;
if (Cypress.env('skipStackCheck') === 'true' || Cypress.env('stack') === 'postgres') {
before(() => {
cy.getToken().then((tok) => {
token = tok;
// cy.task('backendApiPut', {
// token: token,
// path: '/api/settings/ldap-auth',
// data: {
// value: {
// host: 'authentik-ldap:3389',
// base_dn: 'ou=users,DC=ldap,DC=goauthentik,DC=io',
// user_dn: 'cn={{USERNAME}},ou=users,DC=ldap,DC=goauthentik,DC=io',
// email_property: 'mail',
// name_property: 'sn',
// self_filter: '(&(cn={{USERNAME}})(ak-active=TRUE))',
// auto_create_user: true
// }
// }
// }).then((data) => {
// cy.validateSwaggerSchema('put', 200, '/settings/{name}', data);
// expect(data.result).to.have.property('id');
// expect(data.result.id).to.be.greaterThan(0);
// });
// cy.task('backendApiPut', {
// token: token,
// path: '/api/settings/auth-methods',
// data: {
// value: [
// 'local',
// 'ldap'
// ]
// }
// }).then((data) => {
// cy.validateSwaggerSchema('put', 200, '/settings/{name}', data);
// expect(data.result).to.have.property('id');
// expect(data.result.id).to.be.greaterThan(0);
// });
});
});
it.skip('Should log in with LDAP', function() {
// cy.task('backendApiPost', {
// token: token,
// path: '/api/auth',
// data: {
// // Authentik LDAP creds:
// type: 'ldap',
// identity: 'cypress',
// secret: 'fqXBfUYqHvYqiwBHWW7f'
// }
// }).then((data) => {
// cy.validateSwaggerSchema('post', 200, '/auth', data);
// expect(data.result).to.have.property('token');
// });
});
}
});

View File

@ -1,97 +0,0 @@
/// <reference types="cypress" />
describe('OAuth with Authentik', () => {
let token;
if (Cypress.env('skipStackCheck') === 'true' || Cypress.env('stack') === 'postgres') {
before(() => {
cy.getToken().then((tok) => {
token = tok;
// cy.task('backendApiPut', {
// token: token,
// path: '/api/settings/oauth-auth',
// data: {
// value: {
// client_id: '7iO2AvuUp9JxiSVkCcjiIbQn4mHmUMBj7yU8EjqU',
// client_secret: 'VUMZzaGTrmXJ8PLksyqzyZ6lrtz04VvejFhPMBP9hGZNCMrn2LLBanySs4ta7XGrDr05xexPyZT1XThaf4ubg00WqvHRVvlu4Naa1aMootNmSRx3VAk6RSslUJmGyHzq',
// authorization_url: 'http://authentik:9000/application/o/authorize/',
// resource_url: 'http://authentik:9000/application/o/userinfo/',
// token_url: 'http://authentik:9000/application/o/token/',
// logout_url: 'http://authentik:9000/application/o/npm/end-session/',
// identifier: 'preferred_username',
// scopes: [],
// auto_create_user: true
// }
// }
// }).then((data) => {
// cy.validateSwaggerSchema('put', 200, '/settings/{name}', data);
// expect(data.result).to.have.property('id');
// expect(data.result.id).to.be.greaterThan(0);
// });
// cy.task('backendApiPut', {
// token: token,
// path: '/api/settings/auth-methods',
// data: {
// value: [
// 'local',
// 'oauth'
// ]
// }
// }).then((data) => {
// cy.validateSwaggerSchema('put', 200, '/settings/{name}', data);
// expect(data.result).to.have.property('id');
// expect(data.result.id).to.be.greaterThan(0);
// });
});
});
it.skip('Should log in with OAuth', function() {
// cy.task('backendApiGet', {
// path: '/oauth/login?redirect_base=' + encodeURI(Cypress.config('baseUrl')),
// }).then((data) => {
// expect(data).to.have.property('result');
// cy.origin('http://authentik:9000', {args: data.result}, (url) => {
// cy.visit(url);
// cy.get('ak-flow-executor')
// .shadow()
// .find('ak-stage-identification')
// .shadow()
// .find('input[name="uidField"]', { visible: true })
// .type('cypress');
// cy.get('ak-flow-executor')
// .shadow()
// .find('ak-stage-identification')
// .shadow()
// .find('button[type="submit"]', { visible: true })
// .click();
// cy.get('ak-flow-executor')
// .shadow()
// .find('ak-stage-password')
// .shadow()
// .find('input[name="password"]', { visible: true })
// .type('fqXBfUYqHvYqiwBHWW7f');
// cy.get('ak-flow-executor')
// .shadow()
// .find('ak-stage-password')
// .shadow()
// .find('button[type="submit"]', { visible: true })
// .click();
// })
// // we should be logged in
// cy.get('#root p.chakra-text')
// .first()
// .should('have.text', 'Nginx Proxy Manager');
// // logout:
// cy.clearLocalStorage();
// });
});
}
});

View File

@ -132,9 +132,9 @@
integrity sha512-BsWiH1yFGjXXS2yvrf5LyuoSIIbPrGUWob917o+BTKuZ7qJdxX8aJLRxs1fS9n6r7vESrq1OUqb68dANcFXuQQ==
"@eslint/plugin-kit@^0.2.0":
version "0.2.3"
resolved "https://registry.yarnpkg.com/@eslint/plugin-kit/-/plugin-kit-0.2.3.tgz#812980a6a41ecf3a8341719f92a6d1e784a2e0e8"
integrity sha512-2b/g5hRmpbb1o4GnTZax9N9m0FXzz9OV42ZzI4rDDMDuHUqigAiQCEWChBWCY4ztAGVRjoWT19v0yMmc5/L5kA==
version "0.2.0"
resolved "https://registry.yarnpkg.com/@eslint/plugin-kit/-/plugin-kit-0.2.0.tgz#8712dccae365d24e9eeecb7b346f85e750ba343d"
integrity sha512-vH9PiIMMwvhCx31Af3HiGzsVNULDbyVkHXwlemn/B0TFj/00ho3y55efXrUZTfQipxoHC5u4xq6zblww1zm1Ig==
dependencies:
levn "^0.4.1"
@ -628,9 +628,9 @@ core-util-is@1.0.2:
integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=
cross-spawn@^7.0.0, cross-spawn@^7.0.2:
version "7.0.6"
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.6.tgz#8a58fe78f00dcd70c370451759dfbfaf03e8ee9f"
integrity sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==
version "7.0.3"
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6"
integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==
dependencies:
path-key "^3.1.0"
shebang-command "^2.0.0"