mirror of
https://github.com/NginxProxyManager/nginx-proxy-manager.git
synced 2025-12-06 00:16:49 +00:00
Compare commits
1 Commits
8eba31913f
...
v2.13.1
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e88d55f1d2 |
285
Jenkinsfile
vendored
Normal file
285
Jenkinsfile
vendored
Normal file
@@ -0,0 +1,285 @@
|
|||||||
|
import groovy.transform.Field
|
||||||
|
|
||||||
|
@Field
|
||||||
|
def shOutput = ""
|
||||||
|
def buildxPushTags = ""
|
||||||
|
|
||||||
|
pipeline {
|
||||||
|
agent {
|
||||||
|
label 'docker-multiarch'
|
||||||
|
}
|
||||||
|
options {
|
||||||
|
buildDiscarder(logRotator(numToKeepStr: '5'))
|
||||||
|
disableConcurrentBuilds()
|
||||||
|
ansiColor('xterm')
|
||||||
|
}
|
||||||
|
environment {
|
||||||
|
IMAGE = 'nginx-proxy-manager'
|
||||||
|
BUILD_VERSION = getVersion()
|
||||||
|
MAJOR_VERSION = '2'
|
||||||
|
BRANCH_LOWER = "${BRANCH_NAME.toLowerCase().replaceAll('\\\\', '-').replaceAll('/', '-').replaceAll('\\.', '-')}"
|
||||||
|
BUILDX_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}"
|
||||||
|
COMPOSE_INTERACTIVE_NO_CLI = 1
|
||||||
|
}
|
||||||
|
stages {
|
||||||
|
stage('Environment') {
|
||||||
|
parallel {
|
||||||
|
stage('Master') {
|
||||||
|
when {
|
||||||
|
branch 'master'
|
||||||
|
}
|
||||||
|
steps {
|
||||||
|
script {
|
||||||
|
buildxPushTags = "-t docker.io/jc21/${IMAGE}:${BUILD_VERSION} -t docker.io/jc21/${IMAGE}:${MAJOR_VERSION} -t docker.io/jc21/${IMAGE}:latest"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
stage('Other') {
|
||||||
|
when {
|
||||||
|
not {
|
||||||
|
branch 'master'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
steps {
|
||||||
|
script {
|
||||||
|
// Defaults to the branch name, which applies to all branches AND PRs
|
||||||
|
buildxPushTags = "-t docker.io/nginxproxymanager/${IMAGE}-dev:${BRANCH_LOWER}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
stage('Versions') {
|
||||||
|
steps {
|
||||||
|
sh 'cat frontend/package.json | jq --arg BUILD_VERSION "${BUILD_VERSION}" \'.version = $BUILD_VERSION\' | sponge frontend/package.json'
|
||||||
|
sh 'echo -e "\\E[1;36mFrontend Version is:\\E[1;33m $(cat frontend/package.json | jq -r .version)\\E[0m"'
|
||||||
|
sh 'cat backend/package.json | jq --arg BUILD_VERSION "${BUILD_VERSION}" \'.version = $BUILD_VERSION\' | sponge backend/package.json'
|
||||||
|
sh 'echo -e "\\E[1;36mBackend Version is:\\E[1;33m $(cat backend/package.json | jq -r .version)\\E[0m"'
|
||||||
|
sh 'sed -i -E "s/(version-)[0-9]+\\.[0-9]+\\.[0-9]+(-green)/\\1${BUILD_VERSION}\\2/" README.md'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
stage('Docker Login') {
|
||||||
|
steps {
|
||||||
|
withCredentials([usernamePassword(credentialsId: 'jc21-dockerhub', passwordVariable: 'dpass', usernameVariable: 'duser')]) {
|
||||||
|
sh 'docker login -u "${duser}" -p "${dpass}"'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
stage('Builds') {
|
||||||
|
parallel {
|
||||||
|
stage('Project') {
|
||||||
|
steps {
|
||||||
|
script {
|
||||||
|
// Frontend and Backend
|
||||||
|
def shStatusCode = sh(label: 'Checking and Building', returnStatus: true, script: '''
|
||||||
|
set -e
|
||||||
|
./scripts/ci/frontend-build > ${WORKSPACE}/tmp-sh-build 2>&1
|
||||||
|
./scripts/ci/test-and-build > ${WORKSPACE}/tmp-sh-build 2>&1
|
||||||
|
''')
|
||||||
|
shOutput = readFile "${env.WORKSPACE}/tmp-sh-build"
|
||||||
|
if (shStatusCode != 0) {
|
||||||
|
error "${shOutput}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
post {
|
||||||
|
always {
|
||||||
|
sh 'rm -f ${WORKSPACE}/tmp-sh-build'
|
||||||
|
}
|
||||||
|
failure {
|
||||||
|
npmGithubPrComment("CI Error:\n\n```\n${shOutput}\n```", true)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
stage('Docs') {
|
||||||
|
steps {
|
||||||
|
dir(path: 'docs') {
|
||||||
|
sh 'yarn install'
|
||||||
|
sh 'yarn build'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
stage('Test Sqlite') {
|
||||||
|
environment {
|
||||||
|
COMPOSE_PROJECT_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}_sqlite"
|
||||||
|
COMPOSE_FILE = 'docker/docker-compose.ci.yml:docker/docker-compose.ci.sqlite.yml'
|
||||||
|
}
|
||||||
|
when {
|
||||||
|
not {
|
||||||
|
equals expected: 'UNSTABLE', actual: currentBuild.result
|
||||||
|
}
|
||||||
|
}
|
||||||
|
steps {
|
||||||
|
sh 'rm -rf ./test/results/junit/*'
|
||||||
|
sh './scripts/ci/fulltest-cypress'
|
||||||
|
}
|
||||||
|
post {
|
||||||
|
always {
|
||||||
|
// Dumps to analyze later
|
||||||
|
sh 'mkdir -p debug/sqlite'
|
||||||
|
sh 'docker logs $(docker compose ps --all -q fullstack) > debug/sqlite/docker_fullstack.log 2>&1'
|
||||||
|
sh 'docker logs $(docker compose ps --all -q stepca) > debug/sqlite/docker_stepca.log 2>&1'
|
||||||
|
sh 'docker logs $(docker compose ps --all -q pdns) > debug/sqlite/docker_pdns.log 2>&1'
|
||||||
|
sh 'docker logs $(docker compose ps --all -q pdns-db) > debug/sqlite/docker_pdns-db.log 2>&1'
|
||||||
|
sh 'docker logs $(docker compose ps --all -q dnsrouter) > debug/sqlite/docker_dnsrouter.log 2>&1'
|
||||||
|
junit 'test/results/junit/*'
|
||||||
|
sh 'docker compose down --remove-orphans --volumes -t 30 || true'
|
||||||
|
}
|
||||||
|
unstable {
|
||||||
|
dir(path: 'test/results') {
|
||||||
|
archiveArtifacts(allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
stage('Test Mysql') {
|
||||||
|
environment {
|
||||||
|
COMPOSE_PROJECT_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}_mysql"
|
||||||
|
COMPOSE_FILE = 'docker/docker-compose.ci.yml:docker/docker-compose.ci.mysql.yml'
|
||||||
|
}
|
||||||
|
when {
|
||||||
|
not {
|
||||||
|
equals expected: 'UNSTABLE', actual: currentBuild.result
|
||||||
|
}
|
||||||
|
}
|
||||||
|
steps {
|
||||||
|
sh 'rm -rf ./test/results/junit/*'
|
||||||
|
sh './scripts/ci/fulltest-cypress'
|
||||||
|
}
|
||||||
|
post {
|
||||||
|
always {
|
||||||
|
// Dumps to analyze later
|
||||||
|
sh 'mkdir -p debug/mysql'
|
||||||
|
sh 'docker logs $(docker compose ps --all -q fullstack) > debug/mysql/docker_fullstack.log 2>&1'
|
||||||
|
sh 'docker logs $(docker compose ps --all -q stepca) > debug/mysql/docker_stepca.log 2>&1'
|
||||||
|
sh 'docker logs $(docker compose ps --all -q pdns) > debug/mysql/docker_pdns.log 2>&1'
|
||||||
|
sh 'docker logs $(docker compose ps --all -q pdns-db) > debug/mysql/docker_pdns-db.log 2>&1'
|
||||||
|
sh 'docker logs $(docker compose ps --all -q dnsrouter) > debug/mysql/docker_dnsrouter.log 2>&1'
|
||||||
|
junit 'test/results/junit/*'
|
||||||
|
sh 'docker compose down --remove-orphans --volumes -t 30 || true'
|
||||||
|
}
|
||||||
|
unstable {
|
||||||
|
dir(path: 'test/results') {
|
||||||
|
archiveArtifacts(allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
stage('Test Postgres') {
	environment {
		COMPOSE_PROJECT_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}_postgres"
		COMPOSE_FILE = 'docker/docker-compose.ci.yml:docker/docker-compose.ci.postgres.yml'
	}
	when {
		// Skip the expensive fulltest run once an earlier stage marked the build unstable
		not {
			equals expected: 'UNSTABLE', actual: currentBuild.result
		}
	}
	steps {
		sh 'rm -rf ./test/results/junit/*'
		sh './scripts/ci/fulltest-cypress'
	}
	post {
		always {
			// Dump container logs so failures can be analyzed from archived artifacts
			sh 'mkdir -p debug/postgres'
			sh 'docker logs $(docker compose ps --all -q fullstack) > debug/postgres/docker_fullstack.log 2>&1'
			sh 'docker logs $(docker compose ps --all -q stepca) > debug/postgres/docker_stepca.log 2>&1'
			sh 'docker logs $(docker compose ps --all -q pdns) > debug/postgres/docker_pdns.log 2>&1'
			sh 'docker logs $(docker compose ps --all -q pdns-db) > debug/postgres/docker_pdns-db.log 2>&1'
			sh 'docker logs $(docker compose ps --all -q dnsrouter) > debug/postgres/docker_dnsrouter.log 2>&1'
			sh 'docker logs $(docker compose ps --all -q db-postgres) > debug/postgres/docker_db-postgres.log 2>&1'
			sh 'docker logs $(docker compose ps --all -q authentik) > debug/postgres/docker_authentik.log 2>&1'
			sh 'docker logs $(docker compose ps --all -q authentik-redis) > debug/postgres/docker_authentik-redis.log 2>&1'
			// fix: was "docke rcompose", which always failed (command not found)
			// and left the authentik-ldap log dump empty
			sh 'docker logs $(docker compose ps --all -q authentik-ldap) > debug/postgres/docker_authentik-ldap.log 2>&1'

			junit 'test/results/junit/*'
			sh 'docker compose down --remove-orphans --volumes -t 30 || true'
		}
		unstable {
			dir(path: 'test/results') {
				archiveArtifacts(allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml')
			}
		}
	}
}
|
||||||
|
stage('MultiArch Build') {
|
||||||
|
when {
|
||||||
|
not {
|
||||||
|
equals expected: 'UNSTABLE', actual: currentBuild.result
|
||||||
|
}
|
||||||
|
}
|
||||||
|
steps {
|
||||||
|
sh "./scripts/buildx --push ${buildxPushTags}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
stage('Docs / Comment') {
|
||||||
|
parallel {
|
||||||
|
stage('Docs Job') {
|
||||||
|
when {
|
||||||
|
allOf {
|
||||||
|
branch pattern: "^(develop|master)\$", comparator: "REGEXP"
|
||||||
|
not {
|
||||||
|
equals expected: 'UNSTABLE', actual: currentBuild.result
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
steps {
|
||||||
|
build wait: false, job: 'nginx-proxy-manager-docs', parameters: [string(name: 'docs_branch', value: "$BRANCH_NAME")]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
stage('PR Comment') {
|
||||||
|
when {
|
||||||
|
allOf {
|
||||||
|
changeRequest()
|
||||||
|
not {
|
||||||
|
equals expected: 'UNSTABLE', actual: currentBuild.result
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
steps {
|
||||||
|
script {
|
||||||
|
npmGithubPrComment("""Docker Image for build ${BUILD_NUMBER} is available on [DockerHub](https://cloud.docker.com/repository/docker/nginxproxymanager/${IMAGE}-dev):
|
||||||
|
```
|
||||||
|
nginxproxymanager/${IMAGE}-dev:${BRANCH_LOWER}
|
||||||
|
```
|
||||||
|
|
||||||
|
> [!NOTE]
|
||||||
|
> Ensure you backup your NPM instance before testing this image! Especially if there are database changes.
|
||||||
|
> This is a different docker image namespace than the official image.
|
||||||
|
|
||||||
|
> [!WARNING]
|
||||||
|
> Changes and additions to DNS Providers require verification by at least 2 members of the community!
|
||||||
|
""", true)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
post {
|
||||||
|
always {
|
||||||
|
sh 'echo Reverting ownership'
|
||||||
|
sh 'docker run --rm -v "$(pwd):/data" jc21/ci-tools chown -R "$(id -u):$(id -g)" /data'
|
||||||
|
printResult(true)
|
||||||
|
}
|
||||||
|
failure {
|
||||||
|
archiveArtifacts(artifacts: 'debug/**/*.*', allowEmptyArchive: true)
|
||||||
|
}
|
||||||
|
unstable {
|
||||||
|
archiveArtifacts(artifacts: 'debug/**/*.*', allowEmptyArchive: true)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Returns the build version read from the .version file in the workspace
// root, with surrounding whitespace trimmed.
def getVersion() {
	// 'def' keeps ver local to this function; without it the variable leaks
	// into the pipeline's script binding (a shared global in Jenkins CPS).
	def ver = sh(script: 'cat .version', returnStdout: true)
	return ver.trim()
}
|
||||||
|
|
||||||
|
// Returns the abbreviated hash of the current HEAD commit, trimmed of the
// trailing newline produced by git.
def getCommit() {
	// 'def' keeps ver local to this function; without it the variable leaks
	// into the pipeline's script binding (a shared global in Jenkins CPS).
	def ver = sh(script: 'git log -n 1 --format=%h', returnStdout: true)
	return ver.trim()
}
|
||||||
@@ -1,8 +1,6 @@
|
|||||||
import knex from "knex";
|
import knex from "knex";
|
||||||
import {configGet, configHas} from "./lib/config.js";
|
import {configGet, configHas} from "./lib/config.js";
|
||||||
|
|
||||||
let instance = null;
|
|
||||||
|
|
||||||
const generateDbConfig = () => {
|
const generateDbConfig = () => {
|
||||||
if (!configHas("database")) {
|
if (!configHas("database")) {
|
||||||
throw new Error(
|
throw new Error(
|
||||||
@@ -32,11 +30,4 @@ const generateDbConfig = () => {
|
|||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
const getInstance = () => {
|
export default knex(generateDbConfig());
|
||||||
if (!instance) {
|
|
||||||
instance = knex(generateDbConfig());
|
|
||||||
}
|
|
||||||
return instance;
|
|
||||||
}
|
|
||||||
|
|
||||||
export default getInstance;
|
|
||||||
|
|||||||
@@ -2,9 +2,9 @@ import db from "./db.js";
|
|||||||
import { migrate as logger } from "./logger.js";
|
import { migrate as logger } from "./logger.js";
|
||||||
|
|
||||||
const migrateUp = async () => {
|
const migrateUp = async () => {
|
||||||
const version = await db().migrate.currentVersion();
|
const version = await db.migrate.currentVersion();
|
||||||
logger.info("Current database version:", version);
|
logger.info("Current database version:", version);
|
||||||
return await db().migrate.latest({
|
return await db.migrate.latest({
|
||||||
tableName: "migrations",
|
tableName: "migrations",
|
||||||
directory: "migrations",
|
directory: "migrations",
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -10,7 +10,7 @@ import now from "./now_helper.js";
|
|||||||
import ProxyHostModel from "./proxy_host.js";
|
import ProxyHostModel from "./proxy_host.js";
|
||||||
import User from "./user.js";
|
import User from "./user.js";
|
||||||
|
|
||||||
Model.knex(db());
|
Model.knex(db);
|
||||||
|
|
||||||
const boolFields = ["is_deleted", "satisfy_any", "pass_auth"];
|
const boolFields = ["is_deleted", "satisfy_any", "pass_auth"];
|
||||||
|
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ import db from "../db.js";
|
|||||||
import accessListModel from "./access_list.js";
|
import accessListModel from "./access_list.js";
|
||||||
import now from "./now_helper.js";
|
import now from "./now_helper.js";
|
||||||
|
|
||||||
Model.knex(db());
|
Model.knex(db);
|
||||||
|
|
||||||
class AccessListAuth extends Model {
|
class AccessListAuth extends Model {
|
||||||
$beforeInsert() {
|
$beforeInsert() {
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ import db from "../db.js";
|
|||||||
import accessListModel from "./access_list.js";
|
import accessListModel from "./access_list.js";
|
||||||
import now from "./now_helper.js";
|
import now from "./now_helper.js";
|
||||||
|
|
||||||
Model.knex(db());
|
Model.knex(db);
|
||||||
|
|
||||||
class AccessListClient extends Model {
|
class AccessListClient extends Model {
|
||||||
$beforeInsert() {
|
$beforeInsert() {
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ import db from "../db.js";
|
|||||||
import now from "./now_helper.js";
|
import now from "./now_helper.js";
|
||||||
import User from "./user.js";
|
import User from "./user.js";
|
||||||
|
|
||||||
Model.knex(db());
|
Model.knex(db);
|
||||||
|
|
||||||
class AuditLog extends Model {
|
class AuditLog extends Model {
|
||||||
$beforeInsert() {
|
$beforeInsert() {
|
||||||
|
|||||||
@@ -8,7 +8,7 @@ import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.j
|
|||||||
import now from "./now_helper.js";
|
import now from "./now_helper.js";
|
||||||
import User from "./user.js";
|
import User from "./user.js";
|
||||||
|
|
||||||
Model.knex(db());
|
Model.knex(db);
|
||||||
|
|
||||||
const boolFields = ["is_deleted"];
|
const boolFields = ["is_deleted"];
|
||||||
|
|
||||||
|
|||||||
@@ -11,7 +11,7 @@ import redirectionHostModel from "./redirection_host.js";
|
|||||||
import streamModel from "./stream.js";
|
import streamModel from "./stream.js";
|
||||||
import userModel from "./user.js";
|
import userModel from "./user.js";
|
||||||
|
|
||||||
Model.knex(db());
|
Model.knex(db);
|
||||||
|
|
||||||
const boolFields = ["is_deleted"];
|
const boolFields = ["is_deleted"];
|
||||||
|
|
||||||
|
|||||||
@@ -8,7 +8,7 @@ import Certificate from "./certificate.js";
|
|||||||
import now from "./now_helper.js";
|
import now from "./now_helper.js";
|
||||||
import User from "./user.js";
|
import User from "./user.js";
|
||||||
|
|
||||||
Model.knex(db());
|
Model.knex(db);
|
||||||
|
|
||||||
const boolFields = ["is_deleted", "ssl_forced", "http2_support", "enabled", "hsts_enabled", "hsts_subdomains"];
|
const boolFields = ["is_deleted", "ssl_forced", "http2_support", "enabled", "hsts_enabled", "hsts_subdomains"];
|
||||||
|
|
||||||
|
|||||||
@@ -2,7 +2,7 @@ import { Model } from "objection";
|
|||||||
import db from "../db.js";
|
import db from "../db.js";
|
||||||
import { isSqlite } from "../lib/config.js";
|
import { isSqlite } from "../lib/config.js";
|
||||||
|
|
||||||
Model.knex(db());
|
Model.knex(db);
|
||||||
|
|
||||||
export default () => {
|
export default () => {
|
||||||
if (isSqlite()) {
|
if (isSqlite()) {
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ import Certificate from "./certificate.js";
|
|||||||
import now from "./now_helper.js";
|
import now from "./now_helper.js";
|
||||||
import User from "./user.js";
|
import User from "./user.js";
|
||||||
|
|
||||||
Model.knex(db());
|
Model.knex(db);
|
||||||
|
|
||||||
const boolFields = [
|
const boolFields = [
|
||||||
"is_deleted",
|
"is_deleted",
|
||||||
|
|||||||
@@ -8,7 +8,7 @@ import Certificate from "./certificate.js";
|
|||||||
import now from "./now_helper.js";
|
import now from "./now_helper.js";
|
||||||
import User from "./user.js";
|
import User from "./user.js";
|
||||||
|
|
||||||
Model.knex(db());
|
Model.knex(db);
|
||||||
|
|
||||||
const boolFields = [
|
const boolFields = [
|
||||||
"is_deleted",
|
"is_deleted",
|
||||||
|
|||||||
@@ -4,7 +4,7 @@
|
|||||||
import { Model } from "objection";
|
import { Model } from "objection";
|
||||||
import db from "../db.js";
|
import db from "../db.js";
|
||||||
|
|
||||||
Model.knex(db());
|
Model.knex(db);
|
||||||
|
|
||||||
class Setting extends Model {
|
class Setting extends Model {
|
||||||
$beforeInsert () {
|
$beforeInsert () {
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ import Certificate from "./certificate.js";
|
|||||||
import now from "./now_helper.js";
|
import now from "./now_helper.js";
|
||||||
import User from "./user.js";
|
import User from "./user.js";
|
||||||
|
|
||||||
Model.knex(db());
|
Model.knex(db);
|
||||||
|
|
||||||
const boolFields = ["is_deleted", "enabled", "tcp_forwarding", "udp_forwarding"];
|
const boolFields = ["is_deleted", "enabled", "tcp_forwarding", "udp_forwarding"];
|
||||||
|
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.j
|
|||||||
import now from "./now_helper.js";
|
import now from "./now_helper.js";
|
||||||
import UserPermission from "./user_permission.js";
|
import UserPermission from "./user_permission.js";
|
||||||
|
|
||||||
Model.knex(db());
|
Model.knex(db);
|
||||||
|
|
||||||
const boolFields = ["is_deleted", "is_disabled"];
|
const boolFields = ["is_deleted", "is_disabled"];
|
||||||
|
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ import { Model } from "objection";
|
|||||||
import db from "../db.js";
|
import db from "../db.js";
|
||||||
import now from "./now_helper.js";
|
import now from "./now_helper.js";
|
||||||
|
|
||||||
Model.knex(db());
|
Model.knex(db);
|
||||||
|
|
||||||
class UserPermission extends Model {
|
class UserPermission extends Model {
|
||||||
$beforeInsert () {
|
$beforeInsert () {
|
||||||
|
|||||||
@@ -4,6 +4,7 @@
|
|||||||
# This file assumes that the frontend has been built using ./scripts/frontend-build
|
# This file assumes that the frontend has been built using ./scripts/frontend-build
|
||||||
|
|
||||||
FROM nginxproxymanager/testca AS testca
|
FROM nginxproxymanager/testca AS testca
|
||||||
|
FROM letsencrypt/pebble AS pebbleca
|
||||||
FROM nginxproxymanager/nginx-full:certbot-node
|
FROM nginxproxymanager/nginx-full:certbot-node
|
||||||
|
|
||||||
ARG TARGETPLATFORM
|
ARG TARGETPLATFORM
|
||||||
@@ -45,6 +46,7 @@ RUN yarn install \
|
|||||||
|
|
||||||
# add late to limit cache-busting by modifications
|
# add late to limit cache-busting by modifications
|
||||||
COPY docker/rootfs /
|
COPY docker/rootfs /
|
||||||
|
COPY --from=pebbleca /test/certs/pebble.minica.pem /etc/ssl/certs/pebble.minica.pem
|
||||||
COPY --from=testca /home/step/certs/root_ca.crt /etc/ssl/certs/NginxProxyManager.crt
|
COPY --from=testca /home/step/certs/root_ca.crt /etc/ssl/certs/NginxProxyManager.crt
|
||||||
|
|
||||||
# Remove frontend service not required for prod, dev nginx config as well
|
# Remove frontend service not required for prod, dev nginx config as well
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
FROM nginxproxymanager/testca AS testca
|
FROM nginxproxymanager/testca AS testca
|
||||||
|
FROM letsencrypt/pebble AS pebbleca
|
||||||
FROM nginxproxymanager/nginx-full:certbot-node
|
FROM nginxproxymanager/nginx-full:certbot-node
|
||||||
LABEL maintainer="Jamie Curnow <jc@jc21.com>"
|
LABEL maintainer="Jamie Curnow <jc@jc21.com>"
|
||||||
|
|
||||||
@@ -32,6 +33,7 @@ RUN rm -f /etc/nginx/conf.d/production.conf \
|
|||||||
&& chmod 644 -R /root/.cache
|
&& chmod 644 -R /root/.cache
|
||||||
|
|
||||||
# Certs for testing purposes
|
# Certs for testing purposes
|
||||||
|
COPY --from=pebbleca /test/certs/pebble.minica.pem /etc/ssl/certs/pebble.minica.pem
|
||||||
COPY --from=testca /home/step/certs/root_ca.crt /etc/ssl/certs/NginxProxyManager.crt
|
COPY --from=testca /home/step/certs/root_ca.crt /etc/ssl/certs/NginxProxyManager.crt
|
||||||
|
|
||||||
EXPOSE 80 81 443
|
EXPOSE 80 81 443
|
||||||
|
|||||||
12
docker/dev/pebble-config.json
Normal file
12
docker/dev/pebble-config.json
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
{
|
||||||
|
"pebble": {
|
||||||
|
"listenAddress": "0.0.0.0:443",
|
||||||
|
"managementListenAddress": "0.0.0.0:15000",
|
||||||
|
"certificate": "test/certs/localhost/cert.pem",
|
||||||
|
"privateKey": "test/certs/localhost/key.pem",
|
||||||
|
"httpPort": 80,
|
||||||
|
"tlsPort": 443,
|
||||||
|
"ocspResponderURL": "",
|
||||||
|
"externalAccountBindingRequired": false
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,77 +0,0 @@
|
|||||||
import { DateTimeFormat } from "src/locale";
|
|
||||||
import { afterAll, beforeAll, describe, expect, it } from "vitest";
|
|
||||||
|
|
||||||
describe("DateFormatter", () => {
|
|
||||||
// Keep a reference to the real Intl to restore later
|
|
||||||
const RealIntl = global.Intl;
|
|
||||||
const desiredTimeZone = "Europe/London";
|
|
||||||
const desiredLocale = "en-GB";
|
|
||||||
|
|
||||||
beforeAll(() => {
|
|
||||||
// Ensure Node-based libs using TZ behave deterministically
|
|
||||||
try {
|
|
||||||
process.env.TZ = desiredTimeZone;
|
|
||||||
} catch {
|
|
||||||
// ignore if not available
|
|
||||||
}
|
|
||||||
|
|
||||||
// Mock Intl.DateTimeFormat so formatting is stable regardless of host
|
|
||||||
const MockedDateTimeFormat = class extends RealIntl.DateTimeFormat {
|
|
||||||
constructor(
|
|
||||||
_locales?: string | string[],
|
|
||||||
options?: Intl.DateTimeFormatOptions,
|
|
||||||
) {
|
|
||||||
super(desiredLocale, {
|
|
||||||
...options,
|
|
||||||
timeZone: desiredTimeZone,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
} as unknown as typeof Intl.DateTimeFormat;
|
|
||||||
|
|
||||||
global.Intl = {
|
|
||||||
...RealIntl,
|
|
||||||
DateTimeFormat: MockedDateTimeFormat,
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
afterAll(() => {
|
|
||||||
// Restore original Intl after tests
|
|
||||||
global.Intl = RealIntl;
|
|
||||||
});
|
|
||||||
|
|
||||||
it("format date from iso date", () => {
|
|
||||||
const value = "2024-01-01T00:00:00.000Z";
|
|
||||||
const text = DateTimeFormat(value);
|
|
||||||
expect(text).toBe("Monday, 01/01/2024, 12:00:00 am");
|
|
||||||
});
|
|
||||||
|
|
||||||
it("format date from unix timestamp number", () => {
|
|
||||||
const value = 1762476112;
|
|
||||||
const text = DateTimeFormat(value);
|
|
||||||
expect(text).toBe("Friday, 07/11/2025, 12:41:52 am");
|
|
||||||
});
|
|
||||||
|
|
||||||
it("format date from unix timestamp string", () => {
|
|
||||||
const value = "1762476112";
|
|
||||||
const text = DateTimeFormat(value);
|
|
||||||
expect(text).toBe("Friday, 07/11/2025, 12:41:52 am");
|
|
||||||
});
|
|
||||||
|
|
||||||
it("catch bad format from string", () => {
|
|
||||||
const value = "this is not a good date";
|
|
||||||
const text = DateTimeFormat(value);
|
|
||||||
expect(text).toBe("this is not a good date");
|
|
||||||
});
|
|
||||||
|
|
||||||
it("catch bad format from number", () => {
|
|
||||||
const value = -100;
|
|
||||||
const text = DateTimeFormat(value);
|
|
||||||
expect(text).toBe("-100");
|
|
||||||
});
|
|
||||||
|
|
||||||
it("catch bad format from number as string", () => {
|
|
||||||
const value = "-100";
|
|
||||||
const text = DateTimeFormat(value);
|
|
||||||
expect(text).toBe("-100");
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -1,36 +1,15 @@
|
|||||||
import { fromUnixTime, intlFormat, parseISO } from "date-fns";
|
import { intlFormat, parseISO } from "date-fns";
|
||||||
|
|
||||||
const isUnixTimestamp = (value: unknown): boolean => {
|
const DateTimeFormat = (isoDate: string) =>
|
||||||
if (typeof value !== "number" && typeof value !== "string") return false;
|
intlFormat(parseISO(isoDate), {
|
||||||
const num = Number(value);
|
weekday: "long",
|
||||||
if (!Number.isFinite(num)) return false;
|
year: "numeric",
|
||||||
// Check plausible Unix timestamp range: from 1970 to ~year 3000
|
month: "numeric",
|
||||||
// Support both seconds and milliseconds
|
day: "numeric",
|
||||||
if (num > 0 && num < 10000000000) return true; // seconds (<= 10 digits)
|
hour: "numeric",
|
||||||
if (num >= 10000000000 && num < 32503680000000) return true; // milliseconds (<= 13 digits)
|
minute: "numeric",
|
||||||
return false;
|
second: "numeric",
|
||||||
};
|
hour12: true,
|
||||||
|
});
|
||||||
const DateTimeFormat = (value: string | number): string => {
|
|
||||||
if (typeof value !== "number" && typeof value !== "string") return `${value}`;
|
|
||||||
|
|
||||||
try {
|
|
||||||
const d = isUnixTimestamp(value)
|
|
||||||
? fromUnixTime(+value)
|
|
||||||
: parseISO(`${value}`);
|
|
||||||
return intlFormat(d, {
|
|
||||||
weekday: "long",
|
|
||||||
year: "numeric",
|
|
||||||
month: "numeric",
|
|
||||||
day: "numeric",
|
|
||||||
hour: "numeric",
|
|
||||||
minute: "numeric",
|
|
||||||
second: "numeric",
|
|
||||||
hour12: true,
|
|
||||||
});
|
|
||||||
} catch {
|
|
||||||
return `${value}`;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
export { DateTimeFormat };
|
export { DateTimeFormat };
|
||||||
|
|||||||
@@ -30,20 +30,13 @@ const getLocale = (short = false) => {
|
|||||||
if (short) {
|
if (short) {
|
||||||
return loc.slice(0, 2);
|
return loc.slice(0, 2);
|
||||||
}
|
}
|
||||||
// finally, fallback
|
|
||||||
if (!loc) {
|
|
||||||
loc = "en";
|
|
||||||
}
|
|
||||||
return loc;
|
return loc;
|
||||||
};
|
};
|
||||||
|
|
||||||
const cache = createIntlCache();
|
const cache = createIntlCache();
|
||||||
|
|
||||||
const initialMessages = loadMessages(getLocale());
|
const initialMessages = loadMessages(getLocale());
|
||||||
let intl = createIntl(
|
let intl = createIntl({ locale: getLocale(), messages: initialMessages }, cache);
|
||||||
{ locale: getLocale(), messages: initialMessages },
|
|
||||||
cache,
|
|
||||||
);
|
|
||||||
|
|
||||||
const changeLocale = (locale: string): void => {
|
const changeLocale = (locale: string): void => {
|
||||||
const messages = loadMessages(locale);
|
const messages = loadMessages(locale);
|
||||||
@@ -83,12 +76,4 @@ const T = ({
|
|||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
export {
|
export { localeOptions, getFlagCodeForLocale, getLocale, createIntl, changeLocale, intl, T };
|
||||||
localeOptions,
|
|
||||||
getFlagCodeForLocale,
|
|
||||||
getLocale,
|
|
||||||
createIntl,
|
|
||||||
changeLocale,
|
|
||||||
intl,
|
|
||||||
T,
|
|
||||||
};
|
|
||||||
|
|||||||
@@ -16,7 +16,7 @@ if hash docker 2>/dev/null; then
|
|||||||
-e NODE_OPTIONS=--openssl-legacy-provider \
|
-e NODE_OPTIONS=--openssl-legacy-provider \
|
||||||
-v "$(pwd)/frontend:/app/frontend" \
|
-v "$(pwd)/frontend:/app/frontend" \
|
||||||
-w /app/frontend "${DOCKER_IMAGE}" \
|
-w /app/frontend "${DOCKER_IMAGE}" \
|
||||||
sh -c "yarn install && yarn lint && yarn vitest run && yarn build && chown -R $(id -u):$(id -g) /app/frontend"
|
sh -c "yarn install && yarn lint && yarn build && chown -R $(id -u):$(id -g) /app/frontend"
|
||||||
|
|
||||||
echo -e "${BLUE}❯ ${GREEN}Building Frontend Complete${RESET}"
|
echo -e "${BLUE}❯ ${GREEN}Building Frontend Complete${RESET}"
|
||||||
else
|
else
|
||||||
|
|||||||
Reference in New Issue
Block a user