Compare commits

5 Commits

Author        SHA1        Message                                                      Date
Jamie Curnow  7214bcdacb  Attempt to fix race condition with database instantiation   2025-11-06 12:56:07 +10:00
Jamie Curnow  9dc7dc28d4  Update sqlite3 package again                                 2025-11-06 12:55:48 +10:00
Jamie Curnow  36e040157f  Remove references to pebble ca                               2025-11-06 09:42:32 +10:00
Jamie Curnow  be2599872e  Use new pebble image location                                2025-11-06 08:14:27 +10:00
Jamie Curnow  d7ff736abf  Revert sqlite package to same as 2.12.*                      2025-11-06 07:57:14 +10:00
6 changed files with 303 additions and 131 deletions
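
The first commit targets a race around database instantiation. The file carrying that fix is not among the diffs shown below, so the following is only a generic TypeScript sketch of the usual remedy for this class of bug: memoise the in-flight initialisation promise so concurrent callers share a single connection attempt. `connectToDatabase` and `Database` are hypothetical stand-ins, not code from this repository.

```
// Hypothetical sketch: serialise database instantiation so concurrent callers
// can never trigger two competing connection setups.
interface Database {
  query(sql: string): Promise<unknown>;
}

// Stand-in for the real connection factory (e.g. a Knex/sqlite3 setup).
async function connectToDatabase(): Promise<Database> {
  return {
    query: async (sql: string) => {
      console.log(`executing: ${sql}`);
      return [];
    },
  };
}

let dbPromise: Promise<Database> | null = null;

export function getDatabase(): Promise<Database> {
  // The first caller starts the connection; later callers are handed the same
  // promise, so the setup code runs at most once even under concurrent access.
  if (dbPromise === null) {
    dbPromise = connectToDatabase();
  }
  return dbPromise;
}
```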

Jenkinsfile (vendored, new file, 285 lines added)

@@ -0,0 +1,285 @@
import groovy.transform.Field
@Field
def shOutput = ""
def buildxPushTags = ""
pipeline {
agent {
label 'docker-multiarch'
}
options {
buildDiscarder(logRotator(numToKeepStr: '5'))
disableConcurrentBuilds()
ansiColor('xterm')
}
environment {
IMAGE = 'nginx-proxy-manager'
BUILD_VERSION = getVersion()
MAJOR_VERSION = '2'
BRANCH_LOWER = "${BRANCH_NAME.toLowerCase().replaceAll('\\\\', '-').replaceAll('/', '-').replaceAll('\\.', '-')}"
BUILDX_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}"
COMPOSE_INTERACTIVE_NO_CLI = 1
}
stages {
stage('Environment') {
parallel {
stage('Master') {
when {
branch 'master'
}
steps {
script {
buildxPushTags = "-t docker.io/jc21/${IMAGE}:${BUILD_VERSION} -t docker.io/jc21/${IMAGE}:${MAJOR_VERSION} -t docker.io/jc21/${IMAGE}:latest"
}
}
}
stage('Other') {
when {
not {
branch 'master'
}
}
steps {
script {
// Defaults to the branch name, which applies to all branches and PRs
buildxPushTags = "-t docker.io/nginxproxymanager/${IMAGE}-dev:${BRANCH_LOWER}"
}
}
}
stage('Versions') {
steps {
sh 'cat frontend/package.json | jq --arg BUILD_VERSION "${BUILD_VERSION}" \'.version = $BUILD_VERSION\' | sponge frontend/package.json'
sh 'echo -e "\\E[1;36mFrontend Version is:\\E[1;33m $(cat frontend/package.json | jq -r .version)\\E[0m"'
sh 'cat backend/package.json | jq --arg BUILD_VERSION "${BUILD_VERSION}" \'.version = $BUILD_VERSION\' | sponge backend/package.json'
sh 'echo -e "\\E[1;36mBackend Version is:\\E[1;33m $(cat backend/package.json | jq -r .version)\\E[0m"'
sh 'sed -i -E "s/(version-)[0-9]+\\.[0-9]+\\.[0-9]+(-green)/\\1${BUILD_VERSION}\\2/" README.md'
}
}
stage('Docker Login') {
steps {
withCredentials([usernamePassword(credentialsId: 'jc21-dockerhub', passwordVariable: 'dpass', usernameVariable: 'duser')]) {
sh 'docker login -u "${duser}" -p "${dpass}"'
}
}
}
}
}
stage('Builds') {
parallel {
stage('Project') {
steps {
script {
// Frontend and Backend
def shStatusCode = sh(label: 'Checking and Building', returnStatus: true, script: '''
set -e
./scripts/ci/frontend-build > ${WORKSPACE}/tmp-sh-build 2>&1
./scripts/ci/test-and-build > ${WORKSPACE}/tmp-sh-build 2>&1
''')
shOutput = readFile "${env.WORKSPACE}/tmp-sh-build"
if (shStatusCode != 0) {
error "${shOutput}"
}
}
}
post {
always {
sh 'rm -f ${WORKSPACE}/tmp-sh-build'
}
failure {
npmGithubPrComment("CI Error:\n\n```\n${shOutput}\n```", true)
}
}
}
stage('Docs') {
steps {
dir(path: 'docs') {
sh 'yarn install'
sh 'yarn build'
}
}
}
}
}
stage('Test Sqlite') {
environment {
COMPOSE_PROJECT_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}_sqlite"
COMPOSE_FILE = 'docker/docker-compose.ci.yml:docker/docker-compose.ci.sqlite.yml'
}
when {
not {
equals expected: 'UNSTABLE', actual: currentBuild.result
}
}
steps {
sh 'rm -rf ./test/results/junit/*'
sh './scripts/ci/fulltest-cypress'
}
post {
always {
// Dumps to analyze later
sh 'mkdir -p debug/sqlite'
sh 'docker logs $(docker compose ps --all -q fullstack) > debug/sqlite/docker_fullstack.log 2>&1'
sh 'docker logs $(docker compose ps --all -q stepca) > debug/sqlite/docker_stepca.log 2>&1'
sh 'docker logs $(docker compose ps --all -q pdns) > debug/sqlite/docker_pdns.log 2>&1'
sh 'docker logs $(docker compose ps --all -q pdns-db) > debug/sqlite/docker_pdns-db.log 2>&1'
sh 'docker logs $(docker compose ps --all -q dnsrouter) > debug/sqlite/docker_dnsrouter.log 2>&1'
junit 'test/results/junit/*'
sh 'docker compose down --remove-orphans --volumes -t 30 || true'
}
unstable {
dir(path: 'test/results') {
archiveArtifacts(allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml')
}
}
}
}
stage('Test Mysql') {
environment {
COMPOSE_PROJECT_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}_mysql"
COMPOSE_FILE = 'docker/docker-compose.ci.yml:docker/docker-compose.ci.mysql.yml'
}
when {
not {
equals expected: 'UNSTABLE', actual: currentBuild.result
}
}
steps {
sh 'rm -rf ./test/results/junit/*'
sh './scripts/ci/fulltest-cypress'
}
post {
always {
// Dumps to analyze later
sh 'mkdir -p debug/mysql'
sh 'docker logs $(docker compose ps --all -q fullstack) > debug/mysql/docker_fullstack.log 2>&1'
sh 'docker logs $(docker compose ps --all -q stepca) > debug/mysql/docker_stepca.log 2>&1'
sh 'docker logs $(docker compose ps --all -q pdns) > debug/mysql/docker_pdns.log 2>&1'
sh 'docker logs $(docker compose ps --all -q pdns-db) > debug/mysql/docker_pdns-db.log 2>&1'
sh 'docker logs $(docker compose ps --all -q dnsrouter) > debug/mysql/docker_dnsrouter.log 2>&1'
junit 'test/results/junit/*'
sh 'docker compose down --remove-orphans --volumes -t 30 || true'
}
unstable {
dir(path: 'test/results') {
archiveArtifacts(allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml')
}
}
}
}
stage('Test Postgres') {
environment {
COMPOSE_PROJECT_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}_postgres"
COMPOSE_FILE = 'docker/docker-compose.ci.yml:docker/docker-compose.ci.postgres.yml'
}
when {
not {
equals expected: 'UNSTABLE', actual: currentBuild.result
}
}
steps {
sh 'rm -rf ./test/results/junit/*'
sh './scripts/ci/fulltest-cypress'
}
post {
always {
// Dumps to analyze later
sh 'mkdir -p debug/postgres'
sh 'docker logs $(docker compose ps --all -q fullstack) > debug/postgres/docker_fullstack.log 2>&1'
sh 'docker logs $(docker compose ps --all -q stepca) > debug/postgres/docker_stepca.log 2>&1'
sh 'docker logs $(docker compose ps --all -q pdns) > debug/postgres/docker_pdns.log 2>&1'
sh 'docker logs $(docker compose ps --all -q pdns-db) > debug/postgres/docker_pdns-db.log 2>&1'
sh 'docker logs $(docker compose ps --all -q dnsrouter) > debug/postgres/docker_dnsrouter.log 2>&1'
sh 'docker logs $(docker compose ps --all -q db-postgres) > debug/postgres/docker_db-postgres.log 2>&1'
sh 'docker logs $(docker compose ps --all -q authentik) > debug/postgres/docker_authentik.log 2>&1'
sh 'docker logs $(docker compose ps --all -q authentik-redis) > debug/postgres/docker_authentik-redis.log 2>&1'
sh 'docker logs $(docker compose ps --all -q authentik-ldap) > debug/postgres/docker_authentik-ldap.log 2>&1'
junit 'test/results/junit/*'
sh 'docker compose down --remove-orphans --volumes -t 30 || true'
}
unstable {
dir(path: 'test/results') {
archiveArtifacts(allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml')
}
}
}
}
stage('MultiArch Build') {
when {
not {
equals expected: 'UNSTABLE', actual: currentBuild.result
}
}
steps {
sh "./scripts/buildx --push ${buildxPushTags}"
}
}
stage('Docs / Comment') {
parallel {
stage('Docs Job') {
when {
allOf {
branch pattern: "^(develop|master)\$", comparator: "REGEXP"
not {
equals expected: 'UNSTABLE', actual: currentBuild.result
}
}
}
steps {
build wait: false, job: 'nginx-proxy-manager-docs', parameters: [string(name: 'docs_branch', value: "$BRANCH_NAME")]
}
}
stage('PR Comment') {
when {
allOf {
changeRequest()
not {
equals expected: 'UNSTABLE', actual: currentBuild.result
}
}
}
steps {
script {
npmGithubPrComment("""Docker Image for build ${BUILD_NUMBER} is available on [DockerHub](https://cloud.docker.com/repository/docker/nginxproxymanager/${IMAGE}-dev):
```
nginxproxymanager/${IMAGE}-dev:${BRANCH_LOWER}
```
> [!NOTE]
> Ensure you backup your NPM instance before testing this image! Especially if there are database changes.
> This is a different docker image namespace than the official image.
> [!WARNING]
> Changes and additions to DNS Providers require verification by at least 2 members of the community!
""", true)
}
}
}
}
}
}
post {
always {
sh 'echo Reverting ownership'
sh 'docker run --rm -v "$(pwd):/data" jc21/ci-tools chown -R "$(id -u):$(id -g)" /data'
printResult(true)
}
failure {
archiveArtifacts(artifacts: 'debug/**/*.*', allowEmptyArchive: true)
}
unstable {
archiveArtifacts(artifacts: 'debug/**/*.*', allowEmptyArchive: true)
}
}
}
def getVersion() {
ver = sh(script: 'cat .version', returnStdout: true)
return ver.trim()
}
def getCommit() {
ver = sh(script: 'git log -n 1 --format=%h', returnStdout: true)
return ver.trim()
}


@@ -1847,9 +1847,9 @@ netmask@^2.0.2:
integrity sha512-dBpDMdxv9Irdq66304OLfEmQ9tbNRFnFTuZiLo+bD+r332bBmMJ8GBLXklIXXgxd3+v9+KUnZaUR5PJMa75Gsg==
node-abi@^3.3.0:
version "3.78.0"
resolved "https://registry.yarnpkg.com/node-abi/-/node-abi-3.78.0.tgz#fd0ecbd0aa89857b98da06bd3909194abb0821ba"
integrity sha512-E2wEyrgX/CqvicaQYU3Ze1PFGjc4QYPGsjUrlYkqAE0WjHEZwgOsGMPMzkMse4LjJbDmaEuDX3CM036j5K2DSQ==
version "3.80.0"
resolved "https://registry.yarnpkg.com/node-abi/-/node-abi-3.80.0.tgz#d7390951f27caa129cceeec01e1c20fc9f07581c"
integrity sha512-LyPuZJcI9HVwzXK1GPxWNzrr+vr8Hp/3UqlmWxxh8p54U1ZbclOqbSog9lWHaCX+dBaiGi6n/hIX+mKu74GmPA==
dependencies:
semver "^7.3.5"


@@ -1,77 +0,0 @@
import { DateTimeFormat } from "src/locale";
import { afterAll, beforeAll, describe, expect, it } from "vitest";
describe("DateFormatter", () => {
// Keep a reference to the real Intl to restore later
const RealIntl = global.Intl;
const desiredTimeZone = "Europe/London";
const desiredLocale = "en-GB";
beforeAll(() => {
// Ensure Node-based libs using TZ behave deterministically
try {
process.env.TZ = desiredTimeZone;
} catch {
// ignore if not available
}
// Mock Intl.DateTimeFormat so formatting is stable regardless of host
const MockedDateTimeFormat = class extends RealIntl.DateTimeFormat {
constructor(
_locales?: string | string[],
options?: Intl.DateTimeFormatOptions,
) {
super(desiredLocale, {
...options,
timeZone: desiredTimeZone,
});
}
} as unknown as typeof Intl.DateTimeFormat;
global.Intl = {
...RealIntl,
DateTimeFormat: MockedDateTimeFormat,
};
});
afterAll(() => {
// Restore original Intl after tests
global.Intl = RealIntl;
});
it("format date from iso date", () => {
const value = "2024-01-01T00:00:00.000Z";
const text = DateTimeFormat(value);
expect(text).toBe("Monday, 01/01/2024, 12:00:00 am");
});
it("format date from unix timestamp number", () => {
const value = 1762476112;
const text = DateTimeFormat(value);
expect(text).toBe("Friday, 07/11/2025, 12:41:52 am");
});
it("format date from unix timestamp string", () => {
const value = "1762476112";
const text = DateTimeFormat(value);
expect(text).toBe("Friday, 07/11/2025, 12:41:52 am");
});
it("catch bad format from string", () => {
const value = "this is not a good date";
const text = DateTimeFormat(value);
expect(text).toBe("this is not a good date");
});
it("catch bad format from number", () => {
const value = -100;
const text = DateTimeFormat(value);
expect(text).toBe("-100");
});
it("catch bad format from number as string", () => {
const value = "-100";
const text = DateTimeFormat(value);
expect(text).toBe("-100");
});
});


@@ -1,36 +1,15 @@
import { fromUnixTime, intlFormat, parseISO } from "date-fns";
import { intlFormat, parseISO } from "date-fns";
const isUnixTimestamp = (value: unknown): boolean => {
if (typeof value !== "number" && typeof value !== "string") return false;
const num = Number(value);
if (!Number.isFinite(num)) return false;
// Check plausible Unix timestamp range: from 1970 to ~year 3000
// Support both seconds and milliseconds
if (num > 0 && num < 10000000000) return true; // seconds (<= 10 digits)
if (num >= 10000000000 && num < 32503680000000) return true; // milliseconds (<= 13 digits)
return false;
};
const DateTimeFormat = (value: string | number): string => {
if (typeof value !== "number" && typeof value !== "string") return `${value}`;
try {
const d = isUnixTimestamp(value)
? fromUnixTime(+value)
: parseISO(`${value}`);
return intlFormat(d, {
weekday: "long",
year: "numeric",
month: "numeric",
day: "numeric",
hour: "numeric",
minute: "numeric",
second: "numeric",
hour12: true,
});
} catch {
return `${value}`;
}
};
const DateTimeFormat = (isoDate: string) =>
intlFormat(parseISO(isoDate), {
weekday: "long",
year: "numeric",
month: "numeric",
day: "numeric",
hour: "numeric",
minute: "numeric",
second: "numeric",
hour12: true,
});
export { DateTimeFormat };
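
After this change the helper accepts only an ISO-8601 string: the unix-timestamp detection and the catch-all fallback exercised by the deleted test above are gone, so invalid input will now throw from date-fns rather than being echoed back. A minimal usage sketch, assuming `DateTimeFormat` is still re-exported from `src/locale` as it was when the removed test imported it:

```
import { DateTimeFormat } from "src/locale";

// A valid ISO string formats to a long, localised date/time, e.g.
// "Monday, 01/01/2024, 12:00:00 am" under en-GB in Europe/London
// (the exact output depends on the host locale and timezone).
console.log(DateTimeFormat("2024-01-01T00:00:00.000Z"));

// Numeric unix timestamps such as 1762476112 are no longer detected, and a
// string like "this is not a good date" now raises an error instead of being
// returned unchanged.
```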


@@ -30,20 +30,13 @@ const getLocale = (short = false) => {
if (short) {
return loc.slice(0, 2);
}
// finally, fallback
if (!loc) {
loc = "en";
}
return loc;
};
const cache = createIntlCache();
const initialMessages = loadMessages(getLocale());
let intl = createIntl(
{ locale: getLocale(), messages: initialMessages },
cache,
);
let intl = createIntl({ locale: getLocale(), messages: initialMessages }, cache);
const changeLocale = (locale: string): void => {
const messages = loadMessages(locale);
@@ -83,12 +76,4 @@ const T = ({
);
};
export {
localeOptions,
getFlagCodeForLocale,
getLocale,
createIntl,
changeLocale,
intl,
T,
};
export { localeOptions, getFlagCodeForLocale, getLocale, createIntl, changeLocale, intl, T };
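
The hunks above show the multi-line `createIntl` call and the export list collapsing to single lines. A small consumer-side sketch of the exported helpers, based only on the signatures visible here; the import path follows the deleted test and `"de"` is just an illustrative locale:

```
import { changeLocale, getLocale } from "src/locale";

// getLocale() returns the detected locale; passing true truncates it to the
// two-letter language code via loc.slice(0, 2) as in the hunk above.
console.log(getLocale());     // e.g. "en-GB"
console.log(getLocale(true)); // e.g. "en"

// changeLocale(locale) loads that locale's message bundle (loadMessages above)
// and switches the active intl instance to it.
changeLocale("de");
```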


@@ -16,7 +16,7 @@ if hash docker 2>/dev/null; then
-e NODE_OPTIONS=--openssl-legacy-provider \
-v "$(pwd)/frontend:/app/frontend" \
-w /app/frontend "${DOCKER_IMAGE}" \
sh -c "yarn install && yarn lint && yarn vitest run && yarn build && chown -R $(id -u):$(id -g) /app/frontend"
sh -c "yarn install && yarn lint && yarn build && chown -R $(id -u):$(id -g) /app/frontend"
echo -e "${BLUE} ${GREEN}Building Frontend Complete${RESET}"
else