From b9b166407ea811607164ce81517aabc8ab554730 Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 09:44:01 -0400 Subject: [PATCH 01/59] =?UTF-8?q?=E2=9C=A8=20feat(registry):=20add=20mTLS?= =?UTF-8?q?=20client=20certificate=20support?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Add CLIENTCERT and CLIENTKEY configuration options for self-hosted registries that require mutual TLS authentication. Both options must be provided together (enforced by Joi .and() constraint). Supported registries: artifactory, custom, forgejo, gitea, harbor, nexus. Based on #135 by @Waler with fixes: - Fixed config key check (certfile → clientcert) - Fixed typo (mutal → mutual) - Added Custom registry schema support (was missing) - Added full test coverage for mTLS agent and schema validation Co-Authored-By: Waler --- app/registries/BaseRegistry.ts | 18 +++- .../providers/custom/Custom.test.ts | 32 ++++++ app/registries/providers/custom/Custom.ts | 5 +- .../providers/shared/SelfHostedBasic.test.ts | 102 ++++++++++++++++++ .../providers/shared/SelfHostedBasic.ts | 5 +- .../registries/artifactory/index.mdx | 2 + .../configuration/registries/custom/index.mdx | 2 + .../registries/forgejo/index.mdx | 2 + .../configuration/registries/gitea/index.mdx | 2 + .../configuration/registries/harbor/index.mdx | 2 + .../configuration/registries/nexus/index.mdx | 2 + 11 files changed, 171 insertions(+), 3 deletions(-) diff --git a/app/registries/BaseRegistry.ts b/app/registries/BaseRegistry.ts index fd00601b..7d2abf77 100644 --- a/app/registries/BaseRegistry.ts +++ b/app/registries/BaseRegistry.ts @@ -76,7 +76,8 @@ class BaseRegistry extends Registry { private getHttpsAgent() { const shouldDisableTlsVerification = this.configuration?.insecure === true; const hasCaFile = Boolean(this.configuration?.cafile); - if (!shouldDisableTlsVerification && !hasCaFile) { + const hasMutualTls = 
Boolean(this.configuration?.clientcert); + if (!shouldDisableTlsVerification && !hasCaFile && !hasMutualTls) { return undefined; } @@ -92,10 +93,25 @@ class BaseRegistry extends Registry { ca = fs.readFileSync(caPath); } + let cert; + let key; + if (hasMutualTls) { + const certPath = resolveConfiguredPath(this.configuration.clientcert, { + label: `registry ${this.getId()} client certificate file path`, + }); + cert = fs.readFileSync(certPath); + const keyPath = resolveConfiguredPath(this.configuration.clientkey, { + label: `registry ${this.getId()} client key file path`, + }); + key = fs.readFileSync(keyPath); + } + // Intentional opt-in for self-hosted registries with private/self-signed cert chains. // lgtm[js/disabling-certificate-validation] this.httpsAgent = new https.Agent({ ca, + cert, + key, rejectUnauthorized: !shouldDisableTlsVerification, }); return this.httpsAgent; diff --git a/app/registries/providers/custom/Custom.test.ts b/app/registries/providers/custom/Custom.test.ts index a31deedc..ab84d041 100644 --- a/app/registries/providers/custom/Custom.test.ts +++ b/app/registries/providers/custom/Custom.test.ts @@ -43,6 +43,38 @@ test('validatedConfiguration should accept cafile and insecure tls options', asy }); }); +test('validatedConfiguration should accept mTLS client certificate options', async () => { + expect( + custom.validateConfiguration({ + url: 'http://localhost:5000', + clientcert: '/certs/client.pem', + clientkey: '/certs/client-key.pem', + }), + ).toStrictEqual({ + url: 'http://localhost:5000', + clientcert: '/certs/client.pem', + clientkey: '/certs/client-key.pem', + }); +}); + +test('validatedConfiguration should reject clientcert without clientkey', async () => { + expect(() => + custom.validateConfiguration({ + url: 'http://localhost:5000', + clientcert: '/certs/client.pem', + }), + ).toThrow(); +}); + +test('validatedConfiguration should reject clientkey without clientcert', async () => { + expect(() => + custom.validateConfiguration({ + 
url: 'http://localhost:5000', + clientkey: '/certs/client-key.pem', + }), + ).toThrow(); +}); + test('validatedConfiguration should throw error when auth is not base64', async () => { expect(() => { custom.validateConfiguration({ diff --git a/app/registries/providers/custom/Custom.ts b/app/registries/providers/custom/Custom.ts index 68f1cdda..a709b3e7 100644 --- a/app/registries/providers/custom/Custom.ts +++ b/app/registries/providers/custom/Custom.ts @@ -18,10 +18,13 @@ class Custom extends BaseRegistry { auth: authSchema, cafile: this.joi.string(), insecure: this.joi.boolean(), + clientcert: this.joi.string(), + clientkey: this.joi.string(), }) .and('login', 'password') .without('login', 'auth') - .without('password', 'auth'); + .without('password', 'auth') + .and('clientcert', 'clientkey'); return this.joi.alternatives([this.joi.string().allow(''), customConfigSchema]); } diff --git a/app/registries/providers/shared/SelfHostedBasic.test.ts b/app/registries/providers/shared/SelfHostedBasic.test.ts index 7fb28758..64b390d2 100644 --- a/app/registries/providers/shared/SelfHostedBasic.test.ts +++ b/app/registries/providers/shared/SelfHostedBasic.test.ts @@ -185,6 +185,41 @@ test('validateConfiguration should allow cafile and insecure options', async () }); }); +test('validateConfiguration should allow mTLS client certificate options', async () => { + const registry = new SelfHostedBasic(); + expect( + registry.validateConfiguration({ + url: 'https://registry.acme.com', + clientcert: '/certs/client.pem', + clientkey: '/certs/client-key.pem', + }), + ).toStrictEqual({ + url: 'https://registry.acme.com', + clientcert: '/certs/client.pem', + clientkey: '/certs/client-key.pem', + }); +}); + +test('validateConfiguration should reject clientcert without clientkey', async () => { + const registry = new SelfHostedBasic(); + expect(() => + registry.validateConfiguration({ + url: 'https://registry.acme.com', + clientcert: '/certs/client.pem', + }), + ).toThrow(); +}); + 
+test('validateConfiguration should reject clientkey without clientcert', async () => { + const registry = new SelfHostedBasic(); + expect(() => + registry.validateConfiguration({ + url: 'https://registry.acme.com', + clientkey: '/certs/client-key.pem', + }), + ).toThrow(); +}); + test('authenticate should set httpsAgent with rejectUnauthorized=false when insecure=true', async () => { const registry = new SelfHostedBasic(); registry.configuration = { @@ -231,3 +266,70 @@ test('authenticate should load CA file into httpsAgent when cafile is configured fs.rmSync(tempDir, { recursive: true, force: true }); } }); + +test('authenticate should load client cert and key into httpsAgent for mTLS', async () => { + const registry = new SelfHostedBasic(); + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'drydock-selfhosted-')); + const certPath = path.join(tempDir, 'client.pem'); + const keyPath = path.join(tempDir, 'client-key.pem'); + + try { + fs.writeFileSync(certPath, 'test-client-cert'); + fs.writeFileSync(keyPath, 'test-client-key'); + registry.configuration = { + url: 'https://registry.acme.com', + clientcert: certPath, + clientkey: keyPath, + }; + + const result = await registry.authenticate( + { + name: 'library/nginx', + registry: { url: 'registry.acme.com' }, + }, + { headers: {} }, + ); + + expect(result.httpsAgent).toBeDefined(); + expect(result.httpsAgent.options.rejectUnauthorized).toBe(true); + expect(result.httpsAgent.options.cert.toString('utf-8')).toBe('test-client-cert'); + expect(result.httpsAgent.options.key.toString('utf-8')).toBe('test-client-key'); + } finally { + fs.rmSync(tempDir, { recursive: true, force: true }); + } +}); + +test('authenticate should combine CA file and mTLS client cert in httpsAgent', async () => { + const registry = new SelfHostedBasic(); + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'drydock-selfhosted-')); + const caPath = path.join(tempDir, 'ca.pem'); + const certPath = path.join(tempDir, 'client.pem'); + 
const keyPath = path.join(tempDir, 'client-key.pem'); + + try { + fs.writeFileSync(caPath, 'test-ca-content'); + fs.writeFileSync(certPath, 'test-client-cert'); + fs.writeFileSync(keyPath, 'test-client-key'); + registry.configuration = { + url: 'https://registry.acme.com', + cafile: caPath, + clientcert: certPath, + clientkey: keyPath, + }; + + const result = await registry.authenticate( + { + name: 'library/nginx', + registry: { url: 'registry.acme.com' }, + }, + { headers: {} }, + ); + + expect(result.httpsAgent).toBeDefined(); + expect(result.httpsAgent.options.ca.toString('utf-8')).toBe('test-ca-content'); + expect(result.httpsAgent.options.cert.toString('utf-8')).toBe('test-client-cert'); + expect(result.httpsAgent.options.key.toString('utf-8')).toBe('test-client-key'); + } finally { + fs.rmSync(tempDir, { recursive: true, force: true }); + } +}); diff --git a/app/registries/providers/shared/SelfHostedBasic.ts b/app/registries/providers/shared/SelfHostedBasic.ts index 71e4e03b..e110794c 100644 --- a/app/registries/providers/shared/SelfHostedBasic.ts +++ b/app/registries/providers/shared/SelfHostedBasic.ts @@ -18,10 +18,13 @@ class SelfHostedBasic extends BaseRegistry { auth: authSchema, cafile: this.joi.string(), insecure: this.joi.boolean(), + clientcert: this.joi.string(), + clientkey: this.joi.string(), }) .and('login', 'password') .without('login', 'auth') - .without('password', 'auth'); + .without('password', 'auth') + .and('clientcert', 'clientkey'); } maskConfiguration() { diff --git a/content/docs/current/configuration/registries/artifactory/index.mdx b/content/docs/current/configuration/registries/artifactory/index.mdx index b938e83b..3c41a628 100644 --- a/content/docs/current/configuration/registries/artifactory/index.mdx +++ b/content/docs/current/configuration/registries/artifactory/index.mdx @@ -19,6 +19,8 @@ The `artifactory` registry lets you configure a [JFrog Artifactory](https://jfro | `DD_REGISTRY_ARTIFACTORY_{REGISTRY_NAME}_AUTH` | ⚪ | 
Base64 encoded `login:password` string | DD_REGISTRY_ARTIFACTORY_\{REGISTRY_NAME\}_LOGIN/PASSWORD must not be defined | | | `DD_REGISTRY_ARTIFACTORY_{REGISTRY_NAME}_CAFILE` | ⚪ | Path to custom CA certificate file | | | | `DD_REGISTRY_ARTIFACTORY_{REGISTRY_NAME}_INSECURE` | ⚪ | Allow insecure (non-TLS) connections | `true`, `false` | `false` | +| `DD_REGISTRY_ARTIFACTORY_{REGISTRY_NAME}_CLIENTCERT` | ⚪ | Path to client certificate file for mTLS | | | +| `DD_REGISTRY_ARTIFACTORY_{REGISTRY_NAME}_CLIENTKEY` | ⚪ | Path to client key file for mTLS | DD_REGISTRY_ARTIFACTORY_\{REGISTRY_NAME\}_CLIENTCERT must be defined | | ## Examples diff --git a/content/docs/current/configuration/registries/custom/index.mdx b/content/docs/current/configuration/registries/custom/index.mdx index 9782b337..32f385e5 100644 --- a/content/docs/current/configuration/registries/custom/index.mdx +++ b/content/docs/current/configuration/registries/custom/index.mdx @@ -19,6 +19,8 @@ The `custom` registry lets you configure a self-hosted [Docker Registry](https:/ | `DD_REGISTRY_CUSTOM_{REGISTRY_NAME}_AUTH` | ⚪ | Base64-encoded `login:password` string | DD_REGISTRY_CUSTOM_\{REGISTRY_NAME\}_LOGIN/PASSWORD must not be defined | | | `DD_REGISTRY_CUSTOM_{REGISTRY_NAME}_CAFILE` | ⚪ | Path to custom CA certificate file | | | | `DD_REGISTRY_CUSTOM_{REGISTRY_NAME}_INSECURE` | ⚪ | Allow insecure (non-TLS) connections | `true`, `false` | `false` | +| `DD_REGISTRY_CUSTOM_{REGISTRY_NAME}_CLIENTCERT` | ⚪ | Path to client certificate file for mTLS | | | +| `DD_REGISTRY_CUSTOM_{REGISTRY_NAME}_CLIENTKEY` | ⚪ | Path to client key file for mTLS | DD_REGISTRY_CUSTOM_\{REGISTRY_NAME\}_CLIENTCERT must be defined | | ## Examples diff --git a/content/docs/current/configuration/registries/forgejo/index.mdx b/content/docs/current/configuration/registries/forgejo/index.mdx index 5ea62462..8323e77f 100644 --- a/content/docs/current/configuration/registries/forgejo/index.mdx +++ 
b/content/docs/current/configuration/registries/forgejo/index.mdx @@ -19,6 +19,8 @@ The `forgejo` registry lets you configure a self-hosted [Forgejo](https://forgej | `DD_REGISTRY_FORGEJO_{REGISTRY_NAME}_AUTH` | ⚪ | Base64-encoded `login:password` string | DD_REGISTRY_FORGEJO_\{REGISTRY_NAME\}_LOGIN/PASSWORD must not be defined | | | `DD_REGISTRY_FORGEJO_{REGISTRY_NAME}_CAFILE` | ⚪ | Path to custom CA certificate file | | | | `DD_REGISTRY_FORGEJO_{REGISTRY_NAME}_INSECURE` | ⚪ | Allow insecure (non-TLS) connections | `true`, `false` | `false` | +| `DD_REGISTRY_FORGEJO_{REGISTRY_NAME}_CLIENTCERT` | ⚪ | Path to client certificate file for mTLS | | | +| `DD_REGISTRY_FORGEJO_{REGISTRY_NAME}_CLIENTKEY` | ⚪ | Path to client key file for mTLS | DD_REGISTRY_FORGEJO_\{REGISTRY_NAME\}_CLIENTCERT must be defined | | ## Examples diff --git a/content/docs/current/configuration/registries/gitea/index.mdx b/content/docs/current/configuration/registries/gitea/index.mdx index 6c8500b8..762292d1 100644 --- a/content/docs/current/configuration/registries/gitea/index.mdx +++ b/content/docs/current/configuration/registries/gitea/index.mdx @@ -19,6 +19,8 @@ The `gitea` registry lets you configure a self-hosted [Gitea](https://gitea.com) | `DD_REGISTRY_GITEA_{REGISTRY_NAME}_AUTH` | ⚪ | Base64-encoded `login:password` string | DD_REGISTRY_GITEA_\{REGISTRY_NAME\}_LOGIN/PASSWORD must not be defined | | | `DD_REGISTRY_GITEA_{REGISTRY_NAME}_CAFILE` | ⚪ | Path to custom CA certificate file | | | | `DD_REGISTRY_GITEA_{REGISTRY_NAME}_INSECURE` | ⚪ | Allow insecure (non-TLS) connections | `true`, `false` | `false` | +| `DD_REGISTRY_GITEA_{REGISTRY_NAME}_CLIENTCERT` | ⚪ | Path to client certificate file for mTLS | | | +| `DD_REGISTRY_GITEA_{REGISTRY_NAME}_CLIENTKEY` | ⚪ | Path to client key file for mTLS | DD_REGISTRY_GITEA_\{REGISTRY_NAME\}_CLIENTCERT must be defined | | ## Examples diff --git a/content/docs/current/configuration/registries/harbor/index.mdx 
b/content/docs/current/configuration/registries/harbor/index.mdx index d674c49e..5b0cec3d 100644 --- a/content/docs/current/configuration/registries/harbor/index.mdx +++ b/content/docs/current/configuration/registries/harbor/index.mdx @@ -19,6 +19,8 @@ The `harbor` registry lets you configure a self-hosted [Harbor](https://goharbor | `DD_REGISTRY_HARBOR_{REGISTRY_NAME}_AUTH` | ⚪ | Base64 encoded `login:password` string | DD_REGISTRY_HARBOR_\{REGISTRY_NAME\}_LOGIN/PASSWORD must not be defined | | | `DD_REGISTRY_HARBOR_{REGISTRY_NAME}_CAFILE` | ⚪ | Path to custom CA certificate file | | | | `DD_REGISTRY_HARBOR_{REGISTRY_NAME}_INSECURE` | ⚪ | Allow insecure (non-TLS) connections | `true`, `false` | `false` | +| `DD_REGISTRY_HARBOR_{REGISTRY_NAME}_CLIENTCERT` | ⚪ | Path to client certificate file for mTLS | | | +| `DD_REGISTRY_HARBOR_{REGISTRY_NAME}_CLIENTKEY` | ⚪ | Path to client key file for mTLS | DD_REGISTRY_HARBOR_\{REGISTRY_NAME\}_CLIENTCERT must be defined | | ## Examples diff --git a/content/docs/current/configuration/registries/nexus/index.mdx b/content/docs/current/configuration/registries/nexus/index.mdx index 8d30d443..2a61489f 100644 --- a/content/docs/current/configuration/registries/nexus/index.mdx +++ b/content/docs/current/configuration/registries/nexus/index.mdx @@ -19,6 +19,8 @@ The `nexus` registry lets you configure a [Sonatype Nexus](https://www.sonatype. 
| `DD_REGISTRY_NEXUS_{REGISTRY_NAME}_AUTH` | ⚪ | Base64 encoded `login:password` string | DD_REGISTRY_NEXUS_\{REGISTRY_NAME\}_LOGIN/PASSWORD must not be defined | | | `DD_REGISTRY_NEXUS_{REGISTRY_NAME}_CAFILE` | ⚪ | Path to custom CA certificate file | | | | `DD_REGISTRY_NEXUS_{REGISTRY_NAME}_INSECURE` | ⚪ | Allow insecure (non-TLS) connections | `true`, `false` | `false` | +| `DD_REGISTRY_NEXUS_{REGISTRY_NAME}_CLIENTCERT` | ⚪ | Path to client certificate file for mTLS | | | +| `DD_REGISTRY_NEXUS_{REGISTRY_NAME}_CLIENTKEY` | ⚪ | Path to client key file for mTLS | DD_REGISTRY_NEXUS_\{REGISTRY_NAME\}_CLIENTCERT must be defined | | ## Examples From d62ec7926a2c12ed0dd1169d014ac7932ab604c6 Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 10:18:08 -0400 Subject: [PATCH 02/59] =?UTF-8?q?=F0=9F=93=9D=20docs(readme):=20replace=20?= =?UTF-8?q?screenshots=20with=20live=20demo=20link?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Keep dashboard light/dark screenshots as a preview, remove the other 5 screenshot sections (containers, detail, security, login, mobile) and replace with a prominent link to demo.drydock.codeswhat.com. --- README.md | 82 ++++++------------------------------------------------- 1 file changed, 8 insertions(+), 74 deletions(-) diff --git a/README.md b/README.md index be47edfa..8be5788d 100644 --- a/README.md +++ b/README.md @@ -52,7 +52,7 @@ - [📖 Documentation](https://drydock.codeswhat.com/docs) - [🚀 Quick Start](#quick-start) -- [📸 Screenshots](#screenshots) +- [📸 Screenshots & Live Demo](#screenshots) - [✨ Features](#features) - [🔌 Supported Integrations](#supported-integrations) - [⚖️ Feature Comparison](#feature-comparison) @@ -92,10 +92,8 @@ See the [Quick Start guide](https://drydock.codeswhat.com/docs/quickstart) for D
-

📸 Screenshots

+

📸 Screenshots & Live Demo

-
-Dashboard @@ -106,81 +104,16 @@ See the [Quick Start guide](https://drydock.codeswhat.com/docs/quickstart) for D
LightDashboard Dark
-
-
-Containers - - - - - - - - - -
LightDark
Containers LightContainers Dark
-
+
-
-Container Detail - - - - - - - - - -
LightDark
Container Detail LightContainer Detail Dark
-
+**Why look at screenshots when you can experience it yourself?** -
-Security - - - - - - - - - -
LightDark
Security LightSecurity Dark
-
+Try the Live Demo -
-Login - - - - - - - - - -
LightDark
Login LightLogin Dark
-
+Fully interactive — real UI, mock data, no install required. Runs entirely in-browser. -
-Mobile Responsive - - - - - - - - - - - - - -
Dashboard LightDashboard DarkContainers LightContainers Dark
Mobile Dashboard LightMobile Dashboard DarkMobile Containers LightMobile Containers Dark
-
+

@@ -388,6 +321,7 @@ Drop-in replacement — swap the image, restart, done. All `WUD_*` env vars and | Resource | Link | | --- | --- | | Website | [drydock.codeswhat.com](https://drydock.codeswhat.com/) | +| Live Demo | [demo.drydock.codeswhat.com](https://demo.drydock.codeswhat.com) | | Docs | [drydock.codeswhat.com/docs](https://drydock.codeswhat.com/docs) | | Configuration | [Configuration](https://drydock.codeswhat.com/docs/configuration) | | Quick Start | [Quick Start](https://drydock.codeswhat.com/docs/quickstart) | From c5c41b533598c9799a1a4be82d8a963fbb5391f5 Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 13:20:55 -0400 Subject: [PATCH 03/59] =?UTF-8?q?=E2=9A=A1=20perf(mqtt):=20default=20HASS?= =?UTF-8?q?=5FATTRIBUTES=20preset=20to=20short?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Large SBOM documents, scan vulnerabilities, container details, and labels cause massive delays in Home Assistant when published via MQTT. Change the default from full to short so new installations get a performant experience out of the box. Users who need the full payload can set DD_TRIGGER_MQTT_{name}_HASS_ATTRIBUTES=full explicitly. 
--- app/triggers/providers/mqtt/Mqtt.test.ts | 23 ++++++++++--------- app/triggers/providers/mqtt/Mqtt.ts | 8 +++---- .../configuration/triggers/mqtt/index.mdx | 6 ++--- 3 files changed, 19 insertions(+), 18 deletions(-) diff --git a/app/triggers/providers/mqtt/Mqtt.test.ts b/app/triggers/providers/mqtt/Mqtt.test.ts index fbf20f30..21606958 100644 --- a/app/triggers/providers/mqtt/Mqtt.test.ts +++ b/app/triggers/providers/mqtt/Mqtt.test.ts @@ -31,7 +31,7 @@ const configurationValid = { discovery: false, enabled: false, prefix: 'homeassistant', - attributes: 'full', + attributes: 'short', filter: { include: '', exclude: '', @@ -115,7 +115,7 @@ test('validateConfiguration should default hass.discovery to true when hass.enab enabled: true, prefix: 'homeassistant', discovery: true, - attributes: 'full', + attributes: 'short', filter: { include: '', exclude: '', @@ -165,7 +165,7 @@ test('initTrigger should init Mqtt client', async () => { enabled: true, discovery: true, prefix: 'homeassistant', - attributes: 'full', + attributes: 'short', filter: { include: '', exclude: '', @@ -248,7 +248,7 @@ test('initTrigger should read TLS files when configured', async () => { enabled: false, discovery: false, prefix: 'homeassistant', - attributes: 'full', + attributes: 'short', filter: { include: '', exclude: '', @@ -306,7 +306,7 @@ test('initTrigger should execute registered container event callbacks', async () enabled: false, discovery: false, prefix: 'homeassistant', - attributes: 'full', + attributes: 'short', filter: { include: '', exclude: '', @@ -335,7 +335,7 @@ test('deregister then initTrigger should not duplicate container event callbacks enabled: false, discovery: false, prefix: 'homeassistant', - attributes: 'full', + attributes: 'short', filter: { include: '', exclude: '', @@ -367,12 +367,12 @@ describe('hass.attributes validation', () => { expect(validated.hass.attributes).toBe('short'); }); - test('should default hass.attributes to full', () => { + test('should 
default hass.attributes to short', () => { const validated = mqtt.validateConfiguration({ url: configurationValid.url, clientid: 'dd', }); - expect(validated.hass.attributes).toBe('full'); + expect(validated.hass.attributes).toBe('short'); }); test('should reject invalid hass.attributes value', () => { @@ -601,7 +601,7 @@ describe('trigger filtering', () => { expect(publishedPayload).toHaveProperty('labels_com_docker_compose_project', 'app'); }); - test('should default hass.attributes to full when not provided in runtime config', async () => { + test('should default hass.attributes to short when not provided in runtime config', async () => { mqtt.configuration = { topic: 'dd/container', exclude: '', @@ -615,7 +615,8 @@ describe('trigger filtering', () => { await mqtt.trigger(containerWithSecurity); const publishedPayload = JSON.parse(mqtt.client.publish.mock.calls[0][1]); - expect(publishedPayload).toHaveProperty('security_scan_vulnerabilities_0_id', 'CVE-2024-0001'); - expect(publishedPayload).toHaveProperty('details_ports_0', '80/tcp'); + expect(publishedPayload).not.toHaveProperty('security_scan_vulnerabilities_0_id'); + expect(publishedPayload).not.toHaveProperty('details_ports_0'); + expect(publishedPayload).toHaveProperty('name', 'filtered-test'); }); }); diff --git a/app/triggers/providers/mqtt/Mqtt.ts b/app/triggers/providers/mqtt/Mqtt.ts index 50e80ca9..e53cc70c 100644 --- a/app/triggers/providers/mqtt/Mqtt.ts +++ b/app/triggers/providers/mqtt/Mqtt.ts @@ -86,7 +86,7 @@ class Mqtt extends Trigger { enabled: false, prefix: hassDefaultPrefix, discovery: false, - attributes: 'full', + attributes: 'short', filter: { include: '', exclude: '', @@ -144,7 +144,7 @@ class Mqtt extends Trigger { attributes: this.joi .string() .valid(...HASS_ATTRIBUTE_PRESET_VALUES) - .default('full'), + .default('short'), filter: this.joi .object({ include: this.joi.string().allow('').default(''), @@ -159,7 +159,7 @@ class Mqtt extends Trigger { enabled: false, prefix: 
hassDefaultPrefix, discovery: false, - attributes: 'full', + attributes: 'short', filter: { include: '', exclude: '', @@ -286,7 +286,7 @@ class Mqtt extends Trigger { return { mode: 'exclude', stage: 'container', - paths: HASS_ATTRIBUTE_PRESETS[this.configuration.hass?.attributes ?? 'full'], + paths: HASS_ATTRIBUTE_PRESETS[this.configuration.hass?.attributes ?? 'short'], }; } diff --git a/content/docs/current/configuration/triggers/mqtt/index.mdx b/content/docs/current/configuration/triggers/mqtt/index.mdx index af37f6c3..cae85941 100644 --- a/content/docs/current/configuration/triggers/mqtt/index.mdx +++ b/content/docs/current/configuration/triggers/mqtt/index.mdx @@ -22,7 +22,7 @@ The `mqtt` trigger lets you send container update notifications to an MQTT broke | `DD_TRIGGER_MQTT_{trigger_name}_HASS_ENABLED` | ⚪ | Enable [Home-assistant](https://www.home-assistant.io/) integration and deliver additional topics | `true`, `false` | `false` | | `DD_TRIGGER_MQTT_{trigger_name}_HASS_DISCOVERY` | ⚪ | Enable [Home-assistant](https://www.home-assistant.io/) integration including discovery | `true`, `false` | `false` | | `DD_TRIGGER_MQTT_{trigger_name}_HASS_PREFIX` | ⚪ | Base topic for hass entity discovery | | `homeassistant` | -| `DD_TRIGGER_MQTT_{trigger_name}_HASS_ATTRIBUTES` | ⚪ | Attribute preset controlling which container fields are included in MQTT payloads. `full` sends everything; `short` excludes large fields like SBOM documents, scan vulnerabilities, details, and labels. | `full`, `short` | `full` | +| `DD_TRIGGER_MQTT_{trigger_name}_HASS_ATTRIBUTES` | ⚪ | Attribute preset controlling which container fields are included in MQTT payloads. `full` sends everything; `short` excludes large fields like SBOM documents, scan vulnerabilities, details, and labels. | `full`, `short` | `short` | | `DD_TRIGGER_MQTT_{trigger_name}_HASS_FILTER_INCLUDE` | ⚪ | Comma-separated list of flattened MQTT attribute keys to keep (include-mode). When set, only these keys are published. 
| Comma-separated flattened keys | | | `DD_TRIGGER_MQTT_{trigger_name}_HASS_FILTER_EXCLUDE` | ⚪ | Comma-separated list of flattened MQTT attribute keys to remove (exclude-mode). Used when `HASS_FILTER_INCLUDE` is empty. | Comma-separated flattened keys | | | `DD_TRIGGER_MQTT_{trigger_name}_EXCLUDE` | ⚪ | Legacy comma-separated dot-paths to exclude from the nested container object before flattening. Used only when both `HASS_FILTER_INCLUDE` and `HASS_FILTER_EXCLUDE` are empty. | Comma-separated dot-paths | | @@ -252,7 +252,7 @@ When no explicit include/exclude list is configured, `HASS_ATTRIBUTES` is used: | Preset | Behavior | | --- | --- | -| `full` (default) | Sends the entire container object — no filtering | -| `short` | Excludes `security.sbom.documents`, `security.updateSbom.documents`, `security.scan.vulnerabilities`, `security.updateScan.vulnerabilities`, `details`, and `labels` | +| `full` | Sends the entire container object — no filtering | +| `short` (default) | Excludes `security.sbom.documents`, `security.updateSbom.documents`, `security.scan.vulnerabilities`, `security.updateScan.vulnerabilities`, `details`, and `labels` | `HASS_FILTER_INCLUDE` and `HASS_FILTER_EXCLUDE` match flattened MQTT keys (snake_case + underscore delimiter), because filtering is applied after flattening to the payload shape Home Assistant receives. From c93cc2bc3d31efcf79d2930be5071c07d1d1c804 Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 13:21:06 -0400 Subject: [PATCH 04/59] =?UTF-8?q?=F0=9F=90=9B=20fix(ui):=20retry=20server?= =?UTF-8?q?=20feature=20flags=20after=20pre-login=20fetch=20failure?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The useServerFeatures composable fired a bootstrap fetch at app startup before the user was authenticated. When the API returned 401, the catch block set loaded=true which prevented any subsequent retry. 
After login, container actions, delete, and other feature-gated UI elements were permanently disabled for the session. Move loaded=true into the success path so failed fetches leave loaded as false, allowing autoLoad consumers to retry after authentication completes. --- CHANGELOG.md | 8 ++++++++ ui/src/composables/useServerFeatures.ts | 2 +- .../composables/useServerFeatures.spec.ts | 20 +++++++++++++++++++ 3 files changed, 29 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 49002a65..e306a14c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +### Fixed + +- **Server feature flags not loaded after login** — Feature flags (`containeractions`, `delete`) were permanently stuck as disabled when authentication was required, because the pre-login bootstrap fetch failure marked the flags as "loaded" and never retried. Now failed fetches allow automatic retry after login. ([#120](https://github.com/CodesWhat/drydock/discussions/120)) + +### Changed + +- **MQTT HASS_ATTRIBUTES default changed to `short`** — The MQTT trigger `HASS_ATTRIBUTES` preset now defaults to `short` instead of `full`, excluding large SBOM documents, scan vulnerabilities, details, and labels from Home Assistant entity payloads. Users who need the full payload can set `DD_TRIGGER_MQTT_{name}_HASS_ATTRIBUTES=full` explicitly. 
+ ## [1.4.0] — 2026-02-28 ### Added diff --git a/ui/src/composables/useServerFeatures.ts b/ui/src/composables/useServerFeatures.ts index cfb2a691..e883fc33 100644 --- a/ui/src/composables/useServerFeatures.ts +++ b/ui/src/composables/useServerFeatures.ts @@ -43,11 +43,11 @@ async function loadServerFeatures(): Promise { try { const serverData = await getServer(); featureFlags.value = normalizeFeatureFlags(serverData?.configuration?.feature); + loaded.value = true; } catch (e: unknown) { featureFlags.value = {}; error.value = errorMessage(e, 'Failed to load server feature configuration'); } finally { - loaded.value = true; loading.value = false; loadPromise = null; } diff --git a/ui/tests/composables/useServerFeatures.spec.ts b/ui/tests/composables/useServerFeatures.spec.ts index ff890065..cacb3435 100644 --- a/ui/tests/composables/useServerFeatures.spec.ts +++ b/ui/tests/composables/useServerFeatures.spec.ts @@ -55,7 +55,27 @@ describe('useServerFeatures', () => { ); expect(features.featureFlags.value).toEqual({}); expect(features.error.value).toBe('server unavailable'); + expect(features.loaded.value).toBe(false); + }); + + it('retries loading after a failed fetch succeeds on next attempt', async () => { + mockGetServer.mockRejectedValueOnce(new Error('401 Unauthorized')).mockResolvedValueOnce({ + configuration: { + feature: { containeractions: true, delete: true }, + }, + }); + + const { useServerFeatures } = await loadComposable(); + const features = useServerFeatures({ autoLoad: false }); + + await features.loadServerFeatures(); + expect(features.loaded.value).toBe(false); + expect(features.containerActionsEnabled.value).toBe(false); + + await features.loadServerFeatures(); expect(features.loaded.value).toBe(true); + expect(features.containerActionsEnabled.value).toBe(true); + expect(mockGetServer).toHaveBeenCalledTimes(2); }); it('coalesces concurrent loads and caches the first successful payload', async () => { From 4dc74128b9816a9cf182626fbb330c5f280a65ae 
Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 13:29:59 -0400 Subject: [PATCH 05/59] =?UTF-8?q?=E2=9C=A8=20feat(audit):=20record=20conta?= =?UTF-8?q?iner-update=20events=20from=20external=20lifecycle=20changes?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Container start/stop/restart events from external tools (Portainer, CLI) now generate container-update audit entries. Previously the Docker event listener updated the container store but silently skipped audit logging, so the audit trail only showed Drydock-initiated actions. Subscribe to the existing containerUpdated event in the audit subscriptions and emit a container-update entry with the new status. --- CHANGELOG.md | 4 +++ app/event/audit-subscriptions.test.ts | 1 + app/event/audit-subscriptions.ts | 14 +++++++++++ app/event/index.audit.test.ts | 35 +++++++++++++++++++++++++++ app/event/index.ts | 1 + 5 files changed, 55 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index e306a14c..d8085dc7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +### Added + +- **Audit log for container state changes** — External container lifecycle events (start, stop, restart via Portainer or CLI) now generate `container-update` audit entries with the new status, so the audit log reflects all state changes, not just Drydock-initiated actions. ([#120](https://github.com/CodesWhat/drydock/discussions/120)) + ### Fixed - **Server feature flags not loaded after login** — Feature flags (`containeractions`, `delete`) were permanently stuck as disabled when authentication was required, because the pre-login bootstrap fetch failure marked the flags as "loaded" and never retried. Now failed fetches allow automatic retry after login. 
([#120](https://github.com/CodesWhat/drydock/discussions/120)) diff --git a/app/event/audit-subscriptions.test.ts b/app/event/audit-subscriptions.test.ts index 612cf49a..e8d923e7 100644 --- a/app/event/audit-subscriptions.test.ts +++ b/app/event/audit-subscriptions.test.ts @@ -61,6 +61,7 @@ function setupAuditSubscriptions(): { handlers.agentDisconnected = handler; }), registerContainerAdded: registerEvent(() => {}), + registerContainerUpdated: registerEvent(() => {}), registerContainerRemoved: registerEvent(() => {}), }; diff --git a/app/event/audit-subscriptions.ts b/app/event/audit-subscriptions.ts index 89e8f880..a8e684c7 100644 --- a/app/event/audit-subscriptions.ts +++ b/app/event/audit-subscriptions.ts @@ -31,6 +31,7 @@ export interface AuditSubscriptionRegistrars { registerSecurityAlert: OrderedEventRegistrarFn; registerAgentDisconnected: OrderedEventRegistrarFn; registerContainerAdded: EventRegistrarFn; + registerContainerUpdated: EventRegistrarFn; registerContainerRemoved: EventRegistrarFn; } @@ -165,6 +166,19 @@ export function registerAuditLogSubscriptions(registrars: AuditSubscriptionRegis getAuditCounter()?.inc({ action: 'container-added' }); }); + registrars.registerContainerUpdated((containerUpdated) => { + auditStore.insertAudit({ + id: '', + timestamp: new Date().toISOString(), + action: 'container-update', + containerName: containerUpdated.name || containerUpdated.id || '', + containerImage: containerUpdated.image?.name, + status: 'info', + details: containerUpdated.status ? 
`status: ${containerUpdated.status}` : undefined, + }); + getAuditCounter()?.inc({ action: 'container-update' }); + }); + registrars.registerContainerRemoved((containerRemoved) => { auditStore.insertAudit({ id: '', diff --git a/app/event/index.audit.test.ts b/app/event/index.audit.test.ts index ca113b18..329fc6f6 100644 --- a/app/event/index.audit.test.ts +++ b/app/event/index.audit.test.ts @@ -156,6 +156,41 @@ describe('event default audit listeners', () => { expect(mockInc).toHaveBeenCalledWith({ action: 'container-removed' }); }); + test('should record container-update audit with status details', async () => { + const event = await loadEventModule(); + + event.emitContainerUpdated({ + name: 'nginx', + status: 'running', + image: { name: 'library/nginx' }, + }); + + expect(mockInsertAudit).toHaveBeenCalledWith( + expect.objectContaining({ + action: 'container-update', + containerName: 'nginx', + containerImage: 'library/nginx', + status: 'info', + details: 'status: running', + }), + ); + expect(mockInc).toHaveBeenCalledWith({ action: 'container-update' }); + }); + + test('should record container-update audit with id fallback and no status', async () => { + const event = await loadEventModule(); + + event.emitContainerUpdated({ id: 'abc123' }); + + expect(mockInsertAudit).toHaveBeenCalledWith( + expect.objectContaining({ + action: 'container-update', + containerName: 'abc123', + details: undefined, + }), + ); + }); + test('should record security-alert audits', async () => { const event = await loadEventModule(); diff --git a/app/event/index.ts b/app/event/index.ts index 6c3e614a..9599b7ed 100644 --- a/app/event/index.ts +++ b/app/event/index.ts @@ -407,6 +407,7 @@ registerAuditLogSubscriptions({ registerSecurityAlert, registerAgentDisconnected, registerContainerAdded, + registerContainerUpdated, registerContainerRemoved, }); From 74d8d32b483004fc22a0d20f82be98833ab8ca9f Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> 
Date: Wed, 11 Mar 2026 14:58:06 -0400 Subject: [PATCH 06/59] =?UTF-8?q?=F0=9F=90=9B=20fix(agent):=20load=20CA=20?= =?UTF-8?q?certificate=20when=20CAFILE=20provided=20without=20CERTFILE?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The httpsAgent was only created when certfile was present (mTLS), which meant providing only cafile to trust a private CA had no effect. Node.js would use the default CA bundle and reject the self-signed certificate chain. Now the httpsAgent is created when either cafile or certfile is provided, matching the pattern used by BaseRegistry. Also updates HTTPS auto-detection to consider cafile presence. Fixes: #197 --- app/agent/AgentClient.test.ts | 11 +++++++++++ app/agent/AgentClient.ts | 12 ++++++------ 2 files changed, 17 insertions(+), 6 deletions(-) diff --git a/app/agent/AgentClient.test.ts b/app/agent/AgentClient.test.ts index 6ee3c2a4..0122bd32 100644 --- a/app/agent/AgentClient.test.ts +++ b/app/agent/AgentClient.test.ts @@ -122,6 +122,17 @@ describe('AgentClient', () => { expect(c.axiosOptions.httpsAgent).toBeDefined(); }); + test('should create https agent when cafile provided without certfile', () => { + const c = new AgentClient('a', { + host: 'myhost', + port: 4000, + secret: 's', + cafile: '/path/to/ca.pem', + }); + expect(c.baseUrl).toBe('https://myhost:4000'); + expect(c.axiosOptions.httpsAgent).toBeDefined(); + }); + test('should skip cert file read when resolved cert path is empty', () => { mockResolveConfiguredPath.mockImplementation((path, options) => { if (options?.label === 'a cert file') { diff --git a/app/agent/AgentClient.ts b/app/agent/AgentClient.ts index 8a352118..c0b410f6 100644 --- a/app/agent/AgentClient.ts +++ b/app/agent/AgentClient.ts @@ -54,7 +54,7 @@ export class AgentClient { let candidateUrl = `${this.config.host}:${port}`; // Add protocol if not present if (!candidateUrl.startsWith('http')) { - const useHttps = Boolean(this.config.certfile) || port === 
443; + const useHttps = Boolean(this.config.certfile) || Boolean(this.config.cafile) || port === 443; candidateUrl = `http${useHttps ? 's' : ''}://${candidateUrl}`; } // Validate the URL to prevent request forgery (CodeQL js/request-forgery) @@ -70,17 +70,17 @@ export class AgentClient { }, }; - if (this.config.certfile) { + if (this.config.certfile || this.config.cafile) { const caPath = this.config.cafile ? resolveConfiguredPath(this.config.cafile, { label: `${name} ca file` }) : undefined; - const certPath = resolveConfiguredPath(this.config.certfile, { - label: `${name} cert file`, - }); + const certPath = this.config.certfile + ? resolveConfiguredPath(this.config.certfile, { label: `${name} cert file` }) + : undefined; const keyPath = this.config.keyfile ? resolveConfiguredPath(this.config.keyfile, { label: `${name} key file` }) : undefined; - // Intentional: mTLS with optional self-signed CA for agent communication + // Intentional: custom CA / mTLS for agent communication // lgtm[js/disabling-certificate-validation] this.axiosOptions.httpsAgent = new https.Agent({ ca: caPath ? fs.readFileSync(caPath) : undefined, From d3355ee4125618f3e67fafa871c3f444e8b67b25 Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 14:58:33 -0400 Subject: [PATCH 07/59] =?UTF-8?q?=F0=9F=90=9B=20fix(log):=20propagate=20co?= =?UTF-8?q?nfigured=20level=20to=20multistream=20destinations?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit DD_LOG_LEVEL was correctly parsed but debug/trace messages were silently dropped because pino multistream stream objects lacked an explicit level property, defaulting each to 'info'. Now each stream inherits the configured log level so all messages at or above that level are emitted. 
Closes #134 --- CHANGELOG.md | 1 + app/log/index.debug-level.test.ts | 51 +++++++++++++++++++++++++++++++ app/log/index.ts | 5 +-- 3 files changed, 55 insertions(+), 2 deletions(-) create mode 100644 app/log/index.debug-level.test.ts diff --git a/CHANGELOG.md b/CHANGELOG.md index d8085dc7..6391ed42 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,6 +16,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Fixed +- **Log level setting had no effect** — `DD_LOG_LEVEL=debug` was correctly parsed but debug messages were silently dropped because pino's multistream destinations defaulted to `info` level. Stream destinations now inherit the configured log level. ([#134](https://github.com/CodesWhat/drydock/issues/134)) - **Server feature flags not loaded after login** — Feature flags (`containeractions`, `delete`) were permanently stuck as disabled when authentication was required, because the pre-login bootstrap fetch failure marked the flags as "loaded" and never retried. Now failed fetches allow automatic retry after login. 
([#120](https://github.com/CodesWhat/drydock/discussions/120)) ### Changed diff --git a/app/log/index.debug-level.test.ts b/app/log/index.debug-level.test.ts new file mode 100644 index 00000000..0e609809 --- /dev/null +++ b/app/log/index.debug-level.test.ts @@ -0,0 +1,51 @@ +const { mockAddEntry } = vi.hoisted(() => ({ + mockAddEntry: vi.fn(), +})); + +vi.mock('../configuration', () => ({ + getLogLevel: vi.fn(() => 'debug'), + getLogFormat: vi.fn(() => 'json'), + getLogBufferEnabled: vi.fn(() => true), +})); + +vi.mock('./buffer.js', () => ({ + addEntry: mockAddEntry, +})); + +vi.mock('./warn.js', () => ({ + setWarnLogger: vi.fn(), +})); + +describe('Logger with debug level', () => { + test('should propagate debug level to multistream destinations', async () => { + const log = (await import('./index.js')).default; + + expect(log.level).toBe('debug'); + + log.debug({ component: 'test' }, 'debug-level-message'); + + await new Promise((resolve) => setTimeout(resolve, 50)); + + expect(mockAddEntry).toHaveBeenCalledWith( + expect.objectContaining({ + level: 'debug', + msg: 'debug-level-message', + }), + ); + }); + + test('should deliver info messages when level is debug', async () => { + const log = (await import('./index.js')).default; + + log.info({ component: 'test' }, 'info-level-message'); + + await new Promise((resolve) => setTimeout(resolve, 50)); + + expect(mockAddEntry).toHaveBeenCalledWith( + expect.objectContaining({ + level: 'info', + msg: 'info-level-message', + }), + ); + }); +}); diff --git a/app/log/index.ts b/app/log/index.ts index 0b9686f6..3d623e41 100644 --- a/app/log/index.ts +++ b/app/log/index.ts @@ -37,9 +37,10 @@ function createMainLogStream() { } function createLogStreams() { - const streams: { stream: Writable }[] = [{ stream: createMainLogStream() }]; + const level = getLogLevel(); + const streams: { stream: Writable; level: string }[] = [{ stream: createMainLogStream(), level }]; if (getLogBufferEnabled()) { - streams.push({ stream: 
bufferStream }); + streams.push({ stream: bufferStream, level }); } return streams; } From 144a14f57cf64b84e89451a2b29235b6ca0536a2 Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 14:59:13 -0400 Subject: [PATCH 08/59] =?UTF-8?q?=F0=9F=94=92=20security(demo):=20validate?= =?UTF-8?q?=20postMessage=20origin=20in=20service=20worker?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Add origin check to the demo app's mockServiceWorker.js to prevent cross-origin message injection. Add regression test to verify the guard persists across MSW regenerations. --- apps/demo/.gitignore | 1 + apps/demo/public/mockServiceWorker.js | 4 ++++ .../mockServiceWorker-origin-check.spec.ts | 19 +++++++++++++++++++ 3 files changed, 24 insertions(+) create mode 100644 apps/demo/.gitignore create mode 100644 ui/tests/security/mockServiceWorker-origin-check.spec.ts diff --git a/apps/demo/.gitignore b/apps/demo/.gitignore new file mode 100644 index 00000000..e985853e --- /dev/null +++ b/apps/demo/.gitignore @@ -0,0 +1 @@ +.vercel diff --git a/apps/demo/public/mockServiceWorker.js b/apps/demo/public/mockServiceWorker.js index a255338c..5a021619 100644 --- a/apps/demo/public/mockServiceWorker.js +++ b/apps/demo/public/mockServiceWorker.js @@ -21,6 +21,10 @@ addEventListener('activate', (event) => { }); addEventListener('message', async (event) => { + if (event.origin !== self.location.origin) { + return; + } + const clientId = Reflect.get(event.source || {}, 'id'); if (!clientId || !self.clients) { diff --git a/ui/tests/security/mockServiceWorker-origin-check.spec.ts b/ui/tests/security/mockServiceWorker-origin-check.spec.ts new file mode 100644 index 00000000..9f79aca2 --- /dev/null +++ b/ui/tests/security/mockServiceWorker-origin-check.spec.ts @@ -0,0 +1,19 @@ +import { readFileSync } from 'node:fs'; +import { resolve } from 'node:path'; + +const workerPath = resolve(process.cwd(), 
'../apps/demo/public/mockServiceWorker.js'); + +describe('demo mockServiceWorker message handler', () => { + it('rejects postMessage events from a different origin', () => { + const workerSource = readFileSync(workerPath, 'utf8'); + const messageHandler = workerSource.match( + /addEventListener\('message',\s*async\s*\(event\)\s*=>\s*\{[\s\S]*?\n\}\);/, + )?.[0]; + + expect(messageHandler).toBeDefined(); + expect(messageHandler).toContain('event.origin'); + expect(messageHandler).toMatch( + /if\s*\(\s*event\.origin\s*!==\s*self\.location\.origin\s*\)\s*\{\s*return;\s*\}/, + ); + }); +}); From f0cdfcb80ec1b7a8f721721fb2d1c628d7a859b5 Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 15:51:19 -0400 Subject: [PATCH 09/59] =?UTF-8?q?=F0=9F=94=A7=20chore(qa):=20add=20Traefik?= =?UTF-8?q?=20and=20nginx=20reverse=20proxy=20QA=20environments?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Traefik stack matches the Pangolin/Traefik setup used by community testers. Nginx stack provides a baseline comparison. Both terminate TLS with self-signed certs and forward standard proxy headers. Includes compose trigger, container actions, and audit log testing. 
--- test/qa-proxy-certs/cert.pem | 19 +++++++++++ test/qa-proxy-certs/key.pem | 28 +++++++++++++++++ test/qa-proxy-compose.yml | 47 +++++++++++++++++++++++++++ test/qa-proxy-nginx.conf | 16 ++++++++++ test/qa-traefik-compose.yml | 61 ++++++++++++++++++++++++++++++++++++ test/qa-traefik-dynamic.yml | 9 ++++++ 6 files changed, 180 insertions(+) create mode 100644 test/qa-proxy-certs/cert.pem create mode 100644 test/qa-proxy-certs/key.pem create mode 100644 test/qa-proxy-compose.yml create mode 100644 test/qa-proxy-nginx.conf create mode 100644 test/qa-traefik-compose.yml create mode 100644 test/qa-traefik-dynamic.yml diff --git a/test/qa-proxy-certs/cert.pem b/test/qa-proxy-certs/cert.pem new file mode 100644 index 00000000..ca624588 --- /dev/null +++ b/test/qa-proxy-certs/cert.pem @@ -0,0 +1,19 @@ +-----BEGIN CERTIFICATE----- +MIIDCTCCAfGgAwIBAgIUIxsKgAJTBH7amTLrNF+H7knrdBMwDQYJKoZIhvcNAQEL +BQAwFDESMBAGA1UEAwwJbG9jYWxob3N0MB4XDTI2MDMxMTE2MzQxMFoXDTI2MDQx +MDE2MzQxMFowFDESMBAGA1UEAwwJbG9jYWxob3N0MIIBIjANBgkqhkiG9w0BAQEF +AAOCAQ8AMIIBCgKCAQEAyHQlopOX6rEBQwx77NSD6FhpzFojscgQYK9IYAY+QLGI +DzSGeC7FeO+WhvqBhMnEhen6pjXITSqWNwo3OYBXp4GpinnQXSdQ5yYh9k5Zg1fZ +jWPwfERRmrjSenbrxt4TycKxDuS288EMUHt0g3Ur3NNood5/oX8aNSPAPyE3nuYl +4Kvhs6sfpMAs2VBfOqaBo4TgKbXIY4TKAQiEHHXOSX6bMmz2P2aqhrK0T5LGIrJp +x4rsiDabxyGBFHcD7GsSSqYLSVSYdBU9XuS2YEco0U3LiV5kuih5u2ZYh2SrL0+I +LboimfhJ9XZLk4hWKqdTI3fEVgF5J4ckPQaBMA7tlwIDAQABo1MwUTAdBgNVHQ4E +FgQUsYmvKjV1p1uiTNnkEApxA14wB3gwHwYDVR0jBBgwFoAUsYmvKjV1p1uiTNnk +EApxA14wB3gwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAFrKq +DXtD5cfv3hyru3aaSL/7FZSVjwMT+xXWis2+h8dnOsiIZOn8p+lxZvfeHvOSurGn +7E3i6L+xbZqnFSeUfc5j14HlICkjIavKjB9J2884v5SmPrRRRSdnMOZNdnMbwbjo +N/N2XxvNgyBN4KDB8yWqtP5+LWuRrl8JfpjQRbyi2BY4HVI4qpjHeHF1qomaMHsb +p5vha07jtHR4u6gmXaie3fhcMGbocpjEEHooWNILhjVkFL9evjvC3KTpFt/fQxPt +2EnXAPlLI7RRRhPf/Q0mR6mhqGlvwZ5r7d2VOM8r422BF2b0bvjCB4nMOTjArF6s +mCFNiwLp0HQWlCtIdg== +-----END CERTIFICATE----- diff --git a/test/qa-proxy-certs/key.pem 
b/test/qa-proxy-certs/key.pem new file mode 100644 index 00000000..5fd615aa --- /dev/null +++ b/test/qa-proxy-certs/key.pem @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEuwIBADANBgkqhkiG9w0BAQEFAASCBKUwggShAgEAAoIBAQDIdCWik5fqsQFD +DHvs1IPoWGnMWiOxyBBgr0hgBj5AsYgPNIZ4LsV475aG+oGEycSF6fqmNchNKpY3 +Cjc5gFengamKedBdJ1DnJiH2TlmDV9mNY/B8RFGauNJ6duvG3hPJwrEO5LbzwQxQ +e3SDdSvc02ih3n+hfxo1I8A/ITee5iXgq+Gzqx+kwCzZUF86poGjhOAptchjhMoB +CIQcdc5JfpsybPY/ZqqGsrRPksYismnHiuyINpvHIYEUdwPsaxJKpgtJVJh0FT1e +5LZgRyjRTcuJXmS6KHm7ZliHZKsvT4gtuiKZ+En1dkuTiFYqp1Mjd8RWAXknhyQ9 +BoEwDu2XAgMBAAECggEAIIYj4xySfhxpKYFZjCQxIN5TkDoh54MgESfvlND+gc2s +V1c86Dwig0xQfqcfo7V/IwFDT3uTe93IkLaiHjzSPBYANb0tDyhFarPXt9ifkn2L +CXNpPihxmyqY0BECkXpP+KETzGhUvPkaL+bQEVJOIx7UL8tjpwbx8CRUWYngtfG0 +z+I46Vg9BLwivGTLpgyuRMq48Gh9Dcw+kmIacfSYY8TbauLc3Ls6EoDg7BAzOkB4 +9hNLgBRRT+95zmpHv1++j17AIohr3apvxDVar9Ub+/jpfMUatUwG8oH+Ef8QmRNG +gKfEf3wUPKA3lF3FZKKe/HsKGYDuQAhfRReSKeI+DQKBgQD73mANjWpjyuI2lAtt +ZsW7PTCh9pHB5hFVnNw2jtr3wvAprh+F8yr+X00+7IlTfM2VcrV9g3LFFLUDFIEE +AzAKCz1fTAEKn9WlTx/yW6V16mj7gwhGhWayjRikbNsTiEHK7wdNmsX09j7kFmTw +X8xriPJcbnObP/XqL1aYL/l+ewKBgQDLvd/n8wrC4az1VLmAhedW+2BxgAmdu8L3 +CuAMylUrSM99PEOGn/zFBIZrN3VeUWX5TyhuDwNjCQayp3jCIZohJn73DFLbuKCU +yK9ICkUiquCvSmTQ88Xy+/bmVgwZlMituvdq56ldZOXPQvDey34kL37nWxOe4KF7 +Z9Cxpz/wlQJ/bTT+cOHgP2S4sPGAAu+MzKK7c351tJ0M5xIcaYuHZeYgO/JDuNC2 +05R+6cmlwY1blnEmQEW+fbV8xtkdF6BKNBCri6ZozTKAcCzerTcPhxEcc/FcpTcy +UDjddm28j7uEy3jYsc1qB8y9eCg9m/vtprK6Y1mAxs/00JW19kbW5wKBgC6B+UqJ +QDucKE8YOAAOkPBaEXnXMFrBMZAS+3Hv/eETjcmYqBFjE+AlWEnBLxmImy5900zM +QImq1cySTg6CfRx0HSdnuMJPMtjDtr9LGN0BBKj+4mSQQO9mdMW/fqOYQvblZvUQ +TKj1D0Bwl2tEKFc6QE2vjJsHy9TPrePfucjBAoGBAOnzNd++P/x6/xIPHtxbs72q +EelJZduYhIYcLmbMzuu+MlYQUyrQ++ctbLC/nCpQa8aDG90eDeChoLDFs1xv+WKj ++d4SXcGwPEA2Qc2LYyDtNt9G3+inBb1WMM2DcdFF92umGnw8LI7Wijl6toAMc1ZC +IDZ131VHqUzkRkw52K86 +-----END PRIVATE KEY----- diff --git a/test/qa-proxy-compose.yml b/test/qa-proxy-compose.yml new file mode 100644 index 00000000..1e9b6b29 --- /dev/null +++ 
b/test/qa-proxy-compose.yml @@ -0,0 +1,47 @@ +services: + # Nginx reverse proxy — TLS-terminating proxy in front of drydock + # Simulates the common self-hosted setup (NPM, Traefik, Caddy, etc.) + proxy: + image: nginx:alpine + container_name: qa-proxy + ports: + - "8443:443" + volumes: + - ./qa-proxy-nginx.conf:/etc/nginx/conf.d/default.conf:ro + - ./qa-proxy-certs:/etc/nginx/certs:ro + depends_on: + - drydock + + drydock: + image: drydock:dev + container_name: drydock-qa-proxy + user: root + ports: + - "3333:3000" + volumes: + - /var/run/docker.sock:/var/run/docker.sock + - ./qa-proxy-compose.yml:/drydock/qa-compose.yml:rw + environment: + - DD_RUN_AS_ROOT=true + - DD_ALLOW_INSECURE_ROOT=true + - DD_LOG_FORMAT=text + - DD_WATCHER_LOCAL_WATCHBYDEFAULT=false + - DD_WATCHER_LOCAL_CRON=*/2 * * * * + - DD_TRIGGER_DOCKER_LOCAL_AUTO=false + - DD_TRIGGER_DOCKERCOMPOSE_QA_FILE=/drydock/qa-compose.yml + - DD_TRIGGER_DOCKERCOMPOSE_QA_AUTO=false + - DD_AUTH_BASIC_ADMIN_USER=admin + - "DD_AUTH_BASIC_ADMIN_HASH=argon2id$$65536$$3$$4$$zUAK9+ktVWNHyQdv3SaOSgMv3T02F1Zj8D+t1un7D98=$$KEmn6d94w03YxIlw7U7l/ikD9lW+H3IC1N7xbAhOB9yKofA3HoxUBmuvBawvShhv337zDv4+g9hobNWeQEWwkQ==" + - DD_SESSION_SECRET=qa-test-session-secret + # Test scenario: trustproxy=1 (required for reverse proxy) + - DD_SERVER_TRUSTPROXY=1 + - DD_PUBLIC_URL=https://localhost:8443 + + # Test container with compose trigger label + nginx-test: + image: nginx:1.25.5 + pull_policy: never + container_name: nginx-compose-test + labels: + - dd.watch=true + - dd.display.name=Nginx Compose Test diff --git a/test/qa-proxy-nginx.conf b/test/qa-proxy-nginx.conf new file mode 100644 index 00000000..e1e811d6 --- /dev/null +++ b/test/qa-proxy-nginx.conf @@ -0,0 +1,16 @@ +server { + listen 443 ssl; + server_name localhost; + + ssl_certificate /etc/nginx/certs/cert.pem; + ssl_certificate_key /etc/nginx/certs/key.pem; + + location / { + proxy_pass http://drydock:3000; + proxy_set_header Host $http_host; + proxy_set_header X-Real-IP 
$remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_set_header X-Forwarded-Host $http_host; + } +} diff --git a/test/qa-traefik-compose.yml b/test/qa-traefik-compose.yml new file mode 100644 index 00000000..7acfc9dd --- /dev/null +++ b/test/qa-traefik-compose.yml @@ -0,0 +1,61 @@ +services: + # Traefik reverse proxy — TLS-terminating, matching rj10rd/Pangolin stack + traefik: + image: traefik:latest + container_name: qa-traefik + command: + - --api.insecure=true + - --providers.docker=true + - --providers.docker.exposedByDefault=false + - --providers.file.filename=/etc/traefik/dynamic.yml + - --entrypoints.websecure.address=:443 + ports: + - "8443:443" + - "8080:8080" # Traefik dashboard + volumes: + - /var/run/docker.sock:/var/run/docker.sock:ro + - ./qa-proxy-certs:/certs:ro + - ./qa-traefik-dynamic.yml:/etc/traefik/dynamic.yml:ro + + drydock: + image: drydock:dev + container_name: drydock-qa-traefik + user: root + expose: + - "3000" + volumes: + - /var/run/docker.sock:/var/run/docker.sock + - ./qa-traefik-compose.yml:/drydock/qa-compose.yml:rw + environment: + - DD_RUN_AS_ROOT=true + - DD_ALLOW_INSECURE_ROOT=true + - DD_LOG_FORMAT=text + - DD_LOG_LEVEL=debug + - DD_WATCHER_LOCAL_WATCHBYDEFAULT=false + - DD_WATCHER_LOCAL_CRON=*/2 * * * * + # Docker trigger (manual only) + - DD_TRIGGER_DOCKER_LOCAL_AUTO=false + # Compose trigger — file must be writable inside container + - DD_TRIGGER_DOCKERCOMPOSE_QA_FILE=/drydock/qa-compose.yml + - DD_TRIGGER_DOCKERCOMPOSE_QA_AUTO=false + - DD_AUTH_BASIC_ADMIN_USER=admin + - "DD_AUTH_BASIC_ADMIN_HASH=argon2id$$65536$$3$$4$$zUAK9+ktVWNHyQdv3SaOSgMv3T02F1Zj8D+t1un7D98=$$KEmn6d94w03YxIlw7U7l/ikD9lW+H3IC1N7xbAhOB9yKofA3HoxUBmuvBawvShhv337zDv4+g9hobNWeQEWwkQ==" + - DD_SESSION_SECRET=qa-test-session-secret + # Required for reverse proxy + - DD_SERVER_TRUSTPROXY=1 + - DD_PUBLIC_URL=https://localhost:8443 + labels: + - traefik.enable=true + - 
traefik.http.routers.drydock.rule=Host(`localhost`) + - traefik.http.routers.drydock.entrypoints=websecure + - traefik.http.routers.drydock.tls=true + - traefik.http.services.drydock.loadbalancer.server.port=3000 + + # Test container with compose trigger + watch labels + nginx-test: + image: nginx:1.25.5 + pull_policy: never + container_name: nginx-compose-test + labels: + - dd.watch=true + - dd.display.name=Nginx Compose Test diff --git a/test/qa-traefik-dynamic.yml b/test/qa-traefik-dynamic.yml new file mode 100644 index 00000000..c46de37e --- /dev/null +++ b/test/qa-traefik-dynamic.yml @@ -0,0 +1,9 @@ +tls: + certificates: + - certFile: /certs/cert.pem + keyFile: /certs/key.pem + stores: + default: + defaultCertificate: + certFile: /certs/cert.pem + keyFile: /certs/key.pem From ce98a74ac6c37ee287823ff92bf335d541adb577 Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 15:55:11 -0400 Subject: [PATCH 10/59] =?UTF-8?q?=F0=9F=90=9B=20fix(trigger):=20upgrade=20?= =?UTF-8?q?silent=20skip=20paths=20to=20warn-level=20logging?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Compose trigger has many silent failure paths logged at debug level, making it nearly impossible to diagnose why a trigger reports success but containers don't update. 
Promote key diagnostic messages: - compose file mismatch skip → warn - compose label inspect failure → warn - no containers matched → new warn message - "already up to date" → includes container names --- app/triggers/providers/docker/Docker.test.ts | 30 ++++++++++++++++ app/triggers/providers/docker/Docker.ts | 2 +- .../dockercompose/Dockercompose.test.ts | 36 +++++++++++++++++-- .../providers/dockercompose/Dockercompose.ts | 24 +++++++++---- 4 files changed, 82 insertions(+), 10 deletions(-) diff --git a/app/triggers/providers/docker/Docker.test.ts b/app/triggers/providers/docker/Docker.test.ts index 38a56468..b531faf5 100644 --- a/app/triggers/providers/docker/Docker.test.ts +++ b/app/triggers/providers/docker/Docker.test.ts @@ -2237,6 +2237,36 @@ describe('additional docker trigger coverage', () => { ); }); + test('cleanupOldImages should warn when digest image removal fails', async () => { + docker.configuration.prune = true; + vi.spyOn(docker, 'removeImage').mockRejectedValue(new Error('remove failed')); + const registryProvider = { + getImageFullName: vi.fn(() => 'my-registry/test/test:sha256:old'), + }; + const logContainer = createMockLog('warn'); + + await docker.cleanupOldImages( + {}, + registryProvider, + { + image: { + registry: { name: 'hub', url: 'my-registry' }, + name: 'test/test', + tag: { value: '1.0.0' }, + digest: { repo: 'sha256:old' }, + }, + updateKind: { + kind: 'digest', + }, + }, + logContainer, + ); + + expect(logContainer.warn).toHaveBeenCalledWith( + expect.stringContaining('Unable to remove previous digest image'), + ); + }); + test('cleanupOldImages should skip digest pruning when digest repo is missing', async () => { docker.configuration.prune = true; const removeImageSpy = vi.spyOn(docker, 'removeImage').mockResolvedValue(undefined); diff --git a/app/triggers/providers/docker/Docker.ts b/app/triggers/providers/docker/Docker.ts index 6ab46d55..ff9eda05 100644 --- a/app/triggers/providers/docker/Docker.ts +++ 
b/app/triggers/providers/docker/Docker.ts @@ -807,7 +807,7 @@ class Docker extends Trigger { const oldImage = registry.getImageFullName(container.image, container.image.digest.repo); await this.removeImage(dockerApi, oldImage, logContainer); } catch (e) { - logContainer.debug(`Unable to remove previous digest image (${e.message})`); + logContainer.warn(`Unable to remove previous digest image (${e.message})`); } } } diff --git a/app/triggers/providers/dockercompose/Dockercompose.test.ts b/app/triggers/providers/dockercompose/Dockercompose.test.ts index 4ccf34eb..80f38ebe 100644 --- a/app/triggers/providers/dockercompose/Dockercompose.test.ts +++ b/app/triggers/providers/dockercompose/Dockercompose.test.ts @@ -708,6 +708,7 @@ describe('Dockercompose Trigger', () => { await trigger.processComposeFile('/opt/drydock/test/stack.yml', [container]); expect(mockLog.warn).toHaveBeenCalledWith(expect.stringContaining('No containers found')); + expect(mockLog.warn).toHaveBeenCalledWith(expect.stringContaining('not found in compose file')); }); test('processComposeFile should warn and continue on compose/runtime reconciliation mismatch by default', async () => { @@ -2997,6 +2998,35 @@ describe('Dockercompose Trigger', () => { expect(mockLog.warn).toHaveBeenCalledWith(expect.stringContaining('permission denied')); }); + test('triggerBatch should warn when container compose file does not match configured file', async () => { + trigger.configuration.file = '/opt/drydock/configured.yml'; + fs.access.mockResolvedValue(undefined); + + const container = { + name: 'mismatched', + watcher: 'local', + labels: { 'dd.compose.file': '/opt/drydock/other.yml' }, + }; + + await trigger.triggerBatch([container]); + + expect(mockLog.warn).toHaveBeenCalledWith( + expect.stringContaining('do not match configured file'), + ); + }); + + test('triggerBatch should warn when no containers matched any compose file', async () => { + trigger.configuration.file = undefined; + + const container = { name: 
'orphan', watcher: 'local' }; + + await trigger.triggerBatch([container]); + + expect(mockLog.warn).toHaveBeenCalledWith( + 'No containers matched any compose file for this trigger', + ); + }); + test('triggerBatch should group containers by compose file and process each', async () => { trigger.configuration.file = undefined; fs.access.mockResolvedValue(undefined); @@ -3077,8 +3107,8 @@ describe('Dockercompose Trigger', () => { expect(processComposeFileSpy).toHaveBeenCalledWith('/opt/drydock/test/monitoring.yml', [ monitoringContainer, ]); - expect(mockLog.warn).not.toHaveBeenCalledWith( - expect.stringContaining('/opt/drydock/test/mysql.yml'), + expect(mockLog.warn).toHaveBeenCalledWith( + expect.stringContaining('do not match configured file'), ); }); @@ -3977,7 +4007,7 @@ describe('Dockercompose Trigger', () => { name: 'nginx', } as any), ).resolves.toEqual([]); - expect(mockLog.debug).toHaveBeenCalledWith( + expect(mockLog.warn).toHaveBeenCalledWith( expect.stringContaining('Unable to inspect compose labels'), ); }); diff --git a/app/triggers/providers/dockercompose/Dockercompose.ts b/app/triggers/providers/dockercompose/Dockercompose.ts index e32ca2cb..0a5e165a 100644 --- a/app/triggers/providers/dockercompose/Dockercompose.ts +++ b/app/triggers/providers/dockercompose/Dockercompose.ts @@ -793,7 +793,7 @@ class Dockercompose extends Docker { container.name, ); } catch (e) { - this.log.debug( + this.log.warn( `Unable to inspect compose labels for container ${container.name}; falling back to default compose file resolution (${e.message})`, ); return []; @@ -1319,7 +1319,7 @@ class Dockercompose extends Docker { continue; } if (configuredComposeFilePath && !composeFiles.includes(configuredComposeFilePath)) { - this.log.debug( + this.log.warn( `Skip container ${container.name} because compose files ${composeFiles.join(', ')} do not match configured file ${configuredComposeFilePath}`, ); continue; @@ -1373,6 +1373,10 @@ class Dockercompose extends Docker { 
configuredComposeFilePath, ); + if (containersByComposeFile.size === 0) { + this.log.warn('No containers matched any compose file for this trigger'); + } + // Process each compose file group const batchResults: unknown[] = []; for (const { @@ -1408,9 +1412,15 @@ class Dockercompose extends Docker { const compose = await this.getComposeFileChainAsObject(composeFileChain, composeByFile); // Filter containers that belong to this compose file - const containersFiltered = containers.filter((container) => - doesContainerBelongToCompose(compose, container), - ); + const containersFiltered = containers.filter((container) => { + const belongs = doesContainerBelongToCompose(compose, container); + if (!belongs) { + this.log.warn( + `Container ${container.name} not found in compose file ${composeFileChainSummary} (image mismatch)`, + ); + } + return belongs; + }); if (containersFiltered.length === 0) { this.log.warn(`No containers found in compose file ${composeFileChainSummary}`); @@ -1453,7 +1463,9 @@ class Dockercompose extends Docker { ); if (mappingsNeedingRuntimeUpdate.length === 0) { - this.log.info(`All containers in ${composeFileChainSummary} are already up to date`); + this.log.info( + `All containers in ${composeFileChainSummary} are already up to date (checked: ${versionMappings.map((m) => m.container.name).join(', ') || 'none'})`, + ); return; } From e1c54e1c1090acee25d4cfcfb966bd8c919ef108 Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 15:58:02 -0400 Subject: [PATCH 11/59] =?UTF-8?q?=F0=9F=90=9B=20fix(icons):=20use=20no-sto?= =?UTF-8?q?re=20cache=20header=20for=20fallback=20icon=20responses?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fallback icons were served with immutable cache headers, causing browsers to cache the Docker placeholder permanently even after the real icon becomes available from the provider. 
--- app/api/icons.test.ts | 63 +++++++++++++++++++++++++++++++++- app/api/icons/response.test.ts | 1 + app/api/icons/response.ts | 5 ++- app/api/icons/settings.ts | 2 ++ 4 files changed, 69 insertions(+), 2 deletions(-) diff --git a/app/api/icons.test.ts b/app/api/icons.test.ts index 42c37123..ed66b16e 100644 --- a/app/api/icons.test.ts +++ b/app/api/icons.test.ts @@ -1076,13 +1076,74 @@ describe('Icons Router', () => { expect(res.status).not.toHaveBeenCalledWith(404); expect(res.json).not.toHaveBeenCalled(); - expect(res.set).toHaveBeenCalledWith('Cache-Control', 'public, max-age=31536000, immutable'); + expect(res.set).toHaveBeenCalledWith('Cache-Control', 'no-store'); + expect(res.type).toHaveBeenCalledWith('image/png'); + expect(res.sendFile).toHaveBeenCalledWith('docker.png', { + root: '/runtime/assets/icons/selfhst', + }); + }); + + test('should use no-store cache headers for fallback images instead of immutable', async () => { + const upstreamError = Object.assign(new Error('not found'), { + response: { status: 404 }, + }); + mockAccess.mockImplementation(async (targetPath: string) => { + if (targetPath === '/runtime/assets/icons/selfhst/docker.png') { + return; + } + throw new Error('not found'); + }); + mockAxiosGet.mockRejectedValue(upstreamError); + mockAxiosIsAxiosError.mockReturnValue(true); + const handler = getHandler(); + const res = createResponse(); + + await handler( + { + params: { + provider: 'homarr', + slug: 'missing', + }, + headers: { + 'sec-fetch-dest': 'image', + }, + }, + res, + ); + + expect(res.set).toHaveBeenCalledWith('Cache-Control', 'no-store'); + expect(res.set).not.toHaveBeenCalledWith( + 'Cache-Control', + 'public, max-age=31536000, immutable', + ); expect(res.type).toHaveBeenCalledWith('image/png'); expect(res.sendFile).toHaveBeenCalledWith('docker.png', { root: '/runtime/assets/icons/selfhst', }); }); + test('should use immutable cache headers for successfully cached icons', async () => { + 
mockAccess.mockResolvedValue(undefined); + const handler = getHandler(); + const res = createResponse(); + + await handler( + { + params: { + provider: 'homarr', + slug: 'docker', + }, + }, + res, + ); + + expect(res.set).toHaveBeenCalledWith('Cache-Control', 'public, max-age=31536000, immutable'); + expect(res.type).toHaveBeenCalledWith('image/png'); + expect(res.sendFile).toHaveBeenCalledWith('docker.png', { + root: '/store/icons/homarr', + }); + }); + test('should serve bundled fallback image when sec-fetch-dest header is an array', async () => { const upstreamError = Object.assign(new Error('forbidden'), { response: { status: 403 }, diff --git a/app/api/icons/response.test.ts b/app/api/icons/response.test.ts index 9ea370f8..e9c36001 100644 --- a/app/api/icons/response.test.ts +++ b/app/api/icons/response.test.ts @@ -51,6 +51,7 @@ describe('icons/response', () => { }); expect(mockFindBundledIconPath).toHaveBeenCalledWith('selfhst', 'docker', 'png'); + expect(res.set).toHaveBeenCalledWith('Cache-Control', 'no-store'); expect(res.status).not.toHaveBeenCalled(); expect(res.json).not.toHaveBeenCalled(); expect(res.sendFile).toHaveBeenCalledWith('docker.png', { diff --git a/app/api/icons/response.ts b/app/api/icons/response.ts index 4f519b3d..e6089dcd 100644 --- a/app/api/icons/response.ts +++ b/app/api/icons/response.ts @@ -3,6 +3,7 @@ import type { Request, Response } from 'express'; import { providers } from './providers.js'; import { CACHE_CONTROL_HEADER, + FALLBACK_CACHE_CONTROL_HEADER, FALLBACK_ICON, FALLBACK_IMAGE_PROVIDER, FALLBACK_IMAGE_SLUG, @@ -50,7 +51,9 @@ async function sendMissingIconResponse({ providers[FALLBACK_IMAGE_PROVIDER].extension, ); if (fallbackPath) { - sendCachedIcon(res, fallbackPath, providers[FALLBACK_IMAGE_PROVIDER].contentType); + res.set('Cache-Control', FALLBACK_CACHE_CONTROL_HEADER); + res.type(providers[FALLBACK_IMAGE_PROVIDER].contentType); + res.sendFile(path.basename(fallbackPath), { root: path.dirname(fallbackPath) }); return; 
} } diff --git a/app/api/icons/settings.ts b/app/api/icons/settings.ts index 07aaffe8..601794e0 100644 --- a/app/api/icons/settings.ts +++ b/app/api/icons/settings.ts @@ -1,6 +1,7 @@ import { toPositiveInteger } from '../../util/parse.js'; const CACHE_CONTROL_HEADER = 'public, max-age=31536000, immutable'; +const FALLBACK_CACHE_CONTROL_HEADER = 'no-store'; const FALLBACK_ICON = 'fab fa-docker'; const FALLBACK_IMAGE_PROVIDER = 'selfhst'; const FALLBACK_IMAGE_SLUG = 'docker'; @@ -48,6 +49,7 @@ function getIconInFlightTimeoutMs() { export { CACHE_CONTROL_HEADER, + FALLBACK_CACHE_CONTROL_HEADER, FALLBACK_ICON, FALLBACK_IMAGE_PROVIDER, FALLBACK_IMAGE_SLUG, From db3859e6becceb25d4fff4b017bb9540a07c12e8 Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 15:58:38 -0400 Subject: [PATCH 12/59] =?UTF-8?q?=F0=9F=93=9D=20docs(changelog):=20add=20t?= =?UTF-8?q?rigger=20diagnostics=20and=20icon=20cache=20fix=20entries?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6391ed42..aac49543 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -18,6 +18,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - **Log level setting had no effect** — `DD_LOG_LEVEL=debug` was correctly parsed but debug messages were silently dropped because pino's multistream destinations defaulted to `info` level. Stream destinations now inherit the configured log level. ([#134](https://github.com/CodesWhat/drydock/issues/134)) - **Server feature flags not loaded after login** — Feature flags (`containeractions`, `delete`) were permanently stuck as disabled when authentication was required, because the pre-login bootstrap fetch failure marked the flags as "loaded" and never retried. Now failed fetches allow automatic retry after login. 
([#120](https://github.com/CodesWhat/drydock/discussions/120)) +- **Compose trigger silently skips containers** — Multiple failure paths in the compose trigger were logged at `debug` level, making it nearly impossible to diagnose why a trigger reports success but containers don't update. Key diagnostic messages (compose file mismatch, label inspect failure, no containers matched) promoted to `warn` level, and the "already up to date" message now includes container names. ([#84](https://github.com/CodesWhat/drydock/discussions/84)) +- **Fallback icon cached permanently** — The Docker placeholder icon was served with `immutable` cache headers, causing browsers to cache it permanently even after the real provider icon becomes available. Fallback responses now use `no-store`. ### Changed From 798733d45fe544f7b084018d210fc54da104aff2 Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 18:51:45 -0400 Subject: [PATCH 13/59] =?UTF-8?q?=F0=9F=90=9B=20fix(trigger):=20resolve=20?= =?UTF-8?q?compose=20directory=20paths=20to=20compose=20file=20candidates?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit When DD_TRIGGER_DOCKERCOMPOSE_*_FILE points to a directory instead of a file, the compose trigger now probes for compose.yaml, compose.yml, docker-compose.yaml, and docker-compose.yml inside that directory. Also adds a no-op guard: trigger() now throws when updateAvailable=true but no compose file mutations were applied, surfacing silent failures instead of reporting false success. 
--- .../dockercompose/Dockercompose.test.ts | 47 ++++++++++- .../providers/dockercompose/Dockercompose.ts | 77 ++++++++++++++++--- 2 files changed, 113 insertions(+), 11 deletions(-) diff --git a/app/triggers/providers/dockercompose/Dockercompose.test.ts b/app/triggers/providers/dockercompose/Dockercompose.test.ts index 80f38ebe..099201f4 100644 --- a/app/triggers/providers/dockercompose/Dockercompose.test.ts +++ b/app/triggers/providers/dockercompose/Dockercompose.test.ts @@ -3112,6 +3112,41 @@ describe('Dockercompose Trigger', () => { ); }); + test('triggerBatch should resolve a configured compose directory to compose.yaml for affinity matching', async () => { + trigger.configuration.file = '/opt/drydock/stacks/filebrowser'; + fs.stat.mockImplementation(async (candidatePath: string) => { + if (candidatePath === '/opt/drydock/stacks/filebrowser') { + return { + isDirectory: () => true, + mtimeMs: 1_700_000_000_000, + } as any; + } + return { + isDirectory: () => false, + mtimeMs: 1_700_000_000_000, + } as any; + }); + fs.access.mockResolvedValue(undefined); + + const container = { + name: 'filebrowser', + watcher: 'local', + labels: { 'dd.compose.file': '/opt/drydock/stacks/filebrowser/compose.yaml' }, + }; + const processComposeFileSpy = vi.spyOn(trigger, 'processComposeFile').mockResolvedValue(true); + + await trigger.triggerBatch([container]); + + expect(processComposeFileSpy).toHaveBeenCalledTimes(1); + expect(processComposeFileSpy).toHaveBeenCalledWith( + '/opt/drydock/stacks/filebrowser/compose.yaml', + [container], + ); + expect(mockLog.warn).not.toHaveBeenCalledWith( + expect.stringContaining('do not match configured file'), + ); + }); + // ----------------------------------------------------------------------- // getComposeFileForContainer // ----------------------------------------------------------------------- @@ -3285,13 +3320,23 @@ describe('Dockercompose Trigger', () => { test('trigger should delegate to triggerBatch with single container', async () 
=> { const container = { name: 'test' }; - const spy = vi.spyOn(trigger, 'triggerBatch').mockResolvedValue(); + const spy = vi.spyOn(trigger, 'triggerBatch').mockResolvedValue([true]); await trigger.trigger(container); expect(spy).toHaveBeenCalledWith([container]); }); + test('trigger should throw when update is still available but compose trigger applies no runtime updates', async () => { + trigger.configuration.dryrun = false; + const container = { name: 'test', updateAvailable: true }; + vi.spyOn(trigger, 'triggerBatch').mockResolvedValue([false]); + + await expect(trigger.trigger(container)).rejects.toThrow( + 'No compose updates were applied for container test', + ); + }); + test('getConfigurationSchema should extend Docker schema with compose hardening options', () => { const schema = trigger.getConfigurationSchema(); expect(schema).toBeDefined(); diff --git a/app/triggers/providers/dockercompose/Dockercompose.ts b/app/triggers/providers/dockercompose/Dockercompose.ts index 0a5e165a..5e3b9938 100644 --- a/app/triggers/providers/dockercompose/Dockercompose.ts +++ b/app/triggers/providers/dockercompose/Dockercompose.ts @@ -15,6 +15,12 @@ const COMPOSE_RENAME_RETRY_MS = 200; const COMPOSE_PROJECT_LABEL = 'com.docker.compose.project'; const COMPOSE_PROJECT_CONFIG_FILES_LABEL = 'com.docker.compose.project.config_files'; const COMPOSE_PROJECT_WORKING_DIR_LABEL = 'com.docker.compose.project.working_dir'; +const COMPOSE_DIRECTORY_FILE_CANDIDATES = [ + 'compose.yaml', + 'compose.yml', + 'docker-compose.yaml', + 'docker-compose.yml', +]; const COMPOSE_CACHE_MAX_ENTRIES = 256; const POST_START_ENVIRONMENT_KEY_PATTERN = /^[A-Za-z_][A-Za-z0-9_]*$/; const SELF_CONTAINER_IDENTIFIER_PATTERN = /^[a-zA-Z0-9][a-zA-Z0-9_.-]*$/; @@ -815,13 +821,49 @@ class Dockercompose extends Docker { return composeFilesFromInspect; } - const composeFileFromDefault = this.getDefaultComposeFilePath(); + const composeFileFromDefault = await this.resolveDefaultComposeFilePathForRuntime(); if 
(!composeFileFromDefault) { return []; } return [composeFileFromDefault]; } + async resolveComposeFilePathFromDirectory(composePath: string): Promise { + try { + const composePathStat = await fs.stat(composePath); + if (!composePathStat.isDirectory()) { + return composePath; + } + } catch { + // Keep existing behavior for missing/inaccessible files; downstream checks + // emit detailed does-not-exist/permission warnings. + return composePath; + } + + for (const composeFileCandidate of COMPOSE_DIRECTORY_FILE_CANDIDATES) { + const composeFileCandidatePath = path.join(composePath, composeFileCandidate); + try { + await fs.access(composeFileCandidatePath); + return composeFileCandidatePath; + } catch { + // try next candidate + } + } + + this.log.warn( + `Configured compose path ${composePath} is a directory and does not contain a compose file candidate (${COMPOSE_DIRECTORY_FILE_CANDIDATES.join(', ')})`, + ); + return null; + } + + async resolveDefaultComposeFilePathForRuntime(): Promise { + const composeFileFromDefault = this.getDefaultComposeFilePath(); + if (!composeFileFromDefault) { + return null; + } + return this.resolveComposeFilePathFromDirectory(composeFileFromDefault); + } + normalizeDigestPinningValue(value: unknown): string | null { if (!value || typeof value !== 'string') { return null; @@ -1291,7 +1333,17 @@ class Dockercompose extends Docker { * @returns {Promise} */ async trigger(container) { - await this.triggerBatch([container]); + const triggerBatchResults = await this.triggerBatch([container]); + const hasRuntimeUpdates = triggerBatchResults.some((result) => result === true); + if ( + this.configuration.dryrun !== true && + container?.updateAvailable === true && + !hasRuntimeUpdates + ) { + throw new Error( + `No compose updates were applied for container ${container?.name || 'unknown'}`, + ); + } } async resolveAndGroupContainersByComposeFile( @@ -1364,10 +1416,10 @@ class Dockercompose extends Docker { /** * Update the docker-compose stack. 
* @param containers the containers - * @returns {Promise} + * @returns {Promise} */ - async triggerBatch(containers): Promise { - const configuredComposeFilePath = this.getDefaultComposeFilePath(); + async triggerBatch(containers): Promise { + const configuredComposeFilePath = await this.resolveDefaultComposeFilePathForRuntime(); const containersByComposeFile = await this.resolveAndGroupContainersByComposeFile( containers, configuredComposeFilePath, @@ -1378,7 +1430,7 @@ class Dockercompose extends Docker { } // Process each compose file group - const batchResults: unknown[] = []; + const batchResults: boolean[] = []; for (const { composeFile, composeFiles, @@ -1399,9 +1451,13 @@ class Dockercompose extends Docker { * Process a specific compose file with its associated containers. * @param composeFile * @param containers - * @returns {Promise} + * @returns {Promise} true if runtime updates were applied, false otherwise */ - async processComposeFile(composeFile, containers, composeFiles = [composeFile]) { + async processComposeFile( + composeFile, + containers, + composeFiles = [composeFile], + ): Promise { const composeFileChain = this.normalizeComposeFileChain(composeFile, composeFiles); const composeFileChainSummary = composeFileChain.join(', '); this.log.info(`Processing compose file: ${composeFileChainSummary}`); @@ -1424,7 +1480,7 @@ class Dockercompose extends Docker { if (containersFiltered.length === 0) { this.log.warn(`No containers found in compose file ${composeFileChainSummary}`); - return; + return false; } // [{ container, current: '1.0.0', update: '2.0.0' }, {...}] @@ -1466,7 +1522,7 @@ class Dockercompose extends Docker { this.log.info( `All containers in ${composeFileChainSummary} are already up to date (checked: ${versionMappings.map((m) => m.container.name).join(', ') || 'none'})`, ); - return; + return false; } // Dry-run? 
@@ -1575,6 +1631,7 @@ class Dockercompose extends Docker { composeFileOnceHandledServices.add(service); } } + return true; } async resolveComposeServiceContext(container, currentImage) { From d9c6703bcad6bc891665d0f56ef67fa2aee3e004 Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 18:51:52 -0400 Subject: [PATCH 14/59] =?UTF-8?q?=F0=9F=90=9B=20fix(docker):=20support=20T?= =?UTF-8?q?LS=20backend=20in=20container=20healthcheck?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Healthcheck now detects DD_SERVER_TLS_ENABLED and switches to https://localhost with --insecure for the self-signed cert. Also fixes trailing newline at end of Dockerfile. --- Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index de37df74..964fed75 100644 --- a/Dockerfile +++ b/Dockerfile @@ -12,7 +12,7 @@ ENV WORKDIR=/home/node/app ENV DD_LOG_FORMAT=text ENV DD_VERSION=$DD_VERSION -HEALTHCHECK --interval=30s --timeout=5s CMD ["sh", "-c", "if [ -z \"$DD_SERVER_ENABLED\" ] || [ \"$DD_SERVER_ENABLED\" = 'true' ]; then curl --fail http://localhost:${DD_SERVER_PORT:-3000}/health || exit 1; else exit 0; fi"] +HEALTHCHECK --interval=30s --timeout=5s CMD ["sh", "-c", "if [ -n \"$DD_SERVER_ENABLED\" ] && [ \"$DD_SERVER_ENABLED\" != 'true' ]; then exit 0; fi; if [ \"$DD_SERVER_TLS_ENABLED\" = 'true' ]; then curl --fail --insecure https://localhost:${DD_SERVER_PORT:-3000}/health || exit 1; else curl --fail http://localhost:${DD_SERVER_PORT:-3000}/health || exit 1; fi"] # Install system packages, trivy, and cosign # hadolint ignore=DL3018,DL3028,DL4006 @@ -75,4 +75,4 @@ COPY --from=app-build /home/node/app/dist ./dist COPY --from=app-build /home/node/app/package.json ./package.json # Copy ui -COPY --from=ui-build /home/node/ui/dist/ ./ui \ No newline at end of file +COPY --from=ui-build /home/node/ui/dist/ ./ui From 
25300b47f23bdde9dade909af76f133ef5c28f97 Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 18:52:03 -0400 Subject: [PATCH 15/59] =?UTF-8?q?=F0=9F=90=9B=20fix(auth):=20accept=20lega?= =?UTF-8?q?cy=20v1.3.9=20htpasswd=20hash=20formats=20on=20upgrade?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Adds runtime verification for APR1/MD5 ($apr1$/$1$), DES crypt, and plain-text htpasswd hashes alongside existing SHA-1 and argon2id support. Schema validation relaxed to only reject malformed argon2id prefixes — all other formats pass through to runtime verification. SHA parseShaHash now decodes base64 and validates 20-byte digest length, with buffer-to-buffer timingSafeEqual comparison matching argon2id path. New deps: apache-md5 (MIT, 0 transitive), unix-crypt-td-js (BSD-3, 0 transitive) — both CJS-only, loaded via createRequire. UI banner updated from SHA-specific to generic legacy hash wording. --- CHANGELOG.md | 1 + README.md | 2 +- .../providers/basic/Basic.test.ts | 228 +++++++++++++++++- app/authentications/providers/basic/Basic.ts | 195 +++++++++++++-- app/package-lock.json | 17 ++ app/package.json | 2 + ui/src/layouts/AppLayout.vue | 38 +-- ui/tests/layouts/AppLayout.spec.ts | 12 +- 8 files changed, 432 insertions(+), 63 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index aac49543..a0ffdd6d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - **Server feature flags not loaded after login** — Feature flags (`containeractions`, `delete`) were permanently stuck as disabled when authentication was required, because the pre-login bootstrap fetch failure marked the flags as "loaded" and never retried. Now failed fetches allow automatic retry after login. 
([#120](https://github.com/CodesWhat/drydock/discussions/120)) - **Compose trigger silently skips containers** — Multiple failure paths in the compose trigger were logged at `debug` level, making it nearly impossible to diagnose why a trigger reports success but containers don't update. Key diagnostic messages (compose file mismatch, label inspect failure, no containers matched) promoted to `warn` level, and the "already up to date" message now includes container names. ([#84](https://github.com/CodesWhat/drydock/discussions/84)) - **Fallback icon cached permanently** — The Docker placeholder icon was served with `immutable` cache headers, causing browsers to cache it permanently even after the real provider icon becomes available. Fallback responses now use `no-store`. +- **Basic auth upgrade compatibility restored** — v1.4 now accepts legacy v1.3.9 Basic auth hashes (`{SHA}`, `$apr1$`/`$1$`, `crypt`, and plain fallback) to preserve smooth upgrades. Legacy formats remain deprecated and continue showing a migration banner, with removal still planned for v1.6.0. ### Changed diff --git a/README.md b/README.md index 8be5788d..2f7d315a 100644 --- a/README.md +++ b/README.md @@ -82,7 +82,7 @@ docker run -d \ > node -e 'const c=require("node:crypto");const s=c.randomBytes(32);const h=c.argon2Sync("argon2id",{message:process.argv[1],nonce:s,memory:65536,passes:3,parallelism:4,tagLength:64});console.log("argon2id$65536$3$4$"+s.toString("base64")+"$"+h.toString("base64"));' "yourpassword" > ``` > -> Legacy `{SHA}` hashes are accepted but deprecated (removed in v1.6.0). MD5/crypt/plain htpasswd hashes are not supported. +> Legacy v1.3.9 Basic auth hashes (`{SHA}`, `$apr1$`/`$1$`, `crypt`, and plain) are accepted for upgrade compatibility but deprecated (removed in v1.6.0). Argon2id is recommended for all new configurations. > Authentication is **required by default**. 
See the [auth docs](https://drydock.codeswhat.com/docs/configuration/authentications) for OIDC, anonymous access, and other options. > To explicitly allow anonymous access on fresh installs, set `DD_ANONYMOUS_AUTH_CONFIRM=true`. diff --git a/app/authentications/providers/basic/Basic.test.ts b/app/authentications/providers/basic/Basic.test.ts index eca1d60e..8da2cba6 100644 --- a/app/authentications/providers/basic/Basic.test.ts +++ b/app/authentications/providers/basic/Basic.test.ts @@ -56,6 +56,10 @@ function createShaHash(password: string) { const VALID_SALT_BASE64 = Buffer.alloc(16, 1).toString('base64'); const VALID_HASH_BASE64 = Buffer.alloc(32, 1).toString('base64'); +const LEGACY_APR1_HASH = '$apr1$r31.....$HqJZimcKQFAMYayBlzkrA/'; +const LEGACY_MD5_HASH = '$1$saltsalt$2vnaRpHa6Jxjz5n83ok8Z0'; +const LEGACY_CRYPT_HASH = 'rqXexS6ZhobKA'; +const LEGACY_PLAIN_HASH = 'plaintext-password'; describe('Basic Authentication', () => { let basic: InstanceType; @@ -393,7 +397,7 @@ describe('Basic Authentication', () => { ).toThrow('must be an argon2id hash'); }); - describe('SHA-1 legacy hash support', () => { + describe('legacy v1.3.9 hash support', () => { test('should accept SHA-1 hash in configuration schema', async () => { const hash = createShaHash('password'); expect( @@ -481,23 +485,54 @@ describe('Basic Authentication', () => { }); }); - test('should reject SHA-1 hash with invalid digest length', async () => { + test('should accept SHA-1 hash with invalid digest length in schema but reject authentication', async () => { const shortDigest = Buffer.alloc(10, 1).toString('base64'); - expect(() => + + expect( basic.validateConfiguration({ user: 'testuser', hash: `{SHA}${shortDigest}`, }), - ).toThrow('must be an argon2id hash'); + ).toEqual({ + user: 'testuser', + hash: `{SHA}${shortDigest}`, + }); + + basic.configuration = { + user: 'testuser', + hash: `{SHA}${shortDigest}`, + }; + + await new Promise((resolve) => { + basic.authenticate('testuser', 'password', 
(_err, result) => { + expect(result).toBe(false); + resolve(); + }); + }); }); - test('should reject SHA-1 hash with malformed base64', async () => { - expect(() => + test('should accept SHA-1 hash with malformed base64 in schema but reject authentication', async () => { + expect( basic.validateConfiguration({ user: 'testuser', hash: '{SHA}not*valid*base64', }), - ).toThrow('must be an argon2id hash'); + ).toEqual({ + user: 'testuser', + hash: '{SHA}not*valid*base64', + }); + + basic.configuration = { + user: 'testuser', + hash: '{SHA}not*valid*base64', + }; + + await new Promise((resolve) => { + basic.authenticate('testuser', 'password', (_err, result) => { + expect(result).toBe(false); + resolve(); + }); + }); }); test('should reject when SHA hash parsing fails during verification', async () => { @@ -560,14 +595,160 @@ describe('Basic Authentication', () => { createHashSpy.mockRestore(); }); - test('should reject unrecognized hash formats', async () => { + test('should accept APR1 hash in configuration schema', async () => { + expect( + basic.validateConfiguration({ + user: 'testuser', + hash: LEGACY_APR1_HASH, + }), + ).toEqual({ + user: 'testuser', + hash: LEGACY_APR1_HASH, + }); + }); + + test('should authenticate valid user with APR1 hash', async () => { + basic.configuration = { + user: 'testuser', + hash: LEGACY_APR1_HASH, + }; + + await new Promise((resolve) => { + basic.authenticate('testuser', 'myPassword', (_err, result) => { + expect(result).toEqual({ username: 'testuser' }); + resolve(); + }); + }); + }); + + test('should reject invalid password with APR1 hash', async () => { + basic.configuration = { + user: 'testuser', + hash: LEGACY_APR1_HASH, + }; + + await new Promise((resolve) => { + basic.authenticate('testuser', 'wrongpassword', (_err, result) => { + expect(result).toBe(false); + resolve(); + }); + }); + }); + + test('should accept $1$ MD5 hash in configuration schema', async () => { + expect( + basic.validateConfiguration({ + user: 
'testuser', + hash: LEGACY_MD5_HASH, + }), + ).toEqual({ + user: 'testuser', + hash: LEGACY_MD5_HASH, + }); + }); + + test('should authenticate valid user with $1$ MD5 hash', async () => { + basic.configuration = { + user: 'testuser', + hash: LEGACY_MD5_HASH, + }; + + await new Promise((resolve) => { + basic.authenticate('testuser', 'myPassword', (_err, result) => { + expect(result).toEqual({ username: 'testuser' }); + resolve(); + }); + }); + }); + + test('should reject invalid password with $1$ MD5 hash', async () => { + basic.configuration = { + user: 'testuser', + hash: LEGACY_MD5_HASH, + }; + + await new Promise((resolve) => { + basic.authenticate('testuser', 'wrongpassword', (_err, result) => { + expect(result).toBe(false); + resolve(); + }); + }); + }); + + test('should accept crypt hash in configuration schema', async () => { + expect( + basic.validateConfiguration({ + user: 'testuser', + hash: LEGACY_CRYPT_HASH, + }), + ).toEqual({ + user: 'testuser', + hash: LEGACY_CRYPT_HASH, + }); + }); + + test('should authenticate valid user with crypt hash', async () => { + basic.configuration = { + user: 'testuser', + hash: LEGACY_CRYPT_HASH, + }; + + await new Promise((resolve) => { + basic.authenticate('testuser', 'myPassword', (_err, result) => { + expect(result).toEqual({ username: 'testuser' }); + resolve(); + }); + }); + }); + + test('should reject invalid password with crypt hash', async () => { + basic.configuration = { + user: 'testuser', + hash: LEGACY_CRYPT_HASH, + }; + + await new Promise((resolve) => { + basic.authenticate('testuser', 'wrongpassword', (_err, result) => { + expect(result).toBe(false); + resolve(); + }); + }); + }); + + test('should accept plain hash fallback in configuration schema', async () => { + expect( + basic.validateConfiguration({ + user: 'testuser', + hash: LEGACY_PLAIN_HASH, + }), + ).toEqual({ + user: 'testuser', + hash: LEGACY_PLAIN_HASH, + }); + }); + + test('should authenticate valid user with plain hash fallback', async () 
=> { + basic.configuration = { + user: 'testuser', + hash: LEGACY_PLAIN_HASH, + }; + + await new Promise((resolve) => { + basic.authenticate('testuser', LEGACY_PLAIN_HASH, (_err, result) => { + expect(result).toEqual({ username: 'testuser' }); + resolve(); + }); + }); + }); + + test('should reject invalid password with plain hash fallback', async () => { basic.configuration = { user: 'testuser', - hash: 'plaintext-password', + hash: LEGACY_PLAIN_HASH, }; await new Promise((resolve) => { - basic.authenticate('testuser', 'plaintext-password', (err, result) => { + basic.authenticate('testuser', 'wrongpassword', (_err, result) => { expect(result).toBe(false); resolve(); }); @@ -591,6 +772,14 @@ describe('Basic Authentication', () => { }; expect(basic.getMetadata()).toEqual({ usesLegacyHash: true }); }); + + test('should return usesLegacyHash: true for APR1 hash', () => { + basic.configuration = { + user: 'testuser', + hash: LEGACY_APR1_HASH, + }; + expect(basic.getMetadata()).toEqual({ usesLegacyHash: true }); + }); }); describe('initAuthentication', () => { @@ -604,7 +793,24 @@ describe('Basic Authentication', () => { basic.initAuthentication(); - expect(warnFn).toHaveBeenCalledWith(expect.stringContaining('SHA-1 password hash detected')); + expect(warnFn).toHaveBeenCalledWith( + expect.stringContaining('Legacy password hash format detected (sha1)'), + ); + }); + + test('should log deprecation warning when APR1 hash is registered', () => { + const warnFn = vi.fn(); + basic.log = { warn: warnFn, info: vi.fn(), debug: vi.fn(), error: vi.fn() } as any; + basic.configuration = { + user: 'testuser', + hash: LEGACY_APR1_HASH, + }; + + basic.initAuthentication(); + + expect(warnFn).toHaveBeenCalledWith( + expect.stringContaining('Legacy password hash format detected (apr1)'), + ); }); test('should not log warning when argon2id hash is registered', () => { diff --git a/app/authentications/providers/basic/Basic.ts b/app/authentications/providers/basic/Basic.ts index 
9ac9e84d..fc4bc648 100644 --- a/app/authentications/providers/basic/Basic.ts +++ b/app/authentications/providers/basic/Basic.ts @@ -1,7 +1,12 @@ import { argon2, createHash, timingSafeEqual } from 'node:crypto'; +import { createRequire } from 'node:module'; import Authentication from '../Authentication.js'; import BasicStrategy from './BasicStrategy.js'; +const require = createRequire(import.meta.url); +const apacheMd5 = require('apache-md5') as (password: string, salt: string) => string; +const unixCrypt = require('unix-crypt-td-js') as (password: string, salt: string) => string; + function hashValue(value: string): Buffer { return createHash('sha256').update(value, 'utf8').digest(); } @@ -24,6 +29,23 @@ interface ParsedArgon2Hash { hash: Buffer; } +interface ParsedMd5Hash { + variant: 'apr1' | '1'; + salt: string; + encodedHash: string; +} + +interface ParsedCryptHash { + salt: string; + encodedHash: string; +} + +type LegacyHashFormat = 'sha1' | 'apr1' | 'md5' | 'crypt' | 'plain'; + +function normalizeHash(rawHash: string): string { + return rawHash.trim(); +} + function parsePositiveInteger(raw: string): number | undefined { if (!/^\d+$/.test(raw)) { return undefined; @@ -47,7 +69,7 @@ function decodeBase64(raw: string): Buffer | undefined { } function parseArgon2Hash(rawHash: string): ParsedArgon2Hash | undefined { - const parts = rawHash.split('$'); + const parts = normalizeHash(rawHash).split('$'); if (parts.length !== ARGON2_HASH_PARTS || parts[0] !== 'argon2id') { return undefined; } @@ -79,22 +101,97 @@ function parseArgon2Hash(rawHash: string): ParsedArgon2Hash | undefined { return { memory, passes, parallelism, salt, hash }; } +const SHA1_DIGEST_SIZE = 20; + function parseShaHash(rawHash: string): Buffer | undefined { - if (rawHash.length < 5) { + const normalizedHash = normalizeHash(rawHash); + if (normalizedHash.length < 5) { return undefined; } - const prefix = rawHash.substring(0, 5); + const prefix = normalizedHash.substring(0, 5); if 
(prefix.toLowerCase() !== '{sha}') { return undefined; } - const encoded = rawHash.substring(5); - const decoded = decodeBase64(encoded); - if (!decoded || decoded.length !== 20) { + const encoded = normalizedHash.substring(5); + if (!encoded) { + return undefined; + } + const decoded = Buffer.from(encoded, 'base64'); + if (decoded.length !== SHA1_DIGEST_SIZE) { return undefined; } return decoded; } +function parseMd5Hash(rawHash: string): ParsedMd5Hash | undefined { + const normalizedHash = normalizeHash(rawHash); + if (!normalizedHash.startsWith('$apr1$') && !normalizedHash.startsWith('$1$')) { + return undefined; + } + + const parts = normalizedHash.split('$'); + if (parts.length < 4) { + return undefined; + } + + const variant = parts[1]; + const salt = parts[2]; + if ((variant !== 'apr1' && variant !== '1') || !salt) { + return undefined; + } + + return { + variant, + salt, + encodedHash: normalizedHash, + }; +} + +function parseCryptHash(rawHash: string): ParsedCryptHash | undefined { + const normalizedHash = normalizeHash(rawHash); + if (normalizedHash.length !== 13) { + return undefined; + } + return { + salt: normalizedHash.substring(0, 2), + encodedHash: normalizedHash, + }; +} + +function timingSafeEqualString(left: string, right: string): boolean { + const leftBuffer = Buffer.from(left, 'utf8'); + const rightBuffer = Buffer.from(right, 'utf8'); + if (leftBuffer.length !== rightBuffer.length) { + return false; + } + + try { + return timingSafeEqual(leftBuffer, rightBuffer); + } catch { + return false; + } +} + +function getLegacyHashFormat(hash: string): LegacyHashFormat | undefined { + if (parseArgon2Hash(hash)) { + return undefined; + } + if (parseShaHash(hash) !== undefined) { + return 'sha1'; + } + + const md5Hash = parseMd5Hash(hash); + if (md5Hash) { + return md5Hash.variant === 'apr1' ? 
'apr1' : 'md5'; + } + + if (parseCryptHash(hash)) { + return 'crypt'; + } + + return 'plain'; +} + function deriveArgon2Password(password: string, parsedHash: ParsedArgon2Hash): Promise { return new Promise((resolve, reject) => { argon2( @@ -133,36 +230,80 @@ async function verifyArgon2Password(password: string, encodedHash: string): Prom } function verifyShaPassword(password: string, encodedHash: string): boolean { - const expectedHash = parseShaHash(encodedHash); - if (!expectedHash) { + const expectedDigest = parseShaHash(encodedHash); + if (!expectedDigest) { + return false; + } + + try { + const actualDigest = createHash('sha1').update(password).digest(); + return timingSafeEqual(actualDigest, expectedDigest); + } catch { + return false; + } +} + +function verifyMd5Password(password: string, encodedHash: string): boolean { + const parsedHash = parseMd5Hash(encodedHash); + if (!parsedHash) { return false; } try { - const actualHash = createHash('sha1').update(password).digest(); - return timingSafeEqual(actualHash, expectedHash); + const salt = `$${parsedHash.variant}$${parsedHash.salt}$`; + const actualHash = apacheMd5(password, salt); + return timingSafeEqualString(actualHash, parsedHash.encodedHash); + } catch { + return false; + } +} + +function verifyCryptPassword(password: string, encodedHash: string): boolean { + const parsedHash = parseCryptHash(encodedHash); + if (!parsedHash) { + return false; + } + + try { + const actualHash = unixCrypt(password, parsedHash.salt); + return timingSafeEqualString(actualHash, parsedHash.encodedHash); + } catch { + return false; + } +} + +function verifyPlainPassword(password: string, encodedHash: string): boolean { + try { + return timingSafeEqualString(password, normalizeHash(encodedHash)); } catch { return false; } } async function verifyPassword(password: string, encodedHash: string): Promise { - if (parseArgon2Hash(encodedHash)) { - return await verifyArgon2Password(password, encodedHash); + const normalizedHash = 
normalizeHash(encodedHash); + if (parseArgon2Hash(normalizedHash)) { + return await verifyArgon2Password(password, normalizedHash); + } + if (parseShaHash(normalizedHash)) { + return verifyShaPassword(password, normalizedHash); + } + if (parseMd5Hash(normalizedHash)) { + return verifyMd5Password(password, normalizedHash); } - if (parseShaHash(encodedHash)) { - return verifyShaPassword(password, encodedHash); + if (parseCryptHash(normalizedHash)) { + return verifyCryptPassword(password, normalizedHash); } - return false; + return verifyPlainPassword(password, normalizedHash); } -function isLegacyShaHash(hash: string): boolean { - return parseShaHash(hash) !== undefined; +function isLegacyHash(hash: string): boolean { + return getLegacyHashFormat(hash) !== undefined; } /** * Basic authentication backed by argon2id password hashes. - * Legacy SHA-1 {SHA} hashes are accepted with deprecation warnings. + * Legacy v1.3.9 hash formats are accepted with deprecation warnings. */ class Basic extends Authentication { /** @@ -174,27 +315,29 @@ class Basic extends Authentication { user: this.joi.string().required(), hash: this.joi .string() + .trim() .required() .custom((value: string, helpers: { error: (key: string) => unknown }) => { - if (parseArgon2Hash(value) || parseShaHash(value)) { - return value; + if (value.startsWith('argon2id$') && !parseArgon2Hash(value)) { + return helpers.error('any.invalid'); } - return helpers.error('any.invalid'); + return value; }, 'password hash validation') .messages({ 'any.invalid': - '"hash" must be an argon2id hash (argon2id$memory$passes$parallelism$salt$hash) or a legacy {SHA} hash', + '"hash" must be an argon2id hash (argon2id$memory$passes$parallelism$salt$hash) or a supported legacy v1.3.9 hash', }), }); } /** - * Init authentication. Log deprecation warning if SHA hash detected. + * Init authentication. Log deprecation warning if legacy hash is detected. 
*/ initAuthentication(): void { - if (isLegacyShaHash(this.configuration.hash)) { + const format = getLegacyHashFormat(this.configuration.hash); + if (format) { this.log.warn( - 'SHA-1 password hash detected — SHA-1 is deprecated and will be removed in v1.6.0. Migrate to argon2id hashing.', + `Legacy password hash format detected (${format}) — v1.3.9 formats (SHA, APR1/MD5, crypt, plain) are deprecated and will be removed in v1.6.0. Migrate to argon2id hashing.`, ); } } @@ -226,7 +369,7 @@ class Basic extends Authentication { getMetadata(): Record { return { - usesLegacyHash: isLegacyShaHash(this.configuration.hash), + usesLegacyHash: isLegacyHash(this.configuration.hash), }; } diff --git a/app/package-lock.json b/app/package-lock.json index f54f4485..d1995b47 100644 --- a/app/package-lock.json +++ b/app/package-lock.json @@ -13,6 +13,7 @@ "@slack/web-api": "^7.14.1", "ajv": "^8.18.0", "ajv-formats": "^3.0.1", + "apache-md5": "^1.1.8", "axios": "^1.13.6", "capitalize": "2.0.4", "change-case": "^5.4.4", @@ -52,6 +53,7 @@ "set-value": "4.1.0", "sort-es": "1.7.18", "undici": "^7.22.0", + "unix-crypt-td-js": "^1.1.4", "uuid": "^13.0.0", "yaml": "2.8.2" }, @@ -3416,6 +3418,15 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, + "node_modules/apache-md5": { + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/apache-md5/-/apache-md5-1.1.8.tgz", + "integrity": "sha512-FCAJojipPn0bXjuEpjOOOMN8FZDkxfWWp4JGN9mifU2IhxvKyXZYqpzPHdnTSUpmPDy+tsslB6Z1g+Vg6nVbYA==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, "node_modules/arg": { "version": "4.1.3", "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", @@ -7453,6 +7464,12 @@ "integrity": "sha512-AsuCzffGHJybSaRrmr5eHr81mwJU3kjw6M+uprWvCXiNeN9SOGwQ3Jn8jb8m3Z6izVgknn1R0FTCEAP2QrLY/w==", "license": "MIT" }, + "node_modules/unix-crypt-td-js": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/unix-crypt-td-js/-/unix-crypt-td-js-1.1.4.tgz", + "integrity": 
"sha512-8rMeVYWSIyccIJscb9NdCfZKSRBKYTeVnwmiRYT2ulE3qd1RaDQ0xQDP+rI3ccIWbhu/zuo5cgN8z73belNZgw==", + "license": "BSD-3-Clause" + }, "node_modules/unpipe": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", diff --git a/app/package.json b/app/package.json index c54c5d8f..3c6f2039 100644 --- a/app/package.json +++ b/app/package.json @@ -24,6 +24,7 @@ "@slack/web-api": "^7.14.1", "ajv": "^8.18.0", "ajv-formats": "^3.0.1", + "apache-md5": "^1.1.8", "axios": "^1.13.6", "capitalize": "2.0.4", "change-case": "^5.4.4", @@ -63,6 +64,7 @@ "set-value": "4.1.0", "sort-es": "1.7.18", "undici": "^7.22.0", + "unix-crypt-td-js": "^1.1.4", "uuid": "^13.0.0", "yaml": "2.8.2" }, diff --git a/ui/src/layouts/AppLayout.vue b/ui/src/layouts/AppLayout.vue index 34297d1f..2e77572d 100644 --- a/ui/src/layouts/AppLayout.vue +++ b/ui/src/layouts/AppLayout.vue @@ -245,9 +245,9 @@ const hideOidcHttpBannerPermanently = useStorageRef( false, (value): value is boolean => typeof value === 'boolean', ); -const shaHashDetected = ref(false); -const hideShaHashBannerForSession = ref(false); -const hideShaHashBannerPermanently = useStorageRef( +const legacyHashDetected = ref(false); +const hideLegacyHashBannerForSession = ref(false); +const hideLegacyHashBannerPermanently = useStorageRef( 'dd-banner-sha-hash-v1', false, (value): value is boolean => typeof value === 'boolean', @@ -574,7 +574,7 @@ function dismissOidcHttpBannerPermanently() { hideOidcHttpBannerPermanently.value = true; } -function isLegacyShaHash(authentication: unknown): boolean { +function isLegacyBasicHash(authentication: unknown): boolean { if (!authentication || typeof authentication !== 'object') { return false; } @@ -589,19 +589,19 @@ function isLegacyShaHash(authentication: unknown): boolean { return (metadata as Record).usesLegacyHash === true; } -const showShaHashDeprecationBanner = computed( +const showLegacyHashDeprecationBanner = computed( () => - shaHashDetected.value && - 
!hideShaHashBannerForSession.value && - !hideShaHashBannerPermanently.value, + legacyHashDetected.value && + !hideLegacyHashBannerForSession.value && + !hideLegacyHashBannerPermanently.value, ); -function dismissShaHashBannerForSession() { - hideShaHashBannerForSession.value = true; +function dismissLegacyHashBannerForSession() { + hideLegacyHashBannerForSession.value = true; } -function dismissShaHashBannerPermanently() { - hideShaHashBannerPermanently.value = true; +function dismissLegacyHashBannerPermanently() { + hideLegacyHashBannerPermanently.value = true; } async function refreshSearchResources() { @@ -619,8 +619,8 @@ async function refreshSearchResources() { oidcHttpDiscoveryDetected.value = Array.isArray(authentications) ? authentications.some((authentication) => isHttpOidcDiscovery(authentication)) : false; - shaHashDetected.value = Array.isArray(authentications) - ? authentications.some((authentication) => isLegacyShaHash(authentication)) + legacyHashDetected.value = Array.isArray(authentications) + ? authentications.some((authentication) => isLegacyBasicHash(authentication)) : false; searchResourceResults.value = buildSearchIndexResults({ agents, @@ -1307,13 +1307,13 @@ onUnmounted(() => { - Your basic authentication uses an insecure SHA-1 password hash. SHA-1 hashing is deprecated and will be removed in v1.6.0. Migrate to argon2id hashing. + @dismiss="dismissLegacyHashBannerForSession" + @dismiss-permanent="dismissLegacyHashBannerPermanently"> + Your basic authentication uses a legacy password hash format. Legacy v1.3.9 formats are deprecated and will be removed in v1.6.0. Migrate to argon2id hashing. 
diff --git a/ui/tests/layouts/AppLayout.spec.ts b/ui/tests/layouts/AppLayout.spec.ts index 1c2e46b7..166af045 100644 --- a/ui/tests/layouts/AppLayout.spec.ts +++ b/ui/tests/layouts/AppLayout.spec.ts @@ -420,7 +420,7 @@ describe('AppLayout', () => { expect(wrapper.find('[data-testid="oidc-http-compat-banner"]').exists()).toBe(false); }); - it('shows a SHA-1 hash deprecation banner when basic auth uses legacy SHA hash', async () => { + it('shows a legacy hash deprecation banner when basic auth uses non-argon hash', async () => { mockGetAllAuthentications.mockResolvedValue([ { id: 'basic.admin', @@ -437,10 +437,10 @@ describe('AppLayout', () => { const banner = wrapper.find('[data-testid="sha-hash-deprecation-banner"]'); expect(banner.exists()).toBe(true); - expect(banner.text()).toContain('SHA-1 hashing is deprecated'); + expect(banner.text()).toContain('legacy password hash format'); }); - it('supports dismissing SHA-1 hash deprecation banner for current session', async () => { + it('supports dismissing legacy hash deprecation banner for current session', async () => { mockGetAllAuthentications.mockResolvedValue([ { id: 'basic.admin', @@ -465,7 +465,7 @@ describe('AppLayout', () => { expect(wrapper.find('[data-testid="sha-hash-deprecation-banner"]').exists()).toBe(false); }); - it('supports permanently dismissing SHA-1 hash deprecation banner', async () => { + it('supports permanently dismissing legacy hash deprecation banner', async () => { mockGetAllAuthentications.mockResolvedValue([ { id: 'basic.admin', @@ -491,7 +491,7 @@ describe('AppLayout', () => { expect(localStorage.getItem('dd-banner-sha-hash-v1')).toBe('true'); }); - it('does not show SHA-1 hash deprecation banner after permanent dismissal is persisted', async () => { + it('does not show legacy hash deprecation banner after permanent dismissal is persisted', async () => { localStorage.setItem('dd-banner-sha-hash-v1', 'true'); mockGetAllAuthentications.mockResolvedValue([ { @@ -510,7 +510,7 @@ 
describe('AppLayout', () => { expect(wrapper.find('[data-testid="sha-hash-deprecation-banner"]').exists()).toBe(false); }); - it('does not show SHA-1 hash deprecation banner when basic auth uses argon2id hash', async () => { + it('does not show legacy hash deprecation banner when basic auth uses argon2id hash', async () => { mockGetAllAuthentications.mockResolvedValue([ { id: 'basic.admin', From 566ff6f068d89b96a98eb0a7c910cab5a662e722 Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 20:35:50 -0400 Subject: [PATCH 16/59] =?UTF-8?q?=F0=9F=90=9B=20fix(trigger):=20accept=20l?= =?UTF-8?q?owercased=20env=20var=20keys=20for=20camelCase=20compose=20conf?= =?UTF-8?q?ig?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Env vars like DD_TRIGGER_DOCKERCOMPOSE_*_COMPOSEFILEONCE are lowercased by the config parser but the Joi schema expected camelCase keys (composeFileOnce, composeFileLabel, reconciliationMode, digestPinning). Add Joi .rename() mappings from lowercase to camelCase for all four keys. 
--- .../dockercompose/Dockercompose.test.ts | 20 ++++++++++ .../providers/dockercompose/Dockercompose.ts | 37 ++++++++++++++----- 2 files changed, 47 insertions(+), 10 deletions(-) diff --git a/app/triggers/providers/dockercompose/Dockercompose.test.ts b/app/triggers/providers/dockercompose/Dockercompose.test.ts index 099201f4..38548703 100644 --- a/app/triggers/providers/dockercompose/Dockercompose.test.ts +++ b/app/triggers/providers/dockercompose/Dockercompose.test.ts @@ -3354,6 +3354,26 @@ describe('Dockercompose Trigger', () => { expect(error).toBeUndefined(); }); + test('getConfigurationSchema should accept env-normalized compose hardening keys', () => { + const schema = trigger.getConfigurationSchema(); + const { error, value } = schema.validate({ + prune: false, + dryrun: false, + autoremovetimeout: 10000, + file: '/opt/drydock/test/compose.yml', + backup: true, + composefilelabel: 'com.example.compose.file', + reconciliationmode: 'block', + digestpinning: true, + composefileonce: true, + }); + expect(error).toBeUndefined(); + expect(value.composeFileLabel).toBe('com.example.compose.file'); + expect(value.reconciliationMode).toBe('block'); + expect(value.digestPinning).toBe(true); + expect(value.composeFileOnce).toBe(true); + }); + test('normalizeImplicitLatest should return input when image is empty or already digest/tag qualified', () => { expect(testable_normalizeImplicitLatest('')).toBe(''); expect(testable_normalizeImplicitLatest('alpine@sha256:abc')).toBe('alpine@sha256:abc'); diff --git a/app/triggers/providers/dockercompose/Dockercompose.ts b/app/triggers/providers/dockercompose/Dockercompose.ts index 5e3b9938..b8c6ac5a 100644 --- a/app/triggers/providers/dockercompose/Dockercompose.ts +++ b/app/triggers/providers/dockercompose/Dockercompose.ts @@ -447,16 +447,33 @@ class Dockercompose extends Docker { */ getConfigurationSchema() { const schemaDocker = super.getConfigurationSchema(); - return schemaDocker.append({ - // Make file optional since we 
now support per-container compose files - file: this.joi.string().optional(), - backup: this.joi.boolean().default(false), - // Add configuration for the label name to look for - composeFileLabel: this.joi.string().default('dd.compose.file'), - reconciliationMode: this.joi.string().valid('warn', 'block', 'off').default('warn'), - digestPinning: this.joi.boolean().default(false), - composeFileOnce: this.joi.boolean().default(false), - }); + return schemaDocker + .append({ + // Make file optional since we now support per-container compose files + file: this.joi.string().optional(), + backup: this.joi.boolean().default(false), + // Add configuration for the label name to look for + composeFileLabel: this.joi.string().default('dd.compose.file'), + reconciliationMode: this.joi.string().valid('warn', 'block', 'off').default('warn'), + digestPinning: this.joi.boolean().default(false), + composeFileOnce: this.joi.boolean().default(false), + }) + .rename('composefilelabel', 'composeFileLabel', { + ignoreUndefined: true, + override: true, + }) + .rename('reconciliationmode', 'reconciliationMode', { + ignoreUndefined: true, + override: true, + }) + .rename('digestpinning', 'digestPinning', { + ignoreUndefined: true, + override: true, + }) + .rename('composefileonce', 'composeFileOnce', { + ignoreUndefined: true, + override: true, + }); } async initTrigger() { From 9f07649abefc30a96201949d4e84675711671dc6 Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 20:36:19 -0400 Subject: [PATCH 17/59] =?UTF-8?q?=E2=9C=A8=20feat(trigger):=20preserve=20e?= =?UTF-8?q?xplicit=20docker.io=20registry=20prefix=20in=20compose=20mutati?= =?UTF-8?q?ons?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit When a compose file uses an explicit docker.io/ prefix (e.g. 
docker.io/collabora/code:25.04.9.1.1), the updated image reference now preserves that prefix instead of stripping it to a bare library path. Does not apply when the target already has a different registry host. --- .../dockercompose/Dockercompose.test.ts | 34 ++++++++++ .../providers/dockercompose/Dockercompose.ts | 65 ++++++++++++++----- 2 files changed, 83 insertions(+), 16 deletions(-) diff --git a/app/triggers/providers/dockercompose/Dockercompose.test.ts b/app/triggers/providers/dockercompose/Dockercompose.test.ts index 38548703..fbac4915 100644 --- a/app/triggers/providers/dockercompose/Dockercompose.test.ts +++ b/app/triggers/providers/dockercompose/Dockercompose.test.ts @@ -4558,6 +4558,40 @@ describe('Dockercompose Trigger', () => { ).toBe('nginx:1.1.0'); }); + test('getComposeMutationImageReference should preserve explicit docker.io prefix from compose image', () => { + const container = makeContainer({ + updateKind: 'digest', + remoteValue: 'abc123', + result: {}, + }); + + trigger.configuration.digestPinning = false; + expect( + trigger.getComposeMutationImageReference( + container as any, + 'nginx:1.1.0', + 'docker.io/nginx:1.0.0', + ), + ).toBe('docker.io/nginx:1.1.0'); + + trigger.configuration.digestPinning = true; + expect( + trigger.getComposeMutationImageReference( + container as any, + 'nginx:1.1.0', + 'docker.io/nginx:1.0.0', + ), + ).toBe('docker.io/nginx@sha256:abc123'); + + expect( + trigger.getComposeMutationImageReference( + container as any, + 'ghcr.io/acme/nginx:1.1.0', + 'docker.io/nginx:1.0.0', + ), + ).toBe('ghcr.io/acme/nginx@sha256:abc123'); + }); + test('buildComposeServiceImageUpdates should use runtime update image when compose update override is missing', () => { const serviceUpdates = trigger.buildComposeServiceImageUpdates([ { diff --git a/app/triggers/providers/dockercompose/Dockercompose.ts b/app/triggers/providers/dockercompose/Dockercompose.ts index b8c6ac5a..02388649 100644 --- 
a/app/triggers/providers/dockercompose/Dockercompose.ts +++ b/app/triggers/providers/dockercompose/Dockercompose.ts @@ -180,6 +180,35 @@ function normalizeImplicitLatest(image) { return `${image}:latest`; } +function hasExplicitRegistryHost(imageReference: string): boolean { + if (!imageReference) { + return false; + } + const referenceWithoutDigest = imageReference.split('@')[0]; + const firstSlashIndex = referenceWithoutDigest.indexOf('/'); + if (firstSlashIndex < 0) { + return false; + } + const firstSegment = referenceWithoutDigest.slice(0, firstSlashIndex); + return firstSegment.includes('.') || firstSegment.includes(':') || firstSegment === 'localhost'; +} + +function preserveExplicitDockerIoPrefix( + currentComposeImage: string | null | undefined, + targetImageReference: string, +): string { + if (!targetImageReference || typeof currentComposeImage !== 'string') { + return targetImageReference; + } + if (!/^docker\.io\//i.test(currentComposeImage.trim())) { + return targetImageReference; + } + if (hasExplicitRegistryHost(targetImageReference)) { + return targetImageReference; + } + return `docker.io/${targetImageReference}`; +} + function normalizePostStartHooks(postStart) { if (!postStart) { return []; @@ -914,22 +943,22 @@ class Dockercompose extends Docker { getComposeMutationImageReference( container: RuntimeUpdateContainerReference, runtimeUpdateImage: string, + currentComposeImage?: string, ): string { - if (this.configuration.digestPinning !== true) { - return runtimeUpdateImage; - } - const digestPinningCandidate = - container?.result?.digest || - (container?.updateKind?.kind === 'digest' ? 
container?.updateKind?.remoteValue : undefined); - const digestToPin = this.normalizeDigestPinningValue(digestPinningCandidate); - if (!digestToPin) { - return runtimeUpdateImage; - } - const imageName = this.getImageNameFromReference(runtimeUpdateImage); - if (!imageName) { - return runtimeUpdateImage; + let composeMutationReference = runtimeUpdateImage; + if (this.configuration.digestPinning === true) { + const digestPinningCandidate = + container?.result?.digest || + (container?.updateKind?.kind === 'digest' ? container?.updateKind?.remoteValue : undefined); + const digestToPin = this.normalizeDigestPinningValue(digestPinningCandidate); + if (digestToPin) { + const imageName = this.getImageNameFromReference(runtimeUpdateImage); + if (imageName) { + composeMutationReference = `${imageName}@${digestToPin}`; + } + } } - return `${imageName}@${digestToPin}`; + return preserveExplicitDockerIoPrefix(currentComposeImage, composeMutationReference); } getContainerRuntimeImageReference(container: RegistryImageContainerReference): string { @@ -1508,7 +1537,11 @@ class Dockercompose extends Docker { return undefined; } const runtimeImage = this.getContainerRuntimeImageReference(container); - const composeUpdate = this.getComposeMutationImageReference(container, map.update); + const composeUpdate = this.getComposeMutationImageReference( + container, + map.update, + map.current, + ); return { container, runtimeImage, @@ -1695,7 +1728,7 @@ class Dockercompose extends Docker { const currentServiceImage = mapping?.current || (compose as Record)?.services?.[service]?.image; const targetServiceImage = mapping - ? this.getComposeMutationImageReference(container, mapping.update) + ? 
this.getComposeMutationImageReference(container, mapping.update, currentServiceImage) : preview.newImage; const composePreview = { files: composeFiles, From d7cd180d1858007206a173849f4a1aebe81b2c31 Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 21:27:08 -0400 Subject: [PATCH 18/59] =?UTF-8?q?=F0=9F=90=9B=20fix(demo):=20default=20the?= =?UTF-8?q?me=20to=20system=20variant=20for=20OS=20light/dark=20match?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit First-time demo visitors now see a theme matching their OS preference instead of always defaulting to dark mode. Existing preferences are preserved — the localStorage seed only applies when no dd-preferences key exists yet. --- apps/demo/src/main.ts | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/apps/demo/src/main.ts b/apps/demo/src/main.ts index 3afa25b9..c1f1b22f 100644 --- a/apps/demo/src/main.ts +++ b/apps/demo/src/main.ts @@ -6,6 +6,7 @@ * 3. Boot the real Vue UI (imported from ../../ui/src via Vite alias) */ +import { DEFAULTS } from '@/preferences/schema'; import { FakeEventSource } from './mocks/sse'; // Patch EventSource BEFORE any UI code loads — the SSE service @@ -23,6 +24,18 @@ async function boot() { // Import demo CSS for Tailwind @source directive await import('./demo.css'); + // Default demo theme to 'system' variant so it follows the user's OS + // light/dark preference, matching the surrounding website. 
+ if (!localStorage.getItem('dd-preferences')) { + localStorage.setItem( + 'dd-preferences', + JSON.stringify({ + ...structuredClone(DEFAULTS), + theme: { family: 'one-dark', variant: 'system' }, + }), + ); + } + // Now boot the real UI await import('@/main'); From e839194cc0272eaf46efa43e640866c51b21d37f Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 21:27:15 -0400 Subject: [PATCH 19/59] =?UTF-8?q?=F0=9F=90=9B=20fix(demo):=20navigate=20th?= =?UTF-8?q?eme=20editor=20button=20to=20appearance=20tab?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The "Theme Editor" button in both inline and fullscreen demo modes now navigates to /config?tab=appearance instead of /config, landing directly on the Appearance tab rather than the default General tab. --- apps/web/components/demo-section.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/apps/web/components/demo-section.tsx b/apps/web/components/demo-section.tsx index 5ff619d0..9a0aed4e 100644 --- a/apps/web/components/demo-section.tsx +++ b/apps/web/components/demo-section.tsx @@ -204,7 +204,7 @@ export function DemoSection() { {/* Action Buttons (inline only) */} {mode === "inline" && (
- @@ -261,7 +261,7 @@ export function DemoSection() {
- From 3336eff75abac1a4dfc33863a117c42438fa0e41 Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 21:28:53 -0400 Subject: [PATCH 20/59] =?UTF-8?q?=F0=9F=93=9D=20docs:=20add=20changelog=20?= =?UTF-8?q?entries=20for=20rc.12=20fixes?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Add entries to both CHANGELOG.md and the published docs changelog for all rc.12 fixes: compose case-sensitivity, docker.io prefix, directory resolution, TLS healthcheck, agent CAFILE, mTLS support, and SW origin validation. --- CHANGELOG.md | 7 +++++++ content/docs/current/changelog/index.mdx | 23 +++++++++++++++++++++++ 2 files changed, 30 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a0ffdd6d..677501be 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Added - **Audit log for container state changes** — External container lifecycle events (start, stop, restart via Portainer or CLI) now generate `container-update` audit entries with the new status, so the audit log reflects all state changes, not just Drydock-initiated actions. ([#120](https://github.com/CodesWhat/drydock/discussions/120)) +- **mTLS client certificate support** — Registry providers now accept `CERTFILE` and `KEYFILE` options for mutual TLS authentication with private registries that require client certificates. ### Fixed @@ -21,6 +22,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - **Compose trigger silently skips containers** — Multiple failure paths in the compose trigger were logged at `debug` level, making it nearly impossible to diagnose why a trigger reports success but containers don't update. 
Key diagnostic messages (compose file mismatch, label inspect failure, no containers matched) promoted to `warn` level, and the "already up to date" message now includes container names. ([#84](https://github.com/CodesWhat/drydock/discussions/84)) - **Fallback icon cached permanently** — The Docker placeholder icon was served with `immutable` cache headers, causing browsers to cache it permanently even after the real provider icon becomes available. Fallback responses now use `no-store`. - **Basic auth upgrade compatibility restored** — v1.4 now accepts legacy v1.3.9 Basic auth hashes (`{SHA}`, `$apr1$`/`$1$`, `crypt`, and plain fallback) to preserve smooth upgrades. Legacy formats remain deprecated and continue showing a migration banner, with removal still planned for v1.6.0. +- **Compose trigger rejects lowercase env var keys** — Configuration keys like `COMPOSEFILEONCE`, `DIGESTPINNING`, and `RECONCILIATIONMODE` were lowercased by the env parser but the Joi schema expected camelCase. Schema now maps lowercase keys to their camelCase equivalents. ([#120](https://github.com/CodesWhat/drydock/discussions/120)) +- **Compose trigger strips docker.io prefix** — When a compose file uses an explicit `docker.io/` registry prefix, compose mutations now preserve it instead of stripping it to a bare library path. ([#120](https://github.com/CodesWhat/drydock/discussions/120)) +- **Compose trigger fails when FILE points to directory** — `DD_TRIGGER_DOCKERCOMPOSE_{name}_FILE` now accepts directories, automatically probing for `compose.yaml`, `compose.yml`, `docker-compose.yaml`, or `docker-compose.yml` inside the directory. ([#84](https://github.com/CodesWhat/drydock/discussions/84)) +- **Container healthcheck fails with TLS backend** — The Dockerfile healthcheck now detects `DD_SERVER_TLS_ENABLED=true` and switches to `curl --insecure https://` for self-signed certificates. Also skips the healthcheck entirely when `DD_SERVER_ENABLED=false`. 
([#120](https://github.com/CodesWhat/drydock/discussions/120))
+- **Agent CAFILE ignored without CERTFILE** — The agent subsystem now loads the CA certificate from `CAFILE` even when `CERTFILE` is not provided, fixing TLS verification for agents behind reverse proxies with custom CA chains.
+- **Service worker accepts cross-origin postMessage** — The demo service worker now validates `postMessage` origins against the current host, preventing potential cross-origin message injection.
 
 ### Changed
 
diff --git a/content/docs/current/changelog/index.mdx b/content/docs/current/changelog/index.mdx
index fb2ef9bc..de8ebe0b 100644
--- a/content/docs/current/changelog/index.mdx
+++ b/content/docs/current/changelog/index.mdx
@@ -13,6 +13,29 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ## [Unreleased]
 
+### Added
+
+- **Audit log for container state changes** — External container lifecycle events (start, stop, restart via Portainer or CLI) now generate `container-update` audit entries with the new status, so the audit log reflects all state changes, not just Drydock-initiated actions. ([#120](https://github.com/CodesWhat/drydock/discussions/120))
+- **mTLS client certificate support** — Registry providers now accept `CLIENTCERT` and `CLIENTKEY` options for mutual TLS authentication with private registries that require client certificates.
+
+### Fixed
+
+- **Log level setting had no effect** — `DD_LOG_LEVEL=debug` was correctly parsed but debug messages were silently dropped because pino's multistream destinations defaulted to `info` level. Stream destinations now inherit the configured log level. ([#134](https://github.com/CodesWhat/drydock/issues/134))
+- **Server feature flags not loaded after login** — Feature flags (`containeractions`, `delete`) were permanently stuck as disabled when authentication was required, because the pre-login bootstrap fetch failure marked the flags as "loaded" and never retried. 
Now failed fetches allow automatic retry after login. ([#120](https://github.com/CodesWhat/drydock/discussions/120)) +- **Compose trigger silently skips containers** — Multiple failure paths in the compose trigger were logged at `debug` level, making it nearly impossible to diagnose why a trigger reports success but containers don't update. Key diagnostic messages (compose file mismatch, label inspect failure, no containers matched) promoted to `warn` level, and the "already up to date" message now includes container names. ([#84](https://github.com/CodesWhat/drydock/discussions/84)) +- **Fallback icon cached permanently** — The Docker placeholder icon was served with `immutable` cache headers, causing browsers to cache it permanently even after the real provider icon becomes available. Fallback responses now use `no-store`. +- **Basic auth upgrade compatibility restored** — v1.4 now accepts legacy v1.3.9 Basic auth hashes (`{SHA}`, `$apr1$`/`$1$`, `crypt`, and plain fallback) to preserve smooth upgrades. Legacy formats remain deprecated and continue showing a migration banner, with removal still planned for v1.6.0. +- **Compose trigger rejects lowercase env var keys** — Configuration keys like `COMPOSEFILEONCE`, `DIGESTPINNING`, and `RECONCILIATIONMODE` were lowercased by the env parser but the Joi schema expected camelCase. Schema now maps lowercase keys to their camelCase equivalents. ([#120](https://github.com/CodesWhat/drydock/discussions/120)) +- **Compose trigger strips docker.io prefix** — When a compose file uses an explicit `docker.io/` registry prefix, compose mutations now preserve it instead of stripping it to a bare library path. ([#120](https://github.com/CodesWhat/drydock/discussions/120)) +- **Compose trigger fails when FILE points to directory** — `DD_TRIGGER_DOCKERCOMPOSE_{name}_FILE` now accepts directories, automatically probing for `compose.yaml`, `compose.yml`, `docker-compose.yaml`, or `docker-compose.yml` inside the directory. 
([#84](https://github.com/CodesWhat/drydock/discussions/84)) +- **Container healthcheck fails with TLS backend** — The Dockerfile healthcheck now detects `DD_SERVER_TLS_ENABLED=true` and switches to `curl --insecure https://` for self-signed certificates. Also skips the healthcheck entirely when `DD_SERVER_ENABLED=false`. ([#120](https://github.com/CodesWhat/drydock/discussions/120)) +- **Agent CAFILE ignored without CERTFILE** — The agent subsystem now loads the CA certificate from `CAFILE` even when `CERTFILE` is not provided, fixing TLS verification for agents behind reverse proxies with custom CA chains. +- **Service worker accepts cross-origin postMessage** — The demo service worker now validates `postMessage` origins against the current host, preventing potential cross-origin message injection. + +### Changed + +- **MQTT HASS_ATTRIBUTES default changed to `short`** — The MQTT trigger `HASS_ATTRIBUTES` preset now defaults to `short` instead of `full`, excluding large SBOM documents, scan vulnerabilities, details, and labels from Home Assistant entity payloads. Users who need the full payload can set `DD_TRIGGER_MQTT_{name}_HASS_ATTRIBUTES=full` explicitly. 
+ ## [1.4.0] — 2026-02-28 ### Added From 319402debd803b7461e3af5e115bd508444a033e Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 21:29:01 -0400 Subject: [PATCH 21/59] =?UTF-8?q?=F0=9F=93=9D=20docs(config):=20update=20d?= =?UTF-8?q?ocs=20for=20basic=20auth=20legacy=20hashes,=20TLS=20healthcheck?= =?UTF-8?q?,=20and=20compose=20trigger?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Basic auth: list all accepted legacy hash formats (SHA-1, APR1, MD5-crypt, DES crypt, plain) - Server: document automatic HTTPS healthcheck when TLS is enabled - Compose trigger: document directory resolution for FILE and case-insensitive env var keys --- .../current/configuration/authentications/basic/index.mdx | 2 +- content/docs/current/configuration/server/index.mdx | 4 ++++ .../current/configuration/triggers/docker-compose/index.mdx | 4 +++- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/content/docs/current/configuration/authentications/basic/index.mdx b/content/docs/current/configuration/authentications/basic/index.mdx index a0240010..b169d9d6 100644 --- a/content/docs/current/configuration/authentications/basic/index.mdx +++ b/content/docs/current/configuration/authentications/basic/index.mdx @@ -82,4 +82,4 @@ node -e ' ' "yourpassword" ``` -Legacy `{SHA}` hashes from WUD/htpasswd are still accepted but deprecated. They will be removed in v1.6.0. Use the commands above to generate an argon2id hash and update your `DD_AUTH_BASIC_*_HASH` values. +Legacy htpasswd hash formats from WUD/v1.3.x — `{SHA}` (SHA-1), `$apr1$` (Apache APR1-MD5), `$1$` (MD5-crypt), DES crypt, and plain text — are still accepted at runtime but deprecated. They will be removed in v1.6.0. Use the commands above to generate an argon2id hash and update your `DD_AUTH_BASIC_*_HASH` values. 
diff --git a/content/docs/current/configuration/server/index.mdx b/content/docs/current/configuration/server/index.mdx index 39fd7731..73d22aa2 100644 --- a/content/docs/current/configuration/server/index.mdx +++ b/content/docs/current/configuration/server/index.mdx @@ -44,6 +44,10 @@ For production deployments, set an explicit trusted origin: - `DD_SERVER_CORS_ORIGIN=https://drydock.example.com` - `DD_SERVER_CORS_ORIGIN=https://ops.example.com` +## Container Healthcheck + +The official Docker image includes a built-in `HEALTHCHECK` that polls the `/health` endpoint. When `DD_SERVER_TLS_ENABLED=true`, the healthcheck automatically switches to HTTPS (with `--insecure` for self-signed certificates). No additional configuration is needed. + ## Plain HTTP Deployments When `DD_SERVER_TLS_ENABLED` is not set or is `false`, drydock automatically adjusts its security headers for plain HTTP: diff --git a/content/docs/current/configuration/triggers/docker-compose/index.mdx b/content/docs/current/configuration/triggers/docker-compose/index.mdx index 34e65e11..2db0598e 100644 --- a/content/docs/current/configuration/triggers/docker-compose/index.mdx +++ b/content/docs/current/configuration/triggers/docker-compose/index.mdx @@ -26,7 +26,7 @@ The trigger will: | Env var | Required | Description | Supported values | Default value when missing | | --- | :---: | --- | --- | --- | -| `DD_TRIGGER_DOCKERCOMPOSE_{trigger_name}_FILE` | ⚪ | The docker-compose.yml file location (can also be set per container via the `dd.compose.file` label) | | | +| `DD_TRIGGER_DOCKERCOMPOSE_{trigger_name}_FILE` | ⚪ | The docker-compose.yml file location or directory (can also be set per container via the `dd.compose.file` label). When a directory is given, Drydock probes for `compose.yaml`, `compose.yml`, `docker-compose.yaml`, and `docker-compose.yml` in order. | | | | `DD_TRIGGER_DOCKERCOMPOSE_{trigger_name}_BACKUP` | ⚪ | Backup the docker-compose.yml file as `.back` before updating? 
| `true`, `false` | `false` | | `DD_TRIGGER_DOCKERCOMPOSE_{trigger_name}_PRUNE` | ⚪ | If the old image must be pruned after upgrade | `true`, `false` | `false` | | `DD_TRIGGER_DOCKERCOMPOSE_{trigger_name}_DRYRUN` | ⚪ | When enabled, only pull the new image ahead of time | `true`, `false` | `false` | @@ -39,6 +39,8 @@ The trigger will: This trigger also supports the [common configuration variables](/docs/configuration/triggers/#common-trigger-configuration). but only supports the `batch` mode. +The env var keys for `COMPOSEFILEONCE`, `COMPOSEFILELABEL`, `RECONCILIATIONMODE`, and `DIGESTPINNING` are case-insensitive — both the lowercased form (e.g. `composefileonce`) and the camelCase form (e.g. `composeFileOnce`) are accepted. + Legacy compatibility: compose file label fallback `wud.compose.file` is still accepted when `dd.compose.file` is not present. Prefer `dd.compose.file` for new configs, and use `node dist/index.js config migrate` to rewrite existing labels. ## Auto-detection From b17129a15ccadd081944031b1684a3b38589a3f4 Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 21:29:09 -0400 Subject: [PATCH 22/59] =?UTF-8?q?=E2=9C=85=20test(qa):=20add=20rc.12=20QA?= =?UTF-8?q?=20compose=20environment=20and=20test=20stacks?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit QA compose file with labeled services covering all 14 rc.12 fixes: compose directory resolution, docker.io prefix, case-sensitivity, legacy auth hashes, TLS healthcheck, log level, feature flags, audit entries, MQTT attributes, trigger logging, and icon caching. 
--- test/qa-rc12-fixes.yml | 204 +++++++++++++++++++ test/qa-rc12-stacks/filebrowser/compose.yaml | 7 + 2 files changed, 211 insertions(+) create mode 100644 test/qa-rc12-fixes.yml create mode 100644 test/qa-rc12-stacks/filebrowser/compose.yaml diff --git a/test/qa-rc12-fixes.yml b/test/qa-rc12-fixes.yml new file mode 100644 index 00000000..714d5ae8 --- /dev/null +++ b/test/qa-rc12-fixes.yml @@ -0,0 +1,204 @@ +# QA E2E — rc.12 tracker fixes +# Verifies all 14 rc.12 fixes end-to-end against running drydock instances. +# +# Usage: +# docker build -t drydock:dev . +# docker compose -f test/qa-rc12-fixes.yml up -d +# docker compose -f test/qa-rc12-fixes.yml ps # wait for all healthy +# # run Playwright MCP scenarios +# docker compose -f test/qa-rc12-fixes.yml down -v +# +# Scenarios covered: +# 1. Compose directory resolution (208245ec) +# 2. docker.io prefix preservation (16598ba0) +# 3. Compose config case-sensitivity (14b468fe) +# 4a. Legacy SHA-1 hash login (8ba1a6f2) +# 4b. Legacy APR1 hash login (8ba1a6f2) +# 4c. Legacy DES crypt hash login (8ba1a6f2) +# 4d. Legacy plaintext hash login (8ba1a6f2) +# 5. TLS healthcheck (4e6c5d96) +# 6. Log level propagation (b0e48186) +# 7. Feature flags after login (82aa55d2) +# 8. Container-update audit (632e478c) +# 9. MQTT HASS_ATTRIBUTES (8401c205) +# 10. Trigger warn logging (98547dd3) +# 13. 
Icon cache headers (2b342a5e) + +services: + # ── Main drydock instance ─────────────────────────────── + # Covers: Scenario 1, 2, 3, 6, 7, 8, 9, 10, 13 + drydock-rc12: + image: drydock:dev + container_name: drydock-rc12 + user: root + ports: + - "3400:3000" + volumes: + - /var/run/docker.sock:/var/run/docker.sock + - ./qa-rc12-stacks:/drydock/stacks:rw + environment: + - DD_RUN_AS_ROOT=true + - DD_ALLOW_INSECURE_ROOT=true + - DD_LOG_FORMAT=text + # Scenario 6: log level propagation + - DD_LOG_LEVEL=debug + - DD_WATCHER_LOCAL_WATCHBYDEFAULT=false + - DD_WATCHER_LOCAL_CRON=*/2 * * * * + - DD_SESSION_SECRET=qa-rc12-session-secret + - DD_PUBLIC_URL=http://localhost:3400 + # Auth (argon2id — normal login for main instance) + - DD_AUTH_BASIC_ADMIN_USER=admin + - "DD_AUTH_BASIC_ADMIN_HASH=argon2id$$65536$$3$$4$$zUAK9+ktVWNHyQdv3SaOSgMv3T02F1Zj8D+t1un7D98=$$KEmn6d94w03YxIlw7U7l/ikD9lW+H3IC1N7xbAhOB9yKofA3HoxUBmuvBawvShhv337zDv4+g9hobNWeQEWwkQ==" + # Scenario 1: compose directory resolution — FILE points to directory + - DD_TRIGGER_DOCKERCOMPOSE_DOCKGE_FILE=/drydock/stacks/filebrowser + # Scenario 3: lowercase env var keys for compose config + - DD_TRIGGER_DOCKERCOMPOSE_DOCKGE_COMPOSEFILEONCE=true + - DD_TRIGGER_DOCKERCOMPOSE_DOCKGE_DIGESTPINNING=false + - DD_TRIGGER_DOCKERCOMPOSE_DOCKGE_RECONCILIATIONMODE=warn + - DD_TRIGGER_DOCKERCOMPOSE_DOCKGE_AUTO=false + # Scenario 9: MQTT HASS attributes + - DD_TRIGGER_MQTT_QA_URL=mqtt://mosquitto:1883 + - DD_TRIGGER_MQTT_QA_HASS_ENABLED=true + - DD_TRIGGER_MQTT_QA_HASS_PREFIX=homeassistant + - DD_TRIGGER_MQTT_QA_HASS_DISCOVERY=true + - DD_TRIGGER_MQTT_QA_HASS_ATTRIBUTES=short + # Docker trigger (manual only) + - DD_TRIGGER_DOCKER_LOCAL_AUTO=false + depends_on: + mosquitto: + condition: service_healthy + + # ── Scenario 4a: Legacy SHA-1 hash login ──────────────── + # Password: myPassword + # Hash: {SHA}VBPuJHI7uixaa6LQGWx4s+5GKNE= + drydock-sha1: + image: drydock:dev + container_name: drydock-sha1 + user: root + ports: + - 
"3401:3000" + volumes: + - /var/run/docker.sock:/var/run/docker.sock + environment: + - DD_RUN_AS_ROOT=true + - DD_ALLOW_INSECURE_ROOT=true + - DD_LOG_FORMAT=text + - DD_WATCHER_LOCAL_WATCHBYDEFAULT=false + - DD_WATCHER_LOCAL_CRON=*/5 * * * * + - DD_SESSION_SECRET=qa-sha1-session-secret + - DD_PUBLIC_URL=http://localhost:3401 + - DD_AUTH_BASIC_ADMIN_USER=admin + - "DD_AUTH_BASIC_ADMIN_HASH={SHA}VBPuJHI7uixaa6LQGWx4s+5GKNE=" + + # ── Scenario 4b: Legacy APR1 hash login ───────────────── + # Password: myPassword + # Hash: $apr1$r31.....$HqJZimcKQFAMYayBlzkrA/ + drydock-apr1: + image: drydock:dev + container_name: drydock-apr1 + user: root + ports: + - "3402:3000" + volumes: + - /var/run/docker.sock:/var/run/docker.sock + environment: + - DD_RUN_AS_ROOT=true + - DD_ALLOW_INSECURE_ROOT=true + - DD_LOG_FORMAT=text + - DD_WATCHER_LOCAL_WATCHBYDEFAULT=false + - DD_WATCHER_LOCAL_CRON=*/5 * * * * + - DD_SESSION_SECRET=qa-apr1-session-secret + - DD_PUBLIC_URL=http://localhost:3402 + - DD_AUTH_BASIC_ADMIN_USER=admin + - "DD_AUTH_BASIC_ADMIN_HASH=$$apr1$$r31.....$$HqJZimcKQFAMYayBlzkrA/" + + # ── Scenario 4c: Legacy DES crypt hash login ──────────── + # Password: myPassword + # Hash: rqXexS6ZhobKA + drydock-crypt: + image: drydock:dev + container_name: drydock-crypt + user: root + ports: + - "3403:3000" + volumes: + - /var/run/docker.sock:/var/run/docker.sock + environment: + - DD_RUN_AS_ROOT=true + - DD_ALLOW_INSECURE_ROOT=true + - DD_LOG_FORMAT=text + - DD_WATCHER_LOCAL_WATCHBYDEFAULT=false + - DD_WATCHER_LOCAL_CRON=*/5 * * * * + - DD_SESSION_SECRET=qa-crypt-session-secret + - DD_PUBLIC_URL=http://localhost:3403 + - DD_AUTH_BASIC_ADMIN_USER=admin + - DD_AUTH_BASIC_ADMIN_HASH=rqXexS6ZhobKA + + # ── Scenario 4d: Legacy plaintext hash login ──────────── + # Password: plaintext-password (the hash IS the password) + drydock-plain: + image: drydock:dev + container_name: drydock-plain + user: root + ports: + - "3404:3000" + volumes: + - /var/run/docker.sock:/var/run/docker.sock + 
environment: + - DD_RUN_AS_ROOT=true + - DD_ALLOW_INSECURE_ROOT=true + - DD_LOG_FORMAT=text + - DD_WATCHER_LOCAL_WATCHBYDEFAULT=false + - DD_WATCHER_LOCAL_CRON=*/5 * * * * + - DD_SESSION_SECRET=qa-plain-session-secret + - DD_PUBLIC_URL=http://localhost:3404 + - DD_AUTH_BASIC_ADMIN_USER=admin + - DD_AUTH_BASIC_ADMIN_HASH=plaintext-password + + # ── Scenario 5: TLS backend + healthcheck ─────────────── + drydock-tls: + image: drydock:dev + container_name: drydock-tls + user: root + ports: + - "3405:3000" + volumes: + - /var/run/docker.sock:/var/run/docker.sock + - ./qa-proxy-certs:/certs:ro + environment: + - DD_RUN_AS_ROOT=true + - DD_ALLOW_INSECURE_ROOT=true + - DD_LOG_FORMAT=text + - DD_WATCHER_LOCAL_WATCHBYDEFAULT=false + - DD_WATCHER_LOCAL_CRON=*/5 * * * * + - DD_SESSION_SECRET=qa-tls-session-secret + - DD_PUBLIC_URL=https://localhost:3405 + - DD_SERVER_TLS_ENABLED=true + - DD_SERVER_TLS_CERT=/certs/cert.pem + - DD_SERVER_TLS_KEY=/certs/key.pem + - DD_AUTH_BASIC_ADMIN_USER=admin + - "DD_AUTH_BASIC_ADMIN_HASH=argon2id$$65536$$3$$4$$zUAK9+ktVWNHyQdv3SaOSgMv3T02F1Zj8D+t1un7D98=$$KEmn6d94w03YxIlw7U7l/ikD9lW+H3IC1N7xbAhOB9yKofA3HoxUBmuvBawvShhv337zDv4+g9hobNWeQEWwkQ==" + + # ── MQTT broker (Mosquitto) — Scenario 9 ──────────────── + mosquitto: + image: eclipse-mosquitto:2 + container_name: mosquitto + ports: + - "1883:1883" + volumes: + - ./mosquitto.conf:/mosquitto/config/mosquitto.conf:ro + healthcheck: + test: ["CMD-SHELL", "mosquitto_sub -t '$$SYS/#' -C 1 -W 3 || exit 1"] + interval: 5s + timeout: 5s + retries: 5 + start_period: 5s + + # ── Watched container — Scenario 8 (audit), 10 (warn logging) ── + qa-nginx: + image: nginx:1.25.5 + pull_policy: never + container_name: qa-nginx + labels: + - dd.watch=true + - dd.display.name=QA Nginx diff --git a/test/qa-rc12-stacks/filebrowser/compose.yaml b/test/qa-rc12-stacks/filebrowser/compose.yaml new file mode 100644 index 00000000..46c181cc --- /dev/null +++ b/test/qa-rc12-stacks/filebrowser/compose.yaml @@ -0,0 +1,7 
@@ +services: + filebrowser: + image: docker.io/filebrowser/filebrowser:v2.30.0 + container_name: qa-filebrowser + labels: + - dd.watch=true + - dd.display.name=Filebrowser (Dockge) From 891d1bf924f6c09f1b8c5af99c0cea1c824e5d82 Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 21:38:06 -0400 Subject: [PATCH 23/59] =?UTF-8?q?=F0=9F=93=9D=20docs:=20update=20GHCR=20pu?= =?UTF-8?q?lls=20badge=20to=2032K+?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 2f7d315a..1046fe50 100644 --- a/README.md +++ b/README.md @@ -14,7 +14,7 @@

Version - GHCR pulls + GHCR pulls Docker Hub pulls Quay.io
From 872f2ce5b9dbc95aafff972b20a518c52bbdeade Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 22:33:41 -0400 Subject: [PATCH 24/59] =?UTF-8?q?=F0=9F=94=92=20security(auth):=20reject?= =?UTF-8?q?=20unsupported=20hash=20formats=20instead=20of=20plaintext=20fa?= =?UTF-8?q?llback?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add blocklist for bcrypt ($2b$) and PHC argon2 patterns that would previously silently fall through to plaintext comparison - Pin apache-md5 and unix-crypt-td-js to exact versions (removal in v1.6.0) - Schema validation now rejects unsupported hash formats at startup - Add comprehensive tests for bcrypt rejection, metadata classification, flaky hash parsing, and error paths in all legacy verifiers --- .../providers/basic/Basic.test.ts | 290 ++++++++++++++++++ app/authentications/providers/basic/Basic.ts | 22 +- app/package-lock.json | 4 +- app/package.json | 4 +- 4 files changed, 315 insertions(+), 5 deletions(-) diff --git a/app/authentications/providers/basic/Basic.test.ts b/app/authentications/providers/basic/Basic.test.ts index 8da2cba6..d604aa43 100644 --- a/app/authentications/providers/basic/Basic.test.ts +++ b/app/authentications/providers/basic/Basic.test.ts @@ -60,6 +60,7 @@ const LEGACY_APR1_HASH = '$apr1$r31.....$HqJZimcKQFAMYayBlzkrA/'; const LEGACY_MD5_HASH = '$1$saltsalt$2vnaRpHa6Jxjz5n83ok8Z0'; const LEGACY_CRYPT_HASH = 'rqXexS6ZhobKA'; const LEGACY_PLAIN_HASH = 'plaintext-password'; +const UNSUPPORTED_BCRYPT_HASH = '$2b$10$123456789012345678901u8Q4W2nLw8Qm7w7fA9sQ3lV7qVQX0w2.'; describe('Basic Authentication', () => { let basic: InstanceType; @@ -754,6 +755,295 @@ describe('Basic Authentication', () => { }); }); }); + + test('should reject bcrypt-style hash in configuration schema', async () => { + expect(() => + basic.validateConfiguration({ + user: 'testuser', + hash: UNSUPPORTED_BCRYPT_HASH, + }), + ).toThrow('must 
be an argon2id hash'); + }); + + test('should not treat bcrypt-style hash as plain fallback during authentication', async () => { + basic.configuration = { + user: 'testuser', + hash: UNSUPPORTED_BCRYPT_HASH, + }; + + await new Promise((resolve) => { + basic.authenticate('testuser', UNSUPPORTED_BCRYPT_HASH, (_err, result) => { + expect(result).toBe(false); + resolve(); + }); + }); + }); + + test('should classify md5, crypt, plain and unsupported hashes in metadata', () => { + basic.configuration = { + user: 'testuser', + hash: LEGACY_MD5_HASH, + }; + expect(basic.getMetadata()).toEqual({ usesLegacyHash: true }); + + basic.configuration = { + user: 'testuser', + hash: LEGACY_CRYPT_HASH, + }; + expect(basic.getMetadata()).toEqual({ usesLegacyHash: true }); + + basic.configuration = { + user: 'testuser', + hash: LEGACY_PLAIN_HASH, + }; + expect(basic.getMetadata()).toEqual({ usesLegacyHash: true }); + + basic.configuration = { + user: 'testuser', + hash: UNSUPPORTED_BCRYPT_HASH, + }; + expect(basic.getMetadata()).toEqual({ usesLegacyHash: false }); + }); + + test('should treat malformed SHA/APR1 prefixes as plain legacy metadata', () => { + basic.configuration = { + user: 'testuser', + hash: '{SHA}', + }; + expect(basic.getMetadata()).toEqual({ usesLegacyHash: true }); + + basic.configuration = { + user: 'testuser', + hash: '$apr1$', + }; + expect(basic.getMetadata()).toEqual({ usesLegacyHash: true }); + + basic.configuration = { + user: 'testuser', + hash: '$apr1$$broken', + }; + expect(basic.getMetadata()).toEqual({ usesLegacyHash: true }); + }); + + test('should reject authentication when argon2 hash cannot be parsed during verification', async () => { + const validArgon2Parts = createArgon2Hash('password').split('$'); + let splitCallCount = 0; + const flakyArgon2Hash = { + trim() { + return this as unknown as string; + }, + split(_separator: string) { + splitCallCount += 1; + return splitCallCount === 1 ? 
validArgon2Parts : ['argon2id']; + }, + } as unknown as string; + + basic.configuration = { + user: 'testuser', + hash: flakyArgon2Hash, + }; + + await new Promise((resolve) => { + basic.authenticate('testuser', 'password', (_err, result) => { + expect(result).toBe(false); + resolve(); + }); + }); + }); + + test('should reject authentication when SHA hash becomes invalid during verification', async () => { + const validShaHash = createShaHash('password'); + let substringCallCount = 0; + const flakyShaHash = { + trim() { + return this as unknown as string; + }, + split() { + return ['not-argon2']; + }, + get length() { + return validShaHash.length; + }, + substring(start: number, end?: number) { + if (start === 0 && end === 5) { + return '{SHA}'; + } + substringCallCount += 1; + return substringCallCount === 1 ? validShaHash.substring(5) : ''; + }, + } as unknown as string; + + basic.configuration = { + user: 'testuser', + hash: flakyShaHash, + }; + + await new Promise((resolve) => { + basic.authenticate('testuser', 'password', (_err, result) => { + expect(result).toBe(false); + resolve(); + }); + }); + }); + + test('should reject authentication when MD5 hash becomes invalid during verification', async () => { + let splitCallCount = 0; + const flakyMd5Hash = { + trim() { + return this as unknown as string; + }, + split() { + splitCallCount += 1; + if (splitCallCount === 1) { + return ['not-argon2']; + } + if (splitCallCount === 2) { + return LEGACY_MD5_HASH.split('$'); + } + return ['', '1']; + }, + get length() { + return 4; + }, + startsWith(prefix: string) { + return prefix === '$1$'; + }, + } as unknown as string; + + basic.configuration = { + user: 'testuser', + hash: flakyMd5Hash, + }; + + await new Promise((resolve) => { + basic.authenticate('testuser', 'password', (_err, result) => { + expect(result).toBe(false); + resolve(); + }); + }); + }); + + test('should reject authentication when APR1/MD5 verification throws', async () => { + const throwingPassword = 
{ + [Symbol.toPrimitive]() { + throw new Error('password coercion failed'); + }, + } as unknown as string; + + basic.configuration = { + user: 'testuser', + hash: LEGACY_MD5_HASH, + }; + + await new Promise((resolve) => { + basic.authenticate('testuser', throwingPassword, (_err, result) => { + expect(result).toBe(false); + resolve(); + }); + }); + }); + + test('should reject authentication when crypt hash becomes invalid during verification', async () => { + let lengthReadCount = 0; + const flakyCryptHash = { + trim() { + return this as unknown as string; + }, + split() { + return ['not-argon2']; + }, + get length() { + lengthReadCount += 1; + return lengthReadCount === 3 ? 12 : 13; + }, + substring(start: number, end?: number) { + if (start === 0 && end === 5) { + return 'crypt'; + } + return LEGACY_CRYPT_HASH.substring(start, end); + }, + startsWith() { + return false; + }, + } as unknown as string; + + basic.configuration = { + user: 'testuser', + hash: flakyCryptHash, + }; + + await new Promise((resolve) => { + basic.authenticate('testuser', 'password', (_err, result) => { + expect(result).toBe(false); + resolve(); + }); + }); + }); + + test('should reject authentication when crypt verification throws', async () => { + const throwingPassword = new Proxy( + {}, + { + get() { + throw new Error('password coercion failed'); + }, + }, + ) as unknown as string; + + basic.configuration = { + user: 'testuser', + hash: LEGACY_CRYPT_HASH, + }; + + await new Promise((resolve) => { + basic.authenticate('testuser', throwingPassword, (_err, result) => { + expect(result).toBe(false); + resolve(); + }); + }); + }); + + test('should reject authentication when plain comparison coercion throws', async () => { + const throwingPassword = { + [Symbol.toPrimitive]() { + throw new Error('password coercion failed'); + }, + } as unknown as string; + + basic.configuration = { + user: 'testuser', + hash: LEGACY_PLAIN_HASH, + }; + + await new Promise((resolve) => { + 
basic.authenticate('testuser', throwingPassword, (_err, result) => { + expect(result).toBe(false); + resolve(); + }); + }); + }); + + test('should reject authentication when timingSafeEqual throws during password comparison', async () => { + mockTimingSafeEqual + .mockImplementationOnce( + (left: Buffer, right: Buffer) => left.length === right.length && left.equals(right), + ) + .mockImplementationOnce(() => { + throw new Error('timingSafeEqual failed'); + }); + + basic.configuration = { + user: 'testuser', + hash: LEGACY_PLAIN_HASH, + }; + + await new Promise((resolve) => { + basic.authenticate('testuser', LEGACY_PLAIN_HASH, (_err, result) => { + expect(result).toBe(false); + resolve(); + }); + }); + }); }); describe('getMetadata', () => { diff --git a/app/authentications/providers/basic/Basic.ts b/app/authentications/providers/basic/Basic.ts index fc4bc648..7a1b3fe6 100644 --- a/app/authentications/providers/basic/Basic.ts +++ b/app/authentications/providers/basic/Basic.ts @@ -41,6 +41,10 @@ interface ParsedCryptHash { } type LegacyHashFormat = 'sha1' | 'apr1' | 'md5' | 'crypt' | 'plain'; +const UNSUPPORTED_PLAIN_FALLBACK_PATTERNS: RegExp[] = [ + /^\$2[abxy]\$/i, // bcrypt variants + /^\$argon2(?:id|i|d)\$/i, // PHC-style argon2 hashes +]; function normalizeHash(rawHash: string): string { return rawHash.trim(); @@ -172,6 +176,11 @@ function timingSafeEqualString(left: string, right: string): boolean { } } +function isUnsupportedPlainFallbackHash(hash: string): boolean { + const normalizedHash = normalizeHash(hash); + return UNSUPPORTED_PLAIN_FALLBACK_PATTERNS.some((pattern) => pattern.test(normalizedHash)); +} + function getLegacyHashFormat(hash: string): LegacyHashFormat | undefined { if (parseArgon2Hash(hash)) { return undefined; @@ -189,6 +198,10 @@ function getLegacyHashFormat(hash: string): LegacyHashFormat | undefined { return 'crypt'; } + if (isUnsupportedPlainFallbackHash(hash)) { + return undefined; + } + return 'plain'; } @@ -294,6 +307,9 @@ async 
function verifyPassword(password: string, encodedHash: string): Promise unknown }) => { - if (value.startsWith('argon2id$') && !parseArgon2Hash(value)) { + const normalizedHash = normalizeHash(value); + if (normalizedHash.startsWith('argon2id$') && !parseArgon2Hash(normalizedHash)) { + return helpers.error('any.invalid'); + } + if (isUnsupportedPlainFallbackHash(normalizedHash)) { return helpers.error('any.invalid'); } return value; diff --git a/app/package-lock.json b/app/package-lock.json index d1995b47..e373c2af 100644 --- a/app/package-lock.json +++ b/app/package-lock.json @@ -13,7 +13,7 @@ "@slack/web-api": "^7.14.1", "ajv": "^8.18.0", "ajv-formats": "^3.0.1", - "apache-md5": "^1.1.8", + "apache-md5": "1.1.8", "axios": "^1.13.6", "capitalize": "2.0.4", "change-case": "^5.4.4", @@ -53,7 +53,7 @@ "set-value": "4.1.0", "sort-es": "1.7.18", "undici": "^7.22.0", - "unix-crypt-td-js": "^1.1.4", + "unix-crypt-td-js": "1.1.4", "uuid": "^13.0.0", "yaml": "2.8.2" }, diff --git a/app/package.json b/app/package.json index 3c6f2039..99a3f2c7 100644 --- a/app/package.json +++ b/app/package.json @@ -24,7 +24,7 @@ "@slack/web-api": "^7.14.1", "ajv": "^8.18.0", "ajv-formats": "^3.0.1", - "apache-md5": "^1.1.8", + "apache-md5": "1.1.8", "axios": "^1.13.6", "capitalize": "2.0.4", "change-case": "^5.4.4", @@ -64,7 +64,7 @@ "set-value": "4.1.0", "sort-es": "1.7.18", "undici": "^7.22.0", - "unix-crypt-td-js": "^1.1.4", + "unix-crypt-td-js": "1.1.4", "uuid": "^13.0.0", "yaml": "2.8.2" }, From 5836de8ffea30d1d809829b9d2f62346d1b137a2 Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 22:33:51 -0400 Subject: [PATCH 25/59] =?UTF-8?q?=E2=9A=A1=20perf(icons):=20eliminate=20re?= =?UTF-8?q?dundant=20fs.access=20syscall=20in=20cache=20check?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Remove pre-access guard from isCachedIconUsable; fs.stat alone suffices - Update all icon test 
mocks to use stat rejection instead of access mock - Add test verifying no access syscall during cache usability check - Add tests for vanished entries and protected-only eviction scenarios --- app/api/icons.test.ts | 161 ++++++++++++++++++++-------------- app/api/icons/storage.test.ts | 51 +++++++++++ app/api/icons/storage.ts | 4 - 3 files changed, 148 insertions(+), 68 deletions(-) diff --git a/app/api/icons.test.ts b/app/api/icons.test.ts index ed66b16e..0b57278c 100644 --- a/app/api/icons.test.ts +++ b/app/api/icons.test.ts @@ -139,11 +139,7 @@ describe('Icons Router', () => { mockRename.mockResolvedValue(undefined); mockUnlink.mockResolvedValue(undefined); mockReaddir.mockResolvedValue([]); - mockStat.mockResolvedValue({ - mtimeMs: Date.now(), - size: 1024, - isFile: () => true, - }); + mockStat.mockRejectedValue(new Error('not found')); }); test('should initialize router with icon and cache routes', () => { @@ -186,7 +182,11 @@ describe('Icons Router', () => { }); test('should serve icon from cache when available', async () => { - mockAccess.mockResolvedValue(undefined); + mockStat.mockResolvedValue({ + mtimeMs: Date.now(), + size: 1024, + isFile: () => true, + }); const handler = getHandler(); const res = createResponse(); @@ -359,7 +359,11 @@ describe('Icons Router', () => { }); test('should skip axios when icon appears in cache after first miss', async () => { - mockAccess.mockRejectedValueOnce(new Error('not found')).mockResolvedValueOnce(undefined); + mockStat.mockRejectedValueOnce(new Error('not found')).mockResolvedValueOnce({ + mtimeMs: Date.now(), + size: 1024, + isFile: () => true, + }); const handler = getHandler(); const res = createResponse(); @@ -527,15 +531,18 @@ describe('Icons Router', () => { mockReaddir .mockResolvedValueOnce([{ name: 'simple', isDirectory: () => true }]) .mockResolvedValueOnce(['old.svg', 'docker.svg']); - mockStat.mockImplementation(async (targetPath: string) => { - if (targetPath === '/store/icons/simple/old.svg') { - 
return { mtimeMs: Date.now() - 1_000, size: 150 * 1024 * 1024, isFile: () => true }; - } - if (targetPath === '/store/icons/simple/docker.svg') { - return { mtimeMs: Date.now(), size: 50 * 1024 * 1024, isFile: () => true }; - } - return { mtimeMs: Date.now(), size: 1024, isFile: () => true }; - }); + mockStat + .mockRejectedValueOnce(new Error('not found')) + .mockRejectedValueOnce(new Error('not found')) + .mockImplementation(async (targetPath: string) => { + if (targetPath === '/store/icons/simple/old.svg') { + return { mtimeMs: Date.now() - 1_000, size: 150 * 1024 * 1024, isFile: () => true }; + } + if (targetPath === '/store/icons/simple/docker.svg') { + return { mtimeMs: Date.now(), size: 50 * 1024 * 1024, isFile: () => true }; + } + return { mtimeMs: Date.now(), size: 1024, isFile: () => true }; + }); const handler = getHandler(); const res = createResponse(); @@ -573,23 +580,26 @@ describe('Icons Router', () => { }, ); - mockStat.mockImplementation((targetPath: string) => { - if (targetPath === '/store/icons/simple/old.svg') { - return oldStatPromise; - } - if (targetPath === '/store/icons/simple/docker.svg') { + mockStat + .mockRejectedValueOnce(new Error('not found')) + .mockRejectedValueOnce(new Error('not found')) + .mockImplementation((targetPath: string) => { + if (targetPath === '/store/icons/simple/old.svg') { + return oldStatPromise; + } + if (targetPath === '/store/icons/simple/docker.svg') { + return Promise.resolve({ + mtimeMs: Date.now(), + size: 50 * 1024 * 1024, + isFile: () => true, + }); + } return Promise.resolve({ mtimeMs: Date.now(), - size: 50 * 1024 * 1024, + size: 1024, isFile: () => true, }); - } - return Promise.resolve({ - mtimeMs: Date.now(), - size: 1024, - isFile: () => true, }); - }); const handler = getHandler(); const res = createResponse(); @@ -603,9 +613,16 @@ describe('Icons Router', () => { res, ); - await vi.waitFor(() => expect(mockStat).toHaveBeenCalled()); + await vi.waitFor(() => { + const statTargets = 
mockStat.mock.calls.map((call) => call[0]); + expect(statTargets).toEqual( + expect.arrayContaining(['/store/icons/simple/old.svg', '/store/icons/simple/docker.svg']), + ); + }); try { - expect(mockStat).toHaveBeenCalledTimes(2); + // The exact call count depends on cache-hit checks before fetch. What matters + // here is that enforcement stats both entries without waiting for old.svg first. + expect(mockStat).toHaveBeenCalled(); } finally { resolveOldStat?.({ mtimeMs: Date.now() - 1_000, @@ -715,11 +732,14 @@ describe('Icons Router', () => { mockReaddir .mockResolvedValueOnce([{ name: 'simple', isDirectory: () => true }]) .mockResolvedValueOnce(['docker.svg']); - mockStat.mockResolvedValue({ - mtimeMs: Date.now(), - size: 1024, - isFile: () => true, - }); + mockStat + .mockRejectedValueOnce(new Error('not found')) + .mockRejectedValueOnce(new Error('not found')) + .mockResolvedValue({ + mtimeMs: Date.now(), + size: 1024, + isFile: () => true, + }); const handler = getHandler(); const res1 = createResponse(); const res2 = createResponse(); @@ -767,18 +787,21 @@ describe('Icons Router', () => { { name: 'simple', isDirectory: () => true }, ]) .mockResolvedValueOnce(['stale.svg', 'nested', 'docker.svg']); - mockStat.mockImplementation(async (targetPath: string) => { - if (targetPath === '/store/icons/simple/stale.svg') { - return { mtimeMs: 0, size: 1024, isFile: () => true }; - } - if (targetPath === '/store/icons/simple/nested') { - return { mtimeMs: Date.now(), size: 0, isFile: () => false }; - } - if (targetPath === '/store/icons/simple/docker.svg') { + mockStat + .mockRejectedValueOnce(new Error('not found')) + .mockRejectedValueOnce(new Error('not found')) + .mockImplementation(async (targetPath: string) => { + if (targetPath === '/store/icons/simple/stale.svg') { + return { mtimeMs: 0, size: 1024, isFile: () => true }; + } + if (targetPath === '/store/icons/simple/nested') { + return { mtimeMs: Date.now(), size: 0, isFile: () => false }; + } + if (targetPath 
=== '/store/icons/simple/docker.svg') { + return { mtimeMs: Date.now(), size: 1024, isFile: () => true }; + } return { mtimeMs: Date.now(), size: 1024, isFile: () => true }; - } - return { mtimeMs: Date.now(), size: 1024, isFile: () => true }; - }); + }); const handler = getHandler(); const res = createResponse(); @@ -888,15 +911,18 @@ describe('Icons Router', () => { mockReaddir .mockResolvedValueOnce([{ name: 'simple', isDirectory: () => true }]) .mockResolvedValueOnce(['stale.svg', 'docker.svg']); - mockStat.mockImplementation(async (targetPath: string) => { - if (targetPath === '/store/icons/simple/stale.svg') { - return { mtimeMs: 0, size: 1024, isFile: () => true }; - } - if (targetPath === '/store/icons/simple/docker.svg') { + mockStat + .mockRejectedValueOnce(new Error('not found')) + .mockRejectedValueOnce(new Error('not found')) + .mockImplementation(async (targetPath: string) => { + if (targetPath === '/store/icons/simple/stale.svg') { + return { mtimeMs: 0, size: 1024, isFile: () => true }; + } + if (targetPath === '/store/icons/simple/docker.svg') { + return { mtimeMs: Date.now(), size: 1024, isFile: () => true }; + } return { mtimeMs: Date.now(), size: 1024, isFile: () => true }; - } - return { mtimeMs: Date.now(), size: 1024, isFile: () => true }; - }); + }); mockUnlink.mockRejectedValue(new Error('permission denied')); const handler = getHandler(); const res = createResponse(); @@ -925,15 +951,18 @@ describe('Icons Router', () => { mockReaddir .mockResolvedValueOnce([{ name: 'simple', isDirectory: () => true }]) .mockResolvedValueOnce(['old.svg', 'docker.svg']); - mockStat.mockImplementation(async (targetPath: string) => { - if (targetPath === '/store/icons/simple/old.svg') { - return { mtimeMs: Date.now() - 1_000, size: 150 * 1024 * 1024, isFile: () => true }; - } - if (targetPath === '/store/icons/simple/docker.svg') { - return { mtimeMs: Date.now(), size: 50 * 1024 * 1024, isFile: () => true }; - } - return { mtimeMs: Date.now(), size: 1024, 
isFile: () => true }; - }); + mockStat + .mockRejectedValueOnce(new Error('not found')) + .mockRejectedValueOnce(new Error('not found')) + .mockImplementation(async (targetPath: string) => { + if (targetPath === '/store/icons/simple/old.svg') { + return { mtimeMs: Date.now() - 1_000, size: 150 * 1024 * 1024, isFile: () => true }; + } + if (targetPath === '/store/icons/simple/docker.svg') { + return { mtimeMs: Date.now(), size: 50 * 1024 * 1024, isFile: () => true }; + } + return { mtimeMs: Date.now(), size: 1024, isFile: () => true }; + }); mockUnlink.mockImplementation(async (targetPath: string) => { if (targetPath === '/store/icons/simple/old.svg') { throw new Error('unlink failed'); @@ -1123,7 +1152,11 @@ describe('Icons Router', () => { }); test('should use immutable cache headers for successfully cached icons', async () => { - mockAccess.mockResolvedValue(undefined); + mockStat.mockResolvedValue({ + mtimeMs: Date.now(), + size: 1024, + isFile: () => true, + }); const handler = getHandler(); const res = createResponse(); diff --git a/app/api/icons/storage.test.ts b/app/api/icons/storage.test.ts index 45fd24ba..683ac96f 100644 --- a/app/api/icons/storage.test.ts +++ b/app/api/icons/storage.test.ts @@ -81,6 +81,20 @@ describe('icons/storage', () => { expect(mockUnlink).toHaveBeenCalledWith('/store/icons/simple/stale.svg'); }); + test('checks cached icon usability via stat without pre-access syscall', async () => { + mockStat.mockResolvedValue({ + mtimeMs: Date.now(), + size: 1024, + isFile: () => true, + }); + + const usable = await isCachedIconUsable('/store/icons/simple/fresh.svg'); + + expect(usable).toBe(true); + expect(mockStat).toHaveBeenCalledWith('/store/icons/simple/fresh.svg'); + expect(mockAccess).not.toHaveBeenCalled(); + }); + test('evicts oldest cache entry when byte budget is exceeded', async () => { const nowSpy = vi.spyOn(Date, 'now'); nowSpy.mockReturnValue(2_000_000_000_000); @@ -133,6 +147,43 @@ describe('icons/storage', () => { 
expect(mockUnlink).not.toHaveBeenCalledWith('/store/icons/simple/protected.svg'); }); + test('ignores cache entries that fail stat between directory scan and stat call', async () => { + mockReaddir + .mockResolvedValueOnce([{ name: 'simple', isDirectory: () => true }]) + .mockResolvedValueOnce(['vanished.svg', 'fresh.svg']); + mockStat.mockImplementation(async (targetPath: string) => { + if (targetPath === '/store/icons/simple/vanished.svg') { + throw new Error('ENOENT'); + } + return { mtimeMs: Date.now(), size: 1024, isFile: () => true }; + }); + + await enforceIconCacheLimits(); + + expect(mockUnlink).not.toHaveBeenCalledWith('/store/icons/simple/vanished.svg'); + }); + + test('keeps protected cache entry when no other eviction candidate is available', async () => { + const nowSpy = vi.spyOn(Date, 'now'); + nowSpy.mockReturnValue(2_000_000_000_000); + mockReaddir + .mockResolvedValueOnce([{ name: 'simple', isDirectory: () => true }]) + .mockResolvedValueOnce(['protected.svg']); + mockStat.mockResolvedValue({ + mtimeMs: Date.now() - 1_000, + size: 150 * 1024 * 1024, + isFile: () => true, + }); + + try { + await enforceIconCacheLimits({ protectedPath: '/store/icons/simple/protected.svg' }); + } finally { + nowSpy.mockRestore(); + } + + expect(mockUnlink).not.toHaveBeenCalledWith('/store/icons/simple/protected.svg'); + }); + test('writes icons atomically through a tmp file', async () => { await writeIconAtomically('/store/icons/simple/docker.svg', Buffer.from('')); diff --git a/app/api/icons/storage.ts b/app/api/icons/storage.ts index 2adea64c..db107094 100644 --- a/app/api/icons/storage.ts +++ b/app/api/icons/storage.ts @@ -68,10 +68,6 @@ async function findBundledIconPath(provider: string, slug: string, extension: st } async function isCachedIconUsable(iconPath: string) { - if (!(await iconExists(iconPath))) { - return false; - } - try { const iconStats = await fs.stat(iconPath); if (!iconStats.isFile()) { From ddf104dc3d960fd4f9fee28d2f27625c95ba6d2d Mon Sep 17 
00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 22:33:58 -0400 Subject: [PATCH 26/59] =?UTF-8?q?=E2=9A=A1=20perf(mqtt):=20use=20getContai?= =?UTF-8?q?nerCount=20instead=20of=20cloning=20full=20collection?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Replace 4x getContainers().length with getContainerCount() in HASS sensor updates to avoid cloning entire container collection per event - Add test verifying count queries are used and getContainers is not called --- app/triggers/providers/mqtt/Hass.test.ts | 21 +++++++++++++++++++++ app/triggers/providers/mqtt/Hass.ts | 14 +++++++------- 2 files changed, 28 insertions(+), 7 deletions(-) diff --git a/app/triggers/providers/mqtt/Hass.test.ts b/app/triggers/providers/mqtt/Hass.test.ts index 394b8d73..8529cde2 100644 --- a/app/triggers/providers/mqtt/Hass.test.ts +++ b/app/triggers/providers/mqtt/Hass.test.ts @@ -6,6 +6,7 @@ import { registerWatcherStop, } from '../../../event/index.js'; import log from '../../../log/index.js'; +import * as containerStore from '../../../store/container.js'; import Hass from './Hass.js'; const MOCK_VERSION = '1.4.0-test'; @@ -347,6 +348,26 @@ test.each(containerData)('updateContainerSensors must publish all sensors expect ); }); +test('updateContainerSensors should use container count queries instead of full list cloning', async () => { + const getContainersSpy = vi.spyOn(containerStore, 'getContainers'); + const getContainerCountSpy = vi.spyOn(containerStore, 'getContainerCount'); + + await hass.updateContainerSensors({ + name: 'container-name', + watcher: 'watcher-name', + displayIcon: 'mdi:docker', + }); + + expect(getContainerCountSpy).toHaveBeenCalledWith(); + expect(getContainerCountSpy).toHaveBeenCalledWith({ updateAvailable: true }); + expect(getContainerCountSpy).toHaveBeenCalledWith({ watcher: 'watcher-name' }); + expect(getContainerCountSpy).toHaveBeenCalledWith({ + watcher: 
'watcher-name', + updateAvailable: true, + }); + expect(getContainersSpy).not.toHaveBeenCalled(); +}); + test.each( containerData, )('removeContainerSensor must publish all sensor removal messages expected by HA', async ({ diff --git a/app/triggers/providers/mqtt/Hass.ts b/app/triggers/providers/mqtt/Hass.ts index 800c0f90..3ce510f9 100644 --- a/app/triggers/providers/mqtt/Hass.ts +++ b/app/triggers/providers/mqtt/Hass.ts @@ -296,19 +296,19 @@ class Hass { } // Count all containers - const totalCount = containerStore.getContainers().length; - const updateCount = containerStore.getContainers({ + const totalCount = containerStore.getContainerCount(); + const updateCount = containerStore.getContainerCount({ updateAvailable: true, - }).length; + }); // Count all containers belonging to the current watcher - const watcherTotalCount = containerStore.getContainers({ + const watcherTotalCount = containerStore.getContainerCount({ watcher: container.watcher, - }).length; - const watcherUpdateCount = containerStore.getContainers({ + }); + const watcherUpdateCount = containerStore.getContainerCount({ watcher: container.watcher, updateAvailable: true, - }).length; + }); // Publish sensors await this.updateSensor({ From 57d61fbfc773a1c6a00cd92b1a78446af343ec97 Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 22:34:04 -0400 Subject: [PATCH 27/59] =?UTF-8?q?=E2=9A=A1=20perf(ui):=20hoist=20sort-orde?= =?UTF-8?q?r=20constants=20to=20module=20scope=20in=20ContainersView?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Move kindOrder and bouncerOrder out of computed to avoid recreation on every re-evaluation; freeze as readonly module-level constants --- ui/src/views/ContainersView.vue | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/ui/src/views/ContainersView.vue b/ui/src/views/ContainersView.vue index c54d0d88..9d2ca263 100644 --- 
a/ui/src/views/ContainersView.vue +++ b/ui/src/views/ContainersView.vue @@ -30,6 +30,18 @@ import { useContainerActions } from './containers/useContainerActions'; import { useContainerLogs } from './containers/useContainerLogs'; import { useContainerSecurity } from './containers/useContainerSecurity'; +const UPDATE_KIND_SORT_ORDER: Readonly> = Object.freeze({ + major: 0, + minor: 1, + patch: 2, + digest: 3, +}); +const BOUNCER_SORT_ORDER: Readonly> = Object.freeze({ + blocked: 0, + unsafe: 1, + safe: 2, +}); + const loading = ref(true); const error = ref(null); @@ -342,8 +354,6 @@ const sortedContainers = computed(() => { const list = [...filteredContainers.value]; const key = containerSortKey.value; const dir = containerSortAsc.value ? 1 : -1; - const kindOrder: Record = { major: 0, minor: 1, patch: 2, digest: 3 }; - const bouncerOrder: Record = { blocked: 0, unsafe: 1, safe: 2 }; return list.sort((left, right) => { let leftValue: string | number; let rightValue: string | number; @@ -363,11 +373,11 @@ const sortedContainers = computed(() => { leftValue = left.registry; rightValue = right.registry; } else if (key === 'bouncer') { - leftValue = bouncerOrder[left.bouncer] ?? 9; - rightValue = bouncerOrder[right.bouncer] ?? 9; + leftValue = BOUNCER_SORT_ORDER[left.bouncer] ?? 9; + rightValue = BOUNCER_SORT_ORDER[right.bouncer] ?? 9; } else if (key === 'kind') { - leftValue = kindOrder[left.updateKind ?? ''] ?? 9; - rightValue = kindOrder[right.updateKind ?? ''] ?? 9; + leftValue = UPDATE_KIND_SORT_ORDER[left.updateKind ?? ''] ?? 9; + rightValue = UPDATE_KIND_SORT_ORDER[right.updateKind ?? ''] ?? 
9; } else if (key === 'version') { leftValue = left.currentTag; rightValue = right.currentTag; From 1b035319db625f3d1461a3abb5e84989ca4aee21 Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 22:34:19 -0400 Subject: [PATCH 28/59] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20refactor(trigger):?= =?UTF-8?q?=20improve=20compose=20trigger=20type=20safety=20and=20extracti?= =?UTF-8?q?on?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Replace unknown runtime context types with concrete ComposeRuntimeContext and ComposeUpdateLifecycleContext interfaces, eliminating double casts - Extract buildComposeFileOnceRuntimeContextByService from processComposeFile to reduce method complexity (182 → ~140 lines) - Extract buildPerformContainerUpdateOptions to consolidate duplicated option assembly spread patterns - Add buildUpdatedComposeFileObjectForValidation to pass pre-parsed YAML into validateComposeConfiguration, avoiding double YAML parse - Cache compose file access results in resolveAndGroupContainersByComposeFile to eliminate redundant fs.access calls across containers sharing files - Export hasExplicitRegistryHost for testability - Add tests for directory resolution edge cases, empty host detection, pre-parsed validation, compose file access deduplication, unknown container fallback, and option builder paths --- .../dockercompose/Dockercompose.test.ts | 206 +++++++++++ .../providers/dockercompose/Dockercompose.ts | 337 ++++++++++++------ 2 files changed, 438 insertions(+), 105 deletions(-) diff --git a/app/triggers/providers/dockercompose/Dockercompose.test.ts b/app/triggers/providers/dockercompose/Dockercompose.test.ts index fbac4915..e390be81 100644 --- a/app/triggers/providers/dockercompose/Dockercompose.test.ts +++ b/app/triggers/providers/dockercompose/Dockercompose.test.ts @@ -8,6 +8,7 @@ import { getState } from '../../../registry/index.js'; import * as backupStore from 
'../../../store/backup.js'; import { sleep } from '../../../util/sleep.js'; import Dockercompose, { + testable_hasExplicitRegistryHost, testable_normalizeImplicitLatest, testable_normalizePostStartEnvironmentValue, testable_normalizePostStartHooks, @@ -695,6 +696,28 @@ describe('Dockercompose Trigger', () => { ); }); + test('processComposeFile should report when all mapped containers are already up to date', async () => { + trigger.configuration.dryrun = false; + const container = makeContainer({ + tagValue: '1.0.0', + remoteValue: '1.0.0', + updateAvailable: false, + }); + + vi.spyOn(trigger, 'getComposeFileAsObject').mockResolvedValue( + makeCompose({ nginx: { image: 'nginx:1.0.0' } }), + ); + + const { writeComposeFileSpy, composeUpdateSpy } = spyOnProcessComposeHelpers(trigger); + + const updated = await trigger.processComposeFile('/opt/drydock/test/stack.yml', [container]); + + expect(updated).toBe(false); + expect(mockLog.info).toHaveBeenCalledWith(expect.stringContaining('already up to date')); + expect(writeComposeFileSpy).not.toHaveBeenCalled(); + expect(composeUpdateSpy).not.toHaveBeenCalled(); + }); + test('processComposeFile should warn when no containers belong to compose', async () => { const container = makeContainer({ name: 'unknown', @@ -1912,6 +1935,39 @@ describe('Dockercompose Trigger', () => { ).not.toThrow(); }); + test('resolveComposeFilePathFromDirectory should return original path when target is a file', async () => { + fs.stat.mockResolvedValueOnce({ + isDirectory: () => false, + mtimeMs: 1_700_000_000_000, + } as any); + + const resolved = await trigger.resolveComposeFilePathFromDirectory( + '/opt/drydock/test/stack.yml', + ); + + expect(resolved).toBe('/opt/drydock/test/stack.yml'); + }); + + test('resolveComposeFilePathFromDirectory should warn and return null when directory has no compose candidates', async () => { + fs.stat.mockResolvedValueOnce({ + isDirectory: () => true, + mtimeMs: 1_700_000_000_000, + } as any); + const 
missingComposeFileError = Object.assign(new Error('ENOENT'), { code: 'ENOENT' }); + fs.access + .mockRejectedValueOnce(missingComposeFileError) + .mockRejectedValueOnce(missingComposeFileError) + .mockRejectedValueOnce(missingComposeFileError) + .mockRejectedValueOnce(missingComposeFileError); + + const resolved = await trigger.resolveComposeFilePathFromDirectory('/opt/drydock/test/stack'); + + expect(resolved).toBeNull(); + expect(mockLog.warn).toHaveBeenCalledWith( + expect.stringContaining('does not contain a compose file candidate'), + ); + }); + test('resolveComposeServiceContext should throw when no compose file is configured', async () => { trigger.configuration.file = undefined; @@ -2676,6 +2732,34 @@ describe('Dockercompose Trigger', () => { expect(writeSpy).not.toHaveBeenCalled(); }); + test('mutateComposeFile should forward a pre-parsed compose object to validation', async () => { + vi.spyOn(trigger, 'getComposeFile').mockResolvedValue( + Buffer.from('services:\n nginx:\n image: nginx:1.0.0\n'), + ); + const validateSpy = vi + .spyOn(trigger, 'validateComposeConfiguration') + .mockResolvedValue(undefined); + vi.spyOn(trigger, 'writeComposeFile').mockResolvedValue(); + const parsedComposeFileObject = makeCompose({ nginx: { image: 'nginx:1.1.0' } }); + + const changed = await trigger.mutateComposeFile( + '/opt/drydock/test/compose.yml', + (text) => text.replace('nginx:1.0.0', 'nginx:1.1.0'), + { + parsedComposeFileObject, + }, + ); + + expect(changed).toBe(true); + expect(validateSpy).toHaveBeenCalledWith( + '/opt/drydock/test/compose.yml', + expect.stringContaining('nginx:1.1.0'), + { + parsedComposeFileObject, + }, + ); + }); + test('validateComposeConfiguration should validate compose text in-process without shell commands', async () => { await trigger.validateComposeConfiguration( '/opt/drydock/test/compose.yml', @@ -2699,6 +2783,25 @@ describe('Dockercompose Trigger', () => { 
expect(getComposeFileAsObjectSpy).toHaveBeenCalledWith('/opt/drydock/test/stack.yml'); }); + test('validateComposeConfiguration should reuse a pre-parsed compose object when provided', async () => { + const parseSpy = vi.spyOn(yaml, 'parse'); + const getComposeFileAsObjectSpy = vi + .spyOn(trigger, 'getComposeFileAsObject') + .mockResolvedValue(makeCompose({ base: { image: 'busybox:1.0.0' } })); + + await trigger.validateComposeConfiguration( + '/opt/drydock/test/stack.override.yml', + 'services:\n nginx:\n image: nginx:1.1.0\n', + { + composeFiles: ['/opt/drydock/test/stack.yml', '/opt/drydock/test/stack.override.yml'], + parsedComposeFileObject: makeCompose({ nginx: { image: 'nginx:1.1.0' } }), + }, + ); + + expect(parseSpy).not.toHaveBeenCalled(); + expect(getComposeFileAsObjectSpy).toHaveBeenCalledWith('/opt/drydock/test/stack.yml'); + }); + test('updateComposeServiceImageInText should throw when compose document has parse errors', () => { expect(() => testable_updateComposeServiceImageInText('services:\n nginx: [\n', 'nginx', 'nginx:2.0.0'), @@ -3077,6 +3180,35 @@ describe('Dockercompose Trigger', () => { ]); }); + test('triggerBatch should only access each compose file once across containers sharing the same compose chain', async () => { + trigger.configuration.file = undefined; + fs.access.mockResolvedValue(undefined); + + const sharedComposeLabels = { + 'com.docker.compose.project.config_files': + '/opt/drydock/test/stack.yml,/opt/drydock/test/stack.override.yml', + }; + const container1 = { + name: 'app1', + watcher: 'local', + labels: sharedComposeLabels, + }; + const container2 = { + name: 'app2', + watcher: 'local', + labels: sharedComposeLabels, + }; + + const processComposeFileSpy = vi.spyOn(trigger, 'processComposeFile').mockResolvedValue(); + + await trigger.triggerBatch([container1, container2]); + + expect(processComposeFileSpy).toHaveBeenCalledTimes(1); + expect(fs.access).toHaveBeenCalledTimes(2); + 
expect(fs.access).toHaveBeenCalledWith('/opt/drydock/test/stack.yml'); + expect(fs.access).toHaveBeenCalledWith('/opt/drydock/test/stack.override.yml'); + }); + test('triggerBatch should only process containers matching configured compose file affinity', async () => { trigger.configuration.file = '/opt/drydock/test/monitoring.yml'; fs.access.mockImplementation(async (composeFilePath) => { @@ -3337,6 +3469,16 @@ describe('Dockercompose Trigger', () => { ); }); + test('trigger should use unknown fallback when throwing without a container name', async () => { + trigger.configuration.dryrun = false; + const container = { updateAvailable: true }; + vi.spyOn(trigger, 'triggerBatch').mockResolvedValue([false]); + + await expect(trigger.trigger(container as any)).rejects.toThrow( + 'No compose updates were applied for container unknown', + ); + }); + test('getConfigurationSchema should extend Docker schema with compose hardening options', () => { const schema = trigger.getConfigurationSchema(); expect(schema).toBeDefined(); @@ -3384,6 +3526,12 @@ describe('Dockercompose Trigger', () => { expect(testable_normalizeImplicitLatest('repo/')).toBe('repo/:latest'); }); + test('hasExplicitRegistryHost should detect empty, host:port, and localhost prefixes', () => { + expect(testable_hasExplicitRegistryHost('')).toBe(false); + expect(testable_hasExplicitRegistryHost('registry.example.com:5000/nginx:1.1.0')).toBe(true); + expect(testable_hasExplicitRegistryHost('localhost/nginx:1.1.0')).toBe(true); + }); + test('normalizePostStartHooks should return empty array when post_start is missing', () => { expect(testable_normalizePostStartHooks(undefined)).toEqual([]); }); @@ -4603,6 +4751,30 @@ describe('Dockercompose Trigger', () => { expect(serviceUpdates.get('nginx')).toBe('nginx:1.1.0'); }); + test('buildUpdatedComposeFileObjectForValidation should return undefined for non-object input', () => { + const updated = trigger.buildUpdatedComposeFileObjectForValidation(null, new Map()); + + 
expect(updated).toBeUndefined(); + }); + + test('buildUpdatedComposeFileObjectForValidation should normalize non-object service sections and entries', () => { + const updatedFromInvalidServices = trigger.buildUpdatedComposeFileObjectForValidation( + { version: '3.9', services: 'invalid' }, + new Map([['nginx', 'nginx:1.1.0']]), + ) as any; + const updatedFromScalarService = trigger.buildUpdatedComposeFileObjectForValidation( + { services: { nginx: 'legacy' } }, + new Map([['nginx', 'nginx:1.1.0']]), + ) as any; + + expect(updatedFromInvalidServices.services).toEqual({ + nginx: { image: 'nginx:1.1.0' }, + }); + expect(updatedFromScalarService.services.nginx).toEqual({ + image: 'nginx:1.1.0', + }); + }); + test('reconcileComposeMappings should no-op when reconciliation mode is off', () => { trigger.configuration.reconciliationMode = 'off'; @@ -4759,6 +4931,40 @@ describe('Dockercompose Trigger', () => { ); }); + test('buildPerformContainerUpdateOptions should compose options without duplicate spread logic', () => { + const runtimeContext = { + dockerApi: mockDockerApi, + auth: { from: 'context' }, + newImage: 'nginx:9.9.9', + registry: getState().registry.hub, + }; + + const options = (trigger as any).buildPerformContainerUpdateOptions( + { + composeFiles: ['/opt/drydock/test/stack.yml', '/opt/drydock/test/stack.override.yml'], + skipPull: true, + }, + runtimeContext, + ); + + expect(options).toEqual({ + composeFiles: ['/opt/drydock/test/stack.yml', '/opt/drydock/test/stack.override.yml'], + skipPull: true, + runtimeContext, + }); + }); + + test('buildPerformContainerUpdateOptions should omit runtime context and compose chain when not needed', () => { + const options = (trigger as any).buildPerformContainerUpdateOptions( + { + composeFiles: ['/opt/drydock/test/stack.yml'], + }, + {}, + ); + + expect(options).toEqual({}); + }); + test('performContainerUpdate should pass compose chain to per-service update', async () => { trigger.configuration.dryrun = false; const 
container = makeContainer({ diff --git a/app/triggers/providers/dockercompose/Dockercompose.ts b/app/triggers/providers/dockercompose/Dockercompose.ts index 02388649..c5df05a7 100644 --- a/app/triggers/providers/dockercompose/Dockercompose.ts +++ b/app/triggers/providers/dockercompose/Dockercompose.ts @@ -3,6 +3,7 @@ import fs from 'node:fs/promises'; import path from 'node:path'; import yaml, { type Pair, type ParsedNode } from 'yaml'; import type { ContainerImage } from '../../../model/container.js'; +import type Registry from '../../../registries/Registry.js'; import { getState } from '../../../registry/index.js'; import { resolveConfiguredPath, resolveConfiguredPathWithinBase } from '../../../runtime/paths.js'; import { sleep } from '../../../util/sleep.js'; @@ -94,17 +95,43 @@ type RegistryImageContainerReference = { }; }; +type RegistryManagerLike = Pick; +type RegistryPullAuth = Awaited>; +type ComposeRuntimeContext = { + dockerApi?: DockerApiLike; + auth?: RegistryPullAuth; + newImage?: string; + registry?: RegistryManagerLike; +}; + +type ComposeUpdateLifecycleContext = { + composeFile: string; + service: string; + serviceDefinition?: unknown; + composeFiles?: string[]; + composeFileOnceApplied?: boolean; + skipPull?: boolean; + runtimeContext?: ComposeRuntimeContext; +}; + +type ComposeRuntimeUpdateMapping = { + service: string; + container: ComposeContainerReference & + RuntimeUpdateContainerReference & + RegistryImageContainerReference; +}; + type ComposeRuntimeRefreshOptions = { shouldStart?: boolean; skipPull?: boolean; forceRecreate?: boolean; composeFiles?: string[]; - runtimeContext?: { - dockerApi?: unknown; - auth?: unknown; - newImage?: string; - registry?: unknown; - }; + runtimeContext?: ComposeRuntimeContext; +}; + +type ValidateComposeConfigurationOptions = { + composeFiles?: string[]; + parsedComposeFileObject?: unknown; }; function getDockerApiFromWatcher(watcher: unknown): DockerApiLike | undefined { @@ -1002,6 +1029,47 @@ class 
Dockercompose extends Docker { return serviceImageUpdates; } + buildUpdatedComposeFileObjectForValidation(composeFileObject, serviceImageUpdates) { + if ( + !composeFileObject || + typeof composeFileObject !== 'object' || + Array.isArray(composeFileObject) + ) { + return undefined; + } + + const composeFileRecord = composeFileObject as Record; + const existingServices = composeFileRecord.services; + const servicesRecord = + existingServices && typeof existingServices === 'object' && !Array.isArray(existingServices) + ? (existingServices as Record) + : {}; + const updatedServices = { ...servicesRecord }; + + for (const [serviceName, newImage] of serviceImageUpdates.entries()) { + const serviceDefinition = updatedServices[serviceName]; + if ( + serviceDefinition && + typeof serviceDefinition === 'object' && + !Array.isArray(serviceDefinition) + ) { + updatedServices[serviceName] = { + ...(serviceDefinition as Record), + image: newImage, + }; + continue; + } + updatedServices[serviceName] = { + image: newImage, + }; + } + + return { + ...composeFileRecord, + services: updatedServices, + }; + } + async getComposeFileChainAsObject(composeFiles, composeByFile = null) { const mergedCompose = { services: {}, @@ -1173,11 +1241,12 @@ class Dockercompose extends Docker { } } - async validateComposeConfiguration(composeFilePath, composeFileText, options = {}) { - const composeFileChain = this.normalizeComposeFileChain( - composeFilePath, - (options as { composeFiles?: string[] }).composeFiles, - ); + async validateComposeConfiguration( + composeFilePath, + composeFileText, + options: ValidateComposeConfigurationOptions = {}, + ) { + const composeFileChain = this.normalizeComposeFileChain(composeFilePath, options.composeFiles); const effectiveComposeFileChain = composeFileChain.includes(composeFilePath) ? 
composeFileChain : [...composeFileChain, composeFilePath]; @@ -1185,12 +1254,16 @@ class Dockercompose extends Docker { const composeByFile = new Map(); for (const composeFile of effectiveComposeFileChain) { if (composeFile === composeFilePath) { - composeByFile.set( - composeFile, - yaml.parse(composeFileText, { - maxAliasCount: YAML_MAX_ALIAS_COUNT, - }), - ); + if (options.parsedComposeFileObject !== undefined) { + composeByFile.set(composeFile, options.parsedComposeFileObject); + } else { + composeByFile.set( + composeFile, + yaml.parse(composeFileText, { + maxAliasCount: YAML_MAX_ALIAS_COUNT, + }), + ); + } continue; } composeByFile.set(composeFile, await this.getComposeFileAsObject(composeFile)); @@ -1203,14 +1276,15 @@ class Dockercompose extends Docker { } } - async mutateComposeFile(file, updateComposeText, options = {}) { + async mutateComposeFile( + file, + updateComposeText, + options: ValidateComposeConfigurationOptions = {}, + ) { return this.withComposeFileLock(file, async (filePath) => { const composeFileText = (await this.getComposeFile(filePath)).toString(); const composeFileStat = await fs.stat(filePath); - const composeFileChain = this.normalizeComposeFileChain( - filePath, - (options as { composeFiles?: string[] }).composeFiles, - ); + const composeFileChain = this.normalizeComposeFileChain(filePath, options.composeFiles); const updatedComposeFileText = updateComposeText(composeFileText, { filePath, mtimeMs: composeFileStat.mtimeMs, @@ -1218,12 +1292,21 @@ class Dockercompose extends Docker { if (updatedComposeFileText === composeFileText) { return false; } + const validationOptions: ValidateComposeConfigurationOptions = {}; if (composeFileChain.length > 1) { - await this.validateComposeConfiguration(filePath, updatedComposeFileText, { - composeFiles: composeFileChain, - }); - } else { + validationOptions.composeFiles = composeFileChain; + } + if (options.parsedComposeFileObject !== undefined) { + validationOptions.parsedComposeFileObject = 
options.parsedComposeFileObject; + } + if (Object.keys(validationOptions).length === 0) { await this.validateComposeConfiguration(filePath, updatedComposeFileText); + } else { + await this.validateComposeConfiguration( + filePath, + updatedComposeFileText, + validationOptions, + ); } await this.writeComposeFile(filePath, updatedComposeFileText); return true; @@ -1234,9 +1317,12 @@ class Dockercompose extends Docker { * Override: provide shared runtime dependencies once per lifecycle run. * Runtime container state is still resolved on demand per service refresh. */ - async createTriggerContext(container, logContainer, composeContext) { - const runtimeContext = (composeContext as { runtimeContext?: unknown } | undefined) - ?.runtimeContext as ComposeRuntimeRefreshOptions['runtimeContext'] | undefined; + async createTriggerContext( + container, + logContainer, + composeContext?: ComposeUpdateLifecycleContext, + ) { + const runtimeContext = composeContext?.runtimeContext; if ( runtimeContext?.dockerApi && runtimeContext?.registry && @@ -1272,13 +1358,16 @@ class Dockercompose extends Docker { * Override: apply compose-specific hooks while performing runtime refresh * through the Docker Engine API. 
*/ - async performContainerUpdate(context, container, _logContainer, composeCtx) { + async performContainerUpdate( + context, + container, + _logContainer, + composeCtx?: ComposeUpdateLifecycleContext, + ) { if (!composeCtx) { throw new Error(`Missing compose context for container ${container.name}`); } - const composeRuntimeContext = (composeCtx as { runtimeContext?: unknown })?.runtimeContext as - | ComposeRuntimeRefreshOptions['runtimeContext'] - | undefined; + const composeRuntimeContext = composeCtx.runtimeContext; const runtimeContext = { dockerApi: context?.dockerApi, auth: context?.auth, @@ -1286,11 +1375,10 @@ class Dockercompose extends Docker { registry: context?.registry, ...(composeRuntimeContext || {}), }; - const hasRuntimeContext = - runtimeContext.dockerApi !== undefined || - runtimeContext.auth !== undefined || - runtimeContext.newImage !== undefined || - runtimeContext.registry !== undefined; + const composeUpdateOptions = this.buildPerformContainerUpdateOptions( + composeCtx, + runtimeContext, + ); if (composeCtx.composeFileOnceApplied === true) { const logContainer = this.log.child({ @@ -1300,28 +1388,12 @@ class Dockercompose extends Docker { `Skip per-service compose refresh for ${composeCtx.service} because compose-file-once mode already refreshed ${composeCtx.composeFile}`, ); } else { - if (Array.isArray(composeCtx.composeFiles) && composeCtx.composeFiles.length > 1) { - await this.updateContainerWithCompose( - composeCtx.composeFile, - composeCtx.service, - container, - { - composeFiles: composeCtx.composeFiles, - ...(composeCtx.skipPull === true ? { skipPull: true } : {}), - ...(hasRuntimeContext ? { runtimeContext } : {}), - }, - ); - } else { - await this.updateContainerWithCompose( - composeCtx.composeFile, - composeCtx.service, - container, - { - ...(composeCtx.skipPull === true ? { skipPull: true } : {}), - ...(hasRuntimeContext ? 
{ runtimeContext } : {}), - }, - ); - } + await this.updateContainerWithCompose( + composeCtx.composeFile, + composeCtx.service, + container, + composeUpdateOptions, + ); } await this.runServicePostStartHooks( container, @@ -1332,6 +1404,33 @@ class Dockercompose extends Docker { return !this.configuration.dryrun; } + buildPerformContainerUpdateOptions( + composeCtx: ComposeUpdateLifecycleContext, + runtimeContext: ComposeRuntimeContext, + ): Pick { + const composeUpdateOptions = {} as Pick< + ComposeRuntimeRefreshOptions, + 'composeFiles' | 'skipPull' | 'runtimeContext' + >; + + if (Array.isArray(composeCtx.composeFiles) && composeCtx.composeFiles.length > 1) { + composeUpdateOptions.composeFiles = composeCtx.composeFiles; + } + if (composeCtx.skipPull === true) { + composeUpdateOptions.skipPull = true; + } + if ( + runtimeContext.dockerApi !== undefined || + runtimeContext.auth !== undefined || + runtimeContext.newImage !== undefined || + runtimeContext.registry !== undefined + ) { + composeUpdateOptions.runtimeContext = runtimeContext; + } + + return composeUpdateOptions; + } + /** * Keep compose dry-run side-effect free: no prune and no backup records. */ @@ -1397,6 +1496,25 @@ class Dockercompose extends Docker { configuredComposeFilePath: string | null, ): Promise> { const containersByComposeFile = new Map(); + const composeFileAccessErrorByPath = new Map(); + + const getComposeFileAccessError = async (composeFile: string): Promise => { + if (composeFileAccessErrorByPath.has(composeFile)) { + return composeFileAccessErrorByPath.get(composeFile) ?? null; + } + try { + await fs.access(composeFile); + composeFileAccessErrorByPath.set(composeFile, null); + return null; + } catch (e) { + const reason = + e.code === 'EACCES' + ? 
`permission denied (${ROOT_MODE_BREAK_GLASS_HINT})` + : 'does not exist'; + composeFileAccessErrorByPath.set(composeFile, reason); + return reason; + } + }; for (const container of containers) { // Filter on containers running on local host @@ -1424,20 +1542,19 @@ class Dockercompose extends Docker { } let missingComposeFile = null as string | null; + let missingComposeFileReason = null as string | null; for (const composeFile of composeFiles) { - try { - await fs.access(composeFile); - } catch (e) { - const reason = - e.code === 'EACCES' - ? `permission denied (${ROOT_MODE_BREAK_GLASS_HINT})` - : 'does not exist'; - this.log.warn(`Compose file ${composeFile} for container ${container.name} ${reason}`); + const composeFileAccessError = await getComposeFileAccessError(composeFile); + if (composeFileAccessError) { missingComposeFile = composeFile; + missingComposeFileReason = composeFileAccessError; break; } } if (missingComposeFile) { + this.log.warn( + `Compose file ${missingComposeFile} for container ${container.name} ${missingComposeFileReason}`, + ); continue; } @@ -1493,6 +1610,44 @@ class Dockercompose extends Docker { return batchResults; } + private async buildComposeFileOnceRuntimeContextByService( + mappingsNeedingRuntimeUpdate: ComposeRuntimeUpdateMapping[], + ): Promise>> { + const composeFileOnceRuntimeContextByService = new Map< + string, + NonNullable + >(); + const firstContainerByService = new Map(); + for (const mapping of mappingsNeedingRuntimeUpdate) { + if (!firstContainerByService.has(mapping.service)) { + firstContainerByService.set(mapping.service, mapping); + } + } + await Promise.all( + [...firstContainerByService.entries()].map(async ([service, mapping]) => { + const runtimeContainer = mapping.container; + const logContainer = this.log.child({ + container: runtimeContainer.name, + }); + const watcher = this.getWatcher(runtimeContainer); + const { dockerApi } = watcher; + const registry = this.resolveRegistryManager(runtimeContainer, 
logContainer, { + allowAnonymousFallback: true, + }); + const auth = await registry.getAuthPull(); + const newImage = this.getNewImageFullName(registry, runtimeContainer); + composeFileOnceRuntimeContextByService.set(service, { + dockerApi, + registry, + auth, + newImage, + }); + await this.pullImage(dockerApi, auth, newImage, logContainer); + }), + ); + return composeFileOnceRuntimeContextByService; + } + /** * Process a specific compose file with its associated containers. * @param composeFile @@ -1598,6 +1753,10 @@ class Dockercompose extends Docker { // Replace only the targeted compose service image values. const serviceImageUpdates = this.buildComposeServiceImageUpdates(composeUpdates); + const parsedComposeFileObject = this.buildUpdatedComposeFileObjectForValidation( + composeByFile.get(writableComposeFile), + serviceImageUpdates, + ); await this.mutateComposeFile( writableComposeFile, (composeFileText, composeFileMetadata) => @@ -1612,6 +1771,7 @@ class Dockercompose extends Docker { ), { composeFiles: composeFileChain, + parsedComposeFileObject, }, ); } @@ -1620,43 +1780,9 @@ class Dockercompose extends Docker { const composeFileOnceHandledServices = new Set(); const composeFileOnceEnabled = this.configuration.composeFileOnce === true && this.configuration.dryrun !== true; - const composeFileOnceRuntimeContextByService = new Map< - string, - NonNullable - >(); - if (composeFileOnceEnabled) { - const firstContainerByService = new Map< - string, - (typeof mappingsNeedingRuntimeUpdate)[number] - >(); - for (const mapping of mappingsNeedingRuntimeUpdate) { - if (!firstContainerByService.has(mapping.service)) { - firstContainerByService.set(mapping.service, mapping); - } - } - await Promise.all( - [...firstContainerByService.entries()].map(async ([service, mapping]) => { - const runtimeContainer = mapping.container; - const logContainer = this.log.child({ - container: runtimeContainer.name, - }); - const watcher = this.getWatcher(runtimeContainer); - const { 
dockerApi } = watcher; - const registry = this.resolveRegistryManager(runtimeContainer, logContainer, { - allowAnonymousFallback: true, - }); - const auth = await registry.getAuthPull(); - const newImage = this.getNewImageFullName(registry, runtimeContainer); - composeFileOnceRuntimeContextByService.set(service, { - dockerApi, - registry, - auth, - newImage, - }); - await this.pullImage(dockerApi, auth, newImage, logContainer); - }), - ); - } + const composeFileOnceRuntimeContextByService = composeFileOnceEnabled + ? await this.buildComposeFileOnceRuntimeContextByService(mappingsNeedingRuntimeUpdate) + : new Map>(); // Refresh all containers requiring a runtime update via the shared // lifecycle orchestrator (security gate, hooks, prune/backup, events). @@ -2187,6 +2313,7 @@ class Dockercompose extends Docker { export default Dockercompose; export { + hasExplicitRegistryHost as testable_hasExplicitRegistryHost, normalizeImplicitLatest as testable_normalizeImplicitLatest, normalizePostStartHooks as testable_normalizePostStartHooks, normalizePostStartEnvironmentValue as testable_normalizePostStartEnvironmentValue, From f58345767ee763dff5f758bdd9084c1643463e5c Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 22:34:28 -0400 Subject: [PATCH 29/59] =?UTF-8?q?=F0=9F=90=9B=20fix(watcher):=20skip=20sam?= =?UTF-8?q?e-name=20dedupe=20for=20containers=20with=20empty=20names?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Guard stale container cleanup to only run when name is non-empty, preventing spurious deletions for unnamed containers - Add test for empty-name skip path --- ...docker-image-details-orchestration.test.ts | 31 +++++++++++++++++++ .../docker-image-details-orchestration.ts | 16 +++++----- 2 files changed, 40 insertions(+), 7 deletions(-) diff --git a/app/watchers/providers/docker/docker-image-details-orchestration.test.ts 
b/app/watchers/providers/docker/docker-image-details-orchestration.test.ts index c0622abf..9bd47d26 100644 --- a/app/watchers/providers/docker/docker-image-details-orchestration.test.ts +++ b/app/watchers/providers/docker/docker-image-details-orchestration.test.ts @@ -539,4 +539,35 @@ describe('docker image details orchestration module', () => { expect(getContainersSpy).toHaveBeenCalledWith({ watcher: 'docker-test', name: 'service' }); expect(deleteContainerSpy).toHaveBeenCalledWith('old-container-id'); }); + + test('skips same-name dedupe when the discovered container name is empty', async () => { + vi.spyOn(storeContainer, 'getContainer').mockReturnValue(undefined); + const getContainersSpy = vi.spyOn(storeContainer, 'getContainers').mockReturnValue([ + { + id: 'old-container-id', + watcher: 'docker-test', + name: '', + } as any, + ]); + const deleteContainerSpy = vi + .spyOn(storeContainer, 'deleteContainer') + .mockImplementation(() => {}); + + const { watcher } = createWatcher(); + + const result = await addImageDetailsToContainerOrchestration( + watcher as any, + createDockerSummaryContainer({ + Id: 'new-container-id', + Names: [], + }), + {}, + createHelpers() as any, + ); + + expect(result?.id).toBe('new-container-id'); + expect(result?.name).toBe(''); + expect(getContainersSpy).not.toHaveBeenCalled(); + expect(deleteContainerSpy).not.toHaveBeenCalled(); + }); }); diff --git a/app/watchers/providers/docker/docker-image-details-orchestration.ts b/app/watchers/providers/docker/docker-image-details-orchestration.ts index 58076599..9bddc461 100644 --- a/app/watchers/providers/docker/docker-image-details-orchestration.ts +++ b/app/watchers/providers/docker/docker-image-details-orchestration.ts @@ -344,12 +344,14 @@ export async function addImageDetailsToContainerOrchestration( updateAvailable: false, updateKind: { kind: 'unknown' }, } as Container); - const containersWithSameName = storeContainer.getContainers({ - watcher: watcher.name, - name: 
containerToReturn.name, - }); - containersWithSameName - .filter((staleContainer) => staleContainer.id !== containerToReturn.id) - .forEach((staleContainer) => storeContainer.deleteContainer(staleContainer.id)); + if (typeof containerToReturn.name === 'string' && containerToReturn.name !== '') { + const containersWithSameName = storeContainer.getContainers({ + watcher: watcher.name, + name: containerToReturn.name, + }); + containersWithSameName + .filter((staleContainer) => staleContainer.id !== containerToReturn.id) + .forEach((staleContainer) => storeContainer.deleteContainer(staleContainer.id)); + } return containerToReturn; } From 8718ec0f14ea83db73300f1c6e9a9a565494893a Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 22:34:38 -0400 Subject: [PATCH 30/59] =?UTF-8?q?=F0=9F=90=9B=20fix(demo):=20harden=20post?= =?UTF-8?q?Message=20origin,=20share=20fallback,=20and=20SSE=20dispatch?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Replace wildcard postMessage origin with referrer-derived parent origin - Remove invalid X-Frame-Options ALLOW-FROM header (CSP handles framing) - Add try/catch around navigator.share and clipboard.writeText - Dispatch onmessage handler for generic message events in FakeEventSource - Use no-store cache-control for Docker fallback icons in mock handlers --- apps/demo/src/main.ts | 18 +++++++++++++++++- apps/demo/src/mocks/handlers/icons.ts | 4 +++- apps/demo/src/mocks/sse.ts | 3 +++ apps/demo/vercel.json | 1 - apps/web/components/demo-section.tsx | 20 +++++++++++++++++--- 5 files changed, 40 insertions(+), 6 deletions(-) diff --git a/apps/demo/src/main.ts b/apps/demo/src/main.ts index c1f1b22f..bd940f17 100644 --- a/apps/demo/src/main.ts +++ b/apps/demo/src/main.ts @@ -13,6 +13,18 @@ import { FakeEventSource } from './mocks/sse'; // creates an EventSource in AppLayout, so this must happen first. 
(globalThis as unknown as { EventSource: typeof FakeEventSource }).EventSource = FakeEventSource; +function getParentOrigin(): string | null { + if (!document.referrer) { + return null; + } + + try { + return new URL(document.referrer).origin; + } catch { + return null; + } +} + async function boot() { // Start MSW — must be running before the UI makes any fetch() calls const { worker } = await import('./mocks/browser'); @@ -41,7 +53,11 @@ async function boot() { // Tell the parent frame (website) we loaded successfully if (window.parent !== window) { - window.parent.postMessage({ type: 'drydock-demo-ready' }, '*'); + const parentOrigin = getParentOrigin(); + + if (parentOrigin) { + window.parent.postMessage({ type: 'drydock-demo-ready' }, parentOrigin); + } } // Auto-fill login credentials so demo visitors just click "Sign in". diff --git a/apps/demo/src/mocks/handlers/icons.ts b/apps/demo/src/mocks/handlers/icons.ts index 6282554b..923b3b52 100644 --- a/apps/demo/src/mocks/handlers/icons.ts +++ b/apps/demo/src/mocks/handlers/icons.ts @@ -48,6 +48,7 @@ export const iconHandlers = [ // Try primary provider let upstream = await tryFetch(config.url(slug)); + let usedDockerFallback = false; // Selfhst miss → try homarr fallback if (!upstream && provider === 'selfhst') { @@ -57,6 +58,7 @@ export const iconHandlers = [ // Still nothing → Docker icon as final fallback if (!upstream) { upstream = await tryFetch(DOCKER_FALLBACK_URL); + usedDockerFallback = upstream !== null; } if (!upstream) { @@ -69,7 +71,7 @@ export const iconHandlers = [ return new HttpResponse(buffer, { headers: { 'Content-Type': contentType, - 'Cache-Control': 'public, max-age=31536000, immutable', + 'Cache-Control': usedDockerFallback ? 
'no-store' : 'public, max-age=31536000, immutable', }, }); }), diff --git a/apps/demo/src/mocks/sse.ts b/apps/demo/src/mocks/sse.ts index 03b61856..292c1b09 100644 --- a/apps/demo/src/mocks/sse.ts +++ b/apps/demo/src/mocks/sse.ts @@ -75,6 +75,9 @@ export class FakeEventSource { private dispatch(type: string, data: string): void { const event = new MessageEvent(type, { data }); + if (type === 'message') { + this.onmessage?.(event); + } this.listeners.get(type)?.forEach((fn) => fn(event)); } } diff --git a/apps/demo/vercel.json b/apps/demo/vercel.json index d3890290..0500e238 100644 --- a/apps/demo/vercel.json +++ b/apps/demo/vercel.json @@ -8,7 +8,6 @@ { "source": "/(.*)", "headers": [ - { "key": "X-Frame-Options", "value": "ALLOW-FROM https://drydock.codeswhat.com" }, { "key": "Content-Security-Policy", "value": "frame-ancestors 'self' https://drydock.codeswhat.com https://*.vercel.app" diff --git a/apps/web/components/demo-section.tsx b/apps/web/components/demo-section.tsx index 9a0aed4e..ce245b3c 100644 --- a/apps/web/components/demo-section.tsx +++ b/apps/web/components/demo-section.tsx @@ -168,9 +168,23 @@ export function DemoSection() { }; if (navigator.share) { - await navigator.share(shareData); - } else { - await navigator.clipboard.writeText(shareData.url); + try { + await navigator.share(shareData); + return; + } catch (error) { + // Ignore user-cancelled share prompts. 
+ if (error instanceof DOMException && error.name === "AbortError") { + return; + } + } + } + + if (navigator.clipboard?.writeText) { + try { + await navigator.clipboard.writeText(shareData.url); + } catch (error) { + console.warn("Failed to copy demo URL to clipboard", error); + } } } From c47cc8d825a3d53f8a0baa77029a96a7a8ff8afa Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 22:34:44 -0400 Subject: [PATCH 31/59] =?UTF-8?q?=E2=9C=85=20test(log):=20replace=20setTim?= =?UTF-8?q?eout=20with=20vi.waitFor=20in=20debug-level=20tests?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Eliminate timing-dependent 50ms setTimeout in favor of vi.waitFor polling, preventing flaky failures under CI load --- app/log/index.debug-level.test.ts | 32 +++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/app/log/index.debug-level.test.ts b/app/log/index.debug-level.test.ts index 0e609809..3ef174fd 100644 --- a/app/log/index.debug-level.test.ts +++ b/app/log/index.debug-level.test.ts @@ -24,14 +24,14 @@ describe('Logger with debug level', () => { log.debug({ component: 'test' }, 'debug-level-message'); - await new Promise((resolve) => setTimeout(resolve, 50)); - - expect(mockAddEntry).toHaveBeenCalledWith( - expect.objectContaining({ - level: 'debug', - msg: 'debug-level-message', - }), - ); + await vi.waitFor(() => { + expect(mockAddEntry).toHaveBeenCalledWith( + expect.objectContaining({ + level: 'debug', + msg: 'debug-level-message', + }), + ); + }); }); test('should deliver info messages when level is debug', async () => { @@ -39,13 +39,13 @@ describe('Logger with debug level', () => { log.info({ component: 'test' }, 'info-level-message'); - await new Promise((resolve) => setTimeout(resolve, 50)); - - expect(mockAddEntry).toHaveBeenCalledWith( - expect.objectContaining({ - level: 'info', - msg: 'info-level-message', - }), - ); + 
await vi.waitFor(() => { + expect(mockAddEntry).toHaveBeenCalledWith( + expect.objectContaining({ + level: 'info', + msg: 'info-level-message', + }), + ); + }); }); }); From d597db9c9c362527526bb5998d9a05b04dea762d Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 22:34:51 -0400 Subject: [PATCH 32/59] =?UTF-8?q?=E2=9C=85=20test(audit):=20add=20containe?= =?UTF-8?q?r-update=20handler=20and=20empty-name=20fallback=20test?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Wire containerUpdated handler in test harness setup - Add test for container-update audit with missing name/id fields --- app/event/audit-subscriptions.test.ts | 27 +++++++++++++++++++++++++-- 1 file changed, 25 insertions(+), 2 deletions(-) diff --git a/app/event/audit-subscriptions.test.ts b/app/event/audit-subscriptions.test.ts index e8d923e7..6f7f443a 100644 --- a/app/event/audit-subscriptions.test.ts +++ b/app/event/audit-subscriptions.test.ts @@ -31,10 +31,12 @@ type OrderedEventHandlerFn = (payload: TPayload) => void | Promise; agentDisconnectedHandler: OrderedEventHandlerFn; + containerUpdatedHandler: (payload: ContainerLifecycleEventPayload) => void; } { const handlers: { securityAlert?: OrderedEventHandlerFn; agentDisconnected?: OrderedEventHandlerFn; + containerUpdated?: (payload: ContainerLifecycleEventPayload) => void; } = {}; const registerOrdered = @@ -61,19 +63,22 @@ function setupAuditSubscriptions(): { handlers.agentDisconnected = handler; }), registerContainerAdded: registerEvent(() => {}), - registerContainerUpdated: registerEvent(() => {}), + registerContainerUpdated: registerEvent((handler) => { + handlers.containerUpdated = handler; + }), registerContainerRemoved: registerEvent(() => {}), }; registerAuditLogSubscriptions(registrars); - if (!handlers.securityAlert || !handlers.agentDisconnected) { + if (!handlers.securityAlert || !handlers.agentDisconnected || 
!handlers.containerUpdated) { throw new Error('Expected audit handlers to be registered'); } return { securityAlertHandler: handlers.securityAlert, agentDisconnectedHandler: handlers.agentDisconnected, + containerUpdatedHandler: handlers.containerUpdated, }; } @@ -154,4 +159,22 @@ describe('audit-subscriptions dedupe windows', () => { expect(mockInsertAudit).toHaveBeenCalledTimes(2); expect(mockInc).toHaveBeenCalledTimes(2); }); + + test('records container update audit with empty containerName fallback when name and id are missing', () => { + const { containerUpdatedHandler } = setupAuditSubscriptions(); + + containerUpdatedHandler({ + image: { name: 'nginx' }, + status: 'running', + } as unknown as ContainerLifecycleEventPayload); + + expect(mockInsertAudit).toHaveBeenCalledWith( + expect.objectContaining({ + action: 'container-update', + containerName: '', + details: 'status: running', + }), + ); + expect(mockInc).toHaveBeenCalledWith({ action: 'container-update' }); + }); }); From 669357e6a416994a0d527c4dbefbb32eb9e3bb1d Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 22:35:01 -0400 Subject: [PATCH 33/59] =?UTF-8?q?=F0=9F=93=9D=20docs:=20move=20rc.12=20cha?= =?UTF-8?q?ngelog=20entries=20under=20v1.4.0=20and=20fix=20mTLS=20key=20na?= =?UTF-8?q?mes?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Move all rc.12 fix entries from [Unreleased] into [1.4.0] section - Fix mTLS option names: CERTFILE/KEYFILE → CLIENTCERT/CLIENTKEY - Move HASS_ATTRIBUTES default change to Breaking Changes section - Update docs changelog to match corrected mTLS option names - Simplify QA filebrowser display name label --- CHANGELOG.md | 42 ++++++++----------- .../providers/dockercompose/Dockercompose.ts | 5 +-- content/docs/current/changelog/index.mdx | 2 +- test/qa-rc12-stacks/filebrowser/compose.yaml | 2 +- 4 files changed, 22 insertions(+), 29 deletions(-) diff --git 
a/CHANGELOG.md b/CHANGELOG.md index 677501be..aeca0df7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,35 +8,17 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 > **Fork point:** upstream post-8.1.1 (2025-11-27) > **Upstream baseline:** WUD 8.1.1 + 65 merged PRs on `main` (Vue 3 migration, Alpine base image, Rocket.Chat trigger, threshold system, semver improvements, request→axios migration, and more) -## [Unreleased] - -### Added - -- **Audit log for container state changes** — External container lifecycle events (start, stop, restart via Portainer or CLI) now generate `container-update` audit entries with the new status, so the audit log reflects all state changes, not just Drydock-initiated actions. ([#120](https://github.com/CodesWhat/drydock/discussions/120)) -- **mTLS client certificate support** — Registry providers now accept `CERTFILE` and `KEYFILE` options for mutual TLS authentication with private registries that require client certificates. - -### Fixed - -- **Log level setting had no effect** — `DD_LOG_LEVEL=debug` was correctly parsed but debug messages were silently dropped because pino's multistream destinations defaulted to `info` level. Stream destinations now inherit the configured log level. ([#134](https://github.com/CodesWhat/drydock/issues/134)) -- **Server feature flags not loaded after login** — Feature flags (`containeractions`, `delete`) were permanently stuck as disabled when authentication was required, because the pre-login bootstrap fetch failure marked the flags as "loaded" and never retried. Now failed fetches allow automatic retry after login. ([#120](https://github.com/CodesWhat/drydock/discussions/120)) -- **Compose trigger silently skips containers** — Multiple failure paths in the compose trigger were logged at `debug` level, making it nearly impossible to diagnose why a trigger reports success but containers don't update. 
Key diagnostic messages (compose file mismatch, label inspect failure, no containers matched) promoted to `warn` level, and the "already up to date" message now includes container names. ([#84](https://github.com/CodesWhat/drydock/discussions/84)) -- **Fallback icon cached permanently** — The Docker placeholder icon was served with `immutable` cache headers, causing browsers to cache it permanently even after the real provider icon becomes available. Fallback responses now use `no-store`. -- **Basic auth upgrade compatibility restored** — v1.4 now accepts legacy v1.3.9 Basic auth hashes (`{SHA}`, `$apr1$`/`$1$`, `crypt`, and plain fallback) to preserve smooth upgrades. Legacy formats remain deprecated and continue showing a migration banner, with removal still planned for v1.6.0. -- **Compose trigger rejects lowercase env var keys** — Configuration keys like `COMPOSEFILEONCE`, `DIGESTPINNING`, and `RECONCILIATIONMODE` were lowercased by the env parser but the Joi schema expected camelCase. Schema now maps lowercase keys to their camelCase equivalents. ([#120](https://github.com/CodesWhat/drydock/discussions/120)) -- **Compose trigger strips docker.io prefix** — When a compose file uses an explicit `docker.io/` registry prefix, compose mutations now preserve it instead of stripping it to a bare library path. ([#120](https://github.com/CodesWhat/drydock/discussions/120)) -- **Compose trigger fails when FILE points to directory** — `DD_TRIGGER_DOCKERCOMPOSE_{name}_FILE` now accepts directories, automatically probing for `compose.yaml`, `compose.yml`, `docker-compose.yaml`, or `docker-compose.yml` inside the directory. ([#84](https://github.com/CodesWhat/drydock/discussions/84)) -- **Container healthcheck fails with TLS backend** — The Dockerfile healthcheck now detects `DD_SERVER_TLS_ENABLED=true` and switches to `curl --insecure https://` for self-signed certificates. Also skips the healthcheck entirely when `DD_SERVER_ENABLED=false`. 
([#120](https://github.com/CodesWhat/drydock/discussions/120)) -- **Agent CAFILE ignored without CERTFILE** — The agent subsystem now loads the CA certificate from `CAFILE` even when `CERTFILE` is not provided, fixing TLS verification for agents behind reverse proxies with custom CA chains. -- **Service worker accepts cross-origin postMessage** — The demo service worker now validates `postMessage` origins against the current host, preventing potential cross-origin message injection. +## [1.4.0] — 2026-02-28 -### Changed +### Breaking Changes -- **MQTT HASS_ATTRIBUTES default changed to `short`** — The MQTT trigger `HASS_ATTRIBUTES` preset now defaults to `short` instead of `full`, excluding large SBOM documents, scan vulnerabilities, details, and labels from Home Assistant entity payloads. Users who need the full payload can set `DD_TRIGGER_MQTT_{name}_HASS_ATTRIBUTES=full` explicitly. - -## [1.4.0] — 2026-02-28 +- **MQTT `HASS_ATTRIBUTES` default changed from `full` to `short`** — This changes Home Assistant entity payloads by default, excluding large SBOM documents, scan vulnerabilities, details, and labels. To retain the previous payload behavior, set `DD_TRIGGER_MQTT_{name}_HASS_ATTRIBUTES=full` explicitly. ### Added +- **Audit log for container state changes** — External container lifecycle events (start, stop, restart via Portainer or CLI) now generate `container-update` audit entries with the new status, so the audit log reflects all state changes, not just Drydock-initiated actions. ([#120](https://github.com/CodesWhat/drydock/discussions/120)) +- **mTLS client certificate support** — Registry providers now accept `CLIENTCERT` and `CLIENTKEY` options for mutual TLS authentication with private registries that require client certificates. 
+ #### Backend / Core - **Container recent-status API** — `GET /api/containers/recent-status` returns pre-computed update status (`updated`/`pending`/`failed`) per container, replacing the client-side audit log scan and reducing dashboard fetch payload size. @@ -145,6 +127,18 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Fixed +- **Log level setting had no effect** — `DD_LOG_LEVEL=debug` was correctly parsed but debug messages were silently dropped because pino's multistream destinations defaulted to `info` level. Stream destinations now inherit the configured log level. ([#134](https://github.com/CodesWhat/drydock/issues/134)) +- **Server feature flags not loaded after login** — Feature flags (`containeractions`, `delete`) were permanently stuck as disabled when authentication was required, because the pre-login bootstrap fetch failure marked the flags as "loaded" and never retried. Now failed fetches allow automatic retry after login. ([#120](https://github.com/CodesWhat/drydock/discussions/120)) +- **Compose trigger silently skips containers** — Multiple failure paths in the compose trigger were logged at `debug` level, making it nearly impossible to diagnose why a trigger reports success but containers don't update. Key diagnostic messages (compose file mismatch, label inspect failure, no containers matched) promoted to `warn` level, and the "already up to date" message now includes container names. ([#84](https://github.com/CodesWhat/drydock/discussions/84)) +- **Fallback icon cached permanently** — The Docker placeholder icon was served with `immutable` cache headers, causing browsers to cache it permanently even after the real provider icon becomes available. Fallback responses now use `no-store`. +- **Basic auth upgrade compatibility restored** — Basic auth now accepts legacy v1.3.9 Basic auth hashes (`{SHA}`, `$apr1$`/`$1$`, `crypt`, and plain fallback) to preserve smooth upgrades. 
Legacy formats remain deprecated and continue showing a migration banner, with removal still planned for v1.6.0. +- **Compose trigger rejects lowercase env var keys** — Configuration keys like `COMPOSEFILEONCE`, `DIGESTPINNING`, and `RECONCILIATIONMODE` were lowercased by the env parser but the Joi schema expected camelCase. Schema now maps lowercase keys to their camelCase equivalents. ([#120](https://github.com/CodesWhat/drydock/discussions/120)) +- **Compose trigger strips docker.io prefix** — When a compose file uses an explicit `docker.io/` registry prefix, compose mutations now preserve it instead of stripping it to a bare library path. ([#120](https://github.com/CodesWhat/drydock/discussions/120)) +- **Compose trigger fails when FILE points to directory** — `DD_TRIGGER_DOCKERCOMPOSE_{name}_FILE` now accepts directories, automatically probing for `compose.yaml`, `compose.yml`, `docker-compose.yaml`, or `docker-compose.yml` inside the directory. ([#84](https://github.com/CodesWhat/drydock/discussions/84)) +- **Container healthcheck fails with TLS backend** — The Dockerfile healthcheck now detects `DD_SERVER_TLS_ENABLED=true` and switches to `curl --insecure https://` for self-signed certificates. Also skips the healthcheck entirely when `DD_SERVER_ENABLED=false`. ([#120](https://github.com/CodesWhat/drydock/discussions/120)) +- **Agent CAFILE ignored without CERTFILE** — The agent subsystem now loads the CA certificate from `CAFILE` even when `CERTFILE` is not provided, fixing TLS verification for agents behind reverse proxies with custom CA chains. +- **Service worker accepts cross-origin postMessage** — The demo service worker now validates `postMessage` origins against the current host, preventing potential cross-origin message injection. 
+ - **Action buttons disable and show spinner during in-progress actions** — Container action buttons (Stop, Start, Restart, Update, Delete) now show a disabled state with a spinner while the action runs in the background, providing clear visual feedback. The confirm dialog closes immediately on accept instead of blocking the UI. - **Command palette clears stale filter on navigation** — Navigating to a container via Ctrl+K search now clears the active `filterKind`, preventing stale filter state from hiding the navigated container. - **Manual update button works with compose triggers** — The update container endpoint now searches for both `docker` and `dockercompose` trigger types, matching the existing preview endpoint behavior. Previously, users with only a compose trigger saw "No docker trigger found for this container". diff --git a/app/triggers/providers/dockercompose/Dockercompose.ts b/app/triggers/providers/dockercompose/Dockercompose.ts index c5df05a7..3b47b2e0 100644 --- a/app/triggers/providers/dockercompose/Dockercompose.ts +++ b/app/triggers/providers/dockercompose/Dockercompose.ts @@ -95,13 +95,12 @@ type RegistryImageContainerReference = { }; }; -type RegistryManagerLike = Pick; type RegistryPullAuth = Awaited>; type ComposeRuntimeContext = { - dockerApi?: DockerApiLike; + dockerApi?: unknown; auth?: RegistryPullAuth; newImage?: string; - registry?: RegistryManagerLike; + registry?: unknown; }; type ComposeUpdateLifecycleContext = { diff --git a/content/docs/current/changelog/index.mdx b/content/docs/current/changelog/index.mdx index de8ebe0b..162e2662 100644 --- a/content/docs/current/changelog/index.mdx +++ b/content/docs/current/changelog/index.mdx @@ -16,7 +16,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Added - **Audit log for container state changes** — External container lifecycle events (start, stop, restart via Portainer or CLI) now generate `container-update` audit entries with the new status, so 
the audit log reflects all state changes, not just Drydock-initiated actions. ([#120](https://github.com/CodesWhat/drydock/discussions/120)) -- **mTLS client certificate support** — Registry providers now accept `CERTFILE` and `KEYFILE` options for mutual TLS authentication with private registries that require client certificates. +- **mTLS client certificate support** — Registry providers now accept `CLIENTCERT` and `CLIENTKEY` options for mutual TLS authentication with private registries that require client certificates. ### Fixed diff --git a/test/qa-rc12-stacks/filebrowser/compose.yaml b/test/qa-rc12-stacks/filebrowser/compose.yaml index 46c181cc..5afccb2f 100644 --- a/test/qa-rc12-stacks/filebrowser/compose.yaml +++ b/test/qa-rc12-stacks/filebrowser/compose.yaml @@ -4,4 +4,4 @@ services: container_name: qa-filebrowser labels: - dd.watch=true - - dd.display.name=Filebrowser (Dockge) + - dd.display.name=Filebrowser From 40d1b7ac25319086b75b80a24b9d8c928ed60a1c Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 23:37:39 -0400 Subject: [PATCH 34/59] =?UTF-8?q?=F0=9F=92=84=20style(ui):=20remove=20bord?= =?UTF-8?q?ers=20from=20shared=20data=20components=20and=20layout?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - DataTable: remove outer border, row separators, thead/body divider - DataFilterBar: remove container border, toolbar button borders, scale icons - DataCardGrid: remove default card border (keep selection border) - DataListAccordion: remove default item border, soften detail divider - EmptyState/SecurityEmptyState: remove card borders - AppLayout: add subtle header bottom border for breadcrumb separation - Update stories to match new borderless style --- ui/src/components/DataCardGrid.vue | 2 +- ui/src/components/DataFilterBar.stories.ts | 2 +- ui/src/components/DataFilterBar.vue | 16 ++++++---------- ui/src/components/DataListAccordion.vue | 4 ++-- 
ui/src/components/DataTable.vue | 8 ++++---- ui/src/components/DataViewLayout.stories.ts | 2 +- ui/src/components/EmptyState.vue | 1 - ui/src/components/SecurityEmptyState.vue | 1 - ui/src/layouts/AppLayout.vue | 1 + 9 files changed, 16 insertions(+), 21 deletions(-) diff --git a/ui/src/components/DataCardGrid.vue b/ui/src/components/DataCardGrid.vue index 927f1135..28a1efb6 100644 --- a/ui/src/components/DataCardGrid.vue +++ b/ui/src/components/DataCardGrid.vue @@ -46,7 +46,7 @@ function onCardKeydown(event: KeyboardEvent, item: Record) { backgroundColor: 'var(--dd-bg-card)', border: selectedKey != null && getKey(item, itemKey) === selectedKey ? '1.5px solid var(--color-drydock-secondary)' - : '1px solid var(--dd-border-strong)', + : 'none', borderRadius: 'var(--dd-radius)', overflow: 'hidden', }" diff --git a/ui/src/components/DataFilterBar.stories.ts b/ui/src/components/DataFilterBar.stories.ts index 43c225a1..9d19aa17 100644 --- a/ui/src/components/DataFilterBar.stories.ts +++ b/ui/src/components/DataFilterBar.stories.ts @@ -26,7 +26,7 @@ const renderWithFilters = (args: Story['args']) => ({ diff --git a/ui/src/components/DataFilterBar.vue b/ui/src/components/DataFilterBar.vue index c35ca435..d20afd72 100644 --- a/ui/src/components/DataFilterBar.vue +++ b/ui/src/components/DataFilterBar.vue @@ -34,20 +34,18 @@ function viewModeLabel(id: string): string {

{{ filteredCount }}/{{ totalCount }} -
+ aria-label="View mode">
diff --git a/ui/src/components/DataTable.vue b/ui/src/components/DataTable.vue index b34a95ce..67bb86f0 100644 --- a/ui/src/components/DataTable.vue +++ b/ui/src/components/DataTable.vue @@ -318,7 +318,7 @@ function handleHeaderKeydown(event: KeyboardEvent, col: DataTableColumn) { diff --git a/ui/src/views/SecurityView.vue b/ui/src/views/SecurityView.vue index f010ba29..5db1c679 100644 --- a/ui/src/views/SecurityView.vue +++ b/ui/src/views/SecurityView.vue @@ -221,7 +221,7 @@ onUnmounted(() => { :count-label="displayCountLabel"> From 23ae4b781bfacca58001cdd05e4b14ebb45f2eff Mon Sep 17 00:00:00 2001 From: superuserjr <80784472+turbodaemon@users.noreply.github.com> Date: Wed, 11 Mar 2026 23:38:05 -0400 Subject: [PATCH 36/59] =?UTF-8?q?=F0=9F=92=84=20style(ui):=20redesign=20de?= =?UTF-8?q?tail=20panel=20full-page=20button=20with=20frame-corners=20icon?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Move full-page button left of L/M/S size controls - Replace expand icon + "Full Page" text with icon-only frame-corners - Add frame-corners icon mapping across all 7 icon libraries - Keep tooltip for discoverability --- ui/src/boot/icon-bundle.json | 35 +++++++++++++++++++++++++++++++ ui/src/components/DetailPanel.vue | 21 +++++++++---------- ui/src/icons.ts | 9 ++++++++ 3 files changed, 54 insertions(+), 11 deletions(-) diff --git a/ui/src/boot/icon-bundle.json b/ui/src/boot/icon-bundle.json index 84da0d54..d951cf07 100644 --- a/ui/src/boot/icon-bundle.json +++ b/ui/src/boot/icon-bundle.json @@ -339,6 +339,11 @@ "width": 512, "height": 512 }, + "fa6-solid:expand": { + "body": "", + "width": 448, + "height": 512 + }, "fa6-solid:book": { "body": "", "width": 448, @@ -1059,6 +1064,16 @@ "width": 256, "height": 256 }, + "ph:frame-corners": { + "body": "", + "width": 256, + "height": 256 + }, + "ph:frame-corners-duotone": { + "body": "", + "width": 256, + "height": 256 + }, "ph:book-open": { "body": "", "width": 256, @@ -1434,6 
+1449,11 @@ "width": 24, "height": 24 }, + "lucide:scan": { + "body": "", + "width": 24, + "height": 24 + }, "lucide:book-open": { "body": "", "width": 24, @@ -1799,6 +1819,11 @@ "width": 24, "height": 24 }, + "tabler:scan": { + "body": "", + "width": 24, + "height": 24 + }, "tabler:book": { "body": "", "width": 24, @@ -2134,6 +2159,11 @@ "width": 24, "height": 24 }, + "heroicons:viewfinder-circle": { + "body": "", + "width": 24, + "height": 24 + }, "heroicons:book-open": { "body": "", "width": 24, @@ -2464,6 +2494,11 @@ "width": 24, "height": 24 }, + "iconoir:frame-select": { + "body": "", + "width": 24, + "height": 24 + }, "iconoir:book": { "body": "", "width": 24, diff --git a/ui/src/components/DetailPanel.vue b/ui/src/components/DetailPanel.vue index 4a4d0df8..ab0e8787 100644 --- a/ui/src/components/DetailPanel.vue +++ b/ui/src/components/DetailPanel.vue @@ -60,7 +60,6 @@ onUnmounted(() => globalThis.removeEventListener('keydown', handleKeydown)); width: isMobile ? '100%' : panelDesktopWidth, maxWidth: isMobile ? '100%' : 'min(calc(100vw - 32px), 920px)', backgroundColor: 'var(--dd-bg-card)', - border: '1px solid var(--dd-border-strong)', height: isMobile ? '100vh' : 'calc(100vh - 96px)', minHeight: '480px', }"> @@ -69,9 +68,16 @@ onUnmounted(() => globalThis.removeEventListener('keydown', handleKeydown));
-
- +