From 3d41643e38f90c35e9ed8a72aee3ae7861473e4f Mon Sep 17 00:00:00 2001 From: "Node.js GitHub Bot" Date: Sat, 24 Jan 2026 17:35:35 -0500 Subject: [PATCH 001/115] deps: update undici to 7.19.0 PR-URL: https://github.com/nodejs/node/pull/61470 Reviewed-By: Colin Ihrig Reviewed-By: Antoine du Hamel --- deps/undici/src/docs/docs/api/Client.md | 2 + deps/undici/src/lib/core/symbols.js | 2 + deps/undici/src/lib/core/util.js | 2 + deps/undici/src/lib/dispatcher/client-h2.js | 23 +- deps/undici/src/lib/dispatcher/client.js | 22 +- deps/undici/src/lib/interceptor/cache.js | 44 ++- deps/undici/src/lib/llhttp/wasm_build_env.txt | 2 +- deps/undici/src/lib/mock/mock-agent.js | 10 +- deps/undici/src/lib/mock/mock-utils.js | 15 +- deps/undici/src/lib/web/cache/cache.js | 4 +- deps/undici/src/lib/web/fetch/index.js | 81 ++++- deps/undici/src/lib/web/fetch/request.js | 2 + deps/undici/src/lib/web/fetch/response.js | 3 +- deps/undici/src/lib/web/fetch/util.js | 27 +- .../src/lib/web/websocket/connection.js | 5 +- .../web/websocket/stream/websocketstream.js | 6 +- deps/undici/src/package-lock.json | 328 +++++++++--------- deps/undici/src/package.json | 2 +- deps/undici/src/types/cache-interceptor.d.ts | 6 + deps/undici/src/types/client.d.ts | 10 + deps/undici/undici.js | 92 ++++- src/undici_version.h | 2 +- 22 files changed, 493 insertions(+), 197 deletions(-) diff --git a/deps/undici/src/docs/docs/api/Client.md b/deps/undici/src/docs/docs/api/Client.md index 9bc93673fc1643..abc02d87d17bb3 100644 --- a/deps/undici/src/docs/docs/api/Client.md +++ b/deps/undici/src/docs/docs/api/Client.md @@ -32,6 +32,8 @@ Returns: `Client` * **allowH2**: `boolean` - Default: `false`. Enables support for H2 if the server has assigned bigger priority to it through ALPN negotiation. * **useH2c**: `boolean` - Default: `false`. Enforces h2c for non-https connections. * **maxConcurrentStreams**: `number` - Default: `100`. Dictates the maximum number of concurrent streams for a single H2 session. It can be overridden by a SETTINGS remote frame. +* **initialWindowSize**: `number` (optional) - Default: `262144` (256KB). Sets the HTTP/2 stream-level flow-control window size (SETTINGS_INITIAL_WINDOW_SIZE). Must be a positive integer greater than 0. This default is higher than Node.js core's default (65535 bytes) to improve throughput; Node's choice is very conservative for current high-bandwidth networks. See [RFC 7540 Section 6.9.2](https://datatracker.ietf.org/doc/html/rfc7540#section-6.9.2) for more details. +* **connectionWindowSize**: `number` (optional) - Default: `524288` (512KB). Sets the HTTP/2 connection-level flow-control window size using `ClientHttp2Session.setLocalWindowSize()`. Must be a positive integer greater than 0. This provides better flow control for the entire connection across multiple streams. See [Node.js HTTP/2 documentation](https://nodejs.org/api/http2.html#clienthttp2sessionsetlocalwindowsize) for more details. > **Notes about HTTP/2** > - It only works under TLS connections. h2c is not supported. 
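The two `Client` options documented above are plain constructor options that map onto Node's HTTP/2 flow-control knobs. A minimal usage sketch follows; it is not part of this patch, and the origin URL is a placeholder. It assumes undici >= 7.19.0 and an HTTP/2-capable TLS server:

```js
// Usage sketch only (not part of the patch). The origin https://example.com is
// hypothetical; it must speak HTTP/2 over TLS for allowH2 to take effect.
import { Client } from 'undici'

const client = new Client('https://example.com', {
  allowH2: true, // negotiate HTTP/2 via ALPN
  initialWindowSize: 262144, // per-stream SETTINGS_INITIAL_WINDOW_SIZE (256KB, the new default)
  connectionWindowSize: 524288 // connection-level window via setLocalWindowSize() (512KB, the new default)
})

const { statusCode, body } = await client.request({ path: '/', method: 'GET' })
console.log(statusCode, (await body.text()).length)
await client.close()
```

Larger windows mainly pay off on high bandwidth-delay-product links: the peer can keep more response data in flight before it has to wait for the client's WINDOW_UPDATE frames.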
diff --git a/deps/undici/src/lib/core/symbols.js b/deps/undici/src/lib/core/symbols.js index 00c4f8ff23beb8..ec45d7951efbcf 100644 --- a/deps/undici/src/lib/core/symbols.js +++ b/deps/undici/src/lib/core/symbols.js @@ -62,6 +62,8 @@ module.exports = { kListeners: Symbol('listeners'), kHTTPContext: Symbol('http context'), kMaxConcurrentStreams: Symbol('max concurrent streams'), + kHTTP2InitialWindowSize: Symbol('http2 initial window size'), + kHTTP2ConnectionWindowSize: Symbol('http2 connection window size'), kEnableConnectProtocol: Symbol('http2session connect protocol'), kRemoteSettings: Symbol('http2session remote settings'), kHTTP2Stream: Symbol('http2session client stream'), diff --git a/deps/undici/src/lib/core/util.js b/deps/undici/src/lib/core/util.js index fd5b0dfaafbc15..abfa156f15303d 100644 --- a/deps/undici/src/lib/core/util.js +++ b/deps/undici/src/lib/core/util.js @@ -58,6 +58,8 @@ function wrapRequestBody (body) { // to determine whether or not it has been disturbed. This is just // a workaround. return new BodyAsyncIterable(body) + } else if (body && isFormDataLike(body)) { + return body } else if ( body && typeof body !== 'string' && diff --git a/deps/undici/src/lib/dispatcher/client-h2.js b/deps/undici/src/lib/dispatcher/client-h2.js index 6406b9fd4385ec..b77c4cffee5f6f 100644 --- a/deps/undici/src/lib/dispatcher/client-h2.js +++ b/deps/undici/src/lib/dispatcher/client-h2.js @@ -25,6 +25,8 @@ const { kOnError, kMaxConcurrentStreams, kHTTP2Session, + kHTTP2InitialWindowSize, + kHTTP2ConnectionWindowSize, kResume, kSize, kHTTPContext, @@ -87,12 +89,16 @@ function parseH2Headers (headers) { function connectH2 (client, socket) { client[kSocket] = socket + const http2InitialWindowSize = client[kHTTP2InitialWindowSize] + const http2ConnectionWindowSize = client[kHTTP2ConnectionWindowSize] + const session = http2.connect(client[kUrl], { createConnection: () => socket, peerMaxConcurrentStreams: client[kMaxConcurrentStreams], settings: { // TODO(metcoder95): add support for PUSH - enablePush: false + enablePush: false, + ...(http2InitialWindowSize != null ? { initialWindowSize: http2InitialWindowSize } : null) } }) @@ -107,6 +113,11 @@ function connectH2 (client, socket) { // States whether or not we have received the remote settings from the server session[kRemoteSettings] = false + // Apply connection-level flow control once connected (if supported). + if (http2ConnectionWindowSize) { + util.addListener(session, 'connect', applyConnectionWindowSize.bind(session, http2ConnectionWindowSize)) + } + util.addListener(session, 'error', onHttp2SessionError) util.addListener(session, 'frameError', onHttp2FrameError) util.addListener(session, 'end', onHttp2SessionEnd) @@ -211,6 +222,16 @@ function resumeH2 (client) { } } +function applyConnectionWindowSize (connectionWindowSize) { + try { + if (typeof this.setLocalWindowSize === 'function') { + this.setLocalWindowSize(connectionWindowSize) + } + } catch { + // Best-effort only. + } +} + function onHttp2RemoteSettings (settings) { // Fallbacks are a safe bet, remote setting will always override this[kClient][kMaxConcurrentStreams] = settings.maxConcurrentStreams ?? 
this[kClient][kMaxConcurrentStreams] diff --git a/deps/undici/src/lib/dispatcher/client.js b/deps/undici/src/lib/dispatcher/client.js index b73cd259019dcd..e603cfebd393e2 100644 --- a/deps/undici/src/lib/dispatcher/client.js +++ b/deps/undici/src/lib/dispatcher/client.js @@ -52,6 +52,8 @@ const { kOnError, kHTTPContext, kMaxConcurrentStreams, + kHTTP2InitialWindowSize, + kHTTP2ConnectionWindowSize, kResume } = require('../core/symbols.js') const connectH1 = require('./client-h1.js') @@ -108,7 +110,9 @@ class Client extends DispatcherBase { // h2 maxConcurrentStreams, allowH2, - useH2c + useH2c, + initialWindowSize, + connectionWindowSize } = {}) { if (keepAlive !== undefined) { throw new InvalidArgumentError('unsupported keepAlive, use pipelining=0 instead') @@ -204,6 +208,14 @@ class Client extends DispatcherBase { throw new InvalidArgumentError('useH2c must be a valid boolean value') } + if (initialWindowSize != null && (!Number.isInteger(initialWindowSize) || initialWindowSize < 1)) { + throw new InvalidArgumentError('initialWindowSize must be a positive integer, greater than 0') + } + + if (connectionWindowSize != null && (!Number.isInteger(connectionWindowSize) || connectionWindowSize < 1)) { + throw new InvalidArgumentError('connectionWindowSize must be a positive integer, greater than 0') + } + super() if (typeof connect !== 'function') { @@ -239,6 +251,14 @@ class Client extends DispatcherBase { this[kClosedResolve] = null this[kMaxResponseSize] = maxResponseSize > -1 ? maxResponseSize : -1 this[kMaxConcurrentStreams] = maxConcurrentStreams != null ? maxConcurrentStreams : 100 // Max peerConcurrentStreams for a Node h2 server + // HTTP/2 window sizes are set to higher defaults than Node.js core for better performance: + // - initialWindowSize: 262144 (256KB) vs Node.js default 65535 (64KB - 1) + // Allows more data to be sent before requiring acknowledgment, improving throughput + // especially on high-latency networks. This matches common production HTTP/2 servers. + // - connectionWindowSize: 524288 (512KB) vs Node.js default (none set) + // Provides better flow control for the entire connection across multiple streams. + this[kHTTP2InitialWindowSize] = initialWindowSize != null ? initialWindowSize : 262144 + this[kHTTP2ConnectionWindowSize] = connectionWindowSize != null ? 
connectionWindowSize : 524288 this[kHTTPContext] = null // kQueue is built up of 3 sections separated by diff --git a/deps/undici/src/lib/interceptor/cache.js b/deps/undici/src/lib/interceptor/cache.js index 0bc3d9d9bda570..b0449374fd4782 100644 --- a/deps/undici/src/lib/interceptor/cache.js +++ b/deps/undici/src/lib/interceptor/cache.js @@ -9,6 +9,23 @@ const CacheRevalidationHandler = require('../handler/cache-revalidation-handler' const { assertCacheStore, assertCacheMethods, makeCacheKey, normalizeHeaders, parseCacheControlHeader } = require('../util/cache.js') const { AbortError } = require('../core/errors.js') +/** + * @param {(string | RegExp)[] | undefined} origins + * @param {string} name + */ +function assertCacheOrigins (origins, name) { + if (origins === undefined) return + if (!Array.isArray(origins)) { + throw new TypeError(`expected ${name} to be an array or undefined, got ${typeof origins}`) + } + for (let i = 0; i < origins.length; i++) { + const origin = origins[i] + if (typeof origin !== 'string' && !(origin instanceof RegExp)) { + throw new TypeError(`expected ${name}[${i}] to be a string or RegExp, got ${typeof origin}`) + } + } +} + const nop = () => {} /** @@ -372,7 +389,8 @@ module.exports = (opts = {}) => { store = new MemoryCacheStore(), methods = ['GET'], cacheByDefault = undefined, - type = 'shared' + type = 'shared', + origins = undefined } = opts if (typeof opts !== 'object' || opts === null) { @@ -381,6 +399,7 @@ module.exports = (opts = {}) => { assertCacheStore(store, 'opts.store') assertCacheMethods(methods, 'opts.methods') + assertCacheOrigins(origins, 'opts.origins') if (typeof cacheByDefault !== 'undefined' && typeof cacheByDefault !== 'number') { throw new TypeError(`expected opts.cacheByDefault to be number or undefined, got ${typeof cacheByDefault}`) @@ -406,6 +425,29 @@ module.exports = (opts = {}) => { return dispatch(opts, handler) } + // Check if origin is in whitelist + if (origins !== undefined) { + const requestOrigin = opts.origin.toString().toLowerCase() + let isAllowed = false + + for (let i = 0; i < origins.length; i++) { + const allowed = origins[i] + if (typeof allowed === 'string') { + if (allowed.toLowerCase() === requestOrigin) { + isAllowed = true + break + } + } else if (allowed.test(requestOrigin)) { + isAllowed = true + break + } + } + + if (!isAllowed) { + return dispatch(opts, handler) + } + } + opts = { ...opts, headers: normalizeHeaders(opts) diff --git a/deps/undici/src/lib/llhttp/wasm_build_env.txt b/deps/undici/src/lib/llhttp/wasm_build_env.txt index ec4d6fca239ff1..e0837fc906bc30 100644 --- a/deps/undici/src/lib/llhttp/wasm_build_env.txt +++ b/deps/undici/src/lib/llhttp/wasm_build_env.txt @@ -1,5 +1,5 @@ -> undici@7.18.2 build:wasm +> undici@7.19.0 build:wasm > node build/wasm.js --docker > docker run --rm --platform=linux/x86_64 --user 1001:1001 --mount type=bind,source=/home/runner/work/node/node/deps/undici/src/lib/llhttp,target=/home/node/build/lib/llhttp --mount type=bind,source=/home/runner/work/node/node/deps/undici/src/build,target=/home/node/build/build --mount type=bind,source=/home/runner/work/node/node/deps/undici/src/deps,target=/home/node/build/deps -t ghcr.io/nodejs/wasm-builder@sha256:975f391d907e42a75b8c72eb77c782181e941608687d4d8694c3e9df415a0970 node build/wasm.js diff --git a/deps/undici/src/lib/mock/mock-agent.js b/deps/undici/src/lib/mock/mock-agent.js index 3ab14949f35c22..61449e077ead35 100644 --- a/deps/undici/src/lib/mock/mock-agent.js +++ b/deps/undici/src/lib/mock/mock-agent.js @@ -22,7 +22,7 @@ 
const { } = require('./mock-symbols') const MockClient = require('./mock-client') const MockPool = require('./mock-pool') -const { matchValue, normalizeSearchParams, buildAndValidateMockOptions } = require('./mock-utils') +const { matchValue, normalizeSearchParams, buildAndValidateMockOptions, normalizeOrigin } = require('./mock-utils') const { InvalidArgumentError, UndiciError } = require('../core/errors') const Dispatcher = require('../dispatcher/dispatcher') const PendingInterceptorsFormatter = require('./pending-interceptors-formatter') @@ -56,9 +56,9 @@ class MockAgent extends Dispatcher { } get (origin) { - const originKey = this[kIgnoreTrailingSlash] - ? origin.replace(/\/$/, '') - : origin + // Normalize origin to handle URL objects and case-insensitive hostnames + const normalizedOrigin = normalizeOrigin(origin) + const originKey = this[kIgnoreTrailingSlash] ? normalizedOrigin.replace(/\/$/, '') : normalizedOrigin let dispatcher = this[kMockAgentGet](originKey) @@ -70,6 +70,8 @@ class MockAgent extends Dispatcher { } dispatch (opts, handler) { + opts.origin = normalizeOrigin(opts.origin) + // Call MockAgent.get to perform additional setup before dispatching as normal this.get(opts.origin) diff --git a/deps/undici/src/lib/mock/mock-utils.js b/deps/undici/src/lib/mock/mock-utils.js index 3b6d5b741bc148..e1e3f04064305c 100644 --- a/deps/undici/src/lib/mock/mock-utils.js +++ b/deps/undici/src/lib/mock/mock-utils.js @@ -396,6 +396,18 @@ function checkNetConnect (netConnect, origin) { return false } +function normalizeOrigin (origin) { + if (typeof origin !== 'string' && !(origin instanceof URL)) { + return origin + } + + if (origin instanceof URL) { + return origin.origin + } + + return origin.toLowerCase() +} + function buildAndValidateMockOptions (opts) { const { agent, ...mockOptions } = opts @@ -430,5 +442,6 @@ module.exports = { buildAndValidateMockOptions, getHeaderByName, buildHeadersFromArray, - normalizeSearchParams + normalizeSearchParams, + normalizeOrigin } diff --git a/deps/undici/src/lib/web/cache/cache.js b/deps/undici/src/lib/web/cache/cache.js index 70a3787a71d415..10decbede6ad17 100644 --- a/deps/undici/src/lib/web/cache/cache.js +++ b/deps/undici/src/lib/web/cache/cache.js @@ -794,9 +794,9 @@ class Cache { // 5.5.2 for (const response of responses) { // 5.5.2.1 - const responseObject = fromInnerResponse(response, 'immutable') + const responseObject = fromInnerResponse(cloneResponse(response), 'immutable') - responseList.push(responseObject.clone()) + responseList.push(responseObject) if (responseList.length >= maxResponses) { break diff --git a/deps/undici/src/lib/web/fetch/index.js b/deps/undici/src/lib/web/fetch/index.js index e94f3593983b79..56e540d9d88258 100644 --- a/deps/undici/src/lib/web/fetch/index.js +++ b/deps/undici/src/lib/web/fetch/index.js @@ -42,7 +42,10 @@ const { simpleRangeHeaderValue, buildContentRange, createInflate, - extractMimeType + extractMimeType, + hasAuthenticationEntry, + includesCredentials, + isTraversableNavigable } = require('./util') const assert = require('node:assert') const { safelyExtractBody, extractBody } = require('./body') @@ -1524,13 +1527,39 @@ async function httpNetworkOrCacheFetch ( httpRequest.headersList.delete('host', true) - // 20. If includeCredentials is true, then: + // 21. If includeCredentials is true, then: if (includeCredentials) { // 1. If the user agent is not configured to block cookies for httpRequest // (see section 7 of [COOKIES]), then: // TODO: credentials + // 2. 
If httpRequest’s header list does not contain `Authorization`, then: - // TODO: credentials + if (!httpRequest.headersList.contains('authorization', true)) { + // 1. Let authorizationValue be null. + let authorizationValue = null + + // 2. If there’s an authentication entry for httpRequest and either + // httpRequest’s use-URL-credentials flag is unset or httpRequest’s + // current URL does not include credentials, then set + // authorizationValue to authentication entry. + if (hasAuthenticationEntry(httpRequest) && ( + httpRequest.useURLCredentials === undefined || !includesCredentials(requestCurrentURL(httpRequest)) + )) { + // TODO + } else if (includesCredentials(requestCurrentURL(httpRequest)) && isAuthenticationFetch) { + // 3. Otherwise, if httpRequest’s current URL does include credentials + // and isAuthenticationFetch is true, set authorizationValue to + // httpRequest’s current URL, converted to an `Authorization` value + const { username, password } = requestCurrentURL(httpRequest) + authorizationValue = `Basic ${Buffer.from(`${username}:${password}`).toString('base64')}` + } + + // 4. If authorizationValue is non-null, then append (`Authorization`, + // authorizationValue) to httpRequest’s header list. + if (authorizationValue !== null) { + httpRequest.headersList.append('Authorization', authorizationValue, false) + } + } } // 21. If there’s a proxy-authentication entry, use it as appropriate. @@ -1612,10 +1641,48 @@ async function httpNetworkOrCacheFetch ( // 13. Set response’s request-includes-credentials to includeCredentials. response.requestIncludesCredentials = includeCredentials - // 14. If response’s status is 401, httpRequest’s response tainting is not - // "cors", includeCredentials is true, and request’s window is an environment - // settings object, then: - // TODO + // 14. If response’s status is 401, httpRequest’s response tainting is not "cors", + // includeCredentials is true, and request’s traversable for user prompts is + // a traversable navigable: + if (response.status === 401 && httpRequest.responseTainting !== 'cors' && includeCredentials && isTraversableNavigable(request.traversableForUserPrompts)) { + // 2. If request’s body is non-null, then: + if (request.body != null) { + // 1. If request’s body’s source is null, then return a network error. + if (request.body.source == null) { + return makeNetworkError('expected non-null body source') + } + + // 2. Set request’s body to the body of the result of safely extracting + // request’s body’s source. + request.body = safelyExtractBody(request.body.source)[0] + } + + // 3. If request’s use-URL-credentials flag is unset or isAuthenticationFetch is + // true, then: + if (request.useURLCredentials === undefined || isAuthenticationFetch) { + // 1. If fetchParams is canceled, then return the appropriate network error + // for fetchParams. + if (isCancelled(fetchParams)) { + return makeAppropriateNetworkError(fetchParams) + } + + // 2. Let username and password be the result of prompting the end user for a + // username and password, respectively, in request’s traversable for user prompts. + // TODO + + // 3. Set the username given request’s current URL and username. + // requestCurrentURL(request).username = TODO + + // 4. Set the password given request’s current URL and password. + // requestCurrentURL(request).password = TODO + } + + // 4. Set response to the result of running HTTP-network-or-cache fetch given + // fetchParams and true. 
+ fetchParams.controller.connection.destroy() + + response = await httpNetworkOrCacheFetch(fetchParams, true) + } // 15. If response’s status is 407, then: if (response.status === 407) { diff --git a/deps/undici/src/lib/web/fetch/request.js b/deps/undici/src/lib/web/fetch/request.js index e3c11bc01ffb83..6ef40f99920840 100644 --- a/deps/undici/src/lib/web/fetch/request.js +++ b/deps/undici/src/lib/web/fetch/request.js @@ -918,6 +918,8 @@ function makeRequest (init) { preventNoCacheCacheControlHeaderModification: init.preventNoCacheCacheControlHeaderModification ?? false, done: init.done ?? false, timingAllowFailed: init.timingAllowFailed ?? false, + useURLCredentials: init.useURLCredentials ?? undefined, + traversableForUserPrompts: init.traversableForUserPrompts ?? 'client', urlList: init.urlList, url: init.urlList[0], headersList: init.headersList diff --git a/deps/undici/src/lib/web/fetch/response.js b/deps/undici/src/lib/web/fetch/response.js index b3f942c34e1ff1..6b954afaab35de 100644 --- a/deps/undici/src/lib/web/fetch/response.js +++ b/deps/undici/src/lib/web/fetch/response.js @@ -555,7 +555,8 @@ function fromInnerResponse (innerResponse, guard) { setHeadersList(headers, innerResponse.headersList) setHeadersGuard(headers, guard) - if (innerResponse.body?.stream) { + // Note: If innerResponse's urlList contains a URL, it is a fetch response. + if (innerResponse.urlList.length !== 0 && innerResponse.body?.stream) { // If the target (response) is reclaimed, the cleanup callback may be called at some point with // the held value provided for it (innerResponse.body.stream). The held value can be any value: // a primitive or an object, even undefined. If the held value is an object, the registry keeps diff --git a/deps/undici/src/lib/web/fetch/util.js b/deps/undici/src/lib/web/fetch/util.js index 775ef296db2622..5e51bdd35aa954 100644 --- a/deps/undici/src/lib/web/fetch/util.js +++ b/deps/undici/src/lib/web/fetch/util.js @@ -1429,6 +1429,28 @@ function getDecodeSplit (name, list) { return gettingDecodingSplitting(value) } +function hasAuthenticationEntry (request) { + return false +} + +/** + * @see https://url.spec.whatwg.org/#include-credentials + * @param {URL} url + */ +function includesCredentials (url) { + // A URL includes credentials if its username or password is not the empty string. + return !!(url.username && url.password) +} + +/** + * @see https://html.spec.whatwg.org/multipage/document-sequences.html#traversable-navigable + * @param {object|string} navigable + */ +function isTraversableNavigable (navigable) { + // TODO + return true +} + class EnvironmentSettingsObjectBase { get baseUrl () { return getGlobalOrigin() @@ -1491,5 +1513,8 @@ module.exports = { extractMimeType, getDecodeSplit, environmentSettingsObject, - isOriginIPPotentiallyTrustworthy + isOriginIPPotentiallyTrustworthy, + hasAuthenticationEntry, + includesCredentials, + isTraversableNavigable } diff --git a/deps/undici/src/lib/web/websocket/connection.js b/deps/undici/src/lib/web/websocket/connection.js index 06b62047e9c5bb..4ecc8a195fcd60 100644 --- a/deps/undici/src/lib/web/websocket/connection.js +++ b/deps/undici/src/lib/web/websocket/connection.js @@ -33,7 +33,7 @@ function establishWebSocketConnection (url, protocols, client, handler, options) // 2. Let request be a new request, whose URL is requestURL, client is client, // service-workers mode is "none", referrer is "no-referrer", mode is // "websocket", credentials mode is "include", cache mode is "no-store" , - // and redirect mode is "error". 
+ // redirect mode is "error", and use-URL-credentials flag is set. const request = makeRequest({ urlList: [requestURL], client, @@ -42,7 +42,8 @@ function establishWebSocketConnection (url, protocols, client, handler, options) mode: 'websocket', credentials: 'include', cache: 'no-store', - redirect: 'error' + redirect: 'error', + useURLCredentials: true }) // Note: undici extension, allow setting custom headers. diff --git a/deps/undici/src/lib/web/websocket/stream/websocketstream.js b/deps/undici/src/lib/web/websocket/stream/websocketstream.js index 4d8db87c843e7e..ce3be84fc3d034 100644 --- a/deps/undici/src/lib/web/websocket/stream/websocketstream.js +++ b/deps/undici/src/lib/web/websocket/stream/websocketstream.js @@ -370,7 +370,7 @@ class WebSocketStream { this.#openedPromise.reject(new WebSocketError('Socket never opened')) } - const result = this.#parser.closingInfo + const result = this.#parser?.closingInfo // 4. Let code be the WebSocket connection close code . // https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.5 @@ -411,10 +411,10 @@ class WebSocketStream { const error = createUnvalidatedWebSocketError('unclean close', code, reason) // 7.2. Error stream ’s readable stream with error . - this.#readableStreamController.error(error) + this.#readableStreamController?.error(error) // 7.3. Error stream ’s writable stream with error . - this.#writableStream.abort(error) + this.#writableStream?.abort(error) // 7.4. Reject stream ’s closed promise with error . this.#closedPromise.reject(error) diff --git a/deps/undici/src/package-lock.json b/deps/undici/src/package-lock.json index c22f475f46c1eb..a76cd9df684c7e 100644 --- a/deps/undici/src/package-lock.json +++ b/deps/undici/src/package-lock.json @@ -1,12 +1,12 @@ { "name": "undici", - "version": "7.18.2", + "version": "7.19.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "undici", - "version": "7.18.2", + "version": "7.19.0", "license": "MIT", "devDependencies": { "@fastify/busboy": "3.2.0", @@ -76,13 +76,13 @@ "license": "MIT" }, "node_modules/@babel/code-frame": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", - "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.28.6.tgz", + "integrity": "sha512-JYgintcMjRiCvS8mMECzaEn+m3PfoQiyqukOMCCVQtoJGYJw8j/8LBJEiqkHLkfwCcs74E3pbAUFNg7d9VNJ+Q==", "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-validator-identifier": "^7.27.1", + "@babel/helper-validator-identifier": "^7.28.5", "js-tokens": "^4.0.0", "picocolors": "^1.1.1" }, @@ -91,9 +91,9 @@ } }, "node_modules/@babel/compat-data": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.28.5.tgz", - "integrity": "sha512-6uFXyCayocRbqhZOB+6XcuZbkMNimwfVGFji8CTZnCzOHVGvDqzvitu1re2AU5LROliz7eQPhB8CpAMvnx9EjA==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.28.6.tgz", + "integrity": "sha512-2lfu57JtzctfIrcGMz992hyLlByuzgIk58+hhGCxjKZ3rWI82NnVLjXcaTqkI2NvlcvOskZaiZ5kjUALo3Lpxg==", "dev": true, "license": "MIT", "engines": { @@ -101,21 +101,21 @@ } }, "node_modules/@babel/core": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.5.tgz", - "integrity": 
"sha512-e7jT4DxYvIDLk1ZHmU/m/mB19rex9sv0c2ftBtjSBv+kVM/902eh0fINUzD7UwLLNR+jU585GxUJ8/EBfAM5fw==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.6.tgz", + "integrity": "sha512-H3mcG6ZDLTlYfaSNi0iOKkigqMFvkTKlGUYlD8GW7nNOYRrevuA46iTypPyv+06V3fEmvvazfntkBU34L0azAw==", "dev": true, "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.5", - "@babel/helper-compilation-targets": "^7.27.2", - "@babel/helper-module-transforms": "^7.28.3", - "@babel/helpers": "^7.28.4", - "@babel/parser": "^7.28.5", - "@babel/template": "^7.27.2", - "@babel/traverse": "^7.28.5", - "@babel/types": "^7.28.5", + "@babel/code-frame": "^7.28.6", + "@babel/generator": "^7.28.6", + "@babel/helper-compilation-targets": "^7.28.6", + "@babel/helper-module-transforms": "^7.28.6", + "@babel/helpers": "^7.28.6", + "@babel/parser": "^7.28.6", + "@babel/template": "^7.28.6", + "@babel/traverse": "^7.28.6", + "@babel/types": "^7.28.6", "@jridgewell/remapping": "^2.3.5", "convert-source-map": "^2.0.0", "debug": "^4.1.0", @@ -132,14 +132,14 @@ } }, "node_modules/@babel/generator": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.5.tgz", - "integrity": "sha512-3EwLFhZ38J4VyIP6WNtt2kUdW9dokXA9Cr4IVIFHuCpZ3H8/YFOl5JjZHisrn1fATPBmKKqXzDFvh9fUwHz6CQ==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.6.tgz", + "integrity": "sha512-lOoVRwADj8hjf7al89tvQ2a1lf53Z+7tiXMgpZJL3maQPDxh0DgLMN62B2MKUOFcoodBHLMbDM6WAbKgNy5Suw==", "dev": true, "license": "MIT", "dependencies": { - "@babel/parser": "^7.28.5", - "@babel/types": "^7.28.5", + "@babel/parser": "^7.28.6", + "@babel/types": "^7.28.6", "@jridgewell/gen-mapping": "^0.3.12", "@jridgewell/trace-mapping": "^0.3.28", "jsesc": "^3.0.2" @@ -149,13 +149,13 @@ } }, "node_modules/@babel/helper-compilation-targets": { - "version": "7.27.2", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz", - "integrity": "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.28.6.tgz", + "integrity": "sha512-JYtls3hqi15fcx5GaSNL7SCTJ2MNmjrkHXg4FSpOA/grxK8KwyZ5bubHsCq8FXCkua6xhuaaBit+3b7+VZRfcA==", "dev": true, "license": "MIT", "dependencies": { - "@babel/compat-data": "^7.27.2", + "@babel/compat-data": "^7.28.6", "@babel/helper-validator-option": "^7.27.1", "browserslist": "^4.24.0", "lru-cache": "^5.1.1", @@ -176,29 +176,29 @@ } }, "node_modules/@babel/helper-module-imports": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz", - "integrity": "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.28.6.tgz", + "integrity": "sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw==", "dev": true, "license": "MIT", "dependencies": { - "@babel/traverse": "^7.27.1", - "@babel/types": "^7.27.1" + "@babel/traverse": "^7.28.6", + "@babel/types": "^7.28.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-module-transforms": { - "version": "7.28.3", - "resolved": 
"https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.3.tgz", - "integrity": "sha512-gytXUbs8k2sXS9PnQptz5o0QnpLL51SwASIORY6XaBKF88nsOT0Zw9szLqlSGQDP/4TljBAD5y98p2U1fqkdsw==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.6.tgz", + "integrity": "sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA==", "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-module-imports": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1", - "@babel/traverse": "^7.28.3" + "@babel/helper-module-imports": "^7.28.6", + "@babel/helper-validator-identifier": "^7.28.5", + "@babel/traverse": "^7.28.6" }, "engines": { "node": ">=6.9.0" @@ -208,9 +208,9 @@ } }, "node_modules/@babel/helper-plugin-utils": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.27.1.tgz", - "integrity": "sha512-1gn1Up5YXka3YYAHGKpbideQ5Yjf1tDa9qYcgysz+cNCXukyLl6DjPXhD3VRwSb8c0J9tA4b2+rHEZtc6R0tlw==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.28.6.tgz", + "integrity": "sha512-S9gzZ/bz83GRysI7gAD4wPT/AI3uCnY+9xn+Mx/KPs2JwHJIz1W8PZkg2cqyt3RNOBM8ejcXhV6y8Og7ly/Dug==", "dev": true, "license": "MIT", "engines": { @@ -248,27 +248,27 @@ } }, "node_modules/@babel/helpers": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.4.tgz", - "integrity": "sha512-HFN59MmQXGHVyYadKLVumYsA9dBFun/ldYxipEjzA4196jpLZd8UjEEBLkbEkvfYreDqJhZxYAWFPtrfhNpj4w==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.6.tgz", + "integrity": "sha512-xOBvwq86HHdB7WUDTfKfT/Vuxh7gElQ+Sfti2Cy6yIWNW05P8iUslOVcZ4/sKbE+/jQaukQAdz/gf3724kYdqw==", "dev": true, "license": "MIT", "dependencies": { - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.4" + "@babel/template": "^7.28.6", + "@babel/types": "^7.28.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/parser": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.5.tgz", - "integrity": "sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.6.tgz", + "integrity": "sha512-TeR9zWR18BvbfPmGbLampPMW+uW1NZnJlRuuHso8i87QZNq2JRF9i6RgxRqtEq+wQGsS19NNTWr2duhnE49mfQ==", "dev": true, "license": "MIT", "dependencies": { - "@babel/types": "^7.28.5" + "@babel/types": "^7.28.6" }, "bin": { "parser": "bin/babel-parser.js" @@ -333,13 +333,13 @@ } }, "node_modules/@babel/plugin-syntax-import-attributes": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.27.1.tgz", - "integrity": "sha512-oFT0FrKHgF53f4vOsZGi2Hh3I35PfSmVs4IBFLFj4dnafP+hIWDLg3VyKmUHfLoLHlyxY4C7DGtmHuJgn+IGww==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.28.6.tgz", + "integrity": "sha512-jiLC0ma9XkQT3TKJ9uYvlakm66Pamywo+qwL+oL8HJOvc6TWdZXVfhqJr8CCzbSGUAbDOzlGHJC1U+vRfLQDvw==", "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" + "@babel/helper-plugin-utils": "^7.28.6" }, "engines": { "node": ">=6.9.0" @@ -375,13 +375,13 @@ } }, 
"node_modules/@babel/plugin-syntax-jsx": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.27.1.tgz", - "integrity": "sha512-y8YTNIeKoyhGd9O0Jiyzyyqk8gdjnumGTQPsz0xOZOQ2RmkVJeZ1vmmfIvFEKqucBG6axJGBZDE/7iI5suUI/w==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.28.6.tgz", + "integrity": "sha512-wgEmr06G6sIpqr8YDwA2dSRTE3bJ+V0IfpzfSY3Lfgd7YWOaAdlykvJi13ZKBt8cZHfgH1IXN+CL656W3uUa4w==", "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" + "@babel/helper-plugin-utils": "^7.28.6" }, "engines": { "node": ">=6.9.0" @@ -501,13 +501,13 @@ } }, "node_modules/@babel/plugin-syntax-typescript": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.27.1.tgz", - "integrity": "sha512-xfYCBMxveHrRMnAWl1ZlPXOZjzkN82THFvLhQhFXFt81Z5HnN+EtUkZhv/zcKpmT3fzmWZB0ywiBrbC3vogbwQ==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.28.6.tgz", + "integrity": "sha512-+nDNmQye7nlnuuHDboPbGm00Vqg3oO8niRRL27/4LYHUsHYh0zJ1xWOz0uRwNFmM1Avzk8wZbc6rdiYhomzv/A==", "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" + "@babel/helper-plugin-utils": "^7.28.6" }, "engines": { "node": ">=6.9.0" @@ -517,33 +517,33 @@ } }, "node_modules/@babel/template": { - "version": "7.27.2", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz", - "integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.28.6.tgz", + "integrity": "sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ==", "dev": true, "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/parser": "^7.27.2", - "@babel/types": "^7.27.1" + "@babel/code-frame": "^7.28.6", + "@babel/parser": "^7.28.6", + "@babel/types": "^7.28.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/traverse": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.5.tgz", - "integrity": "sha512-TCCj4t55U90khlYkVV/0TfkJkAkUg3jZFA3Neb7unZT8CPok7iiRfaX0F+WnqWqt7OxhOn0uBKXCw4lbL8W0aQ==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.6.tgz", + "integrity": "sha512-fgWX62k02qtjqdSNTAGxmKYY/7FSL9WAS1o2Hu5+I5m9T0yxZzr4cnrfXQ/MX0rIifthCSs6FKTlzYbJcPtMNg==", "dev": true, "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.5", + "@babel/code-frame": "^7.28.6", + "@babel/generator": "^7.28.6", "@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.28.5", - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.5", + "@babel/parser": "^7.28.6", + "@babel/template": "^7.28.6", + "@babel/types": "^7.28.6", "debug": "^4.3.1" }, "engines": { @@ -551,9 +551,9 @@ } }, "node_modules/@babel/types": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.5.tgz", - "integrity": "sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.6.tgz", + "integrity": 
"sha512-0ZrskXVEHSWIqZM/sQZ4EV3jZJXRkio/WCxaqKZP1g//CEWEPSfeZFcms4XeKBCHU0ZKnIkdJeU/kF+eRp5lBg==", "dev": true, "license": "MIT", "dependencies": { @@ -2185,9 +2185,9 @@ "license": "MIT" }, "node_modules/@types/node": { - "version": "20.19.27", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.27.tgz", - "integrity": "sha512-N2clP5pJhB2YnZJ3PIHFk5RkygRX5WO/5f0WC08tp0wd+sv0rsJk3MqWn3CbNmT2J505a5336jaQj4ph1AdMug==", + "version": "20.19.30", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.30.tgz", + "integrity": "sha512-WJtwWJu7UdlvzEAUm484QNg5eAoq5QR08KDNx7g45Usrs2NtOPiX8ugDqmKdXkyL03rBqU5dYNYVQetEpBHq2g==", "dev": true, "license": "MIT", "dependencies": { @@ -2226,17 +2226,17 @@ "license": "MIT" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "8.52.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.52.0.tgz", - "integrity": "sha512-okqtOgqu2qmZJ5iN4TWlgfF171dZmx2FzdOv2K/ixL2LZWDStL8+JgQerI2sa8eAEfoydG9+0V96m7V+P8yE1Q==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.53.1.tgz", + "integrity": "sha512-cFYYFZ+oQFi6hUnBTbLRXfTJiaQtYE3t4O692agbBl+2Zy+eqSKWtPjhPXJu1G7j4RLjKgeJPDdq3EqOwmX5Ag==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/regexpp": "^4.12.2", - "@typescript-eslint/scope-manager": "8.52.0", - "@typescript-eslint/type-utils": "8.52.0", - "@typescript-eslint/utils": "8.52.0", - "@typescript-eslint/visitor-keys": "8.52.0", + "@typescript-eslint/scope-manager": "8.53.1", + "@typescript-eslint/type-utils": "8.53.1", + "@typescript-eslint/utils": "8.53.1", + "@typescript-eslint/visitor-keys": "8.53.1", "ignore": "^7.0.5", "natural-compare": "^1.4.0", "ts-api-utils": "^2.4.0" @@ -2249,7 +2249,7 @@ "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "@typescript-eslint/parser": "^8.52.0", + "@typescript-eslint/parser": "^8.53.1", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <6.0.0" } @@ -2265,16 +2265,16 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "8.52.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.52.0.tgz", - "integrity": "sha512-iIACsx8pxRnguSYhHiMn2PvhvfpopO9FXHyn1mG5txZIsAaB6F0KwbFnUQN3KCiG3Jcuad/Cao2FAs1Wp7vAyg==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.53.1.tgz", + "integrity": "sha512-nm3cvFN9SqZGXjmw5bZ6cGmvJSyJPn0wU9gHAZZHDnZl2wF9PhHv78Xf06E0MaNk4zLVHL8hb2/c32XvyJOLQg==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/scope-manager": "8.52.0", - "@typescript-eslint/types": "8.52.0", - "@typescript-eslint/typescript-estree": "8.52.0", - "@typescript-eslint/visitor-keys": "8.52.0", + "@typescript-eslint/scope-manager": "8.53.1", + "@typescript-eslint/types": "8.53.1", + "@typescript-eslint/typescript-estree": "8.53.1", + "@typescript-eslint/visitor-keys": "8.53.1", "debug": "^4.4.3" }, "engines": { @@ -2290,14 +2290,14 @@ } }, "node_modules/@typescript-eslint/project-service": { - "version": "8.52.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.52.0.tgz", - "integrity": "sha512-xD0MfdSdEmeFa3OmVqonHi+Cciab96ls1UhIF/qX/O/gPu5KXD0bY9lu33jj04fjzrXHcuvjBcBC+D3SNSadaw==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.53.1.tgz", + "integrity": 
"sha512-WYC4FB5Ra0xidsmlPb+1SsnaSKPmS3gsjIARwbEkHkoWloQmuzcfypljaJcR78uyLA1h8sHdWWPHSLDI+MtNog==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/tsconfig-utils": "^8.52.0", - "@typescript-eslint/types": "^8.52.0", + "@typescript-eslint/tsconfig-utils": "^8.53.1", + "@typescript-eslint/types": "^8.53.1", "debug": "^4.4.3" }, "engines": { @@ -2312,14 +2312,14 @@ } }, "node_modules/@typescript-eslint/scope-manager": { - "version": "8.52.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.52.0.tgz", - "integrity": "sha512-ixxqmmCcc1Nf8S0mS0TkJ/3LKcC8mruYJPOU6Ia2F/zUUR4pApW7LzrpU3JmtePbRUTes9bEqRc1Gg4iyRnDzA==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.53.1.tgz", + "integrity": "sha512-Lu23yw1uJMFY8cUeq7JlrizAgeQvWugNQzJp8C3x8Eo5Jw5Q2ykMdiiTB9vBVOOUBysMzmRRmUfwFrZuI2C4SQ==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.52.0", - "@typescript-eslint/visitor-keys": "8.52.0" + "@typescript-eslint/types": "8.53.1", + "@typescript-eslint/visitor-keys": "8.53.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2330,9 +2330,9 @@ } }, "node_modules/@typescript-eslint/tsconfig-utils": { - "version": "8.52.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.52.0.tgz", - "integrity": "sha512-jl+8fzr/SdzdxWJznq5nvoI7qn2tNYV/ZBAEcaFMVXf+K6jmXvAFrgo/+5rxgnL152f//pDEAYAhhBAZGrVfwg==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.53.1.tgz", + "integrity": "sha512-qfvLXS6F6b1y43pnf0pPbXJ+YoXIC7HKg0UGZ27uMIemKMKA6XH2DTxsEDdpdN29D+vHV07x/pnlPNVLhdhWiA==", "dev": true, "license": "MIT", "engines": { @@ -2347,15 +2347,15 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "8.52.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.52.0.tgz", - "integrity": "sha512-JD3wKBRWglYRQkAtsyGz1AewDu3mTc7NtRjR/ceTyGoPqmdS5oCdx/oZMWD5Zuqmo6/MpsYs0wp6axNt88/2EQ==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.53.1.tgz", + "integrity": "sha512-MOrdtNvyhy0rHyv0ENzub1d4wQYKb2NmIqG7qEqPWFW7Mpy2jzFC3pQ2yKDvirZB7jypm5uGjF2Qqs6OIqu47w==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.52.0", - "@typescript-eslint/typescript-estree": "8.52.0", - "@typescript-eslint/utils": "8.52.0", + "@typescript-eslint/types": "8.53.1", + "@typescript-eslint/typescript-estree": "8.53.1", + "@typescript-eslint/utils": "8.53.1", "debug": "^4.4.3", "ts-api-utils": "^2.4.0" }, @@ -2372,9 +2372,9 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "8.52.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.52.0.tgz", - "integrity": "sha512-LWQV1V4q9V4cT4H5JCIx3481iIFxH1UkVk+ZkGGAV1ZGcjGI9IoFOfg3O6ywz8QqCDEp7Inlg6kovMofsNRaGg==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.53.1.tgz", + "integrity": "sha512-jr/swrr2aRmUAUjW5/zQHbMaui//vQlsZcJKijZf3M26bnmLj8LyZUpj8/Rd6uzaek06OWsqdofN/Thenm5O8A==", "dev": true, "license": "MIT", "engines": { @@ -2386,16 +2386,16 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "8.52.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.52.0.tgz", - "integrity": 
"sha512-XP3LClsCc0FsTK5/frGjolyADTh3QmsLp6nKd476xNI9CsSsLnmn4f0jrzNoAulmxlmNIpeXuHYeEQv61Q6qeQ==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.53.1.tgz", + "integrity": "sha512-RGlVipGhQAG4GxV1s34O91cxQ/vWiHJTDHbXRr0li2q/BGg3RR/7NM8QDWgkEgrwQYCvmJV9ichIwyoKCQ+DTg==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/project-service": "8.52.0", - "@typescript-eslint/tsconfig-utils": "8.52.0", - "@typescript-eslint/types": "8.52.0", - "@typescript-eslint/visitor-keys": "8.52.0", + "@typescript-eslint/project-service": "8.53.1", + "@typescript-eslint/tsconfig-utils": "8.53.1", + "@typescript-eslint/types": "8.53.1", + "@typescript-eslint/visitor-keys": "8.53.1", "debug": "^4.4.3", "minimatch": "^9.0.5", "semver": "^7.7.3", @@ -2453,16 +2453,16 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "8.52.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.52.0.tgz", - "integrity": "sha512-wYndVMWkweqHpEpwPhwqE2lnD2DxC6WVLupU/DOt/0/v+/+iQbbzO3jOHjmBMnhu0DgLULvOaU4h4pwHYi2oRQ==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.53.1.tgz", + "integrity": "sha512-c4bMvGVWW4hv6JmDUEG7fSYlWOl3II2I4ylt0NM+seinYQlZMQIaKaXIIVJWt9Ofh6whrpM+EdDQXKXjNovvrg==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.9.1", - "@typescript-eslint/scope-manager": "8.52.0", - "@typescript-eslint/types": "8.52.0", - "@typescript-eslint/typescript-estree": "8.52.0" + "@typescript-eslint/scope-manager": "8.53.1", + "@typescript-eslint/types": "8.53.1", + "@typescript-eslint/typescript-estree": "8.53.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2477,13 +2477,13 @@ } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "8.52.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.52.0.tgz", - "integrity": "sha512-ink3/Zofus34nmBsPjow63FP5M7IGff0RKAgqR6+CFpdk22M7aLwC9gOcLGYqr7MczLPzZVERW9hRog3O4n1sQ==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.53.1.tgz", + "integrity": "sha512-oy+wV7xDKFPRyNggmXuZQSBzvoLnpmJs+GhzRhPjrxl2b/jIlyjVokzm47CZCDUdXKr2zd7ZLodPfOBpOPyPlg==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.52.0", + "@typescript-eslint/types": "8.53.1", "eslint-visitor-keys": "^4.2.1" }, "engines": { @@ -3331,9 +3331,9 @@ "license": "MIT" }, "node_modules/baseline-browser-mapping": { - "version": "2.9.11", - "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.9.11.tgz", - "integrity": "sha512-Sg0xJUNDU1sJNGdfGWhVHX0kkZ+HWcvmVymJbj6NSgZZmW/8S9Y2HQ5euytnIgakgxN6papOAWiwDo1ctFDcoQ==", + "version": "2.9.17", + "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.9.17.tgz", + "integrity": "sha512-agD0MgJFUP/4nvjqzIB29zRPUuCF7Ge6mEv9s8dHrtYD7QWXRcx75rOADE/d5ah1NI+0vkDl0yorDd5U852IQQ==", "dev": true, "license": "Apache-2.0", "bin": { @@ -3621,9 +3621,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001762", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001762.tgz", - "integrity": "sha512-PxZwGNvH7Ak8WX5iXzoK1KPZttBXNPuaOvI2ZYU7NrlM+d9Ov+TUvlLOBNGzVXAntMSMMlJPd+jY6ovrVjSmUw==", + "version": "1.0.30001765", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001765.tgz", + 
"integrity": "sha512-LWcNtSyZrakjECqmpP4qdg0MMGdN368D7X8XvvAqOcqMv0RxnlqVKZl2V6/mBR68oYMxOZPLw/gO7DuisMHUvQ==", "dev": true, "funding": [ { @@ -3808,9 +3808,9 @@ "license": "MIT" }, "node_modules/comment-parser": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/comment-parser/-/comment-parser-1.4.1.tgz", - "integrity": "sha512-buhp5kePrmda3vhc5B9t7pUQXAb2Tnd0qgpkIhPhkHXxJpiPJ11H0ZEU0oBpJ2QztSbzG/ZxMj/CHsYJqRHmyg==", + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/comment-parser/-/comment-parser-1.4.4.tgz", + "integrity": "sha512-0D6qSQ5IkeRrGJFHRClzaMOenMeT0gErz3zIw3AprKMqhRN6LNU2jQOdkPG/FZ+8bCgXE1VidrgSzlBBDZRr8A==", "dev": true, "license": "MIT", "engines": { @@ -4721,9 +4721,9 @@ } }, "node_modules/eslint-plugin-n": { - "version": "17.23.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-n/-/eslint-plugin-n-17.23.1.tgz", - "integrity": "sha512-68PealUpYoHOBh332JLLD9Sj7OQUDkFpmcfqt8R9sySfFSeuGJjMTJQvCRRB96zO3A/PELRLkPrzsHmzEFQQ5A==", + "version": "17.23.2", + "resolved": "https://registry.npmjs.org/eslint-plugin-n/-/eslint-plugin-n-17.23.2.tgz", + "integrity": "sha512-RhWBeb7YVPmNa2eggvJooiuehdL76/bbfj/OJewyoGT80qn5PXdz8zMOTO6YHOsI7byPt7+Ighh/i/4a5/v7hw==", "dev": true, "license": "MIT", "dependencies": { @@ -9587,9 +9587,9 @@ } }, "node_modules/synckit": { - "version": "0.11.11", - "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.11.11.tgz", - "integrity": "sha512-MeQTA1r0litLUf0Rp/iisCaL8761lKAZHaimlbGK4j0HysC4PLfqygQj9srcs0m2RdtDYnF8UuYyKpbjHYp7Jw==", + "version": "0.11.12", + "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.11.12.tgz", + "integrity": "sha512-Bh7QjT8/SuKUIfObSXNHNSK6WHo6J1tHCqJsuaFDP7gP0fkzSfTxI8y85JrppZ0h8l0maIgc2tfuZQ6/t3GtnQ==", "dev": true, "license": "MIT", "dependencies": { @@ -10027,16 +10027,16 @@ } }, "node_modules/typescript-eslint": { - "version": "8.52.0", - "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.52.0.tgz", - "integrity": "sha512-atlQQJ2YkO4pfTVQmQ+wvYQwexPDOIgo+RaVcD7gHgzy/IQA+XTyuxNM9M9TVXvttkF7koBHmcwisKdOAf2EcA==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.53.1.tgz", + "integrity": "sha512-gB+EVQfP5RDElh9ittfXlhZJdjSU4jUSTyE2+ia8CYyNvet4ElfaLlAIqDvQV9JPknKx0jQH1racTYe/4LaLSg==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/eslint-plugin": "8.52.0", - "@typescript-eslint/parser": "8.52.0", - "@typescript-eslint/typescript-estree": "8.52.0", - "@typescript-eslint/utils": "8.52.0" + "@typescript-eslint/eslint-plugin": "8.53.1", + "@typescript-eslint/parser": "8.53.1", + "@typescript-eslint/typescript-estree": "8.53.1", + "@typescript-eslint/utils": "8.53.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -10308,9 +10308,9 @@ } }, "node_modules/which-typed-array": { - "version": "1.1.19", - "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.19.tgz", - "integrity": "sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw==", + "version": "1.1.20", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.20.tgz", + "integrity": "sha512-LYfpUkmqwl0h9A2HL09Mms427Q1RZWuOHsukfVcKRq9q95iQxdw0ix1JQrqbcDR9PH1QDwf5Qo8OZb5lksZ8Xg==", "dev": true, "license": "MIT", "dependencies": { diff --git a/deps/undici/src/package.json b/deps/undici/src/package.json index 235c9b04fd3e81..18637c469dedae 100644 --- a/deps/undici/src/package.json +++ b/deps/undici/src/package.json @@ 
-1,6 +1,6 @@ { "name": "undici", - "version": "7.18.2", + "version": "7.19.0", "description": "An HTTP/1.1 client, written from scratch for Node.js", "homepage": "https://undici.nodejs.org", "bugs": { diff --git a/deps/undici/src/types/cache-interceptor.d.ts b/deps/undici/src/types/cache-interceptor.d.ts index 013e207e1d4fba..8588ccdcb358be 100644 --- a/deps/undici/src/types/cache-interceptor.d.ts +++ b/deps/undici/src/types/cache-interceptor.d.ts @@ -39,6 +39,12 @@ declare namespace CacheHandler { */ type?: 'shared' | 'private' + /** + * Array of origins to cache. Only requests to these origins will be cached. + * Supports strings (case insensitive) and RegExp patterns. + * @default undefined (cache all origins) + */ + origins?: (string | RegExp)[] } export interface CacheControlDirectives { diff --git a/deps/undici/src/types/client.d.ts b/deps/undici/src/types/client.d.ts index 04b8f29f1bbab7..f5ccde2be436af 100644 --- a/deps/undici/src/types/client.d.ts +++ b/deps/undici/src/types/client.d.ts @@ -92,6 +92,16 @@ export declare namespace Client { * @default 100 */ maxConcurrentStreams?: number; + /** + * @description Sets the HTTP/2 stream-level flow-control window size (SETTINGS_INITIAL_WINDOW_SIZE). + * @default 262144 + */ + initialWindowSize?: number; + /** + * @description Sets the HTTP/2 connection-level flow-control window size (ClientHttp2Session.setLocalWindowSize). + * @default 524288 + */ + connectionWindowSize?: number; } export interface SocketInfo { localAddress?: string diff --git a/deps/undici/undici.js b/deps/undici/undici.js index 19915b8029af9f..77efb43720baec 100644 --- a/deps/undici/undici.js +++ b/deps/undici/undici.js @@ -525,6 +525,8 @@ var require_symbols = __commonJS({ kListeners: Symbol("listeners"), kHTTPContext: Symbol("http context"), kMaxConcurrentStreams: Symbol("max concurrent streams"), + kHTTP2InitialWindowSize: Symbol("http2 initial window size"), + kHTTP2ConnectionWindowSize: Symbol("http2 connection window size"), kEnableConnectProtocol: Symbol("http2session connect protocol"), kRemoteSettings: Symbol("http2session remote settings"), kHTTP2Stream: Symbol("http2session client stream"), @@ -1220,6 +1222,8 @@ var require_util = __commonJS({ return body; } else if (body && typeof body.pipeTo === "function") { return new BodyAsyncIterable(body); + } else if (body && isFormDataLike(body)) { + return body; } else if (body && typeof body !== "string" && !ArrayBuffer.isView(body) && isIterable(body)) { return new BodyAsyncIterable(body); } else { @@ -6022,6 +6026,18 @@ var require_util2 = __commonJS({ return gettingDecodingSplitting(value); } __name(getDecodeSplit, "getDecodeSplit"); + function hasAuthenticationEntry(request) { + return false; + } + __name(hasAuthenticationEntry, "hasAuthenticationEntry"); + function includesCredentials(url) { + return !!(url.username && url.password); + } + __name(includesCredentials, "includesCredentials"); + function isTraversableNavigable(navigable) { + return true; + } + __name(isTraversableNavigable, "isTraversableNavigable"); var EnvironmentSettingsObjectBase = class { static { __name(this, "EnvironmentSettingsObjectBase"); @@ -6085,7 +6101,10 @@ var require_util2 = __commonJS({ extractMimeType, getDecodeSplit, environmentSettingsObject, - isOriginIPPotentiallyTrustworthy + isOriginIPPotentiallyTrustworthy, + hasAuthenticationEntry, + includesCredentials, + isTraversableNavigable }; } }); @@ -8127,6 +8146,8 @@ var require_client_h2 = __commonJS({ kOnError, kMaxConcurrentStreams, kHTTP2Session, + kHTTP2InitialWindowSize, 
+ kHTTP2ConnectionWindowSize, kResume, kSize, kHTTPContext, @@ -8175,12 +8196,15 @@ var require_client_h2 = __commonJS({ __name(parseH2Headers, "parseH2Headers"); function connectH2(client, socket) { client[kSocket] = socket; + const http2InitialWindowSize = client[kHTTP2InitialWindowSize]; + const http2ConnectionWindowSize = client[kHTTP2ConnectionWindowSize]; const session = http2.connect(client[kUrl], { createConnection: /* @__PURE__ */ __name(() => socket, "createConnection"), peerMaxConcurrentStreams: client[kMaxConcurrentStreams], settings: { // TODO(metcoder95): add support for PUSH - enablePush: false + enablePush: false, + ...http2InitialWindowSize != null ? { initialWindowSize: http2InitialWindowSize } : null } }); session[kOpenStreams] = 0; @@ -8189,6 +8213,9 @@ var require_client_h2 = __commonJS({ session[kHTTP2Session] = null; session[kEnableConnectProtocol] = false; session[kRemoteSettings] = false; + if (http2ConnectionWindowSize) { + util.addListener(session, "connect", applyConnectionWindowSize.bind(session, http2ConnectionWindowSize)); + } util.addListener(session, "error", onHttp2SessionError); util.addListener(session, "frameError", onHttp2FrameError); util.addListener(session, "end", onHttp2SessionEnd); @@ -8268,6 +8295,15 @@ var require_client_h2 = __commonJS({ } } __name(resumeH2, "resumeH2"); + function applyConnectionWindowSize(connectionWindowSize) { + try { + if (typeof this.setLocalWindowSize === "function") { + this.setLocalWindowSize(connectionWindowSize); + } + } catch { + } + } + __name(applyConnectionWindowSize, "applyConnectionWindowSize"); function onHttp2RemoteSettings(settings) { this[kClient][kMaxConcurrentStreams] = settings.maxConcurrentStreams ?? this[kClient][kMaxConcurrentStreams]; if (this[kRemoteSettings] === true && this[kEnableConnectProtocol] === true && settings.enableConnectProtocol === false) { @@ -8854,6 +8890,8 @@ var require_client = __commonJS({ kOnError, kHTTPContext, kMaxConcurrentStreams, + kHTTP2InitialWindowSize, + kHTTP2ConnectionWindowSize, kResume } = require_symbols(); var connectH1 = require_client_h1(); @@ -8904,7 +8942,9 @@ var require_client = __commonJS({ // h2 maxConcurrentStreams, allowH2, - useH2c + useH2c, + initialWindowSize, + connectionWindowSize } = {}) { if (keepAlive !== void 0) { throw new InvalidArgumentError("unsupported keepAlive, use pipelining=0 instead"); @@ -8973,6 +9013,12 @@ var require_client = __commonJS({ if (useH2c != null && typeof useH2c !== "boolean") { throw new InvalidArgumentError("useH2c must be a valid boolean value"); } + if (initialWindowSize != null && (!Number.isInteger(initialWindowSize) || initialWindowSize < 1)) { + throw new InvalidArgumentError("initialWindowSize must be a positive integer, greater than 0"); + } + if (connectionWindowSize != null && (!Number.isInteger(connectionWindowSize) || connectionWindowSize < 1)) { + throw new InvalidArgumentError("connectionWindowSize must be a positive integer, greater than 0"); + } super(); if (typeof connect2 !== "function") { connect2 = buildConnector({ @@ -9007,6 +9053,8 @@ var require_client = __commonJS({ this[kClosedResolve] = null; this[kMaxResponseSize] = maxResponseSize > -1 ? maxResponseSize : -1; this[kMaxConcurrentStreams] = maxConcurrentStreams != null ? maxConcurrentStreams : 100; + this[kHTTP2InitialWindowSize] = initialWindowSize != null ? initialWindowSize : 262144; + this[kHTTP2ConnectionWindowSize] = connectionWindowSize != null ? 
connectionWindowSize : 524288; this[kHTTPContext] = null; this[kQueue] = []; this[kRunningIdx] = 0; @@ -10808,7 +10856,7 @@ var require_response = __commonJS({ setResponseHeaders(response, headers); setHeadersList(headers, innerResponse.headersList); setHeadersGuard(headers, guard); - if (innerResponse.body?.stream) { + if (innerResponse.urlList.length !== 0 && innerResponse.body?.stream) { streamRegistry.register(response, new WeakRef(innerResponse.body.stream)); } return response; @@ -11473,6 +11521,8 @@ var require_request2 = __commonJS({ preventNoCacheCacheControlHeaderModification: init.preventNoCacheCacheControlHeaderModification ?? false, done: init.done ?? false, timingAllowFailed: init.timingAllowFailed ?? false, + useURLCredentials: init.useURLCredentials ?? void 0, + traversableForUserPrompts: init.traversableForUserPrompts ?? "client", urlList: init.urlList, url: init.urlList[0], headersList: init.headersList ? new HeadersList(init.headersList) : new HeadersList() @@ -11820,7 +11870,10 @@ var require_fetch = __commonJS({ simpleRangeHeaderValue, buildContentRange, createInflate, - extractMimeType + extractMimeType, + hasAuthenticationEntry, + includesCredentials, + isTraversableNavigable } = require_util2(); var assert = require("node:assert"); var { safelyExtractBody, extractBody } = require_body(); @@ -12492,6 +12545,17 @@ var require_fetch = __commonJS({ } httpRequest.headersList.delete("host", true); if (includeCredentials) { + if (!httpRequest.headersList.contains("authorization", true)) { + let authorizationValue = null; + if (hasAuthenticationEntry(httpRequest) && (httpRequest.useURLCredentials === void 0 || !includesCredentials(requestCurrentURL(httpRequest)))) { + } else if (includesCredentials(requestCurrentURL(httpRequest)) && isAuthenticationFetch) { + const { username, password } = requestCurrentURL(httpRequest); + authorizationValue = `Basic ${Buffer.from(`${username}:${password}`).toString("base64")}`; + } + if (authorizationValue !== null) { + httpRequest.headersList.append("Authorization", authorizationValue, false); + } + } } if (httpCache == null) { httpRequest.cache = "no-store"; @@ -12520,6 +12584,21 @@ var require_fetch = __commonJS({ response.rangeRequested = true; } response.requestIncludesCredentials = includeCredentials; + if (response.status === 401 && httpRequest.responseTainting !== "cors" && includeCredentials && isTraversableNavigable(request.traversableForUserPrompts)) { + if (request.body != null) { + if (request.body.source == null) { + return makeNetworkError("expected non-null body source"); + } + request.body = safelyExtractBody(request.body.source)[0]; + } + if (request.useURLCredentials === void 0 || isAuthenticationFetch) { + if (isCancelled(fetchParams)) { + return makeAppropriateNetworkError(fetchParams); + } + } + fetchParams.controller.connection.destroy(); + response = await httpNetworkOrCacheFetch(fetchParams, true); + } if (response.status === 407) { if (request.window === "no-window") { return makeNetworkError(); @@ -13520,7 +13599,8 @@ var require_connection = __commonJS({ mode: "websocket", credentials: "include", cache: "no-store", - redirect: "error" + redirect: "error", + useURLCredentials: true }); if (options.headers) { const headersList = getHeadersList(new Headers(options.headers)); diff --git a/src/undici_version.h b/src/undici_version.h index bbb72660120842..3b69219c7e4865 100644 --- a/src/undici_version.h +++ b/src/undici_version.h @@ -2,5 +2,5 @@ // Refer to tools/dep_updaters/update-undici.sh #ifndef 
SRC_UNDICI_VERSION_H_ #define SRC_UNDICI_VERSION_H_ -#define UNDICI_VERSION "7.18.2" +#define UNDICI_VERSION "7.19.0" #endif // SRC_UNDICI_VERSION_H_ From 05e9a9fb5eb2cacb534b47a2eecbd7a33cb0934a Mon Sep 17 00:00:00 2001 From: "Node.js GitHub Bot" Date: Sun, 25 Jan 2026 12:29:43 -0500 Subject: [PATCH 002/115] deps: update undici to 7.19.1 PR-URL: https://github.com/nodejs/node/pull/61514 Reviewed-By: Antoine du Hamel Reviewed-By: Richard Lau Reviewed-By: Colin Ihrig --- deps/undici/src/lib/llhttp/wasm_build_env.txt | 2 +- deps/undici/src/lib/web/fetch/index.js | 5 +++ deps/undici/src/package-lock.json | 34 +++++++++---------- deps/undici/src/package.json | 2 +- deps/undici/src/scripts/release.js | 14 ++++---- deps/undici/undici.js | 1 + src/undici_version.h | 2 +- 7 files changed, 33 insertions(+), 27 deletions(-) diff --git a/deps/undici/src/lib/llhttp/wasm_build_env.txt b/deps/undici/src/lib/llhttp/wasm_build_env.txt index e0837fc906bc30..445df079257810 100644 --- a/deps/undici/src/lib/llhttp/wasm_build_env.txt +++ b/deps/undici/src/lib/llhttp/wasm_build_env.txt @@ -1,5 +1,5 @@ -> undici@7.19.0 build:wasm +> undici@7.19.1 build:wasm > node build/wasm.js --docker > docker run --rm --platform=linux/x86_64 --user 1001:1001 --mount type=bind,source=/home/runner/work/node/node/deps/undici/src/lib/llhttp,target=/home/node/build/lib/llhttp --mount type=bind,source=/home/runner/work/node/node/deps/undici/src/build,target=/home/node/build/build --mount type=bind,source=/home/runner/work/node/node/deps/undici/src/deps,target=/home/node/build/deps -t ghcr.io/nodejs/wasm-builder@sha256:975f391d907e42a75b8c72eb77c782181e941608687d4d8694c3e9df415a0970 node build/wasm.js diff --git a/deps/undici/src/lib/web/fetch/index.js b/deps/undici/src/lib/web/fetch/index.js index 56e540d9d88258..24d724c2218898 100644 --- a/deps/undici/src/lib/web/fetch/index.js +++ b/deps/undici/src/lib/web/fetch/index.js @@ -1675,6 +1675,11 @@ async function httpNetworkOrCacheFetch ( // 4. Set the password given request’s current URL and password. // requestCurrentURL(request).password = TODO + + // In browsers, the user will be prompted to enter a username/password before the request + // is re-sent. To prevent an infinite 401 loop, return a network error for now. + // https://github.com/nodejs/undici/pull/4756 + return makeNetworkError() } // 4. 
Set response to the result of running HTTP-network-or-cache fetch given diff --git a/deps/undici/src/package-lock.json b/deps/undici/src/package-lock.json index a76cd9df684c7e..75cec7ec1791ba 100644 --- a/deps/undici/src/package-lock.json +++ b/deps/undici/src/package-lock.json @@ -1,12 +1,12 @@ { "name": "undici", - "version": "7.19.0", + "version": "7.19.1", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "undici", - "version": "7.19.0", + "version": "7.19.1", "license": "MIT", "devDependencies": { "@fastify/busboy": "3.2.0", @@ -1987,9 +1987,9 @@ "license": "MIT" }, "node_modules/@sinclair/typebox": { - "version": "0.34.47", - "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.47.tgz", - "integrity": "sha512-ZGIBQ+XDvO5JQku9wmwtabcVTHJsgSWAHYtVuM9pBNNR5E88v6Jcj/llpmsjivig5X8A8HHOb4/mbEKPS5EvAw==", + "version": "0.34.48", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.48.tgz", + "integrity": "sha512-kKJTNuK3AQOrgjjotVxMrCn1sUJwM76wMszfq1kdU4uYVJjvEWuFQ6HgvLt4Xz3fSmZlTOxJ/Ie13KnIcWQXFA==", "dev": true, "license": "MIT" }, @@ -3331,9 +3331,9 @@ "license": "MIT" }, "node_modules/baseline-browser-mapping": { - "version": "2.9.17", - "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.9.17.tgz", - "integrity": "sha512-agD0MgJFUP/4nvjqzIB29zRPUuCF7Ge6mEv9s8dHrtYD7QWXRcx75rOADE/d5ah1NI+0vkDl0yorDd5U852IQQ==", + "version": "2.9.18", + "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.9.18.tgz", + "integrity": "sha512-e23vBV1ZLfjb9apvfPk4rHVu2ry6RIr2Wfs+O324okSidrX7pTAnEJPCh/O5BtRlr7QtZI7ktOP3vsqr7Z5XoA==", "dev": true, "license": "Apache-2.0", "bin": { @@ -3621,9 +3621,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001765", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001765.tgz", - "integrity": "sha512-LWcNtSyZrakjECqmpP4qdg0MMGdN368D7X8XvvAqOcqMv0RxnlqVKZl2V6/mBR68oYMxOZPLw/gO7DuisMHUvQ==", + "version": "1.0.30001766", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001766.tgz", + "integrity": "sha512-4C0lfJ0/YPjJQHagaE9x2Elb69CIqEPZeG0anQt9SIvIoOH4a4uaRl73IavyO+0qZh6MDLH//DrXThEYKHkmYA==", "dev": true, "funding": [ { @@ -3808,9 +3808,9 @@ "license": "MIT" }, "node_modules/comment-parser": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/comment-parser/-/comment-parser-1.4.4.tgz", - "integrity": "sha512-0D6qSQ5IkeRrGJFHRClzaMOenMeT0gErz3zIw3AprKMqhRN6LNU2jQOdkPG/FZ+8bCgXE1VidrgSzlBBDZRr8A==", + "version": "1.4.5", + "resolved": "https://registry.npmjs.org/comment-parser/-/comment-parser-1.4.5.tgz", + "integrity": "sha512-aRDkn3uyIlCFfk5NUA+VdwMmMsh8JGhc4hapfV4yxymHGQ3BVskMQfoXGpCo5IoBuQ9tS5iiVKhCpTcB4pW4qw==", "dev": true, "license": "MIT", "engines": { @@ -4123,9 +4123,9 @@ "license": "MIT" }, "node_modules/electron-to-chromium": { - "version": "1.5.267", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.267.tgz", - "integrity": "sha512-0Drusm6MVRXSOJpGbaSVgcQsuB4hEkMpHXaVstcPmhu5LIedxs1xNK/nIxmQIU/RPC0+1/o0AVZfBTkTNJOdUw==", + "version": "1.5.278", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.278.tgz", + "integrity": "sha512-dQ0tM1svDRQOwxnXxm+twlGTjr9Upvt8UFWAgmLsxEzFQxhbti4VwxmMjsDxVC51Zo84swW7FVCXEV+VAkhuPw==", "dev": true, "license": "ISC" }, diff --git a/deps/undici/src/package.json b/deps/undici/src/package.json index 18637c469dedae..95a13d54f6fce8 100644 --- 
a/deps/undici/src/package.json +++ b/deps/undici/src/package.json @@ -1,6 +1,6 @@ { "name": "undici", - "version": "7.19.0", + "version": "7.19.1", "description": "An HTTP/1.1 client, written from scratch for Node.js", "homepage": "https://undici.nodejs.org", "bugs": { diff --git a/deps/undici/src/scripts/release.js b/deps/undici/src/scripts/release.js index dd3e86eb5dc760..7ca72d8dd3dd04 100644 --- a/deps/undici/src/scripts/release.js +++ b/deps/undici/src/scripts/release.js @@ -2,7 +2,7 @@ // Called from .github/workflows -const generateReleaseNotes = async ({ github, owner, repo, versionTag, defaultBranch }) => { +const generateReleaseNotes = async ({ github, owner, repo, versionTag, commitHash }) => { const { data: releases } = await github.rest.repos.listReleases({ owner, repo @@ -14,7 +14,7 @@ const generateReleaseNotes = async ({ github, owner, repo, versionTag, defaultBr owner, repo, tag_name: versionTag, - target_commitish: defaultBranch, + target_commitish: commitHash, previous_tag_name: previousRelease?.tag_name }) @@ -25,9 +25,9 @@ const generateReleaseNotes = async ({ github, owner, repo, versionTag, defaultBr return bodyWithoutReleasePr } -const generatePr = async ({ github, context, defaultBranch, versionTag }) => { +const generatePr = async ({ github, context, defaultBranch, versionTag, commitHash }) => { const { owner, repo } = context.repo - const releaseNotes = await generateReleaseNotes({ github, owner, repo, versionTag, defaultBranch }) + const releaseNotes = await generateReleaseNotes({ github, owner, repo, versionTag, commitHash }) await github.rest.pulls.create({ owner, @@ -39,15 +39,15 @@ const generatePr = async ({ github, context, defaultBranch, versionTag }) => { }) } -const release = async ({ github, context, defaultBranch, versionTag }) => { +const release = async ({ github, context, versionTag, commitHash }) => { const { owner, repo } = context.repo - const releaseNotes = await generateReleaseNotes({ github, owner, repo, versionTag, defaultBranch }) + const releaseNotes = await generateReleaseNotes({ github, owner, repo, versionTag, commitHash }) await github.rest.repos.createRelease({ owner, repo, tag_name: versionTag, - target_commitish: defaultBranch, + target_commitish: commitHash, name: versionTag, body: releaseNotes, draft: false, diff --git a/deps/undici/undici.js b/deps/undici/undici.js index 77efb43720baec..f09f633f969ffc 100644 --- a/deps/undici/undici.js +++ b/deps/undici/undici.js @@ -12595,6 +12595,7 @@ var require_fetch = __commonJS({ if (isCancelled(fetchParams)) { return makeAppropriateNetworkError(fetchParams); } + return makeNetworkError(); } fetchParams.controller.connection.destroy(); response = await httpNetworkOrCacheFetch(fetchParams, true); diff --git a/src/undici_version.h b/src/undici_version.h index 3b69219c7e4865..0bbb7571b3ed9c 100644 --- a/src/undici_version.h +++ b/src/undici_version.h @@ -2,5 +2,5 @@ // Refer to tools/dep_updaters/update-undici.sh #ifndef SRC_UNDICI_VERSION_H_ #define SRC_UNDICI_VERSION_H_ -#define UNDICI_VERSION "7.19.0" +#define UNDICI_VERSION "7.19.1" #endif // SRC_UNDICI_VERSION_H_ From 625b90b76b78691d65e598a83fbf233fb0030692 Mon Sep 17 00:00:00 2001 From: "Node.js GitHub Bot" Date: Fri, 30 Jan 2026 11:19:55 -0500 Subject: [PATCH 003/115] deps: update undici to 7.19.2 PR-URL: https://github.com/nodejs/node/pull/61566 Reviewed-By: Filip Skokan Reviewed-By: Matthew Aitken Reviewed-By: Colin Ihrig --- deps/undici/src/lib/core/util.js | 25 +-- deps/undici/src/lib/dispatcher/client-h2.js | 26 +-- 
deps/undici/src/lib/interceptor/decompress.js | 7 +- deps/undici/src/lib/llhttp/wasm_build_env.txt | 2 +- deps/undici/src/lib/web/fetch/index.js | 4 +- deps/undici/src/package-lock.json | 199 ++++++++---------- deps/undici/src/package.json | 2 +- deps/undici/undici.js | 35 +-- src/undici_version.h | 2 +- 9 files changed, 122 insertions(+), 180 deletions(-) diff --git a/deps/undici/src/lib/core/util.js b/deps/undici/src/lib/core/util.js index abfa156f15303d..be2c1a7320d8a2 100644 --- a/deps/undici/src/lib/core/util.js +++ b/deps/undici/src/lib/core/util.js @@ -431,22 +431,17 @@ function parseHeaders (headers, obj) { val = [val] obj[key] = val } - val.push(headers[i + 1].toString('utf8')) + val.push(headers[i + 1].toString('latin1')) } else { const headersValue = headers[i + 1] if (typeof headersValue === 'string') { obj[key] = headersValue } else { - obj[key] = Array.isArray(headersValue) ? headersValue.map(x => x.toString('utf8')) : headersValue.toString('utf8') + obj[key] = Array.isArray(headersValue) ? headersValue.map(x => x.toString('latin1')) : headersValue.toString('latin1') } } } - // See https://github.com/nodejs/node/pull/46528 - if ('content-length' in obj && 'content-disposition' in obj) { - obj['content-disposition'] = Buffer.from(obj['content-disposition']).toString('latin1') - } - return obj } @@ -461,34 +456,20 @@ function parseRawHeaders (headers) { */ const ret = new Array(headersLength) - let hasContentLength = false - let contentDispositionIdx = -1 let key let val - let kLen = 0 for (let n = 0; n < headersLength; n += 2) { key = headers[n] val = headers[n + 1] typeof key !== 'string' && (key = key.toString()) - typeof val !== 'string' && (val = val.toString('utf8')) + typeof val !== 'string' && (val = val.toString('latin1')) - kLen = key.length - if (kLen === 14 && key[7] === '-' && (key === 'content-length' || key.toLowerCase() === 'content-length')) { - hasContentLength = true - } else if (kLen === 19 && key[7] === '-' && (key === 'content-disposition' || key.toLowerCase() === 'content-disposition')) { - contentDispositionIdx = n + 1 - } ret[n] = key ret[n + 1] = val } - // See https://github.com/nodejs/node/pull/46528 - if (hasContentLength && contentDispositionIdx !== -1) { - ret[contentDispositionIdx] = Buffer.from(ret[contentDispositionIdx]).toString('latin1') - } - return ret } diff --git a/deps/undici/src/lib/dispatcher/client-h2.js b/deps/undici/src/lib/dispatcher/client-h2.js index b77c4cffee5f6f..c9aec504af12c6 100644 --- a/deps/undici/src/lib/dispatcher/client-h2.js +++ b/deps/undici/src/lib/dispatcher/client-h2.js @@ -642,9 +642,13 @@ function writeH2 (client, request) { ++session[kOpenStreams] stream.setTimeout(requestTimeout) + // Track whether we received a response (headers) + let responseReceived = false + stream.once('response', headers => { const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers request.onResponseStarted() + responseReceived = true // Due to the stream nature, it is possible we face a race condition // where the stream has been assigned, but the request has been aborted @@ -667,14 +671,10 @@ function writeH2 (client, request) { } }) - stream.once('end', (err) => { + stream.once('end', () => { stream.removeAllListeners('data') - // When state is null, it means we haven't consumed body and the stream still do not have - // a state. 
- // Present specially when using pipeline or stream - if (stream.state?.state == null || stream.state.state < 6) { - // Do not complete the request if it was aborted - // Not prone to happen for as safety net to avoid race conditions with 'trailers' + // If we received a response, this is a normal completion + if (responseReceived) { if (!request.aborted && !request.completed) { request.onComplete({}) } @@ -682,15 +682,9 @@ function writeH2 (client, request) { client[kQueue][client[kRunningIdx]++] = null client[kResume]() } else { - // Stream is closed or half-closed-remote (6), decrement counter and cleanup - // It does not have sense to continue working with the stream as we do not - // have yet RST_STREAM support on client-side - --session[kOpenStreams] - if (session[kOpenStreams] === 0) { - session.unref() - } - - abort(err ?? new InformationalError('HTTP/2: stream half-closed (remote)')) + // Stream ended without receiving a response - this is an error + // (e.g., server destroyed the stream before sending headers) + abort(new InformationalError('HTTP/2: stream half-closed (remote)')) client[kQueue][client[kRunningIdx]++] = null client[kPendingIdx] = client[kRunningIdx] client[kResume]() diff --git a/deps/undici/src/lib/interceptor/decompress.js b/deps/undici/src/lib/interceptor/decompress.js index 9d0cc58cf728e3..ee4202a96f708b 100644 --- a/deps/undici/src/lib/interceptor/decompress.js +++ b/deps/undici/src/lib/interceptor/decompress.js @@ -33,8 +33,6 @@ let warningEmitted = /** @type {boolean} */ (false) class DecompressHandler extends DecoratorHandler { /** @type {Transform[]} */ #decompressors = [] - /** @type {NodeJS.WritableStream&NodeJS.ReadableStream|null} */ - #pipelineStream /** @type {Readonly} */ #skipStatusCodes /** @type {boolean} */ @@ -139,7 +137,7 @@ class DecompressHandler extends DecoratorHandler { const lastDecompressor = this.#decompressors[this.#decompressors.length - 1] this.#setupDecompressorEvents(lastDecompressor, controller) - this.#pipelineStream = pipeline(this.#decompressors, (err) => { + pipeline(this.#decompressors, (err) => { if (err) { super.onResponseError(controller, err) return @@ -154,7 +152,6 @@ class DecompressHandler extends DecoratorHandler { */ #cleanupDecompressors () { this.#decompressors.length = 0 - this.#pipelineStream = null } /** @@ -190,7 +187,7 @@ class DecompressHandler extends DecoratorHandler { this.#setupMultipleDecompressors(controller) } - super.onResponseStart(controller, statusCode, newHeaders, statusMessage) + return super.onResponseStart(controller, statusCode, newHeaders, statusMessage) } /** diff --git a/deps/undici/src/lib/llhttp/wasm_build_env.txt b/deps/undici/src/lib/llhttp/wasm_build_env.txt index 445df079257810..96c7c9c0dbb124 100644 --- a/deps/undici/src/lib/llhttp/wasm_build_env.txt +++ b/deps/undici/src/lib/llhttp/wasm_build_env.txt @@ -1,5 +1,5 @@ -> undici@7.19.1 build:wasm +> undici@7.19.2 build:wasm > node build/wasm.js --docker > docker run --rm --platform=linux/x86_64 --user 1001:1001 --mount type=bind,source=/home/runner/work/node/node/deps/undici/src/lib/llhttp,target=/home/node/build/lib/llhttp --mount type=bind,source=/home/runner/work/node/node/deps/undici/src/build,target=/home/node/build/build --mount type=bind,source=/home/runner/work/node/node/deps/undici/src/deps,target=/home/node/build/deps -t ghcr.io/nodejs/wasm-builder@sha256:975f391d907e42a75b8c72eb77c782181e941608687d4d8694c3e9df415a0970 node build/wasm.js diff --git a/deps/undici/src/lib/web/fetch/index.js 
b/deps/undici/src/lib/web/fetch/index.js index 24d724c2218898..bb33e8d77e8e11 100644 --- a/deps/undici/src/lib/web/fetch/index.js +++ b/deps/undici/src/lib/web/fetch/index.js @@ -1677,9 +1677,9 @@ async function httpNetworkOrCacheFetch ( // requestCurrentURL(request).password = TODO // In browsers, the user will be prompted to enter a username/password before the request - // is re-sent. To prevent an infinite 401 loop, return a network error for now. + // is re-sent. To prevent an infinite 401 loop, return the response for now. // https://github.com/nodejs/undici/pull/4756 - return makeNetworkError() + return response } // 4. Set response to the result of running HTTP-network-or-cache fetch given diff --git a/deps/undici/src/package-lock.json b/deps/undici/src/package-lock.json index 75cec7ec1791ba..60330a63604a0a 100644 --- a/deps/undici/src/package-lock.json +++ b/deps/undici/src/package-lock.json @@ -1,12 +1,12 @@ { "name": "undici", - "version": "7.19.1", + "version": "7.19.2", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "undici", - "version": "7.19.1", + "version": "7.19.2", "license": "MIT", "devDependencies": { "@fastify/busboy": "3.2.0", @@ -37,41 +37,41 @@ } }, "node_modules/@actions/core": { - "version": "1.11.1", - "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.11.1.tgz", - "integrity": "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@actions/core/-/core-2.0.3.tgz", + "integrity": "sha512-Od9Thc3T1mQJYddvVPM4QGiLUewdh+3txmDYHHxoNdkqysR1MbCT+rFOtNUxYAz+7+6RIsqipVahY2GJqGPyxA==", "dev": true, "license": "MIT", "dependencies": { - "@actions/exec": "^1.1.1", - "@actions/http-client": "^2.0.1" + "@actions/exec": "^2.0.0", + "@actions/http-client": "^3.0.2" } }, "node_modules/@actions/exec": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.1.tgz", - "integrity": "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-2.0.0.tgz", + "integrity": "sha512-k8ngrX2voJ/RIN6r9xB82NVqKpnMRtxDoiO+g3olkIUpQNqjArXrCQceduQZCQj3P3xm32pChRLqRrtXTlqhIw==", "dev": true, "license": "MIT", "dependencies": { - "@actions/io": "^1.0.1" + "@actions/io": "^2.0.0" } }, "node_modules/@actions/http-client": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.3.tgz", - "integrity": "sha512-mx8hyJi/hjFvbPokCg4uRd4ZX78t+YyRPtnKWwIl+RzNaVuFpQHfmlGVfsKEJN8LwTCvL+DfVgAM04XaHkm6bA==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-3.0.2.tgz", + "integrity": "sha512-JP38FYYpyqvUsz+Igqlc/JG6YO9PaKuvqjM3iGvaLqFnJ7TFmcLyy2IDrY0bI0qCQug8E9K+elv5ZNfw62ZJzA==", "dev": true, "license": "MIT", "dependencies": { "tunnel": "^0.0.6", - "undici": "^5.25.4" + "undici": "^6.23.0" } }, "node_modules/@actions/io": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.3.tgz", - "integrity": "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@actions/io/-/io-2.0.0.tgz", + "integrity": "sha512-Jv33IN09XLO+0HS79aaODsvIRyduiF7NY/F6LYeK5oeUmrsz7aFdRphQjFoESF4jS7lMauDOttKALcpapVDIAg==", "dev": true, "license": "MIT" }, @@ -1969,13 +1969,13 @@ } }, 
"node_modules/@reporters/github": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/@reporters/github/-/github-1.11.0.tgz", - "integrity": "sha512-sP/fSOgIoMYXZFWVy2Hw6vWUG3akUBiykqnFjx2jWI/kdqj55VZNXAQ27MYuiNffWlITW6mMBcv8+i47O7C77w==", + "version": "1.12.0", + "resolved": "https://registry.npmjs.org/@reporters/github/-/github-1.12.0.tgz", + "integrity": "sha512-2f9wjb6ncO3iLXXGWFJaRT7ztYtPGuzK2FnBODWK7VTZLyhhjr4RnBI4l3D8RJGsSAEXSv0tsH+0bvapTGdg9g==", "dev": true, "license": "MIT", "dependencies": { - "@actions/core": "^1.10.0", + "@actions/core": "^2.0.3", "stack-utils": "^2.0.6" } }, @@ -2226,17 +2226,17 @@ "license": "MIT" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "8.53.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.53.1.tgz", - "integrity": "sha512-cFYYFZ+oQFi6hUnBTbLRXfTJiaQtYE3t4O692agbBl+2Zy+eqSKWtPjhPXJu1G7j4RLjKgeJPDdq3EqOwmX5Ag==", + "version": "8.54.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.54.0.tgz", + "integrity": "sha512-hAAP5io/7csFStuOmR782YmTthKBJ9ND3WVL60hcOjvtGFb+HJxH4O5huAcmcZ9v9G8P+JETiZ/G1B8MALnWZQ==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/regexpp": "^4.12.2", - "@typescript-eslint/scope-manager": "8.53.1", - "@typescript-eslint/type-utils": "8.53.1", - "@typescript-eslint/utils": "8.53.1", - "@typescript-eslint/visitor-keys": "8.53.1", + "@typescript-eslint/scope-manager": "8.54.0", + "@typescript-eslint/type-utils": "8.54.0", + "@typescript-eslint/utils": "8.54.0", + "@typescript-eslint/visitor-keys": "8.54.0", "ignore": "^7.0.5", "natural-compare": "^1.4.0", "ts-api-utils": "^2.4.0" @@ -2249,7 +2249,7 @@ "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "@typescript-eslint/parser": "^8.53.1", + "@typescript-eslint/parser": "^8.54.0", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <6.0.0" } @@ -2265,16 +2265,16 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "8.53.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.53.1.tgz", - "integrity": "sha512-nm3cvFN9SqZGXjmw5bZ6cGmvJSyJPn0wU9gHAZZHDnZl2wF9PhHv78Xf06E0MaNk4zLVHL8hb2/c32XvyJOLQg==", + "version": "8.54.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.54.0.tgz", + "integrity": "sha512-BtE0k6cjwjLZoZixN0t5AKP0kSzlGu7FctRXYuPAm//aaiZhmfq1JwdYpYr1brzEspYyFeF+8XF5j2VK6oalrA==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/scope-manager": "8.53.1", - "@typescript-eslint/types": "8.53.1", - "@typescript-eslint/typescript-estree": "8.53.1", - "@typescript-eslint/visitor-keys": "8.53.1", + "@typescript-eslint/scope-manager": "8.54.0", + "@typescript-eslint/types": "8.54.0", + "@typescript-eslint/typescript-estree": "8.54.0", + "@typescript-eslint/visitor-keys": "8.54.0", "debug": "^4.4.3" }, "engines": { @@ -2290,14 +2290,14 @@ } }, "node_modules/@typescript-eslint/project-service": { - "version": "8.53.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.53.1.tgz", - "integrity": "sha512-WYC4FB5Ra0xidsmlPb+1SsnaSKPmS3gsjIARwbEkHkoWloQmuzcfypljaJcR78uyLA1h8sHdWWPHSLDI+MtNog==", + "version": "8.54.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.54.0.tgz", + "integrity": "sha512-YPf+rvJ1s7MyiWM4uTRhE4DvBXrEV+d8oC3P9Y2eT7S+HBS0clybdMIPnhiATi9vZOYDc7OQ1L/i6ga6NFYK/g==", "dev": true, "license": "MIT", 
"dependencies": { - "@typescript-eslint/tsconfig-utils": "^8.53.1", - "@typescript-eslint/types": "^8.53.1", + "@typescript-eslint/tsconfig-utils": "^8.54.0", + "@typescript-eslint/types": "^8.54.0", "debug": "^4.4.3" }, "engines": { @@ -2312,14 +2312,14 @@ } }, "node_modules/@typescript-eslint/scope-manager": { - "version": "8.53.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.53.1.tgz", - "integrity": "sha512-Lu23yw1uJMFY8cUeq7JlrizAgeQvWugNQzJp8C3x8Eo5Jw5Q2ykMdiiTB9vBVOOUBysMzmRRmUfwFrZuI2C4SQ==", + "version": "8.54.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.54.0.tgz", + "integrity": "sha512-27rYVQku26j/PbHYcVfRPonmOlVI6gihHtXFbTdB5sb6qA0wdAQAbyXFVarQ5t4HRojIz64IV90YtsjQSSGlQg==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.53.1", - "@typescript-eslint/visitor-keys": "8.53.1" + "@typescript-eslint/types": "8.54.0", + "@typescript-eslint/visitor-keys": "8.54.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2330,9 +2330,9 @@ } }, "node_modules/@typescript-eslint/tsconfig-utils": { - "version": "8.53.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.53.1.tgz", - "integrity": "sha512-qfvLXS6F6b1y43pnf0pPbXJ+YoXIC7HKg0UGZ27uMIemKMKA6XH2DTxsEDdpdN29D+vHV07x/pnlPNVLhdhWiA==", + "version": "8.54.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.54.0.tgz", + "integrity": "sha512-dRgOyT2hPk/JwxNMZDsIXDgyl9axdJI3ogZ2XWhBPsnZUv+hPesa5iuhdYt2gzwA9t8RE5ytOJ6xB0moV0Ujvw==", "dev": true, "license": "MIT", "engines": { @@ -2347,15 +2347,15 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "8.53.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.53.1.tgz", - "integrity": "sha512-MOrdtNvyhy0rHyv0ENzub1d4wQYKb2NmIqG7qEqPWFW7Mpy2jzFC3pQ2yKDvirZB7jypm5uGjF2Qqs6OIqu47w==", + "version": "8.54.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.54.0.tgz", + "integrity": "sha512-hiLguxJWHjjwL6xMBwD903ciAwd7DmK30Y9Axs/etOkftC3ZNN9K44IuRD/EB08amu+Zw6W37x9RecLkOo3pMA==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.53.1", - "@typescript-eslint/typescript-estree": "8.53.1", - "@typescript-eslint/utils": "8.53.1", + "@typescript-eslint/types": "8.54.0", + "@typescript-eslint/typescript-estree": "8.54.0", + "@typescript-eslint/utils": "8.54.0", "debug": "^4.4.3", "ts-api-utils": "^2.4.0" }, @@ -2372,9 +2372,9 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "8.53.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.53.1.tgz", - "integrity": "sha512-jr/swrr2aRmUAUjW5/zQHbMaui//vQlsZcJKijZf3M26bnmLj8LyZUpj8/Rd6uzaek06OWsqdofN/Thenm5O8A==", + "version": "8.54.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.54.0.tgz", + "integrity": "sha512-PDUI9R1BVjqu7AUDsRBbKMtwmjWcn4J3le+5LpcFgWULN3LvHC5rkc9gCVxbrsrGmO1jfPybN5s6h4Jy+OnkAA==", "dev": true, "license": "MIT", "engines": { @@ -2386,16 +2386,16 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "8.53.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.53.1.tgz", - "integrity": "sha512-RGlVipGhQAG4GxV1s34O91cxQ/vWiHJTDHbXRr0li2q/BGg3RR/7NM8QDWgkEgrwQYCvmJV9ichIwyoKCQ+DTg==", + "version": "8.54.0", + "resolved": 
"https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.54.0.tgz", + "integrity": "sha512-BUwcskRaPvTk6fzVWgDPdUndLjB87KYDrN5EYGetnktoeAvPtO4ONHlAZDnj5VFnUANg0Sjm7j4usBlnoVMHwA==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/project-service": "8.53.1", - "@typescript-eslint/tsconfig-utils": "8.53.1", - "@typescript-eslint/types": "8.53.1", - "@typescript-eslint/visitor-keys": "8.53.1", + "@typescript-eslint/project-service": "8.54.0", + "@typescript-eslint/tsconfig-utils": "8.54.0", + "@typescript-eslint/types": "8.54.0", + "@typescript-eslint/visitor-keys": "8.54.0", "debug": "^4.4.3", "minimatch": "^9.0.5", "semver": "^7.7.3", @@ -2453,16 +2453,16 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "8.53.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.53.1.tgz", - "integrity": "sha512-c4bMvGVWW4hv6JmDUEG7fSYlWOl3II2I4ylt0NM+seinYQlZMQIaKaXIIVJWt9Ofh6whrpM+EdDQXKXjNovvrg==", + "version": "8.54.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.54.0.tgz", + "integrity": "sha512-9Cnda8GS57AQakvRyG0PTejJNlA2xhvyNtEVIMlDWOOeEyBkYWhGPnfrIAnqxLMTSTo6q8g12XVjjev5l1NvMA==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.9.1", - "@typescript-eslint/scope-manager": "8.53.1", - "@typescript-eslint/types": "8.53.1", - "@typescript-eslint/typescript-estree": "8.53.1" + "@typescript-eslint/scope-manager": "8.54.0", + "@typescript-eslint/types": "8.54.0", + "@typescript-eslint/typescript-estree": "8.54.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2477,13 +2477,13 @@ } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "8.53.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.53.1.tgz", - "integrity": "sha512-oy+wV7xDKFPRyNggmXuZQSBzvoLnpmJs+GhzRhPjrxl2b/jIlyjVokzm47CZCDUdXKr2zd7ZLodPfOBpOPyPlg==", + "version": "8.54.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.54.0.tgz", + "integrity": "sha512-VFlhGSl4opC0bprJiItPQ1RfUhGDIBokcPwaFH4yiBCaNPeld/9VeXbiPO1cLyorQi1G1vL+ecBk1x8o1axORA==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.53.1", + "@typescript-eslint/types": "8.54.0", "eslint-visitor-keys": "^4.2.1" }, "engines": { @@ -3331,9 +3331,9 @@ "license": "MIT" }, "node_modules/baseline-browser-mapping": { - "version": "2.9.18", - "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.9.18.tgz", - "integrity": "sha512-e23vBV1ZLfjb9apvfPk4rHVu2ry6RIr2Wfs+O324okSidrX7pTAnEJPCh/O5BtRlr7QtZI7ktOP3vsqr7Z5XoA==", + "version": "2.9.19", + "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.9.19.tgz", + "integrity": "sha512-ipDqC8FrAl/76p2SSWKSI+H9tFwm7vYqXQrItCuiVPt26Km0jS+NzSsBWAaBusvSbQcfJG+JitdMm+wZAgTYqg==", "dev": true, "license": "Apache-2.0", "bin": { @@ -4123,9 +4123,9 @@ "license": "MIT" }, "node_modules/electron-to-chromium": { - "version": "1.5.278", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.278.tgz", - "integrity": "sha512-dQ0tM1svDRQOwxnXxm+twlGTjr9Upvt8UFWAgmLsxEzFQxhbti4VwxmMjsDxVC51Zo84swW7FVCXEV+VAkhuPw==", + "version": "1.5.279", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.279.tgz", + "integrity": 
"sha512-0bblUU5UNdOt5G7XqGiJtpZMONma6WAfq9vsFmtn9x1+joAObr6x1chfqyxFSDCAFwFhCQDrqeAr6MYdpwJ9Hg==", "dev": true, "license": "ISC" }, @@ -10027,16 +10027,16 @@ } }, "node_modules/typescript-eslint": { - "version": "8.53.1", - "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.53.1.tgz", - "integrity": "sha512-gB+EVQfP5RDElh9ittfXlhZJdjSU4jUSTyE2+ia8CYyNvet4ElfaLlAIqDvQV9JPknKx0jQH1racTYe/4LaLSg==", + "version": "8.54.0", + "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.54.0.tgz", + "integrity": "sha512-CKsJ+g53QpsNPqbzUsfKVgd3Lny4yKZ1pP4qN3jdMOg/sisIDLGyDMezycquXLE5JsEU0wp3dGNdzig0/fmSVQ==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/eslint-plugin": "8.53.1", - "@typescript-eslint/parser": "8.53.1", - "@typescript-eslint/typescript-estree": "8.53.1", - "@typescript-eslint/utils": "8.53.1" + "@typescript-eslint/eslint-plugin": "8.54.0", + "@typescript-eslint/parser": "8.54.0", + "@typescript-eslint/typescript-estree": "8.54.0", + "@typescript-eslint/utils": "8.54.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -10070,16 +10070,13 @@ } }, "node_modules/undici": { - "version": "5.29.0", - "resolved": "https://registry.npmjs.org/undici/-/undici-5.29.0.tgz", - "integrity": "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==", + "version": "6.23.0", + "resolved": "https://registry.npmjs.org/undici/-/undici-6.23.0.tgz", + "integrity": "sha512-VfQPToRA5FZs/qJxLIinmU59u0r7LXqoJkCzinq3ckNJp3vKEh7jTWN589YQ5+aoAC/TGRLyJLCPKcLQbM8r9g==", "dev": true, "license": "MIT", - "dependencies": { - "@fastify/busboy": "^2.0.0" - }, "engines": { - "node": ">=14.0" + "node": ">=18.17" } }, "node_modules/undici-types": { @@ -10089,16 +10086,6 @@ "dev": true, "license": "MIT" }, - "node_modules/undici/node_modules/@fastify/busboy": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz", - "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=14" - } - }, "node_modules/unicorn-magic": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.1.0.tgz", diff --git a/deps/undici/src/package.json b/deps/undici/src/package.json index 95a13d54f6fce8..bdfe7896b191a6 100644 --- a/deps/undici/src/package.json +++ b/deps/undici/src/package.json @@ -1,6 +1,6 @@ { "name": "undici", - "version": "7.19.1", + "version": "7.19.2", "description": "An HTTP/1.1 client, written from scratch for Node.js", "homepage": "https://undici.nodejs.org", "bugs": { diff --git a/deps/undici/undici.js b/deps/undici/undici.js index f09f633f969ffc..8ea790e4860045 100644 --- a/deps/undici/undici.js +++ b/deps/undici/undici.js @@ -1423,47 +1423,32 @@ var require_util = __commonJS({ val = [val]; obj[key] = val; } - val.push(headers[i + 1].toString("utf8")); + val.push(headers[i + 1].toString("latin1")); } else { const headersValue = headers[i + 1]; if (typeof headersValue === "string") { obj[key] = headersValue; } else { - obj[key] = Array.isArray(headersValue) ? headersValue.map((x) => x.toString("utf8")) : headersValue.toString("utf8"); + obj[key] = Array.isArray(headersValue) ? 
headersValue.map((x) => x.toString("latin1")) : headersValue.toString("latin1"); } } } - if ("content-length" in obj && "content-disposition" in obj) { - obj["content-disposition"] = Buffer.from(obj["content-disposition"]).toString("latin1"); - } return obj; } __name(parseHeaders, "parseHeaders"); function parseRawHeaders(headers) { const headersLength = headers.length; const ret = new Array(headersLength); - let hasContentLength = false; - let contentDispositionIdx = -1; let key; let val; - let kLen = 0; for (let n = 0; n < headersLength; n += 2) { key = headers[n]; val = headers[n + 1]; typeof key !== "string" && (key = key.toString()); - typeof val !== "string" && (val = val.toString("utf8")); - kLen = key.length; - if (kLen === 14 && key[7] === "-" && (key === "content-length" || key.toLowerCase() === "content-length")) { - hasContentLength = true; - } else if (kLen === 19 && key[7] === "-" && (key === "content-disposition" || key.toLowerCase() === "content-disposition")) { - contentDispositionIdx = n + 1; - } + typeof val !== "string" && (val = val.toString("latin1")); ret[n] = key; ret[n + 1] = val; } - if (hasContentLength && contentDispositionIdx !== -1) { - ret[contentDispositionIdx] = Buffer.from(ret[contentDispositionIdx]).toString("latin1"); - } return ret; } __name(parseRawHeaders, "parseRawHeaders"); @@ -8573,9 +8558,11 @@ var require_client_h2 = __commonJS({ } ++session[kOpenStreams]; stream.setTimeout(requestTimeout); + let responseReceived = false; stream.once("response", (headers2) => { const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers2; request.onResponseStarted(); + responseReceived = true; if (request.aborted) { stream.removeAllListeners("data"); return; @@ -8589,20 +8576,16 @@ var require_client_h2 = __commonJS({ stream.pause(); } }); - stream.once("end", (err) => { + stream.once("end", () => { stream.removeAllListeners("data"); - if (stream.state?.state == null || stream.state.state < 6) { + if (responseReceived) { if (!request.aborted && !request.completed) { request.onComplete({}); } client[kQueue][client[kRunningIdx]++] = null; client[kResume](); } else { - --session[kOpenStreams]; - if (session[kOpenStreams] === 0) { - session.unref(); - } - abort(err ?? new InformationalError("HTTP/2: stream half-closed (remote)")); + abort(new InformationalError("HTTP/2: stream half-closed (remote)")); client[kQueue][client[kRunningIdx]++] = null; client[kPendingIdx] = client[kRunningIdx]; client[kResume](); @@ -12595,7 +12578,7 @@ var require_fetch = __commonJS({ if (isCancelled(fetchParams)) { return makeAppropriateNetworkError(fetchParams); } - return makeNetworkError(); + return response; } fetchParams.controller.connection.destroy(); response = await httpNetworkOrCacheFetch(fetchParams, true); diff --git a/src/undici_version.h b/src/undici_version.h index 0bbb7571b3ed9c..a494e8adc3ece6 100644 --- a/src/undici_version.h +++ b/src/undici_version.h @@ -2,5 +2,5 @@ // Refer to tools/dep_updaters/update-undici.sh #ifndef SRC_UNDICI_VERSION_H_ #define SRC_UNDICI_VERSION_H_ -#define UNDICI_VERSION "7.19.1" +#define UNDICI_VERSION "7.19.2" #endif // SRC_UNDICI_VERSION_H_ From ebe01cc70885598ce750de372b9cb49b83ed72aa Mon Sep 17 00:00:00 2001 From: Matteo Collina Date: Mon, 22 Dec 2025 18:25:33 +0100 Subject: [PATCH 004/115] tls: route callback exceptions through error handlers Wrap pskCallback and ALPNCallback invocations in try-catch blocks to route exceptions through owner.destroy() instead of letting them become uncaught exceptions. 
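A hedged sketch of the observable effect of this change for user code (it is not the patched internals): a TLS server whose ALPNCallback or pskCallback throws no longer dies with an uncaught exception; only the offending handshake is torn down. The certificate paths are placeholders, and surfacing the error via 'tlsClientError' is an assumption made for illustration.

```js
'use strict';
// Sketch of the observable behaviour, not the patched internals. Assumes
// self-signed certificate files at the placeholder paths below.
const tls = require('node:tls');
const fs = require('node:fs');

const server = tls.createServer({
  key: fs.readFileSync('server-key.pem'),   // placeholder
  cert: fs.readFileSync('server-cert.pem'), // placeholder
  ALPNCallback: () => {
    // Previously this escaped as an uncaught exception and could take the
    // whole process down; with the fix it is routed through the socket's
    // destroy path and only this handshake fails.
    throw new Error('ALPN selection failed');
  },
});

// The handshake never completes, so 'secureConnection' must not fire...
server.on('secureConnection', () => { throw new Error('should not be reached'); });
// ...and the failure stays local to the offending connection (assumed to be
// observable here rather than as a process-level crash).
server.on('tlsClientError', (err) => console.error('handshake rejected:', err.message));

server.listen(0);
```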
This prevents remote attackers from crashing TLS servers or causing resource exhaustion. Fixes: https://hackerone.com/reports/3473882 PR-URL: https://github.com/nodejs-private/node-private/pull/782 PR-URL: https://github.com/nodejs-private/node-private/pull/790 CVE-ID: CVE-2026-21637 --- ...ls-psk-alpn-callback-exception-handling.js | 24 +++++-------------- 1 file changed, 6 insertions(+), 18 deletions(-) diff --git a/test/parallel/test-tls-psk-alpn-callback-exception-handling.js b/test/parallel/test-tls-psk-alpn-callback-exception-handling.js index 93bf7396d2ab53..d4308d62087ea0 100644 --- a/test/parallel/test-tls-psk-alpn-callback-exception-handling.js +++ b/test/parallel/test-tls-psk-alpn-callback-exception-handling.js @@ -90,9 +90,7 @@ describe('TLS callback exception handling', () => { } })); - server.on('secureConnection', common.mustNotCall(() => { - reject(new Error('secureConnection should not fire')); - })); + server.on('secureConnection', common.mustNotCall('secureConnection should not fire')); await new Promise((res) => server.listen(0, res)); @@ -137,9 +135,7 @@ describe('TLS callback exception handling', () => { } })); - server.on('secureConnection', common.mustNotCall(() => { - reject(new Error('secureConnection should not fire')); - })); + server.on('secureConnection', common.mustNotCall('secureConnection should not fire')); await new Promise((res) => server.listen(0, res)); @@ -185,9 +181,7 @@ describe('TLS callback exception handling', () => { } })); - server.on('secureConnection', common.mustNotCall(() => { - reject(new Error('secureConnection should not fire')); - })); + server.on('secureConnection', common.mustNotCall('secureConnection should not fire')); await new Promise((res) => server.listen(0, res)); @@ -228,9 +222,7 @@ describe('TLS callback exception handling', () => { } })); - server.on('secureConnection', common.mustNotCall(() => { - reject(new Error('secureConnection should not fire')); - })); + server.on('secureConnection', common.mustNotCall('secureConnection should not fire')); await new Promise((res) => server.listen(0, res)); const client = tls.connect({ @@ -259,9 +251,7 @@ describe('TLS callback exception handling', () => { const { promise, resolve, reject } = createTestPromise(); - server.on('secureConnection', common.mustNotCall(() => { - reject(new Error('secureConnection should not fire')); - })); + server.on('secureConnection', common.mustNotCall('secureConnection should not fire')); await new Promise((res) => server.listen(0, res)); @@ -303,9 +293,7 @@ describe('TLS callback exception handling', () => { const { promise, resolve, reject } = createTestPromise(); - server.on('secureConnection', common.mustNotCall(() => { - reject(new Error('secureConnection should not fire')); - })); + server.on('secureConnection', common.mustNotCall('secureConnection should not fire')); await new Promise((res) => server.listen(0, res)); From b896ab473189c74de8562f8817fad46061eab240 Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Sat, 15 Nov 2025 15:16:37 +0200 Subject: [PATCH 005/115] test: ensure assertions are reached on more tests PR-URL: https://github.com/nodejs/node/pull/60634 Reviewed-By: Colin Ihrig --- test/eslint.config_partial.mjs | 2 +- test/parallel/test-vfs.js | 36 ++++----- test/parallel/test-vm-module-basic.js | 6 +- test/parallel/test-vm-module-link.js | 4 +- .../test-watch-file-shared-dependency.mjs | 4 +- test/parallel/test-weakref.js | 6 +- test/parallel/test-web-locks-query.js | 10 +-- test/parallel/test-web-locks.js | 54 +++++++------- 
.../test-webcrypto-cryptokey-workers.js | 4 +- test/parallel/test-webcrypto-digest.js | 4 +- test/parallel/test-webstreams-clone-unref.js | 6 +- test/parallel/test-webstreams-compose.js | 32 ++++---- test/parallel/test-webstreams-finished.js | 18 ++--- test/parallel/test-webstreams-pipeline.js | 20 ++--- .../test-whatwg-encoding-custom-internals.js | 6 +- ...g-encoding-custom-textdecoder-streaming.js | 5 +- ...ents-add-event-listener-options-passive.js | 18 ++--- ...vents-add-event-listener-options-signal.js | 73 +++++-------------- .../test-whatwg-events-customevent.js | 14 ++-- .../test-whatwg-readablebytestream.js | 4 + test/parallel/test-whatwg-readablestream.js | 37 +++++----- .../test-whatwg-webstreams-compression.js | 4 +- .../test-whatwg-webstreams-transfer.js | 4 +- test/parallel/test-whatwg-writablestream.js | 2 +- .../test-worker-arraybuffer-zerofill.js | 4 +- test/parallel/test-worker-debug.js | 1 + test/parallel/test-worker-environmentdata.js | 15 ++-- test/parallel/test-worker-exit-code.js | 4 +- test/parallel/test-worker-hasref.js | 14 ++-- test/parallel/test-worker-init-failure.js | 8 +- test/parallel/test-worker-memory.js | 4 +- .../parallel/test-worker-message-port-move.js | 5 +- test/parallel/test-worker-message-port.js | 12 +-- .../test-worker-messageport-hasref.js | 14 ++-- .../test-worker-messaging-errors-handler.js | 4 +- .../test-worker-messaging-errors-invalid.js | 8 +- .../test-worker-messaging-errors-timeout.js | 4 +- test/parallel/test-worker-messaging.js | 16 ++-- .../test-worker-nexttick-terminate.js | 4 +- .../test-worker-workerdata-messageport.js | 10 +-- test/parallel/test-wrap-js-stream-destroy.js | 24 +++--- test/parallel/test-x509-escaping.js | 12 +-- 42 files changed, 245 insertions(+), 291 deletions(-) diff --git a/test/eslint.config_partial.mjs b/test/eslint.config_partial.mjs index fdf0dbde10a50b..e18896580d896f 100644 --- a/test/eslint.config_partial.mjs +++ b/test/eslint.config_partial.mjs @@ -204,7 +204,7 @@ export default [ Array.from({ length: 13 }, (_, i) => String.fromCharCode(0x61 + i, 42)).join(',') },n*,${ // 0x61 is code for 'a', this generates a string enumerating latin letters: 'z*,y*,…' - Array.from({ length: 2 }, (_, i) => String.fromCharCode(0x61 + 25 - i, 42)).join(',') + Array.from({ length: 5 }, (_, i) => String.fromCharCode(0x61 + 25 - i, 42)).join(',') }}.{js,mjs,cjs}`, ], rules: { diff --git a/test/parallel/test-vfs.js b/test/parallel/test-vfs.js index 3ce8f8344a9ca2..237d9323b8f6ac 100644 --- a/test/parallel/test-vfs.js +++ b/test/parallel/test-vfs.js @@ -6,7 +6,7 @@ const common = require('../common'); const Module = require('module'); const fs = require('fs'); const tmpdir = require('../common/tmpdir'); -const { deepStrictEqual, ok, strictEqual, throws } = require('assert'); +const assert = require('assert'); const { join } = require('path'); const directory = tmpdir.resolve('directory'); @@ -17,20 +17,20 @@ tmpdir.refresh(); fs.writeFileSync(file, "module.exports = { a: 'b' }"); fs.mkdirSync(directory); -strictEqual(Module._stat(directory), 1); -ok(Module._stat(doesNotExist) < 0); -strictEqual(Module._stat(file), 0); +assert.strictEqual(Module._stat(directory), 1); +assert.ok(Module._stat(doesNotExist) < 0); +assert.strictEqual(Module._stat(file), 0); const vfsDirectory = join(process.execPath, 'directory'); const vfsDoesNotExist = join(process.execPath, 'does-not-exist'); const vfsFile = join(process.execPath, 'file.js'); -ok(Module._stat(vfsDirectory) < 0); -ok(Module._stat(vfsDoesNotExist) < 0); -ok(Module._stat(vfsFile) 
< 0); +assert.ok(Module._stat(vfsDirectory) < 0); +assert.ok(Module._stat(vfsDoesNotExist) < 0); +assert.ok(Module._stat(vfsFile) < 0); -deepStrictEqual(require(file), { a: 'b' }); -throws(() => require(vfsFile), { code: 'MODULE_NOT_FOUND' }); +assert.deepStrictEqual(require(file), { a: 'b' }); +assert.throws(() => require(vfsFile), { code: 'MODULE_NOT_FOUND' }); common.expectWarning( 'ExperimentalWarning', @@ -75,15 +75,15 @@ fs.realpathSync = function realpathSync(pathArgument, options) { return pathArgument; }; -strictEqual(Module._stat(directory), 1); -ok(Module._stat(doesNotExist) < 0); -strictEqual(Module._stat(file), 0); +assert.strictEqual(Module._stat(directory), 1); +assert.ok(Module._stat(doesNotExist) < 0); +assert.strictEqual(Module._stat(file), 0); -strictEqual(Module._stat(vfsDirectory), 1); -ok(Module._stat(vfsDoesNotExist) < 0); -strictEqual(Module._stat(vfsFile), 0); +assert.strictEqual(Module._stat(vfsDirectory), 1); +assert.ok(Module._stat(vfsDoesNotExist) < 0); +assert.strictEqual(Module._stat(vfsFile), 0); -strictEqual(Module._stat(process.execPath), 1); +assert.strictEqual(Module._stat(process.execPath), 1); -deepStrictEqual(require(file), { a: 'b' }); -deepStrictEqual(require(vfsFile), { x: 'y' }); +assert.deepStrictEqual(require(file), { a: 'b' }); +assert.deepStrictEqual(require(vfsFile), { x: 'y' }); diff --git a/test/parallel/test-vm-module-basic.js b/test/parallel/test-vm-module-basic.js index 83695715fd595c..69c67f7c1c1fc5 100644 --- a/test/parallel/test-vm-module-basic.js +++ b/test/parallel/test-vm-module-basic.js @@ -166,13 +166,13 @@ const util = require('util'); const module = new SyntheticModule([], () => {}); module.link(() => {}); const f = compileFunction('return import("x")', [], { - importModuleDynamically(specifier, referrer) { + importModuleDynamically: common.mustCall((specifier, referrer) => { assert.strictEqual(specifier, 'x'); assert.strictEqual(referrer, f); return module; - }, + }), }); f().then((ns) => { assert.strictEqual(ns, module.namespace); - }); + }).then(common.mustCall()); } diff --git a/test/parallel/test-vm-module-link.js b/test/parallel/test-vm-module-link.js index 26dcb69885ba3c..b9b7b8385ca652 100644 --- a/test/parallel/test-vm-module-link.js +++ b/test/parallel/test-vm-module-link.js @@ -144,7 +144,7 @@ async function asserts() { const m = new SourceTextModule(` import "foo" with { n1: 'v1', n2: 'v2' }; `, { identifier: 'm' }); - await m.link((s, r, p) => { + await m.link(common.mustCall((s, r, p) => { assert.strictEqual(s, 'foo'); assert.strictEqual(r.identifier, 'm'); assert.strictEqual(p.attributes.n1, 'v1'); @@ -152,7 +152,7 @@ async function asserts() { assert.strictEqual(p.attributes.n2, 'v2'); assert.strictEqual(p.assert.n2, 'v2'); return new SourceTextModule(''); - }); + })); } const finished = common.mustCall(); diff --git a/test/parallel/test-watch-file-shared-dependency.mjs b/test/parallel/test-watch-file-shared-dependency.mjs index 3889bc4d074865..08fbf38177df1d 100644 --- a/test/parallel/test-watch-file-shared-dependency.mjs +++ b/test/parallel/test-watch-file-shared-dependency.mjs @@ -34,7 +34,7 @@ describe('watch file with shared dependency', () => { const controller = new AbortController(); const watcher = new FilesWatcher({ signal: controller.signal }); - watcher.on('changed', ({ owners }) => { + watcher.on('changed', common.mustCall(({ owners }) => { if (owners.size !== 2) return; // If this code is never reached the test times out. 
@@ -42,7 +42,7 @@ describe('watch file with shared dependency', () => { assert.ok(owners.has(fixturePaths['test-2.js'])); controller.abort(); done(); - }); + })); watcher.filterFile(fixturePaths['test.js']); watcher.filterFile(fixturePaths['test-2.js']); watcher.filterFile(fixturePaths['dependency.js'], fixturePaths['test.js']); diff --git a/test/parallel/test-weakref.js b/test/parallel/test-weakref.js index ca7485aaa6a40c..f8f37bc2ed72d2 100644 --- a/test/parallel/test-weakref.js +++ b/test/parallel/test-weakref.js @@ -2,12 +2,12 @@ // Flags: --expose-gc -require('../common'); +const common = require('../common'); const assert = require('assert'); const w = new globalThis.WeakRef({}); -setTimeout(() => { +setTimeout(common.mustCall(() => { globalThis.gc(); assert.strictEqual(w.deref(), undefined); -}, 200); +}), 200); diff --git a/test/parallel/test-web-locks-query.js b/test/parallel/test-web-locks-query.js index 962abd92f130dd..116e0c23bb494c 100644 --- a/test/parallel/test-web-locks-query.js +++ b/test/parallel/test-web-locks-query.js @@ -1,6 +1,6 @@ 'use strict'; -require('../common'); +const common = require('../common'); const { describe, it } = require('node:test'); const assert = require('node:assert'); const { Worker } = require('worker_threads'); @@ -26,7 +26,7 @@ describe('Web Locks - query missing WPT tests', () => { worker.once('message', resolve); }); - await navigator.locks.request('different-contexts-resource', { mode: 'shared' }, async (lock) => { + await navigator.locks.request('different-contexts-resource', { mode: 'shared' }, common.mustCall(async (lock) => { const state = await navigator.locks.query(); const heldLocks = state.held.filter((l) => l.name === 'different-contexts-resource'); @@ -35,7 +35,7 @@ describe('Web Locks - query missing WPT tests', () => { assert.notStrictEqual(mainClientId, workerResult.clientId); worker.postMessage('release'); - }); + })); await worker.terminate(); }); @@ -67,7 +67,7 @@ describe('Web Locks - query missing WPT tests', () => { }); assert.strictEqual(step1.acquired, 'resource1'); - await navigator.locks.request('deadlock-resource-2', async (lock2) => { + await navigator.locks.request('deadlock-resource-2', common.mustCall(async (lock2) => { worker.postMessage('try-resource2'); const step2 = await new Promise((resolve) => { @@ -85,7 +85,7 @@ describe('Web Locks - query missing WPT tests', () => { assert(resource2Lock); worker.postMessage('release'); - }); + })); await worker.terminate(); }); diff --git a/test/parallel/test-web-locks.js b/test/parallel/test-web-locks.js index e938bf89f3a52b..9b0ab92e7c409d 100644 --- a/test/parallel/test-web-locks.js +++ b/test/parallel/test-web-locks.js @@ -26,10 +26,10 @@ describe('Web Locks with worker threads', () => { assert.strictEqual(result.success, true); await worker.terminate(); - await navigator.locks.request('exclusive-test', async (lock) => { + await navigator.locks.request('exclusive-test', common.mustCall(async (lock) => { assert.strictEqual(lock.mode, 'exclusive'); assert.strictEqual(lock.name, 'exclusive-test'); - }); + })); }); it('should handle shared locks', async () => { @@ -48,41 +48,43 @@ describe('Web Locks with worker threads', () => { }); assert.strictEqual(result.success, true); - await navigator.locks.request('shared-test', { mode: 'shared' }, async (lock1) => { - await navigator.locks.request('shared-test', { mode: 'shared' }, async (lock2) => { + await navigator.locks.request('shared-test', { mode: 'shared' }, common.mustCall(async (lock1) => { + await 
navigator.locks.request('shared-test', { mode: 'shared' }, common.mustCall(async (lock2) => { assert.strictEqual(lock1.mode, 'shared'); assert.strictEqual(lock2.mode, 'shared'); - }); - }); + })); + })); await worker.terminate(); }); it('should handle steal option - no existing lock', async () => { - await navigator.locks.request('steal-simple', { steal: true }, async (lock) => { + await navigator.locks.request('steal-simple', { steal: true }, common.mustCall(async (lock) => { assert.strictEqual(lock.name, 'steal-simple'); assert.strictEqual(lock.mode, 'exclusive'); - }); + })); }); it('should handle steal option - existing lock', async () => { let originalLockRejected = false; - const originalLockPromise = navigator.locks.request('steal-target', async (lock) => { + const originalLockPromise = navigator.locks.request('steal-target', common.mustCall(async (lock) => { assert.strictEqual(lock.name, 'steal-target'); return 'original-completed'; - }).catch((err) => { + })).catch(common.mustCall((err) => { originalLockRejected = true; assert.strictEqual(err.name, 'AbortError'); assert.strictEqual(err.message, 'The operation was aborted'); return 'original-rejected'; - }); + })); - const stealResult = await navigator.locks.request('steal-target', { steal: true }, async (stolenLock) => { - assert.strictEqual(stolenLock.name, 'steal-target'); - assert.strictEqual(stolenLock.mode, 'exclusive'); - return 'steal-completed'; - }); + const stealResult = await navigator.locks.request( + 'steal-target', { steal: true }, + common.mustCall(async (stolenLock) => { + assert.strictEqual(stolenLock.name, 'steal-target'); + assert.strictEqual(stolenLock.mode, 'exclusive'); + return 'steal-completed'; + })); assert.strictEqual(stealResult, 'steal-completed'); @@ -92,7 +94,7 @@ describe('Web Locks with worker threads', () => { }); it('should handle ifAvailable option', async () => { - await navigator.locks.request('ifavailable-test', async () => { + await navigator.locks.request('ifavailable-test', common.mustCall(async () => { const result = await navigator.locks.request('ifavailable-test', { ifAvailable: true }, (lock) => { return lock; // should be null }); @@ -105,7 +107,7 @@ describe('Web Locks with worker threads', () => { }); assert.strictEqual(availableResult, true); - }); + })); }); it('should handle AbortSignal', async () => { @@ -179,18 +181,18 @@ describe('Web Locks with worker threads', () => { const als = new AsyncLocalStorage(); const store = { id: 'lock' }; - als.run(store, () => { + als.run(store, common.mustCall(() => { navigator.locks - .request('als-context-test', async () => { + .request('als-context-test', common.mustCall(async () => { assert.strictEqual(als.getStore(), store); - }) + })) .then(common.mustCall()); - }); + })); }); it('should clean up when worker is terminated with a pending lock', async () => { // Acquire the lock in the main thread so that the worker's request will be pending - await navigator.locks.request('cleanup-test', async () => { + await navigator.locks.request('cleanup-test', common.mustCall(async () => { // Launch a worker that requests the same lock const worker = new Worker(` const { parentPort } = require('worker_threads'); @@ -212,11 +214,11 @@ describe('Web Locks with worker threads', () => { await worker.terminate(); - }); + })); // Request the lock again to make sure cleanup succeeded - await navigator.locks.request('cleanup-test', async (lock) => { + await navigator.locks.request('cleanup-test', common.mustCall(async (lock) => { assert.strictEqual(lock.name, 
'cleanup-test'); - }); + })); }); }); diff --git a/test/parallel/test-webcrypto-cryptokey-workers.js b/test/parallel/test-webcrypto-cryptokey-workers.js index 4de221ec6e822a..5c96a66f639109 100644 --- a/test/parallel/test-webcrypto-cryptokey-workers.js +++ b/test/parallel/test-webcrypto-cryptokey-workers.js @@ -30,10 +30,10 @@ async function doSig(key) { } if (process.env.HAS_STARTED_WORKER) { - return parentPort.once('message', (key) => { + return parentPort.once('message', common.mustCall((key) => { assert.strictEqual(key.algorithm.name, 'HMAC'); doSig(key).then(common.mustCall()); - }); + })); } // Don't use isMainThread to allow running this test inside a worker. diff --git a/test/parallel/test-webcrypto-digest.js b/test/parallel/test-webcrypto-digest.js index e91214047dea43..4d22006937f8cb 100644 --- a/test/parallel/test-webcrypto-digest.js +++ b/test/parallel/test-webcrypto-digest.js @@ -69,10 +69,10 @@ const kData = (new TextEncoder()).encode('hello'); // Compare that the legacy crypto API and SubtleCrypto API // produce the same results - (await values).forEach((v) => { + for (const v of await values) { assert(v instanceof ArrayBuffer); assert.strictEqual(checkValue, Buffer.from(v).toString('hex')); - }); + } })); })().then(common.mustCall()); diff --git a/test/parallel/test-webstreams-clone-unref.js b/test/parallel/test-webstreams-clone-unref.js index 88a9cebd9c3046..78974fc8a0a00c 100644 --- a/test/parallel/test-webstreams-clone-unref.js +++ b/test/parallel/test-webstreams-clone-unref.js @@ -1,7 +1,7 @@ 'use strict'; require('../common'); -const { ok } = require('node:assert'); +const assert = require('node:assert'); // This test verifies that cloned ReadableStream and WritableStream instances // do not keep the process alive. The test fails if it timesout (it should just @@ -12,5 +12,5 @@ const ws1 = new WritableStream(); const [rs2, ws2] = structuredClone([rs1, ws1], { transfer: [rs1, ws1] }); -ok(rs2 instanceof ReadableStream); -ok(ws2 instanceof WritableStream); +assert.ok(rs2 instanceof ReadableStream); +assert.ok(ws2 instanceof WritableStream); diff --git a/test/parallel/test-webstreams-compose.js b/test/parallel/test-webstreams-compose.js index 5514d12bd02eb0..c9c65fcb47ddde 100644 --- a/test/parallel/test-webstreams-compose.js +++ b/test/parallel/test-webstreams-compose.js @@ -328,9 +328,9 @@ const { ) .on('data', common.mustNotCall()) .on('end', common.mustNotCall()) - .on('error', (err) => { + .on('error', common.mustCall((err) => { assert.strictEqual(err?.message, 'asd'); - }) + })) .end('xyz'); } @@ -350,9 +350,9 @@ const { ) .on('data', common.mustNotCall()) .on('end', common.mustNotCall()) - .on('error', (err) => { + .on('error', common.mustCall((err) => { assert.strictEqual(err?.message, 'asd'); - }) + })) .end('xyz'); } @@ -378,9 +378,9 @@ const { ) .on('data', common.mustNotCall()) .on('end', common.mustNotCall()) - .on('error', (err) => { + .on('error', common.mustCall((err) => { assert.strictEqual(err?.message, 'asd'); - }) + })) .end('xyz'); } @@ -398,9 +398,9 @@ const { ) .on('data', common.mustNotCall()) .on('end', common.mustNotCall()) - .on('error', (err) => { + .on('error', common.mustCall((err) => { assert.strictEqual(err?.message, 'asd'); - }) + })) .end('xyz'); } @@ -418,9 +418,9 @@ const { ) .on('data', common.mustNotCall()) .on('end', common.mustNotCall()) - .on('error', (err) => { + .on('error', common.mustCall((err) => { assert.strictEqual(err?.message, 'asd'); - }) + })) .end('xyz'); } @@ -437,9 +437,7 @@ const { ) .on('data', 
common.mustNotCall()) .on('end', common.mustNotCall()) - .on('error', (err) => { - assert.strictEqual(err?.message, 'asd'); - }) + .on('error', common.mustNotCall()) .end('xyz'); } @@ -456,9 +454,9 @@ const { }) }) ) - .on('error', (err) => { + .on('error', common.mustCall((err) => { assert.strictEqual(err?.message, 'asd'); - }) + })) .end('xyz'); } @@ -477,7 +475,7 @@ const { async function(source) { throw new Error('asd'); } - ).on('error', (err) => { + ).on('error', common.mustCall((err) => { assert.strictEqual(err?.message, 'asd'); - }).end('xyz'); + })).end('xyz'); } diff --git a/test/parallel/test-webstreams-finished.js b/test/parallel/test-webstreams-finished.js index 2a19c1ebae36d0..15c08400faa6be 100644 --- a/test/parallel/test-webstreams-finished.js +++ b/test/parallel/test-webstreams-finished.js @@ -180,9 +180,9 @@ const { finished: finishedPromise } = require('stream/promises'); })); const writer = ws.getWriter(); - writer.write('asd').catch((err) => { - assert.strictEqual(err?.message, 'asd'); - }); + assert.rejects(writer.write('asd'), { + message: 'asd', + }).then(common.mustCall()); } { @@ -221,14 +221,14 @@ const { finished: finishedPromise } = require('stream/promises'); } }); - finishedPromise(ws).then(common.mustNotCall()).catch(common.mustCall((err) => { - assert.strictEqual(err?.message, 'asd'); - })); + assert.rejects(finishedPromise(ws), { + message: 'asd', + }).then(common.mustCall()); const writer = ws.getWriter(); - writer.write('asd').catch((err) => { - assert.strictEqual(err?.message, 'asd'); - }); + assert.rejects(writer.write('asd'), { + message: 'asd', + }).then(common.mustCall()); } { diff --git a/test/parallel/test-webstreams-pipeline.js b/test/parallel/test-webstreams-pipeline.js index ac673dd9d42ea7..79188d88bf54be 100644 --- a/test/parallel/test-webstreams-pipeline.js +++ b/test/parallel/test-webstreams-pipeline.js @@ -207,7 +207,7 @@ const http = require('http'); }); req.end(); const values = []; - req.on('response', (res) => { + req.on('response', common.mustCall((res) => { res.on('data', (chunk) => { values.push(chunk?.toString()); }); @@ -215,22 +215,22 @@ const http = require('http'); assert.deepStrictEqual(values, ['hello', 'world']); server.close(); })); - }); + })); })); } { const values = []; - const server = http.createServer((req, res) => { + const server = http.createServer(common.mustCall((req, res) => { const ts = new TransformStream({ transform(chunk, controller) { controller.enqueue(chunk?.toString().toUpperCase()); } }); pipeline(req, ts, res, common.mustSucceed()); - }); + })); - server.listen(0, () => { + server.listen(0, common.mustCall(() => { const req = http.request({ port: server.address().port, method: 'POST', @@ -246,7 +246,7 @@ const http = require('http'); pipeline(rs, req, common.mustSucceed()); - req.on('response', (res) => { + req.on('response', common.mustCall((res) => { res.on('data', (chunk) => { values.push(chunk?.toString()); } @@ -255,8 +255,8 @@ const http = require('http'); assert.deepStrictEqual(values, ['HELLO']); server.close(); })); - }); - }); + })); + })); } { @@ -330,9 +330,9 @@ const http = require('http'); pipeline(rs, async function(source) { throw new Error('kaboom'); - }, (err) => { + }, common.mustCall((err) => { assert.strictEqual(err?.message, 'kaboom'); - }); + })); } { diff --git a/test/parallel/test-whatwg-encoding-custom-internals.js b/test/parallel/test-whatwg-encoding-custom-internals.js index 505e623a681998..d4780c80958847 100644 --- a/test/parallel/test-whatwg-encoding-custom-internals.js +++ 
b/test/parallel/test-whatwg-encoding-custom-internals.js @@ -278,12 +278,10 @@ const { getEncodingFromLabel } = require('internal/encoding'); ], 'x-user-defined': [] }; - Object.entries(mappings).forEach((i) => { - const enc = i[0]; - const labels = i[1]; + for (const [enc, labels] of Object.entries(mappings)) { assert.strictEqual(getEncodingFromLabel(enc), enc); labels.forEach((l) => assert.strictEqual(getEncodingFromLabel(l), enc)); - }); + } assert.strictEqual(getEncodingFromLabel('made-up'), undefined); } diff --git a/test/parallel/test-whatwg-encoding-custom-textdecoder-streaming.js b/test/parallel/test-whatwg-encoding-custom-textdecoder-streaming.js index 5484929326254d..2998b7ece0428a 100644 --- a/test/parallel/test-whatwg-encoding-custom-textdecoder-streaming.js +++ b/test/parallel/test-whatwg-encoding-custom-textdecoder-streaming.js @@ -21,9 +21,8 @@ const octets = { 0xFF, 0xDF] }; -Object.keys(octets).forEach((encoding) => { +for (const [encoding, encoded] of Object.entries(octets)) { for (let len = 1; len <= 5; ++len) { - const encoded = octets[encoding]; const decoder = new TextDecoder(encoding); let out = ''; for (let i = 0; i < encoded.length; i += len) { @@ -35,4 +34,4 @@ Object.keys(octets).forEach((encoding) => { out += decoder.decode(); assert.strictEqual(out, string); } -}); +} diff --git a/test/parallel/test-whatwg-events-add-event-listener-options-passive.js b/test/parallel/test-whatwg-events-add-event-listener-options-passive.js index 0299c7fa5fb0cc..7f37ecc497f9f9 100644 --- a/test/parallel/test-whatwg-events-add-event-listener-options-passive.js +++ b/test/parallel/test-whatwg-events-add-event-listener-options-passive.js @@ -5,11 +5,7 @@ require('../common'); // Manually converted from https://github.com/web-platform-tests/wpt/blob/master/dom/events/AddEventListenerOptions-passive.html // in order to define the `document` ourselves -const { - fail, - ok, - strictEqual -} = require('assert'); +const assert = require('assert'); { const document = new EventTarget(); @@ -20,17 +16,17 @@ const { return false; }, get dummy() { - fail('dummy value getter invoked'); + assert.fail('dummy value getter invoked'); return false; } }; document.addEventListener('test_event', null, query_options); - ok(supportsPassive); + assert.ok(supportsPassive); supportsPassive = false; document.removeEventListener('test_event', null, query_options); - strictEqual(supportsPassive, false); + assert.strictEqual(supportsPassive, false); } { function testPassiveValue(optionsValue, expectedDefaultPrevented) { @@ -38,7 +34,7 @@ const { let defaultPrevented; function handler(e) { if (e.defaultPrevented) { - fail('Event prematurely marked defaultPrevented'); + assert.fail('Event prematurely marked defaultPrevented'); } e.preventDefault(); defaultPrevented = e.defaultPrevented; @@ -49,8 +45,8 @@ const { const ev = new Event('test', { bubbles: true, cancelable: true }); const uncanceled = document.dispatchEvent(ev); - strictEqual(defaultPrevented, expectedDefaultPrevented); - strictEqual(uncanceled, !expectedDefaultPrevented); + assert.strictEqual(defaultPrevented, expectedDefaultPrevented); + assert.strictEqual(uncanceled, !expectedDefaultPrevented); document.removeEventListener('test', handler, optionsValue); } diff --git a/test/parallel/test-whatwg-events-add-event-listener-options-signal.js b/test/parallel/test-whatwg-events-add-event-listener-options-signal.js index 460d2ee3f27652..27aa5c62a4c0ca 100644 --- a/test/parallel/test-whatwg-events-add-event-listener-options-signal.js +++ 
b/test/parallel/test-whatwg-events-add-event-listener-options-signal.js @@ -1,11 +1,8 @@ 'use strict'; -require('../common'); +const common = require('../common'); -const { - strictEqual, - throws, -} = require('assert'); +const assert = require('node:assert'); // Manually ported from: wpt@dom/events/AddEventListenerOptions-signal.any.js @@ -13,134 +10,98 @@ const { // Passing an AbortSignal to addEventListener does not prevent // removeEventListener let count = 0; - function handler() { + const handler = common.mustCall(() => { count++; - } + }, 2); const et = new EventTarget(); const controller = new AbortController(); et.addEventListener('test', handler, { signal: controller.signal }); et.dispatchEvent(new Event('test')); - strictEqual(count, 1, 'Adding a signal still adds a listener'); + assert.strictEqual(count, 1); et.dispatchEvent(new Event('test')); - strictEqual(count, 2, 'The listener was not added with the once flag'); controller.abort(); et.dispatchEvent(new Event('test')); - strictEqual(count, 2, 'Aborting on the controller removes the listener'); // See: https://github.com/nodejs/node/pull/37696 , adding an event listener // should always return undefined. - strictEqual( + assert.strictEqual( et.addEventListener('test', handler, { signal: controller.signal }), undefined); et.dispatchEvent(new Event('test')); - strictEqual(count, 2, 'Passing an aborted signal never adds the handler'); } { // Passing an AbortSignal to addEventListener works with the once flag - let count = 0; - function handler() { - count++; - } + const handler = common.mustNotCall(); const et = new EventTarget(); const controller = new AbortController(); et.addEventListener('test', handler, { signal: controller.signal }); et.removeEventListener('test', handler); et.dispatchEvent(new Event('test')); - strictEqual(count, 0, 'The listener was still removed'); } { // Removing a once listener works with a passed signal - let count = 0; - function handler() { - count++; - } const et = new EventTarget(); const controller = new AbortController(); const options = { signal: controller.signal, once: true }; - et.addEventListener('test', handler, options); + et.addEventListener('test', common.mustNotCall(), options); controller.abort(); et.dispatchEvent(new Event('test')); - strictEqual(count, 0, 'The listener was still removed'); } { - let count = 0; - function handler() { - count++; - } + const handler = common.mustNotCall(); const et = new EventTarget(); const controller = new AbortController(); const options = { signal: controller.signal, once: true }; et.addEventListener('test', handler, options); et.removeEventListener('test', handler); et.dispatchEvent(new Event('test')); - strictEqual(count, 0, 'The listener was still removed'); } { // Passing an AbortSignal to multiple listeners - let count = 0; - function handler() { - count++; - } const et = new EventTarget(); const controller = new AbortController(); const options = { signal: controller.signal, once: true }; - et.addEventListener('first', handler, options); - et.addEventListener('second', handler, options); + et.addEventListener('first', common.mustNotCall(), options); + et.addEventListener('second', common.mustNotCall(), options); controller.abort(); et.dispatchEvent(new Event('first')); et.dispatchEvent(new Event('second')); - strictEqual(count, 0, 'The listener was still removed'); } { // Passing an AbortSignal to addEventListener works with the capture flag - let count = 0; - function handler() { - count++; - } const et = new EventTarget(); const 
controller = new AbortController(); const options = { signal: controller.signal, capture: true }; - et.addEventListener('test', handler, options); + et.addEventListener('test', common.mustNotCall(), options); controller.abort(); et.dispatchEvent(new Event('test')); - strictEqual(count, 0, 'The listener was still removed'); } { // Aborting from a listener does not call future listeners - let count = 0; - function handler() { - count++; - } const et = new EventTarget(); const controller = new AbortController(); const options = { signal: controller.signal }; et.addEventListener('test', () => { controller.abort(); }, options); - et.addEventListener('test', handler, options); + et.addEventListener('test', common.mustNotCall(), options); et.dispatchEvent(new Event('test')); - strictEqual(count, 0, 'The listener was still removed'); } { // Adding then aborting a listener in another listener does not call it - let count = 0; - function handler() { - count++; - } const et = new EventTarget(); const controller = new AbortController(); - et.addEventListener('test', () => { - et.addEventListener('test', handler, { signal: controller.signal }); + et.addEventListener('test', common.mustCall(() => { + et.addEventListener('test', common.mustNotCall(), { signal: controller.signal }); controller.abort(); - }, { signal: controller.signal }); + }), { signal: controller.signal }); et.dispatchEvent(new Event('test')); - strictEqual(count, 0, 'The listener was still removed'); } { @@ -161,7 +122,7 @@ const { { const et = new EventTarget(); [1, 1n, {}, [], null, true, 'hi', Symbol(), () => {}].forEach((signal) => { - throws(() => et.addEventListener('foo', () => {}, { signal }), { + assert.throws(() => et.addEventListener('foo', () => {}, { signal }), { name: 'TypeError', }); }); diff --git a/test/parallel/test-whatwg-events-customevent.js b/test/parallel/test-whatwg-events-customevent.js index e21ea1783f8998..75650e47a41f18 100644 --- a/test/parallel/test-whatwg-events-customevent.js +++ b/test/parallel/test-whatwg-events-customevent.js @@ -2,7 +2,7 @@ const common = require('../common'); -const { strictEqual, throws, equal } = require('assert'); +const assert = require('assert'); // Manually converted from https://github.com/web-platform-tests/wpt/blob/master/dom/events/CustomEvent.html // in order to define the `document` ourselves @@ -12,22 +12,22 @@ const { strictEqual, throws, equal } = require('assert'); const target = new EventTarget(); target.addEventListener(type, common.mustCall((evt) => { - strictEqual(evt.type, type); + assert.strictEqual(evt.type, type); })); target.dispatchEvent(new Event(type)); } { - throws(() => { + assert.throws(() => { new Event(); }, TypeError); } { const event = new Event('foo'); - equal(event.type, 'foo'); - equal(event.bubbles, false); - equal(event.cancelable, false); - equal(event.detail, null); + assert.strictEqual(event.type, 'foo'); + assert.strictEqual(event.bubbles, false); + assert.strictEqual(event.cancelable, false); + assert.strictEqual(event.detail, undefined); } diff --git a/test/parallel/test-whatwg-readablebytestream.js b/test/parallel/test-whatwg-readablebytestream.js index f7f2179c60c560..2207cb0cf43b8f 100644 --- a/test/parallel/test-whatwg-readablebytestream.js +++ b/test/parallel/test-whatwg-readablebytestream.js @@ -86,6 +86,7 @@ class Source { async pull(controller) { const byobRequest = controller.byobRequest; + // eslint-disable-next-line node-core/must-call-assert assert.match(inspect(byobRequest), /ReadableStreamBYOBRequest/); const view = 
byobRequest.view; @@ -102,15 +103,18 @@ class Source { this.controller.close(); } + // eslint-disable-next-line node-core/must-call-assert assert.throws(() => byobRequest.respondWithNewView({}), { code: 'ERR_INVALID_ARG_TYPE', }); byobRequest.respond(bytesRead); + // eslint-disable-next-line node-core/must-call-assert assert.throws(() => byobRequest.respond(bytesRead), { code: 'ERR_INVALID_STATE', }); + // eslint-disable-next-line node-core/must-call-assert assert.throws(() => byobRequest.respondWithNewView(view), { code: 'ERR_INVALID_STATE', }); diff --git a/test/parallel/test-whatwg-readablestream.js b/test/parallel/test-whatwg-readablestream.js index 9af751ddd02958..840ce2bcf26825 100644 --- a/test/parallel/test-whatwg-readablestream.js +++ b/test/parallel/test-whatwg-readablestream.js @@ -270,10 +270,10 @@ const { } }); - setImmediate(() => { + setImmediate(common.mustCall(() => { assert.strictEqual(r[kState].state, 'errored'); assert.match(r[kState].storedError?.message, /boom/); - }); + })); } { @@ -774,9 +774,9 @@ assert.throws(() => { const error2 = new Error('boom2'); const stream = new ReadableStream({ - cancel(reason) { + cancel: common.mustCall((reason) => { assert.deepStrictEqual(reason, [error1, error2]); - } + }), }); const { 0: s1, 1: s2 } = stream.tee(); @@ -789,9 +789,9 @@ assert.throws(() => { const error2 = new Error('boom2'); const stream = new ReadableStream({ - cancel(reason) { + cancel: common.mustCall((reason) => { assert.deepStrictEqual(reason, [error1, error2]); - } + }), }); const { 0: s1, 1: s2 } = stream.tee(); @@ -1106,17 +1106,20 @@ assert.throws(() => { cancelCalled = false; start(controller) { + // eslint-disable-next-line node-core/must-call-assert assert.strictEqual(this, source); this.startCalled = true; controller.enqueue('a'); } pull() { + // eslint-disable-next-line node-core/must-call-assert assert.strictEqual(this, source); this.pullCalled = true; } cancel() { + // eslint-disable-next-line node-core/must-call-assert assert.strictEqual(this, source); this.cancelCalled = true; } @@ -1138,33 +1141,27 @@ assert.throws(() => { } { - let startCalled = false; new ReadableStream({ - start(controller) { + start: common.mustCall((controller) => { assert.strictEqual(controller.desiredSize, 10); controller.close(); assert.strictEqual(controller.desiredSize, 0); - startCalled = true; - } + }), }, { - highWaterMark: 10 + highWaterMark: 10, }); - assert(startCalled); } { - let startCalled = false; new ReadableStream({ - start(controller) { + start: common.mustCall((controller) => { assert.strictEqual(controller.desiredSize, 10); controller.error(); assert.strictEqual(controller.desiredSize, null); - startCalled = true; - } + }), }, { - highWaterMark: 10 + highWaterMark: 10, }); - assert(startCalled); } { @@ -1176,7 +1173,7 @@ assert.throws(() => { { let startCalled = false; new ReadableStream({ - start(controller) { + start: common.mustCall((controller) => { assert.strictEqual(controller.desiredSize, 1); controller.enqueue('a'); assert.strictEqual(controller.desiredSize, 0); @@ -1187,7 +1184,7 @@ assert.throws(() => { controller.enqueue('a'); assert.strictEqual(controller.desiredSize, -3); startCalled = true; - } + }), }); assert(startCalled); } diff --git a/test/parallel/test-whatwg-webstreams-compression.js b/test/parallel/test-whatwg-webstreams-compression.js index bf87696eed1b2f..68cc097037792a 100644 --- a/test/parallel/test-whatwg-webstreams-compression.js +++ b/test/parallel/test-whatwg-webstreams-compression.js @@ -24,13 +24,13 @@ async function 
test(format) { const writer = gzip.writable.getWriter(); const compressed_data = []; - const reader_function = ({ value, done }) => { + const reader_function = common.mustCallAtLeast(({ value, done }) => { if (value) compressed_data.push(value); if (!done) return reader.read().then(reader_function); assert.strictEqual(dec.decode(Buffer.concat(compressed_data)), 'hello'); - }; + }); const reader_promise = reader.read().then(reader_function); await Promise.all([ diff --git a/test/parallel/test-whatwg-webstreams-transfer.js b/test/parallel/test-whatwg-webstreams-transfer.js index 7be01c339652c0..6b1358f131c4af 100644 --- a/test/parallel/test-whatwg-webstreams-transfer.js +++ b/test/parallel/test-whatwg-webstreams-transfer.js @@ -321,14 +321,14 @@ const theData = 'hello'; }), }); - port1.onmessage = ({ data }) => { + port1.onmessage = common.mustCall(({ data }) => { const reader = data.getReader(); assert.rejects(reader.read(), { code: 25, name: 'DataCloneError', }).then(common.mustCall()); port1.close(); - }; + }); port2.postMessage(readable, [readable]); diff --git a/test/parallel/test-whatwg-writablestream.js b/test/parallel/test-whatwg-writablestream.js index 32985660b2a2c5..88d9c57b9de752 100644 --- a/test/parallel/test-whatwg-writablestream.js +++ b/test/parallel/test-whatwg-writablestream.js @@ -287,5 +287,5 @@ class Sink { writer.abort(new Error('boom')); assert.strictEqual(writer.desiredSize, null); - setImmediate(() => assert.strictEqual(writer.desiredSize, null)); + setImmediate(common.mustCall(() => assert.strictEqual(writer.desiredSize, null))); } diff --git a/test/parallel/test-worker-arraybuffer-zerofill.js b/test/parallel/test-worker-arraybuffer-zerofill.js index 29d041fd557f9d..bdbc821775fcf3 100644 --- a/test/parallel/test-worker-arraybuffer-zerofill.js +++ b/test/parallel/test-worker-arraybuffer-zerofill.js @@ -35,9 +35,9 @@ describe('Allocating uninitialized ArrayBuffers ...', () => { assert(fn.mock.calls.length > 0); })); - w.on('message', (sum) => { + w.on('message', common.mustCallAtLeast((sum) => { assert.strictEqual(sum, 0); if (countdown.remaining) countdown.dec(); - }); + })); }); }); diff --git a/test/parallel/test-worker-debug.js b/test/parallel/test-worker-debug.js index da8e26b39ca453..df84560c201160 100644 --- a/test/parallel/test-worker-debug.js +++ b/test/parallel/test-worker-debug.js @@ -105,6 +105,7 @@ class WorkerSession extends EventEmitter { this.post(command); const notification = await notificationPromise; const callFrame = notification.params.callFrames[0]; + // eslint-disable-next-line node-core/must-call-assert assert.strictEqual(callFrame.location.lineNumber, line); } diff --git a/test/parallel/test-worker-environmentdata.js b/test/parallel/test-worker-environmentdata.js index aef0e1213ff6af..84eb4c14cb7648 100644 --- a/test/parallel/test-worker-environmentdata.js +++ b/test/parallel/test-worker-environmentdata.js @@ -11,26 +11,23 @@ const { const { assignEnvironmentData } = require('internal/worker'); -const { - deepStrictEqual, - strictEqual, -} = require('assert'); +const assert = require('assert'); if (!process.env.HAS_STARTED_WORKER) { process.env.HAS_STARTED_WORKER = 1; setEnvironmentData('foo', 'bar'); setEnvironmentData('hello', { value: 'world' }); setEnvironmentData(1, 2); - strictEqual(getEnvironmentData(1), 2); + assert.strictEqual(getEnvironmentData(1), 2); setEnvironmentData(1); // Delete it, key won't show up in the worker. new Worker(__filename); setEnvironmentData('hello'); // Delete it. Has no impact on the worker. 
} else { - strictEqual(getEnvironmentData('foo'), 'bar'); - deepStrictEqual(getEnvironmentData('hello'), { value: 'world' }); - strictEqual(getEnvironmentData(1), undefined); + assert.strictEqual(getEnvironmentData('foo'), 'bar'); + assert.deepStrictEqual(getEnvironmentData('hello'), { value: 'world' }); + assert.strictEqual(getEnvironmentData(1), undefined); assignEnvironmentData(undefined); // It won't setup any key. - strictEqual(getEnvironmentData(undefined), undefined); + assert.strictEqual(getEnvironmentData(undefined), undefined); // Recurse to make sure the environment data is inherited if (threadId <= 2) diff --git a/test/parallel/test-worker-exit-code.js b/test/parallel/test-worker-exit-code.js index 738a8b038e8285..c9b84d7b1c348d 100644 --- a/test/parallel/test-worker-exit-code.js +++ b/test/parallel/test-worker-exit-code.js @@ -25,7 +25,7 @@ if (!process.env.HAS_STARTED_WORKER) { } function parent() { - const test = (arg, name = 'worker', exit, error = null) => { + const test = common.mustCall((arg, name = 'worker', exit, error = null) => { const w = new Worker(__filename); w.on('exit', common.mustCall((code) => { assert.strictEqual( @@ -40,7 +40,7 @@ function parent() { })); } w.postMessage(arg); - }; + }, testCases.length); testCases.forEach((tc, i) => test(i, tc.func.name, tc.result, tc.error)); } diff --git a/test/parallel/test-worker-hasref.js b/test/parallel/test-worker-hasref.js index 51593b14725f5b..936a144bebba51 100644 --- a/test/parallel/test-worker-hasref.js +++ b/test/parallel/test-worker-hasref.js @@ -3,7 +3,7 @@ const common = require('../common'); const { Worker } = require('worker_threads'); const { createHook } = require('async_hooks'); -const { strictEqual } = require('assert'); +const assert = require('assert'); let handle; @@ -18,16 +18,16 @@ createHook({ const w = new Worker('', { eval: true }); -strictEqual(handle.hasRef(), true); +assert.strictEqual(handle.hasRef(), true); w.unref(); -strictEqual(handle.hasRef(), false); +assert.strictEqual(handle.hasRef(), false); w.ref(); -strictEqual(handle.hasRef(), true); +assert.strictEqual(handle.hasRef(), true); w.on('exit', common.mustCall((exitCode) => { - strictEqual(exitCode, 0); - strictEqual(handle.hasRef(), true); + assert.strictEqual(exitCode, 0); + assert.strictEqual(handle.hasRef(), true); setTimeout(common.mustCall(() => { - strictEqual(handle.hasRef(), undefined); + assert.strictEqual(handle.hasRef(), undefined); }), 0); })); diff --git a/test/parallel/test-worker-init-failure.js b/test/parallel/test-worker-init-failure.js index 8233116e1e05ba..190a349b48c625 100644 --- a/test/parallel/test-worker-init-failure.js +++ b/test/parallel/test-worker-init-failure.js @@ -29,9 +29,9 @@ if (process.argv[2] === 'child') { const worker = new Worker( 'require(\'worker_threads\').parentPort.postMessage(2 + 2)', { eval: true }); - worker.on('message', (result) => { + worker.on('message', common.mustCallAtLeast((result) => { assert.strictEqual(result, 4); - }); + }, 0)); // We want to test that if there is an error in a constrained running // environment, it will be one of `ENFILE`, `EMFILE`, 'ENOENT', or @@ -40,9 +40,9 @@ if (process.argv[2] === 'child') { // `common.mustCall*` cannot be used here as in some environments // (i.e. single cpu) `ulimit` may not lead to such an error. 
- worker.on('error', (e) => { + worker.on('error', common.mustCallAtLeast((e) => { assert.ok(expected.includes(e.code), `${e.code} not expected`); - }); + }, 0)); } } else { diff --git a/test/parallel/test-worker-memory.js b/test/parallel/test-worker-memory.js index 8c38409a26bab1..9b115053aab5a4 100644 --- a/test/parallel/test-worker-memory.js +++ b/test/parallel/test-worker-memory.js @@ -35,7 +35,7 @@ function run(n, done) { const startStats = process.memoryUsage(); let finished = 0; for (let i = 0; i < numWorkers; ++i) { - run(60 / numWorkers, () => { + run(60 / numWorkers, common.mustCall(() => { console.log(`done() called (finished=${finished})`); if (++finished === numWorkers) { const finishStats = process.memoryUsage(); @@ -47,5 +47,5 @@ for (let i = 0; i < numWorkers; ++i) { 'Unexpected memory overhead: ' + util.inspect([startStats, finishStats])); } - }); + })); } diff --git a/test/parallel/test-worker-message-port-move.js b/test/parallel/test-worker-message-port-move.js index b8db31b88c7bc4..2b2c1538db835d 100644 --- a/test/parallel/test-worker-message-port-move.js +++ b/test/parallel/test-worker-message-port-move.js @@ -14,6 +14,7 @@ context.global = context; Object.assign(context, { global: context, assert, + common: { mustCall: (f) => f }, MessagePort, MessageChannel }); @@ -34,14 +35,14 @@ vm.runInContext('(' + function() { assert(!(port instanceof MessagePort)); assert.strictEqual(port.onmessage, undefined); - port.onmessage = function({ data, ports }) { + port.onmessage = common.mustCall(function({ data, ports }) { assert(data instanceof Object); assert(ports instanceof Array); assert.strictEqual(ports.length, 1); assert.strictEqual(ports[0], data.p); assert(!(data.p instanceof MessagePort)); port.postMessage({}); - }; + }); port.start(); } diff --git a/test/parallel/test-worker-message-port.js b/test/parallel/test-worker-message-port.js index 2663dde2a1b75f..baaeb23a860b23 100644 --- a/test/parallel/test-worker-message-port.js +++ b/test/parallel/test-worker-message-port.js @@ -52,12 +52,12 @@ const { MessageChannel, MessagePort } = require('worker_threads'); port1.postMessage(input); // Check that the message still gets delivered if `port2` has its // `on('message')` handler attached at a later point in time. - setImmediate(() => { + setImmediate(common.mustCall(() => { port2.on('message', common.mustCall((received) => { assert.deepStrictEqual(received, input); port2.close(common.mustCall()); })); - }); + })); } { @@ -70,16 +70,16 @@ const { MessageChannel, MessagePort } = require('worker_threads'); // `on('message')` handler attached at a later point in time, even if a // listener was removed previously. 
port2.addListener('message', dummy); - setImmediate(() => { + setImmediate(common.mustCall(() => { port2.removeListener('message', dummy); port1.postMessage(input); - setImmediate(() => { + setImmediate(common.mustCall(() => { port2.on('message', common.mustCall((received) => { assert.deepStrictEqual(received, input); port2.close(common.mustCall()); })); - }); - }); + })); + })); } { diff --git a/test/parallel/test-worker-messageport-hasref.js b/test/parallel/test-worker-messageport-hasref.js index 448787742e308d..c46feb7c66eef6 100644 --- a/test/parallel/test-worker-messageport-hasref.js +++ b/test/parallel/test-worker-messageport-hasref.js @@ -3,7 +3,7 @@ const common = require('../common'); const { Worker } = require('worker_threads'); const { createHook } = require('async_hooks'); -const { deepStrictEqual, strictEqual } = require('assert'); +const assert = require('assert'); const m = new Map(); createHook({ @@ -30,16 +30,16 @@ function getActiveWorkerAndMessagePortTypes() { } const w = new Worker('', { eval: true }); -deepStrictEqual(getActiveWorkerAndMessagePortTypes(), ['WORKER']); +assert.deepStrictEqual(getActiveWorkerAndMessagePortTypes(), ['WORKER']); w.unref(); -deepStrictEqual(getActiveWorkerAndMessagePortTypes(), []); +assert.deepStrictEqual(getActiveWorkerAndMessagePortTypes(), []); w.ref(); -deepStrictEqual(getActiveWorkerAndMessagePortTypes(), ['WORKER', 'MESSAGEPORT']); +assert.deepStrictEqual(getActiveWorkerAndMessagePortTypes(), ['WORKER', 'MESSAGEPORT']); w.on('exit', common.mustCall((exitCode) => { - strictEqual(exitCode, 0); - deepStrictEqual(getActiveWorkerAndMessagePortTypes(), ['WORKER']); + assert.strictEqual(exitCode, 0); + assert.deepStrictEqual(getActiveWorkerAndMessagePortTypes(), ['WORKER']); setTimeout(common.mustCall(() => { - deepStrictEqual(getActiveWorkerAndMessagePortTypes(), []); + assert.deepStrictEqual(getActiveWorkerAndMessagePortTypes(), []); }), 0); })); diff --git a/test/parallel/test-worker-messaging-errors-handler.js b/test/parallel/test-worker-messaging-errors-handler.js index c50c5e3bf160f7..a4a0ae033d9031 100644 --- a/test/parallel/test-worker-messaging-errors-handler.js +++ b/test/parallel/test-worker-messaging-errors-handler.js @@ -7,12 +7,12 @@ const { Worker, workerData, } = require('node:worker_threads'); -const { rejects } = require('node:assert'); +const assert = require('node:assert'); async function test() { const worker = new Worker(__filename, { workerData: { children: true } }); - await rejects(common.mustCall(function() { + await assert.rejects(common.mustCall(function() { return postMessageToThread(worker.threadId); }), { name: 'Error', diff --git a/test/parallel/test-worker-messaging-errors-invalid.js b/test/parallel/test-worker-messaging-errors-invalid.js index 48f8a3a444fe15..e55e3485e904c3 100644 --- a/test/parallel/test-worker-messaging-errors-invalid.js +++ b/test/parallel/test-worker-messaging-errors-invalid.js @@ -9,17 +9,17 @@ const { Worker, workerData, } = require('node:worker_threads'); -const { rejects } = require('node:assert'); +const assert = require('node:assert'); async function test() { - await rejects(common.mustCall(function() { + await assert.rejects(common.mustCall(function() { return postMessageToThread(threadId); }), { name: 'Error', code: 'ERR_WORKER_MESSAGING_SAME_THREAD', }); - await rejects(common.mustCall(function() { + await assert.rejects(common.mustCall(function() { return postMessageToThread(Date.now()); }), { name: 'Error', @@ -30,7 +30,7 @@ async function test() { const worker = new 
Worker(__filename, { workerData: { children: true } }); await once(worker, 'message'); - await rejects(common.mustCall(function() { + await assert.rejects(common.mustCall(function() { return postMessageToThread(worker.threadId); }), { name: 'Error', diff --git a/test/parallel/test-worker-messaging-errors-timeout.js b/test/parallel/test-worker-messaging-errors-timeout.js index a46daa01e7e84e..f069005c411027 100644 --- a/test/parallel/test-worker-messaging-errors-timeout.js +++ b/test/parallel/test-worker-messaging-errors-timeout.js @@ -6,7 +6,7 @@ const { workerData, Worker, } = require('node:worker_threads'); -const { rejects } = require('node:assert'); +const assert = require('node:assert'); const memory = new SharedArrayBuffer(4); @@ -14,7 +14,7 @@ async function test() { const worker = new Worker(__filename, { workerData: { memory, children: true } }); const array = new Int32Array(memory); - await rejects(common.mustCall(function() { + await assert.rejects(common.mustCall(function() { return postMessageToThread(worker.threadId, 0, common.platformTimeout(500)); }), { name: 'Error', diff --git a/test/parallel/test-worker-messaging.js b/test/parallel/test-worker-messaging.js index c155735f8c876a..e29258bea130d6 100644 --- a/test/parallel/test-worker-messaging.js +++ b/test/parallel/test-worker-messaging.js @@ -9,7 +9,7 @@ const { workerData, Worker, } = require('node:worker_threads'); -const { strictEqual, deepStrictEqual } = require('node:assert'); +const assert = require('node:assert'); const { once } = require('node:events'); // Spawn threads on three levels: 1 main thread, two children, four grand childrens. 7 threads total, max id = 6 @@ -45,7 +45,7 @@ async function ping() { await postMessageToThread(target, { level, port: port2 }, [port2]); port1.on('message', common.mustCall(function(message) { - deepStrictEqual(message, { message: 'pong', source: target, destination: threadId }); + assert.deepStrictEqual(message, { message: 'pong', source: target, destination: threadId }); port1.close(); if (level === 0) { @@ -59,23 +59,23 @@ async function ping() { } // Do not use mustCall here as the thread might not receive any connection request -process.on('workerMessage', ({ port, level }, source) => { +process.on('workerMessage', common.mustCallAtLeast(({ port, level }, source) => { // Let's verify the source hierarchy // Given we do depth first, the level is 1 for thread 1 and 4, 2 for other threads if (source !== mainThread) { const currentThread = source - mainThread; - strictEqual(level, (currentThread === 1 || currentThread === 4) ? 1 : 2); + assert.strictEqual(level, (currentThread === 1 || currentThread === 4) ? 1 : 2); } else { - strictEqual(level, 0); + assert.strictEqual(level, 0); } // Verify communication port.on('message', common.mustCall(function(message) { - deepStrictEqual(message, { message: 'ping', source, destination: threadId }); + assert.deepStrictEqual(message, { message: 'ping', source, destination: threadId }); port.postMessage({ message: 'pong', source: threadId, destination: source }); port.close(); })); -}); +}, 0)); async function test() { if (level < MAX_LEVEL) { @@ -101,7 +101,7 @@ async function test() { if (level > 0) { const currentThread = threadId - mainThread; - strictEqual(level, (currentThread === 1 || currentThread === 4) ? 1 : 2); + assert.strictEqual(level, (currentThread === 1 || currentThread === 4) ? 
1 : 2); parentPort.postMessage({ type: 'ready', threadId }); } else { channel.postMessage('start'); diff --git a/test/parallel/test-worker-nexttick-terminate.js b/test/parallel/test-worker-nexttick-terminate.js index 0e5d7e096c57ec..91a55e85b20388 100644 --- a/test/parallel/test-worker-nexttick-terminate.js +++ b/test/parallel/test-worker-nexttick-terminate.js @@ -19,7 +19,7 @@ common.expectWarning( 'It returns a Promise instead.', 'DEP0132'); w.on('message', common.mustCall(() => { - setTimeout(() => { + setTimeout(common.mustCall(() => { w.terminate(common.mustCall()).then(common.mustCall()); - }, 1); + }), 1); })); diff --git a/test/parallel/test-worker-workerdata-messageport.js b/test/parallel/test-worker-workerdata-messageport.js index 7a0436d485b325..a30a77805f0e4c 100644 --- a/test/parallel/test-worker-workerdata-messageport.js +++ b/test/parallel/test-worker-workerdata-messageport.js @@ -1,6 +1,6 @@ 'use strict'; -require('../common'); +const common = require('../common'); const assert = require('node:assert'); const { @@ -38,9 +38,9 @@ const meowScript = () => 'meow'; transferList: [uint8Array.buffer] }).on( 'message', - (message) => + common.mustCall((message) => assert.deepStrictEqual(message, Uint8Array.of(1, 2, 3, 4)) - ); + )); assert.strictEqual(uint8Array.length, 0); } @@ -83,7 +83,7 @@ const meowScript = () => 'meow'; }); channel.port1.on( 'message', - (message) => + common.mustCall((message) => assert.strictEqual(message, 'Meow') - ); + )); } diff --git a/test/parallel/test-wrap-js-stream-destroy.js b/test/parallel/test-wrap-js-stream-destroy.js index 5c1ed1e7e65d10..dad2afb6f7848c 100644 --- a/test/parallel/test-wrap-js-stream-destroy.js +++ b/test/parallel/test-wrap-js-stream-destroy.js @@ -11,12 +11,12 @@ const net = require('net'); // "close" events, and vice versa. { let port; - const server = net.createServer((socket) => { + const server = net.createServer(common.mustCall((socket) => { socket.on('error', common.mustNotCall()); socket.on('end', common.mustNotCall()); socket.on('close', common.mustCall()); socket.destroy(); - }); + })); server.listen(() => { port = server.address().port; @@ -27,7 +27,7 @@ const net = require('net'); let streamWrap; const socket = new net.connect({ port, - }, () => { + }, common.mustCall(() => { socket.on('error', common.mustNotCall()); socket.on('end', common.mustCall()); socket.on('close', common.mustCall()); @@ -42,7 +42,7 @@ const net = require('net'); streamWrap.on('close', common.mustCall(() => { server.close(); })); - }); + })); streamWrap = new StreamWrap(socket); } } @@ -50,14 +50,14 @@ const net = require('net'); // Destroy the streamWrap and test again. { let port; - const server = net.createServer((socket) => { + const server = net.createServer(common.mustCall((socket) => { socket.on('error', common.mustNotCall()); socket.on('end', common.mustCall()); socket.on('close', common.mustCall(() => { server.close(); })); // Do not `socket.end()` and directly `socket.destroy()`. - }); + })); server.listen(() => { port = server.address().port; @@ -68,7 +68,7 @@ const net = require('net'); let streamWrap; const socket = new net.connect({ port, - }, () => { + }, common.mustCall(() => { socket.on('error', common.mustNotCall()); socket.on('end', common.mustNotCall()); socket.on('close', common.mustCall()); @@ -79,7 +79,7 @@ const net = require('net'); // the corresponding client-side socket closed. 
streamWrap.on('close', common.mustCall()); streamWrap.destroy(); - }); + })); streamWrap = new StreamWrap(socket); } } @@ -87,13 +87,13 @@ const net = require('net'); // Destroy the client socket and test again. { let port; - const server = net.createServer((socket) => { + const server = net.createServer(common.mustCall((socket) => { socket.on('error', common.mustNotCall()); socket.on('end', common.mustCall()); socket.on('close', common.mustCall(() => { server.close(); })); - }); + })); server.listen(() => { port = server.address().port; @@ -104,7 +104,7 @@ const net = require('net'); let streamWrap; const socket = new net.connect({ port, - }, () => { + }, common.mustCall(() => { socket.on('error', common.mustNotCall()); socket.on('end', common.mustNotCall()); socket.on('close', common.mustCall()); @@ -113,7 +113,7 @@ const net = require('net'); streamWrap.on('end', common.mustNotCall()); streamWrap.on('close', common.mustCall()); socket.destroy(); - }); + })); streamWrap = new StreamWrap(socket); } } diff --git a/test/parallel/test-x509-escaping.js b/test/parallel/test-x509-escaping.js index c8fc4abbb108a6..a5937a09cb1535 100644 --- a/test/parallel/test-x509-escaping.js +++ b/test/parallel/test-x509-escaping.js @@ -130,10 +130,10 @@ const { hasOpenSSL3 } = require('../common/crypto'); tls.connect(port, { ca: pem, servername: 'example.com', - checkServerIdentity: (hostname, peerCert) => { + checkServerIdentity: common.mustCall((hostname, peerCert) => { assert.strictEqual(hostname, 'example.com'); assert.strictEqual(peerCert.subjectaltname, expectedSANs[i]); - }, + }), }, common.mustCall()); })); } @@ -237,7 +237,7 @@ const { hasOpenSSL3 } = require('../common/crypto'); tls.connect(port, { ca: pem, servername: 'example.com', - checkServerIdentity: (hostname, peerCert) => { + checkServerIdentity: common.mustCall((hostname, peerCert) => { assert.strictEqual(hostname, 'example.com'); assert.deepStrictEqual(peerCert.infoAccess, Object.assign({ __proto__: null }, @@ -251,7 +251,7 @@ const { hasOpenSSL3 } = require('../common/crypto'); assert.strictEqual(obj.issuerCertificate, undefined); obj.issuerCertificate = obj; assert.deepStrictEqual(peerCert, obj); - }, + }), }, common.mustCall()); })); } @@ -351,7 +351,7 @@ const { hasOpenSSL3 } = require('../common/crypto'); tls.connect(port, { ca: pem, servername: 'example.com', - checkServerIdentity: (hostname, peerCert) => { + checkServerIdentity: common.mustCall((hostname, peerCert) => { assert.strictEqual(hostname, 'example.com'); const expectedObject = Object.assign({ __proto__: null }, expected.legacy); @@ -369,7 +369,7 @@ const { hasOpenSSL3 } = require('../common/crypto'); assert.strictEqual(obj.issuerCertificate, undefined); obj.issuerCertificate = obj; assert.deepStrictEqual(peerCert, obj); - }, + }), }, common.mustCall()); })); } From b764b84b2c098381696ea0342eb9276d146595ce Mon Sep 17 00:00:00 2001 From: Hans Klunder Date: Sun, 16 Nov 2025 07:51:24 +0100 Subject: [PATCH 006/115] lib: add support for readable byte streams to .toWeb() Add support for the creation of ReadableByteStream to Readable.toWeb() and Duplex.toWeb() This enables the use of .getReader({ mode: "byob" }) on e.g. 
socket().toWeb() Refs: https://github.com/nodejs/node/issues/56004#issuecomment-2908265316 Refs: https://developer.mozilla.org/en-US/docs/Web/API/Streams_API/Using_readable_byte_streams PR-URL: https://github.com/nodejs/node/pull/58664 Reviewed-By: Matteo Collina Reviewed-By: Ethan Arrowood Reviewed-By: Mattias Buelens Reviewed-By: James M Snell --- doc/api/stream.md | 11 ++++- doc/api/webstreams.md | 2 +- lib/internal/streams/duplex.js | 4 +- lib/internal/webstreams/adapters.js | 38 +++++++++++--- test/parallel/test-stream-duplex.js | 23 +++++++++ .../test-stream-readable-to-web-byob.js | 49 +++++++++++++++++++ ...stream-readable-to-web-termination-byob.js | 15 ++++++ 7 files changed, 132 insertions(+), 10 deletions(-) create mode 100644 test/parallel/test-stream-readable-to-web-byob.js create mode 100644 test/parallel/test-stream-readable-to-web-termination-byob.js diff --git a/doc/api/stream.md b/doc/api/stream.md index 40dab71dd8c745..f7d0aafa12bbae 100644 --- a/doc/api/stream.md +++ b/doc/api/stream.md @@ -3174,6 +3174,9 @@ Returns whether the stream has been read from or cancelled. * `streamDuplex` {stream.Duplex} +* `options` {Object} + * `type` {string} Must be 'bytes' or undefined. * Returns: {Object} * `readable` {ReadableStream} * `writable` {WritableStream} diff --git a/doc/api/webstreams.md b/doc/api/webstreams.md index d38196ab0432fa..905c0da6e9a8cf 100644 --- a/doc/api/webstreams.md +++ b/doc/api/webstreams.md @@ -1773,7 +1773,7 @@ text(readable).then((data) => { [Streams]: stream.md [WHATWG Streams Standard]: https://streams.spec.whatwg.org/ [`stream.Duplex.fromWeb`]: stream.md#streamduplexfromwebpair-options -[`stream.Duplex.toWeb`]: stream.md#streamduplextowebstreamduplex +[`stream.Duplex.toWeb`]: stream.md#streamduplextowebstreamduplex-options [`stream.Duplex`]: stream.md#class-streamduplex [`stream.Readable.fromWeb`]: stream.md#streamreadablefromwebreadablestream-options [`stream.Readable.toWeb`]: stream.md#streamreadabletowebstreamreadable-options diff --git a/lib/internal/streams/duplex.js b/lib/internal/streams/duplex.js index 713322c0f4aef9..dce8bd6e0fdff1 100644 --- a/lib/internal/streams/duplex.js +++ b/lib/internal/streams/duplex.js @@ -191,8 +191,8 @@ Duplex.fromWeb = function(pair, options) { options); }; -Duplex.toWeb = function(duplex) { - return lazyWebStreams().newReadableWritablePairFromDuplex(duplex); +Duplex.toWeb = function(duplex, options) { + return lazyWebStreams().newReadableWritablePairFromDuplex(duplex, options); }; let duplexify; diff --git a/lib/internal/webstreams/adapters.js b/lib/internal/webstreams/adapters.js index f9b76bc5e644a7..8d5c1d9fc6d9d0 100644 --- a/lib/internal/webstreams/adapters.js +++ b/lib/internal/webstreams/adapters.js @@ -73,6 +73,7 @@ const { validateBoolean, validateFunction, validateObject, + validateOneOf, } = require('internal/validators'); const { @@ -417,7 +418,8 @@ function newStreamWritableFromWritableStream(writableStream, options = kEmptyObj * @typedef {import('./queuingstrategies').QueuingStrategy} QueuingStrategy * @param {Readable} streamReadable * @param {{ - * strategy : QueuingStrategy + * strategy? : QueuingStrategy + * type? 
: 'bytes', * }} [options] * @returns {ReadableStream} */ @@ -432,6 +434,12 @@ function newReadableStreamFromStreamReadable(streamReadable, options = kEmptyObj 'stream.Readable', streamReadable); } + validateObject(options, 'options'); + if (options.type !== undefined) { + validateOneOf(options.type, 'options.type', ['bytes', undefined]); + } + + const isBYOB = options.type === 'bytes'; if (isDestroyed(streamReadable) || !isReadable(streamReadable)) { const readable = new ReadableStream(); @@ -443,6 +451,9 @@ function newReadableStreamFromStreamReadable(streamReadable, options = kEmptyObj const highWaterMark = streamReadable.readableHighWaterMark; const evaluateStrategyOrFallback = (strategy) => { + // If the stream is BYOB, we only use highWaterMark + if (isBYOB) + return { highWaterMark }; // If there is a strategy available, use it if (strategy) return strategy; @@ -491,7 +502,19 @@ function newReadableStreamFromStreamReadable(streamReadable, options = kEmptyObj streamReadable.on('data', onData); return new ReadableStream({ - start(c) { controller = c; }, + type: isBYOB ? 'bytes' : undefined, + start(c) { + controller = c; + if (isBYOB) { + streamReadable.once('end', () => { + // close the controller + controller.close(); + // And unlock the last BYOB read request + controller.byobRequest?.respond(0); + wasCanceled = true; + }); + } + }, pull() { streamReadable.resume(); }, @@ -601,9 +624,10 @@ function newStreamReadableFromReadableStream(readableStream, options = kEmptyObj /** * @param {Duplex} duplex + * @param {{ type?: 'bytes' }} [options] * @returns {ReadableWritablePair} */ -function newReadableWritablePairFromDuplex(duplex) { +function newReadableWritablePairFromDuplex(duplex, options = kEmptyObject) { // Not using the internal/streams/utils isWritableNodeStream and // isReadableNodeStream utilities here because they will return false // if the duplex was created with writable or readable options set to @@ -615,9 +639,11 @@ function newReadableWritablePairFromDuplex(duplex) { throw new ERR_INVALID_ARG_TYPE('duplex', 'stream.Duplex', duplex); } + validateObject(options, 'options'); + if (isDestroyed(duplex)) { const writable = new WritableStream(); - const readable = new ReadableStream(); + const readable = new ReadableStream({ type: options.type }); writable.close(); readable.cancel(); return { readable, writable }; @@ -633,8 +659,8 @@ function newReadableWritablePairFromDuplex(duplex) { const readable = isReadable(duplex) ? 
- newReadableStreamFromStreamReadable(duplex) : - new ReadableStream(); + newReadableStreamFromStreamReadable(duplex, options) : + new ReadableStream({ type: options.type }); if (!isReadable(duplex)) readable.cancel(); diff --git a/test/parallel/test-stream-duplex.js b/test/parallel/test-stream-duplex.js index 490744910cb1e1..6fa5541c555c28 100644 --- a/test/parallel/test-stream-duplex.js +++ b/test/parallel/test-stream-duplex.js @@ -131,3 +131,26 @@ process.on('exit', () => { assert.deepStrictEqual(Buffer.from(result.value), dataToRead); })); } + +// Duplex.toWeb BYOB +{ + const dataToRead = Buffer.from('hello'); + const dataToWrite = Buffer.from('world'); + + const duplex = Duplex({ + read() { + this.push(dataToRead); + this.push(null); + }, + write: common.mustCall((chunk) => { + assert.strictEqual(chunk, dataToWrite); + }) + }); + + const { writable, readable } = Duplex.toWeb(duplex, { type: 'bytes' }); + writable.getWriter().write(dataToWrite); + const data = new Uint8Array(dataToRead.length); + readable.getReader({ mode: 'byob' }).read(data).then(common.mustCall((result) => { + assert.deepStrictEqual(Buffer.from(result.value), dataToRead); + })); +} diff --git a/test/parallel/test-stream-readable-to-web-byob.js b/test/parallel/test-stream-readable-to-web-byob.js new file mode 100644 index 00000000000000..8e5f10efee1be2 --- /dev/null +++ b/test/parallel/test-stream-readable-to-web-byob.js @@ -0,0 +1,49 @@ +'use strict'; +require('../common'); +const { Readable } = require('stream'); +const assert = require('assert'); +const common = require('../common'); + +let count = 0; + +const nodeStream = new Readable({ + read(size) { + if (this.destroyed) { + return; + } + // Simulate a stream that pushes sequences of 16 bytes + const buffer = Buffer.alloc(size); + for (let i = 0; i < size; i++) { + buffer[i] = count++ % 16; + } + this.push(buffer); + } +}); + +// Test validation of 'type' option +assert.throws( + () => { + Readable.toWeb(nodeStream, { type: 'wrong type' }); + }, + { + code: 'ERR_INVALID_ARG_VALUE' + } +); + +// Test normal operation with ReadableByteStream +const webStream = Readable.toWeb(nodeStream, { type: 'bytes' }); +const reader = webStream.getReader({ mode: 'byob' }); +const expected = new Uint8Array(16); +for (let i = 0; i < 16; i++) { + expected[i] = count++; +} + +for (let i = 0; i < 1000; i++) { + // Read 16 bytes of data from the stream + const receive = new Uint8Array(16); + reader.read(receive).then(common.mustCall((result) => { + // Verify the data received + assert.ok(!result.done); + assert.deepStrictEqual(result.value, expected); + })); +} diff --git a/test/parallel/test-stream-readable-to-web-termination-byob.js b/test/parallel/test-stream-readable-to-web-termination-byob.js new file mode 100644 index 00000000000000..8b1f8d1817c0a5 --- /dev/null +++ b/test/parallel/test-stream-readable-to-web-termination-byob.js @@ -0,0 +1,15 @@ +'use strict'; +require('../common'); +const { Readable } = require('stream'); +const assert = require('assert'); +const common = require('../common'); +{ + const r = Readable.from([]); + // Cancelling reader while closing should not cause uncaught exceptions + r.on('close', common.mustCall(() => reader.cancel())); + + const reader = Readable.toWeb(r, { type: 'bytes' }).getReader({ mode: 'byob' }); + reader.read(new Uint8Array(16)).then(common.mustCall((result) => { + assert.ok(result.done); + })); +} From f999960ee83154cf6f9c1d65bcefaf700659772d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 20 Nov 2025 08:02:02 +0000 Subject: [PATCH 007/115] meta: bump actions/checkout from 5.0.0 to 5.0.1 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/checkout](https://github.com/actions/checkout) from 5.0.0 to 5.0.1. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/08c6903cd8c0fde910a37f88322edcfb5dd907a8...93cb6efe18208431cddfb8368fd83d5badbf9bfd) --- updated-dependencies: - dependency-name: actions/checkout dependency-version: 5.0.1 dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] PR-URL: https://github.com/nodejs/node/pull/60767 Reviewed-By: Antoine du Hamel Reviewed-By: Michaël Zasso Reviewed-By: Ulises Gascón Reviewed-By: Rafael Gonzaga Reviewed-By: Colin Ihrig Reviewed-By: Luigi Pinca --- .github/workflows/auto-start-ci.yml | 2 +- .github/workflows/build-tarball.yml | 4 ++-- .github/workflows/codeql.yml | 2 +- .github/workflows/commit-lint.yml | 2 +- .github/workflows/commit-queue.yml | 2 +- .../workflows/coverage-linux-without-intl.yml | 2 +- .github/workflows/coverage-linux.yml | 2 +- .github/workflows/coverage-windows.yml | 2 +- .github/workflows/create-release-proposal.yml | 2 +- .github/workflows/daily-wpt-fyi.yml | 4 ++-- .github/workflows/daily.yml | 2 +- .github/workflows/doc.yml | 2 +- .../workflows/find-inactive-collaborators.yml | 2 +- .github/workflows/find-inactive-tsc.yml | 4 ++-- .github/workflows/license-builder.yml | 2 +- .github/workflows/lint-release-proposal.yml | 2 +- .github/workflows/linters.yml | 20 +++++++++---------- .github/workflows/notify-on-push.yml | 2 +- .github/workflows/scorecard.yml | 2 +- .github/workflows/test-internet.yml | 2 +- .github/workflows/test-linux.yml | 2 +- .github/workflows/test-macos.yml | 2 +- .github/workflows/timezone-update.yml | 4 ++-- .github/workflows/tools.yml | 2 +- .github/workflows/update-openssl.yml | 2 +- .github/workflows/update-v8.yml | 2 +- .github/workflows/update-wpt.yml | 2 +- 27 files changed, 40 insertions(+), 40 deletions(-) diff --git a/.github/workflows/auto-start-ci.yml b/.github/workflows/auto-start-ci.yml index c89d289e95a9bf..e86400db95f0fe 100644 --- a/.github/workflows/auto-start-ci.yml +++ b/.github/workflows/auto-start-ci.yml @@ -45,7 +45,7 @@ jobs: if: needs.get-prs-for-ci.outputs.numbers != '' runs-on: ubuntu-latest steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: persist-credentials: false diff --git a/.github/workflows/build-tarball.yml b/.github/workflows/build-tarball.yml index eec24f78eb1c13..5679a96ac731cc 100644 --- a/.github/workflows/build-tarball.yml +++ b/.github/workflows/build-tarball.yml @@ -41,7 +41,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} @@ -70,7 +70,7 @@ jobs: CXX: sccache clang++ SCCACHE_GHA_ENABLED: 'true' steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: 
persist-credentials: false sparse-checkout: .github/actions/install-clang diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 7a94ebe76f0621..5cd00466a79510 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -23,7 +23,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL diff --git a/.github/workflows/commit-lint.yml b/.github/workflows/commit-lint.yml index 100725b9913740..0d594875f5c4c9 100644 --- a/.github/workflows/commit-lint.yml +++ b/.github/workflows/commit-lint.yml @@ -17,7 +17,7 @@ jobs: run: | echo "plusOne=$((${{ github.event.pull_request.commits }} + 1))" >> $GITHUB_OUTPUT echo "minusOne=$((${{ github.event.pull_request.commits }} - 1))" >> $GITHUB_OUTPUT - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: fetch-depth: ${{ steps.nb-of-commits.outputs.plusOne }} persist-credentials: false diff --git a/.github/workflows/commit-queue.yml b/.github/workflows/commit-queue.yml index fdce1e8ddbaef9..bf6f202aab874a 100644 --- a/.github/workflows/commit-queue.yml +++ b/.github/workflows/commit-queue.yml @@ -59,7 +59,7 @@ jobs: if: needs.get_mergeable_prs.outputs.numbers != '' runs-on: ubuntu-latest steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: # A personal token is required because pushing with GITHUB_TOKEN will # prevent commits from running CI after they land. 
It needs diff --git a/.github/workflows/coverage-linux-without-intl.yml b/.github/workflows/coverage-linux-without-intl.yml index daca9165fdc7d2..905afce6ace277 100644 --- a/.github/workflows/coverage-linux-without-intl.yml +++ b/.github/workflows/coverage-linux-without-intl.yml @@ -48,7 +48,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} diff --git a/.github/workflows/coverage-linux.yml b/.github/workflows/coverage-linux.yml index 52eb3b72917cd7..788ac898492e85 100644 --- a/.github/workflows/coverage-linux.yml +++ b/.github/workflows/coverage-linux.yml @@ -48,7 +48,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} diff --git a/.github/workflows/coverage-windows.yml b/.github/workflows/coverage-windows.yml index eace33d7a0fb10..d42d98f99fd390 100644 --- a/.github/workflows/coverage-windows.yml +++ b/.github/workflows/coverage-windows.yml @@ -45,7 +45,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: windows-2025 steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} diff --git a/.github/workflows/create-release-proposal.yml b/.github/workflows/create-release-proposal.yml index c9b1ff34d1452a..8b71c1483c57a2 100644 --- a/.github/workflows/create-release-proposal.yml +++ b/.github/workflows/create-release-proposal.yml @@ -33,7 +33,7 @@ jobs: RELEASE_LINE: ${{ inputs.release-line }} runs-on: ubuntu-latest steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: ref: ${{ env.STAGING_BRANCH }} persist-credentials: false diff --git a/.github/workflows/daily-wpt-fyi.yml b/.github/workflows/daily-wpt-fyi.yml index c941e6945ac534..66ee24b060c7e2 100644 --- a/.github/workflows/daily-wpt-fyi.yml +++ b/.github/workflows/daily-wpt-fyi.yml @@ -63,7 +63,7 @@ jobs: SHORT_SHA=$(node -p 'process.version.split(/-nightly\d{8}/)[1]') echo "NIGHTLY_REF=$(gh api /repos/nodejs/node/commits/$SHORT_SHA --jq '.sha')" >> $GITHUB_ENV - name: Checkout ${{ steps.setup-node.outputs.node-version }} - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: persist-credentials: false ref: ${{ env.NIGHTLY_REF || steps.setup-node.outputs.node-version }} @@ -79,7 +79,7 @@ jobs: run: rm -rf wpt working-directory: test/fixtures - name: Checkout epochs/daily WPT - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: repository: web-platform-tests/wpt persist-credentials: false diff --git a/.github/workflows/daily.yml b/.github/workflows/daily.yml index ea29a9835b33f0..2e2d64f7ebc20d 100644 --- a/.github/workflows/daily.yml +++ b/.github/workflows/daily.yml @@ -15,7 +15,7 @@ jobs: build-lto: 
runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: persist-credentials: false - name: Use Node.js ${{ env.NODE_VERSION }} diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml index 57e30eb7f0c83a..2aaa16db7baa3a 100644 --- a/.github/workflows/doc.yml +++ b/.github/workflows/doc.yml @@ -24,7 +24,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: persist-credentials: false - name: Use Node.js ${{ env.NODE_VERSION }} diff --git a/.github/workflows/find-inactive-collaborators.yml b/.github/workflows/find-inactive-collaborators.yml index d45712fc749413..49f6894977396a 100644 --- a/.github/workflows/find-inactive-collaborators.yml +++ b/.github/workflows/find-inactive-collaborators.yml @@ -19,7 +19,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: fetch-depth: 0 persist-credentials: false diff --git a/.github/workflows/find-inactive-tsc.yml b/.github/workflows/find-inactive-tsc.yml index 91146e6290245f..bc666acf832373 100644 --- a/.github/workflows/find-inactive-tsc.yml +++ b/.github/workflows/find-inactive-tsc.yml @@ -20,13 +20,13 @@ jobs: steps: - name: Checkout the repo - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: fetch-depth: 0 persist-credentials: false - name: Clone nodejs/TSC repository - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: fetch-depth: 0 path: .tmp diff --git a/.github/workflows/license-builder.yml b/.github/workflows/license-builder.yml index 6c7dc8721d382b..e5c594377829ad 100644 --- a/.github/workflows/license-builder.yml +++ b/.github/workflows/license-builder.yml @@ -17,7 +17,7 @@ jobs: if: github.repository == 'nodejs/node' runs-on: ubuntu-latest steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: persist-credentials: false - run: ./tools/license-builder.sh # Run the license builder tool diff --git a/.github/workflows/lint-release-proposal.yml b/.github/workflows/lint-release-proposal.yml index 479f1f79cb4798..12ae68a75d2b8c 100644 --- a/.github/workflows/lint-release-proposal.yml +++ b/.github/workflows/lint-release-proposal.yml @@ -23,7 +23,7 @@ jobs: contents: read pull-requests: read steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: persist-credentials: false fetch-depth: 2 diff --git a/.github/workflows/linters.yml b/.github/workflows/linters.yml index 052f05734a69ee..61a976b9243e50 100644 --- a/.github/workflows/linters.yml +++ b/.github/workflows/linters.yml @@ -25,7 +25,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: persist-credentials: 
false - name: Use Node.js ${{ env.NODE_VERSION }} @@ -40,7 +40,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} @@ -55,7 +55,7 @@ jobs: if: ${{ github.event.pull_request && github.event.pull_request.draft == false && github.base_ref == github.event.repository.default_branch }} runs-on: ubuntu-latest steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: fetch-depth: 0 persist-credentials: false @@ -93,7 +93,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: persist-credentials: false - name: Use Node.js ${{ env.NODE_VERSION }} @@ -142,7 +142,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: persist-credentials: false sparse-checkout: | @@ -169,7 +169,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: persist-credentials: false sparse-checkout: | @@ -193,7 +193,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: persist-credentials: false sparse-checkout: | @@ -207,7 +207,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: persist-credentials: false - uses: mszostok/codeowners-validator@7f3f5e28c6d7b8dfae5731e54ce2272ca384592f @@ -217,7 +217,7 @@ jobs: if: ${{ github.event.pull_request }} runs-on: ubuntu-latest steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: fetch-depth: 2 persist-credentials: false @@ -230,7 +230,7 @@ jobs: lint-readme: runs-on: ubuntu-latest steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: persist-credentials: false sparse-checkout: | diff --git a/.github/workflows/notify-on-push.yml b/.github/workflows/notify-on-push.yml index 67bd1b1e2e69b6..e9cae10384bd81 100644 --- a/.github/workflows/notify-on-push.yml +++ b/.github/workflows/notify-on-push.yml @@ -32,7 +32,7 @@ jobs: if: github.repository == 'nodejs/node' runs-on: ubuntu-latest steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: persist-credentials: false - name: Check commit message diff --git a/.github/workflows/scorecard.yml 
b/.github/workflows/scorecard.yml index 02480c5c7ffd56..e27af8905998cd 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -38,7 +38,7 @@ jobs: egress-policy: audit # TODO: change to 'egress-policy: block' after couple of runs - name: Checkout code - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: persist-credentials: false diff --git a/.github/workflows/test-internet.yml b/.github/workflows/test-internet.yml index a7f17dd6d9e2da..beeaa009ea6549 100644 --- a/.github/workflows/test-internet.yml +++ b/.github/workflows/test-internet.yml @@ -45,7 +45,7 @@ jobs: if: github.event_name == 'schedule' && github.repository == 'nodejs/node' || github.event.pull_request.draft == false runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} diff --git a/.github/workflows/test-linux.yml b/.github/workflows/test-linux.yml index 9605dae7a6e15e..41991b6d2e9021 100644 --- a/.github/workflows/test-linux.yml +++ b/.github/workflows/test-linux.yml @@ -47,7 +47,7 @@ jobs: matrix: os: [ubuntu-24.04, ubuntu-24.04-arm] steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: persist-credentials: false path: node diff --git a/.github/workflows/test-macos.yml b/.github/workflows/test-macos.yml index eabe54e2fab44e..fe101cf48fa612 100644 --- a/.github/workflows/test-macos.yml +++ b/.github/workflows/test-macos.yml @@ -49,7 +49,7 @@ jobs: CXX: sccache g++ SCCACHE_GHA_ENABLED: 'true' steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: persist-credentials: false path: node diff --git a/.github/workflows/timezone-update.yml b/.github/workflows/timezone-update.yml index 51dd8f155b89fa..5f330165c35930 100644 --- a/.github/workflows/timezone-update.yml +++ b/.github/workflows/timezone-update.yml @@ -20,12 +20,12 @@ jobs: steps: - name: Checkout nodejs/node - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: persist-credentials: false - name: Checkout unicode-org/icu-data - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: path: icu-data persist-credentials: false diff --git a/.github/workflows/tools.yml b/.github/workflows/tools.yml index 9df140078318d3..feae9d90218657 100644 --- a/.github/workflows/tools.yml +++ b/.github/workflows/tools.yml @@ -280,7 +280,7 @@ jobs: run: | git config --global user.name "Node.js GitHub Bot" git config --global user.email "github-bot@iojs.org" - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 if: github.event_name == 'schedule' || inputs.id == 'all' || inputs.id == matrix.id with: persist-credentials: false diff --git a/.github/workflows/update-openssl.yml b/.github/workflows/update-openssl.yml index ee9a3e0fa11c03..03130109faf44a 100644 --- a/.github/workflows/update-openssl.yml +++ b/.github/workflows/update-openssl.yml 
@@ -14,7 +14,7 @@ jobs: if: github.repository == 'nodejs/node' runs-on: ubuntu-latest steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: persist-credentials: false - name: Check and download new OpenSSL version diff --git a/.github/workflows/update-v8.yml b/.github/workflows/update-v8.yml index 1db258a6020d42..5c3fcd3b9f0095 100644 --- a/.github/workflows/update-v8.yml +++ b/.github/workflows/update-v8.yml @@ -16,7 +16,7 @@ jobs: if: github.repository == 'nodejs/node' runs-on: ubuntu-latest steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: persist-credentials: false - name: Cache node modules and update-v8 diff --git a/.github/workflows/update-wpt.yml b/.github/workflows/update-wpt.yml index edf4aaeba35c7a..8ef288519d003d 100644 --- a/.github/workflows/update-wpt.yml +++ b/.github/workflows/update-wpt.yml @@ -27,7 +27,7 @@ jobs: subsystem: ${{ fromJSON(github.event.inputs.subsystems || '["url", "urlpattern", "WebCryptoAPI"]') }} steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: persist-credentials: false From 80d6a48d7440325178689fba66d58125d29bd1ae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C4=90=E1=BB=97=20Tr=E1=BB=8Dng=20H=E1=BA=A3i?= <41283691+hainenber@users.noreply.github.com> Date: Fri, 21 Nov 2025 05:04:36 +0700 Subject: [PATCH 008/115] repl: tab completion targets `` instead of `new ` MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: hainenber PR-URL: https://github.com/nodejs/node/pull/60319 Reviewed-By: Ruben Bridgewater Reviewed-By: René --- lib/internal/repl/completion.js | 10 +++++ .../test-repl-tab-complete-new-expression.js | 41 +++++++++++++++++++ 2 files changed, 51 insertions(+) create mode 100644 test/parallel/test-repl-tab-complete-new-expression.js diff --git a/lib/internal/repl/completion.js b/lib/internal/repl/completion.js index 8ff58ecd197fb4..96b646a4e4469d 100644 --- a/lib/internal/repl/completion.js +++ b/lib/internal/repl/completion.js @@ -613,6 +613,16 @@ function findExpressionCompleteTarget(code) { return findExpressionCompleteTarget(argumentCode); } + // If the last statement is an expression statement with "new" syntax + // we want to extract the callee for completion (e.g. for `new Sample` we want `Sample`) + if (lastBodyStatement.type === 'ExpressionStatement' && + lastBodyStatement.expression.type === 'NewExpression' && + lastBodyStatement.expression.callee) { + const callee = lastBodyStatement.expression.callee; + const calleeCode = code.slice(callee.start, callee.end); + return findExpressionCompleteTarget(calleeCode); + } + // Walk the AST for the current block of code, and check whether it contains any // statement or expression type that would potentially have side effects if evaluated. 
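In user-facing terms, the `NewExpression` branch added above makes `new X.<Tab>` complete the properties of `X` itself. A hypothetical sketch (the `X`/`Y` classes are illustrative; `replServer.complete()` is the same entry point the new test below drives), assuming a build that includes this patch:

```js
'use strict';
const repl = require('node:repl');
const { PassThrough } = require('node:stream');

const replServer = repl.start({
  input: new PassThrough(),
  output: new PassThrough(),
  terminal: false,
});

// Expose a class with a nested class-valued property in the REPL context.
replServer.context.X = class X { static Y = class Y {} };

replServer.complete('new X.', (err, [completions, completeOn]) => {
  // The completer now targets `X` (the callee), so its properties are offered.
  console.log(completions.includes('X.Y')); // true (expected)
  console.log(completeOn); // 'X.'
  replServer.close();
});
```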
let isAllowed = true; diff --git a/test/parallel/test-repl-tab-complete-new-expression.js b/test/parallel/test-repl-tab-complete-new-expression.js new file mode 100644 index 00000000000000..ef269f44b7698b --- /dev/null +++ b/test/parallel/test-repl-tab-complete-new-expression.js @@ -0,0 +1,41 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const { startNewREPLServer } = require('../common/repl'); +const { describe, it } = require('node:test'); + +// This test verifies that tab completion works correctly with `new` operator +// for a class. Property access has higher precedence than `new` so the properties +// should be displayed as autocompletion result. + +describe('REPL tab completion with new expressions', () => { + it('should output completion of class properties', () => { + const { replServer, input } = startNewREPLServer({ terminal: false }); + + input.run([ + ` + class X { x = 1 }; + X.Y = class Y { y = 2 }; + `, + ]); + + // Handle completion for property of root class. + replServer.complete( + 'new X.', + common.mustSucceed((completions) => { + assert.strictEqual(completions[1], 'X.'); + }) + ); + + // Handle completion for property with another class as value. + replServer.complete( + 'new X.Y.', + common.mustSucceed((completions) => { + assert.strictEqual(completions[1], 'X.Y.'); + }) + ); + + replServer.close(); + }); +}); From 1ad165a6ab993af8d82ee41955227190061ea65e Mon Sep 17 00:00:00 2001 From: Livia Medeiros Date: Sun, 23 Nov 2025 16:45:46 +0800 Subject: [PATCH 009/115] lib: prefer `call()` over `apply()` if argument list is not array MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/60796 Reviewed-By: René Reviewed-By: Antoine du Hamel Reviewed-By: Colin Ihrig Reviewed-By: Luigi Pinca --- lib/assert.js | 6 +++--- lib/dgram.js | 3 +-- lib/fs.js | 3 ++- lib/internal/child_process.js | 3 +-- lib/internal/cluster/child.js | 5 +++-- lib/internal/cluster/worker.js | 3 ++- lib/internal/crypto/cipher.js | 10 +++++----- lib/internal/crypto/hash.js | 6 +++--- lib/internal/crypto/sig.js | 5 ++--- lib/internal/crypto/webcrypto.js | 25 +++++++++++++------------ lib/internal/dns/callback_resolver.js | 4 ++-- lib/internal/dns/promises.js | 4 ++-- lib/internal/file.js | 4 ++-- lib/internal/fs/read/context.js | 4 ++-- lib/internal/fs/streams.js | 5 +++-- lib/internal/fs/sync_write_stream.js | 4 ++-- lib/internal/fs/utils.js | 4 +--- lib/internal/modules/cjs/loader.js | 5 ++--- lib/internal/process/per_thread.js | 2 +- lib/internal/tls/wrap.js | 10 +++++----- lib/internal/vm.js | 16 +++++++--------- lib/internal/vm/module.js | 8 ++------ lib/repl.js | 5 +++-- 23 files changed, 69 insertions(+), 75 deletions(-) diff --git a/lib/assert.js b/lib/assert.js index e7430bbde6ed06..a201127570618e 100644 --- a/lib/assert.js +++ b/lib/assert.js @@ -27,13 +27,13 @@ const { ArrayPrototypePush, ArrayPrototypeSlice, Error, + FunctionPrototypeCall, NumberIsNaN, ObjectAssign, ObjectDefineProperty, ObjectIs, ObjectKeys, ObjectPrototypeIsPrototypeOf, - ReflectApply, RegExpPrototypeExec, String, StringPrototypeIndexOf, @@ -575,7 +575,7 @@ function expectedException(actual, expected, message, fn) { throwError = true; } else { // Check validation functions return value. 
- const res = ReflectApply(expected, {}, [actual]); + const res = FunctionPrototypeCall(expected, {}, actual); if (res !== true) { if (!message) { generatedMessage = true; @@ -720,7 +720,7 @@ function hasMatchingError(actual, expected) { if (ObjectPrototypeIsPrototypeOf(Error, expected)) { return false; } - return ReflectApply(expected, {}, [actual]) === true; + return FunctionPrototypeCall(expected, {}, actual) === true; } function expectsNoError(stackStartFn, actual, error, message) { diff --git a/lib/dgram.js b/lib/dgram.js index c790ed1f3d46ae..419dd947fbe88e 100644 --- a/lib/dgram.js +++ b/lib/dgram.js @@ -29,7 +29,6 @@ const { FunctionPrototypeCall, ObjectDefineProperty, ObjectSetPrototypeOf, - ReflectApply, SymbolAsyncDispose, SymbolDispose, } = primordials; @@ -437,7 +436,7 @@ Socket.prototype.connect = function(port, address, callback) { return; } - ReflectApply(_connect, this, [port, address, callback]); + FunctionPrototypeCall(_connect, this, port, address, callback); }; diff --git a/lib/fs.js b/lib/fs.js index 0845ae0c7906cf..012c408253c651 100644 --- a/lib/fs.js +++ b/lib/fs.js @@ -29,6 +29,7 @@ const { ArrayPrototypePush, BigIntPrototypeToString, Boolean, + FunctionPrototypeCall, MathMax, Number, ObjectDefineProperties, @@ -370,7 +371,7 @@ function readFile(path, options, callback) { } if (context.isUserFd) { process.nextTick(function tick(context) { - ReflectApply(readFileAfterOpen, { context }, [null, path]); + FunctionPrototypeCall(readFileAfterOpen, { context }, null, path); }, context); return; } diff --git a/lib/internal/child_process.js b/lib/internal/child_process.js index dada6b8cc5a1fd..5ce8f0f7e9057c 100644 --- a/lib/internal/child_process.js +++ b/lib/internal/child_process.js @@ -821,8 +821,7 @@ function setupChannel(target, channel, serializationMode) { obj = handleConversion[message.type]; // convert TCP object to native handle object - handle = ReflectApply(handleConversion[message.type].send, - target, [message, handle, options]); + handle = FunctionPrototypeCall(handleConversion[message.type].send, target, message, handle, options); // If handle was sent twice, or it is impossible to get native handle // out of it - just send a text without the handle. 
diff --git a/lib/internal/cluster/child.js b/lib/internal/cluster/child.js index 7c132310a81874..b6ea60e660fe97 100644 --- a/lib/internal/cluster/child.js +++ b/lib/internal/cluster/child.js @@ -3,6 +3,7 @@ const { ArrayPrototypeJoin, FunctionPrototype, + FunctionPrototypeCall, ObjectAssign, ReflectApply, SafeMap, @@ -58,7 +59,7 @@ cluster._setupWorker = function() { if (message.act === 'newconn') onconnection(message, handle); else if (message.act === 'disconnect') - ReflectApply(_disconnect, worker, [true]); + FunctionPrototypeCall(_disconnect, worker, true); } }; @@ -287,7 +288,7 @@ function _disconnect(primaryInitiated) { Worker.prototype.disconnect = function() { if (this.state !== 'disconnecting' && this.state !== 'destroying') { this.state = 'disconnecting'; - ReflectApply(_disconnect, this, []); + FunctionPrototypeCall(_disconnect, this); } return this; diff --git a/lib/internal/cluster/worker.js b/lib/internal/cluster/worker.js index 872c5f89e0ceb3..f85df862952d52 100644 --- a/lib/internal/cluster/worker.js +++ b/lib/internal/cluster/worker.js @@ -1,6 +1,7 @@ 'use strict'; const { + FunctionPrototypeCall, ObjectSetPrototypeOf, ReflectApply, } = primordials; @@ -16,7 +17,7 @@ function Worker(options) { if (!(this instanceof Worker)) return new Worker(options); - ReflectApply(EventEmitter, this, []); + FunctionPrototypeCall(EventEmitter, this); if (options === null || typeof options !== 'object') options = kEmptyObject; diff --git a/lib/internal/crypto/cipher.js b/lib/internal/crypto/cipher.js index dc77342b56a62b..2fa4bd6dfb9997 100644 --- a/lib/internal/crypto/cipher.js +++ b/lib/internal/crypto/cipher.js @@ -1,8 +1,8 @@ 'use strict'; const { + FunctionPrototypeCall, ObjectSetPrototypeOf, - ReflectApply, StringPrototypeToLowerCase, } = primordials; @@ -117,7 +117,7 @@ function createCipherBase(cipher, credential, options, isEncrypt, iv) { this[kHandle] = new CipherBase(isEncrypt, cipher, credential, iv, authTagLength); this._decoder = null; - ReflectApply(LazyTransform, this, [options]); + FunctionPrototypeCall(LazyTransform, this, options); } function createCipherWithIV(cipher, key, options, isEncrypt, iv) { @@ -125,7 +125,7 @@ function createCipherWithIV(cipher, key, options, isEncrypt, iv) { const encoding = getStringOption(options, 'encoding'); key = prepareSecretKey(key, encoding); iv = iv === null ? null : getArrayBufferOrView(iv, 'iv'); - ReflectApply(createCipherBase, this, [cipher, key, options, isEncrypt, iv]); + FunctionPrototypeCall(createCipherBase, this, cipher, key, options, isEncrypt, iv); } // The Cipher class is part of the legacy Node.js crypto API. 
It exposes @@ -215,7 +215,7 @@ function Cipheriv(cipher, key, iv, options) { if (!(this instanceof Cipheriv)) return new Cipheriv(cipher, key, iv, options); - ReflectApply(createCipherWithIV, this, [cipher, key, options, true, iv]); + FunctionPrototypeCall(createCipherWithIV, this, cipher, key, options, true, iv); } function addCipherPrototypeFunctions(constructor) { @@ -244,7 +244,7 @@ function Decipheriv(cipher, key, iv, options) { if (!(this instanceof Decipheriv)) return new Decipheriv(cipher, key, iv, options); - ReflectApply(createCipherWithIV, this, [cipher, key, options, false, iv]); + FunctionPrototypeCall(createCipherWithIV, this, cipher, key, options, false, iv); } ObjectSetPrototypeOf(Decipheriv.prototype, LazyTransform.prototype); diff --git a/lib/internal/crypto/hash.js b/lib/internal/crypto/hash.js index ef8020ebb587bf..43417cf3544933 100644 --- a/lib/internal/crypto/hash.js +++ b/lib/internal/crypto/hash.js @@ -1,8 +1,8 @@ 'use strict'; const { + FunctionPrototypeCall, ObjectSetPrototypeOf, - ReflectApply, StringPrototypeReplace, StringPrototypeToLowerCase, Symbol, @@ -107,7 +107,7 @@ function Hash(algorithm, options) { if (!isCopy && xofLen === undefined) { maybeEmitDeprecationWarning(algorithm); } - ReflectApply(LazyTransform, this, [options]); + FunctionPrototypeCall(LazyTransform, this, options); } ObjectSetPrototypeOf(Hash.prototype, LazyTransform.prototype); @@ -171,7 +171,7 @@ function Hmac(hmac, key, options) { this[kState] = { [kFinalized]: false, }; - ReflectApply(LazyTransform, this, [options]); + FunctionPrototypeCall(LazyTransform, this, options); } ObjectSetPrototypeOf(Hmac.prototype, LazyTransform.prototype); diff --git a/lib/internal/crypto/sig.js b/lib/internal/crypto/sig.js index a5bdaaf22b5ef0..7d38c0bdf60687 100644 --- a/lib/internal/crypto/sig.js +++ b/lib/internal/crypto/sig.js @@ -3,7 +3,6 @@ const { FunctionPrototypeCall, ObjectSetPrototypeOf, - ReflectApply, } = primordials; const { @@ -59,7 +58,7 @@ function Sign(algorithm, options) { this[kHandle] = new _Sign(); this[kHandle].init(algorithm); - ReflectApply(Writable, this, [options]); + FunctionPrototypeCall(Writable, this, options); } ObjectSetPrototypeOf(Sign.prototype, Writable.prototype); @@ -219,7 +218,7 @@ function Verify(algorithm, options) { this[kHandle] = new _Verify(); this[kHandle].init(algorithm); - ReflectApply(Writable, this, [options]); + FunctionPrototypeCall(Writable, this, options); } ObjectSetPrototypeOf(Verify.prototype, Writable.prototype); diff --git a/lib/internal/crypto/webcrypto.js b/lib/internal/crypto/webcrypto.js index 869c07ef87fbe6..0fba4e9108c329 100644 --- a/lib/internal/crypto/webcrypto.js +++ b/lib/internal/crypto/webcrypto.js @@ -2,6 +2,7 @@ const { ArrayPrototypeIncludes, + FunctionPrototypeCall, JSONParse, JSONStringify, ObjectDefineProperties, @@ -84,7 +85,7 @@ async function digest(algorithm, data) { algorithm = normalizeAlgorithm(algorithm, 'digest'); - return await ReflectApply(asyncDigest, this, [algorithm, data]); + return await FunctionPrototypeCall(asyncDigest, this, algorithm, data); } function randomUUID() { @@ -377,10 +378,10 @@ async function deriveKey( throw lazyDOMException('Unrecognized algorithm name', 'NotSupportedError'); } - return ReflectApply( + return FunctionPrototypeCall( importKeySync, this, - ['raw-secret', bits, derivedKeyAlgorithm, extractable, keyUsages], + 'raw-secret', bits, derivedKeyAlgorithm, extractable, keyUsages, ); } @@ -889,10 +890,10 @@ async function importKey( algorithm = normalizeAlgorithm(algorithm, 'importKey'); - 
return ReflectApply( + return FunctionPrototypeCall( importKeySync, this, - [format, keyData, algorithm, extractable, keyUsages], + format, keyData, algorithm, extractable, keyUsages, ); } @@ -926,7 +927,7 @@ async function wrapKey(format, key, wrappingKey, algorithm) { } catch { algorithm = normalizeAlgorithm(algorithm, 'encrypt'); } - let keyData = await ReflectApply(exportKey, this, [format, key]); + let keyData = await FunctionPrototypeCall(exportKey, this, format, key); if (format === 'jwk') { const ec = new TextEncoder(); @@ -1023,10 +1024,10 @@ async function unwrapKey( } } - return ReflectApply( + return FunctionPrototypeCall( importKeySync, this, - [format, keyData, unwrappedKeyAlgo, extractable, keyUsages], + format, keyData, unwrappedKeyAlgo, extractable, keyUsages, ); } @@ -1349,10 +1350,10 @@ async function encapsulateKey(encapsulationAlgorithm, encapsulationKey, sharedKe throw lazyDOMException('Unrecognized algorithm name', 'NotSupportedError'); } - const sharedKey = ReflectApply( + const sharedKey = FunctionPrototypeCall( importKeySync, this, - ['raw-secret', encapsulateBits.sharedKey, normalizedSharedKeyAlgorithm, extractable, usages], + 'raw-secret', encapsulateBits.sharedKey, normalizedSharedKeyAlgorithm, extractable, usages, ); const encapsulatedKey = { @@ -1469,10 +1470,10 @@ async function decapsulateKey( throw lazyDOMException('Unrecognized algorithm name', 'NotSupportedError'); } - return ReflectApply( + return FunctionPrototypeCall( importKeySync, this, - ['raw-secret', decapsulatedBits, normalizedSharedKeyAlgorithm, extractable, usages], + 'raw-secret', decapsulatedBits, normalizedSharedKeyAlgorithm, extractable, usages, ); } diff --git a/lib/internal/dns/callback_resolver.js b/lib/internal/dns/callback_resolver.js index fdafd310ad10a5..e1532a3ee142ab 100644 --- a/lib/internal/dns/callback_resolver.js +++ b/lib/internal/dns/callback_resolver.js @@ -2,8 +2,8 @@ const { ArrayPrototypeMap, + FunctionPrototypeCall, ObjectDefineProperty, - ReflectApply, Symbol, } = primordials; @@ -104,7 +104,7 @@ function resolve(hostname, rrtype, callback) { } if (typeof resolver === 'function') { - return ReflectApply(resolver, this, [hostname, callback]); + return FunctionPrototypeCall(resolver, this, hostname, callback); } throw new ERR_INVALID_ARG_VALUE('rrtype', rrtype); } diff --git a/lib/internal/dns/promises.js b/lib/internal/dns/promises.js index e7b6f9622332f3..40d4d1c4c8e448 100644 --- a/lib/internal/dns/promises.js +++ b/lib/internal/dns/promises.js @@ -1,9 +1,9 @@ 'use strict'; const { ArrayPrototypeMap, + FunctionPrototypeCall, ObjectDefineProperty, Promise, - ReflectApply, Symbol, } = primordials; @@ -359,7 +359,7 @@ function resolve(hostname, rrtype) { resolver = resolveMap.A; } - return ReflectApply(resolver, this, [hostname]); + return FunctionPrototypeCall(resolver, this, hostname); } // Promise-based resolver. 
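The same substitution repeats in the files that follow. As a standalone sketch of why it is safe (using the standard `Reflect.apply()`/`Function.prototype.call()` built-ins that the internal primordials wrap), the two spellings invoke the function identically; `call()` just skips building a throwaway argument array when the arguments are already individual values:

```js
'use strict';

function greet(greeting, punctuation) {
  return `${greeting}, ${this.name}${punctuation}`;
}

const ctx = { name: 'Ada' };

// Argument list already lives in an array: apply()/Reflect.apply() fits.
const args = ['Hello', '!'];
console.log(Reflect.apply(greet, ctx, args)); // 'Hello, Ada!'

// Fixed, known-arity argument list: call() expresses the same invocation
// without the intermediate array, which is what FunctionPrototypeCall does.
console.log(greet.call(ctx, 'Hi', '?')); // 'Hi, Ada?'
console.log(Function.prototype.call.call(greet, ctx, 'Hi', '?')); // same result
```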
diff --git a/lib/internal/file.js b/lib/internal/file.js index 46dc96fa1bc6aa..aa05a602975e40 100644 --- a/lib/internal/file.js +++ b/lib/internal/file.js @@ -2,7 +2,7 @@ const { DateNow, - FunctionPrototypeApply, + FunctionPrototypeCall, NumberIsNaN, ObjectDefineProperties, ObjectSetPrototypeOf, @@ -129,7 +129,7 @@ class File extends Blob { } function TransferableFile(handle, length, type = '') { - FunctionPrototypeApply(TransferableBlob, this, [handle, length, type]); + FunctionPrototypeCall(TransferableBlob, this, handle, length, type); ObjectSetPrototypeOf(this, File.prototype); } diff --git a/lib/internal/fs/read/context.js b/lib/internal/fs/read/context.js index bbbf4f35e4ba40..193251a0516d76 100644 --- a/lib/internal/fs/read/context.js +++ b/lib/internal/fs/read/context.js @@ -2,8 +2,8 @@ const { ArrayPrototypePush, + FunctionPrototypeCall, MathMin, - ReflectApply, } = primordials; const { @@ -112,7 +112,7 @@ class ReadFileContext { close(err) { if (this.isUserFd) { process.nextTick(function tick(context) { - ReflectApply(readFileAfterClose, { context }, [null]); + FunctionPrototypeCall(readFileAfterClose, { context }, null); }, this); return; } diff --git a/lib/internal/fs/streams.js b/lib/internal/fs/streams.js index e35934d987bb3d..ae6d927bd4ceb3 100644 --- a/lib/internal/fs/streams.js +++ b/lib/internal/fs/streams.js @@ -3,6 +3,7 @@ const { Array, FunctionPrototypeBind, + FunctionPrototypeCall, MathMin, ObjectDefineProperty, ObjectSetPrototypeOf, @@ -224,7 +225,7 @@ function ReadStream(path, options) { } } - ReflectApply(Readable, this, [options]); + FunctionPrototypeCall(Readable, this, options); } ObjectSetPrototypeOf(ReadStream.prototype, Readable.prototype); ObjectSetPrototypeOf(ReadStream, Readable); @@ -392,7 +393,7 @@ function WriteStream(path, options) { this.pos = this.start; } - ReflectApply(Writable, this, [options]); + FunctionPrototypeCall(Writable, this, options); if (options.encoding) this.setDefaultEncoding(options.encoding); diff --git a/lib/internal/fs/sync_write_stream.js b/lib/internal/fs/sync_write_stream.js index 5ce4f1883b905d..06d8cde1b52392 100644 --- a/lib/internal/fs/sync_write_stream.js +++ b/lib/internal/fs/sync_write_stream.js @@ -1,8 +1,8 @@ 'use strict'; const { + FunctionPrototypeCall, ObjectSetPrototypeOf, - ReflectApply, } = primordials; const { kEmptyObject } = require('internal/util'); @@ -10,7 +10,7 @@ const { Writable } = require('stream'); const { closeSync, writeSync } = require('fs'); function SyncWriteStream(fd, options) { - ReflectApply(Writable, this, [{ autoDestroy: true }]); + FunctionPrototypeCall(Writable, this, { autoDestroy: true }); options ||= kEmptyObject; diff --git a/lib/internal/fs/utils.js b/lib/internal/fs/utils.js index 957f65177fafe8..ae17260a2393af 100644 --- a/lib/internal/fs/utils.js +++ b/lib/internal/fs/utils.js @@ -16,7 +16,6 @@ const { ObjectDefineProperty, ObjectIs, ObjectSetPrototypeOf, - ReflectApply, ReflectOwnKeys, RegExpPrototypeSymbolReplace, StringPrototypeEndsWith, @@ -494,8 +493,7 @@ const lazyDateFields = { function BigIntStats(dev, mode, nlink, uid, gid, rdev, blksize, ino, size, blocks, atimeNs, mtimeNs, ctimeNs, birthtimeNs) { - ReflectApply(StatsBase, this, [dev, mode, nlink, uid, gid, rdev, blksize, - ino, size, blocks]); + FunctionPrototypeCall(StatsBase, this, dev, mode, nlink, uid, gid, rdev, blksize, ino, size, blocks); this.atimeMs = atimeNs / kNsPerMsBigInt; this.mtimeMs = mtimeNs / kNsPerMsBigInt; diff --git a/lib/internal/modules/cjs/loader.js b/lib/internal/modules/cjs/loader.js index 
2b199e82fee3a9..e24a40e2c42281 100644 --- a/lib/internal/modules/cjs/loader.js +++ b/lib/internal/modules/cjs/loader.js @@ -36,6 +36,7 @@ const { ArrayPrototypeUnshiftApply, Boolean, Error, + FunctionPrototypeCall, JSONParse, ObjectDefineProperty, ObjectFreeze, @@ -47,7 +48,6 @@ const { ObjectPrototypeHasOwnProperty, ObjectSetPrototypeOf, Proxy, - ReflectApply, ReflectSet, RegExpPrototypeExec, SafeMap, @@ -1801,8 +1801,7 @@ Module.prototype._compile = function(content, filename, format) { result = callAndPauseOnStart(compiledWrapper, thisValue, exports, require, module, filename, dirname); } else { - result = ReflectApply(compiledWrapper, thisValue, - [exports, require, module, filename, dirname]); + result = FunctionPrototypeCall(compiledWrapper, thisValue, exports, require, module, filename, dirname); } this[kIsExecuting] = false; if (requireDepth === 0) { statCache = null; } diff --git a/lib/internal/process/per_thread.js b/lib/internal/process/per_thread.js index da2ba93e7e93ae..86a5e808097449 100644 --- a/lib/internal/process/per_thread.js +++ b/lib/internal/process/per_thread.js @@ -478,7 +478,7 @@ function buildAllowedFlags() { forEach(callback, thisArg = undefined) { ArrayPrototypeForEach( this[kInternal].array, - (v) => ReflectApply(callback, thisArg, [v, v, this]), + (v) => FunctionPrototypeCall(callback, thisArg, v, v, this), ); } diff --git a/lib/internal/tls/wrap.js b/lib/internal/tls/wrap.js index d630050073e473..251e77a720eed4 100644 --- a/lib/internal/tls/wrap.js +++ b/lib/internal/tls/wrap.js @@ -22,6 +22,7 @@ 'use strict'; const { + FunctionPrototypeCall, ObjectAssign, ObjectDefineProperty, ObjectSetPrototypeOf, @@ -582,7 +583,7 @@ function TLSSocket(socket, opts) { // distinguishable from regular ones. this.encrypted = true; - ReflectApply(net.Socket, this, [{ + FunctionPrototypeCall(net.Socket, this, { handle: this._wrapHandle(wrap, handle, wrapHasActiveWriteFromPrevOwner), allowHalfOpen: socket ? socket.allowHalfOpen : tlsOptions.allowHalfOpen, pauseOnCreate: tlsOptions.pauseOnConnect, @@ -590,7 +591,7 @@ function TLSSocket(socket, opts) { highWaterMark: tlsOptions.highWaterMark, onread: !socket ? 
tlsOptions.onread : null, signal: tlsOptions.signal, - }]); + }); // Proxy for API compatibility this.ssl = this._handle; // C++ TLSWrap object @@ -1390,7 +1391,7 @@ function Server(options, listener) { } // constructor call - ReflectApply(net.Server, this, [options, tlsConnectionListener]); + FunctionPrototypeCall(net.Server, this, options, tlsConnectionListener); if (listener) { this.on('secureConnection', listener); @@ -1579,8 +1580,7 @@ Server.prototype[EE.captureRejectionSymbol] = function( sock.destroy(err); break; default: - ReflectApply(net.Server.prototype[SymbolFor('nodejs.rejection')], this, - [err, event, sock]); + FunctionPrototypeCall(net.Server.prototype[SymbolFor('nodejs.rejection')], this, err, event, sock); } }; diff --git a/lib/internal/vm.js b/lib/internal/vm.js index 42060d7a8c38dc..9f1c33b0ccfe75 100644 --- a/lib/internal/vm.js +++ b/lib/internal/vm.js @@ -1,7 +1,7 @@ 'use strict'; const { - ReflectApply, + FunctionPrototypeCall, Symbol, } = primordials; @@ -216,16 +216,14 @@ function makeContextifyScript(code, * @returns {any} */ function runScriptInThisContext(script, displayErrors, breakOnFirstLine) { - return ReflectApply( + return FunctionPrototypeCall( runInContext, script, - [ - null, // sandbox - use current context - -1, // timeout - displayErrors, // displayErrors - false, // breakOnSigint - breakOnFirstLine, // breakOnFirstLine - ], + null, // sandbox - use current context + -1, // timeout + displayErrors, // displayErrors + false, // breakOnSigint + breakOnFirstLine, // breakOnFirstLine ); } diff --git a/lib/internal/vm/module.js b/lib/internal/vm/module.js index 6403a1f76710f9..ce2350f6cd16cb 100644 --- a/lib/internal/vm/module.js +++ b/lib/internal/vm/module.js @@ -7,6 +7,7 @@ const { ArrayPrototypeIndexOf, ArrayPrototypeMap, ArrayPrototypeSome, + FunctionPrototypeCall, ObjectDefineProperty, ObjectFreeze, ObjectGetPrototypeOf, @@ -15,7 +16,6 @@ const { PromisePrototypeThen, PromiseReject, PromiseResolve, - ReflectApply, SafePromiseAllReturnArrayLike, Symbol, SymbolToStringTag, @@ -522,11 +522,7 @@ class SyntheticModule extends Module { function importModuleDynamicallyWrap(importModuleDynamically) { const importModuleDynamicallyWrapper = async (specifier, referrer, attributes, phase) => { const phaseName = phaseEnumToPhaseName(phase); - const m = await ReflectApply( - importModuleDynamically, - this, - [specifier, referrer, attributes, phaseName], - ); + const m = await FunctionPrototypeCall(importModuleDynamically, this, specifier, referrer, attributes, phaseName); if (isModuleNamespaceObject(m)) { if (phase === kSourcePhase) throw new ERR_VM_MODULE_NOT_MODULE(); return m; diff --git a/lib/repl.js b/lib/repl.js index a8aef00bb1a802..37853f0a12cb88 100644 --- a/lib/repl.js +++ b/lib/repl.js @@ -57,6 +57,7 @@ const { Boolean, Error: MainContextError, FunctionPrototypeBind, + FunctionPrototypeCall, JSONStringify, MathMaxApply, NumberIsNaN, @@ -887,7 +888,7 @@ function REPLServer(prompt, function finish(e, ret) { debug('finish', e, ret); - ReflectApply(_memory, self, [cmd]); + FunctionPrototypeCall(_memory, self, cmd); if (e && !self[kBufferedCommandSymbol] && StringPrototypeStartsWith(StringPrototypeTrim(cmd), 'npm ') && @@ -1277,7 +1278,7 @@ function _memory(cmd) { function _turnOnEditorMode(repl) { repl.editorMode = true; - ReflectApply(Interface.prototype.setPrompt, repl, ['']); + FunctionPrototypeCall(Interface.prototype.setPrompt, repl, ''); } function _turnOffEditorMode(repl) { From fa46e8f65e7a4433b66e4b13b4f4fa520c8801a0 Mon Sep 17 00:00:00 2001 
From: =?UTF-8?q?Ren=C3=A9?=
Date: Wed, 26 Nov 2025 11:09:07 +0000
Subject: [PATCH 010/115] events: repurpose `events.listenerCount()` to accept EventTargets
PR-URL: https://github.com/nodejs/node/pull/60214
Reviewed-By: Matteo Collina
Reviewed-By: Benjamin Gruenbaum
---
 doc/api/deprecations.md | 13 ++--
 doc/api/events.md | 62 +++++++++++++------
 lib/events.js | 44 ++++++-------
 lib/internal/streams/legacy.js | 3 +-
 test/parallel/test-aborted-util.js | 6 +-
 test/parallel/test-child-process-execfile.js | 4 +-
 ...-child-process-fork-timeout-kill-signal.js | 6 +-
 ...child-process-spawn-timeout-kill-signal.js | 6 +-
 test/parallel/test-events-once.js | 10 +--
 test/parallel/test-eventtarget.js | 5 +-
 .../test-http-agent-abort-controller.js | 8 +--
 .../test-http-client-abort-destroy.js | 8 +--
 test/parallel/test-http2-client-destroy.js | 22 +++----
 test/parallel/test-https-abortcontroller.js | 8 +--
 .../test-https-agent-abort-controller.js | 8 +--
 .../test-net-connect-abort-controller.js | 14 ++---
 test/parallel/test-readline-interface.js | 12 ++--
 test/parallel/test-runner-mock-timers.js | 12 ++--
 .../test-timers-immediate-promisified.js | 4 +-
 .../test-timers-interval-promisified.js | 10 +--
 .../test-timers-timeout-promisified.js | 4 +-
 .../test-tls-connect-abort-controller.js | 14 ++---
 22 files changed, 159 insertions(+), 124 deletions(-)

diff --git a/doc/api/deprecations.md b/doc/api/deprecations.md
index 358d835c7aa825..f200e0d807defd 100644
--- a/doc/api/deprecations.md
+++ b/doc/api/deprecations.md
@@ -780,6 +780,9 @@ The [`domain`][] module is deprecated and should not be used.
-Type: Documentation-only
+Type: Revoked
-The [`events.listenerCount(emitter, eventName)`][] API is
-deprecated. Please use [`emitter.listenerCount(eventName)`][] instead.
+The [`events.listenerCount(emitter, eventName)`][] API was deprecated, as it
+provided identical functionality to [`emitter.listenerCount(eventName)`][]. The
+deprecation was revoked because this function has been repurposed to also
+accept {EventTarget} arguments.

### DEP0034: `fs.exists(path, callback)`

@@ -4344,7 +4349,7 @@ import { opendir } from 'node:fs/promises';
[`domain`]: domain.md
[`ecdh.setPublicKey()`]: crypto.md#ecdhsetpublickeypublickey-encoding
[`emitter.listenerCount(eventName)`]: events.md#emitterlistenercounteventname-listener
-[`events.listenerCount(emitter, eventName)`]: events.md#eventslistenercountemitter-eventname
+[`events.listenerCount(emitter, eventName)`]: events.md#eventslistenercountemitterortarget-eventname
[`fs.Dir`]: fs.md#class-fsdir
[`fs.FileHandle`]: fs.md#class-filehandle
[`fs.access()`]: fs.md#fsaccesspath-mode-callback
diff --git a/doc/api/events.md b/doc/api/events.md
index bad5814c373281..2a37b093ac9c94 100644
--- a/doc/api/events.md
+++ b/doc/api/events.md
@@ -1622,39 +1622,66 @@ changes:
See how to write a custom [rejection handler][rejection].

-## `events.listenerCount(emitter, eventName)`
+## `events.listenerCount(emitterOrTarget, eventName)`

-> Stability: 0 - Deprecated: Use [`emitter.listenerCount()`][] instead.
+* `emitterOrTarget` {EventEmitter|EventTarget}
+* `eventName` {string|symbol}
+* Returns: {integer}
+
+Returns the number of registered listeners for the event named `eventName`.

-* `emitter` {EventEmitter} The emitter to query
-* `eventName` {string|symbol} The event name
+For `EventEmitter`s this behaves exactly the same as calling `.listenerCount`
+on the emitter.

-A class method that returns the number of listeners for the given `eventName`
-registered on the given `emitter`.
+For `EventTarget`s this is the only way to obtain the listener count. This can +be useful for debugging and diagnostic purposes. ```mjs import { EventEmitter, listenerCount } from 'node:events'; -const myEmitter = new EventEmitter(); -myEmitter.on('event', () => {}); -myEmitter.on('event', () => {}); -console.log(listenerCount(myEmitter, 'event')); -// Prints: 2 +{ + const ee = new EventEmitter(); + ee.on('event', () => {}); + ee.on('event', () => {}); + console.log(listenerCount(ee, 'event')); // 2 +} +{ + const et = new EventTarget(); + et.addEventListener('event', () => {}); + et.addEventListener('event', () => {}); + console.log(listenerCount(et, 'event')); // 2 +} ``` ```cjs const { EventEmitter, listenerCount } = require('node:events'); -const myEmitter = new EventEmitter(); -myEmitter.on('event', () => {}); -myEmitter.on('event', () => {}); -console.log(listenerCount(myEmitter, 'event')); -// Prints: 2 +{ + const ee = new EventEmitter(); + ee.on('event', () => {}); + ee.on('event', () => {}); + console.log(listenerCount(ee, 'event')); // 2 +} +{ + const et = new EventTarget(); + et.addEventListener('event', () => {}); + et.addEventListener('event', () => {}); + console.log(listenerCount(et, 'event')); // 2 +} ``` ## `events.on(emitter, eventName[, options])` @@ -2646,7 +2673,6 @@ to the `EventTarget`. [`Event` Web API]: https://dom.spec.whatwg.org/#event [`domain`]: domain.md [`e.stopImmediatePropagation()`]: #eventstopimmediatepropagation -[`emitter.listenerCount()`]: #emitterlistenercounteventname-listener [`emitter.removeListener()`]: #emitterremovelistenereventname-listener [`emitter.setMaxListeners(n)`]: #emittersetmaxlistenersn [`event.defaultPrevented`]: #eventdefaultprevented diff --git a/lib/events.js b/lib/events.js index bfd22ce4e6cae1..f134789c8690a1 100644 --- a/lib/events.js +++ b/lib/events.js @@ -33,7 +33,6 @@ const { Error, ErrorCaptureStackTrace, FunctionPrototypeBind, - FunctionPrototypeCall, NumberMAX_SAFE_INTEGER, ObjectDefineProperties, ObjectDefineProperty, @@ -215,6 +214,7 @@ module.exports.once = once; module.exports.on = on; module.exports.getEventListeners = getEventListeners; module.exports.getMaxListeners = getMaxListeners; +module.exports.listenerCount = listenerCount; // Backwards-compat with node 0.10.x EventEmitter.EventEmitter = EventEmitter; @@ -814,31 +814,14 @@ EventEmitter.prototype.rawListeners = function rawListeners(type) { return _listeners(this, type, false); }; -/** - * Returns the number of listeners listening to the event name - * specified as `type`. - * @deprecated since v3.2.0 - * @param {EventEmitter} emitter - * @param {string | symbol} type - * @returns {number} - */ -EventEmitter.listenerCount = function(emitter, type) { - if (typeof emitter.listenerCount === 'function') { - return emitter.listenerCount(type); - } - return FunctionPrototypeCall(listenerCount, emitter, type); -}; - -EventEmitter.prototype.listenerCount = listenerCount; - /** * Returns the number of listeners listening to event name * specified as `type`. 
* @param {string | symbol} type - * @param {Function} listener + * @param {Function} [listener] * @returns {number} */ -function listenerCount(type, listener) { +EventEmitter.prototype.listenerCount = function listenerCount(type, listener) { const events = this._events; if (events !== undefined) { @@ -868,7 +851,7 @@ function listenerCount(type, listener) { } return 0; -} +}; /** * Returns an array listing the events for which @@ -950,6 +933,25 @@ function getMaxListeners(emitterOrTarget) { emitterOrTarget); } +/** + * Returns the number of registered listeners for `type`. + * @param {EventEmitter | EventTarget} emitterOrTarget + * @param {string | symbol} type + * @returns {number} + */ +function listenerCount(emitterOrTarget, type) { + if (typeof emitterOrTarget.listenerCount === 'function') { + return emitterOrTarget.listenerCount(type); + } + const { isEventTarget, kEvents } = require('internal/event_target'); + if (isEventTarget(emitterOrTarget)) { + return emitterOrTarget[kEvents].get(type)?.size ?? 0; + } + throw new ERR_INVALID_ARG_TYPE('emitter', + ['EventEmitter', 'EventTarget'], + emitterOrTarget); +} + /** * Creates a `Promise` that is fulfilled when the emitter * emits the given event. diff --git a/lib/internal/streams/legacy.js b/lib/internal/streams/legacy.js index fdd95e5c25bb39..8e75dfe6182207 100644 --- a/lib/internal/streams/legacy.js +++ b/lib/internal/streams/legacy.js @@ -59,7 +59,8 @@ Stream.prototype.pipe = function(dest, options) { // Don't leave dangling pipes when there are errors. function onerror(er) { cleanup(); - if (EE.listenerCount(this, 'error') === 0) { + // If we removed the last error handler, trigger an unhandled error event. + if (this.listenerCount?.('error') === 0) { this.emit('error', er); } } diff --git a/test/parallel/test-aborted-util.js b/test/parallel/test-aborted-util.js index f352cacb48c652..ff29578507ee7b 100644 --- a/test/parallel/test-aborted-util.js +++ b/test/parallel/test-aborted-util.js @@ -4,7 +4,7 @@ const common = require('../common'); const { aborted } = require('util'); const assert = require('assert'); -const { getEventListeners } = require('events'); +const { listenerCount } = require('events'); const { inspect } = require('util'); const { @@ -17,7 +17,7 @@ test('Aborted works when provided a resource', async () => { ac.abort(); await promise; assert.strictEqual(ac.signal.aborted, true); - assert.strictEqual(getEventListeners(ac.signal, 'abort').length, 0); + assert.strictEqual(listenerCount(ac.signal, 'abort'), 0); }); test('Aborted with gc cleanup', async () => { @@ -31,7 +31,7 @@ test('Aborted with gc cleanup', async () => { globalThis.gc(); ac.abort(); assert.strictEqual(ac.signal.aborted, true); - assert.strictEqual(getEventListeners(ac.signal, 'abort').length, 0); + assert.strictEqual(listenerCount(ac.signal, 'abort'), 0); resolve(); })); diff --git a/test/parallel/test-child-process-execfile.js b/test/parallel/test-child-process-execfile.js index d44bae59d17484..9c1733e2603900 100644 --- a/test/parallel/test-child-process-execfile.js +++ b/test/parallel/test-child-process-execfile.js @@ -3,7 +3,7 @@ const common = require('../common'); const assert = require('assert'); const { execFile, execFileSync } = require('child_process'); -const { getEventListeners } = require('events'); +const { listenerCount } = require('events'); const { getSystemErrorName } = require('util'); const fixtures = require('../common/fixtures'); const os = require('os'); @@ -106,7 +106,7 @@ common.expectWarning( const { signal } = ac; const callback 
= common.mustCall((err) => { - assert.strictEqual(getEventListeners(ac.signal).length, 0); + assert.strictEqual(listenerCount(ac.signal, 'abort'), 0); assert.strictEqual(err, null); }); execFile(process.execPath, [fixture, 0], { signal }, callback); diff --git a/test/parallel/test-child-process-fork-timeout-kill-signal.js b/test/parallel/test-child-process-fork-timeout-kill-signal.js index 430ee1ad566aee..b7271eb74b7e4e 100644 --- a/test/parallel/test-child-process-fork-timeout-kill-signal.js +++ b/test/parallel/test-child-process-fork-timeout-kill-signal.js @@ -4,7 +4,7 @@ const { mustCall } = require('../common'); const assert = require('assert'); const fixtures = require('../common/fixtures'); const { fork } = require('child_process'); -const { getEventListeners } = require('events'); +const { listenerCount } = require('events'); { // Verify default signal @@ -43,8 +43,8 @@ const { getEventListeners } = require('events'); timeout: 6, signal, }); - assert.strictEqual(getEventListeners(signal, 'abort').length, 1); + assert.strictEqual(listenerCount(signal, 'abort'), 1); cp.on('exit', mustCall(() => { - assert.strictEqual(getEventListeners(signal, 'abort').length, 0); + assert.strictEqual(listenerCount(signal, 'abort'), 0); })); } diff --git a/test/parallel/test-child-process-spawn-timeout-kill-signal.js b/test/parallel/test-child-process-spawn-timeout-kill-signal.js index 05229e13dcb102..fa74a90b5545a5 100644 --- a/test/parallel/test-child-process-spawn-timeout-kill-signal.js +++ b/test/parallel/test-child-process-spawn-timeout-kill-signal.js @@ -4,7 +4,7 @@ const { mustCall } = require('../common'); const assert = require('assert'); const fixtures = require('../common/fixtures'); const { spawn } = require('child_process'); -const { getEventListeners } = require('events'); +const { listenerCount } = require('events'); const aliveForeverFile = 'child-process-stay-alive-forever.js'; { @@ -43,8 +43,8 @@ const aliveForeverFile = 'child-process-stay-alive-forever.js'; timeout: 6, signal, }); - assert.strictEqual(getEventListeners(signal, 'abort').length, 1); + assert.strictEqual(listenerCount(signal, 'abort'), 1); cp.on('exit', mustCall(() => { - assert.strictEqual(getEventListeners(signal, 'abort').length, 0); + assert.strictEqual(listenerCount(signal, 'abort'), 0); })); } diff --git a/test/parallel/test-events-once.js b/test/parallel/test-events-once.js index 5c5cc71d6634b7..268e60416b3fb4 100644 --- a/test/parallel/test-events-once.js +++ b/test/parallel/test-events-once.js @@ -2,7 +2,7 @@ // Flags: --no-warnings const common = require('../common'); -const { once, EventEmitter, getEventListeners } = require('events'); +const { once, EventEmitter, listenerCount } = require('events'); const assert = require('assert'); async function onceAnEvent() { @@ -72,7 +72,7 @@ async function catchesErrorsWithAbortSignal() { try { const promise = once(ee, 'myevent', { signal }); assert.strictEqual(ee.listenerCount('error'), 1); - assert.strictEqual(getEventListeners(signal, 'abort').length, 1); + assert.strictEqual(listenerCount(signal, 'abort'), 1); await promise; } catch (e) { @@ -81,7 +81,7 @@ async function catchesErrorsWithAbortSignal() { assert.strictEqual(err, expected); assert.strictEqual(ee.listenerCount('error'), 0); assert.strictEqual(ee.listenerCount('myevent'), 0); - assert.strictEqual(getEventListeners(signal, 'abort').length, 0); + assert.strictEqual(listenerCount(signal, 'abort'), 0); } async function stopListeningAfterCatchingError() { @@ -192,9 +192,9 @@ async function 
abortSignalAfterEvent() { ac.abort(); }); const promise = once(ee, 'foo', { signal: ac.signal }); - assert.strictEqual(getEventListeners(ac.signal, 'abort').length, 1); + assert.strictEqual(listenerCount(ac.signal, 'abort'), 1); await promise; - assert.strictEqual(getEventListeners(ac.signal, 'abort').length, 0); + assert.strictEqual(listenerCount(ac.signal, 'abort'), 0); } async function abortSignalRemoveListener() { diff --git a/test/parallel/test-eventtarget.js b/test/parallel/test-eventtarget.js index c0841d0d2ba5dd..d0b3ad03a1df6a 100644 --- a/test/parallel/test-eventtarget.js +++ b/test/parallel/test-eventtarget.js @@ -9,7 +9,7 @@ const { const assert = require('assert'); -const { once } = require('events'); +const { listenerCount, once } = require('events'); const { inspect } = require('util'); const { setTimeout: delay } = require('timers/promises'); @@ -141,10 +141,13 @@ let asyncTest = Promise.resolve(); eventTarget.addEventListener('foo', ev1); eventTarget.addEventListener('foo', ev2, { once: true }); + assert.strictEqual(listenerCount(eventTarget, 'foo'), 2); assert.ok(eventTarget.dispatchEvent(new Event('foo'))); + assert.strictEqual(listenerCount(eventTarget, 'foo'), 1); eventTarget.dispatchEvent(new Event('foo')); eventTarget.removeEventListener('foo', ev1); + assert.strictEqual(listenerCount(eventTarget, 'foo'), 0); eventTarget.dispatchEvent(new Event('foo')); } { diff --git a/test/parallel/test-http-agent-abort-controller.js b/test/parallel/test-http-agent-abort-controller.js index c5ece3ab353bf0..2bfa9cf7082de5 100644 --- a/test/parallel/test-http-agent-abort-controller.js +++ b/test/parallel/test-http-agent-abort-controller.js @@ -3,7 +3,7 @@ const common = require('../common'); const assert = require('assert'); const http = require('http'); const Agent = http.Agent; -const { getEventListeners, once } = require('events'); +const { listenerCount, once } = require('events'); const agent = new Agent(); const server = http.createServer(); @@ -20,7 +20,7 @@ server.listen(0, common.mustCall(async () => { const ac = new AbortController(); const { signal } = ac; const connection = agent.createConnection({ ...options, signal }); - assert.strictEqual(getEventListeners(signal, 'abort').length, 1); + assert.strictEqual(listenerCount(signal, 'abort'), 1); ac.abort(); const [err] = await once(connection, 'error'); assert.strictEqual(err?.name, 'AbortError'); @@ -44,7 +44,7 @@ server.listen(0, common.mustCall(async () => { agent: agent, signal, }); - assert.strictEqual(getEventListeners(signal, 'abort').length, 1); + assert.strictEqual(listenerCount(signal, 'abort'), 1); ac.abort(); const [err] = await once(request, 'error'); assert.strictEqual(err?.name, 'AbortError'); @@ -60,7 +60,7 @@ server.listen(0, common.mustCall(async () => { agent: agent, signal, }); - assert.strictEqual(getEventListeners(signal, 'abort').length, 0); + assert.strictEqual(listenerCount(signal, 'abort'), 0); const [err] = await once(request, 'error'); assert.strictEqual(err?.name, 'AbortError'); } diff --git a/test/parallel/test-http-client-abort-destroy.js b/test/parallel/test-http-client-abort-destroy.js index 922ae907944227..289c5346c1024c 100644 --- a/test/parallel/test-http-client-abort-destroy.js +++ b/test/parallel/test-http-client-abort-destroy.js @@ -2,7 +2,7 @@ const common = require('../common'); const http = require('http'); const assert = require('assert'); -const { getEventListeners } = require('events'); +const { listenerCount } = require('events'); { // abort @@ -85,7 +85,7 @@ const { 
getEventListeners } = require('events'); assert.strictEqual(err.name, 'AbortError'); server.close(); })); - assert.strictEqual(getEventListeners(signal, 'abort').length, 1); + assert.strictEqual(listenerCount(signal, 'abort'), 1); assert.strictEqual(req.aborted, false); assert.strictEqual(req.destroyed, false); controller.abort(); @@ -113,7 +113,7 @@ const { getEventListeners } = require('events'); server.close(); })); - assert.strictEqual(getEventListeners(signal, 'abort').length, 1); + assert.strictEqual(listenerCount(signal, 'abort'), 1); process.nextTick(() => controller.abort()); })); } @@ -127,7 +127,7 @@ const { getEventListeners } = require('events'); controller.abort(); const options = { port: server.address().port, signal }; const req = http.get(options, common.mustNotCall()); - assert.strictEqual(getEventListeners(signal, 'abort').length, 0); + assert.strictEqual(listenerCount(signal, 'abort'), 0); req.on('error', common.mustCall((err) => { assert.strictEqual(err.code, 'ABORT_ERR'); assert.strictEqual(err.name, 'AbortError'); diff --git a/test/parallel/test-http2-client-destroy.js b/test/parallel/test-http2-client-destroy.js index d84ccaa2945653..ff98c23e864f74 100644 --- a/test/parallel/test-http2-client-destroy.js +++ b/test/parallel/test-http2-client-destroy.js @@ -9,7 +9,7 @@ const assert = require('assert'); const h2 = require('http2'); const { kSocket } = require('internal/http2/util'); const Countdown = require('../common/countdown'); -const { getEventListeners } = require('events'); +const { listenerCount } = require('events'); { const server = h2.createServer(); server.listen(0, common.mustCall(() => { @@ -179,11 +179,11 @@ const { getEventListeners } = require('events'); client.on('close', common.mustCall()); const { signal } = controller; - assert.strictEqual(getEventListeners(signal, 'abort').length, 0); + assert.strictEqual(listenerCount(signal, 'abort'), 0); client.on('error', common.mustCall(() => { // After underlying stream dies, signal listener detached - assert.strictEqual(getEventListeners(signal, 'abort').length, 0); + assert.strictEqual(listenerCount(signal, 'abort'), 0); })); const req = client.request({}, { signal }); @@ -197,7 +197,7 @@ const { getEventListeners } = require('events'); assert.strictEqual(req.aborted, false); assert.strictEqual(req.destroyed, false); // Signal listener attached - assert.strictEqual(getEventListeners(signal, 'abort').length, 1); + assert.strictEqual(listenerCount(signal, 'abort'), 1); controller.abort(); @@ -218,16 +218,16 @@ const { getEventListeners } = require('events'); const { signal } = controller; controller.abort(); - assert.strictEqual(getEventListeners(signal, 'abort').length, 0); + assert.strictEqual(listenerCount(signal, 'abort'), 0); client.on('error', common.mustCall(() => { // After underlying stream dies, signal listener detached - assert.strictEqual(getEventListeners(signal, 'abort').length, 0); + assert.strictEqual(listenerCount(signal, 'abort'), 0); })); const req = client.request({}, { signal }); // Signal already aborted, so no event listener attached. 
- assert.strictEqual(getEventListeners(signal, 'abort').length, 0); + assert.strictEqual(listenerCount(signal, 'abort'), 0); assert.strictEqual(req.aborted, false); // Destroyed on same tick as request made @@ -256,7 +256,7 @@ const { getEventListeners } = require('events'); signal, }); client.on('close', common.mustCall()); - assert.strictEqual(getEventListeners(signal, 'abort').length, 1); + assert.strictEqual(listenerCount(signal, 'abort'), 1); client.on('error', common.mustCall(common.mustCall((err) => { assert.strictEqual(err.code, 'ABORT_ERR'); @@ -264,7 +264,7 @@ const { getEventListeners } = require('events'); }))); const req = client.request({}, {}); - assert.strictEqual(getEventListeners(signal, 'abort').length, 1); + assert.strictEqual(listenerCount(signal, 'abort'), 1); req.on('error', common.mustCall((err) => { assert.strictEqual(err.code, 'ERR_HTTP2_STREAM_CANCEL'); @@ -277,7 +277,7 @@ const { getEventListeners } = require('events'); assert.strictEqual(req.aborted, false); assert.strictEqual(req.destroyed, false); // Signal listener attached - assert.strictEqual(getEventListeners(signal, 'abort').length, 1); + assert.strictEqual(listenerCount(signal, 'abort'), 1); controller.abort(); })); @@ -305,7 +305,7 @@ const { getEventListeners } = require('events'); const { signal } = controller; const req = client.request({}, { signal }); - assert.strictEqual(getEventListeners(signal, 'abort').length, 1); + assert.strictEqual(listenerCount(signal, 'abort'), 1); req.on('error', common.mustCall((err) => { assert.strictEqual(err.code, 'ABORT_ERR'); assert.strictEqual(err.name, 'AbortError'); diff --git a/test/parallel/test-https-abortcontroller.js b/test/parallel/test-https-abortcontroller.js index 420bf9217e8f5b..d69ae82c2fafdd 100644 --- a/test/parallel/test-https-abortcontroller.js +++ b/test/parallel/test-https-abortcontroller.js @@ -7,7 +7,7 @@ if (!common.hasCrypto) const fixtures = require('../common/fixtures'); const https = require('https'); const assert = require('assert'); -const { once, getEventListeners } = require('events'); +const { once, listenerCount } = require('events'); const options = { key: fixtures.readKey('agent1-key.pem'), @@ -30,7 +30,7 @@ const options = { rejectUnauthorized: false, signal: ac.signal, }); - assert.strictEqual(getEventListeners(ac.signal, 'abort').length, 1); + assert.strictEqual(listenerCount(ac.signal, 'abort'), 1); process.nextTick(() => ac.abort()); const [ err ] = await once(req, 'error'); assert.strictEqual(err.name, 'AbortError'); @@ -57,7 +57,7 @@ const options = { rejectUnauthorized: false, signal, }); - assert.strictEqual(getEventListeners(ac.signal, 'abort').length, 1); + assert.strictEqual(listenerCount(ac.signal, 'abort'), 1); ac.abort(); const [ err ] = await once(req, 'error'); assert.strictEqual(err.name, 'AbortError'); @@ -85,7 +85,7 @@ const options = { rejectUnauthorized: false, signal, }); - assert.strictEqual(getEventListeners(ac.signal, 'abort').length, 0); + assert.strictEqual(listenerCount(ac.signal, 'abort'), 0); const [ err ] = await once(req, 'error'); assert.strictEqual(err.name, 'AbortError'); assert.strictEqual(err.code, 'ABORT_ERR'); diff --git a/test/parallel/test-https-agent-abort-controller.js b/test/parallel/test-https-agent-abort-controller.js index 14331e7036d66d..9373cbd6e58e99 100644 --- a/test/parallel/test-https-agent-abort-controller.js +++ b/test/parallel/test-https-agent-abort-controller.js @@ -9,7 +9,7 @@ const { once } = require('events'); const Agent = https.Agent; const fixtures = 
require('../common/fixtures'); -const { getEventListeners } = require('events'); +const { listenerCount } = require('events'); const agent = new Agent(); const options = { @@ -33,7 +33,7 @@ server.listen(0, common.mustCall(async () => { const ac = new AbortController(); const { signal } = ac; const connection = agent.createConnection({ ...options, signal }); - assert.strictEqual(getEventListeners(signal, 'abort').length, 1); + assert.strictEqual(listenerCount(signal, 'abort'), 1); ac.abort(); const [err] = await once(connection, 'error'); assert.strictEqual(err.name, 'AbortError'); @@ -58,7 +58,7 @@ server.listen(0, common.mustCall(async () => { agent: agent, signal, }); - assert.strictEqual(getEventListeners(signal, 'abort').length, 1); + assert.strictEqual(listenerCount(signal, 'abort'), 1); ac.abort(); const [err] = await once(request, 'error'); assert.strictEqual(err.name, 'AbortError'); @@ -74,7 +74,7 @@ server.listen(0, common.mustCall(async () => { agent: agent, signal, }); - assert.strictEqual(getEventListeners(signal, 'abort').length, 0); + assert.strictEqual(listenerCount(signal, 'abort'), 0); const [err] = await once(request, 'error'); assert.strictEqual(err.name, 'AbortError'); } diff --git a/test/parallel/test-net-connect-abort-controller.js b/test/parallel/test-net-connect-abort-controller.js index a073b75db19e1d..3d2b1961504471 100644 --- a/test/parallel/test-net-connect-abort-controller.js +++ b/test/parallel/test-net-connect-abort-controller.js @@ -3,7 +3,7 @@ const common = require('../common'); const net = require('net'); const assert = require('assert'); const server = net.createServer(); -const { getEventListeners, once } = require('events'); +const { listenerCount, once } = require('events'); const liveConnections = new Set(); @@ -31,7 +31,7 @@ server.listen(0, common.mustCall(async () => { const ac = new AbortController(); const { signal } = ac; const socket = net.connect(socketOptions(signal)); - assert.strictEqual(getEventListeners(signal, 'abort').length, 1); + assert.strictEqual(listenerCount(signal, 'abort'), 1); ac.abort(); await assertAbort(socket, 'postAbort'); } @@ -41,7 +41,7 @@ server.listen(0, common.mustCall(async () => { const { signal } = ac; ac.abort(); const socket = net.connect(socketOptions(signal)); - assert.strictEqual(getEventListeners(signal, 'abort').length, 0); + assert.strictEqual(listenerCount(signal, 'abort'), 0); await assertAbort(socket, 'preAbort'); } @@ -50,7 +50,7 @@ server.listen(0, common.mustCall(async () => { const { signal } = ac; setImmediate(() => ac.abort()); const socket = net.connect(socketOptions(signal)); - assert.strictEqual(getEventListeners(signal, 'abort').length, 1); + assert.strictEqual(listenerCount(signal, 'abort'), 1); await assertAbort(socket, 'tickAbort'); } @@ -59,7 +59,7 @@ server.listen(0, common.mustCall(async () => { const { signal } = ac; ac.abort(); const socket = new net.Socket(socketOptions(signal)); - assert.strictEqual(getEventListeners(signal, 'abort').length, 0); + assert.strictEqual(listenerCount(signal, 'abort'), 0); await assertAbort(socket, 'testConstructor'); } @@ -67,7 +67,7 @@ server.listen(0, common.mustCall(async () => { const ac = new AbortController(); const { signal } = ac; const socket = new net.Socket(socketOptions(signal)); - assert.strictEqual(getEventListeners(signal, 'abort').length, 1); + assert.strictEqual(listenerCount(signal, 'abort'), 1); ac.abort(); await assertAbort(socket, 'testConstructorPost'); } @@ -76,7 +76,7 @@ server.listen(0, common.mustCall(async () => { const ac = 
new AbortController(); const { signal } = ac; const socket = new net.Socket(socketOptions(signal)); - assert.strictEqual(getEventListeners(signal, 'abort').length, 1); + assert.strictEqual(listenerCount(signal, 'abort'), 1); setImmediate(() => ac.abort()); await assertAbort(socket, 'testConstructorPostTick'); } diff --git a/test/parallel/test-readline-interface.js b/test/parallel/test-readline-interface.js index c640654a7c742d..5a7b87cd75a804 100644 --- a/test/parallel/test-readline-interface.js +++ b/test/parallel/test-readline-interface.js @@ -34,7 +34,7 @@ const { getStringWidth, stripVTControlCharacters } = require('internal/util/inspect'); -const { EventEmitter, getEventListeners } = require('events'); +const { EventEmitter, listenerCount } = require('events'); const { Writable, Readable } = require('stream'); class FakeInput extends EventEmitter { @@ -1438,7 +1438,7 @@ for (let i = 0; i < 12; i++) { signal, }); rl.on('close', common.mustCall()); - assert.strictEqual(getEventListeners(signal, 'abort').length, 0); + assert.strictEqual(listenerCount(signal, 'abort'), 0); } { @@ -1450,10 +1450,10 @@ for (let i = 0; i < 12; i++) { output: fi, signal, }); - assert.strictEqual(getEventListeners(signal, 'abort').length, 1); + assert.strictEqual(listenerCount(signal, 'abort'), 1); rl.on('close', common.mustCall()); ac.abort(); - assert.strictEqual(getEventListeners(signal, 'abort').length, 0); + assert.strictEqual(listenerCount(signal, 'abort'), 0); } { @@ -1465,9 +1465,9 @@ for (let i = 0; i < 12; i++) { output: fi, signal, }); - assert.strictEqual(getEventListeners(signal, 'abort').length, 1); + assert.strictEqual(listenerCount(signal, 'abort'), 1); rl.close(); - assert.strictEqual(getEventListeners(signal, 'abort').length, 0); + assert.strictEqual(listenerCount(signal, 'abort'), 0); } { diff --git a/test/parallel/test-runner-mock-timers.js b/test/parallel/test-runner-mock-timers.js index f99936f449bc77..fdf6d6416a6d44 100644 --- a/test/parallel/test-runner-mock-timers.js +++ b/test/parallel/test-runner-mock-timers.js @@ -4,7 +4,7 @@ process.env.NODE_TEST_KNOWN_GLOBALS = 0; const common = require('../common'); const assert = require('node:assert'); -const { getEventListeners } = require('node:events'); +const { listenerCount } = require('node:events'); const { it, mock, describe } = require('node:test'); const nodeTimers = require('node:timers'); const nodeTimersPromises = require('node:timers/promises'); @@ -444,8 +444,6 @@ describe('Mock Timers Test Suite', () => { }); describe('timers/promises', () => { - const hasAbortListener = (signal) => !!getEventListeners(signal, 'abort').length; - describe('setTimeout Suite', () => { it('should advance in time and trigger timers when calling the .tick function multiple times', async (t) => { t.mock.timers.enable({ apis: ['setTimeout'] }); @@ -548,11 +546,11 @@ describe('Mock Timers Test Suite', () => { signal: controller.signal, }); - assert(hasAbortListener(controller.signal)); + assert.strictEqual(listenerCount(controller.signal, 'abort'), 1); t.mock.timers.tick(500); await p; - assert(!hasAbortListener(controller.signal)); + assert.strictEqual(listenerCount(controller.signal, 'abort'), 0); }); it('should reject given an an invalid signal instance', async (t) => { @@ -780,9 +778,9 @@ describe('Mock Timers Test Suite', () => { t.mock.timers.tick(); await first; - assert(hasAbortListener(abortController.signal)); + assert.strictEqual(listenerCount(abortController.signal, 'abort'), 1); await intervalIterator.return(); - 
assert(!hasAbortListener(abortController.signal)); + assert.strictEqual(listenerCount(abortController.signal, 'abort'), 0); }); it('should abort operation given an abort controller signal on a real use case', async (t) => { diff --git a/test/parallel/test-timers-immediate-promisified.js b/test/parallel/test-timers-immediate-promisified.js index 586fd501273ae3..9d937468aec21e 100644 --- a/test/parallel/test-timers-immediate-promisified.js +++ b/test/parallel/test-timers-immediate-promisified.js @@ -5,7 +5,7 @@ const assert = require('assert'); const timers = require('timers'); const { promisify } = require('util'); -const { getEventListeners } = require('events'); +const { listenerCount } = require('events'); const { NodeEventTarget } = require('internal/event_target'); const timerPromises = require('timers/promises'); @@ -58,7 +58,7 @@ process.on('multipleResolves', common.mustNotCall()); const signal = new NodeEventTarget(); signal.aborted = false; setPromiseImmediate(0, { signal }).finally(common.mustCall(() => { - assert.strictEqual(getEventListeners(signal, 'abort').length, 0); + assert.strictEqual(listenerCount(signal, 'abort'), 0); })); } diff --git a/test/parallel/test-timers-interval-promisified.js b/test/parallel/test-timers-interval-promisified.js index e9434625e38026..88a734b25e044d 100644 --- a/test/parallel/test-timers-interval-promisified.js +++ b/test/parallel/test-timers-interval-promisified.js @@ -5,7 +5,7 @@ const assert = require('assert'); const timers = require('timers'); const { promisify } = require('util'); -const { getEventListeners } = require('events'); +const { listenerCount } = require('events'); const { NodeEventTarget } = require('internal/event_target'); const timerPromises = require('timers/promises'); @@ -120,10 +120,10 @@ process.on('multipleResolves', common.mustNotCall()); signal.aborted = false; const iterator = setInterval(1, undefined, { signal }); iterator.next().then(common.mustCall(() => { - assert.strictEqual(getEventListeners(signal, 'abort').length, 1); + assert.strictEqual(listenerCount(signal, 'abort'), 1); iterator.return(); })).finally(common.mustCall(() => { - assert.strictEqual(getEventListeners(signal, 'abort').length, 0); + assert.strictEqual(listenerCount(signal, 'abort'), 0); })); } @@ -137,7 +137,7 @@ process.on('multipleResolves', common.mustNotCall()); // eslint-disable-next-line no-unused-vars for await (const _ of iterator) { if (i === 0) { - assert.strictEqual(getEventListeners(signal, 'abort').length, 1); + assert.strictEqual(listenerCount(signal, 'abort'), 1); } i++; if (i === 2) { @@ -145,7 +145,7 @@ process.on('multipleResolves', common.mustNotCall()); } } assert.strictEqual(i, 2); - assert.strictEqual(getEventListeners(signal, 'abort').length, 0); + assert.strictEqual(listenerCount(signal, 'abort'), 0); } tryBreak().then(common.mustCall()); diff --git a/test/parallel/test-timers-timeout-promisified.js b/test/parallel/test-timers-timeout-promisified.js index a89ccad7305f89..95c0d76ff0e0a5 100644 --- a/test/parallel/test-timers-timeout-promisified.js +++ b/test/parallel/test-timers-timeout-promisified.js @@ -5,7 +5,7 @@ const assert = require('assert'); const timers = require('timers'); const { promisify } = require('util'); -const { getEventListeners } = require('events'); +const { listenerCount } = require('events'); const { NodeEventTarget } = require('internal/event_target'); const timerPromises = require('timers/promises'); @@ -58,7 +58,7 @@ process.on('multipleResolves', common.mustNotCall()); const signal = new 
NodeEventTarget(); signal.aborted = false; setPromiseTimeout(0, null, { signal }).finally(common.mustCall(() => { - assert.strictEqual(getEventListeners(signal, 'abort').length, 0); + assert.strictEqual(listenerCount(signal, 'abort'), 0); })); } diff --git a/test/parallel/test-tls-connect-abort-controller.js b/test/parallel/test-tls-connect-abort-controller.js index bc0321000e7bc3..6ee0f2bb688b67 100644 --- a/test/parallel/test-tls-connect-abort-controller.js +++ b/test/parallel/test-tls-connect-abort-controller.js @@ -6,7 +6,7 @@ if (!common.hasCrypto) const tls = require('tls'); const assert = require('assert'); const fixtures = require('../common/fixtures'); -const { getEventListeners, once } = require('events'); +const { listenerCount, once } = require('events'); const serverOptions = { key: fixtures.readKey('agent1-key.pem'), @@ -33,7 +33,7 @@ server.listen(0, common.mustCall(async () => { const ac = new AbortController(); const { signal } = ac; const socket = tls.connect(connectOptions(signal)); - assert.strictEqual(getEventListeners(signal, 'abort').length, 1); + assert.strictEqual(listenerCount(signal, 'abort'), 1); ac.abort(); await assertAbort(socket, 'postAbort'); } @@ -43,7 +43,7 @@ server.listen(0, common.mustCall(async () => { const { signal } = ac; ac.abort(); const socket = tls.connect(connectOptions(signal)); - assert.strictEqual(getEventListeners(signal, 'abort').length, 0); + assert.strictEqual(listenerCount(signal, 'abort'), 0); await assertAbort(socket, 'preAbort'); } @@ -52,7 +52,7 @@ server.listen(0, common.mustCall(async () => { const { signal } = ac; const socket = tls.connect(connectOptions(signal)); setImmediate(() => ac.abort()); - assert.strictEqual(getEventListeners(signal, 'abort').length, 1); + assert.strictEqual(listenerCount(signal, 'abort'), 1); await assertAbort(socket, 'tickAbort'); } @@ -61,7 +61,7 @@ server.listen(0, common.mustCall(async () => { const { signal } = ac; ac.abort(); const socket = new tls.TLSSocket(undefined, connectOptions(signal)); - assert.strictEqual(getEventListeners(signal, 'abort').length, 0); + assert.strictEqual(listenerCount(signal, 'abort'), 0); await assertAbort(socket, 'testConstructor'); } @@ -69,7 +69,7 @@ server.listen(0, common.mustCall(async () => { const ac = new AbortController(); const { signal } = ac; const socket = new tls.TLSSocket(undefined, connectOptions(signal)); - assert.strictEqual(getEventListeners(signal, 'abort').length, 1); + assert.strictEqual(listenerCount(signal, 'abort'), 1); ac.abort(); await assertAbort(socket, 'testConstructorPost'); } @@ -79,7 +79,7 @@ server.listen(0, common.mustCall(async () => { const { signal } = ac; const socket = new tls.TLSSocket(undefined, connectOptions(signal)); setImmediate(() => ac.abort()); - assert.strictEqual(getEventListeners(signal, 'abort').length, 1); + assert.strictEqual(listenerCount(signal, 'abort'), 1); await assertAbort(socket, 'testConstructorPostTick'); } From fa8b635b675aeca8f1eed99a1c89e25992f7f3b6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 3 Dec 2025 20:38:44 +0000 Subject: [PATCH 011/115] meta: bump actions/checkout from 5.0.1 to 6.0.0 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/checkout](https://github.com/actions/checkout) from 5.0.1 to 6.0.0. 
- [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) --- updated-dependencies: - dependency-name: actions/checkout dependency-version: 6.0.0 dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] PR-URL: https://github.com/nodejs/node/pull/60925 Reviewed-By: Rafael Gonzaga Reviewed-By: Antoine du Hamel Reviewed-By: Luigi Pinca Reviewed-By: Ulises Gascón Reviewed-By: Colin Ihrig --- .github/workflows/auto-start-ci.yml | 2 +- .github/workflows/build-tarball.yml | 4 ++-- .github/workflows/codeql.yml | 2 +- .github/workflows/commit-lint.yml | 2 +- .github/workflows/commit-queue.yml | 2 +- .../workflows/coverage-linux-without-intl.yml | 2 +- .github/workflows/coverage-linux.yml | 2 +- .github/workflows/coverage-windows.yml | 2 +- .github/workflows/create-release-proposal.yml | 2 +- .github/workflows/daily-wpt-fyi.yml | 4 ++-- .github/workflows/daily.yml | 2 +- .github/workflows/doc.yml | 2 +- .../workflows/find-inactive-collaborators.yml | 2 +- .github/workflows/find-inactive-tsc.yml | 4 ++-- .github/workflows/license-builder.yml | 2 +- .github/workflows/lint-release-proposal.yml | 2 +- .github/workflows/linters.yml | 20 +++++++++---------- .github/workflows/notify-on-push.yml | 2 +- .github/workflows/scorecard.yml | 2 +- .github/workflows/test-internet.yml | 2 +- .github/workflows/test-linux.yml | 2 +- .github/workflows/test-macos.yml | 2 +- .github/workflows/timezone-update.yml | 4 ++-- .github/workflows/tools.yml | 2 +- .github/workflows/update-openssl.yml | 2 +- .github/workflows/update-v8.yml | 2 +- .github/workflows/update-wpt.yml | 2 +- 27 files changed, 40 insertions(+), 40 deletions(-) diff --git a/.github/workflows/auto-start-ci.yml b/.github/workflows/auto-start-ci.yml index e86400db95f0fe..f842cb069bc777 100644 --- a/.github/workflows/auto-start-ci.yml +++ b/.github/workflows/auto-start-ci.yml @@ -45,7 +45,7 @@ jobs: if: needs.get-prs-for-ci.outputs.numbers != '' runs-on: ubuntu-latest steps: - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: persist-credentials: false diff --git a/.github/workflows/build-tarball.yml b/.github/workflows/build-tarball.yml index 5679a96ac731cc..ec8a279834b47b 100644 --- a/.github/workflows/build-tarball.yml +++ b/.github/workflows/build-tarball.yml @@ -41,7 +41,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} @@ -70,7 +70,7 @@ jobs: CXX: sccache clang++ SCCACHE_GHA_ENABLED: 'true' steps: - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: persist-credentials: false sparse-checkout: .github/actions/install-clang diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 5cd00466a79510..3b636e7cf84a67 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -23,7 +23,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 # Initializes the CodeQL tools for 
scanning. - name: Initialize CodeQL diff --git a/.github/workflows/commit-lint.yml b/.github/workflows/commit-lint.yml index 0d594875f5c4c9..da5fef0526a6cc 100644 --- a/.github/workflows/commit-lint.yml +++ b/.github/workflows/commit-lint.yml @@ -17,7 +17,7 @@ jobs: run: | echo "plusOne=$((${{ github.event.pull_request.commits }} + 1))" >> $GITHUB_OUTPUT echo "minusOne=$((${{ github.event.pull_request.commits }} - 1))" >> $GITHUB_OUTPUT - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: fetch-depth: ${{ steps.nb-of-commits.outputs.plusOne }} persist-credentials: false diff --git a/.github/workflows/commit-queue.yml b/.github/workflows/commit-queue.yml index bf6f202aab874a..b619be09cb01e6 100644 --- a/.github/workflows/commit-queue.yml +++ b/.github/workflows/commit-queue.yml @@ -59,7 +59,7 @@ jobs: if: needs.get_mergeable_prs.outputs.numbers != '' runs-on: ubuntu-latest steps: - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: # A personal token is required because pushing with GITHUB_TOKEN will # prevent commits from running CI after they land. It needs diff --git a/.github/workflows/coverage-linux-without-intl.yml b/.github/workflows/coverage-linux-without-intl.yml index 905afce6ace277..9b235490bd13e1 100644 --- a/.github/workflows/coverage-linux-without-intl.yml +++ b/.github/workflows/coverage-linux-without-intl.yml @@ -48,7 +48,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} diff --git a/.github/workflows/coverage-linux.yml b/.github/workflows/coverage-linux.yml index 788ac898492e85..ea4408e36241e0 100644 --- a/.github/workflows/coverage-linux.yml +++ b/.github/workflows/coverage-linux.yml @@ -48,7 +48,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} diff --git a/.github/workflows/coverage-windows.yml b/.github/workflows/coverage-windows.yml index d42d98f99fd390..b2ac670e5ae877 100644 --- a/.github/workflows/coverage-windows.yml +++ b/.github/workflows/coverage-windows.yml @@ -45,7 +45,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: windows-2025 steps: - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} diff --git a/.github/workflows/create-release-proposal.yml b/.github/workflows/create-release-proposal.yml index 8b71c1483c57a2..fdcf4bddd0d449 100644 --- a/.github/workflows/create-release-proposal.yml +++ b/.github/workflows/create-release-proposal.yml @@ -33,7 +33,7 @@ jobs: RELEASE_LINE: ${{ inputs.release-line }} runs-on: ubuntu-latest steps: - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: ref: ${{ env.STAGING_BRANCH }} 
persist-credentials: false diff --git a/.github/workflows/daily-wpt-fyi.yml b/.github/workflows/daily-wpt-fyi.yml index 66ee24b060c7e2..189f06c401106c 100644 --- a/.github/workflows/daily-wpt-fyi.yml +++ b/.github/workflows/daily-wpt-fyi.yml @@ -63,7 +63,7 @@ jobs: SHORT_SHA=$(node -p 'process.version.split(/-nightly\d{8}/)[1]') echo "NIGHTLY_REF=$(gh api /repos/nodejs/node/commits/$SHORT_SHA --jq '.sha')" >> $GITHUB_ENV - name: Checkout ${{ steps.setup-node.outputs.node-version }} - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: persist-credentials: false ref: ${{ env.NIGHTLY_REF || steps.setup-node.outputs.node-version }} @@ -79,7 +79,7 @@ jobs: run: rm -rf wpt working-directory: test/fixtures - name: Checkout epochs/daily WPT - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: repository: web-platform-tests/wpt persist-credentials: false diff --git a/.github/workflows/daily.yml b/.github/workflows/daily.yml index 2e2d64f7ebc20d..3658c829504b2c 100644 --- a/.github/workflows/daily.yml +++ b/.github/workflows/daily.yml @@ -15,7 +15,7 @@ jobs: build-lto: runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: persist-credentials: false - name: Use Node.js ${{ env.NODE_VERSION }} diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml index 2aaa16db7baa3a..9aaed22eb8c434 100644 --- a/.github/workflows/doc.yml +++ b/.github/workflows/doc.yml @@ -24,7 +24,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: persist-credentials: false - name: Use Node.js ${{ env.NODE_VERSION }} diff --git a/.github/workflows/find-inactive-collaborators.yml b/.github/workflows/find-inactive-collaborators.yml index 49f6894977396a..516de960c2aa8a 100644 --- a/.github/workflows/find-inactive-collaborators.yml +++ b/.github/workflows/find-inactive-collaborators.yml @@ -19,7 +19,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: fetch-depth: 0 persist-credentials: false diff --git a/.github/workflows/find-inactive-tsc.yml b/.github/workflows/find-inactive-tsc.yml index bc666acf832373..b8af6c14114a33 100644 --- a/.github/workflows/find-inactive-tsc.yml +++ b/.github/workflows/find-inactive-tsc.yml @@ -20,13 +20,13 @@ jobs: steps: - name: Checkout the repo - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: fetch-depth: 0 persist-credentials: false - name: Clone nodejs/TSC repository - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: fetch-depth: 0 path: .tmp diff --git a/.github/workflows/license-builder.yml b/.github/workflows/license-builder.yml index e5c594377829ad..6abd7865ea12ba 100644 --- a/.github/workflows/license-builder.yml +++ b/.github/workflows/license-builder.yml @@ -17,7 +17,7 @@ jobs: if: github.repository == 
'nodejs/node' runs-on: ubuntu-latest steps: - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: persist-credentials: false - run: ./tools/license-builder.sh # Run the license builder tool diff --git a/.github/workflows/lint-release-proposal.yml b/.github/workflows/lint-release-proposal.yml index 12ae68a75d2b8c..f9959970a968db 100644 --- a/.github/workflows/lint-release-proposal.yml +++ b/.github/workflows/lint-release-proposal.yml @@ -23,7 +23,7 @@ jobs: contents: read pull-requests: read steps: - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: persist-credentials: false fetch-depth: 2 diff --git a/.github/workflows/linters.yml b/.github/workflows/linters.yml index 61a976b9243e50..cf64d7eafa29f4 100644 --- a/.github/workflows/linters.yml +++ b/.github/workflows/linters.yml @@ -25,7 +25,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: persist-credentials: false - name: Use Node.js ${{ env.NODE_VERSION }} @@ -40,7 +40,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} @@ -55,7 +55,7 @@ jobs: if: ${{ github.event.pull_request && github.event.pull_request.draft == false && github.base_ref == github.event.repository.default_branch }} runs-on: ubuntu-latest steps: - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: fetch-depth: 0 persist-credentials: false @@ -93,7 +93,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: persist-credentials: false - name: Use Node.js ${{ env.NODE_VERSION }} @@ -142,7 +142,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: persist-credentials: false sparse-checkout: | @@ -169,7 +169,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: persist-credentials: false sparse-checkout: | @@ -193,7 +193,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: persist-credentials: false sparse-checkout: | @@ -207,7 +207,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: 
actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: persist-credentials: false - uses: mszostok/codeowners-validator@7f3f5e28c6d7b8dfae5731e54ce2272ca384592f @@ -217,7 +217,7 @@ jobs: if: ${{ github.event.pull_request }} runs-on: ubuntu-latest steps: - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: fetch-depth: 2 persist-credentials: false @@ -230,7 +230,7 @@ jobs: lint-readme: runs-on: ubuntu-latest steps: - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: persist-credentials: false sparse-checkout: | diff --git a/.github/workflows/notify-on-push.yml b/.github/workflows/notify-on-push.yml index e9cae10384bd81..ce4180b36ba09f 100644 --- a/.github/workflows/notify-on-push.yml +++ b/.github/workflows/notify-on-push.yml @@ -32,7 +32,7 @@ jobs: if: github.repository == 'nodejs/node' runs-on: ubuntu-latest steps: - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: persist-credentials: false - name: Check commit message diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index e27af8905998cd..b82f1cb047de97 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -38,7 +38,7 @@ jobs: egress-policy: audit # TODO: change to 'egress-policy: block' after couple of runs - name: Checkout code - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: persist-credentials: false diff --git a/.github/workflows/test-internet.yml b/.github/workflows/test-internet.yml index beeaa009ea6549..757c1e18534bf6 100644 --- a/.github/workflows/test-internet.yml +++ b/.github/workflows/test-internet.yml @@ -45,7 +45,7 @@ jobs: if: github.event_name == 'schedule' && github.repository == 'nodejs/node' || github.event.pull_request.draft == false runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} diff --git a/.github/workflows/test-linux.yml b/.github/workflows/test-linux.yml index 41991b6d2e9021..e87080eba83431 100644 --- a/.github/workflows/test-linux.yml +++ b/.github/workflows/test-linux.yml @@ -47,7 +47,7 @@ jobs: matrix: os: [ubuntu-24.04, ubuntu-24.04-arm] steps: - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: persist-credentials: false path: node diff --git a/.github/workflows/test-macos.yml b/.github/workflows/test-macos.yml index fe101cf48fa612..fbd087a63e9291 100644 --- a/.github/workflows/test-macos.yml +++ b/.github/workflows/test-macos.yml @@ -49,7 +49,7 @@ jobs: CXX: sccache g++ SCCACHE_GHA_ENABLED: 'true' steps: - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: persist-credentials: false path: node diff --git a/.github/workflows/timezone-update.yml b/.github/workflows/timezone-update.yml index 5f330165c35930..5465298fa9da50 100644 --- a/.github/workflows/timezone-update.yml +++ 
b/.github/workflows/timezone-update.yml @@ -20,12 +20,12 @@ jobs: steps: - name: Checkout nodejs/node - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: persist-credentials: false - name: Checkout unicode-org/icu-data - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: path: icu-data persist-credentials: false diff --git a/.github/workflows/tools.yml b/.github/workflows/tools.yml index feae9d90218657..0ba5837e11a381 100644 --- a/.github/workflows/tools.yml +++ b/.github/workflows/tools.yml @@ -280,7 +280,7 @@ jobs: run: | git config --global user.name "Node.js GitHub Bot" git config --global user.email "github-bot@iojs.org" - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 if: github.event_name == 'schedule' || inputs.id == 'all' || inputs.id == matrix.id with: persist-credentials: false diff --git a/.github/workflows/update-openssl.yml b/.github/workflows/update-openssl.yml index 03130109faf44a..ccd9868b6c50e5 100644 --- a/.github/workflows/update-openssl.yml +++ b/.github/workflows/update-openssl.yml @@ -14,7 +14,7 @@ jobs: if: github.repository == 'nodejs/node' runs-on: ubuntu-latest steps: - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: persist-credentials: false - name: Check and download new OpenSSL version diff --git a/.github/workflows/update-v8.yml b/.github/workflows/update-v8.yml index 5c3fcd3b9f0095..c3d065cd1814e8 100644 --- a/.github/workflows/update-v8.yml +++ b/.github/workflows/update-v8.yml @@ -16,7 +16,7 @@ jobs: if: github.repository == 'nodejs/node' runs-on: ubuntu-latest steps: - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: persist-credentials: false - name: Cache node modules and update-v8 diff --git a/.github/workflows/update-wpt.yml b/.github/workflows/update-wpt.yml index 8ef288519d003d..07cb6c4a156be6 100644 --- a/.github/workflows/update-wpt.yml +++ b/.github/workflows/update-wpt.yml @@ -27,7 +27,7 @@ jobs: subsystem: ${{ fromJSON(github.event.inputs.subsystems || '["url", "urlpattern", "WebCryptoAPI"]') }} steps: - - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 with: persist-credentials: false From 1cf109d6da90450e2e71fab3bb439c91daa72363 Mon Sep 17 00:00:00 2001 From: Jan Olaf Martin Date: Wed, 3 Dec 2025 18:27:04 -0800 Subject: [PATCH 012/115] module: allow subpath imports that start with `#/` It's a common ecosystem pattern to map a source root directory to `@/` but it requires special tooling support. This turns `#/*` into a more realistic alternative for that pattern. 
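For illustration, the mapping this change enables mirrors the test fixture added below; this is a sketch only, and the `src/` layout is simply the fixture's choice:

```js
// Assuming the importing package's package.json declares:
//   "imports": { "#/*": "./src/*.js" }
// then, with this change, the following specifier is accepted and
// resolves to ./src/foo.js inside the same package:
import foo from '#/foo';
console.log(foo); // 'foo', per the fixture added in this patch
```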
PR-URL: https://github.com/nodejs/node/pull/60864 Reviewed-By: Guy Bedford Reviewed-By: Geoffrey Booth Reviewed-By: Claudio Wunder Reviewed-By: Zeyu "Alex" Yang --- doc/api/esm.md | 2 +- doc/api/packages.md | 4 ++++ lib/internal/modules/esm/resolve.js | 3 +-- test/es-module/test-esm-imports.mjs | 6 ++++-- test/fixtures/es-modules/pkgimports/package.json | 1 + test/fixtures/es-modules/pkgimports/src/foo.js | 1 + 6 files changed, 12 insertions(+), 5 deletions(-) create mode 100644 test/fixtures/es-modules/pkgimports/src/foo.js diff --git a/doc/api/esm.md b/doc/api/esm.md index 76e1aad1f6e6a1..22106ae331cd0c 100644 --- a/doc/api/esm.md +++ b/doc/api/esm.md @@ -1091,7 +1091,7 @@ Note: This function is directly invoked by the CommonJS resolution algorithm. Note: This function is directly invoked by the CommonJS resolution algorithm. > 1. Assert: _specifier_ begins with _"#"_. -> 2. If _specifier_ is exactly equal to _"#"_ or starts with _"#/"_, then +> 2. If _specifier_ is exactly equal to _"#"_, then > 1. Throw an _Invalid Module Specifier_ error. > 3. Let _packageURL_ be the result of **LOOKUP\_PACKAGE\_SCOPE**(_parentURL_). > 4. If _packageURL_ is not **null**, then diff --git a/doc/api/packages.md b/doc/api/packages.md index fe9d6ccde9e309..b14c107e14fcca 100644 --- a/doc/api/packages.md +++ b/doc/api/packages.md @@ -527,6 +527,10 @@ can be written: added: - v14.6.0 - v12.19.0 +changes: + - version: REPLACEME + pr-url: https://github.com/nodejs/node/pull/60864 + description: Allow subpath imports that start with `#/`. --> In addition to the [`"exports"`][] field, there is a package `"imports"` field diff --git a/lib/internal/modules/esm/resolve.js b/lib/internal/modules/esm/resolve.js index c1d774b9c5c404..cc1230648881d8 100644 --- a/lib/internal/modules/esm/resolve.js +++ b/lib/internal/modules/esm/resolve.js @@ -693,8 +693,7 @@ function patternKeyCompare(a, b) { * @returns {URL} The resolved import URL. */ function packageImportsResolve(name, base, conditions) { - if (name === '#' || StringPrototypeStartsWith(name, '#/') || - StringPrototypeEndsWith(name, '/')) { + if (name === '#' || StringPrototypeEndsWith(name, '/')) { const reason = 'is not a valid internal imports specifier name'; throw new ERR_INVALID_MODULE_SPECIFIER(name, reason, fileURLToPath(base)); } diff --git a/test/es-module/test-esm-imports.mjs b/test/es-module/test-esm-imports.mjs index 7d2f7a63773514..b9982e2d0a585b 100644 --- a/test/es-module/test-esm-imports.mjs +++ b/test/es-module/test-esm-imports.mjs @@ -15,6 +15,10 @@ const { requireImport, importImport } = importer; const internalImports = new Map([ // Base case ['#test', maybeWrapped({ default: 'test' })], + // Root wildcard import + ['#/foo', maybeWrapped({ default: 'foo' })], + // Explicit #/ mapping + ['#/initialslash', maybeWrapped({ default: 'test' })], // import / require conditions ['#branch', maybeWrapped({ default: isRequire ? 
'requirebranch' : 'importbranch' })], // Subpath imports @@ -64,8 +68,6 @@ const { requireImport, importImport } = importer; ['#external/subpath/x%5Cy', 'must not include encoded "/" or "\\"'], // Target must have a name ['#', '#'], - // Initial slash target must have a leading name - ['#/initialslash', '#/initialslash'], // Percent-encoded target paths ['#encodedslash', 'must not include encoded "/" or "\\"'], ['#encodedbackslash', 'must not include encoded "/" or "\\"'], diff --git a/test/fixtures/es-modules/pkgimports/package.json b/test/fixtures/es-modules/pkgimports/package.json index dbbbcd1ab01ea1..82e57b11ce45cd 100644 --- a/test/fixtures/es-modules/pkgimports/package.json +++ b/test/fixtures/es-modules/pkgimports/package.json @@ -1,5 +1,6 @@ { "imports": { + "#/*": "./src/*.js", "#branch": { "import": "./importbranch.js", "require": "./requirebranch.js" diff --git a/test/fixtures/es-modules/pkgimports/src/foo.js b/test/fixtures/es-modules/pkgimports/src/foo.js new file mode 100644 index 00000000000000..2651774ae60543 --- /dev/null +++ b/test/fixtures/es-modules/pkgimports/src/foo.js @@ -0,0 +1 @@ +module.exports = 'foo'; From a4edda09e8771ad9fb45a40207cce77088110640 Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Sun, 7 Dec 2025 21:28:57 +0100 Subject: [PATCH 013/115] test: skip tests not passing without `NODE_OPTIONS` support PR-URL: https://github.com/nodejs/node/pull/60912 Reviewed-By: Benjamin Gruenbaum --- test/embedding/test-embedding.js | 2 +- test/es-module/test-esm-import-flag.mjs | 3 +- test/es-module/test-esm-symlink-type.js | 4 +++ test/es-module/test-esm-type-field-errors.js | 4 +++ test/parallel/test-cli-options-as-flags.js | 10 ++++--- test/parallel/test-config-file.js | 29 ++++++++++++------- .../test-max-old-space-size-percentage.js | 4 +++ ...test-permission-allow-child-process-cli.js | 3 ++ ...-permission-child-process-inherit-flags.js | 3 ++ test/parallel/test-process-warnings.mjs | 4 ++- test/parallel/test-unicode-node-options.js | 6 +++- test/parallel/test-worker-execargv-invalid.js | 6 +++- test/parallel/test-worker-node-options.js | 7 ++++- 13 files changed, 64 insertions(+), 21 deletions(-) diff --git a/test/embedding/test-embedding.js b/test/embedding/test-embedding.js index 71c4f7f324c973..49706079fd8b6a 100644 --- a/test/embedding/test-embedding.js +++ b/test/embedding/test-embedding.js @@ -154,7 +154,7 @@ for (const extraSnapshotArgs of [ } // Guarantee NODE_REPL_EXTERNAL_MODULE won't bypass kDisableNodeOptionsEnv -{ +if (!process.config.variables.node_without_node_options) { spawnSyncAndExit( binary, ['require("os")'], diff --git a/test/es-module/test-esm-import-flag.mjs b/test/es-module/test-esm-import-flag.mjs index 81de3b11a38609..c0c22704f53a8a 100644 --- a/test/es-module/test-esm-import-flag.mjs +++ b/test/es-module/test-esm-import-flag.mjs @@ -9,6 +9,7 @@ const cjsImport = fixtures.fileURL('es-modules', 'cjs-file.cjs'); const mjsEntry = fixtures.path('es-modules', 'mjs-file.mjs'); const mjsImport = fixtures.fileURL('es-modules', 'mjs-file.mjs'); +const onlyIfNodeOptionsSupport = { skip: process.config.variables.node_without_node_options }; describe('import modules using --import', { concurrency: !process.env.TEST_PARALLEL }, () => { it('should import when using --eval', async () => { @@ -199,7 +200,7 @@ describe('import modules using --import', { concurrency: !process.env.TEST_PARAL assert.strictEqual(signal, null); }); - it('should import files from the env before ones from the CLI', async () => { + it('should import files from the env 
before ones from the CLI', onlyIfNodeOptionsSupport, async () => { const { code, signal, stderr, stdout } = await spawnPromisified( execPath, [ diff --git a/test/es-module/test-esm-symlink-type.js b/test/es-module/test-esm-symlink-type.js index 2537881f46c0e2..922991c9a61184 100644 --- a/test/es-module/test-esm-symlink-type.js +++ b/test/es-module/test-esm-symlink-type.js @@ -6,6 +6,10 @@ const assert = require('assert'); const exec = require('child_process').execFile; const fs = require('fs'); +if (process.config.variables.node_without_node_options) { + common.skip('missing NODE_OPTIONS support'); +} + const tmpdir = require('../common/tmpdir'); tmpdir.refresh(); const tmpDir = tmpdir.path; diff --git a/test/es-module/test-esm-type-field-errors.js b/test/es-module/test-esm-type-field-errors.js index 4bf52f3ad6e7d3..dcd411202068b5 100644 --- a/test/es-module/test-esm-type-field-errors.js +++ b/test/es-module/test-esm-type-field-errors.js @@ -4,6 +4,10 @@ const assert = require('assert'); const exec = require('child_process').execFile; const { describe, it } = require('node:test'); +if (process.config.variables.node_without_node_options) { + common.skip('missing NODE_OPTIONS support'); +} + const mjsFile = require.resolve('../fixtures/es-modules/mjs-file.mjs'); const cjsFile = require.resolve('../fixtures/es-modules/cjs-file.cjs'); const packageWithoutTypeMain = diff --git a/test/parallel/test-cli-options-as-flags.js b/test/parallel/test-cli-options-as-flags.js index 04e9e801b8ec81..403a18d162ca61 100644 --- a/test/parallel/test-cli-options-as-flags.js +++ b/test/parallel/test-cli-options-as-flags.js @@ -12,6 +12,8 @@ const fixtureFile = fixtures.path(path.join('options-as-flags', 'fixture.cjs')); const configFile = fixtures.path(path.join('options-as-flags', 'test-config.json')); const envFile = fixtures.path(path.join('options-as-flags', '.test.env')); +const onlyIfNodeOptionsSupport = { skip: process.config.variables.node_without_node_options }; + describe('getOptionsAsFlagsFromBinding', () => { it('should extract flags from command line arguments', async () => { const result = await spawnPromisified(process.execPath, [ @@ -28,7 +30,7 @@ describe('getOptionsAsFlagsFromBinding', () => { assert.strictEqual(flags.includes('--stack-trace-limit=512'), true); }); - it('should extract flags from NODE_OPTIONS environment variable', async () => { + it('should extract flags from NODE_OPTIONS environment variable', onlyIfNodeOptionsSupport, async () => { const result = await spawnPromisified(process.execPath, [ '--no-warnings', '--expose-internals', @@ -49,7 +51,7 @@ describe('getOptionsAsFlagsFromBinding', () => { assert.strictEqual(flags.includes('--no-warnings'), true); }); - it('should extract flags from config file', async () => { + it('should extract flags from config file', onlyIfNodeOptionsSupport, async () => { const result = await spawnPromisified(process.execPath, [ '--no-warnings', '--expose-internals', @@ -69,7 +71,7 @@ describe('getOptionsAsFlagsFromBinding', () => { assert.strictEqual(flags.includes('--no-warnings'), true); }); - it('should extract flags from config file and command line', async () => { + it('should extract flags from config file and command line', onlyIfNodeOptionsSupport, async () => { const result = await spawnPromisified(process.execPath, [ '--no-warnings', '--expose-internals', @@ -92,7 +94,7 @@ describe('getOptionsAsFlagsFromBinding', () => { assert.strictEqual(flags.includes('--test-isolation=none'), true); }); - it('should extract flags from .env file', async 
() => { + it('should extract flags from .env file', onlyIfNodeOptionsSupport, async () => { const result = await spawnPromisified(process.execPath, [ '--no-warnings', '--expose-internals', diff --git a/test/parallel/test-config-file.js b/test/parallel/test-config-file.js index 5e6cf5a3d5c5fd..a3d16cbb65e4ef 100644 --- a/test/parallel/test-config-file.js +++ b/test/parallel/test-config-file.js @@ -13,7 +13,14 @@ const { test, it, describe } = require('node:test'); const { chmodSync, writeFileSync, constants } = require('node:fs'); const { join } = require('node:path'); +const onlyIfNodeOptionsSupport = { skip: process.config.variables.node_without_node_options }; const onlyWithAmaro = { skip: !process.config.variables.node_use_amaro }; +const onlyWithAmaroAndNodeOptions = { + skip: !process.config.variables.node_use_amaro || process.config.variables.node_without_node_options, +}; +const onlyWithInspectorAndNodeOptions = { + skip: !process.features.inspector || process.config.variables.node_without_node_options, +}; test('should handle non existing json', async () => { const result = await spawnPromisified(process.execPath, [ @@ -51,7 +58,7 @@ test('should handle empty object json', async () => { assert.strictEqual(result.code, 0); }); -test('should parse boolean flag', onlyWithAmaro, async () => { +test('should parse boolean flag', onlyWithAmaroAndNodeOptions, async () => { const result = await spawnPromisified(process.execPath, [ '--experimental-config-file', fixtures.path('rc/transform-types.json'), @@ -62,7 +69,7 @@ test('should parse boolean flag', onlyWithAmaro, async () => { assert.strictEqual(result.code, 0); }); -test('should parse boolean flag defaulted to true', async () => { +test('should parse boolean flag defaulted to true', onlyIfNodeOptionsSupport, async () => { const result = await spawnPromisified(process.execPath, [ '--experimental-config-file', fixtures.path('rc/warnings-false.json'), @@ -85,7 +92,7 @@ test('should throw an error when a flag is declared twice', async () => { assert.strictEqual(result.code, 9); }); -test('should override env-file', onlyWithAmaro, async () => { +test('should override env-file', onlyWithAmaroAndNodeOptions, async () => { const result = await spawnPromisified(process.execPath, [ '--no-warnings', '--experimental-config-file', @@ -128,7 +135,7 @@ test('should not override CLI flags', onlyWithAmaro, async () => { assert.strictEqual(result.code, 1); }); -test('should parse array flag correctly', async () => { +test('should parse array flag correctly', onlyIfNodeOptionsSupport, async () => { const result = await spawnPromisified(process.execPath, [ '--no-warnings', '--experimental-config-file', @@ -152,7 +159,7 @@ test('should validate invalid array flag', async () => { assert.strictEqual(result.code, 9); }); -test('should validate array flag as string', async () => { +test('should validate array flag as string', onlyIfNodeOptionsSupport, async () => { const result = await spawnPromisified(process.execPath, [ '--no-warnings', '--experimental-config-file', @@ -188,7 +195,7 @@ test('should throw at flag not available in NODE_OPTIONS', async () => { assert.strictEqual(result.code, 9); }); -test('unsigned flag should be parsed correctly', async () => { +test('unsigned flag should be parsed correctly', onlyIfNodeOptionsSupport, async () => { const result = await spawnPromisified(process.execPath, [ '--no-warnings', '--experimental-config-file', @@ -225,7 +232,7 @@ test('v8 flag should not be allowed in config file', async () => { 
assert.strictEqual(result.code, 9); }); -test('string flag should be parsed correctly', async () => { +test('string flag should be parsed correctly', onlyIfNodeOptionsSupport, async () => { const result = await spawnPromisified(process.execPath, [ '--no-warnings', '--test', @@ -238,7 +245,7 @@ test('string flag should be parsed correctly', async () => { assert.strictEqual(result.code, 0); }); -test('host port flag should be parsed correctly', { skip: !process.features.inspector }, async () => { +test('host port flag should be parsed correctly', onlyWithInspectorAndNodeOptions, async () => { const result = await spawnPromisified(process.execPath, [ '--no-warnings', '--expose-internals', @@ -251,7 +258,7 @@ test('host port flag should be parsed correctly', { skip: !process.features.insp assert.strictEqual(result.code, 0); }); -test('--inspect=true should be parsed correctly', { skip: !process.features.inspector }, async () => { +test('--inspect=true should be parsed correctly', onlyWithInspectorAndNodeOptions, async () => { const result = await spawnPromisified(process.execPath, [ '--no-warnings', '--experimental-config-file', @@ -351,7 +358,7 @@ test('broken value in node_options', async () => { assert.strictEqual(result.code, 9); }); -test('should use node.config.json as default', async () => { +test('should use node.config.json as default', onlyIfNodeOptionsSupport, async () => { const result = await spawnPromisified(process.execPath, [ '--no-warnings', '--experimental-default-config-file', @@ -364,7 +371,7 @@ test('should use node.config.json as default', async () => { assert.strictEqual(result.code, 0); }); -test('should override node.config.json when specificied', async () => { +test('should override node.config.json when specificied', onlyIfNodeOptionsSupport, async () => { const result = await spawnPromisified(process.execPath, [ '--no-warnings', '--experimental-default-config-file', diff --git a/test/parallel/test-max-old-space-size-percentage.js b/test/parallel/test-max-old-space-size-percentage.js index c5071d2f959ce3..88db60299e1526 100644 --- a/test/parallel/test-max-old-space-size-percentage.js +++ b/test/parallel/test-max-old-space-size-percentage.js @@ -44,6 +44,10 @@ invalidPercentages.forEach((input) => { assert.match(result.stderr.toString(), input[1]); }); +if (process.config.variables.node_without_node_options) { + common.skip('missing NODE_OPTIONS support'); +} + // Test NODE_OPTIONS with valid percentages validPercentages.forEach((input) => { const result = spawnSync(process.execPath, [], { diff --git a/test/parallel/test-permission-allow-child-process-cli.js b/test/parallel/test-permission-allow-child-process-cli.js index f2ec02f02f46f5..d82332ba3d5084 100644 --- a/test/parallel/test-permission-allow-child-process-cli.js +++ b/test/parallel/test-permission-allow-child-process-cli.js @@ -8,6 +8,9 @@ const { isMainThread } = require('worker_threads'); if (!isMainThread) { common.skip('This test only works on a main thread'); } +if (process.config.variables.node_without_node_options) { + common.skip('missing NODE_OPTIONS support'); +} const assert = require('assert'); const childProcess = require('child_process'); diff --git a/test/parallel/test-permission-child-process-inherit-flags.js b/test/parallel/test-permission-child-process-inherit-flags.js index fb1506bf274f43..36e89bfb1c7286 100644 --- a/test/parallel/test-permission-child-process-inherit-flags.js +++ b/test/parallel/test-permission-child-process-inherit-flags.js @@ -7,6 +7,9 @@ const { isMainThread } = 
require('worker_threads'); if (!isMainThread) { common.skip('This test only works on a main thread'); } +if (process.config.variables.node_without_node_options) { + common.skip('missing NODE_OPTIONS support'); +} const assert = require('assert'); const childProcess = require('child_process'); diff --git a/test/parallel/test-process-warnings.mjs b/test/parallel/test-process-warnings.mjs index 907681b25c4630..75e4986b817dc1 100644 --- a/test/parallel/test-process-warnings.mjs +++ b/test/parallel/test-process-warnings.mjs @@ -9,6 +9,8 @@ const dep1Message = /\(node:\d+\) \[DEP1\] DeprecationWarning/; const dep2Message = /\(node:\d+\) \[DEP2\] DeprecationWarning/; const experimentalWarningMessage = /\(node:\d+\) ExperimentalWarning/; +const onlyIfNodeOptionsSupport = { skip: process.config.variables.node_without_node_options }; + describe('process warnings', { concurrency: !process.env.TEST_PARALLEL }, () => { it('should emit all warnings by default', async () => { @@ -142,7 +144,7 @@ describe('process warnings', { concurrency: !process.env.TEST_PARALLEL }, () => assert.strictEqual(signal, null); }); - it('should be specifiable in NODE_OPTIONS', async () => { + it('should be specifiable in NODE_OPTIONS', onlyIfNodeOptionsSupport, async () => { const { stdout, stderr, code, signal } = await spawnPromisified(process.execPath, [ fixturePath, ], { diff --git a/test/parallel/test-unicode-node-options.js b/test/parallel/test-unicode-node-options.js index e5a40d118791d3..3f8cf46d986cd3 100644 --- a/test/parallel/test-unicode-node-options.js +++ b/test/parallel/test-unicode-node-options.js @@ -1,10 +1,14 @@ 'use strict'; // Flags: --expose-internals -require('../common'); +const common = require('../common'); const { getOptionValue } = require('internal/options'); const assert = require('assert'); const cp = require('child_process'); +if (process.config.variables.node_without_node_options) { + common.skip('missing NODE_OPTIONS support'); +} + const expected_redirect_value = 'foó'; if (process.argv.length === 2) { diff --git a/test/parallel/test-worker-execargv-invalid.js b/test/parallel/test-worker-execargv-invalid.js index be8ab0b8c423b7..06c33c678dbcb3 100644 --- a/test/parallel/test-worker-execargv-invalid.js +++ b/test/parallel/test-worker-execargv-invalid.js @@ -1,9 +1,13 @@ 'use strict'; -require('../common'); +const common = require('../common'); const assert = require('assert'); const { Worker } = require('worker_threads'); +if (process.config.variables.node_without_node_options) { + common.skip('missing NODE_OPTIONS support'); +} + { const expectedErr = { code: 'ERR_INVALID_ARG_TYPE', diff --git a/test/parallel/test-worker-node-options.js b/test/parallel/test-worker-node-options.js index 7a26154e2f4800..73f71b57ac7607 100644 --- a/test/parallel/test-worker-node-options.js +++ b/test/parallel/test-worker-node-options.js @@ -1,10 +1,15 @@ 'use strict'; -require('../common'); +const common = require('../common'); const { spawnSyncAndExitWithoutError, spawnSyncAndAssert, } = require('../common/child_process'); + +if (process.config.variables.node_without_node_options) { + common.skip('missing NODE_OPTIONS support'); +} + const fixtures = require('../common/fixtures'); spawnSyncAndExitWithoutError( process.execPath, From 7424eaa8ff76e40732837f9f18ad7724cad5fa5d Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Wed, 10 Dec 2025 17:12:28 +0100 Subject: [PATCH 014/115] http: add http.setGlobalProxyFromEnv() This adds an API to dynamically enable built-in proxy support for all of fetch() and 
http.request()/https.request(), so that users do not have to be aware of them all and configure them one by one. PR-URL: https://github.com/nodejs/node/pull/60953 Reviewed-By: Matteo Collina Reviewed-By: Tim Perry --- doc/api/http.md | 110 ++++++++++++++++++ lib/_http_agent.js | 8 +- lib/http.js | 61 +++++++++- lib/https.js | 8 +- lib/internal/http.js | 29 ++++- test/client-proxy/test-http-proxy-fetch.mjs | 4 +- ...et-global-proxy-from-env-fetch-default.mjs | 35 ++++++ ...-set-global-proxy-from-env-fetch-empty.mjs | 21 ++++ ...-set-global-proxy-from-env-fetch-https.mjs | 38 ++++++ ...t-global-proxy-from-env-fetch-no-proxy.mjs | 23 ++++ ...et-global-proxy-from-env-fetch-restore.mjs | 35 ++++++ ...t-http-set-global-proxy-from-env-fetch.mjs | 34 ++++++ ...al-proxy-from-env-http-request-default.mjs | 33 ++++++ ...obal-proxy-from-env-http-request-empty.mjs | 20 ++++ ...al-proxy-from-env-http-request-restore.mjs | 33 ++++++ ...set-global-proxy-from-env-http-request.mjs | 29 +++++ ...et-global-proxy-from-env-https-request.mjs | 37 ++++++ ...set-global-proxy-from-env-invalid-type.mjs | 15 +++ ...-set-global-proxy-from-env-invalid-url.mjs | 21 ++++ ...ttp-set-global-proxy-from-env-no-proxy.mjs | 23 ++++ ...t-global-proxy-from-env-override-fetch.mjs | 44 +++++++ ...et-global-proxy-from-env-override-http.mjs | 40 +++++++ test/common/proxy-server.js | 74 +++++++++++- test/fixtures/fetch-and-log.mjs | 17 +++ test/fixtures/request-and-log.js | 59 +++++++--- 25 files changed, 807 insertions(+), 44 deletions(-) create mode 100644 test/client-proxy/test-http-set-global-proxy-from-env-fetch-default.mjs create mode 100644 test/client-proxy/test-http-set-global-proxy-from-env-fetch-empty.mjs create mode 100644 test/client-proxy/test-http-set-global-proxy-from-env-fetch-https.mjs create mode 100644 test/client-proxy/test-http-set-global-proxy-from-env-fetch-no-proxy.mjs create mode 100644 test/client-proxy/test-http-set-global-proxy-from-env-fetch-restore.mjs create mode 100644 test/client-proxy/test-http-set-global-proxy-from-env-fetch.mjs create mode 100644 test/client-proxy/test-http-set-global-proxy-from-env-http-request-default.mjs create mode 100644 test/client-proxy/test-http-set-global-proxy-from-env-http-request-empty.mjs create mode 100644 test/client-proxy/test-http-set-global-proxy-from-env-http-request-restore.mjs create mode 100644 test/client-proxy/test-http-set-global-proxy-from-env-http-request.mjs create mode 100644 test/client-proxy/test-http-set-global-proxy-from-env-https-request.mjs create mode 100644 test/client-proxy/test-http-set-global-proxy-from-env-invalid-type.mjs create mode 100644 test/client-proxy/test-http-set-global-proxy-from-env-invalid-url.mjs create mode 100644 test/client-proxy/test-http-set-global-proxy-from-env-no-proxy.mjs create mode 100644 test/client-proxy/test-http-set-global-proxy-from-env-override-fetch.mjs create mode 100644 test/client-proxy/test-http-set-global-proxy-from-env-override-http.mjs diff --git a/doc/api/http.md b/doc/api/http.md index f20ad756061278..8388f61d3a7f23 100644 --- a/doc/api/http.md +++ b/doc/api/http.md @@ -4338,6 +4338,32 @@ added: Set the maximum number of idle HTTP parsers. +## `http.setGlobalProxyFromEnv([proxyEnv])` + + + +* `proxyEnv` {Object} An object containing proxy configuration. This accepts the + same options as the `proxyEnv` option accepted by [`Agent`][]. **Default:** + `process.env`. 
+* Returns: {Function} A function that restores the original agent and dispatcher + settings to the state before this `http.setGlobalProxyFromEnv()` is invoked. + +Dynamically resets the global configurations to enable built-in proxy support for +`fetch()` and `http.request()`/`https.request()` at runtime, as an alternative +to using the `--use-env-proxy` flag or `NODE_USE_ENV_PROXY` environment variable. +It can also be used to override settings configured from the environment variables. + +As this function resets the global configurations, any previously configured +`http.globalAgent`, `https.globalAgent` or undici global dispatcher would be +overridden after this function is invoked. It's recommended to invoke it before any +requests are made and avoid invoking it in the middle of any requests. + +See [Built-in Proxy Support][] for details on proxy URL formats and `NO_PROXY` +syntax. + ## Class: `WebSocket` + +* `signalCode` {string} A signal name (e.g., `'SIGTERM'`, `'SIGKILL'`). +* Returns: {number|null} The exit code, or `null` if the signal is invalid. + +The `util.convertProcessSignalToExitCode()` method converts a signal name to its +corresponding POSIX exit code. Following the POSIX standard, the exit code +for a process terminated by a signal is calculated as `128 + signal number`. + +```mjs +import { convertProcessSignalToExitCode } from 'node:util'; + +console.log(convertProcessSignalToExitCode('SIGTERM')); // 143 (128 + 15) +console.log(convertProcessSignalToExitCode('SIGKILL')); // 137 (128 + 9) +console.log(convertProcessSignalToExitCode('INVALID')); // null +``` + +```cjs +const { convertProcessSignalToExitCode } = require('node:util'); + +console.log(convertProcessSignalToExitCode('SIGTERM')); // 143 (128 + 15) +console.log(convertProcessSignalToExitCode('SIGKILL')); // 137 (128 + 9) +console.log(convertProcessSignalToExitCode('INVALID')); // null +``` + +This is particularly useful when working with processes to determine +the exit code based on the signal that terminated the process. + ## `util.debuglog(section[, callback])` -* `stream` {Stream|Iterable|AsyncIterable|Function} +* `stream` {Writable|Duplex|WritableStream|TransformStream|Function} * `options` {Object} * `signal` {AbortSignal} allows destroying the stream if the signal is aborted. @@ -2038,13 +2038,18 @@ async function* splitToWords(source) { } } -const wordsStream = Readable.from(['this is', 'compose as operator']).compose(splitToWords); +const wordsStream = Readable.from(['text passed through', 'composed stream']).compose(splitToWords); const words = await wordsStream.toArray(); -console.log(words); // prints ['this', 'is', 'compose', 'as', 'operator'] +console.log(words); // prints ['text', 'passed', 'through', 'composed', 'stream'] ``` -See [`stream.compose`][] for more information. +`readable.compose(s)` is equivalent to `stream.compose(readable, s)`. + +This method also allows for an {AbortSignal} to be provided, which will destroy +the composed stream when aborted. + +See [`stream.compose(...streams)`][] for more information. ##### `readable.iterator([options])` @@ -3040,7 +3045,8 @@ await finished(compose(s1, s2, s3)); console.log(res); // prints 'HELLOWORLD' ``` -See [`readable.compose(stream)`][] for `stream.compose` as operator. +For convenience, the [`readable.compose(stream)`][] method is available on +{Readable} and {Duplex} streams as a wrapper for this function. ### `stream.isErrored(stream)` @@ -4974,7 +4980,7 @@ contain multi-byte characters. 
[`readable.setEncoding()`]: #readablesetencodingencoding [`stream.Readable.from()`]: #streamreadablefromiterable-options [`stream.addAbortSignal()`]: #streamaddabortsignalsignal-stream -[`stream.compose`]: #streamcomposestreams +[`stream.compose(...streams)`]: #streamcomposestreams [`stream.cork()`]: #writablecork [`stream.duplexPair()`]: #streamduplexpairoptions [`stream.finished()`]: #streamfinishedstream-options-callback diff --git a/lib/internal/streams/operators.js b/lib/internal/streams/operators.js index 27c22f89926021..6db2df0e3646e0 100644 --- a/lib/internal/streams/operators.js +++ b/lib/internal/streams/operators.js @@ -18,7 +18,6 @@ const { AbortController, AbortSignal } = require('internal/abort_controller'); const { AbortError, codes: { - ERR_INVALID_ARG_VALUE, ERR_MISSING_ARGS, ERR_OUT_OF_RANGE, }, @@ -31,40 +30,10 @@ const { } = require('internal/validators'); const { kWeakHandler, kResistStopPropagation } = require('internal/event_target'); const { finished } = require('internal/streams/end-of-stream'); -const staticCompose = require('internal/streams/compose'); -const { - addAbortSignalNoValidate, -} = require('internal/streams/add-abort-signal'); -const { isWritable, isNodeStream } = require('internal/streams/utils'); const kEmpty = Symbol('kEmpty'); const kEof = Symbol('kEof'); -function compose(stream, options) { - if (options != null) { - validateObject(options, 'options'); - } - if (options?.signal != null) { - validateAbortSignal(options.signal, 'options.signal'); - } - - if (isNodeStream(stream) && !isWritable(stream)) { - throw new ERR_INVALID_ARG_VALUE('stream', stream, 'must be writable'); - } - - const composedStream = staticCompose(this, stream); - - if (options?.signal) { - // Not validating as we already validated before - addAbortSignalNoValidate( - options.signal, - composedStream, - ); - } - - return composedStream; -} - function map(fn, options) { validateFunction(fn, 'fn'); if (options != null) { @@ -408,7 +377,6 @@ module.exports.streamReturningOperators = { flatMap, map, take, - compose, }; module.exports.promiseReturningOperators = { diff --git a/lib/internal/streams/readable.js b/lib/internal/streams/readable.js index e05de6164c6c2f..00cf273add0453 100644 --- a/lib/internal/streams/readable.js +++ b/lib/internal/streams/readable.js @@ -47,6 +47,7 @@ const { Buffer } = require('buffer'); const { addAbortSignal, + addAbortSignalNoValidate, } = require('internal/streams/add-abort-signal'); const { eos } = require('internal/streams/end-of-stream'); @@ -85,7 +86,10 @@ const { ERR_UNKNOWN_ENCODING, }, } = require('internal/errors'); -const { validateObject } = require('internal/validators'); +const { + validateAbortSignal, + validateObject, +} = require('internal/validators'); const FastBuffer = Buffer[SymbolSpecies]; @@ -1409,6 +1413,30 @@ async function* createAsyncIterator(stream, options) { } } +let composeImpl; + +Readable.prototype.compose = function compose(stream, options) { + if (options != null) { + validateObject(options, 'options'); + } + if (options?.signal != null) { + validateAbortSignal(options.signal, 'options.signal'); + } + + composeImpl ??= require('internal/streams/compose'); + const composedStream = composeImpl(this, stream); + + if (options?.signal) { + // Not validating as we already validated before + addAbortSignalNoValidate( + options.signal, + composedStream, + ); + } + + return composedStream; +}; + // Making it explicit these properties are not enumerable // because otherwise some prototype manipulation in // userland will 
fail. diff --git a/test/parallel/test-stream-compose-operator.js b/test/parallel/test-stream-readable-compose.js similarity index 77% rename from test/parallel/test-stream-compose-operator.js rename to test/parallel/test-stream-readable-compose.js index 4fefb004f5a1e5..cacdfae1c034d6 100644 --- a/test/parallel/test-stream-compose-operator.js +++ b/test/parallel/test-stream-readable-compose.js @@ -2,7 +2,9 @@ const common = require('../common'); const { - Readable, Transform, + PassThrough, + Readable, + Transform, } = require('stream'); const assert = require('assert'); @@ -19,6 +21,8 @@ const assert = require('assert'); } } }); + assert.strictEqual(stream.readable, true); + assert.strictEqual(stream.writable, false); const result = ['ab', 'cd']; (async () => { for await (const item of stream) { @@ -35,6 +39,8 @@ const assert = require('assert'); callback(null, chunk); }, 4) })); + assert.strictEqual(stream.readable, true); + assert.strictEqual(stream.writable, false); const result = ['a', 'b', 'c', 'd']; (async () => { for await (const item of stream) { @@ -43,6 +49,26 @@ const assert = require('assert'); })().then(common.mustCall()); } +{ + // With Duplex stream as `this`, ensuring writes to the composed stream + // are passed to the head of the pipeline + const pt = new PassThrough({ objectMode: true }); + const composed = pt.compose(async function *(stream) { + for await (const chunk of stream) { + yield chunk * 2; + } + }); + assert.strictEqual(composed.readable, true); + assert.strictEqual(composed.writable, true); + pt.on('data', common.mustCall((chunk) => { + assert.strictEqual(chunk, 123); + })); + composed.on('data', common.mustCall((chunk) => { + assert.strictEqual(chunk, 246); + })); + composed.end(123); +} + { // Throwing an error during `compose` (before waiting for data) const stream = Readable.from([1, 2, 3, 4, 5]).compose(async function *(stream) { // eslint-disable-line require-yield From dffba04624392f0c39ddb75ff81f76b0f37a295c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 3 Jan 2026 18:14:17 +0000 Subject: [PATCH 019/115] meta: bump actions/upload-artifact from 5.0.0 to 6.0.0 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/upload-artifact](https://github.com/actions/upload-artifact) from 5.0.0 to 6.0.0. - [Release notes](https://github.com/actions/upload-artifact/releases) - [Commits](https://github.com/actions/upload-artifact/compare/330a01c490aca151604b8cf639adc76d48f6c5d4...b7c566a772e6b6bfb58ed0dc250532a479d7789f) --- updated-dependencies: - dependency-name: actions/upload-artifact dependency-version: 6.0.0 dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] PR-URL: https://github.com/nodejs/node/pull/61238 Reviewed-By: Antoine du Hamel Reviewed-By: Luigi Pinca Reviewed-By: Colin Ihrig Reviewed-By: Ulises Gascón --- .github/workflows/build-tarball.yml | 2 +- .github/workflows/daily-wpt-fyi.yml | 2 +- .github/workflows/doc.yml | 2 +- .github/workflows/scorecard.yml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build-tarball.yml b/.github/workflows/build-tarball.yml index ec8a279834b47b..710e37c90cbcd1 100644 --- a/.github/workflows/build-tarball.yml +++ b/.github/workflows/build-tarball.yml @@ -57,7 +57,7 @@ jobs: export COMMIT=$(git rev-parse --short=10 "$GITHUB_SHA") ./configure && make tar -j4 SKIP_XZ=1 - name: Upload tarball artifact - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 with: name: tarballs path: '*.tar.gz' diff --git a/.github/workflows/daily-wpt-fyi.yml b/.github/workflows/daily-wpt-fyi.yml index 189f06c401106c..450da8a269b03a 100644 --- a/.github/workflows/daily-wpt-fyi.yml +++ b/.github/workflows/daily-wpt-fyi.yml @@ -102,7 +102,7 @@ jobs: run: cp wptreport.json wptreport-${{ steps.setup-node.outputs.node-version }}.json - name: Upload GitHub Actions artifact if: ${{ env.WPT_REPORT != '' }} - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 with: path: out/wpt/wptreport-*.json name: WPT Report for ${{ steps.setup-node.outputs.node-version }} diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml index 9aaed22eb8c434..ecda8d517bd7ee 100644 --- a/.github/workflows/doc.yml +++ b/.github/workflows/doc.yml @@ -35,7 +35,7 @@ jobs: run: npx envinfo - name: Build run: NODE=$(command -v node) make doc-only - - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 + - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 with: name: docs path: out/doc diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index b82f1cb047de97..72905c7f35c7c6 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -65,7 +65,7 @@ jobs: # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF # format to the repository Actions tab. - name: Upload artifact - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 with: name: SARIF file path: results.sarif From 7285f16e2b6bc1054f3430f1eeaa3124211e3122 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 3 Jan 2026 18:14:26 +0000 Subject: [PATCH 020/115] meta: bump actions/checkout from 6.0.0 to 6.0.1 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/checkout](https://github.com/actions/checkout) from 6.0.0 to 6.0.1. 
- [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/1af3b93b6815bc44a9784bd300feb67ff0d1eeb3...8e8c483db84b4bee98b60c0593521ed34d9990e8) --- updated-dependencies: - dependency-name: actions/checkout dependency-version: 6.0.1 dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] PR-URL: https://github.com/nodejs/node/pull/61239 Reviewed-By: Rafael Gonzaga Reviewed-By: Luigi Pinca Reviewed-By: Colin Ihrig Reviewed-By: Ulises Gascón --- .github/workflows/auto-start-ci.yml | 2 +- .github/workflows/build-tarball.yml | 4 ++-- .github/workflows/codeql.yml | 2 +- .github/workflows/commit-lint.yml | 2 +- .github/workflows/commit-queue.yml | 2 +- .../workflows/coverage-linux-without-intl.yml | 2 +- .github/workflows/coverage-linux.yml | 2 +- .github/workflows/coverage-windows.yml | 2 +- .github/workflows/create-release-proposal.yml | 2 +- .github/workflows/daily-wpt-fyi.yml | 4 ++-- .github/workflows/daily.yml | 2 +- .github/workflows/doc.yml | 2 +- .../workflows/find-inactive-collaborators.yml | 2 +- .github/workflows/find-inactive-tsc.yml | 4 ++-- .github/workflows/license-builder.yml | 2 +- .github/workflows/lint-release-proposal.yml | 2 +- .github/workflows/linters.yml | 20 +++++++++---------- .github/workflows/notify-on-push.yml | 2 +- .github/workflows/scorecard.yml | 2 +- .github/workflows/test-internet.yml | 2 +- .github/workflows/test-linux.yml | 2 +- .github/workflows/test-macos.yml | 2 +- .github/workflows/timezone-update.yml | 4 ++-- .github/workflows/tools.yml | 2 +- .github/workflows/update-openssl.yml | 2 +- .github/workflows/update-v8.yml | 2 +- .github/workflows/update-wpt.yml | 2 +- 27 files changed, 40 insertions(+), 40 deletions(-) diff --git a/.github/workflows/auto-start-ci.yml b/.github/workflows/auto-start-ci.yml index f842cb069bc777..ea182ea1792eed 100644 --- a/.github/workflows/auto-start-ci.yml +++ b/.github/workflows/auto-start-ci.yml @@ -45,7 +45,7 @@ jobs: if: needs.get-prs-for-ci.outputs.numbers != '' runs-on: ubuntu-latest steps: - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false diff --git a/.github/workflows/build-tarball.yml b/.github/workflows/build-tarball.yml index 710e37c90cbcd1..3d613fd4f7c30d 100644 --- a/.github/workflows/build-tarball.yml +++ b/.github/workflows/build-tarball.yml @@ -41,7 +41,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} @@ -70,7 +70,7 @@ jobs: CXX: sccache clang++ SCCACHE_GHA_ENABLED: 'true' steps: - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false sparse-checkout: .github/actions/install-clang diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 3b636e7cf84a67..4c8cc67515f9b3 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -23,7 +23,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 
+ uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL diff --git a/.github/workflows/commit-lint.yml b/.github/workflows/commit-lint.yml index da5fef0526a6cc..48fccde1946eea 100644 --- a/.github/workflows/commit-lint.yml +++ b/.github/workflows/commit-lint.yml @@ -17,7 +17,7 @@ jobs: run: | echo "plusOne=$((${{ github.event.pull_request.commits }} + 1))" >> $GITHUB_OUTPUT echo "minusOne=$((${{ github.event.pull_request.commits }} - 1))" >> $GITHUB_OUTPUT - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: fetch-depth: ${{ steps.nb-of-commits.outputs.plusOne }} persist-credentials: false diff --git a/.github/workflows/commit-queue.yml b/.github/workflows/commit-queue.yml index b619be09cb01e6..1f3bfe7297deb6 100644 --- a/.github/workflows/commit-queue.yml +++ b/.github/workflows/commit-queue.yml @@ -59,7 +59,7 @@ jobs: if: needs.get_mergeable_prs.outputs.numbers != '' runs-on: ubuntu-latest steps: - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: # A personal token is required because pushing with GITHUB_TOKEN will # prevent commits from running CI after they land. It needs diff --git a/.github/workflows/coverage-linux-without-intl.yml b/.github/workflows/coverage-linux-without-intl.yml index 9b235490bd13e1..379daf948a4285 100644 --- a/.github/workflows/coverage-linux-without-intl.yml +++ b/.github/workflows/coverage-linux-without-intl.yml @@ -48,7 +48,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} diff --git a/.github/workflows/coverage-linux.yml b/.github/workflows/coverage-linux.yml index ea4408e36241e0..adb1d6de1b1bbe 100644 --- a/.github/workflows/coverage-linux.yml +++ b/.github/workflows/coverage-linux.yml @@ -48,7 +48,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} diff --git a/.github/workflows/coverage-windows.yml b/.github/workflows/coverage-windows.yml index b2ac670e5ae877..ac74162b0962bc 100644 --- a/.github/workflows/coverage-windows.yml +++ b/.github/workflows/coverage-windows.yml @@ -45,7 +45,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: windows-2025 steps: - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} diff --git a/.github/workflows/create-release-proposal.yml b/.github/workflows/create-release-proposal.yml index fdcf4bddd0d449..ace808e1040764 100644 --- a/.github/workflows/create-release-proposal.yml +++ b/.github/workflows/create-release-proposal.yml @@ -33,7 +33,7 @@ jobs: RELEASE_LINE: ${{ inputs.release-line }} runs-on: ubuntu-latest steps: - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: 
actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: ref: ${{ env.STAGING_BRANCH }} persist-credentials: false diff --git a/.github/workflows/daily-wpt-fyi.yml b/.github/workflows/daily-wpt-fyi.yml index 450da8a269b03a..555ee34b39e17a 100644 --- a/.github/workflows/daily-wpt-fyi.yml +++ b/.github/workflows/daily-wpt-fyi.yml @@ -63,7 +63,7 @@ jobs: SHORT_SHA=$(node -p 'process.version.split(/-nightly\d{8}/)[1]') echo "NIGHTLY_REF=$(gh api /repos/nodejs/node/commits/$SHORT_SHA --jq '.sha')" >> $GITHUB_ENV - name: Checkout ${{ steps.setup-node.outputs.node-version }} - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false ref: ${{ env.NIGHTLY_REF || steps.setup-node.outputs.node-version }} @@ -79,7 +79,7 @@ jobs: run: rm -rf wpt working-directory: test/fixtures - name: Checkout epochs/daily WPT - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: repository: web-platform-tests/wpt persist-credentials: false diff --git a/.github/workflows/daily.yml b/.github/workflows/daily.yml index 3658c829504b2c..95b5cc90e426a7 100644 --- a/.github/workflows/daily.yml +++ b/.github/workflows/daily.yml @@ -15,7 +15,7 @@ jobs: build-lto: runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - name: Use Node.js ${{ env.NODE_VERSION }} diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml index ecda8d517bd7ee..251164a89c8cc4 100644 --- a/.github/workflows/doc.yml +++ b/.github/workflows/doc.yml @@ -24,7 +24,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - name: Use Node.js ${{ env.NODE_VERSION }} diff --git a/.github/workflows/find-inactive-collaborators.yml b/.github/workflows/find-inactive-collaborators.yml index 516de960c2aa8a..11911967900174 100644 --- a/.github/workflows/find-inactive-collaborators.yml +++ b/.github/workflows/find-inactive-collaborators.yml @@ -19,7 +19,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: fetch-depth: 0 persist-credentials: false diff --git a/.github/workflows/find-inactive-tsc.yml b/.github/workflows/find-inactive-tsc.yml index b8af6c14114a33..53a8916bcbe3fc 100644 --- a/.github/workflows/find-inactive-tsc.yml +++ b/.github/workflows/find-inactive-tsc.yml @@ -20,13 +20,13 @@ jobs: steps: - name: Checkout the repo - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: fetch-depth: 0 persist-credentials: false - name: Clone nodejs/TSC repository - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: fetch-depth: 0 path: .tmp diff --git a/.github/workflows/license-builder.yml b/.github/workflows/license-builder.yml index 6abd7865ea12ba..8c1fffd1285fcc 100644 --- 
a/.github/workflows/license-builder.yml +++ b/.github/workflows/license-builder.yml @@ -17,7 +17,7 @@ jobs: if: github.repository == 'nodejs/node' runs-on: ubuntu-latest steps: - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - run: ./tools/license-builder.sh # Run the license builder tool diff --git a/.github/workflows/lint-release-proposal.yml b/.github/workflows/lint-release-proposal.yml index f9959970a968db..3ca5c5ce4f5ea5 100644 --- a/.github/workflows/lint-release-proposal.yml +++ b/.github/workflows/lint-release-proposal.yml @@ -23,7 +23,7 @@ jobs: contents: read pull-requests: read steps: - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false fetch-depth: 2 diff --git a/.github/workflows/linters.yml b/.github/workflows/linters.yml index cf64d7eafa29f4..53378ae1404457 100644 --- a/.github/workflows/linters.yml +++ b/.github/workflows/linters.yml @@ -25,7 +25,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - name: Use Node.js ${{ env.NODE_VERSION }} @@ -40,7 +40,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} @@ -55,7 +55,7 @@ jobs: if: ${{ github.event.pull_request && github.event.pull_request.draft == false && github.base_ref == github.event.repository.default_branch }} runs-on: ubuntu-latest steps: - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: fetch-depth: 0 persist-credentials: false @@ -93,7 +93,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - name: Use Node.js ${{ env.NODE_VERSION }} @@ -142,7 +142,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false sparse-checkout: | @@ -169,7 +169,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false sparse-checkout: | @@ -193,7 +193,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false sparse-checkout: | @@ -207,7 +207,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - 
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - uses: mszostok/codeowners-validator@7f3f5e28c6d7b8dfae5731e54ce2272ca384592f @@ -217,7 +217,7 @@ jobs: if: ${{ github.event.pull_request }} runs-on: ubuntu-latest steps: - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: fetch-depth: 2 persist-credentials: false @@ -230,7 +230,7 @@ jobs: lint-readme: runs-on: ubuntu-latest steps: - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false sparse-checkout: | diff --git a/.github/workflows/notify-on-push.yml b/.github/workflows/notify-on-push.yml index ce4180b36ba09f..562c043b781778 100644 --- a/.github/workflows/notify-on-push.yml +++ b/.github/workflows/notify-on-push.yml @@ -32,7 +32,7 @@ jobs: if: github.repository == 'nodejs/node' runs-on: ubuntu-latest steps: - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - name: Check commit message diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index 72905c7f35c7c6..4d96ec7eb2a46a 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -38,7 +38,7 @@ jobs: egress-policy: audit # TODO: change to 'egress-policy: block' after couple of runs - name: Checkout code - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false diff --git a/.github/workflows/test-internet.yml b/.github/workflows/test-internet.yml index 757c1e18534bf6..1bf5f3bbb7567a 100644 --- a/.github/workflows/test-internet.yml +++ b/.github/workflows/test-internet.yml @@ -45,7 +45,7 @@ jobs: if: github.event_name == 'schedule' && github.repository == 'nodejs/node' || github.event.pull_request.draft == false runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} diff --git a/.github/workflows/test-linux.yml b/.github/workflows/test-linux.yml index e87080eba83431..db1f1db9fde26d 100644 --- a/.github/workflows/test-linux.yml +++ b/.github/workflows/test-linux.yml @@ -47,7 +47,7 @@ jobs: matrix: os: [ubuntu-24.04, ubuntu-24.04-arm] steps: - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false path: node diff --git a/.github/workflows/test-macos.yml b/.github/workflows/test-macos.yml index fbd087a63e9291..8681c2cf9869fa 100644 --- a/.github/workflows/test-macos.yml +++ b/.github/workflows/test-macos.yml @@ -49,7 +49,7 @@ jobs: CXX: sccache g++ SCCACHE_GHA_ENABLED: 'true' steps: - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false path: node diff --git a/.github/workflows/timezone-update.yml b/.github/workflows/timezone-update.yml index 
5465298fa9da50..a7afc7ad0700e8 100644 --- a/.github/workflows/timezone-update.yml +++ b/.github/workflows/timezone-update.yml @@ -20,12 +20,12 @@ jobs: steps: - name: Checkout nodejs/node - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - name: Checkout unicode-org/icu-data - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: path: icu-data persist-credentials: false diff --git a/.github/workflows/tools.yml b/.github/workflows/tools.yml index 0ba5837e11a381..50d101b5e22635 100644 --- a/.github/workflows/tools.yml +++ b/.github/workflows/tools.yml @@ -280,7 +280,7 @@ jobs: run: | git config --global user.name "Node.js GitHub Bot" git config --global user.email "github-bot@iojs.org" - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 if: github.event_name == 'schedule' || inputs.id == 'all' || inputs.id == matrix.id with: persist-credentials: false diff --git a/.github/workflows/update-openssl.yml b/.github/workflows/update-openssl.yml index ccd9868b6c50e5..f541e4669efd0c 100644 --- a/.github/workflows/update-openssl.yml +++ b/.github/workflows/update-openssl.yml @@ -14,7 +14,7 @@ jobs: if: github.repository == 'nodejs/node' runs-on: ubuntu-latest steps: - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - name: Check and download new OpenSSL version diff --git a/.github/workflows/update-v8.yml b/.github/workflows/update-v8.yml index c3d065cd1814e8..18e5436949aa24 100644 --- a/.github/workflows/update-v8.yml +++ b/.github/workflows/update-v8.yml @@ -16,7 +16,7 @@ jobs: if: github.repository == 'nodejs/node' runs-on: ubuntu-latest steps: - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - name: Cache node modules and update-v8 diff --git a/.github/workflows/update-wpt.yml b/.github/workflows/update-wpt.yml index 07cb6c4a156be6..cf5c0151ac52ec 100644 --- a/.github/workflows/update-wpt.yml +++ b/.github/workflows/update-wpt.yml @@ -27,7 +27,7 @@ jobs: subsystem: ${{ fromJSON(github.event.inputs.subsystems || '["url", "urlpattern", "WebCryptoAPI"]') }} steps: - - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false From a0c992cfc7db1525bf8589973c160797aa665313 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 3 Jan 2026 18:14:58 +0000 Subject: [PATCH 021/115] meta: bump actions/download-artifact from 6.0.0 to 7.0.0 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/download-artifact](https://github.com/actions/download-artifact) from 6.0.0 to 7.0.0. 
- [Release notes](https://github.com/actions/download-artifact/releases) - [Commits](https://github.com/actions/download-artifact/compare/018cc2cf5baa6db3ef3c5f8a56943fffe632ef53...37930b1c2abaa49bbe596cd826c3c89aef350131) --- updated-dependencies: - dependency-name: actions/download-artifact dependency-version: 7.0.0 dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] PR-URL: https://github.com/nodejs/node/pull/61242 Reviewed-By: Antoine du Hamel Reviewed-By: Colin Ihrig Reviewed-By: Ulises Gascón --- .github/workflows/build-tarball.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-tarball.yml b/.github/workflows/build-tarball.yml index 3d613fd4f7c30d..2f8e0842d274cd 100644 --- a/.github/workflows/build-tarball.yml +++ b/.github/workflows/build-tarball.yml @@ -86,7 +86,7 @@ jobs: - name: Environment Information run: npx envinfo - name: Download tarball - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 + uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 with: name: tarballs path: tarballs From b6cb8cd918f514dcbf010bd27446926ddff688ba Mon Sep 17 00:00:00 2001 From: Richard Lau Date: Fri, 16 Jan 2026 13:00:18 +0000 Subject: [PATCH 022/115] deps: V8: cherry-pick highway@dcc0ca1cd42 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Original commit message: Fix for GCC 15 compiler error on PPC8/PPC9/PPC10 Refs: https://github.com/google/highway/commit/dcc0ca1cd4245ecff9e5ba50818e47d5e2ccf699 PR-URL: https://github.com/nodejs/node/pull/61008 Fixes: https://github.com/nodejs/node/issues/60992 Refs: https://github.com/google/highway/issues/2443 Reviewed-By: Antoine du Hamel Reviewed-By: Colin Ihrig Reviewed-By: Juan José Arboleda --- common.gypi | 2 +- .../highway/src/hwy/ops/ppc_vsx-inl.h | 167 +++++++++++------- 2 files changed, 104 insertions(+), 65 deletions(-) diff --git a/common.gypi b/common.gypi index 5adfd888711ae4..c5a7dc9cacf8b9 100644 --- a/common.gypi +++ b/common.gypi @@ -38,7 +38,7 @@ # Reset this number to 0 on major V8 upgrades. # Increment by one for each non-official patch applied to deps/v8. 
- 'v8_embedder_string': '-node.40', + 'v8_embedder_string': '-node.41', ##### V8 defaults for Node.js ##### diff --git a/deps/v8/third_party/highway/src/hwy/ops/ppc_vsx-inl.h b/deps/v8/third_party/highway/src/hwy/ops/ppc_vsx-inl.h index d216c54853cde9..73e736e8baa0ee 100644 --- a/deps/v8/third_party/highway/src/hwy/ops/ppc_vsx-inl.h +++ b/deps/v8/third_party/highway/src/hwy/ops/ppc_vsx-inl.h @@ -3701,16 +3701,73 @@ static HWY_INLINE V VsxF2INormalizeSrcVals(V v) { #endif } +template +static HWY_INLINE HWY_MAYBE_UNUSED VFromD>> +VsxXvcvspsxds(VF32 vf32) { + using VI64 = VFromD>>; +#if (HWY_COMPILER_GCC_ACTUAL && HWY_COMPILER_GCC_ACTUAL < 1500) || \ + HWY_HAS_BUILTIN(__builtin_vsx_xvcvspsxds) + // Use __builtin_vsx_xvcvspsxds if it is available (which is the case with + // GCC 4.8 through GCC 14 or Clang 13 or later on PPC8/PPC9/PPC10) + return VI64{__builtin_vsx_xvcvspsxds(vf32.raw)}; +#elif HWY_COMPILER_GCC_ACTUAL >= 1500 && HWY_IS_LITTLE_ENDIAN + // On little-endian PPC8/PPC9/PPC10 with GCC 15 or later, use the F32->I64 + // vec_signedo intrinsic as the __builtin_vsx_xvcvspsxds intrinsic has been + // removed from GCC in GCC 15 + return VI64{vec_signedo(vf32.raw)}; +#elif HWY_COMPILER_GCC_ACTUAL >= 1500 && HWY_IS_BIG_ENDIAN + // On big-endian PPC8/PPC9/PPC10 with GCC 15 or later, use the F32->I64 + // vec_signede intrinsic as the __builtin_vsx_xvcvspsxds intrinsic has been + // removed from GCC in GCC 15 + return VI64{vec_signede(vf32.raw)}; +#else + // Inline assembly fallback for older versions of Clang that do not have the + // __builtin_vsx_xvcvspsxds intrinsic + __vector signed long long raw_result; + __asm__("xvcvspsxds %x0, %x1" : "=wa"(raw_result) : "wa"(vf32.raw) :); + return VI64{raw_result}; +#endif +} + +template +static HWY_INLINE HWY_MAYBE_UNUSED VFromD>> +VsxXvcvspuxds(VF32 vf32) { + using VU64 = VFromD>>; +#if (HWY_COMPILER_GCC_ACTUAL && HWY_COMPILER_GCC_ACTUAL < 1500) || \ + HWY_HAS_BUILTIN(__builtin_vsx_xvcvspuxds) + // Use __builtin_vsx_xvcvspuxds if it is available (which is the case with + // GCC 4.8 through GCC 14 or Clang 13 or later on PPC8/PPC9/PPC10) + return VU64{reinterpret_cast<__vector unsigned long long>( + __builtin_vsx_xvcvspuxds(vf32.raw))}; +#elif HWY_COMPILER_GCC_ACTUAL >= 1500 && HWY_IS_LITTLE_ENDIAN + // On little-endian PPC8/PPC9/PPC10 with GCC 15 or later, use the F32->U64 + // vec_unsignedo intrinsic as the __builtin_vsx_xvcvspuxds intrinsic has been + // removed from GCC in GCC 15 + return VU64{vec_unsignedo(vf32.raw)}; +#elif HWY_COMPILER_GCC_ACTUAL >= 1500 && HWY_IS_BIG_ENDIAN + // On big-endian PPC8/PPC9/PPC10 with GCC 15 or later, use the F32->U64 + // vec_unsignedo intrinsic as the __builtin_vsx_xvcvspuxds intrinsic has been + // removed from GCC in GCC 15 + return VU64{vec_unsignede(vf32.raw)}; +#else + // Inline assembly fallback for older versions of Clang that do not have the + // __builtin_vsx_xvcvspuxds intrinsic + __vector unsigned long long raw_result; + __asm__("xvcvspuxds %x0, %x1" : "=wa"(raw_result) : "wa"(vf32.raw) :); + return VU64{raw_result}; +#endif +} + } // namespace detail #endif // !HWY_S390X_HAVE_Z14 template HWY_API VFromD PromoteTo(D di64, VFromD> v) { -#if !HWY_S390X_HAVE_Z14 && \ - (HWY_COMPILER_GCC_ACTUAL || HWY_HAS_BUILTIN(__builtin_vsx_xvcvspsxds)) - const __vector float raw_v = - detail::VsxF2INormalizeSrcVals(InterleaveLower(v, v)).raw; - return VFromD{__builtin_vsx_xvcvspsxds(raw_v)}; +#if !HWY_S390X_HAVE_Z14 + const Repartition dt_f32; + const auto vt_f32 = ResizeBitCast(dt_f32, v); + return 
detail::VsxXvcvspsxds( + detail::VsxF2INormalizeSrcVals(InterleaveLower(vt_f32, vt_f32))); #else const RebindToFloat df64; return ConvertTo(di64, PromoteTo(df64, v)); @@ -3719,12 +3776,11 @@ HWY_API VFromD PromoteTo(D di64, VFromD> v) { template HWY_API VFromD PromoteTo(D du64, VFromD> v) { -#if !HWY_S390X_HAVE_Z14 && \ - (HWY_COMPILER_GCC_ACTUAL || HWY_HAS_BUILTIN(__builtin_vsx_xvcvspuxds)) - const __vector float raw_v = - detail::VsxF2INormalizeSrcVals(InterleaveLower(v, v)).raw; - return VFromD{reinterpret_cast<__vector unsigned long long>( - __builtin_vsx_xvcvspuxds(raw_v))}; +#if !HWY_S390X_HAVE_Z14 + const Repartition dt_f32; + const auto vt_f32 = ResizeBitCast(dt_f32, v); + return detail::VsxXvcvspuxds( + detail::VsxF2INormalizeSrcVals(InterleaveLower(vt_f32, vt_f32))); #else const RebindToFloat df64; return ConvertTo(du64, PromoteTo(df64, v)); @@ -3829,12 +3885,10 @@ HWY_API VFromD PromoteUpperTo(D df64, Vec128 v) { template HWY_API VFromD PromoteUpperTo(D di64, Vec128 v) { -#if !HWY_S390X_HAVE_Z14 && \ - (HWY_COMPILER_GCC_ACTUAL || HWY_HAS_BUILTIN(__builtin_vsx_xvcvspsxds)) - const __vector float raw_v = - detail::VsxF2INormalizeSrcVals(InterleaveUpper(Full128(), v, v)) - .raw; - return VFromD{__builtin_vsx_xvcvspsxds(raw_v)}; +#if !HWY_S390X_HAVE_Z14 + (void)di64; + return detail::VsxXvcvspsxds( + detail::VsxF2INormalizeSrcVals(InterleaveUpper(Full128(), v, v))); #else const RebindToFloat df64; return ConvertTo(di64, PromoteUpperTo(df64, v)); @@ -3843,13 +3897,10 @@ HWY_API VFromD PromoteUpperTo(D di64, Vec128 v) { template HWY_API VFromD PromoteUpperTo(D du64, Vec128 v) { -#if !HWY_S390X_HAVE_Z14 && \ - (HWY_COMPILER_GCC_ACTUAL || HWY_HAS_BUILTIN(__builtin_vsx_xvcvspuxds)) - const __vector float raw_v = - detail::VsxF2INormalizeSrcVals(InterleaveUpper(Full128(), v, v)) - .raw; - return VFromD{reinterpret_cast<__vector unsigned long long>( - __builtin_vsx_xvcvspuxds(raw_v))}; +#if !HWY_S390X_HAVE_Z14 + (void)du64; + return detail::VsxXvcvspuxds( + detail::VsxF2INormalizeSrcVals(InterleaveUpper(Full128(), v, v))); #else const RebindToFloat df64; return ConvertTo(du64, PromoteUpperTo(df64, v)); @@ -3937,20 +3988,18 @@ HWY_INLINE VFromD PromoteEvenTo(hwy::SignedTag /*to_type_tag*/, hwy::SizeTag<8> /*to_lane_size_tag*/, hwy::FloatTag /*from_type_tag*/, D d_to, V v) { -#if !HWY_S390X_HAVE_Z14 && \ - (HWY_COMPILER_GCC_ACTUAL || HWY_HAS_BUILTIN(__builtin_vsx_xvcvspsxds)) +#if !HWY_S390X_HAVE_Z14 (void)d_to; const auto normalized_v = detail::VsxF2INormalizeSrcVals(v); #if HWY_IS_LITTLE_ENDIAN - // __builtin_vsx_xvcvspsxds expects the source values to be in the odd lanes - // on little-endian PPC, and the vec_sld operation below will shift the even + // VsxXvcvspsxds expects the source values to be in the odd lanes on + // little-endian PPC, and the Shuffle2103 operation below will shift the even // lanes of normalized_v into the odd lanes. - return VFromD{ - __builtin_vsx_xvcvspsxds(vec_sld(normalized_v.raw, normalized_v.raw, 4))}; + return VsxXvcvspsxds(Shuffle2103(normalized_v)); #else - // __builtin_vsx_xvcvspsxds expects the source values to be in the even lanes - // on big-endian PPC. - return VFromD{__builtin_vsx_xvcvspsxds(normalized_v.raw)}; + // VsxXvcvspsxds expects the source values to be in the even lanes on + // big-endian PPC. 
+ return VsxXvcvspsxds(normalized_v); #endif #else const RebindToFloat df64; @@ -3965,22 +4014,18 @@ HWY_INLINE VFromD PromoteEvenTo(hwy::UnsignedTag /*to_type_tag*/, hwy::SizeTag<8> /*to_lane_size_tag*/, hwy::FloatTag /*from_type_tag*/, D d_to, V v) { -#if !HWY_S390X_HAVE_Z14 && \ - (HWY_COMPILER_GCC_ACTUAL || HWY_HAS_BUILTIN(__builtin_vsx_xvcvspuxds)) +#if !HWY_S390X_HAVE_Z14 (void)d_to; const auto normalized_v = detail::VsxF2INormalizeSrcVals(v); #if HWY_IS_LITTLE_ENDIAN - // __builtin_vsx_xvcvspuxds expects the source values to be in the odd lanes - // on little-endian PPC, and the vec_sld operation below will shift the even - // lanes of normalized_v into the odd lanes. - return VFromD{ - reinterpret_cast<__vector unsigned long long>(__builtin_vsx_xvcvspuxds( - vec_sld(normalized_v.raw, normalized_v.raw, 4)))}; + // VsxXvcvspuxds expects the source values to be in the odd lanes + // on little-endian PPC, and the Shuffle2103 operation below will shift the + // even lanes of normalized_v into the odd lanes. + return VsxXvcvspuxds(Shuffle2103(normalized_v)); #else - // __builtin_vsx_xvcvspuxds expects the source values to be in the even lanes + // VsxXvcvspuxds expects the source values to be in the even lanes // on big-endian PPC. - return VFromD{reinterpret_cast<__vector unsigned long long>( - __builtin_vsx_xvcvspuxds(normalized_v.raw))}; + return VsxXvcvspuxds(normalized_v); #endif #else const RebindToFloat df64; @@ -4022,20 +4067,18 @@ HWY_INLINE VFromD PromoteOddTo(hwy::SignedTag /*to_type_tag*/, hwy::SizeTag<8> /*to_lane_size_tag*/, hwy::FloatTag /*from_type_tag*/, D d_to, V v) { -#if !HWY_S390X_HAVE_Z14 && \ - (HWY_COMPILER_GCC_ACTUAL || HWY_HAS_BUILTIN(__builtin_vsx_xvcvspsxds)) +#if !HWY_S390X_HAVE_Z14 (void)d_to; const auto normalized_v = detail::VsxF2INormalizeSrcVals(v); #if HWY_IS_LITTLE_ENDIAN - // __builtin_vsx_xvcvspsxds expects the source values to be in the odd lanes + // VsxXvcvspsxds expects the source values to be in the odd lanes // on little-endian PPC - return VFromD{__builtin_vsx_xvcvspsxds(normalized_v.raw)}; + return VsxXvcvspsxds(normalized_v); #else - // __builtin_vsx_xvcvspsxds expects the source values to be in the even lanes - // on big-endian PPC, and the vec_sld operation below will shift the odd lanes - // of normalized_v into the even lanes. - return VFromD{ - __builtin_vsx_xvcvspsxds(vec_sld(normalized_v.raw, normalized_v.raw, 4))}; + // VsxXvcvspsxds expects the source values to be in the even lanes + // on big-endian PPC, and the Shuffle0321 operation below will shift the odd + // lanes of normalized_v into the even lanes. 
+ return VsxXvcvspsxds(Shuffle0321(normalized_v)); #endif #else const RebindToFloat df64; @@ -4050,22 +4093,18 @@ HWY_INLINE VFromD PromoteOddTo(hwy::UnsignedTag /*to_type_tag*/, hwy::SizeTag<8> /*to_lane_size_tag*/, hwy::FloatTag /*from_type_tag*/, D d_to, V v) { -#if !HWY_S390X_HAVE_Z14 && \ - (HWY_COMPILER_GCC_ACTUAL || HWY_HAS_BUILTIN(__builtin_vsx_xvcvspuxds)) +#if !HWY_S390X_HAVE_Z14 (void)d_to; const auto normalized_v = detail::VsxF2INormalizeSrcVals(v); #if HWY_IS_LITTLE_ENDIAN - // __builtin_vsx_xvcvspuxds expects the source values to be in the odd lanes + // VsxXvcvspuxds expects the source values to be in the odd lanes // on little-endian PPC - return VFromD{reinterpret_cast<__vector unsigned long long>( - __builtin_vsx_xvcvspuxds(normalized_v.raw))}; + return VsxXvcvspuxds(normalized_v); #else - // __builtin_vsx_xvcvspuxds expects the source values to be in the even lanes - // on big-endian PPC, and the vec_sld operation below will shift the odd lanes - // of normalized_v into the even lanes. - return VFromD{ - reinterpret_cast<__vector unsigned long long>(__builtin_vsx_xvcvspuxds( - vec_sld(normalized_v.raw, normalized_v.raw, 4)))}; + // VsxXvcvspuxds expects the source values to be in the even lanes + // on big-endian PPC, and the Shuffle0321 operation below will shift the odd + // lanes of normalized_v into the even lanes. + return VsxXvcvspuxds(Shuffle0321(normalized_v)); #endif #else const RebindToFloat df64; From f1ab938cb9824bbe9b85dcde858139f4d21ba7db Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Mon, 17 Nov 2025 19:10:24 +0200 Subject: [PATCH 023/115] test: ensure assertions are reached on more tests PR-URL: https://github.com/nodejs/node/pull/60728 Reviewed-By: Colin Ihrig Reviewed-By: Anna Henningsen --- test/eslint.config_partial.mjs | 2 +- test/parallel/test-tcp-wrap-connect.js | 10 +-- test/parallel/test-tcp-wrap-listen.js | 4 +- .../test-timers-immediate-queue-throw.js | 2 +- .../test-timers-interval-promisified.js | 4 +- .../test-timers-promises-scheduler.js | 14 ++-- test/parallel/test-timers-refresh.js | 22 ++--- .../test-timers-unref-throw-then-ref.js | 4 +- test/parallel/test-timers-user-call.js | 4 +- test/parallel/test-tls-add-context.js | 4 +- test/parallel/test-tls-alpn-server-client.js | 4 +- test/parallel/test-tls-cert-chains-in-ca.js | 7 +- test/parallel/test-tls-cipher-list.js | 4 +- .../parallel/test-tls-cli-min-max-conflict.js | 4 +- test/parallel/test-tls-client-auth.js | 82 ++++++++---------- test/parallel/test-tls-client-mindhsize.js | 12 +-- .../test-tls-client-renegotiation-13.js | 6 +- .../test-tls-client-renegotiation-limit.js | 29 +++---- test/parallel/test-tls-client-resume.js | 4 +- .../test-tls-close-event-after-write.js | 4 +- test/parallel/test-tls-close-notify.js | 4 +- ...test-tls-connect-allow-half-open-option.js | 4 +- test/parallel/test-tls-connect-memleak.js | 4 +- .../test-tls-connect-secure-context.js | 14 ++-- test/parallel/test-tls-connect-simple.js | 4 +- test/parallel/test-tls-delayed-attach.js | 4 +- test/parallel/test-tls-destroy-stream.js | 24 +++--- test/parallel/test-tls-econnreset.js | 4 +- .../parallel/test-tls-exportkeyingmaterial.js | 4 +- test/parallel/test-tls-fast-writing.js | 4 +- test/parallel/test-tls-getprotocol.js | 4 +- test/parallel/test-tls-inception.js | 8 +- test/parallel/test-tls-interleave.js | 8 +- test/parallel/test-tls-junk-server.js | 4 +- test/parallel/test-tls-keylog-tlsv13.js | 4 +- test/parallel/test-tls-max-send-fragment.js | 8 +- test/parallel/test-tls-multi-key.js | 6 +- 
test/parallel/test-tls-multi-pfx.js | 4 +- .../test-tls-net-connect-prefer-path.js | 12 +-- test/parallel/test-tls-no-rsa-key.js | 4 +- test/parallel/test-tls-no-sslv3.js | 4 +- test/parallel/test-tls-ocsp-callback.js | 4 +- .../parallel/test-tls-onread-static-buffer.js | 48 +++++------ test/parallel/test-tls-over-http-tunnel.js | 12 +-- test/parallel/test-tls-pause.js | 4 +- test/parallel/test-tls-peer-certificate.js | 10 +-- .../test-tls-pfx-authorizationerror.js | 8 +- test/parallel/test-tls-psk-circuit.js | 4 +- test/parallel/test-tls-psk-errors.js | 4 +- test/parallel/test-tls-psk-server.js | 8 +- .../test-tls-secure-context-usage-order.js | 8 +- .../test-tls-server-connection-server.js | 4 +- test/parallel/test-tls-server-setkeycert.js | 4 +- test/parallel/test-tls-server-verify.js | 17 ++-- test/parallel/test-tls-session-cache.js | 16 ++-- test/parallel/test-tls-set-encoding.js | 8 +- test/parallel/test-tls-set-secure-context.js | 2 +- test/parallel/test-tls-sni-option.js | 12 +-- test/parallel/test-tls-sni-server-client.js | 12 +-- test/parallel/test-tls-sni-servername.js | 4 +- test/parallel/test-tls-socket-close.js | 20 ++--- .../test-tls-socket-default-options.js | 4 +- ...tls-socket-failed-handshake-emits-error.js | 8 +- .../test-tls-startcom-wosign-whitelist.js | 12 +-- test/parallel/test-tls-ticket-cluster.js | 4 +- test/parallel/test-tls-ticket.js | 4 +- test/parallel/test-tls-tlswrap-segfault.js | 4 +- .../test-tls-translate-peer-certificate.js | 60 ++++++------- test/parallel/test-tls-wrap-timeout.js | 12 ++- test/parallel/test-trace-events-console.js | 19 ++--- .../test-trace-events-dynamic-enable.js | 4 +- .../parallel/test-trace-events-environment.js | 13 ++- test/parallel/test-tty-stdin-pipe.js | 19 ++--- test/parallel/test-tz-version.js | 4 +- .../parallel/test-url-domain-ascii-unicode.js | 6 +- test/parallel/test-urlpattern-invalidthis.js | 8 +- test/parallel/test-urlpattern-types.js | 20 ++--- test/parallel/test-urlpattern.js | 6 +- test/parallel/test-util-callbackify.js | 6 +- test/parallel/test-util-deprecate.js | 6 +- test/parallel/test-util-inspect.js | 12 +-- test/parallel/test-util-sigint-watchdog.js | 84 +++++++++---------- 82 files changed, 425 insertions(+), 461 deletions(-) diff --git a/test/eslint.config_partial.mjs b/test/eslint.config_partial.mjs index e18896580d896f..fd8ce1a69b256c 100644 --- a/test/eslint.config_partial.mjs +++ b/test/eslint.config_partial.mjs @@ -204,7 +204,7 @@ export default [ Array.from({ length: 13 }, (_, i) => String.fromCharCode(0x61 + i, 42)).join(',') },n*,${ // 0x61 is code for 'a', this generates a string enumerating latin letters: 'z*,y*,…' - Array.from({ length: 5 }, (_, i) => String.fromCharCode(0x61 + 25 - i, 42)).join(',') + Array.from({ length: 7 }, (_, i) => String.fromCharCode(0x61 + 25 - i, 42)).join(',') }}.{js,mjs,cjs}`, ], rules: { diff --git a/test/parallel/test-tcp-wrap-connect.js b/test/parallel/test-tcp-wrap-connect.js index 5e3e81f6e11712..15389a84048b98 100644 --- a/test/parallel/test-tcp-wrap-connect.js +++ b/test/parallel/test-tcp-wrap-connect.js @@ -1,6 +1,6 @@ // Flags: --expose-internals 'use strict'; -require('../common'); +const common = require('../common'); const assert = require('assert'); const { internalBinding } = require('internal/test/binding'); const { @@ -17,7 +17,7 @@ function makeConnection() { const err = client.connect(req, '127.0.0.1', this.address().port); assert.strictEqual(err, 0); - req.oncomplete = function(status, client_, req_, readable, writable) { + req.oncomplete = 
common.mustCall((status, client_, req_, readable, writable) => { assert.strictEqual(status, 0); assert.strictEqual(client_, client); assert.strictEqual(req_, req); @@ -28,14 +28,14 @@ function makeConnection() { const err = client.shutdown(shutdownReq); assert.strictEqual(err, 0); - shutdownReq.oncomplete = function(status, client_, error) { + shutdownReq.oncomplete = common.mustCall((status, client_, error) => { assert.strictEqual(status, 0); assert.strictEqual(client_, client); assert.strictEqual(error, undefined); shutdownCount++; client.close(); - }; - }; + }); + }); } let connectCount = 0; diff --git a/test/parallel/test-tcp-wrap-listen.js b/test/parallel/test-tcp-wrap-listen.js index a19aed15071613..367b3bb4263872 100644 --- a/test/parallel/test-tcp-wrap-listen.js +++ b/test/parallel/test-tcp-wrap-listen.js @@ -24,7 +24,7 @@ server.getsockname(port); server.listen(128); -server.onconnection = (err, client) => { +server.onconnection = common.mustCall((err, client) => { assert.strictEqual(client.writeQueueSize, 0); console.log('got connection'); @@ -82,7 +82,7 @@ server.onconnection = (err, client) => { maybeCloseClient(); } }, 2); -}; +}); const net = require('net'); diff --git a/test/parallel/test-timers-immediate-queue-throw.js b/test/parallel/test-timers-immediate-queue-throw.js index e5aded86c55785..05b51f568a1917 100644 --- a/test/parallel/test-timers-immediate-queue-throw.js +++ b/test/parallel/test-timers-immediate-queue-throw.js @@ -31,7 +31,7 @@ process.once('uncaughtException', common.mustCall((err, errorOrigin) => { const d1 = domain.create(); d1.once('error', common.expectsError(errObj)); -d1.once('error', () => assert.strictEqual(stage, 0)); +d1.once('error', common.mustCall(() => assert.strictEqual(stage, 0))); const run = common.mustCall((callStage) => { assert(callStage >= stage); diff --git a/test/parallel/test-timers-interval-promisified.js b/test/parallel/test-timers-interval-promisified.js index 88a734b25e044d..21c936263168c3 100644 --- a/test/parallel/test-timers-interval-promisified.js +++ b/test/parallel/test-timers-interval-promisified.js @@ -198,7 +198,7 @@ process.on('multipleResolves', common.mustNotCall()); const { signal } = controller; const delay = 10; let totalIterations = 0; - const timeoutLoop = runInterval(async (iterationNumber) => { + const timeoutLoop = runInterval(common.mustCallAtLeast(async (iterationNumber) => { await setPromiseTimeout(delay * 4); if (iterationNumber <= 2) { assert.strictEqual(signal.aborted, false); @@ -212,7 +212,7 @@ process.on('multipleResolves', common.mustNotCall()); if (iterationNumber > totalIterations) { totalIterations = iterationNumber; } - }, delay, signal); + }, 0), delay, signal); timeoutLoop.catch(common.mustCall(() => { assert.ok(totalIterations >= 3, `iterations was ${totalIterations} < 3`); diff --git a/test/parallel/test-timers-promises-scheduler.js b/test/parallel/test-timers-promises-scheduler.js index 4eda43586f6c1d..0855db34b1b405 100644 --- a/test/parallel/test-timers-promises-scheduler.js +++ b/test/parallel/test-timers-promises-scheduler.js @@ -4,11 +4,7 @@ const common = require('../common'); const { scheduler } = require('timers/promises'); const { setTimeout } = require('timers'); -const { - strictEqual, - rejects, - throws, -} = require('assert'); +const assert = require('assert'); async function testYield() { await scheduler.yield(); @@ -23,7 +19,7 @@ async function testWait() { let value = 0; setTimeout(() => value++, 10); await scheduler.wait(15); - strictEqual(value, 1); + 
assert.strictEqual(value, 1); } testWait().then(common.mustCall()); @@ -32,7 +28,7 @@ async function testCancelableWait1() { const ac = new AbortController(); const wait = scheduler.wait(1e6, { signal: ac.signal }); ac.abort(); - await rejects(wait, { + await assert.rejects(wait, { code: 'ABORT_ERR', message: 'The operation was aborted', }); @@ -42,7 +38,7 @@ testCancelableWait1().then(common.mustCall()); async function testCancelableWait2() { const wait = scheduler.wait(10000, { signal: AbortSignal.abort() }); - await rejects(wait, { + await assert.rejects(wait, { code: 'ABORT_ERR', message: 'The operation was aborted', }); @@ -50,6 +46,6 @@ async function testCancelableWait2() { testCancelableWait2().then(common.mustCall()); -throws(() => new scheduler.constructor(), { +assert.throws(() => new scheduler.constructor(), { code: 'ERR_ILLEGAL_CONSTRUCTOR', }); diff --git a/test/parallel/test-timers-refresh.js b/test/parallel/test-timers-refresh.js index a26c4efac30b6c..174039538e6ba0 100644 --- a/test/parallel/test-timers-refresh.js +++ b/test/parallel/test-timers-refresh.js @@ -4,7 +4,7 @@ const common = require('../common'); -const { strictEqual, throws } = require('assert'); +const assert = require('assert'); const { setUnrefTimeout } = require('internal/timers'); // Schedule the unrefed cases first so that the later case keeps the event loop @@ -24,16 +24,16 @@ const { setUnrefTimeout } = require('internal/timers'); // This relies on implicit timers handle sorting within libuv. setTimeout(common.mustCall(() => { - strictEqual(called, false, 'unref()\'d timer returned before check'); + assert.strictEqual(called, false); }), 1); - strictEqual(timer.refresh(), timer); + assert.strictEqual(timer.refresh(), timer); } // Should throw with non-functions { [null, true, false, 0, 1, NaN, '', 'foo', {}, Symbol()].forEach((cb) => { - throws( + assert.throws( () => setUnrefTimeout(cb), { code: 'ERR_INVALID_ARG_TYPE', @@ -50,10 +50,10 @@ const { setUnrefTimeout } = require('internal/timers'); }), 1); setUnrefTimeout(common.mustCall(() => { - strictEqual(called, false, 'unref pooled timer returned before check'); + assert.strictEqual(called, false); }), 1); - strictEqual(timer.refresh(), timer); + assert.strictEqual(timer.refresh(), timer); } // regular timer @@ -64,10 +64,10 @@ const { setUnrefTimeout } = require('internal/timers'); }), 1); setTimeout(common.mustCall(() => { - strictEqual(called, false, 'pooled timer returned before check'); + assert.strictEqual(called, false); }), 1); - strictEqual(timer.refresh(), timer); + assert.strictEqual(timer.refresh(), timer); } // regular timer @@ -78,7 +78,7 @@ const { setUnrefTimeout } = require('internal/timers'); called = true; process.nextTick(common.mustCall(() => { timer.refresh(); - strictEqual(timer.hasRef(), true); + assert.strictEqual(timer.hasRef(), true); })); } }, 2), 1); @@ -95,8 +95,8 @@ const { setUnrefTimeout } = require('internal/timers'); }, 2), 1); setTimeout(common.mustCall(() => { - strictEqual(called, 0, 'pooled timer returned before check'); + assert.strictEqual(called, 0); }), 1); - strictEqual(timer.refresh(), timer); + assert.strictEqual(timer.refresh(), timer); } diff --git a/test/parallel/test-timers-unref-throw-then-ref.js b/test/parallel/test-timers-unref-throw-then-ref.js index 1dd5fdd0ad2786..d9f23ea19284b9 100644 --- a/test/parallel/test-timers-unref-throw-then-ref.js +++ b/test/parallel/test-timers-unref-throw-then-ref.js @@ -9,11 +9,11 @@ process.once('uncaughtException', common.mustCall((err) => { })); let called = 
false; -const t = setTimeout(() => { +const t = setTimeout(common.mustCall(() => { assert(!called); called = true; t.ref(); throw new Error('Timeout Error'); -}, 1).unref(); +}), 1).unref(); setTimeout(common.mustCall(), 1); diff --git a/test/parallel/test-timers-user-call.js b/test/parallel/test-timers-user-call.js index 4ff24e688b5aa3..9ebb6896d04248 100644 --- a/test/parallel/test-timers-user-call.js +++ b/test/parallel/test-timers-user-call.js @@ -25,12 +25,12 @@ const common = require('../common'); } { - const testInterval = (...args) => { + const testInterval = common.mustCall((...args) => { const fn = common.mustCall(() => { clearInterval(interval); }); fn.call = 'not a function'; fn.apply = 'also not a function'; const interval = setInterval(fn, 1, ...args); - }; + }, 5); testInterval(); testInterval('oneArg'); diff --git a/test/parallel/test-tls-add-context.js b/test/parallel/test-tls-add-context.js index 8d02866ce51c5e..d9ac1d16ae3df1 100644 --- a/test/parallel/test-tls-add-context.js +++ b/test/parallel/test-tls-add-context.js @@ -22,7 +22,7 @@ const serverOptions = { let connections = 0; -const server = tls.createServer(serverOptions, (c) => { +const server = tls.createServer(serverOptions, common.mustCall((c) => { if (++connections === 3) { server.close(); } @@ -31,7 +31,7 @@ const server = tls.createServer(serverOptions, (c) => { return; } assert.strictEqual(c.authorized, true); -}); +}, 3)); const secureContext = { key: loadPEM('agent1-key'), diff --git a/test/parallel/test-tls-alpn-server-client.js b/test/parallel/test-tls-alpn-server-client.js index a364d05e3dc210..92dfd493851056 100644 --- a/test/parallel/test-tls-alpn-server-client.js +++ b/test/parallel/test-tls-alpn-server-client.js @@ -235,7 +235,7 @@ function TestALPNCallback() { ALPNProtocols: ['a'], }]; - runTest(clientsOptions, serverOptions, function(results) { + runTest(clientsOptions, serverOptions, common.mustCall((results) => { // Callback picks 2nd preference => picks 'b' checkResults(results[0], { server: { ALPN: 'b' }, @@ -247,7 +247,7 @@ function TestALPNCallback() { assert.ok(allowedErrors.includes(results[1].client.error.code), `'${results[1].client.error.code}' was not one of ${allowedErrors}.`); TestBadALPNCallback(); - }); + })); } function TestBadALPNCallback() { diff --git a/test/parallel/test-tls-cert-chains-in-ca.js b/test/parallel/test-tls-cert-chains-in-ca.js index 8b4cbac36c2902..1c97dc8542eb1e 100644 --- a/test/parallel/test-tls-cert-chains-in-ca.js +++ b/test/parallel/test-tls-cert-chains-in-ca.js @@ -1,5 +1,5 @@ 'use strict'; -require('../common'); +const common = require('../common'); const fixtures = require('../common/fixtures'); // Check cert chain is received by client, and is completed with the ca cert @@ -25,8 +25,7 @@ connect({ key: keys.agent6.key, ca: agent6Middle, }, -}, function(err, pair, cleanup) { - assert.ifError(err); +}, common.mustSucceed((pair, cleanup) => { const peer = pair.client.conn.getPeerCertificate(); debug('peer:\n', peer); @@ -42,4 +41,4 @@ connect({ assert.match(root.serialNumber, /4AB16C8DFD6A7D0D2DFCABDF9C4B0E92C6AD0229/i); return cleanup(); -}); +})); diff --git a/test/parallel/test-tls-cipher-list.js b/test/parallel/test-tls-cipher-list.js index b1a61405898a9a..7371e2f192794b 100644 --- a/test/parallel/test-tls-cipher-list.js +++ b/test/parallel/test-tls-cipher-list.js @@ -18,9 +18,9 @@ function doCheck(arg, expression, check) { .on('error', common.mustNotCall()) .stdout.on('data', function(chunk) { out += chunk; - }).on('end', function() { + }).on('end', 
common.mustCall(() => { assert.strictEqual(out.trim(), check); - }).on('error', common.mustNotCall()); + })).on('error', common.mustNotCall()); } // Test the default unmodified version diff --git a/test/parallel/test-tls-cli-min-max-conflict.js b/test/parallel/test-tls-cli-min-max-conflict.js index fe4eba558fee01..ee1c7e2b0c5b4b 100644 --- a/test/parallel/test-tls-cli-min-max-conflict.js +++ b/test/parallel/test-tls-cli-min-max-conflict.js @@ -8,7 +8,7 @@ const assert = require('assert'); const child_process = require('child_process'); const args = ['--tls-min-v1.3', '--tls-max-v1.2', '-p', 'process.version']; -child_process.execFile(process.argv[0], args, (err) => { +child_process.execFile(process.argv[0], args, common.mustCall((err) => { assert(err); assert.match(err.message, /not both/); -}); +})); diff --git a/test/parallel/test-tls-client-auth.js b/test/parallel/test-tls-client-auth.js index b347c0a88df571..04bf40b9a9e1ac 100644 --- a/test/parallel/test-tls-client-auth.js +++ b/test/parallel/test-tls-client-auth.js @@ -47,10 +47,9 @@ connect({ ca: client.ca, requestCert: true, }, -}, function(err, pair, cleanup) { - assert.ifError(err); +}, common.mustSucceed((pair, cleanup) => { return cleanup(); -}); +})); // As above, but without requesting client's cert. connect({ @@ -63,10 +62,9 @@ connect({ cert: server.cert, ca: client.ca, }, -}, function(err, pair, cleanup) { - assert.ifError(err); +}, common.mustSucceed((pair, cleanup) => { return cleanup(); -}); +})); // Request cert from TLS1.2 client that doesn't have one. connect({ @@ -81,7 +79,7 @@ connect({ ca: client.ca, requestCert: true, }, -}, function(err, pair, cleanup) { +}, common.mustCall((err, pair, cleanup) => { assert.strictEqual(pair.server.err.code, 'ERR_SSL_PEER_DID_NOT_RETURN_A_CERTIFICATE'); const expectedErr = hasOpenSSL(3, 2) ? @@ -89,7 +87,7 @@ connect({ assert.strictEqual(pair.client.err.code, expectedErr); return cleanup(); -}); +})); // Request cert from TLS1.3 client that doesn't have one. if (tls.DEFAULT_MAX_VERSION === 'TLSv1.3') connect({ @@ -103,7 +101,7 @@ if (tls.DEFAULT_MAX_VERSION === 'TLSv1.3') connect({ ca: client.ca, requestCert: true, }, -}, function(err, pair, cleanup) { +}, common.mustCall((err, pair, cleanup) => { assert.strictEqual(pair.server.err.code, 'ERR_SSL_PEER_DID_NOT_RETURN_A_CERTIFICATE'); @@ -115,7 +113,7 @@ if (tls.DEFAULT_MAX_VERSION === 'TLSv1.3') connect({ assert.strictEqual(err.code, 'ERR_SSL_TLSV13_ALERT_CERTIFICATE_REQUIRED'); cleanup(); })); -}); +})); // Typical configuration error, incomplete cert chains sent, we have to know the // peer's subordinate CAs in order to verify the peer. @@ -132,10 +130,9 @@ connect({ ca: [client.ca, client.subca], requestCert: true, }, -}, function(err, pair, cleanup) { - assert.ifError(err); +}, common.mustSucceed((pair, cleanup) => { return cleanup(); -}); +})); // Like above, but provide root CA and subordinate CA as multi-PEM. connect({ @@ -151,10 +148,9 @@ connect({ ca: client.ca + '\n' + client.subca, requestCert: true, }, -}, function(err, pair, cleanup) { - assert.ifError(err); +}, common.mustSucceed((pair, cleanup) => { return cleanup(); -}); +})); // Like above, but provide multi-PEM in an array. 
connect({ @@ -170,10 +166,9 @@ connect({ ca: [client.ca + '\n' + client.subca], requestCert: true, }, -}, function(err, pair, cleanup) { - assert.ifError(err); +}, common.mustSucceed((pair, cleanup) => { return cleanup(); -}); +})); // Fail to complete server's chain connect({ @@ -185,10 +180,10 @@ connect({ key: server.key, cert: server.single, }, -}, function(err, pair, cleanup) { +}, common.mustCall((err, pair, cleanup) => { assert.strictEqual(err.code, 'UNABLE_TO_VERIFY_LEAF_SIGNATURE'); return cleanup(); -}); +})); // Fail to complete client's chain. connect({ @@ -204,12 +199,12 @@ connect({ ca: client.ca, requestCert: true, }, -}, function(err, pair, cleanup) { +}, common.mustCall((err, pair, cleanup) => { assert.ifError(pair.client.error); assert.ifError(pair.server.error); assert.strictEqual(err.code, 'ECONNRESET'); return cleanup(); -}); +})); // Fail to find CA for server. connect({ @@ -220,10 +215,10 @@ connect({ key: server.key, cert: server.cert, }, -}, function(err, pair, cleanup) { +}, common.mustCall((err, pair, cleanup) => { assert.strictEqual(err.code, 'UNABLE_TO_GET_ISSUER_CERT_LOCALLY'); return cleanup(); -}); +})); // Server sent their CA, but CA cannot be trusted if it is not locally known. connect({ @@ -234,10 +229,10 @@ connect({ key: server.key, cert: server.cert + '\n' + server.ca, }, -}, function(err, pair, cleanup) { +}, common.mustCall((err, pair, cleanup) => { assert.strictEqual(err.code, 'SELF_SIGNED_CERT_IN_CHAIN'); return cleanup(); -}); +})); // Server sent their CA, wrongly, but its OK since we know the CA locally. connect({ @@ -249,10 +244,9 @@ connect({ key: server.key, cert: server.cert + '\n' + server.ca, }, -}, function(err, pair, cleanup) { - assert.ifError(err); +}, common.mustSucceed((pair, cleanup) => { return cleanup(); -}); +})); // Fail to complete client's chain. connect({ @@ -268,10 +262,10 @@ connect({ ca: client.ca, requestCert: true, }, -}, function(err, pair, cleanup) { +}, common.mustCall((err, pair, cleanup) => { assert.strictEqual(err.code, 'ECONNRESET'); return cleanup(); -}); +})); // Fail to find CA for client. connect({ @@ -286,10 +280,10 @@ connect({ cert: server.cert, requestCert: true, }, -}, function(err, pair, cleanup) { +}, common.mustCall((err, pair, cleanup) => { assert.strictEqual(err.code, 'ECONNRESET'); return cleanup(); -}); +})); // Confirm support for "BEGIN TRUSTED CERTIFICATE". connect({ @@ -305,10 +299,9 @@ connect({ ca: client.ca, requestCert: true, }, -}, function(err, pair, cleanup) { - assert.ifError(err); +}, common.mustSucceed((pair, cleanup) => { return cleanup(); -}); +})); // Confirm support for "BEGIN TRUSTED CERTIFICATE". connect({ @@ -324,10 +317,9 @@ connect({ ca: client.ca.replace(/CERTIFICATE/g, 'TRUSTED CERTIFICATE'), requestCert: true, }, -}, function(err, pair, cleanup) { - assert.ifError(err); +}, common.mustSucceed((pair, cleanup) => { return cleanup(); -}); +})); // Confirm support for "BEGIN X509 CERTIFICATE". connect({ @@ -343,10 +335,9 @@ connect({ ca: client.ca, requestCert: true, }, -}, function(err, pair, cleanup) { - assert.ifError(err); +}, common.mustSucceed((pair, cleanup) => { return cleanup(); -}); +})); // Confirm support for "BEGIN X509 CERTIFICATE". 
connect({ @@ -362,7 +353,6 @@ connect({ ca: client.ca.replace(/CERTIFICATE/g, 'X509 CERTIFICATE'), requestCert: true, }, -}, function(err, pair, cleanup) { - assert.ifError(err); +}, common.mustSucceed((pair, cleanup) => { return cleanup(); -}); +})); diff --git a/test/parallel/test-tls-client-mindhsize.js b/test/parallel/test-tls-client-mindhsize.js index 50246f621b361c..778e4b710b4e92 100644 --- a/test/parallel/test-tls-client-mindhsize.js +++ b/test/parallel/test-tls-client-mindhsize.js @@ -36,12 +36,12 @@ function test(size, err, next) { conn.end(); }); - server.on('close', function(isException) { + server.on('close', common.mustCall(function(isException) { assert(!isException); if (next) next(); - }); + })); - server.listen(0, function() { + server.listen(0, common.mustCall(function() { // Client set minimum DH parameter size to 2048 or 3072 bits // so that it fails when it makes a connection to the tls // server where is too small. This depends on the openssl @@ -57,13 +57,13 @@ function test(size, err, next) { server.close(); }); if (err) { - client.on('error', function(e) { + client.on('error', common.mustCall((e) => { nerror++; assert.strictEqual(e.code, 'ERR_TLS_DH_PARAM_SIZE'); server.close(); - }); + })); } - }); + })); } // A client connection fails with an error when a client has an diff --git a/test/parallel/test-tls-client-renegotiation-13.js b/test/parallel/test-tls-client-renegotiation-13.js index 38a72fb525b430..5afa8389ed37ca 100644 --- a/test/parallel/test-tls-client-renegotiation-13.js +++ b/test/parallel/test-tls-client-renegotiation-13.js @@ -26,9 +26,7 @@ connect({ key: server.key, cert: server.cert, }, -}, function(err, pair, cleanup) { - assert.ifError(err); - +}, common.mustSucceed((pair, cleanup) => { const client = pair.client.conn; assert.strictEqual(client.getProtocol(), 'TLSv1.3'); @@ -46,4 +44,4 @@ connect({ })); assert.strictEqual(ok, false); -}); +})); diff --git a/test/parallel/test-tls-client-renegotiation-limit.js b/test/parallel/test-tls-client-renegotiation-limit.js index b35140e8964ac1..86111d6da0b402 100644 --- a/test/parallel/test-tls-client-renegotiation-limit.js +++ b/test/parallel/test-tls-client-renegotiation-limit.js @@ -57,16 +57,16 @@ function test(next) { key: fixtures.readKey('rsa_private.pem'), }; - const server = tls.createServer(options, (conn) => { - conn.on('error', (err) => { + const server = tls.createServer(options, common.mustCall((conn) => { + conn.on('error', common.mustCall((err) => { console.error(`Caught exception: ${err}`); assert.match(err.message, /TLS session renegotiation attack/); conn.destroy(); - }); + })); conn.pipe(conn); - }); + })); - server.listen(0, () => { + server.listen(0, common.mustCall(() => { const options = { host: server.address().host, port: server.address().port, @@ -76,30 +76,27 @@ function test(next) { let renegs = 0; - client.on('close', () => { + client.on('close', common.mustCall(() => { assert.strictEqual(renegs, tls.CLIENT_RENEG_LIMIT + 1); server.close(); process.nextTick(next); - }); + })); - client.on('error', (err) => { - console.log('CLIENT ERR', err); - throw err; - }); + client.on('error', common.mustNotCall('CLIENT ERR')); - client.on('close', (hadErr) => { + client.on('close', common.mustCall((hadErr) => { assert.strictEqual(hadErr, false); - }); + })); // Simulate renegotiation attack function spam() { client.write(''); - client.renegotiate({}, (err) => { + client.renegotiate({}, common.mustCallAtLeast((err) => { assert.ifError(err); assert.ok(renegs <= tls.CLIENT_RENEG_LIMIT); 
spam(); - }); + }, 0)); renegs++; } - }); + })); } diff --git a/test/parallel/test-tls-client-resume.js b/test/parallel/test-tls-client-resume.js index b9d7fd828b5380..7d1e964d8ec2c1 100644 --- a/test/parallel/test-tls-client-resume.js +++ b/test/parallel/test-tls-client-resume.js @@ -75,10 +75,10 @@ server.listen(0, common.mustCall(function() { console.log('client1 session#', ++sessions); }); - client1.on('close', () => { + client1.on('close', common.mustCall(() => { console.log('client1 close'); assert.strictEqual(sessions, tls13 ? 2 : 1); - }); + })); function reconnect() { assert(sessionx); diff --git a/test/parallel/test-tls-close-event-after-write.js b/test/parallel/test-tls-close-event-after-write.js index 57c79e2e5ab72d..31515cd56f8aa8 100644 --- a/test/parallel/test-tls-close-event-after-write.js +++ b/test/parallel/test-tls-close-event-after-write.js @@ -26,11 +26,11 @@ function test() { const server = tls.createServer({ key: fixtures.readKey('agent1-key.pem'), cert: fixtures.readKey('agent1-cert.pem') -}, (c) => { +}, common.mustCall((c) => { c.on('close', common.mustCall(() => server.close())); sconn = c; test(); -}).listen(0, common.mustCall(function() { +})).listen(0, common.mustCall(function() { tls.connect(this.address().port, { rejectUnauthorized: false }, common.mustCall(function() { diff --git a/test/parallel/test-tls-close-notify.js b/test/parallel/test-tls-close-notify.js index 6411e68bd2d1a8..19d2c6bcabb62b 100644 --- a/test/parallel/test-tls-close-notify.js +++ b/test/parallel/test-tls-close-notify.js @@ -34,12 +34,12 @@ const { ShutdownWrap } = internalBinding('stream_wrap'); const server = tls.createServer({ key: fixtures.readKey('agent1-key.pem'), cert: fixtures.readKey('agent1-cert.pem') -}, function(c) { +}, common.mustCall((c) => { // Ensure that we receive 'end' event anyway. 
c.on('end', common.mustCall(function() { server.close(); })); -}).listen(0, common.mustCall(function() { +})).listen(0, common.mustCall(function() { const c = tls.connect(this.address().port, { rejectUnauthorized: false }, common.mustCall(function() { diff --git a/test/parallel/test-tls-connect-allow-half-open-option.js b/test/parallel/test-tls-connect-allow-half-open-option.js index e39eb509a81fa7..bb19e2e9de811a 100644 --- a/test/parallel/test-tls-connect-allow-half-open-option.js +++ b/test/parallel/test-tls-connect-allow-half-open-option.js @@ -59,11 +59,11 @@ server.listen(0, common.mustCall(() => { socket.on('end', common.mustCall(() => { assert.strictEqual(message, 'Hello'); - setTimeout(() => { + setTimeout(common.mustCall(() => { assert(socket.writable); assert(socket.write('Bye')); socket.end(); - }, 50); + }), 50); })); socket.write('Hello'); diff --git a/test/parallel/test-tls-connect-memleak.js b/test/parallel/test-tls-connect-memleak.js index 7b9cb71d8df0ba..220ea4a9248ede 100644 --- a/test/parallel/test-tls-connect-memleak.js +++ b/test/parallel/test-tls-connect-memleak.js @@ -58,9 +58,9 @@ const gcListener = { ongc() { collected = true; } }; function done(sock) { globalThis.gc(); - setImmediate(() => { + setImmediate(common.mustCall(() => { assert.strictEqual(collected, true); sock.end(); server.close(); - }); + })); } diff --git a/test/parallel/test-tls-connect-secure-context.js b/test/parallel/test-tls-connect-secure-context.js index 31941656c09a05..a0d9170c20904f 100644 --- a/test/parallel/test-tls-connect-secure-context.js +++ b/test/parallel/test-tls-connect-secure-context.js @@ -1,11 +1,11 @@ 'use strict'; -require('../common'); +const common = require('../common'); // Verify connection with explicitly created client SecureContext. 
const fixtures = require('../common/fixtures'); const { - assert, connect, keys, tls + connect, keys, tls } = require(fixtures.path('tls-connect')); connect({ @@ -19,10 +19,9 @@ connect({ cert: keys.agent1.cert, key: keys.agent1.key, }, -}, function(err, pair, cleanup) { - assert.ifError(err); +}, common.mustSucceed((pair, cleanup) => { return cleanup(); -}); +})); connect({ client: { @@ -47,7 +46,6 @@ connect({ cert: keys.agent1.cert, key: keys.agent1.key, }, -}, function(err, pair, cleanup) { - assert.ifError(err); +}, common.mustSucceed((pair, cleanup) => { return cleanup(); -}); +})); diff --git a/test/parallel/test-tls-connect-simple.js b/test/parallel/test-tls-connect-simple.js index 633529d6d3ccb1..69179ccfe54977 100644 --- a/test/parallel/test-tls-connect-simple.js +++ b/test/parallel/test-tls-connect-simple.js @@ -42,7 +42,7 @@ const server = tls.Server(options, common.mustCall(function(socket) { } }, 2)); -server.listen(0, function() { +server.listen(0, common.mustCall(function() { const client1options = { port: this.address().port, rejectUnauthorized: false @@ -59,4 +59,4 @@ server.listen(0, function() { client2.on('secureConnect', common.mustCall(function() { client2.end(); })); -}); +})); diff --git a/test/parallel/test-tls-delayed-attach.js b/test/parallel/test-tls-delayed-attach.js index b756db2d67ca5f..c80858f1b17e06 100644 --- a/test/parallel/test-tls-delayed-attach.js +++ b/test/parallel/test-tls-delayed-attach.js @@ -44,7 +44,7 @@ const options = { }; const server = net.createServer(common.mustCall((c) => { - setTimeout(function() { + setTimeout(common.mustCall(() => { const s = new tls.TLSSocket(c, { isServer: true, secureContext: tls.createSecureContext(options) @@ -58,7 +58,7 @@ const server = net.createServer(common.mustCall((c) => { server.close(); s.destroy(); })); - }, 200); + }), 200); })).listen(0, common.mustCall(() => { const c = tls.connect(server.address().port, { rejectUnauthorized: false diff --git a/test/parallel/test-tls-destroy-stream.js b/test/parallel/test-tls-destroy-stream.js index 7af85856093183..1b3a61e379deba 100644 --- a/test/parallel/test-tls-destroy-stream.js +++ b/test/parallel/test-tls-destroy-stream.js @@ -22,22 +22,22 @@ const tlsServer = tls.createServer( cert: fixtures.readKey('rsa_cert.crt'), ca: [fixtures.readKey('rsa_ca.crt')], }, - (socket) => { + common.mustCall((socket) => { socket.on('close', common.mustCall()); socket.write(CONTENT); socket.destroy(); - socket.on('error', (err) => { + socket.on('error', common.mustCallAtLeast((err) => { // destroy() is sync, write() is async, whether write completes depends // on the protocol, it is not guaranteed by stream API. if (err.code === 'ERR_STREAM_DESTROYED') return; assert.ifError(err); - }); - }, + }, 0)); + }), ); -const server = net.createServer((conn) => { +const server = net.createServer(common.mustCall((conn) => { conn.on('error', common.mustNotCall()); // Assume that we want to use data to determine what to do with connections. 
conn.once('data', common.mustCall((chunk) => { @@ -61,17 +61,17 @@ const server = net.createServer((conn) => { tlsServer.emit('connection', serverSide); })); -}); +})); -server.listen(0, () => { +server.listen(0, common.mustCall(() => { const port = server.address().port; - const conn = tls.connect({ port, rejectUnauthorized: false }, () => { + const conn = tls.connect({ port, rejectUnauthorized: false }, common.mustCall(() => { // Whether the server's write() completed before its destroy() is // indeterminate, but if data was written, we should receive it correctly. - conn.on('data', (data) => { + conn.on('data', common.mustCallAtLeast((data) => { assert.strictEqual(data.toString('utf8'), CONTENT); - }); + }, 0)); conn.on('error', common.mustNotCall()); conn.on('close', common.mustCall(() => server.close())); - }); -}); + })); +})); diff --git a/test/parallel/test-tls-econnreset.js b/test/parallel/test-tls-econnreset.js index a056f908190fd6..8308c8904d99ff 100644 --- a/test/parallel/test-tls-econnreset.js +++ b/test/parallel/test-tls-econnreset.js @@ -34,11 +34,11 @@ let clientError = null; const server = tls.createServer({ cert: fixtures.readKey('agent1-cert.pem'), key: fixtures.readKey('agent1-key.pem'), -}, common.mustNotCall()).on('tlsClientError', function(err, conn) { +}, common.mustNotCall()).on('tlsClientError', common.mustCall(function(err, conn) { assert(!clientError && conn); clientError = err; server.close(); -}).listen(0, function() { +})).listen(0, function() { net.connect(this.address().port, function() { // Destroy the socket once it is connected, so the server sees ECONNRESET. this.destroy(); diff --git a/test/parallel/test-tls-exportkeyingmaterial.js b/test/parallel/test-tls-exportkeyingmaterial.js index b3173f94001194..5f3281ffc4f84b 100644 --- a/test/parallel/test-tls-exportkeyingmaterial.js +++ b/test/parallel/test-tls-exportkeyingmaterial.js @@ -92,11 +92,11 @@ const server = net.createServer(common.mustCall((s) => { tlsSocket.end(); server.close(); })); -})).listen(0, () => { +})).listen(0, common.mustCall(() => { const opts = { port: server.address().port, rejectUnauthorized: false }; tls.connect(opts, common.mustCall(function() { this.end(); })); -}); +})); diff --git a/test/parallel/test-tls-fast-writing.js b/test/parallel/test-tls-fast-writing.js index 4718acf2858499..e59a1c27ccd1cf 100644 --- a/test/parallel/test-tls-fast-writing.js +++ b/test/parallel/test-tls-fast-writing.js @@ -37,7 +37,7 @@ let gotChunk = false; let gotDrain = false; function onconnection(conn) { - conn.on('data', function(c) { + conn.on('data', common.mustCall(function(c) { if (!gotChunk) { gotChunk = true; console.log('ok - got chunk'); @@ -49,7 +49,7 @@ function onconnection(conn) { if (gotDrain) process.exit(0); - }); + })); } server.listen(0, function() { diff --git a/test/parallel/test-tls-getprotocol.js b/test/parallel/test-tls-getprotocol.js index c28b329ba7b642..5fe46c43c376cf 100644 --- a/test/parallel/test-tls-getprotocol.js +++ b/test/parallel/test-tls-getprotocol.js @@ -38,7 +38,7 @@ if (!process.features.openssl_is_boringssl) { } const server = tls.createServer(serverConfig, common.mustCall(clientConfigs.length)) -.listen(0, common.localhostIPv4, function() { +.listen(0, common.localhostIPv4, common.mustCall(function() { let connected = 0; for (const v of clientConfigs) { tls.connect({ @@ -57,4 +57,4 @@ const server = tls.createServer(serverConfig, common.mustCall(clientConfigs.leng server.close(); })); } -}); +})); diff --git a/test/parallel/test-tls-inception.js 
b/test/parallel/test-tls-inception.js index 7310308e6f9876..5154148294efd5 100644 --- a/test/parallel/test-tls-inception.js +++ b/test/parallel/test-tls-inception.js @@ -59,8 +59,8 @@ const b = tls.createServer(options, function(socket) { socket.end(body); }); -a.listen(0, function() { - b.listen(0, function() { +a.listen(0, common.mustCall(function() { + b.listen(0, common.mustCall(function() { const myOptions = { host: '127.0.0.1', port: a.address().port, @@ -82,5 +82,5 @@ a.listen(0, function() { a.close(); b.close(); })); - }); -}); + })); +})); diff --git a/test/parallel/test-tls-interleave.js b/test/parallel/test-tls-interleave.js index 91449b5b3ae571..a071dc0bd7fe5b 100644 --- a/test/parallel/test-tls-interleave.js +++ b/test/parallel/test-tls-interleave.js @@ -48,9 +48,9 @@ const server = tls.createServer(options, function(c) { }); }).listen(0, common.mustCall(function() { const connectOpts = { rejectUnauthorized: false }; - const c = tls.connect(this.address().port, connectOpts, function() { + const c = tls.connect(this.address().port, connectOpts, common.mustCall(function() { c.write('some client data'); - c.on('readable', function() { + c.on('readable', common.mustCallAtLeast(() => { let data = c.read(); if (data === null) return; @@ -65,8 +65,8 @@ const server = tls.createServer(options, function(c) { server.close(); } } - }); - }); + })); + })); })); diff --git a/test/parallel/test-tls-junk-server.js b/test/parallel/test-tls-junk-server.js index 7147c46ebee7ca..a349c35494588b 100644 --- a/test/parallel/test-tls-junk-server.js +++ b/test/parallel/test-tls-junk-server.js @@ -19,7 +19,7 @@ const server = net.createServer(function(s) { }); }); -server.listen(0, function() { +server.listen(0, common.mustCall(function() { const req = https.request({ port: this.address().port }); req.end(); @@ -31,4 +31,4 @@ server.listen(0, function() { assert.match(err.message, expectedErrorMessage); server.close(); })); -}); +})); diff --git a/test/parallel/test-tls-keylog-tlsv13.js b/test/parallel/test-tls-keylog-tlsv13.js index f26dece2f427f2..0ee20496c9643c 100644 --- a/test/parallel/test-tls-keylog-tlsv13.js +++ b/test/parallel/test-tls-keylog-tlsv13.js @@ -15,7 +15,7 @@ const server = tls.createServer({ // version, so force a specific one: minVersion: 'TLSv1.3', maxVersion: 'TLSv1.3', -}).listen(() => { +}).listen(common.mustCall(() => { const client = tls.connect({ port: server.address().port, rejectUnauthorized: false, @@ -33,4 +33,4 @@ const server = tls.createServer({ server.close(); client.end(); }); -}); +})); diff --git a/test/parallel/test-tls-max-send-fragment.js b/test/parallel/test-tls-max-send-fragment.js index bbb7849f005bf9..009021045624bb 100644 --- a/test/parallel/test-tls-max-send-fragment.js +++ b/test/parallel/test-tls-max-send-fragment.js @@ -41,7 +41,7 @@ const invalidArgumentError = { const server = tls.createServer({ key: fixtures.readKey('agent1-key.pem'), cert: fixtures.readKey('agent1-cert.pem') -}, function(c) { +}, common.mustCall((c) => { // No size is passed. 
assert.throws(() => c.setMaxSendFragment(), invalidArgumentError); @@ -68,14 +68,14 @@ const server = tls.createServer({ assert(c.setMaxSendFragment(maxChunk)); c.end(buf); -}).listen(0, common.mustCall(function() { +})).listen(0, common.mustCall(function() { const c = tls.connect(this.address().port, { rejectUnauthorized: false }, common.mustCall(function() { - c.on('data', function(chunk) { + c.on('data', common.mustCallAtLeast((chunk) => { assert(chunk.length <= maxChunk); received += chunk.length; - }); + })); // Ensure that we receive 'end' event anyway c.on('end', common.mustCall(function() { diff --git a/test/parallel/test-tls-multi-key.js b/test/parallel/test-tls-multi-key.js index aeec8b7218155d..89f9931e5bdd77 100644 --- a/test/parallel/test-tls-multi-key.js +++ b/test/parallel/test-tls-multi-key.js @@ -153,8 +153,8 @@ function test(options) { ciphers: 'ECDHE-ECDSA-AES256-GCM-SHA384', rejectUnauthorized: true, ca: clientTrustRoots, - checkServerIdentity: (_, c) => assert.strictEqual(c.subject.CN, eccCN), - maxVersion: 'TLSv1.2' + checkServerIdentity: common.mustCall((_, c) => assert.strictEqual(c.subject.CN, eccCN)), + maxVersion: 'TLSv1.2', }, common.mustCall(function() { assert.deepStrictEqual(ecdsa.getCipher(), { name: 'ECDHE-ECDSA-AES256-GCM-SHA384', @@ -174,7 +174,7 @@ function test(options) { ciphers: 'ECDHE-RSA-AES256-GCM-SHA384', rejectUnauthorized: true, ca: clientTrustRoots, - checkServerIdentity: (_, c) => assert.strictEqual(c.subject.CN, rsaCN), + checkServerIdentity: common.mustCallAtLeast((_, c) => assert.strictEqual(c.subject.CN, rsaCN)), maxVersion: 'TLSv1.2', }, common.mustCall(function() { assert.deepStrictEqual(rsa.getCipher(), { diff --git a/test/parallel/test-tls-multi-pfx.js b/test/parallel/test-tls-multi-pfx.js index 80bd0d37281f13..526b77b1484cd3 100644 --- a/test/parallel/test-tls-multi-pfx.js +++ b/test/parallel/test-tls-multi-pfx.js @@ -21,7 +21,7 @@ const ciphers = []; const server = tls.createServer(options, function(conn) { conn.end('ok'); -}).listen(0, function() { +}).listen(0, common.mustCall(function() { const ecdsa = tls.connect(this.address().port, { ciphers: 'ECDHE-ECDSA-AES256-GCM-SHA384', maxVersion: 'TLSv1.2', @@ -39,7 +39,7 @@ const server = tls.createServer(options, function(conn) { server.close(); })); })); -}); +})); process.on('exit', function() { assert.deepStrictEqual(ciphers, [{ diff --git a/test/parallel/test-tls-net-connect-prefer-path.js b/test/parallel/test-tls-net-connect-prefer-path.js index cefeb5d4714e70..223c95e4742d7f 100644 --- a/test/parallel/test-tls-net-connect-prefer-path.js +++ b/test/parallel/test-tls-net-connect-prefer-path.js @@ -38,14 +38,14 @@ function mkServer(lib, tcp, cb) { } function testLib(lib, cb) { - mkServer(lib, true, (tcpServer) => { - mkServer(lib, false, (unixServer) => { + mkServer(lib, true, common.mustCall((tcpServer) => { + mkServer(lib, false, common.mustCall((unixServer) => { const client = lib.connect({ path: unixServer.address(), port: tcpServer.address().port, host: 'localhost', rejectUnauthorized: false - }, () => { + }, common.mustCall(() => { const bufs = []; client.on('data', common.mustCall((d) => { bufs.push(d); @@ -57,9 +57,9 @@ function testLib(lib, cb) { unixServer.close(); cb(); })); - }); - }); - }); + })); + })); + })); } testLib(net, common.mustCall(() => testLib(tls, common.mustCall()))); diff --git a/test/parallel/test-tls-no-rsa-key.js b/test/parallel/test-tls-no-rsa-key.js index e3c1b5eda316b3..18aeaba7062cc4 100644 --- a/test/parallel/test-tls-no-rsa-key.js +++ 
b/test/parallel/test-tls-no-rsa-key.js @@ -44,9 +44,9 @@ const server = tls.createServer(options, function(conn) { server.close(); })); - c.on('data', function(data) { + c.on('data', common.mustCallAtLeast((data) => { assert.strictEqual(data.toString(), 'ok'); - }); + })); const cert = c.getPeerCertificate(); assert.strictEqual(cert.subject.C, 'US'); diff --git a/test/parallel/test-tls-no-sslv3.js b/test/parallel/test-tls-no-sslv3.js index 906f22e2572155..c4a6e1ff7f20bb 100644 --- a/test/parallel/test-tls-no-sslv3.js +++ b/test/parallel/test-tls-no-sslv3.js @@ -21,7 +21,7 @@ const server = tls.createServer({ cert, key }, common.mustNotCall()); const errors = []; let stderr = ''; -server.listen(0, '127.0.0.1', function() { +server.listen(0, '127.0.0.1', common.mustCall(function() { const address = `${this.address().address}:${this.address().port}`; const args = ['s_client', '-ssl3', @@ -37,7 +37,7 @@ server.listen(0, '127.0.0.1', function() { assert.strictEqual(exitCode, 1); server.close(); })); -}); +})); server.on('tlsClientError', (err) => errors.push(err)); diff --git a/test/parallel/test-tls-ocsp-callback.js b/test/parallel/test-tls-ocsp-callback.js index 50305150257a5c..3a2d8e45f772ac 100644 --- a/test/parallel/test-tls-ocsp-callback.js +++ b/test/parallel/test-tls-ocsp-callback.js @@ -80,7 +80,7 @@ function test(testOptions, cb) { Buffer.from(testOptions.response) : null); })); - server.listen(0, function() { + server.listen(0, common.mustCall(function() { const client = tls.connect({ port: this.address().port, requestOCSP: testOptions.ocsp, @@ -100,7 +100,7 @@ function test(testOptions, cb) { client.on('close', common.mustCall(() => { server.close(cb); })); - }); + })); } test({ ocsp: true, response: false }); diff --git a/test/parallel/test-tls-onread-static-buffer.js b/test/parallel/test-tls-onread-static-buffer.js index 6e19184e887d2a..beb7084150b5f5 100644 --- a/test/parallel/test-tls-onread-static-buffer.js +++ b/test/parallel/test-tls-onread-static-buffer.js @@ -20,7 +20,7 @@ const largeMessage = Buffer.alloc(64 * 1024).fill('hello world'); tls.createServer(options, common.mustCall(function(socket) { this.close(); socket.end(smallMessage); -})).listen(0, function() { +})).listen(0, common.mustCall(function() { let received = 0; const buffers = []; const sockBuf = Buffer.alloc(8); @@ -29,23 +29,23 @@ tls.createServer(options, common.mustCall(function(socket) { rejectUnauthorized: false, onread: { buffer: sockBuf, - callback: function(nread, buf) { + callback: common.mustCallAtLeast((nread, buf) => { assert.strictEqual(buf, sockBuf); received += nread; buffers.push(Buffer.from(buf.slice(0, nread))); - } + }) } }).on('data', common.mustNotCall()).on('end', common.mustCall(() => { assert.strictEqual(received, smallMessage.length); assert.deepStrictEqual(Buffer.concat(buffers), smallMessage); })); -}); +})); // Test Uint8Array support tls.createServer(options, common.mustCall(function(socket) { this.close(); socket.end(smallMessage); -})).listen(0, function() { +})).listen(0, common.mustCall(function() { let received = 0; let incoming = new Uint8Array(0); const sockBuf = new Uint8Array(8); @@ -54,26 +54,26 @@ tls.createServer(options, common.mustCall(function(socket) { rejectUnauthorized: false, onread: { buffer: sockBuf, - callback: function(nread, buf) { + callback: common.mustCallAtLeast((nread, buf) => { assert.strictEqual(buf, sockBuf); received += nread; const newIncoming = new Uint8Array(incoming.length + nread); newIncoming.set(incoming); newIncoming.set(buf.slice(0, 
nread), incoming.length); incoming = newIncoming; - } + }) } }).on('data', common.mustNotCall()).on('end', common.mustCall(() => { assert.strictEqual(received, smallMessage.length); assert.deepStrictEqual(incoming, new Uint8Array(smallMessage)); })); -}); +})); // Test Buffer callback usage tls.createServer(options, common.mustCall(function(socket) { this.close(); socket.end(smallMessage); -})).listen(0, function() { +})).listen(0, common.mustCall(function() { let received = 0; const incoming = []; const bufPool = [ Buffer.alloc(2), Buffer.alloc(2), Buffer.alloc(2) ]; @@ -88,24 +88,24 @@ tls.createServer(options, common.mustCall(function(socket) { bufPoolIdx = (bufPoolIdx + 1) % bufPool.length; return bufPool[bufPoolIdx]; }, - callback: function(nread, buf) { + callback: common.mustCallAtLeast((nread, buf) => { assert.strictEqual(buf, bufPool[bufPoolIdx]); received += nread; incoming.push(Buffer.from(buf.slice(0, nread))); - } + }) } }).on('data', common.mustNotCall()).on('end', common.mustCall(() => { assert.strictEqual(received, smallMessage.length); assert.deepStrictEqual(Buffer.concat(incoming), smallMessage); assert.strictEqual(bufPoolUsage, 7); })); -}); +})); // Test Uint8Array callback support tls.createServer(options, common.mustCall(function(socket) { this.close(); socket.end(smallMessage); -})).listen(0, function() { +})).listen(0, common.mustCall(function() { let received = 0; let incoming = new Uint8Array(0); const bufPool = [ new Uint8Array(2), new Uint8Array(2), new Uint8Array(2) ]; @@ -120,28 +120,28 @@ tls.createServer(options, common.mustCall(function(socket) { bufPoolIdx = (bufPoolIdx + 1) % bufPool.length; return bufPool[bufPoolIdx]; }, - callback: function(nread, buf) { + callback: common.mustCallAtLeast((nread, buf) => { assert.strictEqual(buf, bufPool[bufPoolIdx]); received += nread; const newIncoming = new Uint8Array(incoming.length + nread); newIncoming.set(incoming); newIncoming.set(buf.slice(0, nread), incoming.length); incoming = newIncoming; - } + }), } }).on('data', common.mustNotCall()).on('end', common.mustCall(() => { assert.strictEqual(received, smallMessage.length); assert.deepStrictEqual(incoming, new Uint8Array(smallMessage)); assert.strictEqual(bufPoolUsage, 7); })); -}); +})); // Test explicit socket pause tls.createServer(options, common.mustCall(function(socket) { this.close(); // Need larger message here to observe the pause socket.end(largeMessage); -})).listen(0, function() { +})).listen(0, common.mustCall(function() { let received = 0; const buffers = []; const sockBuf = Buffer.alloc(64); @@ -151,7 +151,7 @@ tls.createServer(options, common.mustCall(function(socket) { rejectUnauthorized: false, onread: { buffer: sockBuf, - callback: function(nread, buf) { + callback: common.mustCallAtLeast((nread, buf) => { assert.strictEqual(buf, sockBuf); received += nread; buffers.push(Buffer.from(buf.slice(0, nread))); @@ -162,20 +162,20 @@ tls.createServer(options, common.mustCall(function(socket) { client.resume(); }, 100); } - } + }), } }).on('data', common.mustNotCall()).on('end', common.mustCall(() => { assert.strictEqual(received, largeMessage.length); assert.deepStrictEqual(Buffer.concat(buffers), largeMessage); })); -}); +})); // Test implicit socket pause tls.createServer(options, common.mustCall(function(socket) { this.close(); // Need larger message here to observe the pause socket.end(largeMessage); -})).listen(0, function() { +})).listen(0, common.mustCall(function() { let received = 0; const buffers = []; const sockBuf = Buffer.alloc(64); @@ 
-185,7 +185,7 @@ tls.createServer(options, common.mustCall(function(socket) { rejectUnauthorized: false, onread: { buffer: sockBuf, - callback: function(nread, buf) { + callback: common.mustCallAtLeast((nread, buf) => { assert.strictEqual(buf, sockBuf); received += nread; buffers.push(Buffer.from(buf.slice(0, nread))); @@ -197,10 +197,10 @@ tls.createServer(options, common.mustCall(function(socket) { return false; } return true; - } + }), } }).on('data', common.mustNotCall()).on('end', common.mustCall(() => { assert.strictEqual(received, largeMessage.length); assert.deepStrictEqual(Buffer.concat(buffers), largeMessage); })); -}); +})); diff --git a/test/parallel/test-tls-over-http-tunnel.js b/test/parallel/test-tls-over-http-tunnel.js index 503ec1f6600f5f..baef7a56f6884a 100644 --- a/test/parallel/test-tls-over-http-tunnel.js +++ b/test/parallel/test-tls-over-http-tunnel.js @@ -49,12 +49,12 @@ const server = https.createServer(options, common.mustCall((req, res) => { res.end('hello world\n'); })); -const proxy = net.createServer((clientSocket) => { +const proxy = net.createServer(common.mustCall((clientSocket) => { console.log('PROXY: got a client connection'); let serverSocket = null; - clientSocket.on('data', (chunk) => { + clientSocket.on('data', common.mustCallAtLeast((chunk) => { if (!serverSocket) { // Verify the CONNECT request assert.strictEqual(chunk.toString(), @@ -87,12 +87,12 @@ const proxy = net.createServer((clientSocket) => { } else { serverSocket.write(chunk); } - }); + })); clientSocket.on('end', () => { serverSocket.destroy(); }); -}); +})); server.listen(0); @@ -148,7 +148,7 @@ proxy.listen(0, common.mustCall(() => { socket: socket, // reuse the socket agent: false, rejectUnauthorized: false - }, (res) => { + }, common.mustCall((res) => { assert.strictEqual(res.statusCode, 200); res.on('data', common.mustCall((chunk) => { @@ -161,7 +161,7 @@ proxy.listen(0, common.mustCall(() => { proxy.close(); server.close(); })); - }).on('error', (er) => { + })).on('error', (er) => { // We're ok with getting ECONNRESET in this test, but it's // timing-dependent, and thus unreliable. Any other errors // are just failures, though. 
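Every hunk in this patch applies the same pattern: callbacks whose bodies consist only of assertions are wrapped in `common.mustCall()` (or `common.mustCallAtLeast()` when the exact call count is indeterminate), so the test fails loudly if such a callback is never invoked instead of silently skipping its assertions. As a rough, self-contained sketch of what such a wrapper does — a simplified stand-in, not the real helper from `test/common/index.js`, which integrates with the test harness — consider:

'use strict';
const assert = require('assert');

// Simplified model of common.mustCall(): require the wrapped function to be
// called exactly `expected` times before the process exits.
function mustCall(fn = () => {}, expected = 1) {
  let actual = 0;
  process.on('exit', () => assert.strictEqual(actual, expected));
  // A regular function is returned so `this` is forwarded, which matters for
  // callbacks such as server.listen(0, function() { this.address() ... }).
  return function(...args) {
    actual += 1;
    return fn.apply(this, args);
  };
}

// Simplified model of common.mustCallAtLeast(): a lower bound instead of an
// exact count, used where the number of 'data'/renegotiation callbacks varies.
function mustCallAtLeast(fn = () => {}, minimum = 1) {
  let actual = 0;
  process.on('exit', () =>
    assert.ok(actual >= minimum, `called ${actual} times, expected >= ${minimum}`));
  return function(...args) {
    actual += 1;
    return fn.apply(this, args);
  };
}

// Usage mirroring the changes in this patch: an assertion-only callback is
// wrapped so that never running it turns into a test failure.
const onDone = mustCall((err) => assert.ifError(err));
setImmediate(() => onDone(null));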
diff --git a/test/parallel/test-tls-pause.js b/test/parallel/test-tls-pause.js index 120d6d87a7a913..f98d8d9b745ce3 100644 --- a/test/parallel/test-tls-pause.js +++ b/test/parallel/test-tls-pause.js @@ -73,7 +73,7 @@ server.listen(0, common.mustCall(() => { console.error('resumed', client); })(); })); - client.on('data', (data) => { + client.on('data', common.mustCallAtLeast((data) => { console.error('data'); assert.ok(resumed); received += data.length; @@ -84,7 +84,7 @@ server.listen(0, common.mustCall(() => { client.end(); server.close(); } - }); + })); })); process.on('exit', () => { diff --git a/test/parallel/test-tls-peer-certificate.js b/test/parallel/test-tls-peer-certificate.js index 41e3c883d950e0..6c440ee44b8cf8 100644 --- a/test/parallel/test-tls-peer-certificate.js +++ b/test/parallel/test-tls-peer-certificate.js @@ -40,8 +40,7 @@ function sha256(s) { connect({ client: { rejectUnauthorized: false }, server: keys.agent1, -}, function(err, pair, cleanup) { - assert.ifError(err); +}, common.mustSucceed((pair, cleanup) => { const socket = pair.client.conn; const localCert = socket.getCertificate(); assert.deepStrictEqual(localCert, {}); @@ -98,13 +97,12 @@ connect({ assert.strictEqual(issuer.serialNumber, '4AB16C8DFD6A7D0D2DFCABDF9C4B0E92C6AD0229'); return cleanup(); -}); +})); connect({ client: { rejectUnauthorized: false }, server: keys.ec, -}, function(err, pair, cleanup) { - assert.ifError(err); +}, common.mustSucceed((pair, cleanup) => { const socket = pair.client.conn; let peerCert = socket.getPeerCertificate(true); assert.ok(peerCert.issuerCertificate); @@ -149,4 +147,4 @@ connect({ assert.strictEqual(issuer.serialNumber, '32E8197681DA33185867B52885F678BFDBA51727'); return cleanup(); -}); +})); diff --git a/test/parallel/test-tls-pfx-authorizationerror.js b/test/parallel/test-tls-pfx-authorizationerror.js index eb705d591ef23a..53fcc0b16b5bd6 100644 --- a/test/parallel/test-tls-pfx-authorizationerror.js +++ b/test/parallel/test-tls-pfx-authorizationerror.js @@ -28,7 +28,7 @@ const server = tls c.end(); }) ) - .listen(0, function() { + .listen(0, common.mustCall(function() { const client = tls.connect( { port: this.address().port, @@ -36,7 +36,7 @@ const server = tls passphrase: 'sample', rejectUnauthorized: false }, - function() { + common.mustCall(() => { for (let i = 0; i < 10; ++i) { // Calling this repeatedly is a regression test that verifies // that .getCertificate() does not accidentally decrease the @@ -46,6 +46,6 @@ const server = tls } client.end(); server.close(); - } + }), ); - }); + })); diff --git a/test/parallel/test-tls-psk-circuit.js b/test/parallel/test-tls-psk-circuit.js index 61861ecf4dafa6..9cbc7a91fb852b 100644 --- a/test/parallel/test-tls-psk-circuit.js +++ b/test/parallel/test-tls-psk-circuit.js @@ -18,11 +18,11 @@ const TEST_DATA = 'x'; const serverOptions = { ciphers: CIPHERS, - pskCallback(socket, id) { + pskCallback: common.mustCallAtLeast((socket, id) => { assert.ok(socket instanceof tls.TLSSocket); assert.ok(typeof id === 'string'); return USERS[id]; - }, + }), }; function test(secret, opts, error) { diff --git a/test/parallel/test-tls-psk-errors.js b/test/parallel/test-tls-psk-errors.js index 98627c0b235a5a..a970ca548226da 100644 --- a/test/parallel/test-tls-psk-errors.js +++ b/test/parallel/test-tls-psk-errors.js @@ -15,11 +15,11 @@ const tls = require('tls'); pskCallback: () => {}, pskIdentityHint: 'a'.repeat(512), // Too long identity hint. 
}); - server.on('tlsClientError', (err) => { + server.on('tlsClientError', common.mustCall((err) => { assert.ok(err instanceof Error); assert.strictEqual(err.code, 'ERR_TLS_PSK_SET_IDENTITY_HINT_FAILED'); server.close(); - }); + })); server.listen(0, () => { const client = tls.connect({ port: server.address().port, diff --git a/test/parallel/test-tls-psk-server.js b/test/parallel/test-tls-psk-server.js index 87fad86083e1ab..af038493469880 100644 --- a/test/parallel/test-tls-psk-server.js +++ b/test/parallel/test-tls-psk-server.js @@ -23,12 +23,12 @@ const IDENTITY = 'TestUser'; const server = tls.createServer({ ciphers: CIPHERS, pskIdentityHint: IDENTITY, - pskCallback(socket, identity) { + pskCallback: common.mustCall((socket, identity) => { assert.ok(socket instanceof tls.TLSSocket); assert.ok(typeof identity === 'string'); if (identity === IDENTITY) return Buffer.from(KEY, 'hex'); - } + }), }); server.on('connection', common.mustCall()); @@ -45,7 +45,7 @@ let gotHello = false; let sentWorld = false; let gotWorld = false; -server.listen(0, () => { +server.listen(0, common.mustCall(() => { const client = spawn(opensslCli, [ 's_client', '-connect', `127.0.0.1:${server.address().port}`, @@ -79,4 +79,4 @@ server.listen(0, () => { assert.strictEqual(code, 0); server.close(); })); -}); +})); diff --git a/test/parallel/test-tls-secure-context-usage-order.js b/test/parallel/test-tls-secure-context-usage-order.js index c79a3eac775822..490ac491b2e0b7 100644 --- a/test/parallel/test-tls-secure-context-usage-order.js +++ b/test/parallel/test-tls-secure-context-usage-order.js @@ -34,7 +34,7 @@ const goodSecureContext = { ca: [ loadPEM('ca1-cert') ] }; -const server = tls.createServer(serverOptions, (c) => { +const server = tls.createServer(serverOptions, common.mustCallAtLeast((c) => { // The 'a' and 'b' subdomains are used to distinguish between client // connections. // Connection to subdomain 'a' is made when the 'bad' secure context is @@ -47,13 +47,13 @@ const server = tls.createServer(serverOptions, (c) => { if ('b.example.com' === c.servername) { assert.strictEqual(c.authorized, true); } -}); +})); // 1. Add the 'bad' secure context. A connection using this context will not be // authorized. 
server.addContext('*.example.com', badSecureContext); -server.listen(0, () => { +server.listen(0, common.mustCall(() => { const options = { port: server.address().port, key: loadPEM('agent1-key'), @@ -96,4 +96,4 @@ server.listen(0, () => { })); })); })); -}); +})); diff --git a/test/parallel/test-tls-server-connection-server.js b/test/parallel/test-tls-server-connection-server.js index 7fb2c74996ab4b..7fbb58f06ca9c9 100644 --- a/test/parallel/test-tls-server-connection-server.js +++ b/test/parallel/test-tls-server-connection-server.js @@ -15,7 +15,7 @@ const options = { const server = tls.createServer(options, function(s) { s.end('hello'); -}).listen(0, function() { +}).listen(0, common.mustCall(function() { const opts = { port: this.address().port, rejectUnauthorized: false @@ -29,4 +29,4 @@ const server = tls.createServer(options, function(s) { const client = tls.connect(opts, function() { client.end(); }); -}); +})); diff --git a/test/parallel/test-tls-server-setkeycert.js b/test/parallel/test-tls-server-setkeycert.js index 982fefc1745769..6d9f619a1be8e8 100644 --- a/test/parallel/test-tls-server-setkeycert.js +++ b/test/parallel/test-tls-server-setkeycert.js @@ -35,7 +35,7 @@ const altKeyCertVals = [ }), }; - tls.createServer(options, (s) => s.end()).listen(0, function() { + tls.createServer(options, (s) => s.end()).listen(0, common.mustCall(function() { this.on('connection', common.mustCall((socket) => this.close())); tls.connect({ @@ -52,5 +52,5 @@ const altKeyCertVals = [ this.end(); next(); })); - }); + })); })(); diff --git a/test/parallel/test-tls-server-verify.js b/test/parallel/test-tls-server-verify.js index 2517c7c8dbbb1f..94f372d37a3b1f 100644 --- a/test/parallel/test-tls-server-verify.js +++ b/test/parallel/test-tls-server-verify.js @@ -221,7 +221,7 @@ function runClient(prefix, port, options, cb) { } }); - client.on('exit', function(code) { + client.on('exit', common.mustCall((code) => { if (options.shouldReject) { assert.strictEqual( rejected, true, @@ -237,7 +237,7 @@ function runClient(prefix, port, options, cb) { } cb(); - }); + })); } @@ -273,7 +273,7 @@ function runTest(port, testIndex) { } let renegotiated = false; - const server = tls.Server(serverOptions, function handleConnection(c) { + const server = tls.Server(serverOptions, common.mustCallAtLeast(function handleConnection(c) { c.on('error', function(e) { // child.kill() leads ECONNRESET error in the TLS connection of // openssl s_client via spawn(). 
A test result is already @@ -282,18 +282,17 @@ function runTest(port, testIndex) { }); if (tcase.renegotiate && !renegotiated) { renegotiated = true; - setTimeout(function() { + setTimeout(common.mustCall(() => { console.error(`${prefix}- connected, renegotiating`); c.write('\n_renegotiating\n'); return c.renegotiate({ requestCert: true, rejectUnauthorized: false - }, function(err) { - assert.ifError(err); + }, common.mustSucceed(() => { c.write('\n_renegotiated\n'); handleConnection(c); - }); - }, 200); + })); + }), 200); return; } @@ -305,7 +304,7 @@ function runTest(port, testIndex) { console.error(`${prefix}- unauthed connection: %s`, c.authorizationError); c.write('\n_unauthed\n'); } - }); + })); function runNextClient(clientIndex) { const options = tcase.clients[clientIndex]; diff --git a/test/parallel/test-tls-session-cache.js b/test/parallel/test-tls-session-cache.js index 9524764aa609ee..aaf9c2c03c83e9 100644 --- a/test/parallel/test-tls-session-cache.js +++ b/test/parallel/test-tls-session-cache.js @@ -74,16 +74,16 @@ function doTest(testOptions, callback) { ++requestCount; cleartext.end(''); }); - server.on('newSession', function(id, data, cb) { + server.on('newSession', common.mustCallAtLeast((id, data, cb) => { ++newSessionCount; // Emulate asynchronous store - setImmediate(() => { + setImmediate(common.mustCall(() => { assert.ok(!session); session = { id, data }; cb(); - }); - }); - server.on('resumeSession', function(id, callback) { + })); + }, 0)); + server.on('resumeSession', common.mustCallAtLeast((id, callback) => { ++resumeCount; assert.ok(session); assert.strictEqual(session.id.toString('hex'), id.toString('hex')); @@ -100,9 +100,9 @@ function doTest(testOptions, callback) { setImmediate(() => { callback(null, data); }); - }); + }, 0)); - server.listen(0, function() { + server.listen(0, common.mustCall(function() { const args = [ 's_client', '-tls1', @@ -143,7 +143,7 @@ function doTest(testOptions, callback) { } spawnClient(); - }); + })); process.on('exit', function() { // Each test run connects 6 times: an initial request and 5 reconnect diff --git a/test/parallel/test-tls-set-encoding.js b/test/parallel/test-tls-set-encoding.js index ad0fcf325d69a3..cdeff0d28cf7f0 100644 --- a/test/parallel/test-tls-set-encoding.js +++ b/test/parallel/test-tls-set-encoding.js @@ -45,7 +45,7 @@ const server = tls.Server(options, common.mustCall(function(socket) { })); -server.listen(0, function() { +server.listen(0, common.mustCall(function() { const client = tls.connect({ port: this.address().port, rejectUnauthorized: false @@ -55,11 +55,11 @@ server.listen(0, function() { client.setEncoding('ascii'); - client.on('data', function(d) { + client.on('data', common.mustCall((d) => { console.log('client: on data', d); assert.ok(typeof d === 'string'); buffer += d; - }); + })); client.on('secureConnect', common.mustCall(() => { console.log('client: on secureConnect'); @@ -83,4 +83,4 @@ server.listen(0, function() { server.close(); })); -}); +})); diff --git a/test/parallel/test-tls-set-secure-context.js b/test/parallel/test-tls-set-secure-context.js index 79ff5465fd8ef7..3d0f93c59ec464 100644 --- a/test/parallel/test-tls-set-secure-context.js +++ b/test/parallel/test-tls-set-secure-context.js @@ -87,7 +87,7 @@ async function makeRequest(port, id) { let errored = false; req.on('error', () => errored = true); - req.on('finish', () => assert.strictEqual(errored, false)); + req.on('finish', common.mustCallAtLeast(() => assert.strictEqual(errored, false), 0)); const [res] = await 
events.once(req, 'response'); res.setEncoding('utf8'); diff --git a/test/parallel/test-tls-sni-option.js b/test/parallel/test-tls-sni-option.js index 9ac5a8c5622de4..9857b53afd4500 100644 --- a/test/parallel/test-tls-sni-option.js +++ b/test/parallel/test-tls-sni-option.js @@ -136,12 +136,12 @@ test({ 'Invalid SNI context'); function test(options, clientResult, serverResult, clientError, serverError) { - const server = tls.createServer(serverOptions, (c) => { + const server = tls.createServer(serverOptions, common.mustCallAtLeast((c) => { assert.deepStrictEqual( serverResult, { sni: c.servername, authorized: c.authorized } ); - }); + }, 0)); if (serverResult) { assert(!serverError); @@ -153,14 +153,14 @@ function test(options, clientResult, serverResult, clientError, serverError) { })); } - server.listen(0, () => { + server.listen(0, common.mustCall(() => { options.port = server.address().port; - const client = tls.connect(options, () => { + const client = tls.connect(options, common.mustCallAtLeast(() => { const result = client.authorizationError && (client.authorizationError === 'ERR_TLS_CERT_ALTNAME_INVALID'); assert.strictEqual(result, clientResult); client.end(); - }); + }, 0)); client.on('close', common.mustCall(() => server.close())); @@ -170,5 +170,5 @@ function test(options, clientResult, serverResult, clientError, serverError) { })); else client.on('error', common.mustNotCall()); - }); + })); } diff --git a/test/parallel/test-tls-sni-server-client.js b/test/parallel/test-tls-sni-server-client.js index 79f3601561ee19..966804045bb531 100644 --- a/test/parallel/test-tls-sni-server-client.js +++ b/test/parallel/test-tls-sni-server-client.js @@ -100,10 +100,10 @@ test( ); function test(options, clientResult, serverResult) { - const server = tls.createServer(serverOptions, (c) => { + const server = tls.createServer(serverOptions, common.mustCall((c) => { assert.strictEqual(c.servername, serverResult); assert.strictEqual(c.authorized, false); - }); + })); server.addContext('a.example.com', SNIContexts['a.example.com']); server.addContext('*.test.com', SNIContexts['asterisk.test.com']); @@ -111,20 +111,20 @@ function test(options, clientResult, serverResult) { server.on('tlsClientError', common.mustNotCall()); - server.listen(0, () => { + server.listen(0, common.mustCall(() => { const client = tls.connect({ ...options, port: server.address().port, rejectUnauthorized: false - }, () => { + }, common.mustCall(() => { const result = client.authorizationError && (client.authorizationError === 'ERR_TLS_CERT_ALTNAME_INVALID'); assert.strictEqual(result, clientResult); client.end(); - }); + })); client.on('close', common.mustCall(() => { server.close(); })); - }); + })); } diff --git a/test/parallel/test-tls-sni-servername.js b/test/parallel/test-tls-sni-servername.js index 2c5785df5426c9..4b3e6083bf16ef 100644 --- a/test/parallel/test-tls-sni-servername.js +++ b/test/parallel/test-tls-sni-servername.js @@ -30,7 +30,7 @@ function test(options) { assert.strictEqual(socket.servername, options.servername); })); - server.listen(0, () => { + server.listen(0, common.mustCall(() => { options.port = server.address().port; const client = tls.connect(options, common.mustNotCall()); @@ -40,7 +40,7 @@ function test(options) { })); client.on('close', common.mustCall(() => server.close())); - }); + })); } test({ diff --git a/test/parallel/test-tls-socket-close.js b/test/parallel/test-tls-socket-close.js index be941af46eede2..0428fb6313eda1 100644 --- a/test/parallel/test-tls-socket-close.js +++ 
b/test/parallel/test-tls-socket-close.js @@ -13,27 +13,27 @@ const key = fixtures.readKey('agent2-key.pem'); const cert = fixtures.readKey('agent2-cert.pem'); let serverTlsSocket; -const tlsServer = tls.createServer({ cert, key }, (socket) => { +const tlsServer = tls.createServer({ cert, key }, common.mustCall((socket) => { serverTlsSocket = socket; - socket.on('data', (chunk) => { + socket.on('data', common.mustCall((chunk) => { assert.strictEqual(chunk[0], 46); socket.write('.'); - }); + })); socket.on('close', dec); -}); +})); // A plain net server, that manually passes connections to the TLS // server to be upgraded. let netSocket; let netSocketCloseEmitted = false; -const netServer = net.createServer((socket) => { +const netServer = net.createServer(common.mustCall((socket) => { netSocket = socket; tlsServer.emit('connection', socket); socket.on('close', common.mustCall(() => { netSocketCloseEmitted = true; assert.strictEqual(serverTlsSocket.destroyed, true); })); -}).listen(0, common.mustCall(() => { +})).listen(0, common.mustCall(() => { connectClient(netServer); })); @@ -51,18 +51,18 @@ function connectClient(server) { clientTlsSocket.write('.'); - clientTlsSocket.on('data', (chunk) => { + clientTlsSocket.on('data', common.mustCall((chunk) => { assert.strictEqual(chunk[0], 46); netSocket.destroy(); assert.strictEqual(netSocket.destroyed, true); - setImmediate(() => { + setImmediate(common.mustCall(() => { // Close callbacks are executed after `setImmediate()` callbacks. assert.strictEqual(netSocketCloseEmitted, false); assert.strictEqual(serverTlsSocket.destroyed, false); - }); - }); + })); + })); clientTlsSocket.on('close', dec); } diff --git a/test/parallel/test-tls-socket-default-options.js b/test/parallel/test-tls-socket-default-options.js index bcd32c55dfe720..8dfd912285dce5 100644 --- a/test/parallel/test-tls-socket-default-options.js +++ b/test/parallel/test-tls-socket-default-options.js @@ -41,7 +41,7 @@ function test(client, callback) { key: keys.agent1.key, cert: keys.agent1.cert, }, - }, function(err, pair, cleanup) { + }, common.mustCall(function(err, pair, cleanup) { assert.strictEqual(err.code, 'UNABLE_TO_VERIFY_LEAF_SIGNATURE'); let recv = ''; pair.server.server.once('secureConnection', common.mustCall((conn) => { @@ -64,5 +64,5 @@ function test(client, callback) { .on('secure', common.mustCall(function() { callback(this.ssl.verifyError()); })); - }); + })); } diff --git a/test/parallel/test-tls-socket-failed-handshake-emits-error.js b/test/parallel/test-tls-socket-failed-handshake-emits-error.js index 37dcd79754db8d..c88f0c3a1855f2 100644 --- a/test/parallel/test-tls-socket-failed-handshake-emits-error.js +++ b/test/parallel/test-tls-socket-failed-handshake-emits-error.js @@ -10,8 +10,8 @@ const assert = require('assert'); const bonkers = Buffer.alloc(1024, 42); -const server = net.createServer(function(c) { - setTimeout(function() { +const server = net.createServer(common.mustCall((c) => { + setTimeout(common.mustCall(() => { const s = new tls.TLSSocket(c, { isServer: true, server: server @@ -30,8 +30,8 @@ const server = net.createServer(function(c) { server.close(); s.destroy(); }); - }, common.platformTimeout(200)); -}).listen(0, function() { + }), common.platformTimeout(200)); +})).listen(0, function() { const c = net.connect({ port: this.address().port }, function() { c.write(bonkers); }); diff --git a/test/parallel/test-tls-startcom-wosign-whitelist.js b/test/parallel/test-tls-startcom-wosign-whitelist.js index 56ffd73aac0e54..678729888a57b9 100644 --- 
a/test/parallel/test-tls-startcom-wosign-whitelist.js +++ b/test/parallel/test-tls-startcom-wosign-whitelist.js @@ -58,22 +58,22 @@ function runTest(tindex) { const server = tls.createServer(tcase.serverOpts, function(s) { s.resume(); - }).listen(0, function() { + }).listen(0, common.mustCall(function() { tcase.clientOpts.port = this.address().port; const client = tls.connect(tcase.clientOpts); - client.on('error', function(e) { + client.on('error', common.mustCallAtLeast((e) => { assert.strictEqual(e.code, tcase.errorCode); runNextTest(server, tindex); - }); + }, 0)); - client.on('secureConnect', function() { + client.on('secureConnect', common.mustCall(() => { // agent8 can pass StartCom/WoSign check so that the secureConnect // is established. assert.strictEqual(tcase.errorCode, 'CERT_REVOKED'); client.end(); runNextTest(server, tindex); - }); - }); + })); + })); } diff --git a/test/parallel/test-tls-ticket-cluster.js b/test/parallel/test-tls-ticket-cluster.js index 628b2fcd6a9661..2ed4abb93c8d47 100644 --- a/test/parallel/test-tls-ticket-cluster.js +++ b/test/parallel/test-tls-ticket-cluster.js @@ -59,10 +59,10 @@ if (cluster.isPrimary) { } else { shoot(); } - }).once('session', (session) => { + }).once('session', common.mustCallAtLeast((session) => { assert(!lastSession); lastSession = session; - }); + }, 0)); c.resume(); // See close_notify comment in server } diff --git a/test/parallel/test-tls-ticket.js b/test/parallel/test-tls-ticket.js index dd781cb6410b58..0a77e52fb275cd 100644 --- a/test/parallel/test-tls-ticket.js +++ b/test/parallel/test-tls-ticket.js @@ -47,7 +47,7 @@ function createServer() { key: fixtures.readKey('agent1-key.pem'), cert: fixtures.readKey('agent1-cert.pem'), ticketKeys: keys - }, function(c) { + }, common.mustCallAtLeast(function(c) { serverLog.push(id); // TODO(@sam-github) Triggers close_notify before NewSessionTicket bug. 
// c.end(); @@ -85,7 +85,7 @@ function createServer() { } else { throw new Error('UNREACHABLE'); } - }); + })); return server; } diff --git a/test/parallel/test-tls-tlswrap-segfault.js b/test/parallel/test-tls-tlswrap-segfault.js index a36016efa48a02..d69cd5a9c8f81b 100644 --- a/test/parallel/test-tls-tlswrap-segfault.js +++ b/test/parallel/test-tls-tlswrap-segfault.js @@ -30,7 +30,7 @@ const server = tls.createServer(options, function(s) { }); function putImmediate(client) { - setImmediate(function() { + setImmediate(common.mustCall(() => { if (client.ssl) { const fd = client.ssl.fd; assert(!!fd); @@ -38,5 +38,5 @@ function putImmediate(client) { } else { server.close(); } - }); + })); } diff --git a/test/parallel/test-tls-translate-peer-certificate.js b/test/parallel/test-tls-translate-peer-certificate.js index de11567b138b15..0e83b74a8e4970 100644 --- a/test/parallel/test-tls-translate-peer-certificate.js +++ b/test/parallel/test-tls-translate-peer-certificate.js @@ -5,57 +5,57 @@ const common = require('../common'); if (!common.hasCrypto) common.skip('missing crypto'); -const { strictEqual, deepStrictEqual } = require('assert'); +const assert = require('assert'); const { translatePeerCertificate } = require('_tls_common'); const certString = '__proto__=42\nA=1\nB=2\nC=3'; -strictEqual(translatePeerCertificate(null), null); -strictEqual(translatePeerCertificate(undefined), null); +assert.strictEqual(translatePeerCertificate(null), null); +assert.strictEqual(translatePeerCertificate(undefined), null); -strictEqual(translatePeerCertificate(0), null); -strictEqual(translatePeerCertificate(1), 1); +assert.strictEqual(translatePeerCertificate(0), null); +assert.strictEqual(translatePeerCertificate(1), 1); -deepStrictEqual(translatePeerCertificate({}), {}); +assert.deepStrictEqual(translatePeerCertificate({}), {}); // Earlier versions of Node.js parsed the issuer property but did so // incorrectly. This behavior has now reached end-of-life and user-supplied // strings will not be parsed at all. -deepStrictEqual(translatePeerCertificate({ issuer: '' }), - { issuer: '' }); -deepStrictEqual(translatePeerCertificate({ issuer: null }), - { issuer: null }); -deepStrictEqual(translatePeerCertificate({ issuer: certString }), - { issuer: certString }); +assert.deepStrictEqual(translatePeerCertificate({ issuer: '' }), + { issuer: '' }); +assert.deepStrictEqual(translatePeerCertificate({ issuer: null }), + { issuer: null }); +assert.deepStrictEqual(translatePeerCertificate({ issuer: certString }), + { issuer: certString }); // Earlier versions of Node.js parsed the issuer property but did so // incorrectly. This behavior has now reached end-of-life and user-supplied // strings will not be parsed at all. 
-deepStrictEqual(translatePeerCertificate({ subject: '' }), - { subject: '' }); -deepStrictEqual(translatePeerCertificate({ subject: null }), - { subject: null }); -deepStrictEqual(translatePeerCertificate({ subject: certString }), - { subject: certString }); +assert.deepStrictEqual(translatePeerCertificate({ subject: '' }), + { subject: '' }); +assert.deepStrictEqual(translatePeerCertificate({ subject: null }), + { subject: null }); +assert.deepStrictEqual(translatePeerCertificate({ subject: certString }), + { subject: certString }); -deepStrictEqual(translatePeerCertificate({ issuerCertificate: '' }), - { issuerCertificate: null }); -deepStrictEqual(translatePeerCertificate({ issuerCertificate: null }), - { issuerCertificate: null }); -deepStrictEqual( +assert.deepStrictEqual(translatePeerCertificate({ issuerCertificate: '' }), + { issuerCertificate: null }); +assert.deepStrictEqual(translatePeerCertificate({ issuerCertificate: null }), + { issuerCertificate: null }); +assert.deepStrictEqual( translatePeerCertificate({ issuerCertificate: { subject: certString } }), { issuerCertificate: { subject: certString } }); { const cert = {}; cert.issuerCertificate = cert; - deepStrictEqual(translatePeerCertificate(cert), { issuerCertificate: cert }); + assert.deepStrictEqual(translatePeerCertificate(cert), { issuerCertificate: cert }); } -deepStrictEqual(translatePeerCertificate({ infoAccess: '' }), - { infoAccess: { __proto__: null } }); -deepStrictEqual(translatePeerCertificate({ infoAccess: null }), - { infoAccess: null }); +assert.deepStrictEqual(translatePeerCertificate({ infoAccess: '' }), + { infoAccess: { __proto__: null } }); +assert.deepStrictEqual(translatePeerCertificate({ infoAccess: null }), + { infoAccess: null }); { const input = '__proto__:mostly harmless\n' + @@ -65,6 +65,6 @@ deepStrictEqual(translatePeerCertificate({ infoAccess: null }), expected.__proto__ = ['mostly harmless']; expected.hasOwnProperty = ['not a function']; expected['OCSP - URI'] = ['file:///etc/passwd']; - deepStrictEqual(translatePeerCertificate({ infoAccess: input }), - { infoAccess: expected }); + assert.deepStrictEqual(translatePeerCertificate({ infoAccess: input }), + { infoAccess: expected }); } diff --git a/test/parallel/test-tls-wrap-timeout.js b/test/parallel/test-tls-wrap-timeout.js index 7b9cb4170c888f..d1e713e36dd415 100644 --- a/test/parallel/test-tls-wrap-timeout.js +++ b/test/parallel/test-tls-wrap-timeout.js @@ -31,11 +31,9 @@ const server = tls.createServer(options, common.mustCall((c) => { let socket; let lastIdleStart; -server.listen(0, () => { - socket = net.connect(server.address().port, function() { - const s = socket.setTimeout(TIMEOUT_MAX, function() { - throw new Error('timeout'); - }); +server.listen(0, common.mustCall(() => { + socket = net.connect(server.address().port, common.mustCall(() => { + const s = socket.setTimeout(TIMEOUT_MAX, common.mustNotCall('timeout')); assert.ok(s instanceof net.Socket); assert.notStrictEqual(socket[kTimeout]._idleTimeout, -1); @@ -46,8 +44,8 @@ server.listen(0, () => { rejectUnauthorized: false }); tsocket.resume(); - }); -}); + })); +})); process.on('exit', () => { assert.strictEqual(socket[kTimeout]._idleTimeout, -1); diff --git a/test/parallel/test-trace-events-console.js b/test/parallel/test-trace-events-console.js index 79ac37fc7d6570..745ca77f5b946c 100644 --- a/test/parallel/test-trace-events-console.js +++ b/test/parallel/test-trace-events-console.js @@ -48,16 +48,15 @@ if (process.argv[2] === 'child') { assert(fs.existsSync(file)); const 
data = await fs.promises.readFile(file, { encoding: 'utf8' }); - JSON.parse(data).traceEvents - .filter((trace) => trace.cat !== '__metadata') - .forEach((trace) => { - assert.strictEqual(trace.pid, proc.pid); - assert(names.includes(trace.name)); - if (trace.name === 'count::bar') - assert.strictEqual(trace.args.data, expectedCounts.shift()); - else if (trace.name === 'time::foo') - assert.strictEqual(trace.ph, expectedTimeTypes.shift()); - }); + for (const trace of JSON.parse(data).traceEvents + .filter((trace) => trace.cat !== '__metadata')) { + assert.strictEqual(trace.pid, proc.pid); + assert(names.includes(trace.name)); + if (trace.name === 'count::bar') + assert.strictEqual(trace.args.data, expectedCounts.shift()); + else if (trace.name === 'time::foo') + assert.strictEqual(trace.ph, expectedTimeTypes.shift()); + } assert.strictEqual(expectedCounts.length, 0); assert.strictEqual(expectedTimeTypes.length, 0); })); diff --git a/test/parallel/test-trace-events-dynamic-enable.js b/test/parallel/test-trace-events-dynamic-enable.js index 5b2ce313421568..bf65f80eef530a 100644 --- a/test/parallel/test-trace-events-dynamic-enable.js +++ b/test/parallel/test-trace-events-dynamic-enable.js @@ -43,10 +43,10 @@ async function test() { const events = []; let tracingComplete = false; - session.on('NodeTracing.dataCollected', (n) => { + session.on('NodeTracing.dataCollected', common.mustCall((n) => { assert.ok(n?.params?.value); events.push(...n.params.value); // append the events. - }); + }, 2)); session.on('NodeTracing.tracingComplete', () => tracingComplete = true); trace(kBeforeEvent, 'foo', 'test1', 0, 'test'); diff --git a/test/parallel/test-trace-events-environment.js b/test/parallel/test-trace-events-environment.js index 571c71c41133d9..13127cb494ddac 100644 --- a/test/parallel/test-trace-events-environment.js +++ b/test/parallel/test-trace-events-environment.js @@ -46,13 +46,12 @@ if (process.argv[2] === 'child') { assert(fs.existsSync(file)); const data = await fs.promises.readFile(file); - JSON.parse(data.toString()).traceEvents - .filter((trace) => trace.cat !== '__metadata') - .forEach((trace) => { - assert.strictEqual(trace.pid, proc.pid); - assert(names.has(trace.name)); - checkSet.add(trace.name); - }); + for (const trace of JSON.parse(data.toString()).traceEvents + .filter((trace) => trace.cat !== '__metadata')) { + assert.strictEqual(trace.pid, proc.pid); + assert(names.has(trace.name)); + checkSet.add(trace.name); + } assert.deepStrictEqual(names, checkSet); })); diff --git a/test/parallel/test-tty-stdin-pipe.js b/test/parallel/test-tty-stdin-pipe.js index 9e941532060384..f0bb614e7ae844 100644 --- a/test/parallel/test-tty-stdin-pipe.js +++ b/test/parallel/test-tty-stdin-pipe.js @@ -20,29 +20,20 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. 'use strict'; -require('../common'); +const common = require('../common'); // This test ensures piping from `stdin` isn't broken. 
// https://github.com/nodejs/node/issues/5927 -const assert = require('assert'); const readline = require('readline'); const rl = readline.createInterface(process.stdin, process.stdout); rl.resume(); -let hasPaused = false; - const origPause = rl.pause; -rl.pause = function() { - hasPaused = true; - origPause.apply(this, arguments); -}; - -const origSetRawMode = rl._setRawMode; -rl._setRawMode = function(mode) { - assert.ok(hasPaused); - origSetRawMode.apply(this, arguments); -}; +rl.pause = common.mustCall(function pause() { + Reflect.apply(origPause, this, arguments); +}); +rl._setRawMode = common.mustNotCall(); rl.close(); diff --git a/test/parallel/test-tz-version.js b/test/parallel/test-tz-version.js index 6e4b14e1ac1880..e08c12fdd6e563 100644 --- a/test/parallel/test-tz-version.js +++ b/test/parallel/test-tz-version.js @@ -21,8 +21,8 @@ const fixtures = require('../common/fixtures'); // This test ensures the correctness of the automated timezone upgrade PRs. -const { strictEqual } = require('assert'); +const assert = require('assert'); const { readFileSync } = require('fs'); const expectedVersion = readFileSync(fixtures.path('tz-version.txt'), 'utf8').trim(); -strictEqual(process.versions.tz, expectedVersion); +assert.strictEqual(process.versions.tz, expectedVersion); diff --git a/test/parallel/test-url-domain-ascii-unicode.js b/test/parallel/test-url-domain-ascii-unicode.js index f0a2f3db13960f..ebbeba427ee6e5 100644 --- a/test/parallel/test-url-domain-ascii-unicode.js +++ b/test/parallel/test-url-domain-ascii-unicode.js @@ -2,7 +2,7 @@ const { hasIntl } = require('../common'); -const { strictEqual } = require('node:assert'); +const assert = require('node:assert'); const { domainToASCII, domainToUnicode } = require('node:url'); const { test } = require('node:test'); @@ -20,8 +20,8 @@ const domainWithASCII = [ test('domainToASCII and domainToUnicode', { skip: !hasIntl }, () => { for (const [domain, ascii] of domainWithASCII) { const domainConvertedToASCII = domainToASCII(domain); - strictEqual(domainConvertedToASCII, ascii); + assert.strictEqual(domainConvertedToASCII, ascii); const asciiConvertedToUnicode = domainToUnicode(ascii); - strictEqual(asciiConvertedToUnicode, domain); + assert.strictEqual(asciiConvertedToUnicode, domain); } }); diff --git a/test/parallel/test-urlpattern-invalidthis.js b/test/parallel/test-urlpattern-invalidthis.js index fa4504199df516..74ca79031ca49b 100644 --- a/test/parallel/test-urlpattern-invalidthis.js +++ b/test/parallel/test-urlpattern-invalidthis.js @@ -3,7 +3,7 @@ require('../common'); const { URLPattern } = require('url'); -const { throws } = require('assert'); +const assert = require('assert'); const pattern = new URLPattern(); const proto = Object.getPrototypeOf(pattern); @@ -22,7 +22,7 @@ const proto = Object.getPrototypeOf(pattern); 'hasRegExpGroups', ].forEach((i) => { const prop = Object.getOwnPropertyDescriptor(proto, i).get; - throws(() => prop({}), { + assert.throws(() => prop({}), { message: 'Illegal invocation', }, i); }); @@ -32,9 +32,9 @@ const proto = Object.getPrototypeOf(pattern); const { test, exec } = pattern; -throws(() => test({}), { +assert.throws(() => test({}), { message: 'Illegal invocation', }); -throws(() => exec({}), { +assert.throws(() => exec({}), { message: 'Illegal invocation', }); diff --git a/test/parallel/test-urlpattern-types.js b/test/parallel/test-urlpattern-types.js index 71133a7f6e48ac..2459f0149e2d91 100644 --- a/test/parallel/test-urlpattern-types.js +++ b/test/parallel/test-urlpattern-types.js @@ -3,44 
+3,44 @@ require('../common'); const { URLPattern } = require('url'); -const { throws } = require('assert'); +const assert = require('assert'); // Verifies that calling URLPattern with no new keyword throws. -throws(() => URLPattern(), { +assert.throws(() => URLPattern(), { code: 'ERR_CONSTRUCT_CALL_REQUIRED', }); // Verifies that type checks are performed on the arguments. -throws(() => new URLPattern(1), { +assert.throws(() => new URLPattern(1), { code: 'ERR_INVALID_ARG_TYPE', }); -throws(() => new URLPattern({}, 1), { +assert.throws(() => new URLPattern({}, 1), { code: 'ERR_INVALID_ARG_TYPE', }); -throws(() => new URLPattern({}, '', 1), { +assert.throws(() => new URLPattern({}, '', 1), { code: 'ERR_INVALID_ARG_TYPE', }); -throws(() => new URLPattern({}, { ignoreCase: '' }), { +assert.throws(() => new URLPattern({}, { ignoreCase: '' }), { code: 'ERR_INVALID_ARG_TYPE', }); const pattern = new URLPattern(); -throws(() => pattern.exec(1), { +assert.throws(() => pattern.exec(1), { code: 'ERR_INVALID_ARG_TYPE', }); -throws(() => pattern.exec('', 1), { +assert.throws(() => pattern.exec('', 1), { code: 'ERR_INVALID_ARG_TYPE', }); -throws(() => pattern.test(1), { +assert.throws(() => pattern.test(1), { code: 'ERR_INVALID_ARG_TYPE', }); -throws(() => pattern.test('', 1), { +assert.throws(() => pattern.test('', 1), { code: 'ERR_INVALID_ARG_TYPE', }); diff --git a/test/parallel/test-urlpattern.js b/test/parallel/test-urlpattern.js index 1a8d722c5d3e87..96c431f5b87f69 100644 --- a/test/parallel/test-urlpattern.js +++ b/test/parallel/test-urlpattern.js @@ -2,12 +2,12 @@ require('../common'); -const { throws } = require('assert'); +const assert = require('assert'); const { URLPattern } = require('url'); // Verify that if an error is thrown while accessing any of the // init options, the error is appropriately propagated. -throws(() => { +assert.throws(() => { new URLPattern({ get protocol() { throw new Error('boom'); @@ -19,7 +19,7 @@ throws(() => { // Verify that if an error is thrown while accessing the ignoreCase // option, the error is appropriately propagated. 
-throws(() => { +assert.throws(() => { new URLPattern({}, { get ignoreCase() { throw new Error('boom'); } }); diff --git a/test/parallel/test-util-callbackify.js b/test/parallel/test-util-callbackify.js index f287c91e946217..3d366a6ccf6c4b 100644 --- a/test/parallel/test-util-callbackify.js +++ b/test/parallel/test-util-callbackify.js @@ -188,6 +188,7 @@ const values = [ for (const value of values) { const iAmThis = { fn(arg) { + // eslint-disable-next-line node-core/must-call-assert assert.strictEqual(this, iAmThis); return Promise.resolve(arg); }, @@ -200,6 +201,7 @@ const values = [ const iAmThat = { async fn(arg) { + // eslint-disable-next-line node-core/must-call-assert assert.strictEqual(this, iAmThat); return arg; }, @@ -289,11 +291,11 @@ const values = [ const cbPromiseFn = callbackify(promiseFn); - cbPromiseFn(null, (err) => { + cbPromiseFn(null, common.mustCall((err) => { assert.strictEqual(err.message, 'Promise was rejected with falsy value'); assert.strictEqual(err.code, 'ERR_FALSY_VALUE_REJECTION'); assert.strictEqual(err.reason, null); const stack = err.stack.split(/[\r\n]+/); assert.match(stack[1], /at process\.processTicksAndRejections/); - }); + })); } diff --git a/test/parallel/test-util-deprecate.js b/test/parallel/test-util-deprecate.js index 9f37c5b275eddd..d4a3d69bd8f818 100644 --- a/test/parallel/test-util-deprecate.js +++ b/test/parallel/test-util-deprecate.js @@ -1,7 +1,7 @@ // Flags: --expose-internals 'use strict'; -require('../common'); +const common = require('../common'); // Tests basic functionality of util.deprecate(). @@ -83,7 +83,7 @@ for (const fn of [ assert.strictEqual(typeof deprecatedWithProto.prototype.testMethod, 'function'); } -process.on('warning', (warning) => { +process.on('warning', common.mustCallAtLeast((warning) => { assert.strictEqual(warning.name, 'DeprecationWarning'); assert.ok(expectedWarnings.has(warning.message)); const expected = expectedWarnings.get(warning.message); @@ -91,7 +91,7 @@ process.on('warning', (warning) => { expected.count = expected.count - 1; if (expected.count === 0) expectedWarnings.delete(warning.message); -}); +})); process.on('exit', () => { assert.deepStrictEqual(expectedWarnings, new Map()); diff --git a/test/parallel/test-util-inspect.js b/test/parallel/test-util-inspect.js index f859239a7d2b22..b6639a8d71a348 100644 --- a/test/parallel/test-util-inspect.js +++ b/test/parallel/test-util-inspect.js @@ -1084,11 +1084,11 @@ util.inspect({ hasOwnProperty: null }); assert.strictEqual(util.inspect(subject), '{ baz: \'quux\' }'); - subject[inspect] = (depth, opts) => { + subject[inspect] = common.mustCall((depth, opts) => { assert.strictEqual(opts.customInspectOptions, true); assert.strictEqual(opts.seen, null); return {}; - }; + }); util.inspect(subject, { customInspectOptions: true, seen: null }); } @@ -3191,7 +3191,7 @@ assert.strictEqual( ).join('\n'); } const escapedCWD = util.inspect(process.cwd()).slice(1, -1); - util.inspect(err, { colors: true }).split('\n').forEach((line, i) => { + util.inspect(err, { colors: true }).split('\n').forEach(common.mustCallAtLeast((line, i) => { let expected = stack[i].replace(/node_modules\/(@[^/]+\/[^/]+|[^/]+)/gi, (_, m) => { return `node_modules/\u001b[4m${m}\u001b[24m`; }).replaceAll(new RegExp(`(\\(?${escapedCWD}(\\\\|/))`, 'gi'), (_, m) => { @@ -3208,7 +3208,7 @@ assert.strictEqual( expected = expected.replaceAll('/', '\\'); } assert.strictEqual(line, expected); - }); + })); // Check ESM const encodedCwd = url.pathToFileURL(process.cwd()); @@ -3422,9 +3422,9 @@ 
assert.strictEqual( { // Cross platform checks. const err = new Error('foo'); - util.inspect(err, { colors: true }).split('\n').forEach((line, i) => { + util.inspect(err, { colors: true }).split('\n').forEach(common.mustCallAtLeast((line, i) => { assert(i < 2 || line.startsWith('\u001b[90m')); - }); + })); } { diff --git a/test/parallel/test-util-sigint-watchdog.js b/test/parallel/test-util-sigint-watchdog.js index 67a8f61539c673..88d5b9aa71629c 100644 --- a/test/parallel/test-util-sigint-watchdog.js +++ b/test/parallel/test-util-sigint-watchdog.js @@ -6,52 +6,52 @@ if (common.isWindows) { common.skip('platform not supported'); } +const { describe, test } = require('node:test'); const assert = require('assert'); const { internalBinding } = require('internal/test/binding'); const binding = internalBinding('contextify'); -[(next) => { - // Test with no signal observed. - binding.startSigintWatchdog(); - const hadPendingSignals = binding.stopSigintWatchdog(); - assert.strictEqual(hadPendingSignals, false); - next(); -}, - (next) => { - // Test with one call to the watchdog, one signal. - binding.startSigintWatchdog(); - process.kill(process.pid, 'SIGINT'); - waitForPendingSignal(common.mustCall(() => { - const hadPendingSignals = binding.stopSigintWatchdog(); - assert.strictEqual(hadPendingSignals, true); - next(); - })); - }, - (next) => { - // Nested calls are okay. - binding.startSigintWatchdog(); - binding.startSigintWatchdog(); - process.kill(process.pid, 'SIGINT'); - waitForPendingSignal(common.mustCall(() => { - const hadPendingSignals1 = binding.stopSigintWatchdog(); - const hadPendingSignals2 = binding.stopSigintWatchdog(); - assert.strictEqual(hadPendingSignals1, true); - assert.strictEqual(hadPendingSignals2, false); - next(); - })); - }, - () => { - // Signal comes in after first call to stop. 
- binding.startSigintWatchdog(); - binding.startSigintWatchdog(); - const hadPendingSignals1 = binding.stopSigintWatchdog(); - process.kill(process.pid, 'SIGINT'); - waitForPendingSignal(common.mustCall(() => { - const hadPendingSignals2 = binding.stopSigintWatchdog(); - assert.strictEqual(hadPendingSignals1, false); - assert.strictEqual(hadPendingSignals2, true); - })); - }].reduceRight((a, b) => common.mustCall(b).bind(null, a))(); +describe({ concurrency: false }, () => { + test('with no signal observed', (_, next) => { + binding.startSigintWatchdog(); + const hadPendingSignals = binding.stopSigintWatchdog(); + assert.strictEqual(hadPendingSignals, false); + next(); + }); + test('with one call to the watchdog, one signal', (_, next) => { + binding.startSigintWatchdog(); + process.kill(process.pid, 'SIGINT'); + waitForPendingSignal(common.mustCall(() => { + const hadPendingSignals = binding.stopSigintWatchdog(); + assert.strictEqual(hadPendingSignals, true); + next(); + })); + }); + test('Nested calls are okay', (_, next) => { + binding.startSigintWatchdog(); + binding.startSigintWatchdog(); + process.kill(process.pid, 'SIGINT'); + waitForPendingSignal(common.mustCall(() => { + const hadPendingSignals1 = binding.stopSigintWatchdog(); + const hadPendingSignals2 = binding.stopSigintWatchdog(); + assert.strictEqual(hadPendingSignals1, true); + assert.strictEqual(hadPendingSignals2, false); + next(); + })); + }); + test('Signal comes in after first call to stop', (_, done) => { + binding.startSigintWatchdog(); + binding.startSigintWatchdog(); + const hadPendingSignals1 = binding.stopSigintWatchdog(); + process.kill(process.pid, 'SIGINT'); + waitForPendingSignal(common.mustCall(() => { + const hadPendingSignals2 = binding.stopSigintWatchdog(); + assert.strictEqual(hadPendingSignals1, false); + assert.strictEqual(hadPendingSignals2, true); + done(); + })); + }); +}); function waitForPendingSignal(cb) { if (binding.watchdogHasPendingSigint()) From 8b08f905094cc60c00d5da651a3440a823322b4d Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Sun, 23 Nov 2025 21:25:36 +0200 Subject: [PATCH 024/115] test: use `RegExp.escape` to improve test reliability PR-URL: https://github.com/nodejs/node/pull/60803 Reviewed-By: LiviaMedeiros Reviewed-By: Benjamin Gruenbaum Reviewed-By: Luigi Pinca Reviewed-By: Colin Ihrig --- test/common/assertSnapshot.js | 2 +- test/parallel/test-crypto-argon2.js | 4 ++-- test/parallel/test-crypto-x509.js | 2 +- .../test-module-loading-globalpaths.js | 2 +- test/parallel/test-permission-warning-flags.js | 2 +- ...process-env-allowed-flags-are-documented.js | 2 +- test/parallel/test-release-changelog.js | 18 +++++++++--------- .../parallel/test-repl-custom-eval-previews.js | 4 ++-- test/parallel/test-util-inspect.js | 3 +-- .../test-watch-mode-kill-signal-default.mjs | 4 ++-- .../test-watch-mode-kill-signal-override.mjs | 4 ++-- .../test-whatwg-webstreams-encoding.js | 4 ++-- 12 files changed, 25 insertions(+), 26 deletions(-) diff --git a/test/common/assertSnapshot.js b/test/common/assertSnapshot.js index 0e350cd1dac6fa..af4345f5111f24 100644 --- a/test/common/assertSnapshot.js +++ b/test/common/assertSnapshot.js @@ -109,7 +109,7 @@ function replaceTestDuration(str) { const root = path.resolve(__dirname, '..', '..'); const color = '(\\[\\d+m)'; -const stackTraceBasePath = new RegExp(`${color}\\(${root.replaceAll(/[\\^$*+?.()|[\]{}]/g, '\\$&')}/?${color}(.*)${color}\\)`, 'g'); +const stackTraceBasePath = new RegExp(`${color}\\(${RegExp.escape(root)}/?${color}(.*)${color}\\)`, 
'g'); function replaceSpecDuration(str) { return str diff --git a/test/parallel/test-crypto-argon2.js b/test/parallel/test-crypto-argon2.js index 268b722cf81bb2..c8015d00458ac1 100644 --- a/test/parallel/test-crypto-argon2.js +++ b/test/parallel/test-crypto-argon2.js @@ -112,7 +112,7 @@ for (const [algorithm, overrides, expected] of good) { for (const [algorithm, overrides, param] of bad) { const expected = { code: 'ERR_OUT_OF_RANGE', - message: new RegExp(`The value of "${param}" is out of range`), + message: new RegExp(`The value of "${RegExp.escape(param)}" is out of range`), }; const parameters = { ...defaults, ...overrides }; assert.throws(() => crypto.argon2(algorithm, parameters, () => {}), expected); @@ -122,7 +122,7 @@ for (const [algorithm, overrides, param] of bad) { for (const key of Object.keys(defaults)) { const expected = { code: 'ERR_INVALID_ARG_TYPE', - message: new RegExp(`"parameters\\.${key}"`), + message: new RegExp(`"parameters\\.${RegExp.escape(key)}"`), }; const parameters = { ...defaults }; delete parameters[key]; diff --git a/test/parallel/test-crypto-x509.js b/test/parallel/test-crypto-x509.js index e1a7701a03b14d..a122ee9e300f30 100644 --- a/test/parallel/test-crypto-x509.js +++ b/test/parallel/test-crypto-x509.js @@ -296,7 +296,7 @@ oans248kpal88CGqsN2so/wZKxVnpiXlPHMdiNL7hRSUqlHkUi07FrP2Htg8kjI= 'OCSP - URI': ['http://ocsp.nodejs.org/'], 'CA Issuers - URI': ['http://ca.nodejs.org/ca.cert'] }), - modulusPattern: new RegExp(`^${modulusOSSL}$`, 'i'), + modulusPattern: new RegExp(`^${RegExp.escape(modulusOSSL)}$`, 'i'), bits: 2048, exponent: '0x10001', valid_from: 'Sep 3 21:40:37 2022 GMT', diff --git a/test/parallel/test-module-loading-globalpaths.js b/test/parallel/test-module-loading-globalpaths.js index 88e348b4fc99fd..52a3251c7b2fef 100644 --- a/test/parallel/test-module-loading-globalpaths.js +++ b/test/parallel/test-module-loading-globalpaths.js @@ -54,7 +54,7 @@ if (process.argv[2] === 'child') { child_process.execFileSync(testExecPath, [ __filename, 'child' ], { encoding: 'utf8', env: env }); }, - new RegExp(`Cannot find module '${pkgName}'`)); + new RegExp(`Cannot find module '${RegExp.escape(pkgName)}'`)); // Test module in $HOME/.node_modules. 
const modHomeDir = path.join(testFixturesDir, 'home-pkg-in-node_modules'); diff --git a/test/parallel/test-permission-warning-flags.js b/test/parallel/test-permission-warning-flags.js index e203f4bc78b210..450597942c6933 100644 --- a/test/parallel/test-permission-warning-flags.js +++ b/test/parallel/test-permission-warning-flags.js @@ -21,6 +21,6 @@ for (const flag of warnFlags) { ] ); - assert.match(stderr.toString(), new RegExp(`SecurityWarning: The flag ${flag} must be used with extreme caution`)); + assert.match(stderr.toString(), new RegExp(`SecurityWarning: The flag ${RegExp.escape(flag)} must be used with extreme caution`)); assert.strictEqual(status, 0); } diff --git a/test/parallel/test-process-env-allowed-flags-are-documented.js b/test/parallel/test-process-env-allowed-flags-are-documented.js index f09bf0940dad20..4c3254f6180243 100644 --- a/test/parallel/test-process-env-allowed-flags-are-documented.js +++ b/test/parallel/test-process-env-allowed-flags-are-documented.js @@ -12,7 +12,7 @@ const cliMd = path.join(rootDir, 'doc', 'api', 'cli.md'); const cliText = fs.readFileSync(cliMd, { encoding: 'utf8' }); const parseSection = (text, startMarker, endMarker) => { - const regExp = new RegExp(`${startMarker}\r?\n([^]*)\r?\n${endMarker}`); + const regExp = new RegExp(`${RegExp.escape(startMarker)}\r?\n([^]*)\r?\n${RegExp.escape(endMarker)}`); const match = text.match(regExp); assert(match, `Unable to locate text between '${startMarker}' and '${endMarker}'.`); diff --git a/test/parallel/test-release-changelog.js b/test/parallel/test-release-changelog.js index 99889fa1724014..c78efeeff0cf68 100644 --- a/test/parallel/test-release-changelog.js +++ b/test/parallel/test-release-changelog.js @@ -8,7 +8,7 @@ const fs = require('fs'); const path = require('path'); const getDefine = (text, name) => { - const regexp = new RegExp(`#define\\s+${name}\\s+(.*)`); + const regexp = new RegExp(`#define\\s+${RegExp.escape(name)}\\s+(.*)`); const match = regexp.exec(text); assert.notStrictEqual(match, null); return match[1]; @@ -27,7 +27,7 @@ if (!release) { const major = getDefine(versionText, 'NODE_MAJOR_VERSION'); const minor = getDefine(versionText, 'NODE_MINOR_VERSION'); const patch = getDefine(versionText, 'NODE_PATCH_VERSION'); -const versionForRegex = `${major}\\.${minor}\\.${patch}`; +const versionForRegex = RegExp.escape(`${major}.${minor}.${patch}`); const lts = getDefine(versionText, 'NODE_VERSION_IS_LTS') !== '0'; const codename = getDefine(versionText, 'NODE_VERSION_LTS_CODENAME').slice(1, -1); @@ -45,7 +45,7 @@ const changelogPath = `doc/changelogs/CHANGELOG_V${major}.md`; // Check table header let tableHeader; if (lts) { - tableHeader = new RegExp(`LTS '${codename}'`); + tableHeader = new RegExp(`LTS '${RegExp.escape(codename)}'`); } else { tableHeader = /Current<\/th>/; } @@ -57,7 +57,7 @@ const changelogPath = `doc/changelogs/CHANGELOG_V${major}.md`; // Check title for changelog entry. let title; if (lts) { - title = new RegExp(`## \\d{4}-\\d{2}-\\d{2}, Version ${versionForRegex} '${codename}' \\(LTS\\), @\\S+`); + title = new RegExp(`## \\d{4}-\\d{2}-\\d{2}, Version ${versionForRegex} '${RegExp.escape(codename)}' \\(LTS\\), @\\S+`); } else { title = new RegExp(`## \\d{4}-\\d{2}-\\d{2}, Version ${versionForRegex} \\(Current\\), @\\S+`); } @@ -70,20 +70,20 @@ const changelogPath = `doc/changelogs/CHANGELOG_V${major}.md`; // Check for the link to the appropriate CHANGELOG_V*.md file. 
let linkToChangelog; if (lts) { - linkToChangelog = new RegExp(`\\[Node\\.js ${major}\\]\\(${changelogPath}\\) \\*\\*Long Term Support\\*\\*`); + linkToChangelog = new RegExp(`\\[Node\\.js ${major}\\]\\(${RegExp.escape(changelogPath)}\\) \\*\\*Long Term Support\\*\\*`); } else { - linkToChangelog = new RegExp(`\\[Node\\.js ${major}\\]\\(${changelogPath}\\) \\*\\*Current\\*\\*`); + linkToChangelog = new RegExp(`\\[Node\\.js ${major}\\]\\(${RegExp.escape(changelogPath)}\\) \\*\\*Current\\*\\*`); } assert.match(mainChangelog, linkToChangelog); // Check table header. let tableHeader; if (lts) { - tableHeader = new RegExp(`${major} \\(LTS\\)`); + tableHeader = new RegExp(`${major} \\(LTS\\)`); } else { - tableHeader = new RegExp(`${major} \\(Current\\)`); + tableHeader = new RegExp(`${major} \\(Current\\)`); } assert.match(mainChangelog, tableHeader); // Check the table contains a link to the release in the appropriate CHANGELOG_V*.md file. - const linkToVersion = new RegExp(`${versionForRegex}
`); + const linkToVersion = new RegExp(`${versionForRegex}
`); assert.match(mainChangelog, linkToVersion); } diff --git a/test/parallel/test-repl-custom-eval-previews.js b/test/parallel/test-repl-custom-eval-previews.js index 303115dd6fdfb1..5fea4e3d8c5a31 100644 --- a/test/parallel/test-repl-custom-eval-previews.js +++ b/test/parallel/test-repl-custom-eval-previews.js @@ -45,7 +45,7 @@ describe('with previews', () => { ); const lines = getSingleCommandLines(output); assert.match(lines.command, /^'Hello custom' \+ ' eval World!'/); - assert.match(lines.prompt, new RegExp(`${testingReplPrompt}$`)); + assert.match(lines.prompt, new RegExp(`${RegExp.escape(testingReplPrompt)}$`)); assert.strictEqual(lines.result, "'Hello custom eval World!'"); assert.strictEqual(lines.preview, undefined); }); @@ -62,7 +62,7 @@ describe('with previews', () => { ); const lines = getSingleCommandLines(output); assert.match(lines.command, /^'Hello custom' \+ ' eval World!'/); - assert.match(lines.prompt, new RegExp(`${testingReplPrompt}$`)); + assert.match(lines.prompt, new RegExp(`${RegExp.escape(testingReplPrompt)}$`)); assert.strictEqual(lines.result, "'Hello custom eval World!'"); assert.match(lines.preview, /'Hello custom eval World!'/); }); diff --git a/test/parallel/test-util-inspect.js b/test/parallel/test-util-inspect.js index b6639a8d71a348..dc50afd4624f7a 100644 --- a/test/parallel/test-util-inspect.js +++ b/test/parallel/test-util-inspect.js @@ -3190,11 +3190,10 @@ assert.strictEqual( frame.replaceAll('/', '\\')) ).join('\n'); } - const escapedCWD = util.inspect(process.cwd()).slice(1, -1); util.inspect(err, { colors: true }).split('\n').forEach(common.mustCallAtLeast((line, i) => { let expected = stack[i].replace(/node_modules\/(@[^/]+\/[^/]+|[^/]+)/gi, (_, m) => { return `node_modules/\u001b[4m${m}\u001b[24m`; - }).replaceAll(new RegExp(`(\\(?${escapedCWD}(\\\\|/))`, 'gi'), (_, m) => { + }).replaceAll(new RegExp(`(\\(?${RegExp.escape(process.cwd())}(\\\\|/))`, 'gi'), (_, m) => { return `\x1B[90m${m}\x1B[39m`; }); if (expected.includes(process.cwd()) && expected.endsWith(')')) { diff --git a/test/parallel/test-watch-mode-kill-signal-default.mjs b/test/parallel/test-watch-mode-kill-signal-default.mjs index 53aec43d22e050..c5323283be8bb5 100644 --- a/test/parallel/test-watch-mode-kill-signal-default.mjs +++ b/test/parallel/test-watch-mode-kill-signal-default.mjs @@ -63,5 +63,5 @@ child.on('message', (msg) => { await once(child, 'exit'); -assert.match(stdout, new RegExp(`__SIGTERM received__ ${firstGrandchildPid}`)); -assert.doesNotMatch(stdout, new RegExp(`__SIGINT received__ ${firstGrandchildPid}`)); +assert.match(stdout, new RegExp(`__SIGTERM received__ ${RegExp.escape(firstGrandchildPid)}`)); +assert.doesNotMatch(stdout, new RegExp(`__SIGINT received__ ${RegExp.escape(firstGrandchildPid)}`)); diff --git a/test/parallel/test-watch-mode-kill-signal-override.mjs b/test/parallel/test-watch-mode-kill-signal-override.mjs index dc3af3f76e848d..ca49b4880f90a3 100644 --- a/test/parallel/test-watch-mode-kill-signal-override.mjs +++ b/test/parallel/test-watch-mode-kill-signal-override.mjs @@ -67,5 +67,5 @@ await once(child, 'exit'); // The second grandchild, if there is one, could receive SIGTERM if it's killed as a // consequence of the parent being killed in this process instead of being killed by the // parent for file changes. Here we only care about the first grandchild. 
-assert.match(stdout, new RegExp(`__SIGINT received__ ${firstGrandchildPid}`)); -assert.doesNotMatch(stdout, new RegExp(`__SIGTERM received__ ${firstGrandchildPid}`)); +assert.match(stdout, new RegExp(`__SIGINT received__ ${RegExp.escape(firstGrandchildPid)}`)); +assert.doesNotMatch(stdout, new RegExp(`__SIGTERM received__ ${RegExp.escape(firstGrandchildPid)}`)); diff --git a/test/parallel/test-whatwg-webstreams-encoding.js b/test/parallel/test-whatwg-webstreams-encoding.js index 24d9bce7cc5212..588523fa788bc8 100644 --- a/test/parallel/test-whatwg-webstreams-encoding.js +++ b/test/parallel/test-whatwg-webstreams-encoding.js @@ -50,7 +50,7 @@ const kEuro = Buffer.from([0xe2, 0x82, 0xac]).toString(); () => Reflect.get(TextDecoderStream.prototype, getter, {}), { name: 'TypeError', message: /Cannot read private member/, - stack: new RegExp(`at get ${getter}`) + stack: new RegExp(`at get ${RegExp.escape(getter)}`) } ); }); @@ -79,7 +79,7 @@ const kEuro = Buffer.from([0xe2, 0x82, 0xac]).toString(); () => Reflect.get(TextDecoderStream.prototype, getter, {}), { name: 'TypeError', message: /Cannot read private member/, - stack: new RegExp(`at get ${getter}`) + stack: new RegExp(`at get ${RegExp.escape(getter)}`) } ); }); From 2a5c3ff82a314a3c2f23ab99b1cbbfe257b478d3 Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Tue, 25 Nov 2025 00:44:48 +0200 Subject: [PATCH 025/115] test: ensure assertions are reached on more tests PR-URL: https://github.com/nodejs/node/pull/60760 Reviewed-By: Colin Ihrig --- test/eslint.config_partial.mjs | 2 +- ...cket-writes-before-passed-to-tls-socket.js | 4 +- test/parallel/test-socketaddress.js | 92 +++++++++---------- test/parallel/test-spawn-cmd-named-pipe.js | 8 +- test/parallel/test-sqlite-custom-functions.js | 6 +- test/parallel/test-stdin-pipe-resume.js | 6 +- test/parallel/test-stdio-pipe-stderr.js | 6 +- ...out-cannot-be-closed-child-process-pipe.js | 6 +- test/parallel/test-strace-openat-openssl.js | 4 +- test/parallel/test-stream-big-push.js | 4 +- test/parallel/test-stream-compose.js | 8 +- test/parallel/test-stream-construct.js | 8 +- test/parallel/test-stream-destroy.js | 24 ++--- test/parallel/test-stream-drop-take.js | 62 ++++++------- test/parallel/test-stream-duplex-from.js | 4 +- .../test-stream-duplex-writable-finished.js | 4 +- test/parallel/test-stream-end-paused.js | 8 +- test/parallel/test-stream-filter.js | 4 +- test/parallel/test-stream-finished.js | 32 +++---- test/parallel/test-stream-forEach.js | 4 +- test/parallel/test-stream-pipe-await-drain.js | 4 +- test/parallel/test-stream-pipe-flow.js | 4 +- test/parallel/test-stream-pipeline-http2.js | 8 +- .../test-stream-pipeline-listeners.js | 12 +-- test/parallel/test-stream-pipeline.js | 77 ++++++++-------- test/parallel/test-stream-readable-aborted.js | 4 +- .../test-stream-readable-async-iterators.js | 38 +++----- test/parallel/test-stream-readable-didRead.js | 4 +- test/parallel/test-stream-readable-ended.js | 4 +- test/parallel/test-stream-readable-event.js | 12 +-- ...test-stream-readable-hwm-0-no-flow-data.js | 8 +- .../test-stream-readable-strategy-option.js | 11 +-- test/parallel/test-stream-reduce.js | 4 +- test/parallel/test-stream-transform-final.js | 4 +- .../test-stream-transform-flush-data.js | 6 +- test/parallel/test-stream-unpipe-event.js | 24 ++--- .../test-stream-unshift-empty-chunk.js | 6 +- .../parallel/test-stream-unshift-read-race.js | 4 +- test/parallel/test-stream-wrap.js | 4 +- ...stream-writable-change-default-encoding.js | 14 +-- 
.../test-stream-writable-decoded-encoding.js | 24 ++--- test/parallel/test-stream-writable-destroy.js | 4 +- .../test-stream-writable-ended-state.js | 4 +- .../test-stream-writable-finished-state.js | 4 +- .../parallel/test-stream-writable-finished.js | 4 +- test/parallel/test-stream-writable-null.js | 5 +- test/parallel/test-stream-writev.js | 9 +- ...est-stream2-base64-single-char-read-end.js | 6 +- test/parallel/test-stream2-basic.js | 20 ++-- test/parallel/test-stream2-compatibility.js | 6 +- .../test-stream2-httpclient-response-end.js | 8 +- test/parallel/test-stream2-objects.js | 8 +- test/parallel/test-stream2-push.js | 6 +- test/parallel/test-stream2-readable-wrap.js | 4 +- test/parallel/test-stream2-writable.js | 27 ++---- test/parallel/test-stream3-cork-end.js | 18 ++-- test/parallel/test-stream3-cork-uncork.js | 18 ++-- test/parallel/test-stream3-pause-then-read.js | 10 +- .../test-stream3-pipeline-async-iterator.js | 1 + 59 files changed, 354 insertions(+), 380 deletions(-) diff --git a/test/eslint.config_partial.mjs b/test/eslint.config_partial.mjs index fd8ce1a69b256c..52f17349b500b7 100644 --- a/test/eslint.config_partial.mjs +++ b/test/eslint.config_partial.mjs @@ -204,7 +204,7 @@ export default [ Array.from({ length: 13 }, (_, i) => String.fromCharCode(0x61 + i, 42)).join(',') },n*,${ // 0x61 is code for 'a', this generates a string enumerating latin letters: 'z*,y*,…' - Array.from({ length: 7 }, (_, i) => String.fromCharCode(0x61 + 25 - i, 42)).join(',') + Array.from({ length: 8 }, (_, i) => String.fromCharCode(0x61 + 25 - i, 42)).join(',') }}.{js,mjs,cjs}`, ], rules: { diff --git a/test/parallel/test-socket-writes-before-passed-to-tls-socket.js b/test/parallel/test-socket-writes-before-passed-to-tls-socket.js index 22c5b87111579c..ff4fc9a486cd95 100644 --- a/test/parallel/test-socket-writes-before-passed-to-tls-socket.js +++ b/test/parallel/test-socket-writes-before-passed-to-tls-socket.js @@ -7,7 +7,7 @@ const net = require('net'); const HEAD = Buffer.alloc(1024 * 1024, 0); -const server = net.createServer((serverSock) => { +const server = net.createServer(common.mustCallAtLeast((serverSock) => { let recvLen = 0; const recv = []; serverSock.on('data', common.mustCallAtLeast((chunk) => { @@ -21,7 +21,7 @@ const server = net.createServer((serverSock) => { process.exit(0); } }, 1)); -}) +})) .listen(client); function client() { diff --git a/test/parallel/test-socketaddress.js b/test/parallel/test-socketaddress.js index bd117cc6b5edc1..cf29795a48fcfa 100644 --- a/test/parallel/test-socketaddress.js +++ b/test/parallel/test-socketaddress.js @@ -2,11 +2,7 @@ 'use strict'; const common = require('../common'); -const { - ok, - strictEqual, - throws, -} = require('assert'); +const assert = require('assert'); const { SocketAddress, } = require('net'); @@ -26,19 +22,19 @@ describe('net.SocketAddress...', () => { it('is cloneable', () => { const sa = new SocketAddress(); - strictEqual(sa.address, '127.0.0.1'); - strictEqual(sa.port, 0); - strictEqual(sa.family, 'ipv4'); - strictEqual(sa.flowlabel, 0); + assert.strictEqual(sa.address, '127.0.0.1'); + assert.strictEqual(sa.port, 0); + assert.strictEqual(sa.family, 'ipv4'); + assert.strictEqual(sa.flowlabel, 0); const mc = new MessageChannel(); mc.port1.onmessage = common.mustCall(({ data }) => { - ok(SocketAddress.isSocketAddress(data)); + assert.ok(SocketAddress.isSocketAddress(data)); - strictEqual(data.address, '127.0.0.1'); - strictEqual(data.port, 0); - strictEqual(data.family, 'ipv4'); - strictEqual(data.flowlabel, 0); + 
assert.strictEqual(data.address, '127.0.0.1'); + assert.strictEqual(data.port, 0); + assert.strictEqual(data.family, 'ipv4'); + assert.strictEqual(data.flowlabel, 0); mc.port1.close(); }); @@ -47,20 +43,20 @@ describe('net.SocketAddress...', () => { it('has reasonable defaults', () => { const sa = new SocketAddress({}); - strictEqual(sa.address, '127.0.0.1'); - strictEqual(sa.port, 0); - strictEqual(sa.family, 'ipv4'); - strictEqual(sa.flowlabel, 0); + assert.strictEqual(sa.address, '127.0.0.1'); + assert.strictEqual(sa.port, 0); + assert.strictEqual(sa.family, 'ipv4'); + assert.strictEqual(sa.flowlabel, 0); }); it('interprets simple ipv4 correctly', () => { const sa = new SocketAddress({ address: '123.123.123.123', }); - strictEqual(sa.address, '123.123.123.123'); - strictEqual(sa.port, 0); - strictEqual(sa.family, 'ipv4'); - strictEqual(sa.flowlabel, 0); + assert.strictEqual(sa.address, '123.123.123.123'); + assert.strictEqual(sa.port, 0); + assert.strictEqual(sa.family, 'ipv4'); + assert.strictEqual(sa.flowlabel, 0); }); it('sets the port correctly', () => { @@ -68,20 +64,20 @@ describe('net.SocketAddress...', () => { address: '123.123.123.123', port: 80 }); - strictEqual(sa.address, '123.123.123.123'); - strictEqual(sa.port, 80); - strictEqual(sa.family, 'ipv4'); - strictEqual(sa.flowlabel, 0); + assert.strictEqual(sa.address, '123.123.123.123'); + assert.strictEqual(sa.port, 80); + assert.strictEqual(sa.family, 'ipv4'); + assert.strictEqual(sa.flowlabel, 0); }); it('interprets simple ipv6 correctly', () => { const sa = new SocketAddress({ family: 'ipv6' }); - strictEqual(sa.address, '::'); - strictEqual(sa.port, 0); - strictEqual(sa.family, 'ipv6'); - strictEqual(sa.flowlabel, 0); + assert.strictEqual(sa.address, '::'); + assert.strictEqual(sa.port, 0); + assert.strictEqual(sa.family, 'ipv6'); + assert.strictEqual(sa.flowlabel, 0); }); it('uses the flowlabel correctly', () => { @@ -89,38 +85,38 @@ describe('net.SocketAddress...', () => { family: 'ipv6', flowlabel: 1, }); - strictEqual(sa.address, '::'); - strictEqual(sa.port, 0); - strictEqual(sa.family, 'ipv6'); - strictEqual(sa.flowlabel, 1); + assert.strictEqual(sa.address, '::'); + assert.strictEqual(sa.port, 0); + assert.strictEqual(sa.family, 'ipv6'); + assert.strictEqual(sa.flowlabel, 1); }); it('validates input correctly', () => { [1, false, 'hello'].forEach((i) => { - throws(() => new SocketAddress(i), { + assert.throws(() => new SocketAddress(i), { code: 'ERR_INVALID_ARG_TYPE' }); }); [1, false, {}, [], 'test'].forEach((family) => { - throws(() => new SocketAddress({ family }), { + assert.throws(() => new SocketAddress({ family }), { code: 'ERR_INVALID_ARG_VALUE' }); }); [1, false, {}, []].forEach((address) => { - throws(() => new SocketAddress({ address }), { + assert.throws(() => new SocketAddress({ address }), { code: 'ERR_INVALID_ARG_TYPE' }); }); [-1, false, {}, []].forEach((port) => { - throws(() => new SocketAddress({ port }), { + assert.throws(() => new SocketAddress({ port }), { code: 'ERR_SOCKET_BAD_PORT' }); }); - throws(() => new SocketAddress({ flowlabel: -1 }), { + assert.throws(() => new SocketAddress({ flowlabel: -1 }), { code: 'ERR_OUT_OF_RANGE' }); }); @@ -135,11 +131,11 @@ describe('net.SocketAddress...', () => { const flowlabel = 0; const handle = new _SocketAddress(address, port, AF_INET, flowlabel); const addr = new InternalSocketAddress(handle); - ok(addr instanceof SocketAddress); - strictEqual(addr.address, address); - strictEqual(addr.port, port); - strictEqual(addr.family, 'ipv4'); - 
strictEqual(addr.flowlabel, flowlabel); + assert.ok(addr instanceof SocketAddress); + assert.strictEqual(addr.address, address); + assert.strictEqual(addr.port, port); + assert.strictEqual(addr.family, 'ipv4'); + assert.strictEqual(addr.flowlabel, flowlabel); }); it('SocketAddress.parse() works as expected', () => { @@ -156,9 +152,9 @@ describe('net.SocketAddress...', () => { good.forEach((i) => { const addr = SocketAddress.parse(i.input); - strictEqual(addr.address, i.address); - strictEqual(addr.port, i.port); - strictEqual(addr.family, i.family); + assert.strictEqual(addr.address, i.address); + assert.strictEqual(addr.port, i.port); + assert.strictEqual(addr.family, i.family); }); const bad = [ @@ -169,7 +165,7 @@ describe('net.SocketAddress...', () => { ]; bad.forEach((i) => { - strictEqual(SocketAddress.parse(i), undefined); + assert.strictEqual(SocketAddress.parse(i), undefined); }); }); diff --git a/test/parallel/test-spawn-cmd-named-pipe.js b/test/parallel/test-spawn-cmd-named-pipe.js index 4e7ad185a5401c..e9eb206b1b1f7f 100644 --- a/test/parallel/test-spawn-cmd-named-pipe.js +++ b/test/parallel/test-spawn-cmd-named-pipe.js @@ -16,22 +16,22 @@ if (!process.argv[2]) { const stdinPipeName = `\\\\.\\pipe\\${pipeNamePrefix}.stdin`; const stdoutPipeName = `\\\\.\\pipe\\${pipeNamePrefix}.stdout`; - const stdinPipeServer = net.createServer(function(c) { + const stdinPipeServer = net.createServer(common.mustCall((c) => { c.on('end', common.mustCall()); c.end('hello'); - }); + })); stdinPipeServer.listen(stdinPipeName); const output = []; - const stdoutPipeServer = net.createServer(function(c) { + const stdoutPipeServer = net.createServer(common.mustCallAtLeast((c) => { c.on('data', function(x) { output.push(x); }); c.on('end', common.mustCall(function() { assert.strictEqual(output.join(''), 'hello'); })); - }); + })); stdoutPipeServer.listen(stdoutPipeName); const args = diff --git a/test/parallel/test-sqlite-custom-functions.js b/test/parallel/test-sqlite-custom-functions.js index d535cda821e10e..6b5f974ede893e 100644 --- a/test/parallel/test-sqlite-custom-functions.js +++ b/test/parallel/test-sqlite-custom-functions.js @@ -1,5 +1,5 @@ 'use strict'; -const { skipIfSQLiteMissing } = require('../common'); +const { skipIfSQLiteMissing, mustCall } = require('../common'); skipIfSQLiteMissing(); const assert = require('node:assert'); const { DatabaseSync } = require('node:sqlite'); @@ -376,14 +376,14 @@ suite('DatabaseSync.prototype.function()', () => { test('supported argument types', () => { const db = new DatabaseSync(':memory:'); - db.function('arguments', (i, f, s, n, b) => { + db.function('arguments', mustCall((i, f, s, n, b) => { assert.strictEqual(i, 5); assert.strictEqual(f, 3.14); assert.strictEqual(s, 'foo'); assert.strictEqual(n, null); assert.deepStrictEqual(b, new Uint8Array([254])); return 42; - }); + })); const stmt = db.prepare( 'SELECT arguments(5, 3.14, \'foo\', null, x\'fe\') as result' ); diff --git a/test/parallel/test-stdin-pipe-resume.js b/test/parallel/test-stdin-pipe-resume.js index e9000933a37cfa..90e41b95405b28 100644 --- a/test/parallel/test-stdin-pipe-resume.js +++ b/test/parallel/test-stdin-pipe-resume.js @@ -1,6 +1,6 @@ 'use strict'; // This tests that piping stdin will cause it to resume() as well. 
-require('../common'); +const common = require('../common'); const assert = require('assert'); if (process.argv[2] === 'child') { @@ -12,11 +12,11 @@ if (process.argv[2] === 'child') { child.stdout.on('data', function(c) { buffers.push(c); }); - child.stdout.on('close', function() { + child.stdout.on('close', common.mustCall(() => { const b = Buffer.concat(buffers).toString(); assert.strictEqual(b, 'Hello, world\n'); console.log('ok'); - }); + })); child.stdin.write('Hel'); child.stdin.write('lo,'); child.stdin.write(' wo'); diff --git a/test/parallel/test-stdio-pipe-stderr.js b/test/parallel/test-stdio-pipe-stderr.js index c914877062c425..ba4d2b71772c71 100644 --- a/test/parallel/test-stdio-pipe-stderr.js +++ b/test/parallel/test-stdio-pipe-stderr.js @@ -1,5 +1,5 @@ 'use strict'; -require('../common'); +const common = require('../common'); const tmpdir = require('../common/tmpdir'); const assert = require('assert'); const fs = require('fs'); @@ -19,7 +19,7 @@ const stream = fs.createWriteStream(stderrOutputPath); // non-built-in module. fs.writeFileSync(fakeModulePath, '', 'utf8'); -stream.on('open', () => { +stream.on('open', common.mustCall(() => { spawnSync(process.execPath, { input: `require(${JSON.stringify(fakeModulePath)})`, stdio: ['pipe', 'pipe', stream] @@ -33,4 +33,4 @@ stream.on('open', () => { stream.end(); fs.unlinkSync(stderrOutputPath); fs.unlinkSync(fakeModulePath); -}); +})); diff --git a/test/parallel/test-stdout-cannot-be-closed-child-process-pipe.js b/test/parallel/test-stdout-cannot-be-closed-child-process-pipe.js index 7cd4b90c008a2f..ac2a8706890e74 100644 --- a/test/parallel/test-stdout-cannot-be-closed-child-process-pipe.js +++ b/test/parallel/test-stdout-cannot-be-closed-child-process-pipe.js @@ -1,5 +1,5 @@ 'use strict'; -require('../common'); +const common = require('../common'); const assert = require('assert'); if (process.argv[2] === 'child') @@ -23,10 +23,10 @@ function parent() { err += c; }); - child.on('close', function(code, signal) { + child.on('close', common.mustCall((code, signal) => { assert.strictEqual(code, 0); assert.strictEqual(err, ''); assert.strictEqual(out, 'foo'); console.log('ok'); - }); + })); } diff --git a/test/parallel/test-strace-openat-openssl.js b/test/parallel/test-strace-openat-openssl.js index 8e4a38fc6e7eaf..234c12f126f093 100644 --- a/test/parallel/test-strace-openat-openssl.js +++ b/test/parallel/test-strace-openat-openssl.js @@ -32,7 +32,7 @@ if (spawnSync('strace').error !== undefined) { // stderr is the default for strace const rl = createInterface({ input: strace.stderr }); - rl.on('line', (line) => { + rl.on('line', common.mustCallAtLeast((line) => { if (!line.startsWith('open')) { return; } @@ -48,7 +48,7 @@ if (spawnSync('strace').error !== undefined) { } assert(allowedOpenCalls.delete(file), `${file} is not in the list of allowed openat calls`); - }); + })); const debugOutput = []; strace.stderr.setEncoding('utf8'); strace.stderr.on('data', (chunk) => { diff --git a/test/parallel/test-stream-big-push.js b/test/parallel/test-stream-big-push.js index f9e75edd3f89d1..9923057932fb01 100644 --- a/test/parallel/test-stream-big-push.js +++ b/test/parallel/test-stream-big-push.js @@ -61,7 +61,7 @@ assert.strictEqual(chunk, str); chunk = r.read(); assert.strictEqual(chunk, null); -r.once('readable', () => { +r.once('readable', common.mustCall(() => { // This time, we'll get *all* the remaining data, because // it's been added synchronously, as the read WOULD take // us below the hwm, and so it triggered a _read() again, @@ 
-71,4 +71,4 @@ r.once('readable', () => { chunk = r.read(); assert.strictEqual(chunk, null); -}); +})); diff --git a/test/parallel/test-stream-compose.js b/test/parallel/test-stream-compose.js index d7a54e177668a2..36581d6d858276 100644 --- a/test/parallel/test-stream-compose.js +++ b/test/parallel/test-stream-compose.js @@ -219,9 +219,9 @@ const assert = require('assert'); .end(true) .on('data', common.mustNotCall()) .on('end', common.mustNotCall()) - .on('error', (err) => { + .on('error', common.mustCall((err) => { assert.strictEqual(err, _err); - }); + })); } { @@ -251,9 +251,9 @@ const assert = require('assert'); .end(true) .on('data', common.mustNotCall()) .on('end', common.mustNotCall()) - .on('error', (err) => { + .on('error', common.mustCall((err) => { assert.strictEqual(err, _err); - }); + })); } { diff --git a/test/parallel/test-stream-construct.js b/test/parallel/test-stream-construct.js index 907b9aa0e3e296..394ff503c66540 100644 --- a/test/parallel/test-stream-construct.js +++ b/test/parallel/test-stream-construct.js @@ -108,9 +108,9 @@ function testDestroy(factory) { s.on('close', common.mustCall(() => { assert.strictEqual(constructed, true); })); - s.destroy(null, () => { + s.destroy(null, common.mustCall(() => { assert.strictEqual(constructed, true); - }); + })); } { @@ -142,10 +142,10 @@ function testDestroy(factory) { s.on('error', common.mustCall((err) => { assert.strictEqual(err.message, 'kaboom'); })); - s.destroy(new Error('kaboom'), (err) => { + s.destroy(new Error('kaboom'), common.mustCall((err) => { assert.strictEqual(err.message, 'kaboom'); assert.strictEqual(constructed, true); - }); + })); } { diff --git a/test/parallel/test-stream-destroy.js b/test/parallel/test-stream-destroy.js index 5269ccfec50271..12706714aa7c8a 100644 --- a/test/parallel/test-stream-destroy.js +++ b/test/parallel/test-stream-destroy.js @@ -50,7 +50,7 @@ const http = require('http'); } { - const server = http.createServer((req, res) => { + const server = http.createServer(common.mustCallAtLeast((req, res) => { destroy(req); req.on('error', common.mustCall((err) => { assert.strictEqual(err.name, 'AbortError'); @@ -58,16 +58,16 @@ const http = require('http'); req.on('close', common.mustCall(() => { res.end('hello'); })); - }); + })); - server.listen(0, () => { + server.listen(0, common.mustCall(() => { const req = http.request({ method: 'POST', port: server.address().port, agent: new http.Agent() }); - req.on('response', (res) => { + req.on('response', common.mustCall((res) => { const buf = []; res.on('data', (data) => buf.push(data)); res.on('end', common.mustCall(() => { @@ -77,14 +77,14 @@ const http = require('http'); ); server.close(); })); - }); + })); req.end('asd'); - }); + })); } { - const server = http.createServer((req, res) => { + const server = http.createServer(common.mustCallAtLeast((req, res) => { req .resume() .on('end', () => { @@ -95,15 +95,15 @@ const http = require('http'); req.on('close', common.mustCall(() => { res.end('hello'); })); - }); + })); - server.listen(0, () => { + server.listen(0, common.mustCall(() => { const req = http.request({ method: 'POST', port: server.address().port, agent: new http.Agent() }); - req.on('response', (res) => { + req.on('response', common.mustCall((res) => { const buf = []; res.on('data', (data) => buf.push(data)); res.on('end', common.mustCall(() => { @@ -113,8 +113,8 @@ const http = require('http'); ); server.close(); })); - }); + })); req.end('asd'); - }); + })); } diff --git a/test/parallel/test-stream-drop-take.js 
b/test/parallel/test-stream-drop-take.js index 97e6c74dfa67ea..da09d242f0c492 100644 --- a/test/parallel/test-stream-drop-take.js +++ b/test/parallel/test-stream-drop-take.js @@ -4,7 +4,7 @@ const common = require('../common'); const { Readable, } = require('stream'); -const { deepStrictEqual, rejects, throws, strictEqual } = require('assert'); +const assert = require('assert'); const { from } = Readable; @@ -20,32 +20,32 @@ const naturals = () => from(async function*() { { // Synchronous streams (async () => { - deepStrictEqual(await from([1, 2, 3]).drop(2).toArray(), [3]); - deepStrictEqual(await from([1, 2, 3]).take(1).toArray(), [1]); - deepStrictEqual(await from([]).drop(2).toArray(), []); - deepStrictEqual(await from([]).take(1).toArray(), []); - deepStrictEqual(await from([1, 2, 3]).drop(1).take(1).toArray(), [2]); - deepStrictEqual(await from([1, 2]).drop(0).toArray(), [1, 2]); - deepStrictEqual(await from([1, 2]).take(0).toArray(), []); + assert.deepStrictEqual(await from([1, 2, 3]).drop(2).toArray(), [3]); + assert.deepStrictEqual(await from([1, 2, 3]).take(1).toArray(), [1]); + assert.deepStrictEqual(await from([]).drop(2).toArray(), []); + assert.deepStrictEqual(await from([]).take(1).toArray(), []); + assert.deepStrictEqual(await from([1, 2, 3]).drop(1).take(1).toArray(), [2]); + assert.deepStrictEqual(await from([1, 2]).drop(0).toArray(), [1, 2]); + assert.deepStrictEqual(await from([1, 2]).take(0).toArray(), []); })().then(common.mustCall()); // Asynchronous streams (async () => { - deepStrictEqual(await fromAsync([1, 2, 3]).drop(2).toArray(), [3]); - deepStrictEqual(await fromAsync([1, 2, 3]).take(1).toArray(), [1]); - deepStrictEqual(await fromAsync([]).drop(2).toArray(), []); - deepStrictEqual(await fromAsync([]).take(1).toArray(), []); - deepStrictEqual(await fromAsync([1, 2, 3]).drop(1).take(1).toArray(), [2]); - deepStrictEqual(await fromAsync([1, 2]).drop(0).toArray(), [1, 2]); - deepStrictEqual(await fromAsync([1, 2]).take(0).toArray(), []); + assert.deepStrictEqual(await fromAsync([1, 2, 3]).drop(2).toArray(), [3]); + assert.deepStrictEqual(await fromAsync([1, 2, 3]).take(1).toArray(), [1]); + assert.deepStrictEqual(await fromAsync([]).drop(2).toArray(), []); + assert.deepStrictEqual(await fromAsync([]).take(1).toArray(), []); + assert.deepStrictEqual(await fromAsync([1, 2, 3]).drop(1).take(1).toArray(), [2]); + assert.deepStrictEqual(await fromAsync([1, 2]).drop(0).toArray(), [1, 2]); + assert.deepStrictEqual(await fromAsync([1, 2]).take(0).toArray(), []); })().then(common.mustCall()); // Infinite streams // Asynchronous streams (async () => { - deepStrictEqual(await naturals().take(1).toArray(), [1]); - deepStrictEqual(await naturals().drop(1).take(1).toArray(), [2]); + assert.deepStrictEqual(await naturals().take(1).toArray(), [1]); + assert.deepStrictEqual(await naturals().drop(1).take(1).toArray(), [2]); const next10 = [11, 12, 13, 14, 15, 16, 17, 18, 19, 20]; - deepStrictEqual(await naturals().drop(10).take(10).toArray(), next10); - deepStrictEqual(await naturals().take(5).take(1).toArray(), [1]); + assert.deepStrictEqual(await naturals().drop(10).take(10).toArray(), next10); + assert.deepStrictEqual(await naturals().take(5).take(1).toArray(), [1]); })().then(common.mustCall()); } @@ -66,7 +66,7 @@ const naturals = () => from(async function*() { stream.take(1) .toArray() .then(common.mustCall(() => { - strictEqual(reached, false); + assert.strictEqual(reached, false); })) .finally(() => resolve()); } @@ -75,20 +75,20 @@ const naturals = () => from(async 
function*() { // Coercion (async () => { // The spec made me do this ^^ - deepStrictEqual(await naturals().take('cat').toArray(), []); - deepStrictEqual(await naturals().take('2').toArray(), [1, 2]); - deepStrictEqual(await naturals().take(true).toArray(), [1]); + assert.deepStrictEqual(await naturals().take('cat').toArray(), []); + assert.deepStrictEqual(await naturals().take('2').toArray(), [1, 2]); + assert.deepStrictEqual(await naturals().take(true).toArray(), [1]); })().then(common.mustCall()); } { // Support for AbortSignal const ac = new AbortController(); - rejects( + assert.rejects( Readable.from([1, 2, 3]).take(1, { signal: ac.signal }).toArray(), { name: 'AbortError', }).then(common.mustCall()); - rejects( + assert.rejects( Readable.from([1, 2, 3]).drop(1, { signal: ac.signal }).toArray(), { name: 'AbortError', }).then(common.mustCall()); @@ -98,7 +98,7 @@ const naturals = () => from(async function*() { { // Support for AbortSignal, already aborted const signal = AbortSignal.abort(); - rejects( + assert.rejects( Readable.from([1, 2, 3]).take(1, { signal }).toArray(), { name: 'AbortError', }).then(common.mustCall()); @@ -113,12 +113,12 @@ const naturals = () => from(async function*() { ]; for (const example of invalidArgs) { - throws(() => from([]).take(example).toArray(), /ERR_OUT_OF_RANGE/); + assert.throws(() => from([]).take(example).toArray(), /ERR_OUT_OF_RANGE/); } - throws(() => Readable.from([1]).drop(1, 1), /ERR_INVALID_ARG_TYPE/); - throws(() => Readable.from([1]).drop(1, { signal: true }), /ERR_INVALID_ARG_TYPE/); + assert.throws(() => Readable.from([1]).drop(1, 1), /ERR_INVALID_ARG_TYPE/); + assert.throws(() => Readable.from([1]).drop(1, { signal: true }), /ERR_INVALID_ARG_TYPE/); - throws(() => Readable.from([1]).take(1, 1), /ERR_INVALID_ARG_TYPE/); - throws(() => Readable.from([1]).take(1, { signal: true }), /ERR_INVALID_ARG_TYPE/); + assert.throws(() => Readable.from([1]).take(1, 1), /ERR_INVALID_ARG_TYPE/); + assert.throws(() => Readable.from([1]).take(1, { signal: true }), /ERR_INVALID_ARG_TYPE/); } diff --git a/test/parallel/test-stream-duplex-from.js b/test/parallel/test-stream-duplex-from.js index e3c117ff8dedb0..631b3586b36191 100644 --- a/test/parallel/test-stream-duplex-from.js +++ b/test/parallel/test-stream-duplex-from.js @@ -136,13 +136,13 @@ const { Blob } = require('buffer'); } yield rest; }), - async function * (source) { // eslint-disable-line require-yield + common.mustCall(async function * (source) { // eslint-disable-line require-yield let ret = ''; for await (const x of source) { ret += x; } assert.strictEqual(ret, 'abcdefghi'); - }, + }), common.mustSucceed(), ); } diff --git a/test/parallel/test-stream-duplex-writable-finished.js b/test/parallel/test-stream-duplex-writable-finished.js index 20c0781a22273d..e679c7295d614e 100644 --- a/test/parallel/test-stream-duplex-writable-finished.js +++ b/test/parallel/test-stream-duplex-writable-finished.js @@ -14,11 +14,11 @@ const assert = require('assert'); { const duplex = new Duplex(); - duplex._write = (chunk, encoding, cb) => { + duplex._write = common.mustCall((chunk, encoding, cb) => { // The state finished should start in false. 
assert.strictEqual(duplex.writableFinished, false); cb(); - }; + }); duplex.on('finish', common.mustCall(() => { assert.strictEqual(duplex.writableFinished, true); diff --git a/test/parallel/test-stream-end-paused.js b/test/parallel/test-stream-end-paused.js index f29c82f532c22b..f3d7289699519a 100644 --- a/test/parallel/test-stream-end-paused.js +++ b/test/parallel/test-stream-end-paused.js @@ -28,15 +28,13 @@ const assert = require('assert'); const Readable = require('stream').Readable; const stream = new Readable(); let calledRead = false; -stream._read = function() { +stream._read = common.mustCall(function() { assert(!calledRead); calledRead = true; this.push(null); -}; - -stream.on('data', function() { - throw new Error('should not ever get data'); }); + +stream.on('data', common.mustNotCall()); stream.pause(); setTimeout(common.mustCall(function() { diff --git a/test/parallel/test-stream-filter.js b/test/parallel/test-stream-filter.js index 173e4f47e24353..0b70c391c88fb1 100644 --- a/test/parallel/test-stream-filter.js +++ b/test/parallel/test-stream-filter.js @@ -132,10 +132,10 @@ const { setTimeout } = require('timers/promises'); name: 'AbortError', }).then(common.mustCall()); - setImmediate(() => { + setImmediate(common.mustCall(() => { ac.abort(); assert.strictEqual(calls, 2); - }); + })); } { diff --git a/test/parallel/test-stream-finished.js b/test/parallel/test-stream-finished.js index 9d66cbe59b11f3..b55107a7a6440f 100644 --- a/test/parallel/test-stream-finished.js +++ b/test/parallel/test-stream-finished.js @@ -342,10 +342,10 @@ function testClosed(factory) { const s = factory({ emitClose: false, - destroy(err, cb) { + destroy: common.mustCall((err, cb) => { cb(); finished(s, common.mustCall()); - } + }), }); s.destroy(); } @@ -354,14 +354,14 @@ function testClosed(factory) { // Invoke with deep async. 
const s = factory({ - destroy(err, cb) { - setImmediate(() => { + destroy: common.mustCall((err, cb) => { + setImmediate(common.mustCall(() => { cb(); - setImmediate(() => { + setImmediate(common.mustCall(() => { finished(s, common.mustCall()); - }); - }); - } + })); + })); + }), }); s.destroy(); } @@ -567,13 +567,13 @@ testClosed((opts) => new Writable({ write() {}, ...opts })); })); res.end(); })) - .listen(0, function() { + .listen(0, common.mustCall(function() { http.request({ method: 'GET', port: this.address().port }).end() .on('response', common.mustCall()); - }); + })); } { @@ -584,12 +584,12 @@ testClosed((opts) => new Writable({ write() {}, ...opts })); })); })); req.destroy(); - })).listen(0, function() { + })).listen(0, common.mustCall(function() { http.request({ method: 'GET', port: this.address().port }).end().on('error', common.mustCall()); - }); + })); } { @@ -601,10 +601,10 @@ testClosed((opts) => new Writable({ write() {}, ...opts })); w.aborted = false; w.end(); let closed = false; - w.on('finish', () => { + w.on('finish', common.mustCall(() => { assert.strictEqual(closed, false); w.emit('aborted'); - }); + })); w.on('close', common.mustCall(() => { closed = true; })); @@ -655,7 +655,7 @@ testClosed((opts) => new Writable({ write() {}, ...opts })); finished(res, common.mustCall(function(err) { assert.strictEqual(err, undefined); })); - })).listen(0, function() { + })).listen(0, common.mustCall(function() { http.request( { method: 'GET', port: this.address().port }, common.mustCall(function(res) { @@ -665,7 +665,7 @@ testClosed((opts) => new Writable({ write() {}, ...opts })); })); }) ).end(); - }); + })); } { diff --git a/test/parallel/test-stream-forEach.js b/test/parallel/test-stream-forEach.js index 627ea0ccf1be60..cccd263adf4c70 100644 --- a/test/parallel/test-stream-forEach.js +++ b/test/parallel/test-stream-forEach.js @@ -104,10 +104,10 @@ const { once } = require('events'); name: 'AbortError', }).then(common.mustCall()); - setImmediate(() => { + setImmediate(common.mustCall(() => { ac.abort(); assert.strictEqual(calls, 2); - }); + })); } { diff --git a/test/parallel/test-stream-pipe-await-drain.js b/test/parallel/test-stream-pipe-await-drain.js index 35b86f67f99676..9ce0e465243fde 100644 --- a/test/parallel/test-stream-pipe-await-drain.js +++ b/test/parallel/test-stream-pipe-await-drain.js @@ -22,7 +22,7 @@ writer1._write = common.mustCall(function(chunk, encoding, cb) { process.nextTick(cb); }, 1); -writer1.once('chunk-received', () => { +writer1.once('chunk-received', common.mustCallAtLeast(() => { assert.strictEqual( reader._readableState.awaitDrainWriters.size, 0, @@ -34,7 +34,7 @@ writer1.once('chunk-received', () => { // "done" processing. 
reader.push(buffer); }); -}); +})); // A "slow" consumer: writer2._write = common.mustCall((chunk, encoding, cb) => { diff --git a/test/parallel/test-stream-pipe-flow.js b/test/parallel/test-stream-pipe-flow.js index 1f2e8f54cec409..e985f0f8aed982 100644 --- a/test/parallel/test-stream-pipe-flow.js +++ b/test/parallel/test-stream-pipe-flow.js @@ -74,9 +74,7 @@ const { Readable, Writable, PassThrough } = require('stream'); const pt = rs .pipe(new PassThrough({ objectMode: true, highWaterMark: 2 })); assert.strictEqual(pt.listenerCount('drain'), 0); - pt.on('finish', () => { - assert.strictEqual(pt.listenerCount('drain'), 0); - }); + pt.on('finish', common.mustNotCall()); rs.push('asd'); assert.strictEqual(pt.listenerCount('drain'), 0); diff --git a/test/parallel/test-stream-pipeline-http2.js b/test/parallel/test-stream-pipeline-http2.js index d7ff08888afb8a..c35cd696bd1e55 100644 --- a/test/parallel/test-stream-pipeline-http2.js +++ b/test/parallel/test-stream-pipeline-http2.js @@ -7,11 +7,11 @@ const { Readable, pipeline } = require('stream'); const http2 = require('http2'); { - const server = http2.createServer((req, res) => { + const server = http2.createServer(common.mustCallAtLeast((req, res) => { pipeline(req, res, common.mustCall()); - }); + })); - server.listen(0, () => { + server.listen(0, common.mustCall(() => { const url = `http://localhost:${server.address().port}`; const client = http2.connect(url); const req = client.request({ ':method': 'POST' }); @@ -32,5 +32,5 @@ const http2 = require('http2'); cnt--; if (cnt === 0) rs.destroy(); }); - }); + })); } diff --git a/test/parallel/test-stream-pipeline-listeners.js b/test/parallel/test-stream-pipeline-listeners.js index 81e287b77c7589..456ff1d1124250 100644 --- a/test/parallel/test-stream-pipeline-listeners.js +++ b/test/parallel/test-stream-pipeline-listeners.js @@ -24,10 +24,10 @@ pipeline(a, b, common.mustCall((error) => { assert(a.listenerCount('error') > 0); assert.strictEqual(b.listenerCount('error'), 0); - setTimeout(() => { + setTimeout(common.mustCall(() => { assert.strictEqual(b.listenerCount('error'), 0); b.destroy(new Error('no way')); - }, 100); + }), 100); })); // Async generators @@ -47,10 +47,10 @@ const d = pipeline( assert(c.listenerCount('error') > 0); assert.strictEqual(d.listenerCount('error'), 0); - setTimeout(() => { + setTimeout(common.mustCall(() => { assert.strictEqual(b.listenerCount('error'), 0); d.destroy(new Error('no way')); - }, 100); + }), 100); }) ); @@ -69,8 +69,8 @@ pipeline(e, f, common.mustCall((error) => { assert(e.listenerCount('error') > 0); assert(f.listenerCount('error') > 0); - setTimeout(() => { + setTimeout(common.mustCall(() => { assert(f.listenerCount('error') > 0); f.destroy(new Error('no way')); - }, 100); + }), 100); })); diff --git a/test/parallel/test-stream-pipeline.js b/test/parallel/test-stream-pipeline.js index 2bbdabe9d347b1..6220bc15365361 100644 --- a/test/parallel/test-stream-pipeline.js +++ b/test/parallel/test-stream-pipeline.js @@ -170,13 +170,13 @@ tmpdir.refresh(); pipeline(rs, res, () => {}); }); - server.listen(0, () => { + server.listen(0, common.mustCall(() => { const req = http.request({ port: server.address().port }); req.end(); - req.on('response', (res) => { + req.on('response', common.mustCall((res) => { const buf = []; res.on('data', (data) => buf.push(data)); res.on('end', common.mustCall(() => { @@ -186,12 +186,12 @@ tmpdir.refresh(); ); server.close(); })); - }); - }); + })); + })); } { - const server = http.createServer((req, res) => { + const server = 
http.createServer(common.mustCallAtLeast((req, res) => { let sent = false; const rs = new Readable({ read() { @@ -208,7 +208,7 @@ tmpdir.refresh(); }); pipeline(rs, res, () => {}); - }); + })); server.listen(0, () => { const req = http.request({ @@ -226,7 +226,7 @@ tmpdir.refresh(); } { - const server = http.createServer((req, res) => { + const server = http.createServer(common.mustCallAtLeast((req, res) => { let sent = 0; const rs = new Readable({ read() { @@ -241,7 +241,7 @@ tmpdir.refresh(); }); pipeline(rs, res, () => {}); - }); + })); let cnt = 10; @@ -253,27 +253,27 @@ tmpdir.refresh(); } }); - server.listen(0, () => { + server.listen(0, common.mustCall(() => { const req = http.request({ port: server.address().port }); req.end(); - req.on('response', (res) => { + req.on('response', common.mustCall((res) => { pipeline(res, badSink, common.mustCall((err) => { assert.deepStrictEqual(err, new Error('kaboom')); server.close(); })); - }); - }); + })); + })); } { - const server = http.createServer((req, res) => { + const server = http.createServer(common.mustCallAtLeast((req, res) => { pipeline(req, res, common.mustSucceed()); - }); + })); - server.listen(0, () => { + server.listen(0, common.mustCall(() => { const req = http.request({ port: server.address().port }); @@ -299,11 +299,11 @@ tmpdir.refresh(); if (cnt === 0) rs.destroy(); }); }); - }); + })); } { - const makeTransform = () => { + const makeTransform = common.mustCallAtLeast(() => { const tr = new Transform({ transform(data, enc, cb) { cb(null, data); @@ -312,7 +312,7 @@ tmpdir.refresh(); tr.on('close', common.mustCall()); return tr; - }; + }); const rs = new Readable({ read() { @@ -375,10 +375,10 @@ tmpdir.refresh(); }); const ws = new Writable({ - write(data, enc, cb) { + write: common.mustCallAtLeast((data, enc, cb) => { assert.deepStrictEqual(data, expected.shift()); cb(); - } + }), }); let finished = false; @@ -581,8 +581,8 @@ tmpdir.refresh(); const server = http.Server(function(req, res) { res.write('asd'); }); - server.listen(0, function() { - http.get({ port: this.address().port }, (res) => { + server.listen(0, common.mustCall(function() { + http.get({ port: this.address().port }, common.mustCall((res) => { const stream = new PassThrough(); stream.on('error', common.mustCall()); @@ -597,8 +597,8 @@ tmpdir.refresh(); ); stream.destroy(new Error('oh no')); - }).on('error', common.mustNotCall()); - }); + })).on('error', common.mustNotCall()); + })); } { @@ -1004,9 +1004,9 @@ tmpdir.refresh(); cb(); } }); - pipeline(r, w, (err) => { + pipeline(r, w, common.mustCall((err) => { assert.strictEqual(err, undefined); - }); + })); r.push('asd'); r.push(null); r.emit('close'); @@ -1084,14 +1084,13 @@ tmpdir.refresh(); { const server = http.createServer((req, res) => { req.socket.on('error', common.mustNotCall()); - pipeline(req, new PassThrough(), (err) => { - assert.ifError(err); + pipeline(req, new PassThrough(), common.mustSucceed(() => { res.end(); server.close(); - }); + })); }); - server.listen(0, () => { + server.listen(0, common.mustCall(() => { const req = http.request({ method: 'PUT', port: server.address().port @@ -1099,7 +1098,7 @@ tmpdir.refresh(); req.end('asd123'); req.on('response', common.mustCall()); req.on('error', common.mustNotCall()); - }); + })); } { @@ -1210,10 +1209,10 @@ tmpdir.refresh(); d.push(null); }), final: common.mustCall((cb) => { - setTimeout(() => { + setTimeout(common.mustCall(() => { assert.strictEqual(d.destroyed, false); cb(); - }, 1000); + }), 1000); }), destroy: common.mustNotCall() }); @@ 
-1254,10 +1253,10 @@ tmpdir.refresh(); d.push(null); }), final: common.mustCall((cb) => { - setTimeout(() => { + setTimeout(common.mustCall(() => { assert.strictEqual(d.destroyed, false); cb(); - }, 1000); + }), 1000); }), // `destroy()` won't be invoked by pipeline since // the writable side has not completed when @@ -1691,11 +1690,11 @@ tmpdir.refresh(); }, }); - pipeline(src, dst, (err) => { + pipeline(src, dst, common.mustCall((err) => { assert.strictEqual(src.closed, true); assert.strictEqual(dst.closed, true); assert.strictEqual(err.message, 'problem'); - }); + })); src.destroy(new Error('problem')); } @@ -1712,7 +1711,7 @@ tmpdir.refresh(); passThroughs.push(new PassThrough()); } - pipeline(src, ...passThroughs, dst, (err) => { + pipeline(src, ...passThroughs, dst, common.mustCall((err) => { assert.strictEqual(src.closed, true); assert.strictEqual(dst.closed, true); assert.strictEqual(err.message, 'problem'); @@ -1720,7 +1719,7 @@ tmpdir.refresh(); for (let i = 0; i < passThroughs.length; i++) { assert.strictEqual(passThroughs[i].closed, true); } - }); + })); src.destroy(new Error('problem')); } diff --git a/test/parallel/test-stream-readable-aborted.js b/test/parallel/test-stream-readable-aborted.js index 9badffc51fc424..0796e1901c7ce4 100644 --- a/test/parallel/test-stream-readable-aborted.js +++ b/test/parallel/test-stream-readable-aborted.js @@ -49,9 +49,9 @@ const { Readable, Duplex } = require('stream'); assert.strictEqual(readable.readableAborted, false); readable.destroy(); assert.strictEqual(readable.readableAborted, false); - queueMicrotask(() => { + queueMicrotask(common.mustCall(() => { assert.strictEqual(readable.readableAborted, false); - }); + })); })); readable.resume(); } diff --git a/test/parallel/test-stream-readable-async-iterators.js b/test/parallel/test-stream-readable-async-iterators.js index beece038772a34..8081b35191a140 100644 --- a/test/parallel/test-stream-readable-async-iterators.js +++ b/test/parallel/test-stream-readable-async-iterators.js @@ -181,18 +181,14 @@ async function tests() { resolved.forEach(common.mustCall( (item, i) => assert.strictEqual(item.value, 'hello-' + i), max)); - errors.slice(0, 1).forEach((promise) => { - promise.catch(common.mustCall((err) => { - assert.strictEqual(err.message, 'kaboom'); - })); - }); + assert.rejects(errors[0], { message: 'kaboom' }).then(common.mustCall()); - errors.slice(1).forEach((promise) => { + errors.slice(1).forEach(common.mustCallAtLeast((promise) => { promise.then(common.mustCall(({ done, value }) => { assert.strictEqual(done, true); assert.strictEqual(value, undefined); })); - }); + })); readable.destroy(new Error('kaboom')); } @@ -643,9 +639,9 @@ async function tests() { this.push('asd'); this.push(null); } - }).on('end', () => { + }).on('end', common.mustCall(() => { assert.strictEqual(r.destroyed, false); - }); + })); for await (const chunk of r) { } // eslint-disable-line no-unused-vars, no-empty assert.strictEqual(r.destroyed, true); @@ -703,15 +699,11 @@ async function tests() { }); r.destroy(); - r.on('close', () => { + r.on('close', common.mustCall(() => { const it = r[Symbol.asyncIterator](); const next = it.next(); - next - .then(common.mustNotCall()) - .catch(common.mustCall((err) => { - assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE'); - })); - }); + assert.rejects(next, { code: 'ERR_STREAM_PREMATURE_CLOSE' }).then(common.mustCall()); + })); } { @@ -812,7 +804,7 @@ async function tests() { response.write('never ends'); }); - server.listen(() => { + 
server.listen(common.mustCall(() => { _req = http.request(`http://localhost:${server.address().port}`) .on('response', common.mustCall(async (res) => { setTimeout(() => { @@ -834,7 +826,7 @@ async function tests() { })) .on('error', common.mustCall()) .end(); - }); + })); } { @@ -853,12 +845,12 @@ async function tests() { } const str = JSON.stringify({ asd: true }); - const server = http.createServer(async (request, response) => { + const server = http.createServer(common.mustCallAtLeast(async (request, response) => { const body = await getParsedBody(request); response.statusCode = 200; assert.strictEqual(JSON.stringify(body), str); response.end(JSON.stringify(body)); - }).listen(() => { + })).listen(common.mustCall(() => { http .request({ method: 'POST', @@ -866,15 +858,15 @@ async function tests() { port: server.address().port, }) .end(str) - .on('response', async (res) => { + .on('response', common.mustCall(async (res) => { let body = ''; for await (const chunk of res) { body += chunk; } assert.strictEqual(body, str); server.close(); - }); - }); + })); + })); } // To avoid missing some tests if a promise does not resolve diff --git a/test/parallel/test-stream-readable-didRead.js b/test/parallel/test-stream-readable-didRead.js index 878340ba190786..46e8ac6b2986af 100644 --- a/test/parallel/test-stream-readable-didRead.js +++ b/test/parallel/test-stream-readable-didRead.js @@ -30,12 +30,12 @@ function check(readable, data, fn) { } readable.on('close', common.mustCall()); fn(); - setImmediate(() => { + setImmediate(common.mustCall(() => { assert.strictEqual(readable.readableDidRead, data > 0); if (data > 0) { assert.strictEqual(isDisturbed(readable), true); } - }); + })); } { diff --git a/test/parallel/test-stream-readable-ended.js b/test/parallel/test-stream-readable-ended.js index bdd714c9554b81..8f6eed464a4e27 100644 --- a/test/parallel/test-stream-readable-ended.js +++ b/test/parallel/test-stream-readable-ended.js @@ -14,14 +14,14 @@ const assert = require('assert'); { const readable = new Readable(); - readable._read = () => { + readable._read = common.mustCall(() => { // The state ended should start in false. assert.strictEqual(readable.readableEnded, false); readable.push('asd'); assert.strictEqual(readable.readableEnded, false); readable.push(null); assert.strictEqual(readable.readableEnded, false); - }; + }); readable.on('end', common.mustCall(() => { assert.strictEqual(readable.readableEnded, true); diff --git a/test/parallel/test-stream-readable-event.js b/test/parallel/test-stream-readable-event.js index 4f2383508aa61c..dc62b5e566b39c 100644 --- a/test/parallel/test-stream-readable-event.js +++ b/test/parallel/test-stream-readable-event.js @@ -37,11 +37,11 @@ const Readable = require('stream').Readable; // This triggers a 'readable' event, which is lost. r.push(Buffer.from('blerg')); - setTimeout(function() { + setTimeout(common.mustCall(() => { // We're testing what we think we are assert(!r._readableState.reading); r.on('readable', common.mustCall()); - }, 1); + }), 1); } { @@ -57,11 +57,11 @@ const Readable = require('stream').Readable; // This triggers a 'readable' event, which is lost. 
r.push(Buffer.from('bl')); - setTimeout(function() { + setTimeout(common.mustCall(() => { // Assert we're testing what we think we are assert(r._readableState.reading); r.on('readable', common.mustCall()); - }, 1); + }), 1); } { @@ -77,11 +77,11 @@ const Readable = require('stream').Readable; r.push(Buffer.from('blerg')); r.push(null); - setTimeout(function() { + setTimeout(common.mustCall(() => { // Assert we're testing what we think we are assert(!r._readableState.reading); r.on('readable', common.mustCall()); - }, 1); + }), 1); } { diff --git a/test/parallel/test-stream-readable-hwm-0-no-flow-data.js b/test/parallel/test-stream-readable-hwm-0-no-flow-data.js index 5f0186d720dd63..bdf99d14cc7409 100644 --- a/test/parallel/test-stream-readable-hwm-0-no-flow-data.js +++ b/test/parallel/test-stream-readable-hwm-0-no-flow-data.js @@ -49,7 +49,7 @@ assert.strictEqual(r.readableFlowing, false); // // We use setImmediate here to give the stream enough time to emit all the // events it's about to emit. -setImmediate(() => { +setImmediate(common.mustCall(() => { // Only the _read, push, readable calls have happened. No data must be // emitted yet. @@ -71,7 +71,7 @@ setImmediate(() => { // Using setImmediate again to give the stream enough time to emit all the // events it wants to emit. assert.strictEqual(r.read(), null); - setImmediate(() => { + setImmediate(common.mustCall(() => { // There's a new 'readable' event after the data has been pushed. // The 'end' event will be emitted only after a 'read()'. @@ -100,5 +100,5 @@ setImmediate(() => { ['_read:a', 'push:a', 'readable', 'data:a', '_read:null', 'push:null', 'readable', 'end']); }); - }); -}); + })); +})); diff --git a/test/parallel/test-stream-readable-strategy-option.js b/test/parallel/test-stream-readable-strategy-option.js index a32e70ef2155ea..255e570fdc4c60 100644 --- a/test/parallel/test-stream-readable-strategy-option.js +++ b/test/parallel/test-stream-readable-strategy-option.js @@ -2,7 +2,6 @@ const common = require('../common'); const { Readable } = require('stream'); const assert = require('assert'); -const { strictEqual } = require('assert'); { // Strategy 2 @@ -53,13 +52,13 @@ const { strictEqual } = require('assert'); const stringStream = new ReadableStream( { - start(controller) { + start: common.mustCall((controller) => { // Check if the strategy is being assigned on the init of the ReadableStream - strictEqual(controller.desiredSize, desireSizeExpected); + assert.strictEqual(controller.desiredSize, desireSizeExpected); controller.enqueue('a'); controller.enqueue('b'); controller.close(); - }, + }), }, new CountQueuingStrategy({ highWaterMark: desireSizeExpected }) ); @@ -69,7 +68,7 @@ const { strictEqual } = require('assert'); reader.read().then(common.mustCall()); reader.read().then(common.mustCall()); reader.read().then(({ value, done }) => { - strictEqual(value, undefined); - strictEqual(done, true); + assert.strictEqual(value, undefined); + assert.strictEqual(done, true); }); } diff --git a/test/parallel/test-stream-reduce.js b/test/parallel/test-stream-reduce.js index 42c734305fd559..99029f6f8310d5 100644 --- a/test/parallel/test-stream-reduce.js +++ b/test/parallel/test-stream-reduce.js @@ -103,13 +103,13 @@ function sum(p, c) { // Support for AbortSignal - deep const stream = Readable.from([1, 2, 3]); assert.rejects(async () => { - await stream.reduce(async (p, c, { signal }) => { + await stream.reduce(common.mustCallAtLeast(async (p, c, { signal }) => { signal.addEventListener('abort', common.mustCall(), { once: 
true }); if (c === 3) { await new Promise(() => {}); // Explicitly do not pass signal here } return Promise.resolve(); - }, 0, { signal: AbortSignal.abort() }); + }, 0), 0, { signal: AbortSignal.abort() }); }, { name: 'AbortError', }).then(common.mustCall(() => { diff --git a/test/parallel/test-stream-transform-final.js b/test/parallel/test-stream-transform-final.js index e0b2b7e40f7610..739324884915ba 100644 --- a/test/parallel/test-stream-transform-final.js +++ b/test/parallel/test-stream-transform-final.js @@ -70,12 +70,12 @@ const t = new stream.Transform({ state++; // finalCallback part 1 assert.strictEqual(state, 10); - setTimeout(function() { + setTimeout(common.mustCall(() => { state++; // finalCallback part 2 assert.strictEqual(state, 11); done(); - }, 100); + }), 100); }, 1), flush: common.mustCall(function(done) { state++; diff --git a/test/parallel/test-stream-transform-flush-data.js b/test/parallel/test-stream-transform-flush-data.js index 51e2c8bc5254e3..38880fcbd826db 100644 --- a/test/parallel/test-stream-transform-flush-data.js +++ b/test/parallel/test-stream-transform-flush-data.js @@ -1,6 +1,6 @@ 'use strict'; -require('../common'); +const common = require('../common'); const assert = require('assert'); const Transform = require('stream').Transform; @@ -23,6 +23,6 @@ const t = new Transform({ }); t.end(Buffer.from('blerg')); -t.on('data', (data) => { +t.on('data', common.mustCall((data) => { assert.strictEqual(data.toString(), expected); -}); +})); diff --git a/test/parallel/test-stream-unpipe-event.js b/test/parallel/test-stream-unpipe-event.js index 46cc8e8cb0ae9e..3c2e67d29a92ee 100644 --- a/test/parallel/test-stream-unpipe-event.js +++ b/test/parallel/test-stream-unpipe-event.js @@ -22,9 +22,9 @@ class NeverEndReadable extends Readable { dest.on('pipe', common.mustCall()); dest.on('unpipe', common.mustCall()); src.pipe(dest); - setImmediate(() => { + setImmediate(common.mustCall(() => { assert.strictEqual(src._readableState.pipes.length, 0); - }); + })); } { @@ -33,9 +33,9 @@ class NeverEndReadable extends Readable { dest.on('pipe', common.mustCall()); dest.on('unpipe', common.mustNotCall('unpipe should not have been emitted')); src.pipe(dest); - setImmediate(() => { + setImmediate(common.mustCall(() => { assert.strictEqual(src._readableState.pipes.length, 1); - }); + })); } { @@ -45,9 +45,9 @@ class NeverEndReadable extends Readable { dest.on('unpipe', common.mustCall()); src.pipe(dest); src.unpipe(dest); - setImmediate(() => { + setImmediate(common.mustCall(() => { assert.strictEqual(src._readableState.pipes.length, 0); - }); + })); } { @@ -56,9 +56,9 @@ class NeverEndReadable extends Readable { dest.on('pipe', common.mustCall()); dest.on('unpipe', common.mustCall()); src.pipe(dest, { end: false }); - setImmediate(() => { + setImmediate(common.mustCall(() => { assert.strictEqual(src._readableState.pipes.length, 0); - }); + })); } { @@ -67,9 +67,9 @@ class NeverEndReadable extends Readable { dest.on('pipe', common.mustCall()); dest.on('unpipe', common.mustNotCall('unpipe should not have been emitted')); src.pipe(dest, { end: false }); - setImmediate(() => { + setImmediate(common.mustCall(() => { assert.strictEqual(src._readableState.pipes.length, 1); - }); + })); } { @@ -79,7 +79,7 @@ class NeverEndReadable extends Readable { dest.on('unpipe', common.mustCall()); src.pipe(dest, { end: false }); src.unpipe(dest); - setImmediate(() => { + setImmediate(common.mustCall(() => { assert.strictEqual(src._readableState.pipes.length, 0); - }); + })); } diff --git 
a/test/parallel/test-stream-unshift-empty-chunk.js b/test/parallel/test-stream-unshift-empty-chunk.js index e8136a68e9e6aa..5575d9857454d2 100644 --- a/test/parallel/test-stream-unshift-empty-chunk.js +++ b/test/parallel/test-stream-unshift-empty-chunk.js @@ -20,7 +20,7 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. 'use strict'; -require('../common'); +const common = require('../common'); const assert = require('assert'); // This test verifies that stream.unshift(Buffer.alloc(0)) or @@ -74,7 +74,7 @@ const expect = 'xxxxxxxxxx', 'yyyyy' ]; -r.on('end', () => { +r.on('end', common.mustCall(() => { assert.deepStrictEqual(seen, expect); console.log('ok'); -}); +})); diff --git a/test/parallel/test-stream-unshift-read-race.js b/test/parallel/test-stream-unshift-read-race.js index fe110ea285521e..aeb18282ad72ee 100644 --- a/test/parallel/test-stream-unshift-read-race.js +++ b/test/parallel/test-stream-unshift-read-race.js @@ -43,7 +43,7 @@ for (let i = 0; i < data.length; i++) { let pos = 0; let pushedNull = false; -r._read = function(n) { +r._read = common.mustCallAtLeast(function(n) { assert(!pushedNull, '_read after null push'); // Every third chunk is fast @@ -65,7 +65,7 @@ r._read = function(n) { }, 1); } } -}; +}); function pushError() { r.unshift(Buffer.allocUnsafe(1)); diff --git a/test/parallel/test-stream-wrap.js b/test/parallel/test-stream-wrap.js index 670c05fe3f0307..5d6a60f71d67e8 100644 --- a/test/parallel/test-stream-wrap.js +++ b/test/parallel/test-stream-wrap.js @@ -19,10 +19,10 @@ function testShutdown(callback) { const wrap = new StreamWrap(stream); const req = new ShutdownWrap(); - req.oncomplete = function(code) { + req.oncomplete = common.mustCall(function(code) { assert(code < 0); callback(); - }; + }); req.handle = wrap._handle; // Close the handle to simulate diff --git a/test/parallel/test-stream-writable-change-default-encoding.js b/test/parallel/test-stream-writable-change-default-encoding.js index 94a892567c1b21..dafa16580e8e4f 100644 --- a/test/parallel/test-stream-writable-change-default-encoding.js +++ b/test/parallel/test-stream-writable-change-default-encoding.js @@ -20,7 +20,7 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. 
'use strict'; -require('../common'); +const common = require('../common'); const assert = require('assert'); const stream = require('stream'); @@ -38,17 +38,17 @@ class MyWritable extends stream.Writable { } (function defaultCondingIsUtf8() { - const m = new MyWritable(function(isBuffer, type, enc) { + const m = new MyWritable(common.mustCall((isBuffer, type, enc) => { assert.strictEqual(enc, 'utf8'); - }, { decodeStrings: false }); + }), { decodeStrings: false }); m.write('foo'); m.end(); }()); (function changeDefaultEncodingToAscii() { - const m = new MyWritable(function(isBuffer, type, enc) { + const m = new MyWritable(common.mustCall((isBuffer, type, enc) => { assert.strictEqual(enc, 'ascii'); - }, { decodeStrings: false }); + }), { decodeStrings: false }); m.setDefaultEncoding('ascii'); m.write('bar'); m.end(); @@ -69,9 +69,9 @@ assert.throws(() => { }); (function checkVariableCaseEncoding() { - const m = new MyWritable(function(isBuffer, type, enc) { + const m = new MyWritable(common.mustCall((isBuffer, type, enc) => { assert.strictEqual(enc, 'ascii'); - }, { decodeStrings: false }); + }), { decodeStrings: false }); m.setDefaultEncoding('AsCii'); m.write('bar'); m.end(); diff --git a/test/parallel/test-stream-writable-decoded-encoding.js b/test/parallel/test-stream-writable-decoded-encoding.js index e3caa9928fda8c..6f63c5e6872312 100644 --- a/test/parallel/test-stream-writable-decoded-encoding.js +++ b/test/parallel/test-stream-writable-decoded-encoding.js @@ -20,7 +20,7 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. 'use strict'; -require('../common'); +const common = require('../common'); const assert = require('assert'); const stream = require('stream'); @@ -38,21 +38,21 @@ class MyWritable extends stream.Writable { } { - const m = new MyWritable(function(isBuffer, type, enc) { + const m = new MyWritable(common.mustCall((isBuffer, type, enc) => { assert(isBuffer); assert.strictEqual(type, 'object'); assert.strictEqual(enc, 'buffer'); - }, { decodeStrings: true }); + }), { decodeStrings: true }); m.write('some-text', 'utf8'); m.end(); } { - const m = new MyWritable(function(isBuffer, type, enc) { + const m = new MyWritable(common.mustCall((isBuffer, type, enc) => { assert(!isBuffer); assert.strictEqual(type, 'string'); assert.strictEqual(enc, 'utf8'); - }, { decodeStrings: false }); + }), { decodeStrings: false }); m.write('some-text', 'utf8'); m.end(); } @@ -69,11 +69,11 @@ class MyWritable extends stream.Writable { } { - const w = new MyWritable(function(isBuffer, type, enc) { + const w = new MyWritable(common.mustCall((isBuffer, type, enc) => { assert(!isBuffer); assert.strictEqual(type, 'string'); assert.strictEqual(enc, 'hex'); - }, { + }), { defaultEncoding: 'hex', decodeStrings: false }); @@ -82,11 +82,11 @@ class MyWritable extends stream.Writable { } { - const w = new MyWritable(function(isBuffer, type, enc) { + const w = new MyWritable(common.mustCall((isBuffer, type, enc) => { assert(!isBuffer); assert.strictEqual(type, 'string'); assert.strictEqual(enc, 'utf8'); - }, { + }), { defaultEncoding: null, decodeStrings: false }); @@ -95,11 +95,11 @@ class MyWritable extends stream.Writable { } { - const m = new MyWritable(function(isBuffer, type, enc) { + const m = new MyWritable(common.mustCall((isBuffer, type, enc) => { assert.strictEqual(type, 'object'); assert.strictEqual(enc, 'utf8'); - }, { defaultEncoding: 'hex', - objectMode: true }); + }), { defaultEncoding: 'hex', + objectMode: true }); m.write({ foo: 'bar' }, 'utf8'); m.end(); } diff --git 
a/test/parallel/test-stream-writable-destroy.js b/test/parallel/test-stream-writable-destroy.js index 99981a20345fdc..31e9ac40664fdf 100644 --- a/test/parallel/test-stream-writable-destroy.js +++ b/test/parallel/test-stream-writable-destroy.js @@ -52,10 +52,10 @@ const assert = require('assert'); write(chunk, enc, cb) { cb(); } }); - write._destroy = function(err, cb) { + write._destroy = common.mustCall(function(err, cb) { assert.strictEqual(err, expected); cb(err); - }; + }); const expected = new Error('kaboom'); diff --git a/test/parallel/test-stream-writable-ended-state.js b/test/parallel/test-stream-writable-ended-state.js index 2c40c62a9ee9a5..dbd94f1aa80966 100644 --- a/test/parallel/test-stream-writable-ended-state.js +++ b/test/parallel/test-stream-writable-ended-state.js @@ -7,12 +7,12 @@ const stream = require('stream'); const writable = new stream.Writable(); -writable._write = (chunk, encoding, cb) => { +writable._write = common.mustCall((chunk, encoding, cb) => { assert.strictEqual(writable._writableState.ended, false); assert.strictEqual(writable._writableState.writable, undefined); assert.strictEqual(writable.writableEnded, false); cb(); -}; +}); assert.strictEqual(writable._writableState.ended, false); assert.strictEqual(writable._writableState.writable, undefined); diff --git a/test/parallel/test-stream-writable-finished-state.js b/test/parallel/test-stream-writable-finished-state.js index b42137ed0b5d6b..b58e155696350b 100644 --- a/test/parallel/test-stream-writable-finished-state.js +++ b/test/parallel/test-stream-writable-finished-state.js @@ -7,11 +7,11 @@ const stream = require('stream'); const writable = new stream.Writable(); -writable._write = (chunk, encoding, cb) => { +writable._write = common.mustCall((chunk, encoding, cb) => { // The state finished should start in false. assert.strictEqual(writable._writableState.finished, false); cb(); -}; +}); writable.on('finish', common.mustCall(() => { assert.strictEqual(writable._writableState.finished, true); diff --git a/test/parallel/test-stream-writable-finished.js b/test/parallel/test-stream-writable-finished.js index 933a80a2f94930..9871d9daa85732 100644 --- a/test/parallel/test-stream-writable-finished.js +++ b/test/parallel/test-stream-writable-finished.js @@ -14,11 +14,11 @@ const assert = require('assert'); { const writable = new Writable(); - writable._write = (chunk, encoding, cb) => { + writable._write = common.mustCall((chunk, encoding, cb) => { // The state finished should start in false. assert.strictEqual(writable.writableFinished, false); cb(); - }; + }); writable.on('finish', common.mustCall(() => { assert.strictEqual(writable.writableFinished, true); diff --git a/test/parallel/test-stream-writable-null.js b/test/parallel/test-stream-writable-null.js index 99419f1cf9a066..44c1b130a2af86 100644 --- a/test/parallel/test-stream-writable-null.js +++ b/test/parallel/test-stream-writable-null.js @@ -9,6 +9,7 @@ class MyWritable extends stream.Writable { super({ autoDestroy: false, ...options }); } _write(chunk, encoding, callback) { + // eslint-disable-next-line node-core/must-call-assert assert.notStrictEqual(chunk, null); callback(); } @@ -40,8 +41,6 @@ class MyWritable extends stream.Writable { } { // Should not throw. 
- const m = new MyWritable({ objectMode: true }).on('error', (e) => { - assert.ifError(e || new Error('should not get here')); - }); + const m = new MyWritable({ objectMode: true }).on('error', common.mustNotCall()); m.write(false, assert.ifError); } diff --git a/test/parallel/test-stream-writev.js b/test/parallel/test-stream-writev.js index 5a42411c6f3a93..b4de798e434b5e 100644 --- a/test/parallel/test-stream-writev.js +++ b/test/parallel/test-stream-writev.js @@ -51,11 +51,10 @@ function test(decode, uncork, multi, next) { function cnt(msg) { expectCount++; const expect = expectCount; - return function(er) { - assert.ifError(er); + return common.mustSucceed(() => { counter++; assert.strictEqual(counter, expect); - }; + }); } const w = new stream.Writable({ decodeStrings: decode }); @@ -112,12 +111,12 @@ function test(decode, uncork, multi, next) { w.end(cnt('end')); - w.on('finish', function() { + w.on('finish', common.mustCall(() => { // Make sure finish comes after all the write cb cnt('finish')(); assert.deepStrictEqual(actualChunks, expectChunks); next(); - }); + })); } { diff --git a/test/parallel/test-stream2-base64-single-char-read-end.js b/test/parallel/test-stream2-base64-single-char-read-end.js index 2e1eb15f9fd010..8643e63bc0f7ef 100644 --- a/test/parallel/test-stream2-base64-single-char-read-end.js +++ b/test/parallel/test-stream2-base64-single-char-read-end.js @@ -20,7 +20,7 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. 'use strict'; -require('../common'); +const common = require('../common'); const { Readable: R, Writable: W } = require('stream'); const assert = require('assert'); @@ -44,10 +44,10 @@ dst._write = function(chunk, enc, cb) { cb(); }; -src.on('end', function() { +src.on('end', common.mustCall(() => { assert.strictEqual(String(Buffer.concat(accum)), 'MQ=='); clearTimeout(timeout); -}); +})); src.pipe(dst); diff --git a/test/parallel/test-stream2-basic.js b/test/parallel/test-stream2-basic.js index 2670deda537c51..f51009241da56e 100644 --- a/test/parallel/test-stream2-basic.js +++ b/test/parallel/test-stream2-basic.js @@ -167,7 +167,7 @@ class TestWriter extends EE { const w = [ new TestWriter(), new TestWriter() ]; let writes = SPLIT; - w[0].on('write', function() { + w[0].on('write', common.mustCallAtLeast(() => { if (--writes === 0) { r.unpipe(); assert.deepStrictEqual(r._readableState.pipes, []); @@ -175,7 +175,7 @@ class TestWriter extends EE { r.pipe(w[1]); assert.deepStrictEqual(r._readableState.pipes, [w[1]]); } - }); + })); let ended = 0; @@ -281,14 +281,14 @@ class TestWriter extends EE { r.push(null); const w1 = new R(); - w1.write = function(chunk) { + w1.write = common.mustCall(function(chunk) { assert.strictEqual(chunk[0], 'one'); w1.emit('close'); process.nextTick(function() { r.pipe(w2); r.pipe(w3); }); - }; + }); w1.end = common.mustNotCall(); r.pipe(w1); @@ -296,7 +296,7 @@ class TestWriter extends EE { const expected = ['two', 'two', 'three', 'three', 'four', 'four']; const w2 = new R(); - w2.write = function(chunk) { + w2.write = common.mustCallAtLeast(function(chunk) { assert.strictEqual(chunk[0], expected.shift()); assert.strictEqual(counter, 0); @@ -312,11 +312,11 @@ class TestWriter extends EE { }, 10); return false; - }; + }); w2.end = common.mustCall(); const w3 = new R(); - w3.write = function(chunk) { + w3.write = common.mustCallAtLeast(function(chunk) { assert.strictEqual(chunk[0], expected.shift()); assert.strictEqual(counter, 1); @@ -332,7 +332,7 @@ class TestWriter extends EE { }, 50); return false; - }; + }); w3.end = 
common.mustCall(function() { assert.strictEqual(counter, 2); assert.strictEqual(expected.length, 0); @@ -354,11 +354,11 @@ class TestWriter extends EE { assert.strictEqual(v, null); const w = new R(); - w.write = function(buffer) { + w.write = common.mustCall(function(buffer) { written = true; assert.strictEqual(ended, false); assert.strictEqual(buffer.toString(), 'foo'); - }; + }); w.end = common.mustCall(function() { ended = true; diff --git a/test/parallel/test-stream2-compatibility.js b/test/parallel/test-stream2-compatibility.js index d760db8b32271c..b58cd54f80ff8e 100644 --- a/test/parallel/test-stream2-compatibility.js +++ b/test/parallel/test-stream2-compatibility.js @@ -20,7 +20,7 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. 'use strict'; -require('../common'); +const common = require('../common'); const { Readable: R, Writable: W } = require('stream'); const assert = require('assert'); @@ -43,11 +43,11 @@ class TestReader extends R { } const reader = new TestReader(); -setImmediate(function() { +setImmediate(common.mustCall(() => { assert.strictEqual(ondataCalled, 1); console.log('ok'); reader.push(null); -}); +})); class TestWriter extends W { constructor() { diff --git a/test/parallel/test-stream2-httpclient-response-end.js b/test/parallel/test-stream2-httpclient-response-end.js index 73667eb3dd2e92..1a9df08a3078f3 100644 --- a/test/parallel/test-stream2-httpclient-response-end.js +++ b/test/parallel/test-stream2-httpclient-response-end.js @@ -6,8 +6,8 @@ const msg = 'Hello'; const server = http.createServer(function(req, res) { res.writeHead(200, { 'Content-Type': 'text/plain' }); res.end(msg); -}).listen(0, function() { - http.get({ port: this.address().port }, function(res) { +}).listen(0, common.mustCall(function() { + http.get({ port: this.address().port }, common.mustCall((res) => { let data = ''; res.on('readable', common.mustCall(function() { console.log('readable event'); @@ -21,5 +21,5 @@ const server = http.createServer(function(req, res) { assert.strictEqual(msg, data); server.close(); })); - }); -}); + })); +})); diff --git a/test/parallel/test-stream2-objects.js b/test/parallel/test-stream2-objects.js index b7ad074628133d..59b28e4a931c4e 100644 --- a/test/parallel/test-stream2-objects.js +++ b/test/parallel/test-stream2-objects.js @@ -216,10 +216,10 @@ function fromArray(list) { // Verify that objects can be written to stream const w = new Writable({ objectMode: true }); - w._write = function(chunk, encoding, cb) { + w._write = common.mustCall(function(chunk, encoding, cb) { assert.deepStrictEqual(chunk, { foo: 'bar' }); cb(); - }; + }); w.on('finish', common.mustCall()); w.write({ foo: 'bar' }); @@ -279,14 +279,14 @@ function fromArray(list) { }); let called = false; - w._write = function(chunk, encoding, cb) { + w._write = common.mustCall(function(chunk, encoding, cb) { assert.strictEqual(chunk, 'foo'); process.nextTick(function() { called = true; cb(); }); - }; + }); w.on('finish', common.mustCall(function() { assert.strictEqual(called, true); diff --git a/test/parallel/test-stream2-push.js b/test/parallel/test-stream2-push.js index 748a77b9c496ba..c3e792ddc5d8cc 100644 --- a/test/parallel/test-stream2-push.js +++ b/test/parallel/test-stream2-push.js @@ -20,7 +20,7 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. 
'use strict'; -require('../common'); +const common = require('../common'); const assert = require('assert'); const { Readable, Writable } = require('stream'); @@ -130,7 +130,7 @@ function end() { source.emit('end'); assert(!reading); writer.end(stream.read()); - setImmediate(function() { + setImmediate(common.mustCall(() => { assert(ended); - }); + })); } diff --git a/test/parallel/test-stream2-readable-wrap.js b/test/parallel/test-stream2-readable-wrap.js index eebe72bc0dd8ad..7efb015ea45413 100644 --- a/test/parallel/test-stream2-readable-wrap.js +++ b/test/parallel/test-stream2-readable-wrap.js @@ -45,13 +45,13 @@ function runTest(highWaterMark, objectMode, produce) { // Make sure pause is only emitted once. let pausing = false; - r.on('pause', () => { + r.on('pause', common.mustCallAtLeast(() => { assert.strictEqual(pausing, false); pausing = true; process.nextTick(() => { pausing = false; }); - }); + })); let flowing; let chunks = 10; diff --git a/test/parallel/test-stream2-writable.js b/test/parallel/test-stream2-writable.js index 6d233ae6b68107..215399c1d39f6a 100644 --- a/test/parallel/test-stream2-writable.js +++ b/test/parallel/test-stream2-writable.js @@ -140,10 +140,7 @@ for (let i = 0; i < chunks.length; i++) { 'utf-16le', undefined ]; - tw.on('finish', function() { - // Got the expected chunks - assert.deepStrictEqual(tw.buffer, chunks); - }); + tw.on('finish', common.mustNotCall()); chunks.forEach(function(chunk, i) { const enc = encodings[i % encodings.length]; @@ -159,11 +156,11 @@ for (let i = 0; i < chunks.length; i++) { decodeStrings: false }); - tw._write = function(chunk, encoding, cb) { + tw._write = common.mustCallAtLeast(function(chunk, encoding, cb) { assert.strictEqual(typeof chunk, 'string'); chunk = Buffer.from(chunk, encoding); return TestWriter.prototype._write.call(this, chunk, encoding, cb); - }; + }); const encodings = [ 'hex', @@ -179,10 +176,7 @@ for (let i = 0; i < chunks.length; i++) { 'utf-16le', undefined ]; - tw.on('finish', function() { - // Got the expected chunks - assert.deepStrictEqual(tw.buffer, chunks); - }); + tw.on('finish', common.mustNotCall()); chunks.forEach(function(chunk, i) { const enc = encodings[i % encodings.length]; @@ -194,10 +188,9 @@ for (let i = 0; i < chunks.length; i++) { { // Verify write callbacks const callbacks = chunks.map(function(chunk, i) { - return [i, function(err) { - assert.strictEqual(err, null); + return [i, common.mustSucceed(() => { callbacks._called[i] = chunk; - }]; + })]; }).reduce(function(set, x) { set[`callback-${x[0]}`] = x[1]; return set; @@ -309,10 +302,10 @@ const helloWorldBuffer = Buffer.from('hello world'); assert.strictEqual(msg.toString(), 'this is the end'); }); let gotError = false; - w.on('error', function(er) { + w.on('error', common.mustCall((er) => { gotError = true; assert.strictEqual(er.message, 'write after end'); - }); + })); w.end('this is the end'); w.end('and so is this'); process.nextTick(common.mustCall(function() { @@ -324,7 +317,7 @@ const helloWorldBuffer = Buffer.from('hello world'); // Verify stream doesn't end while writing const w = new W(); let wrote = false; - w._write = function(chunk, e, cb) { + w._write = common.mustCall(function(chunk, e, cb) { assert.strictEqual(this.writing, undefined); wrote = true; this.writing = true; @@ -332,7 +325,7 @@ const helloWorldBuffer = Buffer.from('hello world'); this.writing = false; cb(); }, 1); - }; + }); w.on('finish', common.mustCall(function() { assert.strictEqual(wrote, true); assert.strictEqual(this.writing, false); diff --git 
a/test/parallel/test-stream3-cork-end.js b/test/parallel/test-stream3-cork-end.js index 0cbc033a2eadc4..e19d05b2840770 100644 --- a/test/parallel/test-stream3-cork-end.js +++ b/test/parallel/test-stream3-cork-end.js @@ -1,5 +1,5 @@ 'use strict'; -require('../common'); +const common = require('../common'); const assert = require('assert'); const stream = require('stream'); const Writable = stream.Writable; @@ -18,7 +18,7 @@ let seenEnd = false; const w = new Writable(); // Let's arrange to store the chunks. -w._write = function(chunk, encoding, cb) { +w._write = common.mustCallAtLeast(function(chunk, encoding, cb) { // Stream end event is not seen before the last write. assert.ok(!seenEnd); // Default encoding given none was specified. @@ -26,7 +26,7 @@ w._write = function(chunk, encoding, cb) { seenChunks.push(chunk); cb(); -}; +}); // Let's record the stream end event. w.on('finish', () => { seenEnd = true; @@ -37,13 +37,13 @@ function writeChunks(remainingChunks, callback) { let writeState; if (writeChunk) { - setImmediate(() => { + setImmediate(common.mustCall(() => { writeState = w.write(writeChunk); // We were not told to stop writing. assert.ok(writeState); writeChunks(remainingChunks, callback); - }); + })); } else { callback(); } @@ -60,7 +60,7 @@ seenChunks = []; w.cork(); // Write the bufferedChunks. -writeChunks(inputChunks, () => { +writeChunks(inputChunks, common.mustCall(() => { // Should not have seen anything yet. assert.strictEqual(seenChunks.length, 0); @@ -84,8 +84,8 @@ writeChunks(inputChunks, () => { assert.ok(seen.equals(expected)); } - setImmediate(() => { + setImmediate(common.mustCall(() => { // Stream should have ended in next tick. assert.ok(seenEnd); - }); -}); + })); +})); diff --git a/test/parallel/test-stream3-cork-uncork.js b/test/parallel/test-stream3-cork-uncork.js index dfb901af419803..1328cbdaf0070a 100644 --- a/test/parallel/test-stream3-cork-uncork.js +++ b/test/parallel/test-stream3-cork-uncork.js @@ -1,5 +1,5 @@ 'use strict'; -require('../common'); +const common = require('../common'); const assert = require('assert'); const stream = require('stream'); const Writable = stream.Writable; @@ -18,13 +18,13 @@ let seenEnd = false; const w = new Writable(); // Let's arrange to store the chunks. -w._write = function(chunk, encoding, cb) { +w._write = common.mustCallAtLeast(function(chunk, encoding, cb) { // Default encoding given none was specified. assert.strictEqual(encoding, 'buffer'); seenChunks.push(chunk); cb(); -}; +}); // Let's record the stream end event. w.on('finish', () => { seenEnd = true; @@ -35,13 +35,13 @@ function writeChunks(remainingChunks, callback) { let writeState; if (writeChunk) { - setImmediate(() => { + setImmediate(common.mustCall(() => { writeState = w.write(writeChunk); // We were not told to stop writing. assert.ok(writeState); writeChunks(remainingChunks, callback); - }); + })); } else { callback(); } @@ -58,7 +58,7 @@ seenChunks = []; w.cork(); // Write the bufferedChunks. -writeChunks(inputChunks, () => { +writeChunks(inputChunks, common.mustCall(() => { // Should not have seen anything yet. assert.strictEqual(seenChunks.length, 0); @@ -79,8 +79,8 @@ writeChunks(inputChunks, () => { assert.ok(seen.equals(expected)); } - setImmediate(() => { + setImmediate(common.mustCall(() => { // The stream should not have been ended. 
assert.ok(!seenEnd); - }); -}); + })); +})); diff --git a/test/parallel/test-stream3-pause-then-read.js b/test/parallel/test-stream3-pause-then-read.js index 1a3854722052d7..b692c92762b0d8 100644 --- a/test/parallel/test-stream3-pause-then-read.js +++ b/test/parallel/test-stream3-pause-then-read.js @@ -20,7 +20,7 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. 'use strict'; -require('../common'); +const common = require('../common'); const assert = require('assert'); const stream = require('stream'); @@ -108,10 +108,10 @@ function pipeLittle() { console.error('pipe a little'); const w = new Writable(); let written = 0; - w.on('finish', () => { + w.on('finish', common.mustCall(() => { assert.strictEqual(written, 200); setImmediate(read1234); - }); + })); w._write = function(chunk, encoding, cb) { written += chunk.length; if (written >= 200) { @@ -160,11 +160,11 @@ function pipe() { written += chunk.length; cb(); }; - w.on('finish', () => { + w.on('finish', common.mustCall(() => { console.error('written', written, totalPushed); assert.strictEqual(written, expectEndingData); assert.strictEqual(totalPushed, expectTotalData); console.log('ok'); - }); + })); r.pipe(w); } diff --git a/test/parallel/test-stream3-pipeline-async-iterator.js b/test/parallel/test-stream3-pipeline-async-iterator.js index ad1e4647777bcd..fb505646c59f78 100644 --- a/test/parallel/test-stream3-pipeline-async-iterator.js +++ b/test/parallel/test-stream3-pipeline-async-iterator.js @@ -21,6 +21,7 @@ const { pipeline } = require('node:stream/promises'); myCustomWritable, ); // Importing here to avoid initializing streams + // eslint-disable-next-line node-core/must-call-assert require('assert').deepStrictEqual(messages, ['Hello', 'World']); })() .then(require('../common').mustCall()); From 0ff565a8d05c72d51f273ebf31a52de312c4ac09 Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Tue, 18 Nov 2025 00:17:14 +0100 Subject: [PATCH 026/115] test: ensure assertions are reached on more tests PR-URL: https://github.com/nodejs/node/pull/60763 Reviewed-By: Colin Ihrig Reviewed-By: Benjamin Gruenbaum --- test/eslint.config_partial.mjs | 2 +- .../test-readable-from-iterator-closing.js | 24 ++++------ ...st-readable-from-web-enqueue-then-close.js | 6 +-- test/parallel/test-readable-from.js | 46 +++++++++---------- test/parallel/test-readline-interface.js | 16 +++---- test/parallel/test-readline-keys.js | 18 ++++---- .../test-readline-promises-interface.js | 16 +++---- .../test-readline-promises-tab-complete.js | 8 ++-- test/parallel/test-readline-set-raw-mode.js | 6 +-- test/parallel/test-readline-tab-complete.js | 12 ++--- test/parallel/test-release-changelog.js | 4 +- test/parallel/test-repl-autocomplete.js | 12 ++--- test/parallel/test-repl-autolibs.js | 8 ++-- test/parallel/test-repl-colors.js | 12 ++--- test/parallel/test-repl-dynamic-import.js | 4 +- test/parallel/test-repl-envvars.js | 8 ++-- test/parallel/test-repl-harmony.js | 6 +-- test/parallel/test-repl-history-navigation.js | 12 ++--- test/parallel/test-repl-null-thrown.js | 6 +-- test/parallel/test-repl-permission-model.js | 12 ++--- test/parallel/test-repl-persistent-history.js | 20 ++++---- ...repl-programmatic-history-setup-history.js | 12 ++--- .../test-repl-programmatic-history.js | 20 ++++---- test/parallel/test-repl-reverse-search.js | 12 ++--- test/parallel/test-repl-setprompt.js | 6 +-- test/parallel/test-repl-sigint-nested-eval.js | 4 +- test/parallel/test-repl-sigint.js | 4 +- .../test-repl-syntax-error-handling.js | 10 ++-- 
.../parallel/test-repl-tab-complete-buffer.js | 4 +- .../parallel/test-repl-tab-complete-import.js | 4 +- .../test-repl-tab-complete-require.js | 14 +++--- test/parallel/test-repl-tab.js | 5 +- .../test-repl-uncaught-exception-async.js | 6 +-- test/parallel/test-repl-underscore.js | 6 +-- .../test-repl-unexpected-token-recoverable.js | 6 +-- test/parallel/test-require-symlink.js | 8 ++-- test/parallel/test-runner-aliases.js | 8 ++-- test/parallel/test-runner-coverage.js | 8 ++-- .../parallel/test-runner-custom-assertions.js | 6 +-- .../parallel/test-runner-filetest-location.js | 12 ++--- .../test-runner-force-exit-failure.js | 12 ++--- test/parallel/test-runner-force-exit-flush.js | 32 ++++++------- test/parallel/test-runner-misc.js | 4 +- test/parallel/test-runner-mocking.js | 16 +++---- ...test-runner-no-isolation-different-cwd.mjs | 4 +- test/parallel/test-runner-no-isolation.mjs | 4 +- test/parallel/test-runner-root-duration.js | 14 +++--- .../parallel/test-runner-run-global-hooks.mjs | 14 +++--- test/parallel/test-runner-string-to-regexp.js | 12 ++--- test/parallel/test-runner-test-filepath.js | 18 ++++---- test/parallel/test-runner-test-fullname.js | 24 +++++----- test/parallel/test-runner-todo-skip-tests.js | 6 +-- test/parallel/test-runner-v8-deserializer.mjs | 10 ++-- 53 files changed, 283 insertions(+), 300 deletions(-) diff --git a/test/eslint.config_partial.mjs b/test/eslint.config_partial.mjs index 52f17349b500b7..bfe1f3d0864bd5 100644 --- a/test/eslint.config_partial.mjs +++ b/test/eslint.config_partial.mjs @@ -202,7 +202,7 @@ export default [ `test/parallel/test-{${ // 0x61 is code for 'a', this generates a string enumerating latin letters: 'a*,b*,…' Array.from({ length: 13 }, (_, i) => String.fromCharCode(0x61 + i, 42)).join(',') - },n*,${ + },n*,r*,${ // 0x61 is code for 'a', this generates a string enumerating latin letters: 'z*,y*,…' Array.from({ length: 8 }, (_, i) => String.fromCharCode(0x61 + 25 - i, 42)).join(',') }}.{js,mjs,cjs}`, diff --git a/test/parallel/test-readable-from-iterator-closing.js b/test/parallel/test-readable-from-iterator-closing.js index 02252ffe56854c..ff2746dc4601ea 100644 --- a/test/parallel/test-readable-from-iterator-closing.js +++ b/test/parallel/test-readable-from-iterator-closing.js @@ -2,7 +2,7 @@ const { mustCall, mustNotCall } = require('../common'); const { Readable } = require('stream'); -const { strictEqual } = require('assert'); +const assert = require('assert'); async function asyncSupport() { const finallyMustCall = mustCall(); @@ -20,7 +20,7 @@ async function asyncSupport() { for await (const chunk of stream) { bodyMustCall(); - strictEqual(chunk, 'a'); + assert.strictEqual(chunk, 'a'); break; } } @@ -41,7 +41,7 @@ async function syncSupport() { for await (const chunk of stream) { bodyMustCall(); - strictEqual(chunk, 'a'); + assert.strictEqual(chunk, 'a'); break; } } @@ -66,7 +66,7 @@ async function syncPromiseSupport() { for await (const chunk of stream) { bodyMustCall(); - strictEqual(chunk, 'a'); + assert.strictEqual(chunk, 'a'); break; } } @@ -130,7 +130,6 @@ async function noReturnAfterThrow() { async function closeStreamWhileNextIsPending() { const finallyMustCall = mustCall(); - const dataMustCall = mustCall(); let resolveDestroy; const destroyed = @@ -153,10 +152,9 @@ async function closeStreamWhileNextIsPending() { const stream = Readable.from(infiniteGenerate()); - stream.on('data', (data) => { - dataMustCall(); - strictEqual(data, 'a'); - }); + stream.on('data', mustCall((data) => { + assert.strictEqual(data, 'a'); + 
})); yielded.then(() => { stream.destroy(); @@ -166,7 +164,6 @@ async function closeStreamWhileNextIsPending() { async function closeAfterNullYielded() { const finallyMustCall = mustCall(); - const dataMustCall = mustCall(3); function* generate() { try { @@ -180,10 +177,9 @@ async function closeAfterNullYielded() { const stream = Readable.from(generate()); - stream.on('data', (chunk) => { - dataMustCall(); - strictEqual(chunk, 'a'); - }); + stream.on('data', mustCall((chunk) => { + assert.strictEqual(chunk, 'a'); + }, 3)); } Promise.all([ diff --git a/test/parallel/test-readable-from-web-enqueue-then-close.js b/test/parallel/test-readable-from-web-enqueue-then-close.js index e96df70c9eb9d1..67b861596741b2 100644 --- a/test/parallel/test-readable-from-web-enqueue-then-close.js +++ b/test/parallel/test-readable-from-web-enqueue-then-close.js @@ -1,7 +1,7 @@ 'use strict'; const { mustCall } = require('../common'); const { Readable, Duplex } = require('stream'); -const { strictEqual } = require('assert'); +const assert = require('assert'); function start(controller) { controller.enqueue(new Uint8Array(1)); @@ -10,7 +10,7 @@ function start(controller) { Readable.fromWeb(new ReadableStream({ start })) .on('data', mustCall((d) => { - strictEqual(d.length, 1); + assert.strictEqual(d.length, 1); })) .on('end', mustCall()) .resume(); @@ -20,7 +20,7 @@ Duplex.fromWeb({ writable: new WritableStream({ write(chunk) {} }) }) .on('data', mustCall((d) => { - strictEqual(d.length, 1); + assert.strictEqual(d.length, 1); })) .on('end', mustCall()) .resume(); diff --git a/test/parallel/test-readable-from.js b/test/parallel/test-readable-from.js index b844574dc9e347..1d812ade3f23e6 100644 --- a/test/parallel/test-readable-from.js +++ b/test/parallel/test-readable-from.js @@ -3,11 +3,11 @@ const { mustCall } = require('../common'); const { once } = require('events'); const { Readable } = require('stream'); -const { strictEqual, throws } = require('assert'); +const assert = require('assert'); const common = require('../common'); { - throws(() => { + assert.throws(() => { Readable.from(null); }, /ERR_INVALID_ARG_TYPE/); } @@ -24,7 +24,7 @@ async function toReadableBasicSupport() { const expected = ['a', 'b', 'c']; for await (const chunk of stream) { - strictEqual(chunk, expected.shift()); + assert.strictEqual(chunk, expected.shift()); } } @@ -40,7 +40,7 @@ async function toReadableSyncIterator() { const expected = ['a', 'b', 'c']; for await (const chunk of stream) { - strictEqual(chunk, expected.shift()); + assert.strictEqual(chunk, expected.shift()); } } @@ -56,7 +56,7 @@ async function toReadablePromises() { const expected = ['a', 'b', 'c']; for await (const chunk of stream) { - strictEqual(chunk, expected.shift()); + assert.strictEqual(chunk, expected.shift()); } } @@ -66,7 +66,7 @@ async function toReadableString() { const expected = ['abc']; for await (const chunk of stream) { - strictEqual(chunk, expected.shift()); + assert.strictEqual(chunk, expected.shift()); } } @@ -76,7 +76,7 @@ async function toReadableBuffer() { const expected = ['abc']; for await (const chunk of stream) { - strictEqual(chunk.toString(), expected.shift()); + assert.strictEqual(chunk.toString(), expected.shift()); } } @@ -92,14 +92,14 @@ async function toReadableOnData() { let iterations = 0; const expected = ['a', 'b', 'c']; - stream.on('data', (chunk) => { + stream.on('data', common.mustCallAtLeast((chunk) => { iterations++; - strictEqual(chunk, expected.shift()); - }); + assert.strictEqual(chunk, expected.shift()); + })); await 
once(stream, 'end'); - strictEqual(iterations, 3); + assert.strictEqual(iterations, 3); } async function toReadableOnDataNonObject() { @@ -114,15 +114,15 @@ async function toReadableOnDataNonObject() { let iterations = 0; const expected = ['a', 'b', 'c']; - stream.on('data', (chunk) => { + stream.on('data', common.mustCallAtLeast((chunk) => { iterations++; - strictEqual(chunk instanceof Buffer, true); - strictEqual(chunk.toString(), expected.shift()); - }); + assert.strictEqual(chunk instanceof Buffer, true); + assert.strictEqual(chunk.toString(), expected.shift()); + })); await once(stream, 'end'); - strictEqual(iterations, 3); + assert.strictEqual(iterations, 3); } async function destroysTheStreamWhenThrowing() { @@ -135,8 +135,8 @@ async function destroysTheStreamWhenThrowing() { stream.read(); const [err] = await once(stream, 'error'); - strictEqual(err.message, 'kaboom'); - strictEqual(stream.destroyed, true); + assert.strictEqual(err.message, 'kaboom'); + assert.strictEqual(stream.destroyed, true); } @@ -162,7 +162,7 @@ async function asTransformStream() { const expected = ['A', 'B', 'C']; for await (const chunk of stream) { - strictEqual(chunk, expected.shift()); + assert.strictEqual(chunk, expected.shift()); } } @@ -179,18 +179,18 @@ async function endWithError() { try { for await (const chunk of stream) { - strictEqual(chunk, expected.shift()); + assert.strictEqual(chunk, expected.shift()); } throw new Error(); } catch (err) { - strictEqual(expected.length, 0); - strictEqual(err, 'Boum'); + assert.strictEqual(expected.length, 0); + assert.strictEqual(err, 'Boum'); } } async function destroyingStreamWithErrorThrowsInGenerator() { const validateError = common.mustCall((e) => { - strictEqual(e, 'Boum'); + assert.strictEqual(e, 'Boum'); }); async function* generate() { try { diff --git a/test/parallel/test-readline-interface.js b/test/parallel/test-readline-interface.js index 5a7b87cd75a804..1adee53bfe4f04 100644 --- a/test/parallel/test-readline-interface.js +++ b/test/parallel/test-readline-interface.js @@ -284,10 +284,10 @@ function assertCursorRowsAndCols(rli, rows, cols) { const expectedLines = ['foo', 'bar', 'baz', 'bar', 'bat', 'bat']; // ['foo', 'baz', 'bar', bat']; let callCount = 0; - rli.on('line', (line) => { + rli.on('line', common.mustCallAtLeast((line) => { assert.strictEqual(line, expectedLines[callCount]); callCount++; - }); + })); fi.emit('data', `${expectedLines.join('\n')}\n`); assert.strictEqual(callCount, expectedLines.length); fi.emit('keypress', '.', { name: 'up' }); // 'bat' @@ -360,10 +360,10 @@ function assertCursorRowsAndCols(rli, rows, cols) { }); const expectedLines = ['foo', 'bar', 'baz', 'bar', 'bat', 'bat']; let callCount = 0; - rli.on('line', (line) => { + rli.on('line', common.mustCallAtLeast((line) => { assert.strictEqual(line, expectedLines[callCount]); callCount++; - }); + })); fi.emit('data', `${expectedLines.join('\n')}\n`); assert.strictEqual(callCount, expectedLines.length); fi.emit('keypress', '.', { name: 'up' }); // 'bat' @@ -968,10 +968,10 @@ for (let i = 0; i < 12; i++) { { const [rli, fi] = getInterface({ terminal }); let called = false; - rli.on('line', (line) => { + rli.on('line', common.mustCallAtLeast((line) => { called = true; assert.strictEqual(line, 'a'); - }); + })); fi.emit('data', 'a'); assert.ok(!called); fi.emit('data', '\n'); @@ -1020,10 +1020,10 @@ for (let i = 0; i < 12; i++) { const buf = Buffer.from('☮', 'utf8'); const [rli, fi] = getInterface({ terminal }); let callCount = 0; - rli.on('line', (line) => { + 
rli.on('line', common.mustCallAtLeast((line) => { callCount++; assert.strictEqual(line, buf.toString('utf8')); - }); + })); for (const i of buf) { fi.emit('data', Buffer.from([i])); } diff --git a/test/parallel/test-readline-keys.js b/test/parallel/test-readline-keys.js index 28b5846d4eb58f..4379193b82f1ed 100644 --- a/test/parallel/test-readline-keys.js +++ b/test/parallel/test-readline-keys.js @@ -49,9 +49,9 @@ function addTest(sequences, expectedKeys) { // (addKeyIntervalTest(..)(noop)))() // where noop is a terminal function(() => {}). -const addKeyIntervalTest = (sequences, expectedKeys, interval = 550, - assertDelay = 550) => { - const fn = common.mustCall((next) => () => { +function addKeyIntervalTest(sequences, expectedKeys, interval = 550, + assertDelay = 550) { + const fn = common.mustCall((next) => common.mustCall(() => { if (!Array.isArray(sequences)) { sequences = [ sequences ]; @@ -66,21 +66,21 @@ const addKeyIntervalTest = (sequences, expectedKeys, interval = 550, const keys = []; fi.on('keypress', (s, k) => keys.push(k)); - const emitKeys = ([head, ...tail]) => { + const emitKeys = common.mustCallAtLeast(([head, ...tail]) => { if (head) { fi.write(head); setTimeout(() => emitKeys(tail), interval); } else { - setTimeout(() => { + setTimeout(common.mustCall(() => { next(); assert.deepStrictEqual(keys, expectedKeys); - }, assertDelay); + }), assertDelay); } - }; + }); emitKeys(sequences); - }); + })); return fn; -}; +} // Regular alphanumerics addTest('io.JS', [ diff --git a/test/parallel/test-readline-promises-interface.js b/test/parallel/test-readline-promises-interface.js index 12d72f49735401..19c445572b9806 100644 --- a/test/parallel/test-readline-promises-interface.js +++ b/test/parallel/test-readline-promises-interface.js @@ -261,10 +261,10 @@ function assertCursorRowsAndCols(rli, rows, cols) { const expectedLines = ['foo', 'bar', 'baz', 'bar', 'bat', 'bat']; // ['foo', 'baz', 'bar', bat']; let callCount = 0; - rli.on('line', function(line) { + rli.on('line', common.mustCallAtLeast((line) => { assert.strictEqual(line, expectedLines[callCount]); callCount++; - }); + })); fi.emit('data', `${expectedLines.join('\n')}\n`); assert.strictEqual(callCount, expectedLines.length); fi.emit('keypress', '.', { name: 'up' }); // 'bat' @@ -337,10 +337,10 @@ function assertCursorRowsAndCols(rli, rows, cols) { }); const expectedLines = ['foo', 'bar', 'baz', 'bar', 'bat', 'bat']; let callCount = 0; - rli.on('line', function(line) { + rli.on('line', common.mustCallAtLeast((line) => { assert.strictEqual(line, expectedLines[callCount]); callCount++; - }); + })); fi.emit('data', `${expectedLines.join('\n')}\n`); assert.strictEqual(callCount, expectedLines.length); fi.emit('keypress', '.', { name: 'up' }); // 'bat' @@ -840,10 +840,10 @@ for (let i = 0; i < 12; i++) { { const [rli, fi] = getInterface({ terminal }); let called = false; - rli.on('line', (line) => { + rli.on('line', common.mustCallAtLeast((line) => { called = true; assert.strictEqual(line, 'a'); - }); + })); fi.emit('data', 'a'); assert.ok(!called); fi.emit('data', '\n'); @@ -892,10 +892,10 @@ for (let i = 0; i < 12; i++) { const buf = Buffer.from('☮', 'utf8'); const [rli, fi] = getInterface({ terminal }); let callCount = 0; - rli.on('line', function(line) { + rli.on('line', common.mustCallAtLeast((line) => { callCount++; assert.strictEqual(line, buf.toString('utf8')); - }); + })); for (const i of buf) { fi.emit('data', Buffer.from([i])); } diff --git a/test/parallel/test-readline-promises-tab-complete.js 
b/test/parallel/test-readline-promises-tab-complete.js index 602bdd9e7965bf..13c2c905a9796d 100644 --- a/test/parallel/test-readline-promises-tab-complete.js +++ b/test/parallel/test-readline-promises-tab-complete.js @@ -75,10 +75,10 @@ if (process.env.TERM === 'dumb') { rli.on('line', common.mustNotCall()); for (const character of `${char}\t\t`) { fi.emit('data', character); - queueMicrotask(() => { + queueMicrotask(common.mustCall(() => { assert.strictEqual(output, expectations.shift()); output = ''; - }); + })); } fi.end(); }); @@ -110,9 +110,9 @@ if (process.env.TERM === 'dumb') { rli.on('line', common.mustNotCall()); fi.emit('data', '\t'); - queueMicrotask(() => { + queueMicrotask(common.mustCall(() => { assert.match(output, /^Tab completion error: Error: message/); output = ''; - }); + })); fi.end(); } diff --git a/test/parallel/test-readline-set-raw-mode.js b/test/parallel/test-readline-set-raw-mode.js index de47d14b03de8a..57015077781fd0 100644 --- a/test/parallel/test-readline-set-raw-mode.js +++ b/test/parallel/test-readline-set-raw-mode.js @@ -20,7 +20,7 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. 'use strict'; -require('../common'); +const common = require('../common'); const assert = require('assert'); const readline = require('readline'); const Stream = require('stream'); @@ -31,10 +31,10 @@ let rawModeCalled = false; let resumeCalled = false; let pauseCalled = false; -stream.setRawMode = function(mode) { +stream.setRawMode = common.mustCallAtLeast(function(mode) { rawModeCalled = true; assert.strictEqual(mode, expectedRawMode); -}; +}); stream.resume = function() { resumeCalled = true; }; diff --git a/test/parallel/test-readline-tab-complete.js b/test/parallel/test-readline-tab-complete.js index 5b7b19102f412a..83b5da349d0d06 100644 --- a/test/parallel/test-readline-tab-complete.js +++ b/test/parallel/test-readline-tab-complete.js @@ -96,10 +96,10 @@ if (process.env.TERM === 'dumb') { rli.on('line', common.mustNotCall()); fi.emit('data', '\t'); - queueMicrotask(() => { + queueMicrotask(common.mustCall(() => { assert.match(output, /^Tab completion error: Error: message/); output = ''; - }); + })); rli.close(); } @@ -129,12 +129,12 @@ if (process.env.TERM === 'dumb') { rli.on('line', common.mustNotCall()); fi.emit('data', 'input'); - queueMicrotask(() => { + queueMicrotask(common.mustCall(() => { fi.emit('data', '\t'); - queueMicrotask(() => { + queueMicrotask(common.mustCall(() => { assert.match(output, /> Input/); output = ''; rli.close(); - }); - }); + })); + })); } diff --git a/test/parallel/test-release-changelog.js b/test/parallel/test-release-changelog.js index c78efeeff0cf68..6b8e6ece43af52 100644 --- a/test/parallel/test-release-changelog.js +++ b/test/parallel/test-release-changelog.js @@ -7,12 +7,12 @@ const assert = require('assert'); const fs = require('fs'); const path = require('path'); -const getDefine = (text, name) => { +const getDefine = common.mustCallAtLeast((text, name) => { const regexp = new RegExp(`#define\\s+${RegExp.escape(name)}\\s+(.*)`); const match = regexp.exec(text); assert.notStrictEqual(match, null); return match[1]; -}; +}); const srcRoot = path.join(__dirname, '..', '..'); const mainChangelogFile = path.join(srcRoot, 'CHANGELOG.md'); diff --git a/test/parallel/test-repl-autocomplete.js b/test/parallel/test-repl-autocomplete.js index a68322c501e264..de801a8e12f095 100644 --- a/test/parallel/test-repl-autocomplete.js +++ b/test/parallel/test-repl-autocomplete.js @@ -152,7 +152,7 @@ function runTest() { REPL.createInternalRepl(opts.env, { 
input: new ActionStream(), output: new stream.Writable({ - write(chunk, _, next) { + write: common.mustCallAtLeast((chunk, _, next) => { const output = chunk.toString(); if (!opts.showEscapeCodes && @@ -177,7 +177,7 @@ function runTest() { } next(); - } + }), }), allowBlockingCompletions: true, completer: opts.completer, @@ -185,13 +185,13 @@ function runTest() { useColors: false, preview: opts.preview, terminal: true - }, function(err, repl) { + }, common.mustCall((err, repl) => { if (err) { console.error(`Failed test # ${numtests - tests.length}`); throw err; } - repl.once('close', () => { + repl.once('close', common.mustCall(() => { if (opts.clean) cleanupTmpFile(); @@ -203,7 +203,7 @@ function runTest() { } setImmediate(runTestWrap, true); - }); + })); if (opts.columns) { Object.defineProperty(repl, 'columns', { @@ -212,7 +212,7 @@ function runTest() { }); } repl.input.run(opts.test); - }); + })); } // run the tests diff --git a/test/parallel/test-repl-autolibs.js b/test/parallel/test-repl-autolibs.js index 5a34e1aecb1c8c..d3ab60a1b1d0c7 100644 --- a/test/parallel/test-repl-autolibs.js +++ b/test/parallel/test-repl-autolibs.js @@ -33,7 +33,7 @@ test1(); function test1() { let gotWrite = false; - putIn.write = function(data) { + putIn.write = common.mustCall(function(data) { gotWrite = true; if (data.length) { @@ -44,7 +44,7 @@ function test1() { assert.strictEqual(globalThis.fs, require('fs')); test2(); } - }; + }); assert(!gotWrite); putIn.run(['fs']); assert(gotWrite); @@ -52,7 +52,7 @@ function test1() { function test2() { let gotWrite = false; - putIn.write = function(data) { + putIn.write = common.mustCallAtLeast(function(data) { gotWrite = true; if (data.length) { // REPL response error message @@ -60,7 +60,7 @@ function test2() { // Original value wasn't overwritten assert.strictEqual(val, globalThis.url); } - }; + }); const val = {}; globalThis.url = val; common.allowGlobals(val); diff --git a/test/parallel/test-repl-colors.js b/test/parallel/test-repl-colors.js index cdbca5790ad5aa..226585b377b107 100644 --- a/test/parallel/test-repl-colors.js +++ b/test/parallel/test-repl-colors.js @@ -2,7 +2,7 @@ require('../common'); const { Duplex } = require('stream'); const { inspect } = require('util'); -const { strictEqual } = require('assert'); +const assert = require('assert'); const { REPLServer } = require('repl'); let output = ''; @@ -25,9 +25,9 @@ process.on('exit', function() { // https://github.com/nodejs/node/pull/16485#issuecomment-350428638 // The color setting of the REPL should not have leaked over into // the color setting of `util.inspect.defaultOptions`. 
- strictEqual(output.includes(`"'string'"`), true); - strictEqual(output.includes(`'\u001b[32m\\'string\\'\u001b[39m'`), false); - strictEqual(inspect.defaultOptions.colors, false); - strictEqual(repl.writer.options.colors, true); - strictEqual(repl2.writer.options.colors, true); + assert.strictEqual(output.includes(`"'string'"`), true); + assert.strictEqual(output.includes(`'\u001b[32m\\'string\\'\u001b[39m'`), false); + assert.strictEqual(inspect.defaultOptions.colors, false); + assert.strictEqual(repl.writer.options.colors, true); + assert.strictEqual(repl2.writer.options.colors, true); }); diff --git a/test/parallel/test-repl-dynamic-import.js b/test/parallel/test-repl-dynamic-import.js index a043e31bf5b2d0..3ce21d662f6f56 100644 --- a/test/parallel/test-repl-dynamic-import.js +++ b/test/parallel/test-repl-dynamic-import.js @@ -14,7 +14,7 @@ setTimeout(() => { child.stdin.write('\nimport("fs");\n'); child.stdin.write('\nprocess.exit(0);\n'); }, common.platformTimeout(50)); -child.on('exit', (code, signal) => { +child.on('exit', common.mustCall((code, signal) => { assert.strictEqual(code, 0); assert.strictEqual(signal, null); -}); +})); diff --git a/test/parallel/test-repl-envvars.js b/test/parallel/test-repl-envvars.js index 4efa04072d5fee..ba4de43b20f987 100644 --- a/test/parallel/test-repl-envvars.js +++ b/test/parallel/test-repl-envvars.js @@ -2,7 +2,7 @@ // Flags: --expose-internals -require('../common'); +const common = require('../common'); const stream = require('stream'); const { describe, test } = require('node:test'); const REPL = require('internal/repl'); @@ -66,9 +66,7 @@ function run(test) { Object.assign(process.env, env); return new Promise((resolve) => { - REPL.createInternalRepl(process.env, opts, function(err, repl) { - assert.ifError(err); - + REPL.createInternalRepl(process.env, opts, common.mustSucceed((repl) => { assert.strictEqual(repl.terminal, expected.terminal, `Expected ${inspect(expected)} with ${inspect(env)}`); assert.strictEqual(repl.useColors, expected.useColors, @@ -80,7 +78,7 @@ function run(test) { } repl.close(); resolve(); - }); + })); }); } diff --git a/test/parallel/test-repl-harmony.js b/test/parallel/test-repl-harmony.js index f03cd03d0f97b2..ee42957fe03908 100644 --- a/test/parallel/test-repl-harmony.js +++ b/test/parallel/test-repl-harmony.js @@ -20,7 +20,7 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. 
'use strict'; -require('../common'); +const common = require('../common'); const assert = require('assert'); const spawn = require('child_process').spawn; @@ -42,9 +42,9 @@ let out = ''; child.stdout.on('data', (d) => { out += d; }); -child.stdout.on('end', () => { +child.stdout.on('end', common.mustCall(() => { assert.match(out, expectOut); console.log('ok'); -}); +})); child.stdin.end(input); diff --git a/test/parallel/test-repl-history-navigation.js b/test/parallel/test-repl-history-navigation.js index 77be0b0fc05e59..88c1058c0b29b4 100644 --- a/test/parallel/test-repl-history-navigation.js +++ b/test/parallel/test-repl-history-navigation.js @@ -867,7 +867,7 @@ function runTest() { REPL.createInternalRepl(opts.env, { input: new ActionStream(), output: new stream.Writable({ - write(chunk, _, next) { + write: common.mustCallAtLeast((chunk, _, next) => { const output = chunk.toString(); if (!opts.showEscapeCodes && @@ -892,20 +892,20 @@ function runTest() { } next(); - } + }), }), completer: opts.completer, prompt, useColors: false, preview: opts.preview, terminal: true - }, function(err, repl) { + }, common.mustCall((err, repl) => { if (err) { console.error(`Failed test # ${numtests - tests.length}`); throw err; } - repl.once('close', () => { + repl.once('close', common.mustCall(() => { if (opts.clean) cleanupTmpFile(); @@ -917,7 +917,7 @@ function runTest() { } setImmediate(runTestWrap, true); - }); + })); if (opts.columns) { Object.defineProperty(repl, 'columns', { @@ -926,7 +926,7 @@ function runTest() { }); } repl.input.run(opts.test); - }); + })); } // run the tests diff --git a/test/parallel/test-repl-null-thrown.js b/test/parallel/test-repl-null-thrown.js index 4babbab1e3989e..e62c3706e02be8 100644 --- a/test/parallel/test-repl-null-thrown.js +++ b/test/parallel/test-repl-null-thrown.js @@ -1,5 +1,5 @@ 'use strict'; -require('../common'); +const common = require('../common'); const assert = require('assert'); const { startNewREPLServer } = require('../common/repl'); @@ -8,6 +8,6 @@ const { replServer, output } = startNewREPLServer(); replServer.emit('line', 'process.nextTick(() => { throw null; })'); replServer.emit('line', '.exit'); -setTimeout(() => { +setTimeout(common.mustCall(() => { assert(output.accumulator.includes('Uncaught null')); -}, 0); +}), 0); diff --git a/test/parallel/test-repl-permission-model.js b/test/parallel/test-repl-permission-model.js index ab5c7bff06cde8..b29a03fba5745e 100644 --- a/test/parallel/test-repl-permission-model.js +++ b/test/parallel/test-repl-permission-model.js @@ -78,7 +78,7 @@ function runTest() { REPL.createInternalRepl(opts.env, { input: new ActionStream(), output: new stream.Writable({ - write(chunk, _, next) { + write: common.mustCallAtLeast((chunk, _, next) => { const output = chunk.toString(); if (!opts.showEscapeCodes && @@ -103,7 +103,7 @@ function runTest() { } next(); - } + }), }), allowBlockingCompletions: true, completer: opts.completer, @@ -111,13 +111,13 @@ function runTest() { useColors: false, preview: opts.preview, terminal: true - }, function(err, repl) { + }, common.mustCall((err, repl) => { if (err) { console.error(`Failed test # ${numtests - tests.length}`); throw err; } - repl.once('close', () => { + repl.once('close', common.mustCall(() => { if (opts.checkTotal) { assert.deepStrictEqual(lastChunks, expected); @@ -127,10 +127,10 @@ function runTest() { } setImmediate(runTestWrap, true); - }); + })); repl.input.run(opts.test); - }); + })); } // run the tests diff --git a/test/parallel/test-repl-persistent-history.js 
b/test/parallel/test-repl-persistent-history.js index efd1aa141357c2..0807a10a08a8a6 100644 --- a/test/parallel/test-repl-persistent-history.js +++ b/test/parallel/test-repl-persistent-history.js @@ -137,14 +137,12 @@ const tests = [ expected: [prompt, replFailedRead, prompt, replDisabled, prompt] }, { - before: function before() { + before: common.mustCall(function before() { if (common.isWindows) { const execSync = require('child_process').execSync; - execSync(`ATTRIB +H "${emptyHiddenHistoryPath}"`, (err) => { - assert.ifError(err); - }); + execSync(`ATTRIB +H "${emptyHiddenHistoryPath}"`); } - }, + }), env: { NODE_REPL_HISTORY: emptyHiddenHistoryPath }, test: [UP], expected: [prompt] @@ -216,7 +214,7 @@ function runTest(assertCleaned) { REPL.createInternalRepl(env, { input: new ActionStream(), output: new stream.Writable({ - write(chunk, _, next) { + write: common.mustCallAtLeast((chunk, _, next) => { const output = chunk.toString(); // Ignore escapes and blank lines @@ -230,12 +228,12 @@ function runTest(assertCleaned) { throw err; } next(); - } + }), }), prompt, useColors: false, terminal: true - }, function(err, repl) { + }, common.mustCall((err, repl) => { if (err) { console.error(`Failed test # ${numtests - tests.length}`); throw err; @@ -250,7 +248,7 @@ function runTest(assertCleaned) { onClose(); }); - function onClose() { + const onClose = common.mustCall(() => { const cleaned = clean === false ? false : cleanupTmpFile(); try { @@ -261,8 +259,8 @@ function runTest(assertCleaned) { console.error(`Failed test # ${numtests - tests.length}`); throw err; } - } + }); repl.inputStream.run(test); - }); + })); } diff --git a/test/parallel/test-repl-programmatic-history-setup-history.js b/test/parallel/test-repl-programmatic-history-setup-history.js index 038972b8566ba0..544f3994ef331b 100644 --- a/test/parallel/test-repl-programmatic-history-setup-history.js +++ b/test/parallel/test-repl-programmatic-history-setup-history.js @@ -138,14 +138,12 @@ const tests = [ }, // Checking the history file permissions { - before: function before() { + before: common.mustCall(function before() { if (common.isWindows) { const execSync = require('child_process').execSync; - execSync(`ATTRIB +H "${emptyHiddenHistoryPath}"`, (err) => { - assert.ifError(err); - }); + execSync(`ATTRIB +H "${emptyHiddenHistoryPath}"`); } - }, + }), env: { NODE_REPL_HISTORY: emptyHiddenHistoryPath }, test: [UP], expected: [prompt] @@ -220,7 +218,7 @@ function runTest(assertCleaned) { const repl = REPL.start({ input: new ActionStream(), output: new stream.Writable({ - write(chunk, _, next) { + write: common.mustCallAtLeast((chunk, _, next) => { const output = chunk.toString(); // Ignore escapes and blank lines @@ -234,7 +232,7 @@ function runTest(assertCleaned) { throw err; } next(); - } + }), }), prompt: prompt, useColors: false, diff --git a/test/parallel/test-repl-programmatic-history.js b/test/parallel/test-repl-programmatic-history.js index c762e83840e41c..c2bb6c88e52ed9 100644 --- a/test/parallel/test-repl-programmatic-history.js +++ b/test/parallel/test-repl-programmatic-history.js @@ -131,14 +131,12 @@ const tests = [ expected: [prompt, replFailedRead, prompt, replDisabled, prompt] }, { - before: function before() { + before: common.mustCall(function before() { if (common.isWindows) { const execSync = require('child_process').execSync; - execSync(`ATTRIB +H "${emptyHiddenHistoryPath}"`, (err) => { - assert.ifError(err); - }); + execSync(`ATTRIB +H "${emptyHiddenHistoryPath}"`); } - }, + }), env: { NODE_REPL_HISTORY: 
emptyHiddenHistoryPath }, test: [UP], expected: [prompt] @@ -211,7 +209,7 @@ function runTest(assertCleaned) { const repl = REPL.start({ input: new ActionStream(), output: new stream.Writable({ - write(chunk, _, next) { + write: common.mustCallAtLeast((chunk, _, next) => { const output = chunk.toString(); // Ignore escapes and blank lines @@ -225,7 +223,7 @@ function runTest(assertCleaned) { throw err; } next(); - } + }), }), prompt: prompt, useColors: false, @@ -233,7 +231,7 @@ function runTest(assertCleaned) { historySize }); - repl.setupHistory(file, function(err, repl) { + repl.setupHistory(file, common.mustCall((err, repl) => { if (err) { console.error(`Failed test # ${numtests - tests.length}`); throw err; @@ -248,7 +246,7 @@ function runTest(assertCleaned) { onClose(); }); - function onClose() { + const onClose = common.mustCall(() => { const cleaned = clean === false ? false : cleanupTmpFile(); try { @@ -259,8 +257,8 @@ function runTest(assertCleaned) { console.error(`Failed test # ${numtests - tests.length}`); throw err; } - } + }); repl.inputStream.run(test); - }); + })); } diff --git a/test/parallel/test-repl-reverse-search.js b/test/parallel/test-repl-reverse-search.js index 314d772adc67a5..cbe848afee082a 100644 --- a/test/parallel/test-repl-reverse-search.js +++ b/test/parallel/test-repl-reverse-search.js @@ -301,7 +301,7 @@ function runTest() { REPL.createInternalRepl(opts.env, { input: new ActionStream(), output: new stream.Writable({ - write(chunk, _, next) { + write: common.mustCallAtLeast((chunk, _, next) => { const output = chunk.toString(); if (!opts.showEscapeCodes && @@ -325,19 +325,19 @@ function runTest() { } next(); - } + }), }), completer: opts.completer, prompt, useColors: opts.useColors || false, terminal: true - }, function(err, repl) { + }, common.mustCall((err, repl) => { if (err) { console.error(`Failed test # ${numtests - tests.length}`); throw err; } - repl.once('close', () => { + repl.once('close', common.mustCall(() => { if (opts.clean) cleanupTmpFile(); @@ -349,7 +349,7 @@ function runTest() { } setImmediate(runTestWrap, true); - }); + })); if (opts.columns) { Object.defineProperty(repl, 'columns', { @@ -358,7 +358,7 @@ function runTest() { }); } repl.inputStream.run(opts.test); - }); + })); } // run the tests diff --git a/test/parallel/test-repl-setprompt.js b/test/parallel/test-repl-setprompt.js index d9eb85be145734..9901f8f974f646 100644 --- a/test/parallel/test-repl-setprompt.js +++ b/test/parallel/test-repl-setprompt.js @@ -20,7 +20,7 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. 
'use strict'; -require('../common'); +const common = require('../common'); const assert = require('assert'); const spawn = require('child_process').spawn; const os = require('os'); @@ -41,9 +41,9 @@ child.stdout.on('data', function(d) { data += d; }); child.stdin.end(`e.setPrompt("${p}");${os.EOL}`); -child.on('close', function(code, signal) { +child.on('close', common.mustCall((code, signal) => { assert.strictEqual(code, 0); assert.ok(!signal); const lines = data.split('\n'); assert.strictEqual(lines.pop(), p); -}); +})); diff --git a/test/parallel/test-repl-sigint-nested-eval.js b/test/parallel/test-repl-sigint-nested-eval.js index 7955cf413f7c49..555802b725d145 100644 --- a/test/parallel/test-repl-sigint-nested-eval.js +++ b/test/parallel/test-repl-sigint-nested-eval.js @@ -40,7 +40,7 @@ child.stdout.once('data', common.mustCall(() => { ); })); -child.on('close', function(code) { +child.on('close', common.mustCall((code) => { const expected = 'Script execution was interrupted by `SIGINT`'; assert.ok( stdout.includes(expected), @@ -50,4 +50,4 @@ child.on('close', function(code) { stdout.includes('foobar'), `Expected stdout to contain "foobar", got ${stdout}` ); -}); +})); diff --git a/test/parallel/test-repl-sigint.js b/test/parallel/test-repl-sigint.js index f4087b11d488d6..33495f80a77a2a 100644 --- a/test/parallel/test-repl-sigint.js +++ b/test/parallel/test-repl-sigint.js @@ -39,7 +39,7 @@ child.stdout.once('data', common.mustCall(() => { 'while(true){}\n'); })); -child.on('close', function(code) { +child.on('close', common.mustCall((code) => { assert.strictEqual(code, 0); const expected = 'Script execution was interrupted by `SIGINT`'; assert.ok( @@ -50,4 +50,4 @@ child.on('close', function(code) { stdout.includes('42042\n'), `Expected stdout to contain "42042", got ${stdout}` ); -}); +})); diff --git a/test/parallel/test-repl-syntax-error-handling.js b/test/parallel/test-repl-syntax-error-handling.js index 91a8614d1deb93..2b57af8c80117f 100644 --- a/test/parallel/test-repl-syntax-error-handling.js +++ b/test/parallel/test-repl-syntax-error-handling.js @@ -20,7 +20,7 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. 
'use strict'; -require('../common'); +const common = require('../common'); const assert = require('assert'); switch (process.argv[2]) { @@ -47,15 +47,15 @@ function parent() { child.stdout.on('data', function(c) { out += c; }); - child.stdout.on('end', function() { + child.stdout.on('end', common.mustCall(() => { assert.strictEqual(out, '10\n'); console.log('ok - got expected output'); - }); + })); - child.on('exit', function(c) { + child.on('exit', common.mustCall((c) => { assert(!c); console.log('ok - exit success'); - }); + })); } function child() { diff --git a/test/parallel/test-repl-tab-complete-buffer.js b/test/parallel/test-repl-tab-complete-buffer.js index 72c5e26cb799ae..25a5dc6fe6c8af 100644 --- a/test/parallel/test-repl-tab-complete-buffer.js +++ b/test/parallel/test-repl-tab-complete-buffer.js @@ -48,10 +48,10 @@ for (const type of [ assert.strictEqual(data[0].includes('ele.biu'), true); - data[0].forEach((key) => { + for (const key of data[0]) { if (!key || key === 'ele.biu') return; assert.notStrictEqual(ele[key.slice(4)], undefined); - }); + } }) ); } diff --git a/test/parallel/test-repl-tab-complete-import.js b/test/parallel/test-repl-tab-complete-import.js index 1f6cf7bff94b22..ed8a6c2de5efc5 100644 --- a/test/parallel/test-repl-tab-complete-import.js +++ b/test/parallel/test-repl-tab-complete-import.js @@ -56,9 +56,9 @@ replServer.complete("import\t( 'n", common.mustSucceed((data) => { assert.strictEqual(completions[lastIndex + 2], 'net'); assert.strictEqual(completions[lastIndex + 3], ''); // It's possible to pick up non-core modules too - completions.slice(lastIndex + 4).forEach((completion) => { + for (const completion of completions.slice(lastIndex + 4)) { assert.match(completion, /^n/); - }); + } })); { diff --git a/test/parallel/test-repl-tab-complete-require.js b/test/parallel/test-repl-tab-complete-require.js index 12be39c7f6aca3..47c03d8d5990a6 100644 --- a/test/parallel/test-repl-tab-complete-require.js +++ b/test/parallel/test-repl-tab-complete-require.js @@ -62,20 +62,18 @@ const { startNewREPLServer } = require('../common/repl'); // require(...) 
completions include `node:`-prefixed modules: let lastIndex = -1; - publicModules - .filter((lib) => !lib.startsWith('node:')) - .forEach((lib, index) => { - lastIndex = data[0].indexOf(`node:${lib}`); - assert.notStrictEqual(lastIndex, -1); - }); + for (const lib of publicModules.filter((lib) => !lib.startsWith('node:'))) { + lastIndex = data[0].indexOf(`node:${lib}`); + assert.notStrictEqual(lastIndex, -1); + } assert.strictEqual(data[0][lastIndex + 1], ''); // There is only one Node.js module that starts with n: assert.strictEqual(data[0][lastIndex + 2], 'net'); assert.strictEqual(data[0][lastIndex + 3], ''); // It's possible to pick up non-core modules too - data[0].slice(lastIndex + 4).forEach((completion) => { + for (const completion of data[0].slice(lastIndex + 4)) { assert.match(completion, /^n/); - }); + } }) ); } diff --git a/test/parallel/test-repl-tab.js b/test/parallel/test-repl-tab.js index f64a00d8bca99e..e99f667c4a38f5 100644 --- a/test/parallel/test-repl-tab.js +++ b/test/parallel/test-repl-tab.js @@ -1,6 +1,5 @@ 'use strict'; const common = require('../common'); -const assert = require('assert'); const repl = require('repl'); const zlib = require('zlib'); @@ -13,6 +12,4 @@ const testMe = repl.start('', putIn, function(cmd, context, filename, testMe._domain.on('error', common.mustNotCall()); -testMe.complete('', function(err, results) { - assert.strictEqual(err, null); -}); +testMe.complete('', common.mustSucceed()); diff --git a/test/parallel/test-repl-uncaught-exception-async.js b/test/parallel/test-repl-uncaught-exception-async.js index 8e0d9b4075ada4..f4180080f496f7 100644 --- a/test/parallel/test-repl-uncaught-exception-async.js +++ b/test/parallel/test-repl-uncaught-exception-async.js @@ -4,7 +4,7 @@ // does not suppress errors in the whole application. Adding such listener // should throw. 
-require('../common'); +const common = require('../common'); const assert = require('assert'); const { startNewREPLServer } = require('../common/repl'); @@ -32,10 +32,10 @@ replServer.write( '}, 1);console.log()\n' ); -setTimeout(() => { +setTimeout(common.mustCall(() => { replServer.close(); const len = process.listenerCount('uncaughtException'); process.removeAllListeners('uncaughtException'); assert.strictEqual(len, 0); assert.match(output.accumulator, /ERR_INVALID_REPL_INPUT.*(?!Type)RangeError: abc/s); -}, 2); +}), 2); diff --git a/test/parallel/test-repl-underscore.js b/test/parallel/test-repl-underscore.js index 9c8281ebce9e36..4c091a268fb701 100644 --- a/test/parallel/test-repl-underscore.js +++ b/test/parallel/test-repl-underscore.js @@ -1,6 +1,6 @@ 'use strict'; -require('../common'); +const common = require('../common'); const assert = require('assert'); const repl = require('repl'); const { startNewREPLServer } = require('../common/repl'); @@ -152,7 +152,7 @@ function testError() { // throws error, async `); - setImmediate(() => { + setImmediate(common.mustCall(() => { const lines = output.accumulator.trim().split('\n').filter( (line) => !line.includes(testingReplPrompt) || line.includes('Uncaught Error') ); @@ -205,7 +205,7 @@ function testError() { 'Uncaught Error: quux', '0', ]); - }); + })); } function assertOutput(output, expected) { diff --git a/test/parallel/test-repl-unexpected-token-recoverable.js b/test/parallel/test-repl-unexpected-token-recoverable.js index 747f502ec56eaf..f81855c879b979 100644 --- a/test/parallel/test-repl-unexpected-token-recoverable.js +++ b/test/parallel/test-repl-unexpected-token-recoverable.js @@ -2,7 +2,7 @@ // This is a regression test for https://github.com/joyent/node/issues/8874. -require('../common'); +const common = require('../common'); const assert = require('assert'); const spawn = require('child_process').spawn; @@ -25,9 +25,9 @@ child.stdout.on('data', (d) => { out += d; }); -child.stdout.on('end', () => { +child.stdout.on('end', common.mustCall(() => { assert.match(out, expectOut); console.log('ok'); -}); +})); child.stdin.end(input); diff --git a/test/parallel/test-require-symlink.js b/test/parallel/test-require-symlink.js index 9ca543e8d64ca4..6e303517171089 100644 --- a/test/parallel/test-require-symlink.js +++ b/test/parallel/test-require-symlink.js @@ -76,19 +76,19 @@ function test() { // Load symlinked-script as main const node = process.execPath; const child = spawn(node, ['--preserve-symlinks', linkScript]); - child.on('close', function(code, signal) { + child.on('close', common.mustCall((code, signal) => { assert.strictEqual(code, 0); assert(!signal); - }); + })); // Also verify that symlinks works for setting preserve via env variables const childEnv = spawn(node, [linkScript], { env: { ...process.env, NODE_PRESERVE_SYMLINKS: '1' } }); - childEnv.on('close', function(code, signal) { + childEnv.on('close', common.mustCall((code, signal) => { assert.strictEqual(code, 0); assert(!signal); - }); + })); // Also verify that symlinks works for setting preserve via env variables in // Workers. 
diff --git a/test/parallel/test-runner-aliases.js b/test/parallel/test-runner-aliases.js index 1a61da896e9e38..00b8d24b61b8af 100644 --- a/test/parallel/test-runner-aliases.js +++ b/test/parallel/test-runner-aliases.js @@ -1,8 +1,8 @@ 'use strict'; require('../common'); -const { strictEqual } = require('node:assert'); +const assert = require('node:assert'); const test = require('node:test'); -strictEqual(test.test, test); -strictEqual(test.it, test); -strictEqual(test.describe, test.suite); +assert.strictEqual(test.test, test); +assert.strictEqual(test.it, test); +assert.strictEqual(test.describe, test.suite); diff --git a/test/parallel/test-runner-coverage.js b/test/parallel/test-runner-coverage.js index 9f5ef360a7c9d6..5a8f3d743538cb 100644 --- a/test/parallel/test-runner-coverage.js +++ b/test/parallel/test-runner-coverage.js @@ -230,7 +230,7 @@ test('coverage is combined for multiple processes', skipIfNoInspector, () => { assert.strictEqual(result.status, 0); }); -test.skip('coverage works with isolation=none', skipIfNoInspector, () => { +test.skip('coverage works with isolation=none', skipIfNoInspector, common.mustCallAtLeast(() => { // There is a bug in coverage calculation. The branch % in the common.js // fixture is different depending on the test isolation mode. The 'none' mode // is closer to what c8 reports here, so the bug is likely in the code that @@ -270,7 +270,7 @@ test.skip('coverage works with isolation=none', skipIfNoInspector, () => { assert.strictEqual(result.stderr.toString(), ''); assert(result.stdout.toString().includes(report)); assert.strictEqual(result.status, 0); -}); +}, 0)); test('coverage reports on lines, functions, and branches', skipIfNoInspector, async (t) => { const fixture = fixtures.path('test-runner', 'coverage.js'); @@ -290,9 +290,9 @@ test('coverage reports on lines, functions, and branches', skipIfNoInspector, as await t.test('does not include node_modules', () => { assert.strictEqual(coverage.summary.files.length, 3); const files = ['coverage.js', 'invalid-tap.js', 'throw.js']; - coverage.summary.files.forEach((file, index) => { + coverage.summary.files.forEach(common.mustCallAtLeast((file, index) => { assert.ok(file.path.endsWith(files[index])); - }); + })); }); const file = coverage.summary.files[0]; diff --git a/test/parallel/test-runner-custom-assertions.js b/test/parallel/test-runner-custom-assertions.js index a4bdf0f548be80..6e398339afe1f4 100644 --- a/test/parallel/test-runner-custom-assertions.js +++ b/test/parallel/test-runner-custom-assertions.js @@ -1,11 +1,11 @@ 'use strict'; -require('../common'); +const common = require('../common'); const assert = require('node:assert'); const { test, assert: testAssertions } = require('node:test'); -testAssertions.register('isOdd', (n) => { +testAssertions.register('isOdd', common.mustCallAtLeast((n) => { assert.strictEqual(n % 2, 1); -}); +})); testAssertions.register('ok', () => { return 'ok'; diff --git a/test/parallel/test-runner-filetest-location.js b/test/parallel/test-runner-filetest-location.js index 44293632dbdc68..0b43198a743d78 100644 --- a/test/parallel/test-runner-filetest-location.js +++ b/test/parallel/test-runner-filetest-location.js @@ -1,7 +1,7 @@ 'use strict'; const common = require('../common'); const fixtures = require('../common/fixtures'); -const { strictEqual } = require('node:assert'); +const assert = require('node:assert'); const { relative } = require('node:path'); const { run } = require('node:test'); const fixture = fixtures.path('test-runner', 'index.js'); @@ -12,9 
+12,9 @@ const stream = run({ }); stream.on('test:fail', common.mustCall((result) => { - strictEqual(result.name, relativePath); - strictEqual(result.details.error.failureType, 'testCodeFailure'); - strictEqual(result.line, 1); - strictEqual(result.column, 1); - strictEqual(result.file, fixture); + assert.strictEqual(result.name, relativePath); + assert.strictEqual(result.details.error.failureType, 'testCodeFailure'); + assert.strictEqual(result.line, 1); + assert.strictEqual(result.column, 1); + assert.strictEqual(result.file, fixture); })); diff --git a/test/parallel/test-runner-force-exit-failure.js b/test/parallel/test-runner-force-exit-failure.js index 9d40d4aea48e77..52032372405eab 100644 --- a/test/parallel/test-runner-force-exit-failure.js +++ b/test/parallel/test-runner-force-exit-failure.js @@ -1,6 +1,6 @@ 'use strict'; require('../common'); -const { match, doesNotMatch, strictEqual } = require('node:assert'); +const assert = require('node:assert'); const { spawnSync } = require('node:child_process'); const fixtures = require('../common/fixtures'); const fixture = fixtures.path('test-runner/throws_sync_and_async.js'); @@ -15,11 +15,11 @@ for (const isolation of ['none', 'process']) { ]; const r = spawnSync(process.execPath, args); - strictEqual(r.status, 1); - strictEqual(r.signal, null); - strictEqual(r.stderr.toString(), ''); + assert.strictEqual(r.status, 1); + assert.strictEqual(r.signal, null); + assert.strictEqual(r.stderr.toString(), ''); const stdout = r.stdout.toString(); - match(stdout, /Error: fails/); - doesNotMatch(stdout, /this should not have a chance to be thrown/); + assert.match(stdout, /Error: fails/); + assert.doesNotMatch(stdout, /this should not have a chance to be thrown/); } diff --git a/test/parallel/test-runner-force-exit-flush.js b/test/parallel/test-runner-force-exit-flush.js index ddc4ea9771913a..f3b3a7fc26cdc0 100644 --- a/test/parallel/test-runner-force-exit-flush.js +++ b/test/parallel/test-runner-force-exit-flush.js @@ -2,7 +2,7 @@ require('../common'); const fixtures = require('../common/fixtures'); const tmpdir = require('../common/tmpdir'); -const { match, strictEqual } = require('node:assert'); +const assert = require('node:assert'); const { spawnSync } = require('node:child_process'); const { readFileSync } = require('node:fs'); const { test } = require('node:test'); @@ -16,9 +16,9 @@ function runWithReporter(reporter) { fixtures.path('test-runner', 'reporters.js'), ]; const child = spawnSync(process.execPath, args); - strictEqual(child.stdout.toString(), ''); - strictEqual(child.stderr.toString(), ''); - strictEqual(child.status, 1); + assert.strictEqual(child.stdout.toString(), ''); + assert.strictEqual(child.stderr.toString(), ''); + assert.strictEqual(child.status, 1); return destination; } @@ -26,24 +26,24 @@ tmpdir.refresh(); test('junit reporter', () => { const output = readFileSync(runWithReporter('junit'), 'utf8'); - match(output, //); - match(output, //); - match(output, //); - match(output, //); + assert.match(output, //); + assert.match(output, //); + assert.match(output, / + +This flips the pass/fail reporting for a specific test or suite: A flagged test/test-case must throw +in order to "pass"; a test/test-case that does not throw, fails. + +In the following, `doTheThing()` returns _currently_ `false` (`false` does not equal `true`, causing +`strictEqual` to throw, so the test-case passes). 
+ +```js +it.expectFailure('should do the thing', () => { + assert.strictEqual(doTheThing(), true); +}); + +it('should do the thing', { expectFailure: true }, () => { + assert.strictEqual(doTheThing(), true); +}); +``` + +`skip` and/or `todo` are mutually exclusive to `expectFailure`, and `skip` or `todo` +will "win" when both are applied (`skip` wins against both, and `todo` wins +against `expectFailure`). + +These tests will be skipped (and not run): + +```js +it.expectFailure('should do the thing', { skip: true }, () => { + assert.strictEqual(doTheThing(), true); +}); + +it.skip('should do the thing', { expectFailure: true }, () => { + assert.strictEqual(doTheThing(), true); +}); +``` + +These tests will be marked "todo" (silencing errors): + +```js +it.expectFailure('should do the thing', { todo: true }, () => { + assert.strictEqual(doTheThing(), true); +}); + +it.todo('should do the thing', { expectFailure: true }, () => { + assert.strictEqual(doTheThing(), true); +}); +``` + ## `describe()` and `it()` aliases Suites and tests can also be written using the `describe()` and `it()` diff --git a/lib/internal/test_runner/harness.js b/lib/internal/test_runner/harness.js index bd4b35f1d64033..6b3b13b2c88d65 100644 --- a/lib/internal/test_runner/harness.js +++ b/lib/internal/test_runner/harness.js @@ -377,7 +377,7 @@ function runInParentContext(Factory) { return run(name, options, fn, overrides); }; - ArrayPrototypeForEach(['skip', 'todo', 'only'], (keyword) => { + ArrayPrototypeForEach(['expectFailure', 'skip', 'todo', 'only'], (keyword) => { test[keyword] = (name, options, fn) => { const overrides = { __proto__: null, diff --git a/lib/internal/test_runner/reporter/tap.js b/lib/internal/test_runner/reporter/tap.js index 0f2aa6a722f858..01c698871b9134 100644 --- a/lib/internal/test_runner/reporter/tap.js +++ b/lib/internal/test_runner/reporter/tap.js @@ -33,12 +33,12 @@ async function * tapReporter(source) { for await (const { type, data } of source) { switch (type) { case 'test:fail': { - yield reportTest(data.nesting, data.testNumber, 'not ok', data.name, data.skip, data.todo); + yield reportTest(data.nesting, data.testNumber, 'not ok', data.name, data.skip, data.todo, data.expectFailure); const location = data.file ? `${data.file}:${data.line}:${data.column}` : null; yield reportDetails(data.nesting, data.details, location); break; } case 'test:pass': - yield reportTest(data.nesting, data.testNumber, 'ok', data.name, data.skip, data.todo); + yield reportTest(data.nesting, data.testNumber, 'ok', data.name, data.skip, data.todo, data.expectFailure); yield reportDetails(data.nesting, data.details, null); break; case 'test:plan': @@ -65,7 +65,7 @@ async function * tapReporter(source) { } } -function reportTest(nesting, testNumber, status, name, skip, todo) { +function reportTest(nesting, testNumber, status, name, skip, todo, expectFailure) { let line = `${indent(nesting)}${status} ${testNumber}`; if (name) { @@ -76,6 +76,8 @@ function reportTest(nesting, testNumber, status, name, skip, todo) { line += ` # SKIP${typeof skip === 'string' && skip.length ? ` ${tapEscape(skip)}` : ''}`; } else if (todo !== undefined) { line += ` # TODO${typeof todo === 'string' && todo.length ? 
` ${tapEscape(todo)}` : ''}`; + } else if (expectFailure !== undefined) { + line += ' # EXPECTED FAILURE'; } line += '\n'; diff --git a/lib/internal/test_runner/reporter/utils.js b/lib/internal/test_runner/reporter/utils.js index 288c129579182f..26e4a2d1a5c36c 100644 --- a/lib/internal/test_runner/reporter/utils.js +++ b/lib/internal/test_runner/reporter/utils.js @@ -70,7 +70,7 @@ function formatError(error, indent) { function formatTestReport(type, data, showErrorDetails = true, prefix = '', indent = '') { let color = reporterColorMap[type] ?? colors.white; let symbol = reporterUnicodeSymbolMap[type] ?? ' '; - const { skip, todo } = data; + const { skip, todo, expectFailure } = data; const duration_ms = data.details?.duration_ms ? ` ${colors.gray}(${data.details.duration_ms}ms)${colors.white}` : ''; let title = `${data.name}${duration_ms}`; @@ -78,6 +78,8 @@ function formatTestReport(type, data, showErrorDetails = true, prefix = '', inde title += ` # ${typeof skip === 'string' && skip.length ? skip : 'SKIP'}`; } else if (todo !== undefined) { title += ` # ${typeof todo === 'string' && todo.length ? todo : 'TODO'}`; + } else if (expectFailure !== undefined) { + title += ` # EXPECTED FAILURE`; } const err = showErrorDetails && data.details?.error ? formatError(data.details.error, indent) : ''; diff --git a/lib/internal/test_runner/test.js b/lib/internal/test_runner/test.js index 2e70718fac361a..a1b17ffcc39a8c 100644 --- a/lib/internal/test_runner/test.js +++ b/lib/internal/test_runner/test.js @@ -496,7 +496,7 @@ class Test extends AsyncResource { super('Test'); let { fn, name, parent } = options; - const { concurrency, entryFile, loc, only, timeout, todo, skip, signal, plan } = options; + const { concurrency, entryFile, expectFailure, loc, only, timeout, todo, skip, signal, plan } = options; if (typeof fn !== 'function') { fn = noop; @@ -635,6 +635,7 @@ class Test extends AsyncResource { this.plan = null; this.expectedAssertions = plan; this.cancelled = false; + this.expectFailure = expectFailure !== undefined && expectFailure !== false; this.skipped = skip !== undefined && skip !== false; this.isTodo = (todo !== undefined && todo !== false) || this.parent?.isTodo; this.startTime = null; @@ -946,7 +947,12 @@ class Test extends AsyncResource { return; } - this.passed = false; + if (this.expectFailure === true) { + this.passed = true; + } else { + this.passed = false; + } + this.error = err; } @@ -1343,6 +1349,8 @@ class Test extends AsyncResource { directive = this.reporter.getSkip(this.message); } else if (this.isTodo) { directive = this.reporter.getTodo(this.message); + } else if (this.expectFailure) { + directive = this.reporter.getXFail(this.expectFailure); // TODO(@JakobJingleheimer): support specifying failure } if (this.reportedType) { @@ -1357,6 +1365,7 @@ class Test extends AsyncResource { if (this.passedAttempt !== undefined) { details.passed_on_attempt = this.passedAttempt; } + return { __proto__: null, details, directive }; } diff --git a/lib/internal/test_runner/tests_stream.js b/lib/internal/test_runner/tests_stream.js index 318d7f49998c0e..7b64487696f53f 100644 --- a/lib/internal/test_runner/tests_stream.js +++ b/lib/internal/test_runner/tests_stream.js @@ -87,6 +87,10 @@ class TestsStream extends Readable { return { __proto__: null, todo: reason ?? true }; } + getXFail(expectation = undefined) { + return { __proto__: null, expectFailure: expectation ?? 
true }; + } + enqueue(nesting, loc, name, type) { this[kEmitMessage]('test:enqueue', { __proto__: null, diff --git a/test/fixtures/test-runner/output/describe_it.js b/test/fixtures/test-runner/output/describe_it.js index b605019ad1b112..fd92f976c08f20 100644 --- a/test/fixtures/test-runner/output/describe_it.js +++ b/test/fixtures/test-runner/output/describe_it.js @@ -5,8 +5,23 @@ const { describe, it, test } = require('node:test'); const util = require('util'); -it.todo('sync pass todo', () => { +it.expectFailure('sync expect fail (method)', () => { + throw new Error('should pass'); +}); + +it('sync expect fail (options)', { expectFailure: true }, () => { + throw new Error('should pass'); +}); +it.expectFailure('async expect fail (method)', async () => { + throw new Error('should pass'); +}); + +it('async expect fail (options)', { expectFailure: true }, async () => { + throw new Error('should pass'); +}); + +it.todo('sync pass todo', () => { }); it('sync pass todo with message', { todo: 'this is a passing todo' }, () => { @@ -16,6 +31,10 @@ it.todo('sync todo', () => { throw new Error('should not count as a failure'); }); +it.todo('sync todo with expect fail', { expectFailure: true }, () => { + throw new Error('should not count as an expected failure'); +}); + it('sync todo with message', { todo: 'this is a failing todo' }, () => { throw new Error('should not count as a failure'); }); @@ -23,6 +42,10 @@ it('sync todo with message', { todo: 'this is a failing todo' }, () => { it.skip('sync skip pass', () => { }); +it.skip('sync skip expect fail', { expectFailure: true }, () => { + throw new Error('should not fail'); +}); + it('sync skip pass with message', { skip: 'this is skipped' }, () => { }); diff --git a/test/fixtures/test-runner/output/describe_it.snapshot b/test/fixtures/test-runner/output/describe_it.snapshot index 67d4af7f1b9f45..923ac8647acc2d 100644 --- a/test/fixtures/test-runner/output/describe_it.snapshot +++ b/test/fixtures/test-runner/output/describe_it.snapshot @@ -1,18 +1,42 @@ TAP version 13 +# Subtest: sync expect fail (method) +ok 1 - sync expect fail (method) # EXPECTED FAILURE + --- + duration_ms: * + type: 'test' + ... +# Subtest: sync expect fail (options) +ok 2 - sync expect fail (options) # EXPECTED FAILURE + --- + duration_ms: * + type: 'test' + ... +# Subtest: async expect fail (method) +ok 3 - async expect fail (method) # EXPECTED FAILURE + --- + duration_ms: * + type: 'test' + ... +# Subtest: async expect fail (options) +ok 4 - async expect fail (options) # EXPECTED FAILURE + --- + duration_ms: * + type: 'test' + ... # Subtest: sync pass todo -ok 1 - sync pass todo # TODO +ok 5 - sync pass todo # TODO --- duration_ms: * type: 'test' ... # Subtest: sync pass todo with message -ok 2 - sync pass todo with message # TODO this is a passing todo +ok 6 - sync pass todo with message # TODO this is a passing todo --- duration_ms: * type: 'test' ... # Subtest: sync todo -not ok 3 - sync todo # TODO +not ok 7 - sync todo # TODO --- duration_ms: * type: 'test' @@ -29,8 +53,14 @@ not ok 3 - sync todo # TODO * * ... +# Subtest: sync todo with expect fail +ok 8 - sync todo with expect fail # TODO + --- + duration_ms: * + type: 'test' + ... # Subtest: sync todo with message -not ok 4 - sync todo with message # TODO this is a failing todo +not ok 9 - sync todo with message # TODO this is a failing todo --- duration_ms: * type: 'test' @@ -48,25 +78,31 @@ not ok 4 - sync todo with message # TODO this is a failing todo * ... 
# Subtest: sync skip pass -ok 5 - sync skip pass # SKIP +ok 10 - sync skip pass # SKIP + --- + duration_ms: * + type: 'test' + ... +# Subtest: sync skip expect fail +ok 11 - sync skip expect fail # SKIP --- duration_ms: * type: 'test' ... # Subtest: sync skip pass with message -ok 6 - sync skip pass with message # SKIP this is skipped +ok 12 - sync skip pass with message # SKIP this is skipped --- duration_ms: * type: 'test' ... # Subtest: sync pass -ok 7 - sync pass +ok 13 - sync pass --- duration_ms: * type: 'test' ... # Subtest: sync throw fail -not ok 8 - sync throw fail +not ok 14 - sync throw fail --- duration_ms: * type: 'test' @@ -84,25 +120,25 @@ not ok 8 - sync throw fail * ... # Subtest: async skip pass -ok 9 - async skip pass # SKIP +ok 15 - async skip pass # SKIP --- duration_ms: * type: 'test' ... # Subtest: async pass -ok 10 - async pass +ok 16 - async pass --- duration_ms: * type: 'test' ... # Subtest: mixing describe/it and test should work -ok 11 - mixing describe/it and test should work +ok 17 - mixing describe/it and test should work --- duration_ms: * type: 'test' ... # Subtest: async throw fail -not ok 12 - async throw fail +not ok 18 - async throw fail --- duration_ms: * type: 'test' @@ -120,7 +156,7 @@ not ok 12 - async throw fail * ... # Subtest: async skip fail -not ok 13 - async skip fail # SKIP +not ok 19 - async skip fail # SKIP --- duration_ms: * type: 'test' @@ -130,7 +166,7 @@ not ok 13 - async skip fail # SKIP code: 'ERR_TEST_FAILURE' ... # Subtest: async assertion fail -not ok 14 - async assertion fail +not ok 20 - async assertion fail --- duration_ms: * type: 'test' @@ -156,13 +192,13 @@ not ok 14 - async assertion fail * ... # Subtest: resolve pass -ok 15 - resolve pass +ok 21 - resolve pass --- duration_ms: * type: 'test' ... # Subtest: reject fail -not ok 16 - reject fail +not ok 22 - reject fail --- duration_ms: * type: 'test' @@ -180,31 +216,31 @@ not ok 16 - reject fail * ... # Subtest: unhandled rejection - passes but warns -ok 17 - unhandled rejection - passes but warns +ok 23 - unhandled rejection - passes but warns --- duration_ms: * type: 'test' ... # Subtest: async unhandled rejection - passes but warns -ok 18 - async unhandled rejection - passes but warns +ok 24 - async unhandled rejection - passes but warns --- duration_ms: * type: 'test' ... # Subtest: immediate throw - passes but warns -ok 19 - immediate throw - passes but warns +ok 25 - immediate throw - passes but warns --- duration_ms: * type: 'test' ... # Subtest: immediate reject - passes but warns -ok 20 - immediate reject - passes but warns +ok 26 - immediate reject - passes but warns --- duration_ms: * type: 'test' ... # Subtest: immediate resolve pass -ok 21 - immediate resolve pass +ok 27 - immediate resolve pass --- duration_ms: * type: 'test' @@ -238,7 +274,7 @@ ok 21 - immediate resolve pass type: 'test' ... 1..2 -not ok 22 - subtest sync throw fail +not ok 28 - subtest sync throw fail --- duration_ms: * type: 'suite' @@ -248,7 +284,7 @@ not ok 22 - subtest sync throw fail code: 'ERR_TEST_FAILURE' ... # Subtest: sync throw non-error fail -not ok 23 - sync throw non-error fail +not ok 29 - sync throw non-error fail --- duration_ms: * type: 'test' @@ -283,31 +319,31 @@ not ok 23 - sync throw non-error fail type: 'test' ... 1..4 -ok 24 - level 0a +ok 30 - level 0a --- duration_ms: * type: 'suite' ... 
# Subtest: invalid subtest - pass but subtest fails -ok 25 - invalid subtest - pass but subtest fails +ok 31 - invalid subtest - pass but subtest fails --- duration_ms: * type: 'suite' ... # Subtest: sync skip option -ok 26 - sync skip option # SKIP +ok 32 - sync skip option # SKIP --- duration_ms: * type: 'test' ... # Subtest: sync skip option with message -ok 27 - sync skip option with message # SKIP this is skipped +ok 33 - sync skip option with message # SKIP this is skipped --- duration_ms: * type: 'test' ... # Subtest: sync skip option is false fail -not ok 28 - sync skip option is false fail +not ok 34 - sync skip option is false fail --- duration_ms: * type: 'test' @@ -325,61 +361,61 @@ not ok 28 - sync skip option is false fail * ... # Subtest: -ok 29 - +ok 35 - --- duration_ms: * type: 'test' ... # Subtest: functionOnly -ok 30 - functionOnly +ok 36 - functionOnly --- duration_ms: * type: 'test' ... # Subtest: -ok 31 - +ok 37 - --- duration_ms: * type: 'test' ... # Subtest: test with only a name provided -ok 32 - test with only a name provided +ok 38 - test with only a name provided --- duration_ms: * type: 'test' ... # Subtest: -ok 33 - +ok 39 - --- duration_ms: * type: 'test' ... # Subtest: -ok 34 - # SKIP +ok 40 - # SKIP --- duration_ms: * type: 'test' ... # Subtest: test with a name and options provided -ok 35 - test with a name and options provided # SKIP +ok 41 - test with a name and options provided # SKIP --- duration_ms: * type: 'test' ... # Subtest: functionAndOptions -ok 36 - functionAndOptions # SKIP +ok 42 - functionAndOptions # SKIP --- duration_ms: * type: 'test' ... # Subtest: callback pass -ok 37 - callback pass +ok 43 - callback pass --- duration_ms: * type: 'test' ... # Subtest: callback fail -not ok 38 - callback fail +not ok 44 - callback fail --- duration_ms: * type: 'test' @@ -392,25 +428,25 @@ not ok 38 - callback fail * ... # Subtest: sync t is this in test -ok 39 - sync t is this in test +ok 45 - sync t is this in test --- duration_ms: * type: 'test' ... # Subtest: async t is this in test -ok 40 - async t is this in test +ok 46 - async t is this in test --- duration_ms: * type: 'test' ... # Subtest: callback t is this in test -ok 41 - callback t is this in test +ok 47 - callback t is this in test --- duration_ms: * type: 'test' ... # Subtest: callback also returns a Promise -not ok 42 - callback also returns a Promise +not ok 48 - callback also returns a Promise --- duration_ms: * type: 'test' @@ -420,7 +456,7 @@ not ok 42 - callback also returns a Promise code: 'ERR_TEST_FAILURE' ... # Subtest: callback throw -not ok 43 - callback throw +not ok 49 - callback throw --- duration_ms: * type: 'test' @@ -438,7 +474,7 @@ not ok 43 - callback throw * ... # Subtest: callback called twice -not ok 44 - callback called twice +not ok 50 - callback called twice --- duration_ms: * type: 'test' @@ -451,13 +487,13 @@ not ok 44 - callback called twice * ... # Subtest: callback called twice in different ticks -ok 45 - callback called twice in different ticks +ok 51 - callback called twice in different ticks --- duration_ms: * type: 'test' ... # Subtest: callback called twice in future tick -not ok 46 - callback called twice in future tick +not ok 52 - callback called twice in future tick --- duration_ms: * type: 'test' @@ -469,7 +505,7 @@ not ok 46 - callback called twice in future tick * ... 
# Subtest: callback async throw -not ok 47 - callback async throw +not ok 53 - callback async throw --- duration_ms: * type: 'test' @@ -482,13 +518,13 @@ not ok 47 - callback async throw * ... # Subtest: callback async throw after done -ok 48 - callback async throw after done +ok 54 - callback async throw after done --- duration_ms: * type: 'test' ... # Subtest: custom inspect symbol fail -not ok 49 - custom inspect symbol fail +not ok 55 - custom inspect symbol fail --- duration_ms: * type: 'test' @@ -498,7 +534,7 @@ not ok 49 - custom inspect symbol fail code: 'ERR_TEST_FAILURE' ... # Subtest: custom inspect symbol that throws fail -not ok 50 - custom inspect symbol that throws fail +not ok 56 - custom inspect symbol that throws fail --- duration_ms: * type: 'test' @@ -554,7 +590,7 @@ not ok 50 - custom inspect symbol that throws fail * ... 1..2 -not ok 51 - subtest sync throw fails +not ok 57 - subtest sync throw fails --- duration_ms: * type: 'suite' @@ -575,7 +611,7 @@ not ok 51 - subtest sync throw fails code: 'ERR_TEST_FAILURE' ... 1..1 -not ok 52 - describe sync throw fails +not ok 58 - describe sync throw fails --- duration_ms: * type: 'suite' @@ -607,7 +643,7 @@ not ok 52 - describe sync throw fails code: 'ERR_TEST_FAILURE' ... 1..1 -not ok 53 - describe async throw fails +not ok 59 - describe async throw fails --- duration_ms: * type: 'suite' @@ -663,7 +699,7 @@ not ok 53 - describe async throw fails type: 'test' ... 1..4 -not ok 54 - timeouts +not ok 60 - timeouts --- duration_ms: * type: 'suite' @@ -692,7 +728,7 @@ not ok 54 - timeouts * ... 1..2 -not ok 55 - successful thenable +not ok 61 - successful thenable --- duration_ms: * type: 'suite' @@ -702,7 +738,7 @@ not ok 55 - successful thenable code: 'ERR_TEST_FAILURE' ... # Subtest: rejected thenable -not ok 56 - rejected thenable +not ok 62 - rejected thenable --- duration_ms: * type: 'suite' @@ -740,13 +776,13 @@ not ok 56 - rejected thenable type: 'suite' ... 1..3 -ok 57 - async describe function +ok 63 - async describe function --- duration_ms: * type: 'suite' ... # Subtest: invalid subtest fail -not ok 58 - invalid subtest fail +not ok 64 - invalid subtest fail --- duration_ms: * type: 'test' @@ -757,18 +793,18 @@ not ok 58 - invalid subtest fail stack: |- * ... -1..58 +1..64 # Error: Test "unhandled rejection - passes but warns" at test/fixtures/test-runner/output/describe_it.js:(LINE):1 generated asynchronous activity after the test ended. This activity created the error "Error: rejected from unhandled rejection fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. # Error: Test "async unhandled rejection - passes but warns" at test/fixtures/test-runner/output/describe_it.js:(LINE):1 generated asynchronous activity after the test ended. This activity created the error "Error: rejected from async unhandled rejection fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. # Error: Test "immediate throw - passes but warns" at test/fixtures/test-runner/output/describe_it.js:(LINE):1 generated asynchronous activity after the test ended. This activity created the error "Error: thrown from immediate throw fail" and would have caused the test to fail, but instead triggered an uncaughtException event. # Error: Test "immediate reject - passes but warns" at test/fixtures/test-runner/output/describe_it.js:(LINE):1 generated asynchronous activity after the test ended. 
This activity created the error "Error: rejected from immediate reject fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. # Error: Test "callback called twice in different ticks" at test/fixtures/test-runner/output/describe_it.js:(LINE):1 generated asynchronous activity after the test ended. This activity created the error "Error [ERR_TEST_FAILURE]: callback invoked multiple times" and would have caused the test to fail, but instead triggered an uncaughtException event. # Error: Test "callback async throw after done" at test/fixtures/test-runner/output/describe_it.js:(LINE):1 generated asynchronous activity after the test ended. This activity created the error "Error: thrown from callback async throw after done" and would have caused the test to fail, but instead triggered an uncaughtException event. -# tests 67 +# tests 73 # suites 11 -# pass 31 +# pass 35 # fail 19 # cancelled 4 -# skipped 9 -# todo 4 +# skipped 10 +# todo 5 # duration_ms * diff --git a/test/fixtures/test-runner/output/dot_reporter.js b/test/fixtures/test-runner/output/dot_reporter.js index e9b8f5cead88f0..9e006f7c1a3552 100644 --- a/test/fixtures/test-runner/output/dot_reporter.js +++ b/test/fixtures/test-runner/output/dot_reporter.js @@ -3,5 +3,12 @@ require('../../../common'); const fixtures = require('../../../common/fixtures'); const spawn = require('node:child_process').spawn; -spawn(process.execPath, - ['--no-warnings', '--test-reporter', 'dot', fixtures.path('test-runner/output/output.js')], { stdio: 'inherit' }); +spawn( + process.execPath, + [ + '--no-warnings', + '--test-reporter', 'dot', + fixtures.path('test-runner/output/output.js'), + ], + { stdio: 'inherit' }, +); diff --git a/test/fixtures/test-runner/output/dot_reporter.snapshot b/test/fixtures/test-runner/output/dot_reporter.snapshot index c50cb99c16a0fa..1c6422e89f8d16 100644 --- a/test/fixtures/test-runner/output/dot_reporter.snapshot +++ b/test/fixtures/test-runner/output/dot_reporter.snapshot @@ -1,7 +1,8 @@ -..XX...X..XXX.X..... -XXX............X.... -.....X...XXX.XX..... -XXXXXXX...XXXXX +........XX...X..XXX. +X.....XXX........... 
+.X.........X...XXX.X +X.....XXXXXXX...XXXX +X Failed tests: diff --git a/test/fixtures/test-runner/output/junit_reporter.js b/test/fixtures/test-runner/output/junit_reporter.js index 5843ec6d236bd6..eee3d0f86eaf2d 100644 --- a/test/fixtures/test-runner/output/junit_reporter.js +++ b/test/fixtures/test-runner/output/junit_reporter.js @@ -3,9 +3,12 @@ require('../../../common'); const fixtures = require('../../../common/fixtures'); const spawn = require('node:child_process').spawn; -spawn(process.execPath, - [ - '--no-warnings', '--test-reporter', 'junit', - fixtures.path('test-runner/output/output.js'), - ], - { stdio: 'inherit' }); +spawn( + process.execPath, + [ + '--no-warnings', + '--test-reporter', 'junit', + fixtures.path('test-runner/output/output.js'), + ], + { stdio: 'inherit' }, +); diff --git a/test/fixtures/test-runner/output/junit_reporter.snapshot b/test/fixtures/test-runner/output/junit_reporter.snapshot index 78711141fa217b..a24dc3646e4a30 100644 --- a/test/fixtures/test-runner/output/junit_reporter.snapshot +++ b/test/fixtures/test-runner/output/junit_reporter.snapshot @@ -1,5 +1,15 @@ + + + + + + + + + + @@ -508,19 +518,19 @@ Error [ERR_TEST_FAILURE]: test could not be started because its parent finished } - - + + - - - - - + + + + + - + - - + + diff --git a/test/fixtures/test-runner/output/lcov_reporter.js b/test/fixtures/test-runner/output/lcov_reporter.js index a4669a85f2724f..832b5d2247965c 100644 --- a/test/fixtures/test-runner/output/lcov_reporter.js +++ b/test/fixtures/test-runner/output/lcov_reporter.js @@ -3,14 +3,14 @@ require('../../../common'); const fixtures = require('../../../common/fixtures'); const spawn = require('node:child_process').spawn; -spawn(process.execPath, - [ - '--no-warnings', - '--experimental-test-coverage', - '--test-coverage-exclude=!test/**', - '--test-reporter', - 'lcov', - fixtures.path('test-runner/output/output.js'), - ], - { stdio: 'inherit' }, +spawn( + process.execPath, + [ + '--no-warnings', + '--experimental-test-coverage', + '--test-coverage-exclude=!test/**', + '--test-reporter', 'lcov', + fixtures.path('test-runner/output/output.js'), + ], + { stdio: 'inherit' }, ); diff --git a/test/fixtures/test-runner/output/lcov_reporter.snapshot b/test/fixtures/test-runner/output/lcov_reporter.snapshot index 4dd0c0dc96a0de..fca250d2844291 100644 --- a/test/fixtures/test-runner/output/lcov_reporter.snapshot +++ b/test/fixtures/test-runner/output/lcov_reporter.snapshot @@ -3,110 +3,116 @@ SF:test/fixtures/test-runner/output/output.js FN:8,anonymous_0 FN:12,anonymous_1 FN:16,anonymous_2 -FN:21,anonymous_3 -FN:26,anonymous_4 -FN:30,anonymous_5 -FN:34,anonymous_6 -FN:38,anonymous_7 -FN:42,anonymous_8 -FN:46,anonymous_9 +FN:20,anonymous_3 +FN:24,anonymous_4 +FN:28,anonymous_5 +FN:32,anonymous_6 +FN:36,anonymous_7 +FN:40,anonymous_8 +FN:45,anonymous_9 FN:50,anonymous_10 FN:54,anonymous_11 -FN:59,anonymous_12 -FN:64,anonymous_13 -FN:68,anonymous_14 -FN:72,anonymous_15 -FN:76,anonymous_16 -FN:80,anonymous_17 -FN:81,anonymous_18 -FN:86,anonymous_19 -FN:87,anonymous_20 -FN:92,anonymous_21 -FN:93,anonymous_22 -FN:94,anonymous_23 -FN:100,anonymous_24 -FN:101,anonymous_25 -FN:107,anonymous_26 -FN:111,anonymous_27 -FN:112,anonymous_28 -FN:113,anonymous_29 -FN:114,anonymous_30 -FN:122,anonymous_31 -FN:123,anonymous_32 -FN:130,anonymous_33 -FN:131,anonymous_34 -FN:132,anonymous_35 -FN:140,anonymous_36 -FN:141,anonymous_37 -FN:142,anonymous_38 -FN:150,anonymous_39 -FN:151,anonymous_40 -FN:159,anonymous_41 -FN:160,anonymous_42 -FN:161,anonymous_43 
+FN:58,anonymous_12 +FN:62,anonymous_13 +FN:66,anonymous_14 +FN:70,anonymous_15 +FN:74,anonymous_16 +FN:78,anonymous_17 +FN:83,anonymous_18 +FN:88,anonymous_19 +FN:92,anonymous_20 +FN:96,anonymous_21 +FN:100,anonymous_22 +FN:104,anonymous_23 +FN:105,anonymous_24 +FN:110,anonymous_25 +FN:111,anonymous_26 +FN:116,anonymous_27 +FN:117,anonymous_28 +FN:118,anonymous_29 +FN:124,anonymous_30 +FN:125,anonymous_31 +FN:131,anonymous_32 +FN:135,anonymous_33 +FN:136,anonymous_34 +FN:137,anonymous_35 +FN:138,anonymous_36 +FN:146,anonymous_37 +FN:147,anonymous_38 +FN:154,anonymous_39 +FN:155,anonymous_40 +FN:156,anonymous_41 +FN:164,anonymous_42 +FN:165,anonymous_43 FN:166,anonymous_44 -FN:167,anonymous_45 -FN:171,anonymous_46 -FN:172,anonymous_47 -FN:173,anonymous_48 -FN:179,anonymous_49 -FN:183,anonymous_50 -FN:187,anonymous_51 -FN:195,functionOnly -FN:198,anonymous_53 -FN:213,functionAndOptions -FN:215,anonymous_55 -FN:219,anonymous_56 -FN:220,anonymous_57 -FN:225,anonymous_58 -FN:229,anonymous_59 -FN:233,anonymous_60 -FN:238,anonymous_61 -FN:242,anonymous_62 -FN:246,anonymous_63 -FN:251,anonymous_64 -FN:256,anonymous_65 +FN:174,anonymous_45 +FN:175,anonymous_46 +FN:183,anonymous_47 +FN:184,anonymous_48 +FN:185,anonymous_49 +FN:190,anonymous_50 +FN:191,anonymous_51 +FN:195,anonymous_52 +FN:196,anonymous_53 +FN:197,anonymous_54 +FN:203,anonymous_55 +FN:207,anonymous_56 +FN:211,anonymous_57 +FN:219,functionOnly +FN:222,anonymous_59 +FN:237,functionAndOptions +FN:239,anonymous_61 +FN:243,anonymous_62 +FN:244,anonymous_63 +FN:249,anonymous_64 +FN:253,anonymous_65 FN:257,anonymous_66 -FN:263,anonymous_67 -FN:264,anonymous_68 -FN:269,anonymous_69 -FN:270,anonymous_70 -FN:277,anonymous_71 -FN:287,anonymous_72 -FN:289,obj -FN:298,anonymous_74 -FN:300,obj -FN:309,anonymous_76 -FN:310,anonymous_77 -FN:313,anonymous_78 -FN:318,anonymous_79 -FN:319,anonymous_80 -FN:320,anonymous_81 -FN:327,anonymous_82 -FN:328,anonymous_83 -FN:335,anonymous_84 -FN:336,anonymous_85 -FN:341,anonymous_86 -FN:345,anonymous_87 -FN:348,get then -FN:351,anonymous_89 -FN:356,anonymous_90 -FN:359,get then -FN:362,anonymous_92 -FN:367,anonymous_93 -FN:368,anonymous_94 -FN:369,anonymous_95 -FN:373,anonymous_96 -FN:374,anonymous_97 -FN:375,anonymous_98 -FN:381,anonymous_99 -FN:385,anonymous_100 +FN:262,anonymous_67 +FN:266,anonymous_68 +FN:270,anonymous_69 +FN:275,anonymous_70 +FN:280,anonymous_71 +FN:281,anonymous_72 +FN:287,anonymous_73 +FN:288,anonymous_74 +FN:293,anonymous_75 +FN:294,anonymous_76 +FN:301,anonymous_77 +FN:311,anonymous_78 +FN:313,obj +FN:322,anonymous_80 +FN:324,obj +FN:333,anonymous_82 +FN:334,anonymous_83 +FN:337,anonymous_84 +FN:342,anonymous_85 +FN:343,anonymous_86 +FN:344,anonymous_87 +FN:351,anonymous_88 +FN:352,anonymous_89 +FN:359,anonymous_90 +FN:360,anonymous_91 +FN:365,anonymous_92 +FN:369,anonymous_93 +FN:372,get then +FN:375,anonymous_95 +FN:380,anonymous_96 +FN:383,get then +FN:386,anonymous_98 +FN:391,anonymous_99 +FN:392,anonymous_100 +FN:393,anonymous_101 +FN:397,anonymous_102 +FN:398,anonymous_103 +FN:399,anonymous_104 +FN:405,anonymous_105 +FN:409,anonymous_106 FNDA:1,anonymous_0 FNDA:1,anonymous_1 FNDA:1,anonymous_2 FNDA:1,anonymous_3 FNDA:1,anonymous_4 -FNDA:1,anonymous_5 +FNDA:0,anonymous_5 FNDA:1,anonymous_6 FNDA:1,anonymous_7 FNDA:1,anonymous_8 @@ -150,18 +156,18 @@ FNDA:1,anonymous_45 FNDA:1,anonymous_46 FNDA:1,anonymous_47 FNDA:1,anonymous_48 -FNDA:0,anonymous_49 -FNDA:0,anonymous_50 +FNDA:1,anonymous_49 +FNDA:1,anonymous_50 FNDA:1,anonymous_51 -FNDA:1,functionOnly +FNDA:1,anonymous_52 
FNDA:1,anonymous_53 -FNDA:0,functionAndOptions -FNDA:1,anonymous_55 -FNDA:1,anonymous_56 +FNDA:1,anonymous_54 +FNDA:0,anonymous_55 +FNDA:0,anonymous_56 FNDA:1,anonymous_57 -FNDA:1,anonymous_58 +FNDA:1,functionOnly FNDA:1,anonymous_59 -FNDA:1,anonymous_60 +FNDA:0,functionAndOptions FNDA:1,anonymous_61 FNDA:1,anonymous_62 FNDA:1,anonymous_63 @@ -174,141 +180,152 @@ FNDA:1,anonymous_69 FNDA:1,anonymous_70 FNDA:1,anonymous_71 FNDA:1,anonymous_72 -FNDA:1,obj +FNDA:1,anonymous_73 FNDA:1,anonymous_74 -FNDA:1,obj +FNDA:1,anonymous_75 FNDA:1,anonymous_76 FNDA:1,anonymous_77 FNDA:1,anonymous_78 -FNDA:1,anonymous_79 +FNDA:1,obj FNDA:1,anonymous_80 -FNDA:1,anonymous_81 +FNDA:1,obj FNDA:1,anonymous_82 FNDA:1,anonymous_83 FNDA:1,anonymous_84 FNDA:1,anonymous_85 FNDA:1,anonymous_86 FNDA:1,anonymous_87 -FNDA:1,get then +FNDA:1,anonymous_88 FNDA:1,anonymous_89 FNDA:1,anonymous_90 -FNDA:1,get then +FNDA:1,anonymous_91 FNDA:1,anonymous_92 FNDA:1,anonymous_93 -FNDA:1,anonymous_94 +FNDA:1,get then FNDA:1,anonymous_95 FNDA:1,anonymous_96 -FNDA:1,anonymous_97 +FNDA:1,get then FNDA:1,anonymous_98 FNDA:1,anonymous_99 FNDA:1,anonymous_100 -FNF:101 -FNH:98 +FNDA:1,anonymous_101 +FNDA:1,anonymous_102 +FNDA:1,anonymous_103 +FNDA:1,anonymous_104 +FNDA:1,anonymous_105 +FNDA:1,anonymous_106 +FNF:107 +FNH:103 BRDA:1,0,0,1 BRDA:8,1,0,1 BRDA:12,2,0,1 BRDA:16,3,0,1 -BRDA:21,4,0,1 -BRDA:26,5,0,1 -BRDA:30,6,0,1 -BRDA:34,7,0,1 -BRDA:38,8,0,1 -BRDA:42,9,0,1 -BRDA:46,10,0,1 -BRDA:50,11,0,1 -BRDA:54,12,0,1 -BRDA:59,13,0,1 -BRDA:64,14,0,1 -BRDA:68,15,0,1 -BRDA:72,16,0,1 -BRDA:76,17,0,1 -BRDA:80,18,0,1 -BRDA:81,19,0,1 -BRDA:86,20,0,1 -BRDA:87,21,0,1 -BRDA:92,22,0,1 -BRDA:93,23,0,1 -BRDA:94,24,0,1 -BRDA:100,25,0,1 -BRDA:101,26,0,1 -BRDA:107,27,0,1 -BRDA:111,28,0,1 -BRDA:112,29,0,1 -BRDA:113,30,0,1 -BRDA:114,31,0,1 -BRDA:122,32,0,1 -BRDA:123,33,0,1 -BRDA:130,34,0,1 -BRDA:131,35,0,1 -BRDA:132,36,0,1 -BRDA:140,37,0,1 -BRDA:141,38,0,1 -BRDA:142,39,0,1 -BRDA:150,40,0,1 -BRDA:151,41,0,1 -BRDA:159,42,0,1 -BRDA:160,43,0,1 -BRDA:161,44,0,1 -BRDA:166,45,0,1 -BRDA:167,46,0,1 -BRDA:171,47,0,1 -BRDA:172,48,0,1 -BRDA:173,49,0,1 -BRDA:187,50,0,1 -BRDA:195,51,0,1 -BRDA:198,52,0,1 -BRDA:215,53,0,1 -BRDA:219,54,0,1 -BRDA:220,55,0,1 -BRDA:225,56,0,1 -BRDA:229,57,0,1 -BRDA:233,58,0,1 -BRDA:238,59,0,1 -BRDA:242,60,0,1 -BRDA:246,61,0,1 -BRDA:251,62,0,1 -BRDA:256,63,0,1 -BRDA:257,64,0,1 -BRDA:263,65,0,1 -BRDA:264,66,0,1 -BRDA:269,67,0,1 -BRDA:270,68,0,1 -BRDA:277,69,0,1 +BRDA:20,4,0,1 +BRDA:24,5,0,1 +BRDA:32,6,0,1 +BRDA:36,7,0,1 +BRDA:40,8,0,1 +BRDA:45,9,0,1 +BRDA:50,10,0,1 +BRDA:54,11,0,1 +BRDA:58,12,0,1 +BRDA:62,13,0,1 +BRDA:66,14,0,1 +BRDA:70,15,0,1 +BRDA:74,16,0,1 +BRDA:78,17,0,1 +BRDA:83,18,0,1 +BRDA:88,19,0,1 +BRDA:92,20,0,1 +BRDA:96,21,0,1 +BRDA:100,22,0,1 +BRDA:104,23,0,1 +BRDA:105,24,0,1 +BRDA:110,25,0,1 +BRDA:111,26,0,1 +BRDA:116,27,0,1 +BRDA:117,28,0,1 +BRDA:118,29,0,1 +BRDA:124,30,0,1 +BRDA:125,31,0,1 +BRDA:131,32,0,1 +BRDA:135,33,0,1 +BRDA:136,34,0,1 +BRDA:137,35,0,1 +BRDA:138,36,0,1 +BRDA:146,37,0,1 +BRDA:147,38,0,1 +BRDA:154,39,0,1 +BRDA:155,40,0,1 +BRDA:156,41,0,1 +BRDA:164,42,0,1 +BRDA:165,43,0,1 +BRDA:166,44,0,1 +BRDA:174,45,0,1 +BRDA:175,46,0,1 +BRDA:183,47,0,1 +BRDA:184,48,0,1 +BRDA:185,49,0,1 +BRDA:190,50,0,1 +BRDA:191,51,0,1 +BRDA:195,52,0,1 +BRDA:196,53,0,1 +BRDA:197,54,0,1 +BRDA:211,55,0,1 +BRDA:219,56,0,1 +BRDA:222,57,0,1 +BRDA:239,58,0,1 +BRDA:243,59,0,1 +BRDA:244,60,0,1 +BRDA:249,61,0,1 +BRDA:253,62,0,1 +BRDA:257,63,0,1 +BRDA:262,64,0,1 +BRDA:266,65,0,1 +BRDA:270,66,0,1 +BRDA:275,67,0,1 +BRDA:280,68,0,1 +BRDA:281,69,0,1 
BRDA:287,70,0,1 -BRDA:289,71,0,1 -BRDA:298,72,0,1 -BRDA:300,73,0,1 -BRDA:309,74,0,1 -BRDA:310,75,0,1 +BRDA:288,71,0,1 +BRDA:293,72,0,1 +BRDA:294,73,0,1 +BRDA:301,74,0,1 +BRDA:311,75,0,1 BRDA:313,76,0,1 -BRDA:318,77,0,1 -BRDA:319,78,0,1 -BRDA:320,79,0,1 -BRDA:327,80,0,1 -BRDA:328,81,0,1 -BRDA:335,82,0,1 -BRDA:336,83,0,1 -BRDA:341,84,0,1 -BRDA:345,85,0,1 -BRDA:348,86,0,1 -BRDA:349,87,0,0 -BRDA:351,88,0,1 -BRDA:356,89,0,1 -BRDA:359,90,0,1 -BRDA:360,91,0,0 -BRDA:362,92,0,1 -BRDA:367,93,0,1 -BRDA:370,94,0,0 -BRDA:368,95,0,1 -BRDA:369,96,0,1 -BRDA:373,97,0,1 -BRDA:376,98,0,0 -BRDA:374,99,0,1 -BRDA:375,100,0,1 -BRDA:381,101,0,1 -BRDA:385,102,0,1 -BRF:103 -BRH:99 +BRDA:322,77,0,1 +BRDA:324,78,0,1 +BRDA:333,79,0,1 +BRDA:334,80,0,1 +BRDA:337,81,0,1 +BRDA:342,82,0,1 +BRDA:343,83,0,1 +BRDA:344,84,0,1 +BRDA:351,85,0,1 +BRDA:352,86,0,1 +BRDA:359,87,0,1 +BRDA:360,88,0,1 +BRDA:365,89,0,1 +BRDA:369,90,0,1 +BRDA:372,91,0,1 +BRDA:373,92,0,0 +BRDA:375,93,0,1 +BRDA:380,94,0,1 +BRDA:383,95,0,1 +BRDA:384,96,0,0 +BRDA:386,97,0,1 +BRDA:391,98,0,1 +BRDA:394,99,0,0 +BRDA:392,100,0,1 +BRDA:393,101,0,1 +BRDA:397,102,0,1 +BRDA:400,103,0,0 +BRDA:398,104,0,1 +BRDA:399,105,0,1 +BRDA:405,106,0,1 +BRDA:409,107,0,1 +BRF:108 +BRH:104 DA:1,1 DA:2,1 DA:3,1 @@ -337,7 +354,7 @@ DA:25,1 DA:26,1 DA:27,1 DA:28,1 -DA:29,1 +DA:29,0 DA:30,1 DA:31,1 DA:32,1 @@ -488,11 +505,11 @@ DA:176,1 DA:177,1 DA:178,1 DA:179,1 -DA:180,0 +DA:180,1 DA:181,1 DA:182,1 DA:183,1 -DA:184,0 +DA:184,1 DA:185,1 DA:186,1 DA:187,1 @@ -512,11 +529,11 @@ DA:200,1 DA:201,1 DA:202,1 DA:203,1 -DA:204,1 +DA:204,0 DA:205,1 DA:206,1 DA:207,1 -DA:208,1 +DA:208,0 DA:209,1 DA:210,1 DA:211,1 @@ -715,6 +732,30 @@ DA:403,1 DA:404,1 DA:405,1 DA:406,1 -LH:404 -LF:406 +DA:407,1 +DA:408,1 +DA:409,1 +DA:410,1 +DA:411,1 +DA:412,1 +DA:413,1 +DA:414,1 +DA:415,1 +DA:416,1 +DA:417,1 +DA:418,1 +DA:419,1 +DA:420,1 +DA:421,1 +DA:422,1 +DA:423,1 +DA:424,1 +DA:425,1 +DA:426,1 +DA:427,1 +DA:428,1 +DA:429,1 +DA:430,1 +LH:427 +LF:430 end_of_record diff --git a/test/fixtures/test-runner/output/output.js b/test/fixtures/test-runner/output/output.js index 766c93d0e3dbde..24a27a01791f21 100644 --- a/test/fixtures/test-runner/output/output.js +++ b/test/fixtures/test-runner/output/output.js @@ -5,6 +5,30 @@ const assert = require('node:assert'); const test = require('node:test'); const util = require('util'); +test.expectFailure('sync expect fail (method)', () => { + throw new Error('should pass'); +}); + +test('sync expect fail (options)', { expectFailure: true }, () => { + throw new Error('should pass'); +}); + +test.expectFailure('async expect fail (method)', async () => { + throw new Error('should pass'); +}); + +test('async expect fail (options)', { expectFailure: true }, async () => { + throw new Error('should pass'); +}); + +test.todo('sync todo with expect fail', { expectFailure: true }, () => { + throw new Error('should not count as an expected failure'); +}); + +test.skip('sync skip expect fail', { expectFailure: true }, () => { + throw new Error('should not fail'); +}); + test('sync pass todo', (t) => { t.todo(); }); diff --git a/test/fixtures/test-runner/output/output.snapshot b/test/fixtures/test-runner/output/output.snapshot index 044ac4137fa78d..8c84cd6feadd46 100644 --- a/test/fixtures/test-runner/output/output.snapshot +++ b/test/fixtures/test-runner/output/output.snapshot @@ -1,18 +1,54 @@ TAP version 13 +# Subtest: sync expect fail (method) +ok 1 - sync expect fail (method) # EXPECTED FAILURE + --- + duration_ms: * + type: 'test' + ... 
+# Subtest: sync expect fail (options) +ok 2 - sync expect fail (options) # EXPECTED FAILURE + --- + duration_ms: * + type: 'test' + ... +# Subtest: async expect fail (method) +ok 3 - async expect fail (method) # EXPECTED FAILURE + --- + duration_ms: * + type: 'test' + ... +# Subtest: async expect fail (options) +ok 4 - async expect fail (options) # EXPECTED FAILURE + --- + duration_ms: * + type: 'test' + ... +# Subtest: sync todo with expect fail +ok 5 - sync todo with expect fail # TODO + --- + duration_ms: * + type: 'test' + ... +# Subtest: sync skip expect fail +ok 6 - sync skip expect fail # SKIP + --- + duration_ms: * + type: 'test' + ... # Subtest: sync pass todo -ok 1 - sync pass todo # TODO +ok 7 - sync pass todo # TODO --- duration_ms: * type: 'test' ... # Subtest: sync pass todo with message -ok 2 - sync pass todo with message # TODO this is a passing todo +ok 8 - sync pass todo with message # TODO this is a passing todo --- duration_ms: * type: 'test' ... # Subtest: sync fail todo -not ok 3 - sync fail todo # TODO +not ok 9 - sync fail todo # TODO --- duration_ms: * type: 'test' @@ -30,7 +66,7 @@ not ok 3 - sync fail todo # TODO * ... # Subtest: sync fail todo with message -not ok 4 - sync fail todo with message # TODO this is a failing todo +not ok 10 - sync fail todo with message # TODO this is a failing todo --- duration_ms: * type: 'test' @@ -48,26 +84,26 @@ not ok 4 - sync fail todo with message # TODO this is a failing todo * ... # Subtest: sync skip pass -ok 5 - sync skip pass # SKIP +ok 11 - sync skip pass # SKIP --- duration_ms: * type: 'test' ... # Subtest: sync skip pass with message -ok 6 - sync skip pass with message # SKIP this is skipped +ok 12 - sync skip pass with message # SKIP this is skipped --- duration_ms: * type: 'test' ... # Subtest: sync pass -ok 7 - sync pass +ok 13 - sync pass --- duration_ms: * type: 'test' ... # this test should pass # Subtest: sync throw fail -not ok 8 - sync throw fail +not ok 14 - sync throw fail --- duration_ms: * type: 'test' @@ -85,19 +121,19 @@ not ok 8 - sync throw fail * ... # Subtest: async skip pass -ok 9 - async skip pass # SKIP +ok 15 - async skip pass # SKIP --- duration_ms: * type: 'test' ... # Subtest: async pass -ok 10 - async pass +ok 16 - async pass --- duration_ms: * type: 'test' ... # Subtest: async throw fail -not ok 11 - async throw fail +not ok 17 - async throw fail --- duration_ms: * type: 'test' @@ -115,7 +151,7 @@ not ok 11 - async throw fail * ... # Subtest: async skip fail -not ok 12 - async skip fail # SKIP +not ok 18 - async skip fail # SKIP --- duration_ms: * type: 'test' @@ -133,7 +169,7 @@ not ok 12 - async skip fail # SKIP * ... # Subtest: async assertion fail -not ok 13 - async assertion fail +not ok 19 - async assertion fail --- duration_ms: * type: 'test' @@ -159,13 +195,13 @@ not ok 13 - async assertion fail * ... # Subtest: resolve pass -ok 14 - resolve pass +ok 20 - resolve pass --- duration_ms: * type: 'test' ... # Subtest: reject fail -not ok 15 - reject fail +not ok 21 - reject fail --- duration_ms: * type: 'test' @@ -183,31 +219,31 @@ not ok 15 - reject fail * ... # Subtest: unhandled rejection - passes but warns -ok 16 - unhandled rejection - passes but warns +ok 22 - unhandled rejection - passes but warns --- duration_ms: * type: 'test' ... # Subtest: async unhandled rejection - passes but warns -ok 17 - async unhandled rejection - passes but warns +ok 23 - async unhandled rejection - passes but warns --- duration_ms: * type: 'test' ... 
# Subtest: immediate throw - passes but warns -ok 18 - immediate throw - passes but warns +ok 24 - immediate throw - passes but warns --- duration_ms: * type: 'test' ... # Subtest: immediate reject - passes but warns -ok 19 - immediate reject - passes but warns +ok 25 - immediate reject - passes but warns --- duration_ms: * type: 'test' ... # Subtest: immediate resolve pass -ok 20 - immediate resolve pass +ok 26 - immediate resolve pass --- duration_ms: * type: 'test' @@ -236,7 +272,7 @@ ok 20 - immediate resolve pass ... # this subtest should make its parent test fail 1..1 -not ok 21 - subtest sync throw fail +not ok 27 - subtest sync throw fail --- duration_ms: * type: 'test' @@ -246,7 +282,7 @@ not ok 21 - subtest sync throw fail code: 'ERR_TEST_FAILURE' ... # Subtest: sync throw non-error fail -not ok 22 - sync throw non-error fail +not ok 28 - sync throw non-error fail --- duration_ms: * type: 'test' @@ -281,7 +317,7 @@ not ok 22 - sync throw non-error fail type: 'test' ... 1..4 -ok 23 - level 0a +ok 29 - level 0a --- duration_ms: * type: 'test' @@ -307,31 +343,31 @@ ok 23 - level 0a type: 'test' ... 1..2 -ok 24 - top level +ok 30 - top level --- duration_ms: * type: 'test' ... # Subtest: invalid subtest - pass but subtest fails -ok 25 - invalid subtest - pass but subtest fails +ok 31 - invalid subtest - pass but subtest fails --- duration_ms: * type: 'test' ... # Subtest: sync skip option -ok 26 - sync skip option # SKIP +ok 32 - sync skip option # SKIP --- duration_ms: * type: 'test' ... # Subtest: sync skip option with message -ok 27 - sync skip option with message # SKIP this is skipped +ok 33 - sync skip option with message # SKIP this is skipped --- duration_ms: * type: 'test' ... # Subtest: sync skip option is false fail -not ok 28 - sync skip option is false fail +not ok 34 - sync skip option is false fail --- duration_ms: * type: 'test' @@ -349,61 +385,61 @@ not ok 28 - sync skip option is false fail * ... # Subtest: -ok 29 - +ok 35 - --- duration_ms: * type: 'test' ... # Subtest: functionOnly -ok 30 - functionOnly +ok 36 - functionOnly --- duration_ms: * type: 'test' ... # Subtest: -ok 31 - +ok 37 - --- duration_ms: * type: 'test' ... # Subtest: test with only a name provided -ok 32 - test with only a name provided +ok 38 - test with only a name provided --- duration_ms: * type: 'test' ... # Subtest: -ok 33 - +ok 39 - --- duration_ms: * type: 'test' ... # Subtest: -ok 34 - # SKIP +ok 40 - # SKIP --- duration_ms: * type: 'test' ... # Subtest: test with a name and options provided -ok 35 - test with a name and options provided # SKIP +ok 41 - test with a name and options provided # SKIP --- duration_ms: * type: 'test' ... # Subtest: functionAndOptions -ok 36 - functionAndOptions # SKIP +ok 42 - functionAndOptions # SKIP --- duration_ms: * type: 'test' ... # Subtest: callback pass -ok 37 - callback pass +ok 43 - callback pass --- duration_ms: * type: 'test' ... # Subtest: callback fail -not ok 38 - callback fail +not ok 44 - callback fail --- duration_ms: * type: 'test' @@ -416,25 +452,25 @@ not ok 38 - callback fail * ... # Subtest: sync t is this in test -ok 39 - sync t is this in test +ok 45 - sync t is this in test --- duration_ms: * type: 'test' ... # Subtest: async t is this in test -ok 40 - async t is this in test +ok 46 - async t is this in test --- duration_ms: * type: 'test' ... # Subtest: callback t is this in test -ok 41 - callback t is this in test +ok 47 - callback t is this in test --- duration_ms: * type: 'test' ... 
# Subtest: callback also returns a Promise -not ok 42 - callback also returns a Promise +not ok 48 - callback also returns a Promise --- duration_ms: * type: 'test' @@ -444,7 +480,7 @@ not ok 42 - callback also returns a Promise code: 'ERR_TEST_FAILURE' ... # Subtest: callback throw -not ok 43 - callback throw +not ok 49 - callback throw --- duration_ms: * type: 'test' @@ -462,7 +498,7 @@ not ok 43 - callback throw * ... # Subtest: callback called twice -not ok 44 - callback called twice +not ok 50 - callback called twice --- duration_ms: * type: 'test' @@ -475,13 +511,13 @@ not ok 44 - callback called twice * ... # Subtest: callback called twice in different ticks -ok 45 - callback called twice in different ticks +ok 51 - callback called twice in different ticks --- duration_ms: * type: 'test' ... # Subtest: callback called twice in future tick -not ok 46 - callback called twice in future tick +not ok 52 - callback called twice in future tick --- duration_ms: * type: 'test' @@ -493,7 +529,7 @@ not ok 46 - callback called twice in future tick * ... # Subtest: callback async throw -not ok 47 - callback async throw +not ok 53 - callback async throw --- duration_ms: * type: 'test' @@ -506,7 +542,7 @@ not ok 47 - callback async throw * ... # Subtest: callback async throw after done -ok 48 - callback async throw after done +ok 54 - callback async throw after done --- duration_ms: * type: 'test' @@ -531,13 +567,13 @@ ok 48 - callback async throw after done type: 'test' ... 1..3 -ok 49 - only is set on subtests but not in only mode +ok 55 - only is set on subtests but not in only mode --- duration_ms: * type: 'test' ... # Subtest: custom inspect symbol fail -not ok 50 - custom inspect symbol fail +not ok 56 - custom inspect symbol fail --- duration_ms: * type: 'test' @@ -547,7 +583,7 @@ not ok 50 - custom inspect symbol fail code: 'ERR_TEST_FAILURE' ... # Subtest: custom inspect symbol that throws fail -not ok 51 - custom inspect symbol that throws fail +not ok 57 - custom inspect symbol that throws fail --- duration_ms: * type: 'test' @@ -602,7 +638,7 @@ not ok 51 - custom inspect symbol that throws fail * ... 1..2 -not ok 52 - subtest sync throw fails +not ok 58 - subtest sync throw fails --- duration_ms: * type: 'test' @@ -612,7 +648,7 @@ not ok 52 - subtest sync throw fails code: 'ERR_TEST_FAILURE' ... # Subtest: timed out async test -not ok 53 - timed out async test +not ok 59 - timed out async test --- duration_ms: * type: 'test' @@ -622,7 +658,7 @@ not ok 53 - timed out async test code: 'ERR_TEST_FAILURE' ... # Subtest: timed out callback test -not ok 54 - timed out callback test +not ok 60 - timed out callback test --- duration_ms: * type: 'test' @@ -632,25 +668,25 @@ not ok 54 - timed out callback test code: 'ERR_TEST_FAILURE' ... # Subtest: large timeout async test is ok -ok 55 - large timeout async test is ok +ok 61 - large timeout async test is ok --- duration_ms: * type: 'test' ... # Subtest: large timeout callback test is ok -ok 56 - large timeout callback test is ok +ok 62 - large timeout callback test is ok --- duration_ms: * type: 'test' ... # Subtest: successful thenable -ok 57 - successful thenable +ok 63 - successful thenable --- duration_ms: * type: 'test' ... # Subtest: rejected thenable -not ok 58 - rejected thenable +not ok 64 - rejected thenable --- duration_ms: * type: 'test' @@ -660,7 +696,7 @@ not ok 58 - rejected thenable code: 'ERR_TEST_FAILURE' ... 
# Subtest: unfinished test with uncaughtException -not ok 59 - unfinished test with uncaughtException +not ok 65 - unfinished test with uncaughtException --- duration_ms: * type: 'test' @@ -674,7 +710,7 @@ not ok 59 - unfinished test with uncaughtException * ... # Subtest: unfinished test with unhandledRejection -not ok 60 - unfinished test with unhandledRejection +not ok 66 - unfinished test with unhandledRejection --- duration_ms: * type: 'test' @@ -688,7 +724,7 @@ not ok 60 - unfinished test with unhandledRejection * ... # Subtest: assertion errors display actual and expected properly -not ok 61 - assertion errors display actual and expected properly +not ok 67 - assertion errors display actual and expected properly --- duration_ms: * type: 'test' @@ -758,7 +794,7 @@ not ok 61 - assertion errors display actual and expected properly * ... # Subtest: invalid subtest fail -not ok 62 - invalid subtest fail +not ok 68 - invalid subtest fail --- duration_ms: * type: 'test' @@ -769,7 +805,7 @@ not ok 62 - invalid subtest fail stack: |- * ... -1..62 +1..68 # Error: Test "unhandled rejection - passes but warns" at test/fixtures/test-runner/output/output.js:(LINE):1 generated asynchronous activity after the test ended. This activity created the error "Error: rejected from unhandled rejection fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. # Error: Test "async unhandled rejection - passes but warns" at test/fixtures/test-runner/output/output.js:(LINE):1 generated asynchronous activity after the test ended. This activity created the error "Error: rejected from async unhandled rejection fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. # Error: A resource generated asynchronous activity after the test ended. This activity created the error "Error: uncaught from outside of a test" which triggered an uncaughtException event, caught by the test runner. @@ -777,11 +813,11 @@ not ok 62 - invalid subtest fail # Error: Test "immediate reject - passes but warns" at test/fixtures/test-runner/output/output.js:(LINE):1 generated asynchronous activity after the test ended. This activity created the error "Error: rejected from immediate reject fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. # Error: Test "callback called twice in different ticks" at test/fixtures/test-runner/output/output.js:(LINE):1 generated asynchronous activity after the test ended. This activity created the error "Error [ERR_TEST_FAILURE]: callback invoked multiple times" and would have caused the test to fail, but instead triggered an uncaughtException event. # Error: Test "callback async throw after done" at test/fixtures/test-runner/output/output.js:(LINE):1 generated asynchronous activity after the test ended. This activity created the error "Error: thrown from callback async throw after done" and would have caused the test to fail, but instead triggered an uncaughtException event. 
-# tests 75 +# tests 81 # suites 0 -# pass 36 +# pass 40 # fail 24 # cancelled 2 -# skipped 9 -# todo 4 +# skipped 10 +# todo 5 # duration_ms * diff --git a/test/fixtures/test-runner/output/output_cli.js b/test/fixtures/test-runner/output/output_cli.js index a36c099b7363a2..c5ac9399b77aa4 100644 --- a/test/fixtures/test-runner/output/output_cli.js +++ b/test/fixtures/test-runner/output/output_cli.js @@ -3,10 +3,14 @@ require('../../../common'); const fixtures = require('../../../common/fixtures'); const spawn = require('node:child_process').spawn; -spawn(process.execPath, - [ - '--no-warnings', '--test', '--test-reporter', 'tap', - fixtures.path('test-runner/output/output.js'), - fixtures.path('test-runner/output/single.js'), - ], - { stdio: 'inherit' }); +spawn( + process.execPath, + [ + '--no-warnings', + '--test-reporter', 'tap', + '--test', + fixtures.path('test-runner/output/output.js'), + fixtures.path('test-runner/output/single.js'), + ], + { stdio: 'inherit' }, +); diff --git a/test/fixtures/test-runner/output/output_cli.snapshot b/test/fixtures/test-runner/output/output_cli.snapshot index eaa085d97d06d1..8b75d4c8d89bb7 100644 --- a/test/fixtures/test-runner/output/output_cli.snapshot +++ b/test/fixtures/test-runner/output/output_cli.snapshot @@ -1,18 +1,54 @@ TAP version 13 +# Subtest: sync expect fail (method) +ok 1 - sync expect fail (method) # EXPECTED FAILURE + --- + duration_ms: * + type: 'test' + ... +# Subtest: sync expect fail (options) +ok 2 - sync expect fail (options) # EXPECTED FAILURE + --- + duration_ms: * + type: 'test' + ... +# Subtest: async expect fail (method) +ok 3 - async expect fail (method) # EXPECTED FAILURE + --- + duration_ms: * + type: 'test' + ... +# Subtest: async expect fail (options) +ok 4 - async expect fail (options) # EXPECTED FAILURE + --- + duration_ms: * + type: 'test' + ... +# Subtest: sync todo with expect fail +ok 5 - sync todo with expect fail # TODO + --- + duration_ms: * + type: 'test' + ... +# Subtest: sync skip expect fail +ok 6 - sync skip expect fail # SKIP + --- + duration_ms: * + type: 'test' + ... # Subtest: sync pass todo -ok 1 - sync pass todo # TODO +ok 7 - sync pass todo # TODO --- duration_ms: * type: 'test' ... # Subtest: sync pass todo with message -ok 2 - sync pass todo with message # TODO this is a passing todo +ok 8 - sync pass todo with message # TODO this is a passing todo --- duration_ms: * type: 'test' ... # Subtest: sync fail todo -not ok 3 - sync fail todo # TODO +not ok 9 - sync fail todo # TODO --- duration_ms: * type: 'test' @@ -30,7 +66,7 @@ not ok 3 - sync fail todo # TODO * ... # Subtest: sync fail todo with message -not ok 4 - sync fail todo with message # TODO this is a failing todo +not ok 10 - sync fail todo with message # TODO this is a failing todo --- duration_ms: * type: 'test' @@ -48,26 +84,26 @@ not ok 4 - sync fail todo with message # TODO this is a failing todo * ... # Subtest: sync skip pass -ok 5 - sync skip pass # SKIP +ok 11 - sync skip pass # SKIP --- duration_ms: * type: 'test' ... # Subtest: sync skip pass with message -ok 6 - sync skip pass with message # SKIP this is skipped +ok 12 - sync skip pass with message # SKIP this is skipped --- duration_ms: * type: 'test' ... # Subtest: sync pass -ok 7 - sync pass +ok 13 - sync pass --- duration_ms: * type: 'test' ... # this test should pass # Subtest: sync throw fail -not ok 8 - sync throw fail +not ok 14 - sync throw fail --- duration_ms: * type: 'test' @@ -85,19 +121,19 @@ not ok 8 - sync throw fail * ... 
# Subtest: async skip pass -ok 9 - async skip pass # SKIP +ok 15 - async skip pass # SKIP --- duration_ms: * type: 'test' ... # Subtest: async pass -ok 10 - async pass +ok 16 - async pass --- duration_ms: * type: 'test' ... # Subtest: async throw fail -not ok 11 - async throw fail +not ok 17 - async throw fail --- duration_ms: * type: 'test' @@ -115,7 +151,7 @@ not ok 11 - async throw fail * ... # Subtest: async skip fail -not ok 12 - async skip fail # SKIP +not ok 18 - async skip fail # SKIP --- duration_ms: * type: 'test' @@ -133,7 +169,7 @@ not ok 12 - async skip fail # SKIP * ... # Subtest: async assertion fail -not ok 13 - async assertion fail +not ok 19 - async assertion fail --- duration_ms: * type: 'test' @@ -159,13 +195,13 @@ not ok 13 - async assertion fail * ... # Subtest: resolve pass -ok 14 - resolve pass +ok 20 - resolve pass --- duration_ms: * type: 'test' ... # Subtest: reject fail -not ok 15 - reject fail +not ok 21 - reject fail --- duration_ms: * type: 'test' @@ -183,31 +219,31 @@ not ok 15 - reject fail * ... # Subtest: unhandled rejection - passes but warns -ok 16 - unhandled rejection - passes but warns +ok 22 - unhandled rejection - passes but warns --- duration_ms: * type: 'test' ... # Subtest: async unhandled rejection - passes but warns -ok 17 - async unhandled rejection - passes but warns +ok 23 - async unhandled rejection - passes but warns --- duration_ms: * type: 'test' ... # Subtest: immediate throw - passes but warns -ok 18 - immediate throw - passes but warns +ok 24 - immediate throw - passes but warns --- duration_ms: * type: 'test' ... # Subtest: immediate reject - passes but warns -ok 19 - immediate reject - passes but warns +ok 25 - immediate reject - passes but warns --- duration_ms: * type: 'test' ... # Subtest: immediate resolve pass -ok 20 - immediate resolve pass +ok 26 - immediate resolve pass --- duration_ms: * type: 'test' @@ -236,7 +272,7 @@ ok 20 - immediate resolve pass ... # this subtest should make its parent test fail 1..1 -not ok 21 - subtest sync throw fail +not ok 27 - subtest sync throw fail --- duration_ms: * type: 'test' @@ -246,7 +282,7 @@ not ok 21 - subtest sync throw fail code: 'ERR_TEST_FAILURE' ... # Subtest: sync throw non-error fail -not ok 22 - sync throw non-error fail +not ok 28 - sync throw non-error fail --- duration_ms: * type: 'test' @@ -281,7 +317,7 @@ not ok 22 - sync throw non-error fail type: 'test' ... 1..4 -ok 23 - level 0a +ok 29 - level 0a --- duration_ms: * type: 'test' @@ -307,31 +343,31 @@ ok 23 - level 0a type: 'test' ... 1..2 -ok 24 - top level +ok 30 - top level --- duration_ms: * type: 'test' ... # Subtest: invalid subtest - pass but subtest fails -ok 25 - invalid subtest - pass but subtest fails +ok 31 - invalid subtest - pass but subtest fails --- duration_ms: * type: 'test' ... # Subtest: sync skip option -ok 26 - sync skip option # SKIP +ok 32 - sync skip option # SKIP --- duration_ms: * type: 'test' ... # Subtest: sync skip option with message -ok 27 - sync skip option with message # SKIP this is skipped +ok 33 - sync skip option with message # SKIP this is skipped --- duration_ms: * type: 'test' ... # Subtest: sync skip option is false fail -not ok 28 - sync skip option is false fail +not ok 34 - sync skip option is false fail --- duration_ms: * type: 'test' @@ -349,61 +385,61 @@ not ok 28 - sync skip option is false fail * ... # Subtest: -ok 29 - +ok 35 - --- duration_ms: * type: 'test' ... # Subtest: functionOnly -ok 30 - functionOnly +ok 36 - functionOnly --- duration_ms: * type: 'test' ... 
# Subtest: -ok 31 - +ok 37 - --- duration_ms: * type: 'test' ... # Subtest: test with only a name provided -ok 32 - test with only a name provided +ok 38 - test with only a name provided --- duration_ms: * type: 'test' ... # Subtest: -ok 33 - +ok 39 - --- duration_ms: * type: 'test' ... # Subtest: -ok 34 - # SKIP +ok 40 - # SKIP --- duration_ms: * type: 'test' ... # Subtest: test with a name and options provided -ok 35 - test with a name and options provided # SKIP +ok 41 - test with a name and options provided # SKIP --- duration_ms: * type: 'test' ... # Subtest: functionAndOptions -ok 36 - functionAndOptions # SKIP +ok 42 - functionAndOptions # SKIP --- duration_ms: * type: 'test' ... # Subtest: callback pass -ok 37 - callback pass +ok 43 - callback pass --- duration_ms: * type: 'test' ... # Subtest: callback fail -not ok 38 - callback fail +not ok 44 - callback fail --- duration_ms: * type: 'test' @@ -416,25 +452,25 @@ not ok 38 - callback fail * ... # Subtest: sync t is this in test -ok 39 - sync t is this in test +ok 45 - sync t is this in test --- duration_ms: * type: 'test' ... # Subtest: async t is this in test -ok 40 - async t is this in test +ok 46 - async t is this in test --- duration_ms: * type: 'test' ... # Subtest: callback t is this in test -ok 41 - callback t is this in test +ok 47 - callback t is this in test --- duration_ms: * type: 'test' ... # Subtest: callback also returns a Promise -not ok 42 - callback also returns a Promise +not ok 48 - callback also returns a Promise --- duration_ms: * type: 'test' @@ -444,7 +480,7 @@ not ok 42 - callback also returns a Promise code: 'ERR_TEST_FAILURE' ... # Subtest: callback throw -not ok 43 - callback throw +not ok 49 - callback throw --- duration_ms: * type: 'test' @@ -462,7 +498,7 @@ not ok 43 - callback throw * ... # Subtest: callback called twice -not ok 44 - callback called twice +not ok 50 - callback called twice --- duration_ms: * type: 'test' @@ -475,13 +511,13 @@ not ok 44 - callback called twice * ... # Subtest: callback called twice in different ticks -ok 45 - callback called twice in different ticks +ok 51 - callback called twice in different ticks --- duration_ms: * type: 'test' ... # Subtest: callback called twice in future tick -not ok 46 - callback called twice in future tick +not ok 52 - callback called twice in future tick --- duration_ms: * type: 'test' @@ -493,7 +529,7 @@ not ok 46 - callback called twice in future tick * ... # Subtest: callback async throw -not ok 47 - callback async throw +not ok 53 - callback async throw --- duration_ms: * type: 'test' @@ -506,7 +542,7 @@ not ok 47 - callback async throw * ... # Subtest: callback async throw after done -ok 48 - callback async throw after done +ok 54 - callback async throw after done --- duration_ms: * type: 'test' @@ -539,13 +575,13 @@ ok 48 - callback async throw after done type: 'test' ... 1..4 -ok 49 - only is set on subtests but not in only mode +ok 55 - only is set on subtests but not in only mode --- duration_ms: * type: 'test' ... # Subtest: custom inspect symbol fail -not ok 50 - custom inspect symbol fail +not ok 56 - custom inspect symbol fail --- duration_ms: * type: 'test' @@ -555,7 +591,7 @@ not ok 50 - custom inspect symbol fail code: 'ERR_TEST_FAILURE' ... # Subtest: custom inspect symbol that throws fail -not ok 51 - custom inspect symbol that throws fail +not ok 57 - custom inspect symbol that throws fail --- duration_ms: * type: 'test' @@ -610,7 +646,7 @@ not ok 51 - custom inspect symbol that throws fail * ... 
1..2 -not ok 52 - subtest sync throw fails +not ok 58 - subtest sync throw fails --- duration_ms: * type: 'test' @@ -620,7 +656,7 @@ not ok 52 - subtest sync throw fails code: 'ERR_TEST_FAILURE' ... # Subtest: timed out async test -not ok 53 - timed out async test +not ok 59 - timed out async test --- duration_ms: * type: 'test' @@ -630,7 +666,7 @@ not ok 53 - timed out async test code: 'ERR_TEST_FAILURE' ... # Subtest: timed out callback test -not ok 54 - timed out callback test +not ok 60 - timed out callback test --- duration_ms: * type: 'test' @@ -640,25 +676,25 @@ not ok 54 - timed out callback test code: 'ERR_TEST_FAILURE' ... # Subtest: large timeout async test is ok -ok 55 - large timeout async test is ok +ok 61 - large timeout async test is ok --- duration_ms: * type: 'test' ... # Subtest: large timeout callback test is ok -ok 56 - large timeout callback test is ok +ok 62 - large timeout callback test is ok --- duration_ms: * type: 'test' ... # Subtest: successful thenable -ok 57 - successful thenable +ok 63 - successful thenable --- duration_ms: * type: 'test' ... # Subtest: rejected thenable -not ok 58 - rejected thenable +not ok 64 - rejected thenable --- duration_ms: * type: 'test' @@ -668,7 +704,7 @@ not ok 58 - rejected thenable code: 'ERR_TEST_FAILURE' ... # Subtest: unfinished test with uncaughtException -not ok 59 - unfinished test with uncaughtException +not ok 65 - unfinished test with uncaughtException --- duration_ms: * type: 'test' @@ -682,7 +718,7 @@ not ok 59 - unfinished test with uncaughtException * ... # Subtest: unfinished test with unhandledRejection -not ok 60 - unfinished test with unhandledRejection +not ok 66 - unfinished test with unhandledRejection --- duration_ms: * type: 'test' @@ -696,7 +732,7 @@ not ok 60 - unfinished test with unhandledRejection * ... # Subtest: assertion errors display actual and expected properly -not ok 61 - assertion errors display actual and expected properly +not ok 67 - assertion errors display actual and expected properly --- duration_ms: * type: 'test' @@ -766,7 +802,7 @@ not ok 61 - assertion errors display actual and expected properly * ... # Subtest: invalid subtest fail -not ok 62 - invalid subtest fail +not ok 68 - invalid subtest fail --- duration_ms: * type: 'test' @@ -785,17 +821,17 @@ not ok 62 - invalid subtest fail # Error: Test "callback called twice in different ticks" at test/fixtures/test-runner/output/output.js:(LINE):1 generated asynchronous activity after the test ended. This activity created the error "Error [ERR_TEST_FAILURE]: callback invoked multiple times" and would have caused the test to fail, but instead triggered an uncaughtException event. # Error: Test "callback async throw after done" at test/fixtures/test-runner/output/output.js:(LINE):1 generated asynchronous activity after the test ended. This activity created the error "Error: thrown from callback async throw after done" and would have caused the test to fail, but instead triggered an uncaughtException event. # Subtest: last test -ok 63 - last test +ok 69 - last test --- duration_ms: * type: 'test' ... 
-1..63 -# tests 77 +1..69 +# tests 83 # suites 0 -# pass 38 +# pass 42 # fail 24 # cancelled 2 -# skipped 9 -# todo 4 +# skipped 10 +# todo 5 # duration_ms * diff --git a/test/fixtures/test-runner/output/spec_reporter.js b/test/fixtures/test-runner/output/spec_reporter.js index 46e18b1ca8630d..5edcb6e8d9c8df 100644 --- a/test/fixtures/test-runner/output/spec_reporter.js +++ b/test/fixtures/test-runner/output/spec_reporter.js @@ -3,9 +3,16 @@ require('../../../common'); const fixtures = require('../../../common/fixtures'); const spawn = require('node:child_process').spawn; -const child = spawn(process.execPath, - ['--no-warnings', '--test-reporter', 'spec', fixtures.path('test-runner/output/output.js')], - { stdio: 'pipe' }); +const child = spawn( + process.execPath, + [ + '--no-warnings', + '--test-reporter', 'spec', + fixtures.path('test-runner/output/output.js'), + ], + { stdio: 'pipe' }, +); + // eslint-disable-next-line no-control-regex child.stdout.on('data', (d) => process.stdout.write(d.toString().replace(/[^\x00-\x7F]/g, '').replace(/\u001b\[\d+m/g, ''))); child.stderr.pipe(process.stderr); diff --git a/test/fixtures/test-runner/output/spec_reporter.snapshot b/test/fixtures/test-runner/output/spec_reporter.snapshot index 749f0dce43a450..70adcbcf0e2ef2 100644 --- a/test/fixtures/test-runner/output/spec_reporter.snapshot +++ b/test/fixtures/test-runner/output/spec_reporter.snapshot @@ -1,3 +1,9 @@ + sync expect fail (method) (*ms) # EXPECTED FAILURE + sync expect fail (options) (*ms) # EXPECTED FAILURE + async expect fail (method) (*ms) # EXPECTED FAILURE + async expect fail (options) (*ms) # EXPECTED FAILURE + sync todo with expect fail (*ms) # TODO + sync skip expect fail (*ms) # SKIP sync pass todo (*ms) # TODO sync pass todo with message (*ms) # this is a passing todo sync fail todo (*ms) # TODO @@ -81,20 +87,20 @@ unfinished test with unhandledRejection (*ms) assertion errors display actual and expected properly (*ms) invalid subtest fail (*ms) - Error: Test "unhandled rejection - passes but warns" at test/fixtures/test-runner/output/output.js:72:1 generated asynchronous activity after the test ended. This activity created the error "Error: rejected from unhandled rejection fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. - Error: Test "async unhandled rejection - passes but warns" at test/fixtures/test-runner/output/output.js:76:1 generated asynchronous activity after the test ended. This activity created the error "Error: rejected from async unhandled rejection fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. + Error: Test "unhandled rejection - passes but warns" at test/fixtures/test-runner/output/output.js:96:1 generated asynchronous activity after the test ended. This activity created the error "Error: rejected from unhandled rejection fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. + Error: Test "async unhandled rejection - passes but warns" at test/fixtures/test-runner/output/output.js:100:1 generated asynchronous activity after the test ended. This activity created the error "Error: rejected from async unhandled rejection fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. Error: A resource generated asynchronous activity after the test ended. This activity created the error "Error: uncaught from outside of a test" which triggered an uncaughtException event, caught by the test runner. 
- Error: Test "immediate throw - passes but warns" at test/fixtures/test-runner/output/output.js:80:1 generated asynchronous activity after the test ended. This activity created the error "Error: thrown from immediate throw fail" and would have caused the test to fail, but instead triggered an uncaughtException event. - Error: Test "immediate reject - passes but warns" at test/fixtures/test-runner/output/output.js:86:1 generated asynchronous activity after the test ended. This activity created the error "Error: rejected from immediate reject fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. - Error: Test "callback called twice in different ticks" at test/fixtures/test-runner/output/output.js:251:1 generated asynchronous activity after the test ended. This activity created the error "Error [ERR_TEST_FAILURE]: callback invoked multiple times" and would have caused the test to fail, but instead triggered an uncaughtException event. - Error: Test "callback async throw after done" at test/fixtures/test-runner/output/output.js:269:1 generated asynchronous activity after the test ended. This activity created the error "Error: thrown from callback async throw after done" and would have caused the test to fail, but instead triggered an uncaughtException event. - tests 75 + Error: Test "immediate throw - passes but warns" at test/fixtures/test-runner/output/output.js:104:1 generated asynchronous activity after the test ended. This activity created the error "Error: thrown from immediate throw fail" and would have caused the test to fail, but instead triggered an uncaughtException event. + Error: Test "immediate reject - passes but warns" at test/fixtures/test-runner/output/output.js:110:1 generated asynchronous activity after the test ended. This activity created the error "Error: rejected from immediate reject fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. + Error: Test "callback called twice in different ticks" at test/fixtures/test-runner/output/output.js:275:1 generated asynchronous activity after the test ended. This activity created the error "Error [ERR_TEST_FAILURE]: callback invoked multiple times" and would have caused the test to fail, but instead triggered an uncaughtException event. + Error: Test "callback async throw after done" at test/fixtures/test-runner/output/output.js:293:1 generated asynchronous activity after the test ended. This activity created the error "Error: thrown from callback async throw after done" and would have caused the test to fail, but instead triggered an uncaughtException event. 
+ tests 81 suites 0 - pass 36 + pass 40 fail 24 cancelled 2 - skipped 9 - todo 4 + skipped 10 + todo 5 duration_ms * failing tests: diff --git a/test/fixtures/test-runner/output/spec_reporter_cli.js b/test/fixtures/test-runner/output/spec_reporter_cli.js index 6fefa6b550b9f1..9802c6b3414a91 100644 --- a/test/fixtures/test-runner/output/spec_reporter_cli.js +++ b/test/fixtures/test-runner/output/spec_reporter_cli.js @@ -3,12 +3,17 @@ require('../../../common'); const fixtures = require('../../../common/fixtures'); const spawn = require('node:child_process').spawn; -const child = spawn(process.execPath, - [ - '--no-warnings', '--test', '--test-reporter', 'spec', - fixtures.path('test-runner/output/output.js'), - ], - { stdio: 'pipe' }); +const child = spawn( + process.execPath, + [ + '--no-warnings', + '--test-reporter', 'spec', + '--test', + fixtures.path('test-runner/output/output.js'), + ], + { stdio: 'pipe' }, +); + // eslint-disable-next-line no-control-regex child.stdout.on('data', (d) => process.stdout.write(d.toString().replace(/[^\x00-\x7F]/g, '').replace(/\u001b\[\d+m/g, ''))); child.stderr.pipe(process.stderr); diff --git a/test/fixtures/test-runner/output/spec_reporter_cli.snapshot b/test/fixtures/test-runner/output/spec_reporter_cli.snapshot index b3a352092c677a..64f65d0edfbead 100644 --- a/test/fixtures/test-runner/output/spec_reporter_cli.snapshot +++ b/test/fixtures/test-runner/output/spec_reporter_cli.snapshot @@ -1,3 +1,9 @@ + sync expect fail (method) (*ms) # EXPECTED FAILURE + sync expect fail (options) (*ms) # EXPECTED FAILURE + async expect fail (method) (*ms) # EXPECTED FAILURE + async expect fail (options) (*ms) # EXPECTED FAILURE + sync todo with expect fail (*ms) # TODO + sync skip expect fail (*ms) # SKIP sync pass todo (*ms) # TODO sync pass todo with message (*ms) # this is a passing todo sync fail todo (*ms) # TODO @@ -84,20 +90,20 @@ unfinished test with unhandledRejection (*ms) assertion errors display actual and expected properly (*ms) invalid subtest fail (*ms) - Error: Test "unhandled rejection - passes but warns" at test/fixtures/test-runner/output/output.js:72:1 generated asynchronous activity after the test ended. This activity created the error "Error: rejected from unhandled rejection fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. - Error: Test "async unhandled rejection - passes but warns" at test/fixtures/test-runner/output/output.js:76:1 generated asynchronous activity after the test ended. This activity created the error "Error: rejected from async unhandled rejection fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. + Error: Test "unhandled rejection - passes but warns" at test/fixtures/test-runner/output/output.js:96:1 generated asynchronous activity after the test ended. This activity created the error "Error: rejected from unhandled rejection fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. + Error: Test "async unhandled rejection - passes but warns" at test/fixtures/test-runner/output/output.js:100:1 generated asynchronous activity after the test ended. This activity created the error "Error: rejected from async unhandled rejection fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. Error: A resource generated asynchronous activity after the test ended. 
This activity created the error "Error: uncaught from outside of a test" which triggered an uncaughtException event, caught by the test runner. - Error: Test "immediate throw - passes but warns" at test/fixtures/test-runner/output/output.js:80:1 generated asynchronous activity after the test ended. This activity created the error "Error: thrown from immediate throw fail" and would have caused the test to fail, but instead triggered an uncaughtException event. - Error: Test "immediate reject - passes but warns" at test/fixtures/test-runner/output/output.js:86:1 generated asynchronous activity after the test ended. This activity created the error "Error: rejected from immediate reject fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. - Error: Test "callback called twice in different ticks" at test/fixtures/test-runner/output/output.js:251:1 generated asynchronous activity after the test ended. This activity created the error "Error [ERR_TEST_FAILURE]: callback invoked multiple times" and would have caused the test to fail, but instead triggered an uncaughtException event. - Error: Test "callback async throw after done" at test/fixtures/test-runner/output/output.js:269:1 generated asynchronous activity after the test ended. This activity created the error "Error: thrown from callback async throw after done" and would have caused the test to fail, but instead triggered an uncaughtException event. - tests 76 + Error: Test "immediate throw - passes but warns" at test/fixtures/test-runner/output/output.js:104:1 generated asynchronous activity after the test ended. This activity created the error "Error: thrown from immediate throw fail" and would have caused the test to fail, but instead triggered an uncaughtException event. + Error: Test "immediate reject - passes but warns" at test/fixtures/test-runner/output/output.js:110:1 generated asynchronous activity after the test ended. This activity created the error "Error: rejected from immediate reject fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. + Error: Test "callback called twice in different ticks" at test/fixtures/test-runner/output/output.js:275:1 generated asynchronous activity after the test ended. This activity created the error "Error [ERR_TEST_FAILURE]: callback invoked multiple times" and would have caused the test to fail, but instead triggered an uncaughtException event. + Error: Test "callback async throw after done" at test/fixtures/test-runner/output/output.js:293:1 generated asynchronous activity after the test ended. This activity created the error "Error: thrown from callback async throw after done" and would have caused the test to fail, but instead triggered an uncaughtException event. 
+ tests 82 suites 0 - pass 37 + pass 41 fail 24 cancelled 2 - skipped 9 - todo 4 + skipped 10 + todo 5 duration_ms * failing tests: From cfceb22503c7dafe30f953dde5c2285ed6320f6f Mon Sep 17 00:00:00 2001 From: Chengzhong Wu Date: Mon, 19 Jan 2026 13:01:44 -0500 Subject: [PATCH 034/115] test: reveal wpt evaluation errors in status files PR-URL: https://github.com/nodejs/node/pull/61358 Reviewed-By: Mattias Buelens Reviewed-By: Colin Ihrig Reviewed-By: Jason Zhang Reviewed-By: Luigi Pinca --- test/common/wpt.js | 2 +- test/wpt/status/web-locks.json | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/test/common/wpt.js b/test/common/wpt.js index 227734171f9121..584f3c177ab0be 100644 --- a/test/common/wpt.js +++ b/test/common/wpt.js @@ -712,7 +712,7 @@ class WPTRunner { spec, { status: NODE_UNCAUGHT, - name: 'evaluation in WPTRunner.runJsTests()', + name: `${err}`, message: err.message, stack: inspect(err), }, diff --git a/test/wpt/status/web-locks.json b/test/wpt/status/web-locks.json index f8f4c1feabf9da..97a0a7900d64c5 100644 --- a/test/wpt/status/web-locks.json +++ b/test/wpt/status/web-locks.json @@ -3,7 +3,7 @@ "fail": { "note": "Flaky on ppc, linux x64 and s390x #59142", "flaky": [ - "evaluation in WPTRunner.runJsTests()" + "Error: this uncaught rejection is expected" ] } }, @@ -50,4 +50,4 @@ "storage-buckets.tentative.https.any.js": { "skip": "Node.js does not implement Storage Buckets API" } -} +} From 655d2bc32acdb4e6a890be0efbc9615aaf232621 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Guilherme=20Ara=C3=BAjo?= Date: Mon, 19 Jan 2026 20:27:03 -0300 Subject: [PATCH 035/115] sqlite: add sqlite prepare options args MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/61311 Reviewed-By: Matteo Collina Reviewed-By: René Reviewed-By: Gürgün Dayıoğlu Reviewed-By: Claudio Wunder --- doc/api/sqlite.md | 12 +- src/node_sqlite.cc | 100 +++++++ src/node_sqlite.h | 1 + test/parallel/test-sqlite-named-parameters.js | 100 +++++++ test/parallel/test-sqlite-statement-sync.js | 245 ++++++++++++++++++ 5 files changed, 457 insertions(+), 1 deletion(-) diff --git a/doc/api/sqlite.md b/doc/api/sqlite.md index 3e5dcddfc3b5b0..90bf882c3d51d4 100644 --- a/doc/api/sqlite.md +++ b/doc/api/sqlite.md @@ -441,13 +441,23 @@ Opens the database specified in the `path` argument of the `DatabaseSync` constructor. This method should only be used when the database is not opened via the constructor. An exception is thrown if the database is already open. -### `database.prepare(sql)` +### `database.prepare(sql[, options])` * `sql` {string} A SQL string to compile to a prepared statement. +* `options` {Object} Optional configuration for the prepared statement. + * `readBigInts` {boolean} If `true`, integer fields are read as `BigInt`s. + **Default:** inherited from database options or `false`. + * `returnArrays` {boolean} If `true`, results are returned as arrays. + **Default:** inherited from database options or `false`. + * `allowBareNamedParameters` {boolean} If `true`, allows binding named + parameters without the prefix character. **Default:** inherited from + database options or `true`. + * `allowUnknownNamedParameters` {boolean} If `true`, unknown named parameters + are ignored. **Default:** inherited from database options or `false`. * Returns: {StatementSync} The prepared statement. Compiles a SQL statement into a [prepared statement][]. 
This method is a wrapper diff --git a/src/node_sqlite.cc b/src/node_sqlite.cc index 6bfc54dd814465..1cbb0aefad9d13 100644 --- a/src/node_sqlite.cc +++ b/src/node_sqlite.cc @@ -1147,6 +1147,92 @@ void DatabaseSync::Prepare(const FunctionCallbackInfo& args) { return; } + std::optional return_arrays; + std::optional use_big_ints; + std::optional allow_bare_named_params; + std::optional allow_unknown_named_params; + + if (args.Length() > 1 && !args[1]->IsUndefined()) { + if (!args[1]->IsObject()) { + THROW_ERR_INVALID_ARG_TYPE(env->isolate(), + "The \"options\" argument must be an object."); + return; + } + Local options = args[1].As(); + + Local return_arrays_v; + if (!options + ->Get(env->context(), + FIXED_ONE_BYTE_STRING(env->isolate(), "returnArrays")) + .ToLocal(&return_arrays_v)) { + return; + } + if (!return_arrays_v->IsUndefined()) { + if (!return_arrays_v->IsBoolean()) { + THROW_ERR_INVALID_ARG_TYPE( + env->isolate(), + "The \"options.returnArrays\" argument must be a boolean."); + return; + } + return_arrays = return_arrays_v->IsTrue(); + } + + Local read_big_ints_v; + if (!options + ->Get(env->context(), + FIXED_ONE_BYTE_STRING(env->isolate(), "readBigInts")) + .ToLocal(&read_big_ints_v)) { + return; + } + if (!read_big_ints_v->IsUndefined()) { + if (!read_big_ints_v->IsBoolean()) { + THROW_ERR_INVALID_ARG_TYPE( + env->isolate(), + "The \"options.readBigInts\" argument must be a boolean."); + return; + } + use_big_ints = read_big_ints_v->IsTrue(); + } + + Local allow_bare_named_params_v; + if (!options + ->Get(env->context(), + FIXED_ONE_BYTE_STRING(env->isolate(), + "allowBareNamedParameters")) + .ToLocal(&allow_bare_named_params_v)) { + return; + } + if (!allow_bare_named_params_v->IsUndefined()) { + if (!allow_bare_named_params_v->IsBoolean()) { + THROW_ERR_INVALID_ARG_TYPE( + env->isolate(), + "The \"options.allowBareNamedParameters\" argument must be a " + "boolean."); + return; + } + allow_bare_named_params = allow_bare_named_params_v->IsTrue(); + } + + Local allow_unknown_named_params_v; + if (!options + ->Get(env->context(), + FIXED_ONE_BYTE_STRING(env->isolate(), + "allowUnknownNamedParameters")) + .ToLocal(&allow_unknown_named_params_v)) { + return; + } + if (!allow_unknown_named_params_v->IsUndefined()) { + if (!allow_unknown_named_params_v->IsBoolean()) { + THROW_ERR_INVALID_ARG_TYPE( + env->isolate(), + "The \"options.allowUnknownNamedParameters\" argument must be a " + "boolean."); + return; + } + allow_unknown_named_params = allow_unknown_named_params_v->IsTrue(); + } + } + Utf8Value sql(env->isolate(), args[0].As()); sqlite3_stmt* s = nullptr; int r = sqlite3_prepare_v2(db->connection_, *sql, -1, &s, nullptr); @@ -1155,6 +1241,20 @@ void DatabaseSync::Prepare(const FunctionCallbackInfo& args) { BaseObjectPtr stmt = StatementSync::Create(env, BaseObjectPtr(db), s); db->statements_.insert(stmt.get()); + + if (return_arrays.has_value()) { + stmt->return_arrays_ = return_arrays.value(); + } + if (use_big_ints.has_value()) { + stmt->use_big_ints_ = use_big_ints.value(); + } + if (allow_bare_named_params.has_value()) { + stmt->allow_bare_named_params_ = allow_bare_named_params.value(); + } + if (allow_unknown_named_params.has_value()) { + stmt->allow_unknown_named_params_ = allow_unknown_named_params.value(); + } + args.GetReturnValue().Set(stmt->object()); } diff --git a/src/node_sqlite.h b/src/node_sqlite.h index 2641c9d4f1e8c5..27622a15dbf1bb 100644 --- a/src/node_sqlite.h +++ b/src/node_sqlite.h @@ -246,6 +246,7 @@ class StatementSync : public BaseObject { bool 
BindParams(const v8::FunctionCallbackInfo& args); bool BindValue(const v8::Local& value, const int index); + friend class DatabaseSync; friend class StatementSyncIterator; friend class SQLTagStore; friend class StatementExecutionHelper; diff --git a/test/parallel/test-sqlite-named-parameters.js b/test/parallel/test-sqlite-named-parameters.js index e1acd0f38fa2f7..db8f46e6b6ce5a 100644 --- a/test/parallel/test-sqlite-named-parameters.js +++ b/test/parallel/test-sqlite-named-parameters.js @@ -119,3 +119,103 @@ suite('StatementSync.prototype.setAllowUnknownNamedParameters()', () => { }); }); }); + +suite('options.allowUnknownNamedParameters', () => { + test('unknown named parameters are allowed when input is true', (t) => { + const db = new DatabaseSync(':memory:'); + t.after(() => { db.close(); }); + const setup = db.exec( + 'CREATE TABLE data(key INTEGER, val INTEGER) STRICT;' + ); + t.assert.strictEqual(setup, undefined); + const stmt = db.prepare( + 'INSERT INTO data (key, val) VALUES ($k, $v)', + { allowUnknownNamedParameters: true } + ); + const params = { $a: 1, $b: 2, $k: 42, $y: 25, $v: 84, $z: 99 }; + t.assert.deepStrictEqual( + stmt.run(params), + { changes: 1, lastInsertRowid: 1 }, + ); + }); + + test('unknown named parameters throw when input is false', (t) => { + const db = new DatabaseSync(':memory:'); + t.after(() => { db.close(); }); + const setup = db.exec( + 'CREATE TABLE data(key INTEGER, val INTEGER) STRICT;' + ); + t.assert.strictEqual(setup, undefined); + const stmt = db.prepare( + 'INSERT INTO data (key, val) VALUES ($k, $v)', + { allowUnknownNamedParameters: false } + ); + const params = { $a: 1, $b: 2, $k: 42, $y: 25, $v: 84, $z: 99 }; + t.assert.throws(() => { + stmt.run(params); + }, { + code: 'ERR_INVALID_STATE', + message: /Unknown named parameter '\$a'/, + }); + }); + + test('unknown named parameters throws error by default', (t) => { + const db = new DatabaseSync(':memory:'); + t.after(() => { db.close(); }); + const setup = db.exec( + 'CREATE TABLE data(key INTEGER, val INTEGER) STRICT;' + ); + t.assert.strictEqual(setup, undefined); + const stmt = db.prepare('INSERT INTO data (key, val) VALUES ($k, $v)'); + const params = { $a: 1, $b: 2, $k: 42, $y: 25, $v: 84, $z: 99 }; + t.assert.throws(() => { + stmt.run(params); + }, { + code: 'ERR_INVALID_STATE', + message: /Unknown named parameter '\$a'/, + }); + }); + + test('throws when option is not a boolean', (t) => { + const db = new DatabaseSync(':memory:'); + t.after(() => { db.close(); }); + const setup = db.exec( + 'CREATE TABLE data(key INTEGER PRIMARY KEY, val INTEGER) STRICT;' + ); + t.assert.strictEqual(setup, undefined); + t.assert.throws(() => { + db.prepare( + 'INSERT INTO data (key, val) VALUES ($k, $v)', + { allowUnknownNamedParameters: 'true' } + ); + }, { + code: 'ERR_INVALID_ARG_TYPE', + message: /The "options\.allowUnknownNamedParameters" argument must be a boolean/, + }); + }); + + test('setAllowUnknownNamedParameters can override prepare option', (t) => { + const db = new DatabaseSync(':memory:'); + t.after(() => { db.close(); }); + const setup = db.exec( + 'CREATE TABLE data(key INTEGER, val INTEGER) STRICT;' + ); + t.assert.strictEqual(setup, undefined); + const stmt = db.prepare( + 'INSERT INTO data (key, val) VALUES ($k, $v)', + { allowUnknownNamedParameters: true } + ); + const params = { $a: 1, $b: 2, $k: 42, $y: 25, $v: 84, $z: 99 }; + t.assert.deepStrictEqual( + stmt.run(params), + { changes: 1, lastInsertRowid: 1 }, + ); + t.assert.strictEqual(stmt.setAllowUnknownNamedParameters(false), 
undefined); + t.assert.throws(() => { + stmt.run(params); + }, { + code: 'ERR_INVALID_STATE', + message: /Unknown named parameter '\$a'/, + }); + }); +}); diff --git a/test/parallel/test-sqlite-statement-sync.js b/test/parallel/test-sqlite-statement-sync.js index 04494a02c692a8..62e95363f1c46a 100644 --- a/test/parallel/test-sqlite-statement-sync.js +++ b/test/parallel/test-sqlite-statement-sync.js @@ -609,3 +609,248 @@ suite('StatementSync.prototype.setAllowBareNamedParameters()', () => { }); }); }); + +suite('options.readBigInts', () => { + test('BigInts are returned when input is true', (t) => { + const db = new DatabaseSync(nextDb()); + t.after(() => { db.close(); }); + const setup = db.exec(` + CREATE TABLE data(key INTEGER PRIMARY KEY, val INTEGER) STRICT; + INSERT INTO data (key, val) VALUES (1, 42); + `); + t.assert.strictEqual(setup, undefined); + + const query = db.prepare('SELECT val FROM data', { readBigInts: true }); + t.assert.deepStrictEqual(query.get(), { __proto__: null, val: 42n }); + }); + + test('numbers are returned when input is false', (t) => { + const db = new DatabaseSync(nextDb()); + t.after(() => { db.close(); }); + const setup = db.exec(` + CREATE TABLE data(key INTEGER PRIMARY KEY, val INTEGER) STRICT; + INSERT INTO data (key, val) VALUES (1, 42); + `); + t.assert.strictEqual(setup, undefined); + + const query = db.prepare('SELECT val FROM data', { readBigInts: false }); + t.assert.deepStrictEqual(query.get(), { __proto__: null, val: 42 }); + }); + + test('throws when input is not a boolean', (t) => { + const db = new DatabaseSync(nextDb()); + t.after(() => { db.close(); }); + const setup = db.exec( + 'CREATE TABLE data(key INTEGER PRIMARY KEY, val INTEGER) STRICT;' + ); + t.assert.strictEqual(setup, undefined); + t.assert.throws(() => { + db.prepare('SELECT val FROM data', { readBigInts: 'true' }); + }, { + code: 'ERR_INVALID_ARG_TYPE', + message: /The "options\.readBigInts" argument must be a boolean/, + }); + }); + + test('setReadBigInts can override prepare option', (t) => { + const db = new DatabaseSync(nextDb()); + t.after(() => { db.close(); }); + const setup = db.exec(` + CREATE TABLE data(key INTEGER PRIMARY KEY, val INTEGER) STRICT; + INSERT INTO data (key, val) VALUES (1, 42); + `); + t.assert.strictEqual(setup, undefined); + + const query = db.prepare('SELECT val FROM data', { readBigInts: true }); + t.assert.deepStrictEqual(query.get(), { __proto__: null, val: 42n }); + t.assert.strictEqual(query.setReadBigInts(false), undefined); + t.assert.deepStrictEqual(query.get(), { __proto__: null, val: 42 }); + }); +}); + +suite('options.returnArrays', () => { + test('arrays are returned when input is true', (t) => { + const db = new DatabaseSync(nextDb()); + t.after(() => { db.close(); }); + const setup = db.exec(` + CREATE TABLE data(key INTEGER PRIMARY KEY, val TEXT) STRICT; + INSERT INTO data (key, val) VALUES (1, 'one'); + `); + t.assert.strictEqual(setup, undefined); + + const query = db.prepare( + 'SELECT key, val FROM data WHERE key = 1', + { returnArrays: true } + ); + t.assert.deepStrictEqual(query.get(), [1, 'one']); + }); + + test('objects are returned when input is false', (t) => { + const db = new DatabaseSync(nextDb()); + t.after(() => { db.close(); }); + const setup = db.exec(` + CREATE TABLE data(key INTEGER PRIMARY KEY, val TEXT) STRICT; + INSERT INTO data (key, val) VALUES (1, 'one'); + `); + t.assert.strictEqual(setup, undefined); + + const query = db.prepare( + 'SELECT key, val FROM data WHERE key = 1', + { returnArrays: false } + ); + 
t.assert.deepStrictEqual(query.get(), { __proto__: null, key: 1, val: 'one' }); + }); + + test('throws when input is not a boolean', (t) => { + const db = new DatabaseSync(nextDb()); + t.after(() => { db.close(); }); + const setup = db.exec( + 'CREATE TABLE data(key INTEGER PRIMARY KEY, val TEXT) STRICT;' + ); + t.assert.strictEqual(setup, undefined); + t.assert.throws(() => { + db.prepare('SELECT key, val FROM data', { returnArrays: 'true' }); + }, { + code: 'ERR_INVALID_ARG_TYPE', + message: /The "options\.returnArrays" argument must be a boolean/, + }); + }); + + test('setReturnArrays can override prepare option', (t) => { + const db = new DatabaseSync(nextDb()); + t.after(() => { db.close(); }); + const setup = db.exec(` + CREATE TABLE data(key INTEGER PRIMARY KEY, val TEXT) STRICT; + INSERT INTO data (key, val) VALUES (1, 'one'); + `); + t.assert.strictEqual(setup, undefined); + + const query = db.prepare( + 'SELECT key, val FROM data WHERE key = 1', + { returnArrays: true } + ); + t.assert.deepStrictEqual(query.get(), [1, 'one']); + t.assert.strictEqual(query.setReturnArrays(false), undefined); + t.assert.deepStrictEqual(query.get(), { __proto__: null, key: 1, val: 'one' }); + }); + + test('all() returns arrays when input is true', (t) => { + const db = new DatabaseSync(nextDb()); + t.after(() => { db.close(); }); + const setup = db.exec(` + CREATE TABLE data(key INTEGER PRIMARY KEY, val TEXT) STRICT; + INSERT INTO data (key, val) VALUES (1, 'one'); + INSERT INTO data (key, val) VALUES (2, 'two'); + `); + t.assert.strictEqual(setup, undefined); + + const query = db.prepare( + 'SELECT key, val FROM data ORDER BY key', + { returnArrays: true } + ); + t.assert.deepStrictEqual(query.all(), [ + [1, 'one'], + [2, 'two'], + ]); + }); + + test('iterate() returns arrays when input is true', (t) => { + const db = new DatabaseSync(nextDb()); + t.after(() => { db.close(); }); + const setup = db.exec(` + CREATE TABLE data(key INTEGER PRIMARY KEY, val TEXT) STRICT; + INSERT INTO data (key, val) VALUES (1, 'one'); + INSERT INTO data (key, val) VALUES (2, 'two'); + `); + t.assert.strictEqual(setup, undefined); + + const query = db.prepare( + 'SELECT key, val FROM data ORDER BY key', + { returnArrays: true } + ); + t.assert.deepStrictEqual(query.iterate().toArray(), [ + [1, 'one'], + [2, 'two'], + ]); + }); +}); + +suite('options.allowBareNamedParameters', () => { + test('bare named parameters are allowed when input is true', (t) => { + const db = new DatabaseSync(nextDb()); + t.after(() => { db.close(); }); + const setup = db.exec( + 'CREATE TABLE data(key INTEGER PRIMARY KEY, val INTEGER) STRICT;' + ); + t.assert.strictEqual(setup, undefined); + const stmt = db.prepare( + 'INSERT INTO data (key, val) VALUES ($k, $v)', + { allowBareNamedParameters: true } + ); + t.assert.deepStrictEqual( + stmt.run({ k: 1, v: 2 }), + { changes: 1, lastInsertRowid: 1 }, + ); + }); + + test('bare named parameters throw when input is false', (t) => { + const db = new DatabaseSync(nextDb()); + t.after(() => { db.close(); }); + const setup = db.exec( + 'CREATE TABLE data(key INTEGER PRIMARY KEY, val INTEGER) STRICT;' + ); + t.assert.strictEqual(setup, undefined); + const stmt = db.prepare( + 'INSERT INTO data (key, val) VALUES ($k, $v)', + { allowBareNamedParameters: false } + ); + t.assert.throws(() => { + stmt.run({ k: 1, v: 2 }); + }, { + code: 'ERR_INVALID_STATE', + message: /Unknown named parameter 'k'/, + }); + }); + + test('throws when input is not a boolean', (t) => { + const db = new DatabaseSync(nextDb()); + 
t.after(() => { db.close(); }); + const setup = db.exec( + 'CREATE TABLE data(key INTEGER PRIMARY KEY, val INTEGER) STRICT;' + ); + t.assert.strictEqual(setup, undefined); + t.assert.throws(() => { + db.prepare( + 'INSERT INTO data (key, val) VALUES ($k, $v)', + { allowBareNamedParameters: 'true' } + ); + }, { + code: 'ERR_INVALID_ARG_TYPE', + message: /The "options\.allowBareNamedParameters" argument must be a boolean/, + }); + }); + + test('setAllowBareNamedParameters can override prepare option', (t) => { + const db = new DatabaseSync(nextDb()); + t.after(() => { db.close(); }); + const setup = db.exec( + 'CREATE TABLE data(key INTEGER PRIMARY KEY, val INTEGER) STRICT;' + ); + t.assert.strictEqual(setup, undefined); + const stmt = db.prepare( + 'INSERT INTO data (key, val) VALUES ($k, $v)', + { allowBareNamedParameters: false } + ); + t.assert.throws(() => { + stmt.run({ k: 1, v: 2 }); + }, { + code: 'ERR_INVALID_STATE', + message: /Unknown named parameter 'k'/, + }); + t.assert.strictEqual(stmt.setAllowBareNamedParameters(true), undefined); + t.assert.deepStrictEqual( + stmt.run({ k: 2, v: 4 }), + { changes: 1, lastInsertRowid: 2 }, + ); + }); +}); From 1967aec35eeb05ae71a241f7950c586519051762 Mon Sep 17 00:00:00 2001 From: Bart Louwers Date: Tue, 20 Jan 2026 12:10:53 +0100 Subject: [PATCH 036/115] sqlite: enable defensive mode by default MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/61266 Reviewed-By: René Reviewed-By: Colin Ihrig Reviewed-By: Edy Silva Reviewed-By: Yagiz Nizipli Reviewed-By: Jake Yuesong Li --- doc/api/sqlite.md | 5 ++++- src/node_sqlite.h | 2 +- test/parallel/test-sqlite-config.js | 13 ++++++++++--- 3 files changed, 15 insertions(+), 5 deletions(-) diff --git a/doc/api/sqlite.md b/doc/api/sqlite.md index 90bf882c3d51d4..a7c7271da799a9 100644 --- a/doc/api/sqlite.md +++ b/doc/api/sqlite.md @@ -102,6 +102,9 @@ exposed by this class execute synchronously. 
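For context on the `database.prepare(sql[, options])` change above, here is a minimal sketch of how the new per-statement options compose, pieced together from the documentation and tests added in that commit (illustrative only):

```js
'use strict';
const { DatabaseSync } = require('node:sqlite');

const db = new DatabaseSync(':memory:');
db.exec('CREATE TABLE data(key INTEGER PRIMARY KEY, val INTEGER) STRICT;');

// Bare named parameters can be enabled per statement at prepare() time.
const insert = db.prepare(
  'INSERT INTO data (key, val) VALUES ($k, $v)',
  { allowBareNamedParameters: true },
);
insert.run({ k: 1, v: 42 });

// readBigInts and returnArrays are independent per-statement flags.
const asBigInt = db.prepare('SELECT val FROM data', { readBigInts: true });
console.log(asBigInt.get()); // { val: 42n } (null-prototype object)

const asArray = db.prepare('SELECT key, val FROM data', { returnArrays: true });
console.log(asArray.get()); // [ 1, 42 ]

// The existing setter methods still override the prepare() options.
asArray.setReturnArrays(false);
console.log(asArray.get()); // { key: 1, val: 42 }
```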
compressed +// count[0] (LSB) == 0 ----> chat16_t +template +using jstring_t = ART_30::Java::jstring_t; + +template +struct ALIGNED_(4) jdex_cache_t { + jobject_t object; + + // heap_reference_t dex; // REMOVED in ART 44 + heap_reference_t location; + uint32_t num_resolved_call_sites; // ADDED in ART 44 (related to DEX38 format) + uint64_t dex_file; + uint64_t resolved_call_sites; // ADDED in ART 44 (related to DEX38 format) + uint64_t resolved_fields; + uint64_t resolved_method_types; // ADDED in ART 44 + uint64_t resolved_methods; + uint64_t resolved_types; + uint64_t strings; + uint32_t num_resolved_fields; + uint32_t num_resolved_methods_types; // ADDED in ART 44 + uint32_t num_resolved_methods; + uint32_t num_resolved_types; + uint32_t num_strings; +}; + + +} // Namespace Java +} // Namespace ART_44 + + +// ====================== +// Android 8.1.X - ART 46 +// ====================== +namespace ART_46 { + +/// Namespace related to the Java part of ART 46 +namespace Java { + +using heap_reference_t = ART_44::Java::heap_reference_t; +using brooks_read_barrier_t = ART_44::Java::brooks_read_barrier_t; + +template +using jobject_t = ART_44::Java::jobject_t; + +template +using jarray_t = ART_44::Java::jarray_t; + +template +using jclass_t = ART_44::Java::jclass_t; + +template +using jstring_t = ART_44::Java::jstring_t; + +template +using jdex_cache_t = ART_44::Java::jdex_cache_t; + +} // Namespace Java +} // Namespace ART_46 + +// ====================== +// Android 9.0.0 - ART 66 +// ====================== +namespace ART_56 { + +/// Namespace related to the Java part of ART 46 +namespace Java { + +using heap_reference_t = ART_46::Java::heap_reference_t; +using brooks_read_barrier_t = ART_46::Java::brooks_read_barrier_t; + +template +using jobject_t = ART_46::Java::jobject_t; + +template +using jarray_t = ART_46::Java::jarray_t; + +template +using jclass_t = ART_46::Java::jclass_t; + +template +using jstring_t = ART_46::Java::jstring_t; + +template +using jdex_cache_t = ART_46::Java::jdex_cache_t; + +} // Namespace Java +} // Namespace ART_56 + +} // namespace details +} // Namespace ART +} // Namespace LIEF + + + +#endif diff --git a/deps/LIEF/include/LIEF/ART/json.hpp b/deps/LIEF/include/LIEF/ART/json.hpp new file mode 100644 index 00000000000000..3302272bd8c51a --- /dev/null +++ b/deps/LIEF/include/LIEF/ART/json.hpp @@ -0,0 +1,31 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_ART_PUBLIC_JSON_H +#define LIEF_ART_PUBLIC_JSON_H + +#include "LIEF/visibility.h" +#include + +namespace LIEF { +class Object; +namespace ART { + +LIEF_API std::string to_json(const Object& v); + +} +} + +#endif diff --git a/deps/LIEF/include/LIEF/ART/types.hpp b/deps/LIEF/include/LIEF/ART/types.hpp new file mode 100644 index 00000000000000..706e4143d08fbc --- /dev/null +++ b/deps/LIEF/include/LIEF/ART/types.hpp @@ -0,0 +1,31 @@ +/* Copyright 2017 - 2025 R. 
Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_ART_TYPE_TRAITS_H +#define LIEF_ART_TYPE_TRAITS_H + +#include +#include +#include "LIEF/iterators.hpp" + +namespace LIEF { +namespace ART { + +using art_version_t = uint32_t; + +} // Namesapce ART +} // Namespace LIEF + +#endif diff --git a/deps/LIEF/include/LIEF/ART/utils.hpp b/deps/LIEF/include/LIEF/ART/utils.hpp new file mode 100644 index 00000000000000..fc4bcd8e3eede0 --- /dev/null +++ b/deps/LIEF/include/LIEF/ART/utils.hpp @@ -0,0 +1,51 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_ART_UTILS_H +#define LIEF_ART_UTILS_H + +#include +#include + +#include "LIEF/ART/types.hpp" + +#include "LIEF/platforms/android.hpp" + +#include "LIEF/types.hpp" +#include "LIEF/visibility.h" + +namespace LIEF { +namespace ART { + +/// Check if the given file is an ART one. +LIEF_API bool is_art(const std::string& file); + +/// Check if the given raw data is an ART one. +LIEF_API bool is_art(const std::vector& raw); + +/// Return the ART version of the given file +LIEF_API art_version_t version(const std::string& file); + +/// Return the ART version of the raw data +LIEF_API art_version_t version(const std::vector& raw); + +/// Return the ANDROID_VERSIONS associated with the given ART version +LIEF_API LIEF::Android::ANDROID_VERSIONS android_version(art_version_t version); + +} +} + + +#endif diff --git a/deps/LIEF/include/LIEF/ASM.hpp b/deps/LIEF/include/LIEF/ASM.hpp new file mode 100644 index 00000000000000..5bc1a376b2c5ba --- /dev/null +++ b/deps/LIEF/include/LIEF/ASM.hpp @@ -0,0 +1,28 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#ifndef LIEF_ASM_H +#define LIEF_ASM_H +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#endif diff --git a/deps/LIEF/include/LIEF/Abstract.hpp b/deps/LIEF/include/LIEF/Abstract.hpp new file mode 100644 index 00000000000000..bc10e669486524 --- /dev/null +++ b/deps/LIEF/include/LIEF/Abstract.hpp @@ -0,0 +1,26 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_ABSTRACT_H +#define LIEF_ABSTRACT_H + +#include +#include +#include +#include +#include +#include + +#endif diff --git a/deps/LIEF/include/LIEF/Abstract/Binary.hpp b/deps/LIEF/include/LIEF/Abstract/Binary.hpp new file mode 100644 index 00000000000000..8e4f738a76efdb --- /dev/null +++ b/deps/LIEF/include/LIEF/Abstract/Binary.hpp @@ -0,0 +1,441 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_ABSTRACT_BINARY_H +#define LIEF_ABSTRACT_BINARY_H + +#include +#include +#include + +#include "LIEF/visibility.h" +#include "LIEF/Object.hpp" +#include "LIEF/iterators.hpp" +#include "LIEF/errors.hpp" +#include "LIEF/span.hpp" + +#include "LIEF/Abstract/Header.hpp" +#include "LIEF/Abstract/Function.hpp" + +#include "LIEF/asm/Instruction.hpp" +#include "LIEF/asm/AssemblerConfig.hpp" + +namespace llvm { +class MCInst; +} + +/// LIEF namespace +namespace LIEF { +class Section; +class Relocation; +class Symbol; + +class DebugInfo; + +namespace assembly { +class Engine; +} + +/// Generic interface representing a binary executable. +/// +/// This class provides a unified interface across multiple binary formats +/// such as ELF, PE, Mach-O, and others. It enables users to access binary +/// components like headers, sections, symbols, relocations, +/// and functions in a format-agnostic way. +/// +/// Subclasses like LIEF::PE::Binary implement format-specific API +class LIEF_API Binary : public Object { + public: + + /// Enumeration of virtual address types used for patching and memory access. + enum class VA_TYPES { + /// Automatically determine if the address is absolute or relative + /// (default behavior). + AUTO = 0, + + /// Relative Virtual Address (RVA), offset from image base. + RVA = 1, + + /// Absolute Virtual Address. 
+ VA = 2 + }; + + enum FORMATS { + UNKNOWN = 0, + ELF, + PE, + MACHO, + OAT, + }; + + using functions_t = std::vector; + + /// Internal container + using sections_t = std::vector; + + /// Iterator that outputs LIEF::Section& + using it_sections = ref_iterator; + + /// Iterator that outputs const LIEF::Section& + using it_const_sections = const_ref_iterator; + + /// Internal container + using symbols_t = std::vector; + + /// Iterator that outputs LIEF::Symbol& + using it_symbols = ref_iterator; + + /// Iterator that outputs const LIEF::Symbol& + using it_const_symbols = const_ref_iterator; + + /// Internal container + using relocations_t = std::vector; + + /// Iterator that outputs LIEF::Relocation& + using it_relocations = ref_iterator; + + /// Iterator that outputs const LIEF::Relocation& + using it_const_relocations = const_ref_iterator; + + /// Instruction iterator + using instructions_it = iterator_range; + + public: + Binary(); + Binary(FORMATS fmt); + + ~Binary() override; + + Binary& operator=(const Binary&) = delete; + Binary(const Binary&) = delete; + + /// Executable format (ELF, PE, Mach-O) of the underlying binary + FORMATS format() const { + return format_; + } + + /// Return the abstract header of the binary + Header header() const { + return get_abstract_header(); + } + + /// Return an iterator over the abstracted symbols in which the elements **can** be modified + it_symbols symbols() { + return get_abstract_symbols(); + } + + /// Return an iterator over the abstracted symbols in which the elements **can't** be modified + it_const_symbols symbols() const { + return const_cast(this)->get_abstract_symbols(); + } + + /// Check if a Symbol with the given name exists + bool has_symbol(const std::string& name) const { + return get_symbol(name) != nullptr; + } + + /// Return the Symbol with the given name + /// If the symbol does not exist, return a nullptr + const Symbol* get_symbol(const std::string& name) const; + + Symbol* get_symbol(const std::string& name) { + return const_cast(static_cast(this)->get_symbol(name)); + } + + /// Return an iterator over the binary's sections (LIEF::Section) + it_sections sections() { + return get_abstract_sections(); + } + + it_const_sections sections() const { + return const_cast(this)->get_abstract_sections(); + } + + /// Remove **all** the sections in the underlying binary + virtual void remove_section(const std::string& name, bool clear = false) = 0; + + /// Return an iterator over the binary relocation (LIEF::Relocation) + it_relocations relocations() { + return get_abstract_relocations(); + } + + it_const_relocations relocations() const { + return const_cast(this)->get_abstract_relocations(); + } + + /// Binary's entrypoint (if any) + virtual uint64_t entrypoint() const = 0; + + /// Binary's original size + uint64_t original_size() const { + return original_size_; + } + + /// Return the functions exported by the binary + functions_t exported_functions() const { + return get_abstract_exported_functions(); + } + + /// Return libraries which are imported by the binary + std::vector imported_libraries() const { + return get_abstract_imported_libraries(); + } + + /// Return functions imported by the binary + functions_t imported_functions() const { + return get_abstract_imported_functions(); + } + + /// Return the address of the given function name + virtual result get_function_address(const std::string& func_name) const; + + /// Method so that a ``visitor`` can visit us + void accept(Visitor& visitor) const override; + + std::vector 
xref(uint64_t address) const; + + /// Patch the content at virtual address @p address with @p patch_value + /// + /// @param[in] address Address to patch + /// @param[in] patch_value Patch to apply + /// @param[in] addr_type Specify if the address should be used as an + /// absolute virtual address or a RVA + virtual void patch_address(uint64_t address, const std::vector& patch_value, + VA_TYPES addr_type = VA_TYPES::AUTO) = 0; + + /// Patch the address with the given value + /// + /// @param[in] address Address to patch + /// @param[in] patch_value Patch to apply + /// @param[in] size Size of the value in **bytes** (1, 2, ... 8) + /// @param[in] addr_type Specify if the address should be used as an absolute virtual address or an RVA + virtual void patch_address(uint64_t address, uint64_t patch_value, size_t size = sizeof(uint64_t), + VA_TYPES addr_type = VA_TYPES::AUTO) = 0; + + /// Return the content located at the given virtual address + virtual span + get_content_from_virtual_address(uint64_t virtual_address, uint64_t size, + VA_TYPES addr_type = VA_TYPES::AUTO) const = 0; + + /// Get the integer value at the given virtual address + template + LIEF::result get_int_from_virtual_address( + uint64_t va, VA_TYPES addr_type = VA_TYPES::AUTO) const + { + T value; + static_assert(std::is_integral::value, "Require an integral type"); + span raw = get_content_from_virtual_address(va, sizeof(T), addr_type); + if (raw.empty() || raw.size() < sizeof(T)) { + return make_error_code(lief_errors::read_error); + } + + std::copy(raw.data(), raw.data() + sizeof(T), + reinterpret_cast(&value)); + return value; + } + + /// Change binary's original size. + /// + /// @warning + /// This function should be used carefully as some optimizations + /// can be performed with this value + void original_size(uint64_t size) { + original_size_ = size; + } + + /// Check if the binary is position independent + virtual bool is_pie() const = 0; + + /// Check if the binary uses ``NX`` protection + virtual bool has_nx() const = 0; + + /// Default image base address if the ASLR is not enabled. + virtual uint64_t imagebase() const = 0; + + /// Constructor functions that are called prior any other functions + virtual functions_t ctor_functions() const = 0; + + /// Convert the given offset into a virtual address. + /// + /// @param[in] offset The offset to convert. + /// @param[in] slide If not 0, it will replace the default base address (if any) + virtual result offset_to_virtual_address(uint64_t offset, uint64_t slide = 0) const = 0; + + virtual std::ostream& print(std::ostream& os) const { + return os; + } + + LIEF_API friend std::ostream& operator<<(std::ostream& os, const Binary& binary) { + binary.print(os); + return os; + } + + /// Return the debug info if present. It can be either a + /// LIEF::dwarf::DebugInfo or a LIEF::pdb::DebugInfo + /// + /// For ELF and Mach-O binaries, it returns the given DebugInfo object **only** + /// if the binary embeds the DWARF debug info in the binary itself. + /// + /// For PE file, this function tries to find the **external** PDB using + /// the LIEF::PE::CodeViewPDB::filename() output (if present). One can also + /// use LIEF::pdb::load() or LIEF::pdb::DebugInfo::from_file() to get PDB debug + /// info. + /// + /// \warning This function requires LIEF's extended version otherwise it + /// **always** return a nullptr + DebugInfo* debug_info() const; + + /// Disassemble code starting at the given virtual address and with the given + /// size. 
+ /// + /// ```cpp + /// auto insts = binary->disassemble(0xacde, 100); + /// for (std::unique_ptr inst : insts) { + /// std::cout << inst->to_string() << '\n'; + /// } + /// ``` + /// + /// \see LIEF::assembly::Instruction + instructions_it disassemble(uint64_t address, size_t size) const; + + /// Disassemble code starting at the given virtual address + /// + /// ```cpp + /// auto insts = binary->disassemble(0xacde); + /// for (std::unique_ptr inst : insts) { + /// std::cout << inst->to_string() << '\n'; + /// } + /// ``` + /// + /// \see LIEF::assembly::Instruction + instructions_it disassemble(uint64_t address) const; + + /// Disassemble code for the given symbol name + /// + /// ```cpp + /// auto insts = binary->disassemble("__libc_start_main"); + /// for (std::unique_ptr inst : insts) { + /// std::cout << inst->to_string() << '\n'; + /// } + /// ``` + /// + /// \see LIEF::assembly::Instruction + instructions_it disassemble(const std::string& function) const; + + /// Disassemble code provided by the given buffer at the specified + /// `address` parameter. + /// + /// \see LIEF::assembly::Instruction + instructions_it disassemble(const uint8_t* buffer, size_t size, + uint64_t address = 0) const; + + + /// Disassemble code provided by the given vector of bytes at the specified + /// `address` parameter. + /// + /// \see LIEF::assembly::Instruction + instructions_it disassemble(const std::vector& buffer, + uint64_t address = 0) const { + return disassemble(buffer.data(), buffer.size(), address); + } + + instructions_it disassemble(LIEF::span buffer, + uint64_t address = 0) const { + return disassemble(buffer.data(), buffer.size(), address); + } + + instructions_it disassemble(LIEF::span buffer, uint64_t address = 0) const { + return disassemble(buffer.data(), buffer.size(), address); + } + + /// Assemble **and patch** the provided assembly code at the specified address. + /// + /// The function returns the generated assembly bytes + /// + /// ```cpp + /// bin->assemble(0x12000440, R"asm( + /// xor rax, rbx; + /// mov rcx, rax; + /// )asm"); + /// ``` + /// + /// If you need to configure the assembly engine or to define addresses for + /// symbols, you can provide your own assembly::AssemblerConfig. + std::vector assemble(uint64_t address, const std::string& Asm, + assembly::AssemblerConfig& config = assembly::AssemblerConfig::default_config()); + + /// Assemble **and patch** the address with the given LLVM MCInst. + /// + /// \warning Because of ABI compatibility, this MCInst can **only be used** + /// with the **same** version of LLVM used by LIEF (see documentation) + std::vector assemble(uint64_t address, const llvm::MCInst& inst); + + /// Assemble **and patch** the address with the given LLVM MCInst. + /// + /// \warning Because of ABI compatibility, this MCInst can **only be used** + /// with the **same** version of LLVM used by LIEF (see documentation) + std::vector assemble(uint64_t address, + const std::vector& insts); + + /// Get the default memory page size according to the architecture and + /// the format of the current binary + virtual uint64_t page_size() const; + + /// Load and associate an external debug file (e.g., DWARF or PDB) with this binary. + /// + /// This method attempts to load the debug information from the file located at the given path, + /// and binds it to the current binary instance. If successful, it returns a pointer to the + /// loaded DebugInfo object. 
+ /// + /// \param path Path to the external debug file (e.g., `.dwarf`, `.pdb`) + /// \return Pointer to the loaded DebugInfo object on success, or `nullptr` on failure. + /// + /// \warning It is the caller's responsibility to ensure that the debug file is + /// compatible with the binary. Incorrect associations may lead to + /// inconsistent or invalid results. + /// + /// \note This function does not verify that the debug file matches the binary's unique + /// identifier (e.g., build ID, GUID). + DebugInfo* load_debug_info(const std::string& path); + + protected: + FORMATS format_ = FORMATS::UNKNOWN; + mutable std::unique_ptr debug_info_; + mutable std::unordered_map> engines_; + uint64_t original_size_ = 0; + + assembly::Engine* get_engine(uint64_t address) const; + + template + LIEF_LOCAL assembly::Engine* get_cache_engine(uint64_t address, F&& f) const; + + // These functions need to be overloaded by the object that claims to extend this Abstract Binary + virtual Header get_abstract_header() const = 0; + virtual symbols_t get_abstract_symbols() = 0; + virtual sections_t get_abstract_sections() = 0; + virtual relocations_t get_abstract_relocations() = 0; + + virtual functions_t get_abstract_exported_functions() const = 0; + virtual functions_t get_abstract_imported_functions() const = 0; + virtual std::vector get_abstract_imported_libraries() const = 0; +}; + +LIEF_API const char* to_string(Binary::VA_TYPES e); +LIEF_API const char* to_string(Binary::FORMATS e); + +} + + +#endif diff --git a/deps/LIEF/include/LIEF/Abstract/DebugInfo.hpp b/deps/LIEF/include/LIEF/Abstract/DebugInfo.hpp new file mode 100644 index 00000000000000..24961cf791341b --- /dev/null +++ b/deps/LIEF/include/LIEF/Abstract/DebugInfo.hpp @@ -0,0 +1,79 @@ +/* Copyright 2022 - 2025 R. Thomas + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_DEBUGINFO_H +#define LIEF_DEBUGINFO_H +#include +#include +#include + +#include "LIEF/visibility.h" +#include "LIEF/optional.hpp" +namespace LIEF { + +class Binary; + +namespace details { +class DebugInfo; +} + +/// This class provides a generic interface for accessing debug information +/// from different formats such as DWARF and PDB. +/// +/// Users can use this interface to access high-level debug features like +/// resolving function addresses. 
+/// +/// See: LIEF::pdb::DebugInfo, LIEF::dwarf::DebugInfo +class LIEF_API DebugInfo { + public: + friend class Binary; + enum class FORMAT { + UNKNOWN = 0, + DWARF, PDB, + }; + DebugInfo(std::unique_ptr impl); + + virtual ~DebugInfo(); + + virtual FORMAT format() const { + return FORMAT::UNKNOWN; + } + + /// This function can be used to **down cast** a DebugInfo instance: + /// + /// ```cpp + /// std::unique_ptr dbg = bin->debug_info(); + /// if (const auto* dwarf = inst->as()) { + /// dwarf->find_function("main"); + /// } + /// ``` + template + const T* as() const { + static_assert(std::is_base_of::value, + "Require Instruction inheritance"); + if (T::classof(this)) { + return static_cast(this); + } + return nullptr; + } + + /// Attempt to resolve the address of the function specified by `name`. + virtual optional find_function_address(const std::string& name) const = 0; + + protected: + std::unique_ptr impl_; +}; + +} +#endif diff --git a/deps/LIEF/include/LIEF/Abstract/Function.hpp b/deps/LIEF/include/LIEF/Abstract/Function.hpp new file mode 100644 index 00000000000000..66004763d6eca7 --- /dev/null +++ b/deps/LIEF/include/LIEF/Abstract/Function.hpp @@ -0,0 +1,121 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_ABSTRACT_FUNCTION_H +#define LIEF_ABSTRACT_FUNCTION_H + +#include +#include + +#include "LIEF/Abstract/Symbol.hpp" +#include "LIEF/visibility.h" +#include "LIEF/enums.hpp" + +namespace LIEF { + +/// Class that represents a function in the binary +class LIEF_API Function : public Symbol { + public: + /// Flags used to characterize the semantics of the function + enum class FLAGS : uint32_t { + NONE = 0, + /// The function acts as a constructor. + /// + /// Usually this flag is associated with functions + /// that are located in the `.init_array`, `__mod_init_func` or `.tls` sections + CONSTRUCTOR = 1 << 0, + + /// The function acts as a destructor. 
+ /// + /// Usually this flag is associated with functions + /// that are located in the `.fini_array` or `__mod_term_func` sections + DESTRUCTOR = 1 << 1, + + /// The function is associated with Debug information + DEBUG_INFO = 1 << 2, + + /// The function is exported by the binary and the address() method + /// returns its virtual address in the binary + EXPORTED = 1 << 3, + + /// The function is **imported** by the binary and the address() should return 0 + IMPORTED = 1 << 4, + }; + + public: + Function() = default; + Function(const std::string& name) : + Symbol(name) + {} + Function(uint64_t address) : + Function("", address) + {} + Function(const std::string& name, uint64_t address) : + Symbol(name, address) + {} + Function(const std::string& name, uint64_t address, FLAGS flags) : + Function(name, address) + { + flags_ = flags; + } + + Function(const Function&) = default; + Function& operator=(const Function&) = default; + + ~Function() override = default; + + /// List of FLAGS + std::vector flags_list() const; + + FLAGS flags() const { + return flags_; + } + + /// Add a flag to the current function + Function& add(FLAGS f) { + flags_ = (FLAGS)((uint32_t)flags_ | (uint32_t)f); + return *this; + } + + /// Check if the function has the given flag + bool has(FLAGS f) const { + return ((uint32_t)flags_ & (uint32_t)f) != 0; + } + + /// Address of the current function. For functions that are set with the FLAGS::IMPORTED flag, + /// this value is likely 0. + uint64_t address() const { + return value_; + } + + void address(uint64_t address) { + value_ = address; + } + + void accept(Visitor& visitor) const override; + + LIEF_API friend std::ostream& operator<<(std::ostream& os, const Function& entry); + + protected: + FLAGS flags_ = FLAGS::NONE; +}; + +LIEF_API const char* to_string(Function::FLAGS e); +} + +ENABLE_BITMASK_OPERATORS(LIEF::Function::FLAGS); + +#endif + diff --git a/deps/LIEF/include/LIEF/Abstract/Header.hpp b/deps/LIEF/include/LIEF/Abstract/Header.hpp new file mode 100644 index 00000000000000..036e64cf5a5459 --- /dev/null +++ b/deps/LIEF/include/LIEF/Abstract/Header.hpp @@ -0,0 +1,147 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
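The FLAGS values above are meant to be combined as a bitmask through `Function::add()` and queried with `Function::has()` or `flags_list()`. A minimal sketch of that pattern, in which the function name and address are purely illustrative:

```cpp
#include <cstdint>
#include <iostream>
#include <LIEF/Abstract/Function.hpp>

int main() {
  // Hypothetical function entry: name and address are illustrative values.
  LIEF::Function func("entry_point", 0x1000, LIEF::Function::FLAGS::EXPORTED);
  func.add(LIEF::Function::FLAGS::CONSTRUCTOR);

  if (func.has(LIEF::Function::FLAGS::EXPORTED)) {
    std::cout << func.name() << " @ 0x" << std::hex << func.address() << '\n';
  }

  // Enumerate the individual flags set on this function
  for (LIEF::Function::FLAGS f : func.flags_list()) {
    std::cout << LIEF::to_string(f) << '\n';
  }
  return 0;
}
```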
+ */ +#ifndef LIEF_ABSTRACT_HEADER_H +#define LIEF_ABSTRACT_HEADER_H + +#include +#include +#include + +#include "LIEF/Object.hpp" +#include "LIEF/visibility.h" +#include "LIEF/enums.hpp" + +namespace LIEF { +namespace ELF { +class Binary; +} + +namespace PE { +class Binary; +} + +namespace MachO { +class Binary; +} + +class LIEF_API Header : public Object { + public: + enum class ARCHITECTURES { + UNKNOWN = 0, + ARM, + ARM64, + MIPS, + X86, + X86_64, + PPC, + SPARC, + SYSZ, + XCORE, + RISCV, + LOONGARCH, + PPC64, + }; + + enum class ENDIANNESS { + UNKNOWN = 0, + BIG, + LITTLE, + }; + + enum MODES : uint64_t { + NONE = 0, + + BITS_16 = 1LLU << 0, /// 16-bits architecture + BITS_32 = 1LLU << 1, /// 32-bits architecture + BITS_64 = 1LLU << 2, /// 64-bits architecture + THUMB = 1LLU << 3, /// Support ARM Thumb mode + + ARM64E = 1LLU << 4, /// ARM64 with extended (security) features + }; + + enum class OBJECT_TYPES { + UNKNOWN = 0, + EXECUTABLE, + LIBRARY, + OBJECT, + }; + + static Header from(const LIEF::ELF::Binary& elf); + static Header from(const LIEF::PE::Binary& pe); + static Header from(const LIEF::MachO::Binary& macho); + + Header() = default; + Header(const Header&) = default; + Header& operator=(const Header&) = default; + ~Header() override = default; + + /// Target architecture + ARCHITECTURES architecture() const { + return architecture_; + } + + /// Optional features for the given architecture + MODES modes() const { + return modes_; + } + + /// MODES as a vector + std::vector modes_list() const; + + bool is(MODES m) const { + return ((uint64_t)m & (uint64_t)modes_) != 0; + } + + OBJECT_TYPES object_type() const { + return object_type_; + } + uint64_t entrypoint() const { + return entrypoint_; + } + + ENDIANNESS endianness() const { + return endianness_; + } + + bool is_32() const { + return ((uint64_t)modes_ & (uint64_t)MODES::BITS_32) != 0; + } + + bool is_64() const { + return ((uint64_t)modes_ & (uint64_t)MODES::BITS_64) != 0; + } + + void accept(Visitor& visitor) const override; + + LIEF_API friend std::ostream& operator<<(std::ostream& os, const Header& hdr); + + protected: + ARCHITECTURES architecture_ = ARCHITECTURES::UNKNOWN; + OBJECT_TYPES object_type_ = OBJECT_TYPES::UNKNOWN; + uint64_t entrypoint_ = 0; + ENDIANNESS endianness_ = ENDIANNESS::UNKNOWN; + MODES modes_ = MODES::NONE; +}; + +LIEF_API const char* to_string(Header::ARCHITECTURES e); +LIEF_API const char* to_string(Header::OBJECT_TYPES e); +LIEF_API const char* to_string(Header::MODES e); +LIEF_API const char* to_string(Header::ENDIANNESS e); +} + +ENABLE_BITMASK_OPERATORS(LIEF::Header::MODES); + +#endif diff --git a/deps/LIEF/include/LIEF/Abstract/Parser.hpp b/deps/LIEF/include/LIEF/Abstract/Parser.hpp new file mode 100644 index 00000000000000..9a3665192bb635 --- /dev/null +++ b/deps/LIEF/include/LIEF/Abstract/Parser.hpp @@ -0,0 +1,60 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#ifndef LIEF_ABSTRACT_PARSER_H +#define LIEF_ABSTRACT_PARSER_H + +#include +#include +#include + +#include "LIEF/visibility.h" + +namespace LIEF { +class BinaryStream; +class Binary; + +/// Main interface to parse an executable regardless of its format +class LIEF_API Parser { + public: + /// Construct an LIEF::Binary from the given filename + /// + /// @warning If the target file is a FAT Mach-O, it will return the **last** one + /// @see LIEF::MachO::Parser::parse + static std::unique_ptr parse(const std::string& filename); + + + /// Construct an LIEF::Binary from the given raw data + /// + /// @warning If the target file is a FAT Mach-O, it will return the **last** one + /// @see LIEF::MachO::Parser::parse + static std::unique_ptr parse(const std::vector& raw); + + /// Construct an LIEF::Binary from the given stream + /// + /// @warning If the target file is a FAT Mach-O, it will return the **last** one + /// @see LIEF::MachO::Parser::parse + static std::unique_ptr parse(std::unique_ptr stream); + + protected: + Parser(const std::string& file); + uint64_t binary_size_ = 0; + + virtual ~Parser(); + Parser(); +}; +} + +#endif diff --git a/deps/LIEF/include/LIEF/Abstract/Relocation.hpp b/deps/LIEF/include/LIEF/Abstract/Relocation.hpp new file mode 100644 index 00000000000000..c22f8e7a508cf6 --- /dev/null +++ b/deps/LIEF/include/LIEF/Abstract/Relocation.hpp @@ -0,0 +1,98 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
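A rough sketch of how the abstract parser and header fit together: `Parser::parse()` yields a format-agnostic `LIEF::Binary`, whose header can then be queried through the accessors declared above. The `header()` accessor on `Binary` (backed by the `get_abstract_header()` hook shown earlier) and the input path are assumptions for illustration:

```cpp
#include <iostream>
#include <LIEF/LIEF.hpp>

int main() {
  // "/bin/ls" is only an example path
  std::unique_ptr<LIEF::Binary> bin = LIEF::Parser::parse("/bin/ls");
  if (bin == nullptr) {
    return 1;
  }

  // header() is assumed here: the abstract accessor built from get_abstract_header()
  LIEF::Header hdr = bin->header();
  std::cout << LIEF::to_string(hdr.architecture()) << '\n'
            << "64-bit: " << std::boolalpha << hdr.is_64() << '\n'
            << "entrypoint: 0x" << std::hex << hdr.entrypoint() << '\n';
  return 0;
}
```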
+ */ +#ifndef LIEF_ABSTRACT_RELOCATION_H +#define LIEF_ABSTRACT_RELOCATION_H + +#include +#include + +#include "LIEF/Object.hpp" +#include "LIEF/visibility.h" + +namespace LIEF { +/// Class which represents an abstracted Relocation +class LIEF_API Relocation : public Object { + + public: + Relocation() = default; + + /// Constructor from a relocation's address and size + Relocation(uint64_t address, uint8_t size) : + address_(address), + size_(size) + {} + + ~Relocation() override = default; + + Relocation& operator=(const Relocation&) = default; + Relocation(const Relocation&) = default; + void swap(Relocation& other) { + std::swap(address_, other.address_); + std::swap(size_, other.size_); + } + + /// Relocation's address + virtual uint64_t address() const { + return address_; + } + + /// Relocation size in **bits** + virtual size_t size() const { + return size_; + } + + virtual void address(uint64_t address) { + address_ = address; + } + + virtual void size(size_t size) { + size_ = (uint8_t)size; + } + + /// Method so that the ``visitor`` can visit us + void accept(Visitor& visitor) const override; + + + /// Comparaison based on the Relocation's **address** + virtual bool operator<(const Relocation& rhs) const { + return address() < rhs.address(); + } + + /// Comparaison based on the Relocation's **address** + virtual bool operator<=(const Relocation& rhs) const { + return !(address() > rhs.address()); + } + + /// Comparaison based on the Relocation's **address** + virtual bool operator>(const Relocation& rhs) const { + return address() > rhs.address(); + } + + /// Comparaison based on the Relocation's **address** + virtual bool operator>=(const Relocation& rhs) const { + return !(address() < rhs.address()); + } + + LIEF_API friend std::ostream& operator<<(std::ostream& os, const Relocation& entry); + + protected: + uint64_t address_ = 0; + uint8_t size_ = 0; +}; + + +} +#endif diff --git a/deps/LIEF/include/LIEF/Abstract/Section.hpp b/deps/LIEF/include/LIEF/Abstract/Section.hpp new file mode 100644 index 00000000000000..eb89b42ccc57a4 --- /dev/null +++ b/deps/LIEF/include/LIEF/Abstract/Section.hpp @@ -0,0 +1,131 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
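Because the comparison operators above are defined on the relocation address, standard algorithms can order abstract relocations directly. A small illustrative sketch with made-up addresses and sizes:

```cpp
#include <algorithm>
#include <vector>
#include <LIEF/Abstract/Relocation.hpp>

int main() {
  // Illustrative relocations: (address, size in bits)
  std::vector<LIEF::Relocation> relocs = {
    {0x2010, 64}, {0x1000, 32}, {0x1F00, 64},
  };

  // operator< compares by address, so std::sort orders the entries by address
  std::sort(relocs.begin(), relocs.end());
  return relocs.front().address() == 0x1000 ? 0 : 1;
}
```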
+ */ +#ifndef LIEF_ABSTRACT_SECTION_H +#define LIEF_ABSTRACT_SECTION_H + +#include +#include +#include + +#include "LIEF/span.hpp" +#include "LIEF/Object.hpp" +#include "LIEF/visibility.h" + +namespace LIEF { +/// Class which represents an abstracted section +class LIEF_API Section : public Object { + public: + static constexpr size_t npos = -1; + + Section() = default; + Section(std::string name) : + name_(std::move(name)) + {} + + ~Section() override = default; + + Section& operator=(const Section&) = default; + Section(const Section&) = default; + + /// section's name + virtual std::string name() const { + return name_.c_str(); + } + + /// Return the **complete** section's name which might + /// trailing (``0``) bytes + virtual const std::string& fullname() const { + return name_; + } + + /// section's content + virtual span content() const { + return {}; + } + + /// Change the section size + virtual void size(uint64_t size) { + size_ = size; + } + + /// section's size (size in the binary, not the virtual size) + virtual uint64_t size() const { + return size_; + } + + /// Offset in the binary + virtual uint64_t offset() const { + return offset_; + } + + /// Address where the section should be mapped + virtual uint64_t virtual_address() const { + return virtual_address_; + } + + virtual void virtual_address(uint64_t virtual_address) { + virtual_address_ = virtual_address; + } + + /// Change the section's name + virtual void name(std::string name) { + name_ = std::move(name); + } + + /// Change section content + virtual void content(const std::vector&) {} + + virtual void offset(uint64_t offset) { + offset_ = offset; + } + + /// Section's entropy + double entropy() const; + + // Search functions + // ================ + size_t search(uint64_t integer, size_t pos, size_t size) const; + size_t search(const std::vector& pattern, size_t pos = 0) const; + size_t search(const std::string& pattern, size_t pos = 0) const; + size_t search(uint64_t integer, size_t pos = 0) const; + + // Search all functions + // ==================== + std::vector search_all(uint64_t v, size_t size) const; + + std::vector search_all(uint64_t v) const; + + std::vector search_all(const std::string& v) const; + + /// Method so that the ``visitor`` can visit us + void accept(Visitor& visitor) const override; + + + LIEF_API friend std::ostream& operator<<(std::ostream& os, const Section& entry); + + protected: + std::string name_; + uint64_t virtual_address_ = 0; + uint64_t size_ = 0; + uint64_t offset_ = 0; + + private: + template + std::vector search_all_(const T& v) const; +}; +} + +#endif diff --git a/deps/LIEF/include/LIEF/Abstract/Symbol.hpp b/deps/LIEF/include/LIEF/Abstract/Symbol.hpp new file mode 100644 index 00000000000000..8d9802beec346c --- /dev/null +++ b/deps/LIEF/include/LIEF/Abstract/Symbol.hpp @@ -0,0 +1,97 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
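The `search()`/`entropy()` helpers above operate on whatever `content()` the concrete (ELF/PE/Mach-O) section exposes; the abstract base itself holds no data. A brief sketch, assuming `sec` comes from a parsed binary and with a purely illustrative marker string:

```cpp
#include <iostream>
#include <LIEF/Abstract/Section.hpp>

// `sec` is assumed to come from a parsed binary (i.e. a format-specific section
// that overrides content()); the abstract base returns an empty span.
void inspect(const LIEF::Section& sec) {
  std::cout << sec.name() << ": size=" << sec.size()
            << " va=0x" << std::hex << sec.virtual_address() << std::dec
            << " entropy=" << sec.entropy() << '\n';

  // Look for an illustrative marker string in the section's content
  size_t pos = sec.search("UPX!");
  if (pos != LIEF::Section::npos) {
    std::cout << "marker found at offset " << pos << '\n';
  }
}
```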
+ */ +#ifndef LIEF_ABSTRACT_SYMBOLS_H +#define LIEF_ABSTRACT_SYMBOLS_H + +#include +#include + +#include "LIEF/Object.hpp" +#include "LIEF/visibility.h" + +namespace LIEF { + +/// This class represents a symbol in an executable format. +class LIEF_API Symbol : public Object { + public: + Symbol() = default; + Symbol(std::string name) : + name_(std::move(name)) + {} + + Symbol(std::string name, uint64_t value) : + name_(std::move(name)), value_(value) + {} + + Symbol(std::string name, uint64_t value, uint64_t size) : + name_(std::move(name)), value_(value), size_(size) + {} + + Symbol(const Symbol&) = default; + Symbol& operator=(const Symbol&) = default; + + Symbol(Symbol&&) = default; + Symbol& operator=(Symbol&&) = default; + + ~Symbol() override = default; + + void swap(Symbol& other) noexcept; + + /// Return the symbol's name + virtual const std::string& name() const { + return name_; + } + + virtual std::string& name() { + return name_; + } + + /// Set symbol name + virtual void name(std::string name) { + name_ = std::move(name); + } + + // Symbol's value which is usually the **address** of the symbol + virtual uint64_t value() const { + return value_; + } + virtual void value(uint64_t value) { + value_ = value; + } + + /// This size of the symbol (when applicable) + virtual uint64_t size() const { + return size_; + } + + virtual void size(uint64_t value) { + size_ = value; + } + + /// Method so that the ``visitor`` can visit us + void accept(Visitor& visitor) const override; + + LIEF_API friend std::ostream& operator<<(std::ostream& os, const Symbol& entry); + + protected: + std::string name_; + uint64_t value_ = 0; + uint64_t size_ = 0; +}; +} + +#endif + diff --git a/deps/LIEF/include/LIEF/Abstract/hash.hpp b/deps/LIEF/include/LIEF/Abstract/hash.hpp new file mode 100644 index 00000000000000..26f4a375f446e6 --- /dev/null +++ b/deps/LIEF/include/LIEF/Abstract/hash.hpp @@ -0,0 +1,49 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#ifndef LIEF_ABSTRACT_HASH_H +#define LIEF_ABSTRACT_HASH_H + +#include "LIEF/visibility.h" +#include "LIEF/hash.hpp" + +namespace LIEF { +class Binary; +class Header; +class Relocation; +class Object; + +class LIEF_API AbstractHash : public LIEF::Hash { + public: + static LIEF::Hash::value_type hash(const Object& obj); + + public: + using LIEF::Hash::Hash; + using LIEF::Hash::visit; + + public: + void visit(const Binary& binary) override; + void visit(const Header& header) override; + void visit(const Section& section) override; + void visit(const Symbol& symbol) override; + void visit(const Relocation& relocation) override; + void visit(const Function& function) override; + + ~AbstractHash() override; +}; + +} + +#endif diff --git a/deps/LIEF/include/LIEF/Abstract/json.hpp b/deps/LIEF/include/LIEF/Abstract/json.hpp new file mode 100644 index 00000000000000..fa05cec3458482 --- /dev/null +++ b/deps/LIEF/include/LIEF/Abstract/json.hpp @@ -0,0 +1,30 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_ABSTRACT_JSON_H +#define LIEF_ABSTRACT_JSON_H + +#include "LIEF/visibility.h" +#include + +namespace LIEF { +class Object; + +LIEF_API std::string to_json_from_abstract(const Object& v); + +} + +#endif // LIEF_JSON_SUPPORT + diff --git a/deps/LIEF/include/LIEF/BinaryStream/ASN1Reader.hpp b/deps/LIEF/include/LIEF/BinaryStream/ASN1Reader.hpp new file mode 100644 index 00000000000000..385bb35dd22c08 --- /dev/null +++ b/deps/LIEF/include/LIEF/BinaryStream/ASN1Reader.hpp @@ -0,0 +1,72 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
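A short sketch combining the two helpers above: `AbstractHash::hash()` for a format-agnostic digest and `to_json_from_abstract()` for a JSON dump of the same abstract view. It assumes LIEF was built with JSON support and the input path is illustrative:

```cpp
#include <iostream>
#include <LIEF/LIEF.hpp>
#include <LIEF/Abstract/hash.hpp>
#include <LIEF/Abstract/json.hpp>

int main() {
  std::unique_ptr<LIEF::Binary> bin = LIEF::Parser::parse("/bin/ls"); // illustrative path
  if (bin == nullptr) {
    return 1;
  }

  // Hash computed over the abstract (format-agnostic) representation
  auto digest = LIEF::AbstractHash::hash(*bin);
  std::cout << "abstract hash: " << digest << '\n';

  // JSON serialization of the same abstract view (requires JSON support)
  std::cout << LIEF::to_json_from_abstract(*bin) << '\n';
  return 0;
}
```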
+ */ +#ifndef LIEF_ASN1_READER_H +#define LIEF_ASN1_READER_H + +#include +#include +#include +#include + +#include "LIEF/errors.hpp" + +struct mbedtls_x509_crt; +struct mbedtls_x509_time; + +namespace LIEF { +class BinaryStream; + +class ASN1Reader { + public: + ASN1Reader() = delete; + + ASN1Reader(BinaryStream& stream) : + stream_(stream) + {} + + ASN1Reader(const ASN1Reader&) = delete; + ASN1Reader& operator=(const ASN1Reader&) = delete; + + + result is_tag(int tag); + + result read_tag(int tag); + result read_len(); + result read_alg(); + result read_oid(); + result read_bool(); + result read_int(); + result read_int64(); + result> read_large_int(); + + result> read_bitstring(); + result> read_octet_string(); + result read_utf8_string(); + result> read_cert(); + result x509_read_names(); + result> x509_read_serial(); + result> x509_read_time(); + + std::string get_str_tag(); + + static std::string tag2str(int tag); + + private: + BinaryStream& stream_; +}; + +} +#endif diff --git a/deps/LIEF/include/LIEF/BinaryStream/BinaryStream.hpp b/deps/LIEF/include/LIEF/BinaryStream/BinaryStream.hpp new file mode 100644 index 00000000000000..48c16034e1df4f --- /dev/null +++ b/deps/LIEF/include/LIEF/BinaryStream/BinaryStream.hpp @@ -0,0 +1,476 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_BINARY_STREAM_H +#define LIEF_BINARY_STREAM_H + +#include +#include +#include +#include +#include + +#include "LIEF/endianness_support.hpp" +#include "LIEF/errors.hpp" +#include "LIEF/visibility.h" + +namespace LIEF { +class ASN1Reader; + +/// Class that is used to a read stream of data from different sources +class LIEF_API BinaryStream { + public: + friend class ASN1Reader; + + enum class STREAM_TYPE { + UNKNOWN = 0, + VECTOR, + MEMORY, + SPAN, + FILE, + + ELF_DATA_HANDLER, + }; + + BinaryStream(STREAM_TYPE type) : + stype_(type) + {} + virtual ~BinaryStream() = default; + virtual uint64_t size() const = 0; + + STREAM_TYPE type() const { + return stype_; + } + + result read_uleb128(size_t* size = nullptr) const; + result read_sleb128(size_t* size = nullptr) const; + + result read_dwarf_encoded(uint8_t encoding) const; + + result read_string(size_t maxsize = ~static_cast(0)) const; + result peek_string(size_t maxsize = ~static_cast(0)) const; + result peek_string_at(size_t offset, size_t maxsize = ~static_cast(0)) const; + + result read_u16string() const; + result peek_u16string() const; + + result read_mutf8(size_t maxsize = ~static_cast(0)) const; + + result read_u16string(size_t length) const; + result peek_u16string(size_t length) const; + result peek_u16string_at(size_t offset, size_t length) const; + + + virtual ok_error_t peek_data(std::vector& container, + uint64_t offset, uint64_t size, + uint64_t virtual_address = 0) + { + if (size == 0) { + return ok(); + } + // Even though offset + size < ... => offset < ... 
+ // the addition could overflow so it's worth checking both + const bool read_ok = offset <= this->size() && (offset + size) <= this->size() + /* Check for an overflow */ + && (static_cast(offset) >= 0 && static_cast(size) >= 0) + && (static_cast(offset + size) >= 0); + if (!read_ok) { + return make_error_code(lief_errors::read_error); + } + container.resize(size); + if (peek_in(container.data(), offset, size, virtual_address)) { + return ok(); + } + return make_error_code(lief_errors::read_error); + } + + virtual ok_error_t read_data(std::vector& container, uint64_t size) { + if (!peek_data(container, pos(), size)) { + return make_error_code(lief_errors::read_error); + } + + increment_pos(size); + return ok(); + } + + ok_error_t read_data(std::vector& container) { + const size_t size = this->size() - this->pos(); + return read_data(container, size); + } + + template + ok_error_t read_objects(std::vector& container, uint64_t count) { + if (count == 0) { + return ok(); + } + const size_t size = count * sizeof(T); + auto ret = peek_objects(container, count); + if (!ret) { + return make_error_code(lief_errors::read_error); + } + increment_pos(size); + return ok(); + } + + template + ok_error_t peek_objects(std::vector& container, uint64_t count) { + return peek_objects_at(pos(), container, count); + } + + template + ok_error_t peek_objects_at(uint64_t offset, std::vector& container, uint64_t count) { + if (count == 0) { + return ok(); + } + const auto current_p = pos(); + setpos(offset); + + const size_t size = count * sizeof(T); + + if (!can_read(offset, size)) { + setpos(current_p); + return make_error_code(lief_errors::read_error); + } + + container.resize(count); + + if (!peek_in(container.data(), pos(), size)) { + setpos(current_p); + return make_error_code(lief_errors::read_error); + } + + setpos(current_p); + return ok(); + } + + void setpos(size_t pos) const { + pos_ = pos; + } + + const BinaryStream& increment_pos(size_t value) const { + pos_ += value; + return *this; + } + + void decrement_pos(size_t value) const { + if (pos_ > value) { + pos_ -= value; + } else { + pos_ = 0; + } + } + + size_t pos() const { + return pos_; + } + + operator bool() const { + return pos_ < size(); + } + + template + const T* read_array(size_t size) const; + + template + ok_error_t peek_array(std::array& dst) const { + if /*constexpr*/ (N == 0) { + return ok(); + } + // Even though offset + size < ... => offset < ... 
+ // the addition could overflow so it's worth checking both + const bool read_ok = pos_ <= size() && (pos_ + N) <= size() + /* Check for an overflow */ + && (static_cast(pos_) >= 0 && static_cast(N) >= 0) + && (static_cast(pos_ + N) >= 0); + + if (!read_ok) { + return make_error_code(lief_errors::read_error); + } + if (peek_in(dst.data(), pos_, N)) { + return ok(); + } + return make_error_code(lief_errors::read_error); + } + + template + ok_error_t read_array(std::array& dst) const { + if (!peek_array(dst)) { + return make_error_code(lief_errors::read_error); + } + + increment_pos(N); + return ok(); + } + + template + result peek() const; + + template + result peek(size_t offset) const; + + template + const T* peek_array(size_t size) const; + + template + const T* peek_array(size_t offset, size_t size) const; + + template + result read() const; + + template + bool can_read() const; + + template + bool can_read(size_t offset) const; + + bool can_read(int64_t offset, int64_t size) const { + return offset < (int64_t)this->size() && (offset + size) < (int64_t)this->size(); + } + + size_t align(size_t align_on) const; + + void set_endian_swap(bool swap) { + endian_swap_ = swap; + } + + template + static bool is_all_zero(const T& buffer) { + const auto* ptr = reinterpret_cast(&buffer); + return std::all_of(ptr, ptr + sizeof(T), + [] (uint8_t x) { return x == 0; }); + } + + bool should_swap() const { + return endian_swap_; + } + + virtual const uint8_t* p() const { + return nullptr; + } + + virtual uint8_t* start() { + return const_cast(static_cast(this)->start()); + } + + virtual uint8_t* p() { + return const_cast(static_cast(this)->p()); + } + + virtual uint8_t* end() { + return const_cast(static_cast(this)->end()); + } + + virtual const uint8_t* start() const { + return nullptr; + } + + virtual const uint8_t* end() const { + return nullptr; + } + + virtual result read_at(uint64_t offset, uint64_t size, + uint64_t virtual_address = 0) const = 0; + virtual ok_error_t peek_in(void* dst, uint64_t offset, uint64_t size, + uint64_t virtual_address = 0) const { + if (auto raw = read_at(offset, size, virtual_address)) { + if (dst == nullptr) { + return make_error_code(lief_errors::read_error); + } + + const void* ptr = *raw; + + if (ptr == nullptr) { + return make_error_code(lief_errors::read_error); + } + + memcpy(dst, ptr, size); + return ok(); + } + return make_error_code(lief_errors::read_error); + } + + protected: + BinaryStream() = default; + + mutable size_t pos_ = 0; + bool endian_swap_ = false; + STREAM_TYPE stype_ = STREAM_TYPE::UNKNOWN; +}; + +class ScopedStream { + public: + ScopedStream(const ScopedStream&) = delete; + ScopedStream& operator=(const ScopedStream&) = delete; + + ScopedStream(ScopedStream&&) = delete; + ScopedStream& operator=(ScopedStream&&) = delete; + + explicit ScopedStream(BinaryStream& stream, uint64_t pos) : + pos_{stream.pos()}, + stream_{stream} + { + stream_.setpos(pos); + } + + explicit ScopedStream(BinaryStream& stream) : + pos_{stream.pos()}, + stream_{stream} + {} + + ~ScopedStream() { + stream_.setpos(pos_); + } + + BinaryStream* operator->() { + return &stream_; + } + + BinaryStream& operator*() { + return stream_; + } + + const BinaryStream& operator*() const { + return stream_; + } + + private: + uint64_t pos_ = 0; + BinaryStream& stream_; +}; + +class ToggleEndianness { + public: + ToggleEndianness(const ToggleEndianness&) = delete; + ToggleEndianness& operator=(const ToggleEndianness&) = delete; + + ToggleEndianness(ToggleEndianness&&) = delete; + 
ToggleEndianness& operator=(ToggleEndianness&&) = delete; + + explicit ToggleEndianness(BinaryStream& stream, bool value) : + endian_swap_(stream.should_swap()), + stream_{stream} + { + stream.set_endian_swap(value); + } + + explicit ToggleEndianness(BinaryStream& stream) : + endian_swap_(stream.should_swap()), + stream_{stream} + { + stream.set_endian_swap(!stream_.should_swap()); + } + + ~ToggleEndianness() { + stream_.set_endian_swap(endian_swap_); + } + + BinaryStream* operator->() { + return &stream_; + } + + BinaryStream& operator*() { + return stream_; + } + + const BinaryStream& operator*() const { + return stream_; + } + + private: + bool endian_swap_ = false; + BinaryStream& stream_; +}; + + +template +result BinaryStream::read() const { + result tmp = this->peek(); + if (!tmp) { + return tmp; + } + this->increment_pos(sizeof(T)); + return tmp; +} + +template +result BinaryStream::peek() const { + const auto current_p = pos(); + T ret{}; + if (auto res = peek_in(&ret, pos(), sizeof(T))) { + setpos(current_p); + if (endian_swap_) { + swap_endian(&ret); + } + return ret; + } + + setpos(current_p); + return make_error_code(lief_errors::read_error); +} + +template +result BinaryStream::peek(size_t offset) const { + const size_t saved_offset = this->pos(); + this->setpos(offset); + result r = this->peek(); + this->setpos(saved_offset); + return r; +} + + +template +const T* BinaryStream::peek_array(size_t size) const { + result raw = this->read_at(this->pos(), sizeof(T) * size); + if (!raw) { + return nullptr; + } + return reinterpret_cast(raw.value()); +} + +template +const T* BinaryStream::peek_array(size_t offset, size_t size) const { + const size_t saved_offset = this->pos(); + this->setpos(offset); + const T* r = this->peek_array(size); + this->setpos(saved_offset); + return r; +} + + +template +bool BinaryStream::can_read() const { + // Even though pos_ + sizeof(T) < ... => pos_ < ... + // the addition could overflow so it's worth checking both + return pos_ < size() && (pos_ + sizeof(T)) < size(); +} + + +template +bool BinaryStream::can_read(size_t offset) const { + // Even though offset + sizeof(T) < ... => offset < ... + // the addition could overflow so it's worth checking both + return offset < size() && (offset + sizeof(T)) < size(); +} + + +template +const T* BinaryStream::read_array(size_t size) const { + const T* tmp = this->peek_array(size); + this->increment_pos(sizeof(T) * size); + return tmp; +} + +} +#endif diff --git a/deps/LIEF/include/LIEF/BinaryStream/FileStream.hpp b/deps/LIEF/include/LIEF/BinaryStream/FileStream.hpp new file mode 100644 index 00000000000000..0e5d3e5cc86d9e --- /dev/null +++ b/deps/LIEF/include/LIEF/BinaryStream/FileStream.hpp @@ -0,0 +1,80 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#ifndef LIEF_FILE_STREAM_H +#define LIEF_FILE_STREAM_H + +#include +#include +#include + +#include "LIEF/errors.hpp" +#include "LIEF/BinaryStream/BinaryStream.hpp" +#include "LIEF/visibility.h" + +namespace LIEF { + +/// Stream interface over a `std::ifstream` +class LIEF_API FileStream : public BinaryStream { + public: + static result from_file(const std::string& file); + FileStream(std::ifstream fs, uint64_t size) : + BinaryStream(STREAM_TYPE::FILE), + ifs_(std::move(fs)), + size_(size) + {} + + FileStream() = delete; + + FileStream(const FileStream&) = delete; + FileStream& operator=(const FileStream&) = delete; + + FileStream(FileStream&& other) noexcept = default; + FileStream& operator=(FileStream&& other) noexcept = default; + + uint64_t size() const override { + return size_; + } + + std::vector content() const; + ~FileStream() override = default; + + static bool classof(const BinaryStream& stream) { + return stream.type() == STREAM_TYPE::FILE; + } + + ok_error_t peek_in(void* dst, uint64_t offset, uint64_t size, + uint64_t /* virtual_address */= 0) const override { + if (offset > size_ || offset + size > size_) { + return make_error_code(lief_errors::read_error); + } + const auto pos = ifs_.tellg(); + ifs_.seekg(offset); + ifs_.read(static_cast(dst), size); + ifs_.seekg(pos); + return ok(); + } + + result read_at(uint64_t, uint64_t, uint64_t) const override { + return make_error_code(lief_errors::not_supported); + } + + protected: + mutable std::ifstream ifs_; + uint64_t size_ = 0; +}; +} + +#endif diff --git a/deps/LIEF/include/LIEF/BinaryStream/MemoryStream.hpp b/deps/LIEF/include/LIEF/BinaryStream/MemoryStream.hpp new file mode 100644 index 00000000000000..9bce7e764be3e9 --- /dev/null +++ b/deps/LIEF/include/LIEF/BinaryStream/MemoryStream.hpp @@ -0,0 +1,89 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#ifndef LIEF_MEMORY_STREAM_H +#define LIEF_MEMORY_STREAM_H + +#include + +#include "LIEF/errors.hpp" +#include "LIEF/visibility.h" +#include "LIEF/BinaryStream/BinaryStream.hpp" + +namespace LIEF { +class Binary; +class LIEF_API MemoryStream : public BinaryStream { + public: + using BinaryStream::p; + using BinaryStream::end; + using BinaryStream::start; + + MemoryStream() = delete; + MemoryStream(uintptr_t base_address); + MemoryStream(uintptr_t base_address, uint64_t size) : + BinaryStream(BinaryStream::STREAM_TYPE::MEMORY), + baseaddr_(base_address), + size_(size) + {} + + MemoryStream(const MemoryStream&) = delete; + MemoryStream& operator=(const MemoryStream&) = delete; + + MemoryStream(MemoryStream&&) noexcept = default; + MemoryStream& operator=(MemoryStream&&) noexcept = default; + + uintptr_t base_address() const { + return this->baseaddr_; + } + + const uint8_t* p() const override { + return start() + pos(); + } + + const uint8_t* start() const override { + return reinterpret_cast(baseaddr_); + } + + const uint8_t* end() const override { + return start() + size_; + } + + void binary(Binary& bin) { + this->binary_ = &bin; + } + + Binary* binary() { + return this->binary_; + } + + uint64_t size() const override { + return size_; + } + + ~MemoryStream() override = default; + + static bool classof(const BinaryStream& stream) { + return stream.type() == BinaryStream::STREAM_TYPE::MEMORY; + } + + protected: + result read_at(uint64_t offset, uint64_t size, uint64_t va) const override; + uintptr_t baseaddr_ = 0; + uint64_t size_ = 0; + Binary* binary_ = nullptr; +}; +} + +#endif diff --git a/deps/LIEF/include/LIEF/BinaryStream/SpanStream.hpp b/deps/LIEF/include/LIEF/BinaryStream/SpanStream.hpp new file mode 100644 index 00000000000000..ce7e8b4c003322 --- /dev/null +++ b/deps/LIEF/include/LIEF/BinaryStream/SpanStream.hpp @@ -0,0 +1,129 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#ifndef LIEF_SPAN_STREAM_H +#define LIEF_SPAN_STREAM_H + +#include +#include +#include +#include +#include + +#include "LIEF/errors.hpp" +#include "LIEF/span.hpp" +#include "LIEF/visibility.h" +#include "LIEF/BinaryStream/BinaryStream.hpp" + +namespace LIEF { +class VectorStream; +class LIEF_API SpanStream : public BinaryStream { + public: + using BinaryStream::p; + using BinaryStream::end; + using BinaryStream::start; + + static result from_vector(const std::vector& data) { + return SpanStream(data); + } + + template + static result from_array(const std::array& data) { + return SpanStream(data.data(), N); + } + + SpanStream(span data) : + SpanStream(data.data(), data.size()) + {} + + SpanStream(span data) : + SpanStream(data.data(), data.size()) + {} + + SpanStream(const uint8_t* p, size_t size) : + BinaryStream(BinaryStream::STREAM_TYPE::SPAN), + data_{p, p + size} + {} + + SpanStream(const std::vector& data) : + SpanStream(data.data(), data.size()) + {} + + std::unique_ptr clone() const { + return std::unique_ptr(new SpanStream(*this)); + } + + SpanStream() = delete; + + SpanStream(const SpanStream& other) = default; + SpanStream& operator=(const SpanStream& other) = default; + + SpanStream(SpanStream&& other) noexcept = default; + SpanStream& operator=(SpanStream&& other) noexcept = default; + + uint64_t size() const override { + return data_.size(); + } + + const uint8_t* p() const override { + return data_.data() + this->pos(); + } + + const uint8_t* start() const override { + return data_.data(); + } + + const uint8_t* end() const override { + return data_.data() + size(); + } + + std::vector content() const { + return {data_.begin(), data_.end()}; + } + + result slice(size_t offset, size_t size) const { + if (offset > data_.size() || (offset + size) > data_.size()) { + return make_error_code(lief_errors::read_out_of_bound); + } + return data_.subspan(offset, size); + } + result slice(size_t offset) const { + if (offset > data_.size()) { + return make_error_code(lief_errors::read_out_of_bound); + } + return data_.subspan(offset, data_.size() - offset); + } + + std::unique_ptr to_vector() const; + + static bool classof(const BinaryStream& stream) { + return stream.type() == BinaryStream::STREAM_TYPE::SPAN; + } + + ~SpanStream() override = default; + + protected: + result read_at(uint64_t offset, uint64_t size, uint64_t /*va*/) const override { + const uint64_t stream_size = this->size(); + if (offset > stream_size || (offset + size) > stream_size) { + return make_error_code(lief_errors::read_error); + } + return data_.data() + offset; + } + span data_; +}; +} + +#endif diff --git a/deps/LIEF/include/LIEF/BinaryStream/VectorStream.hpp b/deps/LIEF/include/LIEF/BinaryStream/VectorStream.hpp new file mode 100644 index 00000000000000..44293aa8c85795 --- /dev/null +++ b/deps/LIEF/include/LIEF/BinaryStream/VectorStream.hpp @@ -0,0 +1,97 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
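A self-contained sketch of the `BinaryStream` read API using the `SpanStream` defined above. The byte values are illustrative, and the integer is read in the host's byte order since no endian swap is requested:

```cpp
#include <cstdint>
#include <iostream>
#include <vector>
#include <LIEF/BinaryStream/SpanStream.hpp>

int main() {
  // Illustrative buffer: a 4-byte integer followed by a NUL-terminated string
  std::vector<uint8_t> raw = {0x78, 0x56, 0x34, 0x12, 'h', 'i', '\0'};
  LIEF::SpanStream stream(raw);

  if (auto value = stream.read<uint32_t>()) {
    std::cout << std::hex << *value << '\n';  // 12345678 on a little-endian host
  }
  if (auto str = stream.read_string()) {
    std::cout << *str << '\n';                // "hi"
  }
  return 0;
}
```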
+ */ +#ifndef LIEF_VECTOR_STREAM_H +#define LIEF_VECTOR_STREAM_H + +#include +#include +#include + +#include "LIEF/errors.hpp" +#include "LIEF/visibility.h" +#include "LIEF/BinaryStream/BinaryStream.hpp" + +namespace LIEF { +class SpanStream; +class LIEF_API VectorStream : public BinaryStream { + public: + using BinaryStream::p; + using BinaryStream::end; + using BinaryStream::start; + + static result from_file(const std::string& file); + VectorStream(std::vector data) : + BinaryStream(BinaryStream::STREAM_TYPE::VECTOR), + binary_(std::move(data)), + size_(binary_.size()) + {} + + VectorStream() = delete; + + // VectorStream should not be copyable for performances reasons + VectorStream(const VectorStream&) = delete; + VectorStream& operator=(const VectorStream&) = delete; + + VectorStream(VectorStream&& other) noexcept = default; + VectorStream& operator=(VectorStream&& other) noexcept = default; + + uint64_t size() const override { + return size_; + } + + const std::vector& content() const { + return binary_; + } + + std::vector&& move_content() { + size_ = 0; + return std::move(binary_); + } + + const uint8_t* p() const override { + return this->binary_.data() + this->pos(); + } + + const uint8_t* start() const override { + return this->binary_.data(); + } + + const uint8_t* end() const override { + return this->binary_.data() + this->binary_.size(); + } + + + std::unique_ptr slice(uint32_t offset, size_t size) const; + std::unique_ptr slice(uint32_t offset) const; + + static bool classof(const BinaryStream& stream) { + return stream.type() == STREAM_TYPE::VECTOR; + } + + protected: + result read_at(uint64_t offset, uint64_t size, uint64_t /*va*/) const override { + const uint64_t stream_size = this->size(); + if (offset > stream_size || (offset + size) > stream_size) { + return make_error_code(lief_errors::read_error); + } + return binary_.data() + offset; + } + std::vector binary_; + uint64_t size_ = 0; // Original size without alignment +}; +} + +#endif diff --git a/deps/LIEF/include/LIEF/COFF.hpp b/deps/LIEF/include/LIEF/COFF.hpp new file mode 100644 index 00000000000000..dcc13dbde0d3e8 --- /dev/null +++ b/deps/LIEF/include/LIEF/COFF.hpp @@ -0,0 +1,42 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#ifndef LIEF_COFF_H +#define LIEF_COFF_H +#include "LIEF/config.h" + +#if defined(LIEF_COFF_SUPPORT) +#include "LIEF/COFF/Binary.hpp" +#include "LIEF/COFF/utils.hpp" +#include "LIEF/COFF/Parser.hpp" +#include "LIEF/COFF/Header.hpp" +#include "LIEF/COFF/BigObjHeader.hpp" +#include "LIEF/COFF/RegularHeader.hpp" +#include "LIEF/COFF/ParserConfig.hpp" +#include "LIEF/COFF/Section.hpp" +#include "LIEF/COFF/Relocation.hpp" +#include "LIEF/COFF/Symbol.hpp" +#include "LIEF/COFF/String.hpp" + +#include "LIEF/COFF/AuxiliarySymbol.hpp" +#include "LIEF/COFF/AuxiliarySymbols/AuxiliarybfAndefSymbol.hpp" +#include "LIEF/COFF/AuxiliarySymbols/AuxiliaryCLRToken.hpp" +#include "LIEF/COFF/AuxiliarySymbols/AuxiliaryFile.hpp" +#include "LIEF/COFF/AuxiliarySymbols/AuxiliaryFunctionDefinition.hpp" +#include "LIEF/COFF/AuxiliarySymbols/AuxiliarySectionDefinition.hpp" +#include "LIEF/COFF/AuxiliarySymbols/AuxiliaryWeakExternal.hpp" +#endif + +#endif diff --git a/deps/LIEF/include/LIEF/COFF/AuxiliarySymbol.hpp b/deps/LIEF/include/LIEF/COFF/AuxiliarySymbol.hpp new file mode 100644 index 00000000000000..398776a324d418 --- /dev/null +++ b/deps/LIEF/include/LIEF/COFF/AuxiliarySymbol.hpp @@ -0,0 +1,125 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_COFF_AUXILIARY_SYMBOL_H +#define LIEF_COFF_AUXILIARY_SYMBOL_H + +#include +#include +#include + +#include "LIEF/visibility.h" +#include "LIEF/span.hpp" + +namespace LIEF { +class BinaryStream; + +namespace COFF { +class Symbol; + +/// Class that represents an auxiliary symbol. +/// +/// An auxiliary symbol has the same size as a regular LIEF::PE::Symbol (18 +/// bytes) but its content depends on the the parent symbol. 
+class LIEF_API AuxiliarySymbol { + public: + AuxiliarySymbol() = default; + AuxiliarySymbol(std::vector payload) : + type_(TYPE::UNKNOWN), + payload_(std::move(payload)) + {} + AuxiliarySymbol(const AuxiliarySymbol&) = default; + AuxiliarySymbol& operator=(const AuxiliarySymbol&) = default; + + AuxiliarySymbol(AuxiliarySymbol&&) = default; + AuxiliarySymbol& operator=(AuxiliarySymbol&&) = default; + + LIEF_LOCAL static std::unique_ptr + parse(Symbol& sym, std::vector payload); + + virtual std::unique_ptr clone() const { + return std::unique_ptr(new AuxiliarySymbol(*this)); + } + + /// Type discriminator for the subclasses + enum class TYPE { + UNKNOWN = 0, + CLR_TOKEN, + /// Auxiliary Format 1 from the PE-COFF documentation + FUNC_DEF, + /// Auxiliary Format 2: .bf and .ef Symbols from the PE-COFF documentation + BF_AND_EF, + /// Auxiliary Format 3: Weak Externals from the PE-COFF documentation + WEAK_EXTERNAL, + /// Auxiliary Format 4: Files from the PE-COFF documentation + FILE, + /// Auxiliary Format 5: Section Definitions from the PE-COFF documentation + SEC_DEF, + }; + + AuxiliarySymbol(TYPE ty) : + type_(ty) + {} + + static TYPE get_aux_type(const Symbol& sym); + + TYPE type() const { + return type_; + } + + /// For unknown type **only**, return the raw representation of this symbol + span payload() const { + return payload_; + } + + span payload() { + return payload_; + } + + virtual std::string to_string() const; + + virtual ~AuxiliarySymbol() = default; + + /// Helper to **downcast** a AuxiliarySymbol into a concrete implementation + template + const T* as() const { + static_assert(std::is_base_of::value, + "Require AuxiliarySymbol inheritance"); + if (T::classof(this)) { + return static_cast(this); + } + return nullptr; + } + + template + T* as() { + return const_cast(static_cast(this)->as()); + } + + LIEF_API friend + std::ostream& operator<<(std::ostream& os, const AuxiliarySymbol& aux) + { + os << aux.to_string(); + return os; + } + + protected: + TYPE type_ = TYPE::UNKNOWN; + std::vector payload_; +}; + +} +} +#endif diff --git a/deps/LIEF/include/LIEF/COFF/AuxiliarySymbols/AuxiliaryCLRToken.hpp b/deps/LIEF/include/LIEF/COFF/AuxiliarySymbols/AuxiliaryCLRToken.hpp new file mode 100644 index 00000000000000..cc2ee5feb71fb1 --- /dev/null +++ b/deps/LIEF/include/LIEF/COFF/AuxiliarySymbols/AuxiliaryCLRToken.hpp @@ -0,0 +1,106 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#ifndef LIEF_COFF_AUXILIARY_CLR_TOKEN_H +#define LIEF_COFF_AUXILIARY_CLR_TOKEN_H + +#include + +#include "LIEF/visibility.h" +#include "LIEF/COFF/AuxiliarySymbol.hpp" + +namespace LIEF { +namespace COFF { +class Symbol; +class Parser; + +/// Auxiliary symbol associated with the `CLR_TOKEN` storage class +class LIEF_API AuxiliaryCLRToken : public AuxiliarySymbol { + public: + friend class Parser; + + LIEF_LOCAL static std::unique_ptr + parse(const std::vector& payload); + + AuxiliaryCLRToken() : + AuxiliarySymbol(AuxiliarySymbol::TYPE::CLR_TOKEN) + {} + + AuxiliaryCLRToken(uint8_t aux_type, uint8_t reserved, uint32_t symbol_idx, + std::vector rgb_reserved) : + AuxiliarySymbol(AuxiliarySymbol::TYPE::CLR_TOKEN), + aux_type_(aux_type), + reserved_(reserved), + symbol_idx_(symbol_idx), + rgb_reserved_(std::move(rgb_reserved)) + {} + + AuxiliaryCLRToken(const AuxiliaryCLRToken&) = default; + AuxiliaryCLRToken& operator=(const AuxiliaryCLRToken&) = default; + + AuxiliaryCLRToken(AuxiliaryCLRToken&&) = default; + AuxiliaryCLRToken& operator=(AuxiliaryCLRToken&&) = default; + + std::unique_ptr clone() const override { + return std::unique_ptr(new AuxiliaryCLRToken{*this}); + } + + /// `IMAGE_AUX_SYMBOL_TYPE` which should be `IMAGE_AUX_SYMBOL_TYPE_TOKEN_DEF` (1) + uint8_t aux_type() const { + return aux_type_; + } + + /// Reserved value (should be 0) + uint8_t reserved() const { + return aux_type_; + } + + /// Index in the symbol table + uint32_t symbol_idx() const { + return symbol_idx_; + } + + /// Symbol referenced by symbol_idx() (if resolved) + const Symbol* symbol() const { + return sym_; + } + + Symbol* symbol() { + return sym_; + } + + /// Reserved (padding) values. Should be 0 + span rgb_reserved() const { + return rgb_reserved_; + } + + std::string to_string() const override; + + static bool classof(const AuxiliarySymbol* sym) { + return sym->type() == AuxiliarySymbol::TYPE::CLR_TOKEN; + } + + ~AuxiliaryCLRToken() override = default; + private: + uint8_t aux_type_ = 0; + uint8_t reserved_ = 0; + uint32_t symbol_idx_ = 0; + std::vector rgb_reserved_; + Symbol* sym_ = nullptr; +}; + +} +} +#endif diff --git a/deps/LIEF/include/LIEF/COFF/AuxiliarySymbols/AuxiliaryFile.hpp b/deps/LIEF/include/LIEF/COFF/AuxiliarySymbols/AuxiliaryFile.hpp new file mode 100644 index 00000000000000..28ed0142d3415b --- /dev/null +++ b/deps/LIEF/include/LIEF/COFF/AuxiliarySymbols/AuxiliaryFile.hpp @@ -0,0 +1,85 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_PE_AUXILIARY_FILE_H +#define LIEF_PE_AUXILIARY_FILE_H + +#include + +#include "LIEF/visibility.h" +#include "LIEF/COFF/AuxiliarySymbol.hpp" + +namespace LIEF { +namespace COFF { + +/// This auxiliary symbol represents a filename (auxiliary format 4) +/// +/// The Symbol::name itself should start with `.file`, and this auxiliary record +/// gives the name of a source-code file. 
+/// +/// Reference: https://learn.microsoft.com/en-us/windows/win32/debug/pe-format#auxiliary-format-4-files +class LIEF_API AuxiliaryFile : public AuxiliarySymbol { + public: + LIEF_LOCAL static std::unique_ptr + parse(const std::vector& payload); + + AuxiliaryFile() : + AuxiliarySymbol(AuxiliarySymbol::TYPE::FILE) + {} + + AuxiliaryFile(std::string file) : + AuxiliarySymbol(AuxiliarySymbol::TYPE::FILE), + filename_(std::move(file)) + {} + + AuxiliaryFile(const AuxiliaryFile&) = default; + AuxiliaryFile& operator=(const AuxiliaryFile&) = default; + + AuxiliaryFile(AuxiliaryFile&&) = default; + AuxiliaryFile& operator=(AuxiliaryFile&&) = default; + + std::unique_ptr clone() const override { + return std::unique_ptr(new AuxiliaryFile{*this}); + } + + /// The associated filename + const std::string& filename() const { + return filename_; + } + + AuxiliaryFile& filename(std::string file) { + filename_ = std::move(file); + return *this; + } + + std::string to_string() const override { + std::string out = "AuxiliaryFile {\n"; + out += " " + filename_ + "\n}"; + return out; + } + + ~AuxiliaryFile() override = default; + + static bool classof(const AuxiliarySymbol* sym) { + return sym->type() == AuxiliarySymbol::TYPE::FILE; + } + + protected: + std::string filename_; +}; + +} +} +#endif diff --git a/deps/LIEF/include/LIEF/COFF/AuxiliarySymbols/AuxiliaryFunctionDefinition.hpp b/deps/LIEF/include/LIEF/COFF/AuxiliarySymbols/AuxiliaryFunctionDefinition.hpp new file mode 100644 index 00000000000000..a3f184ff761041 --- /dev/null +++ b/deps/LIEF/include/LIEF/COFF/AuxiliarySymbols/AuxiliaryFunctionDefinition.hpp @@ -0,0 +1,110 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_COFF_AUXILIARY_FUNCTION_DEF_H +#define LIEF_COFF_AUXILIARY_FUNCTION_DEF_H + +#include + +#include "LIEF/visibility.h" +#include "LIEF/COFF/AuxiliarySymbol.hpp" + +namespace LIEF { +namespace COFF { + +/// This auxiliary symbols marks the beginning of a function definition. 
+/// +/// Reference: https://learn.microsoft.com/en-us/windows/win32/debug/pe-format#auxiliary-format-1-function-definitions +class LIEF_API AuxiliaryFunctionDefinition : public AuxiliarySymbol { + public: + LIEF_LOCAL static std::unique_ptr + parse(const std::vector& payload); + + AuxiliaryFunctionDefinition() : + AuxiliarySymbol(AuxiliarySymbol::TYPE::FUNC_DEF) + {} + + AuxiliaryFunctionDefinition(uint32_t tagidx, uint32_t totalsz, + uint32_t ptr_line, uint32_t ptr_next_func, + uint16_t padding) : + AuxiliarySymbol(AuxiliarySymbol::TYPE::FUNC_DEF), + tag_index_(tagidx), + total_size_(totalsz), + ptr_to_linenb_(ptr_line), + ptr_to_next_func_(ptr_next_func), + padding_(padding) + {} + + AuxiliaryFunctionDefinition(const AuxiliaryFunctionDefinition&) = default; + AuxiliaryFunctionDefinition& operator=(const AuxiliaryFunctionDefinition&) = default; + + AuxiliaryFunctionDefinition(AuxiliaryFunctionDefinition&&) = default; + AuxiliaryFunctionDefinition& operator=(AuxiliaryFunctionDefinition&&) = default; + + std::unique_ptr clone() const override { + return std::unique_ptr(new AuxiliaryFunctionDefinition{*this}); + } + + /// The symbol-table index of the corresponding `.bf` (begin function) + /// symbol record. + uint32_t tag_index() const { + return tag_index_; + } + + /// The size of the executable code for the function itself. + /// + /// If the function is in its own section, the `SizeOfRawData` in the section + /// header is greater or equal to this field, depending on alignment + /// considerations. + uint32_t total_size() const { + return total_size_; + } + + /// The file offset of the first COFF line-number entry for the function, + /// or zero if none exists (deprecated) + uint32_t ptr_to_line_number() const { + return ptr_to_linenb_; + } + + /// The symbol-table index of the record for the next function. If the function + /// is the last in the symbol table, this field is set to zero. + uint32_t ptr_to_next_func() const { + return ptr_to_next_func_; + } + + /// Padding value (should be 0) + uint16_t padding() const { + return padding_; + } + + std::string to_string() const override; + + static bool classof(const AuxiliarySymbol* sym) { + return sym->type() == AuxiliarySymbol::TYPE::FUNC_DEF; + } + + ~AuxiliaryFunctionDefinition() override = default; + + private: + uint32_t tag_index_ = 0; + uint32_t total_size_ = 0; + uint32_t ptr_to_linenb_ = 0; + uint32_t ptr_to_next_func_ = 0; + uint16_t padding_ = 0; +}; + +} +} +#endif diff --git a/deps/LIEF/include/LIEF/COFF/AuxiliarySymbols/AuxiliarySectionDefinition.hpp b/deps/LIEF/include/LIEF/COFF/AuxiliarySymbols/AuxiliarySectionDefinition.hpp new file mode 100644 index 00000000000000..9037b7700af3c9 --- /dev/null +++ b/deps/LIEF/include/LIEF/COFF/AuxiliarySymbols/AuxiliarySectionDefinition.hpp @@ -0,0 +1,166 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#ifndef LIEF_COFF_AUXILIARY_SEC_DEF_H +#define LIEF_COFF_AUXILIARY_SEC_DEF_H + +#include + +#include "LIEF/visibility.h" +#include "LIEF/COFF/AuxiliarySymbol.hpp" + +namespace LIEF { +namespace COFF { + +/// This auxiliary symbol exposes information about the associated section. +/// +/// It **duplicates** some information that are provided in the section header +class LIEF_API AuxiliarySectionDefinition : public AuxiliarySymbol { + public: + LIEF_LOCAL static std::unique_ptr + parse(const std::vector& payload); + + AuxiliarySectionDefinition() : + AuxiliarySymbol(AuxiliarySymbol::TYPE::SEC_DEF) + {} + + AuxiliarySectionDefinition(uint32_t length, uint16_t nb_relocs, + uint16_t nb_lines, uint32_t checksum, + uint32_t sec_idx, uint8_t selection, + uint8_t reserved) : + AuxiliarySymbol(AuxiliarySymbol::TYPE::SEC_DEF), + length_(length), + nb_relocs_(nb_relocs), + nb_lines_(nb_lines), + checksum_(checksum), + sec_idx_(sec_idx), + selection_((COMDAT_SELECTION)selection), + reserved_(reserved) + {} + + AuxiliarySectionDefinition(const AuxiliarySectionDefinition&) = default; + AuxiliarySectionDefinition& operator=(const AuxiliarySectionDefinition&) = default; + + AuxiliarySectionDefinition(AuxiliarySectionDefinition&&) = default; + AuxiliarySectionDefinition& operator=(AuxiliarySectionDefinition&&) = default; + + std::unique_ptr clone() const override { + return std::unique_ptr(new AuxiliarySectionDefinition{*this}); + } + + /// Values for the AuxiliarySectionDefinition::selection attribute + /// + /// See: https://learn.microsoft.com/en-us/windows/win32/debug/pe-format#comdat-sections-object-only + enum class COMDAT_SELECTION : uint8_t { + NONE = 0, + + /// If this symbol is already defined, the linker issues a `multiply defined symbol` + /// error. + NODUPLICATES = 1, + + /// Any section that defines the same COMDAT symbol can be linked; the rest + /// are removed. + ANY, + + /// The linker chooses an arbitrary section among the definitions for this + /// symbol. If all definitions are not the same size, a `multiply defined symbol` + /// error is issued. + SAME_SIZE, + + /// The linker chooses an arbitrary section among the definitions for this + /// symbol. If all definitions do not match exactly, a + /// `multiply defined symbol` error is issued. + EXACT_MATCH, + + /// The section is linked if a certain other COMDAT section is linked. + /// This other section is indicated by the Number field of the auxiliary + /// symbol record for the section definition. This setting is useful for + /// definitions that have components in multiple sections + /// (for example, code in one and data in another), but where all must be + /// linked or discarded as a set. The other section this section is + /// associated with must be a COMDAT section, which can be another + /// associative COMDAT section. An associative COMDAT section's section + /// association chain can't form a loop. The section association chain must + /// eventually come to a COMDAT section that doesn't have + /// COMDAT_SELECTION::ASSOCIATIVE set. + ASSOCIATIVE, + + /// The linker chooses the largest definition from among all of the definitions + /// for this symbol. If multiple definitions have this size, the choice + /// between them is arbitrary. + LARGEST + }; + + /// The size of section data. The same as `SizeOfRawData` in the section header. + uint32_t length() const { + return length_; + } + + /// The number of relocation entries for the section. 
+ uint16_t nb_relocs() const { + return nb_relocs_; + } + + /// The number of line-number entries for the section. + uint16_t nb_line_numbers() const { + return nb_lines_; + } + + /// The checksum for communal data. It is applicable if the + /// `IMAGE_SCN_LNK_COMDAT` flag is set in the section header. + uint32_t checksum() const { + return checksum_; + } + + /// One-based index into the section table for the associated section. + /// This is used when the COMDAT selection setting is 5. + uint32_t section_idx() const { + return sec_idx_; + } + + /// The COMDAT selection number. This is applicable if the section is a + /// COMDAT section. + COMDAT_SELECTION selection() const { + return selection_; + } + + /// Reserved value (should be 0) + uint8_t reserved() const { + return reserved_; + } + + std::string to_string() const override; + + static bool classof(const AuxiliarySymbol* sym) { + return sym->type() == AuxiliarySymbol::TYPE::SEC_DEF; + } + + ~AuxiliarySectionDefinition() override = default; + + private: + uint32_t length_ = 0; + uint16_t nb_relocs_ = 0; + uint16_t nb_lines_ = 0; + uint32_t checksum_ = 0; + uint32_t sec_idx_ = 0; + COMDAT_SELECTION selection_ = COMDAT_SELECTION::NONE; + uint8_t reserved_ = 0; +}; + +LIEF_API const char* to_string(AuxiliarySectionDefinition::COMDAT_SELECTION e); + +} +} +#endif diff --git a/deps/LIEF/include/LIEF/COFF/AuxiliarySymbols/AuxiliaryWeakExternal.hpp b/deps/LIEF/include/LIEF/COFF/AuxiliarySymbols/AuxiliaryWeakExternal.hpp new file mode 100644 index 00000000000000..488f787dec126c --- /dev/null +++ b/deps/LIEF/include/LIEF/COFF/AuxiliarySymbols/AuxiliaryWeakExternal.hpp @@ -0,0 +1,113 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_COFF_AUXILIARY_WEAK_EXTERNAL_H +#define LIEF_COFF_AUXILIARY_WEAK_EXTERNAL_H + +#include +#include + +#include "LIEF/visibility.h" +#include "LIEF/COFF/AuxiliarySymbol.hpp" + +namespace LIEF { + +namespace COFF { + +/// "Weak externals" are a mechanism for object files that allows flexibility at +/// link time. A module can contain an unresolved external symbol (`sym1`), but +/// it can also include an auxiliary record that indicates that if `sym1` is not +/// present at link time, another external symbol (`sym2`) is used to resolve +/// references instead. +/// +/// If a definition of `sym1` is linked, then an external reference to the +/// symbol is resolved normally. If a definition of `sym1` is not linked, then all +/// references to the weak external for `sym1` refer to `sym2` instead. The external +/// symbol, `sym2`, must always be linked; typically, it is defined in the module +/// that contains the weak reference to `sym1`. 
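+///
+/// A minimal sketch, assuming a LIEF::COFF::Symbol named `weak_sym` for which
+/// Symbol::is_weak_external() returns true (the variable name is illustrative):
+/// ```cpp
+/// for (const LIEF::COFF::AuxiliarySymbol& aux : weak_sym.auxiliary_symbols()) {
+///   if (LIEF::COFF::AuxiliaryWeakExternal::classof(&aux)) {
+///     const auto& weak = static_cast<const LIEF::COFF::AuxiliaryWeakExternal&>(aux);
+///     // Index (in the symbol table) of `sym2`, the symbol used when `sym1`
+///     // is not defined at link time.
+///     std::cout << "fallback symbol index: " << weak.sym_idx() << '\n';
+///   }
+/// }
+/// ```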
+/// +/// Reference: https://learn.microsoft.com/en-us/windows/win32/debug/pe-format#auxiliary-format-3-weak-externals +class LIEF_API AuxiliaryWeakExternal : public AuxiliarySymbol { + public: + enum class CHARACTERISTICS : uint32_t { + /// No library search for `sym1` should be performed. + SEARCH_NOLIBRARY = 1, + ///A library search for `sym1` should be performed. + SEARCH_LIBRARY = 2, + /// `sym1` is an alias for sym2 + SEARCH_ALIAS = 3, + ANTI_DEPENDENCY = 4 + }; + + LIEF_LOCAL static std::unique_ptr + parse(const std::vector& payload); + + AuxiliaryWeakExternal() : + AuxiliarySymbol(AuxiliarySymbol::TYPE::WEAK_EXTERNAL) + {} + + AuxiliaryWeakExternal(uint32_t sym_idx, uint32_t characteristics, + std::vector padding) : + AuxiliarySymbol(AuxiliarySymbol::TYPE::WEAK_EXTERNAL), + sym_idx_(sym_idx), + characteristics_(characteristics), + padding_(std::move(padding)) + { + assert(padding_.size() == 10); + } + + AuxiliaryWeakExternal(const AuxiliaryWeakExternal&) = default; + AuxiliaryWeakExternal& operator=(const AuxiliaryWeakExternal&) = default; + + AuxiliaryWeakExternal(AuxiliaryWeakExternal&&) = default; + AuxiliaryWeakExternal& operator=(AuxiliaryWeakExternal&&) = default; + + std::unique_ptr clone() const override { + return std::unique_ptr(new AuxiliaryWeakExternal{*this}); + } + + /// The symbol-table index of `sym2`, the symbol to be linked if `sym1` is not + /// found. + uint32_t sym_idx() const { + return sym_idx_; + } + + CHARACTERISTICS characteristics() const { + return (CHARACTERISTICS)characteristics_; + } + + span padding() const { + return padding_; + } + + std::string to_string() const override; + + static bool classof(const AuxiliarySymbol* sym) { + return sym->type() == AuxiliarySymbol::TYPE::WEAK_EXTERNAL; + } + + ~AuxiliaryWeakExternal() override = default; + + private: + uint32_t sym_idx_ = 0; + uint32_t characteristics_ = 0; + std::vector padding_; +}; + +LIEF_API const char* to_string(AuxiliaryWeakExternal::CHARACTERISTICS e); + +} +} +#endif diff --git a/deps/LIEF/include/LIEF/COFF/AuxiliarySymbols/AuxiliarybfAndefSymbol.hpp b/deps/LIEF/include/LIEF/COFF/AuxiliarySymbols/AuxiliarybfAndefSymbol.hpp new file mode 100644 index 00000000000000..53b0666b588842 --- /dev/null +++ b/deps/LIEF/include/LIEF/COFF/AuxiliarySymbols/AuxiliarybfAndefSymbol.hpp @@ -0,0 +1,60 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#ifndef LIEF_COFF_AUXILIARY_BF_AND_EF_H +#define LIEF_COFF_AUXILIARY_BF_AND_EF_H + +#include + +#include "LIEF/visibility.h" +#include "LIEF/COFF/AuxiliarySymbol.hpp" + +namespace LIEF { + +namespace COFF { + +class LIEF_API AuxiliarybfAndefSymbol : public AuxiliarySymbol { + public: + LIEF_LOCAL static std::unique_ptr + parse(Symbol& sym, const std::vector& payload); + + AuxiliarybfAndefSymbol() : + AuxiliarySymbol(AuxiliarySymbol::TYPE::BF_AND_EF) + {} + + AuxiliarybfAndefSymbol(const AuxiliarybfAndefSymbol&) = default; + AuxiliarybfAndefSymbol& operator=(const AuxiliarybfAndefSymbol&) = default; + + AuxiliarybfAndefSymbol(AuxiliarybfAndefSymbol&&) = default; + AuxiliarybfAndefSymbol& operator=(AuxiliarybfAndefSymbol&&) = default; + + std::unique_ptr clone() const override { + return std::unique_ptr(new AuxiliarybfAndefSymbol{*this}); + } + + std::string to_string() const override { + return "AuxiliarybfAndefSymbol"; + } + + static bool classof(const AuxiliarySymbol* sym) { + return sym->type() == AuxiliarySymbol::TYPE::BF_AND_EF; + } + + ~AuxiliarybfAndefSymbol() override = default; +}; + +} +} +#endif diff --git a/deps/LIEF/include/LIEF/COFF/BigObjHeader.hpp b/deps/LIEF/include/LIEF/COFF/BigObjHeader.hpp new file mode 100644 index 00000000000000..a793092cd64887 --- /dev/null +++ b/deps/LIEF/include/LIEF/COFF/BigObjHeader.hpp @@ -0,0 +1,123 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_COFF_BIGOBJ_HEADER_H +#define LIEF_COFF_BIGOBJ_HEADER_H +#include +#include + +#include "LIEF/COFF/Header.hpp" + +#include "LIEF/visibility.h" +#include "LIEF/span.hpp" + +namespace LIEF { +namespace COFF { + +/// This class represents the header for a COFF object compiled +/// with `/bigobj` support (i.e. the number of sections can exceed 65536). +/// +/// The raw definition of the bigobj header is located in `winnt.h` and named +/// `ANON_OBJECT_HEADER_BIGOBJ` +class LIEF_API BigObjHeader : public Header { + public: + static constexpr auto UUID_SZ = 16; + BigObjHeader() : + Header(KIND::BIGOBJ) + {} + + static std::unique_ptr create(BinaryStream& stream); + + BigObjHeader& operator=(const BigObjHeader&) = default; + BigObjHeader(const BigObjHeader&) = default; + + BigObjHeader& operator=(BigObjHeader&&) = default; + BigObjHeader(BigObjHeader&&) = default; + + std::unique_ptr
clone() const override { + return std::unique_ptr
(new BigObjHeader(*this)); + } + + /// The version of this header which must be >= 2 + uint16_t version() const { + return version_; + } + + /// Originally named `ClassID`, this uuid should match: `{D1BAA1C7-BAEE-4ba9-AF20-FAF66AA4DCB8}` + span uuid() const { + return uuid_; + } + + /// Size of data that follows the header + uint32_t sizeof_data() const { + return sizeof_data_; + } + + /// 1 means that it contains metadata + uint32_t flags() const { + return flags_; + } + + /// Size of CLR metadata + uint32_t metadata_size() const { + return metadata_size_; + } + + /// Offset of CLR metadata + uint32_t metadata_offset() const { + return metadata_offset_; + } + + void version(uint16_t value) { + version_ = value; + } + + void sizeof_data(uint32_t value) { + sizeof_data_ = value; + } + + void flags(uint32_t value) { + flags_ = value; + } + + void metadata_size(uint32_t value) { + metadata_size_ = value; + } + + void metadata_offset(uint32_t value) { + metadata_offset_ = value; + } + + static bool classof(const Header* header) { + return header->kind() == Header::KIND::BIGOBJ; + } + + ~BigObjHeader() override = default; + + std::string to_string() const override; + + protected: + uint16_t version_ = 0; + std::array uuid_ = {}; + + uint32_t sizeof_data_ = 0; + uint32_t flags_ = 0; + uint32_t metadata_size_ = 0; + uint32_t metadata_offset_ = 0; +}; + +} +} +#endif diff --git a/deps/LIEF/include/LIEF/COFF/Binary.hpp b/deps/LIEF/include/LIEF/COFF/Binary.hpp new file mode 100644 index 00000000000000..9467836970e76b --- /dev/null +++ b/deps/LIEF/include/LIEF/COFF/Binary.hpp @@ -0,0 +1,252 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#ifndef LIEF_COFF_BINARY_H +#define LIEF_COFF_BINARY_H +#include "LIEF/visibility.h" +#include "LIEF/iterators.hpp" +#include "LIEF/span.hpp" + +#include "LIEF/COFF/String.hpp" + +#include "LIEF/asm/Instruction.hpp" + +#include +#include +#include + +namespace LIEF { + +namespace assembly { +class Engine; +} + +namespace COFF { +class Header; +class Parser; +class Section; +class Relocation; +class Symbol; + +/// Class that represents a COFF Binary +class LIEF_API Binary { + public: + friend class Parser; + + /// Internal container used to store COFF's section + using sections_t = std::vector>; + + /// Iterator that outputs Section& object + using it_sections = ref_iterator; + + /// Iterator that outputs const Section& object + using it_const_sections = const_ref_iterator; + + /// Internal container used to store COFF's relocations + using relocations_t = std::vector>; + + /// Iterator that outputs Relocation& object + using it_relocations = ref_iterator; + + /// Iterator that outputs const Relocation& object + using it_const_relocations = const_ref_iterator; + + /// Internal container used to store COFF's strings + using strings_table_t = std::vector; + + /// Iterator that outputs String& object + using it_strings_table = ref_iterator; + + /// Iterator that outputs const String& object + using it_const_strings_table = const_ref_iterator; + + /// Internal container used to store COFF's symbols + using symbols_t = std::vector>; + + /// Iterator that outputs Symbol& object + using it_symbols = ref_iterator; + + /// Iterator that outputs Symbol& object + using it_const_symbols = const_ref_iterator; + + /// Instruction iterator + using instructions_it = iterator_range; + + /// Iterator which outputs COFF symbols representing functions + using it_functions = filter_iterator; + + /// Iterator which outputs COFF symbols representing functions + using it_const_function = const_filter_iterator; + + /// The COFF header + const Header& header() const { + return *header_; + } + + Header& header() { + return *header_; + } + + /// Iterator over the different sections located in this COFF binary + it_sections sections() { + return sections_; + } + + it_const_sections sections() const { + return sections_; + } + + /// Iterator over **all** the relocations used by this COFF binary + it_relocations relocations() { + return relocations_; + } + + it_const_relocations relocations() const { + return relocations_; + } + + /// Iterator over the COFF's symbols + it_symbols symbols() { + return symbols_; + } + + it_const_symbols symbols() const { + return symbols_; + } + + /// Iterator over the COFF's strings + it_const_strings_table string_table() const { + return strings_table_; + } + + it_strings_table string_table() { + return strings_table_; + } + + /// Try to find the COFF string at the given offset in the COFF string table. + /// + /// \warning This offset must include the first 4 bytes holding the size of + /// the table. Hence, the first string starts a the offset 4. + String* find_string(uint32_t offset) { + auto it = std::find_if(strings_table_.begin(), strings_table_.end(), + [offset] (const String& item) { + return offset == item.offset(); + } + ); + return it == strings_table_.end() ? 
nullptr : &*it; + } + + const String* find_string(uint32_t offset) const { + return const_cast(this)->find_string(offset); + } + + /// Iterator over the functions implemented in this COFF + it_const_function functions() const; + + it_functions functions(); + + /// Try to find the function (symbol) with the given name + const Symbol* find_function(const std::string& name) const; + + Symbol* find_function(const std::string& name) { + return const_cast(static_cast(this)->find_function(name)); + } + + /// Try to find the function (symbol) with the given **demangled** name + const Symbol* find_demangled_function(const std::string& name) const; + + Symbol* find_demangled_function(const std::string& name) { + return const_cast(static_cast(this)->find_demangled_function(name)); + } + + /// Disassemble code for the given symbol + /// + /// ```cpp + /// const Symbol* func = binary->find_demangled_function("int __cdecl my_function(int, int)"); + /// auto insts = binary->disassemble(*func); + /// for (std::unique_ptr inst : insts) { + /// std::cout << inst->to_string() << '\n'; + /// } + /// ``` + /// + /// \see LIEF::assembly::Instruction + instructions_it disassemble(const Symbol& symbol) const; + + /// Disassemble code for the given symbol name + /// + /// ```cpp + /// auto insts = binary->disassemble("main"); + /// for (std::unique_ptr inst : insts) { + /// std::cout << inst->to_string() << '\n'; + /// } + /// ``` + /// + /// \see LIEF::assembly::Instruction + instructions_it disassemble(const std::string& symbol) const; + + /// Disassemble code provided by the given buffer at the specified + /// `address` parameter. + /// + /// \see LIEF::assembly::Instruction + instructions_it disassemble(const uint8_t* buffer, size_t size, + uint64_t address = 0) const; + + + /// Disassemble code provided by the given vector of bytes at the specified + /// `address` parameter. + /// + /// \see LIEF::assembly::Instruction + instructions_it disassemble(const std::vector& buffer, + uint64_t address = 0) const { + return disassemble(buffer.data(), buffer.size(), address); + } + + instructions_it disassemble(LIEF::span buffer, + uint64_t address = 0) const { + return disassemble(buffer.data(), buffer.size(), address); + } + + instructions_it disassemble(LIEF::span buffer, uint64_t address = 0) const { + return disassemble(buffer.data(), buffer.size(), address); + } + + std::string to_string() const; + + LIEF_API friend std::ostream& operator<<(std::ostream& os, const Binary& bin) { + os << bin.to_string(); + return os; + } + + ~Binary(); + + private: + Binary(); + std::unique_ptr
header_; + sections_t sections_; + relocations_t relocations_; + strings_table_t strings_table_; + symbols_t symbols_; + + mutable std::unordered_map> engines_; + + assembly::Engine* get_engine(uint64_t address) const; + + template + LIEF_LOCAL assembly::Engine* get_cache_engine(uint64_t address, F&& f) const; +}; + +} +} +#endif diff --git a/deps/LIEF/include/LIEF/COFF/Header.hpp b/deps/LIEF/include/LIEF/COFF/Header.hpp new file mode 100644 index 00000000000000..5239b73faa4979 --- /dev/null +++ b/deps/LIEF/include/LIEF/COFF/Header.hpp @@ -0,0 +1,155 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_COFF_HEADER_H +#define LIEF_COFF_HEADER_H +#include +#include +#include + +#include "LIEF/visibility.h" +#include "LIEF/PE/Header.hpp" + +namespace LIEF { +class BinaryStream; + +namespace COFF { + +/// Class that represents the COFF header. It is subclassed by +/// LIEF::COFF::RegularHeader and LIEF::COFF::BigObjHeader for normal vs +/// `/bigobj` files +class LIEF_API Header { + public: + + enum class KIND { + UNKNOWN = 0, + REGULAR, + BIGOBJ + }; + + /// The different architectures (mirrored from PE) + using MACHINE_TYPES = LIEF::PE::Header::MACHINE_TYPES; + + /// Create a header from the given stream + static std::unique_ptr
create(BinaryStream& stream); + static std::unique_ptr
create(BinaryStream& stream, KIND kind); + + Header() = default; + Header(KIND kind) : + kind_(kind) + {} + + Header& operator=(const Header&) = default; + Header(const Header&) = default; + + Header& operator=(Header&&) = default; + Header(Header&&) = default; + + virtual std::unique_ptr
clone() const = 0; + + /// The type of this header: whether it is regular or using the `/bigobj` + /// format + KIND kind() const { + return kind_; + } + + /// The machine type targeted by this COFF + MACHINE_TYPES machine() const { + return machine_; + } + + /// The number of sections + uint32_t nb_sections() const { + return nb_sections_; + } + + /// Offset of the symbols table + uint32_t pointerto_symbol_table() const { + return pointerto_symbol_table_; + } + + /// Number of symbols (including auxiliary symbols) + uint32_t nb_symbols() const { + return nb_symbols_; + } + + /// Timestamp when the COFF has been generated + uint32_t timedatestamp() const { + return timedatestamp_; + } + + void machine(MACHINE_TYPES machine) { + machine_ = machine; + } + + void nb_sections(uint32_t value) { + nb_sections_ = value; + } + + void pointerto_symbol_table(uint32_t value) { + pointerto_symbol_table_ = value; + } + + void nb_symbols(uint32_t value) { + nb_symbols_ = value; + } + + void timedatestamp(uint32_t value) { + timedatestamp_ = value; + } + + virtual std::string to_string() const; + + LIEF_API friend std::ostream& operator<<(std::ostream& os, const Header& hdr) { + os << hdr.to_string(); + return os; + } + + template + const T* as() const { + static_assert(std::is_base_of::value, + "Require Header inheritance"); + if (T::classof(this)) { + return static_cast(this); + } + return nullptr; + } + + virtual ~Header() = default; + + protected: + KIND kind_ = KIND::UNKNOWN; + MACHINE_TYPES machine_ = MACHINE_TYPES::UNKNOWN; + uint32_t nb_sections_ = 0; + uint32_t pointerto_symbol_table_ = 0; + uint32_t nb_symbols_ = 0; + uint32_t timedatestamp_ = 0; +}; + +LIEF_API inline const char* to_string(Header::KIND kind) { + switch (kind) { + case Header::KIND::UNKNOWN: return "UNKNOWN"; + case Header::KIND::REGULAR: return "REGULAR"; + case Header::KIND::BIGOBJ: return "BIGOBJ"; + } + return "UNKNOWN"; +} + +LIEF_API inline const char* to_string(Header::MACHINE_TYPES machine) { + return LIEF::PE::to_string(machine); +} +} +} +#endif diff --git a/deps/LIEF/include/LIEF/COFF/Parser.hpp b/deps/LIEF/include/LIEF/COFF/Parser.hpp new file mode 100644 index 00000000000000..fa1a5ff79d7d54 --- /dev/null +++ b/deps/LIEF/include/LIEF/COFF/Parser.hpp @@ -0,0 +1,109 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#ifndef LIEF_COFF_PARSER_H +#define LIEF_COFF_PARSER_H +#include +#include "LIEF/visibility.h" + +#include "LIEF/BinaryStream/VectorStream.hpp" +#include "LIEF/BinaryStream/SpanStream.hpp" + +#include "LIEF/COFF/ParserConfig.hpp" +#include "LIEF/COFF/Header.hpp" + +namespace LIEF { +namespace COFF { +class Binary; +class Section; +class String; +class Symbol; + +class Parser { + public: + /// Parse the COFF binary referenced by the `stream` argument with the + /// given config + static LIEF_API + std::unique_ptr parse(std::unique_ptr stream, + const ParserConfig& config = ParserConfig::default_conf()); + + /// Parse the COFF binary pointed by the `file` argument with the given config + static std::unique_ptr parse(const std::string& file, + const ParserConfig& config = ParserConfig::default_conf()) + { + if (auto strm = VectorStream::from_file(file)) { + return parse(std::unique_ptr(new VectorStream(std::move(*strm))), config); + } + return nullptr; + } + + /// \private + struct SymSec { + size_t sec_idx = 0; + Symbol* symbol = nullptr; + + friend bool operator<(const SymSec& lhs, const SymSec& rhs) { + return lhs.sec_idx < rhs.sec_idx; + } + + friend bool operator==(const SymSec& lhs, const SymSec& rhs) { + return lhs.sec_idx == rhs.sec_idx && lhs.symbol == rhs.symbol; + } + + friend bool operator!=(const SymSec& lhs, const SymSec& rhs) { + return !(lhs == rhs); + } + }; + + /// <=> std::unordered_multimap
+ using SymSecMap = std::vector; + + /// \private + LIEF_LOCAL void memoize(String str); + + /// \private + LIEF_LOCAL String* find_coff_string(uint32_t offset) const; + + ~Parser(); + + private: + Parser(std::unique_ptr stream, const ParserConfig& config, + Header::KIND kind) : + stream_(std::move(stream)), + kind_(kind), + config_(config) + {} + + ok_error_t process(); + ok_error_t parse_header(); + ok_error_t parse_optional_header(); + ok_error_t parse_sections(); + ok_error_t parse_relocations(Section& section); + ok_error_t parse_symbols(); + ok_error_t parse_string_table(); + + std::unique_ptr stream_; + std::unique_ptr bin_; + Header::KIND kind_ = Header::KIND::UNKNOWN; + + std::map memoize_coff_str_; + std::map symbol_idx_; + SymSecMap symsec_; + + ParserConfig config_; +}; +} +} +#endif diff --git a/deps/LIEF/include/LIEF/COFF/ParserConfig.hpp b/deps/LIEF/include/LIEF/COFF/ParserConfig.hpp new file mode 100644 index 00000000000000..3d0a09636b5262 --- /dev/null +++ b/deps/LIEF/include/LIEF/COFF/ParserConfig.hpp @@ -0,0 +1,38 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_COFF_PARSER_CONFIG_H +#define LIEF_COFF_PARSER_CONFIG_H + +#include "LIEF/visibility.h" + +namespace LIEF { +namespace COFF { +/// Class used to configure the COFF parser +class LIEF_API ParserConfig { + public: + static const ParserConfig& default_conf() { + static const ParserConfig DEFAULT; + return DEFAULT; + } + + static const ParserConfig& all() { + // To be updated when there is options that are off by default + return default_conf(); + } +}; +} +} +#endif diff --git a/deps/LIEF/include/LIEF/COFF/RegularHeader.hpp b/deps/LIEF/include/LIEF/COFF/RegularHeader.hpp new file mode 100644 index 00000000000000..1aab5525400d38 --- /dev/null +++ b/deps/LIEF/include/LIEF/COFF/RegularHeader.hpp @@ -0,0 +1,78 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#ifndef LIEF_COFF_REGULAR_HEADER_H +#define LIEF_COFF_REGULAR_HEADER_H +#include +#include "LIEF/COFF/Header.hpp" +#include "LIEF/visibility.h" + +namespace LIEF { +namespace COFF { + +/// This class represents the COFF header for non-bigobj +class LIEF_API RegularHeader : public Header { + public: + RegularHeader() : + Header(KIND::REGULAR) + {} + + /// Create a RegularHeader from the given stream + static std::unique_ptr create(BinaryStream& stream); + + RegularHeader& operator=(const RegularHeader&) = default; + RegularHeader(const RegularHeader&) = default; + + RegularHeader& operator=(RegularHeader&&) = default; + RegularHeader(RegularHeader&&) = default; + + std::unique_ptr
clone() const override { + return std::unique_ptr
(new RegularHeader(*this)); + } + + /// The size of the optional header that follows this header (should be 0) + uint16_t sizeof_optionalheader() const { + return sizeof_optionalheader_; + } + + /// Characteristics + uint16_t characteristics() const { + return characteristics_; + } + + void sizeof_optionalheader(uint16_t value) { + sizeof_optionalheader_ = value; + } + + void characteristics(uint16_t value) { + characteristics_ = value; + } + + static bool classof(const Header* header) { + return header->kind() == Header::KIND::REGULAR; + } + + ~RegularHeader() override = default; + + std::string to_string() const override; + + protected: + uint16_t sizeof_optionalheader_ = 0; + uint16_t characteristics_ = 0; +}; + +} +} +#endif diff --git a/deps/LIEF/include/LIEF/COFF/Relocation.hpp b/deps/LIEF/include/LIEF/COFF/Relocation.hpp new file mode 100644 index 00000000000000..960caf0ad8b3c0 --- /dev/null +++ b/deps/LIEF/include/LIEF/COFF/Relocation.hpp @@ -0,0 +1,218 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_COFF_RELOCATION_H +#define LIEF_COFF_RELOCATION_H +#include +#include +#include + +#include "LIEF/visibility.h" +#include "LIEF/COFF/Header.hpp" +#include "LIEF/Abstract/Relocation.hpp" + +namespace LIEF { +class BinaryStream; +namespace COFF { +class Section; +class Parser; +class Symbol; + +/// This class represents a COFF relocation +class LIEF_API Relocation : public LIEF::Relocation { + public: + friend class Parser; + + static constexpr uint32_t I386 = 1 << 17; + static constexpr uint32_t X64 = 1 << 18; + static constexpr uint32_t ARM = 1 << 19; + static constexpr uint32_t ARM64 = 1 << 20; + static constexpr uint32_t MIPS = 1 << 21; + + /// The different relocation types. 
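+  ///
+  /// Raw on-disk values can be converted with from_value() and to_value();
+  /// a short worked example of the encoding is given with to_value() below.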
+ /// + /// Please note that the original type is encoded on 16 bits but we encode + /// the type on 32-bits by adding a discriminator from the 17th bit + enum class TYPE : uint32_t { + UNKNOWN = uint32_t(-1), + I386_ABSOLUTE = I386 + 0x0000, + I386_DIR16 = I386 + 0x0001, + I386_REL16 = I386 + 0x0002, + I386_DIR32 = I386 + 0x0006, + I386_DIR32NB = I386 + 0x0007, + I386_SEG12 = I386 + 0x0009, + I386_SECTION = I386 + 0x000A, + I386_SECREL = I386 + 0x000B, + I386_TOKEN = I386 + 0x000C, + I386_SECREL7 = I386 + 0x000D, + I386_REL32 = I386 + 0x0014, + + AMD64_ABSOLUTE = X64 + 0x0000, + AMD64_ADDR64 = X64 + 0x0001, + AMD64_ADDR32 = X64 + 0x0002, + AMD64_ADDR32NB = X64 + 0x0003, + AMD64_REL32 = X64 + 0x0004, + AMD64_REL32_1 = X64 + 0x0005, + AMD64_REL32_2 = X64 + 0x0006, + AMD64_REL32_3 = X64 + 0x0007, + AMD64_REL32_4 = X64 + 0x0008, + AMD64_REL32_5 = X64 + 0x0009, + AMD64_SECTION = X64 + 0x000A, + AMD64_SECREL = X64 + 0x000B, + AMD64_SECREL7 = X64 + 0x000C, + AMD64_TOKEN = X64 + 0x000D, + AMD64_SREL32 = X64 + 0x000E, + AMD64_PAIR = X64 + 0x000F, + AMD64_SSPAN32 = X64 + 0x0010, + + ARM_ABSOLUTE = ARM + 0x0000, + ARM_ADDR32 = ARM + 0x0001, + ARM_ADDR32NB = ARM + 0x0002, + ARM_BRANCH24 = ARM + 0x0003, + ARM_BRANCH11 = ARM + 0x0004, + ARM_TOKEN = ARM + 0x0005, + ARM_BLX24 = ARM + 0x0008, + ARM_BLX11 = ARM + 0x0009, + ARM_REL32 = ARM + 0x000A, + ARM_SECTION = ARM + 0x000E, + ARM_SECREL = ARM + 0x000F, + ARM_MOV32A = ARM + 0x0010, + ARM_MOV32T = ARM + 0x0011, + ARM_BRANCH20T = ARM + 0x0012, + ARM_BRANCH24T = ARM + 0x0014, + ARM_BLX23T = ARM + 0x0015, + ARM_PAIR = ARM + 0x0016, + + ARM64_ABSOLUTE = ARM64 + 0x0000, + ARM64_ADDR32 = ARM64 + 0x0001, + ARM64_ADDR32NB = ARM64 + 0x0002, + ARM64_BRANCH26 = ARM64 + 0x0003, + ARM64_PAGEBASE_REL21 = ARM64 + 0x0004, + ARM64_REL21 = ARM64 + 0x0005, + ARM64_PAGEOFFSET_12A = ARM64 + 0x0006, + ARM64_PAGEOFFSET_12L = ARM64 + 0x0007, + ARM64_SECREL = ARM64 + 0x0008, + ARM64_SECREL_LOW12A = ARM64 + 0x0009, + ARM64_SECREL_HIGH12A = ARM64 + 0x000A, + ARM64_SECREL_LOW12L = ARM64 + 0x000B, + ARM64_TOKEN = ARM64 + 0x000C, + ARM64_SECTION = ARM64 + 0x000D, + ARM64_ADDR64 = ARM64 + 0x000E, + ARM64_BRANCH19 = ARM64 + 0x000F, + ARM64_BRANCH14 = ARM64 + 0x0010, + ARM64_REL32 = ARM64 + 0x0011, + + MIPS_ABSOLUTE = MIPS + 0x0000, + MIPS_REFHALF = MIPS + 0x0001, + MIPS_REFWORD = MIPS + 0x0002, + MIPS_JMPADDR = MIPS + 0x0003, + MIPS_REFHI = MIPS + 0x0004, + MIPS_REFLO = MIPS + 0x0005, + MIPS_GPREL = MIPS + 0x0006, + MIPS_LITERAL = MIPS + 0x0007, + MIPS_SECTION = MIPS + 0x000A, + MIPS_SECREL = MIPS + 0x000B, + MIPS_SECRELLO = MIPS + 0x000C, + MIPS_SECRELHI = MIPS + 0x000D, + MIPS_JMPADDR16 = MIPS + 0x0010, + MIPS_REFWORDNB = MIPS + 0x0022, + MIPS_PAIR = MIPS + 0x0025, + }; + + /// Convert a relocation enum type into a 16-bits value. 
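+  ///
+  /// Worked example based on the constants above: `TYPE::AMD64_REL32` is
+  /// stored as `X64 + 0x0004 = (1 << 18) + 4 = 0x40004`, so truncating it to
+  /// 16 bits yields the raw on-disk value `0x0004`.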
+ static uint16_t to_value(TYPE rtype) { + return (uint16_t)rtype; + } + + /// Create a relocation type from its raw value and the architecture + static TYPE from_value(uint16_t value, Header::MACHINE_TYPES arch) { + switch (arch) { + case Header::MACHINE_TYPES::ARM64: + return TYPE(value + ARM64); + + case Header::MACHINE_TYPES::AMD64: + return TYPE(value + X64); + + case Header::MACHINE_TYPES::I386: + return TYPE(value + I386); + + case Header::MACHINE_TYPES::ARM: + case Header::MACHINE_TYPES::ARMNT: + case Header::MACHINE_TYPES::THUMB: + return TYPE(value + ARM); + + case Header::MACHINE_TYPES::R4000: + return TYPE(value + MIPS); + + default: + return TYPE::UNKNOWN; + } + return TYPE::UNKNOWN; + } + + /// Create a relocation from the given stream + static std::unique_ptr parse( + BinaryStream& stream, Header::MACHINE_TYPES arch); + + /// Symbol index associated with this relocation + uint32_t symbol_idx() const { + return symbol_idx_; + } + + /// Symbol associated with the relocation (if any) + Symbol* symbol() { + return symbol_; + } + + const Symbol* symbol() const { + return symbol_; + } + + /// Type of the relocation + TYPE type() const { + return type_; + } + + /// Section in which the relocation takes place + Section* section() { + return section_; + } + + const Section* section() const { + return section_; + } + + std::string to_string() const; + + LIEF_API friend std::ostream& operator<<(std::ostream& os, const Relocation& R) { + os << R.to_string(); + return os; + } + + ~Relocation() override = default; + + private: + Relocation() = default; + uint32_t symbol_idx_ = 0; + TYPE type_ = TYPE::UNKNOWN; + Section* section_ = nullptr; + Symbol* symbol_ = nullptr; +}; + +LIEF_API const char* to_string(Relocation::TYPE e); + +} +} +#endif diff --git a/deps/LIEF/include/LIEF/COFF/Section.hpp b/deps/LIEF/include/LIEF/COFF/Section.hpp new file mode 100644 index 00000000000000..d70a3117adf5e2 --- /dev/null +++ b/deps/LIEF/include/LIEF/COFF/Section.hpp @@ -0,0 +1,250 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#ifndef LIEF_COFF_SECTION_H +#define LIEF_COFF_SECTION_H +#include +#include +#include + +#include "LIEF/visibility.h" +#include "LIEF/Abstract/Section.hpp" +#include "LIEF/PE/Section.hpp" +#include "LIEF/COFF/AuxiliarySymbols/AuxiliarySectionDefinition.hpp" +#include "LIEF/iterators.hpp" +#include "LIEF/optional.hpp" + +namespace LIEF { +class BinaryStream; + +namespace COFF { +class Relocation; +class Parser; +class Symbol; + + +/// This class represents a COFF section +class LIEF_API Section : public LIEF::Section { + public: + friend class Parser; + using LIEF::Section::name; + + using COMDAT_SELECTION = AuxiliarySectionDefinition::COMDAT_SELECTION; + + /// This structure wraps comdat information which is composed of the symbol + /// associated with the comdat section and its selection flag + struct LIEF_API ComdatInfo { + Symbol* symbol = nullptr; + COMDAT_SELECTION kind = COMDAT_SELECTION::NONE; + }; + + /// Mirror Characteristics from PE + using CHARACTERISTICS = LIEF::PE::Section::CHARACTERISTICS; + + /// Container for the relocations in this section (owned by the Binary object) + using relocations_t = std::vector; + + /// Iterator that outputs Relocation& + using it_relocations = ref_iterator; + + /// Iterator that outputs const Relocation& + using it_const_relocations = const_ref_iterator; + + /// Container for the symbols associated with this section (owned by the Binary object) + using symbols_t = std::vector; + + /// Iterator that outputs Symbol& + using it_symbols = ref_iterator; + + /// Iterator that outputs const Symbol& + using it_const_symbols = const_ref_iterator; + + /// Parse a section from the given stream + static std::unique_ptr
parse(BinaryStream& stream); + + /// Return the size of the data in the section. + uint32_t sizeof_raw_data() const { + return size_; + } + + /// Virtual size of the section (should be 0) + uint32_t virtual_size() const { + return virtual_size_; + } + + /// Content wrapped by this section + span content() const override { + return content_; + } + + /// Offset to the section's content + uint32_t pointerto_raw_data() const { + return offset_; + } + + /// Offset to the relocation table + uint32_t pointerto_relocation() const { + return pointer_to_relocations_; + } + + /// The file pointer to the beginning of line-number entries for the section. + /// + /// This is set to zero if there are no COFF line numbers. + /// This value should be zero for an image because COFF debugging information + /// is deprecated and modern debug information relies on the PDB files. + uint32_t pointerto_line_numbers() const { + return pointer_to_linenumbers_; + } + + /// Number of relocations. + /// + /// \warning If the number of relocations is greater than 0xFFFF (maximum + /// value for 16-bits integer), then the number of relocations is + /// stored in the virtual address of the **first** relocation. + uint16_t numberof_relocations() const { + return number_of_relocations_; + } + + /// Number of line number entries (if any). + uint16_t numberof_line_numbers() const { + return number_of_linenumbers_; + } + + /// Characteristics of the section: it provides information about + /// the permissions of the section when mapped. It can also provide + /// information about the *purpose* of the section (contain code, BSS-like, ...) + uint32_t characteristics() const { + return characteristics_; + } + + /// Check if the section has the given CHARACTERISTICS + bool has_characteristic(CHARACTERISTICS c) const { + return (characteristics() & (uint32_t)c) > 0; + } + + /// List of the section characteristics + std::vector characteristics_list() const { + return LIEF::PE::Section::characteristics_to_list(characteristics_); + } + + /// True if the section can be discarded as needed. 
+ /// + /// This is typically the case for debug-related sections + bool is_discardable() const { + return has_characteristic(CHARACTERISTICS::MEM_DISCARDABLE); + } + + void clear(uint8_t c) { + std::fill(content_.begin(), content_.end(), c); + } + + /// Iterator over the relocations associated with this section + it_relocations relocations() { + return relocations_; + } + + it_const_relocations relocations() const { + return relocations_; + } + + /// Iterator over the symbols associated with this section + it_symbols symbols() { + return symbols_; + } + + it_const_symbols symbols() const { + return symbols_; + } + + /// Return comdat infomration (only if the section has the + /// CHARACTERISTICS::LNK_COMDAT characteristic) + optional comdat_info() const; + + /// Whether there is a large number of relocations whose number need + /// to be stored in the virtual address attribute + bool has_extended_relocations() const { + return has_characteristic(CHARACTERISTICS::LNK_NRELOC_OVFL) && + numberof_relocations() == std::numeric_limits::max(); + } + + void content(const std::vector& data) override { + content_ = data; + } + + void name(std::string name) override; + + void virtual_size(uint32_t virtual_sz) { + virtual_size_ = virtual_sz; + } + + void pointerto_raw_data(uint32_t ptr) { + offset(ptr); + } + + void pointerto_relocation(uint32_t ptr) { + pointer_to_relocations_ = ptr; + } + + void pointerto_line_numbers(uint32_t ptr) { + pointer_to_linenumbers_ = ptr; + } + + void numberof_relocations(uint16_t nb) { + number_of_relocations_ = nb; + } + + void numberof_line_numbers(uint16_t nb) { + number_of_linenumbers_ = nb; + } + + void sizeof_raw_data(uint32_t size) { + this->size(size); + } + + void characteristics(uint32_t characteristics) { + characteristics_ = characteristics; + } + + std::string to_string() const; + + LIEF_API friend std::ostream& operator<<(std::ostream& os, const Section& sec) { + os << sec.to_string(); + return os; + } + + ~Section() override = default; + + private: + Section() = default; + + std::vector content_; + uint32_t virtual_size_ = 0; + uint32_t pointer_to_relocations_ = 0; + uint32_t pointer_to_linenumbers_ = 0; + uint16_t number_of_relocations_ = 0; + uint16_t number_of_linenumbers_ = 0; + uint32_t characteristics_ = 0; + + relocations_t relocations_; + symbols_t symbols_; +}; + +inline const char* to_string(Section::CHARACTERISTICS e) { + return LIEF::PE::to_string(e); +} + +} +} +#endif diff --git a/deps/LIEF/include/LIEF/COFF/String.hpp b/deps/LIEF/include/LIEF/COFF/String.hpp new file mode 100644 index 00000000000000..8e678762642dc8 --- /dev/null +++ b/deps/LIEF/include/LIEF/COFF/String.hpp @@ -0,0 +1,83 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#ifndef LIEF_COFF_STRING_H +#define LIEF_COFF_STRING_H +#include +#include +#include + +#include "LIEF/visibility.h" + +namespace LIEF { +namespace COFF { + +/// This class represents a string located in the COFF string table. +/// +/// Some of these strings can be used for section names that are greater than 8 +/// bytes. See: LIEF::PE::Section::coff_string() +/// +/// Reference: https://learn.microsoft.com/en-us/windows/win32/debug/pe-format#coff-string-table +class LIEF_API String { + public: + String() = default; + String(uint32_t offset, std::string str) : + str_(std::move(str)), + offset_(offset) + {} + + String(const String&) = default; + String& operator=(const String&) = default; + + String(String&&) = default; + String& operator=(String&&) = default; + + ~String() = default; + + /// The actual string + const std::string& str() const { + return str_; + } + + /// The offset of this string the in the COFF string table. + /// This offset includes the first 4-bytes that holds the table size + uint32_t offset() const { + return offset_; + } + + String& str(std::string str) { + str_ = std::move(str); + return *this; + } + + String& offset(uint32_t value) { + offset_ = value; + return *this; + } + + friend LIEF_API + std::ostream& operator<<(std::ostream& os, const String& str) + { + os << str.str(); + return os; + } + + private: + std::string str_; + uint32_t offset_ = 0; +}; +} +} +#endif diff --git a/deps/LIEF/include/LIEF/COFF/Symbol.hpp b/deps/LIEF/include/LIEF/COFF/Symbol.hpp new file mode 100644 index 00000000000000..3a9d8a97fcb4b7 --- /dev/null +++ b/deps/LIEF/include/LIEF/COFF/Symbol.hpp @@ -0,0 +1,286 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_COFF_SYMBOL_H +#define LIEF_COFF_SYMBOL_H + +#include "LIEF/Abstract/Symbol.hpp" +#include "LIEF/visibility.h" +#include "LIEF/iterators.hpp" + +#include +#include + +namespace LIEF { +class BinaryStream; +namespace COFF { +class Parser; +class AuxiliarySymbol; +class String; +class Section; + +/// This class represents a COFF symbol +class LIEF_API Symbol : public LIEF::Symbol { + public: + friend class Parser; + + using auxiliary_symbols_t = std::vector>; + using it_auxiliary_symbols_t = ref_iterator; + using it_const_auxiliary_symbols_t = const_ref_iterator; + + struct parsing_context_t { + std::function find_string; + bool is_bigobj; + }; + static std::unique_ptr parse( + parsing_context_t& ctx, BinaryStream& stream, size_t* idx); + + Symbol(); + Symbol(const Symbol&); + Symbol& operator=(const Symbol&); + + Symbol(Symbol&&); + Symbol& operator=(Symbol&&); + + /// The symbol provides general type or debugging information but does not + /// correspond to a section. Microsoft tools use this setting along with + /// `.file` records. + static constexpr auto SYM_SEC_IDX_DEBUG = -2; + /// The symbol has an absolute (non-relocatable) value and is not an address. 
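+  ///
+  /// A hedged sketch of how these sentinel indices might be checked, assuming
+  /// a LIEF::COFF::Symbol named `sym` (the name is illustrative); the helpers
+  /// Symbol::is_absolute() and Symbol::is_undefined() declared below build on
+  /// these values:
+  /// ```cpp
+  /// if (sym.section_idx() == LIEF::COFF::Symbol::SYM_SEC_IDX_ABS) {
+  ///   // `sym.value()` is a plain constant, not an address.
+  /// } else if (sym.section_idx() == LIEF::COFF::Symbol::SYM_SEC_IDX_UNDEF) {
+  ///   // External reference defined elsewhere (or a common symbol if value() != 0).
+  /// }
+  /// ```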
+ static constexpr auto SYM_SEC_IDX_ABS = -1; + /// The symbol record is not yet assigned a section. A value of zero indicates + /// that a reference to an external symbol is defined elsewhere. A value of + /// non-zero is a common symbol with a size that is specified by the value. + static constexpr auto SYM_SEC_IDX_UNDEF = 0; + + static constexpr auto SYM_COMPLEX_TYPE_SHIFT = 4; + + /// Reference: https://learn.microsoft.com/en-us/windows/win32/debug/pe-format#storage-class + enum class STORAGE_CLASS : int32_t { + INVALID = 0xFF, + + END_OF_FUNCTION = -1, ///< Physical end of function + NONE = 0, ///< No symbol + AUTOMATIC = 1, ///< Stack variable + EXTERNAL = 2, ///< External symbol + STATIC = 3, ///< Static + REGISTER = 4, ///< Register variable + EXTERNAL_DEF = 5, ///< External definition + LABEL = 6, ///< Label + UNDEFINED_LABEL = 7, ///< Undefined label + MEMBER_OF_STRUCT = 8, ///< Member of structure + ARGUMENT = 9, ///< Function argument + STRUCT_TAG = 10, ///< Structure tag + MEMBER_OF_UNION = 11, ///< Member of union + UNION_TAG = 12, ///< Union tag + TYPE_DEFINITION = 13, ///< Type definition + UNDEFINED_STATIC = 14, ///< Undefined static + ENUM_TAG = 15, ///< Enumeration tag + MEMBER_OF_ENUM = 16, ///< Member of enumeration + REGISTER_PARAM = 17, ///< Register parameter + BIT_FIELD = 18, ///< Bit field + BLOCK = 100, + FUNCTION = 101, + END_OF_STRUCT = 102, ///< End of structure + FILE = 103, ///< File name + SECTION = 104, + WEAK_EXTERNAL = 105, ///< Duplicate tag + CLR_TOKEN = 107 + }; + + enum class BASE_TYPE : uint32_t { + TY_NULL = 0, ///< No type information or unknown base type. + TY_VOID = 1, ///< Used with void pointers and functions. + TY_CHAR = 2, ///< A character (signed byte). + TY_SHORT = 3, ///< A 2-byte signed integer. + TY_INT = 4, ///< A natural integer type on the target. + TY_LONG = 5, ///< A 4-byte signed integer. + TY_FLOAT = 6, ///< A 4-byte floating-point number. + TY_DOUBLE = 7, ///< An 8-byte floating-point number. + TY_STRUCT = 8, ///< A structure. + TY_UNION = 9, ///< An union. + TY_ENUM = 10, ///< An enumerated type. + TY_MOE = 11, ///< A member of enumeration (a specific value). + TY_BYTE = 12, ///< A byte; unsigned 1-byte integer. + TY_WORD = 13, ///< A word; unsigned 2-byte integer. + TY_UINT = 14, ///< An unsigned integer of natural size. + TY_DWORD = 15 ///< An unsigned 4-byte integer. + }; + + enum class COMPLEX_TYPE : uint32_t { + TY_NULL = 0, ///< No complex type; simple scalar variable. + TY_POINTER = 1, ///< A pointer to base type. + TY_FUNCTION = 2, ///< A function that returns a base type. + TY_ARRAY = 3, ///< An array of base type. + }; + + /// Check if the given section index is a reserved value + static constexpr bool is_reversed_sec_idx(int16_t idx) { + return idx <= 0; + } + + /// The symbol type. The first byte represents the base type (see: base_type()) + /// while the upper byte represents the complex type, if any (see: + /// complex_type()). + uint16_t type() const { + return type_; + } + + /// Storage class of the symbol which indicates what kind of definition a + /// symbol represents. + STORAGE_CLASS storage_class() const { + return (STORAGE_CLASS)storage_class_; + } + + /// The simple (base) data type + BASE_TYPE base_type() const { + return (BASE_TYPE)(type_ & 0x0F); + } + + /// The complex type (if any) + COMPLEX_TYPE complex_type() const { + return (COMPLEX_TYPE)((type_ & 0xF0) >> SYM_COMPLEX_TYPE_SHIFT); + } + + /// The signed integer that identifies the section, using a one-based index + /// into the section table. 
Some values have special meaning: + /// + /// * 0: The symbol record is not yet assigned a section. A value of zero + /// indicates that a reference to an external symbol is defined elsewhere. + /// A value of non-zero is a common symbol with a size that is specified + /// by the value. + /// * -1: The symbol has an absolute (non-relocatable) value and is not an + /// address. + /// * -2: The symbol provides general type or debugging information but does + /// not correspond to a section. Microsoft tools use this setting along + /// with `.file` records + int16_t section_idx() const { + return section_idx_; + } + + /// Section associated with this symbol (if any) + Section* section() { + return section_; + } + + const Section* section() const { + return section_; + } + + bool is_external() const { + return storage_class() == STORAGE_CLASS::EXTERNAL; + } + + bool is_weak_external() const { + return storage_class() == STORAGE_CLASS::WEAK_EXTERNAL; + } + + bool is_absolute() const { + return section_idx() == SYM_SEC_IDX_ABS; + } + + bool is_undefined() const { + return is_external() && section_idx() == SYM_SEC_IDX_UNDEF && + value() == 0; + } + + bool is_function_line_info() const { + return storage_class() == STORAGE_CLASS::FUNCTION; + } + + bool is_function() const { + return complex_type() == COMPLEX_TYPE::TY_FUNCTION; + } + + bool is_file_record() const { + return storage_class() == STORAGE_CLASS::FILE; + } + + /// Auxiliary symbols associated with this symbol. + it_auxiliary_symbols_t auxiliary_symbols() { + return auxiliary_symbols_; + } + + it_const_auxiliary_symbols_t auxiliary_symbols() const { + return auxiliary_symbols_; + } + + /// Name of the symbol. If the symbol does not use a short name, it returns + /// the string pointed by the COFF string offset + const std::string& name() const override; + + std::string& name() override; + + /// COFF string used to represents the (long) symbol name + const String* coff_name() const { + return coff_name_; + } + + String* coff_name() { + return coff_name_; + } + + /// Demangled representation of the symbol or an empty string if it can't + /// be demangled + std::string demangled_name() const; + + Symbol& type(uint16_t ty) { + type_ = ty; + return *this; + } + + Symbol& storage_class(uint8_t value) { + storage_class_ = value; + return *this; + } + + Symbol& section_idx(int16_t idx) { + section_idx_ = idx; + return *this; + } + + /// Add a new auxiliary record + AuxiliarySymbol& add_aux(std::unique_ptr sym); + + std::string to_string() const; + + LIEF_API friend + std::ostream& operator<<(std::ostream& os, const Symbol& entry) + { + os << entry.to_string(); + return os; + } + + ~Symbol() override; + + private: + template + static std::unique_ptr parse_impl( + parsing_context_t& ctx, BinaryStream& stream, size_t* idx); + String* coff_name_ = nullptr; + uint16_t type_ = 0; + uint8_t storage_class_ = 0; + int16_t section_idx_ = 0; + auxiliary_symbols_t auxiliary_symbols_; + Section* section_ = nullptr; +}; + +LIEF_API const char* to_string(Symbol::STORAGE_CLASS e); +LIEF_API const char* to_string(Symbol::BASE_TYPE e); +LIEF_API const char* to_string(Symbol::COMPLEX_TYPE e); + +} +} +#endif diff --git a/deps/LIEF/include/LIEF/COFF/utils.hpp b/deps/LIEF/include/LIEF/COFF/utils.hpp new file mode 100644 index 00000000000000..73cb9fbf34ad82 --- /dev/null +++ b/deps/LIEF/include/LIEF/COFF/utils.hpp @@ -0,0 +1,63 @@ +/* Copyright 2017 - 2025 R. 
Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_COFF_UTILS_H +#define LIEF_COFF_UTILS_H +#include "LIEF/visibility.h" +#include "LIEF/COFF/Header.hpp" +#include "LIEF/BinaryStream/FileStream.hpp" +#include "LIEF/BinaryStream/SpanStream.hpp" +#include + +namespace LIEF { +namespace COFF { + +/// This function determines if the given stream wraps a COFF binary and if so, +/// whether it's a regular or bigobj COFF. +LIEF_API Header::KIND get_kind(BinaryStream& stream); + +/// Check if the given stream wraps a COFF file +LIEF_API inline bool is_coff(BinaryStream& stream) { + return get_kind(stream) != Header::KIND::UNKNOWN; +} + +/// Check if the `file` is a COFF +LIEF_API bool is_coff(const std::string& file); + +/// Check if the given buffer points to a COFF file +LIEF_API inline bool is_coff(const uint8_t* buffer, size_t size) { + LIEF::SpanStream strm(buffer, size); + return is_coff(strm); +} + +/// Check if the given buffer points to a COFF file +LIEF_API inline bool is_coff(const std::vector& buffer) { + return is_coff(buffer.data(), buffer.size()); +} + +/// Check if the COFF file wrapped by the given stream is a `bigobj` +LIEF_API inline bool is_bigobj(BinaryStream& stream) { + return get_kind(stream) == Header::KIND::BIGOBJ; +} + +/// Check if the COFF file wrapped by the given stream is regular +/// (i.e. not a bigobj) +LIEF_API inline bool is_regular(BinaryStream& stream) { + return get_kind(stream) == Header::KIND::REGULAR; +} + +} +} +#endif diff --git a/deps/LIEF/include/LIEF/DEX.hpp b/deps/LIEF/include/LIEF/DEX.hpp new file mode 100644 index 00000000000000..f976b08117d525 --- /dev/null +++ b/deps/LIEF/include/LIEF/DEX.hpp @@ -0,0 +1,32 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#ifndef LIEF_DEX_H +#define LIEF_DEX_H +#include "LIEF/config.h" + +#if defined(LIEF_DEX_SUPPORT) +#include "LIEF/DEX/Parser.hpp" +#include "LIEF/DEX/utils.hpp" +#include "LIEF/DEX/File.hpp" +#include "LIEF/DEX/Class.hpp" +#include "LIEF/DEX/Prototype.hpp" +#include "LIEF/DEX/Header.hpp" +#include "LIEF/DEX/Method.hpp" +#include "LIEF/DEX/Field.hpp" +#include "LIEF/DEX/EnumToString.hpp" +#endif + +#endif diff --git a/deps/LIEF/include/LIEF/DEX/Class.hpp b/deps/LIEF/include/LIEF/DEX/Class.hpp new file mode 100644 index 00000000000000..0b1baff36ad5f2 --- /dev/null +++ b/deps/LIEF/include/LIEF/DEX/Class.hpp @@ -0,0 +1,138 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_DEX_CLASS_H +#define LIEF_DEX_CLASS_H + +#include +#include + +#include "LIEF/visibility.h" +#include "LIEF/Object.hpp" +#include "LIEF/iterators.hpp" + +#include "LIEF/DEX/enums.hpp" +#include "LIEF/DEX/deopt.hpp" + +namespace LIEF { +namespace DEX { +class Parser; +class Field; +class Method; + +/// Class which represents a DEX Class (i.e. a Java/Kotlin class) +class LIEF_API Class : public Object { + friend class Parser; + + public: + using access_flags_list_t = std::vector; + + using methods_t = std::vector; + using it_methods = ref_iterator; + using it_const_methods = const_ref_iterator; + + using fields_t = std::vector; + using it_fields = ref_iterator; + using it_const_fields = const_ref_iterator; + + using it_named_methods = filter_iterator; + using it_const_named_methods = const_filter_iterator; + + using it_named_fields = filter_iterator; + using it_const_named_fields = const_filter_iterator; + + public: + static std::string package_normalized(const std::string& pkg_name); + static std::string fullname_normalized(const std::string& pkg_cls); + static std::string fullname_normalized(const std::string& pkg, const std::string& cls_name); + + Class(); + Class(const Class&) = delete; + Class& operator=(const Class&) = delete; + + Class(std::string fullname, uint32_t access_flags = ACCESS_FLAGS::ACC_UNKNOWN, + Class* parent = nullptr, std::string source_filename = ""); + + /// Mangled class name (e.g. 
``Lcom/example/android/MyActivity;``) + const std::string& fullname() const; + + /// Package Name + std::string package_name() const; + + /// Class name + std::string name() const; + + /// Demangled class name + std::string pretty_name() const; + + /// Check if the class has the given access flag + bool has(ACCESS_FLAGS f) const; + + /// Access flags used by this class + access_flags_list_t access_flags() const; + + /// Filename associated with this class (if any) + const std::string& source_filename() const; + + /// True if the current class extends another one + bool has_parent() const; + + /// Parent class + const Class* parent() const; + Class* parent(); + + /// Methods implemented in this class + it_const_methods methods() const; + it_methods methods(); + + /// Return Methods having the given name + it_named_methods methods(const std::string& name); + it_const_named_methods methods(const std::string& name) const; + + /// Fields implemented in this class + it_const_fields fields() const; + it_fields fields(); + + /// Return Fields having the given name + it_named_fields fields(const std::string& name); + it_const_named_fields fields(const std::string& name) const; + + /// De-optimize information + dex2dex_class_info_t dex2dex_info() const; + + /// Original index in the DEX class pool + size_t index() const; + + void accept(Visitor& visitor) const override; + + + LIEF_API friend std::ostream& operator<<(std::ostream& os, const Class& cls); + + ~Class() override; + + private: + std::string fullname_; + uint32_t access_flags_ = ACCESS_FLAGS::ACC_UNKNOWN; + Class* parent_ = nullptr; + methods_t methods_; + fields_t fields_; + std::string source_filename_; + + uint32_t original_index_ = UINT_MAX; +}; + +} // Namespace DEX +} // Namespace LIEF +#endif diff --git a/deps/LIEF/include/LIEF/DEX/CodeInfo.hpp b/deps/LIEF/include/LIEF/DEX/CodeInfo.hpp new file mode 100644 index 00000000000000..85410e665b1700 --- /dev/null +++ b/deps/LIEF/include/LIEF/DEX/CodeInfo.hpp @@ -0,0 +1,60 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
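Given a resolved `Class`, the accessors above are enough to walk its members. A minimal sketch (the `dump_class` helper is illustrative, not a LIEF API), assuming the iterator ranges returned by `methods()` and `fields()` can be used directly in a range-for:

```cpp
#include <iostream>

#include "LIEF/DEX/Class.hpp"
#include "LIEF/DEX/Field.hpp"
#include "LIEF/DEX/Method.hpp"

// Illustrative helper (not part of LIEF): print one class and its members.
void dump_class(const LIEF::DEX::Class& cls) {
  std::cout << cls.pretty_name() << " (package: " << cls.package_name() << ")\n";
  if (cls.has(LIEF::DEX::ACCESS_FLAGS::ACC_PUBLIC)) {
    std::cout << "  [public]\n";
  }
  for (const LIEF::DEX::Method& method : cls.methods()) {
    std::cout << "  method: " << method.name() << '\n';
  }
  for (const LIEF::DEX::Field& field : cls.fields()) {
    std::cout << "  field: " << field.name() << '\n';
  }
}
```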
+ */ +#ifndef LIEF_DEX_CODE_INFO_H +#define LIEF_DEX_CODE_INFO_H + +#include +#include + +#include "LIEF/visibility.h" +#include "LIEF/Object.hpp" + +namespace LIEF { +namespace DEX { +namespace details { +struct code_item; +} + +class Parser; + +class LIEF_API CodeInfo : public Object { + friend class Parser; + + public: + CodeInfo(); + CodeInfo(const details::code_item& codeitem); + + CodeInfo(const CodeInfo&); + CodeInfo& operator=(const CodeInfo&); + + void accept(Visitor& visitor) const override; + + uint16_t nb_registers() const; + + ~CodeInfo() override; + + LIEF_API friend std::ostream& operator<<(std::ostream& os, const CodeInfo& cinfo); + + private: + uint16_t nb_registers_ = 0; + uint16_t args_input_sizes_ = 0; + uint16_t output_sizes_ = 0; + +}; + +} // Namespace DEX +} // Namespace LIEF +#endif diff --git a/deps/LIEF/include/LIEF/DEX/EnumToString.hpp b/deps/LIEF/include/LIEF/DEX/EnumToString.hpp new file mode 100644 index 00000000000000..18336a7028b940 --- /dev/null +++ b/deps/LIEF/include/LIEF/DEX/EnumToString.hpp @@ -0,0 +1,35 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_DEX_ENUM_TO_STRING_H +#define LIEF_DEX_ENUM_TO_STRING_H +#include "LIEF/visibility.h" +#include "LIEF/DEX/enums.hpp" +#include "LIEF/DEX/MapItem.hpp" +#include "LIEF/DEX/Type.hpp" + +namespace LIEF { +namespace DEX { + +LIEF_API const char* to_string(MapItem::TYPES e); +LIEF_API const char* to_string(ACCESS_FLAGS e); +LIEF_API const char* to_string(Type::TYPES e); +LIEF_API const char* to_string(Type::PRIMITIVES e); + +} // namespace DEX +} // namespace LIEF + +#endif + diff --git a/deps/LIEF/include/LIEF/DEX/Field.hpp b/deps/LIEF/include/LIEF/DEX/Field.hpp new file mode 100644 index 00000000000000..25283a81e00718 --- /dev/null +++ b/deps/LIEF/include/LIEF/DEX/Field.hpp @@ -0,0 +1,94 @@ +/* Copyright 2021 - 2025 R. Thomas + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#ifndef LIEF_DEX_FIELD_H +#define LIEF_DEX_FIELD_H + +#include +#include + +#include "LIEF/DEX/enums.hpp" + +#include "LIEF/visibility.h" +#include "LIEF/Object.hpp" + +#include "LIEF/DEX/Type.hpp" + +namespace LIEF { +namespace DEX { +class Parser; +class Class; + +/// Class which represent a DEX Field +class LIEF_API Field : public Object { + friend class Parser; + public: + using access_flags_list_t = std::vector; + + public: + Field(); + Field(std::string name, Class* parent = nullptr); + + Field(const Field&); + Field& operator=(const Field&); + + /// Name of the Field + const std::string& name() const; + + /// True if a class is associated with this field + /// (which should be the case) + bool has_class() const; + + /// Class associated with this Field + const Class* cls() const; + Class* cls(); + + /// Index in the DEX Fields pool + size_t index() const; + + /// True if this field is a static one. + bool is_static() const; + + /// Field's prototype + const Type* type() const; + Type* type(); + + void accept(Visitor& visitor) const override; + + /// Check if the field has the given ACCESS_FLAGS + bool has(ACCESS_FLAGS f) const; + + /// ACCESS_FLAGS as a list + access_flags_list_t access_flags() const; + + + LIEF_API friend std::ostream& operator<<(std::ostream& os, const Field& mtd); + + ~Field() override; + + private: + void set_static(bool v); + + private: + std::string name_; + Class* parent_ = nullptr; + Type* type_ = nullptr; + uint32_t access_flags_ = 0; + uint32_t original_index_ = UINT_MAX; + bool is_static_ = false; +}; + +} // Namespace DEX +} // Namespace LIEF +#endif diff --git a/deps/LIEF/include/LIEF/DEX/File.hpp b/deps/LIEF/include/LIEF/DEX/File.hpp new file mode 100644 index 00000000000000..6cf6240d82c8de --- /dev/null +++ b/deps/LIEF/include/LIEF/DEX/File.hpp @@ -0,0 +1,176 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#ifndef LIEF_DEX_FILE_H +#define LIEF_DEX_FILE_H +#include + +#include "LIEF/visibility.h" +#include "LIEF/Object.hpp" + +#include "LIEF/DEX/Header.hpp" +#include "LIEF/DEX/MapList.hpp" +#include "LIEF/DEX/instructions.hpp" +#include "LIEF/DEX/deopt.hpp" +#include "LIEF/DEX/types.hpp" + +namespace LIEF { +namespace DEX { +class Parser; +class Class; +class Method; +class Type; +class Prototype; +class Field; + +/// Class that represents a DEX file +class LIEF_API File : public Object { + friend class Parser; + + public: + using classes_t = std::unordered_map; + using classes_list_t = std::vector>; + using it_classes = ref_iterator; + using it_const_classes = const_ref_iterator; + + using methods_t = std::vector>; + using it_methods = ref_iterator; + using it_const_methods = const_ref_iterator; + + using strings_t = std::vector>; + using it_strings = ref_iterator; + using it_const_strings = const_ref_iterator; + + using types_t = std::vector>; + using it_types = ref_iterator; + using it_const_types = const_ref_iterator; + + using prototypes_t = std::vector>; + using it_prototypes = ref_iterator; + using it_const_prototypes = const_ref_iterator; + + using fields_t = std::vector>; + using it_fields = ref_iterator; + using it_const_fields = const_ref_iterator; + + public: + File& operator=(const File& copy) = delete; + File(const File& copy) = delete; + + /// Version of the current DEX file + dex_version_t version() const; + + /// Name of this file + const std::string& name() const; + + void name(const std::string& name); + + /// Location of this file + const std::string& location() const; + void location(const std::string& location); + + /// DEX header + const Header& header() const; + Header& header(); + + /// **All** classes used in the DEX file + it_const_classes classes() const; + it_classes classes(); + + /// Check if the given class name exists + bool has_class(const std::string& class_name) const; + + /// Return the DEX::Class object associated with the given name + const Class* get_class(const std::string& class_name) const; + + Class* get_class(const std::string& class_name); + + /// Return the DEX::Class object associated with the given index + const Class* get_class(size_t index) const; + + Class* get_class(size_t index); + + /// De-optimize information + dex2dex_info_t dex2dex_info() const; + + /// De-optimize information as JSON + std::string dex2dex_json_info() const; + + /// Return an iterator over **all** the DEX::Method used in this DEX file + it_const_methods methods() const; + it_methods methods(); + + /// Return an iterator over **all** the DEX::Field used in this DEX file + it_const_fields fields() const; + it_fields fields(); + + /// String pool + it_const_strings strings() const; + it_strings strings(); + + /// Type pool + it_const_types types() const; + it_types types(); + + /// Prototype pool + it_prototypes prototypes(); + it_const_prototypes prototypes() const; + + /// DEX Map + const MapList& map() const; + MapList& map(); + + /// Extract the current dex file and deoptimize it + std::string save(const std::string& path = "", bool deoptimize = true) const; + + std::vector raw(bool deoptimize = true) const; + + void accept(Visitor& visitor) const override; + + + ~File() override; + + LIEF_API friend std::ostream& operator<<(std::ostream& os, const File& file); + + private: + File(); + + void add_class(std::unique_ptr cls); + + static void deoptimize_nop(uint8_t* inst_ptr, uint32_t value); + static void deoptimize_return(uint8_t* inst_ptr, uint32_t value); + 
static void deoptimize_invoke_virtual(uint8_t* inst_ptr, uint32_t value, OPCODES new_inst); + static void deoptimize_instance_field_access(uint8_t* inst_ptr, uint32_t value, OPCODES new_inst); + + std::string name_; + std::string location_; + + Header header_; + classes_t classes_; + methods_t methods_; + fields_t fields_; + strings_t strings_; + types_t types_; + prototypes_t prototypes_; + MapList map_; + + classes_list_t class_list_; + std::vector original_data_; +}; + +} +} + +#endif diff --git a/deps/LIEF/include/LIEF/DEX/Header.hpp b/deps/LIEF/include/LIEF/DEX/Header.hpp new file mode 100644 index 00000000000000..b1d269f7a2119a --- /dev/null +++ b/deps/LIEF/include/LIEF/DEX/Header.hpp @@ -0,0 +1,136 @@ + +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_DEX_HEADER_H +#define LIEF_DEX_HEADER_H + +#include +#include +#include +#include + +#include "LIEF/visibility.h" +#include "LIEF/Object.hpp" + +namespace LIEF { +class Visitor; + +namespace DEX { +class Parser; + +/// Class which represents the DEX header. +/// This is the first structure that begins the DEX format. +/// +/// The official documentation is provided here: +/// https://source.android.com/devices/tech/dalvik/dex-format#header-item +class LIEF_API Header : public Object { + friend class Parser; + + public: + using location_t = std::pair; + + using magic_t = std::array; + using signature_t = std::array; + + Header(); + Header(const Header&); + Header& operator=(const Header&); + + template + LIEF_LOCAL Header(const T& header); + + /// The DEX magic bytes (``DEX\n`` followed by the DEX version) + magic_t magic() const; + + /// The file checksum + uint32_t checksum() const; + + /// SHA-1 DEX signature (which is not really used as a signature) + signature_t signature() const; + + /// Size of the entire file (including the current the header) + uint32_t file_size() const; + + /// Size of this header. 
It should be 0x70 + uint32_t header_size() const; + + /// File endianess of the file + uint32_t endian_tag() const; + + /// Offset from the start of the file to the map list (see: DEX::MapList) + uint32_t map() const; + + /// Offset and size of the string pool + location_t strings() const; + location_t link() const; + location_t types() const; + location_t prototypes() const; + location_t fields() const; + location_t methods() const; + location_t classes() const; + location_t data() const; + + uint32_t nb_classes() const; + + uint32_t nb_methods() const; + + void accept(Visitor& visitor) const override; + + + LIEF_API friend std::ostream& operator<<(std::ostream& os, const Header& hdr); + + ~Header() override; + + private: + magic_t magic_; + uint32_t checksum_; + signature_t signature_; + uint32_t file_size_; + + uint32_t header_size_; + uint32_t endian_tag_; + + uint32_t link_size_; + uint32_t link_off_; + + uint32_t map_off_; + + uint32_t string_ids_size_; + uint32_t string_ids_off_; + + uint32_t type_ids_size_; + uint32_t type_ids_off_; + + uint32_t proto_ids_size_; + uint32_t proto_ids_off_; + + uint32_t field_ids_size_; + uint32_t field_ids_off_; + + uint32_t method_ids_size_; + uint32_t method_ids_off_; + + uint32_t class_defs_size_; + uint32_t class_defs_off_; + + uint32_t data_size_; + uint32_t data_off_; +}; + +} // Namespace DEX +} // Namespace LIEF + +#endif diff --git a/deps/LIEF/include/LIEF/DEX/MapItem.hpp b/deps/LIEF/include/LIEF/DEX/MapItem.hpp new file mode 100644 index 00000000000000..bb791c472e18f1 --- /dev/null +++ b/deps/LIEF/include/LIEF/DEX/MapItem.hpp @@ -0,0 +1,96 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
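`File` and `Header` together expose the top-level layout of a DEX. A minimal sketch (the `dump_dex` helper is ours, not LIEF's) that prints a few header fields and then the class pool:

```cpp
#include <iostream>

#include "LIEF/DEX/Class.hpp"
#include "LIEF/DEX/File.hpp"

// Illustrative helper (not part of LIEF): summarize a parsed DEX file.
void dump_dex(const LIEF::DEX::File& dex) {
  const LIEF::DEX::Header& hdr = dex.header();
  std::cout << "version : " << dex.version()    << '\n'
            << "size    : " << hdr.file_size()  << '\n'
            << "classes : " << hdr.nb_classes() << '\n'
            << "methods : " << hdr.nb_methods() << '\n';

  for (const LIEF::DEX::Class& cls : dex.classes()) {
    std::cout << cls.fullname() << '\n';
  }
}
```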
+ */ +#ifndef LIEF_MAP_ITEM_H +#define LIEF_MAP_ITEM_H + +#include +#include + +#include "LIEF/visibility.h" +#include "LIEF/Object.hpp" + +namespace LIEF { +namespace DEX { +class Parser; +class Class; + +/// Class which represents an element of the MapList object +class LIEF_API MapItem : public Object { + friend class Parser; + + public: + enum class TYPES : uint16_t { + HEADER = 0x0000, + STRING_ID = 0x0001, + TYPE_ID = 0x0002, + PROTO_ID = 0x0003, + FIELD_ID = 0x0004, + METHOD_ID = 0x0005, + CLASS_DEF = 0x0006, + CALL_SITE_ID = 0x0007, + METHOD_HANDLE = 0x0008, + MAP_LIST = 0x1000, + TYPE_LIST = 0x1001, + ANNOTATION_SET_REF_LIST = 0x1002, + ANNOTATION_SET = 0x1003, + CLASS_DATA = 0x2000, + CODE = 0x2001, + STRING_DATA = 0x2002, + DEBUG_INFO = 0x2003, + ANNOTATION = 0x2004, + ENCODED_ARRAY = 0x2005, + ANNOTATIONS_DIRECTORY = 0x2006, + + }; + + public: + MapItem(); + MapItem(TYPES type, uint32_t offset, uint32_t size, uint16_t reserved = 0); + + MapItem(const MapItem&); + MapItem& operator=(const MapItem&); + + /// The type of the item + TYPES type() const; + + /// Reserved value (likely for alignment purpose) + uint16_t reserved() const; + + /// The number of elements (the real meaning depends on the type) + uint32_t size() const; + + /// Offset from the start of the DEX file to the items associated with + /// the underlying TYPES + uint32_t offset() const; + + void accept(Visitor& visitor) const override; + + + LIEF_API friend std::ostream& operator<<(std::ostream& os, const MapItem& item); + + ~MapItem() override; + + private: + TYPES type_; + uint16_t reserved_; + uint32_t size_; + uint32_t offset_; + +}; + +} // Namespace DEX +} // Namespace LIEF +#endif diff --git a/deps/LIEF/include/LIEF/DEX/MapList.hpp b/deps/LIEF/include/LIEF/DEX/MapList.hpp new file mode 100644 index 00000000000000..c0c59ee83b1657 --- /dev/null +++ b/deps/LIEF/include/LIEF/DEX/MapList.hpp @@ -0,0 +1,84 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_MAP_LIST_H +#define LIEF_MAP_LIST_H +#include +#include + +#include "LIEF/visibility.h" +#include "LIEF/iterators.hpp" +#include "LIEF/Object.hpp" + +#include "LIEF/DEX/MapItem.hpp" + +namespace LIEF { +namespace DEX { +class Parser; +class Class; + +/// Class which represents the ``map_list`` structure that +/// follows the main DEX header.
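Each `MapItem` is essentially a (type, count, offset) triple, so dumping one is mostly a matter of formatting; `to_string(MapItem::TYPES)` from `EnumToString.hpp` provides the readable name. A minimal sketch (the `dump_map_item` helper is illustrative only); the `MapList` that aggregates these items continues just below:

```cpp
#include <iostream>

#include "LIEF/DEX/EnumToString.hpp"
#include "LIEF/DEX/MapItem.hpp"

// Illustrative helper (not part of LIEF): describe a single map_list entry.
void dump_map_item(const LIEF::DEX::MapItem& item) {
  std::cout << LIEF::DEX::to_string(item.type())
            << " count="    << item.size()
            << " offset=0x" << std::hex << item.offset() << std::dec << '\n';
}
```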
+/// +/// This MapList aims at referencing the location of other DEX structures as +/// described in https://source.android.com/devices/tech/dalvik/dex-format#map-item +class LIEF_API MapList : public Object { + friend class Parser; + + public: + using items_t = std::map; + using it_items_t = ref_iterator>; + using it_const_items_t = const_ref_iterator>; + + public: + MapList(); + + MapList(const MapList&); + MapList& operator=(const MapList&); + + /// Iterator over LIEF::DEX::MapItem + it_items_t items(); + it_const_items_t items() const; + + /// Check if the given type exists + bool has(MapItem::TYPES type) const; + + /// Return the LIEF::DEX::MapItem associated with the given type + const MapItem& get(MapItem::TYPES type) const; + + /// Return the LIEF::DEX::MapItem associated with the given type + MapItem& get(MapItem::TYPES type); + + /// Return the LIEF::DEX::MapItem associated with the given type + const MapItem& operator[](MapItem::TYPES type) const; + + /// Return the LIEF::DEX::MapItem associated with the given type + MapItem& operator[](MapItem::TYPES type); + + void accept(Visitor& visitor) const override; + + + LIEF_API friend std::ostream& operator<<(std::ostream& os, const MapList& mtd); + + ~MapList() override; + + private: + items_t items_; + +}; + +} // Namespace DEX +} // Namespace LIEF +#endif diff --git a/deps/LIEF/include/LIEF/DEX/Method.hpp b/deps/LIEF/include/LIEF/DEX/Method.hpp new file mode 100644 index 00000000000000..54eec9bd6627f1 --- /dev/null +++ b/deps/LIEF/include/LIEF/DEX/Method.hpp @@ -0,0 +1,118 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_DEX_METHOD_H +#define LIEF_DEX_METHOD_H + +#include +#include + +#include "LIEF/visibility.h" +#include "LIEF/Object.hpp" + +#include "LIEF/DEX/enums.hpp" +#include "LIEF/DEX/CodeInfo.hpp" +#include "LIEF/DEX/deopt.hpp" + +namespace LIEF { +namespace DEX { +class Parser; +class Class; +class Prototype; + +/// Class which represents a DEX::Method +class LIEF_API Method : public Object { + friend class Parser; + public: + using access_flags_list_t = std::vector; + + public: + using bytecode_t = std::vector; + Method(); + Method(std::string name, Class* parent = nullptr); + + Method(const Method&); + Method& operator=(const Method&); + + /// Name of the Method + const std::string& name() const; + + /// True if a class is associated with this method + bool has_class() const; + + /// DEX::Class associated with this Method or a nullptr + /// if not resolved + const Class* cls() const; + Class* cls(); + + /// Offset to the Dalvik Bytecode + uint64_t code_offset() const; + + /// Dalvik Bytecode as bytes + const bytecode_t& bytecode() const; + + /// Index in the DEX Methods pool + size_t index() const; + + /// True if this method is a virtual one. + /// i.e. 
not **static**, **private**, **final** or constructor + bool is_virtual() const; + + /// Method's prototype or a nullptr if it is not resolved + const Prototype* prototype() const; + Prototype* prototype(); + + void insert_dex2dex_info(uint32_t pc, uint32_t index); + + void accept(Visitor& visitor) const override; + + const dex2dex_method_info_t& dex2dex_info() const; + + /// Check if the current method has the given ACCESS_FLAGS + bool has(ACCESS_FLAGS f) const; + + /// ACCESS_FLAGS as a list + access_flags_list_t access_flags() const; + + // CodeInfo to get additional data about a method i.e. argument count + const CodeInfo& code_info() const; + + LIEF_API friend std::ostream& operator<<(std::ostream& os, const Method& mtd); + + ~Method() override; + + private: + void set_virtual(bool v); + + private: + std::string name_; + Class* parent_ = nullptr; + Prototype* prototype_ = nullptr; + uint32_t access_flags_ = ACCESS_FLAGS::ACC_UNKNOWN; + uint32_t original_index_ = UINT_MAX; + bool is_virtual_ = false; + + uint64_t code_offset_ = 0; + std::vector bytecode_; + + CodeInfo code_info_; + + dex2dex_method_info_t dex2dex_info_; + +}; + +} // Namespace DEX +} // Namespace LIEF +#endif diff --git a/deps/LIEF/include/LIEF/DEX/Parser.hpp b/deps/LIEF/include/LIEF/DEX/Parser.hpp new file mode 100644 index 00000000000000..7b925d129da240 --- /dev/null +++ b/deps/LIEF/include/LIEF/DEX/Parser.hpp @@ -0,0 +1,124 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
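The `Method` accessors above answer the usual reverse-engineering questions: where the code lives, how large it is, and which flags are set. A minimal sketch (the `dump_method` helper is ours, not LIEF's), assuming the flag list returned by `access_flags()` can be iterated directly:

```cpp
#include <iostream>

#include "LIEF/DEX/EnumToString.hpp"
#include "LIEF/DEX/Method.hpp"

// Illustrative helper (not part of LIEF): summarize one method.
void dump_method(const LIEF::DEX::Method& method) {
  std::cout << method.name()
            << " @0x" << std::hex << method.code_offset() << std::dec
            << " (" << method.bytecode().size() << " bytes of Dalvik bytecode)"
            << (method.is_virtual() ? " [virtual]" : "") << '\n';

  for (LIEF::DEX::ACCESS_FLAGS flag : method.access_flags()) {
    std::cout << "  flag: " << LIEF::DEX::to_string(flag) << '\n';
  }
}
```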
+ */ +#ifndef LIEF_DEX_PARSER_H +#define LIEF_DEX_PARSER_H + +#include +#include +#include +#include + +#include "LIEF/visibility.h" +#include "LIEF/DEX/types.hpp" + +namespace LIEF { +class VectorStream; + +namespace DEX { +class Class; +class Method; +class Field; +class File; +class Type; + +/// Class which parses a DEX file to produce a DEX::File object +class LIEF_API Parser { + public: + + /// Parse the DEX file from the file path given in parameter + static std::unique_ptr parse(const std::string& file); + static std::unique_ptr parse(std::vector data, const std::string& name = ""); + + Parser& operator=(const Parser& copy) = delete; + Parser(const Parser& copy) = delete; + + private: + Parser(); + Parser(const std::string& file); + Parser(std::vector data); + ~Parser(); + + void init(const std::string& name, dex_version_t version); + + template + void parse_file(); + + template + void parse_header(); + + template + void parse_map(); + + template + void parse_strings(); + + template + void parse_types(); + + template + void parse_fields(); + + template + void parse_prototypes(); + + template + void parse_methods(); + + template + void parse_classes(); + + template + void parse_class_data(uint32_t offset, Class& cls); + + template + void parse_field(size_t index, Class& cls, bool is_static); + + template + void parse_method(size_t index, Class& cls, bool is_virtual); + + template + void parse_code_info(uint32_t offset, Method& method); + + void resolve_inheritance(); + + void resolve_external_methods(); + + void resolve_external_fields(); + + void resolve_types(); + + std::unique_ptr file_; + + // Map of inheritance relationship when parsing classes ('parse_classes') + // The key is the parent class name of the value + std::unordered_multimap inheritance_; + + // Map of method/class relationship when parsing methods ('parse_methods') + // The key is the Class name in which the method is defined + std::unordered_multimap class_method_map_; + + // Map of field/class relationship when parsing fields ('parse_fields') + // The key is the Class name in which the field is defined + std::unordered_multimap class_field_map_; + + std::unordered_multimap class_type_map_; + + std::unique_ptr stream_; +}; + +} // namespace DEX +} // namespace LIEF +#endif diff --git a/deps/LIEF/include/LIEF/DEX/Prototype.hpp b/deps/LIEF/include/LIEF/DEX/Prototype.hpp new file mode 100644 index 00000000000000..ff6413a5c0b88e --- /dev/null +++ b/deps/LIEF/include/LIEF/DEX/Prototype.hpp @@ -0,0 +1,64 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
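`Parser::parse()` is the usual entry point and hands back an owning pointer to the `File`. A minimal sketch, assuming a null result signals a parsing failure and using `classes.dex` as a placeholder path:

```cpp
#include <iostream>
#include <memory>

#include "LIEF/DEX/File.hpp"
#include "LIEF/DEX/Parser.hpp"

int main() {
  std::unique_ptr<LIEF::DEX::File> dex = LIEF::DEX::Parser::parse("classes.dex");
  if (dex == nullptr) {  // Assumed failure convention.
    std::cerr << "unable to parse classes.dex\n";
    return 1;
  }
  std::cout << "parsed DEX version " << dex->version() << '\n';
  return 0;
}
```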
+ */ +#ifndef LIEF_DEX_PROTOTYPE_H +#define LIEF_DEX_PROTOTYPE_H + +#include "LIEF/visibility.h" +#include "LIEF/Object.hpp" +#include "LIEF/iterators.hpp" + +namespace LIEF { +namespace DEX { +class Parser; +class Type; + +/// Class which represents a DEX method prototype +class LIEF_API Prototype : public Object { + friend class Parser; + + public: + using parameters_type_t = std::vector; + using it_params = ref_iterator; + using it_const_params = const_ref_iterator; + + public: + Prototype(); + Prototype(const Prototype& other); + + /// Type returned or a nullptr if not resolved + const Type* return_type() const; + Type* return_type(); + + /// Types of the parameters + it_const_params parameters_type() const; + it_params parameters_type(); + + void accept(Visitor& visitor) const override; + + + LIEF_API friend std::ostream& operator<<(std::ostream& os, const Prototype& type); + + ~Prototype() override; + + private: + Type* return_type_ = nullptr; + parameters_type_t params_; + +}; + +} // Namespace DEX +} // Namespace LIEF +#endif diff --git a/deps/LIEF/include/LIEF/DEX/Type.hpp b/deps/LIEF/include/LIEF/DEX/Type.hpp new file mode 100644 index 00000000000000..0c93f4922f7ea8 --- /dev/null +++ b/deps/LIEF/include/LIEF/DEX/Type.hpp @@ -0,0 +1,113 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_DEX_TYPE_H +#define LIEF_DEX_TYPE_H + +#include +#include +#include + +#include "LIEF/visibility.h" +#include "LIEF/Object.hpp" + +namespace LIEF { +namespace DEX { +class Parser; +class Class; + +/// Class which represents a DEX type as described in the +/// format specifications: https://source.android.com/devices/tech/dalvik/dex-format#typedescriptor +class LIEF_API Type : public Object { + friend class Parser; + + public: + enum class TYPES { + UNKNOWN = 0, + PRIMITIVE = 1, + CLASS = 2, + ARRAY = 3, + }; + + enum class PRIMITIVES { + VOID_T = 0x01, + BOOLEAN = 0x02, + BYTE = 0x03, + SHORT = 0x04, + CHAR = 0x05, + INT = 0x06, + LONG = 0x07, + FLOAT = 0x08, + DOUBLE = 0x09, + }; + + using array_t = std::vector; + + public: + static std::string pretty_name(PRIMITIVES p); + + public: + Type(); + Type(const std::string& mangled); + Type(const Type& other); + + /// Whether it is a primitive type, a class, ... + TYPES type() const; + + const Class& cls() const; + const array_t& array() const; + const PRIMITIVES& primitive() const; + + /// **IF** the current type is a TYPES::CLASS, return the + /// associated DEX::CLASS. Otherwise the returned value is **undefined**. + Class& cls(); + + /// **IF** the current type is a TYPES::ARRAY, return the + /// associated array. Otherwise the returned value is **undefined**. + array_t& array(); + + /// **IF** the current type is a TYPES::PRIMITIVE, return the + /// associated PRIMITIVES. Otherwise the returned value is **undefined**. + PRIMITIVES& primitive(); + + /// Return the array dimension if the current type is + /// an array. 
Otherwise it returns 0 + size_t dim() const; + + /// In the case of a TYPES::ARRAY, return the array's type + const Type& underlying_array_type() const; + Type& underlying_array_type(); + + void accept(Visitor& visitor) const override; + + + LIEF_API friend std::ostream& operator<<(std::ostream& os, const Type& type); + + ~Type() override; + + private: + void parse(const std::string& type); + + TYPES type_{TYPES::UNKNOWN}; + union { + Class* cls_{nullptr}; + array_t* array_; + PRIMITIVES* basic_; + }; +}; + +} // Namespace DEX +} // Namespace LIEF +#endif diff --git a/deps/LIEF/include/LIEF/DEX/deopt.hpp b/deps/LIEF/include/LIEF/DEX/deopt.hpp new file mode 100644 index 00000000000000..4164a8ffecc43b --- /dev/null +++ b/deps/LIEF/include/LIEF/DEX/deopt.hpp @@ -0,0 +1,34 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_DEX_DEOPT_TYPES_H +#define LIEF_DEX_DEOPT_TYPES_H +#include +#include + +namespace LIEF { +namespace DEX { +class Class; +class Method; + +// Method Index: {dex_pc: index, ...} +using dex2dex_method_info_t = std::unordered_map; +using dex2dex_class_info_t = std::unordered_map; +using dex2dex_info_t = std::unordered_map; + +} +} + +#endif diff --git a/deps/LIEF/include/LIEF/DEX/enums.hpp b/deps/LIEF/include/LIEF/DEX/enums.hpp new file mode 100644 index 00000000000000..4e808d8f9b76d1 --- /dev/null +++ b/deps/LIEF/include/LIEF/DEX/enums.hpp @@ -0,0 +1,82 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
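Because `Type` behaves as a tagged union, callers are expected to branch on `type()` before touching `cls()`, `array()` or `primitive()`. A minimal sketch (the `describe` helper and its one-`*`-per-dimension notation are illustrative only):

```cpp
#include <string>

#include "LIEF/DEX/Class.hpp"
#include "LIEF/DEX/Type.hpp"

// Illustrative helper (not part of LIEF): render a Type as text.
std::string describe(const LIEF::DEX::Type& type) {
  switch (type.type()) {
    case LIEF::DEX::Type::TYPES::PRIMITIVE:
      return LIEF::DEX::Type::pretty_name(type.primitive());
    case LIEF::DEX::Type::TYPES::CLASS:
      return type.cls().fullname();
    case LIEF::DEX::Type::TYPES::ARRAY:
      // One '*' per dimension; the element type comes from underlying_array_type().
      return describe(type.underlying_array_type()) + std::string(type.dim(), '*');
    default:
      return "unknown";
  }
}
```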
+ */ +#ifndef LIEF_DEX_ENUMS_H +#define LIEF_DEX_ENUMS_H + +namespace LIEF { +namespace DEX { + +enum ACCESS_FLAGS { + ACC_UNKNOWN = 0x0, + ACC_PUBLIC = 0x1, + ACC_PRIVATE = 0x2, + ACC_PROTECTED = 0x4, + ACC_STATIC = 0x8, + ACC_FINAL = 0x10, + ACC_SYNCHRONIZED = 0x20, + ACC_VOLATILE = 0x40, + ACC_BRIDGE = 0x40, + ACC_TRANSIENT = 0x80, + ACC_VARARGS = 0x80, + ACC_NATIVE = 0x100, + ACC_INTERFACE = 0x200, + ACC_ABSTRACT = 0x400, + ACC_STRICT = 0x800, + ACC_SYNTHETIC = 0x1000, + ACC_ANNOTATION = 0x2000, + ACC_ENUM = 0x4000, + ACC_CONSTRUCTOR = 0x10000, + ACC_DECLARED_SYNCHRONIZED = 0x20000 +}; + + +enum METHOD_TYPES { + METHOD_UNKNOWN = 0x00, + METHOD_VIRTUAL = 0x01, + METHOD_DIRECT = 0x02, // Deprecated + + METHOD_EXTERN = 0x03, + METHOD_CTOR = 0x04, + METHOD_STATIC = 0x05, + METHOD_STATIC_CTOR = 0x06, +}; + +static const ACCESS_FLAGS access_flags_list[] = { + ACCESS_FLAGS::ACC_UNKNOWN, + ACCESS_FLAGS::ACC_PUBLIC, + ACCESS_FLAGS::ACC_PRIVATE, + ACCESS_FLAGS::ACC_PROTECTED, + ACCESS_FLAGS::ACC_STATIC, + ACCESS_FLAGS::ACC_FINAL, + ACCESS_FLAGS::ACC_SYNCHRONIZED, + ACCESS_FLAGS::ACC_VOLATILE, + ACCESS_FLAGS::ACC_BRIDGE, + ACCESS_FLAGS::ACC_TRANSIENT, + ACCESS_FLAGS::ACC_VARARGS, + ACCESS_FLAGS::ACC_NATIVE, + ACCESS_FLAGS::ACC_INTERFACE, + ACCESS_FLAGS::ACC_ABSTRACT, + ACCESS_FLAGS::ACC_STRICT, + ACCESS_FLAGS::ACC_SYNTHETIC, + ACCESS_FLAGS::ACC_ANNOTATION, + ACCESS_FLAGS::ACC_ENUM, + ACCESS_FLAGS::ACC_CONSTRUCTOR, + ACCESS_FLAGS::ACC_DECLARED_SYNCHRONIZED, +}; + +} +} +#endif diff --git a/deps/LIEF/include/LIEF/DEX/hash.hpp b/deps/LIEF/include/LIEF/DEX/hash.hpp new file mode 100644 index 00000000000000..42184a42af9ef1 --- /dev/null +++ b/deps/LIEF/include/LIEF/DEX/hash.hpp @@ -0,0 +1,66 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#ifndef LIEF_DEX_HASH_H +#define LIEF_DEX_HASH_H + +#include "LIEF/visibility.h" +#include "LIEF/hash.hpp" + +namespace LIEF { +class Object; + +namespace DEX { + +class Class; +class Field; +class File; +class Header; +class MapItem; +class MapList; +class Method; +class Prototype; +class CodeInfo; +class Type; + +/// Class which implements a visitor to compute +/// a **deterministic** hash for LIEF DEX objects +class LIEF_API Hash : public LIEF::Hash { + public: + static LIEF::Hash::value_type hash(const Object& obj); + + public: + using LIEF::Hash::Hash; + using LIEF::Hash::visit; + + public: + void visit(const File& file) override; + void visit(const Header& header) override; + void visit(const Class& cls) override; + void visit(const Field& field) override; + void visit(const Method& method) override; + void visit(const CodeInfo& code_info) override; + void visit(const Type& type) override; + void visit(const Prototype& type) override; + void visit(const MapItem& item) override; + void visit(const MapList& list) override; + + ~Hash() override; +}; + +} +} + +#endif diff --git a/deps/LIEF/include/LIEF/DEX/instructions.hpp b/deps/LIEF/include/LIEF/DEX/instructions.hpp new file mode 100644 index 00000000000000..4c076d4575fabb --- /dev/null +++ b/deps/LIEF/include/LIEF/DEX/instructions.hpp @@ -0,0 +1,356 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
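`Hash::hash()` produces a deterministic value for any LIEF DEX object, which makes structural comparisons cheap. A minimal sketch (the `same_header` helper is ours), assuming `LIEF::Hash::value_type` is an ordinary comparable integer:

```cpp
#include "LIEF/DEX/File.hpp"
#include "LIEF/DEX/hash.hpp"

// Illustrative helper (not part of LIEF): compare two DEX headers structurally.
bool same_header(const LIEF::DEX::File& lhs, const LIEF::DEX::File& rhs) {
  return LIEF::DEX::Hash::hash(lhs.header()) ==
         LIEF::DEX::Hash::hash(rhs.header());
}
```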
+ */ +#ifndef LIEF_DEX_INSTRUCTIONS_H +#define LIEF_DEX_INSTRUCTIONS_H +#include "LIEF/visibility.h" +#include "LIEF/types.hpp" +#include + +namespace LIEF { +namespace DEX { + +enum SWITCH_ARRAY_IDENT : uint16_t { + IDENT_PACKED_SWITCH = 0x0100, + IDENT_SPARSE_SWITCH = 0x0200, + IDENT_FILL_ARRAY = 0x0300, +}; + +/// The Dalvik Opcodes +enum OPCODES : uint8_t { + OP_NOP = 0x00, + OP_MOVE = 0x01, + OP_MOVE_FROM_16 = 0x02, + OP_MOVE_16 = 0x03, + OP_MOVE_WIDE = 0x04, + OP_MOVE_WIDE_FROM_16 = 0x05, + OP_MOVE_WIDE_16 = 0x06, + OP_MOVE_OBJECT = 0x07, + OP_MOVE_OBJECT_FROM_16 = 0x08, + OP_MOVE_OBJECT_16 = 0x09, + OP_MOVE_RESULT = 0x0a, + OP_MOVE_RESULT_WIDE = 0x0b, + OP_MOVE_RESULT_OBJECT = 0x0c, + OP_MOVE_EXCEPTION = 0x0d, + OP_RETURN_VOID = 0x0e, + OP_RETURN = 0x0f, + OP_RETURN_WIDE = 0x10, + OP_RETURN_OBJECT = 0x11, + OP_CONST_4 = 0x12, + OP_CONST_16 = 0x13, + OP_CONST = 0x14, + OP_CONST_HIGH_16 = 0x15, + OP_CONST_WIDE_16 = 0x16, + OP_CONST_WIDE_32 = 0x17, + OP_CONST_WIDE = 0x18, + OP_CONST_WIDE_HIGH_16 = 0x19, + OP_CONST_STRING = 0x1a, + OP_CONST_STRING_JUMBO = 0x1b, + OP_CONST_CLASS = 0x1c, + OP_MONITOR_ENTER = 0x1d, + OP_MONITOR_EXIT = 0x1e, + OP_CHECK_CAST = 0x1f, + OP_INSTANCE_OF = 0x20, + OP_ARRAY_LENGTH = 0x21, + OP_NEW_INSTANCE = 0x22, + OP_NEW_ARRAY = 0x23, + OP_FILLED_NEW_ARRAY = 0x24, + OP_FILLED_NEW_ARRAY_RANGE = 0x25, + OP_FILL_ARRAY_DATA = 0x26, + OP_THROW = 0x27, + OP_GOTO = 0x28, + OP_GOTO_16 = 0x29, + OP_GOTO_32 = 0x2a, + OP_PACKED_SWITCH = 0x2b, + OP_SPARSE_SWITCH = 0x2c, + OP_CMPL_FLOAT = 0x2d, + OP_CMPG_FLOAT = 0x2e, + OP_CMPL_DOUBLE = 0x2f, + OP_CMPG_DOUBLE = 0x30, + OP_CMP_LONG = 0x31, + OP_IF_EQ = 0x32, + OP_IF_NE = 0x33, + OP_IF_LT = 0x34, + OP_IF_GE = 0x35, + OP_IF_GT = 0x36, + OP_IF_LE = 0x37, + OP_IF_EQZ = 0x38, + OP_IF_NEZ = 0x39, + OP_IF_LTZ = 0x3a, + OP_IF_GEZ = 0x3b, + OP_IF_GTZ = 0x3c, + OP_IF_LEZ = 0x3d, + OP_AGET = 0x44, + OP_AGET_WIDE = 0x45, + OP_AGET_OBJECT = 0x46, + OP_AGET_BOOLEAN = 0x47, + OP_AGET_BYTE = 0x48, + OP_AGET_CHAR = 0x49, + OP_AGET_SHORT = 0x4a, + OP_APUT = 0x4b, + OP_APUT_WIDE = 0x4c, + OP_APUT_OBJECT = 0x4d, + OP_APUT_BOOLEAN = 0x4e, + OP_APUT_BYTE = 0x4f, + OP_APUT_CHAR = 0x50, + OP_APUT_SHORT = 0x51, + OP_IGET = 0x52, + OP_IGET_WIDE = 0x53, + OP_IGET_OBJECT = 0x54, + OP_IGET_BOOLEAN = 0x55, + OP_IGET_BYTE = 0x56, + OP_IGET_CHAR = 0x57, + OP_IGET_SHORT = 0x58, + OP_IPUT = 0x59, + OP_IPUT_WIDE = 0x5a, + OP_IPUT_OBJECT = 0x5b, + OP_IPUT_BOOLEAN = 0x5c, + OP_IPUT_BYTE = 0x5d, + OP_IPUT_CHAR = 0x5e, + OP_IPUT_SHORT = 0x5f, + OP_SGET = 0x60, + OP_SGET_WIDE = 0x61, + OP_SGET_OBJECT = 0x62, + OP_SGET_BOOLEAN = 0x63, + OP_SGET_BYTE = 0x64, + OP_SGET_CHAR = 0x65, + OP_SGET_SHORT = 0x66, + OP_SPUT = 0x67, + OP_SPUT_WIDE = 0x68, + OP_SPUT_OBJECT = 0x69, + OP_SPUT_BOOLEAN = 0x6a, + OP_SPUT_BYTE = 0x6b, + OP_SPUT_CHAR = 0x6c, + OP_SPUT_SHORT = 0x6d, + OP_INVOKE_VIRTUAL = 0x6e, + OP_INVOKE_SUPER = 0x6f, + OP_INVOKE_DIRECT = 0x70, + OP_INVOKE_STATIC = 0x71, + OP_INVOKE_INTERFACE = 0x72, + OP_RETURN_VOID_NO_BARRIER = 0x73, + OP_INVOKE_VIRTUAL_RANGE = 0x74, + OP_INVOKE_SUPER_RANGE = 0x75, + OP_INVOKE_DIRECT_RANGE = 0x76, + OP_INVOKE_STATIC_RANGE = 0x77, + OP_INVOKE_INTERFACE_RANGE = 0x78, + OP_NEG_INT = 0x7b, + OP_NOT_INT = 0x7c, + OP_NEG_LONG = 0x7d, + OP_NOT_LONG = 0x7e, + OP_NEG_FLOAT = 0x7f, + OP_NEG_DOUBLE = 0x80, + OP_INT_TO_LONG = 0x81, + OP_INT_TO_FLOAT = 0x82, + OP_INT_TO_DOUBLE = 0x83, + OP_LONG_TO_INT = 0x84, + OP_LONG_TO_FLOAT = 0x85, + OP_LONG_TO_DOUBLE = 0x86, + OP_FLOAT_TO_INT = 0x87, + OP_FLOAT_TO_LONG = 0x88, + OP_FLOAT_TO_DOUBLE = 0x89, + 
OP_DOUBLE_TO_INT = 0x8a, + OP_DOUBLE_TO_LONG = 0x8b, + OP_DOUBLE_TO_FLOAT = 0x8c, + OP_INT_TO_BYTE = 0x8d, + OP_INT_TO_CHAR = 0x8e, + OP_INT_TO_SHORT = 0x8f, + OP_ADD_INT = 0x90, + OP_SUB_INT = 0x91, + OP_MUL_INT = 0x92, + OP_DIV_INT = 0x93, + OP_REM_INT = 0x94, + OP_AND_INT = 0x95, + OP_OR_INT = 0x96, + OP_XOR_INT = 0x97, + OP_SHL_INT = 0x98, + OP_SHR_INT = 0x99, + OP_USHR_INT = 0x9a, + OP_ADD_LONG = 0x9b, + OP_SUB_LONG = 0x9c, + OP_MUL_LONG = 0x9d, + OP_DIV_LONG = 0x9e, + OP_REM_LONG = 0x9f, + OP_AND_LONG = 0xa0, + OP_OR_LONG = 0xa1, + OP_XOR_LONG = 0xa2, + OP_SHL_LONG = 0xa3, + OP_SHR_LONG = 0xa4, + OP_USHR_LONG = 0xa5, + OP_ADD_FLOAT = 0xa6, + OP_SUB_FLOAT = 0xa7, + OP_MUL_FLOAT = 0xa8, + OP_DIV_FLOAT = 0xa9, + OP_REM_FLOAT = 0xaa, + OP_ADD_DOUBLE = 0xab, + OP_SUB_DOUBLE = 0xac, + OP_MUL_DOUBLE = 0xad, + OP_DIV_DOUBLE = 0xae, + OP_REM_DOUBLE = 0xaf, + OP_ADD_INT_2_ADDR = 0xb0, + OP_SUB_INT_2_ADDR = 0xb1, + OP_MUL_INT_2_ADDR = 0xb2, + OP_DIV_INT_2_ADDR = 0xb3, + OP_REM_INT_2_ADDR = 0xb4, + OP_AND_INT_2_ADDR = 0xb5, + OP_OR_INT_2_ADDR = 0xb6, + OP_XOR_INT_2_ADDR = 0xb7, + OP_SHL_INT_2_ADDR = 0xb8, + OP_SHR_INT_2_ADDR = 0xb9, + OP_USHR_INT_2_ADDR = 0xba, + OP_ADD_LONG_2_ADDR = 0xbb, + OP_SUB_LONG_2_ADDR = 0xbc, + OP_MUL_LONG_2_ADDR = 0xbd, + OP_DIV_LONG_2_ADDR = 0xbe, + OP_REM_LONG_2_ADDR = 0xbf, + OP_AND_LONG_2_ADDR = 0xc0, + OP_OR_LONG_2_ADDR = 0xc1, + OP_XOR_LONG_2_ADDR = 0xc2, + OP_SHL_LONG_2_ADDR = 0xc3, + OP_SHR_LONG_2_ADDR = 0xc4, + OP_USHR_LONG_2_ADDR = 0xc5, + OP_ADD_FLOAT_2_ADDR = 0xc6, + OP_SUB_FLOAT_2_ADDR = 0xc7, + OP_MUL_FLOAT_2_ADDR = 0xc8, + OP_DIV_FLOAT_2_ADDR = 0xc9, + OP_REM_FLOAT_2_ADDR = 0xca, + OP_ADD_DOUBLE_2_ADDR = 0xcb, + OP_SUB_DOUBLE_2_ADDR = 0xcc, + OP_MUL_DOUBLE_2_ADDR = 0xcd, + OP_DIV_DOUBLE_2_ADDR = 0xce, + OP_REM_DOUBLE_2_ADDR = 0xcf, + OP_ADD_INT_LIT_16 = 0xd0, + OP_RSUB_INT = 0xd1, + OP_MUL_INT_LIT_16 = 0xd2, + OP_DIV_INT_LIT_16 = 0xd3, + OP_REM_INT_LIT_16 = 0xd4, + OP_AND_INT_LIT_16 = 0xd5, + OP_OR_INT_LIT_16 = 0xd6, + OP_XOR_INT_LIT_16 = 0xd7, + OP_ADD_INT_LIT_8 = 0xd8, + OP_RSUB_INT_LIT_8 = 0xd9, + OP_MUL_INT_LIT_8 = 0xda, + OP_DIV_INT_LIT_8 = 0xdb, + OP_REM_INT_LIT_8 = 0xdc, + OP_AND_INT_LIT_8 = 0xdd, + OP_OR_INT_LIT_8 = 0xde, + OP_XOR_INT_LIT_8 = 0xdf, + OP_SHL_INT_LIT_8 = 0xe0, + OP_SHR_INT_LIT_8 = 0xe1, + OP_USHR_INT_LIT_8 = 0xe2, + + // ODEX + OP_IGET_QUICK = 0xe3, + OP_IGET_WIDE_QUICK = 0xe4, + OP_IGET_OBJECT_QUICK = 0xe5, + OP_IPUT_QUICK = 0xe6, + OP_IPUT_WIDE_QUICK = 0xe7, + OP_IPUT_OBJECT_QUICK = 0xe8, + OP_INVOKE_VIRTUAL_QUICK = 0xe9, + OP_INVOKE_VIRTUAL_RANGE_QUICK = 0xea, + OP_IPUT_BOOLEAN_QUICK = 0xeb, + OP_IPUT_BYTE_QUICK = 0xec, + OP_IPUT_CHAR_QUICK = 0xed, + OP_IPUT_SHORT_QUICK = 0xee, + OP_IGET_BOOLEAN_QUICK = 0xef, + OP_IGET_BYTE_QUICK = 0xf0, + OP_IGET_CHAR_QUICK = 0xf1, + OP_IGET_SHORT_QUICK = 0xf2, + + // From DEX 38 + OP_INVOKE_POLYMORPHIC = 0xfa, + OP_INVOKE_POLYMORPHIC_RANGE = 0xfb, + OP_INVOKE_CUSTOM = 0xfc, + OP_INVOKE_CUSTOM_RANGE = 0xfd, + + // From DEX 39 + OP_CONST_METHOD_HANDLE = 0xfe, + OP_CONST_METHOD_TYPE = 0xff, +}; + +enum INST_FORMATS : uint8_t { + F_00x = 0, + F_10x, + F_12x, + F_11n, + F_11x, + F_10t, + F_20t, + F_20bc, + F_22x, + F_21t, + F_21s, + F_21h, + F_21c, + F_23x, + F_22b, + F_22t, + F_22s, + F_22c, + F_22cs, + F_30t, + F_32x, + F_31i, + F_31t, + F_31c, + F_35c, + F_35ms, + F_35mi, + F_3rc, + F_3rms, + F_3rmi, + F_51l, + + // Since DEX 38 + F_45cc, + F_4rcc, +}; + +struct packed_switch { + uint16_t ident; // 0x0100 + uint16_t size; + uint32_t first_key; + // uint32_t targets[size] +}; + + +struct 
sparse_switch { + uint16_t ident; // 0x0200 + uint16_t size; + // uint32_t targets[size] +}; + +struct fill_array_data { + uint16_t ident; + uint16_t element_width; + uint32_t size; + //uint8_t data[size]; +}; + + +/// Return the INST_FORMATS format associated with the given opcode +LIEF_API INST_FORMATS inst_format_from_opcode(OPCODES op); + +LIEF_API size_t inst_size_from_format(INST_FORMATS fmt); +LIEF_API size_t inst_size_from_opcode(OPCODES op); + +LIEF_API bool is_switch_array(const uint8_t* ptr, const uint8_t* end); + +LIEF_API size_t switch_array_size(const uint8_t* ptr, const uint8_t* end); + +} // Namespace LIEF +} // Namespace DEX + +#endif + diff --git a/deps/LIEF/include/LIEF/DEX/json.hpp b/deps/LIEF/include/LIEF/DEX/json.hpp new file mode 100644 index 00000000000000..74b79fe48e4584 --- /dev/null +++ b/deps/LIEF/include/LIEF/DEX/json.hpp @@ -0,0 +1,33 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_DEX_PUBLIC_JSON_H +#define LIEF_DEX_PUBLIC_JSON_H + +#include "LIEF/visibility.h" +#include + +namespace LIEF { +class Object; + +namespace DEX { + +LIEF_API std::string to_json(const Object& v); + +} +} + + +#endif diff --git a/deps/LIEF/include/LIEF/DEX/types.hpp b/deps/LIEF/include/LIEF/DEX/types.hpp new file mode 100644 index 00000000000000..e38b44ea5aeba9 --- /dev/null +++ b/deps/LIEF/include/LIEF/DEX/types.hpp @@ -0,0 +1,28 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_DEX_TYPEDEF_H +#define LIEF_DEX_TYPEDEF_H +#include + +namespace LIEF { +namespace DEX { + +using dex_version_t = uint32_t; + +} +} + +#endif diff --git a/deps/LIEF/include/LIEF/DEX/utils.hpp b/deps/LIEF/include/LIEF/DEX/utils.hpp new file mode 100644 index 00000000000000..ceecc6b0a0a8cc --- /dev/null +++ b/deps/LIEF/include/LIEF/DEX/utils.hpp @@ -0,0 +1,49 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
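`inst_size_from_opcode()` together with `is_switch_array()`/`switch_array_size()` is enough for a naive linear sweep over a method's bytecode. A rough sketch only, assuming both size helpers return byte counts and that a zero size marks an opcode they do not know about:

```cpp
#include <cstddef>
#include <cstdint>
#include <vector>

#include "LIEF/DEX/instructions.hpp"

// Illustrative helper (not part of LIEF): count instructions with a naive
// linear sweep, skipping inline payloads (packed/sparse switch, fill-array).
size_t count_instructions(const std::vector<uint8_t>& bytecode) {
  const uint8_t* ptr = bytecode.data();
  const uint8_t* end = ptr + bytecode.size();
  size_t count = 0;
  while (ptr < end) {
    if (LIEF::DEX::is_switch_array(ptr, end)) {
      ptr += LIEF::DEX::switch_array_size(ptr, end);  // Skip the data payload.
      continue;
    }
    const auto op = static_cast<LIEF::DEX::OPCODES>(*ptr);
    const size_t size = LIEF::DEX::inst_size_from_opcode(op);
    if (size == 0) { break; }  // Unknown opcode: stop rather than spin.
    ptr += size;
    ++count;
  }
  return count;
}
```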
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_DEX_UTILS_H +#define LIEF_DEX_UTILS_H + +#include +#include + +#include "LIEF/DEX/types.hpp" + +#include "LIEF/types.hpp" +#include "LIEF/visibility.h" + +namespace LIEF { +class BinaryStream; +namespace DEX { + +/// Check if the given file is a DEX. +LIEF_API bool is_dex(const std::string& file); + +/// Check if the given raw data is a DEX. +LIEF_API bool is_dex(const std::vector& raw); + +/// Return the DEX version of the given file +LIEF_API dex_version_t version(const std::string& file); + +/// Return the DEX version of the raw data +LIEF_API dex_version_t version(const std::vector& raw); + +dex_version_t version(BinaryStream& stream); + +} +} + + +#endif diff --git a/deps/LIEF/include/LIEF/DWARF.hpp b/deps/LIEF/include/LIEF/DWARF.hpp new file mode 100644 index 00000000000000..39c516b5e6cbf0 --- /dev/null +++ b/deps/LIEF/include/LIEF/DWARF.hpp @@ -0,0 +1,42 @@ +/* Copyright 2017 - 2025 R. Thomas + * Copyright 2017 - 2025 Quarkslab + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_DWARF_H +#define LIEF_DWARF_H + +#include "LIEF/DWARF/enums.hpp" +#include "LIEF/DWARF/DebugInfo.hpp" +#include "LIEF/DWARF/CompilationUnit.hpp" +#include "LIEF/DWARF/Function.hpp" +#include "LIEF/DWARF/Variable.hpp" +#include "LIEF/DWARF/Scope.hpp" +#include "LIEF/DWARF/Type.hpp" +#include "LIEF/DWARF/types.hpp" +#include "LIEF/DWARF/Parameter.hpp" +#include "LIEF/DWARF/Editor.hpp" + +#include "LIEF/DWARF/editor/CompilationUnit.hpp" +#include "LIEF/DWARF/editor/Function.hpp" +#include "LIEF/DWARF/editor/Variable.hpp" +#include "LIEF/DWARF/editor/Type.hpp" +#include "LIEF/DWARF/editor/PointerType.hpp" +#include "LIEF/DWARF/editor/EnumType.hpp" +#include "LIEF/DWARF/editor/BaseType.hpp" +#include "LIEF/DWARF/editor/ArrayType.hpp" +#include "LIEF/DWARF/editor/FunctionType.hpp" +#include "LIEF/DWARF/editor/TypeDef.hpp" +#include "LIEF/DWARF/editor/StructType.hpp" + +#endif diff --git a/deps/LIEF/include/LIEF/DWARF/CompilationUnit.hpp b/deps/LIEF/include/LIEF/DWARF/CompilationUnit.hpp new file mode 100644 index 00000000000000..c9491b530d4099 --- /dev/null +++ b/deps/LIEF/include/LIEF/DWARF/CompilationUnit.hpp @@ -0,0 +1,282 @@ +/* Copyright 2022 - 2025 R. Thomas + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
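The DEX-level `is_dex()`/`version()` helpers mirror the COFF ones and work directly on a path. A minimal sketch using `classes.dex` as a placeholder default:

```cpp
#include <iostream>
#include <string>

#include "LIEF/DEX/utils.hpp"

int main(int argc, char** argv) {
  const std::string path = argc > 1 ? argv[1] : "classes.dex";
  if (!LIEF::DEX::is_dex(path)) {
    std::cerr << path << " is not a DEX file\n";
    return 1;
  }
  std::cout << "DEX version: " << LIEF::DEX::version(path) << '\n';
  return 0;
}
```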
+ */ +#ifndef LIEF_DWARF_COMPILATION_UNIT_H +#define LIEF_DWARF_COMPILATION_UNIT_H +#include +#include +#include + +#include "LIEF/visibility.h" +#include "LIEF/range.hpp" +#include "LIEF/iterators.hpp" +#include "LIEF/DWARF/Function.hpp" +#include "LIEF/DWARF/Type.hpp" + +namespace LIEF { +namespace dwarf { + +namespace details { +class CompilationUnit; +class CompilationUnitIt; +} + +/// This class represents a DWARF compilation unit +class LIEF_API CompilationUnit { + public: + class LIEF_API Iterator { + public: + using iterator_category = std::bidirectional_iterator_tag; + using value_type = std::unique_ptr; + using difference_type = std::ptrdiff_t; + using pointer = CompilationUnit*; + using reference = std::unique_ptr&; + using implementation = details::CompilationUnitIt; + + class LIEF_API PointerProxy { + // Inspired from LLVM's iterator_facade_base + friend class Iterator; + public: + pointer operator->() const { return R.get(); } + + private: + value_type R; + + template + PointerProxy(RefT &&R) : R(std::forward(R)) {} // NOLINT(bugprone-forwarding-reference-overload) + }; + + Iterator(const Iterator&); + Iterator(Iterator&&) noexcept; + Iterator(std::unique_ptr impl); + ~Iterator(); + + friend LIEF_API bool operator==(const Iterator& LHS, const Iterator& RHS); + friend LIEF_API bool operator!=(const Iterator& LHS, const Iterator& RHS) { + return !(LHS == RHS); + } + + Iterator& operator++(); + Iterator& operator--(); + + Iterator operator--(int) { + Iterator tmp = *static_cast(this); + --*static_cast(this); + return tmp; + } + + Iterator operator++(int) { + Iterator tmp = *static_cast(this); + ++*static_cast(this); + return tmp; + } + + std::unique_ptr operator*() const; + + PointerProxy operator->() const { + return static_cast(this)->operator*(); + } + + private: + std::unique_ptr impl_; + }; + + /// Iterator over the dwarf::Function + using functions_it = iterator_range; + + /// Iterator over the dwarf::Type + using types_it = iterator_range; + + /// Iterator over the CompilationUnit's variables + using vars_it = iterator_range; + + /// Languages supported by the DWARF (v5) format. + /// See: https://dwarfstd.org/languages.html + /// + /// Some languages (like C++11, C++17, ..) have a version (11, 17, ...) which + /// is stored in a dedicated attribute: #version + class Language { + public: + enum LANG : uint32_t { + UNKNOWN = 0, + C, + CPP, + RUST, + DART, + MODULA, + FORTRAN, + SWIFT, + D, + JAVA, + COBOL, + }; + + /// The language itself + LANG lang = UNKNOWN; + + /// Version of the language (e.g. 17 for C++17) + uint32_t version = 0; + + Language() = default; + Language(LANG lang, uint32_t version) : + lang(lang), version(version) + {} + Language(LANG lang) : + Language(lang, 0) + {} + + Language(const Language&) = default; + Language& operator=(const Language&) = default; + Language(Language&&) = default; + Language& operator=(Language&&) = default; + ~Language() = default; + }; + CompilationUnit(std::unique_ptr impl); + ~CompilationUnit(); + + /// Name of the file associated with this compilation unit (e.g. `test.cpp`) + /// Return an **empty** string if the name is not found or can't be resolved + /// + /// This value matches the `DW_AT_name` attribute + std::string name() const; + + /// Information about the program (or library) that generated this compilation + /// unit. For instance, it can output: `Debian clang version 17.0.6`. 
+ /// + /// It returns an **empty** string if the producer is not present or can't be + /// resolved + /// + /// This value matches the `DW_AT_producer` attribute + std::string producer() const; + + /// Return the path to the directory in which the compilation took place for + /// compiling this compilation unit (e.g. `/workdir/build`) + /// + /// It returns an **empty** string if the entry is not present or can't be + /// resolved + /// + /// This value matches the `DW_AT_comp_dir` attribute + std::string compilation_dir() const; + + /// Original Language of this compilation unit. + /// + /// This value matches the `DW_AT_language` attribute. + Language language() const; + + /// Return the lowest virtual address owned by this compilation unit. + uint64_t low_address() const; + + /// Return the highest virtual address owned by this compilation unit. + uint64_t high_address() const; + + /// Return the size of the compilation unit according to its range of address. + /// + /// If the compilation is fragmented (i.e. there are some address ranges + /// between the lowest address and the highest that are not owned by the CU), + /// then it returns the sum of **all** the address ranges owned by this CU. + /// + /// If the compilation unit is **not** fragmented, then is basically returns + /// `high_address - low_address`. + uint64_t size() const; + + /// Return a list of address ranges owned by this compilation unit. + /// + /// If the compilation unit owns a contiguous range, it should return + /// **a single** range. + std::vector ranges() const; + + /// Try to find the function whose name is given in parameter. + /// + /// The provided name can be demangled + std::unique_ptr find_function(const std::string& name) const; + + /// Try to find the function at the given address + std::unique_ptr find_function(uint64_t addr) const; + + /// Try to find the Variable at the given address + std::unique_ptr find_variable(uint64_t addr) const; + + /// Try to find the Variable with the given name + std::unique_ptr find_variable(const std::string& name) const; + + /// Return an iterator over the functions implemented in this compilation + /// unit. + /// + /// Note that this iterator only iterates over the functions that have a + /// **concrete** implementation in the compilation unit. + /// + /// For instance with this code: + /// + /// ```cpp + /// inline const char* get_secret_env() { + /// return getenv("MY_SECRET_ENV"); + /// } + /// + /// int main() { + /// printf("%s", get_secret_env()); + /// return 0; + /// } + /// ``` + /// + /// The iterator will only return **one function** for `main` since + /// `get_secret_env` is inlined and thus, its implementation is located in + /// `main`. + functions_it functions() const; + + /// Return an iterator over the functions **imported** in this compilation + /// unit **but not** implemented. + /// + /// For instance with this code: + /// + /// ```cpp + /// #include + /// int main() { + /// printf("Hello\n"); + /// return 0; + /// } + /// ``` + /// + /// `printf` is imported from the standard libc so the function is returned by + /// the iterator. On the other hand, `main()` is implemented in this + /// compilation unit so it is not returned by imported_function() but + /// functions(). + functions_it imported_functions() const; + + /// Return an iterator over the different types defined in this + /// compilation unit. 
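Taken together, the CompilationUnit accessors above read most easily in a short usage sketch. The following is a minimal, hypothetical helper (not part of the vendored header); it assumes the template arguments stripped from the declarations above resolve to the obvious ones, i.e. the function iterators yield `std::unique_ptr<Function>` elements:

```cpp
#include <cstdio>

#include "LIEF/DWARF.hpp"  // umbrella header added earlier in this patch

// Hypothetical helper: dump a few attributes of a compilation unit.
void dump_compilation_unit(const LIEF::dwarf::CompilationUnit& cu) {
  printf("CU: %s (produced by: %s)\n", cu.name().c_str(), cu.producer().c_str());
  printf("  compiled in: %s\n", cu.compilation_dir().c_str());

  // Only functions with a concrete implementation in this CU:
  for (const auto& func : cu.functions()) {
    printf("  implemented: %s\n", func->name().c_str());
  }

  // Functions referenced here but implemented elsewhere (e.g. libc's printf):
  for (const auto& func : cu.imported_functions()) {
    printf("  imported:    %s\n", func->name().c_str());
  }
}
```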
+ types_it types() const; + + + /// Return an iterator over all the variables defined in the this compilation + /// unit: + /// + /// ```cpp + /// static int A = 1; // Returned by the iterator + /// static const char* B = "Hello"; // Returned by the iterator + /// + /// int get() { + /// static int C = 2; // Returned by the iterator + /// return C; + /// } + /// ``` + vars_it variables() const; + + private: + std::unique_ptr impl_; +}; + +} +} +#endif + diff --git a/deps/LIEF/include/LIEF/DWARF/DebugInfo.hpp b/deps/LIEF/include/LIEF/DWARF/DebugInfo.hpp new file mode 100644 index 00000000000000..ac76755b7906d3 --- /dev/null +++ b/deps/LIEF/include/LIEF/DWARF/DebugInfo.hpp @@ -0,0 +1,96 @@ +/* Copyright 2022 - 2025 R. Thomas + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_DWARF_INFO_H +#define LIEF_DWARF_INFO_H +#include +#include + +#include "LIEF/iterators.hpp" +#include "LIEF/Abstract/DebugInfo.hpp" +#include "LIEF/DWARF/CompilationUnit.hpp" + +#include "LIEF/visibility.h" + +namespace LIEF { +/// Namespace for the DWARF debug format +namespace dwarf { +class Function; +class Variable; + +/// This class represents a DWARF debug information. It can embed different +/// compilation units which can be accessed through compilation_units() . +/// +/// This class can be instantiated from LIEF::Binary::debug_info() or load() +class LIEF_API DebugInfo : public LIEF::DebugInfo { + public: + using LIEF::DebugInfo::DebugInfo; + + static std::unique_ptr from_file(const std::string& path); + + /// Iterator over the CompilationUnit + using compilation_units_it = iterator_range; + + /// Try to find the function with the given name (mangled or not) + /// + /// ```cpp + /// const DebugInfo& info = ...; + /// if (auto func = info.find_function("_ZNSt6localeD1Ev")) { + /// // Found + /// } + /// if (auto func = info.find_function("std::locale::~locale()")) { + /// // Found + /// } + /// ``` + std::unique_ptr find_function(const std::string& name) const; + + /// Try to find the function at the given **virtual** address + std::unique_ptr find_function(uint64_t addr) const; + + /// Try to find the variable with the given name. This name can be mangled or + /// not. + std::unique_ptr find_variable(const std::string& name) const; + + /// Try to find the variable at the given **virtual** address + std::unique_ptr find_variable(uint64_t addr) const; + + /// Try to find the type with the given name + std::unique_ptr find_type(const std::string& name) const; + + /// Iterator on the CompilationUnit embedded in this dwarf + compilation_units_it compilation_units() const; + + /// Attempt to resolve the address of the function specified by `name`. 
+ optional find_function_address(const std::string& name) const override; + + FORMAT format() const override { + return LIEF::DebugInfo::FORMAT::DWARF; + } + + static bool classof(const LIEF::DebugInfo* info) { + return info->format() == LIEF::DebugInfo::FORMAT::DWARF; + } + + ~DebugInfo() override = default; +}; + + +/// Load DWARF file from the given path +inline std::unique_ptr load(const std::string& dwarf_path) { + return DebugInfo::from_file(dwarf_path); +} + +} +} +#endif diff --git a/deps/LIEF/include/LIEF/DWARF/Editor.hpp b/deps/LIEF/include/LIEF/DWARF/Editor.hpp new file mode 100644 index 00000000000000..3c63c13833894a --- /dev/null +++ b/deps/LIEF/include/LIEF/DWARF/Editor.hpp @@ -0,0 +1,58 @@ +/* Copyright 2022 - 2025 R. Thomas + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_DWARF_EDITOR_H +#define LIEF_DWARF_EDITOR_H +#include +#include + +#include "LIEF/visibility.h" + +namespace LIEF { +class Binary; +namespace dwarf { + +namespace details { +class Editor; +} + +namespace editor { +class CompilationUnit; +} + +/// This class exposes the main API to create DWARF information +class LIEF_API Editor { + public: + Editor() = delete; + Editor(std::unique_ptr impl); + + /// Instantiate an editor for the given binary object + static std::unique_ptr from_binary(LIEF::Binary& bin); + + /// Create a new compilation unit + std::unique_ptr create_compilation_unit(); + + /// Write the DWARF file to the specified output + void write(const std::string& output); + + ~Editor(); + + private: + std::unique_ptr impl_; + +}; + +} +} +#endif diff --git a/deps/LIEF/include/LIEF/DWARF/Function.hpp b/deps/LIEF/include/LIEF/DWARF/Function.hpp new file mode 100644 index 00000000000000..495cbadac72317 --- /dev/null +++ b/deps/LIEF/include/LIEF/DWARF/Function.hpp @@ -0,0 +1,180 @@ +/* Copyright 2022 - 2025 R. Thomas + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_DWARF_FUNCTION_H +#define LIEF_DWARF_FUNCTION_H + +#include +#include + +#include "LIEF/visibility.h" +#include "LIEF/errors.hpp" +#include "LIEF/iterators.hpp" +#include "LIEF/range.hpp" +#include "LIEF/DWARF/Variable.hpp" +#include "LIEF/DWARF/Type.hpp" +#include "LIEF/asm/Instruction.hpp" + +namespace LIEF { +namespace dwarf { + +class Scope; +class Parameter; + +namespace details { +class Function; +class FunctionIt; +} + +/// This class represents a DWARF function which can be associated with either: +/// `DW_TAG_subprogram` or `DW_TAG_inlined_subroutine`. 
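The DebugInfo entry points above (`load()`, `find_function_address()`, `compilation_units()`) combine into a minimal sketch. The return types whose template arguments were stripped in this hunk are assumed to be `std::unique_ptr<DebugInfo>` and `optional<uint64_t>`, and the compilation-unit iterator is assumed to yield `std::unique_ptr<CompilationUnit>` elements:

```cpp
#include <cstdio>

#include "LIEF/DWARF.hpp"

int main(int argc, char** argv) {
  if (argc < 2) {
    return 1;
  }

  // load() is the inline helper declared above; it forwards to
  // DebugInfo::from_file(). Assumed to return std::unique_ptr<DebugInfo>.
  auto info = LIEF::dwarf::load(argv[1]);
  if (info == nullptr) {
    return 1;
  }

  // Lookup works with mangled or demangled names, as documented above.
  if (auto addr = info->find_function_address("main")) {
    printf("main resolved at 0x%llx\n", (unsigned long long)*addr);
  }

  // Walk every compilation unit embedded in the DWARF information.
  for (const auto& cu : info->compilation_units()) {
    printf("CU: %s\n", cu->name().c_str());
  }
  return 0;
}
```

As a standalone DWARF file is accepted by `from_file()`, the same sketch works for a split debug file as well as for debug info extracted from a binary.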
+class LIEF_API Function { + public: + class LIEF_API Iterator { + public: + using iterator_category = std::bidirectional_iterator_tag; + using value_type = std::unique_ptr; + using difference_type = std::ptrdiff_t; + using pointer = Function*; + using reference = std::unique_ptr&; + using implementation = details::FunctionIt; + + class LIEF_API PointerProxy { + // Inspired from LLVM's iterator_facade_base + friend class Iterator; + public: + pointer operator->() const { return R.get(); } + + private: + value_type R; + + template + PointerProxy(RefT &&R) : R(std::forward(R)) {} // NOLINT(bugprone-forwarding-reference-overload) + }; + + Iterator(const Iterator&); + Iterator(Iterator&&) noexcept; + Iterator(std::unique_ptr impl); + ~Iterator(); + + friend LIEF_API bool operator==(const Iterator& LHS, const Iterator& RHS); + + friend LIEF_API bool operator!=(const Iterator& LHS, const Iterator& RHS) { + return !(LHS == RHS); + } + + Iterator& operator++(); + Iterator& operator--(); + + Iterator operator--(int) { + Iterator tmp = *static_cast(this); + --*static_cast(this); + return tmp; + } + + Iterator operator++(int) { + Iterator tmp = *static_cast(this); + ++*static_cast(this); + return tmp; + } + + std::unique_ptr operator*() const; + + PointerProxy operator->() const { + return static_cast(this)->operator*(); + } + + private: + std::unique_ptr impl_; + }; + + /// Iterator over the variables defined in the scope of this function + using vars_it = iterator_range; + using parameters_t = std::vector>; + using thrown_types_t = std::vector>; + + using instructions_it = iterator_range; + + Function(std::unique_ptr impl); + + /// The name of the function (`DW_AT_name`) + std::string name() const; + + /// The name of the function which is used for linking (`DW_AT_linkage_name`). + /// + /// This name differs from name() as it is usually mangled. The function + /// return an empty string if the linkage name is not available. + std::string linkage_name() const; + + /// Return the address of the function (`DW_AT_entry_pc` or `DW_AT_low_pc`). + result address() const; + + /// Return an iterator of variables (`DW_TAG_variable`) defined within the + /// scope of this function. This includes regular stack-based variables as + /// well as static ones. + vars_it variables() const; + + /// Whether this function is created by the compiler and not + /// present in the original source code + bool is_artificial() const; + + /// Whether the function is defined **outside** the current compilation unit + /// (`DW_AT_external`). + bool is_external() const; + + /// Return the size taken by this function in the binary + uint64_t size() const; + + /// Ranges of virtual addresses owned by this function + std::vector ranges() const; + + /// Original source code location + debug_location_t debug_location() const; + + /// Return the dwarf::Type associated with the **return type** of this + /// function + std::unique_ptr type() const; + + /// Return the function's parameters (including any template parameter) + parameters_t parameters() const; + + /// List of exceptions (types) that can be thrown by the function. + /// + /// For instance, given this Swift code: + /// + /// ```swift + /// func summarize(_ ratings: [Int]) throws(StatisticsError) { + /// // ... + /// } + /// ``` + /// + /// thrown_types() returns one element associated with the Type: + /// `StatisticsError`. 
+ thrown_types_t thrown_types() const; + + /// Return the scope in which this function is defined + std::unique_ptr scope() const; + + /// Disassemble the current function by returning an iterator over + /// the assembly::Instruction + instructions_it instructions() const; + + ~Function(); + private: + std::unique_ptr impl_; +}; + +} +} +#endif diff --git a/deps/LIEF/include/LIEF/DWARF/Parameter.hpp b/deps/LIEF/include/LIEF/DWARF/Parameter.hpp new file mode 100644 index 00000000000000..9fa7c1211deb86 --- /dev/null +++ b/deps/LIEF/include/LIEF/DWARF/Parameter.hpp @@ -0,0 +1,146 @@ +/* Copyright 2022 - 2025 R. Thomas + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_DWARF_PARAMETER_H +#define LIEF_DWARF_PARAMETER_H + +#include "LIEF/visibility.h" + +#include +#include + +namespace LIEF { +namespace dwarf { + +class Type; + +namespace details { +class Parameter; +} + +/// This class represents a DWARF parameter which can be either: +/// - A regular function parameter (see: parameters::Formal) +/// - A template type parameter (see: parameters::TemplateType) +/// - A template value parameter (see: parameters::TemplateValue) +class LIEF_API Parameter { + public: + enum class KIND { + UNKNOWN = 0, + TEMPLATE_TYPE, ///< DW_TAG_template_type_parameter + TEMPLATE_VALUE, ///< DW_TAG_template_value_parameter + FORMAL, ///< DW_TAG_formal_parameter + }; + Parameter() = delete; + Parameter(Parameter&& other); + Parameter& operator=(Parameter&& other); + Parameter& operator=(const Parameter&) = delete; + Parameter(const Parameter&) = delete; + + KIND kind() const; + + /// Name of the parameter + std::string name() const; + + /// Type of this parameter + std::unique_ptr type() const; + + template + const T* as() const { + if (T::classof(this)) { + return static_cast(this); + } + return nullptr; + } + + virtual ~Parameter(); + + LIEF_LOCAL static + std::unique_ptr create(std::unique_ptr impl); + + protected: + Parameter(std::unique_ptr impl); + std::unique_ptr impl_; +}; + +namespace parameters { + +/// This class represents a regular function parameter. +/// +/// For instance, given this prototype: +/// +/// ```cpp +/// int main(int argc, const char** argv); +/// ``` +/// +/// The function `main` has two parameters::Formal parameters: +/// +/// 1. `argc` (Parameter::name) typed as `int` (types::Base from Parameter::type) +/// 2. `argv` (Parameter::name) typed as `const char**` +/// (types::Const from Parameter::type) +class LIEF_API Formal : public Parameter { + public: + using Parameter::Parameter; + static bool classof(const Parameter* P) { + return P->kind() == Parameter::KIND::FORMAL; + } + + ~Formal() override = default; +}; + + +/// This class represents a template **value** parameter. +/// +/// For instance, given this prototype: +/// +/// ```cpp +/// template +/// void generic(); +/// ``` +/// +/// The function `generic` has one parameters::TemplateValue parameter: `X`. 
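A minimal sketch of how the Function and Parameter accessors above fit together; `dump_function` is a hypothetical helper, and the stripped `result<>`/`unique_ptr<>` template arguments are assumed to be the obvious ones (`address()` returning `result<uint64_t>`, `parameters()` returning a vector of `std::unique_ptr<Parameter>`):

```cpp
#include <cstdio>

#include "LIEF/DWARF.hpp"

// Print a short summary of a dwarf::Function.
void dump_function(const LIEF::dwarf::Function& func) {
  printf("%s (linkage: %s)\n",
         func.name().c_str(), func.linkage_name().c_str());

  // address() can fail (e.g. for a purely inlined subroutine).
  if (auto addr = func.address()) {
    printf("  entry: 0x%llx, size: %llu bytes\n",
           (unsigned long long)*addr,
           (unsigned long long)func.size());
  }

  // parameters() mixes formal and template parameters; filter with as<>().
  for (const auto& param : func.parameters()) {
    if (param->as<LIEF::dwarf::parameters::Formal>() != nullptr) {
      printf("  formal parameter: %s\n", param->name().c_str());
    }
  }
}
```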
+class LIEF_API TemplateValue : public Parameter { + public: + using Parameter::Parameter; + static bool classof(const Parameter* P) { + return P->kind() == Parameter::KIND::TEMPLATE_VALUE; + } + + ~TemplateValue() override = default; +}; + +/// This class represents a template **type** parameter. +/// +/// For instance, given this prototype: +/// +/// ```cpp +/// template +/// void generic(); +/// ``` +/// +/// The function `generic` has one parameters::TemplateType parameter: `Y`. +class LIEF_API TemplateType : public Parameter { +public: + using Parameter::Parameter; + static bool classof(const Parameter* P) { + return P->kind() == Parameter::KIND::TEMPLATE_TYPE; + } + + ~TemplateType() override = default; +}; + +} + +} +} +#endif diff --git a/deps/LIEF/include/LIEF/DWARF/Scope.hpp b/deps/LIEF/include/LIEF/DWARF/Scope.hpp new file mode 100644 index 00000000000000..61977c21aec5e6 --- /dev/null +++ b/deps/LIEF/include/LIEF/DWARF/Scope.hpp @@ -0,0 +1,66 @@ +/* Copyright 2022 - 2025 R. Thomas + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_DWARF_SCOPE_H +#define LIEF_DWARF_SCOPE_H + +#include +#include +#include + +#include "LIEF/visibility.h" + +namespace LIEF { +namespace dwarf { + +namespace details { +class Scope; +} + +/// This class materializes a scope in which Function, Variable, Type, ... +/// can be defined. +class LIEF_API Scope { + public: + enum class TYPE : uint32_t { + UNKNOWN = 0, + UNION, + CLASS, + STRUCT, + NAMESPACE, + FUNCTION, + COMPILATION_UNIT, + }; + Scope(std::unique_ptr impl); + + /// Name of the scope. For instance namespace's name or function's name. + std::string name() const; + + /// Parent scope (if any) + std::unique_ptr parent() const; + + /// The current scope type + TYPE type() const; + + /// Represent the whole chain of all (parent) scopes using the provided + /// separator. E.g. `ns1::ns2::Class1::Struct2::Type` + std::string chained(const std::string& sep = "::") const; + + ~Scope(); + private: + std::unique_ptr impl_; +}; + +} +} +#endif diff --git a/deps/LIEF/include/LIEF/DWARF/Type.hpp b/deps/LIEF/include/LIEF/DWARF/Type.hpp new file mode 100644 index 00000000000000..9e13e896c0d782 --- /dev/null +++ b/deps/LIEF/include/LIEF/DWARF/Type.hpp @@ -0,0 +1,198 @@ +/* Copyright 2022 - 2025 R. Thomas + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#ifndef LIEF_DWARF_TYPE_H +#define LIEF_DWARF_TYPE_H + +#include + +#include "LIEF/visibility.h" +#include "LIEF/errors.hpp" +#include "LIEF/debug_loc.hpp" +#include "LIEF/canbe_unique.hpp" + +namespace LIEF { +namespace dwarf { +class Scope; + +namespace details { +class Type; +class TypeIt; +} + +/// This class represents a DWARF Type which includes: +/// +/// - `DW_TAG_array_type` +/// - `DW_TAG_atomic_type` +/// - `DW_TAG_base_type` +/// - `DW_TAG_class_type` +/// - `DW_TAG_coarray_type` +/// - `DW_TAG_const_type` +/// - `DW_TAG_dynamic_type` +/// - `DW_TAG_enumeration_type` +/// - `DW_TAG_file_type` +/// - `DW_TAG_immutable_type` +/// - `DW_TAG_interface_type` +/// - `DW_TAG_packed_type` +/// - `DW_TAG_pointer_type` +/// - `DW_TAG_ptr_to_member_type` +/// - `DW_TAG_reference_type` +/// - `DW_TAG_restrict_type` +/// - `DW_TAG_rvalue_reference_type` +/// - `DW_TAG_set_type` +/// - `DW_TAG_shared_type` +/// - `DW_TAG_string_type` +/// - `DW_TAG_structure_type` +/// - `DW_TAG_subroutine_type` +/// - `DW_TAG_template_alias` +/// - `DW_TAG_thrown_type` +/// - `DW_TAG_typedef` +/// - `DW_TAG_union_type` +/// - `DW_TAG_unspecified_type` +/// - `DW_TAG_volatile_type` +class LIEF_API Type { + public: + class LIEF_API Iterator { + public: + using iterator_category = std::bidirectional_iterator_tag; + using value_type = std::unique_ptr; + using difference_type = std::ptrdiff_t; + using pointer = Type*; + using reference = std::unique_ptr&; + using implementation = details::TypeIt; + + class LIEF_API PointerProxy { + // Inspired from LLVM's iterator_facade_base + friend class Iterator; + public: + pointer operator->() const { return R.get(); } + + private: + value_type R; + + template + PointerProxy(RefT &&R) : R(std::forward(R)) {} // NOLINT(bugprone-forwarding-reference-overload) + }; + + Iterator(const Iterator&); + Iterator(Iterator&&) noexcept; + Iterator(std::unique_ptr impl); + ~Iterator(); + + friend LIEF_API bool operator==(const Iterator& LHS, const Iterator& RHS); + friend LIEF_API bool operator!=(const Iterator& LHS, const Iterator& RHS) { + return !(LHS == RHS); + } + + Iterator& operator++(); + Iterator& operator--(); + + Iterator operator--(int) { + Iterator tmp = *static_cast(this); + --*static_cast(this); + return tmp; + } + + Iterator operator++(int) { + Iterator tmp = *static_cast(this); + ++*static_cast(this); + return tmp; + } + + std::unique_ptr operator*() const; + + PointerProxy operator->() const { + return static_cast(this)->operator*(); + } + + private: + std::unique_ptr impl_; + }; + + virtual ~Type(); + + enum class KIND { + UNKNOWN = 0, + UNSPECIFIED, + BASE, + CONST_KIND, + CLASS, + ARRAY, + POINTER, + STRUCT, + UNION, + TYPEDEF, + REF, + SET_TYPE, + STRING, + SUBROUTINE, + POINTER_MEMBER, + PACKED, + FILE, + THROWN, + VOLATILE, + RESTRICT, + INTERFACE, + SHARED, + RVALREF, + TEMPLATE_ALIAS, + COARRAY, + DYNAMIC, + ATOMIC, + IMMUTABLE, + ENUM, + }; + + KIND kind() const; + + /// Whether this type is a `DW_TAG_unspecified_type` + bool is_unspecified() const { + return kind() == KIND::UNSPECIFIED; + } + + /// Return the type's name using either `DW_AT_name` or `DW_AT_picture_string` + /// (if any). + result name() const; + + /// Return the size of the type or an error if it can't be computed. + /// + /// This size should match the equivalent of `sizeof(Type)`. + result size() const; + + /// Return the debug location where this type is defined. 
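A minimal sketch using the Type accessors declared above; `dump_type` is a hypothetical helper, and `name()`/`size()` are assumed to return LIEF's usual `result<>` wrapper with the template arguments stripped from this hunk (`std::string` and `uint64_t`):

```cpp
#include <cstdio>

#include "LIEF/DWARF.hpp"

// Inspect a dwarf::Type with the accessors documented above.
void dump_type(const LIEF::dwarf::Type& type) {
  if (auto name = type.name()) {
    printf("type: %s\n", name->c_str());
  }
  if (auto size = type.size()) {
    // Matches the documented "equivalent of sizeof(Type)".
    printf("  size: %llu bytes\n", (unsigned long long)*size);
  }
  if (type.kind() == LIEF::dwarf::Type::KIND::STRUCT) {
    printf("  tagged as DW_TAG_structure_type\n");
  }
}
```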
+ debug_location_t location() const; + + /// Return the scope in which this type is defined + std::unique_ptr scope() const; + + template + const T* as() const { + if (T::classof(this)) { + return static_cast(this); + } + return nullptr; + } + + static std::unique_ptr create(std::unique_ptr impl); + + protected: + Type(std::unique_ptr impl); + Type(details::Type& impl); + + LIEF::details::canbe_unique impl_; +}; + +} +} +#endif diff --git a/deps/LIEF/include/LIEF/DWARF/Variable.hpp b/deps/LIEF/include/LIEF/DWARF/Variable.hpp new file mode 100644 index 00000000000000..dd457f45d6ea8e --- /dev/null +++ b/deps/LIEF/include/LIEF/DWARF/Variable.hpp @@ -0,0 +1,141 @@ +/* Copyright 2022 - 2025 R. Thomas + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_DWARF_VARIABLE_H +#define LIEF_DWARF_VARIABLE_H + +#include + +#include "LIEF/visibility.h" +#include "LIEF/errors.hpp" +#include "LIEF/debug_loc.hpp" +#include "LIEF/DWARF/Type.hpp" + +namespace LIEF { +namespace dwarf { +class Scope; + +namespace details { +class Variable; +class VariableIt; +} + +/// This class represents a DWARF variable which can be owned by a +/// dwarf::Function or a dwarf::CompilationUnit +class LIEF_API Variable { + public: + class LIEF_API Iterator { + public: + using iterator_category = std::bidirectional_iterator_tag; + using value_type = std::unique_ptr; + using difference_type = std::ptrdiff_t; + using pointer = Variable*; + using reference = std::unique_ptr&; + using implementation = details::VariableIt; + + class LIEF_API PointerProxy { + // Inspired from LLVM's iterator_facade_base + friend class Iterator; + public: + pointer operator->() const { return R.get(); } + + private: + value_type R; + + template + PointerProxy(RefT &&R) : R(std::forward(R)) {} // NOLINT(bugprone-forwarding-reference-overload) + }; + + Iterator(const Iterator&); + Iterator(Iterator&&) noexcept; + Iterator(std::unique_ptr impl); + ~Iterator(); + + friend LIEF_API bool operator==(const Iterator& LHS, const Iterator& RHS); + friend LIEF_API bool operator!=(const Iterator& LHS, const Iterator& RHS) { + return !(LHS == RHS); + } + + Iterator& operator++(); + Iterator& operator--(); + + Iterator operator--(int) { + Iterator tmp = *static_cast(this); + --*static_cast(this); + return tmp; + } + + Iterator operator++(int) { + Iterator tmp = *static_cast(this); + ++*static_cast(this); + return tmp; + } + + std::unique_ptr operator*() const; + + PointerProxy operator->() const { + return static_cast(this)->operator*(); + } + + private: + std::unique_ptr impl_; + }; + + Variable(std::unique_ptr impl); + + /// Name of the variable (usually demangled) + std::string name() const; + + /// The name of the variable which is used for linking (`DW_AT_linkage_name`). + /// + /// This name differs from name() as it is usually mangled. The function + /// return an empty string if the linkage name is not available. + std::string linkage_name() const; + + /// Address of the variable. 
+ /// + /// If the variable is **static**, it returns the **virtual address** + /// where it is defined. + /// If the variable is stack-based, it returns the **relative offset** from + /// the frame based register. + /// + /// If the address can't be resolved, it returns a lief_errors. + result address() const; + + /// Return the size of the variable (or a lief_errors if it can't be + /// resolved). + /// + /// This size is defined by its type. + result size() const; + + /// Whether it's a `constexpr` variable + bool is_constexpr() const; + + /// The original source location where the variable is defined. + debug_location_t debug_location() const; + + /// Return the type of this variable + std::unique_ptr type() const; + + /// Return the scope in which this variable is defined + std::unique_ptr scope() const; + + ~Variable(); + private: + std::unique_ptr impl_; +}; + +} +} +#endif diff --git a/deps/LIEF/include/LIEF/DWARF/editor/ArrayType.hpp b/deps/LIEF/include/LIEF/DWARF/editor/ArrayType.hpp new file mode 100644 index 00000000000000..2116dd9666e4b8 --- /dev/null +++ b/deps/LIEF/include/LIEF/DWARF/editor/ArrayType.hpp @@ -0,0 +1,38 @@ +/* Copyright 2022 - 2025 R. Thomas + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_DWARF_EDITOR_ARRAY_TYPE_H +#define LIEF_DWARF_EDITOR_ARRAY_TYPE_H + +#include "LIEF/visibility.h" +#include "LIEF/DWARF/editor/Type.hpp" + +namespace LIEF { +namespace dwarf { +namespace editor { + +/// This class represents an array type (`DW_TAG_array_type`). +class LIEF_API ArrayType : public Type { + public: + using Type::Type; + + static bool classof(const Type* type); + + ~ArrayType() override = default; +}; + +} +} +} +#endif diff --git a/deps/LIEF/include/LIEF/DWARF/editor/BaseType.hpp b/deps/LIEF/include/LIEF/DWARF/editor/BaseType.hpp new file mode 100644 index 00000000000000..f06db195b0aef7 --- /dev/null +++ b/deps/LIEF/include/LIEF/DWARF/editor/BaseType.hpp @@ -0,0 +1,50 @@ +/* Copyright 2022 - 2025 R. Thomas + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_DWARF_EDITOR_BASE_TYPE_H +#define LIEF_DWARF_EDITOR_BASE_TYPE_H + +#include +#include "LIEF/visibility.h" +#include "LIEF/DWARF/editor/Type.hpp" + +namespace LIEF { +namespace dwarf { +namespace editor { + +/// This class represents a primitive type like `int, char`. 
+class LIEF_API BaseType : public Type { + public: + using Type::Type; + + enum class ENCODING : uint32_t { + NONE = 0, + ADDRESS, + SIGNED, + SIGNED_CHAR, + UNSIGNED, + UNSIGNED_CHAR, + BOOLEAN, + FLOAT + }; + + static bool classof(const Type* type); + + ~BaseType() override = default; +}; + +} +} +} +#endif diff --git a/deps/LIEF/include/LIEF/DWARF/editor/CompilationUnit.hpp b/deps/LIEF/include/LIEF/DWARF/editor/CompilationUnit.hpp new file mode 100644 index 00000000000000..b6b00714d0de5f --- /dev/null +++ b/deps/LIEF/include/LIEF/DWARF/editor/CompilationUnit.hpp @@ -0,0 +1,100 @@ +/* Copyright 2022 - 2025 R. Thomas + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_DWARF_EDITOR_COMPILATION_UNIT_H +#define LIEF_DWARF_EDITOR_COMPILATION_UNIT_H +#include +#include + +#include "LIEF/visibility.h" +#include "LIEF/DWARF/editor/StructType.hpp" +#include "LIEF/DWARF/editor/BaseType.hpp" +#include "LIEF/DWARF/editor/FunctionType.hpp" +#include "LIEF/DWARF/editor/PointerType.hpp" + +namespace LIEF { +namespace dwarf { +namespace editor { +class Function; +class Variable; +class Type; +class EnumType; +class TypeDef; +class ArrayType; + +namespace details { +class CompilationUnit; +} + +/// This class represents an **editable** DWARF compilation unit +class LIEF_API CompilationUnit { + public: + CompilationUnit() = delete; + CompilationUnit(std::unique_ptr impl); + + /// Set the `DW_AT_producer` producer attribute. + /// + /// This attribute aims to inform about the program that generated this + /// compilation unit (e.g. `LIEF Extended`) + CompilationUnit& set_producer(const std::string& producer); + + /// Create a new function owned by this compilation unit + std::unique_ptr create_function(const std::string& name); + + /// Create a new **global** variable owned by this compilation unit + std::unique_ptr create_variable(const std::string& name); + + /// Create a `DW_TAG_unspecified_type` type with the given name + std::unique_ptr create_generic_type(const std::string& name); + + /// Create an enum type (`DW_TAG_enumeration_type`) + std::unique_ptr create_enum(const std::string& name); + + /// Create a typdef with the name provided in the first parameter which aliases + /// the type provided in the second parameter + std::unique_ptr create_typedef(const std::string& name, const Type& type); + + /// Create a struct-like type (struct, class, union) with the given name. + std::unique_ptr create_structure( + const std::string& name, StructType::TYPE kind = StructType::TYPE::STRUCT); + + /// Create a primitive type with the given name and size. + std::unique_ptr create_base_type(const std::string& name, size_t size, + BaseType::ENCODING encoding = BaseType::ENCODING::NONE); + + /// Create a function type with the given name. 
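The editor::CompilationUnit factories above can be sketched end-to-end together with the Editor class from `LIEF/DWARF/Editor.hpp`. `emit_debug_info` is a hypothetical helper, `bin` is assumed to come from one of LIEF's parsers (outside this patch), and the stripped `unique_ptr` template arguments are assumed to name the corresponding editor types:

```cpp
#include <string>

#include "LIEF/DWARF.hpp"

// Synthesize a small compilation unit of DWARF info for an existing binary
// and write it to `output`, using only the factory methods declared above.
void emit_debug_info(LIEF::Binary& bin, const std::string& output) {
  auto editor = LIEF::dwarf::Editor::from_binary(bin);
  auto cu = editor->create_compilation_unit();
  cu->set_producer("LIEF example producer");

  // A primitive type, an enum and a struct-like type.
  auto u32 = cu->create_base_type("uint32_t", /*size=*/4,
                                  LIEF::dwarf::editor::BaseType::ENCODING::UNSIGNED);
  auto state = cu->create_enum("state_t");
  auto config = cu->create_structure("config_t");

  // A global variable and a function owned by this compilation unit.
  auto g_config = cu->create_variable("g_config");
  auto parse = cu->create_function("parse_config");

  editor->write(output);
}
```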
+ std::unique_ptr create_function_type(const std::string& name); + + /// Create a pointer on the provided type + std::unique_ptr create_pointer_type(const Type& ty) { + return ty.pointer_to(); + } + + /// Create a `void` type + std::unique_ptr create_void_type(); + + /// Create an array type with the given name, type and size. + std::unique_ptr + create_array(const std::string& name, const Type& type, size_t count); + + ~CompilationUnit(); + + private: + std::unique_ptr impl_; +}; + +} +} +} +#endif diff --git a/deps/LIEF/include/LIEF/DWARF/editor/EnumType.hpp b/deps/LIEF/include/LIEF/DWARF/editor/EnumType.hpp new file mode 100644 index 00000000000000..90d76fa0e91a4e --- /dev/null +++ b/deps/LIEF/include/LIEF/DWARF/editor/EnumType.hpp @@ -0,0 +1,62 @@ +/* Copyright 2022 - 2025 R. Thomas + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LIEF_DWARF_EDITOR_ENUM_TYPE_H +#define LIEF_DWARF_EDITOR_ENUM_TYPE_H +#include +#include + +#include "LIEF/visibility.h" +#include "LIEF/DWARF/editor/Type.hpp" + +namespace LIEF { +namespace dwarf { +namespace editor { + +namespace details { +class EnumValue; +} + +/// This class represents an editable enum type (`DW_TAG_enumeration_type`) +class LIEF_API EnumType : public Type { + public: + using Type::Type; + + /// This class represents an enum value + class LIEF_API Value { + public: + Value() = delete; + Value(std::unique_ptr impl); + + ~Value(); + private: + std::unique_ptr impl_; + }; + + /// Define the number of bytes required to hold an instance of the + /// enumeration (`DW_AT_byte_size`). + EnumType& set_size(uint64_t size); + + /// Add an enum value by specifying its name and its integer value + std::unique_ptr add_value(const std::string& name, int64_t value); + + static bool classof(const Type* type); + + ~EnumType() override = default; +}; + +} +} +} +#endif diff --git a/deps/LIEF/include/LIEF/DWARF/editor/Function.hpp b/deps/LIEF/include/LIEF/DWARF/editor/Function.hpp new file mode 100644 index 00000000000000..e91f64139c055e --- /dev/null +++ b/deps/LIEF/include/LIEF/DWARF/editor/Function.hpp @@ -0,0 +1,129 @@ +/* Copyright 2022 - 2025 R. Thomas + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#ifndef LIEF_DWARF_EDITOR_FUNCTION_H +#define LIEF_DWARF_EDITOR_FUNCTION_H +#include +#include +#include +#include + +#include "LIEF/visibility.h" + +namespace LIEF { +namespace dwarf { +namespace editor { +class Type; +class Variable; + +namespace details { +class Function; +class FunctionParameter; +class FunctionLexicalBlock; +class FunctionLabel; +} + +/// This class represents an **editable** DWARF function (`DW_TAG_subprogram`) +class LIEF_API Function { + public: + struct LIEF_API range_t { + range_t() = default; + range_t(uint64_t start, uint64_t end) : + start(start), end(end) + {} + uint64_t start = 0; + uint64_t end = 0; + }; + + /// This class represents a parameter of the current function (`DW_TAG_formal_parameter`) + class LIEF_API Parameter { + public: + Parameter() = delete; + Parameter(std::unique_ptr impl); + + ~Parameter(); + private: + std::unique_ptr impl_; + }; + + /// This class mirrors the `DW_TAG_lexical_block` DWARF tag + class LIEF_API LexicalBlock { + public: + LexicalBlock() = delete; + LexicalBlock(std::unique_ptr impl); + + ~LexicalBlock(); + private: + std::unique_ptr impl_; + }; + + /// This class mirrors the `DW_TAG_label` DWARF tag + class LIEF_API Label { + public: + Label() = delete; + Label(std::unique_ptr impl); + + ~Label(); + private: + std::unique_ptr impl_; + }; + + Function() = delete; + Function(std::unique_ptr impl); + + /// Set the address of this function by defining `DW_AT_entry_pc` + Function& set_address(uint64_t addr); + + /// Set the upper and lower bound addresses for this function. This assumes + /// that the function is contiguous between `low` and `high`. + /// + /// Underneath, the function defines `DW_AT_low_pc` and `DW_AT_high_pc` + Function& set_low_high(uint64_t low, uint64_t high); + + /// Set the ranges of addresses owned by the implementation of this function + /// by setting the `DW_AT_ranges` attribute. + /// + /// This setter should be used for non-contiguous function. + Function& set_ranges(const std::vector& ranges); + + /// Set the function as external by defining `DW_AT_external` to true. + /// This means that the function is **imported** by the current compilation + /// unit. + Function& set_external(); + + /// Set the return type of this function + Function& set_return_type(const Type& type); + + /// Add a parameter to the current function + std::unique_ptr add_parameter(const std::string& name, const Type& type); + + /// Create a stack-based variable owned by the current function + std::unique_ptr create_stack_variable(const std::string& name); + + /// Add a lexical block with the given range + std::unique_ptr add_lexical_block(uint64_t start, uint64_t end); + + /// Add a label at the given address + std::unique_ptr