diff --git a/.github/scripts/validate-openapi.js b/.github/scripts/validate-openapi.js
new file mode 100644
index 00000000..e3f0a8e8
--- /dev/null
+++ b/.github/scripts/validate-openapi.js
@@ -0,0 +1,57 @@
+const SwaggerParser = require('@apidevtools/swagger-parser');
+const glob = require('glob');
+
+// Define the files we want to validate
+const TARGET_FILES = ['transaction.yaml', 'meta.yaml', 'registry.yaml'];
+
+// Function to validate a single OpenAPI file
+async function validateOpenApiFile(filePath) {
+  console.log(`Validating ${filePath}...`);
+  try {
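+    // SwaggerParser.validate() parses the file, resolves $refs, and validates it
+    // against the OpenAPI/Swagger schema; it throws if the spec is invalid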
+ await SwaggerParser.validate(filePath);
+ console.log(`✅ Valid: ${filePath}`);
+ return true;
+ } catch (error) {
+ console.error(`❌ Invalid: ${filePath}`);
+ console.error(` Error: ${error.message}`);
+
+ if (error.path) {
+ console.error(` Location: ${error.path.join('.')}`);
+ }
+ return false;
+ }
+}
+
+// Main function
+async function main() {
+  let failures = 0;
+  let validatedFiles = 0;
+
+ // Find all target files in the repository
+ for (const targetFile of TARGET_FILES) {
+ const files = glob.sync(`**/${targetFile}`, {
+ ignore: ['**/node_modules/**', '.github/**']
+ });
+
+ console.log(`Found ${files.length} ${targetFile} files to validate`);
+
+ // Validate each file
+    for (const file of files) {
+      validatedFiles++;
+      const isValid = await validateOpenApiFile(file);
+      if (!isValid) failures++;
+ }
+ }
+
+  console.log(`\nValidation summary: ${validatedFiles} files processed, ${failures} failures found`);
+
+  // Exit with appropriate code
+ if (failures > 0) {
+ console.error(`\n❌ Validation failed for ${failures} files`);
+ process.exit(1);
+ } else {
+ console.log('\n✅ All OpenAPI specifications are valid!');
+ process.exit(0);
+ }
+}
+
+// Run the validation
+main().catch(error => {
+ console.error('An unexpected error occurred during validation:', error);
+ process.exit(1);
+});
\ No newline at end of file
diff --git a/.github/workflows/openapi-validation.yml b/.github/workflows/openapi-validation.yml
new file mode 100644
index 00000000..8bb37704
--- /dev/null
+++ b/.github/workflows/openapi-validation.yml
@@ -0,0 +1,114 @@
+name: OpenAPI Validation
+
+on:
+ pull_request:
+ branches: [ master, main ]
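+    # Only trigger when one of the validated OpenAPI spec files changes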
+ paths:
+ - '**/transaction.yaml'
+ - '**/meta.yaml'
+ - '**/registry.yaml'
+ workflow_dispatch:
+
+jobs:
+ validate-openapi:
+ name: Validate OpenAPI Specifications
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v3
+
+ - name: Setup Node.js
+ uses: actions/setup-node@v3
+ with:
+ node-version: '18'
+
+ - name: Install dependencies
+ run: |
+ npm init -y
+ npm install @apidevtools/swagger-parser glob
+
+      - name: Run validation script
+        run: node .github/scripts/validate-openapi.js
+
+ - name: Report validation status on PR
+ if: failure() && github.event_name == 'pull_request'
+ uses: actions/github-script@v6
+ with:
+ script: |
+ github.rest.issues.createComment({
+ issue_number: context.issue.number,
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ body: `⚠️ **OpenAPI Validation Failed**\n\nThe OpenAPI validation check has failed. Please check the GitHub Actions logs for details on which files failed validation and the specific errors found.`
+ });
\ No newline at end of file
diff --git a/CONTRIBUTION.md b/CONTRIBUTION.md
index 00c7a478..2151dd1d 100644
--- a/CONTRIBUTION.md
+++ b/CONTRIBUTION.md
@@ -238,3 +238,5 @@ Not all future new features will be introduced in this way. Some new features im
While governance of the specification is the role of the CWG, the evolution of the specification happens through the participation of members of the developer community at large. Any person willing to contribute to the effort is welcome, and contributions may include filing or participating in issues, creating pull requests, or helping others with such activities.
However, during any interaction with the community or its members, there is a code of conduct and a set of community guidelines that everyone is expected to adhere to. Please find the details [here](https://becknprotocol.io/community-guidelines/).
+
+
diff --git a/README.md b/README.md
index 19a71d1b..5cf99cd1 100644
--- a/README.md
+++ b/README.md
@@ -65,6 +65,16 @@ All communication using beckn protocol have the following packet structure
While beckn protocol it designed to be transport agnostic, it is conventional to use HTTP as the default transport protocol. Additional layers like security and trust can be layered on top of this protocol using exisiting standards like HTTPS and SSL. It is recommended that any platform implementing beckn protocol use HTTPS to secure its communication.
+## OpenAPI Validation
+
+This repository uses automated validation to ensure all OpenAPI specifications conform to the OpenAPI 3.0 standard. The following files are validated:
+- transaction.yaml
+- meta.yaml
+- registry.yaml
+
+When submitting a pull request, please ensure your changes pass the automated validation checks. The workflow runs automatically and reports its status on the pull request.
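+
+To run the same validation locally (assuming Node.js 18 or later is installed), something like the following should work from the repository root:
+
+```bash
+npm install @apidevtools/swagger-parser glob
+node .github/scripts/validate-openapi.js
+```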
+
+
## Communication
diff --git a/node_modules/.bin/glob b/node_modules/.bin/glob
new file mode 120000
index 00000000..85c9c1db
--- /dev/null
+++ b/node_modules/.bin/glob
@@ -0,0 +1 @@
+../glob/dist/esm/bin.mjs
\ No newline at end of file
diff --git a/node_modules/.bin/js-yaml b/node_modules/.bin/js-yaml
new file mode 120000
index 00000000..9dbd010d
--- /dev/null
+++ b/node_modules/.bin/js-yaml
@@ -0,0 +1 @@
+../js-yaml/bin/js-yaml.js
\ No newline at end of file
diff --git a/node_modules/.bin/node-which b/node_modules/.bin/node-which
new file mode 120000
index 00000000..6f8415ec
--- /dev/null
+++ b/node_modules/.bin/node-which
@@ -0,0 +1 @@
+../which/bin/node-which
\ No newline at end of file
diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json
new file mode 100644
index 00000000..be40cd58
--- /dev/null
+++ b/node_modules/.package-lock.json
@@ -0,0 +1,640 @@
+{
+ "name": "protocol-specifications",
+ "version": "1.0.0",
+ "lockfileVersion": 3,
+ "requires": true,
+ "packages": {
+ "node_modules/@apidevtools/json-schema-ref-parser": {
+ "version": "11.7.2",
+ "resolved": "https://registry.npmjs.org/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-11.7.2.tgz",
+ "integrity": "sha512-4gY54eEGEstClvEkGnwVkTkrx0sqwemEFG5OSRRn3tD91XH0+Q8XIkYIfo7IwEWPpJZwILb9GUXeShtplRc/eA==",
+ "license": "MIT",
+ "dependencies": {
+ "@jsdevtools/ono": "^7.1.3",
+ "@types/json-schema": "^7.0.15",
+ "js-yaml": "^4.1.0"
+ },
+ "engines": {
+ "node": ">= 16"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/philsturgeon"
+ }
+ },
+ "node_modules/@apidevtools/openapi-schemas": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/@apidevtools/openapi-schemas/-/openapi-schemas-2.1.0.tgz",
+ "integrity": "sha512-Zc1AlqrJlX3SlpupFGpiLi2EbteyP7fXmUOGup6/DnkRgjP9bgMM/ag+n91rsv0U1Gpz0H3VILA/o3bW7Ua6BQ==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=10"
+ }
+ },
+ "node_modules/@apidevtools/swagger-methods": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/@apidevtools/swagger-methods/-/swagger-methods-3.0.2.tgz",
+ "integrity": "sha512-QAkD5kK2b1WfjDS/UQn/qQkbwF31uqRjPTrsCs5ZG9BQGAkjwvqGFjjPqAuzac/IYzpPtRzjCP1WrTuAIjMrXg==",
+ "license": "MIT"
+ },
+ "node_modules/@apidevtools/swagger-parser": {
+ "version": "10.1.1",
+ "resolved": "https://registry.npmjs.org/@apidevtools/swagger-parser/-/swagger-parser-10.1.1.tgz",
+ "integrity": "sha512-u/kozRnsPO/x8QtKYJOqoGtC4kH6yg1lfYkB9Au0WhYB0FNLpyFusttQtvhlwjtG3rOwiRz4D8DnnXa8iEpIKA==",
+ "license": "MIT",
+ "dependencies": {
+ "@apidevtools/json-schema-ref-parser": "11.7.2",
+ "@apidevtools/openapi-schemas": "^2.1.0",
+ "@apidevtools/swagger-methods": "^3.0.2",
+ "@jsdevtools/ono": "^7.1.3",
+ "ajv": "^8.17.1",
+ "ajv-draft-04": "^1.0.0",
+ "call-me-maybe": "^1.0.2"
+ },
+ "peerDependencies": {
+ "openapi-types": ">=7"
+ }
+ },
+ "node_modules/@isaacs/cliui": {
+ "version": "8.0.2",
+ "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz",
+ "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==",
+ "license": "ISC",
+ "dependencies": {
+ "string-width": "^5.1.2",
+ "string-width-cjs": "npm:string-width@^4.2.0",
+ "strip-ansi": "^7.0.1",
+ "strip-ansi-cjs": "npm:strip-ansi@^6.0.1",
+ "wrap-ansi": "^8.1.0",
+ "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@jsdevtools/ono": {
+ "version": "7.1.3",
+ "resolved": "https://registry.npmjs.org/@jsdevtools/ono/-/ono-7.1.3.tgz",
+ "integrity": "sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg==",
+ "license": "MIT"
+ },
+ "node_modules/@pkgjs/parseargs": {
+ "version": "0.11.0",
+ "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz",
+ "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==",
+ "license": "MIT",
+ "optional": true,
+ "engines": {
+ "node": ">=14"
+ }
+ },
+ "node_modules/@types/json-schema": {
+ "version": "7.0.15",
+ "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz",
+ "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==",
+ "license": "MIT"
+ },
+ "node_modules/ajv": {
+ "version": "8.17.1",
+ "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz",
+ "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==",
+ "license": "MIT",
+ "dependencies": {
+ "fast-deep-equal": "^3.1.3",
+ "fast-uri": "^3.0.1",
+ "json-schema-traverse": "^1.0.0",
+ "require-from-string": "^2.0.2"
+ },
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/epoberezkin"
+ }
+ },
+ "node_modules/ajv-draft-04": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/ajv-draft-04/-/ajv-draft-04-1.0.0.tgz",
+ "integrity": "sha512-mv00Te6nmYbRp5DCwclxtt7yV/joXJPGS7nM+97GdxvuttCOfgI3K4U25zboyeX0O+myI8ERluxQe5wljMmVIw==",
+ "license": "MIT",
+ "peerDependencies": {
+ "ajv": "^8.5.0"
+ },
+ "peerDependenciesMeta": {
+ "ajv": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/ansi-regex": {
+ "version": "6.1.0",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz",
+ "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/ansi-regex?sponsor=1"
+ }
+ },
+ "node_modules/ansi-styles": {
+ "version": "6.2.1",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz",
+ "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/ansi-styles?sponsor=1"
+ }
+ },
+ "node_modules/argparse": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
+ "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==",
+ "license": "Python-2.0"
+ },
+ "node_modules/balanced-match": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
+ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
+ "license": "MIT"
+ },
+ "node_modules/brace-expansion": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
+ "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
+ "license": "MIT",
+ "dependencies": {
+ "balanced-match": "^1.0.0"
+ }
+ },
+ "node_modules/call-me-maybe": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/call-me-maybe/-/call-me-maybe-1.0.2.tgz",
+ "integrity": "sha512-HpX65o1Hnr9HH25ojC1YGs7HCQLq0GCOibSaWER0eNpgJ/Z1MZv2mTc7+xh6WOPxbRVcmgbv4hGU+uSQ/2xFZQ==",
+ "license": "MIT"
+ },
+ "node_modules/color-convert": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
+ "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
+ "license": "MIT",
+ "dependencies": {
+ "color-name": "~1.1.4"
+ },
+ "engines": {
+ "node": ">=7.0.0"
+ }
+ },
+ "node_modules/color-name": {
+ "version": "1.1.4",
+ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
+ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
+ "license": "MIT"
+ },
+ "node_modules/cross-spawn": {
+ "version": "7.0.6",
+ "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
+ "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==",
+ "license": "MIT",
+ "dependencies": {
+ "path-key": "^3.1.0",
+ "shebang-command": "^2.0.0",
+ "which": "^2.0.1"
+ },
+ "engines": {
+ "node": ">= 8"
+ }
+ },
+ "node_modules/eastasianwidth": {
+ "version": "0.2.0",
+ "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz",
+ "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==",
+ "license": "MIT"
+ },
+ "node_modules/emoji-regex": {
+ "version": "9.2.2",
+ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz",
+ "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==",
+ "license": "MIT"
+ },
+ "node_modules/fast-deep-equal": {
+ "version": "3.1.3",
+ "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
+ "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==",
+ "license": "MIT"
+ },
+ "node_modules/fast-uri": {
+ "version": "3.0.6",
+ "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.0.6.tgz",
+ "integrity": "sha512-Atfo14OibSv5wAp4VWNsFYE1AchQRTv9cBGWET4pZWHzYshFSS9NQI6I57rdKn9croWVMbYFbLhJ+yJvmZIIHw==",
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/fastify"
+ },
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/fastify"
+ }
+ ],
+ "license": "BSD-3-Clause"
+ },
+ "node_modules/foreground-child": {
+ "version": "3.3.1",
+ "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz",
+ "integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==",
+ "license": "ISC",
+ "dependencies": {
+ "cross-spawn": "^7.0.6",
+ "signal-exit": "^4.0.1"
+ },
+ "engines": {
+ "node": ">=14"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
+ "node_modules/glob": {
+ "version": "10.4.5",
+ "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz",
+ "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==",
+ "license": "ISC",
+ "dependencies": {
+ "foreground-child": "^3.1.0",
+ "jackspeak": "^3.1.2",
+ "minimatch": "^9.0.4",
+ "minipass": "^7.1.2",
+ "package-json-from-dist": "^1.0.0",
+ "path-scurry": "^1.11.1"
+ },
+ "bin": {
+ "glob": "dist/esm/bin.mjs"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
+ "node_modules/is-fullwidth-code-point": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
+ "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/isexe": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
+ "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
+ "license": "ISC"
+ },
+ "node_modules/jackspeak": {
+ "version": "3.4.3",
+ "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz",
+ "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==",
+ "license": "BlueOak-1.0.0",
+ "dependencies": {
+ "@isaacs/cliui": "^8.0.2"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ },
+ "optionalDependencies": {
+ "@pkgjs/parseargs": "^0.11.0"
+ }
+ },
+ "node_modules/js-yaml": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
+ "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
+ "license": "MIT",
+ "dependencies": {
+ "argparse": "^2.0.1"
+ },
+ "bin": {
+ "js-yaml": "bin/js-yaml.js"
+ }
+ },
+ "node_modules/json-schema-traverse": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz",
+ "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==",
+ "license": "MIT"
+ },
+ "node_modules/lru-cache": {
+ "version": "10.4.3",
+ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz",
+ "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==",
+ "license": "ISC"
+ },
+ "node_modules/minimatch": {
+ "version": "9.0.5",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
+ "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
+ "license": "ISC",
+ "dependencies": {
+ "brace-expansion": "^2.0.1"
+ },
+ "engines": {
+ "node": ">=16 || 14 >=14.17"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
+ "node_modules/minipass": {
+ "version": "7.1.2",
+ "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz",
+ "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==",
+ "license": "ISC",
+ "engines": {
+ "node": ">=16 || 14 >=14.17"
+ }
+ },
+ "node_modules/openapi-types": {
+ "version": "12.1.3",
+ "resolved": "https://registry.npmjs.org/openapi-types/-/openapi-types-12.1.3.tgz",
+ "integrity": "sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw==",
+ "license": "MIT",
+ "peer": true
+ },
+ "node_modules/package-json-from-dist": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz",
+ "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==",
+ "license": "BlueOak-1.0.0"
+ },
+ "node_modules/path-key": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
+ "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/path-scurry": {
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz",
+ "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==",
+ "license": "BlueOak-1.0.0",
+ "dependencies": {
+ "lru-cache": "^10.2.0",
+ "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0"
+ },
+ "engines": {
+ "node": ">=16 || 14 >=14.18"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
+ "node_modules/require-from-string": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz",
+ "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/shebang-command": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
+ "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
+ "license": "MIT",
+ "dependencies": {
+ "shebang-regex": "^3.0.0"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/shebang-regex": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
+ "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/signal-exit": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz",
+ "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==",
+ "license": "ISC",
+ "engines": {
+ "node": ">=14"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
+ "node_modules/string-width": {
+ "version": "5.1.2",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz",
+ "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==",
+ "license": "MIT",
+ "dependencies": {
+ "eastasianwidth": "^0.2.0",
+ "emoji-regex": "^9.2.2",
+ "strip-ansi": "^7.0.1"
+ },
+ "engines": {
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/string-width-cjs": {
+ "name": "string-width",
+ "version": "4.2.3",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
+ "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
+ "license": "MIT",
+ "dependencies": {
+ "emoji-regex": "^8.0.0",
+ "is-fullwidth-code-point": "^3.0.0",
+ "strip-ansi": "^6.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/string-width-cjs/node_modules/ansi-regex": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
+ "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/string-width-cjs/node_modules/emoji-regex": {
+ "version": "8.0.0",
+ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
+ "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
+ "license": "MIT"
+ },
+ "node_modules/string-width-cjs/node_modules/strip-ansi": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
+ "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
+ "license": "MIT",
+ "dependencies": {
+ "ansi-regex": "^5.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/strip-ansi": {
+ "version": "7.1.0",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz",
+ "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==",
+ "license": "MIT",
+ "dependencies": {
+ "ansi-regex": "^6.0.1"
+ },
+ "engines": {
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/strip-ansi?sponsor=1"
+ }
+ },
+ "node_modules/strip-ansi-cjs": {
+ "name": "strip-ansi",
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
+ "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
+ "license": "MIT",
+ "dependencies": {
+ "ansi-regex": "^5.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/strip-ansi-cjs/node_modules/ansi-regex": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
+ "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/which": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
+ "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
+ "license": "ISC",
+ "dependencies": {
+ "isexe": "^2.0.0"
+ },
+ "bin": {
+ "node-which": "bin/node-which"
+ },
+ "engines": {
+ "node": ">= 8"
+ }
+ },
+ "node_modules/wrap-ansi": {
+ "version": "8.1.0",
+ "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz",
+ "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==",
+ "license": "MIT",
+ "dependencies": {
+ "ansi-styles": "^6.1.0",
+ "string-width": "^5.0.1",
+ "strip-ansi": "^7.0.1"
+ },
+ "engines": {
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/wrap-ansi?sponsor=1"
+ }
+ },
+ "node_modules/wrap-ansi-cjs": {
+ "name": "wrap-ansi",
+ "version": "7.0.0",
+ "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
+ "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
+ "license": "MIT",
+ "dependencies": {
+ "ansi-styles": "^4.0.0",
+ "string-width": "^4.1.0",
+ "strip-ansi": "^6.0.0"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/wrap-ansi?sponsor=1"
+ }
+ },
+ "node_modules/wrap-ansi-cjs/node_modules/ansi-regex": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
+ "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/wrap-ansi-cjs/node_modules/ansi-styles": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
+ "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
+ "license": "MIT",
+ "dependencies": {
+ "color-convert": "^2.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/ansi-styles?sponsor=1"
+ }
+ },
+ "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": {
+ "version": "8.0.0",
+ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
+ "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
+ "license": "MIT"
+ },
+ "node_modules/wrap-ansi-cjs/node_modules/string-width": {
+ "version": "4.2.3",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
+ "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
+ "license": "MIT",
+ "dependencies": {
+ "emoji-regex": "^8.0.0",
+ "is-fullwidth-code-point": "^3.0.0",
+ "strip-ansi": "^6.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/wrap-ansi-cjs/node_modules/strip-ansi": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
+ "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
+ "license": "MIT",
+ "dependencies": {
+ "ansi-regex": "^5.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ }
+ }
+}
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/LICENSE b/node_modules/@apidevtools/json-schema-ref-parser/LICENSE
new file mode 100644
index 00000000..853473ae
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2015 James Messinger
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/README.md b/node_modules/@apidevtools/json-schema-ref-parser/README.md
new file mode 100644
index 00000000..4b700db8
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/README.md
@@ -0,0 +1,168 @@
+# JSON Schema $Ref Parser
+
+#### Parse, Resolve, and Dereference JSON Schema $ref pointers
+
+[Build Status](https://github.com/APIDevTools/json-schema-ref-parser/actions)
+[Coverage Status](https://coveralls.io/github/APIDevTools/json-schema-ref-parser)
+
+[npm](https://www.npmjs.com/package/@apidevtools/json-schema-ref-parser)
+[License](LICENSE)
+[Buy us a tree](https://plant.treeware.earth/APIDevTools/json-schema-ref-parser)
+
+## Installation
+
+Install using [npm](https://docs.npmjs.com/about-npm/):
+
+```bash
+npm install @apidevtools/json-schema-ref-parser
+yarn add @apidevtools/json-schema-ref-parser
+bun add @apidevtools/json-schema-ref-parser
+```
+
+## The Problem:
+
+You've got a JSON Schema with `$ref` pointers to other files and/or URLs. Maybe you know all the referenced files ahead
+of time. Maybe you don't. Maybe some are local files, and others are remote URLs. Maybe they are a mix of JSON and YAML
+format. Maybe some of the files contain cross-references to each other.
+
+```json
+{
+ "definitions": {
+ "person": {
+ // references an external file
+ "$ref": "schemas/people/Bruce-Wayne.json"
+ },
+ "place": {
+ // references a sub-schema in an external file
+ "$ref": "schemas/places.yaml#/definitions/Gotham-City"
+ },
+ "thing": {
+ // references a URL
+ "$ref": "http://wayne-enterprises.com/things/batmobile"
+ },
+ "color": {
+ // references a value in an external file via an internal reference
+ "$ref": "#/definitions/thing/properties/colors/black-as-the-night"
+ }
+ }
+}
+```
+
+## The Solution:
+
+JSON Schema $Ref Parser is a full [JSON Reference](https://tools.ietf.org/html/draft-pbryan-zyp-json-ref-03)
+and [JSON Pointer](https://tools.ietf.org/html/rfc6901) implementation that crawls even the most
+complex [JSON Schemas](http://json-schema.org/latest/json-schema-core.html) and gives you simple, straightforward
+JavaScript objects.
+
+- Use **JSON** or **YAML** schemas — or even a mix of both!
+- Supports `$ref` pointers to external files and URLs, as well
+ as [custom sources](https://apitools.dev/json-schema-ref-parser/docs/plugins/resolvers.html) such as databases
+- Can [bundle](https://apitools.dev/json-schema-ref-parser/docs/ref-parser.html#bundlepath-options-callback) multiple
+ files into a single schema that only has _internal_ `$ref` pointers
+- Can [dereference](https://apitools.dev/json-schema-ref-parser/docs/ref-parser.html#dereferencepath-options-callback)
+ your schema, producing a plain-old JavaScript object that's easy to work with
+- Supports [circular references](https://apitools.dev/json-schema-ref-parser/docs/#circular-refs), nested references,
+ back-references, and cross-references between files
+- Maintains object reference equality — `$ref` pointers to the same value always resolve to the same object
+ instance
+- Compatible with Node LTS and beyond, and all major web browsers on Windows, Mac, and Linux
+
+## Example
+
+```javascript
+import $RefParser from "@apidevtools/json-schema-ref-parser";
+
+try {
+ await $RefParser.dereference(mySchema);
+ // note - by default, mySchema is modified in place, and the returned value is a reference to the same object
+ console.log(mySchema.definitions.person.properties.firstName);
+
+ // if you want to avoid modifying the original schema, you can disable the `mutateInputSchema` option
+ let clonedSchema = await $RefParser.dereference(mySchema, { mutateInputSchema: false });
+ console.log(clonedSchema.definitions.person.properties.firstName);
+} catch (err) {
+ console.error(err);
+}
+```
+
+For more detailed examples, please see the [API Documentation](https://apitools.dev/json-schema-ref-parser/docs/)
+
+## Polyfills
+
+If you are using Node.js < 18, you'll need a polyfill for `fetch`,
+like [node-fetch](https://github.com/node-fetch/node-fetch):
+
+```javascript
+import fetch from "node-fetch";
+
+globalThis.fetch = fetch;
+```
+
+## Browser support
+
+JSON Schema $Ref Parser supports recent versions of every major web browser. Older browsers may
+require [Babel](https://babeljs.io/) and/or [polyfills](https://babeljs.io/docs/en/next/babel-polyfill).
+
+To use JSON Schema $Ref Parser in a browser, you'll need to use a bundling tool such
+as [Webpack](https://webpack.js.org/), [Rollup](https://rollupjs.org/), [Parcel](https://parceljs.org/),
+or [Browserify](http://browserify.org/). Some bundlers may require a bit of configuration, such as
+setting `browser: true` in [rollup-plugin-resolve](https://github.com/rollup/rollup-plugin-node-resolve).
+
+#### Webpack 5
+
+Webpack 5 has dropped the default export of node core modules in favour of polyfills, you'll need to set them up
+yourself ( after npm-installing them )
+Edit your `webpack.config.js` :
+
+```js
+config.resolve.fallback = {
+ path: require.resolve("path-browserify"),
+ fs: require.resolve("browserify-fs"),
+};
+
+config.plugins.push(
+ new webpack.ProvidePlugin({
+ Buffer: ["buffer", "Buffer"],
+ }),
+);
+```
+
+## API Documentation
+
+Full API documentation is available [right here](https://apitools.dev/json-schema-ref-parser/docs/)
+
+## Contributing
+
+I welcome any contributions, enhancements, and
+bug-fixes. [Open an issue](https://github.com/APIDevTools/json-schema-ref-parser/issues) on GitHub
+and [submit a pull request](https://github.com/APIDevTools/json-schema-ref-parser/pulls).
+
+#### Building/Testing
+
+To build/test the project locally on your computer:
+
+1. **Clone this repo**
+ `git clone https://github.com/APIDevTools/json-schema-ref-parser.git`
+
+2. **Install dependencies**
+ `yarn install`
+
+3. **Run the tests**
+ `yarn test`
+
+## License
+
+JSON Schema $Ref Parser is 100% free and open-source, under the [MIT license](LICENSE). Use it however you want.
+
+This package is [Treeware](http://treeware.earth). If you use it in production, then we ask that you [**buy the world a
+tree**](https://plant.treeware.earth/APIDevTools/json-schema-ref-parser) to thank us for our work. By contributing to
+the Treeware forest you’ll be creating employment for local families and restoring wildlife habitats.
+
+## Big Thanks To
+
+Thanks to these awesome companies for their support of Open Source developers ❤
+
+[Stoplight](https://stoplight.io/?utm_source=github&utm_medium=readme&utm_campaign=json_schema_ref_parser)
+[SauceLabs](https://saucelabs.com)
+[Coveralls](https://coveralls.io)
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/bundle.d.ts b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/bundle.d.ts
new file mode 100644
index 00000000..0e2a47a2
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/bundle.d.ts
@@ -0,0 +1,27 @@
+import type $RefParser from "./index";
+import type { ParserOptions } from "./index";
+import type { JSONSchema } from "./index";
+export interface InventoryEntry {
+ $ref: any;
+ parent: any;
+ key: any;
+ pathFromRoot: any;
+ depth: any;
+ file: any;
+ hash: any;
+ value: any;
+ circular: any;
+ extended: any;
+ external: any;
+ indirections: any;
+}
+/**
+ * Bundles all external JSON references into the main JSON schema, thus resulting in a schema that
+ * only has *internal* references, not any *external* references.
+ * This method mutates the JSON schema object, adding new references and re-mapping existing ones.
+ *
+ * @param parser
+ * @param options
+ */
+declare function bundle<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(parser: $RefParser<S, O>, options: O): void;
+export default bundle;
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/bundle.js b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/bundle.js
new file mode 100644
index 00000000..9e73495b
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/bundle.js
@@ -0,0 +1,283 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const ref_js_1 = __importDefault(require("./ref.js"));
+const pointer_js_1 = __importDefault(require("./pointer.js"));
+const url = __importStar(require("./util/url.js"));
+/**
+ * Bundles all external JSON references into the main JSON schema, thus resulting in a schema that
+ * only has *internal* references, not any *external* references.
+ * This method mutates the JSON schema object, adding new references and re-mapping existing ones.
+ *
+ * @param parser
+ * @param options
+ */
+function bundle(parser, options) {
+ // console.log('Bundling $ref pointers in %s', parser.$refs._root$Ref.path);
+ // Build an inventory of all $ref pointers in the JSON Schema
+ const inventory = [];
+ crawl(parser, "schema", parser.$refs._root$Ref.path + "#", "#", 0, inventory, parser.$refs, options);
+ // Remap all $ref pointers
+ remap(inventory);
+}
+/**
+ * Recursively crawls the given value, and inventories all JSON references.
+ *
+ * @param parent - The object containing the value to crawl. If the value is not an object or array, it will be ignored.
+ * @param key - The property key of `parent` to be crawled
+ * @param path - The full path of the property being crawled, possibly with a JSON Pointer in the hash
+ * @param pathFromRoot - The path of the property being crawled, from the schema root
+ * @param indirections
+ * @param inventory - An array of already-inventoried $ref pointers
+ * @param $refs
+ * @param options
+ */
+function crawl(parent, key, path, pathFromRoot, indirections, inventory, $refs, options) {
+ const obj = key === null ? parent : parent[key];
+ if (obj && typeof obj === "object" && !ArrayBuffer.isView(obj)) {
+ if (ref_js_1.default.isAllowed$Ref(obj)) {
+ inventory$Ref(parent, key, path, pathFromRoot, indirections, inventory, $refs, options);
+ }
+ else {
+ // Crawl the object in a specific order that's optimized for bundling.
+ // This is important because it determines how `pathFromRoot` gets built,
+ // which later determines which keys get dereferenced and which ones get remapped
+ const keys = Object.keys(obj).sort((a, b) => {
+                // Most people will expect references to be bundled into the "definitions" property,
+ // so we always crawl that property first, if it exists.
+ if (a === "definitions") {
+ return -1;
+ }
+ else if (b === "definitions") {
+ return 1;
+ }
+ else {
+ // Otherwise, crawl the keys based on their length.
+ // This produces the shortest possible bundled references
+ return a.length - b.length;
+ }
+ });
+ for (const key of keys) {
+ const keyPath = pointer_js_1.default.join(path, key);
+ const keyPathFromRoot = pointer_js_1.default.join(pathFromRoot, key);
+ const value = obj[key];
+ if (ref_js_1.default.isAllowed$Ref(value)) {
+ inventory$Ref(obj, key, path, keyPathFromRoot, indirections, inventory, $refs, options);
+ }
+ else {
+ crawl(obj, key, keyPath, keyPathFromRoot, indirections, inventory, $refs, options);
+ }
+ }
+ }
+ }
+}
+/**
+ * Inventories the given JSON Reference (i.e. records detailed information about it so we can
+ * optimize all $refs in the schema), and then crawls the resolved value.
+ *
+ * @param $refParent - The object that contains a JSON Reference as one of its keys
+ * @param $refKey - The key in `$refParent` that is a JSON Reference
+ * @param path - The full path of the JSON Reference at `$refKey`, possibly with a JSON Pointer in the hash
+ * @param indirections - unknown
+ * @param pathFromRoot - The path of the JSON Reference at `$refKey`, from the schema root
+ * @param inventory - An array of already-inventoried $ref pointers
+ * @param $refs
+ * @param options
+ */
+function inventory$Ref($refParent, $refKey, path, pathFromRoot, indirections, inventory, $refs, options) {
+ const $ref = $refKey === null ? $refParent : $refParent[$refKey];
+ const $refPath = url.resolve(path, $ref.$ref);
+ const pointer = $refs._resolve($refPath, pathFromRoot, options);
+ if (pointer === null) {
+ return;
+ }
+ const parsed = pointer_js_1.default.parse(pathFromRoot);
+ const depth = parsed.length;
+ const file = url.stripHash(pointer.path);
+ const hash = url.getHash(pointer.path);
+ const external = file !== $refs._root$Ref.path;
+ const extended = ref_js_1.default.isExtended$Ref($ref);
+ indirections += pointer.indirections;
+ const existingEntry = findInInventory(inventory, $refParent, $refKey);
+ if (existingEntry) {
+ // This $Ref has already been inventoried, so we don't need to process it again
+ if (depth < existingEntry.depth || indirections < existingEntry.indirections) {
+ removeFromInventory(inventory, existingEntry);
+ }
+ else {
+ return;
+ }
+ }
+ inventory.push({
+ $ref, // The JSON Reference (e.g. {$ref: string})
+ parent: $refParent, // The object that contains this $ref pointer
+ key: $refKey, // The key in `parent` that is the $ref pointer
+ pathFromRoot, // The path to the $ref pointer, from the JSON Schema root
+ depth, // How far from the JSON Schema root is this $ref pointer?
+ file, // The file that the $ref pointer resolves to
+ hash, // The hash within `file` that the $ref pointer resolves to
+ value: pointer.value, // The resolved value of the $ref pointer
+ circular: pointer.circular, // Is this $ref pointer DIRECTLY circular? (i.e. it references itself)
+ extended, // Does this $ref extend its resolved value? (i.e. it has extra properties, in addition to "$ref")
+ external, // Does this $ref pointer point to a file other than the main JSON Schema file?
+ indirections, // The number of indirect references that were traversed to resolve the value
+ });
+ // Recursively crawl the resolved value
+ if (!existingEntry || external) {
+ crawl(pointer.value, null, pointer.path, pathFromRoot, indirections + 1, inventory, $refs, options);
+ }
+}
+/**
+ * Re-maps every $ref pointer, so that they're all relative to the root of the JSON Schema.
+ * Each referenced value is dereferenced EXACTLY ONCE. All subsequent references to the same
+ * value are re-mapped to point to the first reference.
+ *
+ * @example: {
+ * first: { $ref: somefile.json#/some/part },
+ * second: { $ref: somefile.json#/another/part },
+ * third: { $ref: somefile.json },
+ * fourth: { $ref: somefile.json#/some/part/sub/part }
+ * }
+ *
+ * In this example, there are four references to the same file, but since the third reference points
+ * to the ENTIRE file, that's the only one we need to dereference. The other three can just be
+ * remapped to point inside the third one.
+ *
+ * On the other hand, if the third reference DIDN'T exist, then the first and second would both need
+ * to be dereferenced, since they point to different parts of the file. The fourth reference does NOT
+ * need to be dereferenced, because it can be remapped to point inside the first one.
+ *
+ * @param inventory
+ */
+function remap(inventory) {
+ // Group & sort all the $ref pointers, so they're in the order that we need to dereference/remap them
+ inventory.sort((a, b) => {
+ if (a.file !== b.file) {
+ // Group all the $refs that point to the same file
+ return a.file < b.file ? -1 : +1;
+ }
+ else if (a.hash !== b.hash) {
+ // Group all the $refs that point to the same part of the file
+ return a.hash < b.hash ? -1 : +1;
+ }
+ else if (a.circular !== b.circular) {
+ // If the $ref points to itself, then sort it higher than other $refs that point to this $ref
+ return a.circular ? -1 : +1;
+ }
+ else if (a.extended !== b.extended) {
+ // If the $ref extends the resolved value, then sort it lower than other $refs that don't extend the value
+ return a.extended ? +1 : -1;
+ }
+ else if (a.indirections !== b.indirections) {
+ // Sort direct references higher than indirect references
+ return a.indirections - b.indirections;
+ }
+ else if (a.depth !== b.depth) {
+ // Sort $refs by how close they are to the JSON Schema root
+ return a.depth - b.depth;
+ }
+ else {
+ // Determine how far each $ref is from the "definitions" property.
+            // Most people will expect references to be bundled into the "definitions" property if possible.
+ const aDefinitionsIndex = a.pathFromRoot.lastIndexOf("/definitions");
+ const bDefinitionsIndex = b.pathFromRoot.lastIndexOf("/definitions");
+ if (aDefinitionsIndex !== bDefinitionsIndex) {
+ // Give higher priority to the $ref that's closer to the "definitions" property
+ return bDefinitionsIndex - aDefinitionsIndex;
+ }
+ else {
+ // All else is equal, so use the shorter path, which will produce the shortest possible reference
+ return a.pathFromRoot.length - b.pathFromRoot.length;
+ }
+ }
+ });
+ let file, hash, pathFromRoot;
+ for (const entry of inventory) {
+ // console.log('Re-mapping $ref pointer "%s" at %s', entry.$ref.$ref, entry.pathFromRoot);
+ if (!entry.external) {
+ // This $ref already resolves to the main JSON Schema file
+ entry.$ref.$ref = entry.hash;
+ }
+ else if (entry.file === file && entry.hash === hash) {
+            // This $ref points to the same value as the previous $ref, so remap it to the same path
+ entry.$ref.$ref = pathFromRoot;
+ }
+ else if (entry.file === file && entry.hash.indexOf(hash + "/") === 0) {
+            // This $ref points to a sub-value of the previous $ref, so remap it beneath that path
+ entry.$ref.$ref = pointer_js_1.default.join(pathFromRoot, pointer_js_1.default.parse(entry.hash.replace(hash, "#")));
+ }
+ else {
+ // We've moved to a new file or new hash
+ file = entry.file;
+ hash = entry.hash;
+ pathFromRoot = entry.pathFromRoot;
+ // This is the first $ref to point to this value, so dereference the value.
+ // Any other $refs that point to the same value will point to this $ref instead
+ entry.$ref = entry.parent[entry.key] = ref_js_1.default.dereference(entry.$ref, entry.value);
+ if (entry.circular) {
+ // This $ref points to itself
+ entry.$ref.$ref = entry.pathFromRoot;
+ }
+ }
+ }
+ // we want to ensure that any $refs that point to another $ref are remapped to point to the final value
+ // let hadChange = true;
+ // while (hadChange) {
+ // hadChange = false;
+ // for (const entry of inventory) {
+ // if (entry.$ref && typeof entry.$ref === "object" && "$ref" in entry.$ref) {
+ // const resolved = inventory.find((e: InventoryEntry) => e.pathFromRoot === entry.$ref.$ref);
+ // if (resolved) {
+ // const resolvedPointsToAnotherRef =
+ // resolved.$ref && typeof resolved.$ref === "object" && "$ref" in resolved.$ref;
+ // if (resolvedPointsToAnotherRef && entry.$ref.$ref !== resolved.$ref.$ref) {
+ // // console.log('Re-mapping $ref pointer "%s" at %s', entry.$ref.$ref, entry.pathFromRoot);
+ // entry.$ref.$ref = resolved.$ref.$ref;
+ // hadChange = true;
+ // }
+ // }
+ // }
+ // }
+ // }
+}
+/**
+ * TODO
+ */
+function findInInventory(inventory, $refParent, $refKey) {
+ for (const existingEntry of inventory) {
+ if (existingEntry && existingEntry.parent === $refParent && existingEntry.key === $refKey) {
+ return existingEntry;
+ }
+ }
+ return undefined;
+}
+function removeFromInventory(inventory, entry) {
+ const index = inventory.indexOf(entry);
+ inventory.splice(index, 1);
+}
+exports.default = bundle;
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/dereference.d.ts b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/dereference.d.ts
new file mode 100644
index 00000000..ff33f9c4
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/dereference.d.ts
@@ -0,0 +1,12 @@
+import type { ParserOptions } from "./options.js";
+import type { JSONSchema } from "./types";
+import type $RefParser from "./index";
+export default dereference;
+/**
+ * Crawls the JSON schema, finds all JSON references, and dereferences them.
+ * This method mutates the JSON schema object, replacing JSON references with their resolved value.
+ *
+ * @param parser
+ * @param options
+ */
+declare function dereference<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(parser: $RefParser<S, O>, options: O): void;
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/dereference.js b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/dereference.js
new file mode 100644
index 00000000..84975202
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/dereference.js
@@ -0,0 +1,216 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const ref_js_1 = __importDefault(require("./ref.js"));
+const pointer_js_1 = __importDefault(require("./pointer.js"));
+const ono_1 = require("@jsdevtools/ono");
+const url = __importStar(require("./util/url.js"));
+const errors_1 = require("./util/errors");
+exports.default = dereference;
+/**
+ * Crawls the JSON schema, finds all JSON references, and dereferences them.
+ * This method mutates the JSON schema object, replacing JSON references with their resolved value.
+ *
+ * @param parser
+ * @param options
+ */
+function dereference(parser, options) {
+ const start = Date.now();
+ // console.log('Dereferencing $ref pointers in %s', parser.$refs._root$Ref.path);
+ const dereferenced = crawl(parser.schema, parser.$refs._root$Ref.path, "#", new Set(), new Set(), new Map(), parser.$refs, options, start);
+ parser.$refs.circular = dereferenced.circular;
+ parser.schema = dereferenced.value;
+}
+/**
+ * Recursively crawls the given value, and dereferences any JSON references.
+ *
+ * @param obj - The value to crawl. If it's not an object or array, it will be ignored.
+ * @param path - The full path of `obj`, possibly with a JSON Pointer in the hash
+ * @param pathFromRoot - The path of `obj` from the schema root
+ * @param parents - An array of the parent objects that have already been dereferenced
+ * @param processedObjects - An array of all the objects that have already been processed
+ * @param dereferencedCache - A map of all the dereferenced objects
+ * @param $refs
+ * @param options
+ * @param startTime - The time when the dereferencing started
+ * @returns
+ */
+function crawl(obj, path, pathFromRoot, parents, processedObjects, dereferencedCache, $refs, options, startTime) {
+ let dereferenced;
+ const result = {
+ value: obj,
+ circular: false,
+ };
+ if (options && options.timeoutMs) {
+ if (Date.now() - startTime > options.timeoutMs) {
+ throw new errors_1.TimeoutError(options.timeoutMs);
+ }
+ }
+ const derefOptions = (options.dereference || {});
+ const isExcludedPath = derefOptions.excludedPathMatcher || (() => false);
+ if (derefOptions?.circular === "ignore" || !processedObjects.has(obj)) {
+ if (obj && typeof obj === "object" && !ArrayBuffer.isView(obj) && !isExcludedPath(pathFromRoot)) {
+ parents.add(obj);
+ processedObjects.add(obj);
+ if (ref_js_1.default.isAllowed$Ref(obj, options)) {
+ dereferenced = dereference$Ref(obj, path, pathFromRoot, parents, processedObjects, dereferencedCache, $refs, options, startTime);
+ result.circular = dereferenced.circular;
+ result.value = dereferenced.value;
+ }
+ else {
+ for (const key of Object.keys(obj)) {
+ const keyPath = pointer_js_1.default.join(path, key);
+ const keyPathFromRoot = pointer_js_1.default.join(pathFromRoot, key);
+ if (isExcludedPath(keyPathFromRoot)) {
+ continue;
+ }
+ const value = obj[key];
+ let circular = false;
+ if (ref_js_1.default.isAllowed$Ref(value, options)) {
+ dereferenced = dereference$Ref(value, keyPath, keyPathFromRoot, parents, processedObjects, dereferencedCache, $refs, options, startTime);
+ circular = dereferenced.circular;
+ // Avoid pointless mutations; breaks frozen objects to no profit
+ if (obj[key] !== dereferenced.value) {
+ obj[key] = dereferenced.value;
+ derefOptions?.onDereference?.(value.$ref, obj[key], obj, key);
+ }
+ }
+ else {
+ if (!parents.has(value)) {
+ dereferenced = crawl(value, keyPath, keyPathFromRoot, parents, processedObjects, dereferencedCache, $refs, options, startTime);
+ circular = dereferenced.circular;
+ // Avoid pointless mutations; breaks frozen objects to no profit
+ if (obj[key] !== dereferenced.value) {
+ obj[key] = dereferenced.value;
+ }
+ }
+ else {
+ circular = foundCircularReference(keyPath, $refs, options);
+ }
+ }
+ // Set the "isCircular" flag if this or any other property is circular
+ result.circular = result.circular || circular;
+ }
+ }
+ parents.delete(obj);
+ }
+ }
+ return result;
+}
+/**
+ * Dereferences the given JSON Reference, and then crawls the resulting value.
+ *
+ * @param $ref - The JSON Reference to resolve
+ * @param path - The full path of `$ref`, possibly with a JSON Pointer in the hash
+ * @param pathFromRoot - The path of `$ref` from the schema root
+ * @param parents - A set of the parent objects that have already been dereferenced
+ * @param processedObjects - A set of all the objects that have already been dereferenced
+ * @param dereferencedCache - A map of all the dereferenced objects
+ * @param $refs
+ * @param options
+ * @returns
+ */
+function dereference$Ref($ref, path, pathFromRoot, parents, processedObjects, dereferencedCache, $refs, options, startTime) {
+ const isExternalRef = ref_js_1.default.isExternal$Ref($ref);
+ const shouldResolveOnCwd = isExternalRef && options?.dereference?.externalReferenceResolution === "root";
+ const $refPath = url.resolve(shouldResolveOnCwd ? url.cwd() : path, $ref.$ref);
+ const cache = dereferencedCache.get($refPath);
+ if (cache && !cache.circular) {
+ const refKeys = Object.keys($ref);
+ if (refKeys.length > 1) {
+ const extraKeys = {};
+ for (const key of refKeys) {
+ if (key !== "$ref" && !(key in cache.value)) {
+ // @ts-expect-error TS(7053): Element implicitly has an 'any' type because expre... Remove this comment to see the full error message
+ extraKeys[key] = $ref[key];
+ }
+ }
+ return {
+ circular: cache.circular,
+ value: Object.assign({}, cache.value, extraKeys),
+ };
+ }
+ return cache;
+ }
+ const pointer = $refs._resolve($refPath, path, options);
+ if (pointer === null) {
+ return {
+ circular: false,
+ value: null,
+ };
+ }
+ // Check for circular references
+ const directCircular = pointer.circular;
+ let circular = directCircular || parents.has(pointer.value);
+ if (circular) {
+ foundCircularReference(path, $refs, options);
+ }
+ // Dereference the JSON reference
+ let dereferencedValue = ref_js_1.default.dereference($ref, pointer.value);
+ // Crawl the dereferenced value (unless it's circular)
+ if (!circular) {
+ // Determine if the dereferenced value is circular
+ const dereferenced = crawl(dereferencedValue, pointer.path, pathFromRoot, parents, processedObjects, dereferencedCache, $refs, options, startTime);
+ circular = dereferenced.circular;
+ dereferencedValue = dereferenced.value;
+ }
+ if (circular && !directCircular && options.dereference?.circular === "ignore") {
+ // The user has chosen to "ignore" circular references, so don't change the value
+ dereferencedValue = $ref;
+ }
+ if (directCircular) {
+ // The pointer is a DIRECT circular reference (i.e. it references itself).
+ // So replace the $ref path with the absolute path from the JSON Schema root
+ dereferencedValue.$ref = pathFromRoot;
+ }
+ const dereferencedObject = {
+ circular,
+ value: dereferencedValue,
+ };
+ // only cache if no extra properties than $ref
+ if (Object.keys($ref).length === 1) {
+ dereferencedCache.set($refPath, dereferencedObject);
+ }
+ return dereferencedObject;
+}
+/**
+ * Called when a circular reference is found.
+ * It sets the {@link $Refs#circular} flag, and throws an error if options.dereference.circular is false.
+ *
+ * @param keyPath - The JSON Reference path of the circular reference
+ * @param $refs
+ * @param options
+ * @returns - always returns true, to indicate that a circular reference was found
+ */
+function foundCircularReference(keyPath, $refs, options) {
+ $refs.circular = true;
+ if (!options.dereference.circular) {
+ throw ono_1.ono.reference(`Circular $ref pointer found at ${keyPath}`);
+ }
+ return true;
+}
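
The crawl above is driven by the dereference-related options (timeoutMs, dereference.circular, excludedPathMatcher, onDereference, externalReferenceResolution). A minimal usage sketch of those knobs, assuming the package is consumed directly; the api.yaml path and the excluded prefix are placeholders:

    const $RefParser = require("@apidevtools/json-schema-ref-parser");

    async function dereferenceSpec() {
      const parser = new $RefParser();
      // Every option below maps onto a branch handled by crawl() / dereference$Ref() above.
      const schema = await parser.dereference("api.yaml", {
        timeoutMs: 10000, // crawl() throws TimeoutError once this budget is exceeded
        dereference: {
          circular: "ignore", // keep circular $refs in place instead of throwing
          excludedPathMatcher: (path) => path.startsWith("#/components/examples"),
          onDereference: (ref, value, parent, key) => console.log(`resolved ${ref} at ${key}`),
          externalReferenceResolution: "relative",
        },
      });
      console.log("circular refs present:", parser.$refs.circular);
      return schema;
    }

    dereferenceSpec().catch(console.error);
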
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/index.d.ts b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/index.d.ts
new file mode 100644
index 00000000..d8dcbf83
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/index.d.ts
@@ -0,0 +1,162 @@
+import $Refs from "./refs.js";
+import normalizeArgs from "./normalize-args.js";
+import _dereference from "./dereference.js";
+import { JSONParserError, InvalidPointerError, MissingPointerError, ResolverError, ParserError, UnmatchedParserError, UnmatchedResolverError, isHandledError, JSONParserErrorGroup } from "./util/errors.js";
+import type { ParserOptions } from "./options.js";
+import { getJsonSchemaRefParserDefaultOptions } from "./options.js";
+import type { $RefsCallback, JSONSchema, SchemaCallback, FileInfo, Plugin, ResolverOptions, HTTPResolverOptions } from "./types/index.js";
+export type RefParserSchema = string | JSONSchema;
+/**
+ * This class parses a JSON schema, builds a map of its JSON references and their resolved values,
+ * and provides methods for traversing, manipulating, and dereferencing those references.
+ *
+ * @class
+ */
+export declare class $RefParser<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>> {
+ /**
+ * The parsed (and possibly dereferenced) JSON schema object
+ *
+ * @type {object}
+ * @readonly
+ */
+ schema: S | null;
+ /**
+ * The resolved JSON references
+ *
+ * @type {$Refs}
+ * @readonly
+ */
+ $refs: $Refs<S, O>;
+ /**
+ * Parses the given JSON schema.
+ * This method does not resolve any JSON references.
+ * It just reads a single file in JSON or YAML format, and parses it as a JavaScript object.
+ *
+ * @param [path] - The file path or URL of the JSON schema
+ * @param [schema] - A JSON schema object. This object will be used instead of reading from `path`.
+ * @param [options] - Options that determine how the schema is parsed
+ * @param [callback] - An error-first callback. The second parameter is the parsed JSON schema object.
+ * @returns - The returned promise resolves with the parsed JSON schema object.
+ */
+ parse(schema: S | string | unknown): Promise<S>;
+ parse(schema: S | string | unknown, callback: SchemaCallback<S>): Promise<void>;
+ parse(schema: S | string | unknown, options: O): Promise<S>;
+ parse(schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
+ parse(baseUrl: string, schema: S | string | unknown, options: O): Promise<S>;
+ parse(baseUrl: string, schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
+ static parse<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(schema: S | string | unknown): Promise<S>;
+ static parse<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(schema: S | string | unknown, callback: SchemaCallback<S>): Promise<void>;
+ static parse<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(schema: S | string | unknown, options: O): Promise<S>;
+ static parse<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
+ static parse<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(baseUrl: string, schema: S | string | unknown, options: O): Promise<S>;
+ static parse<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(baseUrl: string, schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
+ /**
+ * *This method is used internally by other methods, such as `bundle` and `dereference`. You probably won't need to call this method yourself.*
+ *
+ * Resolves all JSON references (`$ref` pointers) in the given JSON Schema file. If it references any other files/URLs, then they will be downloaded and resolved as well. This method **does not** dereference anything. It simply gives you a `$Refs` object, which is a map of all the resolved references and their values.
+ *
+ * See https://apitools.dev/json-schema-ref-parser/docs/ref-parser.html#resolveschema-options-callback
+ *
+ * @param schema A JSON Schema object, or the file path or URL of a JSON Schema file. See the `parse` method for more info.
+ * @param options (optional)
+ * @param callback (optional) A callback that will receive a `$Refs` object
+ */
+ resolve(schema: S | string | unknown): Promise<$Refs<S, O>>;
+ resolve(schema: S | string | unknown, callback: $RefsCallback<S, O>): Promise<void>;
+ resolve(schema: S | string | unknown, options: O): Promise<$Refs<S, O>>;
+ resolve(schema: S | string | unknown, options: O, callback: $RefsCallback<S, O>): Promise<void>;
+ resolve(baseUrl: string, schema: S | string | unknown, options: O): Promise<$Refs<S, O>>;
+ resolve(baseUrl: string, schema: S | string | unknown, options: O, callback: $RefsCallback<S, O>): Promise<void>;
+ /**
+ * *This method is used internally by other methods, such as `bundle` and `dereference`. You probably won't need to call this method yourself.*
+ *
+ * Resolves all JSON references (`$ref` pointers) in the given JSON Schema file. If it references any other files/URLs, then they will be downloaded and resolved as well. This method **does not** dereference anything. It simply gives you a `$Refs` object, which is a map of all the resolved references and their values.
+ *
+ * See https://apitools.dev/json-schema-ref-parser/docs/ref-parser.html#resolveschema-options-callback
+ *
+ * @param schema A JSON Schema object, or the file path or URL of a JSON Schema file. See the `parse` method for more info.
+ * @param options (optional)
+ * @param callback (optional) A callback that will receive a `$Refs` object
+ */
+ static resolve<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(schema: S | string | unknown): Promise<$Refs<S, O>>;
+ static resolve<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(schema: S | string | unknown, callback: $RefsCallback<S, O>): Promise<void>;
+ static resolve<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(schema: S | string | unknown, options: O): Promise<$Refs<S, O>>;
+ static resolve<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(schema: S | string | unknown, options: O, callback: $RefsCallback<S, O>): Promise<void>;
+ static resolve<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(baseUrl: string, schema: S | string | unknown, options: O): Promise<$Refs<S, O>>;
+ static resolve<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(baseUrl: string, schema: S | string | unknown, options: O, callback: $RefsCallback<S, O>): Promise<void>;
+ /**
+ * Bundles all referenced files/URLs into a single schema that only has internal `$ref` pointers. This lets you split-up your schema however you want while you're building it, but easily combine all those files together when it's time to package or distribute the schema to other people. The resulting schema size will be small, since it will still contain internal JSON references rather than being fully-dereferenced.
+ *
+ * This also eliminates the risk of circular references, so the schema can be safely serialized using `JSON.stringify()`.
+ *
+ * See https://apitools.dev/json-schema-ref-parser/docs/ref-parser.html#bundleschema-options-callback
+ *
+ * @param schema A JSON Schema object, or the file path or URL of a JSON Schema file. See the `parse` method for more info.
+ * @param options (optional)
+ * @param callback (optional) A callback that will receive the bundled schema object
+ */
+ static bundle<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(schema: S | string | unknown): Promise<S>;
+ static bundle<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(schema: S | string | unknown, callback: SchemaCallback<S>): Promise<void>;
+ static bundle<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(schema: S | string | unknown, options: O): Promise<S>;
+ static bundle<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
+ static bundle<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(baseUrl: string, schema: S | string | unknown, options: O): Promise<S>;
+ static bundle<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(baseUrl: string, schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
+ /**
+ * Bundles all referenced files/URLs into a single schema that only has internal `$ref` pointers. This lets you split-up your schema however you want while you're building it, but easily combine all those files together when it's time to package or distribute the schema to other people. The resulting schema size will be small, since it will still contain internal JSON references rather than being fully-dereferenced.
+ *
+ * This also eliminates the risk of circular references, so the schema can be safely serialized using `JSON.stringify()`.
+ *
+ * See https://apitools.dev/json-schema-ref-parser/docs/ref-parser.html#bundleschema-options-callback
+ *
+ * @param schema A JSON Schema object, or the file path or URL of a JSON Schema file. See the `parse` method for more info.
+ * @param options (optional)
+ * @param callback (optional) A callback that will receive the bundled schema object
+ */
+ bundle(schema: S | string | unknown): Promise<S>;
+ bundle(schema: S | string | unknown, callback: SchemaCallback<S>): Promise<void>;
+ bundle(schema: S | string | unknown, options: O): Promise<S>;
+ bundle(schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
+ bundle(baseUrl: string, schema: S | string | unknown, options: O): Promise<S>;
+ bundle(baseUrl: string, schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
+ /**
+ * Dereferences all `$ref` pointers in the JSON Schema, replacing each reference with its resolved value. This results in a schema object that does not contain any `$ref` pointers. Instead, it's a normal JavaScript object tree that can easily be crawled and used just like any other JavaScript object. This is great for programmatic usage, especially when using tools that don't understand JSON references.
+ *
+ * The dereference method maintains object reference equality, meaning that all `$ref` pointers that point to the same object will be replaced with references to the same object. Again, this is great for programmatic usage, but it does introduce the risk of circular references, so be careful if you intend to serialize the schema using `JSON.stringify()`. Consider using the bundle method instead, which does not create circular references.
+ *
+ * See https://apitools.dev/json-schema-ref-parser/docs/ref-parser.html#dereferenceschema-options-callback
+ *
+ * @param schema A JSON Schema object, or the file path or URL of a JSON Schema file. See the `parse` method for more info.
+ * @param options (optional)
+ * @param callback (optional) A callback that will receive the dereferenced schema object
+ */
+ static dereference<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(schema: S | string | unknown): Promise<S>;
+ static dereference<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(schema: S | string | unknown, callback: SchemaCallback<S>): Promise<void>;
+ static dereference<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(schema: S | string | unknown, options: O): Promise<S>;
+ static dereference<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
+ static dereference<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(baseUrl: string, schema: S | string | unknown, options: O): Promise<S>;
+ static dereference<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(baseUrl: string, schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
+ /**
+ * Dereferences all `$ref` pointers in the JSON Schema, replacing each reference with its resolved value. This results in a schema object that does not contain any `$ref` pointers. Instead, it's a normal JavaScript object tree that can easily be crawled and used just like any other JavaScript object. This is great for programmatic usage, especially when using tools that don't understand JSON references.
+ *
+ * The dereference method maintains object reference equality, meaning that all `$ref` pointers that point to the same object will be replaced with references to the same object. Again, this is great for programmatic usage, but it does introduce the risk of circular references, so be careful if you intend to serialize the schema using `JSON.stringify()`. Consider using the bundle method instead, which does not create circular references.
+ *
+ * See https://apitools.dev/json-schema-ref-parser/docs/ref-parser.html#dereferenceschema-options-callback
+ *
+ * @param baseUrl
+ * @param schema A JSON Schema object, or the file path or URL of a JSON Schema file. See the `parse` method for more info.
+ * @param options (optional)
+ * @param callback (optional) A callback that will receive the dereferenced schema object
+ */
+ dereference(baseUrl: string, schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
+ dereference(schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
+ dereference(schema: S | string | unknown, callback: SchemaCallback<S>): Promise<void>;
+ dereference(baseUrl: string, schema: S | string | unknown, options: O): Promise<S>;
+ dereference(schema: S | string | unknown, options: O): Promise<S>;
+ dereference(schema: S | string | unknown): Promise<S>;
+}
+export default $RefParser;
+export declare const parse: typeof $RefParser.parse;
+export declare const resolve: typeof $RefParser.resolve;
+export declare const bundle: typeof $RefParser.bundle;
+export declare const dereference: typeof $RefParser.dereference;
+export { UnmatchedResolverError, JSONParserError, JSONSchema, InvalidPointerError, MissingPointerError, ResolverError, ParserError, UnmatchedParserError, ParserOptions, $RefsCallback, isHandledError, JSONParserErrorGroup, SchemaCallback, FileInfo, Plugin, ResolverOptions, HTTPResolverOptions, _dereference as dereferenceInternal, normalizeArgs as jsonSchemaParserNormalizeArgs, getJsonSchemaRefParserDefaultOptions, };
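
The overloads above encode the practical difference the JSDoc describes: bundle keeps internal $ref pointers and stays serializable, while dereference inlines everything and can produce circular object graphs. A short sketch of both call styles (static and instance), using a placeholder openapi.yaml path:

    const $RefParser = require("@apidevtools/json-schema-ref-parser");

    async function demo() {
      // Static style: bundle() keeps internal $refs, so the result always survives JSON.stringify().
      const bundled = await $RefParser.bundle("openapi.yaml");
      JSON.stringify(bundled);

      // Instance style: dereference() inlines every $ref; consult $refs.circular before serializing.
      const parser = new $RefParser();
      const schema = await parser.dereference("openapi.yaml");
      if (!parser.$refs.circular) {
        JSON.stringify(schema);
      }
      return schema;
    }

    demo().catch(console.error);
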
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/index.js b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/index.js
new file mode 100644
index 00000000..f389f252
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/index.js
@@ -0,0 +1,206 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getJsonSchemaRefParserDefaultOptions = exports.jsonSchemaParserNormalizeArgs = exports.dereferenceInternal = exports.JSONParserErrorGroup = exports.isHandledError = exports.UnmatchedParserError = exports.ParserError = exports.ResolverError = exports.MissingPointerError = exports.InvalidPointerError = exports.JSONParserError = exports.UnmatchedResolverError = exports.dereference = exports.bundle = exports.resolve = exports.parse = exports.$RefParser = void 0;
+const refs_js_1 = __importDefault(require("./refs.js"));
+const parse_js_1 = __importDefault(require("./parse.js"));
+const normalize_args_js_1 = __importDefault(require("./normalize-args.js"));
+exports.jsonSchemaParserNormalizeArgs = normalize_args_js_1.default;
+const resolve_external_js_1 = __importDefault(require("./resolve-external.js"));
+const bundle_js_1 = __importDefault(require("./bundle.js"));
+const dereference_js_1 = __importDefault(require("./dereference.js"));
+exports.dereferenceInternal = dereference_js_1.default;
+const url = __importStar(require("./util/url.js"));
+const errors_js_1 = require("./util/errors.js");
+Object.defineProperty(exports, "JSONParserError", { enumerable: true, get: function () { return errors_js_1.JSONParserError; } });
+Object.defineProperty(exports, "InvalidPointerError", { enumerable: true, get: function () { return errors_js_1.InvalidPointerError; } });
+Object.defineProperty(exports, "MissingPointerError", { enumerable: true, get: function () { return errors_js_1.MissingPointerError; } });
+Object.defineProperty(exports, "ResolverError", { enumerable: true, get: function () { return errors_js_1.ResolverError; } });
+Object.defineProperty(exports, "ParserError", { enumerable: true, get: function () { return errors_js_1.ParserError; } });
+Object.defineProperty(exports, "UnmatchedParserError", { enumerable: true, get: function () { return errors_js_1.UnmatchedParserError; } });
+Object.defineProperty(exports, "UnmatchedResolverError", { enumerable: true, get: function () { return errors_js_1.UnmatchedResolverError; } });
+Object.defineProperty(exports, "isHandledError", { enumerable: true, get: function () { return errors_js_1.isHandledError; } });
+Object.defineProperty(exports, "JSONParserErrorGroup", { enumerable: true, get: function () { return errors_js_1.JSONParserErrorGroup; } });
+const ono_1 = require("@jsdevtools/ono");
+const maybe_js_1 = __importDefault(require("./util/maybe.js"));
+const options_js_1 = require("./options.js");
+Object.defineProperty(exports, "getJsonSchemaRefParserDefaultOptions", { enumerable: true, get: function () { return options_js_1.getJsonSchemaRefParserDefaultOptions; } });
+/**
+ * This class parses a JSON schema, builds a map of its JSON references and their resolved values,
+ * and provides methods for traversing, manipulating, and dereferencing those references.
+ *
+ * @class
+ */
+class $RefParser {
+ constructor() {
+ /**
+ * The parsed (and possibly dereferenced) JSON schema object
+ *
+ * @type {object}
+ * @readonly
+ */
+ this.schema = null;
+ /**
+ * The resolved JSON references
+ *
+ * @type {$Refs}
+ * @readonly
+ */
+ this.$refs = new refs_js_1.default();
+ }
+ async parse() {
+ const args = (0, normalize_args_js_1.default)(arguments);
+ let promise;
+ if (!args.path && !args.schema) {
+ const err = (0, ono_1.ono)(`Expected a file path, URL, or object. Got ${args.path || args.schema}`);
+ return (0, maybe_js_1.default)(args.callback, Promise.reject(err));
+ }
+ // Reset everything
+ this.schema = null;
+ this.$refs = new refs_js_1.default();
+ // If the path is a filesystem path, then convert it to a URL.
+ // NOTE: According to the JSON Reference spec, these should already be URLs,
+ // but, in practice, many people use local filesystem paths instead.
+ // So we're being generous here and doing the conversion automatically.
+ // This is not intended to be a 100% bulletproof solution.
+ // If it doesn't work for your use-case, then use a URL instead.
+ let pathType = "http";
+ if (url.isFileSystemPath(args.path)) {
+ args.path = url.fromFileSystemPath(args.path);
+ pathType = "file";
+ }
+ else if (!args.path && args.schema && "$id" in args.schema && args.schema.$id) {
+ // when the schema $id defines a URL, use that hostname to request the references
+ // instead of using the current page URL
+ const params = url.parse(args.schema.$id);
+ const port = params.protocol === "https:" ? 443 : 80;
+ args.path = `${params.protocol}//${params.hostname}:${port}`;
+ }
+ // Resolve the absolute path of the schema
+ args.path = url.resolve(url.cwd(), args.path);
+ if (args.schema && typeof args.schema === "object") {
+ // A schema object was passed-in.
+ // So immediately add a new $Ref with the schema object as its value
+ const $ref = this.$refs._add(args.path);
+ $ref.value = args.schema;
+ $ref.pathType = pathType;
+ promise = Promise.resolve(args.schema);
+ }
+ else {
+ // Parse the schema file/url
+ promise = (0, parse_js_1.default)(args.path, this.$refs, args.options);
+ }
+ try {
+ const result = await promise;
+ if (result !== null && typeof result === "object" && !Buffer.isBuffer(result)) {
+ this.schema = result;
+ return (0, maybe_js_1.default)(args.callback, Promise.resolve(this.schema));
+ }
+ else if (args.options.continueOnError) {
+ this.schema = null; // it's already set to null at line 79, but let's set it again for the sake of readability
+ return (0, maybe_js_1.default)(args.callback, Promise.resolve(this.schema));
+ }
+ else {
+ throw ono_1.ono.syntax(`"${this.$refs._root$Ref.path || result}" is not a valid JSON Schema`);
+ }
+ }
+ catch (err) {
+ if (!args.options.continueOnError || !(0, errors_js_1.isHandledError)(err)) {
+ return (0, maybe_js_1.default)(args.callback, Promise.reject(err));
+ }
+ if (this.$refs._$refs[url.stripHash(args.path)]) {
+ this.$refs._$refs[url.stripHash(args.path)].addError(err);
+ }
+ return (0, maybe_js_1.default)(args.callback, Promise.resolve(null));
+ }
+ }
+ static parse() {
+ const parser = new $RefParser();
+ return parser.parse.apply(parser, arguments);
+ }
+ async resolve() {
+ const args = (0, normalize_args_js_1.default)(arguments);
+ try {
+ await this.parse(args.path, args.schema, args.options);
+ await (0, resolve_external_js_1.default)(this, args.options);
+ finalize(this);
+ return (0, maybe_js_1.default)(args.callback, Promise.resolve(this.$refs));
+ }
+ catch (err) {
+ return (0, maybe_js_1.default)(args.callback, Promise.reject(err));
+ }
+ }
+ static resolve() {
+ const instance = new $RefParser();
+ return instance.resolve.apply(instance, arguments);
+ }
+ static bundle() {
+ const instance = new $RefParser();
+ return instance.bundle.apply(instance, arguments);
+ }
+ async bundle() {
+ const args = (0, normalize_args_js_1.default)(arguments);
+ try {
+ await this.resolve(args.path, args.schema, args.options);
+ (0, bundle_js_1.default)(this, args.options);
+ finalize(this);
+ return (0, maybe_js_1.default)(args.callback, Promise.resolve(this.schema));
+ }
+ catch (err) {
+ return (0, maybe_js_1.default)(args.callback, Promise.reject(err));
+ }
+ }
+ static dereference() {
+ const instance = new $RefParser();
+ return instance.dereference.apply(instance, arguments);
+ }
+ async dereference() {
+ const args = (0, normalize_args_js_1.default)(arguments);
+ try {
+ await this.resolve(args.path, args.schema, args.options);
+ (0, dereference_js_1.default)(this, args.options);
+ finalize(this);
+ return (0, maybe_js_1.default)(args.callback, Promise.resolve(this.schema));
+ }
+ catch (err) {
+ return (0, maybe_js_1.default)(args.callback, Promise.reject(err));
+ }
+ }
+}
+exports.$RefParser = $RefParser;
+exports.default = $RefParser;
+function finalize(parser) {
+ const errors = errors_js_1.JSONParserErrorGroup.getParserErrors(parser);
+ if (errors.length > 0) {
+ throw new errors_js_1.JSONParserErrorGroup(parser);
+ }
+}
+exports.parse = $RefParser.parse;
+exports.resolve = $RefParser.resolve;
+exports.bundle = $RefParser.bundle;
+exports.dereference = $RefParser.dereference;
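
Both the promise and the error-first callback styles funnel through normalizeArgs and maybe, and finalize() is what turns continueOnError failures into a single aggregated error. A hedged sketch of the two styles (file paths are placeholders):

    const { $RefParser, JSONParserErrorGroup } = require("@apidevtools/json-schema-ref-parser");

    // Promise style: with continueOnError, finalize() throws one JSONParserErrorGroup
    // that collects every handled error instead of failing on the first one.
    async function dereferenceAll(path) {
      try {
        return await $RefParser.dereference(path, { continueOnError: true });
      } catch (err) {
        if (err instanceof JSONParserErrorGroup) {
          console.error(`${err.errors.length} error(s) while processing ${path}`);
        }
        throw err;
      }
    }

    // Callback style, unwrapped by maybe():
    new $RefParser().parse("schema.json", (err, schema) => {
      if (err) return console.error(err);
      console.log("parsed", Object.keys(schema).length, "top-level keys");
    });

    dereferenceAll("openapi.yaml").catch(() => { process.exitCode = 1; });
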
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/normalize-args.d.ts b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/normalize-args.d.ts
new file mode 100644
index 00000000..3d6d6d8d
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/normalize-args.d.ts
@@ -0,0 +1,13 @@
+import type { Options, ParserOptions } from "./options.js";
+import type { JSONSchema, SchemaCallback } from "./types";
+export interface NormalizedArguments<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>> {
+ path: string;
+ schema: S;
+ options: O & Options;
+ callback: SchemaCallback;
+}
+/**
+ * Normalizes the given arguments, accounting for optional args.
+ */
+export declare function normalizeArgs<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(_args: Partial<IArguments>): NormalizedArguments<S, O>;
+export default normalizeArgs;
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/normalize-args.js b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/normalize-args.js
new file mode 100644
index 00000000..27af6ab6
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/normalize-args.js
@@ -0,0 +1,55 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.normalizeArgs = normalizeArgs;
+const options_js_1 = require("./options.js");
+/**
+ * Normalizes the given arguments, accounting for optional args.
+ */
+function normalizeArgs(_args) {
+ let path;
+ let schema;
+ let options;
+ let callback;
+ const args = Array.prototype.slice.call(_args);
+ if (typeof args[args.length - 1] === "function") {
+ // The last parameter is a callback function
+ callback = args.pop();
+ }
+ if (typeof args[0] === "string") {
+ // The first parameter is the path
+ path = args[0];
+ if (typeof args[2] === "object") {
+ // The second parameter is the schema, and the third parameter is the options
+ schema = args[1];
+ options = args[2];
+ }
+ else {
+ // The second parameter is the options
+ schema = undefined;
+ options = args[1];
+ }
+ }
+ else {
+ // The first parameter is the schema
+ path = "";
+ schema = args[0];
+ options = args[1];
+ }
+ try {
+ options = (0, options_js_1.getNewOptions)(options);
+ }
+ catch (e) {
+ console.error(`JSON Schema Ref Parser: Error normalizing options: ${e}`);
+ }
+ if (!options.mutateInputSchema && typeof schema === "object") {
+ // Make a deep clone of the schema, so that we don't alter the original object
+ schema = JSON.parse(JSON.stringify(schema));
+ }
+ return {
+ path,
+ schema,
+ options,
+ callback,
+ };
+}
+exports.default = normalizeArgs;
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/options.d.ts b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/options.d.ts
new file mode 100644
index 00000000..845d51a1
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/options.d.ts
@@ -0,0 +1,387 @@
+import type { HTTPResolverOptions, JSONSchema, JSONSchemaObject, Plugin, ResolverOptions } from "./types/index.js";
+export type DeepPartial<T> = T extends object ? {
+ [P in keyof T]?: DeepPartial<T[P]>;
+} : T;
+export interface DereferenceOptions {
+ /**
+ * Determines whether circular `$ref` pointers are handled.
+ *
+ * If set to `false`, then a `ReferenceError` will be thrown if the schema contains any circular references.
+ *
+ * If set to `"ignore"`, then circular references will simply be ignored. No error will be thrown, but the `$Refs.circular` property will still be set to `true`.
+ */
+ circular?: boolean | "ignore";
+ /**
+ * A function, called for each path, which can return true to stop this path and all
+ * subpaths from being dereferenced further. This is useful in schemas where some
+ * subpaths contain literal $ref keys that should not be dereferenced.
+ */
+ excludedPathMatcher?(path: string): boolean;
+ /**
+ * Callback invoked during dereferencing.
+ *
+ * @argument {string} path - The path being dereferenced (ie. the `$ref` string)
+ * @argument {JSONSchemaObject} value - The JSON-Schema that the `$ref` resolved to
+ * @argument {JSONSchemaObject} parent - The parent of the dereferenced object
+ * @argument {string} parentPropName - The prop name of the parent object whose value was dereferenced
+ */
+ onDereference?(path: string, value: JSONSchemaObject, parent?: JSONSchemaObject, parentPropName?: string): void;
+ /**
+ * Whether a reference should resolve relative to its directory/path, or from the cwd
+ *
+ * Default: `relative`
+ */
+ externalReferenceResolution?: "relative" | "root";
+}
+/**
+ * Options that determine how JSON schemas are parsed, resolved, and dereferenced.
+ *
+ * @param [options] - Overridden options
+ * @class
+ */
+export interface $RefParserOptions<S extends object = JSONSchema> {
+ /**
+ * The `parse` options determine how different types of files will be parsed.
+ *
+ * JSON Schema `$Ref` Parser comes with built-in JSON, YAML, plain-text, and binary parsers, any of which you can configure or disable. You can also add your own custom parsers if you want.
+ */
+ parse: {
+ json?: Plugin | boolean;
+ yaml?: Plugin | boolean;
+ binary?: Plugin | boolean;
+ text?: Plugin | boolean;
+ [key: string]: Plugin | boolean | undefined;
+ };
+ /**
+ * The `resolve` options control how JSON Schema $Ref Parser will resolve file paths and URLs, and how those files will be read/downloaded.
+ *
+ * JSON Schema `$Ref` Parser comes with built-in support for HTTP and HTTPS, as well as support for local files (when running in Node.js). You can configure or disable either of these built-in resolvers. You can also add your own custom resolvers if you want.
+ */
+ resolve: {
+ /**
+ * Determines whether external $ref pointers will be resolved. If this option is disabled, then external `$ref` pointers will simply be ignored.
+ */
+ external?: boolean;
+ file?: Partial<ResolverOptions<S>> | boolean;
+ http?: HTTPResolverOptions | boolean;
+ } & {
+ [key: string]: Partial<ResolverOptions<S>> | HTTPResolverOptions | boolean | undefined;
+ };
+ /**
+ * By default, JSON Schema $Ref Parser throws the first error it encounters. Setting `continueOnError` to `true`
+ * causes it to keep processing as much as possible and then throw a single error that contains all errors
+ * that were encountered.
+ */
+ continueOnError: boolean;
+ /**
+ * The `dereference` options control how JSON Schema `$Ref` Parser will dereference `$ref` pointers within the JSON schema.
+ */
+ dereference: DereferenceOptions;
+ /**
+ * Whether to clone the schema before dereferencing it.
+ * This is useful when you want to dereference the same schema multiple times, but you don't want to modify the original schema.
+ * Default: `true` due to mutating the input being the default behavior historically
+ */
+ mutateInputSchema?: boolean;
+ /**
+ * The maximum amount of time (in milliseconds) that JSON Schema $Ref Parser will spend dereferencing a single schema.
+ * It will throw a timeout error if the operation takes longer than this.
+ */
+ timeoutMs?: number;
+}
+export declare const getJsonSchemaRefParserDefaultOptions: () => $RefParserOptions;
+export declare const getNewOptions: <S extends object = JSONSchema, O extends ParserOptions<S> = {
+ parse?: {
+ [x: string]: boolean | {
+ name?: string | undefined;
+ order?: number | undefined;
+ allowEmpty?: boolean | undefined;
+ allowBOM?: boolean | undefined;
+ encoding?: BufferEncoding | undefined;
+ canParse?: string | boolean | {
+ exec?: {} | undefined;
+ test?: {} | undefined;
+ readonly source?: string | undefined;
+ readonly global?: boolean | undefined;
+ readonly ignoreCase?: boolean | undefined;
+ readonly multiline?: boolean | undefined;
+ lastIndex?: number | undefined;
+ compile?: {} | undefined;
+ readonly flags?: string | undefined;
+ readonly sticky?: boolean | undefined;
+ readonly unicode?: boolean | undefined;
+ readonly dotAll?: boolean | undefined;
+ readonly hasIndices?: boolean | undefined;
+ readonly unicodeSets?: boolean | undefined;
+ [Symbol.match]?: {} | undefined;
+ [Symbol.replace]?: {} | undefined;
+ [Symbol.search]?: {} | undefined;
+ [Symbol.split]?: {} | undefined;
+ [Symbol.matchAll]?: {} | undefined;
+ } | (string | undefined)[] | {} | undefined;
+ parse?: string | number | {} | undefined;
+ } | undefined;
+ json?: boolean | {
+ name?: string | undefined;
+ order?: number | undefined;
+ allowEmpty?: boolean | undefined;
+ allowBOM?: boolean | undefined;
+ encoding?: BufferEncoding | undefined;
+ canParse?: string | boolean | {
+ exec?: {} | undefined;
+ test?: {} | undefined;
+ readonly source?: string | undefined;
+ readonly global?: boolean | undefined;
+ readonly ignoreCase?: boolean | undefined;
+ readonly multiline?: boolean | undefined;
+ lastIndex?: number | undefined;
+ compile?: {} | undefined;
+ readonly flags?: string | undefined;
+ readonly sticky?: boolean | undefined;
+ readonly unicode?: boolean | undefined;
+ readonly dotAll?: boolean | undefined;
+ readonly hasIndices?: boolean | undefined;
+ readonly unicodeSets?: boolean | undefined;
+ [Symbol.match]?: {} | undefined;
+ [Symbol.replace]?: {} | undefined;
+ [Symbol.search]?: {} | undefined;
+ [Symbol.split]?: {} | undefined;
+ [Symbol.matchAll]?: {} | undefined;
+ } | (string | undefined)[] | {} | undefined;
+ parse?: string | number | {} | undefined;
+ } | undefined;
+ yaml?: boolean | {
+ name?: string | undefined;
+ order?: number | undefined;
+ allowEmpty?: boolean | undefined;
+ allowBOM?: boolean | undefined;
+ encoding?: BufferEncoding | undefined;
+ canParse?: string | boolean | {
+ exec?: {} | undefined;
+ test?: {} | undefined;
+ readonly source?: string | undefined;
+ readonly global?: boolean | undefined;
+ readonly ignoreCase?: boolean | undefined;
+ readonly multiline?: boolean | undefined;
+ lastIndex?: number | undefined;
+ compile?: {} | undefined;
+ readonly flags?: string | undefined;
+ readonly sticky?: boolean | undefined;
+ readonly unicode?: boolean | undefined;
+ readonly dotAll?: boolean | undefined;
+ readonly hasIndices?: boolean | undefined;
+ readonly unicodeSets?: boolean | undefined;
+ [Symbol.match]?: {} | undefined;
+ [Symbol.replace]?: {} | undefined;
+ [Symbol.search]?: {} | undefined;
+ [Symbol.split]?: {} | undefined;
+ [Symbol.matchAll]?: {} | undefined;
+ } | (string | undefined)[] | {} | undefined;
+ parse?: string | number | {} | undefined;
+ } | undefined;
+ binary?: boolean | {
+ name?: string | undefined;
+ order?: number | undefined;
+ allowEmpty?: boolean | undefined;
+ allowBOM?: boolean | undefined;
+ encoding?: BufferEncoding | undefined;
+ canParse?: string | boolean | {
+ exec?: {} | undefined;
+ test?: {} | undefined;
+ readonly source?: string | undefined;
+ readonly global?: boolean | undefined;
+ readonly ignoreCase?: boolean | undefined;
+ readonly multiline?: boolean | undefined;
+ lastIndex?: number | undefined;
+ compile?: {} | undefined;
+ readonly flags?: string | undefined;
+ readonly sticky?: boolean | undefined;
+ readonly unicode?: boolean | undefined;
+ readonly dotAll?: boolean | undefined;
+ readonly hasIndices?: boolean | undefined;
+ readonly unicodeSets?: boolean | undefined;
+ [Symbol.match]?: {} | undefined;
+ [Symbol.replace]?: {} | undefined;
+ [Symbol.search]?: {} | undefined;
+ [Symbol.split]?: {} | undefined;
+ [Symbol.matchAll]?: {} | undefined;
+ } | (string | undefined)[] | {} | undefined;
+ parse?: string | number | {} | undefined;
+ } | undefined;
+ text?: boolean | {
+ name?: string | undefined;
+ order?: number | undefined;
+ allowEmpty?: boolean | undefined;
+ allowBOM?: boolean | undefined;
+ encoding?: BufferEncoding | undefined;
+ canParse?: string | boolean | {
+ exec?: {} | undefined;
+ test?: {} | undefined;
+ readonly source?: string | undefined;
+ readonly global?: boolean | undefined;
+ readonly ignoreCase?: boolean | undefined;
+ readonly multiline?: boolean | undefined;
+ lastIndex?: number | undefined;
+ compile?: {} | undefined;
+ readonly flags?: string | undefined;
+ readonly sticky?: boolean | undefined;
+ readonly unicode?: boolean | undefined;
+ readonly dotAll?: boolean | undefined;
+ readonly hasIndices?: boolean | undefined;
+ readonly unicodeSets?: boolean | undefined;
+ [Symbol.match]?: {} | undefined;
+ [Symbol.replace]?: {} | undefined;
+ [Symbol.search]?: {} | undefined;
+ [Symbol.split]?: {} | undefined;
+ [Symbol.matchAll]?: {} | undefined;
+ } | (string | undefined)[] | {} | undefined;
+ parse?: string | number | {} | undefined;
+ } | undefined;
+ } | undefined;
+ resolve?: {
+ [x: string]: boolean | {
+ name?: string | undefined;
+ order?: number | undefined;
+ canRead?: string | boolean | {
+ exec?: {} | undefined;
+ test?: {} | undefined;
+ readonly source?: string | undefined;
+ readonly global?: boolean | undefined;
+ readonly ignoreCase?: boolean | undefined;
+ readonly multiline?: boolean | undefined;
+ lastIndex?: number | undefined;
+ compile?: {} | undefined;
+ readonly flags?: string | undefined;
+ readonly sticky?: boolean | undefined;
+ readonly unicode?: boolean | undefined;
+ readonly dotAll?: boolean | undefined;
+ readonly hasIndices?: boolean | undefined;
+ readonly unicodeSets?: boolean | undefined;
+ [Symbol.match]?: {} | undefined;
+ [Symbol.replace]?: {} | undefined;
+ [Symbol.search]?: {} | undefined;
+ [Symbol.split]?: {} | undefined;
+ [Symbol.matchAll]?: {} | undefined;
+ } | (string | undefined)[] | {} | undefined;
+ read?: string | object | {} | undefined;
+ } | {
+ headers?: ([(string | undefined)?, (string | undefined)?] | undefined)[] | {
+ [x: string]: string | undefined;
+ } | {
+ append?: {} | undefined;
+ delete?: {} | undefined;
+ get?: {} | undefined;
+ getSetCookie?: {} | undefined;
+ has?: {} | undefined;
+ set?: {} | undefined;
+ forEach?: {} | undefined;
+ } | null | undefined;
+ timeout?: number | undefined;
+ redirects?: number | undefined;
+ withCredentials?: boolean | undefined;
+ name?: string | undefined;
+ order?: number | undefined;
+ canRead?: string | boolean | {
+ exec?: {} | undefined;
+ test?: {} | undefined;
+ readonly source?: string | undefined;
+ readonly global?: boolean | undefined;
+ readonly ignoreCase?: boolean | undefined;
+ readonly multiline?: boolean | undefined;
+ lastIndex?: number | undefined;
+ compile?: {} | undefined;
+ readonly flags?: string | undefined;
+ readonly sticky?: boolean | undefined;
+ readonly unicode?: boolean | undefined;
+ readonly dotAll?: boolean | undefined;
+ readonly hasIndices?: boolean | undefined;
+ readonly unicodeSets?: boolean | undefined;
+ [Symbol.match]?: {} | undefined;
+ [Symbol.replace]?: {} | undefined;
+ [Symbol.search]?: {} | undefined;
+ [Symbol.split]?: {} | undefined;
+ [Symbol.matchAll]?: {} | undefined;
+ } | (string | undefined)[] | {} | undefined;
+ read?: string | object | {} | undefined;
+ } | undefined;
+ external?: boolean | undefined;
+ file?: boolean | {
+ name?: string | undefined;
+ order?: number | undefined;
+ canRead?: string | boolean | {
+ exec?: {} | undefined;
+ test?: {} | undefined;
+ readonly source?: string | undefined;
+ readonly global?: boolean | undefined;
+ readonly ignoreCase?: boolean | undefined;
+ readonly multiline?: boolean | undefined;
+ lastIndex?: number | undefined;
+ compile?: {} | undefined;
+ readonly flags?: string | undefined;
+ readonly sticky?: boolean | undefined;
+ readonly unicode?: boolean | undefined;
+ readonly dotAll?: boolean | undefined;
+ readonly hasIndices?: boolean | undefined;
+ readonly unicodeSets?: boolean | undefined;
+ [Symbol.match]?: {} | undefined;
+ [Symbol.replace]?: {} | undefined;
+ [Symbol.search]?: {} | undefined;
+ [Symbol.split]?: {} | undefined;
+ [Symbol.matchAll]?: {} | undefined;
+ } | (string | undefined)[] | {} | undefined;
+ read?: string | object | {} | undefined;
+ } | undefined;
+ http?: boolean | {
+ headers?: ([(string | undefined)?, (string | undefined)?] | undefined)[] | {
+ [x: string]: string | undefined;
+ } | {
+ append?: {} | undefined;
+ delete?: {} | undefined;
+ get?: {} | undefined;
+ getSetCookie?: {} | undefined;
+ has?: {} | undefined;
+ set?: {} | undefined;
+ forEach?: {} | undefined;
+ } | null | undefined;
+ timeout?: number | undefined;
+ redirects?: number | undefined;
+ withCredentials?: boolean | undefined;
+ name?: string | undefined;
+ order?: number | undefined;
+ canRead?: string | boolean | {
+ exec?: {} | undefined;
+ test?: {} | undefined;
+ readonly source?: string | undefined;
+ readonly global?: boolean | undefined;
+ readonly ignoreCase?: boolean | undefined;
+ readonly multiline?: boolean | undefined;
+ lastIndex?: number | undefined;
+ compile?: {} | undefined;
+ readonly flags?: string | undefined;
+ readonly sticky?: boolean | undefined;
+ readonly unicode?: boolean | undefined;
+ readonly dotAll?: boolean | undefined;
+ readonly hasIndices?: boolean | undefined;
+ readonly unicodeSets?: boolean | undefined;
+ [Symbol.match]?: {} | undefined;
+ [Symbol.replace]?: {} | undefined;
+ [Symbol.search]?: {} | undefined;
+ [Symbol.split]?: {} | undefined;
+ [Symbol.matchAll]?: {} | undefined;
+ } | (string | undefined)[] | {} | undefined;
+ read?: string | object | {} | undefined;
+ } | undefined;
+ } | undefined;
+ continueOnError?: boolean | undefined;
+ dereference?: {
+ circular?: (boolean | "ignore") | undefined;
+ excludedPathMatcher?: {} | undefined;
+ onDereference?: {} | undefined;
+ externalReferenceResolution?: ("relative" | "root") | undefined;
+ } | undefined;
+ mutateInputSchema?: boolean | undefined;
+ timeoutMs?: number | undefined;
+}>(options: O | undefined) => O & $RefParserOptions;
+export type Options = $RefParserOptions;
+export type ParserOptions = DeepPartial<$RefParserOptions>;
+export default $RefParserOptions;
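
For orientation, a plain options literal touching the main fields declared in this interface; every key comes from the declarations above and the concrete values are only illustrative:

    const parserOptions = {
      parse: {
        binary: false,              // disable a built-in parser outright
        text: { encoding: "utf8" }, // or tweak one of its settings
      },
      resolve: {
        external: true,
        http: { timeout: 5000, redirects: 3, withCredentials: false },
      },
      continueOnError: false,
      dereference: {
        circular: true,
        externalReferenceResolution: "relative",
      },
      mutateInputSchema: false, // operate on a deep clone of the input schema
      timeoutMs: 30000,
    };

    module.exports = parserOptions;
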
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/options.js b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/options.js
new file mode 100644
index 00000000..a4134d04
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/options.js
@@ -0,0 +1,122 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getNewOptions = exports.getJsonSchemaRefParserDefaultOptions = void 0;
+const json_js_1 = __importDefault(require("./parsers/json.js"));
+const yaml_js_1 = __importDefault(require("./parsers/yaml.js"));
+const text_js_1 = __importDefault(require("./parsers/text.js"));
+const binary_js_1 = __importDefault(require("./parsers/binary.js"));
+const file_js_1 = __importDefault(require("./resolvers/file.js"));
+const http_js_1 = __importDefault(require("./resolvers/http.js"));
+const getJsonSchemaRefParserDefaultOptions = () => {
+ const defaults = {
+ /**
+ * Determines how different types of files will be parsed.
+ *
+ * You can add additional parsers of your own, replace an existing one with
+ * your own implementation, or disable any parser by setting it to false.
+ */
+ parse: {
+ json: { ...json_js_1.default },
+ yaml: { ...yaml_js_1.default },
+ text: { ...text_js_1.default },
+ binary: { ...binary_js_1.default },
+ },
+ /**
+ * Determines how JSON References will be resolved.
+ *
+ * You can add additional resolvers of your own, replace an existing one with
+ * your own implementation, or disable any resolver by setting it to false.
+ */
+ resolve: {
+ file: { ...file_js_1.default },
+ http: { ...http_js_1.default },
+ /**
+ * Determines whether external $ref pointers will be resolved.
+ * If this option is disabled, then none of the above resolvers will be called.
+ * Instead, external $ref pointers will simply be ignored.
+ *
+ * @type {boolean}
+ */
+ external: true,
+ },
+ /**
+ * By default, JSON Schema $Ref Parser throws the first error it encounters. Setting `continueOnError` to `true`
+ * causes it to keep processing as much as possible and then throw a single error that contains all errors
+ * that were encountered.
+ */
+ continueOnError: false,
+ /**
+ * Determines the types of JSON references that are allowed.
+ */
+ dereference: {
+ /**
+ * Dereference circular (recursive) JSON references?
+ * If false, then a {@link ReferenceError} will be thrown if a circular reference is found.
+ * If "ignore", then circular references will not be dereferenced.
+ *
+ * @type {boolean|string}
+ */
+ circular: true,
+ /**
+ * A function, called for each path, which can return true to stop this path and all
+ * subpaths from being dereferenced further. This is useful in schemas where some
+ * subpaths contain literal $ref keys that should not be dereferenced.
+ *
+ * @type {function}
+ */
+ excludedPathMatcher: () => false,
+ referenceResolution: "relative",
+ },
+ mutateInputSchema: true,
+ };
+ return defaults;
+};
+exports.getJsonSchemaRefParserDefaultOptions = getJsonSchemaRefParserDefaultOptions;
+const getNewOptions = (options) => {
+ const newOptions = (0, exports.getJsonSchemaRefParserDefaultOptions)();
+ if (options) {
+ merge(newOptions, options);
+ }
+ return newOptions;
+};
+exports.getNewOptions = getNewOptions;
+/**
+ * Merges the properties of the source object into the target object.
+ *
+ * @param target - The object that we're populating
+ * @param source - The options that are being merged
+ * @returns
+ */
+function merge(target, source) {
+ if (isMergeable(source)) {
+ // prevent prototype pollution
+ const keys = Object.keys(source).filter((key) => !["__proto__", "constructor", "prototype"].includes(key));
+ for (let i = 0; i < keys.length; i++) {
+ const key = keys[i];
+ const sourceSetting = source[key];
+ const targetSetting = target[key];
+ if (isMergeable(sourceSetting)) {
+ // It's a nested object, so merge it recursively
+ target[key] = merge(targetSetting || {}, sourceSetting);
+ }
+ else if (sourceSetting !== undefined) {
+ // It's a scalar value, function, or array. No merging necessary. Just overwrite the target value.
+ target[key] = sourceSetting;
+ }
+ }
+ }
+ return target;
+}
+/**
+ * Determines whether the given value can be merged,
+ * or if it is a scalar value that should just override the target value.
+ *
+ * @param val
+ * @returns
+ */
+function isMergeable(val) {
+ return val && typeof val === "object" && !Array.isArray(val) && !(val instanceof RegExp) && !(val instanceof Date);
+}
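
The merge helper is why partial options never wipe out unrelated defaults: nested objects are merged key by key, arrays, RegExps and Dates replace the target value wholesale, and __proto__/constructor/prototype keys are skipped. A small sketch inspecting the defaults that getNewOptions starts from (only getJsonSchemaRefParserDefaultOptions is re-exported from the package root):

    const { getJsonSchemaRefParserDefaultOptions } = require("@apidevtools/json-schema-ref-parser");

    const defaults = getJsonSchemaRefParserDefaultOptions();
    console.log(Object.keys(defaults.parse));   // [ 'json', 'yaml', 'text', 'binary' ]
    console.log(defaults.dereference.circular); // true
    console.log(defaults.mutateInputSchema);    // true

    // getNewOptions({ dereference: { circular: "ignore" } }) would override only that
    // one leaf and leave excludedPathMatcher and the other defaults in place.
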
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/parse.d.ts b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/parse.d.ts
new file mode 100644
index 00000000..37e4c3b8
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/parse.d.ts
@@ -0,0 +1,8 @@
+import type $Refs from "./refs.js";
+import type { ParserOptions } from "./options.js";
+import type { JSONSchema } from "./types/index.js";
+/**
+ * Reads and parses the specified file path or URL.
+ */
+declare function parse<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(path: string, $refs: $Refs<S, O>, options: O): Promise<S>;
+export default parse;
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/parse.js b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/parse.js
new file mode 100644
index 00000000..1ce4a5f2
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/parse.js
@@ -0,0 +1,169 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const ono_1 = require("@jsdevtools/ono");
+const url = __importStar(require("./util/url.js"));
+const plugins = __importStar(require("./util/plugins.js"));
+const errors_js_1 = require("./util/errors.js");
+/**
+ * Reads and parses the specified file path or URL.
+ */
+async function parse(path, $refs, options) {
+ // Remove the URL fragment, if any
+ const hashIndex = path.indexOf("#");
+ let hash = "";
+ if (hashIndex >= 0) {
+ hash = path.substring(hashIndex);
+ // Remove the URL fragment, if any
+ path = path.substring(0, hashIndex);
+ }
+ // Add a new $Ref for this file, even though we don't have the value yet.
+ // This ensures that we don't simultaneously read & parse the same file multiple times
+ const $ref = $refs._add(path);
+ // This "file object" will be passed to all resolvers and parsers.
+ const file = {
+ url: path,
+ hash,
+ extension: url.getExtension(path),
+ };
+ // Read the file and then parse the data
+ try {
+ const resolver = await readFile(file, options, $refs);
+ $ref.pathType = resolver.plugin.name;
+ file.data = resolver.result;
+ const parser = await parseFile(file, options, $refs);
+ $ref.value = parser.result;
+ return parser.result;
+ }
+ catch (err) {
+ if ((0, errors_js_1.isHandledError)(err)) {
+ $ref.value = err;
+ }
+ throw err;
+ }
+}
+/**
+ * Reads the given file, using the configured resolver plugins
+ *
+ * @param file - An object containing information about the referenced file
+ * @param file.url - The full URL of the referenced file
+ * @param file.extension - The lowercased file extension (e.g. ".txt", ".html", etc.)
+ * @param options
+ * @param $refs
+ * @returns
+ * The promise resolves with the raw file contents and the resolver that was used.
+ */
+async function readFile(file, options, $refs) {
+ // console.log('Reading %s', file.url);
+ // Find the resolvers that can read this file
+ let resolvers = plugins.all(options.resolve);
+ resolvers = plugins.filter(resolvers, "canRead", file);
+ // Run the resolvers, in order, until one of them succeeds
+ plugins.sort(resolvers);
+ try {
+ const data = await plugins.run(resolvers, "read", file, $refs);
+ return data;
+ }
+ catch (err) {
+ if (!err && options.continueOnError) {
+ // No resolver could be matched
+ throw new errors_js_1.UnmatchedResolverError(file.url);
+ }
+ else if (!err || !("error" in err)) {
+ // Throw a generic, friendly error.
+ throw ono_1.ono.syntax(`Unable to resolve $ref pointer "${file.url}"`);
+ }
+ // Throw the original error, if it's one of our own (user-friendly) errors.
+ else if (err.error instanceof errors_js_1.ResolverError) {
+ throw err.error;
+ }
+ else {
+ throw new errors_js_1.ResolverError(err, file.url);
+ }
+ }
+}
+/**
+ * Parses the given file's contents, using the configured parser plugins.
+ *
+ * @param file - An object containing information about the referenced file
+ * @param file.url - The full URL of the referenced file
+ * @param file.extension - The lowercased file extension (e.g. ".txt", ".html", etc.)
+ * @param file.data - The file contents. This will be whatever data type was returned by the resolver
+ * @param options
+ * @param $refs
+ *
+ * @returns
+ * The promise resolves with the parsed file contents and the parser that was used.
+ */
+async function parseFile(file, options, $refs) {
+ // Find the parsers that can read this file type.
+ // If none of the parsers are an exact match for this file, then we'll try ALL of them.
+ // This handles situations where the file IS a supported type, just with an unknown extension.
+ const allParsers = plugins.all(options.parse);
+ const filteredParsers = plugins.filter(allParsers, "canParse", file);
+ const parsers = filteredParsers.length > 0 ? filteredParsers : allParsers;
+ // Run the parsers, in order, until one of them succeeds
+ plugins.sort(parsers);
+ try {
+ const parser = await plugins.run(parsers, "parse", file, $refs);
+ if (!parser.plugin.allowEmpty && isEmpty(parser.result)) {
+ throw ono_1.ono.syntax(`Error parsing "${file.url}" as ${parser.plugin.name}. \nParsed value is empty`);
+ }
+ else {
+ return parser;
+ }
+ }
+ catch (err) {
+ if (!err && options.continueOnError) {
+ // No resolver could be matched
+ throw new errors_js_1.UnmatchedParserError(file.url);
+ }
+ else if (err && err.message && err.message.startsWith("Error parsing")) {
+ throw err;
+ }
+ else if (!err || !("error" in err)) {
+ throw ono_1.ono.syntax(`Unable to parse ${file.url}`);
+ }
+ else if (err.error instanceof errors_js_1.ParserError) {
+ throw err.error;
+ }
+ else {
+ throw new errors_js_1.ParserError(err.error.message, file.url);
+ }
+ }
+}
+/**
+ * Determines whether the parsed value is "empty".
+ *
+ * @param value
+ * @returns
+ */
+function isEmpty(value) {
+ return (value === undefined ||
+ (typeof value === "object" && Object.keys(value).length === 0) ||
+ (typeof value === "string" && value.trim().length === 0) ||
+ (Buffer.isBuffer(value) && value.length === 0));
+}
+exports.default = parse;
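
readFile and parseFile select plugins by canRead/canParse, sort them by order, and take the first success, which is also how a user-supplied parser slots in through options.parse. A hedged sketch of a hypothetical CSV parser (the name, order value, and .csv handling are illustrative, not part of the library):

    const $RefParser = require("@apidevtools/json-schema-ref-parser");

    const csvParser = {
      order: 250,          // between the JSON parser (100) and the binary parser (400)
      allowEmpty: false,
      canParse: /\.csv$/i, // matched against file.url by plugins.filter()
      parse(file) {
        const text = Buffer.isBuffer(file.data) ? file.data.toString() : file.data;
        return text.trim().split("\n").map((line) => line.split(","));
      },
    };

    $RefParser.dereference("schema.json", { parse: { csv: csvParser } })
      .then(() => console.log("custom csv parser registered alongside the built-ins"))
      .catch(console.error);
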
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/parsers/binary.d.ts b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/parsers/binary.d.ts
new file mode 100644
index 00000000..b1f6df3e
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/parsers/binary.d.ts
@@ -0,0 +1,3 @@
+import type { Plugin } from "../types/index.js";
+declare const _default: Plugin;
+export default _default;
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/parsers/binary.js b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/parsers/binary.js
new file mode 100644
index 00000000..dfc1d09a
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/parsers/binary.js
@@ -0,0 +1,35 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const BINARY_REGEXP = /\.(jpeg|jpg|gif|png|bmp|ico)$/i;
+exports.default = {
+ /**
+ * The order that this parser will run, in relation to other parsers.
+ */
+ order: 400,
+ /**
+ * Whether to allow "empty" files (zero bytes).
+ */
+ allowEmpty: true,
+ /**
+ * Determines whether this parser can parse a given file reference.
+ * Parsers that return true will be tried, in order, until one successfully parses the file.
+ * Parsers that return false will be skipped, UNLESS all parsers returned false, in which case
+ * every parser will be tried.
+ */
+ canParse(file) {
+ // Use this parser if the file is a Buffer, and has a known binary extension
+ return Buffer.isBuffer(file.data) && BINARY_REGEXP.test(file.url);
+ },
+ /**
+ * Parses the given data as a Buffer (byte array).
+ */
+ parse(file) {
+ if (Buffer.isBuffer(file.data)) {
+ return file.data;
+ }
+ else {
+ // This will reject if data is anything other than a string or typed array
+ return Buffer.from(file.data);
+ }
+ },
+};
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/parsers/json.d.ts b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/parsers/json.d.ts
new file mode 100644
index 00000000..b1f6df3e
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/parsers/json.d.ts
@@ -0,0 +1,3 @@
+import type { Plugin } from "../types/index.js";
+declare const _default: Plugin;
+export default _default;
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/parsers/json.js b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/parsers/json.js
new file mode 100644
index 00000000..e67280cc
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/parsers/json.js
@@ -0,0 +1,62 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const errors_js_1 = require("../util/errors.js");
+exports.default = {
+ /**
+ * The order that this parser will run, in relation to other parsers.
+ */
+ order: 100,
+ /**
+ * Whether to allow "empty" files. This includes zero-byte files, as well as empty JSON objects.
+ */
+ allowEmpty: true,
+ /**
+ * Determines whether this parser can parse a given file reference.
+ * Parsers that match will be tried, in order, until one successfully parses the file.
+ * Parsers that don't match will be skipped, UNLESS none of the parsers match, in which case
+ * every parser will be tried.
+ */
+ canParse: ".json",
+ /**
+ * Allow JSON files with byte order marks (BOM)
+ */
+ allowBOM: true,
+ /**
+ * Parses the given file as JSON
+ */
+ async parse(file) {
+ let data = file.data;
+ if (Buffer.isBuffer(data)) {
+ data = data.toString();
+ }
+ if (typeof data === "string") {
+ if (data.trim().length === 0) {
+ return; // This mirrors the YAML behavior
+ }
+ else {
+ try {
+ return JSON.parse(data);
+ }
+ catch (e) {
+ if (this.allowBOM) {
+ try {
+ // find the first curly brace
+ const firstCurlyBrace = data.indexOf("{");
+ // remove any characters before the first curly brace
+ data = data.slice(firstCurlyBrace);
+ return JSON.parse(data);
+ }
+ catch (e) {
+ throw new errors_js_1.ParserError(e.message, file.url);
+ }
+ }
+ throw new errors_js_1.ParserError(e.message, file.url);
+ }
+ }
+ }
+ else {
+ // data is already a JavaScript value (object, array, number, null, NaN, etc.)
+ return data;
+ }
+ },
+};
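
The `allowBOM` fallback above retries after slicing to the first curly brace because `JSON.parse` rejects a leading byte-order mark. A standalone sketch of that recovery (the sample document is illustrative):

const raw = "\uFEFF" + JSON.stringify({ openapi: "3.0.0" });

let parsed;
try {
  parsed = JSON.parse(raw); // throws: a BOM is not valid JSON whitespace
} catch {
  // Same recovery as the allowBOM branch: drop everything before the first "{"
  parsed = JSON.parse(raw.slice(raw.indexOf("{")));
}
console.log(parsed.openapi); // "3.0.0"
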
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/parsers/text.d.ts b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/parsers/text.d.ts
new file mode 100644
index 00000000..b1f6df3e
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/parsers/text.d.ts
@@ -0,0 +1,3 @@
+import type { Plugin } from "../types/index.js";
+declare const _default: Plugin;
+export default _default;
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/parsers/text.js b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/parsers/text.js
new file mode 100644
index 00000000..0740d040
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/parsers/text.js
@@ -0,0 +1,42 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const errors_js_1 = require("../util/errors.js");
+const TEXT_REGEXP = /\.(txt|htm|html|md|xml|js|min|map|css|scss|less|svg)$/i;
+exports.default = {
+ /**
+ * The order that this parser will run, in relation to other parsers.
+ */
+ order: 300,
+ /**
+ * Whether to allow "empty" files (zero bytes).
+ */
+ allowEmpty: true,
+ /**
+ * The encoding that the text is expected to be in.
+ */
+ encoding: "utf8",
+ /**
+ * Determines whether this parser can parse a given file reference.
+ * Parsers that return true will be tried, in order, until one successfully parses the file.
+ * Parsers that return false will be skipped, UNLESS all parsers returned false, in which case
+ * every parser will be tried.
+ */
+ canParse(file) {
+ // Use this parser if the file is a string or Buffer, and has a known text-based extension
+ return (typeof file.data === "string" || Buffer.isBuffer(file.data)) && TEXT_REGEXP.test(file.url);
+ },
+ /**
+ * Parses the given file as text
+ */
+ parse(file) {
+ if (typeof file.data === "string") {
+ return file.data;
+ }
+ else if (Buffer.isBuffer(file.data)) {
+ return file.data.toString(this.encoding);
+ }
+ else {
+ throw new errors_js_1.ParserError("data is not text", file.url);
+ }
+ },
+};
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/parsers/yaml.d.ts b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/parsers/yaml.d.ts
new file mode 100644
index 00000000..b1f6df3e
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/parsers/yaml.d.ts
@@ -0,0 +1,3 @@
+import type { Plugin } from "../types/index.js";
+declare const _default: Plugin;
+export default _default;
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/parsers/yaml.js b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/parsers/yaml.js
new file mode 100644
index 00000000..29addd9b
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/parsers/yaml.js
@@ -0,0 +1,52 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const errors_js_1 = require("../util/errors.js");
+const js_yaml_1 = __importDefault(require("js-yaml"));
+const js_yaml_2 = require("js-yaml");
+exports.default = {
+ /**
+ * The order that this parser will run, in relation to other parsers.
+ */
+ order: 200,
+ /**
+ * Whether to allow "empty" files. This includes zero-byte files, as well as empty JSON objects.
+ */
+ allowEmpty: true,
+ /**
+ * Determines whether this parser can parse a given file reference.
+ * Parsers that match will be tried, in order, until one successfully parses the file.
+ * Parsers that don't match will be skipped, UNLESS none of the parsers match, in which case
+ * every parser will be tried.
+ */
+ canParse: [".yaml", ".yml", ".json"], // JSON is valid YAML
+ /**
+ * Parses the given file as YAML
+ *
+ * @param file - An object containing information about the referenced file
+ * @param file.url - The full URL of the referenced file
+ * @param file.extension - The lowercased file extension (e.g. ".txt", ".html", etc.)
+ * @param file.data - The file contents. This will be whatever data type was returned by the resolver
+ * @returns
+ */
+ async parse(file) {
+ let data = file.data;
+ if (Buffer.isBuffer(data)) {
+ data = data.toString();
+ }
+ if (typeof data === "string") {
+ try {
+ return js_yaml_1.default.load(data, { schema: js_yaml_2.JSON_SCHEMA });
+ }
+ catch (e) {
+ throw new errors_js_1.ParserError(e?.message || "Parser Error", file.url);
+ }
+ }
+ else {
+ // data is already a JavaScript value (object, array, number, null, NaN, etc.)
+ return data;
+ }
+ },
+};
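
Because JSON is a subset of YAML, the YAML parser also lists `.json` in `canParse`. A small sketch of the same `js-yaml` call with the `JSON_SCHEMA` used above:

const { load, JSON_SCHEMA } = require("js-yaml");

// A JSON document parses cleanly under the YAML JSON schema.
const doc = load('{"openapi": "3.0.0", "paths": {}}', { schema: JSON_SCHEMA });
console.log(doc.openapi); // "3.0.0"
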
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/pointer.d.ts b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/pointer.d.ts
new file mode 100644
index 00000000..c93476ff
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/pointer.d.ts
@@ -0,0 +1,88 @@
+import type { ParserOptions } from "./options.js";
+import $Ref from "./ref.js";
+import type { JSONSchema } from "./types";
+/**
+ * This class represents a single JSON pointer and its resolved value.
+ *
+ * @param $ref
+ * @param path
+ * @param [friendlyPath] - The original user-specified path (used for error messages)
+ * @class
+ */
+declare class Pointer<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>> {
+ /**
+ * The {@link $Ref} object that contains this {@link Pointer} object.
+ */
+ $ref: $Ref<S, O>;
+ /**
+ * The file path or URL, containing the JSON pointer in the hash.
+ * This path is relative to the path of the main JSON schema file.
+ */
+ path: string;
+ /**
+ * The original path or URL, used for error messages.
+ */
+ originalPath: string;
+ /**
+ * The value of the JSON pointer.
+ * Can be any JSON type, not just objects. Unknown file types are represented as Buffers (byte arrays).
+ */
+ value: any;
+ /**
+ * Indicates whether the pointer references itself.
+ */
+ circular: boolean;
+ /**
+ * The number of indirect references that were traversed to resolve the value.
+ * Resolving a single pointer may require resolving multiple $Refs.
+ */
+ indirections: number;
+ constructor($ref: $Ref<S, O>, path: string, friendlyPath?: string);
+ /**
+ * Resolves the value of a nested property within the given object.
+ *
+ * @param obj - The object that will be crawled
+ * @param options
+ * @param pathFromRoot - the path of the place that initiated resolving
+ *
+ * @returns
+ * Returns a JSON pointer whose {@link Pointer#value} is the resolved value.
+ * If resolving this value required resolving other JSON references, then
+ * the {@link Pointer#$ref} and {@link Pointer#path} will reflect the resolution path
+ * of the resolved value.
+ */
+ resolve(obj: S, options?: O, pathFromRoot?: string): this;
+ /**
+ * Sets the value of a nested property within the given object.
+ *
+ * @param obj - The object that will be crawled
+ * @param value - the value to assign
+ * @param options
+ *
+ * @returns
+ * Returns the modified object, or an entirely new object if the entire object is overwritten.
+ */
+ set(obj: S, value: any, options?: O): any;
+ /**
+ * Parses a JSON pointer (or a path containing a JSON pointer in the hash)
+ * and returns an array of the pointer's tokens.
+ * (e.g. "schema.json#/definitions/person/name" => ["definitions", "person", "name"])
+ *
+ * The pointer is parsed according to RFC 6901
+ * {@link https://tools.ietf.org/html/rfc6901#section-3}
+ *
+ * @param path
+ * @param [originalPath]
+ * @returns
+ */
+ static parse(path: string, originalPath?: string): string[];
+ /**
+ * Creates a JSON pointer path, by joining one or more tokens to a base path.
+ *
+ * @param base - The base path (e.g. "schema.json#/definitions/person")
+ * @param tokens - The token(s) to append (e.g. ["name", "first"])
+ * @returns
+ */
+ static join(base: string, tokens: string | string[]): string;
+}
+export default Pointer;
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/pointer.js b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/pointer.js
new file mode 100644
index 00000000..2a71de61
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/pointer.js
@@ -0,0 +1,283 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const ref_js_1 = __importDefault(require("./ref.js"));
+const url = __importStar(require("./util/url.js"));
+const errors_js_1 = require("./util/errors.js");
+const slashes = /\//g;
+const tildes = /~/g;
+const escapedSlash = /~1/g;
+const escapedTilde = /~0/g;
+const safeDecodeURIComponent = (encodedURIComponent) => {
+ try {
+ return decodeURIComponent(encodedURIComponent);
+ }
+ catch {
+ return encodedURIComponent;
+ }
+};
+/**
+ * This class represents a single JSON pointer and its resolved value.
+ *
+ * @param $ref
+ * @param path
+ * @param [friendlyPath] - The original user-specified path (used for error messages)
+ * @class
+ */
+class Pointer {
+ constructor($ref, path, friendlyPath) {
+ this.$ref = $ref;
+ this.path = path;
+ this.originalPath = friendlyPath || path;
+ this.value = undefined;
+ this.circular = false;
+ this.indirections = 0;
+ }
+ /**
+ * Resolves the value of a nested property within the given object.
+ *
+ * @param obj - The object that will be crawled
+ * @param options
+ * @param pathFromRoot - the path of the place that initiated resolving
+ *
+ * @returns
+ * Returns a JSON pointer whose {@link Pointer#value} is the resolved value.
+ * If resolving this value required resolving other JSON references, then
+ * the {@link Pointer#$ref} and {@link Pointer#path} will reflect the resolution path
+ * of the resolved value.
+ */
+ resolve(obj, options, pathFromRoot) {
+ const tokens = Pointer.parse(this.path, this.originalPath);
+ // Crawl the object, one token at a time
+ this.value = unwrapOrThrow(obj);
+ for (let i = 0; i < tokens.length; i++) {
+ if (resolveIf$Ref(this, options, pathFromRoot)) {
+ // The $ref path has changed, so append the remaining tokens to the path
+ this.path = Pointer.join(this.path, tokens.slice(i));
+ }
+ if (typeof this.value === "object" && this.value !== null && !isRootPath(pathFromRoot) && "$ref" in this.value) {
+ return this;
+ }
+ const token = tokens[i];
+ if (this.value[token] === undefined || (this.value[token] === null && i === tokens.length - 1)) {
+ // one final case is if the entry itself includes slashes, and was parsed out as a token - we can join the remaining tokens and try again
+ let didFindSubstringSlashMatch = false;
+ for (let j = tokens.length - 1; j > i; j--) {
+ const joinedToken = tokens.slice(i, j + 1).join("/");
+ if (this.value[joinedToken] !== undefined) {
+ this.value = this.value[joinedToken];
+ i = j;
+ didFindSubstringSlashMatch = true;
+ break;
+ }
+ }
+ if (didFindSubstringSlashMatch) {
+ continue;
+ }
+ this.value = null;
+ throw new errors_js_1.MissingPointerError(token, decodeURI(this.originalPath));
+ }
+ else {
+ this.value = this.value[token];
+ }
+ }
+ // Resolve the final value
+ if (!this.value || (this.value.$ref && url.resolve(this.path, this.value.$ref) !== pathFromRoot)) {
+ resolveIf$Ref(this, options, pathFromRoot);
+ }
+ return this;
+ }
+ /**
+ * Sets the value of a nested property within the given object.
+ *
+ * @param obj - The object that will be crawled
+ * @param value - the value to assign
+ * @param options
+ *
+ * @returns
+ * Returns the modified object, or an entirely new object if the entire object is overwritten.
+ */
+ set(obj, value, options) {
+ const tokens = Pointer.parse(this.path);
+ let token;
+ if (tokens.length === 0) {
+ // There are no tokens, replace the entire object with the new value
+ this.value = value;
+ return value;
+ }
+ // Crawl the object, one token at a time
+ this.value = unwrapOrThrow(obj);
+ for (let i = 0; i < tokens.length - 1; i++) {
+ resolveIf$Ref(this, options);
+ token = tokens[i];
+ if (this.value && this.value[token] !== undefined) {
+ // The token exists
+ this.value = this.value[token];
+ }
+ else {
+ // The token doesn't exist, so create it
+ this.value = setValue(this, token, {});
+ }
+ }
+ // Set the value of the final token
+ resolveIf$Ref(this, options);
+ token = tokens[tokens.length - 1];
+ setValue(this, token, value);
+ // Return the updated object
+ return obj;
+ }
+ /**
+ * Parses a JSON pointer (or a path containing a JSON pointer in the hash)
+ * and returns an array of the pointer's tokens.
+ * (e.g. "schema.json#/definitions/person/name" => ["definitions", "person", "name"])
+ *
+ * The pointer is parsed according to RFC 6901
+ * {@link https://tools.ietf.org/html/rfc6901#section-3}
+ *
+ * @param path
+ * @param [originalPath]
+ * @returns
+ */
+ static parse(path, originalPath) {
+ // Get the JSON pointer from the path's hash
+ const pointer = url.getHash(path).substring(1);
+ // If there's no pointer, then there are no tokens,
+ // so return an empty array
+ if (!pointer) {
+ return [];
+ }
+ // Split into an array
+ const split = pointer.split("/");
+ // Decode each part, according to RFC 6901
+ for (let i = 0; i < split.length; i++) {
+ split[i] = safeDecodeURIComponent(split[i].replace(escapedSlash, "/").replace(escapedTilde, "~"));
+ }
+ if (split[0] !== "") {
+ throw new errors_js_1.InvalidPointerError(split, originalPath === undefined ? path : originalPath);
+ }
+ return split.slice(1);
+ }
+ /**
+ * Creates a JSON pointer path, by joining one or more tokens to a base path.
+ *
+ * @param base - The base path (e.g. "schema.json#/definitions/person")
+ * @param tokens - The token(s) to append (e.g. ["name", "first"])
+ * @returns
+ */
+ static join(base, tokens) {
+ // Ensure that the base path contains a hash
+ if (base.indexOf("#") === -1) {
+ base += "#";
+ }
+ // Append each token to the base path
+ tokens = Array.isArray(tokens) ? tokens : [tokens];
+ for (let i = 0; i < tokens.length; i++) {
+ const token = tokens[i];
+ // Encode the token, according to RFC 6901
+ base += "/" + encodeURIComponent(token.replace(tildes, "~0").replace(slashes, "~1"));
+ }
+ return base;
+ }
+}
+/**
+ * If the given pointer's {@link Pointer#value} is a JSON reference,
+ * then the reference is resolved and {@link Pointer#value} is replaced with the resolved value.
+ * In addition, {@link Pointer#path} and {@link Pointer#$ref} are updated to reflect the
+ * resolution path of the new value.
+ *
+ * @param pointer
+ * @param options
+ * @param [pathFromRoot] - the path of the place that initiated resolving
+ * @returns - Returns `true` if the resolution path changed
+ */
+function resolveIf$Ref(pointer, options, pathFromRoot) {
+ // Is the value a JSON reference? (and allowed?)
+ if (ref_js_1.default.isAllowed$Ref(pointer.value, options)) {
+ const $refPath = url.resolve(pointer.path, pointer.value.$ref);
+ if ($refPath === pointer.path && !isRootPath(pathFromRoot)) {
+ // The value is a reference to itself, so there's nothing to do.
+ pointer.circular = true;
+ }
+ else {
+ const resolved = pointer.$ref.$refs._resolve($refPath, pointer.path, options);
+ if (resolved === null) {
+ return false;
+ }
+ pointer.indirections += resolved.indirections + 1;
+ if (ref_js_1.default.isExtended$Ref(pointer.value)) {
+ // This JSON reference "extends" the resolved value, rather than simply pointing to it.
+ // So the resolved path does NOT change. Just the value does.
+ pointer.value = ref_js_1.default.dereference(pointer.value, resolved.value);
+ return false;
+ }
+ else {
+ // Resolve the reference
+ pointer.$ref = resolved.$ref;
+ pointer.path = resolved.path;
+ pointer.value = resolved.value;
+ }
+ return true;
+ }
+ }
+ return undefined;
+}
+exports.default = Pointer;
+/**
+ * Sets the specified token value of the {@link Pointer#value}.
+ *
+ * The token is evaluated according to RFC 6901.
+ * {@link https://tools.ietf.org/html/rfc6901#section-4}
+ *
+ * @param pointer - The JSON Pointer whose value will be modified
+ * @param token - A JSON Pointer token that indicates how to modify `obj`
+ * @param value - The value to assign
+ * @returns - Returns the assigned value
+ */
+function setValue(pointer, token, value) {
+ if (pointer.value && typeof pointer.value === "object") {
+ if (token === "-" && Array.isArray(pointer.value)) {
+ pointer.value.push(value);
+ }
+ else {
+ pointer.value[token] = value;
+ }
+ }
+ else {
+ throw new errors_js_1.JSONParserError(`Error assigning $ref pointer "${pointer.path}". \nCannot set "${token}" of a non-object.`);
+ }
+ return value;
+}
+function unwrapOrThrow(value) {
+ if ((0, errors_js_1.isHandledError)(value)) {
+ throw value;
+ }
+ return value;
+}
+function isRootPath(pathFromRoot) {
+ return typeof pathFromRoot == "string" && Pointer.parse(pathFromRoot).length == 0;
+}
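
The token handling in `Pointer.parse` and `Pointer.join` follows RFC 6901: `~1` stands for `/` and `~0` stands for `~`, decoded and re-encoded in the same order as above. A standalone sketch of just that escaping (not importing the internal module):

// Decode in the order used by Pointer.parse: "~1" -> "/" first, then "~0" -> "~".
function decodeToken(token) {
  return decodeURIComponent(token.replace(/~1/g, "/").replace(/~0/g, "~"));
}

// Encode in the order used by Pointer.join: "~" -> "~0" first, then "/" -> "~1".
function encodeToken(token) {
  return encodeURIComponent(token.replace(/~/g, "~0").replace(/\//g, "~1"));
}

console.log(decodeToken("a~1b~0c")); // "a/b~c"
console.log(encodeToken("a/b~c"));   // "a~1b~0c"
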
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/ref.d.ts b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/ref.d.ts
new file mode 100644
index 00000000..63390b07
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/ref.d.ts
@@ -0,0 +1,181 @@
+import Pointer from "./pointer.js";
+import type { JSONParserError, MissingPointerError, ParserError, ResolverError } from "./util/errors.js";
+import type $Refs from "./refs.js";
+import type { ParserOptions } from "./options.js";
+import type { JSONSchema } from "./types";
+export type $RefError = JSONParserError | ResolverError | ParserError | MissingPointerError;
+/**
+ * This class represents a single JSON reference and its resolved value.
+ *
+ * @class
+ */
+declare class $Ref<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>> {
+ /**
+ * The file path or URL of the referenced file.
+ * This path is relative to the path of the main JSON schema file.
+ *
+ * This path does NOT contain document fragments (JSON pointers). It always references an ENTIRE file.
+ * Use methods such as {@link $Ref#get}, {@link $Ref#resolve}, and {@link $Ref#exists} to get
+ * specific JSON pointers within the file.
+ *
+ * @type {string}
+ */
+ path: undefined | string;
+ /**
+ * The resolved value of the JSON reference.
+ * Can be any JSON type, not just objects. Unknown file types are represented as Buffers (byte arrays).
+ *
+ * @type {?*}
+ */
+ value: any;
+ /**
+ * The {@link $Refs} object that contains this {@link $Ref} object.
+ *
+ * @type {$Refs}
+ */
+ $refs: $Refs<S, O>;
+ /**
+ * Indicates the type of {@link $Ref#path} (e.g. "file", "http", etc.)
+ */
+ pathType: string | unknown;
+ /**
+ * List of all errors. Undefined if no errors.
+ */
+ errors: Array<$RefError>;
+ constructor($refs: $Refs);
+ /**
+ * Pushes an error to errors array.
+ *
+ * @param err - The error to be pushed
+ * @returns
+ */
+ addError(err: $RefError): void;
+ /**
+ * Determines whether the given JSON reference exists within this {@link $Ref#value}.
+ *
+ * @param path - The full path being resolved, optionally with a JSON pointer in the hash
+ * @param options
+ * @returns
+ */
+ exists(path: string, options?: O): boolean;
+ /**
+ * Resolves the given JSON reference within this {@link $Ref#value} and returns the resolved value.
+ *
+ * @param path - The full path being resolved, optionally with a JSON pointer in the hash
+ * @param options
+ * @returns - Returns the resolved value
+ */
+ get(path: string, options?: O): any;
+ /**
+ * Resolves the given JSON reference within this {@link $Ref#value}.
+ *
+ * @param path - The full path being resolved, optionally with a JSON pointer in the hash
+ * @param options
+ * @param friendlyPath - The original user-specified path (used for error messages)
+ * @param pathFromRoot - The path of `obj` from the schema root
+ * @returns
+ */
+ resolve(path: string, options?: O, friendlyPath?: string, pathFromRoot?: string): Pointer<S, O> | null;
+ /**
+ * Sets the value of a nested property within this {@link $Ref#value}.
+ * If the property, or any of its parents don't exist, they will be created.
+ *
+ * @param path - The full path of the property to set, optionally with a JSON pointer in the hash
+ * @param value - The value to assign
+ */
+ set(path: string, value: any): void;
+ /**
+ * Determines whether the given value is a JSON reference.
+ *
+ * @param value - The value to inspect
+ * @returns
+ */
+ static is$Ref(value: unknown): value is {
+ $ref: string;
+ length?: number;
+ };
+ /**
+ * Determines whether the given value is an external JSON reference.
+ *
+ * @param value - The value to inspect
+ * @returns
+ */
+ static isExternal$Ref(value: unknown): boolean;
+ /**
+ * Determines whether the given value is a JSON reference, and whether it is allowed by the options.
+ * For example, if it references an external file, then options.resolve.external must be true.
+ *
+ * @param value - The value to inspect
+ * @param options
+ * @returns
+ */
+ static isAllowed$Ref(value: unknown, options?: ParserOptions): true | undefined;
+ /**
+ * Determines whether the given value is a JSON reference that "extends" its resolved value.
+ * That is, it has extra properties (in addition to "$ref"), so rather than simply pointing to
+ * an existing value, this $ref actually creates a NEW value that is a shallow copy of the resolved
+ * value, plus the extra properties.
+ *
+ * @example: {
+ person: {
+ properties: {
+ firstName: { type: string }
+ lastName: { type: string }
+ }
+ }
+ employee: {
+ properties: {
+ $ref: #/person/properties
+ salary: { type: number }
+ }
+ }
+ }
+ * In this example, "employee" is an extended $ref, since it extends "person" with an additional
+ * property (salary). The result is a NEW value that looks like this:
+ *
+ * {
+ * properties: {
+ * firstName: { type: string }
+ * lastName: { type: string }
+ * salary: { type: number }
+ * }
+ * }
+ *
+ * @param value - The value to inspect
+ * @returns
+ */
+ static isExtended$Ref(value: unknown): boolean;
+ /**
+ * Returns the resolved value of a JSON Reference.
+ * If necessary, the resolved value is merged with the JSON Reference to create a new object
+ *
+ * @example: {
+ person: {
+ properties: {
+ firstName: { type: string }
+ lastName: { type: string }
+ }
+ }
+ employee: {
+ properties: {
+ $ref: #/person/properties
+ salary: { type: number }
+ }
+ }
+ } When "person" and "employee" are merged, you end up with the following object:
+ *
+ * {
+ * properties: {
+ * firstName: { type: string }
+ * lastName: { type: string }
+ * salary: { type: number }
+ * }
+ * }
+ *
+ * @param $ref - The JSON reference object (the one with the "$ref" property)
+ * @param resolvedValue - The resolved value, which can be any type
+ * @returns - Returns the dereferenced value
+ */
+ static dereference<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>($ref: $Ref<S, O>, resolvedValue: S): S;
+}
+export default $Ref;
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/ref.js b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/ref.js
new file mode 100644
index 00000000..acf2e217
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/ref.js
@@ -0,0 +1,243 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const pointer_js_1 = __importDefault(require("./pointer.js"));
+const errors_js_1 = require("./util/errors.js");
+const url_js_1 = require("./util/url.js");
+/**
+ * This class represents a single JSON reference and its resolved value.
+ *
+ * @class
+ */
+class $Ref {
+ constructor($refs) {
+ /**
+ * List of all errors. Undefined if no errors.
+ */
+ this.errors = [];
+ this.$refs = $refs;
+ }
+ /**
+ * Pushes an error to errors array.
+ *
+ * @param err - The error to be pushed
+ * @returns
+ */
+ addError(err) {
+ if (this.errors === undefined) {
+ this.errors = [];
+ }
+ const existingErrors = this.errors.map(({ footprint }) => footprint);
+ // the path has been almost certainly set at this point,
+ // but just in case something went wrong, normalizeError injects path if necessary
+ // moreover, certain errors might point at the same spot, so filter them out to reduce noise
+ if ("errors" in err && Array.isArray(err.errors)) {
+ this.errors.push(...err.errors.map(errors_js_1.normalizeError).filter(({ footprint }) => !existingErrors.includes(footprint)));
+ }
+ else if (!("footprint" in err) || !existingErrors.includes(err.footprint)) {
+ this.errors.push((0, errors_js_1.normalizeError)(err));
+ }
+ }
+ /**
+ * Determines whether the given JSON reference exists within this {@link $Ref#value}.
+ *
+ * @param path - The full path being resolved, optionally with a JSON pointer in the hash
+ * @param options
+ * @returns
+ */
+ exists(path, options) {
+ try {
+ this.resolve(path, options);
+ return true;
+ }
+ catch {
+ return false;
+ }
+ }
+ /**
+ * Resolves the given JSON reference within this {@link $Ref#value} and returns the resolved value.
+ *
+ * @param path - The full path being resolved, optionally with a JSON pointer in the hash
+ * @param options
+ * @returns - Returns the resolved value
+ */
+ get(path, options) {
+ return this.resolve(path, options)?.value;
+ }
+ /**
+ * Resolves the given JSON reference within this {@link $Ref#value}.
+ *
+ * @param path - The full path being resolved, optionally with a JSON pointer in the hash
+ * @param options
+ * @param friendlyPath - The original user-specified path (used for error messages)
+ * @param pathFromRoot - The path of `obj` from the schema root
+ * @returns
+ */
+ resolve(path, options, friendlyPath, pathFromRoot) {
+ const pointer = new pointer_js_1.default(this, path, friendlyPath);
+ try {
+ return pointer.resolve(this.value, options, pathFromRoot);
+ }
+ catch (err) {
+ if (!options || !options.continueOnError || !(0, errors_js_1.isHandledError)(err)) {
+ throw err;
+ }
+ if (err.path === null) {
+ err.path = (0, url_js_1.safePointerToPath)((0, url_js_1.getHash)(pathFromRoot));
+ }
+ if (err instanceof errors_js_1.InvalidPointerError) {
+ err.source = decodeURI((0, url_js_1.stripHash)(pathFromRoot));
+ }
+ this.addError(err);
+ return null;
+ }
+ }
+ /**
+ * Sets the value of a nested property within this {@link $Ref#value}.
+ * If the property, or any of its parents don't exist, they will be created.
+ *
+ * @param path - The full path of the property to set, optionally with a JSON pointer in the hash
+ * @param value - The value to assign
+ */
+ set(path, value) {
+ const pointer = new pointer_js_1.default(this, path);
+ this.value = pointer.set(this.value, value);
+ }
+ /**
+ * Determines whether the given value is a JSON reference.
+ *
+ * @param value - The value to inspect
+ * @returns
+ */
+ static is$Ref(value) {
+ return (Boolean(value) &&
+ typeof value === "object" &&
+ value !== null &&
+ "$ref" in value &&
+ typeof value.$ref === "string" &&
+ value.$ref.length > 0);
+ }
+ /**
+ * Determines whether the given value is an external JSON reference.
+ *
+ * @param value - The value to inspect
+ * @returns
+ */
+ static isExternal$Ref(value) {
+ return $Ref.is$Ref(value) && value.$ref[0] !== "#";
+ }
+ /**
+ * Determines whether the given value is a JSON reference, and whether it is allowed by the options.
+ * For example, if it references an external file, then options.resolve.external must be true.
+ *
+ * @param value - The value to inspect
+ * @param options
+ * @returns
+ */
+ static isAllowed$Ref(value, options) {
+ if (this.is$Ref(value)) {
+ if (value.$ref.substring(0, 2) === "#/" || value.$ref === "#") {
+ // It's a JSON Pointer reference, which is always allowed
+ return true;
+ }
+ else if (value.$ref[0] !== "#" && (!options || options.resolve?.external)) {
+ // It's an external reference, which is allowed by the options
+ return true;
+ }
+ }
+ return undefined;
+ }
+ /**
+ * Determines whether the given value is a JSON reference that "extends" its resolved value.
+ * That is, it has extra properties (in addition to "$ref"), so rather than simply pointing to
+ * an existing value, this $ref actually creates a NEW value that is a shallow copy of the resolved
+ * value, plus the extra properties.
+ *
+ * @example: {
+ person: {
+ properties: {
+ firstName: { type: string }
+ lastName: { type: string }
+ }
+ }
+ employee: {
+ properties: {
+ $ref: #/person/properties
+ salary: { type: number }
+ }
+ }
+ }
+ * In this example, "employee" is an extended $ref, since it extends "person" with an additional
+ * property (salary). The result is a NEW value that looks like this:
+ *
+ * {
+ * properties: {
+ * firstName: { type: string }
+ * lastName: { type: string }
+ * salary: { type: number }
+ * }
+ * }
+ *
+ * @param value - The value to inspect
+ * @returns
+ */
+ static isExtended$Ref(value) {
+ return $Ref.is$Ref(value) && Object.keys(value).length > 1;
+ }
+ /**
+ * Returns the resolved value of a JSON Reference.
+ * If necessary, the resolved value is merged with the JSON Reference to create a new object
+ *
+ * @example: {
+ person: {
+ properties: {
+ firstName: { type: string }
+ lastName: { type: string }
+ }
+ }
+ employee: {
+ properties: {
+ $ref: #/person/properties
+ salary: { type: number }
+ }
+ }
+ } When "person" and "employee" are merged, you end up with the following object:
+ *
+ * {
+ * properties: {
+ * firstName: { type: string }
+ * lastName: { type: string }
+ * salary: { type: number }
+ * }
+ * }
+ *
+ * @param $ref - The JSON reference object (the one with the "$ref" property)
+ * @param resolvedValue - The resolved value, which can be any type
+ * @returns - Returns the dereferenced value
+ */
+ static dereference($ref, resolvedValue) {
+ if (resolvedValue && typeof resolvedValue === "object" && $Ref.isExtended$Ref($ref)) {
+ const merged = {};
+ for (const key of Object.keys($ref)) {
+ if (key !== "$ref") {
+ // @ts-expect-error TS(7053): Element implicitly has an 'any' type because expre... Remove this comment to see the full error message
+ merged[key] = $ref[key];
+ }
+ }
+ for (const key of Object.keys(resolvedValue)) {
+ if (!(key in merged)) {
+ // @ts-expect-error TS(7053): Element implicitly has an 'any' type because expre... Remove this comment to see the full error message
+ merged[key] = resolvedValue[key];
+ }
+ }
+ return merged;
+ }
+ else {
+ // Completely replace the original reference with the resolved value
+ return resolvedValue;
+ }
+ }
+}
+exports.default = $Ref;
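
The `dereference` method above implements the "extended $ref" merge described in its comment: sibling keys of `$ref` win, and the resolved value only fills in keys that are still missing. A plain-object sketch of that merge, using the person/employee example from the comment:

const resolvedValue = {
  firstName: { type: "string" },
  lastName: { type: "string" },
};
const extendedRef = { $ref: "#/person/properties", salary: { type: "number" } };

// Same two passes as $Ref.dereference: copy the $ref's siblings, then fill gaps.
const merged = {};
for (const key of Object.keys(extendedRef)) {
  if (key !== "$ref") merged[key] = extendedRef[key];
}
for (const key of Object.keys(resolvedValue)) {
  if (!(key in merged)) merged[key] = resolvedValue[key];
}
console.log(merged); // { salary: ..., firstName: ..., lastName: ... }
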
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/refs.d.ts b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/refs.d.ts
new file mode 100644
index 00000000..00c7448b
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/refs.d.ts
@@ -0,0 +1,127 @@
+import $Ref from "./ref.js";
+import type { JSONSchema4Type, JSONSchema6Type, JSONSchema7Type } from "json-schema";
+import type { ParserOptions } from "./options.js";
+import type { JSONSchema } from "./types";
+interface $RefsMap<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>> {
+ [url: string]: $Ref<S, O>;
+}
+/**
+ * When you call the resolve method, the value that gets passed to the callback function (or Promise) is a $Refs object. This same object is accessible via the parser.$refs property of $RefParser objects.
+ *
+ * This object is a map of JSON References and their resolved values. It also has several convenient helper methods that make it easy for you to navigate and manipulate the JSON References.
+ *
+ * See https://apitools.dev/json-schema-ref-parser/docs/refs.html
+ */
+export default class $Refs<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>> {
+ /**
+ * This property is true if the schema contains any circular references. You may want to check this property before serializing the dereferenced schema as JSON, since JSON.stringify() does not support circular references by default.
+ *
+ * See https://apitools.dev/json-schema-ref-parser/docs/refs.html#circular
+ */
+ circular: boolean;
+ /**
+ * Returns the paths/URLs of all the files in your schema (including the main schema file).
+ *
+ * See https://apitools.dev/json-schema-ref-parser/docs/refs.html#pathstypes
+ *
+ * @param types (optional) Optionally only return certain types of paths ("file", "http", etc.)
+ */
+ paths(...types: (string | string[])[]): string[];
+ /**
+ * Returns a map of paths/URLs and their corresponding values.
+ *
+ * See https://apitools.dev/json-schema-ref-parser/docs/refs.html#valuestypes
+ *
+ * @param types (optional) Optionally only return values from certain locations ("file", "http", etc.)
+ */
+ values(...types: (string | string[])[]): S;
+ /**
+ * Returns `true` if the given path exists in the schema; otherwise, returns `false`
+ *
+ * See https://apitools.dev/json-schema-ref-parser/docs/refs.html#existsref
+ *
+ * @param $ref The JSON Reference path, optionally with a JSON Pointer in the hash
+ */
+ /**
+ * Determines whether the given JSON reference exists.
+ *
+ * @param path - The path being resolved, optionally with a JSON pointer in the hash
+ * @param [options]
+ * @returns
+ */
+ exists(path: string, options: any): boolean;
+ /**
+ * Resolves the given JSON reference and returns the resolved value.
+ *
+ * @param path - The path being resolved, with a JSON pointer in the hash
+ * @param [options]
+ * @returns - Returns the resolved value
+ */
+ get(path: string, options?: O): JSONSchema4Type | JSONSchema6Type | JSONSchema7Type;
+ /**
+ * Sets the value at the given path in the schema. If the property, or any of its parents, don't exist, they will be created.
+ *
+ * @param path The JSON Reference path, optionally with a JSON Pointer in the hash
+ * @param value The value to assign. Can be anything (object, string, number, etc.)
+ */
+ set(path: string, value: JSONSchema4Type | JSONSchema6Type | JSONSchema7Type): void;
+ /**
+ * Returns the specified {@link $Ref} object, or undefined.
+ *
+ * @param path - The path being resolved, optionally with a JSON pointer in the hash
+ * @returns
+ * @protected
+ */
+ _get$Ref(path: string): $Ref<S, O>;
+ /**
+ * Creates a new {@link $Ref} object and adds it to this {@link $Refs} object.
+ *
+ * @param path - The file path or URL of the referenced file
+ */
+ _add(path: string): $Ref<S, O>;
+ /**
+ * Resolves the given JSON reference.
+ *
+ * @param path - The path being resolved, optionally with a JSON pointer in the hash
+ * @param pathFromRoot - The path of `obj` from the schema root
+ * @param [options]
+ * @returns
+ * @protected
+ */
+ _resolve(path: string, pathFromRoot: string, options?: O): import("./pointer.js").default<S, O> | null;
+ /**
+ * A map of paths/urls to {@link $Ref} objects
+ *
+ * @type {object}
+ * @protected
+ */
+ _$refs: $RefsMap<S, O>;
+ /**
+ * The {@link $Ref} object that is the root of the JSON schema.
+ *
+ * @type {$Ref}
+ * @protected
+ */
+ _root$Ref: $Ref<S, O>;
+ constructor();
+ /**
+ * Returns the paths of all the files/URLs that are referenced by the JSON schema,
+ * including the schema itself.
+ *
+ * @param [types] - Only return paths of the given types ("file", "http", etc.)
+ * @returns
+ */
+ /**
+ * Returns the map of JSON references and their resolved values.
+ *
+ * @param [types] - Only return references of the given types ("file", "http", etc.)
+ * @returns
+ */
+ /**
+ * Returns a POJO (plain old JavaScript object) for serialization as JSON.
+ *
+ * @returns {object}
+ */
+ toJSON: (...types: (string | string[])[]) => S;
+}
+export {};
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/refs.js b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/refs.js
new file mode 100644
index 00000000..d33e2c30
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/refs.js
@@ -0,0 +1,222 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const ono_1 = require("@jsdevtools/ono");
+const ref_js_1 = __importDefault(require("./ref.js"));
+const url = __importStar(require("./util/url.js"));
+const convert_path_to_posix_1 = __importDefault(require("./util/convert-path-to-posix"));
+/**
+ * When you call the resolve method, the value that gets passed to the callback function (or Promise) is a $Refs object. This same object is accessible via the parser.$refs property of $RefParser objects.
+ *
+ * This object is a map of JSON References and their resolved values. It also has several convenient helper methods that make it easy for you to navigate and manipulate the JSON References.
+ *
+ * See https://apitools.dev/json-schema-ref-parser/docs/refs.html
+ */
+class $Refs {
+ /**
+ * Returns the paths/URLs of all the files in your schema (including the main schema file).
+ *
+ * See https://apitools.dev/json-schema-ref-parser/docs/refs.html#pathstypes
+ *
+ * @param types (optional) Optionally only return certain types of paths ("file", "http", etc.)
+ */
+ paths(...types) {
+ const paths = getPaths(this._$refs, types.flat());
+ return paths.map((path) => {
+ return (0, convert_path_to_posix_1.default)(path.decoded);
+ });
+ }
+ /**
+ * Returns a map of paths/URLs and their corresponding values.
+ *
+ * See https://apitools.dev/json-schema-ref-parser/docs/refs.html#valuestypes
+ *
+ * @param types (optional) Optionally only return values from certain locations ("file", "http", etc.)
+ */
+ values(...types) {
+ const $refs = this._$refs;
+ const paths = getPaths($refs, types.flat());
+ return paths.reduce((obj, path) => {
+ obj[(0, convert_path_to_posix_1.default)(path.decoded)] = $refs[path.encoded].value;
+ return obj;
+ }, {});
+ }
+ /**
+ * Returns `true` if the given path exists in the schema; otherwise, returns `false`
+ *
+ * See https://apitools.dev/json-schema-ref-parser/docs/refs.html#existsref
+ *
+ * @param $ref The JSON Reference path, optionally with a JSON Pointer in the hash
+ */
+ /**
+ * Determines whether the given JSON reference exists.
+ *
+ * @param path - The path being resolved, optionally with a JSON pointer in the hash
+ * @param [options]
+ * @returns
+ */
+ exists(path, options) {
+ try {
+ this._resolve(path, "", options);
+ return true;
+ }
+ catch {
+ return false;
+ }
+ }
+ /**
+ * Resolves the given JSON reference and returns the resolved value.
+ *
+ * @param path - The path being resolved, with a JSON pointer in the hash
+ * @param [options]
+ * @returns - Returns the resolved value
+ */
+ get(path, options) {
+ return this._resolve(path, "", options).value;
+ }
+ /**
+ * Sets the value at the given path in the schema. If the property, or any of its parents, don't exist, they will be created.
+ *
+ * @param path The JSON Reference path, optionally with a JSON Pointer in the hash
+ * @param value The value to assign. Can be anything (object, string, number, etc.)
+ */
+ set(path, value) {
+ const absPath = url.resolve(this._root$Ref.path, path);
+ const withoutHash = url.stripHash(absPath);
+ const $ref = this._$refs[withoutHash];
+ if (!$ref) {
+ throw (0, ono_1.ono)(`Error resolving $ref pointer "${path}". \n"${withoutHash}" not found.`);
+ }
+ $ref.set(absPath, value);
+ }
+ /**
+ * Returns the specified {@link $Ref} object, or undefined.
+ *
+ * @param path - The path being resolved, optionally with a JSON pointer in the hash
+ * @returns
+ * @protected
+ */
+ _get$Ref(path) {
+ path = url.resolve(this._root$Ref.path, path);
+ const withoutHash = url.stripHash(path);
+ return this._$refs[withoutHash];
+ }
+ /**
+ * Creates a new {@link $Ref} object and adds it to this {@link $Refs} object.
+ *
+ * @param path - The file path or URL of the referenced file
+ */
+ _add(path) {
+ const withoutHash = url.stripHash(path);
+ const $ref = new ref_js_1.default(this);
+ $ref.path = withoutHash;
+ this._$refs[withoutHash] = $ref;
+ this._root$Ref = this._root$Ref || $ref;
+ return $ref;
+ }
+ /**
+ * Resolves the given JSON reference.
+ *
+ * @param path - The path being resolved, optionally with a JSON pointer in the hash
+ * @param pathFromRoot - The path of `obj` from the schema root
+ * @param [options]
+ * @returns
+ * @protected
+ */
+ _resolve(path, pathFromRoot, options) {
+ const absPath = url.resolve(this._root$Ref.path, path);
+ const withoutHash = url.stripHash(absPath);
+ const $ref = this._$refs[withoutHash];
+ if (!$ref) {
+ throw (0, ono_1.ono)(`Error resolving $ref pointer "${path}". \n"${withoutHash}" not found.`);
+ }
+ return $ref.resolve(absPath, options, path, pathFromRoot);
+ }
+ constructor() {
+ /**
+ * A map of paths/urls to {@link $Ref} objects
+ *
+ * @type {object}
+ * @protected
+ */
+ this._$refs = {};
+ /**
+ * Returns the paths of all the files/URLs that are referenced by the JSON schema,
+ * including the schema itself.
+ *
+ * @param [types] - Only return paths of the given types ("file", "http", etc.)
+ * @returns
+ */
+ /**
+ * Returns the map of JSON references and their resolved values.
+ *
+ * @param [types] - Only return references of the given types ("file", "http", etc.)
+ * @returns
+ */
+ /**
+ * Returns a POJO (plain old JavaScript object) for serialization as JSON.
+ *
+ * @returns {object}
+ */
+ this.toJSON = this.values;
+ /**
+ * Indicates whether the schema contains any circular references.
+ *
+ * @type {boolean}
+ */
+ this.circular = false;
+ this._$refs = {};
+ // @ts-ignore
+ this._root$Ref = null;
+ }
+}
+exports.default = $Refs;
+/**
+ * Returns the encoded and decoded paths keys of the given object.
+ *
+ * @param $refs - The object whose keys are URL-encoded paths
+ * @param [types] - Only return paths of the given types ("file", "http", etc.)
+ * @returns
+ */
+function getPaths($refs, types) {
+ let paths = Object.keys($refs);
+ // Filter the paths by type
+ types = Array.isArray(types[0]) ? types[0] : Array.prototype.slice.call(types);
+ if (types.length > 0 && types[0]) {
+ paths = paths.filter((key) => {
+ return types.includes($refs[key].pathType);
+ });
+ }
+ // Decode local filesystem paths
+ return paths.map((path) => {
+ return {
+ encoded: path,
+ decoded: $refs[path].pathType === "file" ? url.toFileSystemPath(path, true) : path,
+ };
+ });
+}
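
The `$Refs` helpers above are reachable through the public `resolve()` entry point. A usage sketch, assuming an illustrative `schema.yaml` on disk:

const $RefParser = require("@apidevtools/json-schema-ref-parser");

async function inspect() {
  const $refs = await $RefParser.resolve("schema.yaml");
  console.log($refs.circular);                    // true if any reference loops back
  console.log($refs.paths("file"));               // every local file pulled in while resolving
  console.log($refs.exists("schema.yaml#/info")); // check a pointer without throwing
}

inspect().catch(console.error);
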
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/resolve-external.d.ts b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/resolve-external.d.ts
new file mode 100644
index 00000000..0f1ae3d4
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/resolve-external.d.ts
@@ -0,0 +1,15 @@
+import type { ParserOptions } from "./options.js";
+import type { JSONSchema } from "./types/index.js";
+import type $RefParser from "./index.js";
+/**
+ * Crawls the JSON schema, finds all external JSON references, and resolves their values.
+ * This method does not mutate the JSON schema. The resolved values are added to {@link $RefParser#$refs}.
+ *
+ * NOTE: We only care about EXTERNAL references here. INTERNAL references are only relevant when dereferencing.
+ *
+ * @returns
+ * The promise resolves once all JSON references in the schema have been resolved,
+ * including nested references that are contained in externally-referenced files.
+ */
+declare function resolveExternal<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(parser: $RefParser<S, O>, options: O): Promise<void> | Promise<unknown[]>;
+export default resolveExternal;
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/resolve-external.js b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/resolve-external.js
new file mode 100644
index 00000000..038b5a32
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/resolve-external.js
@@ -0,0 +1,133 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const ref_js_1 = __importDefault(require("./ref.js"));
+const pointer_js_1 = __importDefault(require("./pointer.js"));
+const parse_js_1 = __importDefault(require("./parse.js"));
+const url = __importStar(require("./util/url.js"));
+const errors_js_1 = require("./util/errors.js");
+/**
+ * Crawls the JSON schema, finds all external JSON references, and resolves their values.
+ * This method does not mutate the JSON schema. The resolved values are added to {@link $RefParser#$refs}.
+ *
+ * NOTE: We only care about EXTERNAL references here. INTERNAL references are only relevant when dereferencing.
+ *
+ * @returns
+ * The promise resolves once all JSON references in the schema have been resolved,
+ * including nested references that are contained in externally-referenced files.
+ */
+function resolveExternal(parser, options) {
+ if (!options.resolve?.external) {
+ // Nothing to resolve, so exit early
+ return Promise.resolve();
+ }
+ try {
+ // console.log('Resolving $ref pointers in %s', parser.$refs._root$Ref.path);
+ const promises = crawl(parser.schema, parser.$refs._root$Ref.path + "#", parser.$refs, options);
+ return Promise.all(promises);
+ }
+ catch (e) {
+ return Promise.reject(e);
+ }
+}
+/**
+ * Recursively crawls the given value, and resolves any external JSON references.
+ *
+ * @param obj - The value to crawl. If it's not an object or array, it will be ignored.
+ * @param path - The full path of `obj`, possibly with a JSON Pointer in the hash
+ * @param {boolean} external - Whether `obj` was found in an external document.
+ * @param $refs
+ * @param options
+ * @param seen - Internal.
+ *
+ * @returns
+ * Returns an array of promises. There will be one promise for each JSON reference in `obj`.
+ * If `obj` does not contain any JSON references, then the array will be empty.
+ * If any of the JSON references point to files that contain additional JSON references,
+ * then the corresponding promise will internally reference an array of promises.
+ */
+function crawl(obj, path, $refs, options, seen, external) {
+ seen || (seen = new Set());
+ let promises = [];
+ if (obj && typeof obj === "object" && !ArrayBuffer.isView(obj) && !seen.has(obj)) {
+ seen.add(obj); // Track previously seen objects to avoid infinite recursion
+ if (ref_js_1.default.isExternal$Ref(obj)) {
+ promises.push(resolve$Ref(obj, path, $refs, options));
+ }
+ const keys = Object.keys(obj);
+ for (const key of keys) {
+ const keyPath = pointer_js_1.default.join(path, key);
+ const value = obj[key];
+ promises = promises.concat(crawl(value, keyPath, $refs, options, seen, external));
+ }
+ }
+ return promises;
+}
+/**
+ * Resolves the given JSON Reference, and then crawls the resulting value.
+ *
+ * @param $ref - The JSON Reference to resolve
+ * @param path - The full path of `$ref`, possibly with a JSON Pointer in the hash
+ * @param $refs
+ * @param options
+ *
+ * @returns
+ * The promise resolves once all JSON references in the object have been resolved,
+ * including nested references that are contained in externally-referenced files.
+ */
+async function resolve$Ref($ref, path, $refs, options) {
+ const shouldResolveOnCwd = options.dereference?.externalReferenceResolution === "root";
+ const resolvedPath = url.resolve(shouldResolveOnCwd ? url.cwd() : path, $ref.$ref);
+ const withoutHash = url.stripHash(resolvedPath);
+ // $ref.$ref = url.relative($refs._root$Ref.path, resolvedPath);
+ // Do we already have this $ref?
+ const ref = $refs._$refs[withoutHash];
+ if (ref) {
+ // We've already parsed this $ref, so use the existing value
+ return Promise.resolve(ref.value);
+ }
+ // Parse the $referenced file/url
+ try {
+ const result = await (0, parse_js_1.default)(resolvedPath, $refs, options);
+ // Crawl the parsed value
+ // console.log('Resolving $ref pointers in %s', withoutHash);
+ const promises = crawl(result, withoutHash + "#", $refs, options, new Set(), true);
+ return Promise.all(promises);
+ }
+ catch (err) {
+ if (!options?.continueOnError || !(0, errors_js_1.isHandledError)(err)) {
+ throw err;
+ }
+ if ($refs._$refs[withoutHash]) {
+ err.source = decodeURI(url.stripHash(path));
+ err.path = url.safePointerToPath(url.getHash(path));
+ }
+ return [];
+ }
+}
+exports.default = resolveExternal;
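
As the early return above shows, external resolution can be switched off entirely with `options.resolve.external`. A sketch of that option on the public API (the file name is illustrative):

const $RefParser = require("@apidevtools/json-schema-ref-parser");

// With external resolution disabled, only internal "#/..." pointers are followed;
// $refs to other files or URLs are left untouched, per isAllowed$Ref above.
$RefParser.dereference("schema.yaml", { resolve: { external: false } })
  .then((schema) => console.log("dereferenced internal refs only", !!schema))
  .catch(console.error);
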
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/resolvers/file.d.ts b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/resolvers/file.d.ts
new file mode 100644
index 00000000..207aad98
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/resolvers/file.d.ts
@@ -0,0 +1,3 @@
+import type { JSONSchema, ResolverOptions } from "../types/index.js";
+declare const _default: ResolverOptions<JSONSchema>;
+export default _default;
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/resolvers/file.js b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/resolvers/file.js
new file mode 100644
index 00000000..52bf28f5
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/resolvers/file.js
@@ -0,0 +1,64 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const fs_1 = __importDefault(require("fs"));
+const ono_1 = require("@jsdevtools/ono");
+const url = __importStar(require("../util/url.js"));
+const errors_js_1 = require("../util/errors.js");
+exports.default = {
+ /**
+ * The order that this resolver will run, in relation to other resolvers.
+ */
+ order: 100,
+ /**
+ * Determines whether this resolver can read a given file reference.
+ * Resolvers that return true will be tried, in order, until one successfully resolves the file.
+ * Resolvers that return false will not be given a chance to resolve the file.
+ */
+ canRead(file) {
+ return url.isFileSystemPath(file.url);
+ },
+ /**
+ * Reads the given file and returns its raw contents as a Buffer.
+ */
+ async read(file) {
+ let path;
+ try {
+ path = url.toFileSystemPath(file.url);
+ }
+ catch (err) {
+ throw new errors_js_1.ResolverError(ono_1.ono.uri(err, `Malformed URI: ${file.url}`), file.url);
+ }
+ try {
+ return await fs_1.default.promises.readFile(path);
+ }
+ catch (err) {
+ throw new errors_js_1.ResolverError((0, ono_1.ono)(err, `Error opening file "${path}"`), path);
+ }
+ },
+};
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/resolvers/http.d.ts b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/resolvers/http.d.ts
new file mode 100644
index 00000000..1f5f6d34
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/resolvers/http.d.ts
@@ -0,0 +1,3 @@
+import type { HTTPResolverOptions, JSONSchema } from "../types/index.js";
+declare const _default: HTTPResolverOptions<JSONSchema>;
+export default _default;
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/resolvers/http.js b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/resolvers/http.js
new file mode 100644
index 00000000..45094e81
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/resolvers/http.js
@@ -0,0 +1,137 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const ono_1 = require("@jsdevtools/ono");
+const url = __importStar(require("../util/url.js"));
+const errors_js_1 = require("../util/errors.js");
+exports.default = {
+ /**
+ * The order that this resolver will run, in relation to other resolvers.
+ */
+ order: 200,
+ /**
+ * HTTP headers to send when downloading files.
+ *
+ * @example:
+ * {
+ * "User-Agent": "JSON Schema $Ref Parser",
+ * Accept: "application/json"
+ * }
+ */
+ headers: null,
+ /**
+ * HTTP request timeout (in milliseconds).
+ */
+ timeout: 60000, // 60 seconds
+ /**
+ * The maximum number of HTTP redirects to follow.
+ * To disable automatic following of redirects, set this to zero.
+ */
+ redirects: 5,
+ /**
+ * The `withCredentials` option of XMLHttpRequest.
+ * Set this to `true` if you're downloading files from a CORS-enabled server that requires authentication
+ */
+ withCredentials: false,
+ /**
+ * Determines whether this resolver can read a given file reference.
+ * Resolvers that return true will be tried in order, until one successfully resolves the file.
+ * Resolvers that return false will not be given a chance to resolve the file.
+ */
+ canRead(file) {
+ return url.isHttp(file.url);
+ },
+ /**
+ * Reads the given URL and returns its raw contents as a Buffer.
+ */
+ read(file) {
+ const u = url.parse(file.url);
+ if (typeof window !== "undefined" && !u.protocol) {
+ // Use the protocol of the current page
+ u.protocol = url.parse(location.href).protocol;
+ }
+ return download(u, this);
+ },
+};
+/**
+ * Downloads the given file.
+ * @returns
+ * The promise resolves with the raw downloaded data, or rejects if there is an HTTP error.
+ */
+async function download(u, httpOptions, _redirects) {
+ u = url.parse(u);
+ const redirects = _redirects || [];
+ redirects.push(u.href);
+ try {
+ const res = await get(u, httpOptions);
+ if (res.status >= 400) {
+ throw (0, ono_1.ono)({ status: res.status }, `HTTP ERROR ${res.status}`);
+ }
+ else if (res.status >= 300) {
+ if (!Number.isNaN(httpOptions.redirects) && redirects.length > httpOptions.redirects) {
+ throw new errors_js_1.ResolverError((0, ono_1.ono)({ status: res.status }, `Error downloading ${redirects[0]}. \nToo many redirects: \n ${redirects.join(" \n ")}`));
+ }
+ else if (!("location" in res.headers) || !res.headers.location) {
+ throw (0, ono_1.ono)({ status: res.status }, `HTTP ${res.status} redirect with no location header`);
+ }
+ else {
+ const redirectTo = url.resolve(u.href, res.headers.location);
+ return download(redirectTo, httpOptions, redirects);
+ }
+ }
+ else {
+ if (res.body) {
+ const buf = await res.arrayBuffer();
+ return Buffer.from(buf);
+ }
+ return Buffer.alloc(0);
+ }
+ }
+ catch (err) {
+ throw new errors_js_1.ResolverError((0, ono_1.ono)(err, `Error downloading ${u.href}`), u.href);
+ }
+}
+/**
+ * Sends an HTTP GET request.
+ * The promise resolves with the HTTP Response object.
+ */
+async function get(u, httpOptions) {
+ let controller;
+ let timeoutId;
+ if (httpOptions.timeout) {
+ controller = new AbortController();
+ timeoutId = setTimeout(() => controller.abort(), httpOptions.timeout);
+ }
+ const response = await fetch(u, {
+ method: "GET",
+ headers: httpOptions.headers || {},
+ credentials: httpOptions.withCredentials ? "include" : "same-origin",
+ signal: controller ? controller.signal : null,
+ });
+ if (timeoutId) {
+ clearTimeout(timeoutId);
+ }
+ return response;
+}
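
Editor's note: the HTTP resolver defaults above (headers, timeout, redirects, withCredentials) can be overridden per call through the `resolve.http` options that json-schema-ref-parser accepts. A minimal sketch, assuming the same @apidevtools/swagger-parser wrapper used by the validation script; the timeout, redirect count, and header values are illustrative only:

import SwaggerParser from "@apidevtools/swagger-parser";

// Sketch: tighten the HTTP resolver used for remote $refs while validating a spec.
// The option names mirror the defaults defined in resolvers/http.js above.
async function validateWithCustomHttp(specPath: string) {
  return SwaggerParser.validate(specPath, {
    resolve: {
      http: {
        timeout: 10_000, // fail faster than the 60-second default above
        redirects: 2, // follow at most two redirects per file
        headers: { Accept: "application/yaml, application/json" }, // illustrative
      },
    },
  });
}
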
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/types/index.d.ts b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/types/index.d.ts
new file mode 100644
index 00000000..75612d41
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/types/index.d.ts
@@ -0,0 +1,114 @@
+import type { JSONSchema4, JSONSchema4Object, JSONSchema6, JSONSchema6Object, JSONSchema7, JSONSchema7Object } from "json-schema";
+import type $Refs from "../refs.js";
+import type { ParserOptions } from "../options";
+export type JSONSchema = JSONSchema4 | JSONSchema6 | JSONSchema7;
+export type JSONSchemaObject = JSONSchema4Object | JSONSchema6Object | JSONSchema7Object;
+export type SchemaCallback<S extends object = JSONSchema> = (err: Error | null, schema?: S | object | null) => any;
+export type $RefsCallback<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>> = (err: Error | null, $refs?: $Refs<S, O>) => any;
+/**
+ * See https://apitools.dev/json-schema-ref-parser/docs/options.html
+ */
+export interface HTTPResolverOptions<S extends object = JSONSchema> extends Partial<ResolverOptions<S>> {
+ /**
+ * You can specify any HTTP headers that should be sent when downloading files. For example, some servers may require you to set the `Accept` or `Referrer` header.
+ */
+ headers?: RequestInit["headers"] | null;
+ /**
+ * The amount of time (in milliseconds) to wait for a response from the server when downloading files. The default is 5 seconds.
+ */
+ timeout?: number;
+ /**
+ * The maximum number of HTTP redirects to follow per file. The default is 5. To disable automatic following of redirects, set this to zero.
+ */
+ redirects?: number;
+ /**
+ * Set this to `true` if you're downloading files from a CORS-enabled server that requires authentication
+ */
+ withCredentials?: boolean;
+}
+/**
+ * JSON Schema `$Ref` Parser comes with built-in resolvers for HTTP and HTTPS URLs, as well as local filesystem paths (when running in Node.js). You can add your own custom resolvers to support additional protocols, or even replace any of the built-in resolvers with your own custom implementation.
+ *
+ * See https://apitools.dev/json-schema-ref-parser/docs/plugins/resolvers.html
+ */
+export interface ResolverOptions<S extends object = JSONSchema> {
+ name?: string;
+ /**
+ * All resolvers have an order property, even the built-in resolvers. If you don't specify an order property, then your resolver will run last. Specifying `order: 1`, like we did in this example, will make your resolver run first. Or you can squeeze your resolver in-between some of the built-in resolvers. For example, `order: 101` would make it run after the file resolver, but before the HTTP resolver. You can see the order of all the built-in resolvers by looking at their source code.
+ *
+ * The order property and canRead property are related to each other. For each file that JSON Schema $Ref Parser needs to resolve, it first determines which resolvers can read that file by checking their canRead property. If only one resolver matches a file, then only that one resolver is called, regardless of its order. If multiple resolvers match a file, then those resolvers are tried in order until one of them successfully reads the file. Once a resolver successfully reads the file, the rest of the resolvers are skipped.
+ */
+ order?: number;
+ /**
+ * The `canRead` property tells JSON Schema `$Ref` Parser what kind of files your resolver can read. In this example, we've simply specified a regular expression that matches "mongodb://" URLs, but we could have used a simple boolean, or even a function with custom logic to determine which files to resolve. Here are examples of each approach:
+ */
+ canRead: boolean | RegExp | string | string[] | ((file: FileInfo) => boolean);
+ /**
+ * This is where the real work of a resolver happens. The `read` method accepts the same file info object as the `canRead` function, but rather than returning a boolean value, the `read` method should return the contents of the file. The file contents should be returned in as raw a form as possible, such as a string or a byte array. Any further parsing or processing should be done by parsers.
+ *
+ * Unlike the `canRead` function, the `read` method can also be asynchronous. This might be important if your resolver needs to read data from a database or some other external source. You can return your asynchronous value using either an ES6 Promise or a Node.js-style error-first callback. Of course, if your resolver has the ability to return its data synchronously, then that's fine too. Here are examples of all three approaches:
+ */
+ read: string | object | ((file: FileInfo, callback?: (error: Error | null, data: string | null) => any) => string | Buffer | S | Promise<string | Buffer | S>);
+}
+export interface Plugin {
+ name?: string;
+ /**
+ * Parsers run in a specific order, relative to other parsers. For example, a parser with `order: 5` will run before a parser with `order: 10`. If a parser is unable to successfully parse a file, then the next parser is tried, until one succeeds or they all fail.
+ *
+ * You can change the order in which parsers run, which is useful if you know that most of your referenced files will be a certain type, or if you add your own custom parser that you want to run first.
+ */
+ order?: number;
+ /**
+ * All of the built-in parsers allow empty files by default. The JSON and YAML parsers will parse empty files as `undefined`. The text parser will parse empty files as an empty string. The binary parser will parse empty files as an empty byte array.
+ *
+ * You can set `allowEmpty: false` on any parser, which will cause an error to be thrown if a file is empty.
+ */
+ allowEmpty?: boolean;
+ /**
+ * Specifies whether a Byte Order Mark (BOM) is allowed or not. Only applies to JSON parsing
+ *
+ * @type {boolean} @default true
+ */
+ allowBOM?: boolean;
+ /**
+ * The encoding that the text is expected to be in.
+ */
+ encoding?: BufferEncoding;
+ /**
+ * Determines which parsers will be used for which files.
+ *
+ * A regular expression can be used to match files by their full path. A string (or array of strings) can be used to match files by their file extension. Or a function can be used to perform more complex matching logic. See the custom parser docs for details.
+ */
+ canParse?: boolean | RegExp | string | string[] | ((file: FileInfo) => boolean);
+ /**
+ * This is where the real work of a parser happens. The `parse` method accepts the same file info object as the `canParse` function, but rather than returning a boolean value, the `parse` method should return a JavaScript representation of the file contents. For our CSV parser, that is a two-dimensional array of lines and values. For your parser, it might be an object, a string, a custom class, or anything else.
+ *
+ * Unlike the `canParse` function, the `parse` method can also be asynchronous. This might be important if your parser needs to retrieve data from a database or if it relies on an external HTTP service to return the parsed value. You can return your asynchronous value via a [Promise](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise) or a Node.js-style error-first callback. Here are examples of both approaches:
+ */
+ parse: ((file: FileInfo, callback?: (error: Error | null, data: string | null) => any) => unknown | Promise<unknown>) | number | string;
+}
+/**
+ * JSON Schema `$Ref` Parser supports plug-ins, such as resolvers and parsers. These plug-ins can have methods such as `canRead()`, `read()`, `canParse()`, and `parse()`. All of these methods accept the same object as their parameter: an object containing information about the file being read or parsed.
+ *
+ * The file info object currently only consists of a few properties, but it may grow in the future if plug-ins end up needing more information.
+ *
+ * See https://apitools.dev/json-schema-ref-parser/docs/plugins/file-info-object.html
+ */
+export interface FileInfo {
+ /**
+ * The full URL of the file. This could be any type of URL, including "http://", "https://", "file://", "ftp://", "mongodb://", or even a local filesystem path (when running in Node.js).
+ */
+ url: string;
+ /**
+ * The hash (URL fragment) of the file URL, including the # symbol. If the URL doesn't have a hash, then this will be an empty string.
+ */
+ hash: string;
+ /**
+ * The lowercase file extension, such as ".json", ".yaml", ".txt", etc.
+ */
+ extension: string;
+ /**
+ * The raw file contents, in whatever form they were returned by the resolver that read the file.
+ */
+ data: string | Buffer;
+}
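
Editor's note: the ResolverOptions contract documented in this file (order, canRead, read) is what a custom resolver implements. Below is a minimal sketch of a hypothetical resolver; the "memory://" scheme, the inMemoryDocs map, and the function names are made up for illustration, and only a subset of the FileInfo object is used:

import $RefParser from "@apidevtools/json-schema-ref-parser";

// Hypothetical in-memory documents, keyed by a made-up "memory://" URL scheme.
const inMemoryDocs = new Map<string, string>([["memory://shared/pet.yaml", "type: object"]]);

const memoryResolver = {
  order: 1, // run before the built-in file (100) and http (200) resolvers
  canRead: /^memory:\/\//i, // shorthand: a RegExp tested against file.url
  read(file: { url: string }): string {
    // `file` is the FileInfo object described above; only `url` is needed here.
    const doc = inMemoryDocs.get(file.url);
    if (doc === undefined) {
      throw new Error(`Unknown in-memory document: ${file.url}`);
    }
    return doc;
  },
};

// Usage sketch: register the resolver under any key of `resolve`.
async function dereferenceWithMemoryRefs(specPath: string) {
  return $RefParser.dereference(specPath, { resolve: { memory: memoryResolver } });
}
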
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/types/index.js b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/types/index.js
new file mode 100644
index 00000000..c8ad2e54
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/types/index.js
@@ -0,0 +1,2 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/convert-path-to-posix.d.ts b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/convert-path-to-posix.d.ts
new file mode 100644
index 00000000..0bf2822b
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/convert-path-to-posix.d.ts
@@ -0,0 +1 @@
+export default function convertPathToPosix(filePath: string): string;
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/convert-path-to-posix.js b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/convert-path-to-posix.js
new file mode 100644
index 00000000..a7207f7b
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/convert-path-to-posix.js
@@ -0,0 +1,14 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.default = convertPathToPosix;
+const path_1 = __importDefault(require("path"));
+function convertPathToPosix(filePath) {
+ const isExtendedLengthPath = filePath.startsWith("\\\\?\\");
+ if (isExtendedLengthPath) {
+ return filePath;
+ }
+ return filePath.split(path_1.default?.win32?.sep).join(path_1.default?.posix?.sep ?? "/");
+}
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/errors.d.ts b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/errors.d.ts
new file mode 100644
index 00000000..26e8d6db
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/errors.d.ts
@@ -0,0 +1,57 @@
+import type $RefParser from "../index.js";
+import type { ParserOptions } from "../index.js";
+import type { JSONSchema } from "../index.js";
+export type JSONParserErrorType = "EUNKNOWN" | "EPARSER" | "EUNMATCHEDPARSER" | "ETIMEOUT" | "ERESOLVER" | "EUNMATCHEDRESOLVER" | "EMISSINGPOINTER" | "EINVALIDPOINTER";
+export declare class JSONParserError extends Error {
+ readonly name: string;
+ readonly message: string;
+ source: string | undefined;
+ path: Array<string | number> | null;
+ readonly code: JSONParserErrorType;
+ constructor(message: string, source?: string);
+ get footprint(): string;
+}
+export declare class JSONParserErrorGroup<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>> extends Error {
+ files: $RefParser<S, O>;
+ constructor(parser: $RefParser<S, O>);
+ static getParserErrors<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(parser: $RefParser<S, O>): JSONParserError[];
+ get errors(): Array<JSONParserError | InvalidPointerError | ResolverError | ParserError | MissingPointerError | UnmatchedParserError | UnmatchedResolverError>;
+}
+export declare class ParserError extends JSONParserError {
+ code: JSONParserErrorType;
+ name: string;
+ constructor(message: any, source: any);
+}
+export declare class UnmatchedParserError extends JSONParserError {
+ code: JSONParserErrorType;
+ name: string;
+ constructor(source: string);
+}
+export declare class ResolverError extends JSONParserError {
+ code: JSONParserErrorType;
+ name: string;
+ ioErrorCode?: string;
+ constructor(ex: Error | any, source?: string);
+}
+export declare class UnmatchedResolverError extends JSONParserError {
+ code: JSONParserErrorType;
+ name: string;
+ constructor(source: any);
+}
+export declare class MissingPointerError extends JSONParserError {
+ code: JSONParserErrorType;
+ name: string;
+ constructor(token: any, path: any);
+}
+export declare class TimeoutError extends JSONParserError {
+ code: JSONParserErrorType;
+ name: string;
+ constructor(timeout: number);
+}
+export declare class InvalidPointerError extends JSONParserError {
+ code: JSONParserErrorType;
+ name: string;
+ constructor(pointer: any, path: any);
+}
+export declare function isHandledError(err: any): err is JSONParserError;
+export declare function normalizeError(err: any): any;
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/errors.js b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/errors.js
new file mode 100644
index 00000000..f8e4848f
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/errors.js
@@ -0,0 +1,112 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.InvalidPointerError = exports.TimeoutError = exports.MissingPointerError = exports.UnmatchedResolverError = exports.ResolverError = exports.UnmatchedParserError = exports.ParserError = exports.JSONParserErrorGroup = exports.JSONParserError = void 0;
+exports.isHandledError = isHandledError;
+exports.normalizeError = normalizeError;
+const ono_1 = require("@jsdevtools/ono");
+const url_js_1 = require("./url.js");
+class JSONParserError extends Error {
+ constructor(message, source) {
+ super();
+ this.code = "EUNKNOWN";
+ this.name = "JSONParserError";
+ this.message = message;
+ this.source = source;
+ this.path = null;
+ ono_1.Ono.extend(this);
+ }
+ get footprint() {
+ return `${this.path}+${this.source}+${this.code}+${this.message}`;
+ }
+}
+exports.JSONParserError = JSONParserError;
+class JSONParserErrorGroup extends Error {
+ constructor(parser) {
+ super();
+ this.files = parser;
+ this.name = "JSONParserErrorGroup";
+ this.message = `${this.errors.length} error${this.errors.length > 1 ? "s" : ""} occurred while reading '${(0, url_js_1.toFileSystemPath)(parser.$refs._root$Ref.path)}'`;
+ ono_1.Ono.extend(this);
+ }
+ static getParserErrors(parser) {
+ const errors = [];
+ for (const $ref of Object.values(parser.$refs._$refs)) {
+ if ($ref.errors) {
+ errors.push(...$ref.errors);
+ }
+ }
+ return errors;
+ }
+ get errors() {
+ return JSONParserErrorGroup.getParserErrors(this.files);
+ }
+}
+exports.JSONParserErrorGroup = JSONParserErrorGroup;
+class ParserError extends JSONParserError {
+ constructor(message, source) {
+ super(`Error parsing ${source}: ${message}`, source);
+ this.code = "EPARSER";
+ this.name = "ParserError";
+ }
+}
+exports.ParserError = ParserError;
+class UnmatchedParserError extends JSONParserError {
+ constructor(source) {
+ super(`Could not find parser for "${source}"`, source);
+ this.code = "EUNMATCHEDPARSER";
+ this.name = "UnmatchedParserError";
+ }
+}
+exports.UnmatchedParserError = UnmatchedParserError;
+class ResolverError extends JSONParserError {
+ constructor(ex, source) {
+ super(ex.message || `Error reading file "${source}"`, source);
+ this.code = "ERESOLVER";
+ this.name = "ResolverError";
+ if ("code" in ex) {
+ this.ioErrorCode = String(ex.code);
+ }
+ }
+}
+exports.ResolverError = ResolverError;
+class UnmatchedResolverError extends JSONParserError {
+ constructor(source) {
+ super(`Could not find resolver for "${source}"`, source);
+ this.code = "EUNMATCHEDRESOLVER";
+ this.name = "UnmatchedResolverError";
+ }
+}
+exports.UnmatchedResolverError = UnmatchedResolverError;
+class MissingPointerError extends JSONParserError {
+ constructor(token, path) {
+ super(`Token "${token}" does not exist.`, (0, url_js_1.stripHash)(path));
+ this.code = "EUNMATCHEDRESOLVER";
+ this.name = "MissingPointerError";
+ }
+}
+exports.MissingPointerError = MissingPointerError;
+class TimeoutError extends JSONParserError {
+ constructor(timeout) {
+ super(`Dereferencing timeout reached: ${timeout}ms`);
+ this.code = "ETIMEOUT";
+ this.name = "TimeoutError";
+ }
+}
+exports.TimeoutError = TimeoutError;
+class InvalidPointerError extends JSONParserError {
+ constructor(pointer, path) {
+ super(`Invalid $ref pointer "${pointer}". Pointers must begin with "#/"`, (0, url_js_1.stripHash)(path));
+ this.code = "EUNMATCHEDRESOLVER";
+ this.name = "InvalidPointerError";
+ }
+}
+exports.InvalidPointerError = InvalidPointerError;
+function isHandledError(err) {
+ return err instanceof JSONParserError || err instanceof JSONParserErrorGroup;
+}
+function normalizeError(err) {
+ if (err.path === null) {
+ err.path = [];
+ }
+ return err;
+}
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/is-windows.d.ts b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/is-windows.d.ts
new file mode 100644
index 00000000..cabefaf6
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/is-windows.d.ts
@@ -0,0 +1 @@
+export declare const isWindows: () => boolean;
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/is-windows.js b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/is-windows.js
new file mode 100644
index 00000000..f5ae98e6
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/is-windows.js
@@ -0,0 +1,6 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.isWindows = void 0;
+const isWindowsConst = /^win/.test(globalThis.process ? globalThis.process.platform : "");
+const isWindows = () => isWindowsConst;
+exports.isWindows = isWindows;
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/maybe.d.ts b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/maybe.d.ts
new file mode 100644
index 00000000..05c61687
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/maybe.d.ts
@@ -0,0 +1,3 @@
+type MaybeParams<T> = (err: Error | any | null, result?: T) => void;
+export default function maybe<T>(cb: MaybeParams<T> | undefined, promise: Promise<T>): Promise<T> | void;
+export {};
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/maybe.js b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/maybe.js
new file mode 100644
index 00000000..0c41ce8f
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/maybe.js
@@ -0,0 +1,24 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.default = maybe;
+const next_js_1 = __importDefault(require("./next.js"));
+function maybe(cb, promise) {
+ if (cb) {
+ promise.then(function (result) {
+ (0, next_js_1.default)(function () {
+ cb(null, result);
+ });
+ }, function (err) {
+ (0, next_js_1.default)(function () {
+ cb(err);
+ });
+ });
+ return undefined;
+ }
+ else {
+ return promise;
+ }
+}
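
Editor's note: maybe() is the helper that lets a public API accept either a Node-style callback or hand back a Promise. A small sketch of that pattern, assuming a hypothetical loadSchema wrapper; the relative import path is illustrative:

import maybe from "./maybe.js"; // path as used inside the library; illustrative here

type Callback<T> = (err: Error | null, result?: T) => void;

// Hypothetical wrapper showing the dual callback/Promise style that maybe() enables.
function loadSchema(path: string, callback?: Callback<object>): Promise<object> | void {
  const work = Promise.resolve({ loaded: path }); // stand-in for real parsing work
  return maybe(callback, work); // returns the Promise only when no callback was given
}

// Promise style:
//   const schema = await loadSchema("meta.yaml");
// Callback style:
//   loadSchema("registry.yaml", (err, schema) => { /* handle result */ });
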
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/next.d.ts b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/next.d.ts
new file mode 100644
index 00000000..a1595e42
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/next.d.ts
@@ -0,0 +1,2 @@
+declare const _default: (callback: Function, ...args: any[]) => void;
+export default _default;
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/next.js b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/next.js
new file mode 100644
index 00000000..9fc939e2
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/next.js
@@ -0,0 +1,16 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+function makeNext() {
+ if (typeof process === "object" && typeof process.nextTick === "function") {
+ return process.nextTick;
+ }
+ else if (typeof setImmediate === "function") {
+ return setImmediate;
+ }
+ else {
+ return function next(f) {
+ setTimeout(f, 0);
+ };
+ }
+}
+exports.default = makeNext();
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/plugins.d.ts b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/plugins.d.ts
new file mode 100644
index 00000000..3a303ced
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/plugins.d.ts
@@ -0,0 +1,34 @@
+import type { FileInfo, JSONSchema } from "../types/index.js";
+import type { ParserOptions } from "../options.js";
+import type { ResolverOptions } from "../types/index.js";
+import type $Refs from "../refs.js";
+import type { Plugin } from "../types/index.js";
+/**
+ * Returns the given plugins as an array, rather than an object map.
+ * All other methods in this module expect an array of plugins rather than an object map.
+ *
+ * @returns
+ */
+export declare function all<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(plugins: O["resolve"]): Plugin[];
+/**
+ * Filters the given plugins, returning only the ones that return `true` for the given method.
+ */
+export declare function filter(plugins: Plugin[], method: any, file: any): Plugin[];
+/**
+ * Sorts the given plugins, in place, by their `order` property.
+ */
+export declare function sort(plugins: Plugin[]): Plugin[];
+export interface PluginResult<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>> {
+ plugin: Plugin;
+ result?: string | Buffer | S;
+ error?: any;
+}
+/**
+ * Runs the specified method of the given plugins, in order, until one of them returns a successful result.
+ * Each method can return a synchronous value, a Promise, or call an error-first callback.
+ * If the promise resolves successfully, or the callback is called without an error, then the result
+ * is immediately returned and no further plugins are called.
+ * If the promise rejects, or the callback is called with an error, then the next plugin is called.
+ * If ALL plugins fail, then the last error is thrown.
+ */
+export declare function run<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(plugins: Plugin[], method: keyof Plugin | keyof ResolverOptions<S>, file: FileInfo, $refs: $Refs<S, O>): Promise<PluginResult<S, O>>;
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/plugins.js b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/plugins.js
new file mode 100644
index 00000000..cb9fda83
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/plugins.js
@@ -0,0 +1,132 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.all = all;
+exports.filter = filter;
+exports.sort = sort;
+exports.run = run;
+/**
+ * Returns the given plugins as an array, rather than an object map.
+ * All other methods in this module expect an array of plugins rather than an object map.
+ *
+ * @returns
+ */
+function all(plugins) {
+ return Object.keys(plugins || {})
+ .filter((key) => {
+ return typeof plugins[key] === "object";
+ })
+ .map((key) => {
+ plugins[key].name = key;
+ return plugins[key];
+ });
+}
+/**
+ * Filters the given plugins, returning only the ones that return `true` for the given method.
+ */
+function filter(plugins, method, file) {
+ return plugins.filter((plugin) => {
+ return !!getResult(plugin, method, file);
+ });
+}
+/**
+ * Sorts the given plugins, in place, by their `order` property.
+ */
+function sort(plugins) {
+ for (const plugin of plugins) {
+ plugin.order = plugin.order || Number.MAX_SAFE_INTEGER;
+ }
+ return plugins.sort((a, b) => {
+ return a.order - b.order;
+ });
+}
+/**
+ * Runs the specified method of the given plugins, in order, until one of them returns a successful result.
+ * Each method can return a synchronous value, a Promise, or call an error-first callback.
+ * If the promise resolves successfully, or the callback is called without an error, then the result
+ * is immediately returned and no further plugins are called.
+ * If the promise rejects, or the callback is called with an error, then the next plugin is called.
+ * If ALL plugins fail, then the last error is thrown.
+ */
+async function run(plugins, method, file, $refs) {
+ let plugin;
+ let lastError;
+ let index = 0;
+ return new Promise((resolve, reject) => {
+ runNextPlugin();
+ function runNextPlugin() {
+ plugin = plugins[index++];
+ if (!plugin) {
+ // There are no more functions, so re-throw the last error
+ return reject(lastError);
+ }
+ try {
+ // console.log(' %s', plugin.name);
+ const result = getResult(plugin, method, file, callback, $refs);
+ if (result && typeof result.then === "function") {
+ // A promise was returned
+ result.then(onSuccess, onError);
+ }
+ else if (result !== undefined) {
+ // A synchronous result was returned
+ onSuccess(result);
+ }
+ else if (index === plugins.length) {
+ throw new Error("No promise has been returned or callback has been called.");
+ }
+ }
+ catch (e) {
+ onError(e);
+ }
+ }
+ function callback(err, result) {
+ if (err) {
+ onError(err);
+ }
+ else {
+ onSuccess(result);
+ }
+ }
+ function onSuccess(result) {
+ // console.log(' success');
+ resolve({
+ plugin,
+ result,
+ });
+ }
+ function onError(error) {
+ // console.log(' %s', err.message || err);
+ lastError = {
+ plugin,
+ error,
+ };
+ runNextPlugin();
+ }
+ });
+}
+/**
+ * Returns the value of the given property.
+ * If the property is a function, then the result of the function is returned.
+ * If the value is a RegExp, then it will be tested against the file URL.
+ * If the value is an array, then it will be compared against the file extension.
+ */
+function getResult(obj, prop, file, callback, $refs) {
+ const value = obj[prop];
+ if (typeof value === "function") {
+ return value.apply(obj, [file, callback, $refs]);
+ }
+ if (!callback) {
+ // The synchronous plugin functions (canParse and canRead)
+ // allow a "shorthand" syntax, where the user can match
+ // files by RegExp or by file extension.
+ if (value instanceof RegExp) {
+ return value.test(file.url);
+ }
+ else if (typeof value === "string") {
+ return value === file.extension;
+ }
+ else if (Array.isArray(value)) {
+ return value.indexOf(file.extension) !== -1;
+ }
+ }
+ return value;
+}
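
Editor's note: run() above tries each plugin in order and accepts three result styles (synchronous value, Promise, or error-first callback). A sketch of the three equivalent read() shapes; the FileInfoLike type and the returned contents are illustrative stand-ins:

// Minimal stand-in for the FileInfo object documented earlier (illustrative).
type FileInfoLike = { url: string };

// 1. A synchronous return value
const syncRead = (file: FileInfoLike) => `# contents of ${file.url}`;

// 2. A Promise (e.g. an async function)
const promiseRead = async (file: FileInfoLike) => `# contents of ${file.url}`;

// 3. A Node-style error-first callback (plugins.run supplies the callback argument)
const callbackRead = (
  file: FileInfoLike,
  callback?: (err: Error | null, data: string | null) => any,
) => {
  callback?.(null, `# contents of ${file.url}`);
};
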
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/url.d.ts b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/url.d.ts
new file mode 100644
index 00000000..50ee503e
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/url.d.ts
@@ -0,0 +1,94 @@
+export declare const parse: (u: string | URL) => URL;
+/**
+ * Returns resolved target URL relative to a base URL in a manner similar to that of a Web browser resolving an anchor tag HREF.
+ *
+ * @returns
+ */
+export declare function resolve(from: string, to: string): string;
+/**
+ * Returns the current working directory (in Node) or the current page URL (in browsers).
+ *
+ * @returns
+ */
+export declare function cwd(): string;
+/**
+ * Returns the protocol of the given URL, or `undefined` if it has no protocol.
+ *
+ * @param path
+ * @returns
+ */
+export declare function getProtocol(path: string | undefined): string | undefined;
+/**
+ * Returns the lowercased file extension of the given URL,
+ * or an empty string if it has no extension.
+ *
+ * @param path
+ * @returns
+ */
+export declare function getExtension(path: any): any;
+/**
+ * Removes the query, if any, from the given path.
+ *
+ * @param path
+ * @returns
+ */
+export declare function stripQuery(path: any): any;
+/**
+ * Returns the hash (URL fragment), of the given path.
+ * If there is no hash, then the root hash ("#") is returned.
+ *
+ * @param path
+ * @returns
+ */
+export declare function getHash(path: undefined | string): string;
+/**
+ * Removes the hash (URL fragment), if any, from the given path.
+ *
+ * @param path
+ * @returns
+ */
+export declare function stripHash(path?: string | undefined): string;
+/**
+ * Determines whether the given path is an HTTP(S) URL.
+ *
+ * @param path
+ * @returns
+ */
+export declare function isHttp(path: string): boolean;
+/**
+ * Determines whether the given path is a filesystem path.
+ * This includes "file://" URLs.
+ *
+ * @param path
+ * @returns
+ */
+export declare function isFileSystemPath(path: string | undefined): boolean;
+/**
+ * Converts a filesystem path to a properly-encoded URL.
+ *
+ * This is intended to handle situations where JSON Schema $Ref Parser is called
+ * with a filesystem path that contains characters which are not allowed in URLs.
+ *
+ * @example
+ * The following filesystem paths would be converted to the following URLs:
+ *
+ * <"!@#$%^&*+=?'>.json ==> %3C%22!@%23$%25%5E&*+=%3F\'%3E.json
+ * C:\\My Documents\\File (1).json ==> C:/My%20Documents/File%20(1).json
+ * file://Project #42/file.json ==> file://Project%20%2342/file.json
+ *
+ * @param path
+ * @returns
+ */
+export declare function fromFileSystemPath(path: string): string;
+/**
+ * Converts a URL to a local filesystem path.
+ */
+export declare function toFileSystemPath(path: string | undefined, keepFileProtocol?: boolean): string;
+/**
+ * Converts a $ref pointer to a valid JSON Path.
+ *
+ * @param pointer
+ * @returns
+ */
+export declare function safePointerToPath(pointer: any): any;
+export declare function relative(from: string, to: string): string;
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/url.js b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/url.js
new file mode 100644
index 00000000..36535c4d
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/lib/util/url.js
@@ -0,0 +1,319 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.parse = void 0;
+exports.resolve = resolve;
+exports.cwd = cwd;
+exports.getProtocol = getProtocol;
+exports.getExtension = getExtension;
+exports.stripQuery = stripQuery;
+exports.getHash = getHash;
+exports.stripHash = stripHash;
+exports.isHttp = isHttp;
+exports.isFileSystemPath = isFileSystemPath;
+exports.fromFileSystemPath = fromFileSystemPath;
+exports.toFileSystemPath = toFileSystemPath;
+exports.safePointerToPath = safePointerToPath;
+exports.relative = relative;
+const convert_path_to_posix_1 = __importDefault(require("./convert-path-to-posix"));
+const path_1 = __importStar(require("path"));
+const forwardSlashPattern = /\//g;
+const protocolPattern = /^(\w{2,}):\/\//i;
+const jsonPointerSlash = /~1/g;
+const jsonPointerTilde = /~0/g;
+const path_2 = require("path");
+const is_windows_1 = require("./is-windows");
+// RegExp patterns to URL-encode special characters in local filesystem paths
+const urlEncodePatterns = [
+ [/\?/g, "%3F"],
+ [/#/g, "%23"],
+];
+// RegExp patterns to URL-decode special characters for local filesystem paths
+const urlDecodePatterns = [/%23/g, "#", /%24/g, "$", /%26/g, "&", /%2C/g, ",", /%40/g, "@"];
+const parse = (u) => new URL(u);
+exports.parse = parse;
+/**
+ * Returns resolved target URL relative to a base URL in a manner similar to that of a Web browser resolving an anchor tag HREF.
+ *
+ * @returns
+ */
+function resolve(from, to) {
+ const fromUrl = new URL((0, convert_path_to_posix_1.default)(from), "resolve://");
+ const resolvedUrl = new URL((0, convert_path_to_posix_1.default)(to), fromUrl);
+ const endSpaces = to.match(/(\s*)$/)?.[1] || "";
+ if (resolvedUrl.protocol === "resolve:") {
+ // `from` is a relative URL.
+ const { pathname, search, hash } = resolvedUrl;
+ return pathname + search + hash + endSpaces;
+ }
+ return resolvedUrl.toString() + endSpaces;
+}
+/**
+ * Returns the current working directory (in Node) or the current page URL (in browsers).
+ *
+ * @returns
+ */
+function cwd() {
+ if (typeof window !== "undefined") {
+ return location.href;
+ }
+ const path = process.cwd();
+ const lastChar = path.slice(-1);
+ if (lastChar === "/" || lastChar === "\\") {
+ return path;
+ }
+ else {
+ return path + "/";
+ }
+}
+/**
+ * Returns the protocol of the given URL, or `undefined` if it has no protocol.
+ *
+ * @param path
+ * @returns
+ */
+function getProtocol(path) {
+ const match = protocolPattern.exec(path || "");
+ if (match) {
+ return match[1].toLowerCase();
+ }
+ return undefined;
+}
+/**
+ * Returns the lowercased file extension of the given URL,
+ * or an empty string if it has no extension.
+ *
+ * @param path
+ * @returns
+ */
+function getExtension(path) {
+ const lastDot = path.lastIndexOf(".");
+ if (lastDot >= 0) {
+ return stripQuery(path.substr(lastDot).toLowerCase());
+ }
+ return "";
+}
+/**
+ * Removes the query, if any, from the given path.
+ *
+ * @param path
+ * @returns
+ */
+function stripQuery(path) {
+ const queryIndex = path.indexOf("?");
+ if (queryIndex >= 0) {
+ path = path.substr(0, queryIndex);
+ }
+ return path;
+}
+/**
+ * Returns the hash (URL fragment), of the given path.
+ * If there is no hash, then the root hash ("#") is returned.
+ *
+ * @param path
+ * @returns
+ */
+function getHash(path) {
+ if (!path) {
+ return "#";
+ }
+ const hashIndex = path.indexOf("#");
+ if (hashIndex >= 0) {
+ return path.substring(hashIndex);
+ }
+ return "#";
+}
+/**
+ * Removes the hash (URL fragment), if any, from the given path.
+ *
+ * @param path
+ * @returns
+ */
+function stripHash(path) {
+ if (!path) {
+ return "";
+ }
+ const hashIndex = path.indexOf("#");
+ if (hashIndex >= 0) {
+ path = path.substring(0, hashIndex);
+ }
+ return path;
+}
+/**
+ * Determines whether the given path is an HTTP(S) URL.
+ *
+ * @param path
+ * @returns
+ */
+function isHttp(path) {
+ const protocol = getProtocol(path);
+ if (protocol === "http" || protocol === "https") {
+ return true;
+ }
+ else if (protocol === undefined) {
+ // There is no protocol. If we're running in a browser, then assume it's HTTP.
+ return typeof window !== "undefined";
+ }
+ else {
+ // It's some other protocol, such as "ftp://", "mongodb://", etc.
+ return false;
+ }
+}
+/**
+ * Determines whether the given path is a filesystem path.
+ * This includes "file://" URLs.
+ *
+ * @param path
+ * @returns
+ */
+function isFileSystemPath(path) {
+ // @ts-ignore
+ if (typeof window !== "undefined" || (typeof process !== "undefined" && process.browser)) {
+ // We're running in a browser, so assume that all paths are URLs.
+ // This way, even relative paths will be treated as URLs rather than as filesystem paths
+ return false;
+ }
+ const protocol = getProtocol(path);
+ return protocol === undefined || protocol === "file";
+}
+/**
+ * Converts a filesystem path to a properly-encoded URL.
+ *
+ * This is intended to handle situations where JSON Schema $Ref Parser is called
+ * with a filesystem path that contains characters which are not allowed in URLs.
+ *
+ * @example
+ * The following filesystem paths would be converted to the following URLs:
+ *
+ * <"!@#$%^&*+=?'>.json ==> %3C%22!@%23$%25%5E&*+=%3F\'%3E.json
+ * C:\\My Documents\\File (1).json ==> C:/My%20Documents/File%20(1).json
+ * file://Project #42/file.json ==> file://Project%20%2342/file.json
+ *
+ * @param path
+ * @returns
+ */
+function fromFileSystemPath(path) {
+ // Step 1: On Windows, replace backslashes with forward slashes,
+ // rather than encoding them as "%5C"
+ if ((0, is_windows_1.isWindows)()) {
+ const projectDir = cwd();
+ const upperPath = path.toUpperCase();
+ const projectDirPosixPath = (0, convert_path_to_posix_1.default)(projectDir);
+ const posixUpper = projectDirPosixPath.toUpperCase();
+ const hasProjectDir = upperPath.includes(posixUpper);
+ const hasProjectUri = upperPath.includes(posixUpper);
+ const isAbsolutePath = path_1.win32?.isAbsolute(path) ||
+ path.startsWith("http://") ||
+ path.startsWith("https://") ||
+ path.startsWith("file://");
+ if (!(hasProjectDir || hasProjectUri || isAbsolutePath) && !projectDir.startsWith("http")) {
+ path = (0, path_2.join)(projectDir, path);
+ }
+ path = (0, convert_path_to_posix_1.default)(path);
+ }
+ // Step 2: `encodeURI` will take care of MOST characters
+ path = encodeURI(path);
+ // Step 3: Manually encode characters that are not encoded by `encodeURI`.
+ // This includes characters such as "#" and "?", which have special meaning in URLs,
+ // but are just normal characters in a filesystem path.
+ for (const pattern of urlEncodePatterns) {
+ path = path.replace(pattern[0], pattern[1]);
+ }
+ return path;
+}
+/**
+ * Converts a URL to a local filesystem path.
+ */
+function toFileSystemPath(path, keepFileProtocol) {
+ // Step 1: `decodeURI` will decode characters such as Cyrillic characters, spaces, etc.
+ path = decodeURI(path);
+ // Step 2: Manually decode characters that are not decoded by `decodeURI`.
+ // This includes characters such as "#" and "?", which have special meaning in URLs,
+ // but are just normal characters in a filesystem path.
+ for (let i = 0; i < urlDecodePatterns.length; i += 2) {
+ path = path.replace(urlDecodePatterns[i], urlDecodePatterns[i + 1]);
+ }
+ // Step 3: If it's a "file://" URL, then format it consistently
+ // or convert it to a local filesystem path
+ let isFileUrl = path.substr(0, 7).toLowerCase() === "file://";
+ if (isFileUrl) {
+ // Strip-off the protocol, and the initial "/", if there is one
+ path = path[7] === "/" ? path.substr(8) : path.substr(7);
+ // insert a colon (":") after the drive letter on Windows
+ if ((0, is_windows_1.isWindows)() && path[1] === "/") {
+ path = path[0] + ":" + path.substr(1);
+ }
+ if (keepFileProtocol) {
+ // Return the consistently-formatted "file://" URL
+ path = "file:///" + path;
+ }
+ else {
+ // Convert the "file://" URL to a local filesystem path.
+ // On Windows, it will start with something like "C:/".
+ // On Posix, it will start with "/"
+ isFileUrl = false;
+ path = (0, is_windows_1.isWindows)() ? path : "/" + path;
+ }
+ }
+ // Step 4: Normalize Windows paths (unless it's a "file://" URL)
+ if ((0, is_windows_1.isWindows)() && !isFileUrl) {
+ // Replace forward slashes with backslashes
+ path = path.replace(forwardSlashPattern, "\\");
+ // Capitalize the drive letter
+ if (path.substr(1, 2) === ":\\") {
+ path = path[0].toUpperCase() + path.substr(1);
+ }
+ }
+ return path;
+}
+/**
+ * Converts a $ref pointer to a valid JSON Path.
+ *
+ * @param pointer
+ * @returns
+ */
+function safePointerToPath(pointer) {
+ if (pointer.length <= 1 || pointer[0] !== "#" || pointer[1] !== "/") {
+ return [];
+ }
+ return pointer
+ .slice(2)
+ .split("/")
+ .map((value) => {
+ return decodeURIComponent(value).replace(jsonPointerSlash, "/").replace(jsonPointerTilde, "~");
+ });
+}
+function relative(from, to) {
+ if (!isFileSystemPath(from) || !isFileSystemPath(to)) {
+ return resolve(from, to);
+ }
+ const fromDir = path_1.default.dirname(stripHash(from));
+ const toPath = stripHash(to);
+ const result = path_1.default.relative(fromDir, toPath);
+ return result + getHash(to);
+}
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/vite.config.d.ts b/node_modules/@apidevtools/json-schema-ref-parser/dist/vite.config.d.ts
new file mode 100644
index 00000000..340562af
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/vite.config.d.ts
@@ -0,0 +1,2 @@
+declare const _default: import("vite").UserConfig;
+export default _default;
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/dist/vite.config.js b/node_modules/@apidevtools/json-schema-ref-parser/dist/vite.config.js
new file mode 100644
index 00000000..7453e635
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/dist/vite.config.js
@@ -0,0 +1,18 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const config_1 = require("vitest/config");
+const isBrowser = process.env.BROWSER === "true";
+exports.default = (0, config_1.defineConfig)({
+ test: {
+ environment: isBrowser ? "jsdom" : "node",
+ dir: "test",
+ exclude: ["**/__IGNORED__/**"],
+ watch: false,
+ globalSetup: isBrowser ? ["./test/fixtures/server.ts"] : undefined,
+ testTimeout: 5000,
+ globals: true,
+ passWithNoTests: true,
+ reporters: ["verbose"],
+ coverage: { reporter: ["lcov", "html", "text"] },
+ },
+});
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/lib/bundle.ts b/node_modules/@apidevtools/json-schema-ref-parser/lib/bundle.ts
new file mode 100644
index 00000000..48068082
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/lib/bundle.ts
@@ -0,0 +1,299 @@
+import $Ref from "./ref.js";
+import Pointer from "./pointer.js";
+import * as url from "./util/url.js";
+import type $Refs from "./refs.js";
+import type $RefParser from "./index";
+import type { ParserOptions } from "./index";
+import type { JSONSchema } from "./index";
+
+export interface InventoryEntry {
+ $ref: any;
+ parent: any;
+ key: any;
+ pathFromRoot: any;
+ depth: any;
+ file: any;
+ hash: any;
+ value: any;
+ circular: any;
+ extended: any;
+ external: any;
+ indirections: any;
+}
+/**
+ * Bundles all external JSON references into the main JSON schema, thus resulting in a schema that
+ * only has *internal* references, not any *external* references.
+ * This method mutates the JSON schema object, adding new references and re-mapping existing ones.
+ *
+ * @param parser
+ * @param options
+ */
+function bundle<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
+ parser: $RefParser<S, O>,
+ options: O,
+) {
+ // console.log('Bundling $ref pointers in %s', parser.$refs._root$Ref.path);
+
+ // Build an inventory of all $ref pointers in the JSON Schema
+ const inventory: InventoryEntry[] = [];
+ crawl(parser, "schema", parser.$refs._root$Ref.path + "#", "#", 0, inventory, parser.$refs, options);
+
+ // Remap all $ref pointers
+ remap(inventory);
+}
+
+/**
+ * Recursively crawls the given value, and inventories all JSON references.
+ *
+ * @param parent - The object containing the value to crawl. If the value is not an object or array, it will be ignored.
+ * @param key - The property key of `parent` to be crawled
+ * @param path - The full path of the property being crawled, possibly with a JSON Pointer in the hash
+ * @param pathFromRoot - The path of the property being crawled, from the schema root
+ * @param indirections
+ * @param inventory - An array of already-inventoried $ref pointers
+ * @param $refs
+ * @param options
+ */
+function crawl<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
+ parent: object | $RefParser<S, O>,
+ key: string | null,
+ path: string,
+ pathFromRoot: string,
+ indirections: number,
+ inventory: InventoryEntry[],
+ $refs: $Refs<S, O>,
+ options: O,
+) {
+ const obj = key === null ? parent : parent[key as keyof typeof parent];
+
+ if (obj && typeof obj === "object" && !ArrayBuffer.isView(obj)) {
+ if ($Ref.isAllowed$Ref(obj)) {
+ inventory$Ref(parent, key, path, pathFromRoot, indirections, inventory, $refs, options);
+ } else {
+ // Crawl the object in a specific order that's optimized for bundling.
+ // This is important because it determines how `pathFromRoot` gets built,
+ // which later determines which keys get dereferenced and which ones get remapped
+ const keys = Object.keys(obj).sort((a, b) => {
+ // Most people will expect references to be bundled into the "definitions" property,
+ // so we always crawl that property first, if it exists.
+ if (a === "definitions") {
+ return -1;
+ } else if (b === "definitions") {
+ return 1;
+ } else {
+ // Otherwise, crawl the keys based on their length.
+ // This produces the shortest possible bundled references
+ return a.length - b.length;
+ }
+ }) as (keyof typeof obj)[];
+
+ for (const key of keys) {
+ const keyPath = Pointer.join(path, key);
+ const keyPathFromRoot = Pointer.join(pathFromRoot, key);
+ const value = obj[key];
+
+ if ($Ref.isAllowed$Ref(value)) {
+ inventory$Ref(obj, key, path, keyPathFromRoot, indirections, inventory, $refs, options);
+ } else {
+ crawl(obj, key, keyPath, keyPathFromRoot, indirections, inventory, $refs, options);
+ }
+ }
+ }
+ }
+}
+
+/**
+ * Inventories the given JSON Reference (i.e. records detailed information about it so we can
+ * optimize all $refs in the schema), and then crawls the resolved value.
+ *
+ * @param $refParent - The object that contains a JSON Reference as one of its keys
+ * @param $refKey - The key in `$refParent` that is a JSON Reference
+ * @param path - The full path of the JSON Reference at `$refKey`, possibly with a JSON Pointer in the hash
+ * @param indirections - unknown
+ * @param pathFromRoot - The path of the JSON Reference at `$refKey`, from the schema root
+ * @param inventory - An array of already-inventoried $ref pointers
+ * @param $refs
+ * @param options
+ */
+function inventory$Ref<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
+ $refParent: any,
+ $refKey: string | null,
+ path: string,
+ pathFromRoot: string,
+ indirections: number,
+ inventory: InventoryEntry[],
+ $refs: $Refs<S, O>,
+ options: O,
+) {
+ const $ref = $refKey === null ? $refParent : $refParent[$refKey];
+ const $refPath = url.resolve(path, $ref.$ref);
+ const pointer = $refs._resolve($refPath, pathFromRoot, options);
+ if (pointer === null) {
+ return;
+ }
+ const parsed = Pointer.parse(pathFromRoot);
+ const depth = parsed.length;
+ const file = url.stripHash(pointer.path);
+ const hash = url.getHash(pointer.path);
+ const external = file !== $refs._root$Ref.path;
+ const extended = $Ref.isExtended$Ref($ref);
+ indirections += pointer.indirections;
+
+ const existingEntry = findInInventory(inventory, $refParent, $refKey);
+ if (existingEntry) {
+ // This $Ref has already been inventoried, so we don't need to process it again
+ if (depth < existingEntry.depth || indirections < existingEntry.indirections) {
+ removeFromInventory(inventory, existingEntry);
+ } else {
+ return;
+ }
+ }
+
+ inventory.push({
+ $ref, // The JSON Reference (e.g. {$ref: string})
+ parent: $refParent, // The object that contains this $ref pointer
+ key: $refKey, // The key in `parent` that is the $ref pointer
+ pathFromRoot, // The path to the $ref pointer, from the JSON Schema root
+ depth, // How far from the JSON Schema root is this $ref pointer?
+ file, // The file that the $ref pointer resolves to
+ hash, // The hash within `file` that the $ref pointer resolves to
+ value: pointer.value, // The resolved value of the $ref pointer
+ circular: pointer.circular, // Is this $ref pointer DIRECTLY circular? (i.e. it references itself)
+ extended, // Does this $ref extend its resolved value? (i.e. it has extra properties, in addition to "$ref")
+ external, // Does this $ref pointer point to a file other than the main JSON Schema file?
+ indirections, // The number of indirect references that were traversed to resolve the value
+ });
+
+ // Recursively crawl the resolved value
+ if (!existingEntry || external) {
+ crawl(pointer.value, null, pointer.path, pathFromRoot, indirections + 1, inventory, $refs, options);
+ }
+}
+
+/**
+ * Re-maps every $ref pointer, so that they're all relative to the root of the JSON Schema.
+ * Each referenced value is dereferenced EXACTLY ONCE. All subsequent references to the same
+ * value are re-mapped to point to the first reference.
+ *
+ * @example: {
+ * first: { $ref: somefile.json#/some/part },
+ * second: { $ref: somefile.json#/another/part },
+ * third: { $ref: somefile.json },
+ * fourth: { $ref: somefile.json#/some/part/sub/part }
+ * }
+ *
+ * In this example, there are four references to the same file, but since the third reference points
+ * to the ENTIRE file, that's the only one we need to dereference. The other three can just be
+ * remapped to point inside the third one.
+ *
+ * On the other hand, if the third reference DIDN'T exist, then the first and second would both need
+ * to be dereferenced, since they point to different parts of the file. The fourth reference does NOT
+ * need to be dereferenced, because it can be remapped to point inside the first one.
+ *
+ * @param inventory
+ */
+function remap(inventory: InventoryEntry[]) {
+ // Group & sort all the $ref pointers, so they're in the order that we need to dereference/remap them
+ inventory.sort((a: InventoryEntry, b: InventoryEntry) => {
+ if (a.file !== b.file) {
+ // Group all the $refs that point to the same file
+ return a.file < b.file ? -1 : +1;
+ } else if (a.hash !== b.hash) {
+ // Group all the $refs that point to the same part of the file
+ return a.hash < b.hash ? -1 : +1;
+ } else if (a.circular !== b.circular) {
+ // If the $ref points to itself, then sort it higher than other $refs that point to this $ref
+ return a.circular ? -1 : +1;
+ } else if (a.extended !== b.extended) {
+ // If the $ref extends the resolved value, then sort it lower than other $refs that don't extend the value
+ return a.extended ? +1 : -1;
+ } else if (a.indirections !== b.indirections) {
+ // Sort direct references higher than indirect references
+ return a.indirections - b.indirections;
+ } else if (a.depth !== b.depth) {
+ // Sort $refs by how close they are to the JSON Schema root
+ return a.depth - b.depth;
+ } else {
+ // Determine how far each $ref is from the "definitions" property.
+ // Most people will expect references to be bundled into the "definitions" property if possible.
+ const aDefinitionsIndex = a.pathFromRoot.lastIndexOf("/definitions");
+ const bDefinitionsIndex = b.pathFromRoot.lastIndexOf("/definitions");
+
+ if (aDefinitionsIndex !== bDefinitionsIndex) {
+ // Give higher priority to the $ref that's closer to the "definitions" property
+ return bDefinitionsIndex - aDefinitionsIndex;
+ } else {
+ // All else is equal, so use the shorter path, which will produce the shortest possible reference
+ return a.pathFromRoot.length - b.pathFromRoot.length;
+ }
+ }
+ });
+
+ let file, hash, pathFromRoot;
+ for (const entry of inventory) {
+ // console.log('Re-mapping $ref pointer "%s" at %s', entry.$ref.$ref, entry.pathFromRoot);
+
+ if (!entry.external) {
+ // This $ref already resolves to the main JSON Schema file
+ entry.$ref.$ref = entry.hash;
+ } else if (entry.file === file && entry.hash === hash) {
+ // This $ref points to the same value as the previous $ref, so remap it to the same path
+ entry.$ref.$ref = pathFromRoot;
+ } else if (entry.file === file && entry.hash.indexOf(hash + "/") === 0) {
+ // This $ref points to a sub-value of the previous $ref, so remap it beneath that path
+ entry.$ref.$ref = Pointer.join(pathFromRoot, Pointer.parse(entry.hash.replace(hash, "#")));
+ } else {
+ // We've moved to a new file or new hash
+ file = entry.file;
+ hash = entry.hash;
+ pathFromRoot = entry.pathFromRoot;
+
+ // This is the first $ref to point to this value, so dereference the value.
+ // Any other $refs that point to the same value will point to this $ref instead
+ entry.$ref = entry.parent[entry.key] = $Ref.dereference(entry.$ref, entry.value);
+
+ if (entry.circular) {
+ // This $ref points to itself
+ entry.$ref.$ref = entry.pathFromRoot;
+ }
+ }
+ }
+
+ // We want to ensure that any $refs that point to another $ref are remapped to the final value (this extra pass is currently disabled):
+ // let hadChange = true;
+ // while (hadChange) {
+ // hadChange = false;
+ // for (const entry of inventory) {
+ // if (entry.$ref && typeof entry.$ref === "object" && "$ref" in entry.$ref) {
+ // const resolved = inventory.find((e: InventoryEntry) => e.pathFromRoot === entry.$ref.$ref);
+ // if (resolved) {
+ // const resolvedPointsToAnotherRef =
+ // resolved.$ref && typeof resolved.$ref === "object" && "$ref" in resolved.$ref;
+ // if (resolvedPointsToAnotherRef && entry.$ref.$ref !== resolved.$ref.$ref) {
+ // // console.log('Re-mapping $ref pointer "%s" at %s', entry.$ref.$ref, entry.pathFromRoot);
+ // entry.$ref.$ref = resolved.$ref.$ref;
+ // hadChange = true;
+ // }
+ // }
+ // }
+ // }
+ // }
+}
+
+/**
+ * Finds the inventory entry (if any) whose $ref lives at the given parent object and key.
+ *
+ * @returns the matching entry, or `undefined` if that $ref has not been inventoried yet
+ */
+function findInInventory(inventory: InventoryEntry[], $refParent: any, $refKey: any) {
+ for (const existingEntry of inventory) {
+ if (existingEntry && existingEntry.parent === $refParent && existingEntry.key === $refKey) {
+ return existingEntry;
+ }
+ }
+ return undefined;
+}
+
+function removeFromInventory(inventory: InventoryEntry[], entry: any) {
+ const index = inventory.indexOf(entry);
+ inventory.splice(index, 1);
+}
+export default bundle;
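Note on the remap() logic above: its behaviour can be hard to picture from the doc comment alone, so here is a minimal before/after sketch (not part of the diff; it reuses the hypothetical somefile.json paths from that comment and assumes the four $refs sit at the root of the schema being bundled):

    // Before bundling:
    const schema = {
      first: { $ref: "somefile.json#/some/part" },
      second: { $ref: "somefile.json#/another/part" },
      third: { $ref: "somefile.json" },
      fourth: { $ref: "somefile.json#/some/part/sub/part" },
    };

    // After bundling, only `third` is dereferenced (the whole file is inlined in its place).
    // The other three pointers are remapped to hash paths inside it:
    //
    //   first:  { $ref: "#/third/some/part" }
    //   second: { $ref: "#/third/another/part" }
    //   fourth: { $ref: "#/third/some/part/sub/part" }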
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/lib/dereference.ts b/node_modules/@apidevtools/json-schema-ref-parser/lib/dereference.ts
new file mode 100644
index 00000000..b56b58bb
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/lib/dereference.ts
@@ -0,0 +1,288 @@
+import $Ref from "./ref.js";
+import Pointer from "./pointer.js";
+import { ono } from "@jsdevtools/ono";
+import * as url from "./util/url.js";
+import type $Refs from "./refs.js";
+import type { DereferenceOptions, ParserOptions } from "./options.js";
+import type { JSONSchema } from "./types";
+import type $RefParser from "./index";
+import { TimeoutError } from "./util/errors";
+
+export default dereference;
+
+/**
+ * Crawls the JSON schema, finds all JSON references, and dereferences them.
+ * This method mutates the JSON schema object, replacing JSON references with their resolved value.
+ *
+ * @param parser
+ * @param options
+ */
+function dereference<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
+ parser: $RefParser<S, O>,
+ options: O,
+) {
+ const start = Date.now();
+ // console.log('Dereferencing $ref pointers in %s', parser.$refs._root$Ref.path);
+ const dereferenced = crawl(
+ parser.schema,
+ parser.$refs._root$Ref.path!,
+ "#",
+ new Set(),
+ new Set(),
+ new Map(),
+ parser.$refs,
+ options,
+ start,
+ );
+ parser.$refs.circular = dereferenced.circular;
+ parser.schema = dereferenced.value;
+}
+
+/**
+ * Recursively crawls the given value, and dereferences any JSON references.
+ *
+ * @param obj - The value to crawl. If it's not an object or array, it will be ignored.
+ * @param path - The full path of `obj`, possibly with a JSON Pointer in the hash
+ * @param pathFromRoot - The path of `obj` from the schema root
+ * @param parents - A set of the parent objects that have already been dereferenced
+ * @param processedObjects - A set of all the objects that have already been processed
+ * @param dereferencedCache - A map of all the dereferenced objects
+ * @param $refs
+ * @param options
+ * @param startTime - The time when the dereferencing started
+ * @returns
+ */
+function crawl<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
+ obj: any,
+ path: string,
+ pathFromRoot: string,
+ parents: Set<any>,
+ processedObjects: Set<any>,
+ dereferencedCache: any,
+ $refs: $Refs<S, O>,
+ options: O,
+ startTime: number,
+) {
+ let dereferenced;
+ const result = {
+ value: obj,
+ circular: false,
+ };
+
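+ // Abort early with a TimeoutError if this dereference pass has already exceeded options.timeoutMs.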
+ if (options && options.timeoutMs) {
+ if (Date.now() - startTime > options.timeoutMs) {
+ throw new TimeoutError(options.timeoutMs);
+ }
+ }
+ const derefOptions = (options.dereference || {}) as DereferenceOptions;
+ const isExcludedPath = derefOptions.excludedPathMatcher || (() => false);
+
+ if (derefOptions?.circular === "ignore" || !processedObjects.has(obj)) {
+ if (obj && typeof obj === "object" && !ArrayBuffer.isView(obj) && !isExcludedPath(pathFromRoot)) {
+ parents.add(obj);
+ processedObjects.add(obj);
+
+ if ($Ref.isAllowed$Ref(obj, options)) {
+ dereferenced = dereference$Ref(
+ obj,
+ path,
+ pathFromRoot,
+ parents,
+ processedObjects,
+ dereferencedCache,
+ $refs,
+ options,
+ startTime,
+ );
+ result.circular = dereferenced.circular;
+ result.value = dereferenced.value;
+ } else {
+ for (const key of Object.keys(obj)) {
+ const keyPath = Pointer.join(path, key);
+ const keyPathFromRoot = Pointer.join(pathFromRoot, key);
+
+ if (isExcludedPath(keyPathFromRoot)) {
+ continue;
+ }
+
+ const value = obj[key];
+ let circular = false;
+
+ if ($Ref.isAllowed$Ref(value, options)) {
+ dereferenced = dereference$Ref(
+ value,
+ keyPath,
+ keyPathFromRoot,
+ parents,
+ processedObjects,
+ dereferencedCache,
+ $refs,
+ options,
+ startTime,
+ );
+ circular = dereferenced.circular;
+ // Avoid pointless mutations; breaks frozen objects to no profit
+ if (obj[key] !== dereferenced.value) {
+ obj[key] = dereferenced.value;
+ derefOptions?.onDereference?.(value.$ref, obj[key], obj, key);
+ }
+ } else {
+ if (!parents.has(value)) {
+ dereferenced = crawl(
+ value,
+ keyPath,
+ keyPathFromRoot,
+ parents,
+ processedObjects,
+ dereferencedCache,
+ $refs,
+ options,
+ startTime,
+ );
+ circular = dereferenced.circular;
+ // Avoid pointless mutations; breaks frozen objects to no profit
+ if (obj[key] !== dereferenced.value) {
+ obj[key] = dereferenced.value;
+ }
+ } else {
+ circular = foundCircularReference(keyPath, $refs, options);
+ }
+ }
+
+ // Set the "isCircular" flag if this or any other property is circular
+ result.circular = result.circular || circular;
+ }
+ }
+
+ parents.delete(obj);
+ }
+ }
+
+ return result;
+}
+
+/**
+ * Dereferences the given JSON Reference, and then crawls the resulting value.
+ *
+ * @param $ref - The JSON Reference to resolve
+ * @param path - The full path of `$ref`, possibly with a JSON Pointer in the hash
+ * @param pathFromRoot - The path of `$ref` from the schema root
+ * @param parents - A set of the parent objects that have already been dereferenced
+ * @param processedObjects - A set of all the objects that have already been processed
+ * @param dereferencedCache - A map of all the dereferenced objects
+ * @param $refs
+ * @param options
+ * @returns
+ */
+function dereference$Ref<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
+ $ref: any,
+ path: string,
+ pathFromRoot: string,
+ parents: Set<any>,
+ processedObjects: any,
+ dereferencedCache: any,
+ $refs: $Refs<S, O>,
+ options: O,
+ startTime: number,
+) {
+ const isExternalRef = $Ref.isExternal$Ref($ref);
+ const shouldResolveOnCwd = isExternalRef && options?.dereference?.externalReferenceResolution === "root";
+ const $refPath = url.resolve(shouldResolveOnCwd ? url.cwd() : path, $ref.$ref);
+
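+ // Reuse a previously dereferenced (non-circular) value for this target when possible. If this
+ // $ref carries extra sibling properties, merge the ones the cached value doesn't already have
+ // onto a copy, so the cache entry itself is never mutated.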
+ const cache = dereferencedCache.get($refPath);
+ if (cache && !cache.circular) {
+ const refKeys = Object.keys($ref);
+ if (refKeys.length > 1) {
+ const extraKeys = {};
+ for (const key of refKeys) {
+ if (key !== "$ref" && !(key in cache.value)) {
+ // @ts-expect-error TS(7053): Element implicitly has an 'any' type because expre... Remove this comment to see the full error message
+ extraKeys[key] = $ref[key];
+ }
+ }
+ return {
+ circular: cache.circular,
+ value: Object.assign({}, cache.value, extraKeys),
+ };
+ }
+
+ return cache;
+ }
+
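+ // Resolve the $ref against the known files/pointers. _resolve() returns null when the target
+ // can't be resolved (e.g. when resolution errors are being tolerated), in which case we return
+ // a null value instead of crawling any further.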
+ const pointer = $refs._resolve($refPath, path, options);
+
+ if (pointer === null) {
+ return {
+ circular: false,
+ value: null,
+ };
+ }
+
+ // Check for circular references
+ const directCircular = pointer.circular;
+ let circular = directCircular || parents.has(pointer.value);
+ if (circular) {
+ foundCircularReference(path, $refs, options);
+ }
+
+ // Dereference the JSON reference
+ let dereferencedValue = $Ref.dereference($ref, pointer.value);
+
+ // Crawl the dereferenced value (unless it's circular)
+ if (!circular) {
+ // Determine if the dereferenced value is circular
+ const dereferenced = crawl(
+ dereferencedValue,
+ pointer.path,
+ pathFromRoot,
+ parents,
+ processedObjects,
+ dereferencedCache,
+ $refs,
+ options,
+ startTime,
+ );
+ circular = dereferenced.circular;
+ dereferencedValue = dereferenced.value;
+ }
+
+ if (circular && !directCircular && options.dereference?.circular === "ignore") {
+ // The user has chosen to "ignore" circular references, so don't change the value
+ dereferencedValue = $ref;
+ }
+
+ if (directCircular) {
+ // The pointer is a DIRECT circular reference (i.e. it references itself).
+ // So replace the $ref path with the absolute path from the JSON Schema root
+ dereferencedValue.$ref = pathFromRoot;
+ }
+
+ const dereferencedObject = {
+ circular,
+ value: dereferencedValue,
+ };
+
+ // only cache if no extra properties than $ref
+ if (Object.keys($ref).length === 1) {
+ dereferencedCache.set($refPath, dereferencedObject);
+ }
+
+ return dereferencedObject;
+}
+
+/**
+ * Called when a circular reference is found.
+ * It sets the {@link $Refs#circular} flag, and throws an error if options.dereference.circular is false.
+ *
+ * @param keyPath - The JSON Reference path of the circular reference
+ * @param $refs
+ * @param options
+ * @returns - always returns true, to indicate that a circular reference was found
+ */
+function foundCircularReference(keyPath: any, $refs: any, options: any) {
+ $refs.circular = true;
+ if (!options.dereference.circular) {
+ throw ono.reference(`Circular $ref pointer found at ${keyPath}`);
+ }
+ return true;
+}
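Usage note for the dereference options read above (timeoutMs, dereference.circular, dereference.onDereference, dereference.excludedPathMatcher, dereference.externalReferenceResolution): the sketch below shows roughly how a caller would exercise them against this vendored copy. The option names come from the code in this hunk; the concrete values and the dereferenceSpec helper are illustrative only.

    import $RefParser from "@apidevtools/json-schema-ref-parser";

    async function dereferenceSpec(path: string) {
      const parser = new $RefParser();
      return parser.dereference(path, {
        timeoutMs: 30_000, // give up with a TimeoutError if the crawl runs longer than 30s
        dereference: {
          circular: "ignore", // leave circular $refs in place instead of throwing
          externalReferenceResolution: "root", // resolve external $refs against the cwd, not the referencing file
          excludedPathMatcher: (p: string) => p.startsWith("#/components/examples"), // subtrees to skip
          onDereference: (ref: string, value: unknown) => console.log(`resolved ${ref}`), // called per replaced $ref
        },
      });
    }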
diff --git a/node_modules/@apidevtools/json-schema-ref-parser/lib/index.ts b/node_modules/@apidevtools/json-schema-ref-parser/lib/index.ts
new file mode 100644
index 00000000..3df98356
--- /dev/null
+++ b/node_modules/@apidevtools/json-schema-ref-parser/lib/index.ts
@@ -0,0 +1,453 @@
+import $Refs from "./refs.js";
+import _parse from "./parse.js";
+import normalizeArgs from "./normalize-args.js";
+import resolveExternal from "./resolve-external.js";
+import _bundle from "./bundle.js";
+import _dereference from "./dereference.js";
+import * as url from "./util/url.js";
+import {
+ JSONParserError,
+ InvalidPointerError,
+ MissingPointerError,
+ ResolverError,
+ ParserError,
+ UnmatchedParserError,
+ UnmatchedResolverError,
+ isHandledError,
+ JSONParserErrorGroup,
+} from "./util/errors.js";
+import { ono } from "@jsdevtools/ono";
+import maybe from "./util/maybe.js";
+import type { ParserOptions } from "./options.js";
+import { getJsonSchemaRefParserDefaultOptions } from "./options.js";
+import type {
+ $RefsCallback,
+ JSONSchema,
+ SchemaCallback,
+ FileInfo,
+ Plugin,
+ ResolverOptions,
+ HTTPResolverOptions,
+} from "./types/index.js";
+
+export type RefParserSchema = string | JSONSchema;
+
+/**
+ * This class parses a JSON schema, builds a map of its JSON references and their resolved values,
+ * and provides methods for traversing, manipulating, and dereferencing those references.
+ *
+ * @class
+ */
+export class $RefParser<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>> {
+ /**
+ * The parsed (and possibly dereferenced) JSON schema object
+ *
+ * @type {object}
+ * @readonly
+ */
+ public schema: S | null = null;
+
+ /**
+ * The resolved JSON references
+ *
+ * @type {$Refs}
+ * @readonly
+ */
+ $refs = new $Refs<S, O>();
+
+ /**
+ * Parses the given JSON schema.
+ * This method does not resolve any JSON references.
+ * It just reads a single file in JSON or YAML format, and parses it as a JavaScript object.
+ *
+ * @param [path] - The file path or URL of the JSON schema
+ * @param [schema] - A JSON schema object. This object will be used instead of reading from `path`.
+ * @param [options] - Options that determine how the schema is parsed
+ * @param [callback] - An error-first callback. The second parameter is the parsed JSON schema object.
+ * @returns - The returned promise resolves with the parsed JSON schema object.
+ */
+ public parse(schema: S | string | unknown): Promise<S>;
+ public parse(schema: S | string | unknown, callback: SchemaCallback<S>): Promise<void>;
+ public parse(schema: S | string | unknown, options: O): Promise<S>;
+ public parse(schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
+ public parse(baseUrl: string, schema: S | string | unknown, options: O): Promise<S>;
+ public parse(baseUrl: string, schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
+ async parse() {
+ const args = normalizeArgs